From 46e681f4fc6877607c9cacca501c5b42795fdeef Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 5 Jul 2024 00:42:29 -0700 Subject: [PATCH 0001/1635] Improve redaction for stream error messages (#120867) --- homeassistant/components/stream/worker.py | 17 +++++++++++++---- tests/components/stream/test_worker.py | 7 +++++-- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/stream/worker.py b/homeassistant/components/stream/worker.py index 4fd9b27d02f..64eac965241 100644 --- a/homeassistant/components/stream/worker.py +++ b/homeassistant/components/stream/worker.py @@ -48,6 +48,14 @@ class StreamWorkerError(Exception): """An exception thrown while processing a stream.""" +def redact_av_error_string(err: av.AVError) -> str: + """Return an error string with credentials redacted from the url.""" + parts = [str(err.type), err.strerror] + if err.filename is not None: + parts.append(redact_credentials(err.filename)) + return ", ".join(parts) + + class StreamEndedError(StreamWorkerError): """Raised when the stream is complete, exposed for facilitating testing.""" @@ -517,8 +525,7 @@ def stream_worker( container = av.open(source, options=pyav_options, timeout=SOURCE_TIMEOUT) except av.AVError as err: raise StreamWorkerError( - f"Error opening stream ({err.type}, {err.strerror})" - f" {redact_credentials(str(source))}" + f"Error opening stream ({redact_av_error_string(err)})" ) from err try: video_stream = container.streams.video[0] @@ -593,7 +600,7 @@ def stream_worker( except av.AVError as ex: container.close() raise StreamWorkerError( - f"Error demuxing stream while finding first packet: {ex!s}" + f"Error demuxing stream while finding first packet ({redact_av_error_string(ex)})" ) from ex muxer = StreamMuxer( @@ -618,7 +625,9 @@ def stream_worker( except StopIteration as ex: raise StreamEndedError("Stream ended; no additional packets") from ex except av.AVError as ex: - raise StreamWorkerError(f"Error demuxing stream: {ex!s}") from ex + raise StreamWorkerError( + f"Error demuxing stream ({redact_av_error_string(ex)})" + ) from ex muxer.mux_packet(packet) diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index 2cb90c5ee9a..7226adc7d7e 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -772,12 +772,15 @@ async def test_worker_log( with patch("av.open") as av_open: # pylint: disable-next=c-extension-no-member - av_open.side_effect = av.error.InvalidDataError(-2, "error") + av_open.side_effect = av.error.InvalidDataError( + code=-2, message="Invalid data", filename=stream_url + ) with pytest.raises(StreamWorkerError) as err: run_worker(hass, stream, stream_url) await hass.async_block_till_done() assert ( - str(err.value) == f"Error opening stream (ERRORTYPE_-2, error) {redacted_url}" + str(err.value) + == f"Error opening stream (ERRORTYPE_-2, Invalid data, {redacted_url})" ) assert stream_url not in caplog.text From 59cf01e2525e67949c7cbfb311c8edcb6b4eb09f Mon Sep 17 00:00:00 2001 From: dougiteixeira <31328123+dougiteixeira@users.noreply.github.com> Date: Thu, 4 Jul 2024 16:36:31 -0300 Subject: [PATCH 0002/1635] Add device class translations in Random (#120890) --- homeassistant/components/random/strings.json | 90 ++++++++++++++++++++ 1 file changed, 90 insertions(+) diff --git a/homeassistant/components/random/strings.json b/homeassistant/components/random/strings.json index 0faad1d8093..98072a21fe1 100644 --- a/homeassistant/components/random/strings.json +++ 
b/homeassistant/components/random/strings.json @@ -44,5 +44,95 @@ "title": "[%key:component::random::config::step::sensor::title%]" } } + }, + + "selector": { + "binary_sensor_device_class": { + "options": { + "battery": "[%key:component::binary_sensor::entity_component::battery::name%]", + "battery_charging": "[%key:component::binary_sensor::entity_component::battery_charging::name%]", + "carbon_monoxide": "[%key:component::binary_sensor::entity_component::carbon_monoxide::name%]", + "connectivity": "[%key:component::binary_sensor::entity_component::connectivity::name%]", + "cold": "[%key:component::binary_sensor::entity_component::cold::name%]", + "door": "[%key:component::binary_sensor::entity_component::door::name%]", + "garage_door": "[%key:component::binary_sensor::entity_component::garage_door::name%]", + "gas": "[%key:component::binary_sensor::entity_component::gas::name%]", + "heat": "[%key:component::binary_sensor::entity_component::heat::name%]", + "light": "[%key:component::binary_sensor::entity_component::light::name%]", + "lock": "[%key:component::binary_sensor::entity_component::lock::name%]", + "moisture": "[%key:component::binary_sensor::entity_component::moisture::name%]", + "motion": "[%key:component::binary_sensor::entity_component::motion::name%]", + "moving": "[%key:component::binary_sensor::entity_component::moving::name%]", + "occupancy": "[%key:component::binary_sensor::entity_component::occupancy::name%]", + "opening": "[%key:component::binary_sensor::entity_component::opening::name%]", + "plug": "[%key:component::binary_sensor::entity_component::plug::name%]", + "power": "[%key:component::binary_sensor::entity_component::power::name%]", + "presence": "[%key:component::binary_sensor::entity_component::presence::name%]", + "problem": "[%key:component::binary_sensor::entity_component::problem::name%]", + "running": "[%key:component::binary_sensor::entity_component::running::name%]", + "safety": "[%key:component::binary_sensor::entity_component::safety::name%]", + "smoke": "[%key:component::binary_sensor::entity_component::smoke::name%]", + "sound": "[%key:component::binary_sensor::entity_component::sound::name%]", + "tamper": "[%key:component::binary_sensor::entity_component::tamper::name%]", + "update": "[%key:component::binary_sensor::entity_component::update::name%]", + "vibration": "[%key:component::binary_sensor::entity_component::vibration::name%]", + "window": "[%key:component::binary_sensor::entity_component::window::name%]" + } + }, + "sensor_device_class": { + "options": { + "apparent_power": "[%key:component::sensor::entity_component::apparent_power::name%]", + "aqi": "[%key:component::sensor::entity_component::aqi::name%]", + "atmospheric_pressure": "[%key:component::sensor::entity_component::atmospheric_pressure::name%]", + "battery": "[%key:component::sensor::entity_component::battery::name%]", + "carbon_dioxide": "[%key:component::sensor::entity_component::carbon_dioxide::name%]", + "carbon_monoxide": "[%key:component::sensor::entity_component::carbon_monoxide::name%]", + "conductivity": "[%key:component::sensor::entity_component::conductivity::name%]", + "current": "[%key:component::sensor::entity_component::current::name%]", + "data_rate": "[%key:component::sensor::entity_component::data_rate::name%]", + "data_size": "[%key:component::sensor::entity_component::data_size::name%]", + "date": "[%key:component::sensor::entity_component::date::name%]", + "distance": "[%key:component::sensor::entity_component::distance::name%]", + "duration": 
"[%key:component::sensor::entity_component::duration::name%]", + "energy": "[%key:component::sensor::entity_component::energy::name%]", + "energy_storage": "[%key:component::sensor::entity_component::energy_storage::name%]", + "frequency": "[%key:component::sensor::entity_component::frequency::name%]", + "gas": "[%key:component::sensor::entity_component::gas::name%]", + "humidity": "[%key:component::sensor::entity_component::humidity::name%]", + "illuminance": "[%key:component::sensor::entity_component::illuminance::name%]", + "irradiance": "[%key:component::sensor::entity_component::irradiance::name%]", + "moisture": "[%key:component::sensor::entity_component::moisture::name%]", + "monetary": "[%key:component::sensor::entity_component::monetary::name%]", + "nitrogen_dioxide": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]", + "nitrogen_monoxide": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]", + "nitrous_oxide": "[%key:component::sensor::entity_component::nitrous_oxide::name%]", + "ozone": "[%key:component::sensor::entity_component::ozone::name%]", + "ph": "[%key:component::sensor::entity_component::ph::name%]", + "pm1": "[%key:component::sensor::entity_component::pm1::name%]", + "pm10": "[%key:component::sensor::entity_component::pm10::name%]", + "pm25": "[%key:component::sensor::entity_component::pm25::name%]", + "power": "[%key:component::sensor::entity_component::power::name%]", + "power_factor": "[%key:component::sensor::entity_component::power_factor::name%]", + "precipitation": "[%key:component::sensor::entity_component::precipitation::name%]", + "precipitation_intensity": "[%key:component::sensor::entity_component::precipitation_intensity::name%]", + "pressure": "[%key:component::sensor::entity_component::pressure::name%]", + "reactive_power": "[%key:component::sensor::entity_component::reactive_power::name%]", + "signal_strength": "[%key:component::sensor::entity_component::signal_strength::name%]", + "sound_pressure": "[%key:component::sensor::entity_component::sound_pressure::name%]", + "speed": "[%key:component::sensor::entity_component::speed::name%]", + "sulphur_dioxide": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]", + "temperature": "[%key:component::sensor::entity_component::temperature::name%]", + "timestamp": "[%key:component::sensor::entity_component::timestamp::name%]", + "volatile_organic_compounds": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]", + "volatile_organic_compounds_parts": "[%key:component::sensor::entity_component::volatile_organic_compounds::name%]", + "voltage": "[%key:component::sensor::entity_component::voltage::name%]", + "volume": "[%key:component::sensor::entity_component::volume::name%]", + "volume_flow_rate": "[%key:component::sensor::entity_component::volume_flow_rate::name%]", + "volume_storage": "[%key:component::sensor::entity_component::volume_storage::name%]", + "water": "[%key:component::sensor::entity_component::water::name%]", + "weight": "[%key:component::sensor::entity_component::weight::name%]", + "wind_speed": "[%key:component::sensor::entity_component::wind_speed::name%]" + } + } } } From ba1cf84ea5e272f44a89d5bf74ec9f567a7907e3 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Thu, 4 Jul 2024 09:20:55 +0200 Subject: [PATCH 0003/1635] Fix locking/unlocking transition state in Matter lock platform (#121099) --- homeassistant/components/matter/lock.py | 30 ++++++++++++++--------- tests/components/matter/test_door_lock.py | 10 
+++----- 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 5456554a535..1cc85fa897e 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -90,6 +90,9 @@ class MatterLock(MatterEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the lock with pin if needed.""" + # optimistically signal locking to state machine + self._attr_is_locking = True + self.async_write_ha_state() code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None await self.send_device_command( @@ -98,6 +101,9 @@ class MatterLock(MatterEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock with pin if needed.""" + # optimistically signal unlocking to state machine + self._attr_is_unlocking = True + self.async_write_ha_state() code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None if self.supports_unbolt: @@ -114,6 +120,9 @@ class MatterLock(MatterEntity, LockEntity): async def async_open(self, **kwargs: Any) -> None: """Open the door latch.""" + # optimistically signal unlocking to state machine + self._attr_is_unlocking = True + self.async_write_ha_state() code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None await self.send_device_command( @@ -135,26 +144,23 @@ class MatterLock(MatterEntity, LockEntity): clusters.DoorLock.Attributes.LockState ) + # always reset the optimisically (un)locking state on state update + self._attr_is_locking = False + self._attr_is_unlocking = False + LOGGER.debug("Lock state: %s for %s", lock_state, self.entity_id) if lock_state is clusters.DoorLock.Enums.DlLockState.kLocked: self._attr_is_locked = True - self._attr_is_locking = False - self._attr_is_unlocking = False - elif lock_state is clusters.DoorLock.Enums.DlLockState.kUnlocked: + elif lock_state in ( + clusters.DoorLock.Enums.DlLockState.kUnlocked, + clusters.DoorLock.Enums.DlLockState.kUnlatched, + clusters.DoorLock.Enums.DlLockState.kNotFullyLocked, + ): self._attr_is_locked = False - self._attr_is_locking = False - self._attr_is_unlocking = False - elif lock_state is clusters.DoorLock.Enums.DlLockState.kNotFullyLocked: - if self.is_locked is True: - self._attr_is_unlocking = True - elif self.is_locked is False: - self._attr_is_locking = True else: # According to the matter docs a null state can happen during device startup. 
self._attr_is_locked = None - self._attr_is_locking = None - self._attr_is_unlocking = None if self.supports_door_position_sensor: door_state = self.get_matter_attribute_value( diff --git a/tests/components/matter/test_door_lock.py b/tests/components/matter/test_door_lock.py index a0664612aba..84f0e58a647 100644 --- a/tests/components/matter/test_door_lock.py +++ b/tests/components/matter/test_door_lock.py @@ -8,13 +8,11 @@ import pytest from homeassistant.components.lock import ( STATE_LOCKED, - STATE_LOCKING, STATE_OPEN, STATE_UNLOCKED, - STATE_UNLOCKING, LockEntityFeature, ) -from homeassistant.const import ATTR_CODE, STATE_UNKNOWN +from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er @@ -68,14 +66,14 @@ async def test_lock( state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_LOCKED + assert state.state == STATE_LOCKING set_node_attribute(door_lock, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_UNLOCKING + assert state.state == STATE_UNLOCKED set_node_attribute(door_lock, 1, 257, 0, 2) await trigger_subscription_callback(hass, matter_client) @@ -89,7 +87,7 @@ async def test_lock( state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_LOCKING + assert state.state == STATE_UNLOCKED set_node_attribute(door_lock, 1, 257, 0, None) await trigger_subscription_callback(hass, matter_client) From fc4af481795ddcd38a11bbbd0af57b548af70030 Mon Sep 17 00:00:00 2001 From: Christoph Date: Wed, 3 Jul 2024 22:19:59 +0200 Subject: [PATCH 0004/1635] Fix HmIP-ESI GAS sensor DeviceClass (#121106) should be SensorDeviceClass:GAS instead of SensorDeviceClass:VOLUME to be supported in the Energy Dashboard --- homeassistant/components/homematicip_cloud/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/homematicip_cloud/sensor.py b/homeassistant/components/homematicip_cloud/sensor.py index 6cdff6caef3..6bf128a1663 100644 --- a/homeassistant/components/homematicip_cloud/sensor.py +++ b/homeassistant/components/homematicip_cloud/sensor.py @@ -516,7 +516,7 @@ SENSORS_ESI = { HmipEsiSensorEntityDescription( key=ESI_TYPE_CURRENT_GAS_VOLUME, native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, - device_class=SensorDeviceClass.VOLUME, + device_class=SensorDeviceClass.GAS, state_class=SensorStateClass.TOTAL_INCREASING, value_fn=lambda device: device.functional_channel.gasVolume, exists_fn=lambda channel: channel.gasVolume is not None, From 930cd0dc50627c150013443ea0e0f9d10ba7c44b Mon Sep 17 00:00:00 2001 From: Pavel Skuratovich Date: Thu, 4 Jul 2024 22:54:39 +0300 Subject: [PATCH 0005/1635] Starline: Fix "Error updating SLNet token" message in Log (#121122) Fixes https://github.com/home-assistant/core/issues/116715 --- homeassistant/components/starline/account.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/starline/account.py b/homeassistant/components/starline/account.py index 6122ccbb3c2..4b1425ae7d9 100644 --- a/homeassistant/components/starline/account.py +++ b/homeassistant/components/starline/account.py @@ -9,7 +9,7 @@ from typing import Any from starline import StarlineApi, StarlineDevice from homeassistant.config_entries import 
ConfigEntry -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.event import async_track_time_interval from homeassistant.util import dt as dt_util @@ -65,9 +65,9 @@ class StarlineAccount: ) self._api.set_slnet_token(slnet_token) self._api.set_user_id(user_id) - self._hass.config_entries.async_update_entry( - self._config_entry, - data={ + self._hass.add_job( + self._save_slnet_token, + { **self._config_entry.data, DATA_SLNET_TOKEN: slnet_token, DATA_EXPIRES: slnet_token_expires, @@ -77,6 +77,13 @@ class StarlineAccount: except Exception as err: # noqa: BLE001 _LOGGER.error("Error updating SLNet token: %s", err) + @callback + def _save_slnet_token(self, data) -> None: + self._hass.config_entries.async_update_entry( + self._config_entry, + data=data, + ) + def _update_data(self): """Update StarLine data.""" self._check_slnet_token(self._update_interval) From 15933bb16f6e3e480450e9bc2142febc311dc750 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 3 Jul 2024 23:51:05 -0500 Subject: [PATCH 0006/1635] Bump inkbird-ble to 0.5.8 (#121134) --- homeassistant/components/inkbird/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/inkbird/manifest.json b/homeassistant/components/inkbird/manifest.json index fb74d1c565a..c1922004317 100644 --- a/homeassistant/components/inkbird/manifest.json +++ b/homeassistant/components/inkbird/manifest.json @@ -28,5 +28,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/inkbird", "iot_class": "local_push", - "requirements": ["inkbird-ble==0.5.7"] + "requirements": ["inkbird-ble==0.5.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5fd773147dc..93b518d4d85 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1161,7 +1161,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.5.7 +inkbird-ble==0.5.8 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d5c4945da95..ff50501d3f4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -951,7 +951,7 @@ influxdb-client==1.24.0 influxdb==5.3.1 # homeassistant.components.inkbird -inkbird-ble==0.5.7 +inkbird-ble==0.5.8 # homeassistant.components.insteon insteon-frontend-home-assistant==0.5.0 From 21815e16213bd175c6cbf05089ce7cf7ac0a68b5 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Thu, 4 Jul 2024 01:18:28 +0200 Subject: [PATCH 0007/1635] Fix broken pathlib import in august integration (#121135) --- homeassistant/components/august/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/august/__init__.py b/homeassistant/components/august/__init__.py index eec794896f6..53aa3cdffd8 100644 --- a/homeassistant/components/august/__init__.py +++ b/homeassistant/components/august/__init__.py @@ -2,10 +2,10 @@ from __future__ import annotations +from pathlib import Path from typing import cast from aiohttp import ClientResponseError -from path import Path from yalexs.exceptions import AugustApiAIOHTTPError from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from yalexs.manager.gateway import Config as YaleXSConfig From 
0acd1dc5d1bbc29ca6eff50553cfff092f521f40 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Fri, 5 Jul 2024 09:12:47 +0200 Subject: [PATCH 0008/1635] Bump velbusaio to 2024.7.5 (#121156) * Bump velbusaio to 2024.7.4 * bump to 2024.7.5 to remove print functions --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index f778533cad8..4e9478ae575 100644 --- a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.5.1"], + "requirements": ["velbus-aio==2024.7.5"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 93b518d4d85..db3d9de6e07 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2830,7 +2830,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2024.5.1 +velbus-aio==2024.7.5 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ff50501d3f4..9722c2d4d77 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2204,7 +2204,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.1 # homeassistant.components.velbus -velbus-aio==2024.5.1 +velbus-aio==2024.7.5 # homeassistant.components.venstar venstarcolortouch==0.19 From ee276aff44fb1dd984490baf1e19a62648a62b3f Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Fri, 5 Jul 2024 09:40:43 +0200 Subject: [PATCH 0009/1635] Fix `pulse counter frequency` sensors for Shelly Plus Uni (#121178) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/shelly/sensor.py | 29 +++++++++++++++-------- tests/components/shelly/conftest.py | 4 +++- tests/components/shelly/test_sensor.py | 26 ++++++++++++++++++++ 3 files changed, 48 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 743c7c7ff01..5a6f03fd90c 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -960,14 +960,18 @@ RPC_SENSORS: Final = { name="Analog input", native_unit_of_measurement=PERCENTAGE, state_class=SensorStateClass.MEASUREMENT, - removal_condition=lambda config, _status, key: (config[key]["enable"] is False), + removal_condition=lambda config, _, key: ( + config[key]["type"] != "analog" or config[key]["enable"] is False + ), ), "analoginput_xpercent": RpcSensorDescription( key="input", sub_key="xpercent", name="Analog value", removal_condition=lambda config, status, key: ( - config[key]["enable"] is False or status[key].get("xpercent") is None + config[key]["type"] != "analog" + or config[key]["enable"] is False + or status[key].get("xpercent") is None ), ), "pulse_counter": RpcSensorDescription( @@ -977,7 +981,9 @@ RPC_SENSORS: Final = { native_unit_of_measurement="pulse", state_class=SensorStateClass.TOTAL, value=lambda status, _: status["total"], - removal_condition=lambda config, _status, key: (config[key]["enable"] is False), + removal_condition=lambda config, _status, key: ( + config[key]["type"] != "count" or config[key]["enable"] is False + ), ), "counter_value": RpcSensorDescription( key="input", @@ -985,26 +991,29 @@ RPC_SENSORS: Final = { name="Counter value", 
value=lambda status, _: status["xtotal"], removal_condition=lambda config, status, key: ( - config[key]["enable"] is False + config[key]["type"] != "count" + or config[key]["enable"] is False or status[key]["counts"].get("xtotal") is None ), ), "counter_frequency": RpcSensorDescription( key="input", - sub_key="counts", + sub_key="freq", name="Pulse counter frequency", native_unit_of_measurement=UnitOfFrequency.HERTZ, state_class=SensorStateClass.MEASUREMENT, - value=lambda status, _: status["freq"], - removal_condition=lambda config, status, key: (config[key]["enable"] is False), + removal_condition=lambda config, _, key: ( + config[key]["type"] != "count" or config[key]["enable"] is False + ), ), "counter_frequency_value": RpcSensorDescription( key="input", - sub_key="counts", + sub_key="xfreq", name="Pulse counter frequency value", - value=lambda status, _: status["xfreq"], removal_condition=lambda config, status, key: ( - config[key]["enable"] is False or status[key]["counts"].get("xfreq") is None + config[key]["type"] != "count" + or config[key]["enable"] is False + or status[key].get("xfreq") is None ), ), } diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index a16cc62fbae..14c06d3f86a 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -228,7 +228,9 @@ MOCK_STATUS_RPC = { "input:1": {"id": 1, "percent": 89, "xpercent": 8.9}, "input:2": { "id": 2, - "counts": {"total": 56174, "xtotal": 561.74, "freq": 208.00, "xfreq": 6.11}, + "counts": {"total": 56174, "xtotal": 561.74}, + "freq": 208.00, + "xfreq": 6.11, }, "light:0": {"output": True, "brightness": 53.0}, "light:1": {"output": True, "brightness": 53.0}, diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 513bcd875e2..c62a1f6f6ca 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -828,3 +828,29 @@ async def test_rpc_pulse_counter_frequency_sensors( entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == "123456789ABC-input:2-counter_frequency_value" + + +async def test_rpc_disabled_xfreq( + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC input with the xfreq sensor disabled.""" + status = deepcopy(mock_rpc_device.status) + status["input:2"] = { + "id": 2, + "counts": {"total": 56174, "xtotal": 561.74}, + "freq": 208.00, + } + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 2) + + entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" + + state = hass.states.get(entity_id) + assert not state + + entry = entity_registry.async_get(entity_id) + assert not entry From f403afb012eee0904955e15b2b440dbec9494fd8 Mon Sep 17 00:00:00 2001 From: Gerben Jongerius Date: Thu, 4 Jul 2024 20:51:57 +0200 Subject: [PATCH 0010/1635] Bump youless library version 2.1.2 (#121181) --- homeassistant/components/youless/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/youless/snapshots/test_sensor.ambr | 8 ++++---- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/youless/manifest.json b/homeassistant/components/youless/manifest.json index 9a81de38388..1ccc8cda0ff 100644 --- a/homeassistant/components/youless/manifest.json +++ b/homeassistant/components/youless/manifest.json @@ -6,5 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/youless", "iot_class": "local_polling", "loggers": ["youless_api"], - "requirements": ["youless-api==2.1.0"] + "requirements": ["youless-api==2.1.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index db3d9de6e07..1e289ddbc52 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2945,7 +2945,7 @@ yeelightsunflower==0.0.10 yolink-api==0.4.4 # homeassistant.components.youless -youless-api==2.1.0 +youless-api==2.1.2 # homeassistant.components.youtube youtubeaio==1.1.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9722c2d4d77..1bfef00a2c8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2301,7 +2301,7 @@ yeelight==0.7.14 yolink-api==0.4.4 # homeassistant.components.youless -youless-api==2.1.0 +youless-api==2.1.2 # homeassistant.components.youtube youtubeaio==1.1.5 diff --git a/tests/components/youless/snapshots/test_sensor.ambr b/tests/components/youless/snapshots/test_sensor.ambr index 22e480c390e..bcfd0139e5c 100644 --- a/tests/components/youless/snapshots/test_sensor.ambr +++ b/tests/components/youless/snapshots/test_sensor.ambr @@ -47,7 +47,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': '0.0', }) # --- # name: test_sensors[sensor.energy_delivery_low-entry] @@ -98,7 +98,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': '0.029', }) # --- # name: test_sensors[sensor.energy_high-entry] @@ -405,7 +405,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1234.564', + 'state': '1624.264', }) # --- # name: test_sensors[sensor.phase_1_current-entry] @@ -967,6 +967,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': '1234.564', }) # --- From b2f23c1a5e996862450d098da1d8f203fe7a3299 Mon Sep 17 00:00:00 2001 From: Steven B <51370195+sdb9696@users.noreply.github.com> Date: Thu, 4 Jul 2024 10:31:58 +0100 Subject: [PATCH 0011/1635] Bump python-kasa to 0.7.0.3 (#121183) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 1270bb3469b..3786a2565c2 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -297,5 +297,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.0.2"] + "requirements": ["python-kasa[speedups]==0.7.0.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1e289ddbc52..07c579c3013 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2275,7 +2275,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.2 +python-kasa[speedups]==0.7.0.3 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1bfef00a2c8..f326ad62d94 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1775,7 +1775,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.2 +python-kasa[speedups]==0.7.0.3 # homeassistant.components.matter python-matter-server==6.2.2 From d2b695e7b5229d49c636003c6d8201358e812bdb Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt 
Date: Fri, 5 Jul 2024 09:41:41 +0200 Subject: [PATCH 0012/1635] Fix Matter light discovery schema for DimmerSwitch (#121185) --- homeassistant/components/matter/light.py | 2 ++ homeassistant/components/matter/switch.py | 1 + 2 files changed, 3 insertions(+) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 749d82fd661..9ff6f45177e 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -446,6 +446,8 @@ DISCOVERY_SCHEMAS = [ device_types.DimmablePlugInUnit, device_types.ExtendedColorLight, device_types.OnOffLight, + device_types.DimmerSwitch, + device_types.ColorDimmerSwitch, ), ), # Additional schema to match (HS Color) lights with incorrect/missing device type diff --git a/homeassistant/components/matter/switch.py b/homeassistant/components/matter/switch.py index efa78446fc5..2fb325b8808 100644 --- a/homeassistant/components/matter/switch.py +++ b/homeassistant/components/matter/switch.py @@ -114,6 +114,7 @@ DISCOVERY_SCHEMAS = [ device_types.ColorTemperatureLight, device_types.DimmableLight, device_types.ExtendedColorLight, + device_types.DimmerSwitch, device_types.ColorDimmerSwitch, device_types.OnOffLight, device_types.AirPurifier, From 0b970f9a853dc2361683ece247b5442c6306c1b6 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Fri, 5 Jul 2024 09:41:57 +0200 Subject: [PATCH 0013/1635] Listen for attribute changes of OnOff cluster in appliances (#121198) --- homeassistant/components/matter/climate.py | 1 + homeassistant/components/matter/fan.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/matter/climate.py b/homeassistant/components/matter/climate.py index 192cb6b3bb4..713aadf5620 100644 --- a/homeassistant/components/matter/climate.py +++ b/homeassistant/components/matter/climate.py @@ -350,6 +350,7 @@ DISCOVERY_SCHEMAS = [ clusters.Thermostat.Attributes.TemperatureSetpointHold, clusters.Thermostat.Attributes.UnoccupiedCoolingSetpoint, clusters.Thermostat.Attributes.UnoccupiedHeatingSetpoint, + clusters.OnOff.Attributes.OnOff, ), device_type=(device_types.Thermostat, device_types.RoomAirConditioner), ), diff --git a/homeassistant/components/matter/fan.py b/homeassistant/components/matter/fan.py index 86f03dc7a03..8cbd24977e3 100644 --- a/homeassistant/components/matter/fan.py +++ b/homeassistant/components/matter/fan.py @@ -313,6 +313,7 @@ DISCOVERY_SCHEMAS = [ clusters.FanControl.Attributes.RockSetting, clusters.FanControl.Attributes.WindSetting, clusters.FanControl.Attributes.AirflowDirection, + clusters.OnOff.Attributes.OnOff, ), ), ] From 1bb4d62a3e4726111c0a5b29f6ede33d37a52153 Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Thu, 4 Jul 2024 12:05:22 -0400 Subject: [PATCH 0014/1635] Bump anova-wifi to 0.15.0 (#121222) --- homeassistant/components/anova/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/anova/manifest.json b/homeassistant/components/anova/manifest.json index d75a791a6f5..7e605edc217 100644 --- a/homeassistant/components/anova/manifest.json +++ b/homeassistant/components/anova/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/anova", "iot_class": "cloud_push", "loggers": ["anova_wifi"], - "requirements": ["anova-wifi==0.14.0"] + "requirements": ["anova-wifi==0.15.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 07c579c3013..40cca4909fb 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -449,7 +449,7 @@ androidtvremote2==0.1.1 anel-pwrctrl-homeassistant==0.0.1.dev2 # homeassistant.components.anova -anova-wifi==0.14.0 +anova-wifi==0.15.0 # homeassistant.components.anthemav anthemav==1.4.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f326ad62d94..9d2ac6894c3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -413,7 +413,7 @@ androidtv[async]==0.0.73 androidtvremote2==0.1.1 # homeassistant.components.anova -anova-wifi==0.14.0 +anova-wifi==0.15.0 # homeassistant.components.anthemav anthemav==1.4.1 From ac668dce7db317af4cb3d302cc3651f7f0af8fac Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Fri, 5 Jul 2024 09:41:21 +0200 Subject: [PATCH 0015/1635] Fix work area sensor in Husqvarna Automower (#121228) --- .../components/husqvarna_automower/sensor.py | 11 +++++-- .../husqvarna_automower/strings.json | 3 +- .../snapshots/test_sensor.ambr | 2 ++ .../husqvarna_automower/test_sensor.py | 32 +++++++++++++++++++ 4 files changed, 45 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 146ef17a6e4..2c8d369ea3a 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -184,6 +184,8 @@ RESTRICTED_REASONS: list = [ RestrictedReasons.WEEK_SCHEDULE.lower(), ] +STATE_NO_WORK_AREA_ACTIVE = "no_work_area_active" + @callback def _get_work_area_names(data: MowerAttributes) -> list[str]: @@ -191,16 +193,21 @@ def _get_work_area_names(data: MowerAttributes) -> list[str]: if TYPE_CHECKING: # Sensor does not get created if it is None assert data.work_areas is not None - return [data.work_areas[work_area_id].name for work_area_id in data.work_areas] + work_area_list = [ + data.work_areas[work_area_id].name for work_area_id in data.work_areas + ] + work_area_list.append(STATE_NO_WORK_AREA_ACTIVE) + return work_area_list @callback def _get_current_work_area_name(data: MowerAttributes) -> str: """Return the name of the current work area.""" + if data.mower.work_area_id is None: + return STATE_NO_WORK_AREA_ACTIVE if TYPE_CHECKING: # Sensor does not get created if values are None assert data.work_areas is not None - assert data.mower.work_area_id is not None return data.work_areas[data.mower.work_area_id].name diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index 6cb1c17421a..be17cc25e32 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -252,7 +252,8 @@ "work_area": { "name": "Work area", "state": { - "my_lawn": "My lawn" + "my_lawn": "My lawn", + "no_work_area_active": "No work area active" } } }, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 0b0d76620d3..935303e48fb 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -1059,6 +1059,7 @@ 'Front lawn', 'Back lawn', 'my_lawn', + 'no_work_area_active', ]), }), 'config_entry_id': , @@ -1097,6 +1098,7 @@ 'Front lawn', 'Back lawn', 'my_lawn', + 'no_work_area_active', ]), }), 'context': , diff --git a/tests/components/husqvarna_automower/test_sensor.py 
b/tests/components/husqvarna_automower/test_sensor.py index 8f30a3dcb04..314bcaaa00c 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -87,6 +87,38 @@ async def test_next_start_sensor( assert state.state == STATE_UNKNOWN +async def test_work_area_sensor( + hass: HomeAssistant, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the work area sensor.""" + await setup_integration(hass, mock_config_entry) + state = hass.states.get("sensor.test_mower_1_work_area") + assert state is not None + assert state.state == "Front lawn" + + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].mower.work_area_id = None + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_work_area") + assert state.state == "no_work_area_active" + + values[TEST_MOWER_ID].mower.work_area_id = 0 + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get("sensor.test_mower_1_work_area") + assert state.state == "my_lawn" + + @pytest.mark.parametrize( ("sensor_to_test"), [ From b906daa4939254b9db394794d838da14b5577218 Mon Sep 17 00:00:00 2001 From: hahn-th <15319212+hahn-th@users.noreply.github.com> Date: Fri, 5 Jul 2024 09:45:20 +0200 Subject: [PATCH 0016/1635] Revert Homematic IP Cloud unique ID changes (#121231) --- .../homematicip_cloud/generic_entity.py | 11 +- .../components/homematicip_cloud/sensor.py | 288 +++++++++++------- .../fixtures/homematicip_cloud.json | 135 ++++++++ .../homematicip_cloud/test_device.py | 2 +- .../homematicip_cloud/test_sensor.py | 36 +++ 5 files changed, 351 insertions(+), 121 deletions(-) diff --git a/homeassistant/components/homematicip_cloud/generic_entity.py b/homeassistant/components/homematicip_cloud/generic_entity.py index 5cd48515ad7..163f3eec75e 100644 --- a/homeassistant/components/homematicip_cloud/generic_entity.py +++ b/homeassistant/components/homematicip_cloud/generic_entity.py @@ -216,14 +216,13 @@ class HomematicipGenericEntity(Entity): @property def unique_id(self) -> str: """Return a unique ID.""" - suffix = "" - if self._post is not None: - suffix = f"_{self._post}" - + unique_id = f"{self.__class__.__name__}_{self._device.id}" if self._is_multi_channel: - return f"{self.__class__.__name__}_Channel{self._channel}_{self._device.id}{suffix}" + unique_id = ( + f"{self.__class__.__name__}_Channel{self._channel}_{self._device.id}" + ) - return f"{self.__class__.__name__}_{self._device.id}{suffix}" + return unique_id @property def icon(self) -> str | None: diff --git a/homeassistant/components/homematicip_cloud/sensor.py b/homeassistant/components/homematicip_cloud/sensor.py index 6bf128a1663..1f76c6cce1f 100644 --- a/homeassistant/components/homematicip_cloud/sensor.py +++ b/homeassistant/components/homematicip_cloud/sensor.py @@ -3,7 +3,6 @@ from __future__ import annotations from collections.abc import Callable -from dataclasses import dataclass from typing import Any from homematicip.aio.device import ( @@ -36,7 +35,6 @@ from homematicip.base.functionalChannels import FunctionalChannel from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, - SensorEntityDescription, 
SensorStateClass, ) from homeassistant.config_entries import ConfigEntry @@ -163,19 +161,28 @@ async def async_setup_entry( for ch in get_channels_from_device( device, FunctionalChannelType.ENERGY_SENSORS_INTERFACE_CHANNEL ): - if ch.connectedEnergySensorType not in SENSORS_ESI: - continue + if ch.connectedEnergySensorType == ESI_CONNECTED_SENSOR_TYPE_IEC: + if ch.currentPowerConsumption is not None: + entities.append(HmipEsiIecPowerConsumption(hap, device)) + if ch.energyCounterOneType != ESI_TYPE_UNKNOWN: + entities.append(HmipEsiIecEnergyCounterHighTariff(hap, device)) + if ch.energyCounterTwoType != ESI_TYPE_UNKNOWN: + entities.append(HmipEsiIecEnergyCounterLowTariff(hap, device)) + if ch.energyCounterThreeType != ESI_TYPE_UNKNOWN: + entities.append( + HmipEsiIecEnergyCounterInputSingleTariff(hap, device) + ) - new_entities = [ - HmipEsiSensorEntity(hap, device, ch.index, description) - for description in SENSORS_ESI[ch.connectedEnergySensorType] - ] + if ch.connectedEnergySensorType == ESI_CONNECTED_SENSOR_TYPE_GAS: + if ch.currentGasFlow is not None: + entities.append(HmipEsiGasCurrentGasFlow(hap, device)) + if ch.gasVolume is not None: + entities.append(HmipEsiGasGasVolume(hap, device)) - entities.extend( - entity - for entity in new_entities - if entity.entity_description.exists_fn(ch) - ) + if ch.connectedEnergySensorType == ESI_CONNECTED_SENSOR_TYPE_LED: + if ch.currentPowerConsumption is not None: + entities.append(HmipEsiLedCurrentPowerConsumption(hap, device)) + entities.append(HmipEsiLedEnergyCounterHighTariff(hap, device)) async_add_entities(entities) @@ -434,132 +441,185 @@ class HomematicpTemperatureExternalSensorDelta(HomematicipGenericEntity, SensorE return self._device.temperatureExternalDelta -@dataclass(kw_only=True, frozen=True) -class HmipEsiSensorEntityDescription(SensorEntityDescription): - """SensorEntityDescription for HmIP Sensors.""" - - value_fn: Callable[[AsyncEnergySensorsInterface], StateType] - exists_fn: Callable[[FunctionalChannel], bool] - type_fn: Callable[[AsyncEnergySensorsInterface], str] - - -SENSORS_ESI = { - ESI_CONNECTED_SENSOR_TYPE_IEC: [ - HmipEsiSensorEntityDescription( - key=ESI_TYPE_CURRENT_POWER_CONSUMPTION, - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.functional_channel.currentPowerConsumption, - exists_fn=lambda channel: channel.currentPowerConsumption is not None, - type_fn=lambda device: "CurrentPowerConsumption", - ), - HmipEsiSensorEntityDescription( - key=ESI_TYPE_ENERGY_COUNTER_USAGE_HIGH_TARIFF, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.functional_channel.energyCounterOne, - exists_fn=lambda channel: channel.energyCounterOneType != ESI_TYPE_UNKNOWN, - type_fn=lambda device: device.functional_channel.energyCounterOneType, - ), - HmipEsiSensorEntityDescription( - key=ESI_TYPE_ENERGY_COUNTER_USAGE_LOW_TARIFF, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.functional_channel.energyCounterTwo, - exists_fn=lambda channel: channel.energyCounterTwoType != ESI_TYPE_UNKNOWN, - type_fn=lambda device: device.functional_channel.energyCounterTwoType, - ), - HmipEsiSensorEntityDescription( - key=ESI_TYPE_ENERGY_COUNTER_INPUT_SINGLE_TARIFF, - 
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.functional_channel.energyCounterThree, - exists_fn=lambda channel: channel.energyCounterThreeType - != ESI_TYPE_UNKNOWN, - type_fn=lambda device: device.functional_channel.energyCounterThreeType, - ), - ], - ESI_CONNECTED_SENSOR_TYPE_LED: [ - HmipEsiSensorEntityDescription( - key=ESI_TYPE_CURRENT_POWER_CONSUMPTION, - native_unit_of_measurement=UnitOfPower.WATT, - device_class=SensorDeviceClass.POWER, - state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.functional_channel.currentPowerConsumption, - exists_fn=lambda channel: channel.currentPowerConsumption is not None, - type_fn=lambda device: "CurrentPowerConsumption", - ), - HmipEsiSensorEntityDescription( - key=ESI_TYPE_ENERGY_COUNTER_USAGE_HIGH_TARIFF, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, - device_class=SensorDeviceClass.ENERGY, - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.functional_channel.energyCounterOne, - exists_fn=lambda channel: channel.energyCounterOne is not None, - type_fn=lambda device: ESI_TYPE_ENERGY_COUNTER_USAGE_HIGH_TARIFF, - ), - ], - ESI_CONNECTED_SENSOR_TYPE_GAS: [ - HmipEsiSensorEntityDescription( - key=ESI_TYPE_CURRENT_GAS_FLOW, - native_unit_of_measurement=UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR, - device_class=SensorDeviceClass.VOLUME_FLOW_RATE, - state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.functional_channel.currentGasFlow, - exists_fn=lambda channel: channel.currentGasFlow is not None, - type_fn=lambda device: "CurrentGasFlow", - ), - HmipEsiSensorEntityDescription( - key=ESI_TYPE_CURRENT_GAS_VOLUME, - native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, - device_class=SensorDeviceClass.GAS, - state_class=SensorStateClass.TOTAL_INCREASING, - value_fn=lambda device: device.functional_channel.gasVolume, - exists_fn=lambda channel: channel.gasVolume is not None, - type_fn=lambda device: "GasVolume", - ), - ], -} - - class HmipEsiSensorEntity(HomematicipGenericEntity, SensorEntity): """EntityDescription for HmIP-ESI Sensors.""" - entity_description: HmipEsiSensorEntityDescription - def __init__( self, hap: HomematicipHAP, device: HomematicipGenericEntity, - channel_index: int, - entity_description: HmipEsiSensorEntityDescription, + key: str, + value_fn: Callable[[FunctionalChannel], StateType], + type_fn: Callable[[FunctionalChannel], str], ) -> None: """Initialize Sensor Entity.""" super().__init__( hap=hap, device=device, - channel=channel_index, - post=entity_description.key, + channel=1, + post=key, is_multi_channel=False, ) - self.entity_description = entity_description + + self._value_fn = value_fn + self._type_fn = type_fn @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes of the esi sensor.""" state_attr = super().extra_state_attributes - state_attr[ATTR_ESI_TYPE] = self.entity_description.type_fn(self) + state_attr[ATTR_ESI_TYPE] = self._type_fn(self.functional_channel) return state_attr @property def native_value(self) -> str | None: """Return the state of the sensor.""" - return str(self.entity_description.value_fn(self)) + return str(self._value_fn(self.functional_channel)) + + +class HmipEsiIecPowerConsumption(HmipEsiSensorEntity): + """Representation of the Hmip-ESI IEC currentPowerConsumption sensor.""" + + _attr_device_class = SensorDeviceClass.POWER + 
_attr_native_unit_of_measurement = UnitOfPower.WATT + _attr_state_class = SensorStateClass.MEASUREMENT + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key="CurrentPowerConsumption", + value_fn=lambda channel: channel.currentPowerConsumption, + type_fn=lambda channel: "CurrentPowerConsumption", + ) + + +class HmipEsiIecEnergyCounterHighTariff(HmipEsiSensorEntity): + """Representation of the Hmip-ESI IEC energyCounterOne sensor.""" + + _attr_device_class = SensorDeviceClass.ENERGY + _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL_INCREASING + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key=ESI_TYPE_ENERGY_COUNTER_USAGE_HIGH_TARIFF, + value_fn=lambda channel: channel.energyCounterOne, + type_fn=lambda channel: channel.energyCounterOneType, + ) + + +class HmipEsiIecEnergyCounterLowTariff(HmipEsiSensorEntity): + """Representation of the Hmip-ESI IEC energyCounterTwo sensor.""" + + _attr_device_class = SensorDeviceClass.ENERGY + _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL_INCREASING + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key=ESI_TYPE_ENERGY_COUNTER_USAGE_LOW_TARIFF, + value_fn=lambda channel: channel.energyCounterTwo, + type_fn=lambda channel: channel.energyCounterTwoType, + ) + + +class HmipEsiIecEnergyCounterInputSingleTariff(HmipEsiSensorEntity): + """Representation of the Hmip-ESI IEC energyCounterThree sensor.""" + + _attr_device_class = SensorDeviceClass.ENERGY + _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL_INCREASING + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key=ESI_TYPE_ENERGY_COUNTER_INPUT_SINGLE_TARIFF, + value_fn=lambda channel: channel.energyCounterThree, + type_fn=lambda channel: channel.energyCounterThreeType, + ) + + +class HmipEsiGasCurrentGasFlow(HmipEsiSensorEntity): + """Representation of the Hmip-ESI Gas currentGasFlow sensor.""" + + _attr_device_class = SensorDeviceClass.VOLUME_FLOW_RATE + _attr_native_unit_of_measurement = UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR + _attr_state_class = SensorStateClass.MEASUREMENT + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key="CurrentGasFlow", + value_fn=lambda channel: channel.currentGasFlow, + type_fn=lambda channel: "CurrentGasFlow", + ) + + +class HmipEsiGasGasVolume(HmipEsiSensorEntity): + """Representation of the Hmip-ESI Gas gasVolume sensor.""" + + _attr_device_class = SensorDeviceClass.GAS + _attr_native_unit_of_measurement = UnitOfVolume.CUBIC_METERS + _attr_state_class = SensorStateClass.TOTAL_INCREASING + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key="GasVolume", + value_fn=lambda channel: channel.gasVolume, + type_fn=lambda channel: "GasVolume", + ) + + +class HmipEsiLedCurrentPowerConsumption(HmipEsiSensorEntity): + """Representation of the Hmip-ESI LED currentPowerConsumption sensor.""" + + _attr_device_class = SensorDeviceClass.POWER + _attr_native_unit_of_measurement = UnitOfPower.WATT + _attr_state_class = 
SensorStateClass.MEASUREMENT + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key="CurrentPowerConsumption", + value_fn=lambda channel: channel.currentPowerConsumption, + type_fn=lambda channel: "CurrentPowerConsumption", + ) + + +class HmipEsiLedEnergyCounterHighTariff(HmipEsiSensorEntity): + """Representation of the Hmip-ESI LED energyCounterOne sensor.""" + + _attr_device_class = SensorDeviceClass.ENERGY + _attr_native_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR + _attr_state_class = SensorStateClass.TOTAL_INCREASING + + def __init__(self, hap: HomematicipHAP, device) -> None: + """Initialize the device.""" + super().__init__( + hap, + device, + key=ESI_TYPE_ENERGY_COUNTER_USAGE_HIGH_TARIFF, + value_fn=lambda channel: channel.energyCounterOne, + type_fn=lambda channel: ESI_TYPE_ENERGY_COUNTER_USAGE_HIGH_TARIFF, + ) class HomematicipPassageDetectorDeltaCounter(HomematicipGenericEntity, SensorEntity): diff --git a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json index eba2c803b1f..e67ffd78467 100644 --- a/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json +++ b/tests/components/homematicip_cloud/fixtures/homematicip_cloud.json @@ -7757,6 +7757,141 @@ "serializedGlobalTradeItemNumber": "3014F711000000000ESIIEC2", "type": "ENERGY_SENSORS_INTERFACE", "updateState": "UP_TO_DATE" + }, + "3014F7110000000000ESIIE3": { + "availableFirmwareVersion": "0.0.0", + "connectionType": "HMIP_RF", + "deviceArchetype": "HMIP", + "firmwareVersion": "1.0.6", + "firmwareVersionInteger": 65542, + "functionalChannels": { + "0": { + "busConfigMismatch": null, + "coProFaulty": false, + "coProRestartNeeded": false, + "coProUpdateFailure": false, + "configPending": false, + "controlsMountingOrientation": null, + "daliBusState": null, + "defaultLinkedGroup": [], + "deviceCommunicationError": null, + "deviceDriveError": null, + "deviceDriveModeError": null, + "deviceId": "3014F7110000000000ESIIE3", + "deviceOperationMode": null, + "deviceOverheated": false, + "deviceOverloaded": false, + "devicePowerFailureDetected": false, + "deviceUndervoltage": false, + "displayContrast": null, + "dutyCycle": false, + "functionalChannelType": "DEVICE_BASE", + "groupIndex": 0, + "groups": ["00000000-0000-0000-0000-000000000031"], + "index": 0, + "label": "", + "lockJammed": null, + "lowBat": false, + "mountingOrientation": null, + "multicastRoutingEnabled": false, + "particulateMatterSensorCommunicationError": null, + "particulateMatterSensorError": null, + "powerShortCircuit": null, + "profilePeriodLimitReached": null, + "routerModuleEnabled": false, + "routerModuleSupported": false, + "rssiDeviceValue": -94, + "rssiPeerValue": null, + "sensorCommunicationError": false, + "sensorError": true, + "shortCircuitDataLine": null, + "supportedOptionalFeatures": { + "IFeatureBusConfigMismatch": false, + "IFeatureDeviceCoProError": false, + "IFeatureDeviceCoProRestart": false, + "IFeatureDeviceCoProUpdate": false, + "IFeatureDeviceCommunicationError": false, + "IFeatureDeviceDaliBusError": false, + "IFeatureDeviceDriveError": false, + "IFeatureDeviceDriveModeError": false, + "IFeatureDeviceIdentify": false, + "IFeatureDeviceOverheated": false, + "IFeatureDeviceOverloaded": false, + "IFeatureDeviceParticulateMatterSensorCommunicationError": false, + "IFeatureDeviceParticulateMatterSensorError": false, + "IFeatureDevicePowerFailure": false, + 
"IFeatureDeviceSensorCommunicationError": true, + "IFeatureDeviceSensorError": true, + "IFeatureDeviceTemperatureHumiditySensorCommunicationError": false, + "IFeatureDeviceTemperatureHumiditySensorError": false, + "IFeatureDeviceTemperatureOutOfRange": false, + "IFeatureDeviceUndervoltage": false, + "IFeatureMulticastRouter": false, + "IFeaturePowerShortCircuit": false, + "IFeatureProfilePeriodLimit": false, + "IFeatureRssiValue": true, + "IFeatureShortCircuitDataLine": false, + "IOptionalFeatureDefaultLinkedGroup": false, + "IOptionalFeatureDeviceErrorLockJammed": false, + "IOptionalFeatureDeviceOperationMode": false, + "IOptionalFeatureDisplayContrast": false, + "IOptionalFeatureDutyCycle": true, + "IOptionalFeatureLowBat": true, + "IOptionalFeatureMountingOrientation": false + }, + "temperatureHumiditySensorCommunicationError": null, + "temperatureHumiditySensorError": null, + "temperatureOutOfRange": false, + "unreach": false + }, + "1": { + "channelRole": "ENERGY_SENSOR", + "connectedEnergySensorType": "ES_LED", + "currentGasFlow": null, + "currentPowerConsumption": 189.15, + "deviceId": "3014F7110000000000ESIIE3", + "energyCounterOne": 23825.748, + "energyCounterOneType": "UNKNOWN", + "energyCounterThree": null, + "energyCounterThreeType": "UNKNOWN", + "energyCounterTwo": null, + "energyCounterTwoType": "UNKNOWN", + "functionalChannelType": "ENERGY_SENSORS_INTERFACE_CHANNEL", + "gasVolume": null, + "gasVolumePerImpulse": 0.01, + "groupIndex": 1, + "groups": ["00000000-0000-0000-0000-000000000057"], + "impulsesPerKWH": 1000, + "index": 1, + "label": "", + "supportedOptionalFeatures": { + "IOptionalFeatureCounterOffset": true, + "IOptionalFeatureCurrentGasFlow": false, + "IOptionalFeatureCurrentPowerConsumption": true, + "IOptionalFeatureEnergyCounterOne": true, + "IOptionalFeatureEnergyCounterThree": false, + "IOptionalFeatureEnergyCounterTwo": false, + "IOptionalFeatureGasVolume": false, + "IOptionalFeatureGasVolumePerImpulse": false, + "IOptionalFeatureImpulsesPerKWH": true + } + } + }, + "homeId": "00000000-0000-0000-0000-000000000001", + "id": "3014F7110000000000ESIIE3", + "label": "esi_led", + "lastStatusUpdate": 1702420986697, + "liveUpdateState": "LIVE_UPDATE_NOT_SUPPORTED", + "manuallyUpdateForced": false, + "manufacturerCode": 1, + "measuredAttributes": {}, + "modelId": 509, + "modelType": "HmIP-ESI", + "oem": "eQ-3", + "permanentlyReachable": false, + "serializedGlobalTradeItemNumber": "3014F7110000000000ESIIE3", + "type": "ENERGY_SENSORS_INTERFACE", + "updateState": "UP_TO_DATE" } }, "groups": { diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 348171b3187..074a30e94b2 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -26,7 +26,7 @@ async def test_hmip_load_all_supported_devices( test_devices=None, test_groups=None ) - assert len(mock_hap.hmip_device_by_entity_id) == 290 + assert len(mock_hap.hmip_device_by_entity_id) == 293 async def test_hmip_remove_device( diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 6951b750b2f..2b62c46fd72 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -634,3 +634,39 @@ async def test_hmip_esi_gas_gas_volume( ) assert ha_state.state == "1019.26" + + +async def test_hmip_esi_led_current_power_consumption( + hass: HomeAssistant, default_mock_hap_factory +) -> None: 
+ """Test ESI-IEC currentPowerConsumption Sensor.""" + entity_id = "sensor.esi_led_currentPowerConsumption" + entity_name = "esi_led CurrentPowerConsumption" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_led"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "189.15" + + +async def test_hmip_esi_led_energy_counter_usage_high_tariff( + hass: HomeAssistant, default_mock_hap_factory +) -> None: + """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" + entity_id = "sensor.esi_led_energy_counter_usage_high_tariff" + entity_name = "esi_led ENERGY_COUNTER_USAGE_HIGH_TARIFF" + device_model = "HmIP-ESI" + mock_hap = await default_mock_hap_factory.async_get_mock_hap( + test_devices=["esi_led"] + ) + + ha_state, hmip_device = get_and_check_entity_basics( + hass, mock_hap, entity_id, entity_name, device_model + ) + + assert ha_state.state == "23825.748" From eda450838e4a1d8e8c8da2fc1264c5397861ed96 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 4 Jul 2024 21:03:33 +0200 Subject: [PATCH 0017/1635] Bump deebot-client to 8.1.1 (#121241) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 9568bf2c3ac..6ca9b9e3edc 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.1.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 40cca4909fb..681e7aba41e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -709,7 +709,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.1.0 +deebot-client==8.1.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9d2ac6894c3..322b5d0d7c5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -590,7 +590,7 @@ dbus-fast==2.22.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.1.0 +deebot-client==8.1.1 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 651439ea0602e9cdf5a7a5d6beec7aad9ba0937f Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Fri, 5 Jul 2024 00:19:24 +0300 Subject: [PATCH 0018/1635] Fix WebOS TV media player status when OFF after IDLE (#121251) --- homeassistant/components/webostv/media_player.py | 3 ++- tests/components/webostv/test_media_player.py | 4 ++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/webostv/media_player.py b/homeassistant/components/webostv/media_player.py index 6aef47515db..099b5a73784 100644 --- a/homeassistant/components/webostv/media_player.py +++ b/homeassistant/components/webostv/media_player.py @@ -239,7 +239,8 @@ class LgWebOSMediaPlayerEntity(RestoreEntity, MediaPlayerEntity): self._attr_assumed_state = True if ( - self._client.media_state is not None + self._client.is_on + and self._client.media_state is not None and 
self._client.media_state.get("foregroundAppInfo") is not None ): self._attr_assumed_state = False diff --git a/tests/components/webostv/test_media_player.py b/tests/components/webostv/test_media_player.py index 6c4aeb5e984..f0d17057b33 100644 --- a/tests/components/webostv/test_media_player.py +++ b/tests/components/webostv/test_media_player.py @@ -832,3 +832,7 @@ async def test_update_media_state(hass: HomeAssistant, client, monkeypatch) -> N monkeypatch.setattr(client, "media_state", data) await client.mock_state_update() assert hass.states.get(ENTITY_ID).state == MediaPlayerState.IDLE + + monkeypatch.setattr(client, "is_on", False) + await client.mock_state_update() + assert hass.states.get(ENTITY_ID).state == STATE_OFF From a54223661425ddb04be408eb14c93bf8aa5bead7 Mon Sep 17 00:00:00 2001 From: Jordi Date: Fri, 5 Jul 2024 00:05:35 +0200 Subject: [PATCH 0019/1635] Bump aioaquacell to 0.1.8 (#121253) --- homeassistant/components/aquacell/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aquacell/manifest.json b/homeassistant/components/aquacell/manifest.json index 1f43fa214d3..559bdf345bb 100644 --- a/homeassistant/components/aquacell/manifest.json +++ b/homeassistant/components/aquacell/manifest.json @@ -8,5 +8,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["aioaquacell"], - "requirements": ["aioaquacell==0.1.7"] + "requirements": ["aioaquacell==0.1.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 681e7aba41e..935f9571602 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -195,7 +195,7 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.7 +aioaquacell==0.1.8 # homeassistant.components.aseko_pool_live aioaseko==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 322b5d0d7c5..b86bd8b9548 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -174,7 +174,7 @@ aioambient==2024.01.0 aioapcaccess==0.4.2 # homeassistant.components.aquacell -aioaquacell==0.1.7 +aioaquacell==0.1.8 # homeassistant.components.aseko_pool_live aioaseko==0.1.1 From f4e362c5d0f54d5933c69356a535c9304041fda7 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Fri, 5 Jul 2024 01:27:56 +0300 Subject: [PATCH 0020/1635] Bump aiowebostv to 0.4.2 (#121258) --- homeassistant/components/webostv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/webostv/manifest.json b/homeassistant/components/webostv/manifest.json index bcafb82a4b0..679bad9b9f5 100644 --- a/homeassistant/components/webostv/manifest.json +++ b/homeassistant/components/webostv/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "loggers": ["aiowebostv"], "quality_scale": "platinum", - "requirements": ["aiowebostv==0.4.1"], + "requirements": ["aiowebostv==0.4.2"], "ssdp": [ { "st": "urn:lge-com:service:webos-second-screen:1" diff --git a/requirements_all.txt b/requirements_all.txt index 935f9571602..029077a0f7f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -404,7 +404,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.4.1 +aiowebostv==0.4.2 # homeassistant.components.withings aiowithings==3.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b86bd8b9548..946052ec8de 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -377,7 +377,7 @@ aiowaqi==3.1.0 aiowatttime==0.1.1 # homeassistant.components.webostv -aiowebostv==0.4.1 +aiowebostv==0.4.2 # homeassistant.components.withings aiowithings==3.0.2 From b015611a2ab5eeca58ed633818be0912987b395d Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 5 Jul 2024 10:02:52 +0200 Subject: [PATCH 0021/1635] Bump python-holidays to 0.52 (#121283) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index cb67039f374..075285bbdb9 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.51", "babel==2.15.0"] + "requirements": ["holidays==0.52", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 1148f46e2d1..ad609954a57 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.51"] + "requirements": ["holidays==0.52"] } diff --git a/requirements_all.txt b/requirements_all.txt index 029077a0f7f..21d20d675c3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1087,7 +1087,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.51 +holidays==0.52 # homeassistant.components.frontend home-assistant-frontend==20240703.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 946052ec8de..72c223cbac1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -892,7 +892,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.51 +holidays==0.52 # homeassistant.components.frontend home-assistant-frontend==20240703.0 From 994d6f552cbca60d3436bc19f2c9e0899d3d6c1a Mon Sep 17 00:00:00 2001 From: Steven B <51370195+sdb9696@users.noreply.github.com> Date: Fri, 5 Jul 2024 10:19:04 +0100 Subject: [PATCH 0022/1635] Fix tplink light effect behaviour when activating a scene (#121288) --- homeassistant/components/tplink/light.py | 7 +- tests/components/tplink/__init__.py | 51 +++++++++++--- tests/components/tplink/test_light.py | 88 +++++++++++++++++++++++- 3 files changed, 135 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index a736a0ba1e1..22e7c523d1a 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -382,7 +382,12 @@ class TPLinkLightEffectEntity(TPLinkLightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" brightness, transition = self._async_extract_brightness_transition(**kwargs) - if ATTR_EFFECT in kwargs: + if ( + (effect := kwargs.get(ATTR_EFFECT)) + # Effect is unlikely to be LIGHT_EFFECTS_OFF but check for it anyway + and effect not in {LightEffect.LIGHT_EFFECTS_OFF, EFFECT_OFF} + and effect in self._effect_module.effect_list + ): await 
self._effect_module.set_effect( kwargs[ATTR_EFFECT], brightness=brightness, transition=transition ) diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index d12858017cc..b554cf07015 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -210,7 +210,8 @@ def _mocked_device( if modules: device.modules = { - module_name: MODULE_TO_MOCK_GEN[module_name]() for module_name in modules + module_name: MODULE_TO_MOCK_GEN[module_name](device) + for module_name in modules } if features: @@ -298,7 +299,7 @@ def _mocked_feature( return feature -def _mocked_light_module() -> Light: +def _mocked_light_module(device) -> Light: light = MagicMock(spec=Light, name="Mocked light module") light.update = AsyncMock() light.brightness = 50 @@ -314,26 +315,58 @@ def _mocked_light_module() -> Light: light.hsv = (10, 30, 5) light.valid_temperature_range = ColorTempRange(min=4000, max=9000) light.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} - light.set_state = AsyncMock() - light.set_brightness = AsyncMock() - light.set_hsv = AsyncMock() - light.set_color_temp = AsyncMock() + + async def _set_state(state, *_, **__): + light.state = state + + light.set_state = AsyncMock(wraps=_set_state) + + async def _set_brightness(brightness, *_, **__): + light.state.brightness = brightness + light.state.light_on = brightness > 0 + + light.set_brightness = AsyncMock(wraps=_set_brightness) + + async def _set_hsv(h, s, v, *_, **__): + light.state.hue = h + light.state.saturation = s + light.state.brightness = v + light.state.light_on = True + + light.set_hsv = AsyncMock(wraps=_set_hsv) + + async def _set_color_temp(temp, *_, **__): + light.state.color_temp = temp + light.state.light_on = True + + light.set_color_temp = AsyncMock(wraps=_set_color_temp) light.protocol = _mock_protocol() return light -def _mocked_light_effect_module() -> LightEffect: +def _mocked_light_effect_module(device) -> LightEffect: effect = MagicMock(spec=LightEffect, name="Mocked light effect") effect.has_effects = True effect.has_custom_effects = True effect.effect = "Effect1" effect.effect_list = ["Off", "Effect1", "Effect2"] - effect.set_effect = AsyncMock() + + async def _set_effect(effect_name, *_, **__): + assert ( + effect_name in effect.effect_list + ), f"set_effect '{effect_name}' not in {effect.effect_list}" + assert device.modules[ + Module.Light + ], "Need a light module to test set_effect method" + device.modules[Module.Light].state.light_on = True + effect.effect = effect_name + + effect.set_effect = AsyncMock(wraps=_set_effect) effect.set_custom_effect = AsyncMock() return effect -def _mocked_fan_module() -> Fan: +def _mocked_fan_module(effect) -> Fan: fan = MagicMock(auto_spec=Fan, name="Mocked fan") fan.fan_speed_level = 0 fan.set_fan_speed_level = AsyncMock() diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 6fce04ec454..bb814d1f5d3 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -5,6 +5,7 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import MagicMock, PropertyMock +from freezegun.api import FrozenDateTimeFactory from kasa import ( AuthenticationError, DeviceType, @@ -36,7 +37,13 @@ from homeassistant.components.light import ( ) from homeassistant.components.tplink.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH -from homeassistant.const import ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON +from 
homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_HOST, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er @@ -920,3 +927,82 @@ async def test_light_child( assert child_entity assert child_entity.unique_id == f"{DEVICE_ID}0{light_id}" assert child_entity.device_id == entity.device_id + + +async def test_scene_effect_light( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test activating a scene works with effects. + + i.e. doesn't try to set the effect to 'off' + """ + already_migrated_config_entry = MockConfigEntry( + domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS + ) + already_migrated_config_entry.add_to_hass(hass) + device = _mocked_device( + modules=[Module.Light, Module.LightEffect], alias="my_light" + ) + light_effect = device.modules[Module.LightEffect] + light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF + + with _patch_discovery(device=device), _patch_connect(device=device): + assert await async_setup_component(hass, tplink.DOMAIN, {tplink.DOMAIN: {}}) + assert await async_setup_component(hass, "scene", {}) + await hass.async_block_till_done() + + entity_id = "light.my_light" + + await hass.services.async_call( + LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + freezer.tick(5) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state is STATE_ON + assert state.attributes["effect"] is EFFECT_OFF + + await hass.services.async_call( + "scene", + "create", + {"scene_id": "effect_off_scene", "snapshot_entities": [entity_id]}, + blocking=True, + ) + await hass.async_block_till_done() + scene_state = hass.states.get("scene.effect_off_scene") + assert scene_state.state is STATE_UNKNOWN + + await hass.services.async_call( + LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + freezer.tick(5) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state is STATE_OFF + + await hass.services.async_call( + "scene", + "turn_on", + { + "entity_id": "scene.effect_off_scene", + }, + blocking=True, + ) + await hass.async_block_till_done() + scene_state = hass.states.get("scene.effect_off_scene") + assert scene_state.state is not STATE_UNKNOWN + + freezer.tick(5) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state is STATE_ON + assert state.attributes["effect"] is EFFECT_OFF From dfccd4abf93d949bc1acac999367b08bed7307fb Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 5 Jul 2024 11:21:36 +0200 Subject: [PATCH 0023/1635] Bump version to 2024.7.1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index d5c64823890..33087b0bfc1 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 7 -PATCH_VERSION: Final = "0" +PATCH_VERSION: Final = "1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 
0) diff --git a/pyproject.toml b/pyproject.toml index 777ec8bb6a4..138d1dd80b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.0" +version = "2024.7.1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From e3958d4adb64c6bc3500db7433f4ea868f709ef1 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 5 Jul 2024 14:35:21 +0200 Subject: [PATCH 0024/1635] Update frontend to 20240705.0 (#121295) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 525ba507121..186f725c643 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240703.0"] + "requirements": ["home-assistant-frontend==20240705.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6058a781e2a..41d3af2ad47 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.1 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240703.0 +home-assistant-frontend==20240705.0 home-assistant-intents==2024.7.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 21d20d675c3..f1d77644104 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1090,7 +1090,7 @@ hole==0.8.0 holidays==0.52 # homeassistant.components.frontend -home-assistant-frontend==20240703.0 +home-assistant-frontend==20240705.0 # homeassistant.components.conversation home-assistant-intents==2024.7.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 72c223cbac1..90e59778751 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -895,7 +895,7 @@ hole==0.8.0 holidays==0.52 # homeassistant.components.frontend -home-assistant-frontend==20240703.0 +home-assistant-frontend==20240705.0 # homeassistant.components.conversation home-assistant-intents==2024.7.3 From 440d83d754709fada7c7e08da2466c1f753d75c2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 29 Jun 2024 07:50:53 -0500 Subject: [PATCH 0025/1635] Remove legacy foreign key constraint from sqlite states table (#120779) --- .../components/recorder/migration.py | 76 ++++++++++++++-- tests/components/recorder/test_migrate.py | 89 ++++++++++++++++++- .../components/recorder/test_v32_migration.py | 9 +- 3 files changed, 162 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 561b446f493..cf003f72af4 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -24,7 +24,7 @@ from sqlalchemy.exc import ( SQLAlchemyError, ) from sqlalchemy.orm.session import Session -from sqlalchemy.schema import AddConstraint, DropConstraint +from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint from sqlalchemy.sql.expression import true from sqlalchemy.sql.lambdas import StatementLambdaElement @@ -1738,14 +1738,15 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: # Only drop the index if there are no more event_ids in the states table # ex all NULL assert instance.engine is not None, "engine should never be None" - if instance.dialect_name != SupportedDialect.SQLITE: + if instance.dialect_name == SupportedDialect.SQLITE: # SQLite does not support dropping foreign key constraints - # so we can't drop the index at this time but we can avoid - # looking for legacy rows during purge + # so we have to rebuild the table + rebuild_sqlite_table(session_maker, instance.engine, States) + else: _drop_foreign_key_constraints( session_maker, instance.engine, TABLE_STATES, ["event_id"] ) - _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) instance.use_legacy_events_index = False return True @@ -1894,3 +1895,68 @@ def _mark_migration_done( migration_id=migration.migration_id, version=migration.migration_version ) ) + + +def rebuild_sqlite_table( + session_maker: Callable[[], Session], engine: Engine, table: type[Base] +) -> None: + """Rebuild an SQLite table. + + This must only be called after all migrations are complete + and the database is in a consistent state. + + If the table is not migrated to the current schema this + will likely fail. 
+ """ + table_table = cast(Table, table.__table__) + orig_name = table_table.name + temp_name = f"{table_table.name}_temp_{int(time())}" + + _LOGGER.warning( + "Rebuilding SQLite table %s; This will take a while; Please be patient!", + orig_name, + ) + + try: + # 12 step SQLite table rebuild + # https://www.sqlite.org/lang_altertable.html + with session_scope(session=session_maker()) as session: + # Step 1 - Disable foreign keys + session.connection().execute(text("PRAGMA foreign_keys=OFF")) + # Step 2 - create a transaction + with session_scope(session=session_maker()) as session: + # Step 3 - we know all the indexes, triggers, and views associated with table X + new_sql = str(CreateTable(table_table).compile(engine)).strip("\n") + ";" + source_sql = f"CREATE TABLE {orig_name}" + replacement_sql = f"CREATE TABLE {temp_name}" + assert source_sql in new_sql, f"{source_sql} should be in new_sql" + new_sql = new_sql.replace(source_sql, replacement_sql) + # Step 4 - Create temp table + session.execute(text(new_sql)) + column_names = ",".join([column.name for column in table_table.columns]) + # Step 5 - Transfer content + sql = f"INSERT INTO {temp_name} SELECT {column_names} FROM {orig_name};" # noqa: S608 + session.execute(text(sql)) + # Step 6 - Drop the original table + session.execute(text(f"DROP TABLE {orig_name}")) + # Step 7 - Rename the temp table + session.execute(text(f"ALTER TABLE {temp_name} RENAME TO {orig_name}")) + # Step 8 - Recreate indexes + for index in table_table.indexes: + index.create(session.connection()) + # Step 9 - Recreate views (there are none) + # Step 10 - Check foreign keys + session.execute(text("PRAGMA foreign_key_check")) + # Step 11 - Commit transaction + session.commit() + except SQLAlchemyError: + _LOGGER.exception("Error recreating SQLite table %s", table_table.name) + # Swallow the exception since we do not want to ever raise + # an integrity error as it would cause the database + # to be discarded and recreated from scratch + else: + _LOGGER.warning("Rebuilding SQLite table %s finished", orig_name) + finally: + with session_scope(session=session_maker()) as session: + # Step 12 - Re-enable foreign keys + session.connection().execute(text("PRAGMA foreign_keys=ON")) diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index a21f4771616..cb8e402f65a 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -16,7 +16,7 @@ from sqlalchemy.exc import ( ProgrammingError, SQLAlchemyError, ) -from sqlalchemy.orm import Session +from sqlalchemy.orm import Session, scoped_session, sessionmaker from sqlalchemy.pool import StaticPool from homeassistant.bootstrap import async_setup_component @@ -24,6 +24,7 @@ from homeassistant.components import persistent_notification as pn, recorder from homeassistant.components.recorder import db_schema, migration from homeassistant.components.recorder.db_schema import ( SCHEMA_VERSION, + Events, RecorderRuns, States, ) @@ -633,3 +634,89 @@ def test_raise_if_exception_missing_empty_cause_str() -> None: with pytest.raises(ProgrammingError): migration.raise_if_exception_missing_str(programming_exc, ["not present"]) + + +def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: + """Test that we can rebuild the states table in SQLite.""" + if not recorder_db_url.startswith("sqlite://"): + # This test is specific for SQLite + return + + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, 
future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + with session_scope(session=session_maker()) as session: + session.add(States(state="on")) + session.commit() + + migration.rebuild_sqlite_table(session_maker, engine, States) + + with session_scope(session=session_maker()) as session: + assert session.query(States).count() == 1 + assert session.query(States).first().state == "on" + + engine.dispose() + + +def test_rebuild_sqlite_states_table_missing_fails( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test handling missing states table when attempting rebuild.""" + if not recorder_db_url.startswith("sqlite://"): + # This test is specific for SQLite + return + + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + + with session_scope(session=session_maker()) as session: + session.add(Events(event_type="state_changed", event_data="{}")) + session.connection().execute(text("DROP TABLE states")) + session.commit() + + migration.rebuild_sqlite_table(session_maker, engine, States) + assert "Error recreating SQLite table states" in caplog.text + caplog.clear() + + # Now rebuild the events table to make sure the database did not + # get corrupted + migration.rebuild_sqlite_table(session_maker, engine, Events) + + with session_scope(session=session_maker()) as session: + assert session.query(Events).count() == 1 + assert session.query(Events).first().event_type == "state_changed" + assert session.query(Events).first().event_data == "{}" + + engine.dispose() + + +def test_rebuild_sqlite_states_table_extra_columns( + recorder_db_url: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test handling extra columns when rebuilding the states table.""" + if not recorder_db_url.startswith("sqlite://"): + # This test is specific for SQLite + return + + engine = create_engine(recorder_db_url) + session_maker = scoped_session(sessionmaker(bind=engine, future=True)) + with session_scope(session=session_maker()) as session: + db_schema.Base.metadata.create_all(engine) + with session_scope(session=session_maker()) as session: + session.add(States(state="on")) + session.commit() + session.connection().execute( + text("ALTER TABLE states ADD COLUMN extra_column TEXT") + ) + + migration.rebuild_sqlite_table(session_maker, engine, States) + assert "Error recreating SQLite table states" not in caplog.text + + with session_scope(session=session_maker()) as session: + assert session.query(States).count() == 1 + assert session.query(States).first().state == "on" + + engine.dispose() diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index a07c63b3376..e3398fbf0e3 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -211,10 +211,9 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) - ) states_index_names = {index["name"] for index in states_indexes} - # sqlite does not support dropping foreign keys so the - # ix_states_event_id index is not dropped in this case - # but use_legacy_events_index is still False - assert "ix_states_event_id" in states_index_names + # sqlite does not support dropping foreign keys so we had to + # create a new table and copy the data over + assert "ix_states_event_id" not in 
states_index_names assert recorder.get_instance(hass).use_legacy_events_index is False @@ -342,8 +341,6 @@ async def test_migrate_can_resume_entity_id_post_migration( await hass.async_stop() await hass.async_block_till_done() - assert "ix_states_entity_id_last_updated_ts" in states_index_names - async with async_test_home_assistant() as hass: recorder_helper.async_initialize_recorder(hass) assert await async_setup_component( From c54717707e0b92a5aa55651a05dbf801944be1f8 Mon Sep 17 00:00:00 2001 From: "Robert C. Maehl" Date: Sat, 6 Jul 2024 05:12:38 -0400 Subject: [PATCH 0026/1635] Direct Users to App-Specific Passwords for iCloud integration to prevent MFA spam (#120945) Co-authored-by: Franck Nijhof --- homeassistant/components/icloud/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/icloud/strings.json b/homeassistant/components/icloud/strings.json index 96db11d4656..22c711e919a 100644 --- a/homeassistant/components/icloud/strings.json +++ b/homeassistant/components/icloud/strings.json @@ -6,7 +6,7 @@ "description": "Enter your credentials", "data": { "username": "[%key:common::config_flow::data::email%]", - "password": "[%key:common::config_flow::data::password%]", + "password": "App-specific password", "with_family": "With family" } }, @@ -14,7 +14,7 @@ "title": "[%key:common::config_flow::title::reauth%]", "description": "Your previously entered password for {username} is no longer working. Update your password to keep using this integration.", "data": { - "password": "[%key:common::config_flow::data::password%]" + "password": "App-specific password" } }, "trusted_device": { From a06af7ee9394fd39c256a1b540657ce316be0367 Mon Sep 17 00:00:00 2001 From: Alan Date: Sat, 6 Jul 2024 10:41:18 +0100 Subject: [PATCH 0027/1635] LLM to handle int attributes (#121037) --- homeassistant/helpers/llm.py | 2 +- tests/helpers/test_llm.py | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index ba307a785ac..506cadbf168 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -483,7 +483,7 @@ def _get_exposed_entities( if attributes := { attr_name: str(attr_value) - if isinstance(attr_value, (Enum, Decimal)) + if isinstance(attr_value, (Enum, Decimal, int)) else attr_value for attr_name, attr_value in state.attributes.items() if attr_name in interesting_attributes diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index ad18aa53071..81fa573852e 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -408,7 +408,7 @@ async def test_assist_api_prompt( hass.states.async_set( entry1.entity_id, "on", - {"friendly_name": "Kitchen", "temperature": Decimal("0.9")}, + {"friendly_name": "Kitchen", "temperature": Decimal("0.9"), "humidity": 65}, ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) @@ -517,9 +517,7 @@ async def test_assist_api_prompt( entry1.entity_id: { "names": "Kitchen", "state": "on", - "attributes": { - "temperature": "0.9", - }, + "attributes": {"temperature": "0.9", "humidity": "65"}, }, entry2.entity_id: { "areas": "Test Area, Alternative name", From 1133c41fa8e88df02bec6bf3e5df319e907bff91 Mon Sep 17 00:00:00 2001 From: Rasmus Lundsgaard Date: Sat, 6 Jul 2024 19:20:14 +0200 Subject: [PATCH 0028/1635] Fix empty list in kodi media_player (#121250) Co-authored-by: Franck Nijhof --- homeassistant/components/kodi/media_player.py | 10 ++++------ 1 file changed, 4 
insertions(+), 6 deletions(-) diff --git a/homeassistant/components/kodi/media_player.py b/homeassistant/components/kodi/media_player.py index 290b3b1e566..46dee891e3a 100644 --- a/homeassistant/components/kodi/media_player.py +++ b/homeassistant/components/kodi/media_player.py @@ -641,12 +641,10 @@ class KodiEntity(MediaPlayerEntity): if self.state == MediaPlayerState.OFF: return state_attr - hdr_type = ( - self._item.get("streamdetails", {}).get("video", [{}])[0].get("hdrtype") - ) - if hdr_type == "": - state_attr["dynamic_range"] = "sdr" - else: + state_attr["dynamic_range"] = "sdr" + if (video_details := self._item.get("streamdetails", {}).get("video")) and ( + hdr_type := video_details[0].get("hdrtype") + ): state_attr["dynamic_range"] = hdr_type return state_attr From 803d9c5a8e80db67c1571cb10665da65012f598d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Teme=C5=A1inko?= Date: Fri, 5 Jul 2024 19:12:09 +0200 Subject: [PATCH 0029/1635] Fix ombi configuration validation (#121314) --- homeassistant/components/ombi/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/ombi/__init__.py b/homeassistant/components/ombi/__init__.py index a4cbe39f3e0..d63f72592f8 100644 --- a/homeassistant/components/ombi/__init__.py +++ b/homeassistant/components/ombi/__init__.py @@ -73,7 +73,7 @@ CONFIG_SCHEMA = vol.Schema( vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean, } ), - cv.has_at_least_one_key("auth"), + cv.has_at_least_one_key(CONF_API_KEY, CONF_PASSWORD), ) }, extra=vol.ALLOW_EXTRA, From 711bdaf37367b49b22b9d7d7e9a2ef192e22dbdf Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Sat, 6 Jul 2024 02:39:24 -0400 Subject: [PATCH 0030/1635] Bump anova-wifi to 0.17.0 (#121337) * Bump anova-wifi to 0.16.0 * Bump to .17 --- homeassistant/components/anova/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/anova/manifest.json b/homeassistant/components/anova/manifest.json index 7e605edc217..7e032f0e361 100644 --- a/homeassistant/components/anova/manifest.json +++ b/homeassistant/components/anova/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/anova", "iot_class": "cloud_push", "loggers": ["anova_wifi"], - "requirements": ["anova-wifi==0.15.0"] + "requirements": ["anova-wifi==0.17.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index f1d77644104..ecbc48d6d8a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -449,7 +449,7 @@ androidtvremote2==0.1.1 anel-pwrctrl-homeassistant==0.0.1.dev2 # homeassistant.components.anova -anova-wifi==0.15.0 +anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 90e59778751..fa92d55f013 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -413,7 +413,7 @@ androidtv[async]==0.0.73 androidtvremote2==0.1.1 # homeassistant.components.anova -anova-wifi==0.15.0 +anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 From 1d7bddf449766f3c108c0131f7802b214be3f845 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sat, 6 Jul 2024 18:54:09 +1000 Subject: [PATCH 0031/1635] Fix initial Wall Connector values in Tessie (#121353) --- homeassistant/components/tessie/entity.py | 2 +- tests/components/tessie/snapshots/test_sensor.ambr | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git 
a/homeassistant/components/tessie/entity.py b/homeassistant/components/tessie/entity.py index d2a59f205fc..4c077ce19db 100644 --- a/homeassistant/components/tessie/entity.py +++ b/homeassistant/components/tessie/entity.py @@ -42,6 +42,7 @@ class TessieBaseEntity( self.key = key self._attr_translation_key = key super().__init__(coordinator) + self._async_update_attrs() @property def _value(self) -> Any: @@ -132,7 +133,6 @@ class TessieEnergyEntity(TessieBaseEntity): self._attr_device_info = data.device super().__init__(coordinator, key) - self._async_update_attrs() class TessieWallConnectorEntity(TessieBaseEntity): diff --git a/tests/components/tessie/snapshots/test_sensor.ambr b/tests/components/tessie/snapshots/test_sensor.ambr index afe229feba0..0a5ff4603aa 100644 --- a/tests/components/tessie/snapshots/test_sensor.ambr +++ b/tests/components/tessie/snapshots/test_sensor.ambr @@ -2120,7 +2120,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0.0', }) # --- # name: test_sensors[sensor.wall_connector_power_2-entry] @@ -2177,7 +2177,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '0.0', }) # --- # name: test_sensors[sensor.wall_connector_state-entry] @@ -2249,7 +2249,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disconnected', }) # --- # name: test_sensors[sensor.wall_connector_state_2-entry] @@ -2321,7 +2321,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': 'disconnected', }) # --- # name: test_sensors[sensor.wall_connector_vehicle-entry] From a5f4c25a2cbeba113f97ce3d5607e1740d4c0bf1 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 6 Jul 2024 19:09:00 +0200 Subject: [PATCH 0032/1635] Bump psutil to 6.0.0 (#121385) --- homeassistant/components/systemmonitor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/systemmonitor/conftest.py | 10 +++++----- tests/components/systemmonitor/test_util.py | 12 +++++------- 5 files changed, 13 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/systemmonitor/manifest.json b/homeassistant/components/systemmonitor/manifest.json index 5e1ef6c02de..236f25bb1ed 100644 --- a/homeassistant/components/systemmonitor/manifest.json +++ b/homeassistant/components/systemmonitor/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/systemmonitor", "iot_class": "local_push", "loggers": ["psutil"], - "requirements": ["psutil-home-assistant==0.0.1", "psutil==5.9.8"] + "requirements": ["psutil-home-assistant==0.0.1", "psutil==6.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index ecbc48d6d8a..7d7dda06b0a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1598,7 +1598,7 @@ proxmoxer==2.0.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==5.9.8 +psutil==6.0.0 # homeassistant.components.pulseaudio_loopback pulsectl==23.5.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fa92d55f013..a0c20bfe3ec 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1275,7 +1275,7 @@ prometheus-client==0.17.1 psutil-home-assistant==0.0.1 # homeassistant.components.systemmonitor -psutil==5.9.8 +psutil==6.0.0 # homeassistant.components.androidtv pure-python-adb[async]==0.3.0.dev0 diff --git a/tests/components/systemmonitor/conftest.py b/tests/components/systemmonitor/conftest.py index e16debdf263..25611481433 
100644 --- a/tests/components/systemmonitor/conftest.py +++ b/tests/components/systemmonitor/conftest.py @@ -174,11 +174,11 @@ def mock_psutil(mock_process: list[MockProcess]) -> Generator: "cpu0-thermal": [shwtemp("cpu0-thermal", 50.0, 60.0, 70.0)] } mock_psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", "", 1, 1), - sdiskpart("test2", "/media/share", "ext4", "", 1, 1), - sdiskpart("test3", "/incorrect", "", "", 1, 1), - sdiskpart("hosts", "/etc/hosts", "bind", "", 1, 1), - sdiskpart("proc", "/proc/run", "proc", "", 1, 1), + sdiskpart("test", "/", "ext4", ""), + sdiskpart("test2", "/media/share", "ext4", ""), + sdiskpart("test3", "/incorrect", "", ""), + sdiskpart("hosts", "/etc/hosts", "bind", ""), + sdiskpart("proc", "/proc/run", "proc", ""), ] mock_psutil.boot_time.return_value = 1708786800.0 mock_psutil.NoSuchProcess = NoSuchProcess diff --git a/tests/components/systemmonitor/test_util.py b/tests/components/systemmonitor/test_util.py index b35c7b2e96c..582707f3574 100644 --- a/tests/components/systemmonitor/test_util.py +++ b/tests/components/systemmonitor/test_util.py @@ -50,21 +50,19 @@ async def test_disk_util( """Test the disk failures.""" mock_psutil.psutil.disk_partitions.return_value = [ - sdiskpart("test", "/", "ext4", "", 1, 1), # Should be ok - sdiskpart("test2", "/media/share", "ext4", "", 1, 1), # Should be ok - sdiskpart("test3", "/incorrect", "", "", 1, 1), # Should be skipped as no type + sdiskpart("test", "/", "ext4", ""), # Should be ok + sdiskpart("test2", "/media/share", "ext4", ""), # Should be ok + sdiskpart("test3", "/incorrect", "", ""), # Should be skipped as no type sdiskpart( - "proc", "/proc/run", "proc", "", 1, 1 + "proc", "/proc/run", "proc", "" ), # Should be skipped as in skipped disk types sdiskpart( "test4", "/tmpfs/", # noqa: S108 "tmpfs", "", - 1, - 1, ), # Should be skipped as in skipped disk types - sdiskpart("test5", "E:", "cd", "cdrom", 1, 1), # Should be skipped as cdrom + sdiskpart("test5", "E:", "cd", "cdrom"), # Should be skipped as cdrom ] mock_config_entry.add_to_hass(hass) From 8017386c73fffa58ef384c9a072917a7c71650ce Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 6 Jul 2024 19:32:27 +0200 Subject: [PATCH 0033/1635] Fix unnecessary logging of turn on/off feature flags in Climate (#121387) --- homeassistant/components/climate/__init__.py | 8 +++ tests/components/climate/test_init.py | 62 ++++++++++++++++++++ 2 files changed, 70 insertions(+) diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index bc81ce6e241..98500d97eb7 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -377,6 +377,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): # Return if integration has migrated already return + supported_features = self.supported_features + if supported_features & ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_ON + ): + # The entity supports both turn_on and turn_off, the backwards compatibility + # checks are not needed + return + supported_features = self.supported_features if not supported_features & ClimateEntityFeature.TURN_OFF and ( type(self).async_turn_off is not ClimateEntity.async_turn_off diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index a459b991203..4756c265aea 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -709,6 +709,68 @@ async def 
test_no_warning_integration_has_migrated( ) +async def test_no_warning_integration_implement_feature_flags( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None +) -> None: + """Test no warning when integration uses the correct feature flags.""" + + class MockClimateEntityTest(MockClimateEntity): + """Mock Climate device.""" + + _attr_supported_features = ( + ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON + ) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTest(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + with patch.object( + MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" + ): + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state is not None + + assert "does not set ClimateEntityFeature" not in caplog.text + assert "implements HVACMode(s):" not in caplog.text + + async def test_turn_on_off_toggle(hass: HomeAssistant) -> None: """Test turn_on/turn_off/toggle methods.""" From 37621e77aecdac8fb7772365711ecfd3eb85060f Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sat, 6 Jul 2024 21:18:02 +0200 Subject: [PATCH 0034/1635] Fix timezone issue in smhi weather (#121389) --- homeassistant/components/smhi/weather.py | 4 ++- .../smhi/snapshots/test_weather.ambr | 36 +++++++++---------- 2 files changed, 21 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/smhi/weather.py b/homeassistant/components/smhi/weather.py index 3d5642a2784..aac4c5d24be 100644 --- a/homeassistant/components/smhi/weather.py +++ b/homeassistant/components/smhi/weather.py @@ -218,7 +218,9 @@ class SmhiWeather(WeatherEntity): data.append( { - ATTR_FORECAST_TIME: forecast.valid_time.isoformat(), + ATTR_FORECAST_TIME: forecast.valid_time.replace( + tzinfo=dt_util.UTC + ).isoformat(), ATTR_FORECAST_NATIVE_TEMP: forecast.temperature_max, ATTR_FORECAST_NATIVE_TEMP_LOW: forecast.temperature_min, ATTR_FORECAST_NATIVE_PRECIPITATION: forecast.total_precipitation, diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index 0d2f6b3b3bf..d825e22d470 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -6,7 +6,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T00:00:00', + 'datetime': '2023-08-08T00:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -19,7 +19,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T01:00:00', + 'datetime': 
'2023-08-08T01:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -32,7 +32,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'clear-night', - 'datetime': '2023-08-08T02:00:00', + 'datetime': '2023-08-08T02:00:00+00:00', 'humidity': 97, 'precipitation': 0.0, 'pressure': 992.0, @@ -45,7 +45,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'sunny', - 'datetime': '2023-08-08T03:00:00', + 'datetime': '2023-08-08T03:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -223,7 +223,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', + 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -236,7 +236,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00', + 'datetime': '2023-08-08T12:00:00+00:00', 'humidity': 97, 'precipitation': 10.6, 'pressure': 984.0, @@ -249,7 +249,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00', + 'datetime': '2023-08-09T12:00:00+00:00', 'humidity': 95, 'precipitation': 6.3, 'pressure': 1001.0, @@ -262,7 +262,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00', + 'datetime': '2023-08-10T12:00:00+00:00', 'humidity': 75, 'precipitation': 4.8, 'pressure': 1011.0, @@ -275,7 +275,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00', + 'datetime': '2023-08-11T12:00:00+00:00', 'humidity': 69, 'precipitation': 0.6, 'pressure': 1015.0, @@ -288,7 +288,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-12T12:00:00', + 'datetime': '2023-08-12T12:00:00+00:00', 'humidity': 82, 'precipitation': 0.0, 'pressure': 1014.0, @@ -301,7 +301,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', + 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, 'pressure': 1013.0, @@ -314,7 +314,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00', + 'datetime': '2023-08-14T12:00:00+00:00', 'humidity': 56, 'precipitation': 0.0, 'pressure': 1015.0, @@ -327,7 +327,7 @@ dict({ 'cloud_coverage': 88, 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00', + 'datetime': '2023-08-15T12:00:00+00:00', 'humidity': 64, 'precipitation': 3.6, 'pressure': 1014.0, @@ -340,7 +340,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00', + 'datetime': '2023-08-16T12:00:00+00:00', 'humidity': 61, 'precipitation': 2.4, 'pressure': 1014.0, @@ -358,7 +358,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', + 'datetime': '2023-08-07T12:00:00+00:00', 'humidity': 96, 'precipitation': 0.0, 'pressure': 991.0, @@ -373,7 +373,7 @@ dict({ 'cloud_coverage': 75, 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', + 'datetime': '2023-08-13T12:00:00+00:00', 'humidity': 59, 'precipitation': 0.0, 'pressure': 1013.0, @@ -388,7 +388,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'fog', - 'datetime': '2023-08-07T09:00:00', + 'datetime': '2023-08-07T09:00:00+00:00', 'humidity': 100, 'precipitation': 0.0, 'pressure': 992.0, @@ -403,7 +403,7 @@ dict({ 'cloud_coverage': 100, 'condition': 'cloudy', - 'datetime': '2023-08-07T15:00:00', + 'datetime': '2023-08-07T15:00:00+00:00', 'humidity': 89, 'precipitation': 0.0, 'pressure': 991.0, From 780f7254c1aac9fc8696f3cbf8a8daf28f42ecd3 Mon Sep 17 00:00:00 2001 
From: G Johansson Date: Sat, 6 Jul 2024 23:36:53 +0200 Subject: [PATCH 0035/1635] Fix feature flag in climate (#121398) --- homeassistant/components/climate/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index 98500d97eb7..2c891779c37 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -379,7 +379,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): supported_features = self.supported_features if supported_features & ( - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_ON + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF ): # The entity supports both turn_on and turn_off, the backwards compatibility # checks are not needed From a72cc3c248bef12dcb482f2220e169679f811872 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 7 Jul 2024 15:11:28 +0200 Subject: [PATCH 0036/1635] Allow current empty feeds to be configured in Feedreader (#121421) --- .../components/feedreader/config_flow.py | 14 ------ .../components/feedreader/strings.json | 7 +-- .../components/feedreader/test_config_flow.py | 50 +++---------------- 3 files changed, 7 insertions(+), 64 deletions(-) diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index 6fa153b8177..d367432ff8c 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -107,13 +107,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): return self.abort_on_import_error(user_input[CONF_URL], "url_error") return self.show_user_form(user_input, {"base": "url_error"}) - if not feed.entries: - if self.context["source"] == SOURCE_IMPORT: - return self.abort_on_import_error( - user_input[CONF_URL], "no_feed_entries" - ) - return self.show_user_form(user_input, {"base": "no_feed_entries"}) - feed_title = feed["feed"]["title"] return self.async_create_entry( @@ -161,13 +154,6 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): step_id="reconfigure_confirm", errors={"base": "url_error"}, ) - if not feed.entries: - return self.show_user_form( - user_input=user_input, - description_placeholders={"name": self._config_entry.title}, - step_id="reconfigure_confirm", - errors={"base": "no_feed_entries"}, - ) self.hass.config_entries.async_update_entry(self._config_entry, data=user_input) return self.async_abort(reason="reconfigure_successful") diff --git a/homeassistant/components/feedreader/strings.json b/homeassistant/components/feedreader/strings.json index 31881b4112a..da66333fa5b 100644 --- a/homeassistant/components/feedreader/strings.json +++ b/homeassistant/components/feedreader/strings.json @@ -18,8 +18,7 @@ "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { - "url_error": "The URL could not be opened.", - "no_feed_entries": "The URL seems not to serve any feed entries." + "url_error": "The URL could not be opened." 
} }, "options": { @@ -38,10 +37,6 @@ "import_yaml_error_url_error": { "title": "The Feedreader YAML configuration import failed", "description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that url is reachable and accessable for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." - }, - "import_yaml_error_no_feed_entries": { - "title": "[%key:component::feedreader::issues::import_yaml_error_url_error::title%]", - "description": "Configuring the Feedreader using YAML is being removed but when trying to import the YAML configuration for `{url}` no feed entries were found.\n\nPlease verify that url serves any feed entries and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually." } } } diff --git a/tests/components/feedreader/test_config_flow.py b/tests/components/feedreader/test_config_flow.py index 48c341492e0..669ca665f6b 100644 --- a/tests/components/feedreader/test_config_flow.py +++ b/tests/components/feedreader/test_config_flow.py @@ -83,16 +83,6 @@ async def test_user_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "url_error"} - # no feed entries returned - feedparser.side_effect = None - feedparser.return_value = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_URL: URL} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - assert result["errors"] == {"base": "no_feed_entries"} - # success feedparser.side_effect = None feedparser.return_value = feed_one_event @@ -141,40 +131,25 @@ async def test_import( assert issue_registry.async_get_issue(HA_DOMAIN, "deprecated_yaml_feedreader") -@pytest.mark.parametrize( - ("side_effect", "return_value", "expected_issue_id"), - [ - ( - urllib.error.URLError("Test"), - None, - "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", - ), - ( - None, - None, - "import_yaml_error_feedreader_no_feed_entries_http_some_rss_local_rss_feed_xml", - ), - ], -) async def test_import_errors( hass: HomeAssistant, issue_registry: ir.IssueRegistry, feedparser, setup_entry, feed_one_event, - side_effect, - return_value, - expected_issue_id, ) -> None: """Test starting an import flow which results in an URL error.""" config_entries = hass.config_entries.async_entries(DOMAIN) assert not config_entries # raise URLError - feedparser.side_effect = side_effect - feedparser.return_value = return_value + feedparser.side_effect = urllib.error.URLError("Test") + feedparser.return_value = None assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_URLS: [URL]}}) - assert issue_registry.async_get_issue(DOMAIN, expected_issue_id) + assert issue_registry.async_get_issue( + DOMAIN, + "import_yaml_error_feedreader_url_error_http_some_rss_local_rss_feed_xml", + ) async def test_reconfigure(hass: HomeAssistant, feedparser) -> None: @@ -248,19 +223,6 @@ async def test_reconfigure_errors( assert result["step_id"] == "reconfigure_confirm" assert result["errors"] == {"base": "url_error"} - # no feed entries returned - feedparser.side_effect = None - feedparser.return_value = None - result = await hass.config_entries.flow.async_configure( - result["flow_id"], - user_input={ - CONF_URL: 
"http://other.rss.local/rss_feed.xml", - }, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reconfigure_confirm" - assert result["errors"] == {"base": "no_feed_entries"} - # success feedparser.side_effect = None feedparser.return_value = feed_one_event From 8825c50671e9cb5d62ef4b02266a654a78213a2c Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 7 Jul 2024 14:54:29 +0200 Subject: [PATCH 0037/1635] Fix MPD config flow (#121431) --- homeassistant/components/mpd/manifest.json | 1 + homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mpd/manifest.json b/homeassistant/components/mpd/manifest.json index e03005fb95a..a361152670a 100644 --- a/homeassistant/components/mpd/manifest.json +++ b/homeassistant/components/mpd/manifest.json @@ -2,6 +2,7 @@ "domain": "mpd", "name": "Music Player Daemon (MPD)", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mpd", "iot_class": "local_polling", "loggers": ["mpd"], diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 463a38feb9f..0020dc91ccd 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -352,6 +352,7 @@ FLOWS = { "motionblinds_ble", "motioneye", "motionmount", + "mpd", "mqtt", "mullvad", "mutesync", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 0ad8ac09c9e..a3db08e57c2 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3814,7 +3814,7 @@ "mpd": { "name": "Music Player Daemon (MPD)", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling" }, "mqtt": { From cadd8521aeed818374e0214e8d7a593e46bbf8e6 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 7 Jul 2024 15:01:58 +0200 Subject: [PATCH 0038/1635] Sort mealie mealplans (#121433) --- homeassistant/components/mealie/calendar.py | 3 ++- .../mealie/snapshots/test_calendar.ambr | 16 ++++++++-------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/mealie/calendar.py b/homeassistant/components/mealie/calendar.py index fb628754f06..1b1c14c2ca0 100644 --- a/homeassistant/components/mealie/calendar.py +++ b/homeassistant/components/mealie/calendar.py @@ -60,7 +60,8 @@ class MealieMealplanCalendarEntity(MealieEntity, CalendarEntity): mealplans = self.coordinator.data[self._entry_type] if not mealplans: return None - return _get_event_from_mealplan(mealplans[0]) + sorted_mealplans = sorted(mealplans, key=lambda x: x.mealplan_date) + return _get_event_from_mealplan(sorted_mealplans[0]) async def async_get_events( self, hass: HomeAssistant, start_date: datetime, end_date: datetime diff --git a/tests/components/mealie/snapshots/test_calendar.ambr b/tests/components/mealie/snapshots/test_calendar.ambr index c3b26e1e9e2..e5a0a697157 100644 --- a/tests/components/mealie/snapshots/test_calendar.ambr +++ b/tests/components/mealie/snapshots/test_calendar.ambr @@ -252,12 +252,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? 
Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", - 'end_time': '2024-01-23 00:00:00', + 'description': 'Dineren met de boys', + 'end_time': '2024-01-22 00:00:00', 'friendly_name': 'Mealie Dinner', 'location': '', - 'message': 'Zoete aardappel curry traybake', - 'start_time': '2024-01-22 00:00:00', + 'message': 'Aquavite', + 'start_time': '2024-01-21 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_dinner', @@ -304,12 +304,12 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'all_day': True, - 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', - 'end_time': '2024-01-24 00:00:00', + 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', + 'end_time': '2024-01-23 00:00:00', 'friendly_name': 'Mealie Lunch', 'location': '', - 'message': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', - 'start_time': '2024-01-23 00:00:00', + 'message': 'All-American Beef Stew Recipe', + 'start_time': '2024-01-22 00:00:00', }), 'context': , 'entity_id': 'calendar.mealie_lunch', From ec105e526508fb719700320dfd66db91f0f69cd1 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 7 Jul 2024 15:03:32 +0200 Subject: [PATCH 0039/1635] Fix Mealie URL field (#121434) --- homeassistant/components/mealie/strings.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mealie/strings.json b/homeassistant/components/mealie/strings.json index 0d67bb89759..7a89fb85128 100644 --- a/homeassistant/components/mealie/strings.json +++ b/homeassistant/components/mealie/strings.json @@ -3,8 +3,11 @@ "step": { "user": { "data": { - "host": "[%key:common::config_flow::data::host%]", + "host": "[%key:common::config_flow::data::url%]", "api_token": "[%key:common::config_flow::data::api_token%]" + }, + "data_description": { + "host": "The URL of your Mealie instance." } } }, From ab94422c18ae4b6da0bb7c0960d3a0b7d24b48fa Mon Sep 17 00:00:00 2001 From: jan iversen Date: Sun, 7 Jul 2024 17:38:17 +0200 Subject: [PATCH 0040/1635] Bump pymodbus to 3.6.9 (#121445) Bump pymodbus 3.6.9. 
--- homeassistant/components/modbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/modbus/manifest.json b/homeassistant/components/modbus/manifest.json index 5635adc9392..292a2ee86a8 100644 --- a/homeassistant/components/modbus/manifest.json +++ b/homeassistant/components/modbus/manifest.json @@ -6,5 +6,5 @@ "iot_class": "local_polling", "loggers": ["pymodbus"], "quality_scale": "platinum", - "requirements": ["pymodbus==3.6.8"] + "requirements": ["pymodbus==3.6.9"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7d7dda06b0a..fdb00d10e4d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2004,7 +2004,7 @@ pymitv==1.4.3 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.8 +pymodbus==3.6.9 # homeassistant.components.monoprice pymonoprice==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0c20bfe3ec..3f89f1a2182 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1576,7 +1576,7 @@ pymeteoclimatic==0.1.0 pymochad==0.2.0 # homeassistant.components.modbus -pymodbus==3.6.8 +pymodbus==3.6.9 # homeassistant.components.monoprice pymonoprice==0.4 From 9512f9eec3e9fb7447d2766f7d843e7bd8f37921 Mon Sep 17 00:00:00 2001 From: "Jason R. Coombs" Date: Sun, 7 Jul 2024 12:02:47 -0400 Subject: [PATCH 0041/1635] Bump jaraco.abode to 5.2.1 (#121446) Bump dependency on jaraco.abode to 5.2.1. Closes #121300 --- homeassistant/components/abode/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/abode/manifest.json b/homeassistant/components/abode/manifest.json index de1000319f1..225edea40ca 100644 --- a/homeassistant/components/abode/manifest.json +++ b/homeassistant/components/abode/manifest.json @@ -9,5 +9,5 @@ }, "iot_class": "cloud_push", "loggers": ["jaraco.abode", "lomond"], - "requirements": ["jaraco.abode==5.1.2"] + "requirements": ["jaraco.abode==5.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index fdb00d10e4d..93b0853f977 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1179,7 +1179,7 @@ isal==1.6.1 ismartgate==5.0.1 # homeassistant.components.abode -jaraco.abode==5.1.2 +jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3f89f1a2182..518f9bef6ae 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -966,7 +966,7 @@ isal==1.6.1 ismartgate==5.0.1 # homeassistant.components.abode -jaraco.abode==5.1.2 +jaraco.abode==5.2.1 # homeassistant.components.jellyfin jellyfin-apiclient-python==1.9.2 From 0a1b46c52fc81e8d575b5068157cd40cf290523e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 7 Jul 2024 13:41:31 -0700 Subject: [PATCH 0042/1635] Bump yalexs to 6.4.2 (#121467) --- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/august/fixtures/get_lock.low_keypad_battery.json | 2 +- tests/components/august/test_sensor.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index f898ce64ce6..83d0e985b8a 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==6.4.1", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==6.4.2", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 93b0853f977..5ac963d62a4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2933,7 +2933,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.1 +yalexs==6.4.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 518f9bef6ae..0196de3f0cb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2292,7 +2292,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.4.1 +yalexs==6.4.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/tests/components/august/fixtures/get_lock.low_keypad_battery.json b/tests/components/august/fixtures/get_lock.low_keypad_battery.json index 08bdfaa76ed..43b5513a527 100644 --- a/tests/components/august/fixtures/get_lock.low_keypad_battery.json +++ b/tests/components/august/fixtures/get_lock.low_keypad_battery.json @@ -36,7 +36,7 @@ "currentFirmwareVersion": "2.27.0", "battery": {}, "batteryLevel": "Low", - "batteryRaw": 170 + "batteryRaw": 128 }, "OfflineKeys": { "created": [], diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 0227ee64ef1..67223e9dff0 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -88,7 +88,7 @@ async def test_create_lock_with_linked_keypad( assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "60" + assert state.state == "62" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry From 21309eeb5d14098a34a28317ef32dbc9c55d76ed Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 28 Jun 2024 13:35:34 +0200 Subject: [PATCH 0043/1635] Bump xiaomi-ble to 0.30.1 (#120743) --- homeassistant/components/xiaomi_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index 1e0a09015ee..f901b9b412e 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": 
["xiaomi-ble==0.30.0"] + "requirements": ["xiaomi-ble==0.30.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5ac963d62a4..a458f4bd0e4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2906,7 +2906,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.0 +xiaomi-ble==0.30.1 # homeassistant.components.knx xknx==2.12.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0196de3f0cb..69b08293542 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2268,7 +2268,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.0 +xiaomi-ble==0.30.1 # homeassistant.components.knx xknx==2.12.2 From 6bf9ec69f31bc2751ba877d94d472643a033cf22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ovidiu=20D=2E=20Ni=C8=9Ban?= Date: Mon, 8 Jul 2024 00:13:43 +0300 Subject: [PATCH 0044/1635] Bump xiaomi-ble to 0.30.2 (#121471) --- homeassistant/components/xiaomi_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index f901b9b412e..21e9bc45bb8 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.30.1"] + "requirements": ["xiaomi-ble==0.30.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index a458f4bd0e4..76ce0add19c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2906,7 +2906,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.1 +xiaomi-ble==0.30.2 # homeassistant.components.knx xknx==2.12.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 69b08293542..1635796ddf8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2268,7 +2268,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.1 +xiaomi-ble==0.30.2 # homeassistant.components.knx xknx==2.12.2 From 51a6bb1c22ce3aa59518de20bb070449db909447 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Mon, 8 Jul 2024 11:00:21 +0200 Subject: [PATCH 0045/1635] Include hass device ID in mobile app get_config webhook (#121496) --- homeassistant/components/mobile_app/webhook.py | 5 +++++ tests/components/mobile_app/test_webhook.py | 4 +++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mobile_app/webhook.py b/homeassistant/components/mobile_app/webhook.py index e93b4c5ea99..125e4d27247 100644 --- a/homeassistant/components/mobile_app/webhook.py +++ b/homeassistant/components/mobile_app/webhook.py @@ -721,10 +721,15 @@ async def webhook_get_config( """Handle a get config webhook.""" hass_config = hass.config.as_dict() + device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][ + config_entry.data[CONF_WEBHOOK_ID] + ] + resp = { "latitude": hass_config["latitude"], "longitude": hass_config["longitude"], "elevation": hass_config["elevation"], + "hass_device_id": device.id, "unit_system": hass_config["unit_system"], "location_name": hass_config["location_name"], "time_zone": hass_config["time_zone"], diff --git a/tests/components/mobile_app/test_webhook.py 
b/tests/components/mobile_app/test_webhook.py index ca5c9936409..77798c57f10 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -10,7 +10,7 @@ from nacl.secret import SecretBox import pytest from homeassistant.components.camera import CameraEntityFeature -from homeassistant.components.mobile_app.const import CONF_SECRET, DOMAIN +from homeassistant.components.mobile_app.const import CONF_SECRET, DATA_DEVICES, DOMAIN from homeassistant.components.tag import EVENT_TAG_SCANNED from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN from homeassistant.const import ( @@ -243,6 +243,7 @@ async def test_webhook_handle_get_config( """Test that we can get config properly.""" webhook_id = create_registrations[1]["webhook_id"] webhook_url = f"/api/webhook/{webhook_id}" + device: dr.DeviceEntry = hass.data[DOMAIN][DATA_DEVICES][webhook_id] # Create two entities for sensor in ( @@ -280,6 +281,7 @@ async def test_webhook_handle_get_config( "latitude": hass_config["latitude"], "longitude": hass_config["longitude"], "elevation": hass_config["elevation"], + "hass_device_id": device.id, "unit_system": hass_config["unit_system"], "location_name": hass_config["location_name"], "time_zone": hass_config["time_zone"], From 4f2c3df518d7c2f2943aa363f01faabb8bff867d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 8 Jul 2024 07:58:18 -0700 Subject: [PATCH 0046/1635] Fix person tracking in unifiprotect (#121528) --- homeassistant/components/unifiprotect/binary_sensor.py | 2 +- homeassistant/components/unifiprotect/switch.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifiprotect/binary_sensor.py b/homeassistant/components/unifiprotect/binary_sensor.py index c4e1aa87df2..75156308b1a 100644 --- a/homeassistant/components/unifiprotect/binary_sensor.py +++ b/homeassistant/components/unifiprotect/binary_sensor.py @@ -284,7 +284,7 @@ CAMERA_SENSORS: tuple[ProtectBinaryEntityDescription, ...] = ( name="Tracking: person", icon="mdi:walk", entity_category=EntityCategory.DIAGNOSTIC, - ufp_required_field="is_ptz", + ufp_required_field="feature_flags.is_ptz", ufp_value="is_person_tracking_enabled", ufp_perm=PermRequired.NO_WRITE, ), diff --git a/homeassistant/components/unifiprotect/switch.py b/homeassistant/components/unifiprotect/switch.py index ca56a602209..50372d47ea8 100644 --- a/homeassistant/components/unifiprotect/switch.py +++ b/homeassistant/components/unifiprotect/switch.py @@ -319,7 +319,7 @@ CAMERA_SWITCHES: tuple[ProtectSwitchEntityDescription, ...] 
= ( name="Tracking: person", icon="mdi:walk", entity_category=EntityCategory.CONFIG, - ufp_required_field="is_ptz", + ufp_required_field="feature_flags.is_ptz", ufp_value="is_person_tracking_enabled", ufp_set_method="set_person_track", ufp_perm=PermRequired.WRITE, From e0b01ee94ea9105fe3a38601f8ac8ff5d7d5838f Mon Sep 17 00:00:00 2001 From: Christoph Date: Mon, 8 Jul 2024 21:38:09 +0200 Subject: [PATCH 0047/1635] Remove homematic state_class from GAS_POWER sensor (#121533) --- homeassistant/components/homematic/sensor.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/homematic/sensor.py b/homeassistant/components/homematic/sensor.py index eebcad95446..b33a725db0f 100644 --- a/homeassistant/components/homematic/sensor.py +++ b/homeassistant/components/homematic/sensor.py @@ -156,7 +156,6 @@ SENSOR_DESCRIPTIONS: dict[str, SensorEntityDescription] = { key="GAS_POWER", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, device_class=SensorDeviceClass.GAS, - state_class=SensorStateClass.MEASUREMENT, ), "GAS_ENERGY_COUNTER": SensorEntityDescription( key="GAS_ENERGY_COUNTER", From 138b68ecc02f12abc41fadc2bf45360a8da31d11 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 9 Jul 2024 08:30:10 +0200 Subject: [PATCH 0048/1635] Update vehicle to 2.2.2 (#121556) --- homeassistant/components/rdw/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/rdw/manifest.json b/homeassistant/components/rdw/manifest.json index f44dc7e0f12..7af3e861347 100644 --- a/homeassistant/components/rdw/manifest.json +++ b/homeassistant/components/rdw/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["vehicle==2.2.1"] + "requirements": ["vehicle==2.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 76ce0add19c..2006d9f37bc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2827,7 +2827,7 @@ vacuum-map-parser-roborock==0.1.2 vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.1 +vehicle==2.2.2 # homeassistant.components.velbus velbus-aio==2024.7.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1635796ddf8..6479e1958f3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2201,7 +2201,7 @@ vacuum-map-parser-roborock==0.1.2 vallox-websocket-api==5.3.0 # homeassistant.components.rdw -vehicle==2.2.1 +vehicle==2.2.2 # homeassistant.components.velbus velbus-aio==2024.7.5 From 50802f84f0895bbb53d8f528585f23f45cb1ccd6 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 9 Jul 2024 08:29:57 +0200 Subject: [PATCH 0049/1635] Update tailscale to 0.6.1 (#121557) --- homeassistant/components/tailscale/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tailscale/manifest.json b/homeassistant/components/tailscale/manifest.json index 14f4206f44f..24f485fcdbd 100644 --- a/homeassistant/components/tailscale/manifest.json +++ b/homeassistant/components/tailscale/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["tailscale==0.6.0"] + "requirements": ["tailscale==0.6.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2006d9f37bc..97ccc3b2a4a 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -2683,7 +2683,7 @@ systembridgeconnector==4.0.3 systembridgemodels==4.0.4 # homeassistant.components.tailscale -tailscale==0.6.0 +tailscale==0.6.1 # homeassistant.components.tank_utility tank-utility==1.5.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6479e1958f3..ecb39324da3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2096,7 +2096,7 @@ systembridgeconnector==4.0.3 systembridgemodels==4.0.4 # homeassistant.components.tailscale -tailscale==0.6.0 +tailscale==0.6.1 # homeassistant.components.tellduslive tellduslive==0.10.11 From 5a04a886cfcd1b7f7bcc33cd79b0519fd3f1879f Mon Sep 17 00:00:00 2001 From: Glenn Waters Date: Tue, 9 Jul 2024 02:32:34 -0400 Subject: [PATCH 0050/1635] Fix upb config flow connect (#121571) --- homeassistant/components/upb/config_flow.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index 1db0b0b6fe3..40f49e57c60 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -39,12 +39,13 @@ async def _validate_input(data): url = _make_url_from_data(data) upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path}) + + upb.connect(_connected_callback) + if not upb.config_ok: _LOGGER.error("Missing or invalid UPB file: %s", file_path) raise InvalidUpbFile - upb.connect(_connected_callback) - with suppress(TimeoutError): async with asyncio.timeout(VALIDATE_TIMEOUT): await connected_event.wait() From 73d1973625a2fdd5c96c9b359beae3d1e7a5210c Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Tue, 9 Jul 2024 12:52:04 +0200 Subject: [PATCH 0051/1635] Bump pyenphase to 1.20.6 (#121583) bump pyenphase to 1.20.6 --- homeassistant/components/enphase_envoy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index b3c117556bf..f5d2778fc9d 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.20.3"], + "requirements": ["pyenphase==1.20.6"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." 
diff --git a/requirements_all.txt b/requirements_all.txt index 97ccc3b2a4a..b3bb75aac31 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1827,7 +1827,7 @@ pyeiscp==0.0.7 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.3 +pyenphase==1.20.6 # homeassistant.components.envisalink pyenvisalink==4.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ecb39324da3..f6f634d091b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1435,7 +1435,7 @@ pyefergy==22.5.0 pyegps==0.2.5 # homeassistant.components.enphase_envoy -pyenphase==1.20.3 +pyenphase==1.20.6 # homeassistant.components.everlights pyeverlights==0.1.0 From 37c09dbdb6f8c102d4b65b53fbbe74268b899c75 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Tue, 9 Jul 2024 21:10:15 +0200 Subject: [PATCH 0052/1635] Allow ambilight when we have connection (philips_js) (#121620) --- homeassistant/components/philips_js/light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/philips_js/light.py b/homeassistant/components/philips_js/light.py index 8e500592704..1d63b2062e6 100644 --- a/homeassistant/components/philips_js/light.py +++ b/homeassistant/components/philips_js/light.py @@ -385,6 +385,6 @@ class PhilipsTVLightEntity(PhilipsJsEntity, LightEntity): """Return true if entity is available.""" if not super().available: return False - if not self.coordinator.api.on: + if not self._tv.on: return False - return self.coordinator.api.powerstate == "On" + return True From a4c5dee082c6d30a84edbf5ab1f7ed1f155d4221 Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Wed, 10 Jul 2024 09:29:22 +0200 Subject: [PATCH 0053/1635] Update frontend to 20240710.0 (#121651) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 186f725c643..57de177da9c 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240705.0"] + "requirements": ["home-assistant-frontend==20240710.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 41d3af2ad47..6b43f288762 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.1 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240705.0 +home-assistant-frontend==20240710.0 home-assistant-intents==2024.7.3 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index b3bb75aac31..38f8b6a44cb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1090,7 +1090,7 @@ hole==0.8.0 holidays==0.52 # homeassistant.components.frontend -home-assistant-frontend==20240705.0 +home-assistant-frontend==20240710.0 # homeassistant.components.conversation home-assistant-intents==2024.7.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f6f634d091b..eb46f1e9c40 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -895,7 +895,7 @@ hole==0.8.0 holidays==0.52 # 
homeassistant.components.frontend -home-assistant-frontend==20240705.0 +home-assistant-frontend==20240710.0 # homeassistant.components.conversation home-assistant-intents==2024.7.3 From fd0c26cd567a5904c646251a3129bfa09720667d Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Wed, 10 Jul 2024 11:11:54 +0200 Subject: [PATCH 0054/1635] Small fix in velbus cover for the assumed states (#121656) --- homeassistant/components/velbus/cover.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/velbus/cover.py b/homeassistant/components/velbus/cover.py index 823d682d339..8b9d927f3d7 100644 --- a/homeassistant/components/velbus/cover.py +++ b/homeassistant/components/velbus/cover.py @@ -66,12 +66,16 @@ class VelbusCover(VelbusEntity, CoverEntity): @property def is_opening(self) -> bool: """Return if the cover is opening.""" - return self._channel.is_opening() + if opening := self._channel.is_opening(): + self._assumed_closed = False + return opening @property def is_closing(self) -> bool: """Return if the cover is closing.""" - return self._channel.is_closing() + if closing := self._channel.is_closing(): + self._assumed_closed = True + return closing @property def current_cover_position(self) -> int | None: @@ -89,13 +93,11 @@ class VelbusCover(VelbusEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" await self._channel.open() - self._assumed_closed = False @api_call async def async_close_cover(self, **kwargs: Any) -> None: """Close the cover.""" await self._channel.close() - self._assumed_closed = True @api_call async def async_stop_cover(self, **kwargs: Any) -> None: From ec0910e3da20ec1ad813b2e89eb8966d95f11d8f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 10 Jul 2024 09:47:15 +0200 Subject: [PATCH 0055/1635] Block icloud3 custom integration from breaking the recorder (#121658) --- homeassistant/loader.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index 9afad610420..a654904d69b 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -102,6 +102,13 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = { "mydolphin_plus": BlockedIntegration( AwesomeVersion("1.0.13"), "crashes Home Assistant" ), + # Added in 2024.7.2 because of + # https://github.com/gcobb321/icloud3/issues/349 + # Note: Current version 3.0.5.2, the fixed version is a guesstimate, + # as no solution is available at time of writing. 
+ "icloud3": BlockedIntegration( + AwesomeVersion("3.0.5.3"), "prevents recorder from working" + ), } DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey( From ac3eecc879b537c25974c37fae5ca76c54e8913f Mon Sep 17 00:00:00 2001 From: tronikos Date: Wed, 10 Jul 2024 02:21:38 -0700 Subject: [PATCH 0056/1635] Handle errors in Fully Kiosk camera (#121659) --- homeassistant/components/fully_kiosk/camera.py | 11 +++++++++-- tests/components/fully_kiosk/test_camera.py | 7 +++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/fully_kiosk/camera.py b/homeassistant/components/fully_kiosk/camera.py index 99419271c26..d55875e094f 100644 --- a/homeassistant/components/fully_kiosk/camera.py +++ b/homeassistant/components/fully_kiosk/camera.py @@ -2,9 +2,12 @@ from __future__ import annotations +from fullykiosk import FullyKioskError + from homeassistant.components.camera import Camera, CameraEntityFeature from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import DOMAIN @@ -36,8 +39,12 @@ class FullyCameraEntity(FullyKioskEntity, Camera): self, width: int | None = None, height: int | None = None ) -> bytes | None: """Return bytes of camera image.""" - image_bytes: bytes = await self.coordinator.fully.getCamshot() - return image_bytes + try: + image_bytes: bytes = await self.coordinator.fully.getCamshot() + except FullyKioskError as err: + raise HomeAssistantError(err) from err + else: + return image_bytes async def async_turn_on(self) -> None: """Turn on camera.""" diff --git a/tests/components/fully_kiosk/test_camera.py b/tests/components/fully_kiosk/test_camera.py index 4e48749eebb..a2e7067ff1b 100644 --- a/tests/components/fully_kiosk/test_camera.py +++ b/tests/components/fully_kiosk/test_camera.py @@ -2,6 +2,7 @@ from unittest.mock import MagicMock +from fullykiosk import FullyKioskError import pytest from homeassistant.components.camera import async_get_image @@ -41,6 +42,12 @@ async def test_camera( assert mock_fully_kiosk.getCamshot.call_count == 1 assert image.content == b"image_bytes" + fully_kiosk_error = FullyKioskError("error", "status") + mock_fully_kiosk.getCamshot.side_effect = fully_kiosk_error + with pytest.raises(HomeAssistantError) as error: + await async_get_image(hass, entity_camera) + assert error.value.args[0] == fully_kiosk_error + mock_fully_kiosk.getSettings.return_value = {"motionDetection": False} await hass.services.async_call( "camera", From 9c83af37894ad43c6f8590d6f1eedc3afe631463 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 10 Jul 2024 10:24:44 +0200 Subject: [PATCH 0057/1635] Block places <=2.7.0 custom integration from breaking the recorder (#121662) --- homeassistant/loader.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index a654904d69b..f889f8fcb6e 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -109,6 +109,11 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = { "icloud3": BlockedIntegration( AwesomeVersion("3.0.5.3"), "prevents recorder from working" ), + # Added in 2024.7.2 because of + # https://github.com/custom-components/places/issues/289 + "places": BlockedIntegration( + AwesomeVersion("2.7.1"), "prevents recorder from working" + ), } DATA_COMPONENTS: HassKey[dict[str, ModuleType | 
ComponentProtocol]] = HassKey( From 2151086b0a14de80ced8add03543a5c83ca22daa Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Wed, 10 Jul 2024 10:32:42 +0200 Subject: [PATCH 0058/1635] Fix state for Matter Locks (including optional door sensor) (#121665) --- .../components/matter/binary_sensor.py | 16 ++++ homeassistant/components/matter/lock.py | 73 ++++++++++++------- 2 files changed, 62 insertions(+), 27 deletions(-) diff --git a/homeassistant/components/matter/binary_sensor.py b/homeassistant/components/matter/binary_sensor.py index b71c35c9cce..a6d68682e9d 100644 --- a/homeassistant/components/matter/binary_sensor.py +++ b/homeassistant/components/matter/binary_sensor.py @@ -145,4 +145,20 @@ DISCOVERY_SCHEMAS = [ required_attributes=(clusters.BooleanState.Attributes.StateValue,), device_type=(device_types.RainSensor,), ), + MatterDiscoverySchema( + platform=Platform.BINARY_SENSOR, + entity_description=MatterBinarySensorEntityDescription( + key="LockDoorStateSensor", + device_class=BinarySensorDeviceClass.DOOR, + # pylint: disable=unnecessary-lambda + measurement_to_ha=lambda x: { + clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen: True, + clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed: True, + clusters.DoorLock.Enums.DoorStateEnum.kDoorForcedOpen: True, + clusters.DoorLock.Enums.DoorStateEnum.kDoorClosed: False, + }.get(x), + ), + entity_class=MatterBinarySensor, + required_attributes=(clusters.DoorLock.Attributes.DoorState,), + ), ] diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 1cc85fa897e..66b5040184c 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from typing import Any from chip.clusters import Objects as clusters @@ -38,6 +39,7 @@ class MatterLock(MatterEntity, LockEntity): """Representation of a Matter lock.""" features: int | None = None + _optimistic_timer: asyncio.TimerHandle | None = None @property def code_format(self) -> str | None: @@ -90,9 +92,15 @@ class MatterLock(MatterEntity, LockEntity): async def async_lock(self, **kwargs: Any) -> None: """Lock the lock with pin if needed.""" - # optimistically signal locking to state machine - self._attr_is_locking = True - self.async_write_ha_state() + if not self._attr_is_locked: + # optimistically signal locking to state machine + self._attr_is_locking = True + self.async_write_ha_state() + # the lock should acknowledge the command with an attribute update + # but bad things may happen, so guard against it with a timer. + self._optimistic_timer = self.hass.loop.call_later( + 5, self._reset_optimistic_state + ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None await self.send_device_command( @@ -101,9 +109,15 @@ class MatterLock(MatterEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock with pin if needed.""" - # optimistically signal unlocking to state machine - self._attr_is_unlocking = True - self.async_write_ha_state() + if self._attr_is_locked: + # optimistically signal unlocking to state machine + self._attr_is_unlocking = True + self.async_write_ha_state() + # the lock should acknowledge the command with an attribute update + # but bad things may happen, so guard against it with a timer. 
+ self._optimistic_timer = self.hass.loop.call_later( + 5, self._reset_optimistic_state + ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None if self.supports_unbolt: @@ -120,9 +134,14 @@ class MatterLock(MatterEntity, LockEntity): async def async_open(self, **kwargs: Any) -> None: """Open the door latch.""" - # optimistically signal unlocking to state machine - self._attr_is_unlocking = True + # optimistically signal opening to state machine + self._attr_is_opening = True self.async_write_ha_state() + # the lock should acknowledge the command with an attribute update + # but bad things may happen, so guard against it with a timer. + self._optimistic_timer = self.hass.loop.call_later( + 5, self._reset_optimistic_state + ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None await self.send_device_command( @@ -145,38 +164,38 @@ class MatterLock(MatterEntity, LockEntity): ) # always reset the optimisically (un)locking state on state update - self._attr_is_locking = False - self._attr_is_unlocking = False + self._reset_optimistic_state(write_state=False) LOGGER.debug("Lock state: %s for %s", lock_state, self.entity_id) + if lock_state is clusters.DoorLock.Enums.DlLockState.kUnlatched: + self._attr_is_locked = False + self._attr_is_open = True if lock_state is clusters.DoorLock.Enums.DlLockState.kLocked: self._attr_is_locked = True + self._attr_is_open = False elif lock_state in ( clusters.DoorLock.Enums.DlLockState.kUnlocked, - clusters.DoorLock.Enums.DlLockState.kUnlatched, clusters.DoorLock.Enums.DlLockState.kNotFullyLocked, ): self._attr_is_locked = False + self._attr_is_open = False else: - # According to the matter docs a null state can happen during device startup. + # Treat any other state as unknown. + # NOTE: A null state can happen during device startup. 
self._attr_is_locked = None + self._attr_is_open = None - if self.supports_door_position_sensor: - door_state = self.get_matter_attribute_value( - clusters.DoorLock.Attributes.DoorState - ) - - assert door_state is not None - - LOGGER.debug("Door state: %s for %s", door_state, self.entity_id) - - self._attr_is_jammed = ( - door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorJammed - ) - self._attr_is_open = ( - door_state is clusters.DoorLock.Enums.DoorStateEnum.kDoorOpen - ) + @callback + def _reset_optimistic_state(self, write_state: bool = True) -> None: + if self._optimistic_timer and not self._optimistic_timer.cancelled(): + self._optimistic_timer.cancel() + self._optimistic_timer = None + self._attr_is_locking = False + self._attr_is_unlocking = False + self._attr_is_opening = False + if write_state: + self.async_write_ha_state() DISCOVERY_SCHEMAS = [ From 05ce3d35b3f2eef7ba30398ee59ceb6f27f635e7 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Wed, 10 Jul 2024 11:20:26 +0200 Subject: [PATCH 0059/1635] Matter lock state follow-up (#121669) --- homeassistant/components/matter/lock.py | 6 +++--- tests/components/matter/fixtures/nodes/door-lock.json | 2 +- tests/components/matter/test_door_lock.py | 11 ++++++++--- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 66b5040184c..ae01faa3bc7 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -99,7 +99,7 @@ class MatterLock(MatterEntity, LockEntity): # the lock should acknowledge the command with an attribute update # but bad things may happen, so guard against it with a timer. self._optimistic_timer = self.hass.loop.call_later( - 5, self._reset_optimistic_state + 30, self._reset_optimistic_state ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None @@ -116,7 +116,7 @@ class MatterLock(MatterEntity, LockEntity): # the lock should acknowledge the command with an attribute update # but bad things may happen, so guard against it with a timer. self._optimistic_timer = self.hass.loop.call_later( - 5, self._reset_optimistic_state + 30, self._reset_optimistic_state ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None @@ -140,7 +140,7 @@ class MatterLock(MatterEntity, LockEntity): # the lock should acknowledge the command with an attribute update # but bad things may happen, so guard against it with a timer. 
self._optimistic_timer = self.hass.loop.call_later( - 5, self._reset_optimistic_state + 30 if self._attr_is_locked else 5, self._reset_optimistic_state ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None diff --git a/tests/components/matter/fixtures/nodes/door-lock.json b/tests/components/matter/fixtures/nodes/door-lock.json index 8a3f0fd68dd..b6231e04af4 100644 --- a/tests/components/matter/fixtures/nodes/door-lock.json +++ b/tests/components/matter/fixtures/nodes/door-lock.json @@ -469,7 +469,7 @@ "1/47/65531": [ 0, 1, 2, 14, 15, 16, 19, 65528, 65529, 65530, 65531, 65532, 65533 ], - "1/257/0": 1, + "1/257/0": 0, "1/257/1": 0, "1/257/2": true, "1/257/3": 1, diff --git a/tests/components/matter/test_door_lock.py b/tests/components/matter/test_door_lock.py index 84f0e58a647..461cc1b7f3d 100644 --- a/tests/components/matter/test_door_lock.py +++ b/tests/components/matter/test_door_lock.py @@ -8,11 +8,10 @@ import pytest from homeassistant.components.lock import ( STATE_LOCKED, - STATE_OPEN, STATE_UNLOCKED, LockEntityFeature, ) -from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_UNKNOWN +from homeassistant.const import ATTR_CODE, STATE_LOCKING, STATE_OPENING, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.entity_registry as er @@ -64,6 +63,7 @@ async def test_lock( ) matter_client.send_device_command.reset_mock() + await hass.async_block_till_done() state = hass.states.get("lock.mock_door_lock_lock") assert state assert state.state == STATE_LOCKING @@ -208,9 +208,14 @@ async def test_lock_with_unbolt( timed_request_timeout_ms=1000, ) + await hass.async_block_till_done() + state = hass.states.get("lock.mock_door_lock_lock") + assert state + assert state.state == STATE_OPENING + set_node_attribute(door_lock_with_unbolt, 1, 257, 3, 0) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_OPEN + assert state.state == STATE_LOCKED From 38a44676ebc626752b9844325ccc11d42da9c5bf Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 10 Jul 2024 11:30:16 +0200 Subject: [PATCH 0060/1635] Block variable <=3.4.4 custom integration from breaking the recorder (#121670) --- homeassistant/loader.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/loader.py b/homeassistant/loader.py index f889f8fcb6e..9acc1682602 100644 --- a/homeassistant/loader.py +++ b/homeassistant/loader.py @@ -114,6 +114,11 @@ BLOCKED_CUSTOM_INTEGRATIONS: dict[str, BlockedIntegration] = { "places": BlockedIntegration( AwesomeVersion("2.7.1"), "prevents recorder from working" ), + # Added in 2024.7.2 because of + # https://github.com/enkama/hass-variables/issues/120 + "variable": BlockedIntegration( + AwesomeVersion("3.4.4"), "prevents recorder from working" + ), } DATA_COMPONENTS: HassKey[dict[str, ModuleType | ComponentProtocol]] = HassKey( From 71370758a882d1123c7fdb541186baadcd6e078f Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 10 Jul 2024 12:06:02 +0200 Subject: [PATCH 0061/1635] Bump version to 2024.7.2 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 33087b0bfc1..8587f9e6137 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final 
= 2024 MINOR_VERSION: Final = 7 -PATCH_VERSION: Final = "1" +PATCH_VERSION: Final = "2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 138d1dd80b8..82c29948e3c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.1" +version = "2024.7.2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From ef7d68bfd6dcbdf6d797a73eeb65f0bc03828153 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Fri, 19 Jul 2024 12:26:40 +0200 Subject: [PATCH 0062/1635] Fix reauth error and exception in ista EcoTrend integration (#121482) --- .../components/ista_ecotrend/coordinator.py | 50 +++++++++---------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/ista_ecotrend/coordinator.py b/homeassistant/components/ista_ecotrend/coordinator.py index 8d55574f0a1..0f14cd06fe3 100644 --- a/homeassistant/components/ista_ecotrend/coordinator.py +++ b/homeassistant/components/ista_ecotrend/coordinator.py @@ -8,6 +8,7 @@ from typing import Any from pyecotrend_ista import KeycloakError, LoginError, PyEcotrendIsta, ServerError +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed @@ -21,6 +22,8 @@ _LOGGER = logging.getLogger(__name__) class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Ista EcoTrend data update coordinator.""" + config_entry: ConfigEntry + def __init__(self, hass: HomeAssistant, ista: PyEcotrendIsta) -> None: """Initialize ista EcoTrend data update coordinator.""" super().__init__( @@ -35,11 +38,14 @@ class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def _async_update_data(self): """Fetch ista EcoTrend data.""" - if not self.details: - self.details = await self.async_get_details() - try: + await self.hass.async_add_executor_job(self.ista.login) + + if not self.details: + self.details = await self.async_get_details() + return await self.hass.async_add_executor_job(self.get_consumption_data) + except ServerError as e: raise UpdateFailed( "Unable to connect and retrieve data from ista EcoTrend, try again later" @@ -48,7 +54,9 @@ class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]): raise ConfigEntryAuthFailed( translation_domain=DOMAIN, translation_key="authentication_exception", - translation_placeholders={CONF_EMAIL: self.ista._email}, # noqa: SLF001 + translation_placeholders={ + CONF_EMAIL: self.config_entry.data[CONF_EMAIL] + }, ) from e def get_consumption_data(self) -> dict[str, Any]: @@ -61,26 +69,16 @@ class IstaCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def async_get_details(self) -> dict[str, Any]: """Retrieve details of consumption units.""" - try: - result = await self.hass.async_add_executor_job( - self.ista.get_consumption_unit_details + + result = await self.hass.async_add_executor_job( + self.ista.get_consumption_unit_details + ) + + return { + consumption_unit: next( + details + for details in result["consumptionUnits"] + if details["id"] == consumption_unit ) - except ServerError as e: - raise UpdateFailed( - "Unable to connect and retrieve data from ista EcoTrend, try again later" - ) from e - 
except (LoginError, KeycloakError) as e: - raise ConfigEntryAuthFailed( - translation_domain=DOMAIN, - translation_key="authentication_exception", - translation_placeholders={CONF_EMAIL: self.ista._email}, # noqa: SLF001 - ) from e - else: - return { - consumption_unit: next( - details - for details in result["consumptionUnits"] - if details["id"] == consumption_unit - ) - for consumption_unit in self.ista.get_uuids() - } + for consumption_unit in self.ista.get_uuids() + } From 37f37f728783f192ac61f24e2cf1fca5d3e046d6 Mon Sep 17 00:00:00 2001 From: Jan Stienstra <65826735+j-stienstra@users.noreply.github.com> Date: Wed, 10 Jul 2024 23:54:02 +0200 Subject: [PATCH 0063/1635] Retain Jellyfin config flow input on connection issue (#121618) --- homeassistant/components/jellyfin/config_flow.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/jellyfin/config_flow.py b/homeassistant/components/jellyfin/config_flow.py index 4798a07b9cd..baecbcfb941 100644 --- a/homeassistant/components/jellyfin/config_flow.py +++ b/homeassistant/components/jellyfin/config_flow.py @@ -97,7 +97,11 @@ class JellyfinConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + step_id="user", + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, ) async def async_step_reauth( From ec8e6398048882601f443a303b98b496d2033d8d Mon Sep 17 00:00:00 2001 From: Tomek Porozynski <36776636+ontaptom@users.noreply.github.com> Date: Wed, 10 Jul 2024 23:48:08 +0200 Subject: [PATCH 0064/1635] Update Supla async_set_cover_position to use "REVEAL_PARTIALLY" (#121663) --- homeassistant/components/supla/cover.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/supla/cover.py b/homeassistant/components/supla/cover.py index 4cdee04b149..37b64c375eb 100644 --- a/homeassistant/components/supla/cover.py +++ b/homeassistant/components/supla/cover.py @@ -71,7 +71,9 @@ class SuplaCoverEntity(SuplaEntity, CoverEntity): async def async_set_cover_position(self, **kwargs: Any) -> None: """Move the cover to a specific position.""" - await self.async_action("REVEAL", percentage=kwargs.get(ATTR_POSITION)) + await self.async_action( + "REVEAL_PARTIALLY", percentage=kwargs.get(ATTR_POSITION) + ) @property def is_closed(self) -> bool | None: From 10cdf64f90a199ab1d7a1cf3e30cbfc804977915 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lucas=20Mind=C3=AAllo=20de=20Andrade?= Date: Wed, 10 Jul 2024 15:16:36 -0300 Subject: [PATCH 0065/1635] Bump sunweg 3.0.2 (#121684) --- homeassistant/components/sunweg/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/sunweg/manifest.json b/homeassistant/components/sunweg/manifest.json index bcf1ad9dae2..998d3610735 100644 --- a/homeassistant/components/sunweg/manifest.json +++ b/homeassistant/components/sunweg/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sunweg/", "iot_class": "cloud_polling", "loggers": ["sunweg"], - "requirements": ["sunweg==3.0.1"] + "requirements": ["sunweg==3.0.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 38f8b6a44cb..0a6550c8c43 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2662,7 +2662,7 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.1 
+sunweg==3.0.2 # homeassistant.components.surepetcare surepy==0.9.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eb46f1e9c40..9f354c8b646 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2081,7 +2081,7 @@ stringcase==1.2.0 subarulink==0.7.11 # homeassistant.components.sunweg -sunweg==3.0.1 +sunweg==3.0.2 # homeassistant.components.surepetcare surepy==0.9.0 From ad5cbf0da67ea4c33ec7aa39aa7feecffdd6205c Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Wed, 10 Jul 2024 17:46:39 +0200 Subject: [PATCH 0066/1635] Allow enigma2 devices to use different source bouquets (#121686) --- homeassistant/components/enigma2/__init__.py | 6 +++++- homeassistant/components/enigma2/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/enigma2/__init__.py b/homeassistant/components/enigma2/__init__.py index 4e4f8bdb687..de8283a5533 100644 --- a/homeassistant/components/enigma2/__init__.py +++ b/homeassistant/components/enigma2/__init__.py @@ -16,6 +16,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_create_clientsession +from .const import CONF_SOURCE_BOUQUET + type Enigma2ConfigEntry = ConfigEntry[OpenWebIfDevice] PLATFORMS = [Platform.MEDIA_PLAYER] @@ -35,7 +37,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: Enigma2ConfigEntry) -> b hass, verify_ssl=entry.data[CONF_VERIFY_SSL], base_url=base_url ) - entry.runtime_data = OpenWebIfDevice(session) + entry.runtime_data = OpenWebIfDevice( + session, source_bouquet=entry.options.get(CONF_SOURCE_BOUQUET) + ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/enigma2/manifest.json b/homeassistant/components/enigma2/manifest.json index ef08314e541..538cfb56388 100644 --- a/homeassistant/components/enigma2/manifest.json +++ b/homeassistant/components/enigma2/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["openwebif"], - "requirements": ["openwebifpy==4.2.4"] + "requirements": ["openwebifpy==4.2.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0a6550c8c43..3be48c03ae9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1492,7 +1492,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.4 +openwebifpy==4.2.5 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9f354c8b646..d9c49acb59b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1207,7 +1207,7 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.4 +openwebifpy==4.2.5 # homeassistant.components.opower opower==0.4.7 From 269fb235279bc681d12bc06c798834c036cc19cb Mon Sep 17 00:00:00 2001 From: Steven B <51370195+sdb9696@users.noreply.github.com> Date: Thu, 11 Jul 2024 16:10:47 +0100 Subject: [PATCH 0067/1635] Fix tplink bug changing color temp on bulbs with light effects (#121696) --- homeassistant/components/tplink/light.py | 4 ++-- tests/components/tplink/test_light.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index 22e7c523d1a..9b7dd499c97 100644 --- 
a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -392,11 +392,11 @@ class TPLinkLightEffectEntity(TPLinkLightEntity): kwargs[ATTR_EFFECT], brightness=brightness, transition=transition ) elif ATTR_COLOR_TEMP_KELVIN in kwargs: - if self.effect: + if self.effect and self.effect != EFFECT_OFF: # If there is an effect in progress # we have to clear the effect # before we can set a color temp - await self._light_module.set_hsv(0, 0, brightness) + await self._effect_module.set_effect(LightEffect.LIGHT_EFFECTS_OFF) await self._async_set_color_temp( kwargs[ATTR_COLOR_TEMP_KELVIN], brightness, transition ) diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index bb814d1f5d3..590274b8405 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -533,16 +533,16 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None: assert state.attributes[ATTR_EFFECT_LIST] == ["Off", "Effect1", "Effect2"] # Ensure setting color temp when an effect - # is in progress calls set_hsv to clear the effect + # is in progress calls set_effect to clear the effect await hass.services.async_call( LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: entity_id, ATTR_COLOR_TEMP_KELVIN: 4000}, blocking=True, ) - light.set_hsv.assert_called_once_with(0, 0, None) + light_effect.set_effect.assert_called_once_with(LightEffect.LIGHT_EFFECTS_OFF) light.set_color_temp.assert_called_once_with(4000, brightness=None, transition=None) - light.set_hsv.reset_mock() + light_effect.set_effect.reset_mock() light.set_color_temp.reset_mock() await hass.services.async_call( From 98df46f3ea9324476ba132fc903ed88247b084b7 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 10 Jul 2024 21:53:11 +0200 Subject: [PATCH 0068/1635] Bump knocki to 0.3.0 (#121704) --- homeassistant/components/knocki/config_flow.py | 4 +++- homeassistant/components/knocki/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/knocki/test_config_flow.py | 8 ++++++-- 5 files changed, 12 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/knocki/config_flow.py b/homeassistant/components/knocki/config_flow.py index 724c65f83df..654dd4a4d1f 100644 --- a/homeassistant/components/knocki/config_flow.py +++ b/homeassistant/components/knocki/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from knocki import KnockiClient, KnockiConnectionError +from knocki import KnockiClient, KnockiConnectionError, KnockiInvalidAuthError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -45,6 +45,8 @@ class KnockiConfigFlow(ConfigFlow, domain=DOMAIN): raise except KnockiConnectionError: errors["base"] = "cannot_connect" + except KnockiInvalidAuthError: + errors["base"] = "invalid_auth" except Exception: # noqa: BLE001 LOGGER.exception("Error logging into the Knocki API") errors["base"] = "unknown" diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index e78e9856d62..cad3e156c6c 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_push", "loggers": ["knocki"], - "requirements": ["knocki==0.2.0"] + "requirements": ["knocki==0.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3be48c03ae9..0919effd0bc 100644 --- 
a/requirements_all.txt +++ b/requirements_all.txt @@ -1203,7 +1203,7 @@ kegtron-ble==0.4.0 kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.2.0 +knocki==0.3.0 # homeassistant.components.knx knx-frontend==2024.1.20.105944 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d9c49acb59b..51bc970af1f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -981,7 +981,7 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.2.0 +knocki==0.3.0 # homeassistant.components.knx knx-frontend==2024.1.20.105944 diff --git a/tests/components/knocki/test_config_flow.py b/tests/components/knocki/test_config_flow.py index baf43c3ad30..188175035da 100644 --- a/tests/components/knocki/test_config_flow.py +++ b/tests/components/knocki/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from knocki import KnockiConnectionError +from knocki import KnockiConnectionError, KnockiInvalidAuthError import pytest from homeassistant.components.knocki.const import DOMAIN @@ -72,7 +72,11 @@ async def test_duplcate_entry( @pytest.mark.parametrize(("field"), ["login", "link"]) @pytest.mark.parametrize( ("exception", "error"), - [(KnockiConnectionError, "cannot_connect"), (Exception, "unknown")], + [ + (KnockiConnectionError, "cannot_connect"), + (KnockiInvalidAuthError, "invalid_auth"), + (Exception, "unknown"), + ], ) async def test_exceptions( hass: HomeAssistant, From 372649069e861f2cea781ad3099e974ae20d8e9a Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Wed, 10 Jul 2024 23:08:25 +0200 Subject: [PATCH 0069/1635] Bump pyloadapi to v1.3.2 (#121709) --- .../components/pyload/coordinator.py | 2 +- homeassistant/components/pyload/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/pyload/test_sensor.py | 22 +++++++++++++++++++ 5 files changed, 26 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/pyload/coordinator.py b/homeassistant/components/pyload/coordinator.py index c55ca4c1630..7eadefcd260 100644 --- a/homeassistant/components/pyload/coordinator.py +++ b/homeassistant/components/pyload/coordinator.py @@ -30,7 +30,7 @@ class PyLoadData: speed: float download: bool reconnect: bool - captcha: bool + captcha: bool | None = None free_space: int diff --git a/homeassistant/components/pyload/manifest.json b/homeassistant/components/pyload/manifest.json index fe1888478f8..788cdd1eb05 100644 --- a/homeassistant/components/pyload/manifest.json +++ b/homeassistant/components/pyload/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_polling", "loggers": ["pyloadapi"], "quality_scale": "platinum", - "requirements": ["PyLoadAPI==1.2.0"] + "requirements": ["PyLoadAPI==1.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0919effd0bc..ec8dd306c4a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -60,7 +60,7 @@ PyFlume==0.6.5 PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.2.0 +PyLoadAPI==1.3.2 # homeassistant.components.mvglive PyMVGLive==1.1.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 51bc970af1f..4873cddfc04 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -51,7 +51,7 @@ PyFlume==0.6.5 PyFronius==0.7.3 # homeassistant.components.pyload -PyLoadAPI==1.2.0 +PyLoadAPI==1.3.2 # homeassistant.components.met_eireann PyMetEireann==2021.8.0 diff --git a/tests/components/pyload/test_sensor.py b/tests/components/pyload/test_sensor.py index a44c9c8bf91..3e18faca12b 
100644 --- a/tests/components/pyload/test_sensor.py +++ b/tests/components/pyload/test_sensor.py @@ -157,3 +157,25 @@ async def test_deprecated_yaml( assert issue_registry.async_get_issue( domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" ) + + +async def test_pyload_pre_0_5_0( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pyloadapi: AsyncMock, +) -> None: + """Test setup of the pyload sensor platform.""" + mock_pyloadapi.get_status.return_value = { + "pause": False, + "active": 1, + "queue": 6, + "total": 37, + "speed": 5405963.0, + "download": True, + "reconnect": False, + } + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED From 4ab180f01644ab88ac5d1e1cbf9102043e40a338 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 10 Jul 2024 14:06:58 -0700 Subject: [PATCH 0070/1635] Fix update happening too early in unifiprotect (#121714) --- homeassistant/components/unifiprotect/entity.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/unifiprotect/entity.py b/homeassistant/components/unifiprotect/entity.py index 7eceb861955..97325135de5 100644 --- a/homeassistant/components/unifiprotect/entity.py +++ b/homeassistant/components/unifiprotect/entity.py @@ -189,7 +189,6 @@ class BaseProtectEntity(Entity): self._async_get_ufp_enabled = description.get_ufp_enabled self._async_set_device_info() - self._async_update_device_from_protect(device) self._state_getters = tuple( partial(attrgetter(attr), self) for attr in self._state_attrs ) @@ -264,6 +263,7 @@ class BaseProtectEntity(Entity): self.async_on_remove( self.data.async_subscribe(self.device.mac, self._async_updated_event) ) + self._async_update_device_from_protect(self.device) class ProtectDeviceEntity(BaseProtectEntity): From ebe7bc06866f6605c8e491ddb8ff130c7e98ddcc Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Wed, 10 Jul 2024 23:22:03 +0200 Subject: [PATCH 0071/1635] Bump knocki to 0.3.1 (#121717) --- homeassistant/components/knocki/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index cad3e156c6c..f35827b8213 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_push", "loggers": ["knocki"], - "requirements": ["knocki==0.3.0"] + "requirements": ["knocki==0.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index ec8dd306c4a..bfa675fed76 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1203,7 +1203,7 @@ kegtron-ble==0.4.0 kiwiki-client==0.1.1 # homeassistant.components.knocki -knocki==0.3.0 +knocki==0.3.1 # homeassistant.components.knx knx-frontend==2024.1.20.105944 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4873cddfc04..09b458b53e3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -981,7 +981,7 @@ justnimbus==0.7.4 kegtron-ble==0.4.0 # homeassistant.components.knocki -knocki==0.3.0 +knocki==0.3.1 # homeassistant.components.knx knx-frontend==2024.1.20.105944 From 85952421425ff84c8893c9e988fc7ecaa0c54247 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Wed, 10 Jul 2024 23:53:11 +0200 Subject: [PATCH 0072/1635] Fix bad access 
to UniFi runtime_data when not assigned (#121725) * Fix bad access to runtime_data when not assigned * Fix review comment * Clean up if statements --- homeassistant/components/unifi/config_flow.py | 13 +++---- tests/components/unifi/test_config_flow.py | 39 +++++++++++++++++++ 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/unifi/config_flow.py b/homeassistant/components/unifi/config_flow.py index e93b59b0673..b5ad1ea2ff0 100644 --- a/homeassistant/components/unifi/config_flow.py +++ b/homeassistant/components/unifi/config_flow.py @@ -164,13 +164,12 @@ class UnifiFlowHandler(ConfigFlow, domain=UNIFI_DOMAIN): config_entry = self.reauth_config_entry abort_reason = "reauth_successful" - if ( - config_entry is not None - and config_entry.state is not ConfigEntryState.NOT_LOADED - ): - hub = config_entry.runtime_data - - if hub and hub.available: + if config_entry: + if ( + config_entry.state is ConfigEntryState.LOADED + and (hub := config_entry.runtime_data) + and hub.available + ): return self.async_abort(reason="already_configured") return self.async_update_reload_and_abort( diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py index 7b37437cd1d..9ae3af19b46 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -1,5 +1,6 @@ """Test UniFi Network config flow.""" +from collections.abc import Callable import socket from unittest.mock import PropertyMock, patch @@ -338,6 +339,44 @@ async def test_reauth_flow_update_configuration( assert config_entry.data[CONF_PASSWORD] == "new_pass" +async def test_reauth_flow_update_configuration_on_not_loaded_entry( + hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] +) -> None: + """Verify reauth flow can update hub configuration on a not loaded entry.""" + with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): + config_entry = await config_entry_factory() + + result = await hass.config_entries.flow.async_init( + UNIFI_DOMAIN, + context={ + "source": SOURCE_REAUTH, + "unique_id": config_entry.unique_id, + "entry_id": config_entry.entry_id, + }, + data=config_entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "1.2.3.4", + CONF_USERNAME: "new_name", + CONF_PASSWORD: "new_pass", + CONF_PORT: 1234, + CONF_VERIFY_SSL: True, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert config_entry.data[CONF_HOST] == "1.2.3.4" + assert config_entry.data[CONF_USERNAME] == "new_name" + assert config_entry.data[CONF_PASSWORD] == "new_pass" + + @pytest.mark.parametrize("client_payload", [CLIENTS]) @pytest.mark.parametrize("device_payload", [DEVICES]) @pytest.mark.parametrize("wlan_payload", [WLANS]) From 3d8afe7cb87a8e8f1ca91220155f68e4245f2ded Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ab=C3=ADlio=20Costa?= Date: Thu, 11 Jul 2024 08:07:18 +0100 Subject: [PATCH 0073/1635] Update Idasen Desk library to 2.6.2 (#121729) --- homeassistant/components/idasen_desk/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/idasen_desk/manifest.json b/homeassistant/components/idasen_desk/manifest.json index a09d155b5b0..17a5f519274 100644 --- 
a/homeassistant/components/idasen_desk/manifest.json +++ b/homeassistant/components/idasen_desk/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/idasen_desk", "iot_class": "local_push", "quality_scale": "silver", - "requirements": ["idasen-ha==2.6.1"] + "requirements": ["idasen-ha==2.6.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index bfa675fed76..e9aec3fd756 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1137,7 +1137,7 @@ ical==8.1.1 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.1 +idasen-ha==2.6.2 # homeassistant.components.network ifaddr==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 09b458b53e3..f02183553a6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -933,7 +933,7 @@ ical==8.1.1 icmplib==3.0 # homeassistant.components.idasen_desk -idasen-ha==2.6.1 +idasen-ha==2.6.2 # homeassistant.components.network ifaddr==0.2.0 From 68841b3d8a46afe85396adc5e40d84fdb1d82274 Mon Sep 17 00:00:00 2001 From: tronikos Date: Thu, 11 Jul 2024 01:14:11 -0700 Subject: [PATCH 0074/1635] Bump opower to 0.5.2 to fix 403 forbidden errors for users with multiple accounts (#121762) --- homeassistant/components/opower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index d419fdcb043..28c2e8ba2a8 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.4.7"] + "requirements": ["opower==0.5.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index e9aec3fd756..28b1f2f67b6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1501,7 +1501,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.4.7 +opower==0.5.2 # homeassistant.components.oralb oralb-ble==0.17.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f02183553a6..ef01832108b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1210,7 +1210,7 @@ openhomedevice==2.2.0 openwebifpy==4.2.5 # homeassistant.components.opower -opower==0.4.7 +opower==0.5.2 # homeassistant.components.oralb oralb-ble==0.17.6 From 3b8e736fe39e0a720fa694c1b9ea00e60ae46d1c Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Thu, 11 Jul 2024 08:23:10 -0700 Subject: [PATCH 0075/1635] Pin mashumaro version >= 3.13.1 for python 3.12.4 compatibility. (#121782) Pin mashumaro version for python 3.12.4 compatibility. --- homeassistant/package_constraints.txt | 3 +++ script/gen_requirements_all.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6b43f288762..fcf79258c25 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -136,6 +136,9 @@ backoff>=2.0 # v2 has breaking changes (#99218). pydantic==1.10.17 +# Required for Python 3.12.4 compatibility (#119223). 
+mashumaro>=3.13.1 + # Breaks asyncio # https://github.com/pubnub/python/issues/130 pubnub!=6.4.0 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 434b4d0071f..3c593a2bdf7 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -157,6 +157,9 @@ backoff>=2.0 # v2 has breaking changes (#99218). pydantic==1.10.17 +# Required for Python 3.12.4 compatibility (#119223). +mashumaro>=3.13.1 + # Breaks asyncio # https://github.com/pubnub/python/issues/130 pubnub!=6.4.0 From 6aaaba6419949b4706283c22de17beb6221c2274 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Thu, 11 Jul 2024 17:22:10 +0200 Subject: [PATCH 0076/1635] Bump pytedee_async to 0.2.20 (#121783) --- homeassistant/components/tedee/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tedee/manifest.json b/homeassistant/components/tedee/manifest.json index 24df4cff95c..4f071267a25 100644 --- a/homeassistant/components/tedee/manifest.json +++ b/homeassistant/components/tedee/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["pytedee_async"], "quality_scale": "platinum", - "requirements": ["pytedee-async==0.2.17"] + "requirements": ["pytedee-async==0.2.20"] } diff --git a/requirements_all.txt b/requirements_all.txt index 28b1f2f67b6..1d0d96bbbd7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2209,7 +2209,7 @@ pyswitchbee==1.8.0 pytautulli==23.1.1 # homeassistant.components.tedee -pytedee-async==0.2.17 +pytedee-async==0.2.20 # homeassistant.components.tfiac pytfiac==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ef01832108b..5a831e5cd59 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1742,7 +1742,7 @@ pyswitchbee==1.8.0 pytautulli==23.1.1 # homeassistant.components.tedee -pytedee-async==0.2.17 +pytedee-async==0.2.20 # homeassistant.components.motionmount python-MotionMount==2.0.0 From 63b14d14c17e5a1c195f72f30ad0c772ab6500bb Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 11 Jul 2024 10:18:45 -0500 Subject: [PATCH 0077/1635] Add some missing tplink ouis (#121785) --- homeassistant/components/tplink/manifest.json | 8 ++++++-- homeassistant/generated/dhcp.py | 9 +++++++-- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 3786a2565c2..337e05726ac 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -181,7 +181,7 @@ "macaddress": "1C61B4*" }, { - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5CE931*" }, { @@ -189,9 +189,13 @@ "macaddress": "3C52A1*" }, { - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5C628B*" }, + { + "hostname": "l[59]*", + "macaddress": "14EBB6*" + }, { "hostname": "tp*", "macaddress": "5C628B*" diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index e898f64d128..f6df799d01e 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -827,7 +827,7 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5CE931*", }, { @@ -837,9 +837,14 @@ DHCP: Final[list[dict[str, str | bool]]] = [ }, { "domain": "tplink", - "hostname": "l5*", + "hostname": "l[59]*", "macaddress": "5C628B*", }, + { + "domain": "tplink", + "hostname": "l[59]*", + "macaddress": "14EBB6*", + }, { "domain": "tplink", "hostname": "tp*", From 1e6c96c6eb19a121e5a8012059ea413f9ef50bc8 Mon Sep 17 00:00:00 2001 From: Glenn Waters Date: Thu, 11 Jul 2024 17:14:22 -0400 Subject: [PATCH 0078/1635] Use async_connect in newly bumped 0.5.8 UPB library (#121789) --- homeassistant/components/upb/__init__.py | 2 +- homeassistant/components/upb/config_flow.py | 2 +- homeassistant/components/upb/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/upb/test_config_flow.py | 6 +++--- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/upb/__init__.py b/homeassistant/components/upb/__init__.py index f2db6ff1b3c..2e5a69393d4 100644 --- a/homeassistant/components/upb/__init__.py +++ b/homeassistant/components/upb/__init__.py @@ -26,7 +26,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b file = config_entry.data[CONF_FILE_PATH] upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file}) - upb.connect() + await upb.async_connect() hass.data.setdefault(DOMAIN, {}) hass.data[DOMAIN][config_entry.entry_id] = {"upb": upb} diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index 40f49e57c60..fec93a51202 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -40,7 +40,7 @@ async def _validate_input(data): upb = upb_lib.UpbPim({"url": url, "UPStartExportFile": file_path}) - upb.connect(_connected_callback) + await upb.async_connect(_connected_callback) if not upb.config_ok: _LOGGER.error("Missing or invalid UPB file: %s", file_path) diff --git a/homeassistant/components/upb/manifest.json b/homeassistant/components/upb/manifest.json index b208edbc0e5..6b49c859771 100644 --- a/homeassistant/components/upb/manifest.json +++ b/homeassistant/components/upb/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/upb", "iot_class": "local_push", "loggers": ["upb_lib"], - "requirements": ["upb-lib==0.5.7"] + 
"requirements": ["upb-lib==0.5.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1d0d96bbbd7..453accf6064 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2807,7 +2807,7 @@ unifiled==0.11 universal-silabs-flasher==0.0.20 # homeassistant.components.upb -upb-lib==0.5.7 +upb-lib==0.5.8 # homeassistant.components.upcloud upcloud-api==2.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5a831e5cd59..7f0fdcf52c0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2181,7 +2181,7 @@ unifi-discovery==1.2.0 universal-silabs-flasher==0.0.20 # homeassistant.components.upb -upb-lib==0.5.7 +upb-lib==0.5.8 # homeassistant.components.upcloud upcloud-api==2.5.1 diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index d5d6d70bb68..54aeb00e89a 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -1,7 +1,7 @@ """Test the UPB Control config flow.""" from asyncio import TimeoutError -from unittest.mock import MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN @@ -15,11 +15,11 @@ def mocked_upb(sync_complete=True, config_ok=True): def _upb_lib_connect(callback): callback() - upb_mock = MagicMock() + upb_mock = AsyncMock() type(upb_mock).network_id = PropertyMock(return_value="42") type(upb_mock).config_ok = PropertyMock(return_value=config_ok) if sync_complete: - upb_mock.connect.side_effect = _upb_lib_connect + upb_mock.async_connect.side_effect = _upb_lib_connect return patch( "homeassistant.components.upb.config_flow.upb_lib.UpbPim", return_value=upb_mock ) From 0f69c58ba9349f726e8266b47104648435fe5f01 Mon Sep 17 00:00:00 2001 From: Steven B <51370195+sdb9696@users.noreply.github.com> Date: Thu, 11 Jul 2024 18:19:31 +0100 Subject: [PATCH 0079/1635] Bump python-kasa to 0.7.0.4 (#121791) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 337e05726ac..a345f64e4b2 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -301,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.0.3"] + "requirements": ["python-kasa[speedups]==0.7.0.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 453accf6064..5da52fbb075 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2275,7 +2275,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.3 +python-kasa[speedups]==0.7.0.4 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7f0fdcf52c0..23d8fd9adda 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1775,7 +1775,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.3 +python-kasa[speedups]==0.7.0.4 # homeassistant.components.matter python-matter-server==6.2.2 From 976902f22ce0d52969a53f07d36115252fb71230 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Fri, 12 Jul 2024 15:51:18 +0200 Subject: [PATCH 
0080/1635] Add missing translations to Roborock (#121796) --- homeassistant/components/roborock/strings.json | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index c7fc34386fd..5d95812e845 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -214,7 +214,8 @@ "unknown": "Unknown", "locked": "Locked", "air_drying_stopping": "Air drying stopping", - "egg_attack": "Cupid mode" + "egg_attack": "Cupid mode", + "mapping": "Mapping" } }, "total_cleaning_time": { @@ -282,7 +283,8 @@ "deep": "Deep", "deep_plus": "Deep+", "custom": "Custom", - "fast": "Fast" + "fast": "Fast", + "smart_mode": "SmartPlan" } }, "mop_intensity": { @@ -293,10 +295,12 @@ "mild": "Mild", "medium": "Medium", "moderate": "Moderate", + "max": "Max", "high": "High", "intense": "Intense", "custom": "[%key:component::roborock::entity::select::mop_mode::state::custom%]", - "custom_water_flow": "Custom water flow" + "custom_water_flow": "Custom water flow", + "smart_mode": "[%key:component::roborock::entity::select::mop_mode::state::smart_mode%]" } } }, @@ -338,13 +342,14 @@ "custom": "[%key:component::roborock::entity::select::mop_mode::state::custom%]", "gentle": "Gentle", "off": "[%key:common::state::off%]", - "max": "Max", + "max": "[%key:component::roborock::entity::select::mop_intensity::state::max%]", "max_plus": "Max plus", "medium": "Medium", "quiet": "Quiet", "silent": "Silent", "standard": "[%key:component::roborock::entity::select::mop_mode::state::standard%]", - "turbo": "Turbo" + "turbo": "Turbo", + "smart_mode": "[%key:component::roborock::entity::select::mop_mode::state::smart_mode%]" } } } From e0b90c4b36bc7672126b37d990482da35be438e4 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sat, 13 Jul 2024 16:10:09 +0200 Subject: [PATCH 0081/1635] Fix alexa does to check `current_position` correctly when handling cover range changes (#121798) --- homeassistant/components/alexa/handlers.py | 2 +- tests/components/alexa/test_smart_home.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 47e09db1166..1849dcd1862 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -1497,7 +1497,7 @@ async def async_api_adjust_range( if instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}": range_delta = int(range_delta * 20) if range_delta_default else int(range_delta) service = SERVICE_SET_COVER_POSITION - if not (current := entity.attributes.get(cover.ATTR_POSITION)): + if not (current := entity.attributes.get(cover.ATTR_CURRENT_POSITION)): msg = f"Unable to determine {entity.entity_id} current position" raise AlexaInvalidValueError(msg) position = response_value = min(100, max(0, range_delta + current)) diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index d502dce7d01..fb27c91eea7 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -1979,7 +1979,7 @@ async def test_cover_position( "friendly_name": "Test cover range", "device_class": "blind", "supported_features": supported_features, - "position": position, + "current_position": position, }, ) appliance = await discovery_test(device, hass) @@ -2296,7 +2296,7 @@ async def test_cover_position_range( "friendly_name": "Test cover 
range", "device_class": "blind", "supported_features": 7, - "position": 30, + "current_position": 30, }, ) appliance = await discovery_test(device, hass) @@ -4658,7 +4658,7 @@ async def test_cover_semantics_position_and_tilt(hass: HomeAssistant) -> None: "friendly_name": "Test cover semantics", "device_class": "blind", "supported_features": 255, - "position": 30, + "current_position": 30, "tilt_position": 30, }, ) From 56a9167ed28a1dc4c62772ad3d30020efcbc59df Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 12 Jul 2024 09:13:55 +0200 Subject: [PATCH 0082/1635] Reolink media second lens (#121800) DUO lens camera distinguish between lenses for media playback --- homeassistant/components/reolink/media_source.py | 4 ++++ tests/components/reolink/test_media_source.py | 5 ++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index 7a77e482f56..ae865b77913 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -5,6 +5,7 @@ from __future__ import annotations import datetime as dt import logging +from reolink_aio.api import DUAL_LENS_MODELS from reolink_aio.enums import VodRequestType from homeassistant.components.camera import DOMAIN as CAM_DOMAIN, DynamicStreamSettings @@ -184,6 +185,9 @@ class ReolinkVODMediaSource(MediaSource): if device.name_by_user is not None: device_name = device.name_by_user + if host.api.model in DUAL_LENS_MODELS: + device_name = f"{device_name} lens {ch}" + children.append( BrowseMediaSource( domain=DOMAIN, diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 0d86106e8e5..66ed32ca823 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -54,6 +54,7 @@ TEST_FILE_NAME = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00" TEST_FILE_NAME_MP4 = f"{TEST_YEAR}{TEST_MONTH}{TEST_DAY}{TEST_HOUR}{TEST_MINUTE}00.mp4" TEST_STREAM = "main" TEST_CHANNEL = "0" +TEST_CAM_NAME = "Cam new name" TEST_MIME_TYPE = "application/x-mpegURL" TEST_MIME_TYPE_MP4 = "video/mp4" @@ -130,6 +131,7 @@ async def test_browsing( """Test browsing the Reolink three.""" entry_id = config_entry.entry_id reolink_connect.api_version.return_value = 1 + reolink_connect.model = "Reolink TrackMix PoE" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(entry_id) is True @@ -137,7 +139,7 @@ async def test_browsing( entries = dr.async_entries_for_config_entry(device_registry, entry_id) assert len(entries) > 0 - device_registry.async_update_device(entries[0].id, name_by_user="Cam new name") + device_registry.async_update_device(entries[0].id, name_by_user=TEST_CAM_NAME) caplog.set_level(logging.DEBUG) @@ -149,6 +151,7 @@ async def test_browsing( assert browse.title == "Reolink" assert browse.identifier is None assert browse.children[0].identifier == browse_root_id + assert browse.children[0].title == f"{TEST_CAM_NAME} lens 0" # browse resolution select browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_root_id}") From e9344ae101a8abd413e052e157f6b687c4a2333f Mon Sep 17 00:00:00 2001 From: ollo69 <60491700+ollo69@users.noreply.github.com> Date: Thu, 11 Jul 2024 23:12:33 +0200 Subject: [PATCH 0083/1635] Bump PySwitchbot to 0.48.1 (#121804) --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 
+- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index dc858a688cb..0cbbd70a805 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.48.0"] + "requirements": ["PySwitchbot==0.48.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5da52fbb075..95dfb023ab0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -90,7 +90,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.0 +PySwitchbot==0.48.1 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 23d8fd9adda..32d9722af73 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -78,7 +78,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.0 +PySwitchbot==0.48.1 # homeassistant.components.syncthru PySyncThru==0.7.10 From 41104324ecf125cdea7f1488a7c1338039b8076f Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Fri, 5 Jul 2024 13:26:44 +1000 Subject: [PATCH 0084/1635] Bump aiolifx to 1.0.4 (#121267) --- CODEOWNERS | 2 ++ homeassistant/components/lifx/manifest.json | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 765f1624c33..560f2efc0cc 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -777,6 +777,8 @@ build.json @home-assistant/supervisor /tests/components/lg_netcast/ @Drafteed @splinter98 /homeassistant/components/lidarr/ @tkdrob /tests/components/lidarr/ @tkdrob +/homeassistant/components/lifx/ @Djelibeybi +/tests/components/lifx/ @Djelibeybi /homeassistant/components/light/ @home-assistant/core /tests/components/light/ @home-assistant/core /homeassistant/components/linear_garage_door/ @IceBotYT diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 6aa7fdc6305..5e68c1bab35 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -1,7 +1,7 @@ { "domain": "lifx", "name": "LIFX", - "codeowners": [], + "codeowners": ["@Djelibeybi"], "config_flow": true, "dependencies": ["network"], "dhcp": [ @@ -48,7 +48,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.2", + "aiolifx==1.0.4", "aiolifx-effects==0.3.2", "aiolifx-themes==0.4.15" ] diff --git a/requirements_all.txt b/requirements_all.txt index 95dfb023ab0..896d541e69d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -282,7 +282,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.2 +aiolifx==1.0.4 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 32d9722af73..f6a7328e927 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -255,7 +255,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.2 +aiolifx==1.0.4 # homeassistant.components.livisi aiolivisi==0.0.19 From ad07bdb62bd0614164187fde099cb7e65ef2b0c5 Mon Sep 17 00:00:00 2001 
From: Avi Miller Date: Fri, 12 Jul 2024 13:21:45 +1000 Subject: [PATCH 0085/1635] Bump aiolifx to 1.0.5 (#121824) --- homeassistant/components/lifx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 5e68c1bab35..3d0bd1d73d1 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -48,7 +48,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.4", + "aiolifx==1.0.5", "aiolifx-effects==0.3.2", "aiolifx-themes==0.4.15" ] diff --git a/requirements_all.txt b/requirements_all.txt index 896d541e69d..5583044dd7c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -282,7 +282,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.4 +aiolifx==1.0.5 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f6a7328e927..59afcd79a95 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -255,7 +255,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.4 +aiolifx==1.0.5 # homeassistant.components.livisi aiolivisi==0.0.19 From a835750252ea94caf9de23d1274f84ad4d8245d7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 12 Jul 2024 08:54:38 -0500 Subject: [PATCH 0086/1635] Log add/remove index complete at the same level as when it starts (#121852) --- .../components/recorder/migration.py | 34 ++++++++----------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index cf003f72af4..41a429b7269 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -313,11 +313,9 @@ def _create_index( index = index_list[0] _LOGGER.debug("Creating %s index", index_name) _LOGGER.warning( - ( - "Adding index `%s` to table `%s`. Note: this can take several " - "minutes on large databases and slow computers. Please " - "be patient!" - ), + "Adding index `%s` to table `%s`. Note: this can take several " + "minutes on large databases and slow computers. Please " + "be patient!", index_name, table_name, ) @@ -331,7 +329,7 @@ def _create_index( "Index %s already exists on %s, continuing", index_name, table_name ) - _LOGGER.debug("Finished creating %s", index_name) + _LOGGER.warning("Finished adding index `%s` to table `%s`", index_name, table_name) def _execute_or_collect_error( @@ -364,11 +362,9 @@ def _drop_index( DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT. """ _LOGGER.warning( - ( - "Dropping index `%s` from table `%s`. Note: this can take several " - "minutes on large databases and slow computers. Please " - "be patient!" - ), + "Dropping index `%s` from table `%s`. Note: this can take several " + "minutes on large databases and slow computers. 
Please " + "be patient!", index_name, table_name, ) @@ -377,8 +373,8 @@ def _drop_index( index_to_drop = get_index_by_name(session, table_name, index_name) if index_to_drop is None: - _LOGGER.debug( - "The index %s on table %s no longer exists", index_name, table_name + _LOGGER.warning( + "The index `%s` on table `%s` no longer exists", index_name, table_name ) return @@ -395,18 +391,16 @@ def _drop_index( f"DROP INDEX {index_to_drop}", ): if _execute_or_collect_error(session_maker, query, errors): - _LOGGER.debug( - "Finished dropping index %s from table %s", index_name, table_name + _LOGGER.warning( + "Finished dropping index `%s` from table `%s`", index_name, table_name ) return if not quiet: _LOGGER.warning( - ( - "Failed to drop index `%s` from table `%s`. Schema " - "Migration will continue; this is not a " - "critical operation: %s" - ), + "Failed to drop index `%s` from table `%s`. Schema " + "Migration will continue; this is not a " + "critical operation: %s", index_name, table_name, errors, From 24ed003471d8554691e5a2a5214337c34b5ef3e5 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Mon, 15 Jul 2024 09:20:32 +0200 Subject: [PATCH 0087/1635] Fix opentherm_gw availability (#121892) --- homeassistant/components/opentherm_gw/__init__.py | 5 +++++ homeassistant/components/opentherm_gw/binary_sensor.py | 7 ++----- homeassistant/components/opentherm_gw/climate.py | 2 +- homeassistant/components/opentherm_gw/sensor.py | 7 ++----- 4 files changed, 10 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index 46cc6f3daa0..a0d791fddd4 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -470,3 +470,8 @@ class OpenThermGatewayDevice: async_dispatcher_send(self.hass, self.update_signal, status) self.gateway.subscribe(handle_report) + + @property + def connected(self): + """Report whether or not we are connected to the gateway.""" + return self.gateway.connection.connected diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index ad8d09afa89..7c3760653e8 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -48,6 +48,7 @@ class OpenThermBinarySensor(BinarySensorEntity): _attr_should_poll = False _attr_entity_registry_enabled_default = False + _attr_available = False def __init__(self, gw_dev, var, source, device_class, friendly_name_format): """Initialize the binary sensor.""" @@ -85,14 +86,10 @@ class OpenThermBinarySensor(BinarySensorEntity): _LOGGER.debug("Removing OpenTherm Gateway binary sensor %s", self._attr_name) self._unsub_updates() - @property - def available(self): - """Return availability of the sensor.""" - return self._attr_is_on is not None - @callback def receive_report(self, status): """Handle status updates from the component.""" + self._attr_available = self._gateway.connected state = status[self._source].get(self._var) self._attr_is_on = None if state is None else bool(state) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 2d9f1687463..5eb1246e55f 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -138,7 +138,7 @@ class OpenThermClimate(ClimateEntity): @callback def receive_report(self, status): """Receive 
and handle a new report from the Gateway.""" - self._attr_available = status != gw_vars.DEFAULT_STATUS + self._attr_available = self._gateway.connected ch_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE) flame_on = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON) cooling_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_COOLING_ACTIVE) diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index 9171292c21b..8c17aca4516 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -45,6 +45,7 @@ class OpenThermSensor(SensorEntity): _attr_should_poll = False _attr_entity_registry_enabled_default = False + _attr_available = False def __init__( self, @@ -94,14 +95,10 @@ class OpenThermSensor(SensorEntity): _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._attr_name) self._unsub_updates() - @property - def available(self): - """Return availability of the sensor.""" - return self._attr_native_value is not None - @callback def receive_report(self, status): """Handle status updates from the component.""" + self._attr_available = self._gateway.connected value = status[self._source].get(self._var) self._attr_native_value = value self.async_write_ha_state() From f9b359ae3010b9283e52525bb2f7286fc1a486a7 Mon Sep 17 00:00:00 2001 From: Scott K Logan Date: Mon, 15 Jul 2024 02:10:50 -0500 Subject: [PATCH 0088/1635] Fix rainforest_raven closing device due to timeout (#121905) --- homeassistant/components/rainforest_raven/coordinator.py | 2 +- homeassistant/components/rainforest_raven/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/rainforest_raven/coordinator.py b/homeassistant/components/rainforest_raven/coordinator.py index 37e44b12eba..d08a10c2670 100644 --- a/homeassistant/components/rainforest_raven/coordinator.py +++ b/homeassistant/components/rainforest_raven/coordinator.py @@ -167,7 +167,7 @@ class RAVEnDataCoordinator(DataUpdateCoordinator): await device.synchronize() self._device_info = await device.get_device_info() except: - await device.close() + await device.abort() raise self._raven_device = device diff --git a/homeassistant/components/rainforest_raven/manifest.json b/homeassistant/components/rainforest_raven/manifest.json index bc44c3fc30c..49bd11e8880 100644 --- a/homeassistant/components/rainforest_raven/manifest.json +++ b/homeassistant/components/rainforest_raven/manifest.json @@ -6,7 +6,7 @@ "dependencies": ["usb"], "documentation": "https://www.home-assistant.io/integrations/rainforest_raven", "iot_class": "local_polling", - "requirements": ["aioraven==0.6.0"], + "requirements": ["aioraven==0.7.0"], "usb": [ { "vid": "0403", diff --git a/requirements_all.txt b/requirements_all.txt index 5583044dd7c..1af4edb3b45 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -344,7 +344,7 @@ aiopyarr==23.4.0 aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.6.0 +aioraven==0.7.0 # homeassistant.components.recollect_waste aiorecollect==2023.09.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 59afcd79a95..d5b8d307884 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -317,7 +317,7 @@ aiopyarr==23.4.0 aioqsw==0.3.5 # homeassistant.components.rainforest_raven -aioraven==0.6.0 +aioraven==0.7.0 # homeassistant.components.recollect_waste 
aiorecollect==2023.09.0 From bf89eaae2514581a8c0b28593ede59a9b5d64930 Mon Sep 17 00:00:00 2001 From: Tomasz Gorochowik Date: Mon, 15 Jul 2024 09:09:19 +0200 Subject: [PATCH 0089/1635] Fix enigma2 mute (#121928) --- homeassistant/components/enigma2/media_player.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/enigma2/media_player.py b/homeassistant/components/enigma2/media_player.py index 63acdd8be72..86ed9652106 100644 --- a/homeassistant/components/enigma2/media_player.py +++ b/homeassistant/components/enigma2/media_player.py @@ -199,7 +199,8 @@ class Enigma2Device(MediaPlayerEntity): async def async_mute_volume(self, mute: bool) -> None: """Mute or unmute.""" - await self._device.toggle_mute() + if mute != self._device.status.muted: + await self._device.toggle_mute() async def async_select_source(self, source: str) -> None: """Select input source.""" From 9bd822d693ab1d5a2a5c176d94af544a22bc8f8f Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 15 Jul 2024 08:31:44 +0200 Subject: [PATCH 0090/1635] Fix `configuration_url` for Shelly device using IPv6 (#121939) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> --- homeassistant/components/shelly/coordinator.py | 3 ++- homeassistant/components/shelly/utils.py | 16 +++++++++++++++- tests/components/shelly/test_utils.py | 17 +++++++++++++++++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 33ed07c35de..c1be4577bf6 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -61,6 +61,7 @@ from .utils import ( async_create_issue_unsupported_firmware, get_block_device_sleep_period, get_device_entry_gen, + get_host, get_http_port, get_rpc_device_wakeup_period, update_device_fw_info, @@ -147,7 +148,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( model=MODEL_NAMES.get(self.model, self.model), sw_version=self.sw_version, hw_version=f"gen{get_device_entry_gen(self.entry)} ({self.model})", - configuration_url=f"http://{self.entry.data[CONF_HOST]}:{get_http_port(self.entry.data)}", + configuration_url=f"http://{get_host(self.entry.data[CONF_HOST])}:{get_http_port(self.entry.data)}", ) self.device_id = device_entry.id diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index bcd5a859538..bb2263c8da7 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -3,7 +3,7 @@ from __future__ import annotations from datetime import datetime, timedelta -from ipaddress import IPv4Address +from ipaddress import IPv4Address, IPv6Address, ip_address from types import MappingProxyType from typing import Any, cast @@ -482,6 +482,20 @@ def get_http_port(data: MappingProxyType[str, Any]) -> int: return cast(int, data.get(CONF_PORT, DEFAULT_HTTP_PORT)) +def get_host(host: str) -> str: + """Get the device IP address or hostname.""" + try: + ip_object = ip_address(host) + except ValueError: + # host contains hostname + return host + + if isinstance(ip_object, IPv6Address): + return f"[{host}]" + + return host + + @callback def async_remove_shelly_rpc_entities( hass: HomeAssistant, domain: str, mac: str, keys: list[str] diff --git a/tests/components/shelly/test_utils.py b/tests/components/shelly/test_utils.py index 7c4ea8accae..5891f250fae 100644 --- a/tests/components/shelly/test_utils.py +++ 
b/tests/components/shelly/test_utils.py @@ -23,6 +23,7 @@ from homeassistant.components.shelly.utils import ( get_block_device_sleep_period, get_block_input_triggers, get_device_uptime, + get_host, get_number_of_channels, get_release_url, get_rpc_channel_name, @@ -274,3 +275,19 @@ def test_get_release_url( result = get_release_url(gen, model, beta) assert result is expected + + +@pytest.mark.parametrize( + ("host", "expected"), + [ + ("shelly_device.local", "shelly_device.local"), + ("192.168.178.12", "192.168.178.12"), + ( + "2001:0db8:85a3:0000:0000:8a2e:0370:7334", + "[2001:0db8:85a3:0000:0000:8a2e:0370:7334]", + ), + ], +) +def test_get_host(host: str, expected: str) -> None: + """Test get_host function.""" + assert get_host(host) == expected From 214b5efd72477a2a1a2f095e7bc9d9cb2b30603b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 14 Jul 2024 16:23:07 -0500 Subject: [PATCH 0091/1635] Narrow sqlite database corruption check to ensure disk image is malformed (#121947) * Narrow sqlite database corruption check to ensure disk image is malformed The database corruption check would also replace the database when it locked externally instead of only when its malformed. This was discovered in https://github.com/home-assistant/core/issues/121909#issuecomment-2227409124 when a user did a manual index creation while HA was online * tweak * tweak * fix * fix --- homeassistant/components/recorder/core.py | 10 +++++++++- tests/components/recorder/test_init.py | 4 +++- tests/components/recorder/test_migrate.py | 4 +++- tests/components/recorder/test_purge.py | 2 +- tests/components/recorder/test_purge_v32_schema.py | 2 +- 5 files changed, 17 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 4e5ac04c3bf..b699ea324f6 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1178,7 +1178,15 @@ class Recorder(threading.Thread): def _handle_database_error(self, err: Exception) -> bool: """Handle a database error that may result in moving away the corrupt db.""" - if isinstance(err.__cause__, sqlite3.DatabaseError): + if ( + (cause := err.__cause__) + and isinstance(cause, sqlite3.DatabaseError) + and (cause_str := str(cause)) + # Make sure we do not move away a database when its only locked + # externally by another process. sqlite does not give us a named + # exception for this so we have to check the error message. 
+ and ("malformed" in cause_str or "not a database" in cause_str) + ): _LOGGER.exception( "Unrecoverable sqlite3 database corruption detected: %s", err ) diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 52947ce0c19..e1b22b2c245 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -1707,7 +1707,9 @@ async def test_database_corruption_while_running( hass.states.async_set("test.lost", "on", {}) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) await async_wait_recording_done(hass) with patch.object( diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index cb8e402f65a..d5b26eba680 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -165,7 +165,9 @@ async def test_database_migration_encounters_corruption( assert recorder.util.async_migration_in_progress(hass) is False sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError( + "database disk image is malformed" + ) with ( patch("homeassistant.components.recorder.ALLOW_IN_MEMORY_DB", True), diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 1ccbaada265..1167fd4de73 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -210,7 +210,7 @@ async def test_purge_old_states_encouters_database_corruption( await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") with ( patch( diff --git a/tests/components/recorder/test_purge_v32_schema.py b/tests/components/recorder/test_purge_v32_schema.py index fb636cfa9dc..8a641a2ce7f 100644 --- a/tests/components/recorder/test_purge_v32_schema.py +++ b/tests/components/recorder/test_purge_v32_schema.py @@ -173,7 +173,7 @@ async def test_purge_old_states_encouters_database_corruption( await async_wait_recording_done(hass) sqlite3_exception = DatabaseError("statement", {}, []) - sqlite3_exception.__cause__ = sqlite3.DatabaseError() + sqlite3_exception.__cause__ = sqlite3.DatabaseError("not a database") with ( patch( From 4b93fc61b5dfd7db6847fda7800a124250b8b5c4 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 16 Jul 2024 13:47:11 +0200 Subject: [PATCH 0092/1635] Bump python-holidays to 0.53 (#122021) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 075285bbdb9..ebe472d7f0e 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.52", "babel==2.15.0"] + "requirements": ["holidays==0.53", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json 
b/homeassistant/components/workday/manifest.json index ad609954a57..69df8080fa5 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.52"] + "requirements": ["holidays==0.53"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1af4edb3b45..a84f565dd64 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1087,7 +1087,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.52 +holidays==0.53 # homeassistant.components.frontend home-assistant-frontend==20240710.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d5b8d307884..c35ec3f64f8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -892,7 +892,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.52 +holidays==0.53 # homeassistant.components.frontend home-assistant-frontend==20240710.0 From d9e44bab69f564341de98fd9716c3f692aeb2449 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Tue, 16 Jul 2024 20:16:36 +0200 Subject: [PATCH 0093/1635] Mark UniFi power cycle button as unavailable if PoE is not enabled on port (#122035) --- homeassistant/components/unifi/button.py | 15 +++++++++-- tests/components/unifi/test_button.py | 32 ++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifi/button.py b/homeassistant/components/unifi/button.py index 6684e33e532..716d3734953 100644 --- a/homeassistant/components/unifi/button.py +++ b/homeassistant/components/unifi/button.py @@ -8,7 +8,7 @@ from __future__ import annotations from collections.abc import Callable, Coroutine from dataclasses import dataclass import secrets -from typing import Any +from typing import TYPE_CHECKING, Any import aiounifi from aiounifi.interfaces.api_handlers import ItemEvent @@ -44,6 +44,17 @@ from .entity import ( async_wlan_device_info_fn, ) +if TYPE_CHECKING: + from .hub import UnifiHub + + +@callback +def async_port_power_cycle_available_fn(hub: UnifiHub, obj_id: str) -> bool: + """Check if port allows power cycle action.""" + if not async_device_available_fn(hub, obj_id): + return False + return bool(hub.api.ports[obj_id].poe_enable) + async def async_restart_device_control_fn( api: aiounifi.Controller, obj_id: str @@ -96,7 +107,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiButtonEntityDescription, ...] 
= ( entity_category=EntityCategory.CONFIG, device_class=ButtonDeviceClass.RESTART, api_handler_fn=lambda api: api.ports, - available_fn=async_device_available_fn, + available_fn=async_port_power_cycle_available_fn, control_fn=async_power_cycle_port_control_fn, device_info_fn=async_device_device_info_fn, name_fn=lambda port: f"{port.name} Power Cycle", diff --git a/tests/components/unifi/test_button.py b/tests/components/unifi/test_button.py index b58d01e7724..b7bf19aedc2 100644 --- a/tests/components/unifi/test_button.py +++ b/tests/components/unifi/test_button.py @@ -1,9 +1,11 @@ """UniFi Network button platform tests.""" +from copy import deepcopy from datetime import timedelta from typing import Any from unittest.mock import patch +from aiounifi.models.message import MessageKey import pytest from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass @@ -319,3 +321,33 @@ async def test_wlan_button_entities( request_data, call, ) + + +@pytest.mark.parametrize("device_payload", [DEVICE_POWER_CYCLE_POE]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_power_cycle_availability( + hass: HomeAssistant, + mock_websocket_message, + device_payload: dict[str, Any], +) -> None: + """Verify that disabling PoE marks entity as unavailable.""" + entity_id = "button.switch_port_1_power_cycle" + + assert hass.states.get(entity_id).state != STATE_UNAVAILABLE + + # PoE disabled + + device_1 = deepcopy(device_payload[0]) + device_1["port_table"][0]["poe_enable"] = False + mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE + + # PoE enabled + device_1 = deepcopy(device_payload[0]) + device_1["port_table"][0]["poe_enable"] = True + mock_websocket_message(message=MessageKey.DEVICE, data=device_1) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state != STATE_UNAVAILABLE From 002db3c3e93ef7ff72ae88d102e9cf5e74b0ef12 Mon Sep 17 00:00:00 2001 From: Harry Martland Date: Fri, 19 Jul 2024 12:47:28 +0100 Subject: [PATCH 0094/1635] Fix hive not updating when boosting (#122042) * fixes issue where hive does not update when boosting * formats files --- homeassistant/components/hive/climate.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hive/climate.py b/homeassistant/components/hive/climate.py index cb1cc15a5bf..8f6db11babe 100644 --- a/homeassistant/components/hive/climate.py +++ b/homeassistant/components/hive/climate.py @@ -142,10 +142,10 @@ class HiveClimateEntity(HiveEntity, ClimateEntity): self.device = await self.hive.heating.getClimate(self.device) self._attr_available = self.device["deviceData"].get("online") if self._attr_available: - self._attr_hvac_mode = HIVE_TO_HASS_STATE[self.device["status"]["mode"]] - self._attr_hvac_action = HIVE_TO_HASS_HVAC_ACTION[ + self._attr_hvac_mode = HIVE_TO_HASS_STATE.get(self.device["status"]["mode"]) + self._attr_hvac_action = HIVE_TO_HASS_HVAC_ACTION.get( self.device["status"]["action"] - ] + ) self._attr_current_temperature = self.device["status"][ "current_temperature" ] @@ -154,5 +154,6 @@ class HiveClimateEntity(HiveEntity, ClimateEntity): self._attr_max_temp = self.device["max_temp"] if self.device["status"]["boost"] == "ON": self._attr_preset_mode = PRESET_BOOST + self._attr_hvac_mode = HVACMode.HEAT else: self._attr_preset_mode = PRESET_NONE From 977a55e3b84d8559e304b5fbb9b7e540a220d1e4 Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Wed, 17 Jul 2024 20:07:53 +0100 Subject: [PATCH 0095/1635] Update tplink device config during reauth flow (#122089) --- .../components/tplink/config_flow.py | 51 ++++-- tests/components/tplink/__init__.py | 46 +++-- tests/components/tplink/conftest.py | 23 ++- tests/components/tplink/test_config_flow.py | 162 +++++++++++++----- tests/components/tplink/test_init.py | 36 ++-- 5 files changed, 219 insertions(+), 99 deletions(-) diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index 5608ccfa72f..a0f0ca6eb76 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +import logging from typing import Any from kasa import ( @@ -52,6 +53,8 @@ from .const import ( DOMAIN, ) +_LOGGER = logging.getLogger(__name__) + STEP_AUTH_DATA_SCHEMA = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} ) @@ -88,15 +91,10 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): ) @callback - def _update_config_if_entry_in_setup_error( + def _get_config_updates( self, entry: ConfigEntry, host: str, config: dict - ) -> ConfigFlowResult | None: - """If discovery encounters a device that is in SETUP_ERROR or SETUP_RETRY update the device config.""" - if entry.state not in ( - ConfigEntryState.SETUP_ERROR, - ConfigEntryState.SETUP_RETRY, - ): - return None + ) -> dict | None: + """Return updates if the host or device config has changed.""" entry_data = entry.data entry_config_dict = entry_data.get(CONF_DEVICE_CONFIG) if entry_config_dict == config and entry_data[CONF_HOST] == host: @@ -110,11 +108,31 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): != config.get(CONF_CONNECTION_TYPE) ): updates.pop(CONF_CREDENTIALS_HASH, None) - return self.async_update_reload_and_abort( - entry, - data=updates, - reason="already_configured", - ) + _LOGGER.debug( + "Connection type changed for %s from %s to: %s", + host, + entry_config_dict.get(CONF_CONNECTION_TYPE), + config.get(CONF_CONNECTION_TYPE), + ) + return updates + + @callback + def _update_config_if_entry_in_setup_error( + self, entry: ConfigEntry, host: str, config: dict + ) -> ConfigFlowResult | None: + """If discovery encounters a device that is in SETUP_ERROR or SETUP_RETRY update the device config.""" + if entry.state not in ( + ConfigEntryState.SETUP_ERROR, + ConfigEntryState.SETUP_RETRY, + ): + return None + if updates := self._get_config_updates(entry, host, config): + return self.async_update_reload_and_abort( + entry, + data=updates, + reason="already_configured", + ) + return None async def _async_handle_discovery( self, host: str, formatted_mac: str, config: dict | None = None @@ -454,7 +472,7 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): password = user_input[CONF_PASSWORD] credentials = Credentials(username, password) try: - await self._async_try_discover_and_update( + device = await self._async_try_discover_and_update( host, credentials=credentials, raise_on_progress=True, @@ -467,6 +485,11 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): placeholders["error"] = str(ex) else: await set_credentials(self.hass, username, password) + config = device.config.to_dict(exclude_credentials=True) + if updates := self._get_config_updates(reauth_entry, host, config): + self.hass.config_entries.async_update_entry( + reauth_entry, data=updates + ) self.hass.async_create_task( 
self._async_reload_requires_auth_entries(), eager_start=False ) diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index b554cf07015..c51a451c847 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -57,25 +57,26 @@ CREDENTIALS_HASH_LEGACY = "" DEVICE_CONFIG_LEGACY = DeviceConfig(IP_ADDRESS) DEVICE_CONFIG_DICT_LEGACY = DEVICE_CONFIG_LEGACY.to_dict(exclude_credentials=True) CREDENTIALS = Credentials("foo", "bar") -CREDENTIALS_HASH_AUTH = "abcdefghijklmnopqrstuv==" -DEVICE_CONFIG_AUTH = DeviceConfig( +CREDENTIALS_HASH_AES = "AES/abcdefghijklmnopqrstuvabcdefghijklmnopqrstuv==" +CREDENTIALS_HASH_KLAP = "KLAP/abcdefghijklmnopqrstuv==" +DEVICE_CONFIG_KLAP = DeviceConfig( IP_ADDRESS, credentials=CREDENTIALS, connection_type=DeviceConnectionParameters( - DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Klap + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap ), uses_http=True, ) -DEVICE_CONFIG_AUTH2 = DeviceConfig( +DEVICE_CONFIG_AES = DeviceConfig( IP_ADDRESS2, credentials=CREDENTIALS, connection_type=DeviceConnectionParameters( - DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Klap + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes ), uses_http=True, ) -DEVICE_CONFIG_DICT_AUTH = DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True) -DEVICE_CONFIG_DICT_AUTH2 = DEVICE_CONFIG_AUTH2.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_KLAP = DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True) +DEVICE_CONFIG_DICT_AES = DEVICE_CONFIG_AES.to_dict(exclude_credentials=True) CREATE_ENTRY_DATA_LEGACY = { CONF_HOST: IP_ADDRESS, @@ -84,24 +85,28 @@ CREATE_ENTRY_DATA_LEGACY = { CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_LEGACY, } -CREATE_ENTRY_DATA_AUTH = { +CREATE_ENTRY_DATA_KLAP = { CONF_HOST: IP_ADDRESS, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AUTH, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_KLAP, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, } -CREATE_ENTRY_DATA_AUTH2 = { +CREATE_ENTRY_DATA_AES = { CONF_HOST: IP_ADDRESS2, CONF_ALIAS: ALIAS, CONF_MODEL: MODEL, - CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AUTH, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH2, + CONF_CREDENTIALS_HASH: CREDENTIALS_HASH_AES, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AES, } -NEW_CONNECTION_TYPE = DeviceConnectionParameters( - DeviceFamily.IotSmartPlugSwitch, DeviceEncryptionType.Aes +CONNECTION_TYPE_KLAP = DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Klap ) -NEW_CONNECTION_TYPE_DICT = NEW_CONNECTION_TYPE.to_dict() +CONNECTION_TYPE_KLAP_DICT = CONNECTION_TYPE_KLAP.to_dict() +CONNECTION_TYPE_AES = DeviceConnectionParameters( + DeviceFamily.SmartTapoPlug, DeviceEncryptionType.Aes +) +CONNECTION_TYPE_AES_DICT = CONNECTION_TYPE_AES.to_dict() def _load_feature_fixtures(): @@ -187,7 +192,7 @@ def _mocked_device( device_id=DEVICE_ID, alias=ALIAS, model=MODEL, - ip_address=IP_ADDRESS, + ip_address: str | None = None, modules: list[str] | None = None, children: list[Device] | None = None, features: list[str | Feature] | None = None, @@ -202,12 +207,17 @@ def _mocked_device( device.mac = mac device.alias = alias device.model = model - device.host = ip_address device.device_id = device_id device.hw_info = {"sw_ver": "1.0.0", "hw_ver": "1.0.0"} device.modules = {} device.features = {} + if not ip_address: + ip_address = IP_ADDRESS + else: + device_config.host = ip_address + device.host = ip_address + if modules: device.modules = { 
module_name: MODULE_TO_MOCK_GEN[module_name](device) diff --git a/tests/components/tplink/conftest.py b/tests/components/tplink/conftest.py index f8d933de71e..b1256f437e7 100644 --- a/tests/components/tplink/conftest.py +++ b/tests/components/tplink/conftest.py @@ -11,8 +11,10 @@ from homeassistant.core import HomeAssistant from . import ( CREATE_ENTRY_DATA_LEGACY, - CREDENTIALS_HASH_AUTH, - DEVICE_CONFIG_AUTH, + CREDENTIALS_HASH_AES, + CREDENTIALS_HASH_KLAP, + DEVICE_CONFIG_AES, + DEVICE_CONFIG_KLAP, IP_ADDRESS, IP_ADDRESS2, MAC_ADDRESS, @@ -32,14 +34,14 @@ def mock_discovery(): discover_single=DEFAULT, ) as mock_discovery: device = _mocked_device( - device_config=copy.deepcopy(DEVICE_CONFIG_AUTH), - credentials_hash=CREDENTIALS_HASH_AUTH, + device_config=copy.deepcopy(DEVICE_CONFIG_KLAP), + credentials_hash=CREDENTIALS_HASH_KLAP, alias=None, ) devices = { "127.0.0.1": _mocked_device( - device_config=copy.deepcopy(DEVICE_CONFIG_AUTH), - credentials_hash=CREDENTIALS_HASH_AUTH, + device_config=copy.deepcopy(DEVICE_CONFIG_KLAP), + credentials_hash=CREDENTIALS_HASH_KLAP, alias=None, ) } @@ -55,12 +57,15 @@ def mock_connect(): with patch("homeassistant.components.tplink.Device.connect") as mock_connect: devices = { IP_ADDRESS: _mocked_device( - device_config=DEVICE_CONFIG_AUTH, credentials_hash=CREDENTIALS_HASH_AUTH + device_config=DEVICE_CONFIG_KLAP, + credentials_hash=CREDENTIALS_HASH_KLAP, + ip_address=IP_ADDRESS, ), IP_ADDRESS2: _mocked_device( - device_config=DEVICE_CONFIG_AUTH, - credentials_hash=CREDENTIALS_HASH_AUTH, + device_config=DEVICE_CONFIG_AES, + credentials_hash=CREDENTIALS_HASH_AES, mac=MAC_ADDRESS2, + ip_address=IP_ADDRESS2, ), } diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index e9ae7957520..ddd67f249e6 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1,5 +1,6 @@ """Test the tplink config flow.""" +import logging from unittest.mock import AsyncMock, patch from kasa import TimeoutError @@ -11,6 +12,7 @@ from homeassistant.components.tplink import ( DOMAIN, AuthenticationError, Credentials, + Device, DeviceConfig, KasaException, ) @@ -33,19 +35,21 @@ from homeassistant.data_entry_flow import FlowResultType from . 
import ( ALIAS, - CREATE_ENTRY_DATA_AUTH, - CREATE_ENTRY_DATA_AUTH2, + CONNECTION_TYPE_KLAP_DICT, + CREATE_ENTRY_DATA_AES, + CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, - CREDENTIALS_HASH_AUTH, + CREDENTIALS_HASH_AES, + CREDENTIALS_HASH_KLAP, DEFAULT_ENTRY_TITLE, - DEVICE_CONFIG_DICT_AUTH, + DEVICE_CONFIG_DICT_AES, + DEVICE_CONFIG_DICT_KLAP, DEVICE_CONFIG_DICT_LEGACY, DHCP_FORMATTED_MAC_ADDRESS, IP_ADDRESS, MAC_ADDRESS, MAC_ADDRESS2, MODULE, - NEW_CONNECTION_TYPE_DICT, _mocked_device, _patch_connect, _patch_discovery, @@ -135,7 +139,7 @@ async def test_discovery_auth( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -154,7 +158,7 @@ async def test_discovery_auth( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == DEFAULT_ENTRY_TITLE - assert result2["data"] == CREATE_ENTRY_DATA_AUTH + assert result2["data"] == CREATE_ENTRY_DATA_KLAP assert result2["context"]["unique_id"] == MAC_ADDRESS @@ -187,7 +191,7 @@ async def test_discovery_auth_errors( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -218,7 +222,7 @@ async def test_discovery_auth_errors( }, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -238,7 +242,7 @@ async def test_discovery_new_credentials( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -267,7 +271,7 @@ async def test_discovery_new_credentials( {}, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -290,7 +294,7 @@ async def test_discovery_new_credentials_invalid( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() @@ -323,7 +327,7 @@ async def test_discovery_new_credentials_invalid( }, ) assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -543,7 +547,7 @@ async def test_manual_auth( await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == DEFAULT_ENTRY_TITLE - assert result3["data"] == CREATE_ENTRY_DATA_AUTH + assert result3["data"] == CREATE_ENTRY_DATA_KLAP assert result3["context"]["unique_id"] == MAC_ADDRESS @@ -607,7 +611,7 @@ async def test_manual_auth_errors( }, ) assert result4["type"] is FlowResultType.CREATE_ENTRY - assert result4["data"] == CREATE_ENTRY_DATA_AUTH + assert result4["data"] == CREATE_ENTRY_DATA_KLAP assert result4["context"]["unique_id"] == MAC_ADDRESS await hass.async_block_till_done() @@ -791,16 +795,16 @@ async def test_integration_discovery_with_ip_change( CONF_HOST: "127.0.0.2", CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + 
CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" - config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_AUTH) + config = DeviceConfig.from_dict(DEVICE_CONFIG_DICT_KLAP) mock_connect["connect"].reset_mock(side_effect=True) bulb = _mocked_device( @@ -832,8 +836,8 @@ async def test_integration_discovery_with_connection_change( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data=CREATE_ENTRY_DATA_AUTH, - unique_id=MAC_ADDRESS, + data=CREATE_ENTRY_DATA_AES, + unique_id=MAC_ADDRESS2, ) mock_config_entry.add_to_hass(hass) with patch("homeassistant.components.tplink.Discover.discover", return_value={}): @@ -849,13 +853,15 @@ async def test_integration_discovery_with_connection_change( ) == 0 ) - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH - assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.1" - assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AUTH + assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert mock_config_entry.data[CONF_DEVICE_CONFIG].get(CONF_HOST) == "127.0.0.2" + assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES NEW_DEVICE_CONFIG = { - **DEVICE_CONFIG_DICT_AUTH, - CONF_CONNECTION_TYPE: NEW_CONNECTION_TYPE_DICT, + **DEVICE_CONFIG_DICT_KLAP, + CONF_CONNECTION_TYPE: CONNECTION_TYPE_KLAP_DICT, + CONF_HOST: "127.0.0.2", } config = DeviceConfig.from_dict(NEW_DEVICE_CONFIG) # Reset the connect mock so when the config flow reloads the entry it succeeds @@ -870,8 +876,8 @@ async def test_integration_discovery_with_connection_change( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data={ - CONF_HOST: "127.0.0.1", - CONF_MAC: MAC_ADDRESS, + CONF_HOST: "127.0.0.2", + CONF_MAC: MAC_ADDRESS2, CONF_ALIAS: ALIAS, CONF_DEVICE_CONFIG: NEW_DEVICE_CONFIG, }, @@ -880,8 +886,8 @@ async def test_integration_discovery_with_connection_change( assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" assert mock_config_entry.data[CONF_DEVICE_CONFIG] == NEW_DEVICE_CONFIG - assert mock_config_entry.data[CONF_HOST] == "127.0.0.1" - assert CREDENTIALS_HASH_AUTH not in mock_config_entry.data + assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" + assert CREDENTIALS_HASH_AES not in mock_config_entry.data assert mock_config_entry.state is ConfigEntryState.LOADED @@ -953,6 +959,77 @@ async def test_reauth( await hass.async_block_till_done() +async def test_reauth_update_with_encryption_change( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reauth flow.""" + orig_side_effect = mock_connect["connect"].side_effect + mock_connect["connect"].side_effect = AuthenticationError() + mock_config_entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_AES}, + unique_id=MAC_ADDRESS2, + ) + mock_config_entry.add_to_hass(hass) + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert 
mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_AES + + with patch("homeassistant.components.tplink.Discover.discover", return_value={}): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + caplog.set_level(logging.DEBUG) + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + [result] = flows + assert result["step_id"] == "reauth_confirm" + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AES + assert CONF_CREDENTIALS_HASH not in mock_config_entry.data + + new_config = DeviceConfig( + "127.0.0.2", + credentials=None, + connection_type=Device.ConnectionParameters( + Device.Family.SmartTapoPlug, Device.EncryptionType.Klap + ), + uses_http=True, + ) + mock_discovery["mock_device"].host = "127.0.0.2" + mock_discovery["mock_device"].config = new_config + mock_discovery["mock_device"].credentials_hash = None + mock_connect["mock_devices"]["127.0.0.2"].config = new_config + mock_connect["mock_devices"]["127.0.0.2"].credentials_hash = CREDENTIALS_HASH_KLAP + + mock_connect["connect"].side_effect = orig_side_effect + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: "fake_username", + CONF_PASSWORD: "fake_password", + }, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert "Connection type changed for 127.0.0.2" in caplog.text + credentials = Credentials("fake_username", "fake_password") + mock_discovery["discover_single"].assert_called_once_with( + "127.0.0.2", credentials=credentials + ) + mock_discovery["mock_device"].update.assert_called_once_with() + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert mock_config_entry.state is ConfigEntryState.LOADED + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == { + **DEVICE_CONFIG_DICT_KLAP, + CONF_HOST: "127.0.0.2", + } + assert mock_config_entry.data[CONF_CREDENTIALS_HASH] == CREDENTIALS_HASH_KLAP + + async def test_reauth_update_from_discovery( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -981,13 +1058,13 @@ async def test_reauth_update_from_discovery( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP async def test_reauth_update_from_discovery_with_ip_change( @@ -1017,13 +1094,13 @@ async def test_reauth_update_from_discovery_with_ip_change( CONF_HOST: "127.0.0.2", CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP assert mock_config_entry.data[CONF_HOST] == "127.0.0.2" @@ -1040,7 +1117,7 @@ async def test_reauth_no_update_if_config_and_ip_the_same( mock_config_entry, data={ **mock_config_entry.data, - 
CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -1051,7 +1128,7 @@ async def test_reauth_no_update_if_config_and_ip_the_same( assert len(flows) == 1 [result] = flows assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP discovery_result = await hass.config_entries.flow.async_init( DOMAIN, @@ -1060,13 +1137,13 @@ async def test_reauth_no_update_if_config_and_ip_the_same( CONF_HOST: IP_ADDRESS, CONF_MAC: MAC_ADDRESS, CONF_ALIAS: ALIAS, - CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_AUTH, + CONF_DEVICE_CONFIG: DEVICE_CONFIG_DICT_KLAP, }, ) await hass.async_block_till_done() assert discovery_result["type"] is FlowResultType.ABORT assert discovery_result["reason"] == "already_configured" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_AUTH + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP assert mock_config_entry.data[CONF_HOST] == IP_ADDRESS @@ -1214,15 +1291,20 @@ async def test_discovery_timeout_connect( async def test_reauth_update_other_flows( hass: HomeAssistant, - mock_config_entry: MockConfigEntry, mock_discovery: AsyncMock, mock_connect: AsyncMock, ) -> None: """Test reauth updates other reauth flows.""" + mock_config_entry = MockConfigEntry( + title="TPLink", + domain=DOMAIN, + data={**CREATE_ENTRY_DATA_KLAP}, + unique_id=MAC_ADDRESS, + ) mock_config_entry2 = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH2}, + data={**CREATE_ENTRY_DATA_AES}, unique_id=MAC_ADDRESS2, ) default_side_effect = mock_connect["connect"].side_effect @@ -1244,7 +1326,7 @@ async def test_reauth_update_other_flows( flows_by_entry_id = {flow["context"]["entry_id"]: flow for flow in flows} result = flows_by_entry_id[mock_config_entry.entry_id] assert result["step_id"] == "reauth_confirm" - assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_LEGACY + assert mock_config_entry.data[CONF_DEVICE_CONFIG] == DEVICE_CONFIG_DICT_KLAP result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/tplink/test_init.py b/tests/components/tplink/test_init.py index c5c5e2ce6db..5b3cf648b6e 100644 --- a/tests/components/tplink/test_init.py +++ b/tests/components/tplink/test_init.py @@ -33,9 +33,9 @@ from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from . 
import ( - CREATE_ENTRY_DATA_AUTH, + CREATE_ENTRY_DATA_KLAP, CREATE_ENTRY_DATA_LEGACY, - DEVICE_CONFIG_AUTH, + DEVICE_CONFIG_KLAP, DEVICE_ID, DEVICE_ID_MAC, IP_ADDRESS, @@ -178,7 +178,7 @@ async def test_config_entry_device_config( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH}, + data={**CREATE_ENTRY_DATA_KLAP}, unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) @@ -197,7 +197,7 @@ async def test_config_entry_with_stored_credentials( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH}, + data={**CREATE_ENTRY_DATA_KLAP}, unique_id=MAC_ADDRESS, ) auth = { @@ -210,7 +210,7 @@ async def test_config_entry_with_stored_credentials( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - config = DEVICE_CONFIG_AUTH + config = DEVICE_CONFIG_KLAP assert config.credentials != stored_credentials config.credentials = stored_credentials mock_connect["connect"].assert_called_once_with(config=config) @@ -223,7 +223,7 @@ async def test_config_entry_device_config_invalid( caplog: pytest.LogCaptureFixture, ) -> None: """Test that an invalid device config logs an error and loads the config entry.""" - entry_data = copy.deepcopy(CREATE_ENTRY_DATA_AUTH) + entry_data = copy.deepcopy(CREATE_ENTRY_DATA_KLAP) entry_data[CONF_DEVICE_CONFIG] = {"foo": "bar"} mock_config_entry = MockConfigEntry( title="TPLink", @@ -263,7 +263,7 @@ async def test_config_entry_errors( mock_config_entry = MockConfigEntry( title="TPLink", domain=DOMAIN, - data={**CREATE_ENTRY_DATA_AUTH}, + data={**CREATE_ENTRY_DATA_KLAP}, unique_id=MAC_ADDRESS, ) mock_config_entry.add_to_hass(hass) @@ -520,11 +520,11 @@ async def test_move_credentials_hash( from the device. """ device_config = { - **DEVICE_CONFIG_AUTH.to_dict( + **DEVICE_CONFIG_KLAP.to_dict( exclude_credentials=True, credentials_hash="theHash" ) } - entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config} + entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} entry = MockConfigEntry( title="TPLink", @@ -567,11 +567,11 @@ async def test_move_credentials_hash_auth_error( in async_setup_entry. """ device_config = { - **DEVICE_CONFIG_AUTH.to_dict( + **DEVICE_CONFIG_KLAP.to_dict( exclude_credentials=True, credentials_hash="theHash" ) } - entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config} + entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} entry = MockConfigEntry( title="TPLink", @@ -610,11 +610,11 @@ async def test_move_credentials_hash_other_error( at the end of the test. 
""" device_config = { - **DEVICE_CONFIG_AUTH.to_dict( + **DEVICE_CONFIG_KLAP.to_dict( exclude_credentials=True, credentials_hash="theHash" ) } - entry_data = {**CREATE_ENTRY_DATA_AUTH, CONF_DEVICE_CONFIG: device_config} + entry_data = {**CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config} entry = MockConfigEntry( title="TPLink", @@ -647,9 +647,9 @@ async def test_credentials_hash( hass: HomeAssistant, ) -> None: """Test credentials_hash used to call connect.""" - device_config = {**DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)} + device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)} entry_data = { - **CREATE_ENTRY_DATA_AUTH, + **CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config, CONF_CREDENTIALS_HASH: "theHash", } @@ -684,9 +684,9 @@ async def test_credentials_hash_auth_error( hass: HomeAssistant, ) -> None: """Test credentials_hash is deleted after an auth failure.""" - device_config = {**DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True)} + device_config = {**DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True)} entry_data = { - **CREATE_ENTRY_DATA_AUTH, + **CREATE_ENTRY_DATA_KLAP, CONF_DEVICE_CONFIG: device_config, CONF_CREDENTIALS_HASH: "theHash", } @@ -710,7 +710,7 @@ async def test_credentials_hash_auth_error( await hass.async_block_till_done() expected_config = DeviceConfig.from_dict( - DEVICE_CONFIG_AUTH.to_dict(exclude_credentials=True, credentials_hash="theHash") + DEVICE_CONFIG_KLAP.to_dict(exclude_credentials=True, credentials_hash="theHash") ) connect_mock.assert_called_with(config=expected_config) assert entry.state is ConfigEntryState.SETUP_ERROR From a3a99cc631532d1af884dea2ba4b1517d4e78bd9 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Thu, 18 Jul 2024 23:27:03 +0300 Subject: [PATCH 0096/1635] Prevent connecting to a Shelly device that is already connected (#122105) --- .../components/shelly/coordinator.py | 3 +++ tests/components/shelly/conftest.py | 1 + tests/components/shelly/test_binary_sensor.py | 1 + tests/components/shelly/test_config_flow.py | 1 + tests/components/shelly/test_coordinator.py | 20 +++++++++++++++++++ tests/components/shelly/test_init.py | 2 ++ tests/components/shelly/test_sensor.py | 2 ++ tests/components/shelly/test_update.py | 1 + 8 files changed, 31 insertions(+) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index c1be4577bf6..469223a5857 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -668,6 +668,9 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): """Handle device update.""" LOGGER.debug("Shelly %s handle update, type: %s", self.name, update_type) if update_type is RpcUpdateType.ONLINE: + if self.device.connected: + LOGGER.debug("Device %s already connected", self.name) + return self.entry.async_create_background_task( self.hass, self._async_device_connect_task(), diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index 14c06d3f86a..7caaae8621e 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -359,6 +359,7 @@ def _mock_rpc_device(version: str | None = None): status=MOCK_STATUS_RPC, firmware_version="some fw string", initialized=True, + connected=True, ) type(device).name = PropertyMock(return_value="Test name") return device diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 3bfbf350f7e..dc68b657796 100644 --- 
a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -263,6 +263,7 @@ async def test_rpc_sleeping_binary_sensor( ) -> None: """Test RPC online sleeping binary sensor.""" entity_id = f"{BINARY_SENSOR_DOMAIN}.test_name_cloud" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) config_entry = await init_integration(hass, 2, sleep_period=1000) diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index a26c6eac405..a3040fc2eb8 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -1114,6 +1114,7 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( caplog: pytest.LogCaptureFixture, ) -> None: """Test zeroconf discovery does not triggers refresh for sleeping device.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = MockConfigEntry( domain="shelly", diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 35123a2db91..d3494c094f9 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -545,6 +545,7 @@ async def test_rpc_update_entry_sleep_period( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC update entry sleep period.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 600) entry = await init_integration(hass, 2, sleep_period=600) register_entity( @@ -578,6 +579,7 @@ async def test_rpc_sleeping_device_no_periodic_updates( ) -> None: """Test RPC sleeping device no periodic updates.""" entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = await init_integration(hass, 2, sleep_period=1000) register_entity( @@ -609,6 +611,7 @@ async def test_rpc_sleeping_device_firmware_unsupported( issue_registry: ir.IssueRegistry, ) -> None: """Test RPC sleeping device firmware not supported.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "firmware_supported", False) entry = await init_integration(hass, 2, sleep_period=3600) @@ -912,6 +915,7 @@ async def test_rpc_sleeping_device_connection_error( hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setattr(mock_rpc_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -939,3 +943,19 @@ async def test_rpc_sleeping_device_connection_error( assert "Sleeping device did not update" in caplog.text assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE + + +async def test_rpc_already_connected( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_rpc_device: Mock, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test RPC ignore connect event if already connected.""" + await init_integration(hass, 2) + + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert "already connected" in caplog.text + mock_rpc_device.initialize.assert_called_once() diff --git 
a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index 998d56fc6cc..46698c23c0a 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -279,6 +279,7 @@ async def test_sleeping_rpc_device_online( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping RPC device online.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", device_sleep) entry = await init_integration(hass, 2, sleep_period=entry_sleep) assert "will resume when device is online" in caplog.text @@ -297,6 +298,7 @@ async def test_sleeping_rpc_device_online_new_firmware( caplog: pytest.LogCaptureFixture, ) -> None: """Test sleeping device Gen2 with firmware 1.0.0 or later.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) entry = await init_integration(hass, 2, sleep_period=None) assert "will resume when device is online" in caplog.text diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index c62a1f6f6ca..9f510ba8fe9 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -449,6 +449,7 @@ async def test_rpc_sleeping_sensor( ) -> None: """Test RPC online sleeping sensor.""" entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) entry = await init_integration(hass, 2, sleep_period=1000) @@ -600,6 +601,7 @@ async def test_rpc_sleeping_update_entity_service( await async_setup_component(hass, "homeassistant", {}) entity_id = f"{SENSOR_DOMAIN}.test_name_temperature" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) await init_integration(hass, 2, sleep_period=1000) diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 8448c116815..721e86559a3 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -334,6 +334,7 @@ async def test_rpc_sleeping_update( monkeypatch: pytest.MonkeyPatch, ) -> None: """Test RPC sleeping device update entity.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) monkeypatch.setitem(mock_rpc_device.shelly, "ver", "1") monkeypatch.setitem( From c518c4756b6a383bf521773a2b1f09a78279dddc Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Thu, 18 Jul 2024 21:20:10 +0100 Subject: [PATCH 0097/1635] Bump tplink dependency python-kasa to 0.7.0.5 (#122119) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index a345f64e4b2..f935d019541 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -301,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.0.4"] + "requirements": ["python-kasa[speedups]==0.7.0.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index a84f565dd64..49db85e79c8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2275,7 +2275,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.4 +python-kasa[speedups]==0.7.0.5 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c35ec3f64f8..6af377ff73f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1775,7 +1775,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.4 +python-kasa[speedups]==0.7.0.5 # homeassistant.components.matter python-matter-server==6.2.2 From d0d2fd7918917b47bf348734351ecb5377e3a26d Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 18 Jul 2024 14:07:31 +0200 Subject: [PATCH 0098/1635] Update yt-dlp to 2024.07.16 (#122124) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index cfe44f5176b..cd312413db3 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp==2024.07.01"], + "requirements": ["yt-dlp==2024.07.16"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 49db85e79c8..1512acef5e8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2951,7 +2951,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.01 +yt-dlp==2024.07.16 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6af377ff73f..a7cbf46c54b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2307,7 +2307,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.01 +yt-dlp==2024.07.16 # homeassistant.components.zamg zamg==0.3.6 From 1ef4332af6d2cc59cb233100d0e5e6425f5d0dad Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Fri, 19 Jul 2024 09:11:29 +0200 Subject: [PATCH 0099/1635] Fix KeyError in config flow of Bring integration (#122136) --- homeassistant/components/bring/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/bring/config_flow.py b/homeassistant/components/bring/config_flow.py index 333837a20f2..997342033e4 100644 --- a/homeassistant/components/bring/config_flow.py +++ b/homeassistant/components/bring/config_flow.py @@ -58,7 +58,7 @@ class BringConfigFlow(ConfigFlow, domain=DOMAIN): ): self._abort_if_unique_id_configured() return self.async_create_entry( - title=self.info["name"] or user_input[CONF_EMAIL], data=user_input + title=self.info.get("name") or user_input[CONF_EMAIL], data=user_input ) return self.async_show_form( From a08ffdc8d3103daafd90a8551daa03d94ebf17a1 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 19 Jul 2024 18:50:21 +0200 Subject: [PATCH 0100/1635] Bump version to 2024.7.3 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 8587f9e6137..f706b2d1243 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 7 -PATCH_VERSION: Final = "2" +PATCH_VERSION: Final = "3" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 82c29948e3c..f044551ce1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.2" +version = "2024.7.3" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 74d10b9824444350a4007085200ea6810a678ee2 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 22 Jul 2024 01:11:05 +0200 Subject: [PATCH 0101/1635] Bump `aiotractive` to 0.6.0 (#121155) Co-authored-by: Maciej Bieniek <478555+bieniu@users.noreply.github.com> Co-authored-by: J. Nick Koston --- homeassistant/components/tractive/__init__.py | 2 +- homeassistant/components/tractive/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tractive/__init__.py b/homeassistant/components/tractive/__init__.py index fd5abe24c06..4f0de7b14cd 100644 --- a/homeassistant/components/tractive/__init__.py +++ b/homeassistant/components/tractive/__init__.py @@ -132,7 +132,7 @@ async def _generate_trackables( trackable = await trackable.details() # Check that the pet has tracker linked. 
- if not trackable["device_id"]: + if not trackable.get("device_id"): return None if "details" not in trackable: diff --git a/homeassistant/components/tractive/manifest.json b/homeassistant/components/tractive/manifest.json index 75ddf065bd7..903c5347d52 100644 --- a/homeassistant/components/tractive/manifest.json +++ b/homeassistant/components/tractive/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "cloud_push", "loggers": ["aiotractive"], - "requirements": ["aiotractive==0.5.6"] + "requirements": ["aiotractive==0.6.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1512acef5e8..6c87b9bd19a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -386,7 +386,7 @@ aiosyncthing==0.5.1 aiotankerkoenig==0.4.1 # homeassistant.components.tractive -aiotractive==0.5.6 +aiotractive==0.6.0 # homeassistant.components.unifi aiounifi==79 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a7cbf46c54b..a0b1497a185 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -359,7 +359,7 @@ aiosyncthing==0.5.1 aiotankerkoenig==0.4.1 # homeassistant.components.tractive -aiotractive==0.5.6 +aiotractive==0.6.0 # homeassistant.components.unifi aiounifi==79 From cf20e67f1f541a8d28703b4dceb218403c2ba1e4 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 21 Jul 2024 12:36:06 +0200 Subject: [PATCH 0102/1635] Ensure mqtt subscriptions are in a set (#122201) --- homeassistant/components/mqtt/__init__.py | 2 +- homeassistant/components/mqtt/client.py | 8 ++++---- homeassistant/components/mqtt/models.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index f057dab8bc4..2b3aa21aa22 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -251,7 +251,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: mqtt_data.client.async_restore_tracked_subscriptions( mqtt_data.subscriptions_to_restore ) - mqtt_data.subscriptions_to_restore = [] + mqtt_data.subscriptions_to_restore = set() mqtt_data.reload_dispatchers.append( entry.add_update_listener(_async_config_entry_updated) ) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index f65769badfa..d242ec019c6 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -428,12 +428,12 @@ class MQTT: await self.async_init_client() @property - def subscriptions(self) -> list[Subscription]: + def subscriptions(self) -> set[Subscription]: """Return the tracked subscriptions.""" - return [ + return { *chain.from_iterable(self._simple_subscriptions.values()), *self._wildcard_subscriptions, - ] + } def cleanup(self) -> None: """Clean up listeners.""" @@ -736,7 +736,7 @@ class MQTT: @callback def async_restore_tracked_subscriptions( - self, subscriptions: list[Subscription] + self, subscriptions: set[Subscription] ) -> None: """Restore tracked subscriptions after reload.""" for subscription in subscriptions: diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index e5a9a9c44da..c355510a5c2 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -423,7 +423,7 @@ class MqttData: reload_handlers: dict[str, CALLBACK_TYPE] = field(default_factory=dict) reload_schema: dict[str, VolSchemaType] = field(default_factory=dict) state_write_requests: 
EntityTopicState = field(default_factory=EntityTopicState) - subscriptions_to_restore: list[Subscription] = field(default_factory=list) + subscriptions_to_restore: set[Subscription] = field(default_factory=set) tags: dict[str, dict[str, MQTTTagScanner]] = field(default_factory=dict) From 683069cb9850e724594306ddd73fee92c713bbbf Mon Sep 17 00:00:00 2001 From: Alexander Schneider Date: Sun, 21 Jul 2024 14:51:10 -0700 Subject: [PATCH 0103/1635] Add Z-Wave discovery schema for ZVIDAR roller shades (#122332) Add discovery schema for ZVIDAR roller shades --- .../components/zwave_js/discovery.py | 9 + tests/components/zwave_js/conftest.py | 14 + .../zwave_js/fixtures/cover_zvidar_state.json | 1120 +++++++++++++++++ tests/components/zwave_js/test_discovery.py | 12 + 4 files changed, 1155 insertions(+) create mode 100644 tests/components/zwave_js/fixtures/cover_zvidar_state.json diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 0b66567c036..6798e644a02 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -579,6 +579,15 @@ DISCOVERY_SCHEMAS = [ ), entity_registry_enabled_default=False, ), + # ZVIDAR Z-CM-V01 (SmartWings/Deyi WM25L/V Z-Wave Motor for Roller Shade) + ZWaveDiscoverySchema( + platform=Platform.COVER, + hint="shade", + manufacturer_id={0x045A}, + product_id={0x0507}, + product_type={0x0904}, + primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + ), # Vision Security ZL7432 In Wall Dual Relay Switch ZWaveDiscoverySchema( platform=Platform.SWITCH, diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index a2a4c217b8b..31c9c5affa5 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -472,6 +472,12 @@ def iblinds_v3_state_fixture(): return json.loads(load_fixture("zwave_js/cover_iblinds_v3_state.json")) +@pytest.fixture(name="zvidar_state", scope="package") +def zvidar_state_fixture(): + """Load the ZVIDAR node state fixture data.""" + return json.loads(load_fixture("zwave_js/cover_zvidar_state.json")) + + @pytest.fixture(name="qubino_shutter_state", scope="package") def qubino_shutter_state_fixture(): """Load the Qubino Shutter node state fixture data.""" @@ -1081,6 +1087,14 @@ def iblinds_v3_cover_fixture(client, iblinds_v3_state): return node +@pytest.fixture(name="zvidar") +def zvidar_cover_fixture(client, zvidar_state): + """Mock a ZVIDAR window cover node.""" + node = Node(client, copy.deepcopy(zvidar_state)) + client.driver.controller.nodes[node.node_id] = node + return node + + @pytest.fixture(name="qubino_shutter") def qubino_shutter_cover_fixture(client, qubino_shutter_state): """Mock a Qubino flush shutter node.""" diff --git a/tests/components/zwave_js/fixtures/cover_zvidar_state.json b/tests/components/zwave_js/fixtures/cover_zvidar_state.json new file mode 100644 index 00000000000..05118931026 --- /dev/null +++ b/tests/components/zwave_js/fixtures/cover_zvidar_state.json @@ -0,0 +1,1120 @@ +{ + "nodeId": 270, + "index": 0, + "installerIcon": 6656, + "userIcon": 6656, + "status": 4, + "ready": true, + "isListening": false, + "isRouting": false, + "isSecure": true, + "manufacturerId": 1114, + "productId": 1287, + "productType": 2308, + "firmwareVersion": "1.10.0", + "zwavePlusVersion": 2, + "name": "Window Blind Controller", + "location": "**REDACTED**", + "deviceConfig": { + "filename": 
"/snapshot/build/node_modules/@zwave-js/config/config/devices/0x045a/Z-CM-V01.json", + "isEmbedded": true, + "manufacturer": "ZVIDAR", + "manufacturerId": 1114, + "label": "Z-CM-V01", + "description": "Smart Curtain Motor", + "devices": [ + { + "productType": 2308, + "productId": 1287 + } + ], + "firmwareVersion": { + "min": "0.0", + "max": "255.255" + }, + "preferred": false, + "paramInformation": { + "_map": {} + }, + "compat": { + "removeCCs": {} + } + }, + "label": "Z-CM-V01", + "interviewAttempts": 0, + "isFrequentListening": "1000ms", + "maxDataRate": 100000, + "supportedDataRates": [100000], + "protocolVersion": 3, + "supportsBeaming": false, + "supportsSecurity": true, + "nodeType": 1, + "zwavePlusNodeType": 0, + "zwavePlusRoleType": 7, + "deviceClass": { + "basic": { + "key": 3, + "label": "End Node" + }, + "generic": { + "key": 17, + "label": "Multilevel Switch" + }, + "specific": { + "key": 0, + "label": "Unused" + } + }, + "interviewStage": "Complete", + "deviceDatabaseUrl": "https://devices.zwave-js.io/?jumpTo=0x045a:0x0904:0x0507:1.10.0", + "statistics": { + "commandsTX": 2, + "commandsRX": 1, + "commandsDroppedRX": 1, + "commandsDroppedTX": 0, + "timeoutResponse": 0, + "rtt": 357.6, + "lastSeen": "2024-07-21T16:42:38.086Z", + "rssi": -89, + "lwr": { + "protocolDataRate": 4, + "repeaters": [], + "rssi": -91, + "repeaterRSSI": [] + } + }, + "highestSecurityClass": 1, + "isControllerNode": false, + "keepAwake": false, + "lastSeen": "2024-07-21T16:42:38.086Z", + "protocol": 1, + "values": [ + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "targetValue", + "propertyName": "targetValue", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Target value", + "valueChangeOptions": ["transitionDuration"], + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "duration", + "propertyName": "duration", + "ccVersion": 4, + "metadata": { + "type": "duration", + "readable": true, + "writeable": false, + "label": "Remaining duration", + "stateful": true, + "secret": false + }, + "value": "unknown" + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "currentValue", + "propertyName": "currentValue", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Current value", + "min": 0, + "max": 99, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Up", + "propertyName": "Up", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Up)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + "secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "Down", + "propertyName": "Down", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Perform a level change (Down)", + "ccSpecific": { + "switchType": 2 + }, + "valueChangeOptions": ["transitionDuration"], + "states": { + "true": "Start", + "false": "Stop" + }, + "stateful": true, + 
"secret": false + }, + "value": true + }, + { + "endpoint": 0, + "commandClass": 38, + "commandClassName": "Multilevel Switch", + "property": "restorePrevious", + "propertyName": "restorePrevious", + "ccVersion": 4, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Restore previous value", + "states": { + "true": "Restore" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 1, + "propertyName": "Hand Button Action", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Hand Button Action", + "default": 1, + "min": 0, + "max": 1, + "states": { + "0": "Close", + "1": "Open" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 2, + "propertyName": "Motor Direction", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Motor Direction", + "default": 1, + "min": 1, + "max": 3, + "states": { + "1": "Forward", + "2": "Opposite", + "3": "Reverse" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 3, + "propertyName": "Manually Set Open Boundary", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Manually Set Open Boundary", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Cancel", + "1": "Start" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 4, + "propertyName": "Manually Set Closed Boundary", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Manually Set Closed Boundary", + "default": 0, + "min": 0, + "max": 1, + "states": { + "0": "Cancel", + "1": "Start" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 5, + "propertyName": "Control Motor", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Control Motor", + "default": 3, + "min": 1, + "max": 3, + "states": { + "1": "Open (Up)", + "2": "Close (Down)", + "3": "Stop" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 6, + "propertyName": "Calibrate Limit Position", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Calibrate Limit Position", + "default": 1, + "min": 1, + "max": 3, + "states": { + "1": "Upper limit", + "2": "Lower limit", + "3": "Third limit" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 7, + "propertyName": "Delete Limit Position", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + 
"writeable": true, + "label": "Delete Limit Position", + "default": 0, + "min": 0, + "max": 3, + "states": { + "0": "All limits", + "1": "Only upper limit", + "2": "Only lower limit", + "3": "Only third limit" + }, + "valueSize": 1, + "format": 0, + "allowManualEntry": false, + "isFromConfig": true + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 8, + "propertyName": "Low Battery Level Alarm Threshold", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Low Battery Level Alarm Threshold", + "default": 10, + "min": 0, + "max": 50, + "unit": "%", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 10 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 9, + "propertyName": "Battery Report Interval", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Battery Report Interval", + "default": 3600, + "min": 0, + "max": 2678400, + "unit": "seconds", + "valueSize": 4, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 3600 + }, + { + "endpoint": 0, + "commandClass": 112, + "commandClassName": "Configuration", + "property": 10, + "propertyName": "Battery Change Report Threshold", + "ccVersion": 4, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Battery Change Report Threshold", + "default": 5, + "min": 0, + "max": 50, + "unit": "%", + "valueSize": 1, + "format": 0, + "allowManualEntry": true, + "isFromConfig": true + }, + "value": 5 + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "Power Management", + "propertyKey": "Mains status", + "propertyName": "Power Management", + "propertyKeyName": "Mains status", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Mains status", + "ccSpecific": { + "notificationType": 8 + }, + "min": 0, + "max": 255, + "states": { + "2": "AC mains disconnected", + "3": "AC mains re-connected" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmType", + "propertyName": "alarmType", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Type", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 113, + "commandClassName": "Notification", + "property": "alarmLevel", + "propertyName": "alarmLevel", + "ccVersion": 8, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Alarm Level", + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "manufacturerId", + "propertyName": "manufacturerId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Manufacturer ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 1114 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productType", + "propertyName": "productType", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + 
"label": "Product type", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 2308 + }, + { + "endpoint": 0, + "commandClass": 114, + "commandClassName": "Manufacturer Specific", + "property": "productId", + "propertyName": "productId", + "ccVersion": 2, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Product ID", + "min": 0, + "max": 65535, + "stateful": true, + "secret": false + }, + "value": 1287 + }, + { + "endpoint": 0, + "commandClass": 128, + "commandClassName": "Battery", + "property": "level", + "propertyName": "level", + "ccVersion": 1, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Battery level", + "min": 0, + "max": 100, + "unit": "%", + "stateful": true, + "secret": false + }, + "value": 86 + }, + { + "endpoint": 0, + "commandClass": 128, + "commandClassName": "Battery", + "property": "isLow", + "propertyName": "isLow", + "ccVersion": 1, + "metadata": { + "type": "boolean", + "readable": true, + "writeable": false, + "label": "Low battery level", + "stateful": true, + "secret": false + }, + "value": false + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "libraryType", + "propertyName": "libraryType", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Library type", + "states": { + "0": "Unknown", + "1": "Static Controller", + "2": "Controller", + "3": "Enhanced Slave", + "4": "Slave", + "5": "Installer", + "6": "Routing Slave", + "7": "Bridge Controller", + "8": "Device under Test", + "9": "N/A", + "10": "AV Remote", + "11": "AV Device" + }, + "stateful": true, + "secret": false + }, + "value": 3 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "protocolVersion", + "propertyName": "protocolVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "7.16" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "firmwareVersions", + "propertyName": "firmwareVersions", + "ccVersion": 3, + "metadata": { + "type": "string[]", + "readable": true, + "writeable": false, + "label": "Z-Wave chip firmware versions", + "stateful": true, + "secret": false + }, + "value": ["1.10"] + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hardwareVersion", + "propertyName": "hardwareVersion", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": false, + "label": "Z-Wave chip hardware version", + "stateful": true, + "secret": false + }, + "value": 1 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "sdkVersion", + "propertyName": "sdkVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "SDK version", + "stateful": true, + "secret": false + }, + "value": "7.16.3" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationFrameworkAPIVersion", + "propertyName": "applicationFrameworkAPIVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave application framework API version", + "stateful": true, + "secret": false + }, + "value": "10.16.3" + }, + { + "endpoint": 
0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationFrameworkBuildNumber", + "propertyName": "applicationFrameworkBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave application framework API build number", + "stateful": true, + "secret": false + }, + "value": 297 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hostInterfaceVersion", + "propertyName": "hostInterfaceVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Serial API version", + "stateful": true, + "secret": false + }, + "value": "unused" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "hostInterfaceBuildNumber", + "propertyName": "hostInterfaceBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Serial API build number", + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "zWaveProtocolVersion", + "propertyName": "zWaveProtocolVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol version", + "stateful": true, + "secret": false + }, + "value": "7.16.3" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "zWaveProtocolBuildNumber", + "propertyName": "zWaveProtocolBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Z-Wave protocol build number", + "stateful": true, + "secret": false + }, + "value": 297 + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationVersion", + "propertyName": "applicationVersion", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Application version", + "stateful": true, + "secret": false + }, + "value": "1.10.0" + }, + { + "endpoint": 0, + "commandClass": 134, + "commandClassName": "Version", + "property": "applicationBuildNumber", + "propertyName": "applicationBuildNumber", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": false, + "label": "Application build number", + "stateful": true, + "secret": false + }, + "value": 43707 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 3, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Period: Duration", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Sets the duration of an on/off period in 1/10th seconds. Must be set together with \"On/Off Cycle Count\"", + "label": "Node Identify - On/Off Period: Duration", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 3 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 4, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Cycle Count", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "Sets the number of on/off periods. 0xff means infinite. 
Must be set together with \"On/Off Period duration\"", + "label": "Node Identify - On/Off Cycle Count", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 4 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": 80, + "propertyKey": 5, + "propertyName": "Node Identify", + "propertyKeyName": "On/Off Period: On time", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "description": "This property is used to set the length of the On time during an On/Off period. It allows asymmetric On/Off periods. The value 0x00 MUST represent symmetric On/Off period (On time equal to Off time)", + "label": "Node Identify - On/Off Period: On time", + "ccSpecific": { + "indicatorId": 80, + "propertyId": 5 + }, + "stateful": true, + "secret": false + }, + "value": 0 + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "value", + "propertyName": "value", + "ccVersion": 3, + "metadata": { + "type": "number", + "readable": true, + "writeable": true, + "label": "Indicator value", + "ccSpecific": { + "indicatorId": 0 + }, + "min": 0, + "max": 255, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "identify", + "propertyName": "identify", + "ccVersion": 3, + "metadata": { + "type": "boolean", + "readable": false, + "writeable": true, + "label": "Identify", + "states": { + "true": "Identify" + }, + "stateful": true, + "secret": false + } + }, + { + "endpoint": 0, + "commandClass": 135, + "commandClassName": "Indicator", + "property": "timeout", + "propertyName": "timeout", + "ccVersion": 3, + "metadata": { + "type": "string", + "readable": true, + "writeable": true, + "label": "Timeout", + "stateful": true, + "secret": false + } + } + ], + "endpoints": [ + { + "nodeId": 261, + "index": 0, + "installerIcon": 6656, + "userIcon": 6656, + "deviceClass": { + "basic": { + "key": 3, + "label": "End Node" + }, + "generic": { + "key": 17, + "label": "Multilevel Switch" + }, + "specific": { + "key": 0, + "label": "Unused" + } + }, + "commandClasses": [ + { + "id": 94, + "name": "Z-Wave Plus Info", + "version": 2, + "isSecure": false + }, + { + "id": 85, + "name": "Transport Service", + "version": 2, + "isSecure": false + }, + { + "id": 159, + "name": "Security 2", + "version": 1, + "isSecure": true + }, + { + "id": 108, + "name": "Supervision", + "version": 1, + "isSecure": false + }, + { + "id": 38, + "name": "Multilevel Switch", + "version": 4, + "isSecure": true + }, + { + "id": 112, + "name": "Configuration", + "version": 4, + "isSecure": true + }, + { + "id": 133, + "name": "Association", + "version": 2, + "isSecure": true + }, + { + "id": 89, + "name": "Association Group Information", + "version": 3, + "isSecure": true + }, + { + "id": 142, + "name": "Multi Channel Association", + "version": 3, + "isSecure": true + }, + { + "id": 134, + "name": "Version", + "version": 3, + "isSecure": true + }, + { + "id": 114, + "name": "Manufacturer Specific", + "version": 2, + "isSecure": true + }, + { + "id": 90, + "name": "Device Reset Locally", + "version": 1, + "isSecure": true + }, + { + "id": 128, + "name": "Battery", + "version": 1, + "isSecure": true + }, + { + "id": 113, + "name": "Notification", + "version": 8, + "isSecure": true + }, + { + "id": 122, + "name": "Firmware Update Meta Data", + "version": 5, + "isSecure": true + }, + { + "id": 
115, + "name": "Powerlevel", + "version": 1, + "isSecure": true + }, + { + "id": 135, + "name": "Indicator", + "version": 3, + "isSecure": true + } + ] + } + ] +} diff --git a/tests/components/zwave_js/test_discovery.py b/tests/components/zwave_js/test_discovery.py index 1179d8e843c..57841ef2a83 100644 --- a/tests/components/zwave_js/test_discovery.py +++ b/tests/components/zwave_js/test_discovery.py @@ -49,6 +49,18 @@ async def test_iblinds_v2(hass: HomeAssistant, client, iblinds_v2, integration) assert state +async def test_zvidar_state(hass: HomeAssistant, client, zvidar, integration) -> None: + """Test that an ZVIDAR Z-CM-V01 multilevel switch value is discovered as a cover.""" + node = zvidar + assert node.device_class.specific.label == "Unused" + + state = hass.states.get("light.window_blind_controller") + assert not state + + state = hass.states.get("cover.window_blind_controller") + assert state + + async def test_ge_12730(hass: HomeAssistant, client, ge_12730, integration) -> None: """Test GE 12730 Fan Controller v2.0 multilevel switch is discovered as a fan.""" node = ge_12730 From b63bc72450631c7d04ea6dd5430265bbb50c7e2a Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Tue, 23 Jul 2024 21:56:46 +0200 Subject: [PATCH 0104/1635] Fix device class on sensor in ViCare (#122334) update device class on init --- homeassistant/components/vicare/sensor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 0e98729e40f..0271ffc9798 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -950,6 +950,8 @@ class ViCareSensor(ViCareEntity, SensorEntity): """Initialize the sensor.""" super().__init__(device_config, api, description.key) self.entity_description = description + # run update to have device_class set depending on unit_of_measurement + self.update() @property def available(self) -> bool: From f739644735525d693e1d6a66a0a39a6256b6ef5b Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Mon, 22 Jul 2024 07:54:31 +0300 Subject: [PATCH 0105/1635] Goofle Generative AI: Fix string format (#122348) * Ignore format for string tool args * Add tests --- .../google_generative_ai_conversation/conversation.py | 2 ++ .../snapshots/test_conversation.ambr | 1 - 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index 8052ee66f40..c80581a1f57 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -81,6 +81,8 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: key = "type_" val = val.upper() elif key == "format": + if schema.get("type") == "string" and val != "enum": + continue key = "format_" elif key == "items": val = _format_schema(val) diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index 7f28c172970..66caf4c7218 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -449,7 +449,6 @@ description: "Test parameters" items { type_: STRING - format_: "lower" } } 
} From 9d6bd359c497de39b61dc8578fe180796698da5a Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Mon, 22 Jul 2024 12:11:09 +0300 Subject: [PATCH 0106/1635] Ensure script llm tool name does not start with a digit (#122349) * Ensure script tool name does not start with a digit * Fix test name --- homeassistant/helpers/llm.py | 5 ++++- tests/helpers/test_llm.py | 40 ++++++++++++++++++++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 506cadbf168..f386fb3ddec 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -615,6 +615,9 @@ class ScriptTool(Tool): entity_registry = er.async_get(hass) self.name = split_entity_id(script_entity_id)[1] + if self.name[0].isdigit(): + self.name = "_" + self.name + self._entity_id = script_entity_id self.parameters = vol.Schema({}) entity_entry = entity_registry.async_get(script_entity_id) if entity_entry and entity_entry.unique_id: @@ -715,7 +718,7 @@ class ScriptTool(Tool): SCRIPT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: SCRIPT_DOMAIN + "." + self.name, + ATTR_ENTITY_ID: self._entity_id, ATTR_VARIABLES: tool_input.tool_args, }, context=llm_context.context, diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index 81fa573852e..e1f55942d10 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -780,6 +780,46 @@ async def test_script_tool( } +async def test_script_tool_name(hass: HomeAssistant) -> None: + """Test that script tool name is not started with a digit.""" + assert await async_setup_component(hass, "homeassistant", {}) + context = Context() + llm_context = llm.LLMContext( + platform="test_platform", + context=context, + user_prompt="test_text", + language="*", + assistant="conversation", + device_id=None, + ) + + # Create a script with a unique ID + assert await async_setup_component( + hass, + "script", + { + "script": { + "123456": { + "description": "This is a test script", + "sequence": [], + "fields": { + "beer": {"description": "Number of beers", "required": True}, + }, + }, + } + }, + ) + async_expose_entity(hass, "conversation", "script.123456", True) + + api = await llm.async_get_api(hass, "assist", llm_context) + + tools = [tool for tool in api.tools if isinstance(tool, llm.ScriptTool)] + assert len(tools) == 1 + + tool = tools[0] + assert tool.name == "_123456" + + async def test_selector_serializer( hass: HomeAssistant, llm_context: llm.LLMContext ) -> None: From 7135a919e3338741f4a8a816bec9f1eee02de4d4 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 22 Jul 2024 11:09:03 +0200 Subject: [PATCH 0107/1635] Bump reolink-aio to 0.9.5 (#122366) --- homeassistant/components/reolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index ee3ebe8a13a..c329289790b 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.9.4"] + "requirements": ["reolink-aio==0.9.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6c87b9bd19a..e8d846ef099 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2460,7 +2460,7 @@ renault-api==0.2.4 
renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.4 +reolink-aio==0.9.5 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0b1497a185..787b2af38fc 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1924,7 +1924,7 @@ renault-api==0.2.4 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.4 +reolink-aio==0.9.5 # homeassistant.components.rflink rflink==0.0.66 From 56f51d3e3511752c3ff8f746c64d57a5bd325606 Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Tue, 23 Jul 2024 03:56:13 +0300 Subject: [PATCH 0108/1635] Fix gemini api format conversion (#122403) * Fix gemini api format conversion * add tests * fix tests * fix tests * fix coverage --- .../conversation.py | 18 +++++++++++++++++- .../snapshots/test_conversation.ambr | 18 ++++++++++++++++++ .../test_conversation.py | 4 +++- 3 files changed, 38 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index c80581a1f57..69a68121c7b 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -73,6 +73,14 @@ SUPPORTED_SCHEMA_KEYS = { def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: """Format the schema to protobuf.""" + if (subschemas := schema.get("anyOf")) or (subschemas := schema.get("allOf")): + for subschema in subschemas: # Gemini API does not support anyOf and allOf keys + if "type" in subschema: # Fallback to first subschema with 'type' field + return _format_schema(subschema) + return _format_schema( + subschemas[0] + ) # Or, if not found, to any of the subschemas + result = {} for key, val in schema.items(): if key not in SUPPORTED_SCHEMA_KEYS: @@ -81,7 +89,9 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: key = "type_" val = val.upper() elif key == "format": - if schema.get("type") == "string" and val != "enum": + if (schema.get("type") == "string" and val != "enum") or ( + schema.get("type") not in ("number", "integer", "string") + ): continue key = "format_" elif key == "items": @@ -89,6 +99,12 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: elif key == "properties": val = {k: _format_schema(v) for k, v in val.items()} result[key] = val + + if result.get("type_") == "OBJECT" and not result.get("properties"): + # An object with undefined properties is not supported by Gemini API. + # Fallback to JSON string. This will probably fail for most tools that want it, + # but we don't have a better fallback strategy so far. 
+ result["properties"] = {"json": {"type_": "STRING"}} return result diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index 66caf4c7218..abd3658e869 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -442,6 +442,24 @@ description: "Test function" parameters { type_: OBJECT + properties { + key: "param3" + value { + type_: OBJECT + properties { + key: "json" + value { + type_: STRING + } + } + } + } + properties { + key: "param2" + value { + type_: NUMBER + } + } properties { key: "param1" value { diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 30016335f3b..a7ab2c1b337 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -185,7 +185,9 @@ async def test_function_call( { vol.Optional("param1", description="Test parameters"): [ vol.All(str, vol.Lower) - ] + ], + vol.Optional("param2"): vol.Any(float, int), + vol.Optional("param3"): dict, } ) From 75f0384a15d8ed2a61f8186911d8a2915f1e4cff Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Wed, 24 Jul 2024 20:12:51 +0200 Subject: [PATCH 0109/1635] Fix typo in Matter lock platform (#122536) --- homeassistant/components/matter/lock.py | 4 ++-- .../matter/{test_door_lock.py => test_lock.py} | 16 ++++++++++++---- 2 files changed, 14 insertions(+), 6 deletions(-) rename tests/components/matter/{test_door_lock.py => test_lock.py} (95%) diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index ae01faa3bc7..31ae5e496ce 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -168,10 +168,10 @@ class MatterLock(MatterEntity, LockEntity): LOGGER.debug("Lock state: %s for %s", lock_state, self.entity_id) - if lock_state is clusters.DoorLock.Enums.DlLockState.kUnlatched: + if lock_state == clusters.DoorLock.Enums.DlLockState.kUnlatched: self._attr_is_locked = False self._attr_is_open = True - if lock_state is clusters.DoorLock.Enums.DlLockState.kLocked: + elif lock_state == clusters.DoorLock.Enums.DlLockState.kLocked: self._attr_is_locked = True self._attr_is_open = False elif lock_state in ( diff --git a/tests/components/matter/test_door_lock.py b/tests/components/matter/test_lock.py similarity index 95% rename from tests/components/matter/test_door_lock.py rename to tests/components/matter/test_lock.py index 461cc1b7f3d..1180e6ee469 100644 --- a/tests/components/matter/test_door_lock.py +++ b/tests/components/matter/test_lock.py @@ -8,6 +8,7 @@ import pytest from homeassistant.components.lock import ( STATE_LOCKED, + STATE_OPEN, STATE_UNLOCKED, LockEntityFeature, ) @@ -82,12 +83,12 @@ async def test_lock( assert state assert state.state == STATE_UNLOCKED - set_node_attribute(door_lock, 1, 257, 0, 0) + set_node_attribute(door_lock, 1, 257, 0, 1) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_UNLOCKED + assert state.state == STATE_LOCKED set_node_attribute(door_lock, 1, 257, 0, None) await trigger_subscription_callback(hass, matter_client) @@ -213,9 +214,16 @@ async 
def test_lock_with_unbolt( assert state assert state.state == STATE_OPENING - set_node_attribute(door_lock_with_unbolt, 1, 257, 3, 0) + set_node_attribute(door_lock_with_unbolt, 1, 257, 0, 0) await trigger_subscription_callback(hass, matter_client) state = hass.states.get("lock.mock_door_lock_lock") assert state - assert state.state == STATE_LOCKED + assert state.state == STATE_UNLOCKED + + set_node_attribute(door_lock_with_unbolt, 1, 257, 0, 3) + await trigger_subscription_callback(hass, matter_client) + + state = hass.states.get("lock.mock_door_lock_lock") + assert state + assert state.state == STATE_OPEN From 586a0b12ab36043c1fa3d7b6ce6c5e85a4204025 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Wed, 24 Jul 2024 17:19:12 +0100 Subject: [PATCH 0110/1635] Fix target service attribute on Mastodon integration (#122546) * Fix target * Fix --- homeassistant/components/mastodon/notify.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mastodon/notify.py b/homeassistant/components/mastodon/notify.py index f15b8c6f0ab..99999275aeb 100644 --- a/homeassistant/components/mastodon/notify.py +++ b/homeassistant/components/mastodon/notify.py @@ -3,7 +3,7 @@ from __future__ import annotations import mimetypes -from typing import Any +from typing import Any, cast from mastodon import Mastodon from mastodon.Mastodon import MastodonAPIError, MastodonUnauthorizedError @@ -71,11 +71,15 @@ class MastodonNotificationService(BaseNotificationService): def send_message(self, message: str = "", **kwargs: Any) -> None: """Toot a message, with media perhaps.""" + + target = None + if (target_list := kwargs.get(ATTR_TARGET)) is not None: + target = cast(list[str], target_list)[0] + data = kwargs.get(ATTR_DATA) media = None mediadata = None - target = None sensitive = False content_warning = None @@ -87,7 +91,6 @@ class MastodonNotificationService(BaseNotificationService): return mediadata = self._upload_media(media) - target = data.get(ATTR_TARGET) sensitive = data.get(ATTR_MEDIA_WARNING) content_warning = data.get(ATTR_CONTENT_WARNING) From 9940d0281bc527e7b85fc7ff9cd1792807c5287a Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Thu, 25 Jul 2024 09:44:56 +1000 Subject: [PATCH 0111/1635] Bump aiolifx to 1.0.6 (#122569) --- homeassistant/components/lifx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 3d0bd1d73d1..59b336373c2 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -48,7 +48,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.5", + "aiolifx==1.0.6", "aiolifx-effects==0.3.2", "aiolifx-themes==0.4.15" ] diff --git a/requirements_all.txt b/requirements_all.txt index e8d846ef099..1195f7b50eb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -282,7 +282,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.5 +aiolifx==1.0.6 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 787b2af38fc..9bfd0311b25 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -255,7 +255,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.4.15 # homeassistant.components.lifx -aiolifx==1.0.5 +aiolifx==1.0.6 # 
homeassistant.components.livisi aiolivisi==0.0.19 From aa44c54a1912ec948adffb19fe295e7e5d56ac22 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Thu, 25 Jul 2024 21:23:14 +0200 Subject: [PATCH 0112/1635] Bump deebot-client to 8.2.0 (#122612) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 6ca9b9e3edc..5a21facab71 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.1.1"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1195f7b50eb..f8c62aa75b0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -709,7 +709,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.1.1 +deebot-client==8.2.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9bfd0311b25..579f24fb4bb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -590,7 +590,7 @@ dbus-fast==2.22.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.1.1 +deebot-client==8.2.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 00c3b0d888e5d5427588a83e7b05036dfba83b02 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Mon, 29 Jul 2024 11:59:13 +0200 Subject: [PATCH 0113/1635] Bump aiohue to version 4.7.2 (#122651) --- homeassistant/components/hue/manifest.json | 2 +- homeassistant/components/hue/v2/hue_event.py | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/hue/fixtures/v2_resources.json | 12 ++++++++++++ tests/components/hue/test_device_trigger_v2.py | 9 +++++++-- 6 files changed, 24 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/hue/manifest.json b/homeassistant/components/hue/manifest.json index e8d214da3c8..71aabd4c204 100644 --- a/homeassistant/components/hue/manifest.json +++ b/homeassistant/components/hue/manifest.json @@ -11,6 +11,6 @@ "iot_class": "local_push", "loggers": ["aiohue"], "quality_scale": "platinum", - "requirements": ["aiohue==4.7.1"], + "requirements": ["aiohue==4.7.2"], "zeroconf": ["_hue._tcp.local."] } diff --git a/homeassistant/components/hue/v2/hue_event.py b/homeassistant/components/hue/v2/hue_event.py index b0e0de234f1..b286a11aade 100644 --- a/homeassistant/components/hue/v2/hue_event.py +++ b/homeassistant/components/hue/v2/hue_event.py @@ -55,7 +55,7 @@ async def async_setup_hue_events(bridge: HueBridge): CONF_ID: slugify(f"{hue_device.metadata.name} Button"), CONF_DEVICE_ID: device.id, # type: ignore[union-attr] CONF_UNIQUE_ID: hue_resource.id, - CONF_TYPE: hue_resource.button.last_event.value, + CONF_TYPE: hue_resource.button.button_report.event.value, CONF_SUBTYPE: hue_resource.metadata.control_id, } hass.bus.async_fire(ATTR_HUE_EVENT, data) @@ -79,7 +79,7 @@ async def async_setup_hue_events(bridge: HueBridge): data = { CONF_DEVICE_ID: device.id, # type: ignore[union-attr] CONF_UNIQUE_ID: hue_resource.id, - CONF_TYPE: 
hue_resource.relative_rotary.last_event.action.value, + CONF_TYPE: hue_resource.relative_rotary.rotary_report.action.value, CONF_SUBTYPE: hue_resource.relative_rotary.last_event.rotation.direction.value, CONF_DURATION: hue_resource.relative_rotary.last_event.rotation.duration, CONF_STEPS: hue_resource.relative_rotary.last_event.rotation.steps, diff --git a/requirements_all.txt b/requirements_all.txt index f8c62aa75b0..6c7f7bbc000 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -264,7 +264,7 @@ aioharmony==0.2.10 aiohomekit==3.1.5 # homeassistant.components.hue -aiohue==4.7.1 +aiohue==4.7.2 # homeassistant.components.imap aioimaplib==1.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 579f24fb4bb..fc830f0fe7a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -240,7 +240,7 @@ aioharmony==0.2.10 aiohomekit==3.1.5 # homeassistant.components.hue -aiohue==4.7.1 +aiohue==4.7.2 # homeassistant.components.imap aioimaplib==1.1.0 diff --git a/tests/components/hue/fixtures/v2_resources.json b/tests/components/hue/fixtures/v2_resources.json index 662e1107ca9..980086d0988 100644 --- a/tests/components/hue/fixtures/v2_resources.json +++ b/tests/components/hue/fixtures/v2_resources.json @@ -1487,6 +1487,10 @@ "on": { "on": true }, + "owner": { + "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", + "rtype": "zone" + }, "type": "grouped_light" }, { @@ -1498,6 +1502,10 @@ "on": { "on": true }, + "owner": { + "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", + "rtype": "zone" + }, "type": "grouped_light" }, { @@ -1509,6 +1517,10 @@ "on": { "on": false }, + "owner": { + "rid": "7cee478d-6455-483a-9e32-9f9fdcbcc4f6", + "rtype": "zone" + }, "type": "grouped_light" }, { diff --git a/tests/components/hue/test_device_trigger_v2.py b/tests/components/hue/test_device_trigger_v2.py index 0a89b3263c7..efdc33375a6 100644 --- a/tests/components/hue/test_device_trigger_v2.py +++ b/tests/components/hue/test_device_trigger_v2.py @@ -28,7 +28,12 @@ async def test_hue_event( # Emit button update event btn_event = { - "button": {"last_event": "initial_press"}, + "button": { + "button_report": { + "event": "initial_press", + "updated": "2021-10-01T12:00:00Z", + } + }, "id": "c658d3d8-a013-4b81-8ac6-78b248537e70", "metadata": {"control_id": 1}, "type": "button", @@ -41,7 +46,7 @@ async def test_hue_event( assert len(events) == 1 assert events[0].data["id"] == "wall_switch_with_2_controls_button" assert events[0].data["unique_id"] == btn_event["id"] - assert events[0].data["type"] == btn_event["button"]["last_event"] + assert events[0].data["type"] == btn_event["button"]["button_report"]["event"] assert events[0].data["subtype"] == btn_event["metadata"]["control_id"] From e5fd9819da291fef90726c1a88270586fe05bce2 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 26 Jul 2024 16:59:12 +0200 Subject: [PATCH 0114/1635] Return unknown when data is missing in Trafikverket Weather (#122652) Return unknown when data is missing --- .../trafikverket_weatherstation/sensor.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/trafikverket_weatherstation/sensor.py b/homeassistant/components/trafikverket_weatherstation/sensor.py index 4bd14448546..8856482d885 100644 --- a/homeassistant/components/trafikverket_weatherstation/sensor.py +++ b/homeassistant/components/trafikverket_weatherstation/sensor.py @@ -61,7 +61,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] 
= ( TrafikverketSensorEntityDescription( key="air_temp", translation_key="air_temperature", - value_fn=lambda data: data.air_temp or 0, + value_fn=lambda data: data.air_temp, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, @@ -69,7 +69,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="road_temp", translation_key="road_temperature", - value_fn=lambda data: data.road_temp or 0, + value_fn=lambda data: data.road_temp, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, @@ -91,7 +91,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( ), TrafikverketSensorEntityDescription( key="wind_speed", - value_fn=lambda data: data.windforce or 0, + value_fn=lambda data: data.windforce, native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, device_class=SensorDeviceClass.WIND_SPEED, state_class=SensorStateClass.MEASUREMENT, @@ -99,7 +99,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="wind_speed_max", translation_key="wind_speed_max", - value_fn=lambda data: data.windforcemax or 0, + value_fn=lambda data: data.windforcemax, native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, device_class=SensorDeviceClass.WIND_SPEED, entity_registry_enabled_default=False, @@ -107,7 +107,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( ), TrafikverketSensorEntityDescription( key="humidity", - value_fn=lambda data: data.humidity or 0, + value_fn=lambda data: data.humidity, native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, entity_registry_enabled_default=False, @@ -115,7 +115,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( ), TrafikverketSensorEntityDescription( key="precipitation_amount", - value_fn=lambda data: data.precipitation_amount or 0, + value_fn=lambda data: data.precipitation_amount, native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR, device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, state_class=SensorStateClass.MEASUREMENT, @@ -130,7 +130,7 @@ SENSOR_TYPES: tuple[TrafikverketSensorEntityDescription, ...] = ( TrafikverketSensorEntityDescription( key="dew_point", translation_key="dew_point", - value_fn=lambda data: data.dew_point or 0, + value_fn=lambda data: data.dew_point, native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, From d51d584aed3d2adfbde15a0afe133b51a7a45d1f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 29 Jul 2024 04:59:31 -0500 Subject: [PATCH 0115/1635] Retry later on OSError during apple_tv entry setup (#122747) --- homeassistant/components/apple_tv/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/apple_tv/__init__.py b/homeassistant/components/apple_tv/__init__.py index 4e5c8791acd..08372aa79ae 100644 --- a/homeassistant/components/apple_tv/__init__.py +++ b/homeassistant/components/apple_tv/__init__.py @@ -60,6 +60,7 @@ AUTH_EXCEPTIONS = ( exceptions.NoCredentialsError, ) CONNECTION_TIMEOUT_EXCEPTIONS = ( + OSError, asyncio.CancelledError, TimeoutError, exceptions.ConnectionLostError, From 02c592d6afe64ea1f19f0cc1ee7a86a29537b095 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 29 Jul 2024 14:40:02 +0200 Subject: [PATCH 0116/1635] Bump version to 2024.7.4 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index f706b2d1243..71b7d79cb01 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 7 -PATCH_VERSION: Final = "3" +PATCH_VERSION: Final = "4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index f044551ce1e..55f96c3e0b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.7.3" +version = "2024.7.4" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 70e368a57e5c3a09dd2b9546c1adac362663e19c Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Tue, 30 Jul 2024 07:14:56 +0200 Subject: [PATCH 0117/1635] Use snapshot in Axis switch tests (#122680) --- .../axis/snapshots/test_switch.ambr | 189 ++++++++++++++++++ tests/components/axis/test_switch.py | 143 +++++++------ 2 files changed, 259 insertions(+), 73 deletions(-) create mode 100644 tests/components/axis/snapshots/test_switch.ambr diff --git a/tests/components/axis/snapshots/test_switch.ambr b/tests/components/axis/snapshots/test_switch.ambr new file mode 100644 index 00000000000..dc4c75371cf --- /dev/null +++ b/tests/components/axis/snapshots/test_switch.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_doorbell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Doorbell', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_doorbell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Doorbell', + }), + 'context': , + 'entity_id': 'switch.home_doorbell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_relay_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_switches_with_port_cgi[root.IOPort.I0.Configurable=yes\nroot.IOPort.I0.Direction=output\nroot.IOPort.I0.Output.Name=Doorbell\nroot.IOPort.I0.Output.Active=closed\nroot.IOPort.I1.Configurable=yes\nroot.IOPort.I1.Direction=output\nroot.IOPort.I1.Output.Name=\nroot.IOPort.I1.Output.Active=open\n][switch.home_relay_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Relay 1', + }), + 'context': , + 'entity_id': 'switch.home_relay_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_doorbell', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Doorbell', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_doorbell-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Doorbell', + }), + 'context': , + 'entity_id': 'switch.home_doorbell', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.home_relay_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Relay 1', + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-tns1:Device/Trigger/Relay-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches_with_port_management[port_management_payload0-api_discovery_items0][switch.home_relay_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'home Relay 1', + }), + 'context': , + 'entity_id': 'switch.home_relay_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/axis/test_switch.py b/tests/components/axis/test_switch.py index 8a93c844042..964cfdae64c 100644 --- a/tests/components/axis/test_switch.py +++ b/tests/components/axis/test_switch.py @@ -4,20 +4,24 @@ from unittest.mock import patch from axis.models.api import CONTEXT import pytest +from syrupy import SnapshotAssertion from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_OFF, STATE_ON, + Platform, ) from 
homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er -from .conftest import RtspEventMock +from .conftest import ConfigEntryFactoryType, RtspEventMock from .const import API_DISCOVERY_PORT_MANAGEMENT, NAME +from tests.common import snapshot_platform + PORT_DATA = """root.IOPort.I0.Configurable=yes root.IOPort.I0.Direction=output root.IOPort.I0.Output.Name=Doorbell @@ -28,61 +32,6 @@ root.IOPort.I1.Output.Name= root.IOPort.I1.Output.Active=open """ - -@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_switches_with_port_cgi( - hass: HomeAssistant, - mock_rtsp_event: RtspEventMock, -) -> None: - """Test that switches are loaded properly using port.cgi.""" - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="inactive", - source_name="RelayToken", - source_idx="0", - ) - mock_rtsp_event( - topic="tns1:Device/Trigger/Relay", - data_type="LogicalState", - data_value="active", - source_name="RelayToken", - source_idx="1", - ) - await hass.async_block_till_done() - - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 - - relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") - assert relay_1.state == STATE_ON - assert relay_1.name == f"{NAME} Relay 1" - - entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - - relay_0 = hass.states.get(entity_id) - assert relay_0.state == STATE_OFF - assert relay_0.name == f"{NAME} Doorbell" - - with patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_on.assert_called_once_with("0") - - with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: - await hass.services.async_call( - SWITCH_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: entity_id}, - blocking=True, - ) - mock_turn_off.assert_called_once_with("0") - - PORT_MANAGEMENT_RESPONSE = { "apiVersion": "1.0", "method": "getPorts", @@ -113,14 +62,18 @@ PORT_MANAGEMENT_RESPONSE = { } -@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) -@pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_switches_with_port_management( +@pytest.mark.parametrize("param_ports_payload", [PORT_DATA]) +async def test_switches_with_port_cgi( hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, ) -> None: - """Test that switches are loaded properly using port management.""" + """Test that switches are loaded properly using port.cgi.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", @@ -137,30 +90,61 @@ async def test_switches_with_port_management( ) await hass.async_block_till_done() - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 - - relay_1 = hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1") - assert relay_1.state == STATE_ON - assert relay_1.name == f"{NAME} Relay 1" + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" - relay_0 = hass.states.get(entity_id) - assert relay_0.state == STATE_OFF - assert relay_0.name == f"{NAME} Doorbell" + with 
patch("axis.interfaces.vapix.Ports.close") as mock_turn_on: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_on.assert_called_once_with("0") - # State update + with patch("axis.interfaces.vapix.Ports.open") as mock_turn_off: + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + mock_turn_off.assert_called_once_with("0") + +@pytest.mark.parametrize("api_discovery_items", [API_DISCOVERY_PORT_MANAGEMENT]) +@pytest.mark.parametrize("port_management_payload", [PORT_MANAGEMENT_RESPONSE]) +async def test_switches_with_port_management( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + mock_rtsp_event: RtspEventMock, + snapshot: SnapshotAssertion, +) -> None: + """Test that switches are loaded properly using port management.""" + with patch("homeassistant.components.axis.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="inactive", + source_name="RelayToken", + source_idx="0", + ) mock_rtsp_event( topic="tns1:Device/Trigger/Relay", data_type="LogicalState", data_value="active", source_name="RelayToken", - source_idx="0", + source_idx="1", ) await hass.async_block_till_done() - assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + entity_id = f"{SWITCH_DOMAIN}.{NAME}_doorbell" with patch("axis.interfaces.vapix.IoPortManagement.close") as mock_turn_on: await hass.services.async_call( @@ -179,3 +163,16 @@ async def test_switches_with_port_management( blocking=True, ) mock_turn_off.assert_called_once_with("0") + + # State update + + mock_rtsp_event( + topic="tns1:Device/Trigger/Relay", + data_type="LogicalState", + data_value="active", + source_name="RelayToken", + source_idx="0", + ) + await hass.async_block_till_done() + + assert hass.states.get(f"{SWITCH_DOMAIN}.{NAME}_relay_1").state == STATE_ON From b4cba018709562892ef8f8b0bca74a2fb7cdb868 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 10:17:01 +0200 Subject: [PATCH 0118/1635] Fix implicit-return in command_line (#122838) --- homeassistant/components/command_line/cover.py | 5 ++--- homeassistant/components/command_line/switch.py | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/command_line/cover.py b/homeassistant/components/command_line/cover.py index 6400be7d92f..2c6ec78b689 100644 --- a/homeassistant/components/command_line/cover.py +++ b/homeassistant/components/command_line/cover.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, cast +from typing import Any, cast from homeassistant.components.cover import CoverEntity from homeassistant.const import ( @@ -145,8 +145,7 @@ class CommandCover(ManualTriggerEntity, CoverEntity): if self._command_state: LOGGER.info("Running state value command: %s", self._command_state) return await async_check_output_or_log(self._command_state, self._timeout) - if TYPE_CHECKING: - return None + return None async def _update_entity_state(self, now: datetime | None = None) -> None: """Update the state of the entity.""" diff --git 
a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index 8a75276c8b4..f8e9d21cf23 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from datetime import datetime, timedelta -from typing import TYPE_CHECKING, Any, cast +from typing import Any, cast from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity from homeassistant.const import ( @@ -147,8 +147,7 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): if self._value_template: return await self._async_query_state_value(self._command_state) return await self._async_query_state_code(self._command_state) - if TYPE_CHECKING: - return None + return None async def _update_entity_state(self, now: datetime | None = None) -> None: """Update the state of the entity.""" From fa53055485ca6d338b3abc554163e55ac969f8cb Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Tue, 30 Jul 2024 11:57:56 +0300 Subject: [PATCH 0119/1635] Bump voluptuous-openapi (#122828) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index f0c72a91501..b0e17bc2826 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -57,7 +57,7 @@ SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 ulid-transform==0.10.1 urllib3>=1.26.5,<2 -voluptuous-openapi==0.0.4 +voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 webrtc-noise-gain==1.2.3 diff --git a/pyproject.toml b/pyproject.toml index 70bfa1f18d8..eac18012ae3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,7 @@ dependencies = [ "urllib3>=1.26.5,<2", "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", - "voluptuous-openapi==0.0.4", + "voluptuous-openapi==0.0.5", "yarl==1.9.4", ] diff --git a/requirements.txt b/requirements.txt index 6e5ef50c187..5122cb99c41 100644 --- a/requirements.txt +++ b/requirements.txt @@ -40,5 +40,5 @@ ulid-transform==0.10.1 urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 -voluptuous-openapi==0.0.4 +voluptuous-openapi==0.0.5 yarl==1.9.4 From d825ac346ece81888d0231a51a028956e6917fc3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bruno=20Pantale=C3=A3o=20Gon=C3=A7alves?= <5808343+bgoncal@users.noreply.github.com> Date: Tue, 30 Jul 2024 11:00:08 +0200 Subject: [PATCH 0120/1635] Add 'use_custom_colors' to iOS Action configuration (#122767) --- homeassistant/components/ios/__init__.py | 2 ++ homeassistant/components/ios/const.py | 1 + 2 files changed, 3 insertions(+) diff --git a/homeassistant/components/ios/__init__.py b/homeassistant/components/ios/__init__.py index 4b2b92a482d..2a821166d8a 100644 --- a/homeassistant/components/ios/__init__.py +++ b/homeassistant/components/ios/__init__.py @@ -29,6 +29,7 @@ from .const import ( CONF_ACTION_NAME, CONF_ACTION_SHOW_IN_CARPLAY, CONF_ACTION_SHOW_IN_WATCH, + CONF_ACTION_USE_CUSTOM_COLORS, CONF_ACTIONS, DOMAIN, ) @@ -152,6 +153,7 @@ ACTION_SCHEMA = vol.Schema( }, vol.Optional(CONF_ACTION_SHOW_IN_CARPLAY): cv.boolean, vol.Optional(CONF_ACTION_SHOW_IN_WATCH): cv.boolean, + vol.Optional(CONF_ACTION_USE_CUSTOM_COLORS): cv.boolean, }, ) diff --git a/homeassistant/components/ios/const.py b/homeassistant/components/ios/const.py index 41da1954b44..181bbebd9a6 100644 --- 
a/homeassistant/components/ios/const.py +++ b/homeassistant/components/ios/const.py @@ -13,3 +13,4 @@ CONF_ACTION_ICON_ICON = "icon" CONF_ACTIONS = "actions" CONF_ACTION_SHOW_IN_CARPLAY = "show_in_carplay" CONF_ACTION_SHOW_IN_WATCH = "show_in_watch" +CONF_ACTION_USE_CUSTOM_COLORS = "use_custom_colors" From d78acd480a44dfd03eb88fce92c5c0d3eba6d22a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Cl=C3=A9ment?= Date: Tue, 30 Jul 2024 11:23:55 +0200 Subject: [PATCH 0121/1635] Add QBittorent switch to control alternative speed (#107637) * Fix key in strings.json for current_status in QBittorrent * Add switch on QBittorent to control alternative speed * Add switch file to .coveragerc * Fix some typo * Use coordinator for switch * Update to mach new lib * Import annotation Co-authored-by: Erik Montnemery * Remove quoted coordinator * Revert "Fix key in strings.json for current_status in QBittorrent" This reverts commit 962fd0474f0c9d6053bcf34898f68e48cf2bb715. --------- Co-authored-by: Erik Montnemery --- .../components/qbittorrent/__init__.py | 2 +- .../components/qbittorrent/coordinator.py | 22 +++- .../components/qbittorrent/helpers.py | 1 - .../components/qbittorrent/strings.json | 5 + .../components/qbittorrent/switch.py | 104 ++++++++++++++++++ 5 files changed, 131 insertions(+), 3 deletions(-) create mode 100644 homeassistant/components/qbittorrent/switch.py diff --git a/homeassistant/components/qbittorrent/__init__.py b/homeassistant/components/qbittorrent/__init__.py index fb781dd1a0c..d95136965f8 100644 --- a/homeassistant/components/qbittorrent/__init__.py +++ b/homeassistant/components/qbittorrent/__init__.py @@ -34,7 +34,7 @@ _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -PLATFORMS = [Platform.SENSOR] +PLATFORMS = [Platform.SENSOR, Platform.SWITCH] CONF_ENTRY = "entry" diff --git a/homeassistant/components/qbittorrent/coordinator.py b/homeassistant/components/qbittorrent/coordinator.py index 0ef36d2a954..c590bb9d81a 100644 --- a/homeassistant/components/qbittorrent/coordinator.py +++ b/homeassistant/components/qbittorrent/coordinator.py @@ -30,6 +30,7 @@ class QBittorrentDataCoordinator(DataUpdateCoordinator[SyncMainDataDictionary]): def __init__(self, hass: HomeAssistant, client: Client) -> None: """Initialize coordinator.""" self.client = client + self._is_alternative_mode_enabled = False # self.main_data: dict[str, int] = {} self.total_torrents: dict[str, int] = {} self.active_torrents: dict[str, int] = {} @@ -47,7 +48,13 @@ class QBittorrentDataCoordinator(DataUpdateCoordinator[SyncMainDataDictionary]): async def _async_update_data(self) -> SyncMainDataDictionary: try: - return await self.hass.async_add_executor_job(self.client.sync_maindata) + data = await self.hass.async_add_executor_job(self.client.sync_maindata) + self._is_alternative_mode_enabled = ( + await self.hass.async_add_executor_job( + self.client.transfer_speed_limits_mode + ) + == "1" + ) except (LoginFailed, Forbidden403Error) as exc: raise HomeAssistantError( translation_domain=DOMAIN, translation_key="login_error" @@ -56,6 +63,19 @@ class QBittorrentDataCoordinator(DataUpdateCoordinator[SyncMainDataDictionary]): raise HomeAssistantError( translation_domain=DOMAIN, translation_key="cannot_connect" ) from exc + return data + + def set_alt_speed_enabled(self, is_enabled: bool) -> None: + """Set the alternative speed mode.""" + self.client.transfer_toggle_speed_limits_mode(is_enabled) + + def toggle_alt_speed_enabled(self) -> None: + """Toggle the alternative speed 
mode.""" + self.client.transfer_toggle_speed_limits_mode() + + def get_alt_speed_enabled(self) -> bool: + """Get the alternative speed mode.""" + return self._is_alternative_mode_enabled async def get_torrents(self, torrent_filter: TorrentStatusesT) -> TorrentInfoList: """Async method to get QBittorrent torrents.""" diff --git a/homeassistant/components/qbittorrent/helpers.py b/homeassistant/components/qbittorrent/helpers.py index fac0a6033fa..6b459e99741 100644 --- a/homeassistant/components/qbittorrent/helpers.py +++ b/homeassistant/components/qbittorrent/helpers.py @@ -8,7 +8,6 @@ from qbittorrentapi import Client, TorrentDictionary, TorrentInfoList def setup_client(url: str, username: str, password: str, verify_ssl: bool) -> Client: """Create a qBittorrent client.""" - client = Client( url, username=username, password=password, VERIFY_WEBUI_CERTIFICATE=verify_ssl ) diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index 948e9dca8e9..fe27beb2a2d 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -47,6 +47,11 @@ "all_torrents": { "name": "All torrents" } + }, + "switch": { + "alternative_speed": { + "name": "Alternative speed" + } } }, "services": { diff --git a/homeassistant/components/qbittorrent/switch.py b/homeassistant/components/qbittorrent/switch.py new file mode 100644 index 00000000000..f12118e5233 --- /dev/null +++ b/homeassistant/components/qbittorrent/switch.py @@ -0,0 +1,104 @@ +"""Support for monitoring the qBittorrent API.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import QBittorrentDataCoordinator + + +@dataclass(frozen=True, kw_only=True) +class QBittorrentSwitchEntityDescription(SwitchEntityDescription): + """Describes qBittorren switch.""" + + is_on_func: Callable[[QBittorrentDataCoordinator], bool] + turn_on_fn: Callable[[QBittorrentDataCoordinator], None] + turn_off_fn: Callable[[QBittorrentDataCoordinator], None] + toggle_func: Callable[[QBittorrentDataCoordinator], None] + + +SWITCH_TYPES: tuple[QBittorrentSwitchEntityDescription, ...] 
= ( + QBittorrentSwitchEntityDescription( + key="alternative_speed", + translation_key="alternative_speed", + icon="mdi:speedometer-slow", + is_on_func=lambda coordinator: coordinator.get_alt_speed_enabled(), + turn_on_fn=lambda coordinator: coordinator.set_alt_speed_enabled(True), + turn_off_fn=lambda coordinator: coordinator.set_alt_speed_enabled(False), + toggle_func=lambda coordinator: coordinator.toggle_alt_speed_enabled(), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up qBittorrent switch entries.""" + + coordinator: QBittorrentDataCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + QBittorrentSwitch(coordinator, config_entry, description) + for description in SWITCH_TYPES + ) + + +class QBittorrentSwitch(CoordinatorEntity[QBittorrentDataCoordinator], SwitchEntity): + """Representation of a qBittorrent switch.""" + + _attr_has_entity_name = True + entity_description: QBittorrentSwitchEntityDescription + + def __init__( + self, + coordinator: QBittorrentDataCoordinator, + config_entry: ConfigEntry, + entity_description: QBittorrentSwitchEntityDescription, + ) -> None: + """Initialize qBittorrent switch.""" + super().__init__(coordinator) + self.entity_description = entity_description + self._attr_unique_id = f"{config_entry.entry_id}-{entity_description.key}" + self._attr_device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, config_entry.entry_id)}, + manufacturer="QBittorrent", + ) + + @property + def is_on(self) -> bool: + """Return true if device is on.""" + return self.entity_description.is_on_func(self.coordinator) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on this switch.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_on_fn, self.coordinator + ) + await self.coordinator.async_request_refresh() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off this switch.""" + await self.hass.async_add_executor_job( + self.entity_description.turn_off_fn, self.coordinator + ) + await self.coordinator.async_request_refresh() + + async def async_toggle(self, **kwargs: Any) -> None: + """Toggle the device.""" + await self.hass.async_add_executor_job( + self.entity_description.toggle_func, self.coordinator + ) + await self.coordinator.async_request_refresh() From 53a59412bb12ff48dae11dd219bec593929cd4ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kristof=20Mari=C3=ABn?= Date: Tue, 30 Jul 2024 02:34:30 -0700 Subject: [PATCH 0122/1635] Add Foscam sleep switch (#109491) * Add sleep switch * Replace awake with sleep switch --- homeassistant/components/foscam/__init__.py | 2 +- .../components/foscam/coordinator.py | 5 ++ homeassistant/components/foscam/strings.json | 7 ++ homeassistant/components/foscam/switch.py | 85 +++++++++++++++++++ 4 files changed, 98 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/foscam/switch.py diff --git a/homeassistant/components/foscam/__init__.py b/homeassistant/components/foscam/__init__.py index aed3ed637ae..f8708a589ce 100644 --- a/homeassistant/components/foscam/__init__.py +++ b/homeassistant/components/foscam/__init__.py @@ -17,7 +17,7 @@ from .config_flow import DEFAULT_RTSP_PORT from .const import CONF_RTSP_PORT, DOMAIN, LOGGER, SERVICE_PTZ, SERVICE_PTZ_PRESET from .coordinator import FoscamCoordinator -PLATFORMS = [Platform.CAMERA] +PLATFORMS = [Platform.CAMERA, Platform.SWITCH] async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/foscam/coordinator.py b/homeassistant/components/foscam/coordinator.py index 063d5235c04..e7a8abf7d30 100644 --- a/homeassistant/components/foscam/coordinator.py +++ b/homeassistant/components/foscam/coordinator.py @@ -44,4 +44,9 @@ class FoscamCoordinator(DataUpdateCoordinator[dict[str, Any]]): self.session.get_product_all_info ) data["product_info"] = all_info[1] + + ret, is_asleep = await self.hass.async_add_executor_job( + self.session.is_asleep + ) + data["is_asleep"] = {"supported": ret == 0, "status": is_asleep} return data diff --git a/homeassistant/components/foscam/strings.json b/homeassistant/components/foscam/strings.json index 285f0f5a780..2784e541809 100644 --- a/homeassistant/components/foscam/strings.json +++ b/homeassistant/components/foscam/strings.json @@ -25,6 +25,13 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "entity": { + "switch": { + "sleep_switch": { + "name": "Sleep" + } + } + }, "services": { "ptz": { "name": "PTZ", diff --git a/homeassistant/components/foscam/switch.py b/homeassistant/components/foscam/switch.py new file mode 100644 index 00000000000..9eae211881f --- /dev/null +++ b/homeassistant/components/foscam/switch.py @@ -0,0 +1,85 @@ +"""Component provides support for the Foscam Switch.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.switch import SwitchEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import FoscamCoordinator +from .const import DOMAIN, LOGGER +from .entity import FoscamEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up foscam switch from a config entry.""" + + coordinator: FoscamCoordinator = hass.data[DOMAIN][config_entry.entry_id] + + await coordinator.async_config_entry_first_refresh() + + if coordinator.data["is_asleep"]["supported"]: + async_add_entities([FoscamSleepSwitch(coordinator, config_entry)]) + + +class FoscamSleepSwitch(FoscamEntity, SwitchEntity): + """An implementation for Sleep Switch.""" + + def __init__( + self, + coordinator: FoscamCoordinator, + config_entry: ConfigEntry, + ) -> None: + """Initialize a Foscam Sleep Switch.""" + super().__init__(coordinator, config_entry.entry_id) + + self._attr_unique_id = "sleep_switch" + self._attr_translation_key = "sleep_switch" + self._attr_has_entity_name = True + + self.is_asleep = self.coordinator.data["is_asleep"]["status"] + + @property + def is_on(self): + """Return true if camera is asleep.""" + return self.is_asleep + + async def async_turn_off(self, **kwargs: Any) -> None: + """Wake camera.""" + LOGGER.debug("Wake camera") + + ret, _ = await self.hass.async_add_executor_job( + self.coordinator.session.wake_up + ) + + if ret != 0: + raise HomeAssistantError(f"Error waking up: {ret}") + + await self.coordinator.async_request_refresh() + + async def async_turn_on(self, **kwargs: Any) -> None: + """But camera is sleep.""" + LOGGER.debug("Sleep camera") + + ret, _ = await self.hass.async_add_executor_job(self.coordinator.session.sleep) + + if ret != 0: + raise HomeAssistantError(f"Error sleeping: {ret}") + + await self.coordinator.async_request_refresh() + + 
@callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + + self.is_asleep = self.coordinator.data["is_asleep"]["status"] + + self.async_write_ha_state() From 7c92287f972554826f4dd0bb99652ad88c7c64b6 Mon Sep 17 00:00:00 2001 From: Luke Wale Date: Tue, 30 Jul 2024 18:34:49 +0800 Subject: [PATCH 0123/1635] Add Airtouch5 cover tests (#122769) add airtouch5 cover tests --- tests/components/airtouch5/__init__.py | 12 ++ tests/components/airtouch5/conftest.py | 118 +++++++++++++++ .../airtouch5/snapshots/test_cover.ambr | 99 ++++++++++++ tests/components/airtouch5/test_cover.py | 143 ++++++++++++++++++ 4 files changed, 372 insertions(+) create mode 100644 tests/components/airtouch5/snapshots/test_cover.ambr create mode 100644 tests/components/airtouch5/test_cover.py diff --git a/tests/components/airtouch5/__init__.py b/tests/components/airtouch5/__init__.py index 2b76786e7e5..567be6af774 100644 --- a/tests/components/airtouch5/__init__.py +++ b/tests/components/airtouch5/__init__.py @@ -1 +1,13 @@ """Tests for the Airtouch 5 integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/airtouch5/conftest.py b/tests/components/airtouch5/conftest.py index ca678258c77..fab26e3f6cc 100644 --- a/tests/components/airtouch5/conftest.py +++ b/tests/components/airtouch5/conftest.py @@ -3,8 +3,22 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from airtouch5py.data_packet_factory import DataPacketFactory +from airtouch5py.packets.ac_ability import AcAbility +from airtouch5py.packets.ac_status import AcFanSpeed, AcMode, AcPowerState, AcStatus +from airtouch5py.packets.zone_name import ZoneName +from airtouch5py.packets.zone_status import ( + ControlMethod, + ZonePowerState, + ZoneStatusZone, +) import pytest +from homeassistant.components.airtouch5.const import DOMAIN +from homeassistant.const import CONF_HOST + +from tests.common import MockConfigEntry + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: @@ -13,3 +27,107 @@ def mock_setup_entry() -> Generator[AsyncMock]: "homeassistant.components.airtouch5.async_setup_entry", return_value=True ) as mock_setup_entry: yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock the config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="1.1.1.1", + data={ + CONF_HOST: "1.1.1.1", + }, + ) + + +@pytest.fixture +def mock_airtouch5_client() -> Generator[AsyncMock]: + """Mock an Airtouch5 client.""" + + with ( + patch( + "homeassistant.components.airtouch5.Airtouch5SimpleClient", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.airtouch5.config_flow.Airtouch5SimpleClient", + new=mock_client, + ), + ): + client = mock_client.return_value + + # Default values for the tests using this mock : + client.data_packet_factory = DataPacketFactory() + client.ac = [ + AcAbility( + ac_number=1, + ac_name="AC 1", + start_zone_number=1, + zone_count=2, + supports_mode_cool=True, + supports_mode_fan=True, + supports_mode_dry=True, + supports_mode_heat=True, + supports_mode_auto=True, + supports_fan_speed_intelligent_auto=True, + 
supports_fan_speed_turbo=True, + supports_fan_speed_powerful=True, + supports_fan_speed_high=True, + supports_fan_speed_medium=True, + supports_fan_speed_low=True, + supports_fan_speed_quiet=True, + supports_fan_speed_auto=True, + min_cool_set_point=15, + max_cool_set_point=25, + min_heat_set_point=20, + max_heat_set_point=30, + ) + ] + client.latest_ac_status = { + 1: AcStatus( + ac_power_state=AcPowerState.ON, + ac_number=1, + ac_mode=AcMode.AUTO, + ac_fan_speed=AcFanSpeed.AUTO, + ac_setpoint=24, + turbo_active=False, + bypass_active=False, + spill_active=False, + timer_set=False, + temperature=24, + error_code=0, + ) + } + + client.zones = [ZoneName(1, "Zone 1"), ZoneName(2, "Zone 2")] + client.latest_zone_status = { + 1: ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.PERCENTAGE_CONTROL, + open_percentage=0.9, + set_point=24, + has_sensor=False, + temperature=24, + spill_active=False, + is_low_battery=False, + ), + 2: ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.TEMPERATURE_CONTROL, + open_percentage=1, + set_point=24, + has_sensor=True, + temperature=24, + spill_active=False, + is_low_battery=False, + ), + } + + client.connection_state_callbacks = [] + client.zone_status_callbacks = [] + client.ac_status_callbacks = [] + + yield client diff --git a/tests/components/airtouch5/snapshots/test_cover.ambr b/tests/components/airtouch5/snapshots/test_cover.ambr new file mode 100644 index 00000000000..a8e57f69527 --- /dev/null +++ b/tests/components/airtouch5/snapshots/test_cover.ambr @@ -0,0 +1,99 @@ +# serializer version: 1 +# name: test_all_entities[cover.zone_1_damper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.zone_1_damper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Damper', + 'platform': 'airtouch5', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'damper', + 'unique_id': 'zone_1_open_percentage', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[cover.zone_1_damper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 90, + 'device_class': 'damper', + 'friendly_name': 'Zone 1 Damper', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.zone_1_damper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_all_entities[cover.zone_2_damper-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.zone_2_damper', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Damper', + 'platform': 'airtouch5', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'damper', + 'unique_id': 'zone_2_open_percentage', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_all_entities[cover.zone_2_damper-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_position': 100, + 'device_class': 'damper', + 'friendly_name': 'Zone 2 Damper', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.zone_2_damper', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/airtouch5/test_cover.py b/tests/components/airtouch5/test_cover.py new file mode 100644 index 00000000000..295535cd95d --- /dev/null +++ b/tests/components/airtouch5/test_cover.py @@ -0,0 +1,143 @@ +"""Tests for the Airtouch5 cover platform.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock, patch + +from airtouch5py.packets.zone_status import ( + ControlMethod, + ZonePowerState, + ZoneStatusZone, +) +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, + SERVICE_SET_COVER_POSITION, + STATE_OPEN, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_CLOSED, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +COVER_ENTITY_ID = "cover.zone_1_damper" + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + + with patch("homeassistant.components.airtouch5.PLATFORMS", [Platform.COVER]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_cover_actions( + hass: HomeAssistant, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the actions of the Airtouch5 covers.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_SET_COVER_POSITION, + {ATTR_ENTITY_ID: COVER_ENTITY_ID, ATTR_POSITION: 50}, + blocking=True, + ) + mock_airtouch5_client.send_packet.assert_called_once() + mock_airtouch5_client.reset_mock() + + +async def test_cover_callbacks( + hass: HomeAssistant, + mock_airtouch5_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the callbacks of the Airtouch5 covers.""" + + await setup_integration(hass, mock_config_entry) + + # We find the callback method on the mock client + zone_status_callback: Callable[[dict[int, ZoneStatusZone]], None] = ( + mock_airtouch5_client.zone_status_callbacks[2] + ) + + # Define a method to simply call it + async def _call_zone_status_callback(open_percentage: int) -> None: + zsz = ZoneStatusZone( + zone_power_state=ZonePowerState.ON, + zone_number=1, + control_method=ControlMethod.PERCENTAGE_CONTROL, + open_percentage=open_percentage, + set_point=None, + has_sensor=False, + 
temperature=None, + spill_active=False, + is_low_battery=False, + ) + zone_status_callback({1: zsz}) + await hass.async_block_till_done() + + # And call it to effectively launch the callback as the server would do + + # Partly open + await _call_zone_status_callback(0.7) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 70 + + # Fully open + await _call_zone_status_callback(1) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 100 + + # Fully closed + await _call_zone_status_callback(0.0) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_CLOSED + assert state.attributes.get(ATTR_CURRENT_POSITION) == 0 + + # Partly reopened + await _call_zone_status_callback(0.3) + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.state == STATE_OPEN + assert state.attributes.get(ATTR_CURRENT_POSITION) == 30 From b6f0893c336a575b9658b530eb89972b05abb8ff Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 13:05:38 +0200 Subject: [PATCH 0124/1635] Fix implicit-return in denon (#122835) --- homeassistant/components/denon/media_player.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/denon/media_player.py b/homeassistant/components/denon/media_player.py index b3b3ba97baa..0a6fe18d986 100644 --- a/homeassistant/components/denon/media_player.py +++ b/homeassistant/components/denon/media_player.py @@ -253,11 +253,12 @@ class DenonDevice(MediaPlayerEntity): return SUPPORT_DENON @property - def source(self): + def source(self) -> str | None: """Return the current input source.""" for pretty_name, name in self._source_list.items(): if self._mediasource == name: return pretty_name + return None def turn_off(self) -> None: """Turn off media player.""" From 015c50bbdb13788828b2e15f234dae4a5fbddf69 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 13:44:11 +0200 Subject: [PATCH 0125/1635] Fix implicit-return in ddwrt (#122837) --- homeassistant/components/ddwrt/device_tracker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/ddwrt/device_tracker.py b/homeassistant/components/ddwrt/device_tracker.py index 30ab3af53fb..5d31d16a530 100644 --- a/homeassistant/components/ddwrt/device_tracker.py +++ b/homeassistant/components/ddwrt/device_tracker.py @@ -162,6 +162,7 @@ class DdWrtDeviceScanner(DeviceScanner): ) return None _LOGGER.error("Invalid response from DD-WRT: %s", response) + return None def _parse_ddwrt_response(data_str): From 956cc6a85cd13be520c2410989acde35a5538b88 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 30 Jul 2024 13:54:44 +0200 Subject: [PATCH 0126/1635] Add UI to create KNX switch and light entities (#122630) Update KNX frontend to 2024.7.25.204106 --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 3e8986641e7..5035239d1fb 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==2.12.2", "xknxproject==3.7.1", - "knx-frontend==2024.1.20.105944" + 
"knx-frontend==2024.7.25.204106" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 70565d41175..7a63bcb5602 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1213,7 +1213,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.1.20.105944 +knx-frontend==2024.7.25.204106 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fe01d45de76..9e8664f4821 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1009,7 +1009,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.1.20.105944 +knx-frontend==2024.7.25.204106 # homeassistant.components.konnected konnected==1.2.0 From 72f9d85bbebb9c45b157974391728b12c6efb3ad Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 14:57:43 +0200 Subject: [PATCH 0127/1635] Fix implicit-return in whirlpool tests (#122775) --- tests/components/whirlpool/conftest.py | 2 ++ tests/components/whirlpool/test_sensor.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/tests/components/whirlpool/conftest.py b/tests/components/whirlpool/conftest.py index a5926f55a94..50620b20b8b 100644 --- a/tests/components/whirlpool/conftest.py +++ b/tests/components/whirlpool/conftest.py @@ -145,6 +145,8 @@ def side_effect_function(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" + return None + def get_sensor_mock(said): """Get a mock of a sensor.""" diff --git a/tests/components/whirlpool/test_sensor.py b/tests/components/whirlpool/test_sensor.py index 6af88c8a9f3..548025e29bd 100644 --- a/tests/components/whirlpool/test_sensor.py +++ b/tests/components/whirlpool/test_sensor.py @@ -42,6 +42,8 @@ def side_effect_function_open_door(*args, **kwargs): if args[0] == "WashCavity_OpStatusBulkDispense1Level": return "3" + return None + async def test_dryer_sensor_values( hass: HomeAssistant, From e7971f5a679b245e0aec0fdcb8afc8ff5d7d5f60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Cl=C3=A9ment?= Date: Tue, 30 Jul 2024 15:03:36 +0200 Subject: [PATCH 0128/1635] Fix qbittorent current_status key in strings.json (#122848) --- homeassistant/components/qbittorrent/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/qbittorrent/strings.json b/homeassistant/components/qbittorrent/strings.json index fe27beb2a2d..88015dad5c3 100644 --- a/homeassistant/components/qbittorrent/strings.json +++ b/homeassistant/components/qbittorrent/strings.json @@ -26,7 +26,7 @@ "upload_speed": { "name": "Upload speed" }, - "transmission_status": { + "current_status": { "name": "Status", "state": { "idle": "[%key:common::state::idle%]", From fd7c92879cccfa456088fa03f3063a36a707f791 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:23:04 +0200 Subject: [PATCH 0129/1635] Fix implicit-return in foursquare (#122843) --- homeassistant/components/foursquare/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/foursquare/__init__.py b/homeassistant/components/foursquare/__init__.py index c0eac33a6a8..12a29fd632e 100644 --- a/homeassistant/components/foursquare/__init__.py +++ b/homeassistant/components/foursquare/__init__.py @@ -3,6 +3,7 @@ from http import HTTPStatus import logging +from aiohttp import web import requests import voluptuous as vol 
@@ -85,11 +86,11 @@ class FoursquarePushReceiver(HomeAssistantView): url = "/api/foursquare" name = "foursquare" - def __init__(self, push_secret): + def __init__(self, push_secret: str) -> None: """Initialize the OAuth callback view.""" self.push_secret = push_secret - async def post(self, request): + async def post(self, request: web.Request) -> web.Response | None: """Accept the POST from Foursquare.""" try: data = await request.json() @@ -107,3 +108,4 @@ class FoursquarePushReceiver(HomeAssistantView): return self.json_message("Incorrect secret", HTTPStatus.BAD_REQUEST) request.app[KEY_HASS].bus.async_fire(EVENT_PUSH, data) + return None From 41c7414d9784d5401bce706d109f83e4258b6323 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:23:53 +0200 Subject: [PATCH 0130/1635] Fix implicit-return in forked_daapd (#122842) --- homeassistant/components/forked_daapd/media_player.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/forked_daapd/media_player.py b/homeassistant/components/forked_daapd/media_player.py index 98ad2f28caf..b8b544c1a2c 100644 --- a/homeassistant/components/forked_daapd/media_player.py +++ b/homeassistant/components/forked_daapd/media_player.py @@ -827,12 +827,13 @@ class ForkedDaapdMaster(MediaPlayerEntity): return self._source[:-7] return "" - async def _pipe_call(self, pipe_name, base_function_name): - if self._pipe_control_api.get(pipe_name): - return await getattr( - self._pipe_control_api[pipe_name], + async def _pipe_call(self, pipe_name, base_function_name) -> None: + if pipe := self._pipe_control_api.get(pipe_name): + await getattr( + pipe, PIPE_FUNCTION_MAP[pipe_name][base_function_name], )() + return _LOGGER.warning("No pipe control available for %s", pipe_name) async def async_browse_media( From 27eba3cd4631a4efe3580e231532e0235bb4c367 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:24:35 +0200 Subject: [PATCH 0131/1635] Fix implicit-return in fixer (#122841) --- homeassistant/components/fixer/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/fixer/sensor.py b/homeassistant/components/fixer/sensor.py index 4a03de5d6de..f8b4546d4c7 100644 --- a/homeassistant/components/fixer/sensor.py +++ b/homeassistant/components/fixer/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta import logging +from typing import Any from fixerio import Fixerio from fixerio.exceptions import FixerioException @@ -89,13 +90,14 @@ class ExchangeRateSensor(SensorEntity): return self._state @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self.data.rate is not None: return { ATTR_EXCHANGE_RATE: self.data.rate["rates"][self._target], ATTR_TARGET: self._target, } + return None def update(self) -> None: """Get the latest data and updates the states.""" From 7b5db6521c8a047570c968e95010d7279d486080 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:29:23 +0200 Subject: [PATCH 0132/1635] Fix implicit-return in advantage_air (#122840) --- homeassistant/components/advantage_air/climate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/advantage_air/climate.py b/homeassistant/components/advantage_air/climate.py index 7f9d3f2dc65..8da46cc7463 100644 
--- a/homeassistant/components/advantage_air/climate.py +++ b/homeassistant/components/advantage_air/climate.py @@ -206,7 +206,8 @@ class AdvantageAirAC(AdvantageAirAcEntity, ClimateEntity): async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set the HVAC Mode and State.""" if hvac_mode == HVACMode.OFF: - return await self.async_turn_off() + await self.async_turn_off() + return if hvac_mode == HVACMode.HEAT_COOL and self.preset_mode != ADVANTAGE_AIR_MYAUTO: raise ServiceValidationError("Heat/Cool is not supported in this mode") await self.async_update_ac( From 09cd79772f8e1a07397ad6ea4dda48b1a47ccdb8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:29:53 +0200 Subject: [PATCH 0133/1635] Fix implicit-return in airtouch4 (#122839) --- homeassistant/components/airtouch4/climate.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/airtouch4/climate.py b/homeassistant/components/airtouch4/climate.py index 29fd2bc4bed..dbb6f02859b 100644 --- a/homeassistant/components/airtouch4/climate.py +++ b/homeassistant/components/airtouch4/climate.py @@ -156,7 +156,8 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity): raise ValueError(f"Unsupported HVAC mode: {hvac_mode}") if hvac_mode == HVACMode.OFF: - return await self.async_turn_off() + await self.async_turn_off() + return await self._airtouch.SetCoolingModeForAc( self._ac_number, HA_STATE_TO_AT[hvac_mode] ) @@ -262,7 +263,8 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity): raise ValueError(f"Unsupported HVAC mode: {hvac_mode}") if hvac_mode == HVACMode.OFF: - return await self.async_turn_off() + await self.async_turn_off() + return if self.hvac_mode == HVACMode.OFF: await self.async_turn_on() self._unit = self._airtouch.GetGroups()[self._group_number] From ea508b26298aed78676c93d28b520e4e1f6a4356 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:32:29 +0200 Subject: [PATCH 0134/1635] Fix implicit-return in dialogflow (#122834) --- homeassistant/components/dialogflow/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/dialogflow/__init__.py b/homeassistant/components/dialogflow/__init__.py index 1c0da6b26eb..da6fbaf9969 100644 --- a/homeassistant/components/dialogflow/__init__.py +++ b/homeassistant/components/dialogflow/__init__.py @@ -103,6 +103,8 @@ def get_api_version(message): if message.get("responseId") is not None: return V2 + raise ValueError(f"Unable to extract API version from message: {message}") + async def async_handle_message(hass, message): """Handle a DialogFlow message.""" @@ -173,3 +175,5 @@ class DialogflowResponse: if self.api_version is V2: return {"fulfillmentText": self.speech, "source": SOURCE} + + raise ValueError(f"Invalid API version: {self.api_version}") From 2135691b90f4e7069c6981f373d21293d6420273 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:33:05 +0200 Subject: [PATCH 0135/1635] Fix implicit-return in dublin bus transport (#122833) --- homeassistant/components/dublin_bus_transport/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/dublin_bus_transport/sensor.py b/homeassistant/components/dublin_bus_transport/sensor.py index 91773d08142..5fc3453fca6 100644 --- a/homeassistant/components/dublin_bus_transport/sensor.py +++ b/homeassistant/components/dublin_bus_transport/sensor.py @@ -9,6 
+9,7 @@ from __future__ import annotations from contextlib import suppress from datetime import datetime, timedelta from http import HTTPStatus +from typing import Any import requests import voluptuous as vol @@ -102,7 +103,7 @@ class DublinPublicTransportSensor(SensorEntity): return self._state @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self._times is not None: next_up = "None" @@ -117,6 +118,7 @@ class DublinPublicTransportSensor(SensorEntity): ATTR_ROUTE: self._times[0][ATTR_ROUTE], ATTR_NEXT_UP: next_up, } + return None @property def native_unit_of_measurement(self): From c8372a3aa5094a1a96c3a64bed704f93763e43f4 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 15:33:57 +0200 Subject: [PATCH 0136/1635] Fix implicit-return in ecobee (#122832) --- homeassistant/components/ecobee/binary_sensor.py | 3 ++- homeassistant/components/ecobee/sensor.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/ecobee/binary_sensor.py b/homeassistant/components/ecobee/binary_sensor.py index 4286f2cf757..2a021442a63 100644 --- a/homeassistant/components/ecobee/binary_sensor.py +++ b/homeassistant/components/ecobee/binary_sensor.py @@ -46,7 +46,7 @@ class EcobeeBinarySensor(BinarySensorEntity): self.index = sensor_index @property - def unique_id(self): + def unique_id(self) -> str | None: """Return a unique identifier for this sensor.""" for sensor in self.data.ecobee.get_remote_sensors(self.index): if sensor["name"] == self.sensor_name: @@ -54,6 +54,7 @@ class EcobeeBinarySensor(BinarySensorEntity): return f"{sensor['code']}-{self.device_class}" thermostat = self.data.ecobee.get_thermostat(self.index) return f"{thermostat['identifier']}-{sensor['id']}-{self.device_class}" + return None @property def device_info(self) -> DeviceInfo | None: diff --git a/homeassistant/components/ecobee/sensor.py b/homeassistant/components/ecobee/sensor.py index 3e2e984cccb..fe0442fb885 100644 --- a/homeassistant/components/ecobee/sensor.py +++ b/homeassistant/components/ecobee/sensor.py @@ -112,7 +112,7 @@ class EcobeeSensor(SensorEntity): self._state = None @property - def unique_id(self): + def unique_id(self) -> str | None: """Return a unique identifier for this sensor.""" for sensor in self.data.ecobee.get_remote_sensors(self.index): if sensor["name"] == self.sensor_name: @@ -120,6 +120,7 @@ class EcobeeSensor(SensorEntity): return f"{sensor['code']}-{self.device_class}" thermostat = self.data.ecobee.get_thermostat(self.index) return f"{thermostat['identifier']}-{sensor['id']}-{self.device_class}" + return None @property def device_info(self) -> DeviceInfo | None: From 224228e4480b4f71c5722ed6ac33221c5b1b4710 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 16:16:33 +0200 Subject: [PATCH 0137/1635] Fix Axis tests affecting other tests (#122857) --- tests/components/axis/conftest.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/components/axis/conftest.py b/tests/components/axis/conftest.py index 30e1b7335b9..c3377c15955 100644 --- a/tests/components/axis/conftest.py +++ b/tests/components/axis/conftest.py @@ -128,6 +128,13 @@ def fixture_config_entry_options() -> MappingProxyType[str, Any]: # Axis API fixtures +@pytest.fixture(autouse=True) +def reset_mock_requests() -> Generator[None]: + """Reset respx mock routes after the test.""" + yield + respx.mock.clear() + + 
@pytest.fixture(name="mock_requests") def fixture_request( respx_mock: respx.MockRouter, From d9e996def5ad3824504e5062640b5393b2ec0132 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 16:18:47 +0200 Subject: [PATCH 0138/1635] Fix template binary sensor test (#122855) --- .../components/template/test_binary_sensor.py | 43 +++++++++++++------ 1 file changed, 29 insertions(+), 14 deletions(-) diff --git a/tests/components/template/test_binary_sensor.py b/tests/components/template/test_binary_sensor.py index 50cad5be9e1..eb51b3f53b4 100644 --- a/tests/components/template/test_binary_sensor.py +++ b/tests/components/template/test_binary_sensor.py @@ -1,5 +1,6 @@ """The tests for the Template Binary sensor platform.""" +from copy import deepcopy from datetime import UTC, datetime, timedelta import logging from unittest.mock import patch @@ -995,20 +996,32 @@ async def test_availability_icon_picture( ], ) @pytest.mark.parametrize( - ("extra_config", "restored_state", "initial_state"), + ("extra_config", "source_state", "restored_state", "initial_state"), [ - ({}, ON, OFF), - ({}, OFF, OFF), - ({}, STATE_UNAVAILABLE, OFF), - ({}, STATE_UNKNOWN, OFF), - ({"delay_off": 5}, ON, ON), - ({"delay_off": 5}, OFF, OFF), - ({"delay_off": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_off": 5}, STATE_UNKNOWN, STATE_UNKNOWN), - ({"delay_on": 5}, ON, ON), - ({"delay_on": 5}, OFF, OFF), - ({"delay_on": 5}, STATE_UNAVAILABLE, STATE_UNKNOWN), - ({"delay_on": 5}, STATE_UNKNOWN, STATE_UNKNOWN), + ({}, OFF, ON, OFF), + ({}, OFF, OFF, OFF), + ({}, OFF, STATE_UNAVAILABLE, OFF), + ({}, OFF, STATE_UNKNOWN, OFF), + ({"delay_off": 5}, OFF, ON, ON), + ({"delay_off": 5}, OFF, OFF, OFF), + ({"delay_off": 5}, OFF, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_off": 5}, OFF, STATE_UNKNOWN, STATE_UNKNOWN), + ({"delay_on": 5}, OFF, ON, OFF), + ({"delay_on": 5}, OFF, OFF, OFF), + ({"delay_on": 5}, OFF, STATE_UNAVAILABLE, OFF), + ({"delay_on": 5}, OFF, STATE_UNKNOWN, OFF), + ({}, ON, ON, ON), + ({}, ON, OFF, ON), + ({}, ON, STATE_UNAVAILABLE, ON), + ({}, ON, STATE_UNKNOWN, ON), + ({"delay_off": 5}, ON, ON, ON), + ({"delay_off": 5}, ON, OFF, ON), + ({"delay_off": 5}, ON, STATE_UNAVAILABLE, ON), + ({"delay_off": 5}, ON, STATE_UNKNOWN, ON), + ({"delay_on": 5}, ON, ON, ON), + ({"delay_on": 5}, ON, OFF, OFF), + ({"delay_on": 5}, ON, STATE_UNAVAILABLE, STATE_UNKNOWN), + ({"delay_on": 5}, ON, STATE_UNKNOWN, STATE_UNKNOWN), ], ) async def test_restore_state( @@ -1017,18 +1030,20 @@ async def test_restore_state( domain, config, extra_config, + source_state, restored_state, initial_state, ) -> None: """Test restoring template binary sensor.""" + hass.states.async_set("sensor.test_state", source_state) fake_state = State( "binary_sensor.test", restored_state, {}, ) mock_restore_cache(hass, (fake_state,)) - config = dict(config) + config = deepcopy(config) config["template"]["binary_sensor"].update(**extra_config) with assert_setup_component(count, domain): assert await async_setup_component( From a5136a10217f3e643952548402338312e5c3dd95 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Tue, 30 Jul 2024 16:27:58 +0200 Subject: [PATCH 0139/1635] Speed up slow tests in Husqvarna Automower (#122854) --- .../husqvarna_automower/test_number.py | 20 ++++++++++++++++--- .../husqvarna_automower/test_switch.py | 12 +++++++++-- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/tests/components/husqvarna_automower/test_number.py 
b/tests/components/husqvarna_automower/test_number.py index ac7353386ac..9f2f8793bba 100644 --- a/tests/components/husqvarna_automower/test_number.py +++ b/tests/components/husqvarna_automower/test_number.py @@ -1,13 +1,18 @@ """Tests for number platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException from aioautomower.utils import mower_list_to_dictionary_dataclass +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.const import ( + DOMAIN, + EXECUTION_TIME_DELAY, +) from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -16,7 +21,12 @@ from homeassistant.helpers import entity_registry as er from . import setup_integration from .const import TEST_MOWER_ID -from tests.common import MockConfigEntry, load_json_value_fixture, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_value_fixture, + snapshot_platform, +) @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -57,6 +67,7 @@ async def test_number_workarea_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test number commands.""" entity_id = "number.test_mower_1_front_lawn_cutting_height" @@ -75,8 +86,11 @@ async def test_number_workarea_commands( service="set_value", target={"entity_id": entity_id}, service_data={"value": "75"}, - blocking=True, + blocking=False, ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, 75, 123456) state = hass.states.get(entity_id) assert state.state is not None diff --git a/tests/components/husqvarna_automower/test_switch.py b/tests/components/husqvarna_automower/test_switch.py index 24fd63be749..5b4e465e253 100644 --- a/tests/components/husqvarna_automower/test_switch.py +++ b/tests/components/husqvarna_automower/test_switch.py @@ -1,5 +1,6 @@ """Tests for switch platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch from aioautomower.exceptions import ApiException @@ -9,7 +10,10 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.husqvarna_automower.const import DOMAIN +from homeassistant.components.husqvarna_automower.const import ( + DOMAIN, + EXECUTION_TIME_DELAY, +) from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -109,6 +113,7 @@ async def test_stay_out_zone_switch_commands( excepted_state: str, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test switch commands.""" entity_id = "switch.test_mower_1_avoid_danger_zone" @@ -124,8 +129,11 @@ async def test_stay_out_zone_switch_commands( domain="switch", service=service, service_data={"entity_id": entity_id}, - blocking=True, + blocking=False, ) + freezer.tick(timedelta(seconds=EXECUTION_TIME_DELAY)) + async_fire_time_changed(hass) + await hass.async_block_till_done() mocked_method.assert_called_once_with(TEST_MOWER_ID, 
TEST_ZONE_ID, boolean) state = hass.states.get(entity_id) assert state is not None From b973455037a163b9753df3b493c82b545dac74f4 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 16:28:55 +0200 Subject: [PATCH 0140/1635] Fix template image test affecting other tests (#122849) --- tests/components/template/test_image.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/components/template/test_image.py b/tests/components/template/test_image.py index d4e98d7a3ca..101b475956a 100644 --- a/tests/components/template/test_image.py +++ b/tests/components/template/test_image.py @@ -76,10 +76,12 @@ async def _assert_state( assert body == expected_image +@respx.mock @pytest.mark.freeze_time("2024-07-09 00:00:00+00:00") async def test_setup_config_entry( hass: HomeAssistant, snapshot: SnapshotAssertion, + imgbytes_jpg, ) -> None: """Test the config flow.""" @@ -538,6 +540,7 @@ async def test_trigger_image_custom_entity_picture( ) +@respx.mock async def test_device_id( hass: HomeAssistant, device_registry: dr.DeviceRegistry, From 1382f7a3dc12423ffb2f9a38bd76a72a799ae222 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 16:29:59 +0200 Subject: [PATCH 0141/1635] Fix generic IP camera tests affecting other tests (#122858) --- tests/components/generic/conftest.py | 17 +++++++++++++---- tests/components/generic/test_config_flow.py | 1 + 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/tests/components/generic/conftest.py b/tests/components/generic/conftest.py index 34062aab954..69e6cc6b696 100644 --- a/tests/components/generic/conftest.py +++ b/tests/components/generic/conftest.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Generator from io import BytesIO from unittest.mock import AsyncMock, MagicMock, Mock, _patch, patch @@ -51,15 +52,23 @@ def fakeimgbytes_gif() -> bytes: @pytest.fixture -def fakeimg_png(fakeimgbytes_png: bytes) -> None: +def fakeimg_png(fakeimgbytes_png: bytes) -> Generator[None]: """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_png) + respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( + stream=fakeimgbytes_png + ) + yield + respx.pop("fake_img") @pytest.fixture -def fakeimg_gif(fakeimgbytes_gif: bytes) -> None: +def fakeimg_gif(fakeimgbytes_gif: bytes) -> Generator[None]: """Set up respx to respond to test url with fake image bytes.""" - respx.get("http://127.0.0.1/testurl/1").respond(stream=fakeimgbytes_gif) + respx.get("http://127.0.0.1/testurl/1", name="fake_img").respond( + stream=fakeimgbytes_gif + ) + yield + respx.pop("fake_img") @pytest.fixture(scope="package") diff --git a/tests/components/generic/test_config_flow.py b/tests/components/generic/test_config_flow.py index 456e41a8d60..e7af9383791 100644 --- a/tests/components/generic/test_config_flow.py +++ b/tests/components/generic/test_config_flow.py @@ -638,6 +638,7 @@ async def test_form_stream_other_error(hass: HomeAssistant, user_flow) -> None: @respx.mock +@pytest.mark.usefixtures("fakeimg_png") async def test_form_stream_worker_error( hass: HomeAssistant, user_flow: ConfigFlowResult ) -> None: From 4994e46ad04fec84f7e2aed0f167796ab158d192 Mon Sep 17 00:00:00 2001 From: Marius <33354141+Mariusthvdb@users.noreply.github.com> Date: Tue, 30 Jul 2024 16:44:04 +0200 Subject: [PATCH 0142/1635] Add mdi:alert-circle-outline to degrade status (#122859) --- homeassistant/components/synology_dsm/icons.json | 5 ++++- 1 file changed, 4 
insertions(+), 1 deletion(-) diff --git a/homeassistant/components/synology_dsm/icons.json b/homeassistant/components/synology_dsm/icons.json index 8b4fad457d5..8e6d2b17f02 100644 --- a/homeassistant/components/synology_dsm/icons.json +++ b/homeassistant/components/synology_dsm/icons.json @@ -50,7 +50,10 @@ "default": "mdi:download" }, "volume_status": { - "default": "mdi:checkbox-marked-circle-outline" + "default": "mdi:checkbox-marked-circle-outline", + "state": { + "degrade": "mdi:alert-circle-outline" + } }, "volume_size_total": { "default": "mdi:chart-pie" From b3f7f379df235bcd7ebdd2b039932c98e2dff937 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Tue, 30 Jul 2024 16:51:02 +0200 Subject: [PATCH 0143/1635] Upgrade dsmr-parser to 1.4.2 (#121929) --- homeassistant/components/dsmr/manifest.json | 2 +- homeassistant/components/dsmr/sensor.py | 141 ++++++++------ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/dsmr/test_mbus_migration.py | 30 +-- tests/components/dsmr/test_sensor.py | 183 ++++++++++--------- 6 files changed, 195 insertions(+), 165 deletions(-) diff --git a/homeassistant/components/dsmr/manifest.json b/homeassistant/components/dsmr/manifest.json index c8f0a78f4dc..5490b2a6503 100644 --- a/homeassistant/components/dsmr/manifest.json +++ b/homeassistant/components/dsmr/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["dsmr_parser"], - "requirements": ["dsmr-parser==1.3.1"] + "requirements": ["dsmr-parser==1.4.2"] } diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index ae7b08b7f62..f794d1d05e9 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -4,10 +4,11 @@ from __future__ import annotations import asyncio from asyncio import CancelledError -from collections.abc import Callable +from collections.abc import Callable, Generator from contextlib import suppress from dataclasses import dataclass from datetime import timedelta +from enum import IntEnum from functools import partial from dsmr_parser.clients.protocol import create_dsmr_reader, create_tcp_dsmr_reader @@ -15,7 +16,7 @@ from dsmr_parser.clients.rfxtrx_protocol import ( create_rfxtrx_dsmr_reader, create_rfxtrx_tcp_dsmr_reader, ) -from dsmr_parser.objects import DSMRObject, Telegram +from dsmr_parser.objects import DSMRObject, MbusDevice, Telegram import serial from homeassistant.components.sensor import ( @@ -77,6 +78,13 @@ class DSMRSensorEntityDescription(SensorEntityDescription): obis_reference: str +class MbusDeviceType(IntEnum): + """Types of mbus devices (13757-3:2013).""" + + GAS = 3 + WATER = 7 + + SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( DSMRSensorEntityDescription( key="timestamp", @@ -318,7 +326,7 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] = ( DSMRSensorEntityDescription( key="belgium_max_current_per_phase", translation_key="max_current_per_phase", - obis_reference="BELGIUM_MAX_CURRENT_PER_PHASE", + obis_reference="FUSE_THRESHOLD_L1", dsmr_versions={"5B"}, device_class=SensorDeviceClass.CURRENT, entity_registry_enabled_default=False, @@ -377,38 +385,36 @@ SENSORS: tuple[DSMRSensorEntityDescription, ...] 
= ( ), ) - -def create_mbus_entity( - mbus: int, mtype: int, telegram: Telegram -) -> DSMRSensorEntityDescription | None: - """Create a new MBUS Entity.""" - if mtype == 3 and hasattr(telegram, f"BELGIUM_MBUS{mbus}_METER_READING2"): - return DSMRSensorEntityDescription( - key=f"mbus{mbus}_gas_reading", +SENSORS_MBUS_DEVICE_TYPE: dict[int, tuple[DSMRSensorEntityDescription, ...]] = { + MbusDeviceType.GAS: ( + DSMRSensorEntityDescription( + key="gas_reading", translation_key="gas_meter_reading", - obis_reference=f"BELGIUM_MBUS{mbus}_METER_READING2", + obis_reference="MBUS_METER_READING", is_gas=True, device_class=SensorDeviceClass.GAS, state_class=SensorStateClass.TOTAL_INCREASING, - ) - if mtype == 7 and (hasattr(telegram, f"BELGIUM_MBUS{mbus}_METER_READING1")): - return DSMRSensorEntityDescription( - key=f"mbus{mbus}_water_reading", + ), + ), + MbusDeviceType.WATER: ( + DSMRSensorEntityDescription( + key="water_reading", translation_key="water_meter_reading", - obis_reference=f"BELGIUM_MBUS{mbus}_METER_READING1", + obis_reference="MBUS_METER_READING", is_water=True, device_class=SensorDeviceClass.WATER, state_class=SensorStateClass.TOTAL_INCREASING, - ) - return None + ), + ), +} def device_class_and_uom( - telegram: dict[str, DSMRObject], + data: Telegram | MbusDevice, entity_description: DSMRSensorEntityDescription, ) -> tuple[SensorDeviceClass | None, str | None]: """Get native unit of measurement from telegram,.""" - dsmr_object = getattr(telegram, entity_description.obis_reference) + dsmr_object = getattr(data, entity_description.obis_reference) uom: str | None = getattr(dsmr_object, "unit") or None with suppress(ValueError): if entity_description.device_class == SensorDeviceClass.GAS and ( @@ -460,37 +466,60 @@ def rename_old_gas_to_mbus( dev_reg.async_remove_device(device_id) +def is_supported_description( + data: Telegram | MbusDevice, + description: DSMRSensorEntityDescription, + dsmr_version: str, +) -> bool: + """Check if this is a supported description for this telegram.""" + return hasattr(data, description.obis_reference) and ( + description.dsmr_versions is None or dsmr_version in description.dsmr_versions + ) + + def create_mbus_entities( - hass: HomeAssistant, telegram: Telegram, entry: ConfigEntry -) -> list[DSMREntity]: + hass: HomeAssistant, telegram: Telegram, entry: ConfigEntry, dsmr_version: str +) -> Generator[DSMREntity]: """Create MBUS Entities.""" - entities = [] - for idx in range(1, 5): - if ( - device_type := getattr(telegram, f"BELGIUM_MBUS{idx}_DEVICE_TYPE", None) - ) is None: + mbus_devices: list[MbusDevice] = getattr(telegram, "MBUS_DEVICES", []) + for device in mbus_devices: + if (device_type := getattr(device, "MBUS_DEVICE_TYPE", None)) is None: continue - if (type_ := int(device_type.value)) not in (3, 7): - continue - if identifier := getattr( - telegram, f"BELGIUM_MBUS{idx}_EQUIPMENT_IDENTIFIER", None - ): + type_ = int(device_type.value) + + if identifier := getattr(device, "MBUS_EQUIPMENT_IDENTIFIER", None): serial_ = identifier.value rename_old_gas_to_mbus(hass, entry, serial_) else: serial_ = "" - if description := create_mbus_entity(idx, type_, telegram): - entities.append( - DSMREntity( - description, - entry, - telegram, - *device_class_and_uom(telegram, description), # type: ignore[arg-type] - serial_, - idx, - ) + + for description in SENSORS_MBUS_DEVICE_TYPE.get(type_, ()): + if not is_supported_description(device, description, dsmr_version): + continue + yield DSMREntity( + description, + entry, + telegram, + *device_class_and_uom(device, 
description), # type: ignore[arg-type] + serial_, + device.channel_id, ) - return entities + + +def get_dsmr_object( + telegram: Telegram | None, mbus_id: int, obis_reference: str +) -> DSMRObject | None: + """Extract DSMR object from telegram.""" + if not telegram: + return None + + telegram_or_device: Telegram | MbusDevice | None = telegram + if mbus_id: + telegram_or_device = telegram.get_mbus_device_by_channel(mbus_id) + if telegram_or_device is None: + return None + + return getattr(telegram_or_device, obis_reference, None) async def async_setup_entry( @@ -510,8 +539,7 @@ async def async_setup_entry( add_entities_handler() add_entities_handler = None - if dsmr_version == "5B": - entities.extend(create_mbus_entities(hass, telegram, entry)) + entities.extend(create_mbus_entities(hass, telegram, entry, dsmr_version)) entities.extend( [ @@ -522,12 +550,8 @@ async def async_setup_entry( *device_class_and_uom(telegram, description), # type: ignore[arg-type] ) for description in SENSORS - if ( - description.dsmr_versions is None - or dsmr_version in description.dsmr_versions - ) + if is_supported_description(telegram, description, dsmr_version) and (not description.is_gas or CONF_SERIAL_ID_GAS in entry.data) - and hasattr(telegram, description.obis_reference) ] ) async_add_entities(entities) @@ -723,6 +747,7 @@ class DSMREntity(SensorEntity): identifiers={(DOMAIN, device_serial)}, name=device_name, ) + self._mbus_id = mbus_id if mbus_id != 0: if serial_id: self._attr_unique_id = f"{device_serial}" @@ -737,20 +762,22 @@ class DSMREntity(SensorEntity): self.telegram = telegram if self.hass and ( telegram is None - or hasattr(telegram, self.entity_description.obis_reference) + or get_dsmr_object( + telegram, self._mbus_id, self.entity_description.obis_reference + ) ): self.async_write_ha_state() def get_dsmr_object_attr(self, attribute: str) -> str | None: """Read attribute from last received telegram for this DSMR object.""" - # Make sure telegram contains an object for this entities obis - if self.telegram is None or not hasattr( - self.telegram, self.entity_description.obis_reference - ): + # Get the object + dsmr_object = get_dsmr_object( + self.telegram, self._mbus_id, self.entity_description.obis_reference + ) + if dsmr_object is None: return None # Get the attribute value if the object has it - dsmr_object = getattr(self.telegram, self.entity_description.obis_reference) attr: str | None = getattr(dsmr_object, attribute) return attr diff --git a/requirements_all.txt b/requirements_all.txt index 7a63bcb5602..4d74e059a59 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -750,7 +750,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.3.1 +dsmr-parser==1.4.2 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9e8664f4821..86fdf2da7f9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -640,7 +640,7 @@ dremel3dpy==2.1.1 dropmqttapi==1.0.3 # homeassistant.components.dsmr -dsmr-parser==1.3.1 +dsmr-parser==1.4.2 # homeassistant.components.dwd_weather_warnings dwdwfsapi==1.0.7 diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index cd3db27be8c..a28bc2c3a33 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -5,9 +5,9 @@ from decimal import Decimal from unittest.mock import MagicMock from dsmr_parser.obis_references import ( - 
BELGIUM_MBUS1_DEVICE_TYPE, - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS1_METER_READING2, + MBUS_DEVICE_TYPE, + MBUS_EQUIPMENT_IDENTIFIER, + MBUS_METER_READING, ) from dsmr_parser.objects import CosemObject, MBusObject, Telegram @@ -67,20 +67,20 @@ async def test_migrate_gas_to_mbus( telegram = Telegram() telegram.add( - BELGIUM_MBUS1_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 1), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS1_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS1_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 1), [ @@ -88,7 +88,7 @@ async def test_migrate_gas_to_mbus( {"value": Decimal(745.695), "unit": "m3"}, ], ), - "BELGIUM_MBUS1_METER_READING2", + "MBUS_METER_READING", ) assert await hass.config_entries.async_setup(mock_entry.entry_id) @@ -184,20 +184,20 @@ async def test_migrate_gas_to_mbus_exists( telegram = Telegram() telegram.add( - BELGIUM_MBUS1_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 0), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS1_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS1_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 1), [ @@ -205,7 +205,7 @@ async def test_migrate_gas_to_mbus_exists( {"value": Decimal(745.695), "unit": "m3"}, ], ), - "BELGIUM_MBUS1_METER_READING2", + "MBUS_METER_READING", ) assert await hass.config_entries.async_setup(mock_entry.entry_id) diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index 5b0cf6d7a15..b93dd8d18d2 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -15,33 +15,20 @@ from dsmr_parser import obis_references from dsmr_parser.obis_references import ( BELGIUM_CURRENT_AVERAGE_DEMAND, BELGIUM_MAXIMUM_DEMAND_MONTH, - BELGIUM_MBUS1_DEVICE_TYPE, - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS1_METER_READING1, - BELGIUM_MBUS1_METER_READING2, - BELGIUM_MBUS2_DEVICE_TYPE, - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS2_METER_READING1, - BELGIUM_MBUS2_METER_READING2, - BELGIUM_MBUS3_DEVICE_TYPE, - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS3_METER_READING1, - BELGIUM_MBUS3_METER_READING2, - BELGIUM_MBUS4_DEVICE_TYPE, - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, - BELGIUM_MBUS4_METER_READING1, - BELGIUM_MBUS4_METER_READING2, CURRENT_ELECTRICITY_USAGE, ELECTRICITY_ACTIVE_TARIFF, ELECTRICITY_EXPORTED_TOTAL, ELECTRICITY_IMPORTED_TOTAL, GAS_METER_READING, HOURLY_GAS_METER_READING, + MBUS_DEVICE_TYPE, + MBUS_EQUIPMENT_IDENTIFIER, + MBUS_METER_READING, ) from dsmr_parser.objects import CosemObject, MBusObject, Telegram import pytest -from homeassistant.components.dsmr.sensor import SENSORS +from homeassistant.components.dsmr.sensor import SENSORS, SENSORS_MBUS_DEVICE_TYPE from homeassistant.components.sensor import ( ATTR_OPTIONS, ATTR_STATE_CLASS, @@ -562,20 +549,20 @@ async def test_belgian_meter( "BELGIUM_MAXIMUM_DEMAND_MONTH", ) telegram.add( - BELGIUM_MBUS1_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 1), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS1_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) 
telegram.add( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS1_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 1), [ @@ -583,23 +570,23 @@ async def test_belgian_meter( {"value": Decimal(745.695), "unit": "m3"}, ], ), - "BELGIUM_MBUS1_METER_READING2", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS2_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 2), [{"value": "007", "unit": ""}]), - "BELGIUM_MBUS2_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - "BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS2_METER_READING1, + MBUS_METER_READING, MBusObject( (0, 2), [ @@ -607,23 +594,23 @@ async def test_belgian_meter( {"value": Decimal(678.695), "unit": "m3"}, ], ), - "BELGIUM_MBUS2_METER_READING1", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS3_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 3), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS3_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - "BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS3_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 3), [ @@ -631,23 +618,23 @@ async def test_belgian_meter( {"value": Decimal(12.12), "unit": "m3"}, ], ), - "BELGIUM_MBUS3_METER_READING2", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS4_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 4), [{"value": "007", "unit": ""}]), - "BELGIUM_MBUS4_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - "BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS4_METER_READING1, + MBUS_METER_READING, MBusObject( (0, 4), [ @@ -655,7 +642,7 @@ async def test_belgian_meter( {"value": Decimal(13.13), "unit": "m3"}, ], ), - "BELGIUM_MBUS4_METER_READING1", + "MBUS_METER_READING", ) telegram.add( ELECTRICITY_ACTIVE_TARIFF, @@ -777,20 +764,20 @@ async def test_belgian_meter_alt( telegram = Telegram() telegram.add( - BELGIUM_MBUS1_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 1), [{"value": "007", "unit": ""}]), - "BELGIUM_MBUS1_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS1_METER_READING1, + MBUS_METER_READING, MBusObject( (0, 1), [ @@ -798,23 +785,23 @@ async def test_belgian_meter_alt( {"value": Decimal(123.456), "unit": "m3"}, ], ), - "BELGIUM_MBUS1_METER_READING1", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS2_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 2), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS2_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 2), [{"value": "37464C4F32313139303333373332", "unit": 
""}], ), - "BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS2_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 2), [ @@ -822,23 +809,23 @@ async def test_belgian_meter_alt( {"value": Decimal(678.901), "unit": "m3"}, ], ), - "BELGIUM_MBUS2_METER_READING2", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS3_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 3), [{"value": "007", "unit": ""}]), - "BELGIUM_MBUS3_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - "BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS3_METER_READING1, + MBUS_METER_READING, MBusObject( (0, 3), [ @@ -846,23 +833,23 @@ async def test_belgian_meter_alt( {"value": Decimal(12.12), "unit": "m3"}, ], ), - "BELGIUM_MBUS3_METER_READING1", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS4_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 4), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS4_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 4), [{"value": "37464C4F32313139303333373334", "unit": ""}], ), - "BELGIUM_MBUS4_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS4_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 4), [ @@ -870,7 +857,7 @@ async def test_belgian_meter_alt( {"value": Decimal(13.13), "unit": "m3"}, ], ), - "BELGIUM_MBUS4_METER_READING2", + "MBUS_METER_READING", ) mock_entry = MockConfigEntry( @@ -970,46 +957,46 @@ async def test_belgian_meter_mbus( "ELECTRICITY_ACTIVE_TARIFF", ) telegram.add( - BELGIUM_MBUS1_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 1), [{"value": "006", "unit": ""}]), - "BELGIUM_MBUS1_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 1), [{"value": "37464C4F32313139303333373331", "unit": ""}], ), - "BELGIUM_MBUS1_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS2_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 2), [{"value": "003", "unit": ""}]), - "BELGIUM_MBUS2_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 2), [{"value": "37464C4F32313139303333373332", "unit": ""}], ), - "BELGIUM_MBUS2_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS3_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 3), [{"value": "007", "unit": ""}]), - "BELGIUM_MBUS3_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER, + MBUS_EQUIPMENT_IDENTIFIER, CosemObject( (0, 3), [{"value": "37464C4F32313139303333373333", "unit": ""}], ), - "BELGIUM_MBUS3_EQUIPMENT_IDENTIFIER", + "MBUS_EQUIPMENT_IDENTIFIER", ) telegram.add( - BELGIUM_MBUS3_METER_READING2, + MBUS_METER_READING, MBusObject( (0, 3), [ @@ -1017,15 +1004,15 @@ async def test_belgian_meter_mbus( {"value": Decimal(12.12), "unit": "m3"}, ], ), - "BELGIUM_MBUS3_METER_READING2", + "MBUS_METER_READING", ) telegram.add( - BELGIUM_MBUS4_DEVICE_TYPE, + MBUS_DEVICE_TYPE, CosemObject((0, 4), [{"value": "007", "unit": ""}]), - "BELGIUM_MBUS4_DEVICE_TYPE", + "MBUS_DEVICE_TYPE", ) telegram.add( - BELGIUM_MBUS4_METER_READING1, + MBUS_METER_READING, MBusObject( (0, 4), [ @@ -1033,7 +1020,7 @@ async def 
test_belgian_meter_mbus( {"value": Decimal(13.13), "unit": "m3"}, ], ), - "BELGIUM_MBUS4_METER_READING1", + "MBUS_METER_READING", ) mock_entry = MockConfigEntry( @@ -1057,20 +1044,32 @@ async def test_belgian_meter_mbus( active_tariff = hass.states.get("sensor.electricity_meter_active_tariff") assert active_tariff.state == "unknown" - # check if gas consumption mbus2 is parsed correctly + # check if gas consumption mbus1 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption") assert gas_consumption is None - # check if water usage mbus3 is parsed correctly - water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") - assert water_consumption is None - - # check if gas consumption mbus4 is parsed correctly + # check if gas consumption mbus2 is parsed correctly gas_consumption = hass.states.get("sensor.gas_meter_gas_consumption_2") assert gas_consumption is None - # check if gas consumption mbus4 is parsed correctly + # check if water usage mbus3 is parsed correctly water_consumption = hass.states.get("sensor.water_meter_water_consumption") + assert water_consumption + assert water_consumption.state == "12.12" + assert ( + water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER + ) + assert ( + water_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + assert ( + water_consumption.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + == UnitOfVolume.CUBIC_METERS + ) + + # check if gas consumption mbus4 is parsed correctly + water_consumption = hass.states.get("sensor.water_meter_water_consumption_2") assert water_consumption.state == "13.13" assert ( water_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.WATER @@ -1526,3 +1525,7 @@ def test_all_obis_references_exists(): """Verify that all attributes exist by name in database.""" for sensor in SENSORS: assert hasattr(obis_references, sensor.obis_reference) + + for sensors in SENSORS_MBUS_DEVICE_TYPE.values(): + for sensor in sensors: + assert hasattr(obis_references, sensor.obis_reference) From 4a34855a921fd6d6d7519ff2ef721e3cb72f31e6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 16:57:42 +0200 Subject: [PATCH 0144/1635] Fix implicit-return in scripts (#122831) --- script/install_integration_requirements.py | 1 + 1 file changed, 1 insertion(+) diff --git a/script/install_integration_requirements.py b/script/install_integration_requirements.py index ab91ea71557..91c9f6a8ed0 100644 --- a/script/install_integration_requirements.py +++ b/script/install_integration_requirements.py @@ -45,6 +45,7 @@ def main() -> int | None: cmd, check=True, ) + return None if __name__ == "__main__": From 6840f27bc6217ea57c1617261a320c624427acbf Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 17:12:58 +0200 Subject: [PATCH 0145/1635] Verify respx mock routes are cleaned up when tests finish (#122852) --- tests/conftest.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/conftest.py b/tests/conftest.py index 0d0fd826b44..0667edf4be2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -34,6 +34,7 @@ import multidict import pytest import pytest_socket import requests_mock +import respx from syrupy.assertion import SnapshotAssertion from homeassistant import block_async_io @@ -398,6 +399,13 @@ def verify_cleanup( # Restore the default time zone to not break subsequent tests dt_util.DEFAULT_TIME_ZONE = datetime.UTC + try: + # Verify respx.mock has been 
cleaned up + assert not respx.mock.routes, "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock" + finally: + # Clear mock routes not break subsequent tests + respx.mock.clear() + @pytest.fixture(autouse=True) def reset_hass_threading_local_object() -> Generator[None]: From b69b927795e4ede255b3f323e3594ce0a06acf6f Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Tue, 30 Jul 2024 17:17:20 +0200 Subject: [PATCH 0146/1635] Set parallel updates in devolo_home_network (#122847) --- homeassistant/components/devolo_home_network/binary_sensor.py | 2 ++ homeassistant/components/devolo_home_network/button.py | 2 ++ homeassistant/components/devolo_home_network/device_tracker.py | 2 ++ homeassistant/components/devolo_home_network/image.py | 2 ++ homeassistant/components/devolo_home_network/sensor.py | 2 ++ homeassistant/components/devolo_home_network/switch.py | 2 ++ homeassistant/components/devolo_home_network/update.py | 2 ++ 7 files changed, 14 insertions(+) diff --git a/homeassistant/components/devolo_home_network/binary_sensor.py b/homeassistant/components/devolo_home_network/binary_sensor.py index 38d79951149..c96d0273a50 100644 --- a/homeassistant/components/devolo_home_network/binary_sensor.py +++ b/homeassistant/components/devolo_home_network/binary_sensor.py @@ -21,6 +21,8 @@ from . import DevoloHomeNetworkConfigEntry from .const import CONNECTED_PLC_DEVICES, CONNECTED_TO_ROUTER from .entity import DevoloCoordinatorEntity +PARALLEL_UPDATES = 1 + def _is_connected_to_router(entity: DevoloBinarySensorEntity) -> bool: """Check, if device is attached to the router.""" diff --git a/homeassistant/components/devolo_home_network/button.py b/homeassistant/components/devolo_home_network/button.py index 1f67912f020..ca17b572522 100644 --- a/homeassistant/components/devolo_home_network/button.py +++ b/homeassistant/components/devolo_home_network/button.py @@ -22,6 +22,8 @@ from . import DevoloHomeNetworkConfigEntry from .const import DOMAIN, IDENTIFY, PAIRING, RESTART, START_WPS from .entity import DevoloEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class DevoloButtonEntityDescription(ButtonEntityDescription): diff --git a/homeassistant/components/devolo_home_network/device_tracker.py b/homeassistant/components/devolo_home_network/device_tracker.py index 0a221779622..960069191ee 100644 --- a/homeassistant/components/devolo_home_network/device_tracker.py +++ b/homeassistant/components/devolo_home_network/device_tracker.py @@ -22,6 +22,8 @@ from homeassistant.helpers.update_coordinator import ( from . import DevoloHomeNetworkConfigEntry from .const import CONNECTED_WIFI_CLIENTS, DOMAIN, WIFI_APTYPE, WIFI_BANDS +PARALLEL_UPDATES = 1 + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/devolo_home_network/image.py b/homeassistant/components/devolo_home_network/image.py index ee3b079da02..58052d3021e 100644 --- a/homeassistant/components/devolo_home_network/image.py +++ b/homeassistant/components/devolo_home_network/image.py @@ -20,6 +20,8 @@ from . 
import DevoloHomeNetworkConfigEntry from .const import IMAGE_GUEST_WIFI, SWITCH_GUEST_WIFI from .entity import DevoloCoordinatorEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class DevoloImageEntityDescription(ImageEntityDescription): diff --git a/homeassistant/components/devolo_home_network/sensor.py b/homeassistant/components/devolo_home_network/sensor.py index ffd40acf42a..2fd8ab9220c 100644 --- a/homeassistant/components/devolo_home_network/sensor.py +++ b/homeassistant/components/devolo_home_network/sensor.py @@ -31,6 +31,8 @@ from .const import ( ) from .entity import DevoloCoordinatorEntity +PARALLEL_UPDATES = 1 + _CoordinatorDataT = TypeVar( "_CoordinatorDataT", bound=LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo], diff --git a/homeassistant/components/devolo_home_network/switch.py b/homeassistant/components/devolo_home_network/switch.py index 3df67287f3b..c3400916d78 100644 --- a/homeassistant/components/devolo_home_network/switch.py +++ b/homeassistant/components/devolo_home_network/switch.py @@ -21,6 +21,8 @@ from . import DevoloHomeNetworkConfigEntry from .const import DOMAIN, SWITCH_GUEST_WIFI, SWITCH_LEDS from .entity import DevoloCoordinatorEntity +PARALLEL_UPDATES = 1 + _DataT = TypeVar("_DataT", bound=WifiGuestAccessGet | bool) diff --git a/homeassistant/components/devolo_home_network/update.py b/homeassistant/components/devolo_home_network/update.py index 92f5cb0f094..29c0c8762b9 100644 --- a/homeassistant/components/devolo_home_network/update.py +++ b/homeassistant/components/devolo_home_network/update.py @@ -26,6 +26,8 @@ from . import DevoloHomeNetworkConfigEntry from .const import DOMAIN, REGULAR_FIRMWARE from .entity import DevoloCoordinatorEntity +PARALLEL_UPDATES = 1 + @dataclass(frozen=True, kw_only=True) class DevoloUpdateEntityDescription(UpdateEntityDescription): From 1ffde403f020e3aafe21e1fe09e9ffa9bff3d5a2 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Tue, 30 Jul 2024 16:18:33 +0100 Subject: [PATCH 0147/1635] Ensure evohome leaves no lingering timers (#122860) --- homeassistant/components/evohome/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/evohome/__init__.py b/homeassistant/components/evohome/__init__.py index 2df4ae1be6b..5a5d9d09521 100644 --- a/homeassistant/components/evohome/__init__.py +++ b/homeassistant/components/evohome/__init__.py @@ -243,6 +243,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: update_interval=config[DOMAIN][CONF_SCAN_INTERVAL], update_method=broker.async_update, ) + await coordinator.async_register_shutdown() hass.data[DOMAIN] = {"broker": broker, "coordinator": coordinator} From 896cd27bea14fdb11d3bde57d7a2902567b85342 Mon Sep 17 00:00:00 2001 From: Kim de Vos Date: Tue, 30 Jul 2024 17:20:56 +0200 Subject: [PATCH 0148/1635] Add sensors for Unifi latency (#116737) * Add sensors for Unifi latency * Add needed guard and casting * Use new types * Add WAN2 support * Add literals * Make ids for WAN and WAN2 unique * Make methods general * Update sensor.py * add more typing * Simplify usage make_wan_latency_sensors * Simplify further * Move latency entity creation to method * Make method internal * simplify tests * Apply feedback * Reduce boiler copied code and add support function * Add external method for share logic between * Remove none * Add more tests * Apply feedback and change code to improve code coverage --- homeassistant/components/unifi/sensor.py | 93 +++++++++- tests/components/unifi/test_sensor.py 
| 224 +++++++++++++++++++++++ 2 files changed, 315 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index bb974864f60..d86b72d1b2f 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -11,6 +11,7 @@ from dataclasses import dataclass from datetime import date, datetime, timedelta from decimal import Decimal from functools import partial +from typing import TYPE_CHECKING, Literal from aiounifi.interfaces.api_handlers import ItemEvent from aiounifi.interfaces.clients import Clients @@ -20,7 +21,7 @@ from aiounifi.interfaces.ports import Ports from aiounifi.interfaces.wlans import Wlans from aiounifi.models.api import ApiItemT from aiounifi.models.client import Client -from aiounifi.models.device import Device +from aiounifi.models.device import Device, TypedDeviceUptimeStatsWanMonitor from aiounifi.models.outlet import Outlet from aiounifi.models.port import Port from aiounifi.models.wlan import Wlan @@ -32,7 +33,13 @@ from homeassistant.components.sensor import ( SensorStateClass, UnitOfTemperature, ) -from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfDataRate, UnitOfPower +from homeassistant.const import ( + PERCENTAGE, + EntityCategory, + UnitOfDataRate, + UnitOfPower, + UnitOfTime, +) from homeassistant.core import Event as core_Event, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -192,6 +199,86 @@ def async_device_state_value_fn(hub: UnifiHub, device: Device) -> str: return DEVICE_STATES[device.state] +@callback +def async_device_wan_latency_supported_fn( + wan: Literal["WAN", "WAN2"], + monitor_target: str, + hub: UnifiHub, + obj_id: str, +) -> bool: + """Determine if an device have a latency monitor.""" + if (device := hub.api.devices[obj_id]) and device.uptime_stats: + return _device_wan_latency_monitor(wan, monitor_target, device) is not None + return False + + +@callback +def async_device_wan_latency_value_fn( + wan: Literal["WAN", "WAN2"], + monitor_target: str, + hub: UnifiHub, + device: Device, +) -> int | None: + """Retrieve the monitor target from WAN monitors.""" + target = _device_wan_latency_monitor(wan, monitor_target, device) + + if TYPE_CHECKING: + # Checked by async_device_wan_latency_supported_fn + assert target + + return target.get("latency_average", 0) + + +@callback +def _device_wan_latency_monitor( + wan: Literal["WAN", "WAN2"], monitor_target: str, device: Device +) -> TypedDeviceUptimeStatsWanMonitor | None: + """Return the target of the WAN latency monitor.""" + if device.uptime_stats and (uptime_stats_wan := device.uptime_stats.get(wan)): + for monitor in uptime_stats_wan["monitors"]: + if monitor_target in monitor["target"]: + return monitor + return None + + +def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: + """Create WAN latency sensors from WAN monitor data.""" + + def make_wan_latency_entity_description( + wan: Literal["WAN", "WAN2"], name: str, monitor_target: str + ) -> UnifiSensorEntityDescription: + return UnifiSensorEntityDescription[Devices, Device]( + key=f"{name} {wan} latency", + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfTime.MILLISECONDS, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DURATION, + entity_registry_enabled_default=False, + api_handler_fn=lambda api: api.devices, + 
available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda _: f"{name} {wan} latency", + object_fn=lambda api, obj_id: api.devices[obj_id], + supported_fn=partial( + async_device_wan_latency_supported_fn, wan, monitor_target + ), + unique_id_fn=lambda hub, + obj_id: f"{name.lower}_{wan.lower}_latency-{obj_id}", + value_fn=partial(async_device_wan_latency_value_fn, wan, monitor_target), + ) + + wans: tuple[Literal["WAN"], Literal["WAN2"]] = ("WAN", "WAN2") + return tuple( + make_wan_latency_entity_description(wan, name, target) + for wan in wans + for name, target in ( + ("Microsoft", "microsoft"), + ("Google", "google"), + ("Cloudflare", "1.1.1.1"), + ) + ) + + @dataclass(frozen=True, kw_only=True) class UnifiSensorEntityDescription( SensorEntityDescription, UnifiEntityDescription[HandlerT, ApiItemT] @@ -456,6 +543,8 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), ) +ENTITY_DESCRIPTIONS += make_wan_latency_sensors() + async def async_setup_entry( hass: HomeAssistant, diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index e1893922f60..779df6660f0 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1424,3 +1424,227 @@ async def test_device_uptime( entity_registry.async_get("sensor.device_uptime").entity_category is EntityCategory.DIAGNOSTIC ) + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "availability": 100.0, + "latency_average": 39, + "monitors": [ + { + "availability": 100.0, + "latency_average": 56, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 53, + "target": "google.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 30, + "target": "1.1.1.1", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.parametrize( + ("entity_id", "state", "updated_state", "index_to_update"), + [ + # Microsoft + ("microsoft_wan", "56", "20", 0), + # Google + ("google_wan", "53", "90", 1), + # Cloudflare + ("cloudflare_wan", "30", "80", 2), + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_websocket_message, + device_payload: list[dict[str, Any]], + entity_id: str, + state: str, + updated_state: str, + index_to_update: int, +) -> None: + """Verify that wan latency sensors are working as expected.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get(f"sensor.mock_name_{entity_id}_latency") + assert latency_entry.disabled_by == RegistryEntryDisabler.INTEGRATION + assert latency_entry.entity_category is EntityCategory.DIAGNOSTIC + + # Enable entity + entity_registry.async_update_entity( + entity_id=f"sensor.mock_name_{entity_id}_latency", disabled_by=None + ) + + await hass.async_block_till_done() + 
+ async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 7 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + + # Verify sensor attributes and state + latency_entry = hass.states.get(f"sensor.mock_name_{entity_id}_latency") + assert latency_entry.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DURATION + assert ( + latency_entry.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + ) + assert latency_entry.state == state + + # Verify state update + device = device_payload[0] + device["uptime_stats"]["WAN"]["monitors"][index_to_update]["latency_average"] = ( + updated_state + ) + + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert ( + hass.states.get(f"sensor.mock_name_{entity_id}_latency").state == updated_state + ) + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "monitors": [ + { + "availability": 100.0, + "latency_average": 30, + "target": "1.2.3.4", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency_with_no_entries( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that wan latency sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") + assert latency_entry is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_wan_monitor_latency_with_no_uptime( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that wan latency sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") + assert latency_entry is None From 8066c7dec60e7164001bfff952d68517d1477c65 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 30 Jul 2024 17:21:45 +0200 Subject: [PATCH 0149/1635] Fix implicit-return in deconz (#122836) --- homeassistant/components/deconz/fan.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/deconz/fan.py b/homeassistant/components/deconz/fan.py index dc65756eeeb..67c759afeda 100644 --- a/homeassistant/components/deconz/fan.py +++ b/homeassistant/components/deconz/fan.py @@ -95,7 +95,8 @@ class DeconzFan(DeconzDevice[Light], 
FanEntity): async def async_set_percentage(self, percentage: int) -> None: """Set the speed percentage of the fan.""" if percentage == 0: - return await self.async_turn_off() + await self.async_turn_off() + return await self.hub.api.lights.lights.set_state( id=self._device.resource_id, fan_speed=percentage_to_ordered_list_item( From be24475cee71d3369d3975da12afd3709d47056b Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Tue, 30 Jul 2024 18:24:03 +0300 Subject: [PATCH 0150/1635] Update selector converters for llm script tools (#122830) --- homeassistant/helpers/llm.py | 4 +- tests/helpers/test_llm.py | 73 ++++++++++++++++++++++++++++++++++-- 2 files changed, 71 insertions(+), 6 deletions(-) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 4c8e2df06a4..8ad576b7ea5 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -521,7 +521,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return convert(cv.CONDITIONS_SCHEMA) if isinstance(schema, selector.ConstantSelector): - return {"enum": [schema.config["value"]]} + return convert(vol.Schema(schema.config["value"])) result: dict[str, Any] if isinstance(schema, selector.ColorTempSelector): @@ -573,7 +573,7 @@ def _selector_serializer(schema: Any) -> Any: # noqa: C901 return result if isinstance(schema, selector.ObjectSelector): - return {"type": "object"} + return {"type": "object", "additionalProperties": True} if isinstance(schema, selector.SelectSelector): options = [ diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index 3ad5b23b731..ea6e628d1d4 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -842,13 +842,22 @@ async def test_selector_serializer( assert selector_serializer( selector.ColorTempSelector({"min_mireds": 100, "max_mireds": 1000}) ) == {"type": "number", "minimum": 100, "maximum": 1000} + assert selector_serializer(selector.ConditionSelector()) == { + "type": "array", + "items": {"nullable": True, "type": "string"}, + } assert selector_serializer(selector.ConfigEntrySelector()) == {"type": "string"} assert selector_serializer(selector.ConstantSelector({"value": "test"})) == { - "enum": ["test"] + "type": "string", + "enum": ["test"], + } + assert selector_serializer(selector.ConstantSelector({"value": 1})) == { + "type": "integer", + "enum": [1], } - assert selector_serializer(selector.ConstantSelector({"value": 1})) == {"enum": [1]} assert selector_serializer(selector.ConstantSelector({"value": True})) == { - "enum": [True] + "type": "boolean", + "enum": [True], } assert selector_serializer(selector.QrCodeSelector({"data": "test"})) == { "type": "string" @@ -876,6 +885,17 @@ async def test_selector_serializer( "type": "array", "items": {"type": "string"}, } + assert selector_serializer(selector.DurationSelector()) == { + "type": "object", + "properties": { + "days": {"type": "number"}, + "hours": {"type": "number"}, + "minutes": {"type": "number"}, + "seconds": {"type": "number"}, + "milliseconds": {"type": "number"}, + }, + "required": [], + } assert selector_serializer(selector.EntitySelector()) == { "type": "string", "format": "entity_id", @@ -929,7 +949,10 @@ async def test_selector_serializer( "minimum": 30, "maximum": 100, } - assert selector_serializer(selector.ObjectSelector()) == {"type": "object"} + assert selector_serializer(selector.ObjectSelector()) == { + "type": "object", + "additionalProperties": True, + } assert selector_serializer( selector.SelectSelector( { @@ -951,6 +974,48 @@ async def 
test_selector_serializer( assert selector_serializer( selector.StateSelector({"entity_id": "sensor.test"}) ) == {"type": "string"} + target_schema = selector_serializer(selector.TargetSelector()) + target_schema["properties"]["entity_id"]["anyOf"][0][ + "enum" + ].sort() # Order is not deterministic + assert target_schema == { + "type": "object", + "properties": { + "area_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "device_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "entity_id": { + "anyOf": [ + {"type": "string", "enum": ["all", "none"], "format": "lower"}, + {"type": "string", "nullable": True}, + {"type": "array", "items": {"type": "string"}}, + ] + }, + "floor_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + "label_id": { + "anyOf": [ + {"type": "string", "enum": ["none"]}, + {"type": "array", "items": {"type": "string", "nullable": True}}, + ] + }, + }, + "required": [], + } + assert selector_serializer(selector.TemplateSelector()) == { "type": "string", "format": "jinja2", From 18a7d15d14bea31f340611a86c096f143cce0fd9 Mon Sep 17 00:00:00 2001 From: bdowden Date: Tue, 30 Jul 2024 11:26:08 -0400 Subject: [PATCH 0151/1635] Add Traffic Rule switches to UniFi Network (#118821) * Add Traffic Rule switches to UniFi Network * Retrieve Fix unifi traffic rule switches Poll for traffic rule updates; have immediate feedback in the UI for modifying traffic rules * Remove default values for unifi entity; Remove unnecessary code * Begin updating traffic rule unit tests * For the mock get request, allow for meta and data properties to not be appended to support v2 api requests Fix traffic rule unit tests; * inspect path to determine json response instead of passing an argument * Remove entity id parameter from tests; remove unused code; rename traffic rule unique ID prefix * Remove parameter with default. * More code removal; * Rename copy/paste variable; remove commented code; remove duplicate default code --------- Co-authored-by: ViViDboarder --- .../components/unifi/hub/entity_loader.py | 34 ++++++- homeassistant/components/unifi/switch.py | 31 ++++++- tests/components/unifi/conftest.py | 18 +++- tests/components/unifi/test_switch.py | 89 +++++++++++++++++++ 4 files changed, 164 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/unifi/hub/entity_loader.py b/homeassistant/components/unifi/hub/entity_loader.py index 29448a4114a..f11ddefec98 100644 --- a/homeassistant/components/unifi/hub/entity_loader.py +++ b/homeassistant/components/unifi/hub/entity_loader.py @@ -7,9 +7,10 @@ Make sure expected clients are available for platforms. 
from __future__ import annotations import asyncio +from collections.abc import Callable, Coroutine, Sequence from datetime import timedelta from functools import partial -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from aiounifi.interfaces.api_handlers import ItemEvent @@ -18,6 +19,7 @@ from homeassistant.core import callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from ..const import LOGGER, UNIFI_WIRELESS_CLIENTS from ..entity import UnifiEntity, UnifiEntityDescription @@ -26,6 +28,7 @@ if TYPE_CHECKING: from .hub import UnifiHub CHECK_HEARTBEAT_INTERVAL = timedelta(seconds=1) +POLL_INTERVAL = timedelta(seconds=10) class UnifiEntityLoader: @@ -43,10 +46,24 @@ class UnifiEntityLoader: hub.api.port_forwarding.update, hub.api.sites.update, hub.api.system_information.update, + hub.api.traffic_rules.update, hub.api.wlans.update, ) + self.polling_api_updaters = (hub.api.traffic_rules.update,) self.wireless_clients = hub.hass.data[UNIFI_WIRELESS_CLIENTS] + self._dataUpdateCoordinator = DataUpdateCoordinator( + hub.hass, + LOGGER, + name="Unifi entity poller", + update_method=self._update_pollable_api_data, + update_interval=POLL_INTERVAL, + ) + + self._update_listener = self._dataUpdateCoordinator.async_add_listener( + update_callback=lambda: None + ) + self.platforms: list[ tuple[ AddEntitiesCallback, @@ -65,16 +82,25 @@ class UnifiEntityLoader: self._restore_inactive_clients() self.wireless_clients.update_clients(set(self.hub.api.clients.values())) - async def _refresh_api_data(self) -> None: - """Refresh API data from network application.""" + async def _refresh_data( + self, updaters: Sequence[Callable[[], Coroutine[Any, Any, None]]] + ) -> None: results = await asyncio.gather( - *[update() for update in self.api_updaters], + *[update() for update in updaters], return_exceptions=True, ) for result in results: if result is not None: LOGGER.warning("Exception on update %s", result) + async def _update_pollable_api_data(self) -> None: + """Refresh API data for pollable updaters.""" + await self._refresh_data(self.polling_api_updaters) + + async def _refresh_api_data(self) -> None: + """Refresh API data from network application.""" + await self._refresh_data(self.api_updaters) + @callback def _restore_inactive_clients(self) -> None: """Restore inactive clients. 
diff --git a/homeassistant/components/unifi/switch.py b/homeassistant/components/unifi/switch.py index ef30abb9349..93a0c81a24e 100644 --- a/homeassistant/components/unifi/switch.py +++ b/homeassistant/components/unifi/switch.py @@ -20,6 +20,7 @@ from aiounifi.interfaces.dpi_restriction_groups import DPIRestrictionGroups from aiounifi.interfaces.outlets import Outlets from aiounifi.interfaces.port_forwarding import PortForwarding from aiounifi.interfaces.ports import Ports +from aiounifi.interfaces.traffic_rules import TrafficRules from aiounifi.interfaces.wlans import Wlans from aiounifi.models.api import ApiItemT from aiounifi.models.client import Client, ClientBlockRequest @@ -30,6 +31,7 @@ from aiounifi.models.event import Event, EventKey from aiounifi.models.outlet import Outlet from aiounifi.models.port import Port from aiounifi.models.port_forward import PortForward, PortForwardEnableRequest +from aiounifi.models.traffic_rule import TrafficRule, TrafficRuleEnableRequest from aiounifi.models.wlan import Wlan, WlanEnableRequest from homeassistant.components.switch import ( @@ -94,8 +96,8 @@ def async_dpi_group_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo: @callback -def async_port_forward_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo: - """Create device registry entry for port forward.""" +def async_unifi_network_device_info_fn(hub: UnifiHub, obj_id: str) -> DeviceInfo: + """Create device registry entry for the UniFi Network application.""" unique_id = hub.config.entry.unique_id assert unique_id is not None return DeviceInfo( @@ -158,6 +160,16 @@ async def async_port_forward_control_fn( await hub.api.request(PortForwardEnableRequest.create(port_forward, target)) +async def async_traffic_rule_control_fn( + hub: UnifiHub, obj_id: str, target: bool +) -> None: + """Control traffic rule state.""" + traffic_rule = hub.api.traffic_rules[obj_id].raw + await hub.api.request(TrafficRuleEnableRequest.create(traffic_rule, target)) + # Update the traffic rules so the UI is updated appropriately + await hub.api.traffic_rules.update() + + async def async_wlan_control_fn(hub: UnifiHub, obj_id: str, target: bool) -> None: """Control outlet relay.""" await hub.api.request(WlanEnableRequest.create(obj_id, target)) @@ -232,12 +244,25 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSwitchEntityDescription, ...] 
= ( icon="mdi:upload-network", api_handler_fn=lambda api: api.port_forwarding, control_fn=async_port_forward_control_fn, - device_info_fn=async_port_forward_device_info_fn, + device_info_fn=async_unifi_network_device_info_fn, is_on_fn=lambda hub, port_forward: port_forward.enabled, name_fn=lambda port_forward: f"{port_forward.name}", object_fn=lambda api, obj_id: api.port_forwarding[obj_id], unique_id_fn=lambda hub, obj_id: f"port_forward-{obj_id}", ), + UnifiSwitchEntityDescription[TrafficRules, TrafficRule]( + key="Traffic rule control", + device_class=SwitchDeviceClass.SWITCH, + entity_category=EntityCategory.CONFIG, + icon="mdi:security-network", + api_handler_fn=lambda api: api.traffic_rules, + control_fn=async_traffic_rule_control_fn, + device_info_fn=async_unifi_network_device_info_fn, + is_on_fn=lambda hub, traffic_rule: traffic_rule.enabled, + name_fn=lambda traffic_rule: traffic_rule.description, + object_fn=lambda api, obj_id: api.traffic_rules[obj_id], + unique_id_fn=lambda hub, obj_id: f"traffic_rule-{obj_id}", + ), UnifiSwitchEntityDescription[Ports, Port]( key="PoE port control", device_class=SwitchDeviceClass.OUTLET, diff --git a/tests/components/unifi/conftest.py b/tests/components/unifi/conftest.py index c20b8766bfc..4e460bab8f8 100644 --- a/tests/components/unifi/conftest.py +++ b/tests/components/unifi/conftest.py @@ -160,6 +160,7 @@ def fixture_request( dpi_app_payload: list[dict[str, Any]], dpi_group_payload: list[dict[str, Any]], port_forward_payload: list[dict[str, Any]], + traffic_rule_payload: list[dict[str, Any]], site_payload: list[dict[str, Any]], system_information_payload: list[dict[str, Any]], wlan_payload: list[dict[str, Any]], @@ -170,9 +171,16 @@ def fixture_request( url = f"https://{host}:{DEFAULT_PORT}" def mock_get_request(path: str, payload: list[dict[str, Any]]) -> None: + # APIV2 request respoonses have `meta` and `data` automatically appended + json = {} + if path.startswith("/v2"): + json = payload + else: + json = {"meta": {"rc": "OK"}, "data": payload} + aioclient_mock.get( f"{url}{path}", - json={"meta": {"rc": "OK"}, "data": payload}, + json=json, headers={"content-type": CONTENT_TYPE_JSON}, ) @@ -182,6 +190,7 @@ def fixture_request( json={"data": "login successful", "meta": {"rc": "ok"}}, headers={"content-type": CONTENT_TYPE_JSON}, ) + mock_get_request("/api/self/sites", site_payload) mock_get_request(f"/api/s/{site_id}/stat/sta", client_payload) mock_get_request(f"/api/s/{site_id}/rest/user", clients_all_payload) @@ -191,6 +200,7 @@ def fixture_request( mock_get_request(f"/api/s/{site_id}/rest/portforward", port_forward_payload) mock_get_request(f"/api/s/{site_id}/stat/sysinfo", system_information_payload) mock_get_request(f"/api/s/{site_id}/rest/wlanconf", wlan_payload) + mock_get_request(f"/v2/api/site/{site_id}/trafficrules", traffic_rule_payload) return __mock_requests @@ -262,6 +272,12 @@ def fixture_system_information_data() -> list[dict[str, Any]]: ] +@pytest.fixture(name="traffic_rule_payload") +def traffic_rule_payload_data() -> list[dict[str, Any]]: + """Traffic rule data.""" + return [] + + @pytest.fixture(name="wlan_payload") def fixture_wlan_data() -> list[dict[str, Any]]: """WLAN data.""" diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index b0ae8bde445..daf64301c8e 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -774,6 +774,37 @@ PORT_FORWARD_PLEX = { "src": "any", } +TRAFFIC_RULE = { + "_id": "6452cd9b859d5b11aa002ea1", + "action": 
"BLOCK", + "app_category_ids": [], + "app_ids": [], + "bandwidth_limit": { + "download_limit_kbps": 1024, + "enabled": False, + "upload_limit_kbps": 1024, + }, + "description": "Test Traffic Rule", + "name": "Test Traffic Rule", + "domains": [], + "enabled": True, + "ip_addresses": [], + "ip_ranges": [], + "matching_target": "INTERNET", + "network_ids": [], + "regions": [], + "schedule": { + "date_end": "2023-05-10", + "date_start": "2023-05-03", + "mode": "ALWAYS", + "repeat_on_days": [], + "time_all_day": False, + "time_range_end": "12:00", + "time_range_start": "09:00", + }, + "target_devices": [{"client_mac": CLIENT_1["mac"], "type": "CLIENT"}], +} + @pytest.mark.parametrize("client_payload", [[CONTROLLER_HOST]]) @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @@ -1072,6 +1103,64 @@ async def test_dpi_switches_add_second_app( assert hass.states.get("switch.block_media_streaming").state == STATE_ON +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) +@pytest.mark.usefixtures("config_entry_setup") +async def test_traffic_rules( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + mock_websocket_message, + config_entry_setup: ConfigEntry, + traffic_rule_payload: list[dict[str, Any]], +) -> None: + """Test control of UniFi traffic rules.""" + + assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 + + # Validate state object + switch_1 = hass.states.get("switch.unifi_network_test_traffic_rule") + assert switch_1.state == STATE_ON + assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + + traffic_rule = deepcopy(traffic_rule_payload[0]) + + # Disable traffic rule + aioclient_mock.put( + f"https://{config_entry_setup.data[CONF_HOST]}:1234" + f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}/trafficrules/{traffic_rule['_id']}", + ) + + call_count = aioclient_mock.call_count + + await hass.services.async_call( + SWITCH_DOMAIN, + "turn_off", + {"entity_id": "switch.unifi_network_test_traffic_rule"}, + blocking=True, + ) + # Updating the value for traffic rules will make another call to retrieve the values + assert aioclient_mock.call_count == call_count + 2 + expected_disable_call = deepcopy(traffic_rule) + expected_disable_call["enabled"] = False + + assert aioclient_mock.mock_calls[call_count][2] == expected_disable_call + + call_count = aioclient_mock.call_count + + # Enable traffic rule + await hass.services.async_call( + SWITCH_DOMAIN, + "turn_on", + {"entity_id": "switch.unifi_network_test_traffic_rule"}, + blocking=True, + ) + + expected_enable_call = deepcopy(traffic_rule) + expected_enable_call["enabled"] = True + + assert aioclient_mock.call_count == call_count + 2 + assert aioclient_mock.mock_calls[call_count][2] == expected_enable_call + + @pytest.mark.parametrize( ("device_payload", "entity_id", "outlet_index", "expected_switches"), [ From ea727546d645fd55b4f722c53c643fba1f1730bc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20H=C3=B6rrle?= <7945681+CM000n@users.noreply.github.com> Date: Tue, 30 Jul 2024 18:11:08 +0200 Subject: [PATCH 0152/1635] Add apsystems power switch (#122447) * bring back power switch * fix pylint issues * add SWITCH to platform list * improve run_on and turn_off functions * ruff formatting * replace _state with _attr_is_on * Update homeassistant/components/apsystems/switch.py Co-authored-by: Joost Lekkerkerker * remove unused dependencies * Update homeassistant/components/apsystems/switch.py Co-authored-by: Joost Lekkerkerker * use async functions from api * convert Api IntEnum 
Status Information to bool * add translation key * implement async_update again * replace finally with else * better handling of bool value * Update homeassistant/components/apsystems/switch.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/apsystems/switch.py Co-authored-by: Joost Lekkerkerker * rename power switch to inverter switch * add test_number and test_switch module * remove test_number * Add mock entry for get_device_power_status * Add mock entry for get_device_power_status * Update test snapshots --------- Co-authored-by: Joost Lekkerkerker --- .../components/apsystems/__init__.py | 2 +- .../components/apsystems/strings.json | 45 +++++++++++---- homeassistant/components/apsystems/switch.py | 56 +++++++++++++++++++ tests/components/apsystems/conftest.py | 3 +- .../apsystems/snapshots/test_switch.ambr | 48 ++++++++++++++++ tests/components/apsystems/test_switch.py | 31 ++++++++++ 6 files changed, 173 insertions(+), 12 deletions(-) create mode 100644 homeassistant/components/apsystems/switch.py create mode 100644 tests/components/apsystems/snapshots/test_switch.ambr create mode 100644 tests/components/apsystems/test_switch.py diff --git a/homeassistant/components/apsystems/__init__.py b/homeassistant/components/apsystems/__init__.py index 40e62a32475..91650201a87 100644 --- a/homeassistant/components/apsystems/__init__.py +++ b/homeassistant/components/apsystems/__init__.py @@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant from .const import DEFAULT_PORT from .coordinator import ApSystemsDataCoordinator -PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH] @dataclass diff --git a/homeassistant/components/apsystems/strings.json b/homeassistant/components/apsystems/strings.json index 95499e96b4d..18200f7b49d 100644 --- a/homeassistant/components/apsystems/strings.json +++ b/homeassistant/components/apsystems/strings.json @@ -20,18 +20,43 @@ }, "entity": { "sensor": { - "total_power": { "name": "Total power" }, - "total_power_p1": { "name": "Power of P1" }, - "total_power_p2": { "name": "Power of P2" }, - "lifetime_production": { "name": "Total lifetime production" }, - "lifetime_production_p1": { "name": "Lifetime production of P1" }, - "lifetime_production_p2": { "name": "Lifetime production of P2" }, - "today_production": { "name": "Production of today" }, - "today_production_p1": { "name": "Production of today from P1" }, - "today_production_p2": { "name": "Production of today from P2" } + "total_power": { + "name": "Total power" + }, + "total_power_p1": { + "name": "Power of P1" + }, + "total_power_p2": { + "name": "Power of P2" + }, + "lifetime_production": { + "name": "Total lifetime production" + }, + "lifetime_production_p1": { + "name": "Lifetime production of P1" + }, + "lifetime_production_p2": { + "name": "Lifetime production of P2" + }, + "today_production": { + "name": "Production of today" + }, + "today_production_p1": { + "name": "Production of today from P1" + }, + "today_production_p2": { + "name": "Production of today from P2" + } }, "number": { - "max_output": { "name": "Max output" } + "max_output": { + "name": "Max output" + } + }, + "switch": { + "inverter_status": { + "name": "Inverter status" + } } } } diff --git a/homeassistant/components/apsystems/switch.py b/homeassistant/components/apsystems/switch.py new file mode 100644 index 00000000000..405adc94b27 --- /dev/null +++ b/homeassistant/components/apsystems/switch.py @@ -0,0 +1,56 
@@ +"""The power switch which can be toggled via the APsystems local API integration.""" + +from __future__ import annotations + +from typing import Any + +from aiohttp.client_exceptions import ClientConnectionError +from APsystemsEZ1 import Status + +from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ApSystemsConfigEntry, ApSystemsData +from .entity import ApSystemsEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ApSystemsConfigEntry, + add_entities: AddEntitiesCallback, +) -> None: + """Set up the switch platform.""" + + add_entities([ApSystemsInverterSwitch(config_entry.runtime_data)], True) + + +class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity): + """The switch class for APSystems switches.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + _attr_translation_key = "inverter_status" + + def __init__(self, data: ApSystemsData) -> None: + """Initialize the switch.""" + super().__init__(data) + self._api = data.coordinator.api + self._attr_unique_id = f"{data.device_id}_inverter_status" + + async def async_update(self) -> None: + """Update switch status and availability.""" + try: + status = await self._api.get_device_power_status() + except (TimeoutError, ClientConnectionError): + self._attr_available = False + else: + self._attr_available = True + self._attr_is_on = status == Status.normal + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self._api.set_device_power_status(0) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self._api.set_device_power_status(1) diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index 682086be380..c191c7ca2dc 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData +from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData, Status import pytest from homeassistant.components.apsystems.const import DOMAIN @@ -52,6 +52,7 @@ def mock_apsystems() -> Generator[MagicMock]: e2=6.0, te2=7.0, ) + mock_api.get_device_power_status.return_value = Status.normal yield mock_api diff --git a/tests/components/apsystems/snapshots/test_switch.ambr b/tests/components/apsystems/snapshots/test_switch.ambr new file mode 100644 index 00000000000..6daa9fd6e14 --- /dev/null +++ b/tests/components/apsystems/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_all_entities[switch.mock_title_inverter_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_inverter_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Inverter status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'inverter_status', + 'unique_id': 'MY_SERIAL_NUMBER_inverter_status', + 
'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[switch.mock_title_inverter_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Inverter status', + }), + 'context': , + 'entity_id': 'switch.mock_title_inverter_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/apsystems/test_switch.py b/tests/components/apsystems/test_switch.py new file mode 100644 index 00000000000..afd889fe958 --- /dev/null +++ b/tests/components/apsystems/test_switch.py @@ -0,0 +1,31 @@ +"""Test the APSystem switch module.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.apsystems.PLATFORMS", + [Platform.SWITCH], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) From 50b35ac4bce293d55525adaae9b5df01b9157762 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Tue, 30 Jul 2024 18:14:01 +0200 Subject: [PATCH 0153/1635] Add number platform to IronOS integration (#122801) * Add setpoint temperature number entity to IronOS integration * Add tests for number platform * Initialize settings in coordinator * Remove unused code --- homeassistant/components/iron_os/__init__.py | 2 +- homeassistant/components/iron_os/const.py | 3 + homeassistant/components/iron_os/icons.json | 8 +- homeassistant/components/iron_os/number.py | 96 ++++++++++++++++ homeassistant/components/iron_os/strings.json | 8 ++ .../iron_os/snapshots/test_number.ambr | 58 ++++++++++ tests/components/iron_os/test_number.py | 104 ++++++++++++++++++ 7 files changed, 275 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/iron_os/number.py create mode 100644 tests/components/iron_os/snapshots/test_number.ambr create mode 100644 tests/components/iron_os/test_number.py diff --git a/homeassistant/components/iron_os/__init__.py b/homeassistant/components/iron_os/__init__.py index bf3c6c34c83..11d99a1558a 100644 --- a/homeassistant/components/iron_os/__init__.py +++ b/homeassistant/components/iron_os/__init__.py @@ -16,7 +16,7 @@ from homeassistant.exceptions import ConfigEntryNotReady from .const import DOMAIN from .coordinator import IronOSCoordinator -PLATFORMS: list[Platform] = [Platform.SENSOR] +PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR] type IronOSConfigEntry = ConfigEntry[IronOSCoordinator] diff --git a/homeassistant/components/iron_os/const.py b/homeassistant/components/iron_os/const.py index 86b7d401f4f..34889636808 100644 --- a/homeassistant/components/iron_os/const.py +++ b/homeassistant/components/iron_os/const.py @@ -8,3 +8,6 @@ MODEL = "Pinecil V2" OHM = "Ω" DISCOVERY_SVC_UUID = "9eae1000-9d0d-48c5-aa55-33e27f9bc533" + +MAX_TEMP: int = 450 +MIN_TEMP: int = 10 diff --git a/homeassistant/components/iron_os/icons.json b/homeassistant/components/iron_os/icons.json index 0d207607a4f..fa14b8134d0 100644 --- 
a/homeassistant/components/iron_os/icons.json +++ b/homeassistant/components/iron_os/icons.json @@ -1,12 +1,14 @@ { "entity": { + "number": { + "setpoint_temperature": { + "default": "mdi:thermometer" + } + }, "sensor": { "live_temperature": { "default": "mdi:soldering-iron" }, - "setpoint_temperature": { - "default": "mdi:thermostat" - }, "voltage": { "default": "mdi:current-dc" }, diff --git a/homeassistant/components/iron_os/number.py b/homeassistant/components/iron_os/number.py new file mode 100644 index 00000000000..9230faec1f1 --- /dev/null +++ b/homeassistant/components/iron_os/number.py @@ -0,0 +1,96 @@ +"""Number platform for IronOS integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from enum import StrEnum + +from pynecil import CharSetting, CommunicationError, LiveDataResponse + +from homeassistant.components.number import ( + NumberDeviceClass, + NumberEntity, + NumberEntityDescription, + NumberMode, +) +from homeassistant.const import UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IronOSConfigEntry +from .const import DOMAIN, MAX_TEMP, MIN_TEMP +from .entity import IronOSBaseEntity + + +@dataclass(frozen=True, kw_only=True) +class IronOSNumberEntityDescription(NumberEntityDescription): + """Describes IronOS number entity.""" + + value_fn: Callable[[LiveDataResponse], float | int | None] + max_value_fn: Callable[[LiveDataResponse], float | int] + set_key: CharSetting + + +class PinecilNumber(StrEnum): + """Number controls for Pinecil device.""" + + SETPOINT_TEMP = "setpoint_temperature" + + +PINECIL_NUMBER_DESCRIPTIONS: tuple[IronOSNumberEntityDescription, ...] 
= ( + IronOSNumberEntityDescription( + key=PinecilNumber.SETPOINT_TEMP, + translation_key=PinecilNumber.SETPOINT_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_class=NumberDeviceClass.TEMPERATURE, + value_fn=lambda data: data.setpoint_temp, + set_key=CharSetting.SETPOINT_TEMP, + mode=NumberMode.BOX, + native_min_value=MIN_TEMP, + native_step=5, + max_value_fn=lambda data: min(data.max_tip_temp_ability or MAX_TEMP, MAX_TEMP), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IronOSConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up number entities from a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + IronOSNumberEntity(coordinator, description) + for description in PINECIL_NUMBER_DESCRIPTIONS + ) + + +class IronOSNumberEntity(IronOSBaseEntity, NumberEntity): + """Implementation of a IronOS number entity.""" + + entity_description: IronOSNumberEntityDescription + + async def async_set_native_value(self, value: float) -> None: + """Update the current value.""" + try: + await self.coordinator.device.write(self.entity_description.set_key, value) + except CommunicationError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="submit_setting_failed", + ) from e + self.async_write_ha_state() + + @property + def native_value(self) -> float | int | None: + """Return sensor state.""" + return self.entity_description.value_fn(self.coordinator.data) + + @property + def native_max_value(self) -> float: + """Return sensor state.""" + return self.entity_description.max_value_fn(self.coordinator.data) diff --git a/homeassistant/components/iron_os/strings.json b/homeassistant/components/iron_os/strings.json index cb95330b768..75584fe191c 100644 --- a/homeassistant/components/iron_os/strings.json +++ b/homeassistant/components/iron_os/strings.json @@ -17,6 +17,11 @@ } }, "entity": { + "number": { + "setpoint_temperature": { + "name": "Setpoint temperature" + } + }, "sensor": { "live_temperature": { "name": "Tip temperature" @@ -79,6 +84,9 @@ }, "setup_device_connection_error_exception": { "message": "Connection to device {name} failed, try again later" + }, + "submit_setting_failed": { + "message": "Failed to submit setting to device, try again later" } } } diff --git a/tests/components/iron_os/snapshots/test_number.ambr b/tests/components/iron_os/snapshots/test_number.ambr new file mode 100644 index 00000000000..2f5ee62e37e --- /dev/null +++ b/tests/components/iron_os/snapshots/test_number.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_state[number.pinecil_setpoint_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 450, + 'min': 10, + 'mode': , + 'step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': None, + 'entity_id': 'number.pinecil_setpoint_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Setpoint temperature', + 'platform': 'iron_os', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'c0:ff:ee:c0:ff:ee_setpoint_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_state[number.pinecil_setpoint_temperature-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Pinecil Setpoint temperature', + 'max': 450, + 'min': 10, + 'mode': , + 'step': 5, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.pinecil_setpoint_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '300', + }) +# --- diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py new file mode 100644 index 00000000000..c091040668c --- /dev/null +++ b/tests/components/iron_os/test_number.py @@ -0,0 +1,104 @@ +"""Tests for the IronOS number platform.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + +from pynecil import CharSetting, CommunicationError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.fixture(autouse=True) +async def sensor_only() -> AsyncGenerator[None, None]: + """Enable only the number platform.""" + with patch( + "homeassistant.components.iron_os.PLATFORMS", + [Platform.NUMBER], + ): + yield + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_pynecil", "ble_device" +) +async def test_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Test the IronOS number platform states.""" + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_set_value( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS number platform set value service.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 300}, + target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + blocking=True, + ) + assert len(mock_pynecil.write.mock_calls) == 1 + mock_pynecil.write.assert_called_once_with(CharSetting.SETPOINT_TEMP, 300) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default", "ble_device") +async def test_set_value_exception( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test the IronOS number platform set value service with exception.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + mock_pynecil.write.side_effect = CommunicationError + + with pytest.raises( + ServiceValidationError, + match="Failed to submit setting to device, try again later", + ): + await 
hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + service_data={ATTR_VALUE: 300}, + target={ATTR_ENTITY_ID: "number.pinecil_setpoint_temperature"}, + blocking=True, + ) From fb229fcae85ff6719da7e03f6b8857de83a0110d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 18:40:36 +0200 Subject: [PATCH 0154/1635] Improve test coverage of the homeworks integration (#122865) * Improve test coverage of the homeworks integration * Revert changes from the future * Revert changes from the future --- .../components/homeworks/__init__.py | 14 +++++------ tests/components/homeworks/test_init.py | 23 +++++++++++++++++++ 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/homeworks/__init__.py b/homeassistant/components/homeworks/__init__.py index e30778f7f15..f1a95102c3b 100644 --- a/homeassistant/components/homeworks/__init__.py +++ b/homeassistant/components/homeworks/__init__.py @@ -171,16 +171,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - return False + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + data: HomeworksData = hass.data[DOMAIN].pop(entry.entry_id) + for keypad in data.keypads.values(): + keypad.unsubscribe() - data: HomeworksData = hass.data[DOMAIN].pop(entry.entry_id) - for keypad in data.keypads.values(): - keypad.unsubscribe() + await hass.async_add_executor_job(data.controller.close) - await hass.async_add_executor_job(data.controller.close) - - return True + return unload_ok async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: diff --git a/tests/components/homeworks/test_init.py b/tests/components/homeworks/test_init.py index 87aabb6258f..af43fcfba10 100644 --- a/tests/components/homeworks/test_init.py +++ b/tests/components/homeworks/test_init.py @@ -8,6 +8,7 @@ import pytest from homeassistant.components.homeworks import EVENT_BUTTON_PRESS, EVENT_BUTTON_RELEASE from homeassistant.components.homeworks.const import DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -165,3 +166,25 @@ async def test_send_command( blocking=True, ) assert len(mock_controller._send.mock_calls) == 0 + + +async def test_cleanup_on_ha_shutdown( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test cleanup when HA shuts down.""" + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_controller.stop.assert_not_called() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + + mock_controller.close.assert_called_once_with() From 5eff4f981641779972999fb040a3f5854d50ec16 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Tue, 30 Jul 2024 19:33:25 +0200 Subject: [PATCH 0155/1635] Unifi improve fixture typing (#122864) * Improve typing of UniFi fixtures * Improve fixture typing, excluding image, sensor, switch * Improve fixture typing in image tests * 
Improve fixtures typing in sensor tests * Improve fixture typing in switch tests * Fix review comment --- tests/components/unifi/conftest.py | 37 +++++++--- tests/components/unifi/test_button.py | 26 ++++--- tests/components/unifi/test_config_flow.py | 13 ++-- tests/components/unifi/test_device_tracker.py | 43 +++++++----- tests/components/unifi/test_diagnostics.py | 4 +- tests/components/unifi/test_hub.py | 29 ++++---- tests/components/unifi/test_image.py | 18 +++-- tests/components/unifi/test_init.py | 19 +++--- tests/components/unifi/test_sensor.py | 61 +++++++++-------- tests/components/unifi/test_services.py | 16 ++--- tests/components/unifi/test_switch.py | 67 +++++++++++-------- tests/components/unifi/test_update.py | 21 ++++-- 12 files changed, 214 insertions(+), 140 deletions(-) diff --git a/tests/components/unifi/conftest.py b/tests/components/unifi/conftest.py index 4e460bab8f8..798b613b18d 100644 --- a/tests/components/unifi/conftest.py +++ b/tests/components/unifi/conftest.py @@ -3,10 +3,10 @@ from __future__ import annotations import asyncio -from collections.abc import Callable, Generator +from collections.abc import Callable, Coroutine, Generator from datetime import timedelta from types import MappingProxyType -from typing import Any +from typing import Any, Protocol from unittest.mock import AsyncMock, patch from aiounifi.models.message import MessageKey @@ -16,7 +16,6 @@ import pytest from homeassistant.components.unifi import STORAGE_KEY, STORAGE_VERSION from homeassistant.components.unifi.const import CONF_SITE_ID, DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.hub.websocket import RETRY_TIMER -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -52,6 +51,20 @@ CONTROLLER_HOST = { "uptime": 1562600160, } +type ConfigEntryFactoryType = Callable[[], Coroutine[Any, Any, MockConfigEntry]] + + +class WebsocketMessageMock(Protocol): + """Fixture to mock websocket message.""" + + def __call__( + self, + *, + message: MessageKey | None = None, + data: list[dict[str, Any]] | dict[str, Any] | None = None, + ) -> None: + """Send websocket message.""" + @pytest.fixture(autouse=True, name="mock_discovery") def fixture_discovery(): @@ -96,7 +109,7 @@ def fixture_config_entry( hass: HomeAssistant, config_entry_data: MappingProxyType[str, Any], config_entry_options: MappingProxyType[str, Any], -) -> ConfigEntry: +) -> MockConfigEntry: """Define a config entry fixture.""" config_entry = MockConfigEntry( domain=UNIFI_DOMAIN, @@ -295,12 +308,12 @@ def fixture_default_requests( @pytest.fixture(name="config_entry_factory") async def fixture_config_entry_factory( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: MockConfigEntry, mock_requests: Callable[[str, str], None], -) -> Callable[[], ConfigEntry]: +) -> ConfigEntryFactoryType: """Fixture factory that can set up UniFi network integration.""" - async def __mock_setup_config_entry() -> ConfigEntry: + async def __mock_setup_config_entry() -> MockConfigEntry: mock_requests(config_entry.data[CONF_HOST], config_entry.data[CONF_SITE_ID]) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -311,8 +324,8 @@ async def fixture_config_entry_factory( @pytest.fixture(name="config_entry_setup") async def fixture_config_entry_setup( - config_entry_factory: Callable[[], ConfigEntry], -) -> ConfigEntry: + config_entry_factory: ConfigEntryFactoryType, +) -> MockConfigEntry: """Fixture providing a set up instance of 
UniFi network integration.""" return await config_entry_factory() @@ -382,13 +395,15 @@ def fixture_aiounifi_websocket_state( @pytest.fixture(name="mock_websocket_message") -def fixture_aiounifi_websocket_message(_mock_websocket: AsyncMock): +def fixture_aiounifi_websocket_message( + _mock_websocket: AsyncMock, +) -> WebsocketMessageMock: """No real websocket allowed.""" def make_websocket_call( *, message: MessageKey | None = None, - data: list[dict] | dict | None = None, + data: list[dict[str, Any]] | dict[str, Any] | None = None, ) -> None: """Generate a websocket call.""" message_handler = _mock_websocket.call_args[0][0] diff --git a/tests/components/unifi/test_button.py b/tests/components/unifi/test_button.py index 9af96b64a50..fc3aeccea9f 100644 --- a/tests/components/unifi/test_button.py +++ b/tests/components/unifi/test_button.py @@ -11,7 +11,7 @@ from syrupy import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.unifi.const import CONF_SITE_ID -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( CONF_HOST, CONTENT_TYPE_JSON, @@ -23,7 +23,13 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, snapshot_platform +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker RANDOM_TOKEN = "random_token" @@ -134,7 +140,7 @@ WLAN_REGENERATE_PASSWORD = [ async def test_entity_and_device_data( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, + config_entry_factory: ConfigEntryFactoryType, site_payload: dict[str, Any], snapshot: SnapshotAssertion, ) -> None: @@ -150,8 +156,8 @@ async def test_entity_and_device_data( async def _test_button_entity( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_state, - config_entry: ConfigEntry, + mock_websocket_state: WebsocketStateManager, + config_entry: MockConfigEntry, entity_id: str, request_method: str, request_path: str, @@ -221,8 +227,8 @@ async def _test_button_entity( async def test_device_button_entities( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, - mock_websocket_state, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, entity_id: str, request_method: str, request_path: str, @@ -269,8 +275,8 @@ async def test_wlan_button_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, - mock_websocket_state, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, entity_id: str, request_method: str, request_path: str, @@ -308,7 +314,7 @@ async def test_wlan_button_entities( @pytest.mark.usefixtures("config_entry_setup") async def test_power_cycle_availability( hass: HomeAssistant, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: dict[str, Any], ) -> None: """Verify that disabling PoE marks entity as unavailable.""" diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py 
index fc0d2626eb6..1d745511dc5 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -1,6 +1,5 @@ """Test UniFi Network config flow.""" -from collections.abc import Callable import socket from unittest.mock import PropertyMock, patch @@ -25,7 +24,7 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_WIRED_CLIENTS, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry +from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -36,6 +35,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .conftest import ConfigEntryFactoryType + from tests.common import MockConfigEntry CLIENTS = [{"mac": "00:00:00:00:00:01"}] @@ -296,7 +297,7 @@ async def test_flow_fails_hub_unavailable(hass: HomeAssistant) -> None: async def test_reauth_flow_update_configuration( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Verify reauth flow can update hub configuration.""" config_entry = config_entry_setup @@ -337,7 +338,7 @@ async def test_reauth_flow_update_configuration( async def test_reauth_flow_update_configuration_on_not_loaded_entry( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Verify reauth flow can update hub configuration on a not loaded entry.""" with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): @@ -379,7 +380,7 @@ async def test_reauth_flow_update_configuration_on_not_loaded_entry( @pytest.mark.parametrize("wlan_payload", [WLANS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_advanced_option_flow( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test advanced config flow options.""" config_entry = config_entry_setup @@ -463,7 +464,7 @@ async def test_advanced_option_flow( @pytest.mark.parametrize("client_payload", [CLIENTS]) async def test_simple_option_flow( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test simple config flow options.""" config_entry = config_entry_setup diff --git a/tests/components/unifi/test_device_tracker.py b/tests/components/unifi/test_device_tracker.py index f2480a4f050..c653370656d 100644 --- a/tests/components/unifi/test_device_tracker.py +++ b/tests/components/unifi/test_device_tracker.py @@ -1,6 +1,5 @@ """The tests for the UniFi Network device tracker platform.""" -from collections.abc import Callable from datetime import timedelta from types import MappingProxyType from typing import Any @@ -24,13 +23,18 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import STATE_HOME, STATE_NOT_HOME, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed, snapshot_platform +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, 
async_fire_time_changed, snapshot_platform WIRED_CLIENT_1 = { "hostname": "wd_client_1", @@ -96,7 +100,7 @@ SWITCH_1 = { async def test_entity_and_device_data( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, + config_entry_factory: ConfigEntryFactoryType, snapshot: SnapshotAssertion, ) -> None: """Validate entity and device data with and without admin rights.""" @@ -112,8 +116,8 @@ async def test_entity_and_device_data( @pytest.mark.usefixtures("mock_device_registry") async def test_client_state_update( hass: HomeAssistant, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + mock_websocket_message: WebsocketMessageMock, + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Verify tracking of wireless clients.""" @@ -165,7 +169,7 @@ async def test_client_state_update( async def test_client_state_from_event_source( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, client_payload: list[dict[str, Any]], ) -> None: """Verify update state of client based on event source.""" @@ -247,8 +251,8 @@ async def test_client_state_from_event_source( async def test_tracked_device_state_change( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - config_entry_factory: Callable[[], ConfigEntry], - mock_websocket_message, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], state: int, interval: int, @@ -289,7 +293,9 @@ async def test_tracked_device_state_change( @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") async def test_remove_clients( - hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], ) -> None: """Test the remove_items function with some clients.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 @@ -309,7 +315,10 @@ async def test_remove_clients( @pytest.mark.parametrize("device_payload", [[SWITCH_1]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("mock_device_registry") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, + mock_websocket_state: WebsocketStateManager, +) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" assert len(hass.states.async_entity_ids(TRACKER_DOMAIN)) == 2 assert hass.states.get("device_tracker.ws_client_1").state == STATE_NOT_HOME @@ -330,7 +339,7 @@ async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> No async def test_option_ssid_filter( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test the SSID filter works. @@ -434,7 +443,7 @@ async def test_option_ssid_filter( async def test_wireless_client_go_wired_issue( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test the solution to catch wireless device go wired UniFi issue. 
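A minimal sketch of how the typed fixtures introduced in conftest.py above are consumed by these UniFi tests — illustrative only, not taken from the patch; MessageKey.CLIENT and the client_payload fixture are assumed from their use elsewhere in this test suite:

from typing import Any

from aiounifi.models.message import MessageKey

from homeassistant.core import HomeAssistant

from .conftest import ConfigEntryFactoryType, WebsocketMessageMock


async def test_typed_fixtures_sketch(
    hass: HomeAssistant,
    config_entry_factory: ConfigEntryFactoryType,
    mock_websocket_message: WebsocketMessageMock,
    client_payload: list[dict[str, Any]],
) -> None:
    """Set up the integration, then push a fake websocket client message."""
    # The factory coroutine sets up UniFi Network and returns a MockConfigEntry.
    config_entry = await config_entry_factory()
    assert config_entry.entry_id

    # WebsocketMessageMock is a Protocol with keyword-only message/data arguments.
    mock_websocket_message(message=MessageKey.CLIENT, data=client_payload)
    await hass.async_block_till_done()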
@@ -494,7 +503,7 @@ async def test_wireless_client_go_wired_issue( async def test_option_ignore_wired_bug( hass: HomeAssistant, mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], ) -> None: """Test option to ignore wired bug.""" @@ -571,8 +580,8 @@ async def test_option_ignore_wired_bug( async def test_restoring_client( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry: ConfigEntry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry: MockConfigEntry, + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], clients_all_payload: list[dict[str, Any]], ) -> None: @@ -645,7 +654,7 @@ async def test_restoring_client( @pytest.mark.usefixtures("mock_device_registry") async def test_config_entry_options_track( hass: HomeAssistant, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, config_entry_options: MappingProxyType[str, Any], counts: tuple[int], expected: tuple[tuple[bool | None, ...], ...], diff --git a/tests/components/unifi/test_diagnostics.py b/tests/components/unifi/test_diagnostics.py index 3963de2deb3..80359a9c75c 100644 --- a/tests/components/unifi/test_diagnostics.py +++ b/tests/components/unifi/test_diagnostics.py @@ -9,9 +9,9 @@ from homeassistant.components.unifi.const import ( CONF_ALLOW_UPTIME_SENSORS, CONF_BLOCK_CLIENT, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator @@ -122,7 +122,7 @@ DPI_GROUP_DATA = [ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/unifi/test_hub.py b/tests/components/unifi/test_hub.py index 0d75a83c5f5..af134c7449b 100644 --- a/tests/components/unifi/test_hub.py +++ b/tests/components/unifi/test_hub.py @@ -1,6 +1,5 @@ """Test UniFi Network.""" -from collections.abc import Callable from http import HTTPStatus from types import MappingProxyType from typing import Any @@ -12,18 +11,21 @@ import pytest from homeassistant.components.unifi.const import DOMAIN as UNIFI_DOMAIN from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect from homeassistant.components.unifi.hub import get_unifi_api -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr import homeassistant.util.dt as dt_util +from .conftest import ConfigEntryFactoryType, WebsocketStateManager + +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker async def test_hub_setup( device_registry: dr.DeviceRegistry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Successful setup.""" with patch( @@ -54,7 +56,7 @@ async def test_hub_setup( async def test_reset_after_successful_setup( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Calling reset 
when the entry has been setup.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -64,7 +66,7 @@ async def test_reset_after_successful_setup( async def test_reset_fails( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Calling reset when the entry has been setup can return false.""" assert config_entry_setup.state is ConfigEntryState.LOADED @@ -80,8 +82,8 @@ async def test_reset_fails( @pytest.mark.usefixtures("mock_device_registry") async def test_connection_state_signalling( hass: HomeAssistant, - mock_websocket_state, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_state: WebsocketStateManager, client_payload: list[dict[str, Any]], ) -> None: """Verify connection statesignalling and connection state are working.""" @@ -110,8 +112,8 @@ async def test_connection_state_signalling( async def test_reconnect_mechanism( aioclient_mock: AiohttpClientMocker, - mock_websocket_state, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, + mock_websocket_state: WebsocketStateManager, ) -> None: """Verify reconnect prints only on first reconnection try.""" aioclient_mock.clear_requests() @@ -140,7 +142,10 @@ async def test_reconnect_mechanism( ], ) @pytest.mark.usefixtures("config_entry_setup") -async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) -> None: +async def test_reconnect_mechanism_exceptions( + mock_websocket_state: WebsocketStateManager, + exception: Exception, +) -> None: """Verify async_reconnect calls expected methods.""" with ( patch("aiounifi.Controller.login", side_effect=exception), @@ -170,8 +175,8 @@ async def test_reconnect_mechanism_exceptions(mock_websocket_state, exception) - ) async def test_get_unifi_api_fails_to_connect( hass: HomeAssistant, - side_effect, - raised_exception, + side_effect: Exception, + raised_exception: Exception, config_entry_data: MappingProxyType[str, Any], ) -> None: """Check that get_unifi_api can handle UniFi Network being unavailable.""" diff --git a/tests/components/unifi/test_image.py b/tests/components/unifi/test_image.py index 6733845c52f..dc37d7cb8b7 100644 --- a/tests/components/unifi/test_image.py +++ b/tests/components/unifi/test_image.py @@ -18,6 +18,12 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler from homeassistant.util import dt as dt_util +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + from tests.common import async_fire_time_changed, snapshot_platform from tests.typing import ClientSessionGenerator @@ -82,7 +88,7 @@ WLAN = { async def test_entity_and_device_data( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, + config_entry_factory: ConfigEntryFactoryType, site_payload: dict[str, Any], snapshot: SnapshotAssertion, ) -> None: @@ -102,7 +108,7 @@ async def test_wlan_qr_code( entity_registry: er.EntityRegistry, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, ) -> None: """Test the update_clients function when no clients are found.""" assert len(hass.states.async_entity_ids(IMAGE_DOMAIN)) == 0 @@ -151,7 +157,9 @@ async def test_wlan_qr_code( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") 
@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub becoming unavailable.""" assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE @@ -167,7 +175,9 @@ async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> No @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_source_availability(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_source_availability( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Verify entities state reflect on source becoming unavailable.""" assert hass.states.get("image.ssid_1_qr_code").state != STATE_UNAVAILABLE diff --git a/tests/components/unifi/test_init.py b/tests/components/unifi/test_init.py index de08ba2c6d7..68f80555cd6 100644 --- a/tests/components/unifi/test_init.py +++ b/tests/components/unifi/test_init.py @@ -1,6 +1,5 @@ """Test UniFi Network integration setup process.""" -from collections.abc import Callable from typing import Any from unittest.mock import patch @@ -15,19 +14,23 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_DEVICES, ) from homeassistant.components.unifi.errors import AuthenticationRequired, CannotConnect -from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import DEFAULT_CONFIG_ENTRY_ID +from .conftest import ( + DEFAULT_CONFIG_ENTRY_ID, + ConfigEntryFactoryType, + WebsocketMessageMock, +) from tests.common import flush_store from tests.typing import WebSocketGenerator async def test_setup_entry_fails_config_entry_not_ready( - config_entry_factory: Callable[[], ConfigEntry], + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Failed authentication trigger a reauthentication flow.""" with patch( @@ -40,7 +43,7 @@ async def test_setup_entry_fails_config_entry_not_ready( async def test_setup_entry_fails_trigger_reauth_flow( - hass: HomeAssistant, config_entry_factory: Callable[[], ConfigEntry] + hass: HomeAssistant, config_entry_factory: ConfigEntryFactoryType ) -> None: """Failed authentication trigger a reauthentication flow.""" with ( @@ -78,7 +81,7 @@ async def test_setup_entry_fails_trigger_reauth_flow( async def test_wireless_clients( hass: HomeAssistant, hass_storage: dict[str, Any], - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, ) -> None: """Verify wireless clients class.""" hass_storage[unifi.STORAGE_KEY] = { @@ -163,10 +166,10 @@ async def test_wireless_clients( async def test_remove_config_entry_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, client_payload: list[dict[str, Any]], device_payload: list[dict[str, Any]], - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, hass_ws_client: WebSocketGenerator, ) -> None: """Verify removing a device manually.""" diff --git 
a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 779df6660f0..5af4b297847 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1,6 +1,5 @@ """UniFi Network sensor platform tests.""" -from collections.abc import Callable from copy import deepcopy from datetime import datetime, timedelta from types import MappingProxyType @@ -29,7 +28,7 @@ from homeassistant.components.unifi.const import ( DEFAULT_DETECTION_TIME, DEVICE_STATES, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, @@ -42,7 +41,13 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler import homeassistant.util.dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, async_fire_time_changed DEVICE_1 = { "board_rev": 2, @@ -362,9 +367,9 @@ async def test_no_clients(hass: HomeAssistant) -> None: ) async def test_bandwidth_sensors( hass: HomeAssistant, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, config_entry_options: MappingProxyType[str, Any], - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify that bandwidth sensors are working as expected.""" @@ -491,7 +496,9 @@ async def test_bandwidth_sensors( @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_remove_sensors( - hass: HomeAssistant, mock_websocket_message, client_payload: list[dict[str, Any]] + hass: HomeAssistant, + mock_websocket_message: WebsocketMessageMock, + client_payload: list[dict[str, Any]], ) -> None: """Verify removing of clients work as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 @@ -520,8 +527,8 @@ async def test_remove_sensors( async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - mock_websocket_state, + mock_websocket_message: WebsocketMessageMock, + mock_websocket_state: WebsocketStateManager, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 @@ -594,9 +601,9 @@ async def test_poe_port_switches( async def test_wlan_client_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - mock_websocket_state, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + mock_websocket_state: WebsocketStateManager, client_payload: list[dict[str, Any]], ) -> None: """Verify that WLAN client sensors are working as expected.""" @@ -736,7 +743,7 @@ async def test_wlan_client_sensors( async def test_outlet_power_readings( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], entity_id: str, expected_unique_id: str, @@ -798,7 +805,7 @@ async def test_outlet_power_readings( async def test_device_temperature( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: 
WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that temperature sensors are working as expected.""" @@ -847,7 +854,7 @@ async def test_device_temperature( async def test_device_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that state sensors are working as expected.""" @@ -884,7 +891,7 @@ async def test_device_state( async def test_device_system_stats( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that device stats sensors are working as expected.""" @@ -979,9 +986,9 @@ async def test_device_system_stats( async def test_bandwidth_port_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, config_entry_options: MappingProxyType[str, Any], + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that port bandwidth sensors are working as expected.""" @@ -1096,9 +1103,9 @@ async def test_bandwidth_port_sensors( async def test_device_client_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, - mock_websocket_message, - client_payload, + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, + client_payload: dict[str, Any], ) -> None: """Verify that WLAN client sensors are working as expected.""" client_payload += [ @@ -1246,8 +1253,8 @@ async def test_sensor_sources( async def _test_uptime_entity( hass: HomeAssistant, freezer: FrozenDateTimeFactory, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + mock_websocket_message: WebsocketMessageMock, + config_entry_factory: ConfigEntryFactoryType, payload: dict[str, Any], entity_id: str, message_key: MessageKey, @@ -1326,9 +1333,9 @@ async def test_client_uptime( hass: HomeAssistant, entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - mock_websocket_message, config_entry_options: MappingProxyType[str, Any], - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, client_payload: list[dict[str, Any]], initial_uptime, event_uptime, @@ -1401,8 +1408,8 @@ async def test_device_uptime( hass: HomeAssistant, entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - mock_websocket_message, - config_entry_factory: Callable[[], ConfigEntry], + config_entry_factory: ConfigEntryFactoryType, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that device uptime sensors are working as expected.""" @@ -1502,7 +1509,7 @@ async def test_device_uptime( async def test_wan_monitor_latency( hass: HomeAssistant, entity_registry: er.EntityRegistry, - mock_websocket_message, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], entity_id: str, state: str, diff --git a/tests/components/unifi/test_services.py b/tests/components/unifi/test_services.py index bf7058e28ff..a7968a92e22 100644 --- a/tests/components/unifi/test_services.py +++ b/tests/components/unifi/test_services.py @@ -10,11 +10,11 @@ from homeassistant.components.unifi.services import ( SERVICE_RECONNECT_CLIENT, 
SERVICE_REMOVE_CLIENTS, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_DEVICE_ID, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -25,7 +25,7 @@ async def test_reconnect_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify call to reconnect client is performed as expected.""" @@ -69,7 +69,7 @@ async def test_reconnect_device_without_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify no call is made if device does not have a known mac.""" aioclient_mock.clear_requests() @@ -95,7 +95,7 @@ async def test_reconnect_client_hub_unavailable( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if hub is unavailable.""" @@ -127,7 +127,7 @@ async def test_reconnect_client_unknown_mac( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify no call is made if trying to reconnect a mac unknown to hub.""" aioclient_mock.clear_requests() @@ -152,7 +152,7 @@ async def test_reconnect_wired_client( hass: HomeAssistant, device_registry: dr.DeviceRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, client_payload: list[dict[str, Any]], ) -> None: """Verify no call is made if client is wired.""" @@ -204,7 +204,7 @@ async def test_reconnect_wired_client( async def test_remove_clients( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Verify removing different variations of clients work.""" aioclient_mock.clear_requests() @@ -288,7 +288,7 @@ async def test_services_handle_unloaded_config_entry( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, device_registry: dr.DeviceRegistry, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, clients_all_payload: dict[str, Any], ) -> None: """Verify no call is made if config entry is unloaded.""" diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index daf64301c8e..6d85437a244 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -1,6 +1,5 @@ """UniFi Network switch platform tests.""" -from collections.abc import Callable from copy import deepcopy from datetime import timedelta from typing import Any @@ -22,7 +21,7 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_DEVICES, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY, ConfigEntry +from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -37,9 +36,14 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import 
RegistryEntryDisabler from homeassistant.util import dt as dt_util -from .conftest import CONTROLLER_HOST +from .conftest import ( + CONTROLLER_HOST, + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) -from tests.common import async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_1 = { @@ -841,12 +845,11 @@ async def test_not_admin(hass: HomeAssistant) -> None: @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED, CLIENT_1]]) @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) -@pytest.mark.usefixtures("config_entry_setup") async def test_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3 @@ -930,7 +933,9 @@ async def test_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_remove_switches( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -967,8 +972,8 @@ async def test_remove_switches(hass: HomeAssistant, mock_websocket_message) -> N async def test_block_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 2 @@ -1027,7 +1032,9 @@ async def test_block_switches( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") -async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_dpi_switches( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1053,7 +1060,7 @@ async def test_dpi_switches(hass: HomeAssistant, mock_websocket_message) -> None @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.usefixtures("config_entry_setup") async def test_dpi_switches_add_second_app( - hass: HomeAssistant, mock_websocket_message + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1104,12 +1111,10 @@ async def test_dpi_switches_add_second_app( @pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) -@pytest.mark.usefixtures("config_entry_setup") async def test_traffic_rules( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, traffic_rule_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi traffic rules.""" @@ -1172,8 
+1177,8 @@ async def test_traffic_rules( async def test_outlet_switches( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, device_payload: list[dict[str, Any]], entity_id: str, outlet_index: int, @@ -1268,7 +1273,7 @@ async def test_outlet_switches( ) @pytest.mark.usefixtures("config_entry_setup") async def test_new_client_discovered_on_block_control( - hass: HomeAssistant, mock_websocket_message + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock ) -> None: """Test if 2nd update has a new client.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 @@ -1286,7 +1291,9 @@ async def test_new_client_discovered_on_block_control( ) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED]]) async def test_option_block_clients( - hass: HomeAssistant, config_entry_setup: ConfigEntry, clients_all_payload + hass: HomeAssistant, + config_entry_setup: MockConfigEntry, + clients_all_payload: list[dict[str, Any]], ) -> None: """Test the changes to option reflects accordingly.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1334,7 +1341,7 @@ async def test_option_block_clients( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_option_remove_switches( - hass: HomeAssistant, config_entry_setup: ConfigEntry + hass: HomeAssistant, config_entry_setup: MockConfigEntry ) -> None: """Test removal of DPI switch when options updated.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 @@ -1352,8 +1359,8 @@ async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, device_payload: list[dict[str, Any]], ) -> None: """Test PoE port entities work.""" @@ -1451,8 +1458,8 @@ async def test_wlan_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, wlan_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi WLAN availability.""" @@ -1507,8 +1514,8 @@ async def test_port_forwarding_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message, - config_entry_setup: ConfigEntry, + mock_websocket_message: WebsocketMessageMock, + config_entry_setup: MockConfigEntry, port_forward_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi port forwarding.""" @@ -1606,9 +1613,9 @@ async def test_port_forwarding_switches( async def test_updating_unique_id( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory: Callable[[], ConfigEntry], - config_entry: ConfigEntry, - device_payload, + config_entry_factory: ConfigEntryFactoryType, + config_entry: MockConfigEntry, + device_payload: list[dict[str, Any]], ) -> None: """Verify outlet control and poe control unique ID update works.""" entity_registry.async_get_or_create( @@ -1644,7 +1651,9 @@ async def test_updating_unique_id( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") 
@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub connection becoming unavailable.""" entity_ids = ( "switch.block_client_2", diff --git a/tests/components/unifi/test_update.py b/tests/components/unifi/test_update.py index a8fe9231159..7bf4b9aec9d 100644 --- a/tests/components/unifi/test_update.py +++ b/tests/components/unifi/test_update.py @@ -16,7 +16,6 @@ from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, @@ -28,7 +27,13 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from tests.common import snapshot_platform +from .conftest import ( + ConfigEntryFactoryType, + WebsocketMessageMock, + WebsocketStateManager, +) + +from tests.common import MockConfigEntry, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker # Device with new firmware available @@ -74,7 +79,7 @@ DEVICE_2 = { async def test_entity_and_device_data( hass: HomeAssistant, entity_registry: er.EntityRegistry, - config_entry_factory, + config_entry_factory: ConfigEntryFactoryType, snapshot: SnapshotAssertion, ) -> None: """Validate entity and device data with and without admin rights.""" @@ -85,7 +90,9 @@ async def test_entity_and_device_data( @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> None: +async def test_device_updates( + hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock +) -> None: """Test the update_items function with some devices.""" device_1_state = hass.states.get("update.device_1") assert device_1_state.state == STATE_ON @@ -122,7 +129,7 @@ async def test_device_updates(hass: HomeAssistant, mock_websocket_message) -> No async def test_install( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, - config_entry_setup: ConfigEntry, + config_entry_setup: MockConfigEntry, ) -> None: """Test the device update install call.""" device_state = hass.states.get("update.device_1") @@ -154,7 +161,9 @@ async def test_install( @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") -async def test_hub_state_change(hass: HomeAssistant, mock_websocket_state) -> None: +async def test_hub_state_change( + hass: HomeAssistant, mock_websocket_state: WebsocketStateManager +) -> None: """Verify entities state reflect on hub becoming unavailable.""" assert hass.states.get("update.device_1").state == STATE_ON From 94c0b9fc069b02bab31dff04e71ae0457e03f336 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 19:39:53 +0200 Subject: [PATCH 0156/1635] Bump pyhomeworks to 1.0.0 (#122867) --- homeassistant/components/homeworks/__init__.py | 14 ++++++++------ homeassistant/components/homeworks/config_flow.py | 7 ++++--- homeassistant/components/homeworks/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/homeworks/test_config_flow.py | 8 ++++++-- tests/components/homeworks/test_init.py | 7 +++++-- 7 files changed, 26 insertions(+), 16 deletions(-) diff --git 
a/homeassistant/components/homeworks/__init__.py b/homeassistant/components/homeworks/__init__.py index f1a95102c3b..cf39bc72ec6 100644 --- a/homeassistant/components/homeworks/__init__.py +++ b/homeassistant/components/homeworks/__init__.py @@ -8,6 +8,7 @@ from dataclasses import dataclass import logging from typing import Any +from pyhomeworks import exceptions as hw_exceptions from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED, Homeworks import voluptuous as vol @@ -141,15 +142,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: dispatcher_send(hass, signal, msg_type, values) config = entry.options + controller = Homeworks(config[CONF_HOST], config[CONF_PORT], hw_callback) try: - controller = await hass.async_add_executor_job( - Homeworks, config[CONF_HOST], config[CONF_PORT], hw_callback - ) - except (ConnectionError, OSError) as err: + await hass.async_add_executor_job(controller.connect) + except hw_exceptions.HomeworksException as err: + _LOGGER.debug("Failed to connect: %s", err, exc_info=True) raise ConfigEntryNotReady from err + controller.start() def cleanup(event: Event) -> None: - controller.close() + controller.stop() entry.async_on_unload(hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, cleanup)) @@ -176,7 +178,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: for keypad in data.keypads.values(): keypad.unsubscribe() - await hass.async_add_executor_job(data.controller.close) + await hass.async_add_executor_job(data.controller.stop) return unload_ok diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index 81b31e4644e..ec381c3331f 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -6,6 +6,7 @@ from functools import partial import logging from typing import Any +from pyhomeworks import exceptions as hw_exceptions from pyhomeworks.pyhomeworks import Homeworks import voluptuous as vol @@ -128,18 +129,18 @@ async def _try_connection(user_input: dict[str, Any]) -> None: "Trying to connect to %s:%s", user_input[CONF_HOST], user_input[CONF_PORT] ) controller = Homeworks(host, port, lambda msg_types, values: None) + controller.connect() controller.close() - controller.join() hass = async_get_hass() try: await hass.async_add_executor_job( _try_connect, user_input[CONF_HOST], user_input[CONF_PORT] ) - except ConnectionError as err: + except hw_exceptions.HomeworksConnectionFailed as err: raise SchemaFlowError("connection_error") from err except Exception as err: - _LOGGER.exception("Caught unexpected exception") + _LOGGER.exception("Caught unexpected exception %s") raise SchemaFlowError("unknown_error") from err diff --git a/homeassistant/components/homeworks/manifest.json b/homeassistant/components/homeworks/manifest.json index c2520b910d9..9b447ef4aea 100644 --- a/homeassistant/components/homeworks/manifest.json +++ b/homeassistant/components/homeworks/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homeworks", "iot_class": "local_push", "loggers": ["pyhomeworks"], - "requirements": ["pyhomeworks==0.0.6"] + "requirements": ["pyhomeworks==1.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4d74e059a59..e0d502f3000 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1903,7 +1903,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==0.0.6 
+pyhomeworks==1.0.0 # homeassistant.components.ialarm pyialarm==2.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 86fdf2da7f9..a1037d5109f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1517,7 +1517,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==0.0.6 +pyhomeworks==1.0.0 # homeassistant.components.ialarm pyialarm==2.2.0 diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index 8f5334b21f9..3e359caf7f2 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -2,6 +2,7 @@ from unittest.mock import ANY, MagicMock +from pyhomeworks import exceptions as hw_exceptions import pytest from pytest_unordered import unordered @@ -55,7 +56,7 @@ async def test_user_flow( } mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) mock_controller.close.assert_called_once_with() - mock_controller.join.assert_called_once_with() + mock_controller.join.assert_not_called() async def test_user_flow_already_exists( @@ -96,7 +97,10 @@ async def test_user_flow_already_exists( @pytest.mark.parametrize( ("side_effect", "error"), - [(ConnectionError, "connection_error"), (Exception, "unknown_error")], + [ + (hw_exceptions.HomeworksConnectionFailed, "connection_error"), + (Exception, "unknown_error"), + ], ) async def test_user_flow_cannot_connect( hass: HomeAssistant, diff --git a/tests/components/homeworks/test_init.py b/tests/components/homeworks/test_init.py index af43fcfba10..2363e0f157d 100644 --- a/tests/components/homeworks/test_init.py +++ b/tests/components/homeworks/test_init.py @@ -2,6 +2,7 @@ from unittest.mock import ANY, MagicMock +from pyhomeworks import exceptions as hw_exceptions from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED import pytest @@ -41,7 +42,9 @@ async def test_config_entry_not_ready( mock_homeworks: MagicMock, ) -> None: """Test the Homeworks configuration entry not ready.""" - mock_homeworks.side_effect = ConnectionError + mock_homeworks.return_value.connect.side_effect = ( + hw_exceptions.HomeworksConnectionFailed + ) mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -187,4 +190,4 @@ async def test_cleanup_on_ha_shutdown( hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() - mock_controller.close.assert_called_once_with() + mock_controller.stop.assert_called_once_with() From 022e1b0c02a60c90628f2d4ad11edb0fad445fb4 Mon Sep 17 00:00:00 2001 From: Bill Flood Date: Tue, 30 Jul 2024 12:07:12 -0700 Subject: [PATCH 0157/1635] Add other medium types to Mopeka sensor (#122705) Co-authored-by: J. 
Nick Koston --- homeassistant/components/mopeka/__init__.py | 15 +++- .../components/mopeka/config_flow.py | 86 +++++++++++++++++-- homeassistant/components/mopeka/const.py | 8 ++ homeassistant/components/mopeka/strings.json | 18 +++- tests/components/mopeka/test_config_flow.py | 52 +++++++++-- 5 files changed, 163 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/mopeka/__init__.py b/homeassistant/components/mopeka/__init__.py index 2538ec3d810..17a87efd6e6 100644 --- a/homeassistant/components/mopeka/__init__.py +++ b/homeassistant/components/mopeka/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from mopeka_iot_ble import MopekaIOTBluetoothDeviceData +from mopeka_iot_ble import MediumType, MopekaIOTBluetoothDeviceData from homeassistant.components.bluetooth import BluetoothScanningMode from homeassistant.components.bluetooth.passive_update_processor import ( @@ -14,6 +14,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from .const import CONF_MEDIUM_TYPE + PLATFORMS: list[Platform] = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) @@ -26,7 +28,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bo """Set up Mopeka BLE device from a config entry.""" address = entry.unique_id assert address is not None - data = MopekaIOTBluetoothDeviceData() + + # Default sensors configured prior to the intorudction of MediumType + medium_type_str = entry.data.get(CONF_MEDIUM_TYPE, MediumType.PROPANE.value) + data = MopekaIOTBluetoothDeviceData(MediumType(medium_type_str)) coordinator = entry.runtime_data = PassiveBluetoothProcessorCoordinator( hass, _LOGGER, @@ -37,9 +42,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bo await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) # only start after all platforms have had a chance to subscribe entry.async_on_unload(coordinator.async_start()) + entry.async_on_unload(entry.add_update_listener(update_listener)) return True +async def update_listener(hass: HomeAssistant, entry: MopekaConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + async def async_unload_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/mopeka/config_flow.py b/homeassistant/components/mopeka/config_flow.py index 1732157ce49..72e9386a47f 100644 --- a/homeassistant/components/mopeka/config_flow.py +++ b/homeassistant/components/mopeka/config_flow.py @@ -2,19 +2,43 @@ from __future__ import annotations +from enum import Enum from typing import Any from mopeka_iot_ble import MopekaIOTBluetoothDeviceData as DeviceData import voluptuous as vol +from homeassistant import config_entries from homeassistant.components.bluetooth import ( BluetoothServiceInfoBleak, async_discovered_service_info, ) from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ADDRESS +from homeassistant.core import callback -from .const import DOMAIN +from .const import CONF_MEDIUM_TYPE, DEFAULT_MEDIUM_TYPE, DOMAIN, MediumType + + +def format_medium_type(medium_type: Enum) -> str: + """Format the medium type for human reading.""" + return medium_type.name.replace("_", " ").title() + + +MEDIUM_TYPES_BY_NAME = { + medium.value: 
format_medium_type(medium) for medium in MediumType +} + + +def async_generate_schema(medium_type: str | None = None) -> vol.Schema: + """Return the base schema with formatted medium types.""" + return vol.Schema( + { + vol.Required( + CONF_MEDIUM_TYPE, default=medium_type or DEFAULT_MEDIUM_TYPE + ): vol.In(MEDIUM_TYPES_BY_NAME) + } + ) class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): @@ -28,6 +52,14 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): self._discovered_device: DeviceData | None = None self._discovered_devices: dict[str, str] = {} + @callback + @staticmethod + def async_get_options_flow( + config_entry: config_entries.ConfigEntry, + ) -> MopekaOptionsFlow: + """Return the options flow for this handler.""" + return MopekaOptionsFlow(config_entry) + async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfoBleak ) -> ConfigFlowResult: @@ -44,32 +76,39 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_bluetooth_confirm( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Confirm discovery.""" + """Confirm discovery and select medium type.""" assert self._discovered_device is not None device = self._discovered_device assert self._discovery_info is not None discovery_info = self._discovery_info title = device.title or device.get_device_name() or discovery_info.name if user_input is not None: - return self.async_create_entry(title=title, data={}) + self._discovered_devices[discovery_info.address] = title + return self.async_create_entry( + title=self._discovered_devices[discovery_info.address], + data={CONF_MEDIUM_TYPE: user_input[CONF_MEDIUM_TYPE]}, + ) self._set_confirm_only() placeholders = {"name": title} self.context["title_placeholders"] = placeholders return self.async_show_form( - step_id="bluetooth_confirm", description_placeholders=placeholders + step_id="bluetooth_confirm", + description_placeholders=placeholders, + data_schema=async_generate_schema(), ) async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the user step to pick discovered device.""" + """Handle the user step to pick discovered device and select medium type.""" if user_input is not None: address = user_input[CONF_ADDRESS] await self.async_set_unique_id(address, raise_on_progress=False) self._abort_if_unique_id_configured() return self.async_create_entry( - title=self._discovered_devices[address], data={} + title=self._discovered_devices[address], + data={CONF_MEDIUM_TYPE: user_input[CONF_MEDIUM_TYPE]}, ) current_addresses = self._async_current_ids() @@ -89,6 +128,39 @@ class MopekaConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=vol.Schema( - {vol.Required(CONF_ADDRESS): vol.In(self._discovered_devices)} + { + vol.Required(CONF_ADDRESS): vol.In(self._discovered_devices), + **async_generate_schema().schema, + } + ), + ) + + +class MopekaOptionsFlow(config_entries.OptionsFlow): + """Handle options for the Mopeka component.""" + + def __init__(self, config_entry: config_entries.ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle options flow.""" + if user_input is not None: + new_data = { + **self.config_entry.data, + CONF_MEDIUM_TYPE: user_input[CONF_MEDIUM_TYPE], + } + self.hass.config_entries.async_update_entry( + self.config_entry, data=new_data + ) + await 
self.hass.config_entries.async_reload(self.config_entry.entry_id) + return self.async_create_entry(title="", data={}) + + return self.async_show_form( + step_id="init", + data_schema=async_generate_schema( + self.config_entry.data.get(CONF_MEDIUM_TYPE) ), ) diff --git a/homeassistant/components/mopeka/const.py b/homeassistant/components/mopeka/const.py index 0d78146f5a8..e18828f2364 100644 --- a/homeassistant/components/mopeka/const.py +++ b/homeassistant/components/mopeka/const.py @@ -1,3 +1,11 @@ """Constants for the Mopeka integration.""" +from typing import Final + +from mopeka_iot_ble import MediumType + DOMAIN = "mopeka" + +CONF_MEDIUM_TYPE: Final = "medium_type" + +DEFAULT_MEDIUM_TYPE = MediumType.PROPANE.value diff --git a/homeassistant/components/mopeka/strings.json b/homeassistant/components/mopeka/strings.json index 16a80220a20..2455eea2f76 100644 --- a/homeassistant/components/mopeka/strings.json +++ b/homeassistant/components/mopeka/strings.json @@ -5,11 +5,15 @@ "user": { "description": "[%key:component::bluetooth::config::step::user::description%]", "data": { - "address": "[%key:common::config_flow::data::device%]" + "address": "[%key:common::config_flow::data::device%]", + "medium_type": "Medium Type" } }, "bluetooth_confirm": { - "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]" + "description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]", + "data": { + "medium_type": "[%key:component::mopeka::config::step::user::data::medium_type%]" + } } }, "abort": { @@ -18,5 +22,15 @@ "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } + }, + "options": { + "step": { + "init": { + "title": "Configure Mopeka", + "data": { + "medium_type": "[%key:component::mopeka::config::step::user::data::medium_type%]" + } + } + } } } diff --git a/tests/components/mopeka/test_config_flow.py b/tests/components/mopeka/test_config_flow.py index 826fe8db2aa..7a341052f22 100644 --- a/tests/components/mopeka/test_config_flow.py +++ b/tests/components/mopeka/test_config_flow.py @@ -2,8 +2,10 @@ from unittest.mock import patch +import voluptuous as vol + from homeassistant import config_entries -from homeassistant.components.mopeka.const import DOMAIN +from homeassistant.components.mopeka.const import CONF_MEDIUM_TYPE, DOMAIN, MediumType from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,13 +23,14 @@ async def test_async_step_bluetooth_valid_device(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "bluetooth_confirm" + with patch("homeassistant.components.mopeka.async_setup_entry", return_value=True): result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={} + result["flow_id"], user_input={CONF_MEDIUM_TYPE: MediumType.PROPANE.value} ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert result2["data"] == {CONF_MEDIUM_TYPE: MediumType.PROPANE.value} assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -71,7 +74,10 @@ async def test_async_step_user_with_found_devices(hass: HomeAssistant) -> None: ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert CONF_MEDIUM_TYPE in result2["data"] + assert 
result2["data"][CONF_MEDIUM_TYPE] in [ + medium_type.value for medium_type in MediumType + ] assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" @@ -190,8 +196,44 @@ async def test_async_step_user_takes_precedence_over_discovery( ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Pro Plus EEFF" - assert result2["data"] == {} + assert CONF_MEDIUM_TYPE in result2["data"] + assert result2["data"][CONF_MEDIUM_TYPE] in [ + medium_type.value for medium_type in MediumType + ] assert result2["result"].unique_id == "aa:bb:cc:dd:ee:ff" # Verify the original one was aborted assert not hass.config_entries.flow.async_progress(DOMAIN) + + +async def test_async_step_reconfigure_options(hass: HomeAssistant) -> None: + """Test reconfig options: change MediumType from air to fresh water.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:75:10", + title="TD40/TD200 7510", + data={CONF_MEDIUM_TYPE: MediumType.AIR.value}, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.data[CONF_MEDIUM_TYPE] == MediumType.AIR.value + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "init" + schema: vol.Schema = result["data_schema"] + medium_type_key = next( + iter(key for key in schema.schema if key == CONF_MEDIUM_TYPE) + ) + assert medium_type_key.default() == MediumType.AIR.value + + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_MEDIUM_TYPE: MediumType.FRESH_WATER.value}, + ) + assert result2["type"] == FlowResultType.CREATE_ENTRY + + # Verify the new configuration + assert entry.data[CONF_MEDIUM_TYPE] == MediumType.FRESH_WATER.value From 6362ca1052f4042ccf2759dea0293e32a1445b79 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 30 Jul 2024 21:52:25 +0200 Subject: [PATCH 0158/1635] Bump pyhomeworks to 1.1.0 (#122870) --- homeassistant/components/homeworks/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homeworks/manifest.json b/homeassistant/components/homeworks/manifest.json index 9b447ef4aea..1ba0672c9f1 100644 --- a/homeassistant/components/homeworks/manifest.json +++ b/homeassistant/components/homeworks/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homeworks", "iot_class": "local_push", "loggers": ["pyhomeworks"], - "requirements": ["pyhomeworks==1.0.0"] + "requirements": ["pyhomeworks==1.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index e0d502f3000..80f4ab6bc4a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1903,7 +1903,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.0.0 +pyhomeworks==1.1.0 # homeassistant.components.ialarm pyialarm==2.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a1037d5109f..fa24095e5e2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1517,7 +1517,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.0.0 +pyhomeworks==1.1.0 # homeassistant.components.ialarm pyialarm==2.2.0 From da18aae2d89f146e49c5cbba288df250d8782e3f Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Tue, 30 Jul 2024 15:27:16 -0500 Subject: [PATCH 0159/1635] Bump intents to 
2024.7.29 (#122811) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index f308ae57647..65c79cef187 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.10"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.29"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b0e17bc2826..70c05f35c33 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 home-assistant-frontend==20240719.0 -home-assistant-intents==2024.7.10 +home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 80f4ab6bc4a..c9e5ad7c264 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ holidays==0.53 home-assistant-frontend==20240719.0 # homeassistant.components.conversation -home-assistant-intents==2024.7.10 +home-assistant-intents==2024.7.29 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fa24095e5e2..33c1ebcdb09 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -916,7 +916,7 @@ holidays==0.53 home-assistant-frontend==20240719.0 # homeassistant.components.conversation -home-assistant-intents==2024.7.10 +home-assistant-intents==2024.7.29 # homeassistant.components.home_connect homeconnect==0.8.0 From 6999c6b0cf9f6f37a493ff06e93f5f81bad8365e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 30 Jul 2024 16:40:38 -0500 Subject: [PATCH 0160/1635] Bump aiohttp to 3.10.0 (#122880) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 70c05f35c33..b259bef2c8b 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.0rc0 +aiohttp==3.10.0 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index eac18012ae3..e9ee54b069e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.0rc0", + "aiohttp==3.10.0", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index 5122cb99c41..a8edc66cb9a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.0rc0 +aiohttp==3.10.0 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From 067acce4deeb397717b6bb7f9ca3e5f0dab79c41 Mon Sep 17 00:00:00 2001 From: Jeef Date: Tue, 30 Jul 2024 15:42:10 -0600 Subject: [PATCH 0161/1635] Add SimpleFin sensor to show age of data (#122550) --- .../components/simplefin/manifest.json | 2 +- homeassistant/components/simplefin/sensor.py | 13 +- .../components/simplefin/strings.json | 3 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../simplefin/snapshots/test_sensor.ambr | 384 ++++++++++++++++++ 6 files changed, 401 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/simplefin/manifest.json b/homeassistant/components/simplefin/manifest.json index f3e312d9de5..a790e64c578 100644 --- a/homeassistant/components/simplefin/manifest.json +++ b/homeassistant/components/simplefin/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["simplefin"], - "requirements": ["simplefin4py==0.0.16"] + "requirements": ["simplefin4py==0.0.18"] } diff --git a/homeassistant/components/simplefin/sensor.py b/homeassistant/components/simplefin/sensor.py index 2fac42cbac5..b2167a2c014 100644 --- a/homeassistant/components/simplefin/sensor.py +++ b/homeassistant/components/simplefin/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime from simplefin4py import Account @@ -13,6 +14,7 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) +from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -25,7 +27,7 @@ from .entity import SimpleFinEntity class SimpleFinSensorEntityDescription(SensorEntityDescription): """Describes a sensor entity.""" - value_fn: Callable[[Account], StateType] + value_fn: Callable[[Account], StateType | datetime] icon_fn: Callable[[Account], str] | None = None unit_fn: Callable[[Account], str] | None = None @@ -40,6 +42,13 @@ SIMPLEFIN_SENSORS: tuple[SimpleFinSensorEntityDescription, ...] 
= ( unit_fn=lambda account: account.currency, icon_fn=lambda account: account.inferred_account_type, ), + SimpleFinSensorEntityDescription( + key="age", + translation_key="age", + device_class=SensorDeviceClass.TIMESTAMP, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda account: account.balance_date, + ), ) @@ -70,7 +79,7 @@ class SimpleFinSensor(SimpleFinEntity, SensorEntity): entity_description: SimpleFinSensorEntityDescription @property - def native_value(self) -> StateType: + def native_value(self) -> StateType | datetime | None: """Return the state.""" return self.entity_description.value_fn(self.account_data) diff --git a/homeassistant/components/simplefin/strings.json b/homeassistant/components/simplefin/strings.json index c54520a0451..d6690e604c5 100644 --- a/homeassistant/components/simplefin/strings.json +++ b/homeassistant/components/simplefin/strings.json @@ -24,6 +24,9 @@ "sensor": { "balance": { "name": "Balance" + }, + "age": { + "name": "Data age" } } } diff --git a/requirements_all.txt b/requirements_all.txt index c9e5ad7c264..8389d5ed2cd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2580,7 +2580,7 @@ sharp_aquos_rc==0.3.2 shodan==1.28.0 # homeassistant.components.simplefin -simplefin4py==0.0.16 +simplefin4py==0.0.18 # homeassistant.components.sighthound simplehound==0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 33c1ebcdb09..c5ad7e9da05 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2032,7 +2032,7 @@ sfrbox-api==0.0.8 sharkiq==1.0.2 # homeassistant.components.simplefin -simplefin4py==0.0.16 +simplefin4py==0.0.18 # homeassistant.components.sighthound simplehound==0.3 diff --git a/tests/components/simplefin/snapshots/test_sensor.ambr b/tests/components/simplefin/snapshots/test_sensor.ambr index 2660bbd74ca..c7dced9300e 100644 --- a/tests/components/simplefin/snapshots/test_sensor.ambr +++ b/tests/components/simplefin/snapshots/test_sensor.ambr @@ -52,6 +52,54 @@ 'state': '1000000.00', }) # --- +# name: test_all_entities[sensor.investments_dr_evil_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_dr_evil_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_dr_evil_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments Dr Evil Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_dr_evil_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- # name: test_all_entities[sensor.investments_my_checking_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -105,6 +153,54 @@ 'state': '12345.67', }) # --- +# name: test_all_entities[sensor.investments_my_checking_data_age-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_my_checking_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_my_checking_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments My Checking Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_my_checking_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- # name: test_all_entities[sensor.investments_nerdcorp_series_b_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -158,6 +254,54 @@ 'state': '13579.24', }) # --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.investments_nerdcorp_series_b_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Investments NerdCorp Series B Data age', + }), + 'context': , + 'entity_id': 'sensor.investments_nerdcorp_series_b_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T13:55:19+00:00', + }) +# --- # name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -211,6 +355,54 @@ 'state': '7500.00', }) # --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 
'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_castle_mortgage_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Data age', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_castle_mortgage_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- # name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -264,6 +456,54 @@ 'state': '10000.00', }) # --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.mythical_randomsavings_unicorn_pot_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Data age', + }), + 'context': , + 'entity_id': 'sensor.mythical_randomsavings_unicorn_pot_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:16:42+00:00', + }) +# --- # name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -317,6 +557,54 @@ 'state': '-532.69', }) # --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.random_bank_costco_anywhere_visa_r_card_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Data age', + }), + 'context': , + 'entity_id': 'sensor.random_bank_costco_anywhere_visa_r_card_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 
'2024-01-16T18:16:42+00:00', + }) +# --- # name: test_all_entities[sensor.the_bank_of_go_prime_savings_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -370,6 +658,54 @@ 'state': '9876.54', }) # --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_prime_savings_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Data age', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_prime_savings_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T18:14:21+00:00', + }) +# --- # name: test_all_entities[sensor.the_bank_of_go_the_bank_balance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -423,3 +759,51 @@ 'state': '7777.77', }) # --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Data age', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'age', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_age', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.the_bank_of_go_the_bank_data_age-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'timestamp', + 'friendly_name': 'The Bank of Go The Bank Data age', + }), + 'context': , + 'entity_id': 'sensor.the_bank_of_go_the_bank_data_age', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-01-16T14:04:03+00:00', + }) +# --- From aa801d9cc6b09050c3b4e5e4cf41d1607b21ae83 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 30 Jul 2024 18:20:55 -0500 Subject: [PATCH 0162/1635] Bump bluetooth-data-tools to 1.19.4 (#122886) --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/components/ld2410_ble/manifest.json | 2 +- homeassistant/components/led_ble/manifest.json | 2 +- homeassistant/components/private_ble_device/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 12bb37ac570..95d2b171c9f 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -18,7 +18,7 @@ "bleak-retry-connector==3.5.0", "bluetooth-adapters==0.19.3", "bluetooth-auto-recovery==1.4.2", - "bluetooth-data-tools==1.19.3", + "bluetooth-data-tools==1.19.4", "dbus-fast==2.22.1", "habluetooth==3.1.3" ] diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json index 2389e3199e2..a1b0e9a1398 100644 --- a/homeassistant/components/ld2410_ble/manifest.json +++ b/homeassistant/components/ld2410_ble/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/ld2410_ble", "integration_type": "device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.19.3", "ld2410-ble==0.1.1"] + "requirements": ["bluetooth-data-tools==1.19.4", "ld2410-ble==0.1.1"] } diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index bf15ab1cc66..e22d23fb971 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.19.3", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.19.4", "led-ble==1.0.2"] } diff --git a/homeassistant/components/private_ble_device/manifest.json b/homeassistant/components/private_ble_device/manifest.json index bb29e2cf105..8b072361d34 100644 --- a/homeassistant/components/private_ble_device/manifest.json +++ b/homeassistant/components/private_ble_device/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/private_ble_device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.19.3"] + "requirements": ["bluetooth-data-tools==1.19.4"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b259bef2c8b..9fca07b6202 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -18,7 +18,7 @@ bleak-retry-connector==3.5.0 bleak==0.22.2 bluetooth-adapters==0.19.3 bluetooth-auto-recovery==1.4.2 -bluetooth-data-tools==1.19.3 +bluetooth-data-tools==1.19.4 cached_ipaddress==0.3.0 certifi>=2021.5.30 ciso8601==2.3.1 diff --git a/requirements_all.txt b/requirements_all.txt index 8389d5ed2cd..f46ed9dd53a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -600,7 +600,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.3 +bluetooth-data-tools==1.19.4 # homeassistant.components.bond 
bond-async==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c5ad7e9da05..2750dd4cc96 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -524,7 +524,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.3 +bluetooth-data-tools==1.19.4 # homeassistant.components.bond bond-async==0.2.1 From 823910b69ed9f8aab788279e4304643ee54f541c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 31 Jul 2024 00:20:09 -0500 Subject: [PATCH 0163/1635] Bump ulid-transform to 0.13.1 (#122884) * Bump ulid-transform to 0.13.0 changelog: https://github.com/bdraco/ulid-transform/compare/v0.10.1...v0.13.0 * Bump ulid-transform to 0.13.1 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9fca07b6202..43e737a002d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -55,7 +55,7 @@ PyYAML==6.0.1 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.10.1 +ulid-transform==0.13.1 urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 diff --git a/pyproject.toml b/pyproject.toml index e9ee54b069e..f71e9bd6013 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ dependencies = [ "requests==2.32.3", "SQLAlchemy==2.0.31", "typing-extensions>=4.12.2,<5.0", - "ulid-transform==0.10.1", + "ulid-transform==0.13.1", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 diff --git a/requirements.txt b/requirements.txt index a8edc66cb9a..c851927f9c6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -36,7 +36,7 @@ PyYAML==6.0.1 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.10.1 +ulid-transform==0.13.1 urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 From 6d8bc84db3ae6374f6ac4085de25972c979ab9af Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 31 Jul 2024 08:02:15 +0200 Subject: [PATCH 0164/1635] Allow [##:##:##:##:##] type device address in homeworks (#122872) * Allow [##:##:##:##:##] type device address in homeworks * Simplify regex --- homeassistant/components/homeworks/config_flow.py | 2 +- tests/components/homeworks/test_config_flow.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index ec381c3331f..4508f3bd21d 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -89,7 +89,7 @@ BUTTON_EDIT: VolDictType = { } -validate_addr = cv.matches_regex(r"\[(?:\d\d:)?\d\d:\d\d:\d\d\]") +validate_addr = cv.matches_regex(r"\[(?:\d\d:){2,4}\d\d\]") async def validate_add_controller( diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index 3e359caf7f2..c4738e68ecc 100644 --- a/tests/components/homeworks/test_config_flow.py +++ b/tests/components/homeworks/test_config_flow.py @@ -436,7 +436,14 @@ async def test_options_add_remove_light_flow( ) -@pytest.mark.parametrize("keypad_address", 
["[02:08:03:01]", "[02:08:03]"]) +@pytest.mark.parametrize( + "keypad_address", + [ + "[02:08:03]", + "[02:08:03:01]", + "[02:08:03:01:00]", + ], +) async def test_options_add_remove_keypad_flow( hass: HomeAssistant, mock_config_entry: MockConfigEntry, From 5766ea9541b998586fcf742340fdba0ed25f671b Mon Sep 17 00:00:00 2001 From: Lukas Kolletzki Date: Wed, 31 Jul 2024 08:26:57 +0200 Subject: [PATCH 0165/1635] Add generic URL handler to blueprint importer (#110576) * Add generic url handler to blueprint importer * Update tests/components/blueprint/test_importer.py * Update tests/components/blueprint/test_importer.py * Update test_importer.py --------- Co-authored-by: Erik Montnemery --- .../components/blueprint/importer.py | 34 +++++++++++++++---- tests/components/blueprint/test_importer.py | 29 +++++++++++++--- 2 files changed, 52 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/blueprint/importer.py b/homeassistant/components/blueprint/importer.py index 4517d134e69..c231a33991a 100644 --- a/homeassistant/components/blueprint/importer.py +++ b/homeassistant/components/blueprint/importer.py @@ -245,14 +245,36 @@ async def fetch_blueprint_from_website_url( return ImportedBlueprint(suggested_filename, raw_yaml, blueprint) +async def fetch_blueprint_from_generic_url( + hass: HomeAssistant, url: str +) -> ImportedBlueprint: + """Get a blueprint from a generic website.""" + session = aiohttp_client.async_get_clientsession(hass) + + resp = await session.get(url, raise_for_status=True) + raw_yaml = await resp.text() + data = yaml.parse_yaml(raw_yaml) + + assert isinstance(data, dict) + blueprint = Blueprint(data) + + parsed_import_url = yarl.URL(url) + suggested_filename = f"{parsed_import_url.host}/{parsed_import_url.parts[-1][:-5]}" + return ImportedBlueprint(suggested_filename, raw_yaml, blueprint) + + +FETCH_FUNCTIONS = ( + fetch_blueprint_from_community_post, + fetch_blueprint_from_github_url, + fetch_blueprint_from_github_gist_url, + fetch_blueprint_from_website_url, + fetch_blueprint_from_generic_url, +) + + async def fetch_blueprint_from_url(hass: HomeAssistant, url: str) -> ImportedBlueprint: """Get a blueprint from a url.""" - for func in ( - fetch_blueprint_from_community_post, - fetch_blueprint_from_github_url, - fetch_blueprint_from_github_gist_url, - fetch_blueprint_from_website_url, - ): + for func in FETCH_FUNCTIONS: with suppress(UnsupportedUrl): imported_bp = await func(hass, url) imported_bp.blueprint.update_metadata(source_url=url) diff --git a/tests/components/blueprint/test_importer.py b/tests/components/blueprint/test_importer.py index f135bbf23b8..94036d208ab 100644 --- a/tests/components/blueprint/test_importer.py +++ b/tests/components/blueprint/test_importer.py @@ -192,9 +192,28 @@ async def test_fetch_blueprint_from_website_url( assert imported_blueprint.blueprint.metadata["source_url"] == url -async def test_fetch_blueprint_from_unsupported_url(hass: HomeAssistant) -> None: - """Test fetching blueprint from an unsupported URL.""" - url = "https://example.com/unsupported.yaml" +async def test_fetch_blueprint_from_generic_url( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test fetching blueprint from url.""" + aioclient_mock.get( + "https://example.org/path/someblueprint.yaml", + text=Path( + hass.config.path("blueprints/automation/test_event_service.yaml") + ).read_text(encoding="utf8"), + ) - with pytest.raises(HomeAssistantError, match=r"^Unsupported URL$"): - await importer.fetch_blueprint_from_url(hass, url) + url = 
"https://example.org/path/someblueprint.yaml" + imported_blueprint = await importer.fetch_blueprint_from_url(hass, url) + assert isinstance(imported_blueprint, importer.ImportedBlueprint) + assert imported_blueprint.blueprint.domain == "automation" + assert imported_blueprint.suggested_filename == "example.org/someblueprint" + assert imported_blueprint.blueprint.metadata["source_url"] == url + + +def test_generic_importer_last() -> None: + """Test that generic importer is always the last one.""" + assert ( + importer.FETCH_FUNCTIONS.count(importer.fetch_blueprint_from_generic_url) == 1 + ) + assert importer.FETCH_FUNCTIONS[-1] == importer.fetch_blueprint_from_generic_url From 0d678120e42471710da358adacbb24452bac58e8 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 31 Jul 2024 08:28:39 +0200 Subject: [PATCH 0166/1635] Bump aioautomower to 2024.7.3 (#121983) * Bump aioautomower to 2024.7.0 * tests * Bump to 2024.7.1 * bump to 2024.7.2 * use timezone Europe/Berlin * bump to 2024.7.3 --------- Co-authored-by: Joost Lekkerkerker Co-authored-by: Erik Montnemery --- .../husqvarna_automower/manifest.json | 2 +- .../components/husqvarna_automower/sensor.py | 8 +++++- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../husqvarna_automower/__init__.py | 6 +++- .../snapshots/test_diagnostics.ambr | 28 +++++++++++++++---- .../snapshots/test_sensor.ambr | 2 +- .../husqvarna_automower/test_sensor.py | 4 +-- 8 files changed, 40 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index f27b04ef0c0..bb03806e417 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.6.4"] + "requirements": ["aioautomower==2024.7.3"] } diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index 2c8d369ea3a..bd0b8561223 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -5,8 +5,10 @@ from dataclasses import dataclass from datetime import datetime import logging from typing import TYPE_CHECKING +from zoneinfo import ZoneInfo from aioautomower.model import MowerAttributes, MowerModes, RestrictedReasons +from aioautomower.utils import naive_to_aware from homeassistant.components.sensor import ( SensorDeviceClass, @@ -18,6 +20,7 @@ from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfLength, UnitOf from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType +from homeassistant.util import dt as dt_util from . import AutomowerConfigEntry from .coordinator import AutomowerDataUpdateCoordinator @@ -324,7 +327,10 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] 
= ( key="next_start_timestamp", translation_key="next_start_timestamp", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: data.planner.next_start_datetime, + value_fn=lambda data: naive_to_aware( + data.planner.next_start_datetime_naive, + ZoneInfo(str(dt_util.DEFAULT_TIME_ZONE)), + ), ), AutomowerSensorEntityDescription( key="error", diff --git a/requirements_all.txt b/requirements_all.txt index f46ed9dd53a..8aebf250fd4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -198,7 +198,7 @@ aioaseko==0.2.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.6.4 +aioautomower==2024.7.3 # homeassistant.components.azure_devops aioazuredevops==2.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2750dd4cc96..859154b2837 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -186,7 +186,7 @@ aioaseko==0.2.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.6.4 +aioautomower==2024.7.3 # homeassistant.components.azure_devops aioazuredevops==2.1.1 diff --git a/tests/components/husqvarna_automower/__init__.py b/tests/components/husqvarna_automower/__init__.py index 8c51d69ba3d..9473b68a5ed 100644 --- a/tests/components/husqvarna_automower/__init__.py +++ b/tests/components/husqvarna_automower/__init__.py @@ -7,6 +7,10 @@ from tests.common import MockConfigEntry async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Fixture for setting up the component.""" + # We lock the timezone, because the timezone is passed to the library to generate + # some values like the next start sensor. This is needed, as the device is not aware + # of its own timezone. So we assume the device is in the timezone which is selected in + # the Home Assistant config. 
+ await hass.config.async_set_time_zone("Europe/Berlin") config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index d8cd748c793..212be85ce51 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -7,18 +7,20 @@ 'calendar': dict({ 'events': list([ dict({ - 'end': '2024-03-02T00:00:00+00:00', + 'end': '2024-03-02T00:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR', - 'start': '2024-03-01T19:00:00+00:00', + 'start': '2024-03-01T19:00:00', 'uid': '1140_300_MO,WE,FR', 'work_area_id': None, + 'work_area_name': None, }), dict({ - 'end': '2024-03-02T08:00:00+00:00', + 'end': '2024-03-02T08:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA', - 'start': '2024-03-02T00:00:00+00:00', + 'start': '2024-03-02T00:00:00', 'uid': '0_480_TU,TH,SA', 'work_area_id': None, + 'work_area_name': None, }), ]), 'tasks': list([ @@ -33,6 +35,7 @@ 'tuesday': False, 'wednesday': True, 'work_area_id': None, + 'work_area_name': None, }), dict({ 'duration': 480, @@ -45,6 +48,7 @@ 'tuesday': True, 'wednesday': False, 'work_area_id': None, + 'work_area_name': None, }), ]), }), @@ -61,17 +65,18 @@ 'mower': dict({ 'activity': 'PARKED_IN_CS', 'error_code': 0, - 'error_datetime': None, 'error_datetime_naive': None, 'error_key': None, + 'error_timestamp': 0, 'inactive_reason': 'NONE', 'is_error_confirmable': False, 'mode': 'MAIN_AREA', 'state': 'RESTRICTED', 'work_area_id': 123456, + 'work_area_name': 'Front lawn', }), 'planner': dict({ - 'next_start_datetime': '2023-06-05T19:00:00+00:00', + 'next_start': 1685991600000, 'next_start_datetime_naive': '2023-06-05T19:00:00', 'override': dict({ 'action': 'NOT_ACTIVE', @@ -113,6 +118,17 @@ 'name': 'Test Mower 1', 'serial_number': 123, }), + 'work_area_dict': dict({ + '0': 'my_lawn', + '123456': 'Front lawn', + '654321': 'Back lawn', + }), + 'work_area_names': list([ + 'Front lawn', + 'Back lawn', + 'my_lawn', + 'no_work_area_active', + ]), 'work_areas': dict({ '0': dict({ 'cutting_height': 50, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 935303e48fb..730971a47dd 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -548,7 +548,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2023-06-05T19:00:00+00:00', + 'state': '2023-06-05T17:00:00+00:00', }) # --- # name: test_sensor_snapshot[sensor.test_mower_1_none-entry] diff --git a/tests/components/husqvarna_automower/test_sensor.py b/tests/components/husqvarna_automower/test_sensor.py index 314bcaaa00c..1a4f545ac96 100644 --- a/tests/components/husqvarna_automower/test_sensor.py +++ b/tests/components/husqvarna_automower/test_sensor.py @@ -73,12 +73,12 @@ async def test_next_start_sensor( await setup_integration(hass, mock_config_entry) state = hass.states.get("sensor.test_mower_1_next_start") assert state is not None - assert state.state == "2023-06-05T19:00:00+00:00" + assert state.state == "2023-06-05T17:00:00+00:00" values = mower_list_to_dictionary_dataclass( load_json_value_fixture("mower.json", DOMAIN) ) - values[TEST_MOWER_ID].planner.next_start_datetime = None + values[TEST_MOWER_ID].planner.next_start_datetime_naive = None 
mock_automower_client.get_status.return_value = values freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) From 9351f300b037c680e606bb4886751010993139ac Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 31 Jul 2024 09:10:36 +0200 Subject: [PATCH 0167/1635] Update xknx to 3.0.0 - more DPT definitions (#122891) * Support DPTComplex objects and validate sensor types * Gracefully start and stop xknx device objects * Use non-awaitable XknxDevice callbacks * Use non-awaitable xknx.TelegramQueue callbacks * Use non-awaitable xknx.ConnectionManager callbacks * Remove unnecessary `hass.async_block_till_done()` calls * Wait for StateUpdater logic to proceed when receiving responses * Update import module paths for specific DPTs * Support Enum data types * New HVAC mode names * HVAC Enums instead of Enum member value strings * New date and time devices * Update xknx to 3.0.0 * Fix expose tests and DPTEnumData check * ruff and mypy fixes --- homeassistant/components/knx/__init__.py | 16 +++--- homeassistant/components/knx/binary_sensor.py | 2 +- homeassistant/components/knx/climate.py | 43 ++++++++++------ homeassistant/components/knx/const.py | 23 +++++---- homeassistant/components/knx/date.py | 32 ++++-------- homeassistant/components/knx/datetime.py | 40 ++++++--------- homeassistant/components/knx/device.py | 5 +- homeassistant/components/knx/expose.py | 50 +++++++++++-------- homeassistant/components/knx/knx_entity.py | 12 +++-- homeassistant/components/knx/light.py | 6 +-- homeassistant/components/knx/manifest.json | 2 +- homeassistant/components/knx/schema.py | 19 ++++--- homeassistant/components/knx/select.py | 13 ++--- homeassistant/components/knx/sensor.py | 2 +- homeassistant/components/knx/services.py | 27 ++++++---- homeassistant/components/knx/telegrams.py | 8 ++- homeassistant/components/knx/time.py | 33 ++++-------- homeassistant/components/knx/trigger.py | 6 +-- homeassistant/components/knx/validation.py | 3 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/knx/conftest.py | 6 +-- tests/components/knx/test_binary_sensor.py | 13 ----- tests/components/knx/test_climate.py | 49 +++++++----------- tests/components/knx/test_config_flow.py | 16 ------ tests/components/knx/test_datetime.py | 5 +- tests/components/knx/test_device_trigger.py | 1 - tests/components/knx/test_events.py | 3 -- tests/components/knx/test_expose.py | 22 +++++--- tests/components/knx/test_init.py | 1 - tests/components/knx/test_interface_device.py | 11 +--- tests/components/knx/test_light.py | 4 +- tests/components/knx/test_notify.py | 9 ---- tests/components/knx/test_sensor.py | 4 -- tests/components/knx/test_services.py | 4 -- tests/components/knx/test_trigger.py | 1 - tests/components/knx/test_weather.py | 8 +-- 37 files changed, 217 insertions(+), 286 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index f7e9b161962..99b461dda1b 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -2,7 +2,6 @@ from __future__ import annotations -import asyncio import contextlib import logging from pathlib import Path @@ -225,7 +224,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: knx_module: KNXModule = hass.data[DOMAIN] for exposure in knx_module.exposures: - exposure.shutdown() + exposure.async_remove() unload_ok = await hass.config_entries.async_unload_platforms( entry, @@ -439,13 +438,13 @@ class KNXModule: threaded=True, ) - async def 
connection_state_changed_cb(self, state: XknxConnectionState) -> None: + def connection_state_changed_cb(self, state: XknxConnectionState) -> None: """Call invoked after a KNX connection state change was received.""" self.connected = state == XknxConnectionState.CONNECTED - if tasks := [device.after_update() for device in self.xknx.devices]: - await asyncio.gather(*tasks) + for device in self.xknx.devices: + device.after_update() - async def telegram_received_cb(self, telegram: Telegram) -> None: + def telegram_received_cb(self, telegram: Telegram) -> None: """Call invoked after a KNX telegram was received.""" # Not all telegrams have serializable data. data: int | tuple[int, ...] | None = None @@ -504,10 +503,7 @@ class KNXModule: transcoder := DPTBase.parse_transcoder(dpt) ): self._address_filter_transcoder.update( - { - _filter: transcoder # type: ignore[type-abstract] - for _filter in _filters - } + {_filter: transcoder for _filter in _filters} ) return self.xknx.telegram_queue.register_telegram_received_cb( diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index dee56608421..0423c1d7b32 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -75,7 +75,7 @@ class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): if ( last_state := await self.async_get_last_state() ) and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): - await self._device.remote_value.update_value(last_state.state == STATE_ON) + self._device.remote_value.update_value(last_state.state == STATE_ON) @property def is_on(self) -> bool: diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index e1179641cdc..26be6a03a79 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -6,7 +6,7 @@ from typing import Any from xknx import XKNX from xknx.devices import Climate as XknxClimate, ClimateMode as XknxClimateMode -from xknx.dpt.dpt_hvac_mode import HVACControllerMode, HVACOperationMode +from xknx.dpt.dpt_20 import HVACControllerMode from homeassistant import config_entries from homeassistant.components.climate import ( @@ -80,7 +80,7 @@ def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: group_address_operation_mode_protection=config.get( ClimateSchema.CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS ), - group_address_operation_mode_night=config.get( + group_address_operation_mode_economy=config.get( ClimateSchema.CONF_OPERATION_MODE_NIGHT_ADDRESS ), group_address_operation_mode_comfort=config.get( @@ -199,10 +199,12 @@ class KNXClimate(KnxEntity, ClimateEntity): self.async_write_ha_state() return - if self._device.mode is not None and self._device.mode.supports_controller_mode: - knx_controller_mode = HVACControllerMode( - CONTROLLER_MODES_INV.get(self._last_hvac_mode) - ) + if ( + self._device.mode is not None + and self._device.mode.supports_controller_mode + and (knx_controller_mode := CONTROLLER_MODES_INV.get(self._last_hvac_mode)) + is not None + ): await self._device.mode.set_controller_mode(knx_controller_mode) self.async_write_ha_state() @@ -234,7 +236,7 @@ class KNXClimate(KnxEntity, ClimateEntity): return HVACMode.OFF if self._device.mode is not None and self._device.mode.supports_controller_mode: hvac_mode = CONTROLLER_MODES.get( - self._device.mode.controller_mode.value, self.default_hvac_mode + self._device.mode.controller_mode, self.default_hvac_mode ) if hvac_mode is not 
HVACMode.OFF: self._last_hvac_mode = hvac_mode @@ -247,7 +249,7 @@ class KNXClimate(KnxEntity, ClimateEntity): ha_controller_modes: list[HVACMode | None] = [] if self._device.mode is not None: ha_controller_modes.extend( - CONTROLLER_MODES.get(knx_controller_mode.value) + CONTROLLER_MODES.get(knx_controller_mode) for knx_controller_mode in self._device.mode.controller_modes ) @@ -278,9 +280,7 @@ class KNXClimate(KnxEntity, ClimateEntity): async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set controller mode.""" if self._device.mode is not None and self._device.mode.supports_controller_mode: - knx_controller_mode = HVACControllerMode( - CONTROLLER_MODES_INV.get(hvac_mode) - ) + knx_controller_mode = CONTROLLER_MODES_INV.get(hvac_mode) if knx_controller_mode in self._device.mode.controller_modes: await self._device.mode.set_controller_mode(knx_controller_mode) @@ -298,7 +298,7 @@ class KNXClimate(KnxEntity, ClimateEntity): Requires ClimateEntityFeature.PRESET_MODE. """ if self._device.mode is not None and self._device.mode.supports_operation_mode: - return PRESET_MODES.get(self._device.mode.operation_mode.value, PRESET_AWAY) + return PRESET_MODES.get(self._device.mode.operation_mode, PRESET_AWAY) return None @property @@ -311,15 +311,18 @@ class KNXClimate(KnxEntity, ClimateEntity): return None presets = [ - PRESET_MODES.get(operation_mode.value) + PRESET_MODES.get(operation_mode) for operation_mode in self._device.mode.operation_modes ] return list(filter(None, presets)) async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" - if self._device.mode is not None and self._device.mode.supports_operation_mode: - knx_operation_mode = HVACOperationMode(PRESET_MODES_INV.get(preset_mode)) + if ( + self._device.mode is not None + and self._device.mode.supports_operation_mode + and (knx_operation_mode := PRESET_MODES_INV.get(preset_mode)) is not None + ): await self._device.mode.set_operation_mode(knx_operation_mode) self.async_write_ha_state() @@ -333,7 +336,15 @@ class KNXClimate(KnxEntity, ClimateEntity): return attr async def async_added_to_hass(self) -> None: - """Store register state change callback.""" + """Store register state change callback and start device object.""" await super().async_added_to_hass() if self._device.mode is not None: self._device.mode.register_device_updated_cb(self.after_update_callback) + self._device.mode.xknx.devices.async_add(self._device.mode) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect device object when removed.""" + if self._device.mode is not None: + self._device.mode.unregister_device_updated_cb(self.after_update_callback) + self._device.mode.xknx.devices.async_remove(self._device.mode) + await super().async_will_remove_from_hass() diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index 0b7b517dca5..6400f0fe466 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -6,6 +6,7 @@ from collections.abc import Awaitable, Callable from enum import Enum from typing import Final, TypedDict +from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode from xknx.telegram import Telegram from homeassistant.components.climate import ( @@ -158,12 +159,12 @@ SUPPORTED_PLATFORMS_UI: Final = {Platform.SWITCH, Platform.LIGHT} # Map KNX controller modes to HA modes. This list might not be complete. 
CONTROLLER_MODES: Final = { # Map DPT 20.105 HVAC control modes - "Auto": HVACMode.AUTO, - "Heat": HVACMode.HEAT, - "Cool": HVACMode.COOL, - "Off": HVACMode.OFF, - "Fan only": HVACMode.FAN_ONLY, - "Dry": HVACMode.DRY, + HVACControllerMode.AUTO: HVACMode.AUTO, + HVACControllerMode.HEAT: HVACMode.HEAT, + HVACControllerMode.COOL: HVACMode.COOL, + HVACControllerMode.OFF: HVACMode.OFF, + HVACControllerMode.FAN_ONLY: HVACMode.FAN_ONLY, + HVACControllerMode.DEHUMIDIFICATION: HVACMode.DRY, } CURRENT_HVAC_ACTIONS: Final = { @@ -176,9 +177,9 @@ CURRENT_HVAC_ACTIONS: Final = { PRESET_MODES: Final = { # Map DPT 20.102 HVAC operating modes to HA presets - "Auto": PRESET_NONE, - "Frost Protection": PRESET_ECO, - "Night": PRESET_SLEEP, - "Standby": PRESET_AWAY, - "Comfort": PRESET_COMFORT, + HVACOperationMode.AUTO: PRESET_NONE, + HVACOperationMode.BUILDING_PROTECTION: PRESET_ECO, + HVACOperationMode.ECONOMY: PRESET_SLEEP, + HVACOperationMode.STANDBY: PRESET_AWAY, + HVACOperationMode.COMFORT: PRESET_COMFORT, } diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index fa20a8d04c5..98cd22e0751 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -3,11 +3,10 @@ from __future__ import annotations from datetime import date as dt_date -import time -from typing import Final from xknx import XKNX -from xknx.devices import DateTime as XknxDateTime +from xknx.devices import DateDevice as XknxDateDevice +from xknx.dpt.dpt_11 import KNXDate as XKNXDate from homeassistant import config_entries from homeassistant.components.date import DateEntity @@ -33,8 +32,6 @@ from .const import ( ) from .knx_entity import KnxEntity -_DATE_TRANSLATION_FORMAT: Final = "%Y-%m-%d" - async def async_setup_entry( hass: HomeAssistant, @@ -45,15 +42,14 @@ async def async_setup_entry( xknx: XKNX = hass.data[DOMAIN].xknx config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATE] - async_add_entities(KNXDate(xknx, entity_config) for entity_config in config) + async_add_entities(KNXDateEntity(xknx, entity_config) for entity_config in config) -def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime: +def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice: """Return a XKNX DateTime object to be used within XKNX.""" - return XknxDateTime( + return XknxDateDevice( xknx, name=config[CONF_NAME], - broadcast_type="DATE", localtime=False, group_address=config[KNX_ADDRESS], group_address_state=config.get(CONF_STATE_ADDRESS), @@ -62,10 +58,10 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime: ) -class KNXDate(KnxEntity, DateEntity, RestoreEntity): +class KNXDateEntity(KnxEntity, DateEntity, RestoreEntity): """Representation of a KNX date.""" - _device: XknxDateTime + _device: XknxDateDevice def __init__(self, xknx: XKNX, config: ConfigType) -> None: """Initialize a KNX time.""" @@ -81,21 +77,15 @@ class KNXDate(KnxEntity, DateEntity, RestoreEntity): and (last_state := await self.async_get_last_state()) is not None and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) ): - self._device.remote_value.value = time.strptime( - last_state.state, _DATE_TRANSLATION_FORMAT + self._device.remote_value.value = XKNXDate.from_date( + dt_date.fromisoformat(last_state.state) ) @property def native_value(self) -> dt_date | None: """Return the latest value.""" - if (time_struct := self._device.remote_value.value) is None: - return None - return dt_date( - year=time_struct.tm_year, - month=time_struct.tm_mon, - 
day=time_struct.tm_mday, - ) + return self._device.value async def async_set_value(self, value: dt_date) -> None: """Change the value.""" - await self._device.set(value.timetuple()) + await self._device.set(value) diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index 2a1a9e2f9c9..d4a25b522eb 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -5,7 +5,8 @@ from __future__ import annotations from datetime import datetime from xknx import XKNX -from xknx.devices import DateTime as XknxDateTime +from xknx.devices import DateTimeDevice as XknxDateTimeDevice +from xknx.dpt.dpt_19 import KNXDateTime as XKNXDateTime from homeassistant import config_entries from homeassistant.components.datetime import DateTimeEntity @@ -42,15 +43,16 @@ async def async_setup_entry( xknx: XKNX = hass.data[DOMAIN].xknx config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATETIME] - async_add_entities(KNXDateTime(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXDateTimeEntity(xknx, entity_config) for entity_config in config + ) -def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime: +def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTimeDevice: """Return a XKNX DateTime object to be used within XKNX.""" - return XknxDateTime( + return XknxDateTimeDevice( xknx, name=config[CONF_NAME], - broadcast_type="DATETIME", localtime=False, group_address=config[KNX_ADDRESS], group_address_state=config.get(CONF_STATE_ADDRESS), @@ -59,10 +61,10 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime: ) -class KNXDateTime(KnxEntity, DateTimeEntity, RestoreEntity): +class KNXDateTimeEntity(KnxEntity, DateTimeEntity, RestoreEntity): """Representation of a KNX datetime.""" - _device: XknxDateTime + _device: XknxDateTimeDevice def __init__(self, xknx: XKNX, config: ConfigType) -> None: """Initialize a KNX time.""" @@ -78,29 +80,19 @@ class KNXDateTime(KnxEntity, DateTimeEntity, RestoreEntity): and (last_state := await self.async_get_last_state()) is not None and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) ): - self._device.remote_value.value = ( - datetime.fromisoformat(last_state.state) - .astimezone(dt_util.get_default_time_zone()) - .timetuple() + self._device.remote_value.value = XKNXDateTime.from_datetime( + datetime.fromisoformat(last_state.state).astimezone( + dt_util.get_default_time_zone() + ) ) @property def native_value(self) -> datetime | None: """Return the latest value.""" - if (time_struct := self._device.remote_value.value) is None: + if (naive_dt := self._device.value) is None: return None - return datetime( - year=time_struct.tm_year, - month=time_struct.tm_mon, - day=time_struct.tm_mday, - hour=time_struct.tm_hour, - minute=time_struct.tm_min, - second=min(time_struct.tm_sec, 59), # account for leap seconds - tzinfo=dt_util.get_default_time_zone(), - ) + return naive_dt.replace(tzinfo=dt_util.get_default_time_zone()) async def async_set_value(self, value: datetime) -> None: """Change the value.""" - await self._device.set( - value.astimezone(dt_util.get_default_time_zone()).timetuple() - ) + await self._device.set(value.astimezone(dt_util.get_default_time_zone())) diff --git a/homeassistant/components/knx/device.py b/homeassistant/components/knx/device.py index fd5abc6a072..b43b5926d86 100644 --- a/homeassistant/components/knx/device.py +++ b/homeassistant/components/knx/device.py @@ -19,6 +19,7 @@ class 
KNXInterfaceDevice: def __init__(self, hass: HomeAssistant, entry: ConfigEntry, xknx: XKNX) -> None: """Initialize interface device class.""" + self.hass = hass self.device_registry = dr.async_get(hass) self.gateway_descriptor: GatewayDescriptor | None = None self.xknx = xknx @@ -46,7 +47,7 @@ class KNXInterfaceDevice: else None, ) - async def connection_state_changed_cb(self, state: XknxConnectionState) -> None: + def connection_state_changed_cb(self, state: XknxConnectionState) -> None: """Call invoked after a KNX connection state change was received.""" if state is XknxConnectionState.CONNECTED: - await self.update() + self.hass.async_create_task(self.update()) diff --git a/homeassistant/components/knx/expose.py b/homeassistant/components/knx/expose.py index 695fe3b3851..29d0be998b6 100644 --- a/homeassistant/components/knx/expose.py +++ b/homeassistant/components/knx/expose.py @@ -6,7 +6,7 @@ from collections.abc import Callable import logging from xknx import XKNX -from xknx.devices import DateTime, ExposeSensor +from xknx.devices import DateDevice, DateTimeDevice, ExposeSensor, TimeDevice from xknx.dpt import DPTNumeric, DPTString from xknx.exceptions import ConversionError from xknx.remote_value import RemoteValueSensor @@ -60,6 +60,7 @@ def create_knx_exposure( xknx=xknx, config=config, ) + exposure.async_register() return exposure @@ -87,25 +88,23 @@ class KNXExposeSensor: self.value_template.hass = hass self._remove_listener: Callable[[], None] | None = None - self.device: ExposeSensor = self.async_register(config) - self._init_expose_state() - - @callback - def async_register(self, config: ConfigType) -> ExposeSensor: - """Register listener.""" - name = f"{self.entity_id}__{self.expose_attribute or "state"}" - device = ExposeSensor( + self.device: ExposeSensor = ExposeSensor( xknx=self.xknx, - name=name, + name=f"{self.entity_id}__{self.expose_attribute or "state"}", group_address=config[KNX_ADDRESS], respond_to_read=config[CONF_RESPOND_TO_READ], value_type=self.expose_type, cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN], ) + + @callback + def async_register(self) -> None: + """Register listener.""" self._remove_listener = async_track_state_change_event( self.hass, [self.entity_id], self._async_entity_changed ) - return device + self.xknx.devices.async_add(self.device) + self._init_expose_state() @callback def _init_expose_state(self) -> None: @@ -118,12 +117,12 @@ class KNXExposeSensor: _LOGGER.exception("Error during sending of expose sensor value") @callback - def shutdown(self) -> None: + def async_remove(self) -> None: """Prepare for deletion.""" if self._remove_listener is not None: self._remove_listener() self._remove_listener = None - self.device.shutdown() + self.xknx.devices.async_remove(self.device) def _get_expose_value(self, state: State | None) -> bool | int | float | str | None: """Extract value from state.""" @@ -196,21 +195,28 @@ class KNXExposeTime: def __init__(self, xknx: XKNX, config: ConfigType) -> None: """Initialize of Expose class.""" self.xknx = xknx - self.device: DateTime = self.async_register(config) - - @callback - def async_register(self, config: ConfigType) -> DateTime: - """Register listener.""" expose_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE] - return DateTime( + xknx_device_cls: type[DateDevice | DateTimeDevice | TimeDevice] + match expose_type: + case ExposeSchema.CONF_DATE: + xknx_device_cls = DateDevice + case ExposeSchema.CONF_DATETIME: + xknx_device_cls = DateTimeDevice + case ExposeSchema.CONF_TIME: + xknx_device_cls = 
TimeDevice + self.device = xknx_device_cls( self.xknx, name=expose_type.capitalize(), - broadcast_type=expose_type.upper(), localtime=True, group_address=config[KNX_ADDRESS], ) @callback - def shutdown(self) -> None: + def async_register(self) -> None: + """Register listener.""" + self.xknx.devices.async_add(self.device) + + @callback + def async_remove(self) -> None: """Prepare for deletion.""" - self.device.shutdown() + self.xknx.devices.async_remove(self.device) diff --git a/homeassistant/components/knx/knx_entity.py b/homeassistant/components/knx/knx_entity.py index b03c59486e5..eebddbb0623 100644 --- a/homeassistant/components/knx/knx_entity.py +++ b/homeassistant/components/knx/knx_entity.py @@ -36,12 +36,16 @@ class KnxEntity(Entity): """Request a state update from KNX bus.""" await self._device.sync() - async def after_update_callback(self, device: XknxDevice) -> None: + def after_update_callback(self, _device: XknxDevice) -> None: """Call after device was updated.""" self.async_write_ha_state() async def async_added_to_hass(self) -> None: - """Store register state change callback.""" + """Store register state change callback and start device object.""" self._device.register_device_updated_cb(self.after_update_callback) - # will remove all callbacks and xknx tasks - self.async_on_remove(self._device.shutdown) + self._device.xknx.devices.async_add(self._device) + + async def async_will_remove_from_hass(self) -> None: + """Disconnect device object when removed.""" + self._device.unregister_device_updated_cb(self.after_update_callback) + self._device.xknx.devices.async_remove(self._device) diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index 425640a9915..8ec42f3ee56 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -312,8 +312,7 @@ class _KnxLight(KnxEntity, LightEntity): if self._device.supports_brightness: return self._device.current_brightness if self._device.current_xyy_color is not None: - _, brightness = self._device.current_xyy_color - return brightness + return self._device.current_xyy_color.brightness if self._device.supports_color or self._device.supports_rgbw: rgb, white = self._device.current_color if rgb is None: @@ -363,8 +362,7 @@ class _KnxLight(KnxEntity, LightEntity): def xy_color(self) -> tuple[float, float] | None: """Return the xy color value [float, float].""" if self._device.current_xyy_color is not None: - xy_color, _ = self._device.current_xyy_color - return xy_color + return self._device.current_xyy_color.color return None @property diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 5035239d1fb..0f96970f3ae 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -11,7 +11,7 @@ "loggers": ["xknx", "xknxproject"], "quality_scale": "platinum", "requirements": [ - "xknx==2.12.2", + "xknx==3.0.0", "xknxproject==3.7.1", "knx-frontend==2024.7.25.204106" ], diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index 34a145eadb3..43037ad8188 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -9,6 +9,7 @@ from typing import ClassVar, Final import voluptuous as vol from xknx.devices.climate import SetpointShiftMode from xknx.dpt import DPTBase, DPTNumeric +from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode from xknx.exceptions import ConversionError, CouldNotParseTelegram 
from homeassistant.components.binary_sensor import ( @@ -51,12 +52,11 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - CONTROLLER_MODES, KNX_ADDRESS, - PRESET_MODES, ColorTempModes, ) from .validation import ( + dpt_base_type_validator, ga_list_validator, ga_validator, numeric_type_validator, @@ -173,7 +173,7 @@ class EventSchema: KNX_EVENT_FILTER_SCHEMA = vol.Schema( { vol.Required(KNX_ADDRESS): vol.All(cv.ensure_list, [cv.string]), - vol.Optional(CONF_TYPE): sensor_type_validator, + vol.Optional(CONF_TYPE): dpt_base_type_validator, } ) @@ -409,10 +409,10 @@ class ClimateSchema(KNXPlatformSchema): CONF_ON_OFF_INVERT, default=DEFAULT_ON_OFF_INVERT ): cv.boolean, vol.Optional(CONF_OPERATION_MODES): vol.All( - cv.ensure_list, [vol.In(PRESET_MODES)] + cv.ensure_list, [vol.All(vol.Upper, cv.enum(HVACOperationMode))] ), vol.Optional(CONF_CONTROLLER_MODES): vol.All( - cv.ensure_list, [vol.In(CONTROLLER_MODES)] + cv.ensure_list, [vol.All(vol.Upper, cv.enum(HVACControllerMode))] ), vol.Optional( CONF_DEFAULT_CONTROLLER_MODE, default=HVACMode.HEAT @@ -535,11 +535,10 @@ class ExposeSchema(KNXPlatformSchema): CONF_KNX_EXPOSE_BINARY = "binary" CONF_KNX_EXPOSE_COOLDOWN = "cooldown" CONF_KNX_EXPOSE_DEFAULT = "default" - EXPOSE_TIME_TYPES: Final = [ - "time", - "date", - "datetime", - ] + CONF_TIME = "time" + CONF_DATE = "date" + CONF_DATETIME = "datetime" + EXPOSE_TIME_TYPES: Final = [CONF_TIME, CONF_DATE, CONF_DATETIME] EXPOSE_TIME_SCHEMA = vol.Schema( { diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index 5d7532e0e5d..f338bf9feaf 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -81,17 +81,18 @@ class KNXSelect(KnxEntity, SelectEntity, RestoreEntity): if not self._device.remote_value.readable and ( last_state := await self.async_get_last_state() ): - if last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): - await self._device.remote_value.update_value( - self._option_payloads.get(last_state.state) - ) + if ( + last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) + and (option := self._option_payloads.get(last_state.state)) is not None + ): + self._device.remote_value.update_value(option) - async def after_update_callback(self, device: XknxDevice) -> None: + def after_update_callback(self, device: XknxDevice) -> None: """Call after device was updated.""" self._attr_current_option = self.option_from_payload( self._device.remote_value.value ) - await super().after_update_callback(device) + super().after_update_callback(device) def option_from_payload(self, payload: int | None) -> str | None: """Return the option a given payload is assigned to.""" diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index 173979f78dc..5a09a921901 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -208,7 +208,7 @@ class KNXSystemSensor(SensorEntity): return True return self.knx.xknx.connection_manager.state is XknxConnectionState.CONNECTED - async def after_update_callback(self, _: XknxConnectionState) -> None: + def after_update_callback(self, _: XknxConnectionState) -> None: """Call after device was updated.""" self.async_write_ha_state() diff --git a/homeassistant/components/knx/services.py b/homeassistant/components/knx/services.py index 24b9452cf60..8b82671deaa 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -8,6 +8,7 @@ from typing 
import TYPE_CHECKING import voluptuous as vol from xknx.dpt import DPTArray, DPTBase, DPTBinary +from xknx.exceptions import ConversionError from xknx.telegram import Telegram from xknx.telegram.address import parse_device_group_address from xknx.telegram.apci import GroupValueRead, GroupValueResponse, GroupValueWrite @@ -31,7 +32,7 @@ from .const import ( SERVICE_KNX_SEND, ) from .expose import create_knx_exposure -from .schema import ExposeSchema, ga_validator, sensor_type_validator +from .schema import ExposeSchema, dpt_base_type_validator, ga_validator if TYPE_CHECKING: from . import KNXModule @@ -95,7 +96,7 @@ SERVICE_KNX_EVENT_REGISTER_SCHEMA = vol.Schema( cv.ensure_list, [ga_validator], ), - vol.Optional(CONF_TYPE): sensor_type_validator, + vol.Optional(CONF_TYPE): dpt_base_type_validator, vol.Optional(SERVICE_KNX_ATTR_REMOVE, default=False): cv.boolean, } ) @@ -125,10 +126,7 @@ async def service_event_register_modify(hass: HomeAssistant, call: ServiceCall) transcoder := DPTBase.parse_transcoder(dpt) ): knx_module.group_address_transcoder.update( - { - _address: transcoder # type: ignore[type-abstract] - for _address in group_addresses - } + {_address: transcoder for _address in group_addresses} ) for group_address in group_addresses: if group_address in knx_module.knx_event_callback.group_addresses: @@ -173,7 +171,7 @@ async def service_exposure_register_modify( f"Could not find exposure for '{group_address}' to remove." ) from err - removed_exposure.shutdown() + removed_exposure.async_remove() return if group_address in knx_module.service_exposures: @@ -186,7 +184,7 @@ async def service_exposure_register_modify( group_address, replaced_exposure.device.name, ) - replaced_exposure.shutdown() + replaced_exposure.async_remove() exposure = create_knx_exposure(knx_module.hass, knx_module.xknx, call.data) knx_module.service_exposures[group_address] = exposure _LOGGER.debug( @@ -204,7 +202,7 @@ SERVICE_KNX_SEND_SCHEMA = vol.Any( [ga_validator], ), vol.Required(SERVICE_KNX_ATTR_PAYLOAD): cv.match_all, - vol.Required(SERVICE_KNX_ATTR_TYPE): sensor_type_validator, + vol.Required(SERVICE_KNX_ATTR_TYPE): dpt_base_type_validator, vol.Optional(SERVICE_KNX_ATTR_RESPONSE, default=False): cv.boolean, } ), @@ -237,8 +235,15 @@ async def service_send_to_knx_bus(hass: HomeAssistant, call: ServiceCall) -> Non if attr_type is not None: transcoder = DPTBase.parse_transcoder(attr_type) if transcoder is None: - raise ValueError(f"Invalid type for knx.send service: {attr_type}") - payload = transcoder.to_knx(attr_payload) + raise ServiceValidationError( + f"Invalid type for knx.send service: {attr_type}" + ) + try: + payload = transcoder.to_knx(attr_payload) + except ConversionError as err: + raise ServiceValidationError( + f"Invalid payload for knx.send service: {err}" + ) from err elif isinstance(attr_payload, int): payload = DPTBinary(attr_payload) else: diff --git a/homeassistant/components/knx/telegrams.py b/homeassistant/components/knx/telegrams.py index 82df78e748e..2ad46326b8e 100644 --- a/homeassistant/components/knx/telegrams.py +++ b/homeassistant/components/knx/telegrams.py @@ -7,6 +7,7 @@ from typing import Final, TypedDict from xknx import XKNX from xknx.dpt import DPTArray, DPTBase, DPTBinary +from xknx.dpt.dpt import DPTComplexData, DPTEnumData from xknx.exceptions import XKNXException from xknx.telegram import Telegram from xknx.telegram.apci import GroupValueResponse, GroupValueWrite @@ -93,7 +94,7 @@ class Telegrams: if self.recent_telegrams: await 
self._history_store.async_save(list(self.recent_telegrams)) - async def _xknx_telegram_cb(self, telegram: Telegram) -> None: + def _xknx_telegram_cb(self, telegram: Telegram) -> None: """Handle incoming and outgoing telegrams from xknx.""" telegram_dict = self.telegram_to_dict(telegram) self.recent_telegrams.append(telegram_dict) @@ -157,6 +158,11 @@ def decode_telegram_payload( except XKNXException: value = "Error decoding value" + if isinstance(value, DPTComplexData): + value = value.as_dict() + elif isinstance(value, DPTEnumData): + value = value.name.lower() + return DecodedTelegramPayload( dpt_main=transcoder.dpt_main_number, dpt_sub=transcoder.dpt_sub_number, diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index c11b40d13dc..28e1419233c 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -3,11 +3,11 @@ from __future__ import annotations from datetime import time as dt_time -import time as time_time from typing import Final from xknx import XKNX -from xknx.devices import DateTime as XknxDateTime +from xknx.devices import TimeDevice as XknxTimeDevice +from xknx.dpt.dpt_10 import KNXTime as XknxTime from homeassistant import config_entries from homeassistant.components.time import TimeEntity @@ -45,15 +45,14 @@ async def async_setup_entry( xknx: XKNX = hass.data[DOMAIN].xknx config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TIME] - async_add_entities(KNXTime(xknx, entity_config) for entity_config in config) + async_add_entities(KNXTimeEntity(xknx, entity_config) for entity_config in config) -def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime: +def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice: """Return a XKNX DateTime object to be used within XKNX.""" - return XknxDateTime( + return XknxTimeDevice( xknx, name=config[CONF_NAME], - broadcast_type="TIME", localtime=False, group_address=config[KNX_ADDRESS], group_address_state=config.get(CONF_STATE_ADDRESS), @@ -62,10 +61,10 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTime: ) -class KNXTime(KnxEntity, TimeEntity, RestoreEntity): +class KNXTimeEntity(KnxEntity, TimeEntity, RestoreEntity): """Representation of a KNX time.""" - _device: XknxDateTime + _device: XknxTimeDevice def __init__(self, xknx: XKNX, config: ConfigType) -> None: """Initialize a KNX time.""" @@ -81,25 +80,15 @@ class KNXTime(KnxEntity, TimeEntity, RestoreEntity): and (last_state := await self.async_get_last_state()) is not None and last_state.state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) ): - self._device.remote_value.value = time_time.strptime( - last_state.state, _TIME_TRANSLATION_FORMAT + self._device.remote_value.value = XknxTime.from_time( + dt_time.fromisoformat(last_state.state) ) @property def native_value(self) -> dt_time | None: """Return the latest value.""" - if (time_struct := self._device.remote_value.value) is None: - return None - return dt_time( - hour=time_struct.tm_hour, - minute=time_struct.tm_min, - second=min(time_struct.tm_sec, 59), # account for leap seconds - ) + return self._device.value async def async_set_value(self, value: dt_time) -> None: """Change the value.""" - time_struct = time_time.strptime( - value.strftime(_TIME_TRANSLATION_FORMAT), - _TIME_TRANSLATION_FORMAT, - ) - await self._device.set(time_struct) + await self._device.set(value) diff --git a/homeassistant/components/knx/trigger.py b/homeassistant/components/knx/trigger.py index 82149b21561..ae3ba088357 100644 
--- a/homeassistant/components/knx/trigger.py +++ b/homeassistant/components/knx/trigger.py @@ -18,7 +18,7 @@ from homeassistant.helpers.typing import ConfigType, VolDictType from .const import DOMAIN from .schema import ga_validator from .telegrams import SIGNAL_KNX_TELEGRAM, TelegramDict, decode_telegram_payload -from .validation import sensor_type_validator +from .validation import dpt_base_type_validator TRIGGER_TELEGRAM: Final = "telegram" @@ -44,7 +44,7 @@ TELEGRAM_TRIGGER_SCHEMA: VolDictType = { TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend( { vol.Required(CONF_PLATFORM): PLATFORM_TYPE_TRIGGER_TELEGRAM, - vol.Optional(CONF_TYPE, default=None): vol.Any(sensor_type_validator, None), + vol.Optional(CONF_TYPE, default=None): vol.Any(dpt_base_type_validator, None), **TELEGRAM_TRIGGER_SCHEMA, } ) @@ -99,7 +99,7 @@ async def async_attach_trigger( ): decoded_payload = decode_telegram_payload( payload=telegram.payload.value, # type: ignore[union-attr] # checked via payload_apci - transcoder=trigger_transcoder, # type: ignore[type-abstract] # parse_transcoder don't return abstract classes + transcoder=trigger_transcoder, ) # overwrite decoded payload values in telegram_dict telegram_trigger_data = {**trigger_data, **telegram_dict, **decoded_payload} diff --git a/homeassistant/components/knx/validation.py b/homeassistant/components/knx/validation.py index 9ed4f32c920..422b8474fd9 100644 --- a/homeassistant/components/knx/validation.py +++ b/homeassistant/components/knx/validation.py @@ -30,9 +30,10 @@ def dpt_subclass_validator(dpt_base_class: type[DPTBase]) -> Callable[[Any], str return dpt_value_validator +dpt_base_type_validator = dpt_subclass_validator(DPTBase) # type: ignore[type-abstract] numeric_type_validator = dpt_subclass_validator(DPTNumeric) # type: ignore[type-abstract] -sensor_type_validator = dpt_subclass_validator(DPTBase) # type: ignore[type-abstract] string_type_validator = dpt_subclass_validator(DPTString) +sensor_type_validator = vol.Any(numeric_type_validator, string_type_validator) def ga_validator(value: Any) -> str | int: diff --git a/requirements_all.txt b/requirements_all.txt index 8aebf250fd4..2473bdf14b0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2927,7 +2927,7 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==2.12.2 +xknx==3.0.0 # homeassistant.components.knx xknxproject==3.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 859154b2837..e5f8ec1bc43 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2310,7 +2310,7 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==2.12.2 +xknx==3.0.0 # homeassistant.components.knx xknxproject==3.7.1 diff --git a/tests/components/knx/conftest.py b/tests/components/knx/conftest.py index 76f1b6f3ebc..19f2bc4d845 100644 --- a/tests/components/knx/conftest.py +++ b/tests/components/knx/conftest.py @@ -83,7 +83,7 @@ class KNXTestKit: self.xknx.rate_limit = 0 # set XknxConnectionState.CONNECTED to avoid `unavailable` entities at startup # and start StateUpdater. This would be awaited on normal startup too. 
- await self.xknx.connection_manager.connection_state_changed( + self.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_TCP, ) @@ -93,6 +93,7 @@ class KNXTestKit: mock = Mock() mock.start = AsyncMock(side_effect=patch_xknx_start) mock.stop = AsyncMock() + mock.gateway_info = AsyncMock() return mock def fish_xknx(*args, **kwargs): @@ -151,8 +152,6 @@ class KNXTestKit: ) -> None: """Assert outgoing telegram. One by one in timely order.""" await self.xknx.telegrams.join() - await self.hass.async_block_till_done() - await self.hass.async_block_till_done() try: telegram = self._outgoing_telegrams.get_nowait() except asyncio.QueueEmpty as err: @@ -247,6 +246,7 @@ class KNXTestKit: GroupValueResponse(payload_value), source=source, ) + await asyncio.sleep(0) # advance loop to allow StateUpdater to process async def receive_write( self, diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index b9216aa149a..1b304293a86 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -123,25 +123,21 @@ async def test_binary_sensor_ignore_internal_state( # receive initial ON telegram await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() assert len(events) == 2 # receive second ON telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", True) await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() assert len(events) == 3 # receive first OFF telegram await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) - await hass.async_block_till_done() assert len(events) == 5 # receive second OFF telegram - ignore_internal_state shall force state_changed event await knx.receive_write("1/1/1", False) await knx.receive_write("2/2/2", False) - await hass.async_block_till_done() assert len(events) == 6 @@ -166,21 +162,17 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No # receive initial ON telegram await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() # no change yet - still in 1 sec context (additional async_block_till_done needed for time change) assert len(events) == 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF assert state.attributes.get("counter") == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) - await hass.async_block_till_done() await knx.xknx.task_registry.block_till_done() # state changed twice after context timeout - once to ON with counter 1 and once to counter 0 state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - # additional async_block_till_done needed event capture - await hass.async_block_till_done() assert len(events) == 2 event = events.pop(0).data assert event.get("new_state").attributes.get("counter") == 1 @@ -198,7 +190,6 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No assert state.attributes.get("counter") == 0 async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) await knx.xknx.task_registry.block_till_done() - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 @@ -230,12 +221,10 @@ async def 
test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None # receive ON telegram await knx.receive_write("2/2/2", True) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1)) await hass.async_block_till_done() - await hass.async_block_till_done() # state reset after after timeout state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF @@ -265,7 +254,6 @@ async def test_binary_sensor_restore_and_respond(hass: HomeAssistant, knx) -> No await knx.assert_telegram_count(0) await knx.receive_write(_ADDRESS, False) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF @@ -296,6 +284,5 @@ async def test_binary_sensor_restore_invert(hass: HomeAssistant, knx) -> None: # inverted is on, make sure the state is off after it await knx.receive_write(_ADDRESS, True) - await hass.async_block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 9c431386b43..77eeeef3559 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -80,12 +80,6 @@ async def test_climate_on_off( ) } ) - - await hass.async_block_till_done() - # read heat/cool state - if heat_cool_ga: - await knx.assert_read("1/2/11") - await knx.receive_response("1/2/11", 0) # cool # read temperature state await knx.assert_read("1/2/3") await knx.receive_response("1/2/3", RAW_FLOAT_20_0) @@ -95,6 +89,10 @@ async def test_climate_on_off( # read on/off state await knx.assert_read("1/2/9") await knx.receive_response("1/2/9", 1) + # read heat/cool state + if heat_cool_ga: + await knx.assert_read("1/2/11") + await knx.receive_response("1/2/11", 0) # cool # turn off await hass.services.async_call( @@ -171,18 +169,15 @@ async def test_climate_hvac_mode( ) } ) - - await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/7") - await knx.assert_read("1/2/3") - # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_20_0) # StateUpdater semaphore allows 2 concurrent requests - # read target temperature state + await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # turn hvac mode to off - set_hvac_mode() doesn't send to on_off if dedicated hvac mode is available await hass.services.async_call( @@ -254,17 +249,14 @@ async def test_climate_preset_mode( ) events = async_capture_events(hass, "state_changed") - await hass.async_block_till_done() - # read states state updater - await knx.assert_read("1/2/7") - await knx.assert_read("1/2/3") # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) # StateUpdater semaphore allows 2 concurrent requests - # read target temperature state + await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) events.clear() # set preset mode @@ -294,8 
+286,6 @@ async def test_climate_preset_mode( assert len(knx.xknx.devices[1].device_updated_cbs) == 2 # test removing also removes hooks entity_registry.async_remove("climate.test") - await hass.async_block_till_done() - # If we remove the entity the underlying devices should disappear too assert len(knx.xknx.devices) == 0 @@ -315,18 +305,15 @@ async def test_update_entity(hass: HomeAssistant, knx: KNXTestKit) -> None: } ) assert await async_setup_component(hass, "homeassistant", {}) - await hass.async_block_till_done() - await hass.async_block_till_done() # read states state updater - await knx.assert_read("1/2/7") await knx.assert_read("1/2/3") - # StateUpdater initialize state - await knx.receive_response("1/2/7", (0x01,)) - await knx.receive_response("1/2/3", RAW_FLOAT_21_0) - # StateUpdater semaphore allows 2 concurrent requests await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) + await knx.assert_read("1/2/7") + await knx.receive_response("1/2/7", (0x01,)) # verify update entity retriggers group value reads to the bus await hass.services.async_call( @@ -354,8 +341,6 @@ async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> } } ) - - await hass.async_block_till_done() # read states state updater await knx.assert_read("1/2/3") await knx.assert_read("1/2/5") diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 3dad9320e21..a7da2d26600 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -184,7 +184,6 @@ async def test_routing_setup( CONF_KNX_INDIVIDUAL_ADDRESS: "1.1.110", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -259,7 +258,6 @@ async def test_routing_setup_advanced( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == "Routing as 1.1.110" assert result3["data"] == { @@ -350,7 +348,6 @@ async def test_routing_secure_manual_setup( CONF_KNX_ROUTING_SYNC_LATENCY_TOLERANCE: 2000, }, ) - await hass.async_block_till_done() assert secure_routing_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_routing_manual["title"] == "Secure Routing as 0.0.123" assert secure_routing_manual["data"] == { @@ -419,7 +416,6 @@ async def test_routing_secure_keyfile( CONF_KNX_KNXKEY_PASSWORD: "password", }, ) - await hass.async_block_till_done() assert routing_secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert routing_secure_knxkeys["title"] == "Secure Routing as 0.0.123" assert routing_secure_knxkeys["data"] == { @@ -552,7 +548,6 @@ async def test_tunneling_setup_manual( result2["flow_id"], user_input, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["title"] == title assert result3["data"] == config_entry_data @@ -681,7 +676,6 @@ async def test_tunneling_setup_manual_request_description_error( CONF_PORT: 3671, }, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Tunneling TCP @ 192.168.0.1" assert result["data"] == { @@ -772,7 +766,6 @@ async def test_tunneling_setup_for_local_ip( CONF_KNX_LOCAL_IP: "192.168.1.112", }, ) - await hass.async_block_till_done() assert result3["type"] is 
FlowResultType.CREATE_ENTRY assert result3["title"] == "Tunneling UDP @ 192.168.0.2" assert result3["data"] == { @@ -821,7 +814,6 @@ async def test_tunneling_setup_for_multiple_found_gateways( tunnel_flow["flow_id"], {CONF_KNX_GATEWAY: str(gateway)}, ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["data"] == { **DEFAULT_ENTRY_DATA, @@ -905,7 +897,6 @@ async def test_form_with_automatic_connection_handling( CONF_KNX_CONNECTION_TYPE: CONF_KNX_AUTOMATIC, }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == CONF_KNX_AUTOMATIC.capitalize() assert result2["data"] == { @@ -1040,7 +1031,6 @@ async def test_configure_secure_tunnel_manual(hass: HomeAssistant, knx_setup) -> CONF_KNX_SECURE_DEVICE_AUTHENTICATION: "device_auth", }, ) - await hass.async_block_till_done() assert secure_tunnel_manual["type"] is FlowResultType.CREATE_ENTRY assert secure_tunnel_manual["data"] == { **DEFAULT_ENTRY_DATA, @@ -1086,7 +1076,6 @@ async def test_configure_secure_knxkeys(hass: HomeAssistant, knx_setup) -> None: {CONF_KNX_TUNNEL_ENDPOINT_IA: CONF_KNX_AUTOMATIC}, ) - await hass.async_block_till_done() assert secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert secure_knxkeys["data"] == { **DEFAULT_ENTRY_DATA, @@ -1201,7 +1190,6 @@ async def test_options_flow_connection_type( CONF_KNX_GATEWAY: str(gateway), }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3["data"] assert mock_config_entry.data == { @@ -1307,7 +1295,6 @@ async def test_options_flow_secure_manual_to_keyfile( {CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1"}, ) - await hass.async_block_till_done() assert secure_knxkeys["type"] is FlowResultType.CREATE_ENTRY assert mock_config_entry.data == { **DEFAULT_ENTRY_DATA, @@ -1352,7 +1339,6 @@ async def test_options_communication_settings( CONF_KNX_TELEGRAM_LOG_SIZE: 3000, }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1405,7 +1391,6 @@ async def test_options_update_keyfile(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_KNXKEY_PASSWORD: "password", }, ) - await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert not result2.get("data") assert mock_config_entry.data == { @@ -1463,7 +1448,6 @@ async def test_options_keyfile_upload(hass: HomeAssistant, knx_setup) -> None: CONF_KNX_TUNNEL_ENDPOINT_IA: "1.0.1", }, ) - await hass.async_block_till_done() assert result3["type"] is FlowResultType.CREATE_ENTRY assert not result3.get("data") assert mock_config_entry.data == { diff --git a/tests/components/knx/test_datetime.py b/tests/components/knx/test_datetime.py index c8c6bd4f346..4b66769a8a3 100644 --- a/tests/components/knx/test_datetime.py +++ b/tests/components/knx/test_datetime.py @@ -34,7 +34,8 @@ async def test_datetime(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.assert_write( test_address, - (0x78, 0x01, 0x01, 0x73, 0x04, 0x05, 0x20, 0x80), + # service call in UTC, telegram in local time + (0x78, 0x01, 0x01, 0x13, 0x04, 0x05, 0x24, 0x00), ) state = hass.states.get("datetime.test") assert state.state == "2020-01-02T03:04:05+00:00" @@ -74,7 +75,7 @@ async def test_date_restore_and_respond(hass: HomeAssistant, knx: KNXTestKit) -> await knx.receive_read(test_address) await knx.assert_response( test_address, - (0x7A, 0x03, 0x03, 0x84, 0x04, 0x05, 0x20, 0x80), 
+ (0x7A, 0x03, 0x03, 0x04, 0x04, 0x05, 0x24, 0x00), ) # don't respond to passive address diff --git a/tests/components/knx/test_device_trigger.py b/tests/components/knx/test_device_trigger.py index 9b49df080f5..e5f776a9404 100644 --- a/tests/components/knx/test_device_trigger.py +++ b/tests/components/knx/test_device_trigger.py @@ -391,7 +391,6 @@ async def test_invalid_device_trigger( ] }, ) - await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. Got None" diff --git a/tests/components/knx/test_events.py b/tests/components/knx/test_events.py index ddb9d50240c..2228781ba89 100644 --- a/tests/components/knx/test_events.py +++ b/tests/components/knx/test_events.py @@ -31,7 +31,6 @@ async def test_knx_event( events = async_capture_events(hass, "knx_event") async def test_event_data(address, payload, value=None): - await hass.async_block_till_done() assert len(events) == 1 event = events.pop() assert event.data["data"] == payload @@ -69,7 +68,6 @@ async def test_knx_event( ) # no event received - await hass.async_block_till_done() assert len(events) == 0 # receive telegrams for group addresses matching the filter @@ -101,7 +99,6 @@ async def test_knx_event( await knx.receive_write("0/5/0", True) await knx.receive_write("1/7/0", True) await knx.receive_write("2/6/6", True) - await hass.async_block_till_done() assert len(events) == 0 # receive telegrams with wrong payload length diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index e0b4c78e322..96b00241ab6 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -1,9 +1,8 @@ """Test KNX expose.""" from datetime import timedelta -import time -from unittest.mock import patch +from freezegun import freeze_time import pytest from homeassistant.components.knx import CONF_KNX_EXPOSE, DOMAIN, KNX_ADDRESS @@ -327,25 +326,32 @@ async def test_expose_conversion_exception( ) -@patch("time.localtime") +@freeze_time("2022-1-7 9:13:14") +@pytest.mark.parametrize( + ("time_type", "raw"), + [ + ("time", (0xA9, 0x0D, 0x0E)), # localtime includes day of week + ("date", (0x07, 0x01, 0x16)), + ("datetime", (0x7A, 0x1, 0x7, 0xA9, 0xD, 0xE, 0x20, 0xC0)), + ], +) async def test_expose_with_date( - localtime, hass: HomeAssistant, knx: KNXTestKit + hass: HomeAssistant, knx: KNXTestKit, time_type: str, raw: tuple[int, ...] 
) -> None: """Test an expose with a date.""" - localtime.return_value = time.struct_time([2022, 1, 7, 9, 13, 14, 6, 0, 0]) await knx.setup_integration( { CONF_KNX_EXPOSE: { - CONF_TYPE: "datetime", + CONF_TYPE: time_type, KNX_ADDRESS: "1/1/8", } } ) - await knx.assert_write("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) + await knx.assert_write("1/1/8", raw) await knx.receive_read("1/1/8") - await knx.assert_response("1/1/8", (0x7A, 0x1, 0x7, 0xE9, 0xD, 0xE, 0x20, 0x80)) + await knx.assert_response("1/1/8", raw) entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 diff --git a/tests/components/knx/test_init.py b/tests/components/knx/test_init.py index a317a6a298c..48cc46ef1ee 100644 --- a/tests/components/knx/test_init.py +++ b/tests/components/knx/test_init.py @@ -284,7 +284,6 @@ async def test_async_remove_entry( assert await hass.config_entries.async_remove(config_entry.entry_id) assert unlink_mock.call_count == 3 rmdir_mock.assert_called_once() - await hass.async_block_till_done() assert hass.config_entries.async_entries() == [] assert config_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/knx/test_interface_device.py b/tests/components/knx/test_interface_device.py index c21c25b6fad..8010496ef0d 100644 --- a/tests/components/knx/test_interface_device.py +++ b/tests/components/knx/test_interface_device.py @@ -66,25 +66,19 @@ async def test_diagnostic_entities( ): assert hass.states.get(entity_id).state == test_state - await knx.xknx.connection_manager.connection_state_changed( + knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.DISCONNECTED ) await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 4 # 3 not always_available + 3 force_update - 2 disabled events.clear() knx.xknx.current_address = IndividualAddress("1.1.1") - await knx.xknx.connection_manager.connection_state_changed( + knx.xknx.connection_manager.connection_state_changed( state=XknxConnectionState.CONNECTED, connection_type=XknxConnectionType.TUNNEL_UDP, ) await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() - await hass.async_block_till_done() assert len(events) == 6 # all diagnostic sensors - counters are reset on connect for entity_id, test_state in ( @@ -111,7 +105,6 @@ async def test_removed_entity( "sensor.knx_interface_connection_established", disabled_by=er.RegistryEntryDisabler.USER, ) - await hass.async_block_till_done() unregister_mock.assert_called_once() diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index 0c7a37979a8..8c966a77a0b 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -92,9 +92,7 @@ async def test_light_brightness(hass: HomeAssistant, knx: KNXTestKit) -> None: ) # StateUpdater initialize state await knx.assert_read(test_brightness_state) - await knx.xknx.connection_manager.connection_state_changed( - XknxConnectionState.CONNECTED - ) + knx.xknx.connection_manager.connection_state_changed(XknxConnectionState.CONNECTED) # turn on light via brightness await hass.services.async_call( "light", diff --git a/tests/components/knx/test_notify.py b/tests/components/knx/test_notify.py index 94f2d579fc8..b481675140b 100644 --- a/tests/components/knx/test_notify.py +++ b/tests/components/knx/test_notify.py @@ -21,17 +21,13 @@ async def test_legacy_notify_service_simple( } } ) - 
await hass.async_block_till_done() - await hass.services.async_call( "notify", "notify", {"target": "test", "message": "I love KNX"}, blocking=True ) - await knx.assert_write( "1/0/0", (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 0, 0, 0, 0), ) - await hass.services.async_call( "notify", "notify", @@ -41,7 +37,6 @@ async def test_legacy_notify_service_simple( }, blocking=True, ) - await knx.assert_write( "1/0/0", (73, 32, 108, 111, 118, 101, 32, 75, 78, 88, 44, 32, 98, 117), @@ -68,12 +63,9 @@ async def test_legacy_notify_service_multiple_sends_to_all_with_different_encodi ] } ) - await hass.async_block_till_done() - await hass.services.async_call( "notify", "notify", {"message": "Gänsefüßchen"}, blocking=True ) - await knx.assert_write( "1/0/0", # "G?nsef??chen" @@ -95,7 +87,6 @@ async def test_notify_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: } } ) - await hass.services.async_call( notify.DOMAIN, notify.SERVICE_SEND_MESSAGE, diff --git a/tests/components/knx/test_sensor.py b/tests/components/knx/test_sensor.py index 22d9993b58f..41ffcfcb5c7 100644 --- a/tests/components/knx/test_sensor.py +++ b/tests/components/knx/test_sensor.py @@ -68,25 +68,21 @@ async def test_always_callback(hass: HomeAssistant, knx: KNXTestKit) -> None: # receive initial telegram await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) - await hass.async_block_till_done() assert len(events) == 2 # receive second telegram with identical payload # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0x42,)) await knx.receive_write("2/2/2", (0x42,)) - await hass.async_block_till_done() assert len(events) == 3 # receive telegram with different payload await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) - await hass.async_block_till_done() assert len(events) == 5 # receive telegram with second payload again # always_callback shall force state_changed event await knx.receive_write("1/1/1", (0xFA,)) await knx.receive_write("2/2/2", (0xFA,)) - await hass.async_block_till_done() assert len(events) == 6 diff --git a/tests/components/knx/test_services.py b/tests/components/knx/test_services.py index 7f748af5ceb..f70389dbc92 100644 --- a/tests/components/knx/test_services.py +++ b/tests/components/knx/test_services.py @@ -154,7 +154,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: # no event registered await knx.receive_write(test_address, True) - await hass.async_block_till_done() assert len(events) == 0 # register event with `type` @@ -165,7 +164,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, (0x04, 0xD2)) - await hass.async_block_till_done() assert len(events) == 1 typed_event = events.pop() assert typed_event.data["data"] == (0x04, 0xD2) @@ -179,7 +177,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: blocking=True, ) await knx.receive_write(test_address, True) - await hass.async_block_till_done() assert len(events) == 0 # register event without `type` @@ -188,7 +185,6 @@ async def test_event_register(hass: HomeAssistant, knx: KNXTestKit) -> None: ) await knx.receive_write(test_address, True) await knx.receive_write(test_address, False) - await hass.async_block_till_done() assert len(events) == 2 untyped_event_2 = events.pop() assert untyped_event_2.data["data"] is False diff --git a/tests/components/knx/test_trigger.py b/tests/components/knx/test_trigger.py index 
4565122aba6..73e8b10840e 100644 --- a/tests/components/knx/test_trigger.py +++ b/tests/components/knx/test_trigger.py @@ -334,7 +334,6 @@ async def test_invalid_trigger( ] }, ) - await hass.async_block_till_done() assert ( "Unnamed automation failed to setup triggers and has been disabled: " "extra keys not allowed @ data['invalid']. Got None" diff --git a/tests/components/knx/test_weather.py b/tests/components/knx/test_weather.py index 0adcc309252..5ba38d6cdf8 100644 --- a/tests/components/knx/test_weather.py +++ b/tests/components/knx/test_weather.py @@ -45,12 +45,12 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # brightness await knx.assert_read("1/1/6") - await knx.receive_response("1/1/6", (0x7C, 0x5E)) await knx.assert_read("1/1/8") + await knx.receive_response("1/1/6", (0x7C, 0x5E)) await knx.receive_response("1/1/8", (0x7C, 0x5E)) + await knx.assert_read("1/1/5") await knx.assert_read("1/1/7") await knx.receive_response("1/1/7", (0x7C, 0x5E)) - await knx.assert_read("1/1/5") await knx.receive_response("1/1/5", (0x7C, 0x5E)) # wind speed @@ -64,10 +64,10 @@ async def test_weather(hass: HomeAssistant, knx: KNXTestKit) -> None: # alarms await knx.assert_read("1/1/2") await knx.receive_response("1/1/2", False) - await knx.assert_read("1/1/3") - await knx.receive_response("1/1/3", False) await knx.assert_read("1/1/1") + await knx.assert_read("1/1/3") await knx.receive_response("1/1/1", False) + await knx.receive_response("1/1/3", False) # day night await knx.assert_read("1/1/12") From 35bfd0b88fe3c17b5810dd96fb3f50b6237df869 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Wed, 31 Jul 2024 08:35:21 +0100 Subject: [PATCH 0168/1635] Evohome drops use of async_call_later to avoid lingering task (#122879) initial commit --- homeassistant/components/evohome/coordinator.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/homeassistant/components/evohome/coordinator.py b/homeassistant/components/evohome/coordinator.py index b83d2d20c6a..943bd6605b4 100644 --- a/homeassistant/components/evohome/coordinator.py +++ b/homeassistant/components/evohome/coordinator.py @@ -19,7 +19,6 @@ from evohomeasync2.schema.const import ( ) from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.event import async_call_later from .const import CONF_LOCATION_IDX, DOMAIN, GWS, TCS, UTC_OFFSET from .helpers import handle_evo_exception @@ -107,7 +106,7 @@ class EvoBroker: return None if update_state: # wait a moment for system to quiesce before updating state - async_call_later(self.hass, 1, self._update_v2_api_state) + await self.hass.data[DOMAIN]["coordinator"].async_request_refresh() return result From 5a04d982d92b9dfba4c786dc5bf6dc07d5f4394b Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Wed, 31 Jul 2024 10:35:45 +0300 Subject: [PATCH 0169/1635] Bump ollama to 0.3.1 (#122866) --- homeassistant/components/ollama/conversation.py | 2 +- homeassistant/components/ollama/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index ae0acef1077..ac367a5cf6a 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -271,7 +271,7 @@ class OllamaConversationEntity( _LOGGER.debug("Tool response: %s", tool_response) message_history.messages.append( ollama.Message( - role=MessageRole.TOOL.value, 
# type: ignore[typeddict-item] + role=MessageRole.TOOL.value, content=json.dumps(tool_response), ) ) diff --git a/homeassistant/components/ollama/manifest.json b/homeassistant/components/ollama/manifest.json index 4d4321b8e3d..64224eb06fb 100644 --- a/homeassistant/components/ollama/manifest.json +++ b/homeassistant/components/ollama/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/ollama", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["ollama==0.3.0"] + "requirements": ["ollama==0.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2473bdf14b0..1abd6ec5be2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1466,7 +1466,7 @@ odp-amsterdam==6.0.2 oemthermostat==1.1.1 # homeassistant.components.ollama -ollama==0.3.0 +ollama==0.3.1 # homeassistant.components.omnilogic omnilogic==0.4.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e5f8ec1bc43..463cbae4cdf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1205,7 +1205,7 @@ objgraph==3.5.0 odp-amsterdam==6.0.2 # homeassistant.components.ollama -ollama==0.3.0 +ollama==0.3.1 # homeassistant.components.omnilogic omnilogic==0.4.5 From beb2ef121ef3d682fa04c5a0750a5897ce410196 Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Wed, 31 Jul 2024 10:37:55 +0300 Subject: [PATCH 0170/1635] Update todo intent slot schema (#122335) * Update todo intent slot schema * Update intent.py * ruff --- homeassistant/components/todo/intent.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/todo/intent.py b/homeassistant/components/todo/intent.py index 50afe916b27..cd8ad7f02ab 100644 --- a/homeassistant/components/todo/intent.py +++ b/homeassistant/components/todo/intent.py @@ -2,6 +2,8 @@ from __future__ import annotations +import voluptuous as vol + from homeassistant.core import HomeAssistant from homeassistant.helpers import intent from homeassistant.helpers.entity_component import EntityComponent @@ -21,7 +23,10 @@ class ListAddItemIntent(intent.IntentHandler): intent_type = INTENT_LIST_ADD_ITEM description = "Add item to a todo list" - slot_schema = {"item": intent.non_empty_string, "name": intent.non_empty_string} + slot_schema = { + vol.Required("item"): intent.non_empty_string, + vol.Required("name"): intent.non_empty_string, + } platforms = {DOMAIN} async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse: From 7f4dabf546f93efa3271152c0f300a824ea06fa0 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 31 Jul 2024 02:42:45 -0500 Subject: [PATCH 0171/1635] Switch from WebRTC to microVAD (#122861) * Switch WebRTC to microVAD * Remove webrtc-noise-gain from licenses --- .../assist_pipeline/audio_enhancer.py | 82 +++++++++ .../components/assist_pipeline/const.py | 5 + .../components/assist_pipeline/manifest.json | 2 +- .../components/assist_pipeline/pipeline.py | 164 +++++++++--------- .../components/assist_pipeline/vad.py | 66 ++----- .../assist_pipeline/websocket_api.py | 20 ++- homeassistant/components/voip/voip.py | 38 ++-- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 6 +- requirements_test_all.txt | 6 +- script/licenses.py | 1 - tests/components/assist_pipeline/test_init.py | 43 ++--- tests/components/assist_pipeline/test_vad.py | 116 +++---------- .../assist_pipeline/test_websocket.py | 61 ++----- tests/components/voip/test_voip.py | 55 +++--- 15 files changed, 320 insertions(+), 347 deletions(-) 
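The new audio_enhancer module created below wraps the pymicro-vad package. As a rough, illustrative sketch (not part of the patch) of the Process10ms API it relies on — the detect_speech helper and its name are assumptions, while the 10 ms / 160-sample chunking and the 0.5 threshold mirror the code added in this commit:

from pymicro_vad import MicroVad

def detect_speech(chunks: list[bytes], threshold: float = 0.5) -> list[bool]:
    """Flag each 10 ms chunk of 16 kHz, 16-bit mono PCM as speech or silence."""
    vad = MicroVad()
    flags: list[bool] = []
    for chunk in chunks:  # each chunk: 160 samples * 2 bytes = 320 bytes
        speech_prob = vad.Process10ms(chunk)  # speech score for this chunk
        flags.append(speech_prob > threshold)
    return flags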
create mode 100644 homeassistant/components/assist_pipeline/audio_enhancer.py diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py new file mode 100644 index 00000000000..e7a149bd00e --- /dev/null +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -0,0 +1,82 @@ +"""Audio enhancement for Assist.""" + +from abc import ABC, abstractmethod +from dataclasses import dataclass +import logging + +from pymicro_vad import MicroVad + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, slots=True) +class EnhancedAudioChunk: + """Enhanced audio chunk and metadata.""" + + audio: bytes + """Raw PCM audio @ 16Khz with 16-bit mono samples""" + + timestamp_ms: int + """Timestamp relative to start of audio stream (milliseconds)""" + + is_speech: bool | None + """True if audio chunk likely contains speech, False if not, None if unknown""" + + +class AudioEnhancer(ABC): + """Base class for audio enhancement.""" + + def __init__( + self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool + ) -> None: + """Initialize audio enhancer.""" + self.auto_gain = auto_gain + self.noise_suppression = noise_suppression + self.is_vad_enabled = is_vad_enabled + + @abstractmethod + def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: + """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" + + @property + @abstractmethod + def samples_per_chunk(self) -> int | None: + """Return number of samples per chunk or None if chunking isn't required.""" + + +class MicroVadEnhancer(AudioEnhancer): + """Audio enhancer that just runs microVAD.""" + + def __init__( + self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool + ) -> None: + """Initialize audio enhancer.""" + super().__init__(auto_gain, noise_suppression, is_vad_enabled) + + self.vad: MicroVad | None = None + self.threshold = 0.5 + + if self.is_vad_enabled: + self.vad = MicroVad() + _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold) + + def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: + """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" + is_speech: bool | None = None + + if self.vad is not None: + # Run VAD + speech_prob = self.vad.Process10ms(audio) + is_speech = speech_prob > self.threshold + + return EnhancedAudioChunk( + audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech + ) + + @property + def samples_per_chunk(self) -> int | None: + """Return number of samples per chunk or None if chunking isn't required.""" + if self.is_vad_enabled: + return 160 # 10ms + + return None diff --git a/homeassistant/components/assist_pipeline/const.py b/homeassistant/components/assist_pipeline/const.py index 36b72dad69c..14b93a90372 100644 --- a/homeassistant/components/assist_pipeline/const.py +++ b/homeassistant/components/assist_pipeline/const.py @@ -15,3 +15,8 @@ DATA_LAST_WAKE_UP = f"{DOMAIN}.last_wake_up" WAKE_WORD_COOLDOWN = 2 # seconds EVENT_RECORDING = f"{DOMAIN}_recording" + +SAMPLE_RATE = 16000 # hertz +SAMPLE_WIDTH = 2 # bytes +SAMPLE_CHANNELS = 1 # mono +SAMPLES_PER_CHUNK = 240 # 20 ms @ 16Khz diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index 31b3b0d4e32..b22ce72b1eb 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -6,5 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/assist_pipeline", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["webrtc-noise-gain==1.2.3"] + "requirements": ["pymicro-vad==1.0.0"] } diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index ecf361cb67c..845950caf8d 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -13,14 +13,11 @@ from pathlib import Path from queue import Empty, Queue from threading import Thread import time -from typing import TYPE_CHECKING, Any, Final, Literal, cast +from typing import Any, Literal, cast import wave import voluptuous as vol -if TYPE_CHECKING: - from webrtc_noise_gain import AudioProcessor - from homeassistant.components import ( conversation, media_source, @@ -52,12 +49,17 @@ from homeassistant.util import ( ) from homeassistant.util.limited_size_dict import LimitedSizeDict +from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer from .const import ( CONF_DEBUG_RECORDING_DIR, DATA_CONFIG, DATA_LAST_WAKE_UP, DATA_MIGRATIONS, DOMAIN, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, + SAMPLES_PER_CHUNK, WAKE_WORD_COOLDOWN, ) from .error import ( @@ -111,9 +113,6 @@ STORED_PIPELINE_RUNS = 10 SAVE_DELAY = 10 -AUDIO_PROCESSOR_SAMPLES: Final = 160 # 10 ms @ 16 Khz -AUDIO_PROCESSOR_BYTES: Final = AUDIO_PROCESSOR_SAMPLES * 2 # 16-bit samples - @callback def _async_resolve_default_pipeline_settings( @@ -503,8 +502,8 @@ class AudioSettings: is_vad_enabled: bool = True """True if VAD is used to determine the end of the voice command.""" - is_chunking_enabled: bool = True - """True if audio is automatically split into 10 ms chunks (required for VAD, etc.)""" + samples_per_chunk: int | None = None + """Number of samples that will be in each audio chunk (None for no chunking).""" def __post_init__(self) -> None: """Verify settings post-initialization.""" @@ -514,9 +513,6 @@ class AudioSettings: if (self.auto_gain_dbfs < 0) or (self.auto_gain_dbfs > 31): raise ValueError("auto_gain_dbfs must be in [0, 31]") - if self.needs_processor and (not self.is_chunking_enabled): - raise ValueError("Chunking must be enabled for audio processing") - @property def needs_processor(self) -> bool: """True if an audio processor is needed.""" @@ -526,19 +522,10 @@ class AudioSettings: or (self.auto_gain_dbfs > 0) ) - -@dataclass(frozen=True, slots=True) -class ProcessedAudioChunk: - """Processed audio chunk and metadata.""" - - audio: bytes - """Raw PCM audio @ 16Khz with 16-bit mono samples""" - - timestamp_ms: int - """Timestamp relative to start of audio stream (milliseconds)""" - - is_speech: bool | None - """True if audio chunk likely contains speech, False if not, None if unknown""" + @property + def is_chunking_enabled(self) -> bool: + """True if chunk size is set.""" + return self.samples_per_chunk is not None @dataclass @@ -573,10 +560,10 @@ class PipelineRun: debug_recording_queue: Queue[str | bytes | None] | None = None """Queue to communicate with debug recording thread""" - audio_processor: AudioProcessor | None = None + audio_enhancer: AudioEnhancer | None = None """VAD/noise suppression/auto gain""" - audio_processor_buffer: AudioBuffer = field(init=False, repr=False) + audio_chunking_buffer: AudioBuffer | None = None """Buffer used when splitting audio into chunks for audio processing""" _device_id: str | None = None @@ -601,19 +588,16 @@ class PipelineRun: 
pipeline_data.pipeline_runs.add_run(self) # Initialize with audio settings - self.audio_processor_buffer = AudioBuffer(AUDIO_PROCESSOR_BYTES) - if self.audio_settings.needs_processor: - # Delay import of webrtc so HA start up is not crashing - # on older architectures (armhf). - # - # pylint: disable=import-outside-toplevel - from webrtc_noise_gain import AudioProcessor - - self.audio_processor = AudioProcessor( + if self.audio_settings.needs_processor and (self.audio_enhancer is None): + # Default audio enhancer + self.audio_enhancer = MicroVadEnhancer( self.audio_settings.auto_gain_dbfs, self.audio_settings.noise_suppression_level, + self.audio_settings.is_vad_enabled, ) + self.audio_chunking_buffer = AudioBuffer(self.samples_per_chunk * SAMPLE_WIDTH) + def __eq__(self, other: object) -> bool: """Compare pipeline runs by id.""" if isinstance(other, PipelineRun): @@ -621,6 +605,14 @@ class PipelineRun: return False + @property + def samples_per_chunk(self) -> int: + """Return number of samples expected in each audio chunk.""" + if self.audio_enhancer is not None: + return self.audio_enhancer.samples_per_chunk or SAMPLES_PER_CHUNK + + return self.audio_settings.samples_per_chunk or SAMPLES_PER_CHUNK + @callback def process_event(self, event: PipelineEvent) -> None: """Log an event and call listener.""" @@ -688,8 +680,8 @@ class PipelineRun: async def wake_word_detection( self, - stream: AsyncIterable[ProcessedAudioChunk], - audio_chunks_for_stt: list[ProcessedAudioChunk], + stream: AsyncIterable[EnhancedAudioChunk], + audio_chunks_for_stt: list[EnhancedAudioChunk], ) -> wake_word.DetectionResult | None: """Run wake-word-detection portion of pipeline. Returns detection result.""" metadata_dict = asdict( @@ -732,10 +724,11 @@ class PipelineRun: # Audio chunk buffer. This audio will be forwarded to speech-to-text # after wake-word-detection. num_audio_chunks_to_buffer = int( - (wake_word_settings.audio_seconds_to_buffer * 16000) - / AUDIO_PROCESSOR_SAMPLES + (wake_word_settings.audio_seconds_to_buffer * SAMPLE_RATE) + / self.samples_per_chunk ) - stt_audio_buffer: deque[ProcessedAudioChunk] | None = None + + stt_audio_buffer: deque[EnhancedAudioChunk] | None = None if num_audio_chunks_to_buffer > 0: stt_audio_buffer = deque(maxlen=num_audio_chunks_to_buffer) @@ -797,7 +790,7 @@ class PipelineRun: # speech-to-text so the user does not have to pause before # speaking the voice command. audio_chunks_for_stt.extend( - ProcessedAudioChunk( + EnhancedAudioChunk( audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False ) for chunk_ts in result.queued_audio @@ -819,18 +812,17 @@ class PipelineRun: async def _wake_word_audio_stream( self, - audio_stream: AsyncIterable[ProcessedAudioChunk], - stt_audio_buffer: deque[ProcessedAudioChunk] | None, + audio_stream: AsyncIterable[EnhancedAudioChunk], + stt_audio_buffer: deque[EnhancedAudioChunk] | None, wake_word_vad: VoiceActivityTimeout | None, - sample_rate: int = 16000, - sample_width: int = 2, + sample_rate: int = SAMPLE_RATE, + sample_width: int = SAMPLE_WIDTH, ) -> AsyncIterable[tuple[bytes, int]]: """Yield audio chunks with timestamps (milliseconds since start of stream). Adds audio to a ring buffer that will be forwarded to speech-to-text after detection. Times out if VAD detects enough silence. 
""" - chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate async for chunk in audio_stream: if self.abort_wake_word_detection: raise WakeWordDetectionAborted @@ -845,6 +837,7 @@ class PipelineRun: stt_audio_buffer.append(chunk) if wake_word_vad is not None: + chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate if not wake_word_vad.process(chunk_seconds, chunk.is_speech): raise WakeWordTimeoutError( code="wake-word-timeout", message="Wake word was not detected" @@ -881,7 +874,7 @@ class PipelineRun: async def speech_to_text( self, metadata: stt.SpeechMetadata, - stream: AsyncIterable[ProcessedAudioChunk], + stream: AsyncIterable[EnhancedAudioChunk], ) -> str: """Run speech-to-text portion of pipeline. Returns the spoken text.""" # Create a background task to prepare the conversation agent @@ -957,18 +950,18 @@ class PipelineRun: async def _speech_to_text_stream( self, - audio_stream: AsyncIterable[ProcessedAudioChunk], + audio_stream: AsyncIterable[EnhancedAudioChunk], stt_vad: VoiceCommandSegmenter | None, - sample_rate: int = 16000, - sample_width: int = 2, + sample_rate: int = SAMPLE_RATE, + sample_width: int = SAMPLE_WIDTH, ) -> AsyncGenerator[bytes]: """Yield audio chunks until VAD detects silence or speech-to-text completes.""" - chunk_seconds = AUDIO_PROCESSOR_SAMPLES / sample_rate sent_vad_start = False async for chunk in audio_stream: self._capture_chunk(chunk.audio) if stt_vad is not None: + chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate if not stt_vad.process(chunk_seconds, chunk.is_speech): # Silence detected at the end of voice command self.process_event( @@ -1072,8 +1065,8 @@ class PipelineRun: tts_options[tts.ATTR_PREFERRED_FORMAT] = self.tts_audio_output if self.tts_audio_output == "wav": # 16 Khz, 16-bit mono - tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = 16000 - tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = 1 + tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = SAMPLE_RATE + tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = SAMPLE_CHANNELS try: options_supported = await tts.async_support_options( @@ -1220,12 +1213,15 @@ class PipelineRun: async def process_volume_only( self, audio_stream: AsyncIterable[bytes], - sample_rate: int = 16000, - sample_width: int = 2, - ) -> AsyncGenerator[ProcessedAudioChunk]: + sample_rate: int = SAMPLE_RATE, + sample_width: int = SAMPLE_WIDTH, + ) -> AsyncGenerator[EnhancedAudioChunk]: """Apply volume transformation only (no VAD/audio enhancements) with optional chunking.""" + assert self.audio_chunking_buffer is not None + + bytes_per_chunk = self.samples_per_chunk * sample_width ms_per_sample = sample_rate // 1000 - ms_per_chunk = (AUDIO_PROCESSOR_SAMPLES // sample_width) // ms_per_sample + ms_per_chunk = self.samples_per_chunk // ms_per_sample timestamp_ms = 0 async for chunk in audio_stream: @@ -1233,19 +1229,18 @@ class PipelineRun: chunk = _multiply_volume(chunk, self.audio_settings.volume_multiplier) if self.audio_settings.is_chunking_enabled: - # 10 ms chunking - for chunk_10ms in chunk_samples( - chunk, AUDIO_PROCESSOR_BYTES, self.audio_processor_buffer + for sub_chunk in chunk_samples( + chunk, bytes_per_chunk, self.audio_chunking_buffer ): - yield ProcessedAudioChunk( - audio=chunk_10ms, + yield EnhancedAudioChunk( + audio=sub_chunk, timestamp_ms=timestamp_ms, is_speech=None, # no VAD ) timestamp_ms += ms_per_chunk else: # No chunking - yield ProcessedAudioChunk( + yield EnhancedAudioChunk( audio=chunk, timestamp_ms=timestamp_ms, is_speech=None, # no VAD @@ -1255,14 +1250,19 @@ class PipelineRun: 
async def process_enhance_audio( self, audio_stream: AsyncIterable[bytes], - sample_rate: int = 16000, - sample_width: int = 2, - ) -> AsyncGenerator[ProcessedAudioChunk]: + sample_rate: int = SAMPLE_RATE, + sample_width: int = SAMPLE_WIDTH, + ) -> AsyncGenerator[EnhancedAudioChunk]: """Split audio into 10 ms chunks and apply VAD/noise suppression/auto gain/volume transformation.""" - assert self.audio_processor is not None + assert self.audio_enhancer is not None + assert self.audio_enhancer.samples_per_chunk is not None + assert self.audio_chunking_buffer is not None + bytes_per_chunk = self.audio_enhancer.samples_per_chunk * sample_width ms_per_sample = sample_rate // 1000 - ms_per_chunk = (AUDIO_PROCESSOR_SAMPLES // sample_width) // ms_per_sample + ms_per_chunk = ( + self.audio_enhancer.samples_per_chunk // sample_width + ) // ms_per_sample timestamp_ms = 0 async for dirty_samples in audio_stream: @@ -1272,17 +1272,11 @@ class PipelineRun: dirty_samples, self.audio_settings.volume_multiplier ) - # Split into 10ms chunks for audio enhancements/VAD - for dirty_10ms_chunk in chunk_samples( - dirty_samples, AUDIO_PROCESSOR_BYTES, self.audio_processor_buffer + # Split into chunks for audio enhancements/VAD + for dirty_chunk in chunk_samples( + dirty_samples, bytes_per_chunk, self.audio_chunking_buffer ): - ap_result = self.audio_processor.Process10ms(dirty_10ms_chunk) - yield ProcessedAudioChunk( - audio=ap_result.audio, - timestamp_ms=timestamp_ms, - is_speech=ap_result.is_speech, - ) - + yield self.audio_enhancer.enhance_chunk(dirty_chunk, timestamp_ms) timestamp_ms += ms_per_chunk @@ -1323,9 +1317,9 @@ def _pipeline_debug_recording_thread_proc( wav_path = run_recording_dir / f"{message}.wav" wav_writer = wave.open(str(wav_path), "wb") - wav_writer.setframerate(16000) - wav_writer.setsampwidth(2) - wav_writer.setnchannels(1) + wav_writer.setframerate(SAMPLE_RATE) + wav_writer.setsampwidth(SAMPLE_WIDTH) + wav_writer.setnchannels(SAMPLE_CHANNELS) elif isinstance(message, bytes): # Chunk of 16-bit mono audio at 16Khz if wav_writer is not None: @@ -1368,8 +1362,8 @@ class PipelineInput: """Run pipeline.""" self.run.start(device_id=self.device_id) current_stage: PipelineStage | None = self.run.start_stage - stt_audio_buffer: list[ProcessedAudioChunk] = [] - stt_processed_stream: AsyncIterable[ProcessedAudioChunk] | None = None + stt_audio_buffer: list[EnhancedAudioChunk] = [] + stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None if self.stt_stream is not None: if self.run.audio_settings.needs_processor: @@ -1423,7 +1417,7 @@ class PipelineInput: # Send audio in the buffer first to speech-to-text, then move on to stt_stream. # This is basically an async itertools.chain. 
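# Illustrative sketch only (not part of the patch): the "async itertools.chain"
# mentioned above boils down to draining the buffered chunks before handing
# over to the live stream. The names below are made up for illustration.
async def _chain_buffer_then_stream(buffered, stream):
    for chunk in buffered:  # audio captured before speech-to-text started
        yield chunk
    async for chunk in stream:  # then continue with the live audio stream
        yield chunk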
async def buffer_then_audio_stream() -> ( - AsyncGenerator[ProcessedAudioChunk] + AsyncGenerator[EnhancedAudioChunk] ): # Buffered audio for chunk in stt_audio_buffer: diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 5b3d1408f58..e3b425a2a7b 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -2,12 +2,11 @@ from __future__ import annotations -from abc import ABC, abstractmethod -from collections.abc import Iterable +from collections.abc import Callable, Iterable from dataclasses import dataclass from enum import StrEnum import logging -from typing import Final, cast +from typing import Final _LOGGER = logging.getLogger(__name__) @@ -35,44 +34,6 @@ class VadSensitivity(StrEnum): return 1.0 -class VoiceActivityDetector(ABC): - """Base class for voice activity detectors (VAD).""" - - @abstractmethod - def is_speech(self, chunk: bytes) -> bool: - """Return True if audio chunk contains speech.""" - - @property - @abstractmethod - def samples_per_chunk(self) -> int | None: - """Return number of samples per chunk or None if chunking is not required.""" - - -class WebRtcVad(VoiceActivityDetector): - """Voice activity detector based on webrtc.""" - - def __init__(self) -> None: - """Initialize webrtcvad.""" - # Delay import of webrtc so HA start up is not crashing - # on older architectures (armhf). - # - # pylint: disable=import-outside-toplevel - from webrtc_noise_gain import AudioProcessor - - # Just VAD: no noise suppression or auto gain - self._audio_processor = AudioProcessor(0, 0) - - def is_speech(self, chunk: bytes) -> bool: - """Return True if audio chunk contains speech.""" - result = self._audio_processor.Process10ms(chunk) - return cast(bool, result.is_speech) - - @property - def samples_per_chunk(self) -> int | None: - """Return 10 ms.""" - return int(0.01 * _SAMPLE_RATE) # 10 ms - - class AudioBuffer: """Fixed-sized audio buffer with variable internal length.""" @@ -176,29 +137,38 @@ class VoiceCommandSegmenter: if self._speech_seconds_left <= 0: # Inside voice command self.in_command = True + self._silence_seconds_left = self.silence_seconds + _LOGGER.debug("Voice command started") else: # Reset if enough silence self._reset_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: self._speech_seconds_left = self.speech_seconds + self._reset_seconds_left = self.reset_seconds elif not is_speech: + # Silence in command self._reset_seconds_left = self.reset_seconds self._silence_seconds_left -= chunk_seconds if self._silence_seconds_left <= 0: + # Command finished successfully self.reset() + _LOGGER.debug("Voice command finished") return False else: - # Reset if enough speech + # Speech in command. + # Reset silence counter if enough speech. self._reset_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: self._silence_seconds_left = self.silence_seconds + self._reset_seconds_left = self.reset_seconds return True def process_with_vad( self, chunk: bytes, - vad: VoiceActivityDetector, + vad_samples_per_chunk: int | None, + vad_is_speech: Callable[[bytes], bool], leftover_chunk_buffer: AudioBuffer | None, ) -> bool: """Process an audio chunk using an external VAD. @@ -207,20 +177,20 @@ class VoiceCommandSegmenter: Returns False when voice command is finished. 
""" - if vad.samples_per_chunk is None: + if vad_samples_per_chunk is None: # No chunking chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE - is_speech = vad.is_speech(chunk) + is_speech = vad_is_speech(chunk) return self.process(chunk_seconds, is_speech) if leftover_chunk_buffer is None: raise ValueError("leftover_chunk_buffer is required when vad uses chunking") # With chunking - seconds_per_chunk = vad.samples_per_chunk / _SAMPLE_RATE - bytes_per_chunk = vad.samples_per_chunk * _SAMPLE_WIDTH + seconds_per_chunk = vad_samples_per_chunk / _SAMPLE_RATE + bytes_per_chunk = vad_samples_per_chunk * _SAMPLE_WIDTH for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer): - is_speech = vad.is_speech(vad_chunk) + is_speech = vad_is_speech(vad_chunk) if not self.process(seconds_per_chunk, is_speech): return False diff --git a/homeassistant/components/assist_pipeline/websocket_api.py b/homeassistant/components/assist_pipeline/websocket_api.py index 3855bd7afc5..c96af655589 100644 --- a/homeassistant/components/assist_pipeline/websocket_api.py +++ b/homeassistant/components/assist_pipeline/websocket_api.py @@ -24,6 +24,9 @@ from .const import ( DEFAULT_WAKE_WORD_TIMEOUT, DOMAIN, EVENT_RECORDING, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, ) from .error import PipelineNotFound from .pipeline import ( @@ -92,7 +95,6 @@ def async_register_websocket_api(hass: HomeAssistant) -> None: vol.Optional("volume_multiplier"): float, # Advanced use cases/testing vol.Optional("no_vad"): bool, - vol.Optional("no_chunking"): bool, } }, extra=vol.ALLOW_EXTRA, @@ -170,9 +172,14 @@ async def websocket_run( # Yield until we receive an empty chunk while chunk := await audio_queue.get(): - if incoming_sample_rate != 16000: + if incoming_sample_rate != SAMPLE_RATE: chunk, state = audioop.ratecv( - chunk, 2, 1, incoming_sample_rate, 16000, state + chunk, + SAMPLE_WIDTH, + SAMPLE_CHANNELS, + incoming_sample_rate, + SAMPLE_RATE, + state, ) yield chunk @@ -206,7 +213,6 @@ async def websocket_run( auto_gain_dbfs=msg_input.get("auto_gain_dbfs", 0), volume_multiplier=msg_input.get("volume_multiplier", 1.0), is_vad_enabled=not msg_input.get("no_vad", False), - is_chunking_enabled=not msg_input.get("no_chunking", False), ) elif start_stage == PipelineStage.INTENT: # Input to conversation agent @@ -424,9 +430,9 @@ def websocket_list_languages( connection.send_result( msg["id"], { - "languages": sorted(pipeline_languages) - if pipeline_languages - else pipeline_languages + "languages": ( + sorted(pipeline_languages) if pipeline_languages else pipeline_languages + ) }, ) diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 5770d9d2b4a..243909629cf 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -31,12 +31,14 @@ from homeassistant.components.assist_pipeline import ( async_pipeline_from_audio_stream, select as pipeline_select, ) +from homeassistant.components.assist_pipeline.audio_enhancer import ( + AudioEnhancer, + MicroVadEnhancer, +) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, VadSensitivity, - VoiceActivityDetector, VoiceCommandSegmenter, - WebRtcVad, ) from homeassistant.const import __version__ from homeassistant.core import Context, HomeAssistant @@ -233,13 +235,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - vad = WebRtcVad() + 
audio_enhancer = MicroVadEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) speech_detected = await self._wait_for_speech( segmenter, - vad, + audio_enhancer, chunk_buffer, ) if not speech_detected: @@ -253,7 +255,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: async for chunk in self._segment_audio( segmenter, - vad, + audio_enhancer, chunk_buffer, ): yield chunk @@ -317,7 +319,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async def _wait_for_speech( self, segmenter: VoiceCommandSegmenter, - vad: VoiceActivityDetector, + audio_enhancer: AudioEnhancer, chunk_buffer: MutableSequence[bytes], ): """Buffer audio chunks until speech is detected. @@ -329,13 +331,18 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert vad.samples_per_chunk is not None - vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + assert audio_enhancer.samples_per_chunk is not None + vad_buffer = AudioBuffer(audio_enhancer.samples_per_chunk * WIDTH) while chunk: chunk_buffer.append(chunk) - segmenter.process_with_vad(chunk, vad, vad_buffer) + segmenter.process_with_vad( + chunk, + audio_enhancer.samples_per_chunk, + lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, + vad_buffer, + ) if segmenter.in_command: # Buffer until command starts if len(vad_buffer) > 0: @@ -351,7 +358,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async def _segment_audio( self, segmenter: VoiceCommandSegmenter, - vad: VoiceActivityDetector, + audio_enhancer: AudioEnhancer, chunk_buffer: Sequence[bytes], ) -> AsyncIterable[bytes]: """Yield audio chunks until voice command has finished.""" @@ -364,11 +371,16 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert vad.samples_per_chunk is not None - vad_buffer = AudioBuffer(vad.samples_per_chunk * WIDTH) + assert audio_enhancer.samples_per_chunk is not None + vad_buffer = AudioBuffer(audio_enhancer.samples_per_chunk * WIDTH) while chunk: - if not segmenter.process_with_vad(chunk, vad, vad_buffer): + if not segmenter.process_with_vad( + chunk, + audio_enhancer.samples_per_chunk, + lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, + vad_buffer, + ): # Voice command is finished break diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 43e737a002d..c52ccfa6a8c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -45,6 +45,7 @@ Pillow==10.4.0 pip>=21.3.1 psutil-home-assistant==0.0.1 PyJWT==2.8.0 +pymicro-vad==1.0.0 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 @@ -60,7 +61,6 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -webrtc-noise-gain==1.2.3 yarl==1.9.4 zeroconf==0.132.2 diff --git a/requirements_all.txt b/requirements_all.txt index 1abd6ec5be2..bc1a13f19ab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2007,6 +2007,9 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 +# homeassistant.components.assist_pipeline +pymicro-vad==1.0.0 + # homeassistant.components.xiaomi_tv pymitv==1.4.3 @@ -2896,9 +2899,6 @@ weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.assist_pipeline -webrtc-noise-gain==1.2.3 - # homeassistant.components.whirlpool 
whirlpool-sixth-sense==0.18.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 463cbae4cdf..22de281aa61 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1603,6 +1603,9 @@ pymelcloud==2.5.9 # homeassistant.components.meteoclimatic pymeteoclimatic==0.1.0 +# homeassistant.components.assist_pipeline +pymicro-vad==1.0.0 + # homeassistant.components.mochad pymochad==0.2.0 @@ -2282,9 +2285,6 @@ weatherflow4py==0.2.21 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 -# homeassistant.components.assist_pipeline -webrtc-noise-gain==1.2.3 - # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 diff --git a/script/licenses.py b/script/licenses.py index f2298e473a2..ad5ae8476b3 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -172,7 +172,6 @@ EXCEPTIONS = { "tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 "tellduslive", # https://github.com/molobrakos/tellduslive/pull/24 "tellsticknet", # https://github.com/molobrakos/tellsticknet/pull/33 - "webrtc_noise_gain", # https://github.com/rhasspy/webrtc-noise-gain/pull/24 "vincenty", # Public domain "zeversolar", # https://github.com/kvanzuijlen/zeversolar/pull/46 } diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index f9b91af3bf1..8fb7ce5b5a5 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -75,9 +75,7 @@ async def test_pipeline_from_audio_stream_auto( channel=stt.AudioChannels.CHANNEL_MONO, ), stt_stream=audio_data(), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -140,9 +138,7 @@ async def test_pipeline_from_audio_stream_legacy( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -205,9 +201,7 @@ async def test_pipeline_from_audio_stream_entity( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -271,9 +265,7 @@ async def test_pipeline_from_audio_stream_no_stt( ), stt_stream=audio_data(), pipeline_id=pipeline_id, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert not events @@ -335,24 +327,25 @@ async def test_pipeline_from_audio_stream_wake_word( # [0, 2, ...] wake_chunk_2 = bytes(it.islice(it.cycle(range(0, 256, 2)), BYTES_ONE_SECOND)) - bytes_per_chunk = int(0.01 * BYTES_ONE_SECOND) + samples_per_chunk = 160 + bytes_per_chunk = samples_per_chunk * 2 # 16-bit async def audio_data(): - # 1 second in 10 ms chunks + # 1 second in chunks i = 0 while i < len(wake_chunk_1): yield wake_chunk_1[i : i + bytes_per_chunk] i += bytes_per_chunk - # 1 second in 30 ms chunks + # 1 second in chunks i = 0 while i < len(wake_chunk_2): yield wake_chunk_2[i : i + bytes_per_chunk] i += bytes_per_chunk - yield b"wake word!" 
- yield b"part1" - yield b"part2" + for chunk in (b"wake word!", b"part1", b"part2"): + yield chunk + bytes(bytes_per_chunk - len(chunk)) + yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -373,7 +366,7 @@ async def test_pipeline_from_audio_stream_wake_word( audio_seconds_to_buffer=1.5 ), audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False + is_vad_enabled=False, samples_per_chunk=samples_per_chunk ), ) @@ -390,7 +383,9 @@ async def test_pipeline_from_audio_stream_wake_word( ) assert first_chunk == wake_chunk_1[len(wake_chunk_1) // 2 :] + wake_chunk_2 - assert mock_stt_provider.received[-3:] == [b"queued audio", b"part1", b"part2"] + assert mock_stt_provider.received[-3] == b"queued audio" + assert mock_stt_provider.received[-2].startswith(b"part1") + assert mock_stt_provider.received[-1].startswith(b"part2") async def test_pipeline_save_audio( @@ -438,9 +433,7 @@ async def test_pipeline_save_audio( pipeline_id=pipeline.id, start_stage=assist_pipeline.PipelineStage.WAKE_WORD, end_stage=assist_pipeline.PipelineStage.STT, - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) pipeline_dirs = list(temp_dir.iterdir()) @@ -685,9 +678,7 @@ async def test_wake_word_detection_aborted( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, is_chunking_enabled=False - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ), ) await pipeline_input.validate() diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index 139ae915263..17cb73a9139 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -1,11 +1,9 @@ """Tests for voice command segmenter.""" import itertools as it -from unittest.mock import patch from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, - VoiceActivityDetector, VoiceCommandSegmenter, chunk_samples, ) @@ -44,59 +42,41 @@ def test_speech() -> None: def test_audio_buffer() -> None: """Test audio buffer wrapping.""" - class DisabledVad(VoiceActivityDetector): - def is_speech(self, chunk): - return False + samples_per_chunk = 160 # 10 ms + bytes_per_chunk = samples_per_chunk * 2 + leftover_buffer = AudioBuffer(bytes_per_chunk) - @property - def samples_per_chunk(self): - return 160 # 10 ms + # Partially fill audio buffer + half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) + chunks = list(chunk_samples(half_chunk, bytes_per_chunk, leftover_buffer)) - vad = DisabledVad() - bytes_per_chunk = vad.samples_per_chunk * 2 - vad_buffer = AudioBuffer(bytes_per_chunk) - segmenter = VoiceCommandSegmenter() + assert not chunks + assert leftover_buffer.bytes() == half_chunk - with patch.object(vad, "is_speech", return_value=False) as mock_process: - # Partially fill audio buffer - half_chunk = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk // 2)) - segmenter.process_with_vad(half_chunk, vad, vad_buffer) + # Fill and wrap with 1/4 chunk left over + three_quarters_chunk = bytes( + it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) + ) + chunks = list(chunk_samples(three_quarters_chunk, bytes_per_chunk, leftover_buffer)) - assert not mock_process.called - assert vad_buffer is not None - assert vad_buffer.bytes() == half_chunk + assert 
len(chunks) == 1 + assert ( + leftover_buffer.bytes() + == three_quarters_chunk[len(three_quarters_chunk) - (bytes_per_chunk // 4) :] + ) + assert chunks[0] == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] - # Fill and wrap with 1/4 chunk left over - three_quarters_chunk = bytes( - it.islice(it.cycle(range(256)), int(0.75 * bytes_per_chunk)) - ) - segmenter.process_with_vad(three_quarters_chunk, vad, vad_buffer) + # Run 2 chunks through + leftover_buffer.clear() + assert len(leftover_buffer) == 0 - assert mock_process.call_count == 1 - assert ( - vad_buffer.bytes() - == three_quarters_chunk[ - len(three_quarters_chunk) - (bytes_per_chunk // 4) : - ] - ) - assert ( - mock_process.call_args[0][0] - == half_chunk + three_quarters_chunk[: bytes_per_chunk // 2] - ) + two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) + chunks = list(chunk_samples(two_chunks, bytes_per_chunk, leftover_buffer)) - # Run 2 chunks through - segmenter.reset() - vad_buffer.clear() - assert len(vad_buffer) == 0 - - mock_process.reset_mock() - two_chunks = bytes(it.islice(it.cycle(range(256)), bytes_per_chunk * 2)) - segmenter.process_with_vad(two_chunks, vad, vad_buffer) - - assert mock_process.call_count == 2 - assert len(vad_buffer) == 0 - assert mock_process.call_args_list[0][0][0] == two_chunks[:bytes_per_chunk] - assert mock_process.call_args_list[1][0][0] == two_chunks[bytes_per_chunk:] + assert len(chunks) == 2 + assert len(leftover_buffer) == 0 + assert chunks[0] == two_chunks[:bytes_per_chunk] + assert chunks[1] == two_chunks[bytes_per_chunk:] def test_partial_chunk() -> None: @@ -125,43 +105,3 @@ def test_chunk_samples_leftover() -> None: assert len(chunks) == 1 assert leftover_chunk_buffer.bytes() == bytes([5, 6]) - - -def test_vad_no_chunking() -> None: - """Test VAD that doesn't require chunking.""" - - class VadNoChunk(VoiceActivityDetector): - def is_speech(self, chunk: bytes) -> bool: - return sum(chunk) > 0 - - @property - def samples_per_chunk(self) -> int | None: - return None - - vad = VadNoChunk() - segmenter = VoiceCommandSegmenter( - speech_seconds=1.0, silence_seconds=1.0, reset_seconds=0.5 - ) - silence = bytes([0] * 16000) - speech = bytes([255] * (16000 // 2)) - - # Test with differently-sized chunks - assert vad.is_speech(speech) - assert not vad.is_speech(silence) - - # Simulate voice command - assert segmenter.process_with_vad(silence, vad, None) - # begin - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - # reset with silence - assert segmenter.process_with_vad(silence, vad, None) - # resume - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - assert segmenter.process_with_vad(speech, vad, None) - # end - assert segmenter.process_with_vad(silence, vad, None) - assert not segmenter.process_with_vad(silence, vad, None) diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index de8ddc7ccc7..7d4a9b18c12 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -259,12 +259,7 @@ async def test_audio_pipeline_with_wake_word_no_timeout( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "timeout": 0, - "no_vad": True, - "no_chunking": True, - 
}, + "input": {"sample_rate": 16000, "timeout": 0, "no_vad": True}, } ) @@ -1876,11 +1871,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, } ) @@ -1889,11 +1880,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, } ) @@ -1967,11 +1954,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, } ) @@ -1980,11 +1963,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, } ) @@ -2094,11 +2073,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_1, "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, } ) @@ -2109,11 +2084,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_2, "start_stage": "wake_word", "end_stage": "tts", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, } ) @@ -2210,11 +2181,7 @@ async def test_device_capture( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2315,11 +2282,7 @@ async def test_device_capture_override( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2464,11 +2427,7 @@ async def test_device_capture_queue_full( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": { - "sample_rate": 16000, - "no_vad": True, - "no_chunking": True, - }, + "input": {"sample_rate": 16000, "no_vad": True}, "device_id": satellite_device.id, } ) diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index 6c292241237..c2978afc17f 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -43,9 +43,12 @@ async def test_pipeline( """Test that pipeline function is called from RTP protocol.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -98,8 +101,8 @@ async def test_pipeline( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ 
-238,9 +241,12 @@ async def test_tts_timeout( """Test that TTS will time out based on its length.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -298,8 +304,8 @@ async def test_tts_timeout( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -361,9 +367,12 @@ async def test_tts_wrong_extension( """Test that TTS will only stream WAV audio.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -403,8 +412,8 @@ async def test_tts_wrong_extension( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -456,9 +465,12 @@ async def test_tts_wrong_wav_format( """Test that TTS will only stream WAV audio with a specific format.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 done = asyncio.Event() @@ -505,8 +517,8 @@ async def test_tts_wrong_wav_format( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", @@ -558,9 +570,12 @@ async def test_empty_tts_output( """Test that TTS will not stream when output is empty.""" assert await async_setup_component(hass, "voip", {}) - def is_speech(self, chunk): + def process_10ms(self, chunk): """Anything non-zero is speech.""" - return sum(chunk) > 0 + if sum(chunk) > 0: + return 1 + + return 0 async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] @@ -591,8 +606,8 @@ async def test_empty_tts_output( with ( patch( - "homeassistant.components.assist_pipeline.vad.WebRtcVad.is_speech", - new=is_speech, + "pymicro_vad.MicroVad.Process10ms", + new=process_10ms, ), patch( "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", From e0a1aaa1b93f5302baf90b09548733aa622090d8 Mon Sep 17 00:00:00 2001 From: Paarth Shah Date: Wed, 31 Jul 2024 00:44:59 -0700 Subject: [PATCH 0172/1635] Fix matrix blocking call by running sync_forever in background_task (#122800) Fix blocking call by running sync_forever in background_task --- homeassistant/components/matrix/__init__.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/matrix/__init__.py b/homeassistant/components/matrix/__init__.py index 4c9af45e63f..77f13293519 100644 --- a/homeassistant/components/matrix/__init__.py +++ b/homeassistant/components/matrix/__init__.py @@ -209,15 +209,22 @@ class MatrixBot: await self._resolve_room_aliases(listening_rooms) self._load_commands(commands) await self._join_rooms() + # Sync once so that we don't respond to past events. 
+ _LOGGER.debug("Starting initial sync for %s", self._mx_id) await self._client.sync(timeout=30_000) + _LOGGER.debug("Finished initial sync for %s", self._mx_id) self._client.add_event_callback(self._handle_room_message, RoomMessageText) - await self._client.sync_forever( - timeout=30_000, - loop_sleep_time=1_000, - ) # milliseconds. + _LOGGER.debug("Starting sync_forever for %s", self._mx_id) + self.hass.async_create_background_task( + self._client.sync_forever( + timeout=30_000, + loop_sleep_time=1_000, + ), # milliseconds. + name=f"{self.__class__.__name__}: sync_forever for '{self._mx_id}'", + ) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, handle_startup) From 015a1a6ebc0e923f058cded62fdfe4ba1c9b97d8 Mon Sep 17 00:00:00 2001 From: Paarth Shah Date: Wed, 31 Jul 2024 00:45:30 -0700 Subject: [PATCH 0173/1635] Fix blocking event loop call in matrix (#122730) Wrap load_json_object in async_add_executor_job --- homeassistant/components/matrix/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/matrix/__init__.py b/homeassistant/components/matrix/__init__.py index 77f13293519..e1b488c0fce 100644 --- a/homeassistant/components/matrix/__init__.py +++ b/homeassistant/components/matrix/__init__.py @@ -349,7 +349,9 @@ class MatrixBot: async def _get_auth_tokens(self) -> JsonObjectType: """Read sorted authentication tokens from disk.""" try: - return load_json_object(self._session_filepath) + return await self.hass.async_add_executor_job( + load_json_object, self._session_filepath + ) except HomeAssistantError as ex: _LOGGER.warning( "Loading authentication tokens from file '%s' failed: %s", From f6f7459c364bd0684192cdd285f52a9703884871 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 31 Jul 2024 10:35:05 +0200 Subject: [PATCH 0174/1635] Add support for login credentials to homeworks (#122877) * Add support for login credentials to homeworks * Store credentials in config entry data --- .../components/homeworks/__init__.py | 20 ++- .../components/homeworks/config_flow.py | 50 ++++++- .../components/homeworks/strings.json | 19 ++- tests/components/homeworks/conftest.py | 99 ++++++++----- .../homeworks/test_binary_sensor.py | 2 +- .../components/homeworks/test_config_flow.py | 138 +++++++++++++++++- tests/components/homeworks/test_init.py | 57 +++++++- tests/components/homeworks/test_light.py | 4 +- 8 files changed, 331 insertions(+), 58 deletions(-) diff --git a/homeassistant/components/homeworks/__init__.py b/homeassistant/components/homeworks/__init__.py index cf39bc72ec6..448487cb8b0 100644 --- a/homeassistant/components/homeworks/__init__.py +++ b/homeassistant/components/homeworks/__init__.py @@ -9,7 +9,12 @@ import logging from typing import Any from pyhomeworks import exceptions as hw_exceptions -from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED, Homeworks +from pyhomeworks.pyhomeworks import ( + HW_BUTTON_PRESSED, + HW_BUTTON_RELEASED, + HW_LOGIN_INCORRECT, + Homeworks, +) import voluptuous as vol from homeassistant.config_entries import ConfigEntry @@ -17,7 +22,9 @@ from homeassistant.const import ( CONF_HOST, CONF_ID, CONF_NAME, + CONF_PASSWORD, CONF_PORT, + CONF_USERNAME, EVENT_HOMEASSISTANT_STOP, Platform, ) @@ -137,12 +144,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: def hw_callback(msg_type: Any, values: Any) -> None: """Dispatch state changes.""" _LOGGER.debug("callback: %s, %s", msg_type, values) + if msg_type == HW_LOGIN_INCORRECT: + 
_LOGGER.debug("login incorrect") + return addr = values[0] signal = f"homeworks_entity_{controller_id}_{addr}" dispatcher_send(hass, signal, msg_type, values) config = entry.options - controller = Homeworks(config[CONF_HOST], config[CONF_PORT], hw_callback) + controller = Homeworks( + config[CONF_HOST], + config[CONF_PORT], + hw_callback, + entry.data.get(CONF_USERNAME), + entry.data.get(CONF_PASSWORD), + ) try: await hass.async_add_executor_job(controller.connect) except hw_exceptions.HomeworksException as err: diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index 4508f3bd21d..9247670b40b 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -14,7 +14,13 @@ from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAI from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from homeassistant.core import async_get_hass, callback from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import ( @@ -62,6 +68,10 @@ CONTROLLER_EDIT = { mode=selector.NumberSelectorMode.BOX, ) ), + vol.Optional(CONF_USERNAME): selector.TextSelector(), + vol.Optional(CONF_PASSWORD): selector.TextSelector( + selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD) + ), } LIGHT_EDIT: VolDictType = { @@ -92,10 +102,17 @@ BUTTON_EDIT: VolDictType = { validate_addr = cv.matches_regex(r"\[(?:\d\d:){2,4}\d\d\]") +def _validate_credentials(user_input: dict[str, Any]) -> None: + """Validate credentials.""" + if CONF_PASSWORD in user_input and CONF_USERNAME not in user_input: + raise SchemaFlowError("need_username_with_password") + + async def validate_add_controller( handler: ConfigFlow | SchemaOptionsFlowHandler, user_input: dict[str, Any] ) -> dict[str, Any]: """Validate controller setup.""" + _validate_credentials(user_input) user_input[CONF_CONTROLLER_ID] = slugify(user_input[CONF_NAME]) user_input[CONF_PORT] = int(user_input[CONF_PORT]) try: @@ -128,7 +145,13 @@ async def _try_connection(user_input: dict[str, Any]) -> None: _LOGGER.debug( "Trying to connect to %s:%s", user_input[CONF_HOST], user_input[CONF_PORT] ) - controller = Homeworks(host, port, lambda msg_types, values: None) + controller = Homeworks( + host, + port, + lambda msg_types, values: None, + user_input.get(CONF_USERNAME), + user_input.get(CONF_PASSWORD), + ) controller.connect() controller.close() @@ -138,7 +161,14 @@ async def _try_connection(user_input: dict[str, Any]) -> None: _try_connect, user_input[CONF_HOST], user_input[CONF_PORT] ) except hw_exceptions.HomeworksConnectionFailed as err: + _LOGGER.debug("Caught HomeworksConnectionFailed") raise SchemaFlowError("connection_error") from err + except hw_exceptions.HomeworksInvalidCredentialsProvided as err: + _LOGGER.debug("Caught HomeworksInvalidCredentialsProvided") + raise SchemaFlowError("invalid_credentials") from err + except hw_exceptions.HomeworksNoCredentialsProvided as err: + _LOGGER.debug("Caught HomeworksNoCredentialsProvided") + raise SchemaFlowError("credentials_needed") from err except Exception as err: _LOGGER.exception("Caught unexpected exception %s") raise 
SchemaFlowError("unknown_error") from err @@ -529,6 +559,7 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): self, user_input: dict[str, Any] ) -> dict[str, Any]: """Validate controller setup.""" + _validate_credentials(user_input) user_input[CONF_PORT] = int(user_input[CONF_PORT]) our_entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) @@ -569,12 +600,19 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): except SchemaFlowError as err: errors["base"] = str(err) else: + password = user_input.pop(CONF_PASSWORD, None) + username = user_input.pop(CONF_USERNAME, None) + new_data = entry.data | { + CONF_PASSWORD: password, + CONF_USERNAME: username, + } new_options = entry.options | { CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], } return self.async_update_reload_and_abort( entry, + data=new_data, options=new_options, reason="reconfigure_successful", reload_even_if_entry_is_unchanged=False, @@ -603,8 +641,14 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): {CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]} ) name = user_input.pop(CONF_NAME) + password = user_input.pop(CONF_PASSWORD, None) + username = user_input.pop(CONF_USERNAME, None) user_input |= {CONF_DIMMERS: [], CONF_KEYPADS: []} - return self.async_create_entry(title=name, data={}, options=user_input) + return self.async_create_entry( + title=name, + data={CONF_PASSWORD: password, CONF_USERNAME: username}, + options=user_input, + ) return self.async_show_form( step_id="user", diff --git a/homeassistant/components/homeworks/strings.json b/homeassistant/components/homeworks/strings.json index b0d0f6e61e1..a9dcab2f1e0 100644 --- a/homeassistant/components/homeworks/strings.json +++ b/homeassistant/components/homeworks/strings.json @@ -2,8 +2,11 @@ "config": { "error": { "connection_error": "Could not connect to the controller.", + "credentials_needed": "The controller needs credentials.", "duplicated_controller_id": "The controller name is already in use.", "duplicated_host_port": "The specified host and port is already configured.", + "invalid_credentials": "The provided credentials are not valid.", + "need_username_with_password": "Credentials must be either a username and a password or only a username.", "unknown_error": "[%key:common::config_flow::error::unknown%]" }, "step": { @@ -22,7 +25,13 @@ "reconfigure": { "data": { "host": "[%key:common::config_flow::data::host%]", - "port": "[%key:common::config_flow::data::port%]" + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "password": "Optional password, leave blank if your system does not need credentials or only needs a single credential", + "username": "Optional username, leave blank if your system does not need login credentials" }, "description": "Modify a Lutron Homeworks controller connection settings" }, @@ -30,10 +39,14 @@ "data": { "host": "[%key:common::config_flow::data::host%]", "name": "Controller name", - "port": "[%key:common::config_flow::data::port%]" + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" }, "data_description": { - "name": "A unique name identifying the Lutron Homeworks controller" + "name": "A unique name identifying the Lutron Homeworks controller", + "password": 
"[%key:component::homeworks::config::step::reconfigure::data_description::password%]", + "username": "[%key:component::homeworks::config::step::reconfigure::data_description::username%]" }, "description": "Add a Lutron Homeworks controller" } diff --git a/tests/components/homeworks/conftest.py b/tests/components/homeworks/conftest.py index 86c3381b7a0..9562063ab97 100644 --- a/tests/components/homeworks/conftest.py +++ b/tests/components/homeworks/conftest.py @@ -17,10 +17,55 @@ from homeassistant.components.homeworks.const import ( CONF_RELEASE_DELAY, DOMAIN, ) -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from tests.common import MockConfigEntry +CONFIG_ENTRY_OPTIONS = { + CONF_CONTROLLER_ID: "main_controller", + CONF_HOST: "192.168.0.1", + CONF_PORT: 1234, + CONF_DIMMERS: [ + { + CONF_ADDR: "[02:08:01:01]", + CONF_NAME: "Foyer Sconces", + CONF_RATE: 1.0, + } + ], + CONF_KEYPADS: [ + { + CONF_ADDR: "[02:08:02:01]", + CONF_NAME: "Foyer Keypad", + CONF_BUTTONS: [ + { + CONF_NAME: "Morning", + CONF_NUMBER: 1, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Relax", + CONF_NUMBER: 2, + CONF_LED: True, + CONF_RELEASE_DELAY: None, + }, + { + CONF_NAME: "Dim up", + CONF_NUMBER: 3, + CONF_LED: False, + CONF_RELEASE_DELAY: 0.2, + }, + ], + } + ], +} + @pytest.fixture def mock_config_entry() -> MockConfigEntry: @@ -28,45 +73,19 @@ def mock_config_entry() -> MockConfigEntry: return MockConfigEntry( title="Lutron Homeworks", domain=DOMAIN, - data={}, - options={ - CONF_CONTROLLER_ID: "main_controller", - CONF_HOST: "192.168.0.1", - CONF_PORT: 1234, - CONF_DIMMERS: [ - { - CONF_ADDR: "[02:08:01:01]", - CONF_NAME: "Foyer Sconces", - CONF_RATE: 1.0, - } - ], - CONF_KEYPADS: [ - { - CONF_ADDR: "[02:08:02:01]", - CONF_NAME: "Foyer Keypad", - CONF_BUTTONS: [ - { - CONF_NAME: "Morning", - CONF_NUMBER: 1, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Relax", - CONF_NUMBER: 2, - CONF_LED: True, - CONF_RELEASE_DELAY: None, - }, - { - CONF_NAME: "Dim up", - CONF_NUMBER: 3, - CONF_LED: False, - CONF_RELEASE_DELAY: 0.2, - }, - ], - } - ], - }, + data={CONF_PASSWORD: None, CONF_USERNAME: None}, + options=CONFIG_ENTRY_OPTIONS, + ) + + +@pytest.fixture +def mock_config_entry_username_password() -> MockConfigEntry: + """Return the default mocked config entry with credentials.""" + return MockConfigEntry( + title="Lutron Homeworks", + domain=DOMAIN, + data={CONF_PASSWORD: "hunter2", CONF_USERNAME: "username"}, + options=CONFIG_ENTRY_OPTIONS, ) diff --git a/tests/components/homeworks/test_binary_sensor.py b/tests/components/homeworks/test_binary_sensor.py index 0b21ae3b773..4bd42cc0a59 100644 --- a/tests/components/homeworks/test_binary_sensor.py +++ b/tests/components/homeworks/test_binary_sensor.py @@ -30,7 +30,7 @@ async def test_binary_sensor_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert entity_id in hass.states.async_entity_ids(BINARY_SENSOR_DOMAIN) diff --git a/tests/components/homeworks/test_config_flow.py b/tests/components/homeworks/test_config_flow.py index c4738e68ecc..d0693531006 100644 --- a/tests/components/homeworks/test_config_flow.py +++ 
b/tests/components/homeworks/test_config_flow.py @@ -18,7 +18,13 @@ from homeassistant.components.homeworks.const import ( DOMAIN, ) from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_PORT, + CONF_USERNAME, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -46,7 +52,7 @@ async def test_user_flow( ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Main controller" - assert result["data"] == {} + assert result["data"] == {"password": None, "username": None} assert result["options"] == { "controller_id": "main_controller", "dimmers": [], @@ -54,11 +60,109 @@ async def test_user_flow( "keypads": [], "port": 1234, } - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) mock_controller.close.assert_called_once_with() mock_controller.join.assert_not_called() +async def test_user_flow_credentials( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + CONF_USERNAME: "username", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Main controller" + assert result["data"] == {"password": "hunter2", "username": "username"} + assert result["options"] == { + "controller_id": "main_controller", + "dimmers": [], + "host": "192.168.0.1", + "keypads": [], + "port": 1234, + } + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + mock_controller.close.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials_user_only( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PORT: 1234, + CONF_USERNAME: "username", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Main controller" + assert result["data"] == {"password": None, "username": "username"} + assert result["options"] == { + "controller_id": "main_controller", + "dimmers": [], + "host": "192.168.0.1", + "keypads": [], + "port": 1234, + } + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, "username", None) + mock_controller.close.assert_called_once_with() + mock_controller.join.assert_not_called() + + +async def test_user_flow_credentials_password_only( + hass: HomeAssistant, mock_homeworks: MagicMock, mock_setup_entry +) -> None: + """Test the user configuration flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": 
SOURCE_USER}, + ) + + mock_controller = MagicMock() + mock_homeworks.return_value = mock_controller + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.1", + CONF_NAME: "Main controller", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": "need_username_with_password"} + + async def test_user_flow_already_exists( hass: HomeAssistant, mock_empty_config_entry: MockConfigEntry, mock_setup_entry ) -> None: @@ -99,6 +203,8 @@ async def test_user_flow_already_exists( ("side_effect", "error"), [ (hw_exceptions.HomeworksConnectionFailed, "connection_error"), + (hw_exceptions.HomeworksInvalidCredentialsProvided, "invalid_credentials"), + (hw_exceptions.HomeworksNoCredentialsProvided, "credentials_needed"), (Exception, "unknown_error"), ], ) @@ -270,6 +376,32 @@ async def test_reconfigure_flow_flow_no_change( } +async def test_reconfigure_flow_credentials_password_only( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_homeworks: MagicMock +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_HOST: "192.168.0.2", + CONF_PASSWORD: "hunter2", + CONF_PORT: 1234, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {"base": "need_username_with_password"} + + async def test_options_add_light_flow( hass: HomeAssistant, mock_empty_config_entry: MockConfigEntry, diff --git a/tests/components/homeworks/test_init.py b/tests/components/homeworks/test_init.py index 2363e0f157d..2a4bd28138e 100644 --- a/tests/components/homeworks/test_init.py +++ b/tests/components/homeworks/test_init.py @@ -3,7 +3,11 @@ from unittest.mock import ANY, MagicMock from pyhomeworks import exceptions as hw_exceptions -from pyhomeworks.pyhomeworks import HW_BUTTON_PRESSED, HW_BUTTON_RELEASED +from pyhomeworks.pyhomeworks import ( + HW_BUTTON_PRESSED, + HW_BUTTON_RELEASED, + HW_LOGIN_INCORRECT, +) import pytest from homeassistant.components.homeworks import EVENT_BUTTON_PRESS, EVENT_BUTTON_RELEASE @@ -27,7 +31,7 @@ async def test_load_unload_config_entry( await hass.async_block_till_done() assert mock_config_entry.state is ConfigEntryState.LOADED - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -36,6 +40,51 @@ async def test_load_unload_config_entry( assert mock_config_entry.state is ConfigEntryState.NOT_LOADED +async def test_load_config_entry_with_credentials( + hass: HomeAssistant, + mock_config_entry_username_password: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test the Homeworks configuration entry loading/unloading.""" + mock_config_entry_username_password.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry_username_password.state is 
ConfigEntryState.LOADED + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + + await hass.config_entries.async_unload(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert not hass.data.get(DOMAIN) + assert mock_config_entry_username_password.state is ConfigEntryState.NOT_LOADED + + +async def test_controller_credentials_changed( + hass: HomeAssistant, + mock_config_entry_username_password: MockConfigEntry, + mock_homeworks: MagicMock, +) -> None: + """Test controller credentials changed. + + Note: This just ensures we don't blow up when credentials changed, in the future a + reauth flow should be added. + """ + mock_config_entry_username_password.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_username_password.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry_username_password.state is ConfigEntryState.LOADED + mock_homeworks.assert_called_once_with( + "192.168.0.1", 1234, ANY, "username", "hunter2" + ) + hw_callback = mock_homeworks.mock_calls[0][1][2] + + hw_callback(HW_LOGIN_INCORRECT, []) + + async def test_config_entry_not_ready( hass: HomeAssistant, mock_config_entry: MockConfigEntry, @@ -66,7 +115,7 @@ async def test_keypad_events( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] hw_callback(HW_BUTTON_PRESSED, ["[02:08:02:01]", 1]) @@ -184,7 +233,7 @@ async def test_cleanup_on_ha_shutdown( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) mock_controller.stop.assert_not_called() hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) diff --git a/tests/components/homeworks/test_light.py b/tests/components/homeworks/test_light.py index a5d94f736d5..1cd2951128c 100644 --- a/tests/components/homeworks/test_light.py +++ b/tests/components/homeworks/test_light.py @@ -35,7 +35,7 @@ async def test_light_attributes_state_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert len(mock_controller.request_dimmer_level.mock_calls) == 1 @@ -106,7 +106,7 @@ async def test_light_restore_brightness( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY) + mock_homeworks.assert_called_once_with("192.168.0.1", 1234, ANY, None, None) hw_callback = mock_homeworks.mock_calls[0][1][2] assert hass.states.async_entity_ids("light") == unordered([entity_id]) From 222011fc5cd38184337fb15ef75f78a7ce14d00e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 31 Jul 2024 10:36:46 +0200 Subject: [PATCH 0175/1635] Log tests in test group (#122892) * Log tests in test group * Simplify print --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 142839e77ff..b705064e078 100644 --- 
a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -904,6 +904,7 @@ jobs: cov_params+=(--cov-report=xml) fi + echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)" python3 -b -X dev -m pytest \ -qq \ --timeout=9 \ From 67ed8b207aa0dfb0fb6158ca4b1602bafbdc9ebd Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 31 Jul 2024 11:08:05 +0200 Subject: [PATCH 0176/1635] KNX: use xknx 3.0.0 eager telegram decoding (#122896) * Use KNX xknx 3.0.0 eager telegram decoding * review suggestion --- homeassistant/components/knx/__init__.py | 2 +- homeassistant/components/knx/project.py | 17 +++++++-- homeassistant/components/knx/telegrams.py | 45 ++++++++++++----------- homeassistant/components/knx/websocket.py | 1 + tests/components/knx/test_websocket.py | 2 +- 5 files changed, 41 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index 99b461dda1b..709a82b31fd 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -333,7 +333,7 @@ class KNXModule: async def start(self) -> None: """Start XKNX object. Connect to tunneling or Routing device.""" - await self.project.load_project() + await self.project.load_project(self.xknx) await self.config_store.load_data() await self.telegrams.load_history() await self.xknx.start() diff --git a/homeassistant/components/knx/project.py b/homeassistant/components/knx/project.py index 13e71dbbe38..3b3309dfc7d 100644 --- a/homeassistant/components/knx/project.py +++ b/homeassistant/components/knx/project.py @@ -6,6 +6,7 @@ from dataclasses import dataclass import logging from typing import Final +from xknx import XKNX from xknx.dpt import DPTBase from xknxproject import XKNXProj from xknxproject.models import ( @@ -80,15 +81,23 @@ class KNXProject: self.group_addresses = {} self.info = None - async def load_project(self, data: KNXProjectModel | None = None) -> None: + async def load_project( + self, xknx: XKNX, data: KNXProjectModel | None = None + ) -> None: """Load project data from storage.""" if project := data or await self._store.async_load(): self.devices = project["devices"] self.info = project["info"] + xknx.group_address_dpt.clear() + xknx_ga_dict = {} for ga_model in project["group_addresses"].values(): ga_info = _create_group_address_info(ga_model) self.group_addresses[ga_info.address] = ga_info + if (dpt_model := ga_model.get("dpt")) is not None: + xknx_ga_dict[ga_model["address"]] = dpt_model + + xknx.group_address_dpt.set(xknx_ga_dict) # type: ignore[arg-type] _LOGGER.debug( "Loaded KNX project data with %s group addresses from storage", @@ -96,7 +105,9 @@ class KNXProject: ) self.loaded = True - async def process_project_file(self, file_id: str, password: str) -> None: + async def process_project_file( + self, xknx: XKNX, file_id: str, password: str + ) -> None: """Process an uploaded project file.""" def _parse_project() -> KNXProjectModel: @@ -110,7 +121,7 @@ class KNXProject: project = await self.hass.async_add_executor_job(_parse_project) await self._store.async_save(project) - await self.load_project(data=project) + await self.load_project(xknx, data=project) async def remove_project_file(self) -> None: """Remove project file from storage.""" diff --git a/homeassistant/components/knx/telegrams.py b/homeassistant/components/knx/telegrams.py index 2ad46326b8e..a96d841a07d 100644 --- a/homeassistant/components/knx/telegrams.py +++ b/homeassistant/components/knx/telegrams.py @@ 
-35,7 +35,7 @@ class DecodedTelegramPayload(TypedDict): dpt_sub: int | None dpt_name: str | None unit: str | None - value: str | int | float | bool | None + value: bool | str | int | float | dict[str, str | int | float | bool] | None class TelegramDict(DecodedTelegramPayload): @@ -106,7 +106,7 @@ class Telegrams: payload_data: int | tuple[int, ...] | None = None src_name = "" transcoder = None - decoded_payload: DecodedTelegramPayload | None = None + value = None if ( ga_info := self.project.group_addresses.get( @@ -114,7 +114,6 @@ class Telegrams: ) ) is not None: dst_name = ga_info.name - transcoder = ga_info.transcoder if ( device := self.project.devices.get(f"{telegram.source_address}") @@ -123,45 +122,49 @@ class Telegrams: if isinstance(telegram.payload, (GroupValueWrite, GroupValueResponse)): payload_data = telegram.payload.value.value - if transcoder is not None: - decoded_payload = decode_telegram_payload( - payload=telegram.payload.value, transcoder=transcoder - ) + + if telegram.decoded_data is not None: + transcoder = telegram.decoded_data.transcoder + value = _serializable_decoded_data(telegram.decoded_data.value) return TelegramDict( destination=f"{telegram.destination_address}", destination_name=dst_name, direction=telegram.direction.value, - dpt_main=decoded_payload["dpt_main"] - if decoded_payload is not None - else None, - dpt_sub=decoded_payload["dpt_sub"] if decoded_payload is not None else None, - dpt_name=decoded_payload["dpt_name"] - if decoded_payload is not None - else None, + dpt_main=transcoder.dpt_main_number if transcoder is not None else None, + dpt_sub=transcoder.dpt_sub_number if transcoder is not None else None, + dpt_name=transcoder.value_type if transcoder is not None else None, payload=payload_data, source=f"{telegram.source_address}", source_name=src_name, telegramtype=telegram.payload.__class__.__name__, timestamp=dt_util.now().isoformat(), - unit=decoded_payload["unit"] if decoded_payload is not None else None, - value=decoded_payload["value"] if decoded_payload is not None else None, + unit=transcoder.unit if transcoder is not None else None, + value=value, ) +def _serializable_decoded_data( + value: bool | float | str | DPTComplexData | DPTEnumData, +) -> bool | str | int | float | dict[str, str | int | float | bool]: + """Return a serializable representation of decoded data.""" + if isinstance(value, DPTComplexData): + return value.as_dict() + if isinstance(value, DPTEnumData): + return value.name.lower() + return value + + def decode_telegram_payload( payload: DPTArray | DPTBinary, transcoder: type[DPTBase] ) -> DecodedTelegramPayload: - """Decode the payload of a KNX telegram.""" + """Decode the payload of a KNX telegram with custom transcoder.""" try: value = transcoder.from_knx(payload) except XKNXException: value = "Error decoding value" - if isinstance(value, DPTComplexData): - value = value.as_dict() - elif isinstance(value, DPTEnumData): - value = value.name.lower() + value = _serializable_decoded_data(value) return DecodedTelegramPayload( dpt_main=transcoder.dpt_main_number, diff --git a/homeassistant/components/knx/websocket.py b/homeassistant/components/knx/websocket.py index 97758dc87c9..4af3012741a 100644 --- a/homeassistant/components/knx/websocket.py +++ b/homeassistant/components/knx/websocket.py @@ -154,6 +154,7 @@ async def ws_project_file_process( knx: KNXModule = hass.data[DOMAIN] try: await knx.project.process_project_file( + xknx=knx.xknx, file_id=msg["file_id"], password=msg["password"], ) diff --git 
a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index eb22bac85bc..309ea111709 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -346,7 +346,7 @@ async def test_knx_subscribe_telegrams_command_project( assert res["event"]["destination"] == "0/1/1" assert res["event"]["destination_name"] == "percent" assert res["event"]["payload"] == 1 - assert res["event"]["value"] == "Error decoding value" + assert res["event"]["value"] is None assert res["event"]["telegramtype"] == "GroupValueWrite" assert res["event"]["source"] == "1.1.6" assert ( From 68f06e63e29ba31996d097392e068a6fd27ddb35 Mon Sep 17 00:00:00 2001 From: Diogo Gomes Date: Wed, 31 Jul 2024 10:32:13 +0100 Subject: [PATCH 0177/1635] Bump pytrydan to 0.8.0 (#122898) bump pytrydan to 0.8.0 --- homeassistant/components/v2c/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/v2c/snapshots/test_diagnostics.ambr | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/v2c/manifest.json b/homeassistant/components/v2c/manifest.json index ffe4b52ee6e..3a6eab0f335 100644 --- a/homeassistant/components/v2c/manifest.json +++ b/homeassistant/components/v2c/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/v2c", "iot_class": "local_polling", - "requirements": ["pytrydan==0.7.0"] + "requirements": ["pytrydan==0.8.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index bc1a13f19ab..6ba6601597d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2378,7 +2378,7 @@ pytradfri[async]==9.0.1 pytrafikverket==1.0.0 # homeassistant.components.v2c -pytrydan==0.7.0 +pytrydan==0.8.0 # homeassistant.components.usb pyudev==0.24.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 22de281aa61..da3ac4cd662 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1881,7 +1881,7 @@ pytradfri[async]==9.0.1 pytrafikverket==1.0.0 # homeassistant.components.v2c -pytrydan==0.7.0 +pytrydan==0.8.0 # homeassistant.components.usb pyudev==0.24.1 diff --git a/tests/components/v2c/snapshots/test_diagnostics.ambr b/tests/components/v2c/snapshots/test_diagnostics.ambr index a4f6cad4cc8..cc34cae87f8 100644 --- a/tests/components/v2c/snapshots/test_diagnostics.ambr +++ b/tests/components/v2c/snapshots/test_diagnostics.ambr @@ -18,7 +18,7 @@ 'unique_id': 'ABC123', 'version': 1, }), - 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7')", + 'data': "TrydanData(ID='ABC123', charge_state=, ready_state=, charge_power=1500.27, voltage_installation=None, charge_energy=1.8, slave_error=, charge_time=4355, house_power=0.0, fv_power=0.0, battery_power=0.0, paused=, locked=, timer=, intensity=6, dynamic=, min_intensity=6, max_intensity=16, pause_dynamic=, dynamic_power_mode=, contracted_power=4600, firmware_version='2.1.7', SSID=None, IP=None, signal_status=None)", 'host_status': 200, 'raw_data': 
'{"ID":"ABC123","ChargeState":2,"ReadyState":0,"ChargePower":1500.27,"ChargeEnergy":1.8,"SlaveError":4,"ChargeTime":4355,"HousePower":0.0,"FVPower":0.0,"BatteryPower":0.0,"Paused":0,"Locked":0,"Timer":0,"Intensity":6,"Dynamic":0,"MinIntensity":6,"MaxIntensity":16,"PauseDynamic":0,"FirmwareVersion":"2.1.7","DynamicPowerMode":2,"ContractedPower":4600}', }) From 8b4f607806e263eea3aa9e944328c5bd8633ce47 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 11:39:51 +0200 Subject: [PATCH 0178/1635] Fix implicit-return in plant (#122903) --- homeassistant/components/plant/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/plant/__init__.py b/homeassistant/components/plant/__init__.py index b549dee2887..2a5253d3faa 100644 --- a/homeassistant/components/plant/__init__.py +++ b/homeassistant/components/plant/__init__.py @@ -268,6 +268,7 @@ class Plant(Entity): min_value = self._config[params["min"]] if value < min_value: return f"{sensor_name} low" + return None def _check_max(self, sensor_name, value, params): """If configured, check the value against the defined maximum value.""" From 233c04a469b283a66f21e4acec7594377d527442 Mon Sep 17 00:00:00 2001 From: Alex MF Date: Wed, 31 Jul 2024 11:22:07 +0100 Subject: [PATCH 0179/1635] Add number entity for Ecovacs mower cut direction (#122598) --- homeassistant/components/ecovacs/icons.json | 3 + homeassistant/components/ecovacs/number.py | 16 ++- homeassistant/components/ecovacs/strings.json | 3 + .../ecovacs/snapshots/test_number.ambr | 111 ++++++++++++++++++ tests/components/ecovacs/test_init.py | 2 +- tests/components/ecovacs/test_number.py | 28 ++++- 6 files changed, 156 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index d129273e891..0c7178ced84 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -43,6 +43,9 @@ "clean_count": { "default": "mdi:counter" }, + "cut_direction": { + "default": "mdi:angle-acute" + }, "volume": { "default": "mdi:volume-high", "state": { diff --git a/homeassistant/components/ecovacs/number.py b/homeassistant/components/ecovacs/number.py index 3b24091ca34..2b9bdc1a425 100644 --- a/homeassistant/components/ecovacs/number.py +++ b/homeassistant/components/ecovacs/number.py @@ -7,14 +7,14 @@ from dataclasses import dataclass from typing import Generic from deebot_client.capabilities import CapabilitySet -from deebot_client.events import CleanCountEvent, VolumeEvent +from deebot_client.events import CleanCountEvent, CutDirectionEvent, VolumeEvent from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, NumberMode, ) -from homeassistant.const import EntityCategory +from homeassistant.const import DEGREE, EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -53,6 +53,18 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsNumberEntityDescription, ...] 
= ( native_max_value=10, native_step=1.0, ), + EcovacsNumberEntityDescription[CutDirectionEvent]( + capability_fn=lambda caps: caps.settings.cut_direction, + value_fn=lambda e: e.angle, + key="cut_direction", + translation_key="cut_direction", + entity_registry_enabled_default=False, + entity_category=EntityCategory.CONFIG, + native_min_value=0, + native_max_value=180, + native_step=1.0, + native_unit_of_measurement=DEGREE, + ), EcovacsNumberEntityDescription[CleanCountEvent]( capability_fn=lambda caps: caps.clean.count, value_fn=lambda e: e.count, diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index d501c333a03..d2e385c79c7 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -91,6 +91,9 @@ "clean_count": { "name": "Clean count" }, + "cut_direction": { + "name": "Cut direction" + }, "volume": { "name": "Volume" } diff --git a/tests/components/ecovacs/snapshots/test_number.ambr b/tests/components/ecovacs/snapshots/test_number.ambr index da8406491b4..c80132784e1 100644 --- a/tests/components/ecovacs/snapshots/test_number.ambr +++ b/tests/components/ecovacs/snapshots/test_number.ambr @@ -1,4 +1,115 @@ # serializer version: 1 +# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 180, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.goat_g1_cut_direction', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cut direction', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cut_direction', + 'unique_id': '8516fbb1-17f1-4194-0000000_cut_direction', + 'unit_of_measurement': '°', + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_cut_direction:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Cut direction', + 'max': 180, + 'min': 0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': '°', + }), + 'context': , + 'entity_id': 'number.goat_g1_cut_direction', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '45', + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_volume:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 11, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.goat_g1_volume', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Volume', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'volume', + 'unique_id': '8516fbb1-17f1-4194-0000000_volume', + 'unit_of_measurement': None, + }) +# --- +# name: test_number_entities[5xu9h3][number.goat_g1_volume:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Goat G1 Volume', + 'max': 
11, + 'min': 0, + 'mode': , + 'step': 1.0, + }), + 'context': , + 'entity_id': 'number.goat_g1_volume', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3', + }) +# --- # name: test_number_entities[yna5x1][number.ozmo_950_volume:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index c0c475217c1..ac4d5661a83 100644 --- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -136,7 +136,7 @@ async def test_devices_in_dr( ("device_fixture", "entities"), [ ("yna5x1", 26), - ("5xu9h3", 24), + ("5xu9h3", 25), ("123", 1), ], ) diff --git a/tests/components/ecovacs/test_number.py b/tests/components/ecovacs/test_number.py index d444d6510a8..a735863d40a 100644 --- a/tests/components/ecovacs/test_number.py +++ b/tests/components/ecovacs/test_number.py @@ -3,8 +3,8 @@ from dataclasses import dataclass from deebot_client.command import Command -from deebot_client.commands.json import SetVolume -from deebot_client.events import Event, VolumeEvent +from deebot_client.commands.json import SetCutDirection, SetVolume +from deebot_client.events import CutDirectionEvent, Event, VolumeEvent import pytest from syrupy import SnapshotAssertion @@ -53,8 +53,23 @@ class NumberTestCase: ), ], ), + ( + "5xu9h3", + [ + NumberTestCase( + "number.goat_g1_volume", VolumeEvent(3, 11), "3", 7, SetVolume(7) + ), + NumberTestCase( + "number.goat_g1_cut_direction", + CutDirectionEvent(45), + "45", + 97, + SetCutDirection(97), + ), + ], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_number_entities( hass: HomeAssistant, @@ -107,8 +122,12 @@ async def test_number_entities( "yna5x1", ["number.ozmo_950_volume"], ), + ( + "5xu9h3", + ["number.goat_g1_cut_direction", "number.goat_g1_volume"], + ), ], - ids=["yna5x1"], + ids=["yna5x1", "5xu9h3"], ) async def test_disabled_by_default_number_entities( hass: HomeAssistant, entity_registry: er.EntityRegistry, entity_ids: list[str] @@ -125,6 +144,7 @@ async def test_disabled_by_default_number_entities( @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize(("device_fixture"), ["yna5x1"]) async def test_volume_maximum( hass: HomeAssistant, controller: EcovacsController, From 02d4d1a75ba664bb76e8be9cd6d6faf1644017b9 Mon Sep 17 00:00:00 2001 From: Diogo Gomes Date: Wed, 31 Jul 2024 11:31:35 +0100 Subject: [PATCH 0180/1635] Adds new sensors and configuration entities to V2C Trydan (#122883) * Adds new controls and sensors * update snapshot * Update homeassistant/components/v2c/strings.json Co-authored-by: Charles Garwood * Add unit * move icons to icons.json * update snapshot * missing translation fix --------- Co-authored-by: Charles Garwood --- homeassistant/components/v2c/icons.json | 9 + homeassistant/components/v2c/number.py | 24 +++ homeassistant/components/v2c/sensor.py | 41 +++- homeassistant/components/v2c/strings.json | 18 ++ .../components/v2c/snapshots/test_sensor.ambr | 197 +++++++++++++++++- 5 files changed, 284 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/v2c/icons.json b/homeassistant/components/v2c/icons.json index 1b76b669956..6b0a41bf752 100644 --- a/homeassistant/components/v2c/icons.json +++ b/homeassistant/components/v2c/icons.json @@ -21,6 +21,15 @@ }, "battery_power": { "default": "mdi:home-battery" + }, + "ssid": { + "default": "mdi:wifi" + }, + "ip_address": { + "default": "mdi:ip" + }, + "signal_status": { + "default": 
"mdi:signal" } }, "switch": { diff --git a/homeassistant/components/v2c/number.py b/homeassistant/components/v2c/number.py index 2ff70226132..1540b098cf1 100644 --- a/homeassistant/components/v2c/number.py +++ b/homeassistant/components/v2c/number.py @@ -13,6 +13,7 @@ from homeassistant.components.number import ( NumberEntity, NumberEntityDescription, ) +from homeassistant.const import EntityCategory, UnitOfElectricCurrent from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -37,11 +38,34 @@ TRYDAN_NUMBER_SETTINGS = ( key="intensity", translation_key="intensity", device_class=NumberDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, native_min_value=MIN_INTENSITY, native_max_value=MAX_INTENSITY, value_fn=lambda evse_data: evse_data.intensity, update_fn=lambda evse, value: evse.intensity(value), ), + V2CSettingsNumberEntityDescription( + key="min_intensity", + translation_key="min_intensity", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_min_value=MIN_INTENSITY, + native_max_value=MAX_INTENSITY, + value_fn=lambda evse_data: evse_data.min_intensity, + update_fn=lambda evse, value: evse.min_intensity(value), + ), + V2CSettingsNumberEntityDescription( + key="max_intensity", + translation_key="max_intensity", + device_class=NumberDeviceClass.CURRENT, + entity_category=EntityCategory.CONFIG, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + native_min_value=MIN_INTENSITY, + native_max_value=MAX_INTENSITY, + value_fn=lambda evse_data: evse_data.max_intensity, + update_fn=lambda evse, value: evse.max_intensity(value), + ), ) diff --git a/homeassistant/components/v2c/sensor.py b/homeassistant/components/v2c/sensor.py index fc0cc0bfaa8..97853740e9d 100644 --- a/homeassistant/components/v2c/sensor.py +++ b/homeassistant/components/v2c/sensor.py @@ -15,7 +15,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.const import UnitOfEnergy, UnitOfPower, UnitOfTime +from homeassistant.const import ( + EntityCategory, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, + UnitOfTime, +) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -45,12 +51,20 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="charge_power", translation_key="charge_power", - icon="mdi:ev-station", native_unit_of_measurement=UnitOfPower.WATT, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, value_fn=lambda evse_data: evse_data.charge_power, ), + V2CSensorEntityDescription( + key="voltage_installation", + translation_key="voltage_installation", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.VOLTAGE, + value_fn=lambda evse_data: evse_data.voltage_installation, + entity_registry_enabled_default=False, + ), V2CSensorEntityDescription( key="charge_energy", translation_key="charge_energy", @@ -86,6 +100,7 @@ TRYDAN_SENSORS = ( V2CSensorEntityDescription( key="meter_error", translation_key="meter_error", + entity_category=EntityCategory.DIAGNOSTIC, value_fn=lambda evse_data: get_meter_value(evse_data.slave_error), entity_registry_enabled_default=False, device_class=SensorDeviceClass.ENUM, @@ -100,6 +115,28 @@ TRYDAN_SENSORS = ( 
value_fn=lambda evse_data: evse_data.battery_power, entity_registry_enabled_default=False, ), + V2CSensorEntityDescription( + key="ssid", + translation_key="ssid", + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda evse_data: evse_data.SSID, + entity_registry_enabled_default=False, + ), + V2CSensorEntityDescription( + key="ip_address", + translation_key="ip_address", + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda evse_data: evse_data.IP, + entity_registry_enabled_default=False, + ), + V2CSensorEntityDescription( + key="signal_status", + translation_key="signal_status", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda evse_data: evse_data.signal_status, + entity_registry_enabled_default=False, + ), ) diff --git a/homeassistant/components/v2c/strings.json b/homeassistant/components/v2c/strings.json index 3342652cfb4..d52b8f066f9 100644 --- a/homeassistant/components/v2c/strings.json +++ b/homeassistant/components/v2c/strings.json @@ -33,12 +33,21 @@ "number": { "intensity": { "name": "Intensity" + }, + "max_intensity": { + "name": "Max intensity" + }, + "min_intensity": { + "name": "Min intensity" } }, "sensor": { "charge_power": { "name": "Charge power" }, + "voltage_installation": { + "name": "Installation voltage" + }, "charge_energy": { "name": "Charge energy" }, @@ -93,6 +102,15 @@ "empty_message": "Empty message", "undefined_error": "Undefined error" } + }, + "ssid": { + "name": "SSID" + }, + "ip_address": { + "name": "IP address" + }, + "signal_status": { + "name": "Signal status" } }, "switch": { diff --git a/tests/components/v2c/snapshots/test_sensor.ambr b/tests/components/v2c/snapshots/test_sensor.ambr index cc8077333cb..7b9ae4a9ff3 100644 --- a/tests/components/v2c/snapshots/test_sensor.ambr +++ b/tests/components/v2c/snapshots/test_sensor.ambr @@ -126,7 +126,7 @@ 'options': dict({ }), 'original_device_class': , - 'original_icon': 'mdi:ev-station', + 'original_icon': None, 'original_name': 'Charge power', 'platform': 'v2c', 'previous_unique_id': None, @@ -141,7 +141,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'EVSE 1.1.1.1 Charge power', - 'icon': 'mdi:ev-station', 'state_class': , 'unit_of_measurement': , }), @@ -255,6 +254,103 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Installation voltage', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'voltage_installation', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_voltage_installation', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_installation_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'EVSE 1.1.1.1 Installation voltage', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_installation_voltage', + 'last_changed': , + 'last_reported': , 
+ 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ip_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ip_address', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ip_address', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ip_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 IP address', + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_sensor[sensor.evse_1_1_1_1_meter_error-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -304,7 +400,7 @@ 'device_id': , 'disabled_by': None, 'domain': 'sensor', - 'entity_category': None, + 'entity_category': , 'entity_id': 'sensor.evse_1_1_1_1_meter_error', 'has_entity_name': True, 'hidden_by': None, @@ -428,3 +524,98 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.evse_1_1_1_1_signal_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_signal_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Signal status', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'signal_status', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_signal_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_signal_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 Signal status', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_signal_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ssid-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.evse_1_1_1_1_ssid', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SSID', + 'platform': 'v2c', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ssid', + 'unique_id': 'da58ee91f38c2406c2a36d0a1a7f8569_ssid', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[sensor.evse_1_1_1_1_ssid-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'EVSE 1.1.1.1 SSID', + }), + 'context': , + 'entity_id': 'sensor.evse_1_1_1_1_ssid', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- From 6a45124878f66adf1b52ac642322f41b2e4ff2fc Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:38:15 +0200 Subject: [PATCH 0181/1635] Fix implicit-return in qnap (#122901) --- homeassistant/components/qnap/sensor.py | 34 +++++++++++++++++++++---- 1 file changed, 29 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/qnap/sensor.py b/homeassistant/components/qnap/sensor.py index e1739a900ce..526516bfcdd 100644 --- a/homeassistant/components/qnap/sensor.py +++ b/homeassistant/components/qnap/sensor.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from homeassistant import config_entries from homeassistant.components.sensor import ( @@ -348,6 +349,8 @@ class QNAPCPUSensor(QNAPSensor): if self.entity_description.key == "cpu_usage": return self.coordinator.data["system_stats"]["cpu"]["usage_percent"] + return None + class QNAPMemorySensor(QNAPSensor): """A QNAP sensor that monitors memory stats.""" @@ -370,20 +373,25 @@ class QNAPMemorySensor(QNAPSensor): if self.entity_description.key == "memory_percent_used": return used / total * 100 + return None + # Deprecated since Home Assistant 2024.6.0 # Can be removed completely in 2024.12.0 @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self.coordinator.data: data = self.coordinator.data["system_stats"]["memory"] size = round(float(data["total"]) / 1024, 2) return {ATTR_MEMORY_SIZE: f"{size} {UnitOfInformation.GIBIBYTES}"} + return None class QNAPNetworkSensor(QNAPSensor): """A QNAP sensor that monitors network stats.""" + monitor_device: str + @property def native_value(self): """Return the state of the sensor.""" @@ -404,10 +412,12 @@ class QNAPNetworkSensor(QNAPSensor): if self.entity_description.key == "network_rx": return data["rx"] + return None + # Deprecated since Home Assistant 2024.6.0 # Can be removed completely in 2024.12.0 @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self.coordinator.data: data = self.coordinator.data["system_stats"]["nics"][self.monitor_device] @@ -418,6 +428,7 @@ class QNAPNetworkSensor(QNAPSensor): ATTR_MAX_SPEED: data["max_speed"], ATTR_PACKETS_ERR: data["err_packets"], } + return None class QNAPSystemSensor(QNAPSensor): @@ -442,10 +453,12 @@ class QNAPSystemSensor(QNAPSensor): ) return dt_util.now() - uptime_duration + return None + # Deprecated since Home Assistant 2024.6.0 # Can be removed completely in 2024.12.0 @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self.coordinator.data: data = self.coordinator.data["system_stats"] @@ -459,11 +472,14 @@ class QNAPSystemSensor(QNAPSensor): ATTR_SERIAL: data["system"]["serial_number"], ATTR_UPTIME: f"{days:0>2d}d {hours:0>2d}h {minutes:0>2d}m", } + return None class QNAPDriveSensor(QNAPSensor): """A QNAP sensor that monitors HDD/SSD drive stats.""" + monitor_device: str + @property def native_value(self): """Return the state of the sensor.""" @@ -475,8 +491,10 @@ class QNAPDriveSensor(QNAPSensor): if self.entity_description.key == 
"drive_temp": return int(data["temp_c"]) if data["temp_c"] is not None else 0 + return None + @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self.coordinator.data: data = self.coordinator.data["smart_drive_health"][self.monitor_device] @@ -486,11 +504,14 @@ class QNAPDriveSensor(QNAPSensor): ATTR_SERIAL: data["serial"], ATTR_TYPE: data["type"], } + return None class QNAPVolumeSensor(QNAPSensor): """A QNAP sensor that monitors storage volume stats.""" + monitor_device: str + @property def native_value(self): """Return the state of the sensor.""" @@ -511,10 +532,12 @@ class QNAPVolumeSensor(QNAPSensor): if self.entity_description.key == "volume_percentage_used": return used_gb / total_gb * 100 + return None + # Deprecated since Home Assistant 2024.6.0 # Can be removed completely in 2024.12.0 @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self.coordinator.data: data = self.coordinator.data["volumes"][self.monitor_device] @@ -523,3 +546,4 @@ class QNAPVolumeSensor(QNAPSensor): return { ATTR_VOLUME_SIZE: f"{round(total_gb, 1)} {UnitOfInformation.GIBIBYTES}" } + return None From dbdb148e1225c7d79d6fc2fb122884566e1256ff Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:38:36 +0200 Subject: [PATCH 0182/1635] Fix implicit-return in plaato (#122902) --- homeassistant/components/plaato/entity.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/plaato/entity.py b/homeassistant/components/plaato/entity.py index d4c4622a998..7ab8367bd1d 100644 --- a/homeassistant/components/plaato/entity.py +++ b/homeassistant/components/plaato/entity.py @@ -1,5 +1,7 @@ """PlaatoEntity class.""" +from typing import Any + from pyplaato.models.device import PlaatoDevice from homeassistant.helpers import entity @@ -59,7 +61,7 @@ class PlaatoEntity(entity.Entity): return self._entry_data[SENSOR_DATA] @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the monitored installation.""" if self._attributes: return { @@ -68,6 +70,7 @@ class PlaatoEntity(entity.Entity): if plaato_key in self._attributes and self._attributes[plaato_key] is not None } + return None @property def available(self): From 47c96c52b1395d0ee671b430851e9be3fb0abab1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:39:01 +0200 Subject: [PATCH 0183/1635] Fix implicit-return in niko_home_control (#122904) --- homeassistant/components/niko_home_control/light.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/niko_home_control/light.py b/homeassistant/components/niko_home_control/light.py index 360b45cceed..b2d41f3a41e 100644 --- a/homeassistant/components/niko_home_control/light.py +++ b/homeassistant/components/niko_home_control/light.py @@ -120,3 +120,4 @@ class NikoHomeControlData: if state["id"] == aid: return state["value1"] _LOGGER.error("Failed to retrieve state off unknown light") + return None From c32f1efad0828b2a5fd0b53af40dc21ee851c74b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:39:21 +0200 Subject: [PATCH 0184/1635] Fix implicit-return in maxcube (#122907) --- homeassistant/components/maxcube/__init__.py | 4 ++-- 1 file 
changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/maxcube/__init__.py b/homeassistant/components/maxcube/__init__.py index 82cdc56e5d9..d4a3a45f441 100644 --- a/homeassistant/components/maxcube/__init__.py +++ b/homeassistant/components/maxcube/__init__.py @@ -98,7 +98,7 @@ class MaxCubeHandle: self.mutex = Lock() self._updatets = time.monotonic() - def update(self): + def update(self) -> None: """Pull the latest data from the MAX! Cube.""" # Acquire mutex to prevent simultaneous update from multiple threads with self.mutex: @@ -110,7 +110,7 @@ class MaxCubeHandle: self.cube.update() except TimeoutError: _LOGGER.error("Max!Cube connection failed") - return False + return self._updatets = time.monotonic() else: From 01f41a597e8d6709580f5809ccb4f32b084b8e2b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:39:39 +0200 Subject: [PATCH 0185/1635] Fix implicit-return in melissa (#122908) --- homeassistant/components/melissa/climate.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/components/melissa/climate.py b/homeassistant/components/melissa/climate.py index fcb0820a6f0..0ad663faa2a 100644 --- a/homeassistant/components/melissa/climate.py +++ b/homeassistant/components/melissa/climate.py @@ -86,18 +86,21 @@ class MelissaClimate(ClimateEntity): """Return the current fan mode.""" if self._cur_settings is not None: return self.melissa_fan_to_hass(self._cur_settings[self._api.FAN]) + return None @property def current_temperature(self): """Return the current temperature.""" if self._data: return self._data[self._api.TEMP] + return None @property def current_humidity(self): """Return the current humidity value.""" if self._data: return self._data[self._api.HUMIDITY] + return None @property def target_temperature_step(self): @@ -224,6 +227,7 @@ class MelissaClimate(ClimateEntity): if mode == HVACMode.FAN_ONLY: return self._api.MODE_FAN _LOGGER.warning("Melissa have no setting for %s mode", mode) + return None def hass_fan_to_melissa(self, fan): """Translate hass fan modes to melissa modes.""" @@ -236,3 +240,4 @@ class MelissaClimate(ClimateEntity): if fan == FAN_HIGH: return self._api.FAN_HIGH _LOGGER.warning("Melissa have no setting for %s fan mode", fan) + return None From 8b1a5276025106672bafd63868d822d0971bcbc9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:39:59 +0200 Subject: [PATCH 0186/1635] Fix implicit-return in meraki (#122909) --- homeassistant/components/meraki/device_tracker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/meraki/device_tracker.py b/homeassistant/components/meraki/device_tracker.py index 95ed2ba9089..0eb3742a878 100644 --- a/homeassistant/components/meraki/device_tracker.py +++ b/homeassistant/components/meraki/device_tracker.py @@ -88,6 +88,7 @@ class MerakiView(HomeAssistantView): _LOGGER.debug("No observations found") return None self._handle(request.app[KEY_HASS], data) + return None @callback def _handle(self, hass, data): From c4398efbbb90b2620186b27a06a0990773fc8086 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:40:30 +0200 Subject: [PATCH 0187/1635] Fix implicit-return in meteo_france (#122910) --- homeassistant/components/meteo_france/weather.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/meteo_france/weather.py b/homeassistant/components/meteo_france/weather.py index 
943d30fccfd..8305547afd3 100644 --- a/homeassistant/components/meteo_france/weather.py +++ b/homeassistant/components/meteo_france/weather.py @@ -165,6 +165,7 @@ class MeteoFranceWeather( wind_bearing = self.coordinator.data.current_forecast["wind"]["direction"] if wind_bearing != -1: return wind_bearing + return None def _forecast(self, mode: str) -> list[Forecast]: """Return the forecast.""" From cd552ceb2b15d5e65c4a5495f297004f1ce3d789 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:40:48 +0200 Subject: [PATCH 0188/1635] Fix implicit-return in mystrom (#122911) --- homeassistant/components/mystrom/binary_sensor.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/mystrom/binary_sensor.py b/homeassistant/components/mystrom/binary_sensor.py index 66ea2cc9679..17a1da75a96 100644 --- a/homeassistant/components/mystrom/binary_sensor.py +++ b/homeassistant/components/mystrom/binary_sensor.py @@ -67,6 +67,7 @@ class MyStromView(HomeAssistantView): else: new_state = self.buttons[entity_id].state == "off" self.buttons[entity_id].async_on_update(new_state) + return None class MyStromBinarySensor(BinarySensorEntity): From ed9c4e0c0dcb806aefb9426ada40cbdc5cea77e7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 12:41:10 +0200 Subject: [PATCH 0189/1635] Fix implicit-return in landisgyr_heat_meter (#122912) --- homeassistant/components/landisgyr_heat_meter/__init__.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/landisgyr_heat_meter/__init__.py b/homeassistant/components/landisgyr_heat_meter/__init__.py index 4e52e246d81..a2fc1320c2b 100644 --- a/homeassistant/components/landisgyr_heat_meter/__init__.py +++ b/homeassistant/components/landisgyr_heat_meter/__init__.py @@ -3,13 +3,14 @@ from __future__ import annotations import logging +from typing import Any import ultraheat_api from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity_registry import async_migrate_entries +from homeassistant.helpers.entity_registry import RegistryEntry, async_migrate_entries from .const import DOMAIN from .coordinator import UltraheatCoordinator @@ -55,7 +56,9 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> device_number = config_entry.data["device_number"] @callback - def update_entity_unique_id(entity_entry): + def update_entity_unique_id( + entity_entry: RegistryEntry, + ) -> dict[str, Any] | None: """Update unique ID of entity entry.""" if entity_entry.platform in entity_entry.unique_id: return { @@ -64,6 +67,7 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> f"{device_number}", ) } + return None await async_migrate_entries( hass, config_entry.entry_id, update_entity_unique_id From c8dccec9562ab6f5a4f1cb85aa529e47871486c2 Mon Sep 17 00:00:00 2001 From: Maikel Punie Date: Wed, 31 Jul 2024 12:48:08 +0200 Subject: [PATCH 0190/1635] Bump velbusaio to 2024.07.06 (#122905) bumpo velbusaio to 2024.07.06 --- homeassistant/components/velbus/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/velbus/manifest.json b/homeassistant/components/velbus/manifest.json index 4e9478ae575..c1cf2951bbd 100644 --- 
a/homeassistant/components/velbus/manifest.json +++ b/homeassistant/components/velbus/manifest.json @@ -13,7 +13,7 @@ "velbus-packet", "velbus-protocol" ], - "requirements": ["velbus-aio==2024.7.5"], + "requirements": ["velbus-aio==2024.7.6"], "usb": [ { "vid": "10CF", diff --git a/requirements_all.txt b/requirements_all.txt index 6ba6601597d..3c8d5357568 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2851,7 +2851,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.5 +velbus-aio==2024.7.6 # homeassistant.components.venstar venstarcolortouch==0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index da3ac4cd662..b95d1b58922 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2246,7 +2246,7 @@ vallox-websocket-api==5.3.0 vehicle==2.2.2 # homeassistant.components.velbus -velbus-aio==2024.7.5 +velbus-aio==2024.7.6 # homeassistant.components.venstar venstarcolortouch==0.19 From bf3a2cf393ab6f97bcfe959babb22f917b84e2a5 Mon Sep 17 00:00:00 2001 From: Yuxin Wang Date: Wed, 31 Jul 2024 07:01:48 -0400 Subject: [PATCH 0191/1635] Add graceful handling for LASTSTEST sensor in APCUPSD (#113125) * Add handling for LASTSTEST sensor * Set the state to unknown instead of unavailable * Use LASTSTEST constant and revise the logic to add it to the entity list * Use LASTSTEST constant --- homeassistant/components/apcupsd/const.py | 3 ++ homeassistant/components/apcupsd/sensor.py | 22 +++++++++++--- tests/components/apcupsd/test_sensor.py | 34 +++++++++++++++++++++- 3 files changed, 54 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/apcupsd/const.py b/homeassistant/components/apcupsd/const.py index e24a66fdca1..56bf229579d 100644 --- a/homeassistant/components/apcupsd/const.py +++ b/homeassistant/components/apcupsd/const.py @@ -4,3 +4,6 @@ from typing import Final DOMAIN: Final = "apcupsd" CONNECTION_TIMEOUT: int = 10 + +# Field name of last self test retrieved from apcupsd. 
+LASTSTEST: Final = "laststest" diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index 8d2c1ee2af1..ff72208e9ce 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -13,6 +13,7 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, + STATE_UNKNOWN, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, @@ -25,7 +26,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from .const import DOMAIN, LASTSTEST from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 @@ -156,8 +157,8 @@ SENSORS: dict[str, SensorEntityDescription] = { device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), - "laststest": SensorEntityDescription( - key="laststest", + LASTSTEST: SensorEntityDescription( + key=LASTSTEST, translation_key="last_self_test", ), "lastxfer": SensorEntityDescription( @@ -417,7 +418,12 @@ async def async_setup_entry( available_resources: set[str] = {k.lower() for k, _ in coordinator.data.items()} entities = [] - for resource in available_resources: + + # "laststest" is a special sensor that only appears when the APC UPS daemon has done a + # periodical (or manual) self test since last daemon restart. It might not be available + # when we set up the integration, and we do not know if it would ever be available. Here we + # add it anyway and mark it as unknown initially. + for resource in available_resources | {LASTSTEST}: if resource not in SENSORS: _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper()) continue @@ -473,6 +479,14 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity): def _update_attrs(self) -> None: """Update sensor attributes based on coordinator data.""" key = self.entity_description.key.upper() + # For most sensors the key will always be available for each refresh. However, some sensors + # (e.g., "laststest") will only appear after certain event occurs (e.g., a self test is + # performed) and may disappear again after certain event. So we mark the state as "unknown" + # when it becomes unknown after such events. + if key not in self.coordinator.data: + self._attr_native_value = STATE_UNKNOWN + return + self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key]) if not self.native_unit_of_measurement: self._attr_native_unit_of_measurement = inferred_unit diff --git a/tests/components/apcupsd/test_sensor.py b/tests/components/apcupsd/test_sensor.py index 0c7d174a5e8..0fe7f12ad27 100644 --- a/tests/components/apcupsd/test_sensor.py +++ b/tests/components/apcupsd/test_sensor.py @@ -15,6 +15,7 @@ from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, PERCENTAGE, STATE_UNAVAILABLE, + STATE_UNKNOWN, UnitOfElectricPotential, UnitOfPower, UnitOfTime, @@ -25,7 +26,7 @@ from homeassistant.setup import async_setup_component from homeassistant.util import slugify from homeassistant.util.dt import utcnow -from . import MOCK_STATUS, async_init_integration +from . 
import MOCK_MINIMAL_STATUS, MOCK_STATUS, async_init_integration from tests.common import async_fire_time_changed @@ -237,3 +238,34 @@ async def test_multiple_manual_update_entity(hass: HomeAssistant) -> None: blocking=True, ) assert mock_request_status.call_count == 1 + + +async def test_sensor_unknown(hass: HomeAssistant) -> None: + """Test if our integration can properly report certain sensors as unknown when they become so.""" + await async_init_integration(hass, status=MOCK_MINIMAL_STATUS) + + assert hass.states.get("sensor.mode").state == MOCK_MINIMAL_STATUS["UPSMODE"] + # Last self test sensor should be added even if our status does not report it initially (it is + # a sensor that appears only after a periodical or manual self test is performed). + assert hass.states.get("sensor.last_self_test") is not None + assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN + + # Simulate an event (a self test) such that "LASTSTEST" field is being reported, the state of + # the sensor should be properly updated with the corresponding value. + with patch("aioapcaccess.request_status") as mock_request_status: + mock_request_status.return_value = MOCK_MINIMAL_STATUS | { + "LASTSTEST": "1970-01-01 00:00:00 0000" + } + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + assert hass.states.get("sensor.last_self_test").state == "1970-01-01 00:00:00 0000" + + # Simulate another event (e.g., daemon restart) such that "LASTSTEST" is no longer reported. + with patch("aioapcaccess.request_status") as mock_request_status: + mock_request_status.return_value = MOCK_MINIMAL_STATUS + future = utcnow() + timedelta(minutes=2) + async_fire_time_changed(hass, future) + await hass.async_block_till_done() + # The state should become unknown again. 
+ assert hass.states.get("sensor.last_self_test").state == STATE_UNKNOWN From 3bf00822b0118d95fee626d42fe118ab488561df Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 13:42:07 +0200 Subject: [PATCH 0192/1635] Fix implicit-return in kodi (#122914) --- homeassistant/components/kodi/media_player.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/kodi/media_player.py b/homeassistant/components/kodi/media_player.py index 46dee891e3a..cdbe4e334cb 100644 --- a/homeassistant/components/kodi/media_player.py +++ b/homeassistant/components/kodi/media_player.py @@ -529,10 +529,11 @@ class KodiEntity(MediaPlayerEntity): return not self._connection.can_subscribe @property - def volume_level(self): + def volume_level(self) -> float | None: """Volume level of the media player (0..1).""" if "volume" in self._app_properties: return int(self._app_properties["volume"]) / 100.0 + return None @property def is_volume_muted(self): From 7c7b408df115d8cdbbd7a21243adf9496554a924 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:21:58 +0200 Subject: [PATCH 0193/1635] Fix implicit-return in homekit_controller (#122920) --- homeassistant/components/homekit_controller/fan.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/homekit_controller/fan.py b/homeassistant/components/homekit_controller/fan.py index db01147494f..93ebbba62b1 100644 --- a/homeassistant/components/homekit_controller/fan.py +++ b/homeassistant/components/homekit_controller/fan.py @@ -144,7 +144,8 @@ class BaseHomeKitFan(HomeKitEntity, FanEntity): async def async_set_percentage(self, percentage: int) -> None: """Set the speed of the fan.""" if percentage == 0: - return await self.async_turn_off() + await self.async_turn_off() + return await self.async_put_characteristics( { From f14471112d0904a5480528c3ec1097d11b5d8ba4 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Wed, 31 Jul 2024 05:36:02 -0700 Subject: [PATCH 0194/1635] Improve LLM tool quality by more clearly specifying device_class slots (#122723) * Limit intent / llm API device_class slots to only necessary services and limited set of values * Fix ruff errors * Run ruff format * Fix typing and improve output schema * Fix schema and improve flattening * Revert conftest * Revert recorder * Fix ruff format errors * Update using latest version of voluptuous --- homeassistant/components/cover/intent.py | 4 +- homeassistant/components/intent/__init__.py | 15 +++++ .../components/media_player/intent.py | 7 ++- homeassistant/helpers/intent.py | 42 ++++++++++--- .../test_default_agent_intents.py | 28 +++++++++ tests/components/cover/test_intent.py | 49 ++++++++++++--- tests/helpers/test_intent.py | 59 ++++++++++++++++++- 7 files changed, 183 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/cover/intent.py b/homeassistant/components/cover/intent.py index b38f698ac3d..7580cff063a 100644 --- a/homeassistant/components/cover/intent.py +++ b/homeassistant/components/cover/intent.py @@ -4,7 +4,7 @@ from homeassistant.const import SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER from homeassistant.core import HomeAssistant from homeassistant.helpers import intent -from . import DOMAIN +from . 
import DOMAIN, CoverDeviceClass INTENT_OPEN_COVER = "HassOpenCover" INTENT_CLOSE_COVER = "HassCloseCover" @@ -21,6 +21,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: "Opening {}", description="Opens a cover", platforms={DOMAIN}, + device_classes={CoverDeviceClass}, ), ) intent.async_register( @@ -32,5 +33,6 @@ async def async_setup_intents(hass: HomeAssistant) -> None: "Closing {}", description="Closes a cover", platforms={DOMAIN}, + device_classes={CoverDeviceClass}, ), ) diff --git a/homeassistant/components/intent/__init__.py b/homeassistant/components/intent/__init__.py index b1716a8d2d2..001f2515ebf 100644 --- a/homeassistant/components/intent/__init__.py +++ b/homeassistant/components/intent/__init__.py @@ -16,6 +16,7 @@ from homeassistant.components.cover import ( SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, + CoverDeviceClass, ) from homeassistant.components.http.data_validator import RequestDataValidator from homeassistant.components.lock import ( @@ -23,11 +24,14 @@ from homeassistant.components.lock import ( SERVICE_LOCK, SERVICE_UNLOCK, ) +from homeassistant.components.media_player import MediaPlayerDeviceClass +from homeassistant.components.switch import SwitchDeviceClass from homeassistant.components.valve import ( DOMAIN as VALVE_DOMAIN, SERVICE_CLOSE_VALVE, SERVICE_OPEN_VALVE, SERVICE_SET_VALVE_POSITION, + ValveDeviceClass, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -67,6 +71,13 @@ __all__ = [ "DOMAIN", ] +ONOFF_DEVICE_CLASSES = { + CoverDeviceClass, + ValveDeviceClass, + SwitchDeviceClass, + MediaPlayerDeviceClass, +} + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Intent component.""" @@ -85,6 +96,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: HOMEASSISTANT_DOMAIN, SERVICE_TURN_ON, description="Turns on/opens a device or entity", + device_classes=ONOFF_DEVICE_CLASSES, ), ) intent.async_register( @@ -94,6 +106,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: HOMEASSISTANT_DOMAIN, SERVICE_TURN_OFF, description="Turns off/closes a device or entity", + device_classes=ONOFF_DEVICE_CLASSES, ), ) intent.async_register( @@ -103,6 +116,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: HOMEASSISTANT_DOMAIN, SERVICE_TOGGLE, description="Toggles a device or entity", + device_classes=ONOFF_DEVICE_CLASSES, ), ) intent.async_register( @@ -358,6 +372,7 @@ class SetPositionIntentHandler(intent.DynamicServiceIntentHandler): }, description="Sets the position of a device or entity", platforms={COVER_DOMAIN, VALVE_DOMAIN}, + device_classes={CoverDeviceClass, ValveDeviceClass}, ) def get_domain_and_service( diff --git a/homeassistant/components/media_player/intent.py b/homeassistant/components/media_player/intent.py index 8a5d824112a..edfab2a668f 100644 --- a/homeassistant/components/media_player/intent.py +++ b/homeassistant/components/media_player/intent.py @@ -16,7 +16,7 @@ from homeassistant.const import ( from homeassistant.core import Context, HomeAssistant, State from homeassistant.helpers import intent -from . import ATTR_MEDIA_VOLUME_LEVEL, DOMAIN +from . 
import ATTR_MEDIA_VOLUME_LEVEL, DOMAIN, MediaPlayerDeviceClass from .const import MediaPlayerEntityFeature, MediaPlayerState INTENT_MEDIA_PAUSE = "HassMediaPause" @@ -69,6 +69,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: required_states={MediaPlayerState.PLAYING}, description="Skips a media player to the next item", platforms={DOMAIN}, + device_classes={MediaPlayerDeviceClass}, ), ) intent.async_register( @@ -82,6 +83,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: required_states={MediaPlayerState.PLAYING}, description="Replays the previous item for a media player", platforms={DOMAIN}, + device_classes={MediaPlayerDeviceClass}, ), ) intent.async_register( @@ -100,6 +102,7 @@ async def async_setup_intents(hass: HomeAssistant) -> None: }, description="Sets the volume of a media player", platforms={DOMAIN}, + device_classes={MediaPlayerDeviceClass}, ), ) @@ -118,6 +121,7 @@ class MediaPauseHandler(intent.ServiceIntentHandler): required_states={MediaPlayerState.PLAYING}, description="Pauses a media player", platforms={DOMAIN}, + device_classes={MediaPlayerDeviceClass}, ) self.last_paused = last_paused @@ -153,6 +157,7 @@ class MediaUnpauseHandler(intent.ServiceIntentHandler): required_states={MediaPlayerState.PAUSED}, description="Resumes a media player", platforms={DOMAIN}, + device_classes={MediaPlayerDeviceClass}, ) self.last_paused = last_paused diff --git a/homeassistant/helpers/intent.py b/homeassistant/helpers/intent.py index eeb160934ff..be9b57bf814 100644 --- a/homeassistant/helpers/intent.py +++ b/homeassistant/helpers/intent.py @@ -7,7 +7,7 @@ import asyncio from collections.abc import Callable, Collection, Coroutine, Iterable import dataclasses from dataclasses import dataclass, field -from enum import Enum, auto +from enum import Enum, StrEnum, auto from functools import cached_property from itertools import groupby import logging @@ -820,6 +820,7 @@ class DynamicServiceIntentHandler(IntentHandler): required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, + device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create Service Intent Handler.""" self.intent_type = intent_type @@ -829,6 +830,7 @@ class DynamicServiceIntentHandler(IntentHandler): self.required_states = required_states self.description = description self.platforms = platforms + self.device_classes = device_classes self.required_slots: _IntentSlotsType = {} if required_slots: @@ -851,13 +853,38 @@ class DynamicServiceIntentHandler(IntentHandler): @cached_property def slot_schema(self) -> dict: """Return a slot schema.""" + domain_validator = ( + vol.In(list(self.required_domains)) if self.required_domains else cv.string + ) slot_schema = { vol.Any("name", "area", "floor"): non_empty_string, - vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), - vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), - vol.Optional("preferred_area_id"): cv.string, - vol.Optional("preferred_floor_id"): cv.string, + vol.Optional("domain"): vol.All(cv.ensure_list, [domain_validator]), } + if self.device_classes: + # The typical way to match enums is with vol.Coerce, but we build a + # flat list to make the API simpler to describe programmatically + flattened_device_classes = vol.In( + [ + device_class.value + for device_class_enum in self.device_classes + for device_class in device_class_enum + ] + ) + slot_schema.update( + { + vol.Optional("device_class"): vol.All( + cv.ensure_list, + [flattened_device_classes], + ) + } 
+ ) + + slot_schema.update( + { + vol.Optional("preferred_area_id"): cv.string, + vol.Optional("preferred_floor_id"): cv.string, + } + ) if self.required_slots: slot_schema.update( @@ -910,9 +937,6 @@ class DynamicServiceIntentHandler(IntentHandler): if "domain" in slots: domains = set(slots["domain"]["value"]) - if self.required_domains: - # Must be a subset of intent's required domain(s) - domains.intersection_update(self.required_domains) if "device_class" in slots: device_classes = set(slots["device_class"]["value"]) @@ -1120,6 +1144,7 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states: set[str] | None = None, description: str | None = None, platforms: set[str] | None = None, + device_classes: set[type[StrEnum]] | None = None, ) -> None: """Create service handler.""" super().__init__( @@ -1132,6 +1157,7 @@ class ServiceIntentHandler(DynamicServiceIntentHandler): required_states=required_states, description=description, platforms=platforms, + device_classes=device_classes, ) self.domain = domain self.service = service diff --git a/tests/components/conversation/test_default_agent_intents.py b/tests/components/conversation/test_default_agent_intents.py index 8be25136df4..7bae9c43f70 100644 --- a/tests/components/conversation/test_default_agent_intents.py +++ b/tests/components/conversation/test_default_agent_intents.py @@ -123,6 +123,34 @@ async def test_cover_set_position( assert call.data == {"entity_id": entity_id, cover.ATTR_POSITION: 50} +async def test_cover_device_class( + hass: HomeAssistant, + init_components, +) -> None: + """Test the open position for covers by device class.""" + await cover_intent.async_setup_intents(hass) + + entity_id = f"{cover.DOMAIN}.front" + hass.states.async_set( + entity_id, STATE_CLOSED, attributes={"device_class": "garage"} + ) + async_expose_entity(hass, conversation.DOMAIN, entity_id, True) + + # Open service + calls = async_mock_service(hass, cover.DOMAIN, cover.SERVICE_OPEN_COVER) + result = await conversation.async_converse( + hass, "open the garage door", None, Context(), None + ) + await hass.async_block_till_done() + + response = result.response + assert response.response_type == intent.IntentResponseType.ACTION_DONE + assert response.speech["plain"]["speech"] == "Opened the garage" + assert len(calls) == 1 + call = calls[0] + assert call.data == {"entity_id": entity_id} + + async def test_valve_intents( hass: HomeAssistant, init_components, diff --git a/tests/components/cover/test_intent.py b/tests/components/cover/test_intent.py index 8ee621596db..1cf23c4c3df 100644 --- a/tests/components/cover/test_intent.py +++ b/tests/components/cover/test_intent.py @@ -1,5 +1,9 @@ """The tests for the cover platform.""" +from typing import Any + +import pytest + from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, DOMAIN, @@ -16,15 +20,24 @@ from homeassistant.setup import async_setup_component from tests.common import async_mock_service -async def test_open_cover_intent(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "garage door"}}), + ({"device_class": {"value": "garage"}}), + ], +) +async def test_open_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassOpenCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set(f"{DOMAIN}.garage_door", STATE_CLOSED) + hass.states.async_set( + f"{DOMAIN}.garage_door", STATE_CLOSED, attributes={"device_class": "garage"} + ) calls = async_mock_service(hass, DOMAIN, 
SERVICE_OPEN_COVER) response = await intent.async_handle( - hass, "test", cover_intent.INTENT_OPEN_COVER, {"name": {"value": "garage door"}} + hass, "test", cover_intent.INTENT_OPEN_COVER, slots ) await hass.async_block_till_done() @@ -36,18 +49,27 @@ async def test_open_cover_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -async def test_close_cover_intent(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "garage door"}}), + ({"device_class": {"value": "garage"}}), + ], +) +async def test_close_cover_intent(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassCloseCover intent.""" await cover_intent.async_setup_intents(hass) - hass.states.async_set(f"{DOMAIN}.garage_door", STATE_OPEN) + hass.states.async_set( + f"{DOMAIN}.garage_door", STATE_OPEN, attributes={"device_class": "garage"} + ) calls = async_mock_service(hass, DOMAIN, SERVICE_CLOSE_COVER) response = await intent.async_handle( hass, "test", cover_intent.INTENT_CLOSE_COVER, - {"name": {"value": "garage door"}}, + slots, ) await hass.async_block_till_done() @@ -59,13 +81,22 @@ async def test_close_cover_intent(hass: HomeAssistant) -> None: assert call.data == {"entity_id": f"{DOMAIN}.garage_door"} -async def test_set_cover_position(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("slots"), + [ + ({"name": {"value": "test cover"}, "position": {"value": 50}}), + ({"device_class": {"value": "shade"}, "position": {"value": 50}}), + ], +) +async def test_set_cover_position(hass: HomeAssistant, slots: dict[str, Any]) -> None: """Test HassSetPosition intent for covers.""" assert await async_setup_component(hass, "intent", {}) entity_id = f"{DOMAIN}.test_cover" hass.states.async_set( - entity_id, STATE_CLOSED, attributes={ATTR_CURRENT_POSITION: 0} + entity_id, + STATE_CLOSED, + attributes={ATTR_CURRENT_POSITION: 0, "device_class": "shade"}, ) calls = async_mock_service(hass, DOMAIN, SERVICE_SET_COVER_POSITION) @@ -73,7 +104,7 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: hass, "test", intent.INTENT_SET_POSITION, - {"name": {"value": "test cover"}, "position": {"value": 50}}, + slots, ) await hass.async_block_till_done() diff --git a/tests/helpers/test_intent.py b/tests/helpers/test_intent.py index c592fc50c0a..ae8c2ed65d0 100644 --- a/tests/helpers/test_intent.py +++ b/tests/helpers/test_intent.py @@ -765,7 +765,7 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N ) # Still fails even if we provide the domain - with pytest.raises(intent.MatchFailedError): + with pytest.raises(intent.InvalidSlotInfo): await intent.async_handle( hass, "test", @@ -777,7 +777,10 @@ async def test_service_intent_handler_required_domains(hass: HomeAssistant) -> N async def test_service_handler_empty_strings(hass: HomeAssistant) -> None: """Test that passing empty strings for filters fails in ServiceIntentHandler.""" handler = intent.ServiceIntentHandler( - "TestType", "light", "turn_on", "Turned {} on" + "TestType", + "light", + "turn_on", + "Turned {} on", ) intent.async_register(hass, handler) @@ -814,3 +817,55 @@ async def test_service_handler_no_filter(hass: HomeAssistant) -> None: "test", "TestType", ) + + +async def test_service_handler_device_classes( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test that passing empty strings for filters fails in ServiceIntentHandler.""" + + # Register a fake service and a switch intent handler + call_done = 
asyncio.Event() + calls = [] + + # Register a service that takes 0.1 seconds to execute + async def mock_service(call): + """Mock service.""" + call_done.set() + calls.append(call) + + hass.services.async_register("switch", "turn_on", mock_service) + + handler = intent.ServiceIntentHandler( + "TestType", + "switch", + "turn_on", + "Turned {} on", + device_classes={switch.SwitchDeviceClass}, + ) + intent.async_register(hass, handler) + + # Create a switch enttiy and match by device class + hass.states.async_set( + "switch.bedroom", "off", attributes={"device_class": "outlet"} + ) + hass.states.async_set("switch.living_room", "off") + + await intent.async_handle( + hass, + "test", + "TestType", + slots={"device_class": {"value": "outlet"}}, + ) + await call_done.wait() + assert [call.data.get("entity_id") for call in calls] == ["switch.bedroom"] + calls.clear() + + # Validate which device classes are allowed + with pytest.raises(intent.InvalidSlotInfo): + await intent.async_handle( + hass, + "test", + "TestType", + slots={"device_class": {"value": "light"}}, + ) From 8b96c7873f778f4873370ebd7ef9359aa16b48da Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 31 Jul 2024 14:36:53 +0200 Subject: [PATCH 0195/1635] Rename 'service' to 'action' in automations and scripts (#122845) --- homeassistant/const.py | 1 + homeassistant/helpers/config_validation.py | 25 ++- homeassistant/helpers/service.py | 6 +- tests/components/automation/test_init.py | 208 +++++++++++-------- tests/components/automation/test_recorder.py | 2 +- tests/components/script/test_blueprint.py | 2 +- tests/components/script/test_init.py | 124 ++++++----- tests/components/script/test_recorder.py | 2 +- tests/helpers/test_config_validation.py | 95 ++++++--- tests/helpers/test_script.py | 208 +++++++++++-------- tests/helpers/test_service.py | 16 +- 11 files changed, 414 insertions(+), 275 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index d0f1d4555d4..7d58bdb1e94 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -113,6 +113,7 @@ SUN_EVENT_SUNRISE: Final = "sunrise" # #### CONFIG #### CONF_ABOVE: Final = "above" CONF_ACCESS_TOKEN: Final = "access_token" +CONF_ACTION: Final = "action" CONF_ADDRESS: Final = "address" CONF_AFTER: Final = "after" CONF_ALIAS: Final = "alias" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index a28c81e6da9..cd6670dc597 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -34,6 +34,7 @@ from homeassistant.const import ( ATTR_FLOOR_ID, ATTR_LABEL_ID, CONF_ABOVE, + CONF_ACTION, CONF_ALIAS, CONF_ATTRIBUTE, CONF_BELOW, @@ -1325,11 +1326,30 @@ EVENT_SCHEMA = vol.Schema( } ) + +def _backward_compat_service_schema(value: Any | None) -> Any: + """Backward compatibility for service schemas.""" + + if not isinstance(value, dict): + return value + + # `service` has been renamed to `action` + if CONF_SERVICE in value: + if CONF_ACTION in value: + raise vol.Invalid( + "Cannot specify both 'service' and 'action'. Please use 'action' only." 
+ ) + value[CONF_ACTION] = value.pop(CONF_SERVICE) + + return value + + SERVICE_SCHEMA = vol.All( + _backward_compat_service_schema, vol.Schema( { **SCRIPT_ACTION_BASE_SCHEMA, - vol.Exclusive(CONF_SERVICE, "service name"): vol.Any( + vol.Exclusive(CONF_ACTION, "service name"): vol.Any( service, dynamic_template ), vol.Exclusive(CONF_SERVICE_TEMPLATE, "service name"): vol.Any( @@ -1348,7 +1368,7 @@ SERVICE_SCHEMA = vol.All( vol.Remove("metadata"): dict, } ), - has_at_least_one_key(CONF_SERVICE, CONF_SERVICE_TEMPLATE), + has_at_least_one_key(CONF_ACTION, CONF_SERVICE_TEMPLATE), ) NUMERIC_STATE_THRESHOLD_SCHEMA = vol.Any( @@ -1844,6 +1864,7 @@ ACTIONS_MAP = { CONF_WAIT_FOR_TRIGGER: SCRIPT_ACTION_WAIT_FOR_TRIGGER, CONF_VARIABLES: SCRIPT_ACTION_VARIABLES, CONF_IF: SCRIPT_ACTION_IF, + CONF_ACTION: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE: SCRIPT_ACTION_CALL_SERVICE, CONF_SERVICE_TEMPLATE: SCRIPT_ACTION_CALL_SERVICE, CONF_STOP: SCRIPT_ACTION_STOP, diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 35c682437cb..58cd4657301 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -20,8 +20,8 @@ from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FLOOR_ID, ATTR_LABEL_ID, + CONF_ACTION, CONF_ENTITY_ID, - CONF_SERVICE, CONF_SERVICE_DATA, CONF_SERVICE_DATA_TEMPLATE, CONF_SERVICE_TEMPLATE, @@ -358,8 +358,8 @@ def async_prepare_call_from_config( f"Invalid config for calling service: {ex}" ) from ex - if CONF_SERVICE in config: - domain_service = config[CONF_SERVICE] + if CONF_ACTION in config: + domain_service = config[CONF_ACTION] else: domain_service = config[CONF_SERVICE_TEMPLATE] diff --git a/tests/components/automation/test_init.py b/tests/components/automation/test_init.py index d8078984630..d8f04f10458 100644 --- a/tests/components/automation/test_init.py +++ b/tests/components/automation/test_init.py @@ -88,7 +88,7 @@ async def test_service_data_not_a_dict( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "data": 100}, + "action": {"action": "test.automation", "data": 100}, } }, ) @@ -111,7 +111,7 @@ async def test_service_data_single_template( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": "{{ { 'foo': 'bar' } }}", }, } @@ -136,7 +136,7 @@ async def test_service_specify_data( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": { "some": ( "{{ trigger.platform }} - {{ trigger.event.event_type }}" @@ -170,7 +170,7 @@ async def test_service_specify_entity_id( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -192,7 +192,7 @@ async def test_service_specify_entity_id_list( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -216,7 +216,7 @@ async def test_two_triggers(hass: HomeAssistant, calls: list[ServiceCall]) -> No {"platform": "event", "event_type": "test_event"}, {"platform": "state", "entity_id": "test.entity"}, ], - "action": {"service": "test.automation"}, + "action": 
{"action": "test.automation"}, } }, ) @@ -245,7 +245,7 @@ async def test_trigger_service_ignoring_condition( "entity_id": "non.existing", "above": "1", }, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -301,7 +301,7 @@ async def test_two_conditions_with_and( "below": 150, }, ], - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -333,7 +333,7 @@ async def test_shorthand_conditions_template( automation.DOMAIN: { "trigger": [{"platform": "event", "event_type": "test_event"}], "condition": "{{ is_state('test.entity', 'hello') }}", - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -360,11 +360,11 @@ async def test_automation_list_setting( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "trigger": {"platform": "event", "event_type": "test_event_2"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] }, @@ -390,8 +390,8 @@ async def test_automation_calling_two_actions( automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, "action": [ - {"service": "test.automation", "data": {"position": 0}}, - {"service": "test.automation", "data": {"position": 1}}, + {"action": "test.automation", "data": {"position": 0}}, + {"action": "test.automation", "data": {"position": 1}}, ], } }, @@ -420,7 +420,7 @@ async def test_shared_context(hass: HomeAssistant, calls: list[ServiceCall]) -> { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] }, @@ -486,7 +486,7 @@ async def test_services(hass: HomeAssistant, calls: list[ServiceCall]) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, } }, ) @@ -569,7 +569,7 @@ async def test_reload_config_service( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -597,7 +597,7 @@ async def test_reload_config_service( "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -650,7 +650,7 @@ async def test_reload_config_when_invalid_config( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -690,7 +690,7 @@ async def test_reload_config_handles_load_fails( "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -735,7 +735,7 @@ async def test_automation_stops( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -811,7 +811,7 @@ async def test_reload_unchanged_does_not_stop( "action": [ {"event": 
"running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -858,7 +858,7 @@ async def test_reload_single_unchanged_does_not_stop( "action": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.automation"}, + {"action": "test.automation"}, ], } } @@ -905,7 +905,7 @@ async def test_reload_single_add_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], } } assert await async_setup_component(hass, automation.DOMAIN, config1) @@ -942,25 +942,25 @@ async def test_reload_single_parallel_calls( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_sun"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "moon", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_moon"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "mars", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_mars"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "id": "venus", "alias": "goodbye", "trigger": {"platform": "event", "event_type": "test_event_venus"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1055,7 +1055,7 @@ async def test_reload_single_remove_automation( "id": "sun", "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], } } config2 = {automation.DOMAIN: {}} @@ -1093,12 +1093,12 @@ async def test_reload_moved_automation_without_alias( automation.DOMAIN: [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "automation_with_alias", "trigger": {"platform": "event", "event_type": "test_event2"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1149,17 +1149,17 @@ async def test_reload_identical_automations_without_id( { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, { "alias": "dolly", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, ] } @@ -1246,12 +1246,12 @@ async def test_reload_identical_automations_without_id( [ { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, # An automation using templates { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "{{ 'test.automation' }}"}], + "action": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1278,13 +1278,13 @@ async def test_reload_identical_automations_without_id( { "id": "sun", "trigger": {"platform": "event", "event_type": 
"test_event"}, - "action": [{"service": "test.automation"}], + "action": [{"action": "test.automation"}], }, # An automation using templates { "id": "sun", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": [{"service": "{{ 'test.automation' }}"}], + "action": [{"action": "{{ 'test.automation' }}"}], }, # An automation using blueprint { @@ -1424,12 +1424,12 @@ async def test_automation_restore_state(hass: HomeAssistant) -> None: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event_hello"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "bye", "trigger": {"platform": "event", "event_type": "test_event_bye"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] } @@ -1474,7 +1474,7 @@ async def test_initial_value_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1499,7 +1499,7 @@ async def test_initial_value_on(hass: HomeAssistant) -> None: "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": ["hello.world", "hello.world2"], }, } @@ -1528,7 +1528,7 @@ async def test_initial_value_off_but_restore_on(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1553,7 +1553,7 @@ async def test_initial_value_on_but_restore_off(hass: HomeAssistant) -> None: "alias": "hello", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1576,7 +1576,7 @@ async def test_no_initial_value_and_restore_off(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1600,7 +1600,7 @@ async def test_automation_is_on_if_no_initial_state_or_restore( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1623,7 +1623,7 @@ async def test_automation_not_trigger_on_bootstrap(hass: HomeAssistant) -> None: automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1714,7 +1714,7 @@ async def test_automation_bad_config_validation( "alias": "good_automation", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -1756,7 +1756,7 @@ async def test_automation_bad_config_validation( "alias": 
"bad_automation", "trigger": {"platform": "event", "event_type": "test_event2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"event": "{{ trigger.event.event_type }}"}, }, } @@ -1785,7 +1785,7 @@ async def test_automation_with_error_in_script( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) @@ -1811,7 +1811,7 @@ async def test_automation_with_error_in_script_2( automation.DOMAIN: { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": None, "entity_id": "hello.world"}, + "action": {"action": None, "entity_id": "hello.world"}, } }, ) @@ -1842,19 +1842,19 @@ async def test_automation_restore_last_triggered_with_initial_state( "alias": "hello", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "bye", "initial_state": "off", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, { "alias": "solong", "initial_state": "on", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation"}, + "action": {"action": "test.automation"}, }, ] } @@ -2013,11 +2013,11 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -2027,15 +2027,15 @@ async def test_extraction_functions( "type": "turn_on", }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, ], @@ -2087,7 +2087,7 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2140,7 +2140,7 @@ async def test_extraction_functions( }, "action": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -2150,27 +2150,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -2289,7 +2289,7 @@ async def test_automation_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2308,7 +2308,7 @@ async def test_automation_variables( "value_template": "{{ 
trigger.event.data.pass_condition }}", }, "action": { - "service": "test.automation", + "action": "test.automation", }, }, { @@ -2317,7 +2317,7 @@ async def test_automation_variables( }, "trigger": {"platform": "event", "event_type": "test_event_3"}, "action": { - "service": "test.automation", + "action": "test.automation", }, }, ] @@ -2373,7 +2373,7 @@ async def test_automation_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2391,7 +2391,7 @@ async def test_automation_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event_2"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "value": "{{ test_var }}", "event_type": "{{ event_type }}", @@ -2438,7 +2438,7 @@ async def test_automation_bad_trigger_variables( }, "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", }, }, ] @@ -2465,7 +2465,7 @@ async def test_automation_this_var_always( { "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data": { "this_template": "{{this.entity_id}}", }, @@ -2542,7 +2542,7 @@ async def test_blueprint_automation( "Blueprint 'Call service based on event' generated invalid automation", ( "value should be a string for dictionary value @" - " data['action'][0]['service']" + " data['action'][0]['action']" ), ), ], @@ -2640,7 +2640,7 @@ async def test_trigger_service(hass: HomeAssistant, calls: list[ServiceCall]) -> "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, "action": { - "service": "test.automation", + "action": "test.automation", "data_template": {"trigger": "{{ trigger }}"}, }, } @@ -2679,14 +2679,14 @@ async def test_trigger_condition_implicit_id( { "conditions": {"condition": "trigger", "id": [0, "2"]}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "1"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "two"}, }, }, @@ -2730,14 +2730,14 @@ async def test_trigger_condition_explicit_id( { "conditions": {"condition": "trigger", "id": "one"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "one"}, }, }, { "conditions": {"condition": "trigger", "id": "two"}, "sequence": { - "service": "test.automation", + "action": "test.automation", "data": {"param": "two"}, }, }, @@ -2822,8 +2822,8 @@ async def test_recursive_automation_starting_script( f" {automation_runs} }}}}" ) }, - {"service": "script.script1"}, - {"service": "test.script_done"}, + {"action": "script.script1"}, + {"action": "test.script_done"}, ], }, } @@ -2840,9 +2840,9 @@ async def test_recursive_automation_starting_script( {"platform": "event", "event_type": "trigger_automation"}, ], "action": [ - {"service": "test.automation_started"}, + {"action": "test.automation_started"}, {"delay": 0.001}, - {"service": "script.script1"}, + {"action": "script.script1"}, ], } }, @@ -2923,7 +2923,7 @@ async def test_recursive_automation( ], "action": [ {"event": "trigger_automation"}, - {"service": "test.automation_done"}, + {"action": "test.automation_done"}, ], } }, @@ -2985,7 +2985,7 @@ async def 
test_recursive_automation_restart_mode( ], "action": [ {"event": "trigger_automation"}, - {"service": "test.automation_done"}, + {"action": "test.automation_done"}, ], } }, @@ -3021,7 +3021,7 @@ async def test_websocket_config( config = { "alias": "hello", "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "data": 100}, + "action": {"action": "test.automation", "data": 100}, } assert await async_setup_component( hass, automation.DOMAIN, {automation.DOMAIN: config} @@ -3095,7 +3095,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non "from": "on", }, "action": { - "service": "automation.turn_off", + "action": "automation.turn_off", "target": { "entity_id": "automation.automation_1", }, @@ -3118,7 +3118,7 @@ async def test_automation_turns_off_other_automation(hass: HomeAssistant) -> Non }, }, "action": { - "service": "persistent_notification.create", + "action": "persistent_notification.create", "metadata": {}, "data": { "message": "Test race", @@ -3185,7 +3185,7 @@ async def test_two_automations_call_restart_script_same_time( "fire_toggle": { "sequence": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test_1"}, } ] @@ -3206,7 +3206,7 @@ async def test_two_automations_call_restart_script_same_time( "to": "on", }, "action": { - "service": "script.fire_toggle", + "action": "script.fire_toggle", }, "id": "automation_0", "mode": "single", @@ -3218,7 +3218,7 @@ async def test_two_automations_call_restart_script_same_time( "to": "on", }, "action": { - "service": "script.fire_toggle", + "action": "script.fire_toggle", }, "id": "automation_1", "mode": "single", @@ -3301,3 +3301,29 @@ async def test_two_automation_call_restart_script_right_after_each_other( hass.states.async_set("input_boolean.test_2", "on") await hass.async_block_till_done() assert len(events) == 1 + + +async def test_action_service_backward_compatibility( + hass: HomeAssistant, calls: list[ServiceCall] +) -> None: + """Test we can still use the service call method.""" + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: { + "trigger": {"platform": "event", "event_type": "test_event"}, + "action": { + "service": "test.automation", + "entity_id": "hello.world", + "data": {"event": "{{ trigger.event.event_type }}"}, + }, + } + }, + ) + + hass.bus.async_fire("test_event") + await hass.async_block_till_done() + assert len(calls) == 1 + assert calls[0].data.get(ATTR_ENTITY_ID) == ["hello.world"] + assert calls[0].data.get("event") == "test_event" diff --git a/tests/components/automation/test_recorder.py b/tests/components/automation/test_recorder.py index af3d0c41151..be354abe9d2 100644 --- a/tests/components/automation/test_recorder.py +++ b/tests/components/automation/test_recorder.py @@ -40,7 +40,7 @@ async def test_exclude_attributes( { automation.DOMAIN: { "trigger": {"platform": "event", "event_type": "test_event"}, - "action": {"service": "test.automation", "entity_id": "hello.world"}, + "action": {"action": "test.automation", "entity_id": "hello.world"}, } }, ) diff --git a/tests/components/script/test_blueprint.py b/tests/components/script/test_blueprint.py index b956aa588cb..aef22b93bcf 100644 --- a/tests/components/script/test_blueprint.py +++ b/tests/components/script/test_blueprint.py @@ -74,7 +74,7 @@ async def test_confirmable_notification( "message": "Throw ring in mountain?", "confirm_action": [ { - "service": 
"homeassistant.turn_on", + "action": "homeassistant.turn_on", "target": {"entity_id": "mount.doom"}, } ], diff --git a/tests/components/script/test_init.py b/tests/components/script/test_init.py index 8362dfbcfb2..a5eda3757a9 100644 --- a/tests/components/script/test_init.py +++ b/tests/components/script/test_init.py @@ -85,7 +85,7 @@ async def test_passing_variables(hass: HomeAssistant) -> None: "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } @@ -115,8 +115,14 @@ async def test_passing_variables(hass: HomeAssistant) -> None: @pytest.mark.parametrize("toggle", [False, True]) -async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: - """Verify turn_on, turn_off & toggle services.""" +@pytest.mark.parametrize("action_schema_variations", ["action", "service"]) +async def test_turn_on_off_toggle( + hass: HomeAssistant, toggle: bool, action_schema_variations: str +) -> None: + """Verify turn_on, turn_off & toggle services. + + Ensures backward compatibility with the old service action schema is maintained. + """ event = "test_event" event_mock = Mock() @@ -132,9 +138,15 @@ async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: async_track_state_change(hass, ENTITY_ID, state_listener, to_state="on") if toggle: - turn_off_step = {"service": "script.toggle", "entity_id": ENTITY_ID} + turn_off_step = { + action_schema_variations: "script.toggle", + "entity_id": ENTITY_ID, + } else: - turn_off_step = {"service": "script.turn_off", "entity_id": ENTITY_ID} + turn_off_step = { + action_schema_variations: "script.turn_off", + "entity_id": ENTITY_ID, + } assert await async_setup_component( hass, "script", @@ -165,7 +177,7 @@ async def test_turn_on_off_toggle(hass: HomeAssistant, toggle) -> None: invalid_configs = [ {"test": {}}, {"test hello world": {"sequence": [{"event": "bla"}]}}, - {"test": {"sequence": {"event": "test_event", "service": "homeassistant.turn_on"}}}, + {"test": {"sequence": {"event": "test_event", "action": "homeassistant.turn_on"}}}, ] @@ -180,7 +192,7 @@ invalid_configs = [ "test": { "sequence": { "event": "test_event", - "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", } } }, @@ -235,7 +247,7 @@ async def test_bad_config_validation_critical( "good_script": { "alias": "good_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -300,7 +312,7 @@ async def test_bad_config_validation( "good_script": { "alias": "good_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -342,7 +354,7 @@ async def test_bad_config_validation( object_id: { "alias": "bad_script", "sequence": { - "service": "test.automation", + "action": "test.automation", "entity_id": "hello.world", }, }, @@ -430,7 +442,7 @@ async def test_reload_unchanged_does_not_stop( "sequence": [ {"event": "running"}, {"wait_template": "{{ is_state('test.entity', 'goodbye') }}"}, - {"service": "test.script"}, + {"action": "test.script"}, ], } } @@ -473,13 +485,13 @@ async def test_reload_unchanged_does_not_stop( [ { "test": { - "sequence": [{"service": "test.script"}], + "sequence": [{"action": "test.script"}], } }, # A script using templates { "test": { - "sequence": [{"service": "{{ 'test.script' }}"}], + "sequence": [{"action": "{{ 'test.script' }}"}], } }, # A script using blueprint @@ -666,7 +678,7 @@ async def test_logging_script_error( assert await 
async_setup_component( hass, "script", - {"script": {"hello": {"sequence": [{"service": "non.existing"}]}}}, + {"script": {"hello": {"sequence": [{"action": "non.existing"}]}}}, ) with pytest.raises(ServiceNotFound) as err: await hass.services.async_call("script", "hello", blocking=True) @@ -690,7 +702,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: """Test async_set_service_schema for the script integration.""" script_config = { DOMAIN: { - "test1": {"sequence": [{"service": "homeassistant.restart"}]}, + "test1": {"sequence": [{"action": "homeassistant.restart"}]}, "test2": { "description": "test2", "fields": { @@ -699,7 +711,7 @@ async def test_async_get_descriptions_script(hass: HomeAssistant) -> None: "example": "param_example", } }, - "sequence": [{"service": "homeassistant.restart"}], + "sequence": [{"action": "homeassistant.restart"}], }, } } @@ -795,11 +807,11 @@ async def test_extraction_functions( "test1": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_first"}, }, { @@ -809,15 +821,15 @@ async def test_extraction_functions( "device_id": device_in_both.id, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, ] @@ -825,7 +837,7 @@ async def test_extraction_functions( "test2": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -851,7 +863,7 @@ async def test_extraction_functions( "test3": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.in_both"}, }, { @@ -861,27 +873,27 @@ async def test_extraction_functions( }, {"scene": "scene.hello"}, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"area_id": "area-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"floor_id": "floor-in-last"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-both"}, }, { - "service": "test.test", + "action": "test.test", "target": {"label_id": "label-in-last"}, }, ], @@ -1028,11 +1040,11 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: """Test calling script concurrently or not.""" if concurrently: call_script_2 = { - "service": "script.turn_on", + "action": "script.turn_on", "data": {"entity_id": "script.script2"}, } else: - call_script_2 = {"service": "script.script2"} + call_script_2 = {"action": "script.script2"} assert await async_setup_component( hass, "script", @@ -1045,17 +1057,17 @@ async def test_concurrent_script(hass: HomeAssistant, concurrently) -> None: { "wait_template": "{{ is_state('input_boolean.test1', 'on') }}" }, - {"service": "test.script", "data": {"value": "script1"}}, + {"action": "test.script", "data": {"value": "script1"}}, ], }, "script2": { "mode": "parallel", "sequence": [ - {"service": "test.script", "data": {"value": "script2a"}}, + {"action": "test.script", "data": {"value": "script2a"}}, { "wait_template": "{{ 
is_state('input_boolean.test2', 'on') }}" }, - {"service": "test.script", "data": {"value": "script2b"}}, + {"action": "test.script", "data": {"value": "script2b"}}, ], }, } @@ -1126,7 +1138,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", "templated_config_var": "{{ templated_config_var }}", @@ -1142,7 +1154,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1155,7 +1167,7 @@ async def test_script_variables( }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "value": "{{ test_var }}", }, @@ -1221,7 +1233,7 @@ async def test_script_this_var_always( "script1": { "sequence": [ { - "service": "test.script", + "action": "test.script", "data": { "this_template": "{{this.entity_id}}", }, @@ -1306,8 +1318,8 @@ async def test_recursive_script( "script1": { "mode": script_mode, "sequence": [ - {"service": "script.script1"}, - {"service": "test.script"}, + {"action": "script.script1"}, + {"action": "test.script"}, ], }, } @@ -1356,26 +1368,26 @@ async def test_recursive_script_indirect( "script1": { "mode": script_mode, "sequence": [ - {"service": "script.script2"}, + {"action": "script.script2"}, ], }, "script2": { "mode": script_mode, "sequence": [ - {"service": "script.script3"}, + {"action": "script.script3"}, ], }, "script3": { "mode": script_mode, "sequence": [ - {"service": "script.script4"}, + {"action": "script.script4"}, ], }, "script4": { "mode": script_mode, "sequence": [ - {"service": "script.script1"}, - {"service": "test.script"}, + {"action": "script.script1"}, + {"action": "test.script"}, ], }, } @@ -1440,10 +1452,10 @@ async def test_recursive_script_turn_on( "condition": "template", "value_template": "{{ request == 'step_2' }}", }, - "sequence": {"service": "test.script_done"}, + "sequence": {"action": "test.script_done"}, }, "default": { - "service": "script.turn_on", + "action": "script.turn_on", "data": { "entity_id": "script.script1", "variables": {"request": "step_2"}, @@ -1451,7 +1463,7 @@ async def test_recursive_script_turn_on( }, }, { - "service": "script.turn_on", + "action": "script.turn_on", "data": {"entity_id": "script.script1"}, }, ], @@ -1513,7 +1525,7 @@ async def test_websocket_config( """Test config command.""" config = { "alias": "hello", - "sequence": [{"service": "light.turn_on"}], + "sequence": [{"action": "light.turn_on"}], } assert await async_setup_component( hass, @@ -1577,7 +1589,7 @@ async def test_script_service_changed_entity_id( "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"entity_id": "{{ this.entity_id }}"}, } } @@ -1658,7 +1670,7 @@ async def test_blueprint_script(hass: HomeAssistant, calls: list[ServiceCall]) - "a_number": 5, }, "Blueprint 'Call service' generated invalid script", - "value should be a string for dictionary value @ data['sequence'][0]['service']", + "value should be a string for dictionary value @ data['sequence'][0]['action']", ), ], ) @@ -1839,10 +1851,10 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "sequence": [ { "parallel": [ - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, - {"service": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, + {"action": "script.test_sub"}, ] } ] @@ -1850,7 
+1862,7 @@ async def test_script_queued_mode(hass: HomeAssistant) -> None: "test_sub": { "mode": "queued", "sequence": [ - {"service": "test.simulated_remote"}, + {"action": "test.simulated_remote"}, ], }, } diff --git a/tests/components/script/test_recorder.py b/tests/components/script/test_recorder.py index ca915cede6f..6358093014a 100644 --- a/tests/components/script/test_recorder.py +++ b/tests/components/script/test_recorder.py @@ -52,7 +52,7 @@ async def test_exclude_attributes( "script": { "test": { "sequence": { - "service": "test.script", + "action": "test.script", "data_template": {"hello": "{{ greeting }}"}, } } diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index cde319c0b87..cf72012a1f1 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -6,6 +6,7 @@ import enum import logging import os from socket import _GLOBAL_DEFAULT_TIMEOUT +from typing import Any from unittest.mock import Mock, patch import uuid @@ -416,27 +417,9 @@ def test_service() -> None: schema("homeassistant.turn_on") -def test_service_schema(hass: HomeAssistant) -> None: - """Test service_schema validation.""" - options = ( - {}, - None, - { - "service": "homeassistant.turn_on", - "service_template": "homeassistant.turn_on", - }, - {"data": {"entity_id": "light.kitchen"}}, - {"service": "homeassistant.turn_on", "data": None}, - { - "service": "homeassistant.turn_on", - "data_template": {"brightness": "{{ no_end"}, - }, - ) - for value in options: - with pytest.raises(vol.MultipleInvalid): - cv.SERVICE_SCHEMA(value) - - options = ( +@pytest.mark.parametrize( + "config", + [ {"service": "homeassistant.turn_on"}, {"service": "homeassistant.turn_on", "entity_id": "light.kitchen"}, {"service": "light.turn_on", "entity_id": "all"}, @@ -450,14 +433,70 @@ def test_service_schema(hass: HomeAssistant) -> None: "alias": "turn on kitchen lights", }, {"service": "scene.turn_on", "metadata": {}}, - ) - for value in options: - cv.SERVICE_SCHEMA(value) + {"action": "homeassistant.turn_on"}, + {"action": "homeassistant.turn_on", "entity_id": "light.kitchen"}, + {"action": "light.turn_on", "entity_id": "all"}, + { + "action": "homeassistant.turn_on", + "entity_id": ["light.kitchen", "light.ceiling"], + }, + { + "action": "light.turn_on", + "entity_id": "all", + "alias": "turn on kitchen lights", + }, + {"action": "scene.turn_on", "metadata": {}}, + ], +) +def test_service_schema(hass: HomeAssistant, config: dict[str, Any]) -> None: + """Test service_schema validation.""" + validated = cv.SERVICE_SCHEMA(config) - # Check metadata is removed from the validated output - assert cv.SERVICE_SCHEMA({"service": "scene.turn_on", "metadata": {}}) == { - "service": "scene.turn_on" - } + # Ensure metadata is removed from the validated output + assert "metadata" not in validated + + # Ensure service is migrated to action + assert "service" not in validated + assert "action" in validated + assert validated["action"] == config.get("service", config["action"]) + + +@pytest.mark.parametrize( + "config", + [ + {}, + None, + {"data": {"entity_id": "light.kitchen"}}, + { + "service": "homeassistant.turn_on", + "service_template": "homeassistant.turn_on", + }, + {"service": "homeassistant.turn_on", "data": None}, + { + "service": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, + { + "service": "homeassistant.turn_on", + "action": "homeassistant.turn_on", + }, + { + "action": "homeassistant.turn_on", + "service_template": 
"homeassistant.turn_on", + }, + {"action": "homeassistant.turn_on", "data": None}, + { + "action": "homeassistant.turn_on", + "data_template": {"brightness": "{{ no_end"}, + }, + ], +) +def test_invalid_service_schema( + hass: HomeAssistant, config: dict[str, Any] | None +) -> None: + """Test service_schema validation fails.""" + with pytest.raises(vol.MultipleInvalid): + cv.SERVICE_SCHEMA(config) def test_entity_service_schema() -> None: diff --git a/tests/helpers/test_script.py b/tests/helpers/test_script.py index 52d9ff11059..1bc33140124 100644 --- a/tests/helpers/test_script.py +++ b/tests/helpers/test_script.py @@ -249,7 +249,7 @@ async def test_calling_service_basic( alias = "service step" sequence = cv.SCRIPT_SCHEMA( - {"alias": alias, "service": "test.script", "data": {"hello": "world"}} + {"alias": alias, "action": "test.script", "data": {"hello": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -352,13 +352,13 @@ async def test_calling_service_response_data( [ { "alias": "service step1", - "service": "test.script", + "action": "test.script", # Store the result of the service call as a variable "response_variable": "my_response", }, { "alias": "service step2", - "service": "test.script", + "action": "test.script", "data_template": { # Result of previous service call "key": "{{ my_response.data }}" @@ -441,7 +441,7 @@ async def test_service_response_data_errors( [ { "alias": "service step1", - "service": "test.script", + "action": "test.script", **params, }, ] @@ -458,7 +458,7 @@ async def test_data_template_with_templated_key(hass: HomeAssistant) -> None: calls = async_mock_service(hass, "test", "script") sequence = cv.SCRIPT_SCHEMA( - {"service": "test.script", "data_template": {"{{ hello_var }}": "world"}} + {"action": "test.script", "data_template": {"{{ hello_var }}": "world"}} ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -525,11 +525,11 @@ async def test_multiple_runs_no_wait(hass: HomeAssistant) -> None: sequence = cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data_template": {"fire": "{{ fire1 }}", "listen": "{{ listen1 }}"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"fire": "{{ fire2 }}", "listen": "{{ listen2 }}"}, }, ] @@ -605,7 +605,7 @@ async def test_stop_no_wait(hass: HomeAssistant, count) -> None: hass.services.async_register("test", "script", async_simulate_long_service) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script( hass, sequence, @@ -3894,7 +3894,7 @@ async def test_parallel_error( sequence = cv.SCRIPT_SCHEMA( { "parallel": [ - {"service": "epic.failure"}, + {"action": "epic.failure"}, ] } ) @@ -3946,7 +3946,7 @@ async def test_propagate_error_service_not_found(hass: HomeAssistant) -> None: await async_setup_component(hass, "homeassistant", {}) event = "test_event" events = async_capture_events(hass, event) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(exceptions.ServiceNotFound): @@ -3980,7 +3980,7 @@ async def test_propagate_error_invalid_service_data(hass: HomeAssistant) -> None events = async_capture_events(hass, event) calls = async_mock_service(hass, "test", "script", vol.Schema({"text": str})) 
sequence = cv.SCRIPT_SCHEMA( - [{"service": "test.script", "data": {"text": 1}}, {"event": event}] + [{"action": "test.script", "data": {"text": 1}}, {"event": event}] ) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") @@ -4022,7 +4022,7 @@ async def test_propagate_error_service_exception(hass: HomeAssistant) -> None: hass.services.async_register("test", "script", record_call) - sequence = cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": event}]) + sequence = cv.SCRIPT_SCHEMA([{"action": "test.script"}, {"event": event}]) script_obj = script.Script(hass, sequence, "Test Name", "test_domain") with pytest.raises(ValueError): @@ -4057,35 +4057,35 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": { "label_id": ["label_service_list_1", "label_service_list_2"] }, }, { - "service": "test.script", + "action": "test.script", "data": {"label_id": "{{ 'label_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"label_id": "label_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"label_id": "label_in_data_template"}, }, - {"service": "test.script", "data": {"without": "label_id"}}, + {"action": "test.script", "data": {"without": "label_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_choice_1_seq"}, } ], @@ -4094,7 +4094,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_choice_2_seq"}, } ], @@ -4102,7 +4102,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_default_seq"}, } ], @@ -4113,13 +4113,13 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_if_else"}, } ], @@ -4127,7 +4127,7 @@ async def test_referenced_labels(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"label_id": "label_parallel"}, } ], @@ -4161,33 +4161,33 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"floor_id": ["floor_service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "{{ 'floor_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"floor_id": "floor_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"floor_id": "floor_in_data_template"}, }, - {"service": "test.script", "data": {"without": "floor_id"}}, + {"action": "test.script", "data": {"without": "floor_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": 
"floor_choice_1_seq"}, } ], @@ -4196,7 +4196,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_choice_2_seq"}, } ], @@ -4204,7 +4204,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_default_seq"}, } ], @@ -4215,13 +4215,13 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_if_else"}, } ], @@ -4229,7 +4229,7 @@ async def test_referenced_floors(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"floor_id": "floor_parallel"}, } ], @@ -4262,33 +4262,33 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"area_id": ["area_service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"area_id": "{{ 'area_service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "target": {"area_id": "area_in_target"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"area_id": "area_in_data_template"}, }, - {"service": "test.script", "data": {"without": "area_id"}}, + {"action": "test.script", "data": {"without": "area_id"}}, { "choose": [ { "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_choice_1_seq"}, } ], @@ -4297,7 +4297,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "conditions": "{{ true == false }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_choice_2_seq"}, } ], @@ -4305,7 +4305,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_default_seq"}, } ], @@ -4316,13 +4316,13 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_if_else"}, } ], @@ -4330,7 +4330,7 @@ async def test_referenced_areas(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"area_id": "area_parallel"}, } ], @@ -4364,27 +4364,27 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: cv.SCRIPT_SCHEMA( [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.service_not_list"}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": ["light.service_list"]}, }, { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "{{ 'light.service_template' }}"}, }, { - "service": "test.script", + "action": "test.script", "entity_id": "light.direct_entity_referenced", }, { - "service": "test.script", + "action": "test.script", "target": {"entity_id": "light.entity_in_target"}, }, 
{ - "service": "test.script", + "action": "test.script", "data_template": {"entity_id": "light.entity_in_data_template"}, }, { @@ -4392,7 +4392,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "entity_id": "sensor.condition", "state": "100", }, - {"service": "test.script", "data": {"without": "entity_id"}}, + {"action": "test.script", "data": {"without": "entity_id"}}, {"scene": "scene.hello"}, { "choose": [ @@ -4400,7 +4400,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "conditions": "{{ states.light.choice_1_cond == 'on' }}", "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.choice_1_seq"}, } ], @@ -4413,7 +4413,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: }, "sequence": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.choice_2_seq"}, } ], @@ -4421,7 +4421,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.default_seq"}, } ], @@ -4432,13 +4432,13 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.if_then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.if_else"}, } ], @@ -4446,7 +4446,7 @@ async def test_referenced_entities(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "data": {"entity_id": "light.parallel"}, } ], @@ -4491,19 +4491,19 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "domain": "switch", }, { - "service": "test.script", + "action": "test.script", "data": {"device_id": "data-string-id"}, }, { - "service": "test.script", + "action": "test.script", "data_template": {"device_id": "data-template-string-id"}, }, { - "service": "test.script", + "action": "test.script", "target": {"device_id": "target-string-id"}, }, { - "service": "test.script", + "action": "test.script", "target": {"device_id": ["target-list-id-1", "target-list-id-2"]}, }, { @@ -4515,7 +4515,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ), "sequence": [ { - "service": "test.script", + "action": "test.script", "target": { "device_id": "choice-1-seq-device-target" }, @@ -4530,7 +4530,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: }, "sequence": [ { - "service": "test.script", + "action": "test.script", "target": { "device_id": "choice-2-seq-device-target" }, @@ -4540,7 +4540,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: ], "default": [ { - "service": "test.script", + "action": "test.script", "target": {"device_id": "default-device-target"}, } ], @@ -4549,13 +4549,13 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: "if": [], "then": [ { - "service": "test.script", + "action": "test.script", "data": {"device_id": "if-then"}, } ], "else": [ { - "service": "test.script", + "action": "test.script", "data": {"device_id": "if-else"}, } ], @@ -4563,7 +4563,7 @@ async def test_referenced_devices(hass: HomeAssistant) -> None: { "parallel": [ { - "service": "test.script", + "action": "test.script", "target": {"device_id": "parallel-device"}, } ], @@ -5104,7 +5104,7 @@ async def test_set_variable( sequence = cv.SCRIPT_SCHEMA( [ {"alias": alias, "variables": {"variable": "value"}}, - {"service": "test.script", "data": 
{"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5143,9 +5143,9 @@ async def test_set_redefines_variable( sequence = cv.SCRIPT_SCHEMA( [ {"variables": {"variable": "1"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, {"variables": {"variable": "{{ variable | int + 1 }}"}}, - {"service": "test.script", "data": {"value": "{{ variable }}"}}, + {"action": "test.script", "data": {"value": "{{ variable }}"}}, ] ) script_obj = script.Script(hass, sequence, "test script", "test_domain") @@ -5214,7 +5214,7 @@ async def test_validate_action_config( } configs = { - cv.SCRIPT_ACTION_CALL_SERVICE: {"service": "light.turn_on"}, + cv.SCRIPT_ACTION_CALL_SERVICE: {"action": "light.turn_on"}, cv.SCRIPT_ACTION_DELAY: {"delay": 5}, cv.SCRIPT_ACTION_WAIT_TEMPLATE: { "wait_template": "{{ states.light.kitchen.state == 'on' }}" @@ -5349,7 +5349,7 @@ async def test_embedded_wait_for_trigger_in_automation(hass: HomeAssistant) -> N } ] }, - {"service": "test.script"}, + {"action": "test.script"}, ], } }, @@ -5704,12 +5704,12 @@ async def test_continue_on_error(hass: HomeAssistant) -> None: {"event": "test_event"}, { "continue_on_error": True, - "service": "broken.service", + "action": "broken.service", }, {"event": "test_event"}, { "continue_on_error": False, - "service": "broken.service", + "action": "broken.service", }, {"event": "test_event"}, ] @@ -5786,7 +5786,7 @@ async def test_continue_on_error_automation_issue(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "service": "service.not_found", + "action": "service.not_found", }, ] ) @@ -5834,7 +5834,7 @@ async def test_continue_on_error_unknown_error(hass: HomeAssistant) -> None: [ { "continue_on_error": True, - "service": "some.service", + "action": "some.service", }, ] ) @@ -5884,7 +5884,7 @@ async def test_disabled_actions( { "alias": "Hello", "enabled": enabled_value, - "service": "broken.service", + "action": "broken.service", }, { "alias": "World", @@ -6255,7 +6255,7 @@ async def test_disallowed_recursion( context = Context() calls = 0 alias = "event step" - sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_2"}) + sequence1 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_2"}) script1_obj = script.Script( hass, sequence1, @@ -6265,7 +6265,7 @@ async def test_disallowed_recursion( running_description="test script1", ) - sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_3"}) + sequence2 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_3"}) script2_obj = script.Script( hass, sequence2, @@ -6275,7 +6275,7 @@ async def test_disallowed_recursion( running_description="test script2", ) - sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "service": "test.call_script_1"}) + sequence3 = cv.SCRIPT_SCHEMA({"alias": alias, "action": "test.call_script_1"}) script3_obj = script.Script( hass, sequence3, @@ -6315,3 +6315,43 @@ async def test_disallowed_recursion( "- test_domain2.Test Name2\n" "- test_domain3.Test Name3" ) in caplog.text + + +async def test_calling_service_backwards_compatible( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test the calling of a service with the service instead of the action key.""" + context = Context() + calls = async_mock_service(hass, "test", "script") + + alias = "service step" + sequence = cv.SCRIPT_SCHEMA( + 
{"alias": alias, "service": "test.script", "data": {"hello": "{{ 'world' }}"}} + ) + script_obj = script.Script(hass, sequence, "Test Name", "test_domain") + + await script_obj.async_run(context=context) + await hass.async_block_till_done() + + assert len(calls) == 1 + assert calls[0].context is context + assert calls[0].data.get("hello") == "world" + assert f"Executing step {alias}" in caplog.text + + assert_action_trace( + { + "0": [ + { + "result": { + "params": { + "domain": "test", + "service": "script", + "service_data": {"hello": "world"}, + "target": {}, + }, + "running_script": False, + } + } + ], + } + ) diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index b05cdf9c3ae..81cc189e1af 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -405,7 +405,7 @@ async def test_service_call(hass: HomeAssistant) -> None: """Test service call with templating.""" calls = async_mock_service(hass, "test_domain", "test_service") config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "entity_id": "hello.world", "data": { "hello": "{{ 'goodbye' }}", @@ -435,7 +435,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "target": { "area_id": ["area-42", "{{ 'area-51' }}"], "device_id": ["abcdef", "{{ 'fedcba' }}"], @@ -455,7 +455,7 @@ async def test_service_call(hass: HomeAssistant) -> None: } config = { - "service": "{{ 'test_domain.test_service' }}", + "action": "{{ 'test_domain.test_service' }}", "target": "{{ var_target }}", } @@ -542,7 +542,7 @@ async def test_split_entity_string(hass: HomeAssistant) -> None: await service.async_call_from_config( hass, { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", }, ) @@ -554,7 +554,7 @@ async def test_not_mutate_input(hass: HomeAssistant) -> None: """Test for immutable input.""" async_mock_service(hass, "test_domain", "test_service") config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "entity_id": "hello.world, sensor.beer", "data": {"hello": 1}, "data_template": {"nested": {"value": "{{ 1 + 1 }}"}}, @@ -581,7 +581,7 @@ async def test_fail_silently_if_no_service(mock_log, hass: HomeAssistant) -> Non await service.async_call_from_config(hass, {}) assert mock_log.call_count == 2 - await service.async_call_from_config(hass, {"service": "invalid"}) + await service.async_call_from_config(hass, {"action": "invalid"}) assert mock_log.call_count == 3 @@ -597,7 +597,7 @@ async def test_service_call_entry_id( assert entry.entity_id == "hello.world" config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "target": {"entity_id": entry.id}, } @@ -613,7 +613,7 @@ async def test_service_call_all_none(hass: HomeAssistant, target) -> None: calls = async_mock_service(hass, "test_domain", "test_service") config = { - "service": "test_domain.test_service", + "action": "test_domain.test_service", "target": {"entity_id": target}, } From 4f5eab4646e0560188f2dca6a9daae0e48b11dac Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Wed, 31 Jul 2024 05:37:39 -0700 Subject: [PATCH 0196/1635] Improve quality of ollama tool calling by repairing arguments (#122749) * Improve quality of ollama function calling by repairing function call arguments * Fix formatting of the tests * Run ruff format on ollama conversation * 
Add test for non-string arguments --- .../components/ollama/conversation.py | 30 ++++++++++++++++++- tests/components/ollama/test_conversation.py | 26 ++++++++++++++-- 2 files changed, 53 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index ac367a5cf6a..f59e268394b 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -63,6 +63,34 @@ def _format_tool( return {"type": "function", "function": tool_spec} +def _fix_invalid_arguments(value: Any) -> Any: + """Attempt to repair incorrectly formatted json function arguments. + + Small models (for example llama3.1 8B) may produce invalid argument values + which we attempt to repair here. + """ + if not isinstance(value, str): + return value + if (value.startswith("[") and value.endswith("]")) or ( + value.startswith("{") and value.endswith("}") + ): + try: + return json.loads(value) + except json.decoder.JSONDecodeError: + pass + return value + + +def _parse_tool_args(arguments: dict[str, Any]) -> dict[str, Any]: + """Rewrite ollama tool arguments. + + This function improves tool use quality by fixing common mistakes made by + small local tool use models. This will repair invalid json arguments and + omit unnecessary arguments with empty values that will fail intent parsing. + """ + return {k: _fix_invalid_arguments(v) for k, v in arguments.items() if v} + + class OllamaConversationEntity( conversation.ConversationEntity, conversation.AbstractConversationAgent ): @@ -255,7 +283,7 @@ class OllamaConversationEntity( for tool_call in tool_calls: tool_input = llm.ToolInput( tool_name=tool_call["function"]["name"], - tool_args=tool_call["function"]["arguments"], + tool_args=_parse_tool_args(tool_call["function"]["arguments"]), ) _LOGGER.debug( "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index 9be6f3b33a3..b5a94cc6f57 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -1,5 +1,6 @@ """Tests for the Ollama integration.""" +from typing import Any from unittest.mock import AsyncMock, Mock, patch from ollama import Message, ResponseError @@ -116,12 +117,33 @@ async def test_template_variables( assert "The user id is 12345." 
in prompt +@pytest.mark.parametrize( + ("tool_args", "expected_tool_args"), + [ + ({"param1": "test_value"}, {"param1": "test_value"}), + ({"param1": 2}, {"param1": 2}), + ( + {"param1": "test_value", "floor": ""}, + {"param1": "test_value"}, # Omit empty arguments + ), + ( + {"domain": '["light"]'}, + {"domain": ["light"]}, # Repair invalid json arguments + ), + ( + {"domain": "['light']"}, + {"domain": "['light']"}, # Preserve invalid json that can't be parsed + ), + ], +) @patch("homeassistant.components.ollama.conversation.llm.AssistAPI._async_get_tools") async def test_function_call( mock_get_tools, hass: HomeAssistant, mock_config_entry_with_assist: MockConfigEntry, mock_init_component, + tool_args: dict[str, Any], + expected_tool_args: dict[str, Any], ) -> None: """Test function call from the assistant.""" agent_id = mock_config_entry_with_assist.entry_id @@ -154,7 +176,7 @@ async def test_function_call( { "function": { "name": "test_tool", - "arguments": {"param1": "test_value"}, + "arguments": tool_args, } } ], @@ -183,7 +205,7 @@ async def test_function_call( hass, llm.ToolInput( tool_name="test_tool", - tool_args={"param1": "test_value"}, + tool_args=expected_tool_args, ), llm.LLMContext( platform="ollama", From 8d0e998e549bc176134231b93fa0f6495a7d4591 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Wed, 31 Jul 2024 05:38:44 -0700 Subject: [PATCH 0197/1635] Improve conversation agent tracing to help with eval and data collection (#122542) --- .../components/conversation/default_agent.py | 11 +++++++++++ homeassistant/components/conversation/trace.py | 4 ++-- .../conversation.py | 1 + .../components/openai_conversation/conversation.py | 3 ++- homeassistant/helpers/llm.py | 2 +- tests/components/conversation/test_trace.py | 12 +++++++++++- .../test_conversation.py | 3 ++- .../openai_conversation/test_conversation.py | 3 ++- 8 files changed, 32 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 45393289ac8..1661d2ad30d 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -47,6 +47,7 @@ from homeassistant.util.json import JsonObjectType, json_loads_object from .const import DEFAULT_EXPOSED_ATTRIBUTES, DOMAIN, ConversationEntityFeature from .entity import ConversationEntity from .models import ConversationInput, ConversationResult +from .trace import ConversationTraceEventType, async_conversation_trace_append _LOGGER = logging.getLogger(__name__) _DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that" @@ -348,6 +349,16 @@ class DefaultAgent(ConversationEntity): } for entity in result.entities_list } + async_conversation_trace_append( + ConversationTraceEventType.TOOL_CALL, + { + "intent_name": result.intent.name, + "slots": { + entity.name: entity.value or entity.text + for entity in result.entities_list + }, + }, + ) try: intent_response = await intent.async_handle( diff --git a/homeassistant/components/conversation/trace.py b/homeassistant/components/conversation/trace.py index 08b271d9058..6f993aa326a 100644 --- a/homeassistant/components/conversation/trace.py +++ b/homeassistant/components/conversation/trace.py @@ -22,8 +22,8 @@ class ConversationTraceEventType(enum.StrEnum): AGENT_DETAIL = "agent_detail" """Event detail added by a conversation agent.""" - LLM_TOOL_CALL = "llm_tool_call" - """An LLM Tool call""" + TOOL_CALL = "tool_call" + """A conversation agent Tool call or default agent 
intent call.""" @dataclass(frozen=True) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index 8dec62ad26b..a5c911bb757 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -286,6 +286,7 @@ class GoogleGenerativeAIConversationEntity( if supports_system_instruction else messages[2:], "prompt": prompt, + "tools": [*llm_api.tools] if llm_api else None, }, ) diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index dd42049e3d0..483b37945d6 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -225,7 +225,8 @@ class OpenAIConversationEntity( LOGGER.debug("Prompt: %s", messages) LOGGER.debug("Tools: %s", tools) trace.async_conversation_trace_append( - trace.ConversationTraceEventType.AGENT_DETAIL, {"messages": messages} + trace.ConversationTraceEventType.AGENT_DETAIL, + {"messages": messages, "tools": llm_api.tools if llm_api else None}, ) client = self.entry.runtime_data diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 8ad576b7ea5..4ddb00166b6 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -167,7 +167,7 @@ class APIInstance: async def async_call_tool(self, tool_input: ToolInput) -> JsonObjectType: """Call a LLM tool, validate args and return the response.""" async_conversation_trace_append( - ConversationTraceEventType.LLM_TOOL_CALL, + ConversationTraceEventType.TOOL_CALL, {"tool_name": tool_input.tool_name, "tool_args": tool_input.tool_args}, ) diff --git a/tests/components/conversation/test_trace.py b/tests/components/conversation/test_trace.py index c586eb8865d..59cd10d2510 100644 --- a/tests/components/conversation/test_trace.py +++ b/tests/components/conversation/test_trace.py @@ -33,7 +33,7 @@ async def test_converation_trace( assert traces last_trace = traces[-1].as_dict() assert last_trace.get("events") - assert len(last_trace.get("events")) == 1 + assert len(last_trace.get("events")) == 2 trace_event = last_trace["events"][0] assert ( trace_event.get("event_type") == trace.ConversationTraceEventType.ASYNC_PROCESS @@ -50,6 +50,16 @@ async def test_converation_trace( == "Added apples" ) + trace_event = last_trace["events"][1] + assert trace_event.get("event_type") == trace.ConversationTraceEventType.TOOL_CALL + assert trace_event.get("data") == { + "intent_name": "HassListAddItem", + "slots": { + "name": "Shopping List", + "item": "apples ", + }, + } + async def test_converation_trace_error( hass: HomeAssistant, diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index afeb6d01faa..41f96c7b0ac 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -269,11 +269,12 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.LLM_TOOL_CALL, + trace.ConversationTraceEventType.TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model 
detail_event = trace_events[1] assert "Answer in plain text" in detail_event["data"]["prompt"] + assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] @patch( diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index fee1543a0d7..3364d822245 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -294,7 +294,7 @@ async def test_function_call( assert [event["event_type"] for event in trace_events] == [ trace.ConversationTraceEventType.ASYNC_PROCESS, trace.ConversationTraceEventType.AGENT_DETAIL, - trace.ConversationTraceEventType.LLM_TOOL_CALL, + trace.ConversationTraceEventType.TOOL_CALL, ] # AGENT_DETAIL event contains the raw prompt passed to the model detail_event = trace_events[1] @@ -303,6 +303,7 @@ async def test_function_call( "Today's date is 2024-06-03." in trace_events[1]["data"]["messages"][0]["content"] ) + assert [t.name for t in detail_event["data"]["tools"]] == ["test_tool"] # Call it again, make sure we have updated prompt with ( From e706ff0564d6b31304790b443e8c75ae290812b5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:44:14 +0200 Subject: [PATCH 0198/1635] Fix implicit-return in transport_nsw (#122930) --- homeassistant/components/transport_nsw/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/transport_nsw/sensor.py b/homeassistant/components/transport_nsw/sensor.py index 787f3298e59..5628274b967 100644 --- a/homeassistant/components/transport_nsw/sensor.py +++ b/homeassistant/components/transport_nsw/sensor.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from TransportNSW import TransportNSW import voluptuous as vol @@ -98,7 +99,7 @@ class TransportNSWSensor(SensorEntity): return self._state @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self._times is not None: return { @@ -110,6 +111,7 @@ class TransportNSWSensor(SensorEntity): ATTR_DESTINATION: self._times[ATTR_DESTINATION], ATTR_MODE: self._times[ATTR_MODE], } + return None @property def native_unit_of_measurement(self): From 2f181cbe41064d2310c79d87bc66264340b856c0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:53:05 +0200 Subject: [PATCH 0199/1635] Fix implicit-return in vera (#122934) --- homeassistant/components/vera/cover.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/vera/cover.py b/homeassistant/components/vera/cover.py index 542680925f2..25ffe987d5e 100644 --- a/homeassistant/components/vera/cover.py +++ b/homeassistant/components/vera/cover.py @@ -61,10 +61,11 @@ class VeraCover(VeraDevice[veraApi.VeraCurtain], CoverEntity): self.schedule_update_ha_state() @property - def is_closed(self) -> bool: + def is_closed(self) -> bool | None: """Return if the cover is closed.""" if self.current_cover_position is not None: return self.current_cover_position == 0 + return None def open_cover(self, **kwargs: Any) -> None: """Open the cover.""" From 5e1cca1c58f77ee9c1a1b6d549d00b6a34d001fe Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:54:52 +0200 Subject: [PATCH 0200/1635] Fix implicit-return in shelly (#122926) 
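For context, a minimal sketch of the pattern this fix (and the neighbouring
implicit-return cleanups) addresses: a setup callback annotated -> None should
not `return` the result of another call, even when that call itself only
returns None. The helper names below (uses_rpc, setup_rpc, setup_block) are
stand-ins for illustration, not the actual shelly module:

    def async_setup_entry(config_entry, async_add_entities) -> None:
        # Sketch only; helper names are hypothetical stand-ins.
        if uses_rpc(config_entry):
            # Before: `return setup_rpc(...)` implicitly returned the callee's
            # None result; after: the call and the early exit are separate.
            setup_rpc(config_entry, async_add_entities)
            return
        setup_block(config_entry, async_add_entities)
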
--- homeassistant/components/shelly/climate.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/shelly/climate.py b/homeassistant/components/shelly/climate.py index ab1e58583d9..b77f45afb3f 100644 --- a/homeassistant/components/shelly/climate.py +++ b/homeassistant/components/shelly/climate.py @@ -54,7 +54,8 @@ async def async_setup_entry( ) -> None: """Set up climate device.""" if get_device_entry_gen(config_entry) in RPC_GENERATIONS: - return async_setup_rpc_entry(hass, config_entry, async_add_entities) + async_setup_rpc_entry(hass, config_entry, async_add_entities) + return coordinator = config_entry.runtime_data.block assert coordinator From e64e3c27785a4c7d5c373e20270c870a711e4bde Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:00:53 +0200 Subject: [PATCH 0201/1635] Fix implicit-return in time_date (#122929) --- homeassistant/components/time_date/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/time_date/sensor.py b/homeassistant/components/time_date/sensor.py index 442442f0e1d..245d10bebba 100644 --- a/homeassistant/components/time_date/sensor.py +++ b/homeassistant/components/time_date/sensor.py @@ -48,7 +48,7 @@ async def async_setup_platform( """Set up the Time and Date sensor.""" if hass.config.time_zone is None: _LOGGER.error("Timezone is not set in Home Assistant configuration") # type: ignore[unreachable] - return False + return async_add_entities( [TimeDateSensor(variable) for variable in config[CONF_DISPLAY_OPTIONS]] From cddb3bb668328b3ee0dac0dbf89d54539d33c7e9 Mon Sep 17 00:00:00 2001 From: Kevin Stillhammer Date: Wed, 31 Jul 2024 15:08:25 +0200 Subject: [PATCH 0202/1635] Add reconfigure step for here_travel_time (#114667) * Add reconfigure step for here_travel_time * Add comments, reuse step_user, TYPE_CHECKING, remove defaults --- .../here_travel_time/config_flow.py | 183 +++++++++++++----- .../components/here_travel_time/strings.json | 3 +- .../here_travel_time/test_config_flow.py | 102 ++++++++++ 3 files changed, 235 insertions(+), 53 deletions(-) diff --git a/homeassistant/components/here_travel_time/config_flow.py b/homeassistant/components/here_travel_time/config_flow.py index 36d5c1efe1e..b708fd9cd3d 100644 --- a/homeassistant/components/here_travel_time/config_flow.py +++ b/homeassistant/components/here_travel_time/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from here_routing import ( HERERoutingApi, @@ -104,6 +104,8 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Init Config Flow.""" self._config: dict[str, Any] = {} + self._entry: ConfigEntry | None = None + self._is_reconfigure_flow: bool = False @staticmethod @callback @@ -119,21 +121,36 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors = {} user_input = user_input or {} - if user_input: - try: - await async_validate_api_key(user_input[CONF_API_KEY]) - except HERERoutingUnauthorizedError: - errors["base"] = "invalid_auth" - except (HERERoutingError, HERETransitError): - _LOGGER.exception("Unexpected exception") - errors["base"] = "unknown" - if not errors: - self._config = user_input - return await self.async_step_origin_menu() + if not self._is_reconfigure_flow: # Always show form first for reconfiguration + if user_input: + try: + await 
async_validate_api_key(user_input[CONF_API_KEY]) + except HERERoutingUnauthorizedError: + errors["base"] = "invalid_auth" + except (HERERoutingError, HERETransitError): + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + if not errors: + self._config[CONF_NAME] = user_input[CONF_NAME] + self._config[CONF_API_KEY] = user_input[CONF_API_KEY] + self._config[CONF_MODE] = user_input[CONF_MODE] + return await self.async_step_origin_menu() + self._is_reconfigure_flow = False return self.async_show_form( step_id="user", data_schema=get_user_step_schema(user_input), errors=errors ) + async def async_step_reconfigure( + self, _: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration.""" + self._is_reconfigure_flow = True + self._entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + if TYPE_CHECKING: + assert self._entry + self._config = self._entry.data.copy() + return await self.async_step_user(self._config) + async def async_step_origin_menu(self, _: None = None) -> ConfigFlowResult: """Show the origin menu.""" return self.async_show_menu( @@ -150,37 +167,57 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): self._config[CONF_ORIGIN_LONGITUDE] = user_input[CONF_ORIGIN][ CONF_LONGITUDE ] + # Remove possible previous configuration using an entity_id + self._config.pop(CONF_ORIGIN_ENTITY_ID, None) return await self.async_step_destination_menu() - schema = vol.Schema( + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required( + CONF_ORIGIN, + ): LocationSelector() + } + ), { - vol.Required( - CONF_ORIGIN, - default={ - CONF_LATITUDE: self.hass.config.latitude, - CONF_LONGITUDE: self.hass.config.longitude, - }, - ): LocationSelector() - } + CONF_ORIGIN: { + CONF_LATITUDE: self._config.get(CONF_ORIGIN_LATITUDE) + or self.hass.config.latitude, + CONF_LONGITUDE: self._config.get(CONF_ORIGIN_LONGITUDE) + or self.hass.config.longitude, + } + }, ) return self.async_show_form(step_id="origin_coordinates", data_schema=schema) - async def async_step_destination_menu(self, _: None = None) -> ConfigFlowResult: - """Show the destination menu.""" - return self.async_show_menu( - step_id="destination_menu", - menu_options=["destination_coordinates", "destination_entity"], - ) - async def async_step_origin_entity( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Configure origin by using an entity.""" if user_input is not None: self._config[CONF_ORIGIN_ENTITY_ID] = user_input[CONF_ORIGIN_ENTITY_ID] + # Remove possible previous configuration using coordinates + self._config.pop(CONF_ORIGIN_LATITUDE, None) + self._config.pop(CONF_ORIGIN_LONGITUDE, None) return await self.async_step_destination_menu() - schema = vol.Schema({vol.Required(CONF_ORIGIN_ENTITY_ID): EntitySelector()}) + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required( + CONF_ORIGIN_ENTITY_ID, + ): EntitySelector() + } + ), + {CONF_ORIGIN_ENTITY_ID: self._config.get(CONF_ORIGIN_ENTITY_ID)}, + ) return self.async_show_form(step_id="origin_entity", data_schema=schema) + async def async_step_destination_menu(self, _: None = None) -> ConfigFlowResult: + """Show the destination menu.""" + return self.async_show_menu( + step_id="destination_menu", + menu_options=["destination_coordinates", "destination_entity"], + ) + async def async_step_destination_coordinates( self, user_input: dict[str, Any] | None = None, @@ -193,21 +230,36 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): 
self._config[CONF_DESTINATION_LONGITUDE] = user_input[CONF_DESTINATION][ CONF_LONGITUDE ] + # Remove possible previous configuration using an entity_id + self._config.pop(CONF_DESTINATION_ENTITY_ID, None) + if self._entry: + return self.async_update_reload_and_abort( + self._entry, + title=self._config[CONF_NAME], + data=self._config, + reason="reconfigure_successful", + ) return self.async_create_entry( title=self._config[CONF_NAME], data=self._config, options=DEFAULT_OPTIONS, ) - schema = vol.Schema( + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required( + CONF_DESTINATION, + ): LocationSelector() + } + ), { - vol.Required( - CONF_DESTINATION, - default={ - CONF_LATITUDE: self.hass.config.latitude, - CONF_LONGITUDE: self.hass.config.longitude, - }, - ): LocationSelector() - } + CONF_DESTINATION: { + CONF_LATITUDE: self._config.get(CONF_DESTINATION_LATITUDE) + or self.hass.config.latitude, + CONF_LONGITUDE: self._config.get(CONF_DESTINATION_LONGITUDE) + or self.hass.config.longitude, + }, + }, ) return self.async_show_form( step_id="destination_coordinates", data_schema=schema @@ -222,13 +274,27 @@ class HERETravelTimeConfigFlow(ConfigFlow, domain=DOMAIN): self._config[CONF_DESTINATION_ENTITY_ID] = user_input[ CONF_DESTINATION_ENTITY_ID ] + # Remove possible previous configuration using coordinates + self._config.pop(CONF_DESTINATION_LATITUDE, None) + self._config.pop(CONF_DESTINATION_LONGITUDE, None) + if self._entry: + return self.async_update_reload_and_abort( + self._entry, data=self._config, reason="reconfigure_successful" + ) return self.async_create_entry( title=self._config[CONF_NAME], data=self._config, options=DEFAULT_OPTIONS, ) - schema = vol.Schema( - {vol.Required(CONF_DESTINATION_ENTITY_ID): EntitySelector()} + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required( + CONF_DESTINATION_ENTITY_ID, + ): EntitySelector() + } + ), + {CONF_DESTINATION_ENTITY_ID: self._config.get(CONF_DESTINATION_ENTITY_ID)}, ) return self.async_show_form(step_id="destination_entity", data_schema=schema) @@ -249,15 +315,22 @@ class HERETravelTimeOptionsFlow(OptionsFlow): self._config = user_input return await self.async_step_time_menu() - schema = vol.Schema( + schema = self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Optional( + CONF_ROUTE_MODE, + default=self.config_entry.options.get( + CONF_ROUTE_MODE, DEFAULT_OPTIONS[CONF_ROUTE_MODE] + ), + ): vol.In(ROUTE_MODES), + } + ), { - vol.Optional( - CONF_ROUTE_MODE, - default=self.config_entry.options.get( - CONF_ROUTE_MODE, DEFAULT_OPTIONS[CONF_ROUTE_MODE] - ), - ): vol.In(ROUTE_MODES), - } + CONF_ROUTE_MODE: self.config_entry.options.get( + CONF_ROUTE_MODE, DEFAULT_OPTIONS[CONF_ROUTE_MODE] + ), + }, ) return self.async_show_form(step_id="init", data_schema=schema) @@ -283,8 +356,11 @@ class HERETravelTimeOptionsFlow(OptionsFlow): self._config[CONF_ARRIVAL_TIME] = user_input[CONF_ARRIVAL_TIME] return self.async_create_entry(title="", data=self._config) - schema = vol.Schema( - {vol.Required(CONF_ARRIVAL_TIME, default="00:00:00"): TimeSelector()} + schema = self.add_suggested_values_to_schema( + vol.Schema( + {vol.Required(CONF_ARRIVAL_TIME, default="00:00:00"): TimeSelector()} + ), + {CONF_ARRIVAL_TIME: "00:00:00"}, ) return self.async_show_form(step_id="arrival_time", data_schema=schema) @@ -297,8 +373,11 @@ class HERETravelTimeOptionsFlow(OptionsFlow): self._config[CONF_DEPARTURE_TIME] = user_input[CONF_DEPARTURE_TIME] return self.async_create_entry(title="", data=self._config) - schema 
= vol.Schema( - {vol.Required(CONF_DEPARTURE_TIME, default="00:00:00"): TimeSelector()} + schema = self.add_suggested_values_to_schema( + vol.Schema( + {vol.Required(CONF_DEPARTURE_TIME, default="00:00:00"): TimeSelector()} + ), + {CONF_DEPARTURE_TIME: "00:00:00"}, ) return self.async_show_form(step_id="departure_time", data_schema=schema) diff --git a/homeassistant/components/here_travel_time/strings.json b/homeassistant/components/here_travel_time/strings.json index 124aa070595..cfa14a3e3ca 100644 --- a/homeassistant/components/here_travel_time/strings.json +++ b/homeassistant/components/here_travel_time/strings.json @@ -52,7 +52,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" } }, "options": { diff --git a/tests/components/here_travel_time/test_config_flow.py b/tests/components/here_travel_time/test_config_flow.py index 9b15a42dd56..ea3de64ed0c 100644 --- a/tests/components/here_travel_time/test_config_flow.py +++ b/tests/components/here_travel_time/test_config_flow.py @@ -6,17 +6,20 @@ from here_routing import HERERoutingError, HERERoutingUnauthorizedError import pytest from homeassistant import config_entries +from homeassistant.components.here_travel_time.config_flow import DEFAULT_OPTIONS from homeassistant.components.here_travel_time.const import ( CONF_ARRIVAL_TIME, CONF_DEPARTURE_TIME, CONF_DESTINATION_ENTITY_ID, CONF_DESTINATION_LATITUDE, CONF_DESTINATION_LONGITUDE, + CONF_ORIGIN_ENTITY_ID, CONF_ORIGIN_LATITUDE, CONF_ORIGIN_LONGITUDE, CONF_ROUTE_MODE, DOMAIN, ROUTE_MODE_FASTEST, + TRAVEL_MODE_BICYCLE, TRAVEL_MODE_CAR, TRAVEL_MODE_PUBLIC, ) @@ -249,6 +252,105 @@ async def test_step_destination_entity( } +@pytest.mark.usefixtures("valid_response") +async def test_reconfigure_destination_entity(hass: HomeAssistant) -> None: + """Test reconfigure flow when choosing a destination entity.""" + origin_entity_selector_result = await do_common_reconfiguration_steps(hass) + menu_result = await hass.config_entries.flow.async_configure( + origin_entity_selector_result["flow_id"], {"next_step_id": "destination_entity"} + ) + assert menu_result["type"] is FlowResultType.FORM + + destination_entity_selector_result = await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + {"destination_entity_id": "zone.home"}, + ) + assert destination_entity_selector_result["type"] is FlowResultType.ABORT + assert destination_entity_selector_result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.data == { + CONF_NAME: "test", + CONF_API_KEY: API_KEY, + CONF_ORIGIN_ENTITY_ID: "zone.home", + CONF_DESTINATION_ENTITY_ID: "zone.home", + CONF_MODE: TRAVEL_MODE_BICYCLE, + } + + +@pytest.mark.usefixtures("valid_response") +async def test_reconfigure_destination_coordinates(hass: HomeAssistant) -> None: + """Test reconfigure flow when choosing destination coordinates.""" + origin_entity_selector_result = await do_common_reconfiguration_steps(hass) + menu_result = await hass.config_entries.flow.async_configure( + origin_entity_selector_result["flow_id"], + {"next_step_id": "destination_coordinates"}, + ) + assert menu_result["type"] is FlowResultType.FORM + + destination_entity_selector_result = await hass.config_entries.flow.async_configure( + 
menu_result["flow_id"], + { + "destination": { + "latitude": 43.0, + "longitude": -80.3, + "radius": 5.0, + } + }, + ) + assert destination_entity_selector_result["type"] is FlowResultType.ABORT + assert destination_entity_selector_result["reason"] == "reconfigure_successful" + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.data == { + CONF_NAME: "test", + CONF_API_KEY: API_KEY, + CONF_ORIGIN_ENTITY_ID: "zone.home", + CONF_DESTINATION_LATITUDE: 43.0, + CONF_DESTINATION_LONGITUDE: -80.3, + CONF_MODE: TRAVEL_MODE_BICYCLE, + } + + +async def do_common_reconfiguration_steps(hass: HomeAssistant) -> None: + """Walk through common flow steps for reconfiguring.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="0123456789", + data=DEFAULT_CONFIG, + options=DEFAULT_OPTIONS, + ) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + reconfigure_result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + ) + assert reconfigure_result["type"] is FlowResultType.FORM + assert reconfigure_result["step_id"] == "user" + + user_step_result = await hass.config_entries.flow.async_configure( + reconfigure_result["flow_id"], + { + CONF_API_KEY: API_KEY, + CONF_MODE: TRAVEL_MODE_BICYCLE, + CONF_NAME: "test", + }, + ) + await hass.async_block_till_done() + menu_result = await hass.config_entries.flow.async_configure( + user_step_result["flow_id"], {"next_step_id": "origin_entity"} + ) + return await hass.config_entries.flow.async_configure( + menu_result["flow_id"], + {"origin_entity_id": "zone.home"}, + ) + + async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( From a35fa0e95a381582200d49d770ff89119ba9f98e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 31 Jul 2024 08:13:04 -0500 Subject: [PATCH 0203/1635] Warn that the minimum SQLite version will change to 3.40.1 as of 2025.2 (#104298) Co-authored-by: Robert Resch --- .../components/recorder/strings.json | 4 + homeassistant/components/recorder/util.py | 77 ++++++++++++++++--- tests/components/recorder/test_util.py | 71 +++++++++++++++-- 3 files changed, 132 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index bf5d95ae1fc..f891b4d18d2 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -16,6 +16,10 @@ "backup_failed_out_of_resources": { "title": "Database backup failed due to lack of resources", "description": "The database backup stated at {start_time} failed due to lack of resources. The backup cannot be trusted and must be restarted. This can happen if the database is too large or if the system is under heavy load. Consider upgrading the system hardware or reducing the size of the database by decreasing the number of history days to keep or creating a filter." + }, + "sqlite_too_old": { + "title": "Update SQLite to {min_version} or later to continue using the recorder", + "description": "Support for version {server_version} of SQLite is ending; the minimum supported version is {min_version}. Please upgrade your database software." 
} }, "services": { diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 1ef85b28f8d..4d494aed7d5 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -96,6 +96,7 @@ MARIADB_WITH_FIXED_IN_QUERIES_108 = _simple_version("10.8.4") MIN_VERSION_MYSQL = _simple_version("8.0.0") MIN_VERSION_PGSQL = _simple_version("12.0") MIN_VERSION_SQLITE = _simple_version("3.31.0") +UPCOMING_MIN_VERSION_SQLITE = _simple_version("3.40.1") MIN_VERSION_SQLITE_MODERN_BIND_VARS = _simple_version("3.32.0") @@ -356,7 +357,7 @@ def _fail_unsupported_dialect(dialect_name: str) -> NoReturn: raise UnsupportedDialect -def _fail_unsupported_version( +def _raise_if_version_unsupported( server_version: str, dialect_name: str, minimum_version: str ) -> NoReturn: """Warn about unsupported database version.""" @@ -373,16 +374,54 @@ def _fail_unsupported_version( raise UnsupportedDialect +@callback +def _async_delete_issue_deprecated_version( + hass: HomeAssistant, dialect_name: str +) -> None: + """Delete the issue about upcoming unsupported database version.""" + ir.async_delete_issue(hass, DOMAIN, f"{dialect_name}_too_old") + + +@callback +def _async_create_issue_deprecated_version( + hass: HomeAssistant, + server_version: AwesomeVersion, + dialect_name: str, + min_version: AwesomeVersion, +) -> None: + """Warn about upcoming unsupported database version.""" + ir.async_create_issue( + hass, + DOMAIN, + f"{dialect_name}_too_old", + is_fixable=False, + severity=ir.IssueSeverity.CRITICAL, + translation_key=f"{dialect_name}_too_old", + translation_placeholders={ + "server_version": str(server_version), + "min_version": str(min_version), + }, + breaks_in_ha_version="2025.2.0", + ) + + +def _extract_version_from_server_response_or_raise( + server_response: str, +) -> AwesomeVersion: + """Extract version from server response.""" + return AwesomeVersion( + server_response, + ensure_strategy=AwesomeVersionStrategy.SIMPLEVER, + find_first_match=True, + ) + + def _extract_version_from_server_response( server_response: str, ) -> AwesomeVersion | None: """Attempt to extract version from server response.""" try: - return AwesomeVersion( - server_response, - ensure_strategy=AwesomeVersionStrategy.SIMPLEVER, - find_first_match=True, - ) + return _extract_version_from_server_response_or_raise(server_response) except AwesomeVersionException: return None @@ -475,13 +514,27 @@ def setup_connection_for_dialect( # as its persistent and isn't free to call every time. 
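# Summary of the gating added below (explanatory note, not part of the upstream diff):
# - below MIN_VERSION_SQLITE (3.31.0) connection setup still aborts via
#   _raise_if_version_unsupported(), exactly as the old _fail_unsupported_version did;
# - at or above 3.31.0 but below UPCOMING_MIN_VERSION_SQLITE (3.40.1) a repair issue
#   is scheduled through _async_create_issue_deprecated_version(), flagged to break
#   in Home Assistant 2025.2.0;
# - at 3.40.1 or newer any previously created issue is removed again via
#   _async_delete_issue_deprecated_version().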
result = query_on_connection(dbapi_connection, "SELECT sqlite_version()") version_string = result[0][0] - version = _extract_version_from_server_response(version_string) + version = _extract_version_from_server_response_or_raise(version_string) - if not version or version < MIN_VERSION_SQLITE: - _fail_unsupported_version( + if version < MIN_VERSION_SQLITE: + _raise_if_version_unsupported( version or version_string, "SQLite", MIN_VERSION_SQLITE ) + # No elif here since _raise_if_version_unsupported raises + if version < UPCOMING_MIN_VERSION_SQLITE: + instance.hass.add_job( + _async_create_issue_deprecated_version, + instance.hass, + version or version_string, + dialect_name, + UPCOMING_MIN_VERSION_SQLITE, + ) + else: + instance.hass.add_job( + _async_delete_issue_deprecated_version, instance.hass, dialect_name + ) + if version and version > MIN_VERSION_SQLITE_MODERN_BIND_VARS: max_bind_vars = SQLITE_MODERN_MAX_BIND_VARS @@ -513,7 +566,7 @@ def setup_connection_for_dialect( if is_maria_db: if not version or version < MIN_VERSION_MARIA_DB: - _fail_unsupported_version( + _raise_if_version_unsupported( version or version_string, "MariaDB", MIN_VERSION_MARIA_DB ) if version and ( @@ -529,7 +582,7 @@ def setup_connection_for_dialect( ) elif not version or version < MIN_VERSION_MYSQL: - _fail_unsupported_version( + _raise_if_version_unsupported( version or version_string, "MySQL", MIN_VERSION_MYSQL ) @@ -551,7 +604,7 @@ def setup_connection_for_dialect( version_string = result[0][0] version = _extract_version_from_server_response(version_string) if not version or version < MIN_VERSION_PGSQL: - _fail_unsupported_version( + _raise_if_version_unsupported( version or version_string, "PostgreSQL", MIN_VERSION_PGSQL ) diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 16bf06204e2..04fe762c780 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -26,6 +26,8 @@ from homeassistant.components.recorder.models import ( process_timestamp, ) from homeassistant.components.recorder.util import ( + MIN_VERSION_SQLITE, + UPCOMING_MIN_VERSION_SQLITE, end_incomplete_runs, is_second_sunday, resolve_period, @@ -223,9 +225,9 @@ def test_setup_connection_for_dialect_mysql(mysql_version) -> None: @pytest.mark.parametrize( "sqlite_version", - ["3.31.0"], + [str(UPCOMING_MIN_VERSION_SQLITE)], ) -def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: +def test_setup_connection_for_dialect_sqlite(sqlite_version: str) -> None: """Test setting up the connection for a sqlite dialect.""" instance_mock = MagicMock() execute_args = [] @@ -276,10 +278,10 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version) -> None: @pytest.mark.parametrize( "sqlite_version", - ["3.31.0"], + [str(UPCOMING_MIN_VERSION_SQLITE)], ) def test_setup_connection_for_dialect_sqlite_zero_commit_interval( - sqlite_version, + sqlite_version: str, ) -> None: """Test setting up the connection for a sqlite dialect with a zero commit interval.""" instance_mock = MagicMock(commit_interval=0) @@ -503,10 +505,6 @@ def test_supported_pgsql(caplog: pytest.LogCaptureFixture, pgsql_version) -> Non "2.0.0", "Version 2.0.0 of SQLite is not supported; minimum supported version is 3.31.0.", ), - ( - "dogs", - "Version dogs of SQLite is not supported; minimum supported version is 3.31.0.", - ), ], ) def test_fail_outdated_sqlite( @@ -725,6 +723,63 @@ async def test_no_issue_for_mariadb_with_MDEV_25020( assert database_engine.optimizer.slow_range_in_select is 
False +async def test_issue_for_old_sqlite( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test we create and delete an issue for old sqlite versions.""" + instance_mock = MagicMock() + instance_mock.hass = hass + execute_args = [] + close_mock = MagicMock() + min_version = str(MIN_VERSION_SQLITE) + + def execute_mock(statement): + nonlocal execute_args + execute_args.append(statement) + + def fetchall_mock(): + nonlocal execute_args + if execute_args[-1] == "SELECT sqlite_version()": + return [[min_version]] + return None + + def _make_cursor_mock(*_): + return MagicMock(execute=execute_mock, close=close_mock, fetchall=fetchall_mock) + + dbapi_connection = MagicMock(cursor=_make_cursor_mock) + + database_engine = await hass.async_add_executor_job( + util.setup_connection_for_dialect, + instance_mock, + "sqlite", + dbapi_connection, + True, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") + assert issue is not None + assert issue.translation_placeholders == { + "min_version": str(UPCOMING_MIN_VERSION_SQLITE), + "server_version": min_version, + } + + min_version = str(UPCOMING_MIN_VERSION_SQLITE) + database_engine = await hass.async_add_executor_job( + util.setup_connection_for_dialect, + instance_mock, + "sqlite", + dbapi_connection, + True, + ) + await hass.async_block_till_done() + + issue = issue_registry.async_get_issue(DOMAIN, "sqlite_too_old") + assert issue is None + assert database_engine is not None + + @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") async def test_basic_sanity_check( From f7f0f490154eebc7ce43779456e89e3f162df0bb Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:36:57 +0200 Subject: [PATCH 0204/1635] Move lifespan attributes into own sensors for legacy Ecovacs bots (#122740) * move available property to base entity class * add lifespan sensors * apply suggestion, simplify the method * don't touch internals in tests * apply suggestion * apply suggestions --- homeassistant/components/ecovacs/const.py | 6 + .../components/ecovacs/controller.py | 10 ++ homeassistant/components/ecovacs/entity.py | 5 + homeassistant/components/ecovacs/sensor.py | 84 +++++++++- homeassistant/components/ecovacs/strings.json | 3 + homeassistant/components/ecovacs/vacuum.py | 6 +- tests/components/ecovacs/conftest.py | 11 +- .../ecovacs/snapshots/test_sensor.ambr | 148 ++++++++++++++++++ tests/components/ecovacs/test_sensor.py | 33 ++++ 9 files changed, 294 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/ecovacs/const.py b/homeassistant/components/ecovacs/const.py index 65044c016f9..ac7a268f1bd 100644 --- a/homeassistant/components/ecovacs/const.py +++ b/homeassistant/components/ecovacs/const.py @@ -21,6 +21,12 @@ SUPPORTED_LIFESPANS = ( LifeSpan.ROUND_MOP, ) +LEGACY_SUPPORTED_LIFESPANS = ( + "main_brush", + "side_brush", + "filter", +) + class InstanceMode(StrEnum): """Instance mode.""" diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index 0bef2e8fdd7..c22fb240536 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -74,6 +74,8 @@ class EcovacsController: self._authenticator, ) + self._added_legacy_entities: set[str] = set() + async def initialize(self) -> None: """Init controller.""" mqtt_config_verfied = False @@ -117,6 +119,14 @@ class 
EcovacsController: await self._mqtt.disconnect() await self._authenticator.teardown() + def add_legacy_entity(self, device: VacBot, component: str) -> None: + """Add legacy entity.""" + self._added_legacy_entities.add(f"{device.vacuum['did']}_{component}") + + def legacy_entity_is_added(self, device: VacBot, component: str) -> bool: + """Check if legacy entity is added.""" + return f"{device.vacuum['did']}_{component}" in self._added_legacy_entities + @property def devices(self) -> list[Device]: """Return devices.""" diff --git a/homeassistant/components/ecovacs/entity.py b/homeassistant/components/ecovacs/entity.py index 9d3092f37b4..36103be4d11 100644 --- a/homeassistant/components/ecovacs/entity.py +++ b/homeassistant/components/ecovacs/entity.py @@ -150,6 +150,11 @@ class EcovacsLegacyEntity(Entity): self._event_listeners: list[EventListener] = [] + @property + def available(self) -> bool: + """Return True if the entity is available.""" + return super().available and self.state is not None + async def async_will_remove_from_hass(self) -> None: """Remove event listeners on entity remove.""" for listener in self._event_listeners: diff --git a/homeassistant/components/ecovacs/sensor.py b/homeassistant/components/ecovacs/sensor.py index 256198693fb..28c4efbd0c6 100644 --- a/homeassistant/components/ecovacs/sensor.py +++ b/homeassistant/components/ecovacs/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Generic +from typing import Any, Generic from deebot_client.capabilities import CapabilityEvent, CapabilityLifeSpan from deebot_client.events import ( @@ -17,6 +17,7 @@ from deebot_client.events import ( StatsEvent, TotalStatsEvent, ) +from sucks import VacBot from homeassistant.components.sensor import ( SensorDeviceClass, @@ -37,11 +38,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from . 
import EcovacsConfigEntry -from .const import SUPPORTED_LIFESPANS +from .const import LEGACY_SUPPORTED_LIFESPANS, SUPPORTED_LIFESPANS from .entity import ( EcovacsCapabilityEntityDescription, EcovacsDescriptionEntity, EcovacsEntity, + EcovacsLegacyEntity, EventT, ) from .util import get_supported_entitites @@ -158,6 +160,25 @@ LIFESPAN_ENTITY_DESCRIPTIONS = tuple( ) +@dataclass(kw_only=True, frozen=True) +class EcovacsLegacyLifespanSensorEntityDescription(SensorEntityDescription): + """Ecovacs lifespan sensor entity description.""" + + component: str + + +LEGACY_LIFESPAN_SENSORS = tuple( + EcovacsLegacyLifespanSensorEntityDescription( + component=component, + key=f"lifespan_{component}", + translation_key=f"lifespan_{component}", + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + ) + for component in LEGACY_SUPPORTED_LIFESPANS +) + + async def async_setup_entry( hass: HomeAssistant, config_entry: EcovacsConfigEntry, @@ -183,6 +204,32 @@ async def async_setup_entry( async_add_entities(entities) + async def _add_legacy_entities() -> None: + entities = [] + for device in controller.legacy_devices: + for description in LEGACY_LIFESPAN_SENSORS: + if ( + description.component in device.components + and not controller.legacy_entity_is_added( + device, description.component + ) + ): + controller.add_legacy_entity(device, description.component) + entities.append(EcovacsLegacyLifespanSensor(device, description)) + + if entities: + async_add_entities(entities) + + def _fire_ecovacs_legacy_lifespan_event(_: Any) -> None: + hass.create_task(_add_legacy_entities()) + + for device in controller.legacy_devices: + config_entry.async_on_unload( + device.lifespanEvents.subscribe( + _fire_ecovacs_legacy_lifespan_event + ).unsubscribe + ) + class EcovacsSensor( EcovacsDescriptionEntity[CapabilityEvent], @@ -253,3 +300,36 @@ class EcovacsErrorSensor( self.async_write_ha_state() self._subscribe(self._capability.event, on_event) + + +class EcovacsLegacyLifespanSensor(EcovacsLegacyEntity, SensorEntity): + """Legacy Lifespan sensor.""" + + entity_description: EcovacsLegacyLifespanSensorEntityDescription + + def __init__( + self, + device: VacBot, + description: EcovacsLegacyLifespanSensorEntityDescription, + ) -> None: + """Initialize the entity.""" + super().__init__(device) + self.entity_description = description + self._attr_unique_id = f"{device.vacuum['did']}_{description.key}" + + if (value := device.components.get(description.component)) is not None: + value = int(value * 100) + self._attr_native_value = value + + async def async_added_to_hass(self) -> None: + """Set up the event listeners now that hass is ready.""" + + def on_event(_: Any) -> None: + if ( + value := self.device.components.get(self.entity_description.component) + ) is not None: + value = int(value * 100) + self._attr_native_value = value + self.schedule_update_ha_state() + + self._event_listeners.append(self.device.lifespanEvents.subscribe(on_event)) diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index d2e385c79c7..ea216f11694 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -119,6 +119,9 @@ "lifespan_lens_brush": { "name": "Lens brush lifespan" }, + "lifespan_main_brush": { + "name": "[%key:component::ecovacs::entity::sensor::lifespan_brush::name%]" + }, "lifespan_side_brush": { "name": "Side brush lifespan" }, diff --git a/homeassistant/components/ecovacs/vacuum.py 
b/homeassistant/components/ecovacs/vacuum.py index d28e632580f..e690038ff71 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -142,11 +142,6 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): return None - @property - def available(self) -> bool: - """Return True if the vacuum is available.""" - return super().available and self.state is not None - @property def battery_level(self) -> int | None: """Return the battery level of the vacuum cleaner.""" @@ -173,6 +168,7 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity): data: dict[str, Any] = {} data[ATTR_ERROR] = self.error + # these attributes are deprecated and can be removed in 2025.2 for key, val in self.device.components.items(): attr_name = ATTR_COMPONENT_PREFIX + key data[attr_name] = int(val * 100) diff --git a/tests/components/ecovacs/conftest.py b/tests/components/ecovacs/conftest.py index e53cfcc8a3d..22039d6c0bc 100644 --- a/tests/components/ecovacs/conftest.py +++ b/tests/components/ecovacs/conftest.py @@ -10,6 +10,7 @@ from deebot_client.device import Device from deebot_client.exceptions import ApiError from deebot_client.models import Credentials import pytest +from sucks import EventEmitter from homeassistant.components.ecovacs import PLATFORMS from homeassistant.components.ecovacs.const import DOMAIN @@ -128,10 +129,10 @@ def mock_vacbot(device_fixture: str) -> Generator[Mock]: vacbot.vacuum = load_json_object_fixture( f"devices/{device_fixture}/device.json", DOMAIN ) - vacbot.statusEvents = Mock() - vacbot.batteryEvents = Mock() - vacbot.lifespanEvents = Mock() - vacbot.errorEvents = Mock() + vacbot.statusEvents = EventEmitter() + vacbot.batteryEvents = EventEmitter() + vacbot.lifespanEvents = EventEmitter() + vacbot.errorEvents = EventEmitter() vacbot.battery_status = None vacbot.fan_speed = None vacbot.components = {} @@ -175,7 +176,7 @@ async def init_integration( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) yield mock_config_entry diff --git a/tests/components/ecovacs/snapshots/test_sensor.ambr b/tests/components/ecovacs/snapshots/test_sensor.ambr index 07ebd400870..659edfde2cf 100644 --- a/tests/components/ecovacs/snapshots/test_sensor.ambr +++ b/tests/components/ecovacs/snapshots/test_sensor.ambr @@ -1,4 +1,152 @@ # serializer version: 1 +# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filter lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_filter', + 'unique_id': 'E1234567890000000003_lifespan_filter', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_filter_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Filter lifespan', + 
'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_filter_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '40', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Main brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_main_brush', + 'unique_id': 'E1234567890000000003_lifespan_main_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_main_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Main brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_main_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:entity-registry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Side brush lifespan', + 'platform': 'ecovacs', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifespan_side_brush', + 'unique_id': 'E1234567890000000003_lifespan_side_brush', + 'unit_of_measurement': '%', + }) +# --- +# name: test_legacy_sensors[123][sensor.e1234567890000000003_side_brush_lifespan:state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'E1234567890000000003 Side brush lifespan', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.e1234567890000000003_side_brush_lifespan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_legacy_sensors[123][states] + list([ + 'sensor.e1234567890000000003_main_brush_lifespan', + 'sensor.e1234567890000000003_side_brush_lifespan', + 'sensor.e1234567890000000003_filter_lifespan', + ]) +# --- # name: test_sensors[5xu9h3][sensor.goat_g1_area_cleaned:entity-registry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ecovacs/test_sensor.py b/tests/components/ecovacs/test_sensor.py index 19b4c8ce09b..53c57999776 100644 --- a/tests/components/ecovacs/test_sensor.py +++ b/tests/components/ecovacs/test_sensor.py @@ -1,5 +1,7 @@ """Tests for Ecovacs sensors.""" +from unittest.mock import Mock + from deebot_client.event_bus import EventBus from deebot_client.events import ( BatteryEvent, @@ -152,3 +154,34 @@ async def test_disabled_by_default_sensors( ), 
f"Entity registry entry for {entity_id} is missing" assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures( + "entity_registry_enabled_by_default", "mock_vacbot", "init_integration" +) +@pytest.mark.parametrize(("device_fixture"), ["123"]) +async def test_legacy_sensors( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_vacbot: Mock, +) -> None: + """Test that sensor entity snapshots match.""" + mock_vacbot.components = {"main_brush": 0.8, "side_brush": 0.6, "filter": 0.4} + mock_vacbot.lifespanEvents.notify("dummy_data") + await hass.async_block_till_done(wait_background_tasks=True) + + states = hass.states.async_entity_ids() + assert snapshot(name="states") == states + + for entity_id in hass.states.async_entity_ids(): + assert (state := hass.states.get(entity_id)), f"State of {entity_id} is missing" + assert snapshot(name=f"{entity_id}:state") == state + + assert (entity_entry := entity_registry.async_get(state.entity_id)) + assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry + + assert entity_entry.device_id + assert (device_entry := device_registry.async_get(entity_entry.device_id)) + assert device_entry.identifiers == {(DOMAIN, "E1234567890000000003")} From 97de1c2b665b48033211ec2d9b3da9cf90e38143 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 15:46:13 +0200 Subject: [PATCH 0205/1635] Fix implicit-return in recorder (#122924) --- homeassistant/components/recorder/pool.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index dcb19ddf044..0fa0e82e98b 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -71,7 +71,8 @@ class RecorderPool(SingletonThreadPool, NullPool): def _do_return_conn(self, record: ConnectionPoolEntry) -> None: if threading.get_ident() in self.recorder_and_worker_thread_ids: - return super()._do_return_conn(record) + super()._do_return_conn(record) + return record.close() def shutdown(self) -> None: From 3df78043c05ca7268e7e9ade8f2ce988f6e6ae0c Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Wed, 31 Jul 2024 07:13:05 -0700 Subject: [PATCH 0206/1635] Add enable_millisecond to duration selector (#122821) * Add enable_milliseconds to duration selector. * One more test --- homeassistant/helpers/selector.py | 3 +++ tests/helpers/test_selector.py | 5 +++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/homeassistant/helpers/selector.py b/homeassistant/helpers/selector.py index 5a542657d10..025b8de8896 100644 --- a/homeassistant/helpers/selector.py +++ b/homeassistant/helpers/selector.py @@ -725,6 +725,7 @@ class DurationSelectorConfig(TypedDict, total=False): """Class to represent a duration selector config.""" enable_day: bool + enable_millisecond: bool allow_negative: bool @@ -739,6 +740,8 @@ class DurationSelector(Selector[DurationSelectorConfig]): # Enable day field in frontend. A selection with `days` set is allowed # even if `enable_day` is not set vol.Optional("enable_day"): cv.boolean, + # Enable millisecond field in frontend. + vol.Optional("enable_millisecond"): cv.boolean, # Allow negative durations. Will default to False in HA Core 2025.6.0. 
vol.Optional("allow_negative"): cv.boolean, } diff --git a/tests/helpers/test_selector.py b/tests/helpers/test_selector.py index e93ec3b8c22..de8c3555831 100644 --- a/tests/helpers/test_selector.py +++ b/tests/helpers/test_selector.py @@ -739,12 +739,13 @@ def test_attribute_selector_schema( ( {"seconds": 10}, {"days": 10}, # Days is allowed also if `enable_day` is not set + {"milliseconds": 500}, ), (None, {}), ), ( - {"enable_day": True}, - ({"seconds": 10}, {"days": 10}), + {"enable_day": True, "enable_millisecond": True}, + ({"seconds": 10}, {"days": 10}, {"milliseconds": 500}), (None, {}), ), ( From f76470562936dc4ce650346e79f60a49a61214c6 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Wed, 31 Jul 2024 16:23:27 +0200 Subject: [PATCH 0207/1635] Add support for ventilation device to ViCare (#114175) * add ventilation program & mode * add ventilation device * Update climate.py * Update climate.py * Update climate.py * Update climate.py * Update climate.py * Update const.py * Create fan.py * Update fan.py * Update types.py * add test case * add translation key * use translation key * update snapshot * fix ruff findings * fix ruff findings * add log messages to setter * adjust test case * reset climate entity * do not display speed if not in permanent mode * update snapshot * update test cases * add comment * mark fan as always on * prevent turning off device * allow to set permanent mode * make speed_count static * add debug outputs * add preset state translations * allow permanent mode * update snapshot * add test case * load programs only on init * comment on ventilation modes * adjust test cases * add exception message * ignore test coverage on fan.py * Update test_fan.py * simplify * Apply suggestions from code review * remove tests * remove extra state attributes * fix leftover * add missing labels * adjust label * change state keys * use _attr_preset_modes * fix ruff findings * fix attribute access * fix from_ha_mode * fix ruff findings * fix mypy findings * simplify * format * fix typo * fix ruff finding * Apply suggestions from code review * change fan mode handling * add test cases * remove turn_off * Apply suggestions from code review Co-authored-by: Erik Montnemery * Apply suggestions from code review * Update fan.py --------- Co-authored-by: Erik Montnemery --- homeassistant/components/vicare/const.py | 1 + homeassistant/components/vicare/fan.py | 124 +++++++++++++++++++ homeassistant/components/vicare/strings.json | 15 +++ homeassistant/components/vicare/types.py | 49 ++++++++ tests/components/vicare/test_types.py | 87 +++++++++++++ 5 files changed, 276 insertions(+) create mode 100644 homeassistant/components/vicare/fan.py create mode 100644 tests/components/vicare/test_types.py diff --git a/homeassistant/components/vicare/const.py b/homeassistant/components/vicare/const.py index 286dcbbdfa8..8f8ae3c94e3 100644 --- a/homeassistant/components/vicare/const.py +++ b/homeassistant/components/vicare/const.py @@ -10,6 +10,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, + Platform.FAN, Platform.NUMBER, Platform.SENSOR, Platform.WATER_HEATER, diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py new file mode 100644 index 00000000000..088e54c7354 --- /dev/null +++ b/homeassistant/components/vicare/fan.py @@ -0,0 +1,124 @@ +"""Viessmann ViCare ventilation device.""" + +from __future__ import annotations + +from contextlib import suppress +import logging + +from 
PyViCare.PyViCareDevice import Device as PyViCareDevice +from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareUtils import ( + PyViCareInvalidDataError, + PyViCareNotSupportedFeatureError, + PyViCareRateLimitError, +) +from PyViCare.PyViCareVentilationDevice import ( + VentilationDevice as PyViCareVentilationDevice, +) +from requests.exceptions import ConnectionError as RequestConnectionError + +from homeassistant.components.fan import FanEntity, FanEntityFeature +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.percentage import ( + ordered_list_item_to_percentage, + percentage_to_ordered_list_item, +) + +from .const import DEVICE_LIST, DOMAIN +from .entity import ViCareEntity +from .types import VentilationMode, VentilationProgram + +_LOGGER = logging.getLogger(__name__) + +ORDERED_NAMED_FAN_SPEEDS = [ + VentilationProgram.LEVEL_ONE, + VentilationProgram.LEVEL_TWO, + VentilationProgram.LEVEL_THREE, + VentilationProgram.LEVEL_FOUR, +] + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the ViCare fan platform.""" + + device_list = hass.data[DOMAIN][config_entry.entry_id][DEVICE_LIST] + + async_add_entities( + [ + ViCareFan(device.config, device.api) + for device in device_list + if isinstance(device.api, PyViCareVentilationDevice) + ] + ) + + +class ViCareFan(ViCareEntity, FanEntity): + """Representation of the ViCare ventilation device.""" + + _attr_preset_modes = list[str]( + [ + VentilationMode.PERMANENT, + VentilationMode.VENTILATION, + VentilationMode.SENSOR_DRIVEN, + VentilationMode.SENSOR_OVERRIDE, + ] + ) + _attr_speed_count = len(ORDERED_NAMED_FAN_SPEEDS) + _attr_supported_features = FanEntityFeature.SET_SPEED | FanEntityFeature.PRESET_MODE + _attr_translation_key = "ventilation" + _enable_turn_on_off_backwards_compatibility = False + + def __init__( + self, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + ) -> None: + """Initialize the fan entity.""" + super().__init__(device_config, device, self._attr_translation_key) + + def update(self) -> None: + """Update state of fan.""" + try: + with suppress(PyViCareNotSupportedFeatureError): + self._attr_preset_mode = VentilationMode.from_vicare_mode( + self._api.getActiveMode() + ) + with suppress(PyViCareNotSupportedFeatureError): + self._attr_percentage = ordered_list_item_to_percentage( + ORDERED_NAMED_FAN_SPEEDS, self._api.getActiveProgram() + ) + except RequestConnectionError: + _LOGGER.error("Unable to retrieve data from ViCare server") + except ValueError: + _LOGGER.error("Unable to decode data from ViCare server") + except PyViCareRateLimitError as limit_exception: + _LOGGER.error("Vicare API rate limit exceeded: %s", limit_exception) + except PyViCareInvalidDataError as invalid_data_exception: + _LOGGER.error("Invalid data from Vicare server: %s", invalid_data_exception) + + @property + def is_on(self) -> bool | None: + """Return true if the entity is on.""" + # Viessmann ventilation unit cannot be turned off + return True + + def set_percentage(self, percentage: int) -> None: + """Set the speed of the fan, as a percentage.""" + if self._attr_preset_mode != str(VentilationMode.PERMANENT): + self.set_preset_mode(VentilationMode.PERMANENT) + + level = percentage_to_ordered_list_item(ORDERED_NAMED_FAN_SPEEDS, percentage) + 
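# Explanatory note (not part of the upstream diff): with four entries in
# ORDERED_NAMED_FAN_SPEEDS, percentage_to_ordered_list_item() buckets the
# requested percentage roughly as 1-25% -> levelOne, 26-50% -> levelTwo,
# 51-75% -> levelThree and 76-100% -> levelFour, while update() reports the
# active level back as 25/50/75/100% via ordered_list_item_to_percentage().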
_LOGGER.debug("changing ventilation level to %s", level) + self._api.setPermanentLevel(level) + + def set_preset_mode(self, preset_mode: str) -> None: + """Set new preset mode.""" + target_mode = VentilationMode.to_vicare_mode(preset_mode) + _LOGGER.debug("changing ventilation mode to %s", target_mode) + self._api.setActiveMode(target_mode) diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index de92d0ec271..7c0088d065f 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -65,6 +65,21 @@ "name": "Heating" } }, + "fan": { + "ventilation": { + "name": "Ventilation", + "state_attributes": { + "preset_mode": { + "state": { + "permanent": "permanent", + "ventilation": "schedule", + "sensor_driven": "sensor", + "sensor_override": "schedule with sensor-override" + } + } + } + } + }, "number": { "heating_curve_shift": { "name": "Heating curve shift" diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 7e1ec7f8bee..596605fccdd 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -64,6 +64,55 @@ VICARE_TO_HA_PRESET_HEATING = { } +class VentilationMode(enum.StrEnum): + """ViCare ventilation modes.""" + + PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) + VENTILATION = "ventilation" # activated by schedule + SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor + SENSOR_OVERRIDE = "sensor_override" # activated by sensor + + @staticmethod + def to_vicare_mode(mode: str | None) -> str | None: + """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" + if mode: + try: + ventilation_mode = VentilationMode(mode) + except ValueError: + # ignore unsupported / unmapped modes + return None + return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None + return None + + @staticmethod + def from_vicare_mode(vicare_mode: str | None) -> str | None: + """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" + for mode in VentilationMode: + if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: + return mode + return None + + +HA_TO_VICARE_MODE_VENTILATION = { + VentilationMode.PERMANENT: "permanent", + VentilationMode.VENTILATION: "ventilation", + VentilationMode.SENSOR_DRIVEN: "sensorDriven", + VentilationMode.SENSOR_OVERRIDE: "sensorOverride", +} + + +class VentilationProgram(enum.StrEnum): + """ViCare preset ventilation programs. 
+ + As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 + """ + + LEVEL_ONE = "levelOne" + LEVEL_TWO = "levelTwo" + LEVEL_THREE = "levelThree" + LEVEL_FOUR = "levelFour" + + @dataclass(frozen=True) class ViCareDevice: """Dataclass holding the device api and config.""" diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py new file mode 100644 index 00000000000..575e549f0d9 --- /dev/null +++ b/tests/components/vicare/test_types.py @@ -0,0 +1,87 @@ +"""Test ViCare diagnostics.""" + +import pytest + +from homeassistant.components.climate import PRESET_COMFORT, PRESET_SLEEP +from homeassistant.components.vicare.types import HeatingProgram, VentilationMode + + +@pytest.mark.parametrize( + ("vicare_program", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (HeatingProgram.COMFORT, PRESET_COMFORT), + (HeatingProgram.COMFORT_HEATING, PRESET_COMFORT), + ], +) +async def test_heating_program_to_ha_preset( + vicare_program: str | None, + expected_result: str | None, +) -> None: + """Testing ViCare HeatingProgram to HA Preset.""" + + assert HeatingProgram.to_ha_preset(vicare_program) == expected_result + + +@pytest.mark.parametrize( + ("ha_preset", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (PRESET_SLEEP, HeatingProgram.REDUCED), + ], +) +async def test_ha_preset_to_heating_program( + ha_preset: str | None, + expected_result: str | None, +) -> None: + """Testing HA Preset tp ViCare HeatingProgram.""" + + supported_programs = [ + HeatingProgram.COMFORT, + HeatingProgram.ECO, + HeatingProgram.NORMAL, + HeatingProgram.REDUCED, + ] + assert ( + HeatingProgram.from_ha_preset(ha_preset, supported_programs) == expected_result + ) + + +@pytest.mark.parametrize( + ("vicare_mode", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + ("sensorOverride", VentilationMode.SENSOR_OVERRIDE), + ], +) +async def test_ventilation_mode_to_ha_mode( + vicare_mode: str | None, + expected_result: str | None, +) -> None: + """Testing ViCare mode to VentilationMode.""" + + assert VentilationMode.from_vicare_mode(vicare_mode) == expected_result + + +@pytest.mark.parametrize( + ("ha_mode", "expected_result"), + [ + ("", None), + (None, None), + ("anything", None), + (VentilationMode.SENSOR_OVERRIDE, "sensorOverride"), + ], +) +async def test_ha_mode_to_ventilation_mode( + ha_mode: str | None, + expected_result: str | None, +) -> None: + """Testing VentilationMode to ViCare mode.""" + + assert VentilationMode.to_vicare_mode(ha_mode) == expected_result From 8c0d9a13203980d17027518427fe10dce2ee249c Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 31 Jul 2024 17:04:09 +0200 Subject: [PATCH 0208/1635] Add Reolink chime support (#122752) --- homeassistant/components/reolink/__init__.py | 16 +- .../components/reolink/binary_sensor.py | 16 +- homeassistant/components/reolink/button.py | 8 +- homeassistant/components/reolink/entity.py | 44 ++++- homeassistant/components/reolink/icons.json | 12 ++ homeassistant/components/reolink/number.py | 75 +++++++- homeassistant/components/reolink/select.py | 100 ++++++++++- homeassistant/components/reolink/sensor.py | 18 +- homeassistant/components/reolink/strings.json | 51 ++++++ homeassistant/components/reolink/switch.py | 79 ++++++++- homeassistant/components/reolink/update.py | 8 +- tests/components/reolink/test_init.py | 9 + tests/components/reolink/test_select.py | 167 
++++++++++++++++++ 13 files changed, 549 insertions(+), 54 deletions(-) create mode 100644 tests/components/reolink/test_select.py diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index 2077b4a5e29..cc293d970b2 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -186,7 +186,7 @@ async def async_remove_config_entry_device( ) -> bool: """Remove a device from a config entry.""" host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host - (device_uid, ch) = get_device_uid_and_ch(device, host) + (device_uid, ch, is_chime) = get_device_uid_and_ch(device, host) if not host.api.is_nvr or ch is None: _LOGGER.warning( @@ -227,20 +227,24 @@ async def async_remove_config_entry_device( def get_device_uid_and_ch( device: dr.DeviceEntry, host: ReolinkHost -) -> tuple[list[str], int | None]: +) -> tuple[list[str], int | None, bool]: """Get the channel and the split device_uid from a reolink DeviceEntry.""" device_uid = [ dev_id[1].split("_") for dev_id in device.identifiers if dev_id[0] == DOMAIN ][0] + is_chime = False if len(device_uid) < 2: # NVR itself ch = None elif device_uid[1].startswith("ch") and len(device_uid[1]) <= 5: ch = int(device_uid[1][2:]) + elif device_uid[1].startswith("chime"): + ch = int(device_uid[1][5:]) + is_chime = True else: ch = host.api.channel_for_uid(device_uid[1]) - return (device_uid, ch) + return (device_uid, ch, is_chime) def migrate_entity_ids( @@ -251,7 +255,7 @@ def migrate_entity_ids( devices = dr.async_entries_for_config_entry(device_reg, config_entry_id) ch_device_ids = {} for device in devices: - (device_uid, ch) = get_device_uid_and_ch(device, host) + (device_uid, ch, is_chime) = get_device_uid_and_ch(device, host) if host.api.supported(None, "UID") and device_uid[0] != host.unique_id: if ch is None: @@ -261,8 +265,8 @@ def migrate_entity_ids( new_identifiers = {(DOMAIN, new_device_id)} device_reg.async_update_device(device.id, new_identifiers=new_identifiers) - if ch is None: - continue # Do not consider the NVR itself + if ch is None or is_chime: + continue # Do not consider the NVR itself or chimes ch_device_ids[device.id] = ch if host.api.supported(ch, "UID") and device_uid[1] != host.api.camera_uid(ch): diff --git a/homeassistant/components/reolink/binary_sensor.py b/homeassistant/components/reolink/binary_sensor.py index d19987c3bc6..70c21849bc2 100644 --- a/homeassistant/components/reolink/binary_sensor.py +++ b/homeassistant/components/reolink/binary_sensor.py @@ -117,18 +117,14 @@ async def async_setup_entry( entities: list[ReolinkBinarySensorEntity] = [] for channel in reolink_data.host.api.channels: entities.extend( - [ - ReolinkPushBinarySensorEntity(reolink_data, channel, entity_description) - for entity_description in BINARY_PUSH_SENSORS - if entity_description.supported(reolink_data.host.api, channel) - ] + ReolinkPushBinarySensorEntity(reolink_data, channel, entity_description) + for entity_description in BINARY_PUSH_SENSORS + if entity_description.supported(reolink_data.host.api, channel) ) entities.extend( - [ - ReolinkBinarySensorEntity(reolink_data, channel, entity_description) - for entity_description in BINARY_SENSORS - if entity_description.supported(reolink_data.host.api, channel) - ] + ReolinkBinarySensorEntity(reolink_data, channel, entity_description) + for entity_description in BINARY_SENSORS + if entity_description.supported(reolink_data.host.api, channel) ) async_add_entities(entities) diff --git 
a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index 528807920d3..eba0570a3fb 100644 --- a/homeassistant/components/reolink/button.py +++ b/homeassistant/components/reolink/button.py @@ -164,11 +164,9 @@ async def async_setup_entry( if entity_description.supported(reolink_data.host.api, channel) ] entities.extend( - [ - ReolinkHostButtonEntity(reolink_data, entity_description) - for entity_description in HOST_BUTTON_ENTITIES - if entity_description.supported(reolink_data.host.api) - ] + ReolinkHostButtonEntity(reolink_data, entity_description) + for entity_description in HOST_BUTTON_ENTITIES + if entity_description.supported(reolink_data.host.api) ) async_add_entities(entities) diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index c07983175ae..053792ad667 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from reolink_aio.api import DUAL_LENS_MODELS, Host +from reolink_aio.api import DUAL_LENS_MODELS, Chime, Host from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import EntityDescription @@ -59,8 +59,9 @@ class ReolinkHostCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinator[None] http_s = "https" if self._host.api.use_https else "http" self._conf_url = f"{http_s}://{self._host.api.host}:{self._host.api.port}" + self._dev_id = self._host.unique_id self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, self._host.unique_id)}, + identifiers={(DOMAIN, self._dev_id)}, connections={(CONNECTION_NETWORK_MAC, self._host.api.mac_address)}, name=self._host.api.nvr_name, model=self._host.api.model, @@ -126,12 +127,14 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): if self._host.api.is_nvr: if self._host.api.supported(dev_ch, "UID"): - dev_id = f"{self._host.unique_id}_{self._host.api.camera_uid(dev_ch)}" + self._dev_id = ( + f"{self._host.unique_id}_{self._host.api.camera_uid(dev_ch)}" + ) else: - dev_id = f"{self._host.unique_id}_ch{dev_ch}" + self._dev_id = f"{self._host.unique_id}_ch{dev_ch}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, dev_id)}, + identifiers={(DOMAIN, self._dev_id)}, via_device=(DOMAIN, self._host.unique_id), name=self._host.api.camera_name(dev_ch), model=self._host.api.camera_model(dev_ch), @@ -156,3 +159,34 @@ class ReolinkChannelCoordinatorEntity(ReolinkHostCoordinatorEntity): self._host.async_unregister_update_cmd(cmd_key, self._channel) await super().async_will_remove_from_hass() + + +class ReolinkChimeCoordinatorEntity(ReolinkChannelCoordinatorEntity): + """Parent class for Reolink chime entities connected.""" + + def __init__( + self, + reolink_data: ReolinkData, + chime: Chime, + coordinator: DataUpdateCoordinator[None] | None = None, + ) -> None: + """Initialize ReolinkChimeCoordinatorEntity for a chime.""" + super().__init__(reolink_data, chime.channel, coordinator) + + self._chime = chime + + self._attr_unique_id = ( + f"{self._host.unique_id}_chime{chime.dev_id}_{self.entity_description.key}" + ) + cam_dev_id = self._dev_id + self._dev_id = f"{self._host.unique_id}_chime{chime.dev_id}" + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._dev_id)}, + via_device=(DOMAIN, cam_dev_id), + name=chime.name, + model="Reolink Chime", + 
manufacturer=self._host.api.manufacturer, + serial_number=str(chime.dev_id), + configuration_url=self._conf_url, + ) diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 539c2461204..7ca4c2d7f2b 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -206,6 +206,15 @@ }, "hdr": { "default": "mdi:hdr" + }, + "motion_tone": { + "default": "mdi:music-note" + }, + "people_tone": { + "default": "mdi:music-note" + }, + "visitor_tone": { + "default": "mdi:music-note" } }, "sensor": { @@ -284,6 +293,9 @@ }, "pir_reduce_alarm": { "default": "mdi:motion-sensor" + }, + "led": { + "default": "mdi:lightning-bolt-circle" } } }, diff --git a/homeassistant/components/reolink/number.py b/homeassistant/components/reolink/number.py index a4ea89c5b26..1dc99c886e1 100644 --- a/homeassistant/components/reolink/number.py +++ b/homeassistant/components/reolink/number.py @@ -6,7 +6,7 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Any -from reolink_aio.api import Host +from reolink_aio.api import Chime, Host from reolink_aio.exceptions import InvalidParameterError, ReolinkError from homeassistant.components.number import ( @@ -22,7 +22,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkChimeCoordinatorEntity, +) @dataclass(frozen=True, kw_only=True) @@ -39,6 +43,18 @@ class ReolinkNumberEntityDescription( value: Callable[[Host, int], float | None] +@dataclass(frozen=True, kw_only=True) +class ReolinkChimeNumberEntityDescription( + NumberEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes number entities for a chime.""" + + method: Callable[[Chime, float], Any] + mode: NumberMode = NumberMode.AUTO + value: Callable[[Chime], float | None] + + NUMBER_ENTITIES = ( ReolinkNumberEntityDescription( key="zoom", @@ -459,6 +475,20 @@ NUMBER_ENTITIES = ( ), ) +CHIME_NUMBER_ENTITIES = ( + ReolinkChimeNumberEntityDescription( + key="volume", + cmd_key="DingDongOpt", + translation_key="volume", + entity_category=EntityCategory.CONFIG, + native_step=1, + native_min_value=0, + native_max_value=4, + value=lambda chime: chime.volume, + method=lambda chime, value: chime.set_option(volume=int(value)), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -468,12 +498,18 @@ async def async_setup_entry( """Set up a Reolink number entities.""" reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( + entities: list[ReolinkNumberEntity | ReolinkChimeNumberEntity] = [ ReolinkNumberEntity(reolink_data, channel, entity_description) for entity_description in NUMBER_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) + ] + entities.extend( + ReolinkChimeNumberEntity(reolink_data, chime, entity_description) + for entity_description in CHIME_NUMBER_ENTITIES + for chime in reolink_data.host.api.chime_list ) + async_add_entities(entities) class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): @@ -515,3 +551,36 @@ class ReolinkNumberEntity(ReolinkChannelCoordinatorEntity, NumberEntity): except ReolinkError as err: raise HomeAssistantError(err) from err 
self.async_write_ha_state() + + +class ReolinkChimeNumberEntity(ReolinkChimeCoordinatorEntity, NumberEntity): + """Base number entity class for Reolink IP cameras.""" + + entity_description: ReolinkChimeNumberEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + chime: Chime, + entity_description: ReolinkChimeNumberEntityDescription, + ) -> None: + """Initialize Reolink chime number entity.""" + self.entity_description = entity_description + super().__init__(reolink_data, chime) + + self._attr_mode = entity_description.mode + + @property + def native_value(self) -> float | None: + """State of the number entity.""" + return self.entity_description.value(self._chime) + + async def async_set_native_value(self, value: float) -> None: + """Update the current value.""" + try: + await self.entity_description.method(self._chime, value) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err + self.async_write_ha_state() diff --git a/homeassistant/components/reolink/select.py b/homeassistant/components/reolink/select.py index cf32d7b45f9..94cfdf6751b 100644 --- a/homeassistant/components/reolink/select.py +++ b/homeassistant/components/reolink/select.py @@ -8,6 +8,8 @@ import logging from typing import Any from reolink_aio.api import ( + Chime, + ChimeToneEnum, DayNightEnum, HDREnum, Host, @@ -26,7 +28,11 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import ReolinkData from .const import DOMAIN -from .entity import ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription +from .entity import ( + ReolinkChannelCoordinatorEntity, + ReolinkChannelEntityDescription, + ReolinkChimeCoordinatorEntity, +) _LOGGER = logging.getLogger(__name__) @@ -43,6 +49,18 @@ class ReolinkSelectEntityDescription( value: Callable[[Host, int], str] | None = None +@dataclass(frozen=True, kw_only=True) +class ReolinkChimeSelectEntityDescription( + SelectEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes select entities for a chime.""" + + get_options: list[str] + method: Callable[[Chime, str], Any] + value: Callable[[Chime], str] + + def _get_quick_reply_id(api: Host, ch: int, mess: str) -> int: """Get the quick reply file id from the message string.""" return [k for k, v in api.quick_reply_dict(ch).items() if v == mess][0] @@ -132,6 +150,36 @@ SELECT_ENTITIES = ( ), ) +CHIME_SELECT_ENTITIES = ( + ReolinkChimeSelectEntityDescription( + key="motion_tone", + cmd_key="GetDingDongCfg", + translation_key="motion_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + value=lambda chime: ChimeToneEnum(chime.tone("md")).name, + method=lambda chime, name: chime.set_tone("md", ChimeToneEnum[name].value), + ), + ReolinkChimeSelectEntityDescription( + key="people_tone", + cmd_key="GetDingDongCfg", + translation_key="people_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + value=lambda chime: ChimeToneEnum(chime.tone("people")).name, + method=lambda chime, name: chime.set_tone("people", ChimeToneEnum[name].value), + ), + ReolinkChimeSelectEntityDescription( + key="visitor_tone", + cmd_key="GetDingDongCfg", + translation_key="visitor_tone", + entity_category=EntityCategory.CONFIG, + get_options=[method.name for method in ChimeToneEnum], + value=lambda chime: ChimeToneEnum(chime.tone("visitor")).name, + method=lambda chime, name: 
chime.set_tone("visitor", ChimeToneEnum[name].value), + ), +) + async def async_setup_entry( hass: HomeAssistant, @@ -141,12 +189,18 @@ async def async_setup_entry( """Set up a Reolink select entities.""" reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] - async_add_entities( + entities: list[ReolinkSelectEntity | ReolinkChimeSelectEntity] = [ ReolinkSelectEntity(reolink_data, channel, entity_description) for entity_description in SELECT_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) + ] + entities.extend( + ReolinkChimeSelectEntity(reolink_data, chime, entity_description) + for entity_description in CHIME_SELECT_ENTITIES + for chime in reolink_data.host.api.chime_list ) + async_add_entities(entities) class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): @@ -196,3 +250,45 @@ class ReolinkSelectEntity(ReolinkChannelCoordinatorEntity, SelectEntity): except ReolinkError as err: raise HomeAssistantError(err) from err self.async_write_ha_state() + + +class ReolinkChimeSelectEntity(ReolinkChimeCoordinatorEntity, SelectEntity): + """Base select entity class for Reolink IP cameras.""" + + entity_description: ReolinkChimeSelectEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + chime: Chime, + entity_description: ReolinkChimeSelectEntityDescription, + ) -> None: + """Initialize Reolink select entity for a chime.""" + self.entity_description = entity_description + super().__init__(reolink_data, chime) + self._log_error = True + self._attr_options = entity_description.get_options + + @property + def current_option(self) -> str | None: + """Return the current option.""" + try: + option = self.entity_description.value(self._chime) + except ValueError: + if self._log_error: + _LOGGER.exception("Reolink '%s' has an unknown value", self.name) + self._log_error = False + return None + + self._log_error = True + return option + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + try: + await self.entity_description.method(self._chime, option) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err + self.async_write_ha_state() diff --git a/homeassistant/components/reolink/sensor.py b/homeassistant/components/reolink/sensor.py index 419270a7082..988b091735e 100644 --- a/homeassistant/components/reolink/sensor.py +++ b/homeassistant/components/reolink/sensor.py @@ -141,19 +141,15 @@ async def async_setup_entry( if entity_description.supported(reolink_data.host.api, channel) ] entities.extend( - [ - ReolinkHostSensorEntity(reolink_data, entity_description) - for entity_description in HOST_SENSORS - if entity_description.supported(reolink_data.host.api) - ] + ReolinkHostSensorEntity(reolink_data, entity_description) + for entity_description in HOST_SENSORS + if entity_description.supported(reolink_data.host.api) ) entities.extend( - [ - ReolinkHddSensorEntity(reolink_data, hdd_index, entity_description) - for entity_description in HDD_SENSORS - for hdd_index in reolink_data.host.api.hdd_list - if entity_description.supported(reolink_data.host.api, hdd_index) - ] + ReolinkHddSensorEntity(reolink_data, hdd_index, entity_description) + for entity_description in HDD_SENSORS + for hdd_index in reolink_data.host.api.hdd_list + if entity_description.supported(reolink_data.host.api, hdd_index) ) async_add_entities(entities) diff --git 
a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index bcf1c71934d..cad09f71562 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -491,6 +491,54 @@ "on": "[%key:common::state::on%]", "auto": "Auto" } + }, + "motion_tone": { + "name": "Motion ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "City bird", + "originaltune": "Original tune", + "pianokey": "Piano key", + "loop": "Loop", + "attraction": "Attraction", + "hophop": "Hop hop", + "goodday": "Good day", + "operetta": "Operetta", + "moonlight": "Moonlight", + "waybackhome": "Way back home" + } + }, + "people_tone": { + "name": "Person ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } + }, + "visitor_tone": { + "name": "Visitor ringtone", + "state": { + "off": "[%key:common::state::off%]", + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } } }, "sensor": { @@ -574,6 +622,9 @@ }, "pir_reduce_alarm": { "name": "PIR reduce false alarm" + }, + "led": { + "name": "LED" } } } diff --git a/homeassistant/components/reolink/switch.py b/homeassistant/components/reolink/switch.py index cd74d774bb1..2bf7689b32f 100644 --- a/homeassistant/components/reolink/switch.py +++ b/homeassistant/components/reolink/switch.py @@ -6,7 +6,7 @@ from collections.abc import Callable from dataclasses import dataclass from typing import Any -from reolink_aio.api import Host +from reolink_aio.api import Chime, Host from reolink_aio.exceptions import ReolinkError from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription @@ -22,6 +22,7 @@ from .const import DOMAIN from .entity import ( ReolinkChannelCoordinatorEntity, ReolinkChannelEntityDescription, + 
ReolinkChimeCoordinatorEntity, ReolinkHostCoordinatorEntity, ReolinkHostEntityDescription, ) @@ -49,6 +50,17 @@ class ReolinkNVRSwitchEntityDescription( value: Callable[[Host], bool] +@dataclass(frozen=True, kw_only=True) +class ReolinkChimeSwitchEntityDescription( + SwitchEntityDescription, + ReolinkChannelEntityDescription, +): + """A class that describes switch entities for a chime.""" + + method: Callable[[Chime, bool], Any] + value: Callable[[Chime], bool | None] + + SWITCH_ENTITIES = ( ReolinkSwitchEntityDescription( key="ir_lights", @@ -245,6 +257,17 @@ NVR_SWITCH_ENTITIES = ( ), ) +CHIME_SWITCH_ENTITIES = ( + ReolinkChimeSwitchEntityDescription( + key="chime_led", + cmd_key="DingDongOpt", + translation_key="led", + entity_category=EntityCategory.CONFIG, + value=lambda chime: chime.led_state, + method=lambda chime, value: chime.set_option(led=value), + ), +) + # Can be removed in HA 2025.2.0 DEPRECATED_HDR = ReolinkSwitchEntityDescription( key="hdr", @@ -266,18 +289,23 @@ async def async_setup_entry( """Set up a Reolink switch entities.""" reolink_data: ReolinkData = hass.data[DOMAIN][config_entry.entry_id] - entities: list[ReolinkSwitchEntity | ReolinkNVRSwitchEntity] = [ + entities: list[ + ReolinkSwitchEntity | ReolinkNVRSwitchEntity | ReolinkChimeSwitchEntity + ] = [ ReolinkSwitchEntity(reolink_data, channel, entity_description) for entity_description in SWITCH_ENTITIES for channel in reolink_data.host.api.channels if entity_description.supported(reolink_data.host.api, channel) ] entities.extend( - [ - ReolinkNVRSwitchEntity(reolink_data, entity_description) - for entity_description in NVR_SWITCH_ENTITIES - if entity_description.supported(reolink_data.host.api) - ] + ReolinkNVRSwitchEntity(reolink_data, entity_description) + for entity_description in NVR_SWITCH_ENTITIES + if entity_description.supported(reolink_data.host.api) + ) + entities.extend( + ReolinkChimeSwitchEntity(reolink_data, chime, entity_description) + for entity_description in CHIME_SWITCH_ENTITIES + for chime in reolink_data.host.api.chime_list ) # Can be removed in HA 2025.2.0 @@ -378,3 +406,40 @@ class ReolinkNVRSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity): except ReolinkError as err: raise HomeAssistantError(err) from err self.async_write_ha_state() + + +class ReolinkChimeSwitchEntity(ReolinkChimeCoordinatorEntity, SwitchEntity): + """Base switch entity class for a chime.""" + + entity_description: ReolinkChimeSwitchEntityDescription + + def __init__( + self, + reolink_data: ReolinkData, + chime: Chime, + entity_description: ReolinkChimeSwitchEntityDescription, + ) -> None: + """Initialize Reolink switch entity.""" + self.entity_description = entity_description + super().__init__(reolink_data, chime) + + @property + def is_on(self) -> bool | None: + """Return true if switch is on.""" + return self.entity_description.value(self._chime) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the entity on.""" + try: + await self.entity_description.method(self._chime, True) + except ReolinkError as err: + raise HomeAssistantError(err) from err + self.async_write_ha_state() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the entity off.""" + try: + await self.entity_description.method(self._chime, False) + except ReolinkError as err: + raise HomeAssistantError(err) from err + self.async_write_ha_state() diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index da3dafe0130..9b710c6576d 100644 --- 
a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -81,11 +81,9 @@ async def async_setup_entry( if entity_description.supported(reolink_data.host.api, channel) ] entities.extend( - [ - ReolinkHostUpdateEntity(reolink_data, entity_description) - for entity_description in HOST_UPDATE_ENTITIES - if entity_description.supported(reolink_data.host.api) - ] + ReolinkHostUpdateEntity(reolink_data, entity_description) + for entity_description in HOST_UPDATE_ENTITIES + if entity_description.supported(reolink_data.host.api) ) async_add_entities(entities) diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 85ce5d94657..4f745530b6b 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -282,6 +282,15 @@ async def test_removing_disconnected_cams( True, False, ), + ( + f"{TEST_MAC}_chime123456789_play_ringtone", + f"{TEST_UID}_chime123456789_play_ringtone", + f"{TEST_MAC}_chime123456789", + f"{TEST_UID}_chime123456789", + Platform.SELECT, + True, + False, + ), ( f"{TEST_MAC}_0_record_audio", f"{TEST_MAC}_{TEST_UID_CAM}_record_audio", diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py new file mode 100644 index 00000000000..908c06dc16f --- /dev/null +++ b/tests/components/reolink/test_select.py @@ -0,0 +1,167 @@ +"""Test the Reolink select platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_SELECT_OPTION, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er +from homeassistant.util.dt import utcnow + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_floodlight_mode_select( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test select entity with floodlight_mode.""" + reolink_connect.whiteled_mode.return_value = 1 + reolink_connect.whiteled_mode_list.return_value = ["off", "auto"] + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" + assert hass.states.is_state(entity_id, "auto") + + reolink_connect.set_whiteled = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + reolink_connect.set_whiteled.assert_called_once() + + reolink_connect.set_whiteled = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + reolink_connect.set_whiteled = 
AsyncMock( + side_effect=InvalidParameterError("Test error") + ) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + +async def test_play_quick_reply_message( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test select play_quick_reply_message entity.""" + reolink_connect.quick_reply_dict.return_value = {0: "off", 1: "test message"} + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" + assert hass.states.is_state(entity_id, STATE_UNKNOWN) + + reolink_connect.play_quick_reply = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "test message"}, + blocking=True, + ) + reolink_connect.play_quick_reply.assert_called_once() + + +async def test_chime_select( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime select entity.""" + TEST_CHIME = Chime( + host=reolink_connect, + dev_id=12345678, + channel=0, + name="Test chime", + event_info={ + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + }, + ) + TEST_CHIME.volume = 3 + TEST_CHIME.led_state = True + + reolink_connect.chime_list = [TEST_CHIME] + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) is True + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + assert hass.states.is_state(entity_id, "pianokey") + + TEST_CHIME.set_tone = AsyncMock() + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + TEST_CHIME.set_tone.assert_called_once() + + TEST_CHIME.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + TEST_CHIME.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + {ATTR_ENTITY_ID: entity_id, "option": "off"}, + blocking=True, + ) + + TEST_CHIME.event_info = {} + async_fire_time_changed( + hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) + ) + await hass.async_block_till_done() + + assert hass.states.is_state(entity_id, STATE_UNKNOWN) From a1b8545568ae54ade7a3e45d61644afdb25414d8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 17:13:53 +0200 Subject: [PATCH 0209/1635] Fix unnecessary-return-none in nest (#122951) --- homeassistant/components/nest/media_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/nest/media_source.py 
b/homeassistant/components/nest/media_source.py index 6c481806e4f..71501e72552 100644 --- a/homeassistant/components/nest/media_source.py +++ b/homeassistant/components/nest/media_source.py @@ -228,7 +228,7 @@ class NestEventMediaStore(EventMediaStore): def remove_media(filename: str) -> None: if not os.path.exists(filename): - return None + return _LOGGER.debug("Removing event media from disk store: %s", filename) os.remove(filename) From c359d4a419e7ee4133fff96732930aa8a598095b Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 31 Jul 2024 17:53:52 +0200 Subject: [PATCH 0210/1635] Update frontend to 20240731.0 (#122956) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index d7253b52b28..60cfa0a26ff 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240719.0"] + "requirements": ["home-assistant-frontend==20240731.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c52ccfa6a8c..c7e5528d675 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240719.0 +home-assistant-frontend==20240731.0 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 3c8d5357568..472270c1dcf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1090,7 +1090,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240719.0 +home-assistant-frontend==20240731.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b95d1b58922..b2021a6bc7f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -913,7 +913,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240719.0 +home-assistant-frontend==20240731.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From 69f54656c44b9de30777abdce925cd0bb88be76a Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 31 Jul 2024 17:58:11 +0200 Subject: [PATCH 0211/1635] Fix cleanup of orphan device entries in AVM Fritz!Box Tools (#122937) * fix cleanup of orphan device entries * add test for cleanup button --- homeassistant/components/fritz/coordinator.py | 4 +- tests/components/fritz/test_button.py | 74 +++++++++++++++++-- 2 files changed, 72 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index 8a55084d7ef..592bf37084e 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -666,7 +666,9 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): entity_reg.async_remove(entity.entity_id) device_reg = dr.async_get(self.hass) - 
orphan_connections = {(CONNECTION_NETWORK_MAC, mac) for mac in orphan_macs} + orphan_connections = { + (CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in orphan_macs + } for device in dr.async_entries_for_config_entry( device_reg, config_entry.entry_id ): diff --git a/tests/components/fritz/test_button.py b/tests/components/fritz/test_button.py index 8666491eb7a..79639835003 100644 --- a/tests/components/fritz/test_button.py +++ b/tests/components/fritz/test_button.py @@ -1,6 +1,6 @@ """Tests for Fritz!Tools button platform.""" -import copy +from copy import deepcopy from datetime import timedelta from unittest.mock import patch @@ -11,9 +11,15 @@ from homeassistant.components.fritz.const import DOMAIN, MeshRoles from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util.dt import utcnow -from .const import MOCK_MESH_DATA, MOCK_NEW_DEVICE_NODE, MOCK_USER_DATA +from .const import ( + MOCK_HOST_ATTRIBUTES_DATA, + MOCK_MESH_DATA, + MOCK_NEW_DEVICE_NODE, + MOCK_USER_DATA, +) from tests.common import MockConfigEntry, async_fire_time_changed @@ -120,7 +126,7 @@ async def test_wol_button_new_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - mesh_data = copy.deepcopy(MOCK_MESH_DATA) + mesh_data = deepcopy(MOCK_MESH_DATA) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() assert entry.state is ConfigEntryState.LOADED @@ -148,7 +154,7 @@ async def test_wol_button_absent_for_mesh_slave( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - slave_mesh_data = copy.deepcopy(MOCK_MESH_DATA) + slave_mesh_data = deepcopy(MOCK_MESH_DATA) slave_mesh_data["nodes"][0]["mesh_role"] = MeshRoles.SLAVE fh_class_mock.get_mesh_topology.return_value = slave_mesh_data @@ -170,7 +176,7 @@ async def test_wol_button_absent_for_non_lan_device( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - printer_wifi_data = copy.deepcopy(MOCK_MESH_DATA) + printer_wifi_data = deepcopy(MOCK_MESH_DATA) # initialization logic uses the connection type of the `node_interface_1_uid` pair of the printer # ni-230 is wifi interface of fritzbox printer_node_interface = printer_wifi_data["nodes"][1]["node_interfaces"][0] @@ -184,3 +190,61 @@ async def test_wol_button_absent_for_non_lan_device( button = hass.states.get("button.printer_wake_on_lan") assert button is None + + +async def test_cleanup_button( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, +) -> None: + """Test cleanup of orphan devices.""" + + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.LOADED + + # check if tracked device is registered properly + device = device_registry.async_get_device( + connections={("mac", "aa:bb:cc:00:11:22")} + ) + assert device + + entities = [ + entity + for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) + if entity.unique_id.startswith("AA:BB:CC:00:11:22") + ] + assert entities + assert len(entities) == 3 + + # removed tracked device and trigger cleanup + host_attributes = deepcopy(MOCK_HOST_ATTRIBUTES_DATA) + 
host_attributes.pop(0) + fh_class_mock.get_hosts_attributes.return_value = host_attributes + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: "button.mock_title_cleanup"}, + blocking=True, + ) + + await hass.async_block_till_done(wait_background_tasks=True) + + # check if orphan tracked device is removed + device = device_registry.async_get_device( + connections={("mac", "aa:bb:cc:00:11:22")} + ) + assert not device + + entities = [ + entity + for entity in er.async_entries_for_config_entry(entity_registry, entry.entry_id) + if entity.unique_id.startswith("AA:BB:CC:00:11:22") + ] + assert not entities From 172e2125f6c228c5225318d743cfef946f35b048 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 31 Jul 2024 11:17:45 -0500 Subject: [PATCH 0212/1635] Switch to using update for headers middleware (#122952) --- homeassistant/components/http/headers.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/http/headers.py b/homeassistant/components/http/headers.py index 3c845601183..ebc0594e15a 100644 --- a/homeassistant/components/http/headers.py +++ b/homeassistant/components/http/headers.py @@ -31,13 +31,10 @@ def setup_headers(app: Application, use_x_frame_options: bool) -> None: try: response = await handler(request) except HTTPException as err: - for key, value in added_headers.items(): - err.headers[key] = value + err.headers.update(added_headers) raise - for key, value in added_headers.items(): - response.headers[key] = value - + response.headers.update(added_headers) return response app.middlewares.append(headers_middleware) From c888908cc86b04aa3b5aa82c06edc44c1d3af43d Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Wed, 31 Jul 2024 18:23:40 +0200 Subject: [PATCH 0213/1635] Add default warning for installing matter device updates (#122597) --- homeassistant/components/matter/update.py | 55 ++++++++++++++++++----- 1 file changed, 43 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/matter/update.py b/homeassistant/components/matter/update.py index 4e6733db045..736664e0101 100644 --- a/homeassistant/components/matter/update.py +++ b/homeassistant/components/matter/update.py @@ -8,7 +8,7 @@ from typing import Any from chip.clusters import Objects as clusters from matter_server.common.errors import UpdateCheckError, UpdateError -from matter_server.common.models import MatterSoftwareVersion +from matter_server.common.models import MatterSoftwareVersion, UpdateSource from homeassistant.components.update import ( ATTR_LATEST_VERSION, @@ -18,7 +18,7 @@ from homeassistant.components.update import ( UpdateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import STATE_ON, Platform from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -76,6 +76,12 @@ class MatterUpdate(MatterEntity, UpdateEntity): _attr_should_poll = True _software_update: MatterSoftwareVersion | None = None _cancel_update: CALLBACK_TYPE | None = None + _attr_supported_features = ( + UpdateEntityFeature.INSTALL + | UpdateEntityFeature.PROGRESS + | UpdateEntityFeature.SPECIFIC_VERSION + | UpdateEntityFeature.RELEASE_NOTES + ) @callback def _update_from_device(self) -> None: @@ -84,16 +90,6 @@ class MatterUpdate(MatterEntity, UpdateEntity): self._attr_installed_version = 
self.get_matter_attribute_value( clusters.BasicInformation.Attributes.SoftwareVersionString ) - - if self.get_matter_attribute_value( - clusters.OtaSoftwareUpdateRequestor.Attributes.UpdatePossible - ): - self._attr_supported_features = ( - UpdateEntityFeature.INSTALL - | UpdateEntityFeature.PROGRESS - | UpdateEntityFeature.SPECIFIC_VERSION - ) - update_state: clusters.OtaSoftwareUpdateRequestor.Enums.UpdateStateEnum = ( self.get_matter_attribute_value( clusters.OtaSoftwareUpdateRequestor.Attributes.UpdateState @@ -133,9 +129,39 @@ class MatterUpdate(MatterEntity, UpdateEntity): self._software_update = update_information self._attr_latest_version = update_information.software_version_string self._attr_release_url = update_information.release_notes_url + except UpdateCheckError as err: raise HomeAssistantError(f"Error finding applicable update: {err}") from err + async def async_release_notes(self) -> str | None: + """Return full release notes. + + This is suitable for a long changelog that does not fit in the release_summary + property. The returned string can contain markdown. + """ + if self._software_update is None: + return None + if self.state != STATE_ON: + return None + + release_notes = "" + + # insert extra heavy warning case the update is not from the main net + if self._software_update.update_source != UpdateSource.MAIN_NET_DCL: + release_notes += ( + "\n\n" + f"Update provided by {self._software_update.update_source.value}. " + "Installing this update is at your own risk and you may run into unexpected " + "problems such as the need to re-add and factory reset your device.\n\n" + ) + return release_notes + ( + "\n\nThe update process can take a while, " + "especially for battery powered devices. Please be patient and wait until the update " + "process is fully completed. Do not remove power from the device while it's updating. " + "The device may restart during the update process and be unavailable for several minutes." 
+ "\n\n" + ) + async def async_added_to_hass(self) -> None: """Call when the entity is added to hass.""" await super().async_added_to_hass() @@ -172,6 +198,11 @@ class MatterUpdate(MatterEntity, UpdateEntity): ) -> None: """Install a new software version.""" + if not self.get_matter_attribute_value( + clusters.OtaSoftwareUpdateRequestor.Attributes.UpdatePossible + ): + raise HomeAssistantError("Device is not ready to install updates") + software_version: str | int | None = version if self._software_update is not None and ( version is None or version == self._software_update.software_version_string From 3f091470fd7ef580e3bd565ed87571962d3552b5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 18:28:35 +0200 Subject: [PATCH 0214/1635] Use pytest.mark.usefixtures in risco tests (#122955) --- tests/components/risco/test_sensor.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 72444bdc9f2..4c8f7bb4180 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -174,11 +174,10 @@ def save_mock(): @pytest.mark.parametrize("events", [TEST_EVENTS]) +@pytest.mark.usefixtures("two_zone_cloud", "_set_utc_time_zone") async def test_cloud_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, - two_zone_cloud, - _set_utc_time_zone, save_mock, setup_risco_cloud, ) -> None: @@ -207,11 +206,9 @@ async def test_cloud_setup( _check_state(hass, category, entity_id) +@pytest.mark.usefixtures("setup_risco_local", "_no_zones_and_partitions") async def test_local_setup( - hass: HomeAssistant, entity_registry: er.EntityRegistry, - setup_risco_local, - _no_zones_and_partitions, ) -> None: """Test entity setup.""" for entity_id in ENTITY_IDS.values(): From 69a8c5dc9fb18178eb3091d0451150cf85073be5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 18:44:36 +0200 Subject: [PATCH 0215/1635] Fix implicit-return in hddtemp (#122919) --- homeassistant/components/hddtemp/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/hddtemp/sensor.py b/homeassistant/components/hddtemp/sensor.py index 836e68abe9f..fbb6a6b48f9 100644 --- a/homeassistant/components/hddtemp/sensor.py +++ b/homeassistant/components/hddtemp/sensor.py @@ -6,6 +6,7 @@ from datetime import timedelta import logging import socket from telnetlib import Telnet # pylint: disable=deprecated-module +from typing import Any import voluptuous as vol @@ -82,10 +83,11 @@ class HddTempSensor(SensorEntity): self._details = None @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the sensor.""" if self._details is not None: return {ATTR_DEVICE: self._details[0], ATTR_MODEL: self._details[1]} + return None def update(self) -> None: """Get the latest data from HDDTemp daemon and updates the state.""" From ae9e8ca419413481f0e245e16cfbe304c1a2ac24 Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Wed, 31 Jul 2024 19:04:17 +0200 Subject: [PATCH 0216/1635] Simplify async_setup_entry in bluesound integration (#122874) * Use async_added_to_hass and async_will_remove_from_hass * Remove self._hass --- .../components/bluesound/__init__.py | 11 -- .../components/bluesound/media_player.py | 143 ++++-------------- 2 files changed, 29 insertions(+), 125 deletions(-) diff --git 
a/homeassistant/components/bluesound/__init__.py b/homeassistant/components/bluesound/__init__.py index 0912a584fce..cbe95fc3abf 100644 --- a/homeassistant/components/bluesound/__init__.py +++ b/homeassistant/components/bluesound/__init__.py @@ -67,15 +67,4 @@ async def async_setup_entry( async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload a config entry.""" - player = None - for player in hass.data[DOMAIN]: - if player.unique_id == config_entry.unique_id: - break - - if player is None: - return False - - player.stop_polling() - hass.data[DOMAIN].remove(player) - return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index b320566c74a..809ba293f89 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -3,8 +3,7 @@ from __future__ import annotations import asyncio -from asyncio import CancelledError -from collections.abc import Callable +from asyncio import CancelledError, Task from contextlib import suppress from datetime import datetime, timedelta import logging @@ -31,15 +30,11 @@ from homeassistant.const import ( CONF_HOSTS, CONF_NAME, CONF_PORT, - EVENT_HOMEASSISTANT_START, - EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, - Event, HomeAssistant, ServiceCall, - callback, ) from homeassistant.data_entry_flow import FlowResultType from homeassistant.exceptions import ServiceValidationError @@ -50,7 +45,6 @@ from homeassistant.helpers.device_registry import ( format_mac, ) from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_track_time_interval from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import Throttle import homeassistant.util.dt as dt_util @@ -123,59 +117,6 @@ SERVICE_TO_METHOD = { } -def _add_player( - hass: HomeAssistant, - async_add_entities: AddEntitiesCallback, - host: str, - port: int, - player: Player, - sync_status: SyncStatus, -): - """Add Bluesound players.""" - - @callback - def _init_bluesound_player(event: Event | None = None): - """Start polling.""" - hass.async_create_task(bluesound_player.async_init()) - - @callback - def _start_polling(event: Event | None = None): - """Start polling.""" - bluesound_player.start_polling() - - @callback - def _stop_polling(event: Event | None = None): - """Stop polling.""" - bluesound_player.stop_polling() - - @callback - def _add_bluesound_player_cb(): - """Add player after first sync fetch.""" - if bluesound_player.id in [x.id for x in hass.data[DATA_BLUESOUND]]: - _LOGGER.warning("Player already added %s", bluesound_player.id) - return - - hass.data[DATA_BLUESOUND].append(bluesound_player) - async_add_entities([bluesound_player]) - _LOGGER.debug("Added device with name: %s", bluesound_player.name) - - if hass.is_running: - _start_polling() - else: - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _start_polling) - - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop_polling) - - bluesound_player = BluesoundPlayer( - hass, host, port, player, sync_status, _add_bluesound_player_cb - ) - - if hass.is_running: - _init_bluesound_player() - else: - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _init_bluesound_player) - - async def _async_import(hass: HomeAssistant, config: ConfigType) -> None: """Import config 
entry from configuration.yaml.""" if not hass.config_entries.async_entries(DOMAIN): @@ -252,18 +193,16 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Bluesound entry.""" - host = config_entry.data[CONF_HOST] - port = config_entry.data[CONF_PORT] - - _add_player( - hass, - async_add_entities, - host, - port, + bluesound_player = BluesoundPlayer( + config_entry.data[CONF_HOST], + config_entry.data[CONF_PORT], config_entry.runtime_data.player, config_entry.runtime_data.sync_status, ) + hass.data[DATA_BLUESOUND].append(bluesound_player) + async_add_entities([bluesound_player]) + async def async_setup_platform( hass: HomeAssistant, @@ -290,36 +229,30 @@ class BluesoundPlayer(MediaPlayerEntity): def __init__( self, - hass: HomeAssistant, host: str, port: int, player: Player, sync_status: SyncStatus, - init_callback: Callable[[], None], ) -> None: """Initialize the media player.""" self.host = host - self._hass = hass self.port = port - self._polling_task = None # The actual polling task. - self._id = None + self._polling_task: Task[None] | None = None # The actual polling task. + self._id = sync_status.id self._last_status_update = None - self._sync_status: SyncStatus | None = None + self._sync_status = sync_status self._status: Status | None = None self._inputs: list[Input] = [] self._presets: list[Preset] = [] self._is_online = False - self._retry_remove = None self._muted = False self._master: BluesoundPlayer | None = None self._is_master = False self._group_name = None self._group_list: list[str] = [] - self._bluesound_device_name = None + self._bluesound_device_name = sync_status.name self._player = player - self._init_callback = init_callback - self._attr_unique_id = format_unique_id(sync_status.mac, port) # there should always be one player with the default port per mac if port is DEFAULT_PORT: @@ -349,23 +282,18 @@ class BluesoundPlayer(MediaPlayerEntity): except ValueError: return -1 - async def force_update_sync_status(self, on_updated_cb=None) -> bool: + async def force_update_sync_status(self) -> bool: """Update the internal status.""" sync_status = await self._player.sync_status() self._sync_status = sync_status - if not self._id: - self._id = sync_status.id - if not self._bluesound_device_name: - self._bluesound_device_name = sync_status.name - if sync_status.master is not None: self._is_master = False master_id = f"{sync_status.master.ip}:{sync_status.master.port}" master_device = [ device - for device in self._hass.data[DATA_BLUESOUND] + for device in self.hass.data[DATA_BLUESOUND] if device.id == master_id ] @@ -380,8 +308,6 @@ class BluesoundPlayer(MediaPlayerEntity): slaves = self._sync_status.slaves self._is_master = slaves is not None - if on_updated_cb: - on_updated_cb() return True async def _start_poll_command(self): @@ -401,32 +327,21 @@ class BluesoundPlayer(MediaPlayerEntity): _LOGGER.exception("Unexpected error in %s:%s", self.name, self.port) raise - def start_polling(self): + async def async_added_to_hass(self) -> None: """Start the polling task.""" - self._polling_task = self._hass.async_create_task(self._start_poll_command()) + await super().async_added_to_hass() - def stop_polling(self): + self._polling_task = self.hass.async_create_task(self._start_poll_command()) + + async def async_will_remove_from_hass(self) -> None: """Stop the polling task.""" - self._polling_task.cancel() + await super().async_will_remove_from_hass() - async def async_init(self, triggered=None): - """Initialize the player async.""" - try: 
- if self._retry_remove is not None: - self._retry_remove() - self._retry_remove = None + assert self._polling_task is not None + if self._polling_task.cancel(): + await self._polling_task - await self.force_update_sync_status(self._init_callback) - except (TimeoutError, ClientError): - _LOGGER.error("Node %s:%s is offline, retrying later", self.host, self.port) - self._retry_remove = async_track_time_interval( - self._hass, self.async_init, NODE_RETRY_INITIATION - ) - except Exception: - _LOGGER.exception( - "Unexpected when initiating error in %s:%s", self.host, self.port - ) - raise + self.hass.data[DATA_BLUESOUND].remove(self) async def async_update(self) -> None: """Update internal status of the entity.""" @@ -490,13 +405,13 @@ class BluesoundPlayer(MediaPlayerEntity): """Trigger sync status update on all devices.""" _LOGGER.debug("Trigger sync status on all devices") - for player in self._hass.data[DATA_BLUESOUND]: + for player in self.hass.data[DATA_BLUESOUND]: await player.force_update_sync_status() @Throttle(SYNC_STATUS_INTERVAL) - async def async_update_sync_status(self, on_updated_cb=None): + async def async_update_sync_status(self): """Update sync status.""" - await self.force_update_sync_status(on_updated_cb) + await self.force_update_sync_status() @Throttle(UPDATE_CAPTURE_INTERVAL) async def async_update_captures(self) -> list[Input] | None: @@ -615,7 +530,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is not None: volume = self._status.volume - if self.is_grouped and self._sync_status is not None: + if self.is_grouped: volume = self._sync_status.volume if volume is None: @@ -630,7 +545,7 @@ class BluesoundPlayer(MediaPlayerEntity): if self._status is not None: mute = self._status.mute - if self.is_grouped and self._sync_status is not None: + if self.is_grouped: mute = self._sync_status.mute_volume is not None return mute @@ -778,7 +693,7 @@ class BluesoundPlayer(MediaPlayerEntity): device_group = self._group_name.split("+") sorted_entities = sorted( - self._hass.data[DATA_BLUESOUND], + self.hass.data[DATA_BLUESOUND], key=lambda entity: entity.is_master, reverse=True, ) From a913587eb6dff0d5c9a5846d4067ef6974d3e405 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 31 Jul 2024 19:17:53 +0200 Subject: [PATCH 0217/1635] Climate validate temperature(s) out of range (#118649) * Climate temperature out of range * Fix test sensibo * use temp converting for min/max * Fix * Fix mqtt tests * Fix honeywell tests * Fix Balboa tests * Fix whirlpool test * Fix teslemetry test * Fix plugwise test * Fix tplink test * Fix generic thermostat test * Fix modbus test * Fix fritzbox tests * Honewell --- homeassistant/components/climate/__init__.py | 29 +++- homeassistant/components/climate/strings.json | 3 + homeassistant/components/honeywell/climate.py | 9 +- tests/components/balboa/test_climate.py | 2 + tests/components/climate/test_init.py | 138 +++++++++++++++++- tests/components/fritzbox/test_climate.py | 8 +- .../generic_thermostat/test_climate.py | 4 +- tests/components/honeywell/conftest.py | 1 + .../honeywell/snapshots/test_climate.ambr | 6 +- tests/components/honeywell/test_climate.py | 71 +++++---- tests/components/modbus/test_climate.py | 8 +- tests/components/mqtt/test_climate.py | 14 +- tests/components/plugwise/test_climate.py | 4 +- tests/components/sensibo/test_climate.py | 6 +- tests/components/teslemetry/test_climate.py | 2 +- tests/components/tplink/test_climate.py | 5 +- tests/components/whirlpool/test_climate.py | 4 +- 17 files changed, 245 insertions(+), 
69 deletions(-) diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index 94cba54b247..f546ae0e671 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -914,12 +914,37 @@ async def async_service_temperature_set( """Handle set temperature service.""" hass = entity.hass kwargs = {} + min_temp = entity.min_temp + max_temp = entity.max_temp + temp_unit = entity.temperature_unit for value, temp in service_call.data.items(): if value in CONVERTIBLE_ATTRIBUTE: - kwargs[value] = TemperatureConverter.convert( - temp, hass.config.units.temperature_unit, entity.temperature_unit + kwargs[value] = check_temp = TemperatureConverter.convert( + temp, hass.config.units.temperature_unit, temp_unit ) + + _LOGGER.debug( + "Check valid temperature %d %s (%d %s) in range %d %s - %d %s", + check_temp, + entity.temperature_unit, + temp, + hass.config.units.temperature_unit, + min_temp, + temp_unit, + max_temp, + temp_unit, + ) + if check_temp < min_temp or check_temp > max_temp: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="temp_out_of_range", + translation_placeholders={ + "check_temp": str(check_temp), + "min_temp": str(min_temp), + "max_temp": str(max_temp), + }, + ) else: kwargs[value] = temp diff --git a/homeassistant/components/climate/strings.json b/homeassistant/components/climate/strings.json index dc212441824..1af21815b9f 100644 --- a/homeassistant/components/climate/strings.json +++ b/homeassistant/components/climate/strings.json @@ -266,6 +266,9 @@ }, "not_valid_fan_mode": { "message": "Fan mode {mode} is not valid. Valid fan modes are: {modes}." + }, + "temp_out_of_range": { + "message": "Provided temperature {check_temp} is not valid. Accepted range is {min_temp} to {max_temp}." } } } diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index d9260fc3be5..141cb87f117 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -42,6 +42,7 @@ from homeassistant.helpers import ( ) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.unit_conversion import TemperatureConverter from . 
import HoneywellData from .const import ( @@ -259,7 +260,9 @@ class HoneywellUSThermostat(ClimateEntity): self._device.raw_ui_data["HeatLowerSetptLimit"], ] ) - return DEFAULT_MIN_TEMP + return TemperatureConverter.convert( + DEFAULT_MIN_TEMP, UnitOfTemperature.CELSIUS, self.temperature_unit + ) @property def max_temp(self) -> float: @@ -275,7 +278,9 @@ class HoneywellUSThermostat(ClimateEntity): self._device.raw_ui_data["HeatUpperSetptLimit"], ] ) - return DEFAULT_MAX_TEMP + return TemperatureConverter.convert( + DEFAULT_MAX_TEMP, UnitOfTemperature.CELSIUS, self.temperature_unit + ) @property def current_humidity(self) -> int | None: diff --git a/tests/components/balboa/test_climate.py b/tests/components/balboa/test_climate.py index c877f2858cd..850184a7d71 100644 --- a/tests/components/balboa/test_climate.py +++ b/tests/components/balboa/test_climate.py @@ -85,6 +85,8 @@ async def test_spa_temperature( hass: HomeAssistant, client: MagicMock, integration: MockConfigEntry ) -> None: """Test spa temperature settings.""" + client.temperature_minimum = 110 + client.temperature_maximum = 250 # flip the spa into F # set temp to a valid number state = await _patch_spa_settemp(hass, client, 0, 100) diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index ced75ff7ef7..f306551e540 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from enum import Enum from types import ModuleType +from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -17,9 +18,14 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.components.climate.const import ( + ATTR_CURRENT_TEMPERATURE, ATTR_FAN_MODE, + ATTR_MAX_TEMP, + ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_SWING_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, SERVICE_SET_FAN_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_SWING_MODE, @@ -27,7 +33,13 @@ from homeassistant.components.climate.const import ( ClimateEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import SERVICE_TURN_OFF, SERVICE_TURN_ON, UnitOfTemperature +from homeassistant.const import ( + ATTR_TEMPERATURE, + PRECISION_WHOLE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + UnitOfTemperature, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import issue_registry as ir @@ -1152,3 +1164,127 @@ async def test_no_issue_no_aux_property( "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " "and will be unsupported from Home Assistant 2024.10." 
) not in caplog.text + + +async def test_temperature_validation( + hass: HomeAssistant, config_flow_fixture: None +) -> None: + """Test validation for temperatures.""" + + class MockClimateEntityTemp(MockClimateEntity): + """Mock climate class with mocked aux heater.""" + + _attr_supported_features = ( + ClimateEntityFeature.FAN_MODE + | ClimateEntityFeature.PRESET_MODE + | ClimateEntityFeature.SWING_MODE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + _attr_target_temperature = 15 + _attr_target_temperature_high = 18 + _attr_target_temperature_low = 10 + _attr_target_temperature_step = PRECISION_WHOLE + + def set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if ATTR_TEMPERATURE in kwargs: + self._attr_target_temperature = kwargs[ATTR_TEMPERATURE] + if ATTR_TARGET_TEMP_HIGH in kwargs: + self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] + self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) + return True + + async def async_setup_entry_climate_platform( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: + """Set up test climate platform via config entry.""" + async_add_entities( + [MockClimateEntityTemp(name="test", entity_id="climate.test")] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=async_setup_entry_init, + ), + built_in=False, + ) + mock_platform( + hass, + "test.climate", + MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + ) + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_CURRENT_TEMPERATURE) is None + assert state.attributes.get(ATTR_MIN_TEMP) == 7 + assert state.attributes.get(ATTR_MAX_TEMP) == 35 + + with pytest.raises( + ServiceValidationError, + match="Provided temperature 40.0 is not valid. Accepted range is 7 to 35", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TEMPERATURE: "40", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Provided temperature 40.0 is not valid. Accepted range is 7 to 35" + ) + assert exc.value.translation_key == "temp_out_of_range" + + with pytest.raises( + ServiceValidationError, + match="Provided temperature 0.0 is not valid. Accepted range is 7 to 35", + ) as exc: + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "25", + ATTR_TARGET_TEMP_LOW: "0", + }, + blocking=True, + ) + assert ( + str(exc.value) + == "Provided temperature 0.0 is not valid. 
Accepted range is 7 to 35" + ) + assert exc.value.translation_key == "temp_out_of_range" + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_TEMPERATURE, + { + "entity_id": "climate.test", + ATTR_TARGET_TEMP_HIGH: "25", + ATTR_TARGET_TEMP_LOW: "10", + }, + blocking=True, + ) + + state = hass.states.get("climate.test") + assert state.attributes.get(ATTR_TARGET_TEMP_LOW) == 10 + assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 25 diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 8d1da9d09d5..853c09c534b 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -263,10 +263,10 @@ async def test_set_temperature_temperature(hass: HomeAssistant, fritz: Mock) -> await hass.services.async_call( DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 123}, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, True, ) - assert device.set_target_temperature.call_args_list == [call(123)] + assert device.set_target_temperature.call_args_list == [call(23)] async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> None: @@ -282,7 +282,7 @@ async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> Non { ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF, - ATTR_TEMPERATURE: 123, + ATTR_TEMPERATURE: 23, }, True, ) @@ -303,7 +303,7 @@ async def test_set_temperature_mode_heat(hass: HomeAssistant, fritz: Mock) -> No { ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT, - ATTR_TEMPERATURE: 123, + ATTR_TEMPERATURE: 23, }, True, ) diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 18e31b9591f..0f438056fbd 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -1097,9 +1097,9 @@ async def setup_comp_9(hass: HomeAssistant) -> None: async def test_precision(hass: HomeAssistant) -> None: """Test that setting precision to tenths works as intended.""" hass.config.units = US_CUSTOMARY_SYSTEM - await common.async_set_temperature(hass, 23.27) + await common.async_set_temperature(hass, 55.27) state = hass.states.get(ENTITY) - assert state.attributes.get("temperature") == 23.3 + assert state.attributes.get("temperature") == 55.3 # check that target_temp_step defaults to precision assert state.attributes.get("target_temp_step") == 0.1 diff --git a/tests/components/honeywell/conftest.py b/tests/components/honeywell/conftest.py index 5c5b6c0a44a..e48664db9ae 100644 --- a/tests/components/honeywell/conftest.py +++ b/tests/components/honeywell/conftest.py @@ -86,6 +86,7 @@ def device(): mock_device.system_mode = "off" mock_device.name = "device1" mock_device.current_temperature = CURRENTTEMPERATURE + mock_device.temperature_unit = "C" mock_device.mac_address = "macaddress1" mock_device.outdoor_temperature = None mock_device.outdoor_humidity = None diff --git a/tests/components/honeywell/snapshots/test_climate.ambr b/tests/components/honeywell/snapshots/test_climate.ambr index d1faf9af9a0..25bb73851c6 100644 --- a/tests/components/honeywell/snapshots/test_climate.ambr +++ b/tests/components/honeywell/snapshots/test_climate.ambr @@ -3,7 +3,7 @@ ReadOnlyDict({ 'aux_heat': 'off', 'current_humidity': 50, - 'current_temperature': -6.7, + 'current_temperature': 20, 'fan_action': 'idle', 'fan_mode': 'auto', 'fan_modes': list([ @@ -20,9 +20,9 @@ , ]), 'max_humidity': 99, - 'max_temp': 1.7, + 'max_temp': 35, 
'min_humidity': 30, - 'min_temp': -13.9, + 'min_temp': 7, 'permanent_hold': False, 'preset_mode': 'none', 'preset_modes': list([ diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index b57be5f1838..55a55f7d7e7 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -92,14 +92,13 @@ async def test_dynamic_attributes( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: """Test dynamic attributes.""" - await init_integration(hass, config_entry) entity_id = f"climate.{device.name}" state = hass.states.get(entity_id) assert state.state == HVACMode.OFF attributes = state.attributes - assert attributes["current_temperature"] == -6.7 + assert attributes["current_temperature"] == 20 assert attributes["current_humidity"] == 50 device.system_mode = "cool" @@ -114,7 +113,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.COOL attributes = state.attributes - assert attributes["current_temperature"] == -6.1 + assert attributes["current_temperature"] == 21 assert attributes["current_humidity"] == 55 device.system_mode = "heat" @@ -129,7 +128,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT attributes = state.attributes - assert attributes["current_temperature"] == 16.1 + assert attributes["current_temperature"] == 61 assert attributes["current_humidity"] == 50 device.system_mode = "auto" @@ -142,7 +141,7 @@ async def test_dynamic_attributes( state = hass.states.get(entity_id) assert state.state == HVACMode.HEAT_COOL attributes = state.attributes - assert attributes["current_temperature"] == 16.1 + assert attributes["current_temperature"] == 61 assert attributes["current_humidity"] == 50 @@ -348,7 +347,7 @@ async def test_service_calls_off_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, blocking=True, ) @@ -362,8 +361,8 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -375,13 +374,13 @@ async def test_service_calls_off_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 24.0, + ATTR_TARGET_TEMP_HIGH: 34.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(34) + device.set_setpoint_heat.assert_called_with(24) assert "Invalid temperature" in caplog.text device.set_setpoint_heat.reset_mock() @@ -399,14 +398,14 @@ async def test_service_calls_off_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) reset_mock(device) await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 35}, blocking=True, ) 
device.set_setpoint_heat.assert_not_called() @@ -517,7 +516,7 @@ async def test_service_calls_cool_mode( {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, blocking=True, ) - device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_cool.assert_called_once_with(datetime.time(2, 30), 15) device.set_hold_cool.reset_mock() await hass.services.async_call( @@ -525,13 +524,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 15.0, + ATTR_TARGET_TEMP_HIGH: 20.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(20) + device.set_setpoint_heat.assert_called_with(15) caplog.clear() device.set_setpoint_cool.reset_mock() @@ -543,13 +542,13 @@ async def test_service_calls_cool_mode( SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: entity_id, - ATTR_TARGET_TEMP_LOW: 25.0, - ATTR_TARGET_TEMP_HIGH: 35.0, + ATTR_TARGET_TEMP_LOW: 15.0, + ATTR_TARGET_TEMP_HIGH: 20.0, }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(20) + device.set_setpoint_heat.assert_called_with(15) assert "Invalid temperature" in caplog.text reset_mock(device) @@ -733,10 +732,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() device.set_hold_heat.side_effect = aiosomecomfort.SomeComfortError @@ -744,10 +743,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() assert "Invalid temperature" in caplog.text @@ -756,10 +755,10 @@ async def test_service_calls_heat_mode( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 25}, blocking=True, ) - device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 59) + device.set_hold_heat.assert_called_once_with(datetime.time(2, 30), 25) device.set_hold_heat.reset_mock() caplog.clear() @@ -773,8 +772,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) device.set_setpoint_heat.reset_mock() device.set_setpoint_heat.side_effect = aiosomecomfort.SomeComfortError @@ -789,8 +788,8 @@ async def test_service_calls_heat_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_with(95) - device.set_setpoint_heat.assert_called_with(77) + device.set_setpoint_cool.assert_called_with(35) + device.set_setpoint_heat.assert_called_with(25) assert "Invalid temperature" 
in caplog.text reset_mock(device) @@ -984,8 +983,8 @@ async def test_service_calls_auto_mode( }, blocking=True, ) - device.set_setpoint_cool.assert_called_once_with(95) - device.set_setpoint_heat.assert_called_once_with(77) + device.set_setpoint_cool.assert_called_once_with(35) + device.set_setpoint_heat.assert_called_once_with(25) reset_mock(device) caplog.clear() diff --git a/tests/components/modbus/test_climate.py b/tests/components/modbus/test_climate.py index a52285b22d7..5578234ee6e 100644 --- a/tests/components/modbus/test_climate.py +++ b/tests/components/modbus/test_climate.py @@ -766,7 +766,7 @@ async def test_service_climate_swing_update( ("temperature", "result", "do_config"), [ ( - 35, + 31, [0x00], { CONF_CLIMATES: [ @@ -781,7 +781,7 @@ async def test_service_climate_swing_update( }, ), ( - 36, + 32, [0x00, 0x00], { CONF_CLIMATES: [ @@ -796,7 +796,7 @@ async def test_service_climate_swing_update( }, ), ( - 37.5, + 33.5, [0x00, 0x00], { CONF_CLIMATES: [ @@ -811,7 +811,7 @@ async def test_service_climate_swing_update( }, ), ( - "39", + "34", [0x00, 0x00, 0x00, 0x00], { CONF_CLIMATES: [ diff --git a/tests/components/mqtt/test_climate.py b/tests/components/mqtt/test_climate.py index f29c16f19ea..13bd6b5feda 100644 --- a/tests/components/mqtt/test_climate.py +++ b/tests/components/mqtt/test_climate.py @@ -654,11 +654,11 @@ async def test_set_target_temperature( assert state.state == "heat" mqtt_mock.async_publish.assert_called_once_with("mode-topic", "heat", 0, False) mqtt_mock.async_publish.reset_mock() - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 47 + assert state.attributes.get("temperature") == 35 mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "47.0", 0, False + "temperature-topic", "35.0", 0, False ) # also test directly supplying the operation mode to set_temperature @@ -713,7 +713,7 @@ async def test_set_target_temperature_pessimistic( state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None await common.async_set_hvac_mode(hass, "heat", ENTITY_CLIMATE) - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) state = hass.states.get(ENTITY_CLIMATE) assert state.attributes.get("temperature") is None @@ -1590,13 +1590,13 @@ async def test_set_and_templates( assert state.attributes.get("swing_mode") == "on" # Temperature - await common.async_set_temperature(hass, temperature=47, entity_id=ENTITY_CLIMATE) + await common.async_set_temperature(hass, temperature=35, entity_id=ENTITY_CLIMATE) mqtt_mock.async_publish.assert_called_once_with( - "temperature-topic", "temp: 47.0", 0, False + "temperature-topic", "temp: 35.0", 0, False ) mqtt_mock.async_publish.reset_mock() state = hass.states.get(ENTITY_CLIMATE) - assert state.attributes.get("temperature") == 47 + assert state.attributes.get("temperature") == 35 # Temperature Low/High await common.async_set_temperature( diff --git a/tests/components/plugwise/test_climate.py b/tests/components/plugwise/test_climate.py index c91e4d37ba6..70cef16bcdc 100644 --- a/tests/components/plugwise/test_climate.py +++ b/tests/components/plugwise/test_climate.py @@ -14,7 +14,7 @@ from homeassistant.components.climate import ( HVACMode, ) from homeassistant.core import 
HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.util.dt import utcnow from tests.common import MockConfigEntry, async_fire_time_changed @@ -196,7 +196,7 @@ async def test_adam_climate_entity_climate_changes( "c50f167537524366a5af7aa3942feb1e", {"setpoint": 25.0} ) - with pytest.raises(ValueError): + with pytest.raises(ServiceValidationError): await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/sensibo/test_climate.py b/tests/components/sensibo/test_climate.py index 6b4aedab828..b5a7be7bde0 100644 --- a/tests/components/sensibo/test_climate.py +++ b/tests/components/sensibo/test_climate.py @@ -400,6 +400,10 @@ async def test_climate_temperatures( "homeassistant.components.sensibo.util.SensiboClient.async_set_ac_state_property", return_value={"result": {"status": "Success"}}, ), + pytest.raises( + ServiceValidationError, + match="Provided temperature 24.0 is not valid. Accepted range is 10 to 20", + ), ): await hass.services.async_call( CLIMATE_DOMAIN, @@ -410,7 +414,7 @@ async def test_climate_temperatures( await hass.async_block_till_done() state2 = hass.states.get("climate.hallway") - assert state2.attributes["temperature"] == 20 + assert state2.attributes["temperature"] == 19 with ( patch( diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 250413396c1..31a39f1f21a 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -199,7 +199,7 @@ async def test_climate( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, blocking=True, ) diff --git a/tests/components/tplink/test_climate.py b/tests/components/tplink/test_climate.py index a80a74a5697..2f24fa829f9 100644 --- a/tests/components/tplink/test_climate.py +++ b/tests/components/tplink/test_climate.py @@ -120,12 +120,13 @@ async def test_set_temperature( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mocked_hub: Device ) -> None: """Test that set_temperature service calls the setter.""" + mocked_thermostat = mocked_hub.children[0] + mocked_thermostat.features["target_temperature"].minimum_value = 0 + await setup_platform_for_device( hass, mock_config_entry, Platform.CLIMATE, mocked_hub ) - mocked_thermostat = mocked_hub.children[0] - await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, diff --git a/tests/components/whirlpool/test_climate.py b/tests/components/whirlpool/test_climate.py index 18016bd9c67..cdae28f4432 100644 --- a/tests/components/whirlpool/test_climate.py +++ b/tests/components/whirlpool/test_climate.py @@ -264,10 +264,10 @@ async def test_service_calls( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 15}, + {ATTR_ENTITY_ID: entity_id, ATTR_TEMPERATURE: 16}, blocking=True, ) - mock_instance.set_temp.assert_called_once_with(15) + mock_instance.set_temp.assert_called_once_with(16) mock_instance.set_mode.reset_mock() await hass.services.async_call( From cc1a6d60c07d2298e45bf5dbd9e028e64a0d1be6 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Wed, 31 Jul 2024 19:28:46 +0200 Subject: [PATCH 0218/1635] Add override for work areas in Husqvarna Automower (#120427) 
Co-authored-by: Robert Resch --- .../components/husqvarna_automower/icons.json | 3 +- .../husqvarna_automower/lawn_mower.py | 41 ++++++- .../components/husqvarna_automower/sensor.py | 25 ++++- .../husqvarna_automower/services.yaml | 19 ++++ .../husqvarna_automower/strings.json | 25 +++++ .../snapshots/test_sensor.ambr | 5 + .../husqvarna_automower/test_lawn_mower.py | 103 ++++++++++++++++-- 7 files changed, 208 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/icons.json b/homeassistant/components/husqvarna_automower/icons.json index a9002c5b44a..9dc1cbeb667 100644 --- a/homeassistant/components/husqvarna_automower/icons.json +++ b/homeassistant/components/husqvarna_automower/icons.json @@ -34,6 +34,7 @@ } }, "services": { - "override_schedule": "mdi:debug-step-over" + "override_schedule": "mdi:debug-step-over", + "override_schedule_work_area": "mdi:land-fields" } } diff --git a/homeassistant/components/husqvarna_automower/lawn_mower.py b/homeassistant/components/husqvarna_automower/lawn_mower.py index dd2129599fb..ac0f1fd6af2 100644 --- a/homeassistant/components/husqvarna_automower/lawn_mower.py +++ b/homeassistant/components/husqvarna_automower/lawn_mower.py @@ -2,8 +2,9 @@ from datetime import timedelta import logging +from typing import TYPE_CHECKING -from aioautomower.model import MowerActivities, MowerStates +from aioautomower.model import MowerActivities, MowerStates, WorkArea import voluptuous as vol from homeassistant.components.lawn_mower import ( @@ -12,10 +13,12 @@ from homeassistant.components.lawn_mower import ( LawnMowerEntityFeature, ) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AutomowerConfigEntry +from .const import DOMAIN from .coordinator import AutomowerDataUpdateCoordinator from .entity import AutomowerAvailableEntity, handle_sending_exception @@ -67,6 +70,18 @@ async def async_setup_entry( }, "async_override_schedule", ) + platform.async_register_entity_service( + "override_schedule_work_area", + { + vol.Required("work_area_id"): vol.Coerce(int), + vol.Required("duration"): vol.All( + cv.time_period, + cv.positive_timedelta, + vol.Range(min=timedelta(minutes=1), max=timedelta(days=42)), + ), + }, + "async_override_schedule_work_area", + ) class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity): @@ -98,6 +113,11 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity): return LawnMowerActivity.DOCKED return LawnMowerActivity.ERROR + @property + def work_areas(self) -> dict[int, WorkArea] | None: + """Return the work areas of the mower.""" + return self.mower_attributes.work_areas + @handle_sending_exception() async def async_start_mowing(self) -> None: """Resume schedule.""" @@ -122,3 +142,22 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity): await self.coordinator.api.commands.start_for(self.mower_id, duration) if override_mode == PARK: await self.coordinator.api.commands.park_for(self.mower_id, duration) + + @handle_sending_exception() + async def async_override_schedule_work_area( + self, work_area_id: int, duration: timedelta + ) -> None: + """Override the schedule with a certain work area.""" + if not self.mower_attributes.capabilities.work_areas: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="work_areas_not_supported" + ) + if TYPE_CHECKING: + assert self.work_areas is not None + if work_area_id not in self.work_areas: + raise ServiceValidationError( + translation_domain=DOMAIN, translation_key="work_area_not_existing" + ) + await self.coordinator.api.commands.start_in_workarea( + self.mower_id, work_area_id, duration + ) diff --git a/homeassistant/components/husqvarna_automower/sensor.py b/homeassistant/components/husqvarna_automower/sensor.py index bd0b8561223..0e3e6771cec 100644 --- a/homeassistant/components/husqvarna_automower/sensor.py +++ b/homeassistant/components/husqvarna_automower/sensor.py @@ -1,10 +1,10 @@ """Creates the sensor entities for the mower.""" -from collections.abc import Callable +from collections.abc import Callable, Mapping from dataclasses import dataclass from datetime import datetime import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from zoneinfo import ZoneInfo from aioautomower.model import MowerAttributes, MowerModes, RestrictedReasons @@ -28,6 +28,8 @@ from .entity import AutomowerBaseEntity _LOGGER = logging.getLogger(__name__) +ATTR_WORK_AREA_ID_ASSIGNMENT = "work_area_id_assignment" + ERROR_KEY_LIST = [ "no_error", "alarm_mower_in_motion", @@ -214,11 +216,23 @@ def _get_current_work_area_name(data: MowerAttributes) -> str: return data.work_areas[data.mower.work_area_id].name +@callback +def _get_current_work_area_dict(data: MowerAttributes) -> Mapping[str, Any]: + """Return the name of the current work area.""" + if TYPE_CHECKING: + # Sensor does not get created if it is None + assert data.work_areas is not None + return {ATTR_WORK_AREA_ID_ASSIGNMENT: data.work_area_dict} + + @dataclass(frozen=True, kw_only=True) class AutomowerSensorEntityDescription(SensorEntityDescription): """Describes Automower sensor entity.""" exists_fn: Callable[[MowerAttributes], bool] = lambda _: 
True + extra_state_attributes_fn: Callable[[MowerAttributes], Mapping[str, Any] | None] = ( + lambda _: None + ) option_fn: Callable[[MowerAttributes], list[str] | None] = lambda _: None value_fn: Callable[[MowerAttributes], StateType | datetime] @@ -353,6 +367,7 @@ SENSOR_TYPES: tuple[AutomowerSensorEntityDescription, ...] = ( translation_key="work_area", device_class=SensorDeviceClass.ENUM, exists_fn=lambda data: data.capabilities.work_areas, + extra_state_attributes_fn=_get_current_work_area_dict, option_fn=_get_work_area_names, value_fn=_get_current_work_area_name, ), @@ -378,6 +393,7 @@ class AutomowerSensorEntity(AutomowerBaseEntity, SensorEntity): """Defining the Automower Sensors with AutomowerSensorEntityDescription.""" entity_description: AutomowerSensorEntityDescription + _unrecorded_attributes = frozenset({ATTR_WORK_AREA_ID_ASSIGNMENT}) def __init__( self, @@ -399,3 +415,8 @@ class AutomowerSensorEntity(AutomowerBaseEntity, SensorEntity): def options(self) -> list[str] | None: """Return the option of the sensor.""" return self.entity_description.option_fn(self.mower_attributes) + + @property + def extra_state_attributes(self) -> Mapping[str, Any] | None: + """Return the state attributes.""" + return self.entity_description.extra_state_attributes_fn(self.mower_attributes) diff --git a/homeassistant/components/husqvarna_automower/services.yaml b/homeassistant/components/husqvarna_automower/services.yaml index 94687a2ebfa..29c89360d1e 100644 --- a/homeassistant/components/husqvarna_automower/services.yaml +++ b/homeassistant/components/husqvarna_automower/services.yaml @@ -19,3 +19,22 @@ override_schedule: options: - "mow" - "park" + +override_schedule_work_area: + target: + entity: + integration: "husqvarna_automower" + domain: "lawn_mower" + fields: + duration: + required: true + example: "{'days': 1, 'hours': 12, 'minutes': 30}" + selector: + duration: + enable_day: true + work_area_id: + required: true + example: "123" + selector: + text: + type: number diff --git a/homeassistant/components/husqvarna_automower/strings.json b/homeassistant/components/husqvarna_automower/strings.json index be17cc25e32..c34a5dd3340 100644 --- a/homeassistant/components/husqvarna_automower/strings.json +++ b/homeassistant/components/husqvarna_automower/strings.json @@ -254,6 +254,11 @@ "state": { "my_lawn": "My lawn", "no_work_area_active": "No work area active" + }, + "state_attributes": { + "work_area_id_assignment": { + "name": "Work area ID assignment" + } } } }, @@ -269,6 +274,12 @@ "exceptions": { "command_send_failed": { "message": "Failed to send command: {exception}" + }, + "work_areas_not_supported": { + "message": "This mower does not support work areas." + }, + "work_area_not_existing": { + "message": "The selected work area does not exist." } }, "selector": { @@ -293,6 +304,20 @@ "description": "With which action the schedule should be overridden." } } + }, + "override_schedule_work_area": { + "name": "Override schedule work area", + "description": "Override the schedule of the mower for a duration of time in the selected work area.", + "fields": { + "duration": { + "name": "[%key:component::husqvarna_automower::services::override_schedule::fields::duration::name%]", + "description": "[%key:component::husqvarna_automower::services::override_schedule::fields::duration::description%]" + }, + "work_area_id": { + "name": "Work area ID", + "description": "In which work area the mower should mow." 
+ } + } } } } diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index 730971a47dd..c727a49b71a 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -1100,6 +1100,11 @@ 'my_lawn', 'no_work_area_active', ]), + 'work_area_id_assignment': dict({ + 0: 'my_lawn', + 123456: 'Front lawn', + 654321: 'Back lawn', + }), }), 'context': , 'entity_id': 'sensor.test_mower_1_work_area', diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 5d5cacfc6bf..2ae427e0e1e 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -13,7 +13,7 @@ from homeassistant.components.husqvarna_automower.const import DOMAIN from homeassistant.components.husqvarna_automower.coordinator import SCAN_INTERVAL from homeassistant.components.lawn_mower import LawnMowerActivity from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from . import setup_integration from .const import TEST_MOWER_ID @@ -122,7 +122,7 @@ async def test_lawn_mower_commands( async def test_lawn_mower_service_commands( hass: HomeAssistant, aioautomower_command: str, - extra_data: int | None, + extra_data: timedelta, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, @@ -158,27 +158,112 @@ async def test_lawn_mower_service_commands( @pytest.mark.parametrize( - ("service", "service_data"), + ("aioautomower_command", "extra_data1", "extra_data2", "service", "service_data"), [ ( - "override_schedule", + "start_in_workarea", + 123456, + timedelta(days=40), + "override_schedule_work_area", { - "duration": {"days": 1, "hours": 12, "minutes": 30}, - "override_mode": "fly_to_moon", + "work_area_id": 123456, + "duration": {"days": 40}, }, ), ], ) -async def test_lawn_mower_wrong_service_commands( +async def test_lawn_mower_override_work_area_command( hass: HomeAssistant, + aioautomower_command: str, + extra_data1: int, + extra_data2: timedelta, service: str, service_data: dict[str, int] | None, mock_automower_client: AsyncMock, mock_config_entry: MockConfigEntry, ) -> None: - """Test lawn_mower commands.""" + """Test lawn_mower work area override commands.""" await setup_integration(hass, mock_config_entry) - with pytest.raises(MultipleInvalid): + mocked_method = AsyncMock() + setattr(mock_automower_client.commands, aioautomower_command, mocked_method) + await hass.services.async_call( + domain=DOMAIN, + service=service, + target={"entity_id": "lawn_mower.test_mower_1"}, + service_data=service_data, + blocking=True, + ) + mocked_method.assert_called_once_with(TEST_MOWER_ID, extra_data1, extra_data2) + + getattr( + mock_automower_client.commands, aioautomower_command + ).side_effect = ApiException("Test error") + with pytest.raises( + HomeAssistantError, + match="Failed to send command: Test error", + ): + await hass.services.async_call( + domain=DOMAIN, + service=service, + target={"entity_id": "lawn_mower.test_mower_1"}, + service_data=service_data, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("service", "service_data", "mower_support_wa", "exception"), + [ + ( + "override_schedule", + { + "duration": {"days": 1, "hours": 12, "minutes": 30}, + "override_mode": 
"fly_to_moon", + }, + False, + MultipleInvalid, + ), + ( + "override_schedule_work_area", + { + "work_area_id": 123456, + "duration": {"days": 40}, + }, + False, + ServiceValidationError, + ), + ( + "override_schedule_work_area", + { + "work_area_id": 12345, + "duration": {"days": 40}, + }, + True, + ServiceValidationError, + ), + ], +) +async def test_lawn_mower_wrong_service_commands( + hass: HomeAssistant, + service: str, + service_data: dict[str, int] | None, + mower_support_wa: bool, + exception, + mock_automower_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lawn_mower commands.""" + await setup_integration(hass, mock_config_entry) + values = mower_list_to_dictionary_dataclass( + load_json_value_fixture("mower.json", DOMAIN) + ) + values[TEST_MOWER_ID].capabilities.work_areas = mower_support_wa + mock_automower_client.get_status.return_value = values + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + with pytest.raises(exception): await hass.services.async_call( domain=DOMAIN, service=service, From 9db42beadef9e7f49b35473878a72f1dffea6e64 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Wed, 31 Jul 2024 18:57:12 +0100 Subject: [PATCH 0219/1635] Fix handling of tplink light effects for scenes (#122965) Co-authored-by: J. Nick Koston --- homeassistant/components/tplink/light.py | 30 ++++++++++++------- tests/components/tplink/test_light.py | 38 +++++++++++++++++++++++- 2 files changed, 56 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/tplink/light.py b/homeassistant/components/tplink/light.py index 9b7dd499c97..8d6ec27f81c 100644 --- a/homeassistant/components/tplink/light.py +++ b/homeassistant/components/tplink/light.py @@ -382,17 +382,25 @@ class TPLinkLightEffectEntity(TPLinkLightEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" brightness, transition = self._async_extract_brightness_transition(**kwargs) - if ( - (effect := kwargs.get(ATTR_EFFECT)) - # Effect is unlikely to be LIGHT_EFFECTS_OFF but check for it anyway - and effect not in {LightEffect.LIGHT_EFFECTS_OFF, EFFECT_OFF} - and effect in self._effect_module.effect_list - ): - await self._effect_module.set_effect( - kwargs[ATTR_EFFECT], brightness=brightness, transition=transition - ) - elif ATTR_COLOR_TEMP_KELVIN in kwargs: - if self.effect and self.effect != EFFECT_OFF: + effect_off_called = False + if effect := kwargs.get(ATTR_EFFECT): + if effect in {LightEffect.LIGHT_EFFECTS_OFF, EFFECT_OFF}: + if self._effect_module.effect is not LightEffect.LIGHT_EFFECTS_OFF: + await self._effect_module.set_effect(LightEffect.LIGHT_EFFECTS_OFF) + effect_off_called = True + if len(kwargs) == 1: + return + elif effect in self._effect_module.effect_list: + await self._effect_module.set_effect( + kwargs[ATTR_EFFECT], brightness=brightness, transition=transition + ) + return + else: + _LOGGER.error("Invalid effect %s for %s", effect, self._device.host) + return + + if ATTR_COLOR_TEMP_KELVIN in kwargs: + if self.effect and self.effect != EFFECT_OFF and not effect_off_called: # If there is an effect in progress # we have to clear the effect # before we can set a color temp diff --git a/tests/components/tplink/test_light.py b/tests/components/tplink/test_light.py index 590274b8405..6998d8fbcc7 100644 --- a/tests/components/tplink/test_light.py +++ b/tests/components/tplink/test_light.py @@ -505,7 +505,9 @@ async def 
test_dimmer_turn_on_fix(hass: HomeAssistant) -> None: light.set_state.reset_mock() -async def test_smart_strip_effects(hass: HomeAssistant) -> None: +async def test_smart_strip_effects( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: """Test smart strip effects.""" already_migrated_config_entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "127.0.0.1"}, unique_id=MAC_ADDRESS @@ -555,6 +557,40 @@ async def test_smart_strip_effects(hass: HomeAssistant) -> None: "Effect2", brightness=None, transition=None ) light_effect.set_effect.reset_mock() + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "Effect2" + + # Test setting light effect off + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "off"}, + blocking=True, + ) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "off" + light.set_state.assert_not_called() + + # Test setting light effect to invalid value + caplog.clear() + await hass.services.async_call( + LIGHT_DOMAIN, + "turn_on", + {ATTR_ENTITY_ID: entity_id, ATTR_EFFECT: "Effect3"}, + blocking=True, + ) + async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30)) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes[ATTR_EFFECT] == "off" + assert "Invalid effect Effect3 for" in caplog.text light_effect.effect = LightEffect.LIGHT_EFFECTS_OFF async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) From bc25657f0ad6fc26a401f291a59f49343b92c61e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:21:04 +0200 Subject: [PATCH 0220/1635] Fix unnecessary-return-none in telnet (#122949) --- homeassistant/components/telnet/switch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/telnet/switch.py b/homeassistant/components/telnet/switch.py index 805f037dbae..3b4b9e137d1 100644 --- a/homeassistant/components/telnet/switch.py +++ b/homeassistant/components/telnet/switch.py @@ -144,7 +144,7 @@ class TelnetSwitch(SwitchEntity): rendered = self._value_template.render_with_possible_json_value(response) else: _LOGGER.warning("Empty response for command: %s", self._command_state) - return None + return self._attr_is_on = rendered == "True" def turn_on(self, **kwargs: Any) -> None: From 93bcd413a79546e5aab29ab083981cd9b2bbf5ec Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:21:26 +0200 Subject: [PATCH 0221/1635] Fix unnecessary-return-none in iotty (#122947) --- homeassistant/components/iotty/switch.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/iotty/switch.py b/homeassistant/components/iotty/switch.py index 6609fb59400..ee489e88349 100644 --- a/homeassistant/components/iotty/switch.py +++ b/homeassistant/components/iotty/switch.py @@ -53,7 +53,7 @@ async def async_setup_entry( def async_update_data() -> None: """Handle updated data from the API endpoint.""" if not coordinator.last_update_success: - return None + return devices = coordinator.data.devices entities = [] From 
7276b4b3ad064bdad943284a6a8640f643c624b5 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Wed, 31 Jul 2024 19:31:53 +0100 Subject: [PATCH 0222/1635] Bump python-kasa to 0.7.1 (#122967) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index f935d019541..10b0ef61153 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -301,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.0.5"] + "requirements": ["python-kasa[speedups]==0.7.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 472270c1dcf..2cae4548956 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2292,7 +2292,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.5 +python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay python-linkplay==0.0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b2021a6bc7f..1d2f345cf1a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1810,7 +1810,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.0.5 +python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay python-linkplay==0.0.5 From 0189a05297b73c8e9437a6c8f484b5484d56460f Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Wed, 31 Jul 2024 20:36:43 +0200 Subject: [PATCH 0223/1635] Extend Matter select entity (#122513) --- homeassistant/components/matter/select.py | 86 +++++++++++++++++--- homeassistant/components/matter/strings.json | 3 + tests/components/matter/test_select.py | 22 +++++ 3 files changed, 99 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index bf528077b32..350712061ba 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -2,7 +2,12 @@ from __future__ import annotations +from dataclasses import dataclass +from typing import TYPE_CHECKING + from chip.clusters import Objects as clusters +from chip.clusters.Types import Nullable +from matter_server.common.helpers.util import create_attribute_path_from_attribute from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry @@ -10,7 +15,7 @@ from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .entity import MatterEntity +from .entity import MatterEntity, MatterEntityDescription from .helpers import get_matter from .models import MatterDiscoverySchema @@ -38,7 +43,41 @@ async def async_setup_entry( matter.register_platform_handler(Platform.SELECT, async_add_entities) -class MatterModeSelectEntity(MatterEntity, SelectEntity): +@dataclass(frozen=True) +class MatterSelectEntityDescription(SelectEntityDescription, MatterEntityDescription): + """Describe Matter select entities.""" + + +class MatterSelectEntity(MatterEntity, SelectEntity): + """Representation of a select entity from Matter Attribute read/write.""" + + 
entity_description: MatterSelectEntityDescription + + async def async_select_option(self, option: str) -> None: + """Change the selected mode.""" + value_convert = self.entity_description.ha_to_native_value + if TYPE_CHECKING: + assert value_convert is not None + await self.matter_client.write_attribute( + node_id=self._endpoint.node.node_id, + attribute_path=create_attribute_path_from_attribute( + self._endpoint.endpoint_id, self._entity_info.primary_attribute + ), + value=value_convert(option), + ) + + @callback + def _update_from_device(self) -> None: + """Update from device.""" + value: Nullable | int | None + value = self.get_matter_attribute_value(self._entity_info.primary_attribute) + value_convert = self.entity_description.measurement_to_ha + if TYPE_CHECKING: + assert value_convert is not None + self._attr_current_option = value_convert(value) + + +class MatterModeSelectEntity(MatterSelectEntity): """Representation of a select entity from Matter (Mode) Cluster attribute(s).""" async def async_select_option(self, option: str) -> None: @@ -77,7 +116,7 @@ class MatterModeSelectEntity(MatterEntity, SelectEntity): DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterModeSelect", entity_category=EntityCategory.CONFIG, translation_key="mode", @@ -90,7 +129,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterOvenMode", translation_key="mode", ), @@ -102,7 +141,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterLaundryWasherMode", translation_key="mode", ), @@ -114,7 +153,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterRefrigeratorAndTemperatureControlledCabinetMode", translation_key="mode", ), @@ -126,7 +165,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterRvcRunMode", translation_key="mode", ), @@ -138,7 +177,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterRvcCleanMode", translation_key="mode", ), @@ -150,7 +189,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterDishwasherMode", translation_key="mode", ), @@ -162,7 +201,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterMicrowaveOvenMode", translation_key="mode", ), @@ -174,7 +213,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + entity_description=MatterSelectEntityDescription( key="MatterEnergyEvseMode", translation_key="mode", ), @@ -186,7 +225,7 @@ DISCOVERY_SCHEMAS = [ ), MatterDiscoverySchema( platform=Platform.SELECT, - entity_description=SelectEntityDescription( + 
entity_description=MatterSelectEntityDescription( key="MatterDeviceEnergyManagementMode", translation_key="mode", ), @@ -196,4 +235,27 @@ DISCOVERY_SCHEMAS = [ clusters.DeviceEnergyManagementMode.Attributes.SupportedModes, ), ), + MatterDiscoverySchema( + platform=Platform.SELECT, + entity_description=MatterSelectEntityDescription( + key="MatterStartUpOnOff", + entity_category=EntityCategory.CONFIG, + translation_key="startup_on_off", + options=["On", "Off", "Toggle", "Previous"], + measurement_to_ha=lambda x: { + 0: "Off", + 1: "On", + 2: "Toggle", + None: "Previous", + }[x], + ha_to_native_value=lambda x: { + "Off": 0, + "On": 1, + "Toggle": 2, + "Previous": None, + }[x], + ), + entity_class=MatterSelectEntity, + required_attributes=(clusters.OnOff.Attributes.StartUpOnOff,), + ), ] diff --git a/homeassistant/components/matter/strings.json b/homeassistant/components/matter/strings.json index c23a2d6fe94..e69c7ae3090 100644 --- a/homeassistant/components/matter/strings.json +++ b/homeassistant/components/matter/strings.json @@ -134,6 +134,9 @@ "select": { "mode": { "name": "Mode" + }, + "startup_on_off": { + "name": "Power-on behavior on Startup" } }, "sensor": { diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py index 0d4d5e71b81..9b774f0430b 100644 --- a/tests/components/matter/test_select.py +++ b/tests/components/matter/test_select.py @@ -107,3 +107,25 @@ async def test_microwave_select_entities( await trigger_subscription_callback(hass, matter_client) state = hass.states.get("select.microwave_oven_mode") assert state.state == "Defrost" + + +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_attribute_select_entities( + hass: HomeAssistant, + matter_client: MagicMock, + light_node: MatterNode, +) -> None: + """Test select entities are created for attribute based discovery schema(s).""" + entity_id = "select.mock_dimmable_light_power_on_behavior_on_startup" + state = hass.states.get(entity_id) + assert state + assert state.state == "Previous" + assert state.attributes["options"] == ["On", "Off", "Toggle", "Previous"] + assert ( + state.attributes["friendly_name"] + == "Mock Dimmable Light Power-on behavior on Startup" + ) + set_node_attribute(light_node, 1, 6, 16387, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.state == "On" From f1084a57df5f38c470667e24b38983d26de1b8fa Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:36:59 -0400 Subject: [PATCH 0224/1635] Fix Sonos media_player control may fail when grouping speakers (#121853) --- homeassistant/components/sonos/speaker.py | 21 ++- tests/components/sonos/conftest.py | 24 +++ .../sonos/fixtures/av_transport.json | 38 +++++ tests/components/sonos/fixtures/zgs_group.xml | 8 + .../sonos/fixtures/zgs_two_single.xml | 10 ++ tests/components/sonos/test_speaker.py | 146 +++++++++++++++++- 6 files changed, 241 insertions(+), 6 deletions(-) create mode 100644 tests/components/sonos/fixtures/av_transport.json create mode 100644 tests/components/sonos/fixtures/zgs_group.xml create mode 100644 tests/components/sonos/fixtures/zgs_two_single.xml diff --git a/homeassistant/components/sonos/speaker.py b/homeassistant/components/sonos/speaker.py index d77100a2236..d339e861a13 100644 --- a/homeassistant/components/sonos/speaker.py +++ b/homeassistant/components/sonos/speaker.py @@ -826,9 +826,6 @@ class SonosSpeaker: f"{SONOS_VANISHED}-{uid}", reason, ) - - 
if "zone_player_uui_ds_in_group" not in event.variables: - return self.event_stats.process(event) self.hass.async_create_background_task( self.create_update_groups_coro(event), @@ -857,8 +854,7 @@ class SonosSpeaker: async def _async_extract_group(event: SonosEvent | None) -> list[str]: """Extract group layout from a topology event.""" - group = event and event.zone_player_uui_ds_in_group - if group: + if group := (event and getattr(event, "zone_player_uui_ds_in_group", None)): assert isinstance(group, str) return group.split(",") @@ -867,11 +863,21 @@ class SonosSpeaker: @callback def _async_regroup(group: list[str]) -> None: """Rebuild internal group layout.""" + _LOGGER.debug("async_regroup %s %s", self.zone_name, group) if ( group == [self.soco.uid] and self.sonos_group == [self] and self.sonos_group_entities ): + # Single speakers do not have a coodinator, check and clear + if self.coordinator is not None: + _LOGGER.debug( + "Zone %s Cleared coordinator [%s]", + self.zone_name, + self.coordinator.zone_name, + ) + self.coordinator = None + self.async_write_entity_states() # Skip updating existing single speakers in polling mode return @@ -912,6 +918,11 @@ class SonosSpeaker: joined_speaker.coordinator = self joined_speaker.sonos_group = sonos_group joined_speaker.sonos_group_entities = sonos_group_entities + _LOGGER.debug( + "Zone %s Set coordinator [%s]", + joined_speaker.zone_name, + self.zone_name, + ) joined_speaker.async_write_entity_states() _LOGGER.debug("Regrouped %s: %s", self.zone_name, self.sonos_group_entities) diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 4e5f704d322..bbec7a2308c 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -17,6 +17,7 @@ from homeassistant.components.media_player import DOMAIN as MP_DOMAIN from homeassistant.components.sonos import DOMAIN from homeassistant.const import CONF_HOSTS from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture, load_json_value_fixture @@ -661,3 +662,26 @@ def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): await hass.async_block_till_done(wait_background_tasks=True) return _wrapper + + +@pytest.fixture(name="sonos_setup_two_speakers") +async def sonos_setup_two_speakers( + hass: HomeAssistant, soco_factory: SoCoMockFactory +) -> list[MockSoCo]: + """Set up home assistant with two Sonos Speakers.""" + soco_lr = soco_factory.cache_mock(MockSoCo(), "10.10.10.1", "Living Room") + soco_br = soco_factory.cache_mock(MockSoCo(), "10.10.10.2", "Bedroom") + await async_setup_component( + hass, + DOMAIN, + { + DOMAIN: { + "media_player": { + "interface_addr": "127.0.0.1", + "hosts": ["10.10.10.1", "10.10.10.2"], + } + } + }, + ) + await hass.async_block_till_done() + return [soco_lr, soco_br] diff --git a/tests/components/sonos/fixtures/av_transport.json b/tests/components/sonos/fixtures/av_transport.json new file mode 100644 index 00000000000..743ac61e3ff --- /dev/null +++ b/tests/components/sonos/fixtures/av_transport.json @@ -0,0 +1,38 @@ +{ + "transport_state": "PLAYING", + "current_play_mode": "NORMAL", + "current_crossfade_mode": "0", + "number_of_tracks": "1", + "current_track": "1", + "current_section": "0", + "current_track_uri": "x-rincon:RINCON_test_10.10.10.2", + "current_track_duration": "", + "current_track_meta_data": "", + "next_track_uri": "", + "next_track_meta_data": "", + "enqueued_transport_uri": "", + 
"enqueued_transport_uri_meta_data": "", + "playback_storage_medium": "NETWORK", + "av_transport_uri": "x-rincon:RINCON_test_10.10.10.2", + "av_transport_uri_meta_data": "", + "next_av_transport_uri": "", + "next_av_transport_uri_meta_data": "", + "current_transport_actions": "Stop, Play", + "current_valid_play_modes": "CROSSFADE", + "direct_control_client_id": "", + "direct_control_is_suspended": "0", + "direct_control_account_id": "", + "transport_status": "OK", + "sleep_timer_generation": "0", + "alarm_running": "0", + "snooze_running": "0", + "restart_pending": "0", + "transport_play_speed": "NOT_IMPLEMENTED", + "current_media_duration": "NOT_IMPLEMENTED", + "record_storage_medium": "NOT_IMPLEMENTED", + "possible_playback_storage_media": "NONE, NETWORK", + "possible_record_storage_media": "NOT_IMPLEMENTED", + "record_medium_write_status": "NOT_IMPLEMENTED", + "current_record_quality_mode": "NOT_IMPLEMENTED", + "possible_record_quality_modes": "NOT_IMPLEMENTED" +} diff --git a/tests/components/sonos/fixtures/zgs_group.xml b/tests/components/sonos/fixtures/zgs_group.xml new file mode 100644 index 00000000000..58f40be0049 --- /dev/null +++ b/tests/components/sonos/fixtures/zgs_group.xml @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/tests/components/sonos/fixtures/zgs_two_single.xml b/tests/components/sonos/fixtures/zgs_two_single.xml new file mode 100644 index 00000000000..18c3c9231c6 --- /dev/null +++ b/tests/components/sonos/fixtures/zgs_two_single.xml @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/tests/components/sonos/test_speaker.py b/tests/components/sonos/test_speaker.py index 2c4357060be..40d126c64f2 100644 --- a/tests/components/sonos/test_speaker.py +++ b/tests/components/sonos/test_speaker.py @@ -4,11 +4,18 @@ from unittest.mock import patch import pytest +from homeassistant.components.media_player import ( + DOMAIN as MP_DOMAIN, + SERVICE_MEDIA_PLAY, +) +from homeassistant.components.sonos import DOMAIN from homeassistant.components.sonos.const import DATA_SONOS, SCAN_INTERVAL from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from tests.common import async_fire_time_changed +from .conftest import MockSoCo, SonosMockEvent + +from tests.common import async_fire_time_changed, load_fixture, load_json_value_fixture async def test_fallback_to_polling( @@ -67,3 +74,140 @@ async def test_subscription_creation_fails( await hass.async_block_till_done() assert speaker._subscriptions + + +def _create_zgs_sonos_event( + fixture_file: str, soco_1: MockSoCo, soco_2: MockSoCo, create_uui_ds: bool = True +) -> SonosMockEvent: + """Create a Sonos Event for zone group state, with the option of creating the uui_ds_in_group.""" + zgs = load_fixture(fixture_file, DOMAIN) + variables = {} + variables["ZoneGroupState"] = zgs + # Sonos does not always send this variable with zgs events + if create_uui_ds: + variables["zone_player_uui_ds_in_group"] = f"{soco_1.uid},{soco_2.uid}" + event = SonosMockEvent(soco_1, soco_1.zoneGroupTopology, variables) + if create_uui_ds: + event.zone_player_uui_ds_in_group = f"{soco_1.uid},{soco_2.uid}" + return event + + +def _create_avtransport_sonos_event( + fixture_file: str, soco: MockSoCo +) -> SonosMockEvent: + """Create a Sonos Event for an AVTransport update.""" + variables = load_json_value_fixture(fixture_file, DOMAIN) + return SonosMockEvent(soco, soco.avTransport, variables) + + +async def _media_play(hass: HomeAssistant, entity: str) -> None: + """Call media play service.""" + await hass.services.async_call( + 
MP_DOMAIN, + SERVICE_MEDIA_PLAY, + { + "entity_id": entity, + }, + blocking=True, + ) + + +async def test_zgs_event_group_speakers( + hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] +) -> None: + """Tests grouping and ungrouping two speakers.""" + # When Sonos speakers are grouped; one of the speakers is the coordinator and is in charge + # of playback across both speakers. Hence, service calls to play or pause on media_players + # that are part of the group are routed to the coordinator. + soco_lr = sonos_setup_two_speakers[0] + soco_br = sonos_setup_two_speakers[1] + + # Test 1 - Initial state - speakers are not grouped + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == ["media_player.living_room"] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == ["media_player.bedroom"] + # Each speaker is its own coordinator and calls should route to their SoCos + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + await _media_play(hass, "media_player.bedroom") + assert soco_br.play.call_count == 1 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 2 - Group the speakers, living room is the coordinator + event = _create_zgs_sonos_event( + "zgs_group.xml", soco_lr, soco_br, create_uui_ds=True + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == [ + "media_player.living_room", + "media_player.bedroom", + ] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == [ + "media_player.living_room", + "media_player.bedroom", + ] + # Play calls should route to the living room SoCo + await _media_play(hass, "media_player.living_room") + await _media_play(hass, "media_player.bedroom") + assert soco_lr.play.call_count == 2 + assert soco_br.play.call_count == 0 + + soco_lr.play.reset_mock() + soco_br.play.reset_mock() + + # Test 3 - Ungroup the speakers + event = _create_zgs_sonos_event( + "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False + ) + soco_lr.zoneGroupTopology.subscribe.return_value._callback(event) + soco_br.zoneGroupTopology.subscribe.return_value._callback(event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.living_room") + assert state.attributes["group_members"] == ["media_player.living_room"] + state = hass.states.get("media_player.bedroom") + assert state.attributes["group_members"] == ["media_player.bedroom"] + # Calls should route to each speakers Soco + await _media_play(hass, "media_player.living_room") + assert soco_lr.play.call_count == 1 + await _media_play(hass, "media_player.bedroom") + assert soco_br.play.call_count == 1 + + +async def test_zgs_avtransport_group_speakers( + hass: HomeAssistant, sonos_setup_two_speakers: list[MockSoCo] +) -> None: + """Test processing avtransport and zgs events to change group membership.""" + soco_lr = sonos_setup_two_speakers[0] + soco_br = sonos_setup_two_speakers[1] + + # Test 1 - Send a transport event changing the coordinator + # for the living room speaker to the bedroom speaker. 
+    event = _create_avtransport_sonos_event("av_transport.json", soco_lr)
+    soco_lr.avTransport.subscribe.return_value._callback(event)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    # Call should route to the new coordinator, which is the bedroom
+    await _media_play(hass, "media_player.living_room")
+    assert soco_lr.play.call_count == 0
+    assert soco_br.play.call_count == 1
+
+    soco_lr.play.reset_mock()
+    soco_br.play.reset_mock()
+
+    # Test 2 - Send a zgs event to return living room to its own coordinator
+    event = _create_zgs_sonos_event(
+        "zgs_two_single.xml", soco_lr, soco_br, create_uui_ds=False
+    )
+    soco_lr.zoneGroupTopology.subscribe.return_value._callback(event)
+    soco_br.zoneGroupTopology.subscribe.return_value._callback(event)
+    await hass.async_block_till_done(wait_background_tasks=True)
+    # Call should route to the living room
+    await _media_play(hass, "media_player.living_room")
+    assert soco_lr.play.call_count == 1
+    assert soco_br.play.call_count == 0

From 8a4206da9914cae6e195b41d34f3c78ba83c5e49 Mon Sep 17 00:00:00 2001
From: Marcel van der Veldt
Date: Wed, 31 Jul 2024 20:37:57 +0200
Subject: [PATCH 0225/1635] Matter handle FeatureMap update (#122544)
---
 homeassistant/components/matter/climate.py | 88 +++++++++++-----------
 homeassistant/components/matter/fan.py     | 14 +++-
 homeassistant/components/matter/lock.py    | 48 ++++++------
 tests/components/matter/test_climate.py    |  6 ++
 tests/components/matter/test_fan.py        |  5 ++
 tests/components/matter/test_lock.py       |  6 ++
 6 files changed, 93 insertions(+), 74 deletions(-)

diff --git a/homeassistant/components/matter/climate.py b/homeassistant/components/matter/climate.py
index 713aadf5620..ff00e4ee495 100644
--- a/homeassistant/components/matter/climate.py
+++ b/homeassistant/components/matter/climate.py
@@ -3,7 +3,7 @@
 from __future__ import annotations
 
 from enum import IntEnum
-from typing import TYPE_CHECKING, Any
+from typing import Any
 
 from chip.clusters import Objects as clusters
 from matter_server.client.models import device_types
@@ -30,12 +30,6 @@ from .entity import MatterEntity
 from .helpers import get_matter
 from .models import MatterDiscoverySchema
 
-if TYPE_CHECKING:
-    from matter_server.client import MatterClient
-    from matter_server.client.models.node import MatterEndpoint
-
-    from .discovery import MatterEntityInfo
-
 TEMPERATURE_SCALING_FACTOR = 100
 HVAC_SYSTEM_MODE_MAP = {
     HVACMode.OFF: 0,
@@ -105,46 +99,9 @@ class MatterClimate(MatterEntity, ClimateEntity):
 
     _attr_temperature_unit: str = UnitOfTemperature.CELSIUS
     _attr_hvac_mode: HVACMode = HVACMode.OFF
+    _feature_map: int | None = None
     _enable_turn_on_off_backwards_compatibility = False
 
-    def __init__(
-        self,
-        matter_client: MatterClient,
-        endpoint: MatterEndpoint,
-        entity_info: MatterEntityInfo,
-    ) -> None:
-        """Initialize the Matter climate entity."""
-        super().__init__(matter_client, endpoint, entity_info)
-        product_id = self._endpoint.node.device_info.productID
-        vendor_id = self._endpoint.node.device_info.vendorID
-
-        # set hvac_modes based on feature map
-        self._attr_hvac_modes: list[HVACMode] = [HVACMode.OFF]
-        feature_map = int(
-            self.get_matter_attribute_value(clusters.Thermostat.Attributes.FeatureMap)
-        )
-        self._attr_supported_features = (
-            ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.TURN_OFF
-        )
-        if feature_map & ThermostatFeature.kHeating:
-            self._attr_hvac_modes.append(HVACMode.HEAT)
-        if feature_map & ThermostatFeature.kCooling:
-            self._attr_hvac_modes.append(HVACMode.COOL)
-        if (vendor_id, product_id) in 
SUPPORT_DRY_MODE_DEVICES: - self._attr_hvac_modes.append(HVACMode.DRY) - if (vendor_id, product_id) in SUPPORT_FAN_MODE_DEVICES: - self._attr_hvac_modes.append(HVACMode.FAN_ONLY) - if feature_map & ThermostatFeature.kAutoMode: - self._attr_hvac_modes.append(HVACMode.HEAT_COOL) - # only enable temperature_range feature if the device actually supports that - - if (vendor_id, product_id) not in SINGLE_SETPOINT_DEVICES: - self._attr_supported_features |= ( - ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - ) - if any(mode for mode in self.hvac_modes if mode != HVACMode.OFF): - self._attr_supported_features |= ClimateEntityFeature.TURN_ON - async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" target_hvac_mode: HVACMode | None = kwargs.get(ATTR_HVAC_MODE) @@ -224,6 +181,7 @@ class MatterClimate(MatterEntity, ClimateEntity): @callback def _update_from_device(self) -> None: """Update from device.""" + self._calculate_features() self._attr_current_temperature = self._get_temperature_in_degrees( clusters.Thermostat.Attributes.LocalTemperature ) @@ -319,6 +277,46 @@ class MatterClimate(MatterEntity, ClimateEntity): else: self._attr_max_temp = DEFAULT_MAX_TEMP + @callback + def _calculate_features( + self, + ) -> None: + """Calculate features for HA Thermostat platform from Matter FeatureMap.""" + feature_map = int( + self.get_matter_attribute_value(clusters.Thermostat.Attributes.FeatureMap) + ) + # NOTE: the featuremap can dynamically change, so we need to update the + # supported features if the featuremap changes. + # work out supported features and presets from matter featuremap + if self._feature_map == feature_map: + return + self._feature_map = feature_map + product_id = self._endpoint.node.device_info.productID + vendor_id = self._endpoint.node.device_info.vendorID + self._attr_hvac_modes: list[HVACMode] = [HVACMode.OFF] + self._attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.TURN_OFF + ) + if feature_map & ThermostatFeature.kHeating: + self._attr_hvac_modes.append(HVACMode.HEAT) + if feature_map & ThermostatFeature.kCooling: + self._attr_hvac_modes.append(HVACMode.COOL) + if (vendor_id, product_id) in SUPPORT_DRY_MODE_DEVICES: + self._attr_hvac_modes.append(HVACMode.DRY) + if (vendor_id, product_id) in SUPPORT_FAN_MODE_DEVICES: + self._attr_hvac_modes.append(HVACMode.FAN_ONLY) + if feature_map & ThermostatFeature.kAutoMode: + self._attr_hvac_modes.append(HVACMode.HEAT_COOL) + # only enable temperature_range feature if the device actually supports that + + if (vendor_id, product_id) not in SINGLE_SETPOINT_DEVICES: + self._attr_supported_features |= ( + ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + ) + if any(mode for mode in self.hvac_modes if mode != HVACMode.OFF): + self._attr_supported_features |= ClimateEntityFeature.TURN_ON + + @callback def _get_temperature_in_degrees( self, attribute: type[clusters.ClusterAttributeDescriptor] ) -> float | None: diff --git a/homeassistant/components/matter/fan.py b/homeassistant/components/matter/fan.py index 8e5ef617304..458a57538eb 100644 --- a/homeassistant/components/matter/fan.py +++ b/homeassistant/components/matter/fan.py @@ -59,6 +59,7 @@ class MatterFan(MatterEntity, FanEntity): _last_known_preset_mode: str | None = None _last_known_percentage: int = 0 _enable_turn_on_off_backwards_compatibility = False + _feature_map: int | None = None async def async_turn_on( self, @@ -183,8 +184,7 @@ class MatterFan(MatterEntity, FanEntity): @callback def 
_update_from_device(self) -> None: """Update from device.""" - if not hasattr(self, "_attr_preset_modes"): - self._calculate_features() + self._calculate_features() if self.get_matter_attribute_value(clusters.OnOff.Attributes.OnOff) is False: # special case: the appliance has a dedicated Power switch on the OnOff cluster @@ -257,11 +257,17 @@ class MatterFan(MatterEntity, FanEntity): def _calculate_features( self, ) -> None: - """Calculate features and preset modes for HA Fan platform from Matter attributes..""" - # work out supported features and presets from matter featuremap + """Calculate features for HA Fan platform from Matter FeatureMap.""" feature_map = int( self.get_matter_attribute_value(clusters.FanControl.Attributes.FeatureMap) ) + # NOTE: the featuremap can dynamically change, so we need to update the + # supported features if the featuremap changes. + # work out supported features and presets from matter featuremap + if self._feature_map == feature_map: + return + self._feature_map = feature_map + self._attr_supported_features = FanEntityFeature(0) if feature_map & FanControlFeature.kMultiSpeed: self._attr_supported_features |= FanEntityFeature.SET_SPEED self._attr_speed_count = int( diff --git a/homeassistant/components/matter/lock.py b/homeassistant/components/matter/lock.py index 31ae5e496ce..8adaecd67ad 100644 --- a/homeassistant/components/matter/lock.py +++ b/homeassistant/components/matter/lock.py @@ -38,7 +38,7 @@ async def async_setup_entry( class MatterLock(MatterEntity, LockEntity): """Representation of a Matter lock.""" - features: int | None = None + _feature_map: int | None = None _optimistic_timer: asyncio.TimerHandle | None = None @property @@ -61,22 +61,6 @@ class MatterLock(MatterEntity, LockEntity): return None - @property - def supports_door_position_sensor(self) -> bool: - """Return True if the lock supports door position sensor.""" - if self.features is None: - return False - - return bool(self.features & DoorLockFeature.kDoorPositionSensor) - - @property - def supports_unbolt(self) -> bool: - """Return True if the lock supports unbolt.""" - if self.features is None: - return False - - return bool(self.features & DoorLockFeature.kUnbolt) - async def send_device_command( self, command: clusters.ClusterCommand, @@ -120,7 +104,7 @@ class MatterLock(MatterEntity, LockEntity): ) code: str | None = kwargs.get(ATTR_CODE) code_bytes = code.encode() if code else None - if self.supports_unbolt: + if self._attr_supported_features & LockEntityFeature.OPEN: # if the lock reports it has separate unbolt support, # the unlock command should unbolt only on the unlock command # and unlatch on the HA 'open' command. 
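
# The pattern this commit applies uniformly to climate, fan and lock: cache the
# last FeatureMap bitmap and rebuild the derived supported features only when
# the bitmap actually changes, since _update_from_device now recalculates on
# every update. A small self-contained sketch of that guard (the flag names and
# mode strings below are illustrative assumptions, not the real Matter cluster
# definitions):
from enum import IntFlag


class _Feature(IntFlag):
    HEATING = 0x01
    COOLING = 0x02
    AUTO = 0x20


class _FeatureCache:
    """Recompute derived modes only when the raw feature map changes."""

    def __init__(self) -> None:
        self._feature_map: int | None = None
        self.modes: list[str] = ["off"]

    def update(self, feature_map: int) -> None:
        if self._feature_map == feature_map:
            return  # unchanged bitmap: keep the previously derived modes
        self._feature_map = feature_map
        self.modes = ["off"]
        if feature_map & _Feature.HEATING:
            self.modes.append("heat")
        if feature_map & _Feature.COOLING:
            self.modes.append("cool")
        if feature_map & _Feature.AUTO:
            self.modes.append("heat_cool")


_cache = _FeatureCache()
_cache.update(_Feature.HEATING | _Feature.COOLING)
assert _cache.modes == ["off", "heat", "cool"]
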
@@ -151,13 +135,8 @@ class MatterLock(MatterEntity, LockEntity): @callback def _update_from_device(self) -> None: """Update the entity from the device.""" - - if self.features is None: - self.features = int( - self.get_matter_attribute_value(clusters.DoorLock.Attributes.FeatureMap) - ) - if self.supports_unbolt: - self._attr_supported_features = LockEntityFeature.OPEN + # always calculate the features as they can dynamically change + self._calculate_features() lock_state = self.get_matter_attribute_value( clusters.DoorLock.Attributes.LockState @@ -197,6 +176,25 @@ class MatterLock(MatterEntity, LockEntity): if write_state: self.async_write_ha_state() + @callback + def _calculate_features( + self, + ) -> None: + """Calculate features for HA Lock platform from Matter FeatureMap.""" + feature_map = int( + self.get_matter_attribute_value(clusters.DoorLock.Attributes.FeatureMap) + ) + # NOTE: the featuremap can dynamically change, so we need to update the + # supported features if the featuremap changes. + if self._feature_map == feature_map: + return + self._feature_map = feature_map + supported_features = LockEntityFeature(0) + # determine if lock supports optional open/unbolt feature + if bool(feature_map & DoorLockFeature.kUnbolt): + supported_features |= LockEntityFeature.OPEN + self._attr_supported_features = supported_features + DISCOVERY_SCHEMAS = [ MatterDiscoverySchema( diff --git a/tests/components/matter/test_climate.py b/tests/components/matter/test_climate.py index e0015e8b445..4d6978edfde 100644 --- a/tests/components/matter/test_climate.py +++ b/tests/components/matter/test_climate.py @@ -350,3 +350,9 @@ async def test_room_airconditioner( state = hass.states.get("climate.room_airconditioner_thermostat") assert state assert state.state == HVACMode.DRY + + # test featuremap update + set_node_attribute(room_airconditioner, 1, 513, 65532, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("climate.room_airconditioner_thermostat") + assert state.attributes["supported_features"] & ClimateEntityFeature.TURN_ON diff --git a/tests/components/matter/test_fan.py b/tests/components/matter/test_fan.py index 18c2c2ed255..690209b1165 100644 --- a/tests/components/matter/test_fan.py +++ b/tests/components/matter/test_fan.py @@ -107,6 +107,11 @@ async def test_fan_base( state = hass.states.get(entity_id) assert state.attributes["preset_mode"] is None assert state.attributes["percentage"] == 0 + # test featuremap update + set_node_attribute(air_purifier, 1, 514, 65532, 1) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get(entity_id) + assert state.attributes["supported_features"] & FanEntityFeature.SET_SPEED @pytest.mark.parametrize("expected_lingering_tasks", [True]) diff --git a/tests/components/matter/test_lock.py b/tests/components/matter/test_lock.py index 1180e6ee469..f279430b393 100644 --- a/tests/components/matter/test_lock.py +++ b/tests/components/matter/test_lock.py @@ -97,6 +97,12 @@ async def test_lock( assert state assert state.state == STATE_UNKNOWN + # test featuremap update + set_node_attribute(door_lock, 1, 257, 65532, 4096) + await trigger_subscription_callback(hass, matter_client) + state = hass.states.get("lock.mock_door_lock_lock") + assert state.attributes["supported_features"] & LockEntityFeature.OPEN + # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) From d5388452d4e4bc29c8d684697bd9a0880bc9010e Mon Sep 17 00:00:00 2001 From: 
Michael Hansen Date: Wed, 31 Jul 2024 13:39:03 -0500 Subject: [PATCH 0226/1635] Use finished speaking detection in ESPHome/Wyoming (#122962) --- .../components/assist_pipeline/pipeline.py | 7 ++- .../components/assist_pipeline/vad.py | 2 +- homeassistant/components/esphome/select.py | 2 +- .../components/esphome/voice_assistant.py | 6 +++ homeassistant/components/wyoming/devices.py | 17 +++++++ homeassistant/components/wyoming/satellite.py | 4 ++ homeassistant/components/wyoming/select.py | 25 +++++++++- homeassistant/components/wyoming/strings.json | 8 ++++ tests/components/wyoming/test_select.py | 48 +++++++++++++++++++ 9 files changed, 115 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 845950caf8d..af29888eb07 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -505,6 +505,9 @@ class AudioSettings: samples_per_chunk: int | None = None """Number of samples that will be in each audio chunk (None for no chunking).""" + silence_seconds: float = 0.5 + """Seconds of silence after voice command has ended.""" + def __post_init__(self) -> None: """Verify settings post-initialization.""" if (self.noise_suppression_level < 0) or (self.noise_suppression_level > 4): @@ -909,7 +912,9 @@ class PipelineRun: # Transcribe audio stream stt_vad: VoiceCommandSegmenter | None = None if self.audio_settings.is_vad_enabled: - stt_vad = VoiceCommandSegmenter() + stt_vad = VoiceCommandSegmenter( + silence_seconds=self.audio_settings.silence_seconds + ) result = await self.stt_provider.async_process_audio_stream( metadata, diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index e3b425a2a7b..49496e66159 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -80,7 +80,7 @@ class VoiceCommandSegmenter: speech_seconds: float = 0.3 """Seconds of speech before voice command has started.""" - silence_seconds: float = 0.5 + silence_seconds: float = 1.0 """Seconds of silence after voice command has ended.""" timeout_seconds: float = 15.0 diff --git a/homeassistant/components/esphome/select.py b/homeassistant/components/esphome/select.py index ed37a9a6ab8..623946503eb 100644 --- a/homeassistant/components/esphome/select.py +++ b/homeassistant/components/esphome/select.py @@ -83,7 +83,7 @@ class EsphomeAssistPipelineSelect(EsphomeAssistEntity, AssistPipelineSelect): class EsphomeVadSensitivitySelect(EsphomeAssistEntity, VadSensitivitySelect): - """VAD sensitivity selector for VoIP devices.""" + """VAD sensitivity selector for ESPHome devices.""" def __init__(self, hass: HomeAssistant, entry_data: RuntimeEntryData) -> None: """Initialize a VAD sensitivity selector.""" diff --git a/homeassistant/components/esphome/voice_assistant.py b/homeassistant/components/esphome/voice_assistant.py index a6cedee30ab..eb55be2ced6 100644 --- a/homeassistant/components/esphome/voice_assistant.py +++ b/homeassistant/components/esphome/voice_assistant.py @@ -34,6 +34,7 @@ from homeassistant.components.assist_pipeline.error import ( WakeWordDetectionAborted, WakeWordDetectionError, ) +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.intent.timers import TimerEventType, TimerInfo from homeassistant.components.media_player import async_process_play_media_url from homeassistant.core import 
Context, HomeAssistant, callback @@ -243,6 +244,11 @@ class VoiceAssistantPipeline: auto_gain_dbfs=audio_settings.auto_gain, volume_multiplier=audio_settings.volume_multiplier, is_vad_enabled=bool(flags & VoiceAssistantCommandFlag.USE_VAD), + silence_seconds=VadSensitivity.to_seconds( + pipeline_select.get_vad_sensitivity( + self.hass, DOMAIN, self.device_info.mac_address + ) + ), ), ) diff --git a/homeassistant/components/wyoming/devices.py b/homeassistant/components/wyoming/devices.py index 2ca66f3b21a..2e00b31fd34 100644 --- a/homeassistant/components/wyoming/devices.py +++ b/homeassistant/components/wyoming/devices.py @@ -5,6 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er @@ -23,6 +24,7 @@ class SatelliteDevice: noise_suppression_level: int = 0 auto_gain: int = 0 volume_multiplier: float = 1.0 + vad_sensitivity: VadSensitivity = VadSensitivity.DEFAULT _is_active_listener: Callable[[], None] | None = None _is_muted_listener: Callable[[], None] | None = None @@ -77,6 +79,14 @@ class SatelliteDevice: if self._audio_settings_listener is not None: self._audio_settings_listener() + @callback + def set_vad_sensitivity(self, vad_sensitivity: VadSensitivity) -> None: + """Set VAD sensitivity.""" + if vad_sensitivity != self.vad_sensitivity: + self.vad_sensitivity = vad_sensitivity + if self._audio_settings_listener is not None: + self._audio_settings_listener() + @callback def set_is_active_listener(self, is_active_listener: Callable[[], None]) -> None: """Listen for updates to is_active.""" @@ -140,3 +150,10 @@ class SatelliteDevice: return ent_reg.async_get_entity_id( "number", DOMAIN, f"{self.satellite_id}-volume_multiplier" ) + + def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id for VAD sensitivity.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id( + "select", DOMAIN, f"{self.satellite_id}-vad_sensitivity" + ) diff --git a/homeassistant/components/wyoming/satellite.py b/homeassistant/components/wyoming/satellite.py index 3ca86a42e5d..781f0706c68 100644 --- a/homeassistant/components/wyoming/satellite.py +++ b/homeassistant/components/wyoming/satellite.py @@ -25,6 +25,7 @@ from wyoming.wake import Detect, Detection from homeassistant.components import assist_pipeline, intent, stt, tts from homeassistant.components.assist_pipeline import select as pipeline_select +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.config_entries import ConfigEntry from homeassistant.core import Context, HomeAssistant, callback @@ -409,6 +410,9 @@ class WyomingSatellite: noise_suppression_level=self.device.noise_suppression_level, auto_gain_dbfs=self.device.auto_gain, volume_multiplier=self.device.volume_multiplier, + silence_seconds=VadSensitivity.to_seconds( + self.device.vad_sensitivity + ), ), device_id=self.device.device_id, wake_word_phrase=wake_word_phrase, diff --git a/homeassistant/components/wyoming/select.py b/homeassistant/components/wyoming/select.py index 99f26c3e440..f852b4d0434 100644 --- a/homeassistant/components/wyoming/select.py +++ b/homeassistant/components/wyoming/select.py @@ -4,7 +4,11 @@ from __future__ import annotations from typing import TYPE_CHECKING, Final -from homeassistant.components.assist_pipeline.select import 
AssistPipelineSelect +from homeassistant.components.assist_pipeline.select import ( + AssistPipelineSelect, + VadSensitivitySelect, +) +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.select import SelectEntity, SelectEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory @@ -45,6 +49,7 @@ async def async_setup_entry( [ WyomingSatellitePipelineSelect(hass, device), WyomingSatelliteNoiseSuppressionLevelSelect(device), + WyomingSatelliteVadSensitivitySelect(hass, device), ] ) @@ -92,3 +97,21 @@ class WyomingSatelliteNoiseSuppressionLevelSelect( self._attr_current_option = option self.async_write_ha_state() self._device.set_noise_suppression_level(_NOISE_SUPPRESSION_LEVEL[option]) + + +class WyomingSatelliteVadSensitivitySelect( + WyomingSatelliteEntity, VadSensitivitySelect +): + """VAD sensitivity selector for Wyoming satellites.""" + + def __init__(self, hass: HomeAssistant, device: SatelliteDevice) -> None: + """Initialize a VAD sensitivity selector.""" + self.device = device + + WyomingSatelliteEntity.__init__(self, device) + VadSensitivitySelect.__init__(self, hass, device.satellite_id) + + async def async_select_option(self, option: str) -> None: + """Select an option.""" + await super().async_select_option(option) + self.device.set_vad_sensitivity(VadSensitivity(option)) diff --git a/homeassistant/components/wyoming/strings.json b/homeassistant/components/wyoming/strings.json index f2768e45eb8..4a1a4c3a246 100644 --- a/homeassistant/components/wyoming/strings.json +++ b/homeassistant/components/wyoming/strings.json @@ -46,6 +46,14 @@ "high": "High", "max": "Max" } + }, + "vad_sensitivity": { + "name": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::name%]", + "state": { + "default": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::default%]", + "aggressive": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::aggressive%]", + "relaxed": "[%key:component::assist_pipeline::entity::select::vad_sensitivity::state::relaxed%]" + } } }, "switch": { diff --git a/tests/components/wyoming/test_select.py b/tests/components/wyoming/test_select.py index e6ec2c4d432..2438d25b838 100644 --- a/tests/components/wyoming/test_select.py +++ b/tests/components/wyoming/test_select.py @@ -5,6 +5,7 @@ from unittest.mock import Mock, patch from homeassistant.components import assist_pipeline from homeassistant.components.assist_pipeline.pipeline import PipelineData from homeassistant.components.assist_pipeline.select import OPTION_PREFERRED +from homeassistant.components.assist_pipeline.vad import VadSensitivity from homeassistant.components.wyoming.devices import SatelliteDevice from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant @@ -140,3 +141,50 @@ async def test_noise_suppression_level_select( ) assert satellite_device.noise_suppression_level == 2 + + +async def test_vad_sensitivity_select( + hass: HomeAssistant, + satellite_config_entry: ConfigEntry, + satellite_device: SatelliteDevice, +) -> None: + """Test VAD sensitivity select.""" + vs_entity_id = satellite_device.get_vad_sensitivity_entity_id(hass) + assert vs_entity_id + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.DEFAULT + assert satellite_device.vad_sensitivity == VadSensitivity.DEFAULT + + # Change setting + with patch.object(satellite_device, 
"set_vad_sensitivity") as mock_vs_changed: + await hass.services.async_call( + "select", + "select_option", + {"entity_id": vs_entity_id, "option": VadSensitivity.AGGRESSIVE.value}, + blocking=True, + ) + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.AGGRESSIVE.value + + # set function should have been called + mock_vs_changed.assert_called_once_with(VadSensitivity.AGGRESSIVE) + + # test restore + satellite_device = await reload_satellite(hass, satellite_config_entry.entry_id) + + state = hass.states.get(vs_entity_id) + assert state is not None + assert state.state == VadSensitivity.AGGRESSIVE.value + + await hass.services.async_call( + "select", + "select_option", + {"entity_id": vs_entity_id, "option": VadSensitivity.RELAXED.value}, + blocking=True, + ) + + assert satellite_device.vad_sensitivity == VadSensitivity.RELAXED From a23b3f84f0d108842bb9d707ca4656968e937b45 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:41:44 +0200 Subject: [PATCH 0227/1635] Fix implicit-return in garadget (#122923) --- homeassistant/components/garadget/cover.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/garadget/cover.py b/homeassistant/components/garadget/cover.py index cb4f402d7bb..988c66b679c 100644 --- a/homeassistant/components/garadget/cover.py +++ b/homeassistant/components/garadget/cover.py @@ -213,23 +213,20 @@ class GaradgetCover(CoverEntity): def close_cover(self, **kwargs: Any) -> None: """Close the cover.""" if self._state not in ["close", "closing"]: - ret = self._put_command("setState", "close") + self._put_command("setState", "close") self._start_watcher("close") - return ret.get("return_value") == 1 def open_cover(self, **kwargs: Any) -> None: """Open the cover.""" if self._state not in ["open", "opening"]: - ret = self._put_command("setState", "open") + self._put_command("setState", "open") self._start_watcher("open") - return ret.get("return_value") == 1 def stop_cover(self, **kwargs: Any) -> None: """Stop the door where it is.""" if self._state not in ["stopped"]: - ret = self._put_command("setState", "stop") + self._put_command("setState", "stop") self._start_watcher("stop") - return ret["return_value"] == 1 def update(self) -> None: """Get updated status from API.""" From 177690bcb372816d340e1c0416ad112c055d2fe6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:42:05 +0200 Subject: [PATCH 0228/1635] Rename variable in sensor tests (#122954) --- tests/components/sensor/test_recorder.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 2bd751a553c..27fab9c0b3b 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -2466,30 +2466,29 @@ async def test_list_statistic_ids( @pytest.mark.parametrize( - "_attributes", + "energy_attributes", [{**ENERGY_SENSOR_ATTRIBUTES, "last_reset": 0}, TEMPERATURE_SENSOR_ATTRIBUTES], ) async def test_list_statistic_ids_unsupported( hass: HomeAssistant, - caplog: pytest.LogCaptureFixture, - _attributes, + energy_attributes: dict[str, Any], ) -> None: """Test listing future statistic ids for unsupported sensor.""" await async_setup_component(hass, "sensor", {}) # Wait for the sensor recorder platform to be added await async_recorder_block_till_done(hass) - attributes = 
dict(_attributes) + attributes = dict(energy_attributes) hass.states.async_set("sensor.test1", 0, attributes=attributes) if "last_reset" in attributes: attributes.pop("unit_of_measurement") hass.states.async_set("last_reset.test2", 0, attributes=attributes) - attributes = dict(_attributes) + attributes = dict(energy_attributes) if "unit_of_measurement" in attributes: attributes["unit_of_measurement"] = "invalid" hass.states.async_set("sensor.test3", 0, attributes=attributes) attributes.pop("unit_of_measurement") hass.states.async_set("sensor.test4", 0, attributes=attributes) - attributes = dict(_attributes) + attributes = dict(energy_attributes) attributes["state_class"] = "invalid" hass.states.async_set("sensor.test5", 0, attributes=attributes) attributes.pop("state_class") From 7c179c33b5499ff15154b41f5f61f335535d961b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:42:19 +0200 Subject: [PATCH 0229/1635] Fix unnecessary-return-none in tradfri (#122950) --- homeassistant/components/tradfri/switch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/tradfri/switch.py b/homeassistant/components/tradfri/switch.py index 4ad1424aa9a..20695f26500 100644 --- a/homeassistant/components/tradfri/switch.py +++ b/homeassistant/components/tradfri/switch.py @@ -73,11 +73,11 @@ class TradfriSwitch(TradfriBaseEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Instruct the switch to turn off.""" if not self._device_control: - return None + return await self._api(self._device_control.set_state(False)) async def async_turn_on(self, **kwargs: Any) -> None: """Instruct the switch to turn on.""" if not self._device_control: - return None + return await self._api(self._device_control.set_state(True)) From 8de0e4ca7cb63bac30972f98d2e2c8652170562e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 31 Jul 2024 13:42:33 -0500 Subject: [PATCH 0230/1635] Remove aiohappyeyeballs license exception (#122969) --- script/licenses.py | 1 - 1 file changed, 1 deletion(-) diff --git a/script/licenses.py b/script/licenses.py index ad5ae8476b3..3b9ec389b08 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -124,7 +124,6 @@ EXCEPTIONS = { "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 "aiocomelit", # https://github.com/chemelli74/aiocomelit/pull/138 "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 - "aiohappyeyeballs", # PSF-2.0 license "aioopenexchangerates", # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94 "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 From c0fe65fa60110f3ca9e2772e90b39a99ddfcbb7c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:42:42 +0200 Subject: [PATCH 0231/1635] Fix unnecessary-return-none in homematic (#122948) --- homeassistant/components/homematic/climate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/homematic/climate.py b/homeassistant/components/homematic/climate.py index 16c345c5635..bf1295df6be 100644 --- a/homeassistant/components/homematic/climate.py +++ b/homeassistant/components/homematic/climate.py @@ -141,7 +141,7 @@ class HMThermostat(HMDevice, ClimateEntity): def set_temperature(self, **kwargs: Any) -> None: """Set new target temperature.""" if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: - return None + return self._hmdevice.writeNodeData(self._state, float(temperature)) From 79a741486ce76f8e91e2a0819050ea05546d8ca9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:42:57 +0200 Subject: [PATCH 0232/1635] Fix implicit-return in wyoming (#122946) --- homeassistant/components/wyoming/wake_word.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/wyoming/wake_word.py b/homeassistant/components/wyoming/wake_word.py index 6eba0f7ca6d..64dfd60c068 100644 --- a/homeassistant/components/wyoming/wake_word.py +++ b/homeassistant/components/wyoming/wake_word.py @@ -89,6 +89,7 @@ class WyomingWakeWordProvider(wake_word.WakeWordDetectionEntity): """Get the next chunk from audio stream.""" async for chunk_bytes in stream: return chunk_bytes + return None try: async with AsyncTcpClient(self.service.host, self.service.port) as client: From 2f3f124aa10461e741a2557cc695e23d6f834b34 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 31 Jul 2024 13:44:47 -0500 Subject: [PATCH 0233/1635] Drop unnecessary lambdas in the entity filter (#122941) --- homeassistant/helpers/entityfilter.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/helpers/entityfilter.py b/homeassistant/helpers/entityfilter.py index 24b65cba82a..1eaa0fb1404 100644 --- a/homeassistant/helpers/entityfilter.py +++ b/homeassistant/helpers/entityfilter.py @@ -4,7 +4,8 @@ from __future__ import annotations from collections.abc import Callable import fnmatch -from functools import lru_cache +from functools import lru_cache, partial +import operator import re import voluptuous as vol @@ -195,7 +196,7 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 1 - No filter # - All entities included if not have_include and not have_exclude: - return lambda entity_id: True + return bool # Case 2 - Only includes # - Entity listed in entities include: include @@ -280,4 +281,4 @@ def _generate_filter_from_sets_and_pattern_lists( # Case 6 - No Domain and/or glob includes or excludes # - Entity listed in entities include: include # - Otherwise: exclude - return lambda entity_id: entity_id in include_e + return partial(operator.contains, include_e) From d393317eb202a20ca6570420791f0e564fbb5140 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:45:10 +0200 Subject: [PATCH 0234/1635] Fix implicit-return in yamaha (#122942) --- homeassistant/components/yamaha/media_player.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index bf217f12ca4..507f485fcc7 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -427,19 +427,21 @@ class YamahaDeviceZone(MediaPlayerEntity): self.zctrl.surround_program = sound_mode @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media.""" if self._play_status is not None: return self._play_status.artist + return None @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album of current playing media.""" if self._play_status is not None: return self._play_status.album + return None @property - def media_content_type(self): + def media_content_type(self) -> MediaType | None: """Content type of current playing media.""" # Loose assumption that if playback is supported, we are playing music if self._is_playback_supported: @@ -447,7 +449,7 @@ class YamahaDeviceZone(MediaPlayerEntity): return None @property - def media_title(self): + def media_title(self) -> str | None: """Artist of current playing media.""" if self._play_status is not None: song = self._play_status.song @@ -459,3 +461,4 @@ class YamahaDeviceZone(MediaPlayerEntity): return f"{station}: {song}" return song or station + return None From dde97a02f05b9df11af989eb58206de687cb9ebe Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:45:29 +0200 Subject: [PATCH 0235/1635] Fix implicit-return in xiaomi_aqara (#122940) --- homeassistant/components/xiaomi_aqara/binary_sensor.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/homeassistant/components/xiaomi_aqara/binary_sensor.py b/homeassistant/components/xiaomi_aqara/binary_sensor.py index cee2980fe07..75208b142dd 100644 --- a/homeassistant/components/xiaomi_aqara/binary_sensor.py +++ 
b/homeassistant/components/xiaomi_aqara/binary_sensor.py @@ -202,6 +202,8 @@ class XiaomiNatgasSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiMotionSensor(XiaomiBinarySensor): """Representation of a XiaomiMotionSensor.""" @@ -298,6 +300,8 @@ class XiaomiMotionSensor(XiaomiBinarySensor): self._state = True return True + return False + class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): """Representation of a XiaomiDoorSensor.""" @@ -357,6 +361,8 @@ class XiaomiDoorSensor(XiaomiBinarySensor, RestoreEntity): return True return False + return False + class XiaomiWaterLeakSensor(XiaomiBinarySensor): """Representation of a XiaomiWaterLeakSensor.""" @@ -401,6 +407,8 @@ class XiaomiWaterLeakSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiSmokeSensor(XiaomiBinarySensor): """Representation of a XiaomiSmokeSensor.""" @@ -443,6 +451,8 @@ class XiaomiSmokeSensor(XiaomiBinarySensor): return True return False + return False + class XiaomiVibration(XiaomiBinarySensor): """Representation of a Xiaomi Vibration Sensor.""" From a6aae4e8572381b77f43406b49503f87d113d3ae Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:45:48 +0200 Subject: [PATCH 0236/1635] Fix implicit-return in xiaomi_miio (#122939) --- homeassistant/components/xiaomi_miio/binary_sensor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/xiaomi_miio/binary_sensor.py b/homeassistant/components/xiaomi_miio/binary_sensor.py index 7729ce27d29..6d1a81007dc 100644 --- a/homeassistant/components/xiaomi_miio/binary_sensor.py +++ b/homeassistant/components/xiaomi_miio/binary_sensor.py @@ -190,7 +190,8 @@ async def async_setup_entry( elif model in MODELS_HUMIDIFIER_MJJSQ: sensors = HUMIDIFIER_MJJSQ_BINARY_SENSORS elif model in MODELS_VACUUM: - return _setup_vacuum_sensors(hass, config_entry, async_add_entities) + _setup_vacuum_sensors(hass, config_entry, async_add_entities) + return for description in BINARY_SENSOR_TYPES: if description.key not in sensors: From 4aacec2de74938ec6e9e82598189652da345ba6f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:46:30 +0200 Subject: [PATCH 0237/1635] Fix implicit-return in xiaomi (#122938) --- homeassistant/components/xiaomi/device_tracker.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/xiaomi/device_tracker.py b/homeassistant/components/xiaomi/device_tracker.py index b3983e76aaa..b14ec073938 100644 --- a/homeassistant/components/xiaomi/device_tracker.py +++ b/homeassistant/components/xiaomi/device_tracker.py @@ -172,7 +172,6 @@ def _get_token(host, username, password): ) _LOGGER.exception(error_message, url, data, result) return None - else: - _LOGGER.error( - "Invalid response: [%s] at url: [%s] with data [%s]", res, url, data - ) + + _LOGGER.error("Invalid response: [%s] at url: [%s] with data [%s]", res, url, data) + return None From c7f863a14123069774388358337aaac894f71589 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 31 Jul 2024 13:47:19 -0500 Subject: [PATCH 0238/1635] Drop some unnecessary lambdas in powerwall (#122936) --- homeassistant/components/powerwall/sensor.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/powerwall/sensor.py b/homeassistant/components/powerwall/sensor.py index 7113ab6ba70..9423d65b0fc 100644 --- a/homeassistant/components/powerwall/sensor.py +++ b/homeassistant/components/powerwall/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from operator import attrgetter, methodcaller from typing import TYPE_CHECKING, Generic, TypeVar from tesla_powerwall import GridState, MeterResponse, MeterType @@ -58,11 +59,6 @@ def _get_meter_frequency(meter: MeterResponse) -> float: return round(meter.frequency, 1) -def _get_meter_total_current(meter: MeterResponse) -> float: - """Get the current value in A.""" - return meter.get_instant_total_current() - - def _get_meter_average_voltage(meter: MeterResponse) -> float: """Get the current value in V.""" return round(meter.instant_average_voltage, 1) @@ -93,7 +89,7 @@ POWERWALL_INSTANT_SENSORS = ( device_class=SensorDeviceClass.CURRENT, native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, entity_registry_enabled_default=False, - value_fn=_get_meter_total_current, + value_fn=methodcaller("get_instant_total_current"), ), PowerwallSensorEntityDescription[MeterResponse, float]( key="instant_voltage", @@ -132,7 +128,7 @@ BATTERY_INSTANT_SENSORS: list[PowerwallSensorEntityDescription] = [ native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, suggested_display_precision=1, - value_fn=lambda battery_data: battery_data.capacity, + value_fn=attrgetter("capacity"), ), PowerwallSensorEntityDescription[BatteryResponse, float | None]( key="battery_instant_voltage", @@ -170,7 +166,7 @@ BATTERY_INSTANT_SENSORS: list[PowerwallSensorEntityDescription] = [ state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.POWER, native_unit_of_measurement=UnitOfPower.WATT, - value_fn=lambda battery_data: battery_data.p_out, + value_fn=attrgetter("p_out"), ), PowerwallSensorEntityDescription[BatteryResponse, float | None]( key="battery_export", @@ -181,7 +177,7 @@ BATTERY_INSTANT_SENSORS: list[PowerwallSensorEntityDescription] = [ native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, suggested_display_precision=0, - value_fn=lambda battery_data: battery_data.energy_discharged, + value_fn=attrgetter("energy_discharged"), ), PowerwallSensorEntityDescription[BatteryResponse, float | None]( key="battery_import", @@ -192,7 +188,7 @@ BATTERY_INSTANT_SENSORS: list[PowerwallSensorEntityDescription] = [ native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, suggested_display_precision=0, - value_fn=lambda battery_data: battery_data.energy_charged, + value_fn=attrgetter("energy_charged"), ), PowerwallSensorEntityDescription[BatteryResponse, int]( key="battery_remaining", @@ -203,7 +199,7 @@ BATTERY_INSTANT_SENSORS: list[PowerwallSensorEntityDescription] = [ native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, suggested_display_precision=1, - value_fn=lambda battery_data: battery_data.energy_remaining, + value_fn=attrgetter("energy_remaining"), ), 
PowerwallSensorEntityDescription[BatteryResponse, str]( key="grid_state", From 4fda025106cdb111a02d34cc753879801f013e45 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:47:33 +0200 Subject: [PATCH 0239/1635] Fix implicit-return in wsdot (#122935) --- homeassistant/components/wsdot/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/wsdot/sensor.py b/homeassistant/components/wsdot/sensor.py index 3aae6746ea9..73714b75c95 100644 --- a/homeassistant/components/wsdot/sensor.py +++ b/homeassistant/components/wsdot/sensor.py @@ -6,6 +6,7 @@ from datetime import datetime, timedelta, timezone from http import HTTPStatus import logging import re +from typing import Any import requests import voluptuous as vol @@ -125,7 +126,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._state = self._data.get(ATTR_CURRENT_TIME) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return other details about the sensor state.""" if self._data is not None: attrs = {} @@ -140,6 +141,7 @@ class WashingtonStateTravelTimeSensor(WashingtonStateTransportSensor): self._data.get(ATTR_TIME_UPDATED) ) return attrs + return None def _parse_wsdot_timestamp(timestamp): From be8186126ef606d58e1770a78ce634e69dde0dc4 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:47:48 +0200 Subject: [PATCH 0240/1635] Fix implicit-return in valve (#122933) --- homeassistant/components/valve/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index e97a68c2e82..3814275b703 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -223,7 +223,8 @@ class ValveEntity(Entity): async def async_handle_open_valve(self) -> None: """Open the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - return await self.async_set_valve_position(100) + await self.async_set_valve_position(100) + return await self.async_open_valve() def close_valve(self) -> None: @@ -238,7 +239,8 @@ class ValveEntity(Entity): async def async_handle_close_valve(self) -> None: """Close the valve.""" if self.supported_features & ValveEntityFeature.SET_POSITION: - return await self.async_set_valve_position(0) + await self.async_set_valve_position(0) + return await self.async_close_valve() async def async_toggle(self) -> None: From c702ffa7dd3bf7e5a3edc44d6e1686310ef280e9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:48:30 +0200 Subject: [PATCH 0241/1635] Fix implicit-return in uk_transport (#122932) --- homeassistant/components/uk_transport/sensor.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/uk_transport/sensor.py b/homeassistant/components/uk_transport/sensor.py index 8e874be0bca..a86f7a1cc83 100644 --- a/homeassistant/components/uk_transport/sensor.py +++ b/homeassistant/components/uk_transport/sensor.py @@ -6,6 +6,7 @@ from datetime import datetime, timedelta from http import HTTPStatus import logging import re +from typing import Any import requests import voluptuous as vol @@ -196,10 +197,10 @@ class UkTransportLiveBusTimeSensor(UkTransportSensor): self._state = None @property - def extra_state_attributes(self): + def 
extra_state_attributes(self) -> dict[str, Any] | None: """Return other details about the sensor state.""" - attrs = {} if self._data is not None: + attrs = {ATTR_NEXT_BUSES: self._next_buses} for key in ( ATTR_ATCOCODE, ATTR_LOCALITY, @@ -207,8 +208,8 @@ class UkTransportLiveBusTimeSensor(UkTransportSensor): ATTR_REQUEST_TIME, ): attrs[key] = self._data.get(key) - attrs[ATTR_NEXT_BUSES] = self._next_buses return attrs + return None class UkTransportLiveTrainTimeSensor(UkTransportSensor): @@ -266,15 +267,17 @@ class UkTransportLiveTrainTimeSensor(UkTransportSensor): self._state = None @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return other details about the sensor state.""" - attrs = {} if self._data is not None: - attrs[ATTR_STATION_CODE] = self._station_code - attrs[ATTR_CALLING_AT] = self._calling_at + attrs = { + ATTR_STATION_CODE: self._station_code, + ATTR_CALLING_AT: self._calling_at, + } if self._next_trains: attrs[ATTR_NEXT_TRAINS] = self._next_trains return attrs + return None def _delta_mins(hhmm_time_str): From 9023d80d1b2835f0b66d07675bd9a075bbf76c18 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:48:51 +0200 Subject: [PATCH 0242/1635] Fix implicit-return in twitter (#122931) --- homeassistant/components/twitter/notify.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/twitter/notify.py b/homeassistant/components/twitter/notify.py index 66b076126b5..eef51ca9613 100644 --- a/homeassistant/components/twitter/notify.py +++ b/homeassistant/components/twitter/notify.py @@ -129,10 +129,11 @@ class TwitterNotificationService(BaseNotificationService): else: _LOGGER.debug("Message posted: %s", resp.json()) - def upload_media_then_callback(self, callback, media_path=None): + def upload_media_then_callback(self, callback, media_path=None) -> None: """Upload media.""" if not media_path: - return callback() + callback() + return with open(media_path, "rb") as file: total_bytes = os.path.getsize(media_path) @@ -141,7 +142,7 @@ class TwitterNotificationService(BaseNotificationService): if 199 > resp.status_code < 300: self.log_error_resp(resp) - return None + return media_id = resp.json()["media_id"] media_id = self.upload_media_chunked(file, total_bytes, media_id) @@ -149,10 +150,11 @@ class TwitterNotificationService(BaseNotificationService): resp = self.upload_media_finalize(media_id) if 199 > resp.status_code < 300: self.log_error_resp(resp) - return None + return if resp.json().get("processing_info") is None: - return callback(media_id) + callback(media_id) + return self.check_status_until_done(media_id, callback) @@ -209,7 +211,7 @@ class TwitterNotificationService(BaseNotificationService): "media/upload", {"command": "FINALIZE", "media_id": media_id} ) - def check_status_until_done(self, media_id, callback, *args): + def check_status_until_done(self, media_id, callback, *args) -> None: """Upload media, STATUS phase.""" resp = self.api.request( "media/upload", @@ -223,7 +225,8 @@ class TwitterNotificationService(BaseNotificationService): _LOGGER.debug("media processing %s status: %s", media_id, processing_info) if processing_info["state"] in {"succeeded", "failed"}: - return callback(media_id) + callback(media_id) + return check_after_secs = processing_info["check_after_secs"] _LOGGER.debug( From b8ac86939b0d4fd21d48c82c48b581d6f6fe97bc Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:49:06 +0200 Subject: [PATCH 0243/1635] Fix implicit-return in smartthings (#122927) --- homeassistant/components/smartthings/smartapp.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/smartthings/smartapp.py b/homeassistant/components/smartthings/smartapp.py index e2593dd7b10..6b0da00b132 100644 --- a/homeassistant/components/smartthings/smartapp.py +++ b/homeassistant/components/smartthings/smartapp.py @@ -16,6 +16,7 @@ from pysmartthings import ( CAPABILITIES, CLASSIFICATION_AUTOMATION, App, + AppEntity, AppOAuth, AppSettings, InstalledAppStatus, @@ -63,7 +64,7 @@ def format_unique_id(app_id: str, location_id: str) -> str: return f"{app_id}_{location_id}" -async def find_app(hass: HomeAssistant, api): +async def find_app(hass: HomeAssistant, api: SmartThings) -> AppEntity | None: """Find an existing SmartApp for this installation of hass.""" apps = await api.apps() for app in [app for app in apps if app.app_name.startswith(APP_NAME_PREFIX)]: @@ -74,6 +75,7 @@ async def find_app(hass: HomeAssistant, api): == hass.data[DOMAIN][CONF_INSTANCE_ID] ): return app + return None async def validate_installed_app(api, installed_app_id: str): From 9860109db91ce9de2b2294069325088691dec2ed Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:49:19 +0200 Subject: [PATCH 0244/1635] Fix implicit-return in satel_integra (#122925) --- homeassistant/components/satel_integra/binary_sensor.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/satel_integra/binary_sensor.py b/homeassistant/components/satel_integra/binary_sensor.py index 209b6c38cda..8ff54940635 100644 --- a/homeassistant/components/satel_integra/binary_sensor.py +++ b/homeassistant/components/satel_integra/binary_sensor.py @@ -109,10 +109,11 @@ class SatelIntegraBinarySensor(BinarySensorEntity): return self._name @property - def icon(self): + def icon(self) -> str | None: """Icon for device by its type.""" if self._zone_type is BinarySensorDeviceClass.SMOKE: return "mdi:fire" + return None @property def is_on(self): From 4a4209647e69a3c8d19c90d119cdf7c540f0058c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:49:40 +0200 Subject: [PATCH 0245/1635] Fix implicit-return in humidifier (#122921) --- homeassistant/components/humidifier/device_action.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/humidifier/device_action.py b/homeassistant/components/humidifier/device_action.py index de1d4c871e3..06440480277 100644 --- a/homeassistant/components/humidifier/device_action.py +++ b/homeassistant/components/humidifier/device_action.py @@ -99,9 +99,10 @@ async def async_call_action_from_config( service = const.SERVICE_SET_MODE service_data[ATTR_MODE] = config[ATTR_MODE] else: - return await toggle_entity.async_call_action_from_config( + await toggle_entity.async_call_action_from_config( hass, config, variables, context, DOMAIN ) + return await hass.services.async_call( DOMAIN, service, service_data, blocking=True, context=context From b31263b747860a27643f3f4373a6efd6980ce624 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:50:11 +0200 Subject: [PATCH 0246/1635] Fix implicit-return in itunes (#122917) --- homeassistant/components/itunes/media_player.py | 2 ++ 1 file changed, 2 insertions(+) 
diff --git a/homeassistant/components/itunes/media_player.py b/homeassistant/components/itunes/media_player.py index c32ca287793..0f241041c0d 100644 --- a/homeassistant/components/itunes/media_player.py +++ b/homeassistant/components/itunes/media_player.py @@ -135,6 +135,8 @@ class Itunes: path = f"/playlists/{playlist['id']}/play" return self._request("PUT", path) + raise ValueError(f"Playlist {playlist_id_or_name} not found") + def artwork_url(self): """Return a URL of the current track's album art.""" return f"{self._base_url}/artwork" From d878d744e7b56de8465be1ffeaf22f79b245a0a2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:50:31 +0200 Subject: [PATCH 0247/1635] Fix implicit-return in irish_rail_transport (#122916) --- homeassistant/components/irish_rail_transport/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/irish_rail_transport/sensor.py b/homeassistant/components/irish_rail_transport/sensor.py index a96846558fa..39bf39bcbe0 100644 --- a/homeassistant/components/irish_rail_transport/sensor.py +++ b/homeassistant/components/irish_rail_transport/sensor.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from pyirishrail.pyirishrail import IrishRailRTPI import voluptuous as vol @@ -104,7 +105,7 @@ class IrishRailTransportSensor(SensorEntity): return self._state @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes.""" if self._times: next_up = "None" @@ -127,6 +128,7 @@ class IrishRailTransportSensor(SensorEntity): ATTR_NEXT_UP: next_up, ATTR_TRAIN_TYPE: self._times[0][ATTR_TRAIN_TYPE], } + return None @property def native_unit_of_measurement(self): From 220f6860787fcbe869a5356137fed821b36f3dfa Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 31 Jul 2024 20:51:24 +0200 Subject: [PATCH 0248/1635] Remove invalid type hint and assignment in number (#122906) --- homeassistant/components/number/significant_change.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/number/significant_change.py b/homeassistant/components/number/significant_change.py index 14cb2246615..e8cdd78e321 100644 --- a/homeassistant/components/number/significant_change.py +++ b/homeassistant/components/number/significant_change.py @@ -44,7 +44,6 @@ def async_check_significant_change( if (device_class := new_attrs.get(ATTR_DEVICE_CLASS)) is None: return None - absolute_change: float | None = None percentage_change: float | None = None # special for temperature @@ -83,11 +82,8 @@ def async_check_significant_change( # Old state was invalid, we should report again return True - if absolute_change is not None and percentage_change is not None: + if percentage_change is not None: return _absolute_and_relative_change( float(old_state), float(new_state), absolute_change, percentage_change ) - if absolute_change is not None: - return check_absolute_change( - float(old_state), float(new_state), absolute_change - ) + return check_absolute_change(float(old_state), float(new_state), absolute_change) From 17f34b452ed4f3f3b8a2e98229fc19f7b51d3214 Mon Sep 17 00:00:00 2001 From: alexfp14 Date: Wed, 31 Jul 2024 20:52:32 +0200 Subject: [PATCH 0249/1635] =?UTF-8?q?Add=20HVAC=20mode=20support=20for=20A?= =?UTF-8?q?tlanticPassAPCHeatPumpMainComponent=20(heati=E2=80=A6=20(#12217?= =?UTF-8?q?5)?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Mick Vleeshouwer --- CODEOWNERS | 4 +- .../overkiz/climate_entities/__init__.py | 4 ++ ...antic_pass_apc_heat_pump_main_component.py | 65 +++++++++++++++++++ homeassistant/components/overkiz/const.py | 1 + .../components/overkiz/manifest.json | 3 +- 5 files changed, 74 insertions(+), 3 deletions(-) create mode 100644 homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py diff --git a/CODEOWNERS b/CODEOWNERS index bf93676f962..4f8c03fe90f 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1051,8 +1051,8 @@ build.json @home-assistant/supervisor /tests/components/otbr/ @home-assistant/core /homeassistant/components/ourgroceries/ @OnFreund /tests/components/ourgroceries/ @OnFreund -/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 -/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 +/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14 +/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14 /homeassistant/components/ovo_energy/ @timmo001 /tests/components/ovo_energy/ @timmo001 /homeassistant/components/p1_monitor/ @klaasnicolaas diff --git a/homeassistant/components/overkiz/climate_entities/__init__.py b/homeassistant/components/overkiz/climate_entities/__init__.py index df997f7a68e..ac864686432 100644 --- a/homeassistant/components/overkiz/climate_entities/__init__.py +++ b/homeassistant/components/overkiz/climate_entities/__init__.py @@ -11,6 +11,9 @@ from .atlantic_electrical_heater_with_adjustable_temperature_setpoint import ( ) from .atlantic_electrical_towel_dryer import AtlanticElectricalTowelDryer from .atlantic_heat_recovery_ventilation import AtlanticHeatRecoveryVentilation +from .atlantic_pass_apc_heat_pump_main_component import ( + AtlanticPassAPCHeatPumpMainComponent, +) from .atlantic_pass_apc_heating_zone import AtlanticPassAPCHeatingZone from .atlantic_pass_apc_zone_control import AtlanticPassAPCZoneControl from .atlantic_pass_apc_zone_control_zone import AtlanticPassAPCZoneControlZone @@ -43,6 +46,7 @@ WIDGET_TO_CLIMATE_ENTITY = { UIWidget.SOMFY_HEATING_TEMPERATURE_INTERFACE: SomfyHeatingTemperatureInterface, UIWidget.SOMFY_THERMOSTAT: SomfyThermostat, UIWidget.VALVE_HEATING_TEMPERATURE_INTERFACE: ValveHeatingTemperatureInterface, + UIWidget.ATLANTIC_PASS_APC_HEAT_PUMP: AtlanticPassAPCHeatPumpMainComponent, } # For Atlantic APC, some devices are standalone and control themselves, some others needs to be diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py new file mode 100644 index 00000000000..1cd13205b13 --- /dev/null +++ b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_heat_pump_main_component.py @@ -0,0 +1,65 @@ +"""Support for Atlantic Pass APC Heat Pump Main Component.""" + +from __future__ import annotations + +from asyncio import sleep +from typing import cast + +from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import UnitOfTemperature + +from ..const import DOMAIN +from ..entity import OverkizEntity + +OVERKIZ_TO_HVAC_MODES: dict[str, HVACMode] = { + 
OverkizCommandParam.STOP: HVACMode.OFF, + OverkizCommandParam.HEATING: HVACMode.HEAT, + OverkizCommandParam.COOLING: HVACMode.COOL, +} + +HVAC_MODES_TO_OVERKIZ = {v: k for k, v in OVERKIZ_TO_HVAC_MODES.items()} + + +class AtlanticPassAPCHeatPumpMainComponent(OverkizEntity, ClimateEntity): + """Representation of Atlantic Pass APC Heat Pump Main Component. + + This component can only turn off the heating pump and select the working mode: heating or cooling. + To set new temperatures, they must be selected individually per Zones (ie: AtlanticPassAPCHeatingAndCoolingZone). + Once the Device is switched on into heating or cooling mode, the Heat Pump will be activated and will use + the default temperature configuration for each available zone. + """ + + _attr_hvac_modes = [*HVAC_MODES_TO_OVERKIZ] + _attr_supported_features = ( + ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = DOMAIN + _enable_turn_on_off_backwards_compatibility = False + + @property + def hvac_mode(self) -> HVACMode: + """Return hvac current mode: stop, cooling, heating.""" + return OVERKIZ_TO_HVAC_MODES[ + cast( + str, self.executor.select_state(OverkizState.IO_PASS_APC_OPERATING_MODE) + ) + ] + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode: stop, cooling, heating.""" + # They are mainly managed by the Zone Control device + # However, we can turn off or put the heat pump in cooling/ heating mode. + await self.executor.async_execute_command( + OverkizCommand.SET_PASS_APC_OPERATING_MODE, + HVAC_MODES_TO_OVERKIZ[hvac_mode], + ) + + # Wait for 2 seconds to ensure the HVAC mode change is properly applied and system stabilizes. + await sleep(2) diff --git a/homeassistant/components/overkiz/const.py b/homeassistant/components/overkiz/const.py index 59acc4ac232..a90260e0f0f 100644 --- a/homeassistant/components/overkiz/const.py +++ b/homeassistant/components/overkiz/const.py @@ -95,6 +95,7 @@ OVERKIZ_DEVICE_TO_PLATFORM: dict[UIClass | UIWidget, Platform | None] = { UIWidget.ATLANTIC_ELECTRICAL_TOWEL_DRYER: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.ATLANTIC_HEAT_RECOVERY_VENTILATION: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.ATLANTIC_PASS_APC_DHW: Platform.WATER_HEATER, # widgetName, uiClass is WaterHeatingSystem (not supported) + UIWidget.ATLANTIC_PASS_APC_HEAT_PUMP: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.ATLANTIC_PASS_APC_HEATING_AND_COOLING_ZONE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.ATLANTIC_PASS_APC_HEATING_ZONE: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) UIWidget.ATLANTIC_PASS_APC_ZONE_CONTROL: Platform.CLIMATE, # widgetName, uiClass is HeatingSystem (not supported) diff --git a/homeassistant/components/overkiz/manifest.json b/homeassistant/components/overkiz/manifest.json index 8825c09e0ff..19850f0b57e 100644 --- a/homeassistant/components/overkiz/manifest.json +++ b/homeassistant/components/overkiz/manifest.json @@ -6,7 +6,8 @@ "@vlebourl", "@tetienne", "@nyroDev", - "@tronix117" + "@tronix117", + "@alexfp14" ], "config_flow": true, "dhcp": [ From 291036964756c01204c80788bf355d9ee7458876 Mon Sep 17 00:00:00 2001 From: Jack Gaino Date: Wed, 31 Jul 2024 15:00:04 -0400 Subject: [PATCH 0250/1635] Optionally return response data when calling services through the API (#115046) Co-authored-by: J. 
Nick Koston Co-authored-by: Erik Montnemery --- homeassistant/components/api/__init__.py | 29 ++++++++++- tests/components/api/test_init.py | 62 ++++++++++++++++++++++++ 2 files changed, 90 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/api/__init__.py b/homeassistant/components/api/__init__.py index b794b60b33d..ba71fb0def1 100644 --- a/homeassistant/components/api/__init__.py +++ b/homeassistant/components/api/__init__.py @@ -390,6 +390,27 @@ class APIDomainServicesView(HomeAssistantView): ) context = self.context(request) + if not hass.services.has_service(domain, service): + raise HTTPBadRequest from ServiceNotFound(domain, service) + + if response_requested := "return_response" in request.query: + if ( + hass.services.supports_response(domain, service) + is ha.SupportsResponse.NONE + ): + return self.json_message( + "Service does not support responses. Remove return_response from request.", + HTTPStatus.BAD_REQUEST, + ) + elif ( + hass.services.supports_response(domain, service) is ha.SupportsResponse.ONLY + ): + return self.json_message( + "Service call requires responses but caller did not ask for responses. " + "Add ?return_response to query parameters.", + HTTPStatus.BAD_REQUEST, + ) + changed_states: list[json_fragment] = [] @ha.callback @@ -406,13 +427,14 @@ class APIDomainServicesView(HomeAssistantView): try: # shield the service call from cancellation on connection drop - await shield( + response = await shield( hass.services.async_call( domain, service, data, # type: ignore[arg-type] blocking=True, context=context, + return_response=response_requested, ) ) except (vol.Invalid, ServiceNotFound) as ex: @@ -420,6 +442,11 @@ class APIDomainServicesView(HomeAssistantView): finally: cancel_listen() + if response_requested: + return self.json( + {"changed_states": changed_states, "service_response": response} + ) + return self.json(changed_states) diff --git a/tests/components/api/test_init.py b/tests/components/api/test_init.py index c283aeb718e..abce262fd12 100644 --- a/tests/components/api/test_init.py +++ b/tests/components/api/test_init.py @@ -3,6 +3,7 @@ import asyncio from http import HTTPStatus import json +from typing import Any from unittest.mock import patch from aiohttp import ServerDisconnectedError, web @@ -355,6 +356,67 @@ async def test_api_call_service_with_data( assert state["attributes"] == {"data": 1} +SERVICE_DICT = {"changed_states": [], "service_response": {"foo": "bar"}} +RESP_REQUIRED = { + "message": ( + "Service call requires responses but caller did not ask for " + "responses. Add ?return_response to query parameters." + ) +} +RESP_UNSUPPORTED = { + "message": "Service does not support responses. Remove return_response from request." 
+} + + +@pytest.mark.parametrize( + ( + "supports_response", + "requested_response", + "expected_number_of_service_calls", + "expected_status", + "expected_response", + ), + [ + (ha.SupportsResponse.ONLY, True, 1, HTTPStatus.OK, SERVICE_DICT), + (ha.SupportsResponse.ONLY, False, 0, HTTPStatus.BAD_REQUEST, RESP_REQUIRED), + (ha.SupportsResponse.OPTIONAL, True, 1, HTTPStatus.OK, SERVICE_DICT), + (ha.SupportsResponse.OPTIONAL, False, 1, HTTPStatus.OK, []), + (ha.SupportsResponse.NONE, True, 0, HTTPStatus.BAD_REQUEST, RESP_UNSUPPORTED), + (ha.SupportsResponse.NONE, False, 1, HTTPStatus.OK, []), + ], +) +async def test_api_call_service_returns_response_requested_response( + hass: HomeAssistant, + mock_api_client: TestClient, + supports_response: ha.SupportsResponse, + requested_response: bool, + expected_number_of_service_calls: int, + expected_status: int, + expected_response: Any, +) -> None: + """Test if the API allows us to call a service.""" + test_value = [] + + @ha.callback + def listener(service_call): + """Record that our service got called.""" + test_value.append(1) + return {"foo": "bar"} + + hass.services.async_register( + "test_domain", "test_service", listener, supports_response=supports_response + ) + + resp = await mock_api_client.post( + "/api/services/test_domain/test_service" + + ("?return_response" if requested_response else "") + ) + assert resp.status == expected_status + await hass.async_block_till_done() + assert len(test_value) == expected_number_of_service_calls + assert await resp.json() == expected_response + + async def test_api_call_service_client_closed( hass: HomeAssistant, mock_api_client: TestClient ) -> None: From 7bc2381a453dc88b7b456be2d1e6ef415c6fb230 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Wed, 31 Jul 2024 21:24:15 +0200 Subject: [PATCH 0251/1635] Add Pinecil virtual integration supported by IronOS (#122803) --- homeassistant/components/pinecil/__init__.py | 1 + homeassistant/components/pinecil/manifest.json | 6 ++++++ homeassistant/generated/integrations.json | 5 +++++ 3 files changed, 12 insertions(+) create mode 100644 homeassistant/components/pinecil/__init__.py create mode 100644 homeassistant/components/pinecil/manifest.json diff --git a/homeassistant/components/pinecil/__init__.py b/homeassistant/components/pinecil/__init__.py new file mode 100644 index 00000000000..a0e84725435 --- /dev/null +++ b/homeassistant/components/pinecil/__init__.py @@ -0,0 +1 @@ +"""Pinecil integration.""" diff --git a/homeassistant/components/pinecil/manifest.json b/homeassistant/components/pinecil/manifest.json new file mode 100644 index 00000000000..4ec6e75cfcb --- /dev/null +++ b/homeassistant/components/pinecil/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "pinecil", + "name": "Pinecil", + "integration_type": "virtual", + "supported_by": "iron_os" +} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 4de325a0c6e..597cd8be936 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4599,6 +4599,11 @@ "config_flow": false, "iot_class": "local_push" }, + "pinecil": { + "name": "Pinecil", + "integration_type": "virtual", + "supported_by": "iron_os" + }, "ping": { "name": "Ping (ICMP)", "integration_type": "hub", From 5fefa606b6c3aaf38e7555e18037c18adb645a9b Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Wed, 31 Jul 2024 21:31:09 +0200 Subject: [PATCH 0252/1635] Add ElevenLabs text-to-speech integration (#115645) * 
Add ElevenLabs text-to-speech integration * Remove commented out code * Use model_id instead of model_name for elevenlabs api * Apply suggestions from code review Co-authored-by: Sid <27780930+autinerd@users.noreply.github.com> * Use async client instead of sync * Add ElevenLabs code owner * Apply suggestions from code review Co-authored-by: Paulus Schoutsen * Set entity title to voice * Rename to elevenlabs * Apply suggestions from code review Co-authored-by: Paulus Schoutsen * Allow multiple voices and options flow * Sort default voice at beginning * Rework config flow to include default model and reloading on options flow * Add error to strings * Add ElevenLabsData and suggestions from code review * Shorten options and config flow * Fix comments * Fix comments * Add wip * Fix * Cleanup * Bump elevenlabs version * Add data description * Fix --------- Co-authored-by: Sid <27780930+autinerd@users.noreply.github.com> Co-authored-by: Paulus Schoutsen Co-authored-by: Michael Hansen Co-authored-by: Joostlek Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- .strict-typing | 1 + CODEOWNERS | 2 + .../components/elevenlabs/__init__.py | 71 +++++ .../components/elevenlabs/config_flow.py | 145 ++++++++++ homeassistant/components/elevenlabs/const.py | 7 + .../components/elevenlabs/manifest.json | 11 + .../components/elevenlabs/strings.json | 31 ++ homeassistant/components/elevenlabs/tts.py | 116 ++++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/elevenlabs/__init__.py | 1 + tests/components/elevenlabs/conftest.py | 65 +++++ tests/components/elevenlabs/const.py | 52 ++++ .../components/elevenlabs/test_config_flow.py | 94 ++++++ tests/components/elevenlabs/test_tts.py | 270 ++++++++++++++++++ 18 files changed, 889 insertions(+) create mode 100644 homeassistant/components/elevenlabs/__init__.py create mode 100644 homeassistant/components/elevenlabs/config_flow.py create mode 100644 homeassistant/components/elevenlabs/const.py create mode 100644 homeassistant/components/elevenlabs/manifest.json create mode 100644 homeassistant/components/elevenlabs/strings.json create mode 100644 homeassistant/components/elevenlabs/tts.py create mode 100644 tests/components/elevenlabs/__init__.py create mode 100644 tests/components/elevenlabs/conftest.py create mode 100644 tests/components/elevenlabs/const.py create mode 100644 tests/components/elevenlabs/test_config_flow.py create mode 100644 tests/components/elevenlabs/test_tts.py diff --git a/.strict-typing b/.strict-typing index a4f6d198d97..02d9968d247 100644 --- a/.strict-typing +++ b/.strict-typing @@ -168,6 +168,7 @@ homeassistant.components.ecowitt.* homeassistant.components.efergy.* homeassistant.components.electrasmart.* homeassistant.components.electric_kiwi.* +homeassistant.components.elevenlabs.* homeassistant.components.elgato.* homeassistant.components.elkm1.* homeassistant.components.emulated_hue.* diff --git a/CODEOWNERS b/CODEOWNERS index 4f8c03fe90f..e90def993d2 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -376,6 +376,8 @@ build.json @home-assistant/supervisor /tests/components/electrasmart/ @jafar-atili /homeassistant/components/electric_kiwi/ @mikey0000 /tests/components/electric_kiwi/ @mikey0000 +/homeassistant/components/elevenlabs/ @sorgfresser +/tests/components/elevenlabs/ @sorgfresser /homeassistant/components/elgato/ @frenck /tests/components/elgato/ @frenck 
/homeassistant/components/elkm1/ @gwww @bdraco diff --git a/homeassistant/components/elevenlabs/__init__.py b/homeassistant/components/elevenlabs/__init__.py new file mode 100644 index 00000000000..99cddd783e2 --- /dev/null +++ b/homeassistant/components/elevenlabs/__init__.py @@ -0,0 +1,71 @@ +"""The ElevenLabs text-to-speech integration.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from elevenlabs import Model +from elevenlabs.client import AsyncElevenLabs +from elevenlabs.core import ApiError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError + +from .const import CONF_MODEL + +PLATFORMS: list[Platform] = [Platform.TTS] + + +async def get_model_by_id(client: AsyncElevenLabs, model_id: str) -> Model | None: + """Get ElevenLabs model from their API by the model_id.""" + models = await client.models.get_all() + for maybe_model in models: + if maybe_model.model_id == model_id: + return maybe_model + return None + + +@dataclass(kw_only=True, slots=True) +class ElevenLabsData: + """ElevenLabs data type.""" + + client: AsyncElevenLabs + model: Model + + +type EleventLabsConfigEntry = ConfigEntry[ElevenLabsData] + + +async def async_setup_entry(hass: HomeAssistant, entry: EleventLabsConfigEntry) -> bool: + """Set up ElevenLabs text-to-speech from a config entry.""" + entry.add_update_listener(update_listener) + client = AsyncElevenLabs(api_key=entry.data[CONF_API_KEY]) + model_id = entry.options[CONF_MODEL] + try: + model = await get_model_by_id(client, model_id) + except ApiError as err: + raise ConfigEntryError("Auth failed") from err + + if model is None or (not model.languages): + raise ConfigEntryError("Model could not be resolved") + + entry.runtime_data = ElevenLabsData(client=client, model=model) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: EleventLabsConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener( + hass: HomeAssistant, config_entry: EleventLabsConfigEntry +) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(config_entry.entry_id) diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py new file mode 100644 index 00000000000..cf04304510a --- /dev/null +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -0,0 +1,145 @@ +"""Config flow for ElevenLabs text-to-speech integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from elevenlabs.client import AsyncElevenLabs +from elevenlabs.core import ApiError +import voluptuous as vol + +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, + OptionsFlowWithConfigEntry, +) +from homeassistant.const import CONF_API_KEY +from homeassistant.helpers.selector import ( + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, +) + +from .const import CONF_MODEL, CONF_VOICE, DEFAULT_MODEL, DOMAIN + +USER_STEP_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) + + +_LOGGER = logging.getLogger(__name__) + + +async def get_voices_models(api_key: str) -> tuple[dict[str, str], dict[str, str]]: + """Get available voices and models as dicts.""" + 
client = AsyncElevenLabs(api_key=api_key) + voices = (await client.voices.get_all()).voices + models = await client.models.get_all() + voices_dict = { + voice.voice_id: voice.name + for voice in sorted(voices, key=lambda v: v.name or "") + if voice.name + } + models_dict = { + model.model_id: model.name + for model in sorted(models, key=lambda m: m.name or "") + if model.name and model.can_do_text_to_speech + } + return voices_dict, models_dict + + +class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for ElevenLabs text-to-speech.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + voices, _ = await get_voices_models(user_input[CONF_API_KEY]) + except ApiError: + errors["base"] = "invalid_api_key" + else: + return self.async_create_entry( + title="ElevenLabs", + data=user_input, + options={CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: list(voices)[0]}, + ) + return self.async_show_form( + step_id="user", data_schema=USER_STEP_SCHEMA, errors=errors + ) + + @staticmethod + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Create the options flow.""" + return ElevenLabsOptionsFlow(config_entry) + + +class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): + """ElevenLabs options flow.""" + + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + super().__init__(config_entry) + self.api_key: str = self.config_entry.data[CONF_API_KEY] + # id -> name + self.voices: dict[str, str] = {} + self.models: dict[str, str] = {} + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if not self.voices or not self.models: + self.voices, self.models = await get_voices_models(self.api_key) + + assert self.models and self.voices + + if user_input is not None: + return self.async_create_entry( + title="ElevenLabs", + data=user_input, + ) + + schema = self.elevenlabs_config_option_schema() + return self.async_show_form( + step_id="init", + data_schema=schema, + ) + + def elevenlabs_config_option_schema(self) -> vol.Schema: + """Elevenlabs options schema.""" + return self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required( + CONF_MODEL, + ): SelectSelector( + SelectSelectorConfig( + options=[ + SelectOptionDict(label=model_name, value=model_id) + for model_id, model_name in self.models.items() + ] + ) + ), + vol.Required( + CONF_VOICE, + ): SelectSelector( + SelectSelectorConfig( + options=[ + SelectOptionDict(label=voice_name, value=voice_id) + for voice_id, voice_name in self.voices.items() + ] + ) + ), + } + ), + self.options, + ) diff --git a/homeassistant/components/elevenlabs/const.py b/homeassistant/components/elevenlabs/const.py new file mode 100644 index 00000000000..c0fc3c7b1b0 --- /dev/null +++ b/homeassistant/components/elevenlabs/const.py @@ -0,0 +1,7 @@ +"""Constants for the ElevenLabs text-to-speech integration.""" + +CONF_VOICE = "voice" +CONF_MODEL = "model" +DOMAIN = "elevenlabs" + +DEFAULT_MODEL = "eleven_multilingual_v2" diff --git a/homeassistant/components/elevenlabs/manifest.json b/homeassistant/components/elevenlabs/manifest.json new file mode 100644 index 00000000000..968ea7b688a --- /dev/null +++ b/homeassistant/components/elevenlabs/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "elevenlabs", + "name": "ElevenLabs", + "codeowners": 
["@sorgfresser"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/elevenlabs", + "integration_type": "service", + "iot_class": "cloud_polling", + "loggers": ["elevenlabs"], + "requirements": ["elevenlabs==1.6.1"] +} diff --git a/homeassistant/components/elevenlabs/strings.json b/homeassistant/components/elevenlabs/strings.json new file mode 100644 index 00000000000..16b40137090 --- /dev/null +++ b/homeassistant/components/elevenlabs/strings.json @@ -0,0 +1,31 @@ +{ + "config": { + "step": { + "user": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "api_key": "Your Elevenlabs API key." + } + } + }, + "error": { + "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]" + } + }, + "options": { + "step": { + "init": { + "data": { + "voice": "Voice", + "model": "Model" + }, + "data_description": { + "voice": "Voice to use for the TTS.", + "model": "ElevenLabs model to use. Please note that not all models support all languages equally well." + } + } + } + } +} diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py new file mode 100644 index 00000000000..35ba6053cd8 --- /dev/null +++ b/homeassistant/components/elevenlabs/tts.py @@ -0,0 +1,116 @@ +"""Support for the ElevenLabs text-to-speech service.""" + +from __future__ import annotations + +import logging +from typing import Any + +from elevenlabs.client import AsyncElevenLabs +from elevenlabs.core import ApiError +from elevenlabs.types import Model, Voice as ElevenLabsVoice + +from homeassistant.components.tts import ( + ATTR_VOICE, + TextToSpeechEntity, + TtsAudioType, + Voice, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import EleventLabsConfigEntry +from .const import CONF_VOICE, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: EleventLabsConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up ElevenLabs tts platform via config entry.""" + client = config_entry.runtime_data.client + voices = (await client.voices.get_all()).voices + default_voice_id = config_entry.options[CONF_VOICE] + async_add_entities( + [ + ElevenLabsTTSEntity( + client, + config_entry.runtime_data.model, + voices, + default_voice_id, + config_entry.entry_id, + config_entry.title, + ) + ] + ) + + +class ElevenLabsTTSEntity(TextToSpeechEntity): + """The ElevenLabs API entity.""" + + _attr_supported_options = [ATTR_VOICE] + + def __init__( + self, + client: AsyncElevenLabs, + model: Model, + voices: list[ElevenLabsVoice], + default_voice_id: str, + entry_id: str, + title: str, + ) -> None: + """Init ElevenLabs TTS service.""" + self._client = client + self._model = model + self._default_voice_id = default_voice_id + self._voices = sorted( + (Voice(v.voice_id, v.name) for v in voices if v.name), + key=lambda v: v.name, + ) + # Default voice first + voice_indices = [ + idx for idx, v in enumerate(self._voices) if v.voice_id == default_voice_id + ] + if voice_indices: + self._voices.insert(0, self._voices.pop(voice_indices[0])) + self._attr_unique_id = entry_id + self._attr_name = title + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, entry_id)}, + manufacturer="ElevenLabs", + model=model.name, + entry_type=DeviceEntryType.SERVICE, + ) + self._attr_supported_languages = [ + lang.language_id for lang in self._model.languages or [] + ] + self._attr_default_language = self.supported_languages[0] + + def async_get_supported_voices(self, language: str) -> list[Voice]: + """Return a list of supported voices for a language.""" + return self._voices + + async def async_get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load tts audio file from the engine.""" + _LOGGER.debug("Getting TTS audio for %s", message) + _LOGGER.debug("Options: %s", options) + voice_id = options[ATTR_VOICE] + try: + audio = await self._client.generate( + text=message, + voice=voice_id, + model=self._model.model_id, + ) + bytes_combined = b"".join([byte_seg async for byte_seg in audio]) + except ApiError as exc: + _LOGGER.warning( + "Error during processing of TTS request %s", exc, exc_info=True + ) + raise HomeAssistantError(exc) from exc + return "mp3", bytes_combined diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index d350a58f3c6..0c37cf9c412 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -150,6 +150,7 @@ FLOWS = { "efergy", "electrasmart", "electric_kiwi", + "elevenlabs", "elgato", "elkm1", "elmax", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 597cd8be936..3cc3ea71df9 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1516,6 +1516,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "elevenlabs": { + "name": "ElevenLabs", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "elgato": { "name": "Elgato", "integrations": { diff --git a/mypy.ini b/mypy.ini index dd7904d798b..0f4f8907612 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1436,6 +1436,16 @@ 
disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.elevenlabs.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.elgato.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 2cae4548956..d00605c9893 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -779,6 +779,9 @@ ecoaliface==0.4.0 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 +# homeassistant.components.elevenlabs +elevenlabs==1.6.1 + # homeassistant.components.elgato elgato==5.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1d2f345cf1a..fd200794990 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -660,6 +660,9 @@ easyenergy==2.1.2 # homeassistant.components.electric_kiwi electrickiwi-api==0.8.5 +# homeassistant.components.elevenlabs +elevenlabs==1.6.1 + # homeassistant.components.elgato elgato==5.1.2 diff --git a/tests/components/elevenlabs/__init__.py b/tests/components/elevenlabs/__init__.py new file mode 100644 index 00000000000..261286f04f7 --- /dev/null +++ b/tests/components/elevenlabs/__init__.py @@ -0,0 +1 @@ +"""Tests for the ElevenLabs integration.""" diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py new file mode 100644 index 00000000000..13eb022243f --- /dev/null +++ b/tests/components/elevenlabs/conftest.py @@ -0,0 +1,65 @@ +"""Common fixtures for the ElevenLabs text-to-speech tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from elevenlabs.core import ApiError +from elevenlabs.types import GetVoicesResponse +import pytest + +from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE +from homeassistant.const import CONF_API_KEY + +from .const import MOCK_MODELS, MOCK_VOICES + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.elevenlabs.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_async_client() -> Generator[AsyncMock, None, None]: + """Override async ElevenLabs client.""" + client_mock = AsyncMock() + client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) + client_mock.models.get_all.return_value = MOCK_MODELS + with patch( + "elevenlabs.client.AsyncElevenLabs", return_value=client_mock + ) as mock_async_client: + yield mock_async_client + + +@pytest.fixture +def mock_async_client_fail() -> Generator[AsyncMock, None, None]: + """Override async ElevenLabs client.""" + with patch( + "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", + return_value=AsyncMock(), + ) as mock_async_client: + mock_async_client.side_effect = ApiError + yield mock_async_client + + +@pytest.fixture +def mock_entry() -> MockConfigEntry: + """Mock a config entry.""" + entry = MockConfigEntry( + domain="elevenlabs", + data={ + CONF_API_KEY: "api_key", + }, + options={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, + ) + entry.models = { + "model1": "model1", + } + + entry.voices = {"voice1": "voice1"} + return entry diff --git 
a/tests/components/elevenlabs/const.py b/tests/components/elevenlabs/const.py new file mode 100644 index 00000000000..e16e1fd1334 --- /dev/null +++ b/tests/components/elevenlabs/const.py @@ -0,0 +1,52 @@ +"""Constants for the Testing of the ElevenLabs text-to-speech integration.""" + +from elevenlabs.types import LanguageResponse, Model, Voice + +from homeassistant.components.elevenlabs.const import DEFAULT_MODEL + +MOCK_VOICES = [ + Voice( + voice_id="voice1", + name="Voice 1", + ), + Voice( + voice_id="voice2", + name="Voice 2", + ), +] + +MOCK_MODELS = [ + Model( + model_id="model1", + name="Model 1", + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), + Model( + model_id="model2", + name="Model 2", + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), + Model( + model_id=DEFAULT_MODEL, + name=DEFAULT_MODEL, + can_do_text_to_speech=True, + languages=[ + LanguageResponse(language_id="en", name="English"), + LanguageResponse(language_id="de", name="German"), + LanguageResponse(language_id="es", name="Spanish"), + LanguageResponse(language_id="ja", name="Japanese"), + ], + ), +] diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py new file mode 100644 index 00000000000..853c49d48ff --- /dev/null +++ b/tests/components/elevenlabs/test_config_flow.py @@ -0,0 +1,94 @@ +"""Test the ElevenLabs text-to-speech config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.elevenlabs.const import ( + CONF_MODEL, + CONF_VOICE, + DEFAULT_MODEL, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_user_step( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, +) -> None: + """Test user step create entry result.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "ElevenLabs" + assert result["data"] == { + "api_key": "api_key", + } + assert result["options"] == {CONF_MODEL: DEFAULT_MODEL, CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() + + +async def test_invalid_api_key( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_async_client_fail: AsyncMock +) -> None: + """Test user step with invalid api key.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_API_KEY: "api_key", + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] + + 
mock_setup_entry.assert_not_called() + + +async def test_options_flow_init( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, + mock_entry: MockConfigEntry, +) -> None: + """Test options flow init.""" + mock_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={CONF_MODEL: "model1", CONF_VOICE: "voice1"}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_entry.options == {CONF_MODEL: "model1", CONF_VOICE: "voice1"} + + mock_setup_entry.assert_called_once() diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py new file mode 100644 index 00000000000..7fa289f24ed --- /dev/null +++ b/tests/components/elevenlabs/test_tts.py @@ -0,0 +1,270 @@ +"""Tests for the ElevenLabs TTS entity.""" + +from __future__ import annotations + +from http import HTTPStatus +from typing import Any +from unittest.mock import AsyncMock, patch + +from elevenlabs.core import ApiError +from elevenlabs.types import GetVoicesResponse +import pytest + +from homeassistant.components import tts +from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE, DOMAIN +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + DOMAIN as DOMAIN_MP, + SERVICE_PLAY_MEDIA, +) +from homeassistant.config import async_process_ha_core_config +from homeassistant.const import ATTR_ENTITY_ID, CONF_API_KEY +from homeassistant.core import HomeAssistant, ServiceCall + +from .const import MOCK_MODELS, MOCK_VOICES + +from tests.common import MockConfigEntry, async_mock_service +from tests.components.tts.common import retrieve_media +from tests.typing import ClientSessionGenerator + + +@pytest.fixture(autouse=True) +def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock): + """Mock writing tags.""" + + +@pytest.fixture(autouse=True) +def mock_tts_cache_dir_autouse(mock_tts_cache_dir): + """Mock the TTS cache dir with empty dir.""" + return mock_tts_cache_dir + + +@pytest.fixture +async def calls(hass: HomeAssistant) -> list[ServiceCall]: + """Mock media player calls.""" + return async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) + + +@pytest.fixture(autouse=True) +async def setup_internal_url(hass: HomeAssistant) -> None: + """Set up internal url.""" + await async_process_ha_core_config( + hass, {"internal_url": "http://example.local:8123"} + ) + + +@pytest.fixture(name="setup") +async def setup_fixture( + hass: HomeAssistant, + config_data: dict[str, Any], + config_options: dict[str, Any], + request: pytest.FixtureRequest, + mock_async_client: AsyncMock, +) -> AsyncMock: + """Set up the test environment.""" + if request.param == "mock_config_entry_setup": + await mock_config_entry_setup(hass, config_data, config_options) + else: + raise RuntimeError("Invalid setup fixture") + + await hass.async_block_till_done() + return mock_async_client + + +@pytest.fixture(name="config_data") +def config_data_fixture() -> dict[str, Any]: + """Return config data.""" + return {} + + +@pytest.fixture(name="config_options") +def config_options_fixture() -> dict[str, Any]: + """Return config options.""" + return {} + + +async def mock_config_entry_setup( + hass: HomeAssistant, config_data: 
dict[str, Any], config_options: dict[str, Any] +) -> None: + """Mock config entry setup.""" + default_config_data = { + CONF_API_KEY: "api_key", + } + default_config_options = { + CONF_VOICE: "voice1", + CONF_MODEL: "model1", + } + config_entry = MockConfigEntry( + domain=DOMAIN, + data=default_config_data | config_data, + options=default_config_options | config_options, + ) + config_entry.add_to_hass(hass) + client_mock = AsyncMock() + client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) + client_mock.models.get_all.return_value = MOCK_MODELS + with patch( + "homeassistant.components.elevenlabs.AsyncElevenLabs", return_value=client_mock + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + + +@pytest.mark.parametrize( + "config_data", + [ + {}, + {tts.CONF_LANG: "de"}, + {tts.CONF_LANG: "en"}, + {tts.CONF_LANG: "ja"}, + {tts.CONF_LANG: "es"}, + ], +) +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test tts service.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice2", model="model1" + ) + + +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_LANGUAGE: "de", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_LANGUAGE: "es", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_lang_config( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with other langcodes in the config.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice1", model="model1" + ) + + +@pytest.mark.parametrize( + ("setup", 
"tts_service", "service_data"), + [ + ( + "mock_config_entry_setup", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice1"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_error( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], +) -> None: + """Test service call say with http response 400.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + tts_entity._client.generate.side_effect = ApiError + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.NOT_FOUND + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", voice="voice1", model="model1" + ) From 6baee603a5d1396e78bfc9a384ed5545be0d34e0 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 31 Jul 2024 15:10:50 -0500 Subject: [PATCH 0253/1635] Bump pymicro-vad to 1.0.1 (#122973) --- homeassistant/components/assist_pipeline/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index b22ce72b1eb..dd3ec77f165 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/assist_pipeline", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.0"] + "requirements": ["pymicro-vad==1.0.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c7e5528d675..58f39907269 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -45,7 +45,7 @@ Pillow==10.4.0 pip>=21.3.1 psutil-home-assistant==0.0.1 PyJWT==2.8.0 -pymicro-vad==1.0.0 +pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 diff --git a/requirements_all.txt b/requirements_all.txt index d00605c9893..ad08342230d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2011,7 +2011,7 @@ pymelcloud==2.5.9 pymeteoclimatic==0.1.0 # homeassistant.components.assist_pipeline -pymicro-vad==1.0.0 +pymicro-vad==1.0.1 # homeassistant.components.xiaomi_tv pymitv==1.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fd200794990..5bdf58ff217 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1607,7 +1607,7 @@ pymelcloud==2.5.9 pymeteoclimatic==0.1.0 # homeassistant.components.assist_pipeline -pymicro-vad==1.0.0 +pymicro-vad==1.0.1 # homeassistant.components.mochad pymochad==0.2.0 From 18afe07c169e05fe818e8eb5f7af2ec3303c3aed Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 31 Jul 2024 22:38:50 +0200 Subject: [PATCH 0254/1635] Bump version to 2024.8.0b0 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 
7d58bdb1e94..b315d8c2618 100644
--- a/homeassistant/const.py
+++ b/homeassistant/const.py
@@ -24,7 +24,7 @@ if TYPE_CHECKING:
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
 MINOR_VERSION: Final = 8
-PATCH_VERSION: Final = "0.dev0"
+PATCH_VERSION: Final = "0b0"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
diff --git a/pyproject.toml b/pyproject.toml
index f71e9bd6013..e705101e4ae 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2024.8.0.dev0"
+version = "2024.8.0b0"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"

From bdd6f579753a75d32ab206d9ac940f157530c53d Mon Sep 17 00:00:00 2001
From: Franck Nijhof
Date: Wed, 31 Jul 2024 23:24:30 +0200
Subject: [PATCH 0255/1635] Bump version to 2024.9.0dev0 (#122975)

---
 .github/workflows/ci.yaml | 2 +-
 homeassistant/const.py    | 2 +-
 pyproject.toml            | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b705064e078..96ba319fdb9 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -36,7 +36,7 @@ env:
   CACHE_VERSION: 9
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 8
-  HA_SHORT_VERSION: "2024.8"
+  HA_SHORT_VERSION: "2024.9"
   DEFAULT_PYTHON: "3.12"
   ALL_PYTHON_VERSIONS: "['3.12']"
   # 10.3 is the oldest supported version
diff --git a/homeassistant/const.py b/homeassistant/const.py
index 7d58bdb1e94..389aaf8fef3 100644
--- a/homeassistant/const.py
+++ b/homeassistant/const.py
@@ -23,7 +23,7 @@ if TYPE_CHECKING:
 
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2024
-MINOR_VERSION: Final = 8
+MINOR_VERSION: Final = 9
 PATCH_VERSION: Final = "0.dev0"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
diff --git a/pyproject.toml b/pyproject.toml
index f71e9bd6013..a9559b9e367 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2024.8.0.dev0"
+version = "2024.9.0.dev0"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
readme = "README.rst" From 352f0953f3c0433d4b541d0beeb061d298925c42 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 1 Aug 2024 01:00:17 +0200 Subject: [PATCH 0256/1635] Skip binary wheels for pymicro-vad (#122982) --- .github/workflows/wheels.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b74406b9c82..697535172d7 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -211,7 +211,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_old-cython.txt" @@ -226,7 +226,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" @@ -240,7 +240,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" @@ -254,7 +254,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" From 8375b58eac5ea05fabfdf6ac74f774794793beef Mon Sep 17 
00:00:00 2001 From: "J. Nick Koston" Date: Thu, 1 Aug 2024 01:31:22 -0500 Subject: [PATCH 0257/1635] Update doorbird error notification to be a repair flow (#122987) --- homeassistant/components/doorbird/__init__.py | 37 ++++++----- .../components/doorbird/manifest.json | 2 +- homeassistant/components/doorbird/repairs.py | 55 +++++++++++++++++ .../components/doorbird/strings.json | 13 ++++ tests/components/doorbird/test_repairs.py | 61 +++++++++++++++++++ 5 files changed, 153 insertions(+), 15 deletions(-) create mode 100644 homeassistant/components/doorbird/repairs.py create mode 100644 tests/components/doorbird/test_repairs.py diff --git a/homeassistant/components/doorbird/__init__.py b/homeassistant/components/doorbird/__init__.py index 8989e0ec0be..113b8031d9b 100644 --- a/homeassistant/components/doorbird/__init__.py +++ b/homeassistant/components/doorbird/__init__.py @@ -3,11 +3,11 @@ from __future__ import annotations from http import HTTPStatus +import logging from aiohttp import ClientResponseError from doorbirdpy import DoorBird -from homeassistant.components import persistent_notification from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -17,6 +17,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType @@ -30,6 +31,8 @@ CONF_CUSTOM_URL = "hass_url_override" CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +_LOGGER = logging.getLogger(__name__) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the DoorBird component.""" @@ -68,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> door_bird_data = DoorBirdData(door_station, info, event_entity_ids) door_station.update_events(events) # Subscribe to doorbell or motion events - if not await _async_register_events(hass, door_station): + if not await _async_register_events(hass, door_station, entry): raise ConfigEntryNotReady entry.async_on_unload(entry.add_update_listener(_update_listener)) @@ -84,24 +87,30 @@ async def async_unload_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> async def _async_register_events( - hass: HomeAssistant, door_station: ConfiguredDoorBird + hass: HomeAssistant, door_station: ConfiguredDoorBird, entry: DoorBirdConfigEntry ) -> bool: """Register events on device.""" + issue_id = f"doorbird_schedule_error_{entry.entry_id}" try: await door_station.async_register_events() - except ClientResponseError: - persistent_notification.async_create( + except ClientResponseError as ex: + ir.async_create_issue( hass, - ( - "Doorbird configuration failed. Please verify that API " - "Operator permission is enabled for the Doorbird user. " - "A restart will be required once permissions have been " - "verified." 
- ), - title="Doorbird Configuration Failure", - notification_id="doorbird_schedule_error", + DOMAIN, + issue_id, + severity=ir.IssueSeverity.ERROR, + translation_key="error_registering_events", + data={"entry_id": entry.entry_id}, + is_fixable=True, + translation_placeholders={ + "error": str(ex), + "name": door_station.name or entry.data[CONF_NAME], + }, ) + _LOGGER.debug("Error registering DoorBird events", exc_info=True) return False + else: + ir.async_delete_issue(hass, DOMAIN, issue_id) return True @@ -111,4 +120,4 @@ async def _update_listener(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> N door_station = entry.runtime_data.door_station door_station.update_events(entry.options[CONF_EVENTS]) # Subscribe to doorbell or motion events - await _async_register_events(hass, door_station) + await _async_register_events(hass, door_station, entry) diff --git a/homeassistant/components/doorbird/manifest.json b/homeassistant/components/doorbird/manifest.json index e77f9aaf0a4..0e9f03c8ef8 100644 --- a/homeassistant/components/doorbird/manifest.json +++ b/homeassistant/components/doorbird/manifest.json @@ -3,7 +3,7 @@ "name": "DoorBird", "codeowners": ["@oblogic7", "@bdraco", "@flacjacket"], "config_flow": true, - "dependencies": ["http"], + "dependencies": ["http", "repairs"], "documentation": "https://www.home-assistant.io/integrations/doorbird", "iot_class": "local_push", "loggers": ["doorbirdpy"], diff --git a/homeassistant/components/doorbird/repairs.py b/homeassistant/components/doorbird/repairs.py new file mode 100644 index 00000000000..c8f9b73ecbd --- /dev/null +++ b/homeassistant/components/doorbird/repairs.py @@ -0,0 +1,55 @@ +"""Repairs for DoorBird.""" + +from __future__ import annotations + +import voluptuous as vol + +from homeassistant import data_entry_flow +from homeassistant.components.repairs import RepairsFlow +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + + +class DoorBirdReloadConfirmRepairFlow(RepairsFlow): + """Handler to show doorbird error and reload.""" + + def __init__(self, entry_id: str) -> None: + """Initialize the flow.""" + self.entry_id = entry_id + + async def async_step_init( + self, user_input: dict[str, str] | None = None + ) -> data_entry_flow.FlowResult: + """Handle the first step of a fix flow.""" + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, str] | None = None + ) -> data_entry_flow.FlowResult: + """Handle the confirm step of a fix flow.""" + if user_input is not None: + self.hass.config_entries.async_schedule_reload(self.entry_id) + return self.async_create_entry(data={}) + + issue_registry = ir.async_get(self.hass) + description_placeholders = None + if issue := issue_registry.async_get_issue(self.handler, self.issue_id): + description_placeholders = issue.translation_placeholders + + return self.async_show_form( + step_id="confirm", + data_schema=vol.Schema({}), + description_placeholders=description_placeholders, + ) + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + assert data is not None + entry_id = data["entry_id"] + assert isinstance(entry_id, str) + return DoorBirdReloadConfirmRepairFlow(entry_id=entry_id) diff --git a/homeassistant/components/doorbird/strings.json b/homeassistant/components/doorbird/strings.json index 29c85ec7311..090ba4f161f 100644 --- a/homeassistant/components/doorbird/strings.json 
+++ b/homeassistant/components/doorbird/strings.json @@ -11,6 +11,19 @@ } } }, + "issues": { + "error_registering_events": { + "title": "DoorBird {name} configuration failure", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::doorbird::issues::error_registering_events::title%]", + "description": "Configuring DoorBird {name} failed with error: `{error}`. Please enable the API Operator permission for the DoorBird user and continue to reload the integration." + } + } + } + } + }, "config": { "step": { "user": { diff --git a/tests/components/doorbird/test_repairs.py b/tests/components/doorbird/test_repairs.py new file mode 100644 index 00000000000..7449250b718 --- /dev/null +++ b/tests/components/doorbird/test_repairs.py @@ -0,0 +1,61 @@ +"""Test repairs for doorbird.""" + +from __future__ import annotations + +from http import HTTPStatus + +from homeassistant.components.doorbird.const import DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from . import mock_not_found_exception +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_change_schedule_fails( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, + hass_client: ClientSessionGenerator, +) -> None: + """Test a doorbird when change_schedule fails.""" + assert await async_setup_component(hass, "repairs", {}) + doorbird_entry = await doorbird_mocker( + favorites_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + issue_reg = ir.async_get(hass) + assert len(issue_reg.issues) == 1 + issue = list(issue_reg.issues.values())[0] + issue_id = issue.issue_id + assert issue.domain == DOMAIN + + await async_process_repairs_platforms(hass) + client = await hass_client() + + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + placeholders = data["description_placeholders"] + assert "404" in placeholders["error"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "create_entry" From 55e542844317d5488be01b51878b241327d0d137 Mon Sep 17 00:00:00 2001 From: Matrix Date: Thu, 1 Aug 2024 14:32:16 +0800 Subject: [PATCH 0258/1635] Fix yolink protocol changed (#122989) --- homeassistant/components/yolink/valve.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/yolink/valve.py b/homeassistant/components/yolink/valve.py index a24ad7d385d..d8c199697c3 100644 --- a/homeassistant/components/yolink/valve.py +++ b/homeassistant/components/yolink/valve.py @@ -37,7 +37,7 @@ DEVICE_TYPES: tuple[YoLinkValveEntityDescription, ...] 
= ( key="valve_state", translation_key="meter_valve_state", device_class=ValveDeviceClass.WATER, - value=lambda value: value == "closed" if value is not None else None, + value=lambda value: value != "open" if value is not None else None, exists_fn=lambda device: device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER and not device.device_model_name.startswith(DEV_MODEL_WATER_METER_YS5007), From 2fd3c42e633bfb267604229876bd4846e93e8a3b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 1 Aug 2024 09:19:16 +0200 Subject: [PATCH 0259/1635] Fix implicit-return in squeezebox (#122928) --- homeassistant/components/squeezebox/config_flow.py | 6 ++++-- homeassistant/components/squeezebox/media_player.py | 3 ++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index 9ccac13223b..95af3e8032a 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -3,7 +3,7 @@ import asyncio from http import HTTPStatus import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from pysqueezebox import Server, async_discover import voluptuous as vol @@ -102,7 +102,7 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): # update with suggested values from discovery self.data_schema = _base_schema(self.discovery_info) - async def _validate_input(self, data): + async def _validate_input(self, data: dict[str, Any]) -> str | None: """Validate the user input allows us to connect. Retrieve unique id and abort if already configured. @@ -129,6 +129,8 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(status["uuid"]) self._abort_if_unique_id_configured() + return None + async def async_step_user(self, user_input=None): """Handle a flow initialized by the user.""" errors = {} diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index aaf64c34ddf..552b8ed800c 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -281,10 +281,11 @@ class SqueezeBoxEntity(MediaPlayerEntity): self.hass.data[DOMAIN][KNOWN_PLAYERS].remove(self) @property - def volume_level(self): + def volume_level(self) -> float | None: """Volume level of the media player (0..1).""" if self._player.volume: return int(float(self._player.volume)) / 100.0 + return None @property def is_volume_muted(self): From d16a2fac80b5e3e897442bef786d89e4aa393a94 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 1 Aug 2024 09:20:21 +0200 Subject: [PATCH 0260/1635] Rename variable in async tests (#122996) --- tests/util/test_async.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/util/test_async.py b/tests/util/test_async.py index ac927b1375a..373768788b7 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -14,7 +14,9 @@ from tests.common import extract_stack_to_frame @patch("concurrent.futures.Future") @patch("threading.get_ident") -def test_run_callback_threadsafe_from_inside_event_loop(mock_ident, _) -> None: +def test_run_callback_threadsafe_from_inside_event_loop( + mock_ident: MagicMock, mock_future: MagicMock +) -> None: """Testing calling run_callback_threadsafe from inside an event loop.""" callback = MagicMock() From 25d4dd82a05c6587134709c32c5bb46f16495ae8 
Mon Sep 17 00:00:00 2001 From: Ivan Belokobylskiy Date: Thu, 1 Aug 2024 12:51:41 +0400 Subject: [PATCH 0261/1635] Bump aioymaps to 1.2.5 (#123005) Bump aiomaps, fix sessionId parsing --- homeassistant/components/yandex_transport/manifest.json | 2 +- homeassistant/components/yandex_transport/sensor.py | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index c29b4d3dc98..1d1219d5a95 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", - "requirements": ["aioymaps==1.2.4"] + "requirements": ["aioymaps==1.2.5"] } diff --git a/homeassistant/components/yandex_transport/sensor.py b/homeassistant/components/yandex_transport/sensor.py index 30227e3261e..95c4785a341 100644 --- a/homeassistant/components/yandex_transport/sensor.py +++ b/homeassistant/components/yandex_transport/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta import logging -from aioymaps import CaptchaError, YandexMapsRequester +from aioymaps import CaptchaError, NoSessionError, YandexMapsRequester import voluptuous as vol from homeassistant.components.sensor import ( @@ -88,7 +88,7 @@ class DiscoverYandexTransport(SensorEntity): closer_time = None try: yandex_reply = await self.requester.get_stop_info(self._stop_id) - except CaptchaError as ex: + except (CaptchaError, NoSessionError) as ex: _LOGGER.error( "%s. You may need to disable the integration for some time", ex, diff --git a/requirements_all.txt b/requirements_all.txt index ad08342230d..3bd6cfa8b2a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -407,7 +407,7 @@ aiowebostv==0.4.2 aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.4 +aioymaps==1.2.5 # homeassistant.components.airgradient airgradient==0.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5bdf58ff217..21c8fd5f677 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -389,7 +389,7 @@ aiowebostv==0.4.2 aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.4 +aioymaps==1.2.5 # homeassistant.components.airgradient airgradient==0.7.1 From cd80cd5caafbfb45b4ddc03bdfcc2f4fad732eb4 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 1 Aug 2024 11:30:29 +0200 Subject: [PATCH 0262/1635] Update audit licenses run conditions [ci] (#123009) --- .core_files.yaml | 1 + .github/workflows/ci.yaml | 24 ++++++++++++++++++++++-- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/.core_files.yaml b/.core_files.yaml index 08cabb71164..3f92ed87a84 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -146,6 +146,7 @@ requirements: &requirements - homeassistant/package_constraints.txt - requirements*.txt - pyproject.toml + - script/licenses.py any: - *base_platforms diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 96ba319fdb9..23a8b10c0d3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -31,6 +31,10 @@ on: description: "Only run mypy" default: false type: boolean + audit-licenses-only: + description: "Only run audit licenses" + default: false 
+ type: boolean env: CACHE_VERSION: 9 @@ -222,6 +226,7 @@ jobs: if: | github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' needs: - info steps: @@ -343,6 +348,7 @@ jobs: pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure env: RUFF_OUTPUT_FORMAT: github + lint-other: name: Check other linters runs-on: ubuntu-24.04 @@ -518,6 +524,7 @@ jobs: if: | github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' needs: - info - base @@ -556,6 +563,7 @@ jobs: if: | github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' needs: - info - base @@ -589,7 +597,10 @@ jobs: - info - base if: | - needs.info.outputs.requirements == 'true' + (github.event.inputs.pylint-only != 'true' + && github.event.inputs.mypy-only != 'true' + || github.event.inputs.audit-licenses-only == 'true') + && needs.info.outputs.requirements == 'true' steps: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 @@ -628,6 +639,7 @@ jobs: timeout-minutes: 20 if: | github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' || github.event.inputs.pylint-only == 'true' needs: - info @@ -672,7 +684,9 @@ jobs: runs-on: ubuntu-24.04 timeout-minutes: 20 if: | - (github.event.inputs.mypy-only != 'true' || github.event.inputs.pylint-only == 'true') + (github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' + || github.event.inputs.pylint-only == 'true') && (needs.info.outputs.tests_glob || needs.info.outputs.test_full_suite == 'true') needs: - info @@ -717,6 +731,7 @@ jobs: runs-on: ubuntu-24.04 if: | github.event.inputs.pylint-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' || github.event.inputs.mypy-only == 'true' needs: - info @@ -781,6 +796,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.test_full_suite == 'true' needs: - info @@ -831,6 +847,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.test_full_suite == 'true' needs: - info @@ -951,6 +968,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.mariadb_groups != '[]' needs: - info @@ -1076,6 +1094,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.postgresql_groups != '[]' needs: - info @@ -1220,6 +1239,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.tests_glob && needs.info.outputs.test_full_suite == 'false' needs: From 6bf59a8dfcfe357ed94fca219b2d64563fe8dbdf Mon Sep 17 00:00:00 2001 From: Matrix Date: Thu, 1 Aug 2024 17:49:58 +0800 Subject: [PATCH 0263/1635] Bump yolink api to 
0.4.6 (#123012) --- homeassistant/components/yolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 5353d5d5b8c..ceb4e4ceff3 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.4"] + "requirements": ["yolink-api==0.4.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3bd6cfa8b2a..0bf4b77e9d2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2962,7 +2962,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.6 # homeassistant.components.youless youless-api==2.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 21c8fd5f677..f9670987b70 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2339,7 +2339,7 @@ yalexs==6.4.3 yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.6 # homeassistant.components.youless youless-api==2.1.2 From bc91bd3293a78133f5781b3422dfd1e1f86b97fa Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 1 Aug 2024 11:51:45 +0200 Subject: [PATCH 0264/1635] Make the Android timer notification high priority (#123006) --- homeassistant/components/mobile_app/timers.py | 2 ++ tests/components/mobile_app/test_timers.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/homeassistant/components/mobile_app/timers.py b/homeassistant/components/mobile_app/timers.py index 93b4ac53be5..e092298c5d7 100644 --- a/homeassistant/components/mobile_app/timers.py +++ b/homeassistant/components/mobile_app/timers.py @@ -39,6 +39,8 @@ def async_handle_timer_event( # Android "channel": "Timers", "importance": "high", + "ttl": 0, + "priority": "high", # iOS "push": { "interruption-level": "time-sensitive", diff --git a/tests/components/mobile_app/test_timers.py b/tests/components/mobile_app/test_timers.py index 0eba88f7328..9f7d4cebc58 100644 --- a/tests/components/mobile_app/test_timers.py +++ b/tests/components/mobile_app/test_timers.py @@ -61,6 +61,8 @@ async def test_timer_events( "channel": "Timers", "group": "timers", "importance": "high", + "ttl": 0, + "priority": "high", "push": { "interruption-level": "time-sensitive", }, From c2a23bce5015961f2935a5aa90170c39dccc654b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 1 Aug 2024 12:20:05 +0200 Subject: [PATCH 0265/1635] Fix implicit-return in python_script (#123004) --- homeassistant/components/python_script/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index 72e2f3a824b..ab8cf17daa0 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -108,13 +108,13 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -def discover_scripts(hass): +def discover_scripts(hass: HomeAssistant) -> None: """Discover python scripts in folder.""" path = hass.config.path(FOLDER) if not os.path.isdir(path): _LOGGER.warning("Folder %s not found in configuration 
folder", FOLDER) - return False + return def python_script_service_handler(call: ServiceCall) -> ServiceResponse: """Handle python script service calls.""" From ab522dab71a75e0851e7e6cac9101b73ecf0d10d Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Thu, 1 Aug 2024 03:59:19 -0700 Subject: [PATCH 0266/1635] Restrict nws.get_forecasts_extra selector to nws weather entities (#122986) --- homeassistant/components/nws/services.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/nws/services.yaml b/homeassistant/components/nws/services.yaml index 0d439a9d278..a3d241c775d 100644 --- a/homeassistant/components/nws/services.yaml +++ b/homeassistant/components/nws/services.yaml @@ -2,6 +2,7 @@ get_forecasts_extra: target: entity: domain: weather + integration: nws fields: type: required: true From adf20b60dc31f2a9fb1667d6908904a26b1632f0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 1 Aug 2024 13:43:09 +0200 Subject: [PATCH 0267/1635] Rename variable in landisgyr_heat_meter tests (#122995) --- tests/components/landisgyr_heat_meter/test_init.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/components/landisgyr_heat_meter/test_init.py b/tests/components/landisgyr_heat_meter/test_init.py index c9768ec681f..76a376e441c 100644 --- a/tests/components/landisgyr_heat_meter/test_init.py +++ b/tests/components/landisgyr_heat_meter/test_init.py @@ -1,6 +1,6 @@ """Test the Landis + Gyr Heat Meter init.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch from homeassistant.components.landisgyr_heat_meter.const import ( DOMAIN as LANDISGYR_HEAT_METER_DOMAIN, @@ -17,7 +17,7 @@ API_HEAT_METER_SERVICE = ( @patch(API_HEAT_METER_SERVICE) -async def test_unload_entry(_, hass: HomeAssistant) -> None: +async def test_unload_entry(mock_meter_service: MagicMock, hass: HomeAssistant) -> None: """Test removing config entry.""" mock_entry_data = { "device": "/dev/USB0", @@ -41,7 +41,9 @@ async def test_unload_entry(_, hass: HomeAssistant) -> None: @patch(API_HEAT_METER_SERVICE) async def test_migrate_entry( - _, hass: HomeAssistant, entity_registry: er.EntityRegistry + mock_meter_service: MagicMock, + hass: HomeAssistant, + entity_registry: er.EntityRegistry, ) -> None: """Test successful migration of entry data from version 1 to 2.""" From faedba04079d2c999a479118b5189ef4c0bff060 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 1 Aug 2024 13:44:30 +0200 Subject: [PATCH 0268/1635] Rename variable in knx tests (#122994) * Rename variable in knx tests * Type hints * Type hints --- tests/components/knx/test_config_flow.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index a7da2d26600..d82e5ae33c7 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -1,7 +1,7 @@ """Test the KNX config flow.""" from contextlib import contextmanager -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration @@ -510,7 +510,7 @@ async def test_routing_secure_keyfile( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual( - _gateway_scanner_mock, + gateway_scanner_mock: MagicMock, hass: HomeAssistant, knx_setup, 
user_input, @@ -559,7 +559,7 @@ async def test_tunneling_setup_manual( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual_request_description_error( - _gateway_scanner_mock, + gateway_scanner_mock: MagicMock, hass: HomeAssistant, knx_setup, ) -> None: @@ -700,7 +700,10 @@ async def test_tunneling_setup_manual_request_description_error( return_value=_gateway_descriptor("192.168.0.2", 3675), ) async def test_tunneling_setup_for_local_ip( - _request_description_mock, _gateway_scanner_mock, hass: HomeAssistant, knx_setup + request_description_mock: MagicMock, + gateway_scanner_mock: MagicMock, + hass: HomeAssistant, + knx_setup, ) -> None: """Test tunneling if only one gateway is found.""" result = await hass.config_entries.flow.async_init( @@ -962,7 +965,7 @@ async def _get_menu_step_secure_tunnel(hass: HomeAssistant) -> FlowResult: ), ) async def test_get_secure_menu_step_manual_tunnelling( - _request_description_mock, + request_description_mock: MagicMock, hass: HomeAssistant, ) -> None: """Test flow reaches secure_tunnellinn menu step from manual tunnelling configuration.""" From f5e88b8293ebc02c950087cc811a8657c937760f Mon Sep 17 00:00:00 2001 From: DeerMaximum <43999966+DeerMaximum@users.noreply.github.com> Date: Thu, 1 Aug 2024 14:32:37 +0200 Subject: [PATCH 0269/1635] Velux use node id as fallback for unique id (#117508) Co-authored-by: Robert Resch --- homeassistant/components/velux/__init__.py | 8 ++++++-- homeassistant/components/velux/cover.py | 6 +++--- homeassistant/components/velux/light.py | 2 +- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 4b89fc66a84..1b7cbd1ff93 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -108,10 +108,14 @@ class VeluxEntity(Entity): _attr_should_poll = False - def __init__(self, node: Node) -> None: + def __init__(self, node: Node, config_entry_id: str) -> None: """Initialize the Velux device.""" self.node = node - self._attr_unique_id = node.serial_number + self._attr_unique_id = ( + node.serial_number + if node.serial_number + else f"{config_entry_id}_{node.node_id}" + ) self._attr_name = node.name if node.name else f"#{node.node_id}" @callback diff --git a/homeassistant/components/velux/cover.py b/homeassistant/components/velux/cover.py index c8688e4d186..cd7564eee81 100644 --- a/homeassistant/components/velux/cover.py +++ b/homeassistant/components/velux/cover.py @@ -29,7 +29,7 @@ async def async_setup_entry( """Set up cover(s) for Velux platform.""" module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxCover(node) + VeluxCover(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, OpeningDevice) ) @@ -41,9 +41,9 @@ class VeluxCover(VeluxEntity, CoverEntity): _is_blind = False node: OpeningDevice - def __init__(self, node: OpeningDevice) -> None: + def __init__(self, node: OpeningDevice, config_entry_id: str) -> None: """Initialize VeluxCover.""" - super().__init__(node) + super().__init__(node, config_entry_id) self._attr_device_class = CoverDeviceClass.WINDOW if isinstance(node, Awning): self._attr_device_class = CoverDeviceClass.AWNING diff --git a/homeassistant/components/velux/light.py b/homeassistant/components/velux/light.py index bbe9822648e..e98632701f3 100644 --- a/homeassistant/components/velux/light.py +++ b/homeassistant/components/velux/light.py @@ -23,7 +23,7 @@ async def async_setup_entry( module = 
hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxLight(node) + VeluxLight(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, LighteningDevice) ) From 4f3d6243533777525a6d12b2236722b7e816ffc7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 1 Aug 2024 14:46:15 +0200 Subject: [PATCH 0270/1635] Enable pytest-fixture-param-without-value (PT019) rule in ruff (#122953) --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a9559b9e367..eadd529b3e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -820,7 +820,6 @@ ignore = [ "PLE0605", # temporarily disabled - "PT019", "PYI024", # Use typing.NamedTuple instead of collections.namedtuple "RET503", "RET501", From fef9c92eb76967332a780d24ce6a5d08e210bbe5 Mon Sep 17 00:00:00 2001 From: amccook <30292381+amccook@users.noreply.github.com> Date: Thu, 1 Aug 2024 07:24:09 -0700 Subject: [PATCH 0271/1635] Fix handling of directory type playlists in Plex (#122990) Ignore type directory --- homeassistant/components/plex/media_browser.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/plex/media_browser.py b/homeassistant/components/plex/media_browser.py index e47e6145761..87e9f47af66 100644 --- a/homeassistant/components/plex/media_browser.py +++ b/homeassistant/components/plex/media_browser.py @@ -132,7 +132,11 @@ def browse_media( # noqa: C901 "children": [], } for playlist in plex_server.playlists(): - if playlist.playlistType != "audio" and platform == "sonos": + if ( + playlist.type != "directory" + and playlist.playlistType != "audio" + and platform == "sonos" + ): continue try: playlists_info["children"].append(item_payload(playlist)) From a3b5dcc21b7f01f094b457a1cc7fbb0c03ebff0f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 1 Aug 2024 15:37:26 -0500 Subject: [PATCH 0272/1635] Fix doorbird models are missing the schedule API (#123033) * Fix doorbird models are missing the schedule API fixes #122997 * cover --- homeassistant/components/doorbird/device.py | 14 +++++++--- tests/components/doorbird/__init__.py | 29 ++++++++++----------- tests/components/doorbird/conftest.py | 2 ++ tests/components/doorbird/test_init.py | 10 +++++++ 4 files changed, 37 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/doorbird/device.py b/homeassistant/components/doorbird/device.py index 866251f3d28..7cd45487464 100644 --- a/homeassistant/components/doorbird/device.py +++ b/homeassistant/components/doorbird/device.py @@ -5,9 +5,11 @@ from __future__ import annotations from collections import defaultdict from dataclasses import dataclass from functools import cached_property +from http import HTTPStatus import logging from typing import Any +from aiohttp import ClientResponseError from doorbirdpy import ( DoorBird, DoorBirdScheduleEntry, @@ -170,15 +172,21 @@ class ConfiguredDoorBird: ) -> DoorbirdEventConfig: """Get events and unconfigured favorites from http favorites.""" device = self.device - schedule = await device.schedule() + events: list[DoorbirdEvent] = [] + unconfigured_favorites: defaultdict[str, list[str]] = defaultdict(list) + try: + schedule = await device.schedule() + except ClientResponseError as ex: + if ex.status == HTTPStatus.NOT_FOUND: + # D301 models do not support schedules + return DoorbirdEventConfig(events, [], unconfigured_favorites) + raise favorite_input_type = { output.param: entry.input for entry in schedule for output in entry.output if output.event == HTTP_EVENT_TYPE } - events: list[DoorbirdEvent] = [] - unconfigured_favorites: defaultdict[str, list[str]] = defaultdict(list) default_event_types = { self._get_event_name(event): event_type for event, event_type in DEFAULT_EVENT_TYPES diff --git a/tests/components/doorbird/__init__.py b/tests/components/doorbird/__init__.py index 41def92f121..2d517dfcefe 100644 --- a/tests/components/doorbird/__init__.py +++ b/tests/components/doorbird/__init__.py @@ -47,31 +47,30 @@ def get_mock_doorbird_api( info: dict[str, Any] | None = None, info_side_effect: Exception | None = None, schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, favorites: dict[str, dict[str, Any]] | None = None, favorites_side_effect: Exception | None = None, change_schedule: tuple[bool, int] | None = None, ) -> DoorBird: """Return a mock DoorBirdAPI object with return values.""" doorbirdapi_mock = MagicMock(spec_set=DoorBird) - type(doorbirdapi_mock).info = AsyncMock( - side_effect=info_side_effect, return_value=info + api_mock_type = type(doorbirdapi_mock) + api_mock_type.info = AsyncMock(side_effect=info_side_effect, return_value=info) + api_mock_type.favorites = AsyncMock( + side_effect=favorites_side_effect, return_value=favorites ) - type(doorbirdapi_mock).favorites = AsyncMock( - side_effect=favorites_side_effect, - return_value=favorites, - ) - type(doorbirdapi_mock).change_favorite = AsyncMock(return_value=True) - type(doorbirdapi_mock).change_schedule = AsyncMock( + api_mock_type.change_favorite = AsyncMock(return_value=True) + api_mock_type.change_schedule = AsyncMock( return_value=change_schedule or (True, 200) ) - type(doorbirdapi_mock).schedule = AsyncMock(return_value=schedule) - type(doorbirdapi_mock).energize_relay = AsyncMock(return_value=True) - type(doorbirdapi_mock).turn_light_on 
= AsyncMock(return_value=True) - type(doorbirdapi_mock).delete_favorite = AsyncMock(return_value=True) - type(doorbirdapi_mock).get_image = AsyncMock(return_value=b"image") - type(doorbirdapi_mock).doorbell_state = AsyncMock( - side_effect=mock_unauthorized_exception() + api_mock_type.schedule = AsyncMock( + return_value=schedule, side_effect=schedule_side_effect ) + api_mock_type.energize_relay = AsyncMock(return_value=True) + api_mock_type.turn_light_on = AsyncMock(return_value=True) + api_mock_type.delete_favorite = AsyncMock(return_value=True) + api_mock_type.get_image = AsyncMock(return_value=b"image") + api_mock_type.doorbell_state = AsyncMock(side_effect=mock_unauthorized_exception()) return doorbirdapi_mock diff --git a/tests/components/doorbird/conftest.py b/tests/components/doorbird/conftest.py index 59ead250293..2e367e4e1d8 100644 --- a/tests/components/doorbird/conftest.py +++ b/tests/components/doorbird/conftest.py @@ -102,6 +102,7 @@ async def doorbird_mocker( info: dict[str, Any] | None = None, info_side_effect: Exception | None = None, schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, favorites: dict[str, dict[str, Any]] | None = None, favorites_side_effect: Exception | None = None, options: dict[str, Any] | None = None, @@ -118,6 +119,7 @@ async def doorbird_mocker( info=info or doorbird_info, info_side_effect=info_side_effect, schedule=schedule or doorbird_schedule, + schedule_side_effect=schedule_side_effect, favorites=favorites or doorbird_favorites, favorites_side_effect=favorites_side_effect, change_schedule=change_schedule, diff --git a/tests/components/doorbird/test_init.py b/tests/components/doorbird/test_init.py index fb8bad2fb46..31266c4acf0 100644 --- a/tests/components/doorbird/test_init.py +++ b/tests/components/doorbird/test_init.py @@ -56,6 +56,16 @@ async def test_http_favorites_request_fails( assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY +async def test_http_schedule_api_missing( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test missing the schedule API is non-fatal as not all models support it.""" + doorbird_entry = await doorbird_mocker( + schedule_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.LOADED + + async def test_events_changed( hass: HomeAssistant, doorbird_mocker: DoorbirdMockerType, From 80aa2c269b4e99ef21a3c5f950b31560272ae0f7 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 1 Aug 2024 15:39:17 -0500 Subject: [PATCH 0273/1635] Standardize assist pipelines on 10ms chunk size (#123024) * Make chunk size always 10ms * Fix voip --- .../components/assist_pipeline/__init__.py | 8 ++ .../assist_pipeline/audio_enhancer.py | 18 +--- .../components/assist_pipeline/const.py | 4 +- .../components/assist_pipeline/pipeline.py | 75 +++---------- homeassistant/components/voip/voip.py | 22 ++-- tests/components/assist_pipeline/conftest.py | 13 +++ .../snapshots/test_websocket.ambr | 2 +- tests/components/assist_pipeline/test_init.py | 71 +++++++------ .../assist_pipeline/test_websocket.py | 100 +++++++++++------- 9 files changed, 154 insertions(+), 159 deletions(-) diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index f481411e551..8ee053162b0 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -16,6 +16,10 @@ from .const import ( DATA_LAST_WAKE_UP, DOMAIN, EVENT_RECORDING, 
+ SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, + SAMPLES_PER_CHUNK, ) from .error import PipelineNotFound from .pipeline import ( @@ -53,6 +57,10 @@ __all__ = ( "PipelineNotFound", "WakeWordSettings", "EVENT_RECORDING", + "SAMPLES_PER_CHUNK", + "SAMPLE_RATE", + "SAMPLE_WIDTH", + "SAMPLE_CHANNELS", ) CONFIG_SCHEMA = vol.Schema( diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index e7a149bd00e..c9c60f421b1 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -6,6 +6,8 @@ import logging from pymicro_vad import MicroVad +from .const import BYTES_PER_CHUNK + _LOGGER = logging.getLogger(__name__) @@ -38,11 +40,6 @@ class AudioEnhancer(ABC): def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" - @property - @abstractmethod - def samples_per_chunk(self) -> int | None: - """Return number of samples per chunk or None if chunking isn't required.""" - class MicroVadEnhancer(AudioEnhancer): """Audio enhancer that just runs microVAD.""" @@ -61,22 +58,15 @@ class MicroVadEnhancer(AudioEnhancer): _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold) def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: - """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" + """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" is_speech: bool | None = None if self.vad is not None: # Run VAD + assert len(audio) == BYTES_PER_CHUNK speech_prob = self.vad.Process10ms(audio) is_speech = speech_prob > self.threshold return EnhancedAudioChunk( audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech ) - - @property - def samples_per_chunk(self) -> int | None: - """Return number of samples per chunk or None if chunking isn't required.""" - if self.is_vad_enabled: - return 160 # 10ms - - return None diff --git a/homeassistant/components/assist_pipeline/const.py b/homeassistant/components/assist_pipeline/const.py index 14b93a90372..f7306b89a54 100644 --- a/homeassistant/components/assist_pipeline/const.py +++ b/homeassistant/components/assist_pipeline/const.py @@ -19,4 +19,6 @@ EVENT_RECORDING = f"{DOMAIN}_recording" SAMPLE_RATE = 16000 # hertz SAMPLE_WIDTH = 2 # bytes SAMPLE_CHANNELS = 1 # mono -SAMPLES_PER_CHUNK = 240 # 20 ms @ 16Khz +MS_PER_CHUNK = 10 +SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK) # 10 ms @ 16Khz +BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS # 16-bit diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index af29888eb07..9fada934ca1 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -51,11 +51,13 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer from .const import ( + BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DATA_CONFIG, DATA_LAST_WAKE_UP, DATA_MIGRATIONS, DOMAIN, + MS_PER_CHUNK, SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH, @@ -502,9 +504,6 @@ class AudioSettings: is_vad_enabled: bool = True """True if VAD is used to determine the end of the voice command.""" - samples_per_chunk: int | None = None - """Number of samples that will be in each audio chunk (None for no chunking).""" - 
silence_seconds: float = 0.5 """Seconds of silence after voice command has ended.""" @@ -525,11 +524,6 @@ class AudioSettings: or (self.auto_gain_dbfs > 0) ) - @property - def is_chunking_enabled(self) -> bool: - """True if chunk size is set.""" - return self.samples_per_chunk is not None - @dataclass class PipelineRun: @@ -566,7 +560,9 @@ class PipelineRun: audio_enhancer: AudioEnhancer | None = None """VAD/noise suppression/auto gain""" - audio_chunking_buffer: AudioBuffer | None = None + audio_chunking_buffer: AudioBuffer = field( + default_factory=lambda: AudioBuffer(BYTES_PER_CHUNK) + ) """Buffer used when splitting audio into chunks for audio processing""" _device_id: str | None = None @@ -599,8 +595,6 @@ class PipelineRun: self.audio_settings.is_vad_enabled, ) - self.audio_chunking_buffer = AudioBuffer(self.samples_per_chunk * SAMPLE_WIDTH) - def __eq__(self, other: object) -> bool: """Compare pipeline runs by id.""" if isinstance(other, PipelineRun): @@ -608,14 +602,6 @@ class PipelineRun: return False - @property - def samples_per_chunk(self) -> int: - """Return number of samples expected in each audio chunk.""" - if self.audio_enhancer is not None: - return self.audio_enhancer.samples_per_chunk or SAMPLES_PER_CHUNK - - return self.audio_settings.samples_per_chunk or SAMPLES_PER_CHUNK - @callback def process_event(self, event: PipelineEvent) -> None: """Log an event and call listener.""" @@ -728,7 +714,7 @@ class PipelineRun: # after wake-word-detection. num_audio_chunks_to_buffer = int( (wake_word_settings.audio_seconds_to_buffer * SAMPLE_RATE) - / self.samples_per_chunk + / SAMPLES_PER_CHUNK ) stt_audio_buffer: deque[EnhancedAudioChunk] | None = None @@ -1216,60 +1202,31 @@ class PipelineRun: self.debug_recording_thread = None async def process_volume_only( - self, - audio_stream: AsyncIterable[bytes], - sample_rate: int = SAMPLE_RATE, - sample_width: int = SAMPLE_WIDTH, + self, audio_stream: AsyncIterable[bytes] ) -> AsyncGenerator[EnhancedAudioChunk]: """Apply volume transformation only (no VAD/audio enhancements) with optional chunking.""" - assert self.audio_chunking_buffer is not None - - bytes_per_chunk = self.samples_per_chunk * sample_width - ms_per_sample = sample_rate // 1000 - ms_per_chunk = self.samples_per_chunk // ms_per_sample timestamp_ms = 0 - async for chunk in audio_stream: if self.audio_settings.volume_multiplier != 1.0: chunk = _multiply_volume(chunk, self.audio_settings.volume_multiplier) - if self.audio_settings.is_chunking_enabled: - for sub_chunk in chunk_samples( - chunk, bytes_per_chunk, self.audio_chunking_buffer - ): - yield EnhancedAudioChunk( - audio=sub_chunk, - timestamp_ms=timestamp_ms, - is_speech=None, # no VAD - ) - timestamp_ms += ms_per_chunk - else: - # No chunking + for sub_chunk in chunk_samples( + chunk, BYTES_PER_CHUNK, self.audio_chunking_buffer + ): yield EnhancedAudioChunk( - audio=chunk, + audio=sub_chunk, timestamp_ms=timestamp_ms, is_speech=None, # no VAD ) - timestamp_ms += (len(chunk) // sample_width) // ms_per_sample + timestamp_ms += MS_PER_CHUNK async def process_enhance_audio( - self, - audio_stream: AsyncIterable[bytes], - sample_rate: int = SAMPLE_RATE, - sample_width: int = SAMPLE_WIDTH, + self, audio_stream: AsyncIterable[bytes] ) -> AsyncGenerator[EnhancedAudioChunk]: - """Split audio into 10 ms chunks and apply VAD/noise suppression/auto gain/volume transformation.""" + """Split audio into chunks and apply VAD/noise suppression/auto gain/volume transformation.""" assert self.audio_enhancer is not None - assert 
self.audio_enhancer.samples_per_chunk is not None - assert self.audio_chunking_buffer is not None - bytes_per_chunk = self.audio_enhancer.samples_per_chunk * sample_width - ms_per_sample = sample_rate // 1000 - ms_per_chunk = ( - self.audio_enhancer.samples_per_chunk // sample_width - ) // ms_per_sample timestamp_ms = 0 - async for dirty_samples in audio_stream: if self.audio_settings.volume_multiplier != 1.0: # Static gain @@ -1279,10 +1236,10 @@ class PipelineRun: # Split into chunks for audio enhancements/VAD for dirty_chunk in chunk_samples( - dirty_samples, bytes_per_chunk, self.audio_chunking_buffer + dirty_samples, BYTES_PER_CHUNK, self.audio_chunking_buffer ): yield self.audio_enhancer.enhance_chunk(dirty_chunk, timestamp_ms) - timestamp_ms += ms_per_chunk + timestamp_ms += MS_PER_CHUNK def _multiply_volume(chunk: bytes, volume_multiplier: float) -> bytes: diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 243909629cf..161e938a3b6 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -21,7 +21,7 @@ from voip_utils import ( VoipDatagramProtocol, ) -from homeassistant.components import stt, tts +from homeassistant.components import assist_pipeline, stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, PipelineEvent, @@ -331,15 +331,14 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert audio_enhancer.samples_per_chunk is not None - vad_buffer = AudioBuffer(audio_enhancer.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: chunk_buffer.append(chunk) segmenter.process_with_vad( chunk, - audio_enhancer.samples_per_chunk, + assist_pipeline.SAMPLES_PER_CHUNK, lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, vad_buffer, ) @@ -371,13 +370,12 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert audio_enhancer.samples_per_chunk is not None - vad_buffer = AudioBuffer(audio_enhancer.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: if not segmenter.process_with_vad( chunk, - audio_enhancer.samples_per_chunk, + assist_pipeline.SAMPLES_PER_CHUNK, lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, vad_buffer, ): @@ -437,13 +435,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): sample_channels = wav_file.getnchannels() if ( - (sample_rate != 16000) - or (sample_width != 2) - or (sample_channels != 1) + (sample_rate != RATE) + or (sample_width != WIDTH) + or (sample_channels != CHANNELS) ): raise ValueError( - "Expected rate/width/channels as 16000/2/1," - " got {sample_rate}/{sample_width}/{sample_channels}}" + f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," + f" got {sample_rate}/{sample_width}/{sample_channels}" ) audio_bytes = wav_file.readframes(wav_file.getnframes()) diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index c041a54d8fa..b2eca1e7ce1 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -11,6 +11,12 @@ import pytest from homeassistant.components import stt, tts, wake_word from homeassistant.components.assist_pipeline import DOMAIN, select as assist_select +from 
homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( PipelineData, PipelineStorageCollection, @@ -33,6 +39,8 @@ from tests.common import ( _TRANSCRIPT = "test transcript" +BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS + @pytest.fixture(autouse=True) def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: @@ -462,3 +470,8 @@ def pipeline_data(hass: HomeAssistant, init_components) -> PipelineData: def pipeline_storage(pipeline_data) -> PipelineStorageCollection: """Return pipeline storage collection.""" return pipeline_data.pipeline_store + + +def make_10ms_chunk(header: bytes) -> bytes: + """Return 10ms of zeros with the given header.""" + return header + bytes(BYTES_PER_CHUNK - len(header)) diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 0b04b67bb22..e5ae18d28f2 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -440,7 +440,7 @@ # --- # name: test_device_capture_override.2 dict({ - 'audio': 'Y2h1bmsx', + 'audio': 'Y2h1bmsxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=', 'channels': 1, 'rate': 16000, 'type': 'audio', diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 8fb7ce5b5a5..4206a288331 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -13,6 +13,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import assist_pipeline, media_source, stt, tts from homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) @@ -20,16 +21,16 @@ from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component from .conftest import ( + BYTES_ONE_SECOND, MockSttProvider, MockSttProviderEntity, MockTTSProvider, MockWakeWordEntity, + make_10ms_chunk, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -BYTES_ONE_SECOND = 16000 * 2 - def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: """Process events to remove dynamic values.""" @@ -58,8 +59,8 @@ async def test_pipeline_from_audio_stream_auto( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -79,7 +80,9 @@ async def test_pipeline_from_audio_stream_auto( ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_legacy( @@ -98,8 +101,8 @@ async def test_pipeline_from_audio_stream_legacy( events: 
list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -142,7 +145,9 @@ async def test_pipeline_from_audio_stream_legacy( ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_entity( @@ -161,8 +166,8 @@ async def test_pipeline_from_audio_stream_entity( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -205,7 +210,9 @@ async def test_pipeline_from_audio_stream_entity( ) assert process_events(events) == snapshot - assert mock_stt_provider_entity.received == [b"part1", b"part2"] + assert len(mock_stt_provider_entity.received) == 2 + assert mock_stt_provider_entity.received[0].startswith(b"part1") + assert mock_stt_provider_entity.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_no_stt( @@ -224,8 +231,8 @@ async def test_pipeline_from_audio_stream_no_stt( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline without stt support @@ -285,8 +292,8 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Try to use the created pipeline @@ -327,7 +334,7 @@ async def test_pipeline_from_audio_stream_wake_word( # [0, 2, ...] wake_chunk_2 = bytes(it.islice(it.cycle(range(0, 256, 2)), BYTES_ONE_SECOND)) - samples_per_chunk = 160 + samples_per_chunk = 160 # 10ms @ 16Khz bytes_per_chunk = samples_per_chunk * 2 # 16-bit async def audio_data(): @@ -343,8 +350,8 @@ async def test_pipeline_from_audio_stream_wake_word( yield wake_chunk_2[i : i + bytes_per_chunk] i += bytes_per_chunk - for chunk in (b"wake word!", b"part1", b"part2"): - yield chunk + bytes(bytes_per_chunk - len(chunk)) + for header in (b"wake word!", b"part1", b"part2"): + yield make_10ms_chunk(header) yield b"" @@ -365,9 +372,7 @@ async def test_pipeline_from_audio_stream_wake_word( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, samples_per_chunk=samples_per_chunk - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -408,13 +413,11 @@ async def test_pipeline_save_audio( pipeline = assist_pipeline.async_get_pipeline(hass) events: list[assist_pipeline.PipelineEvent] = [] - # Pad out to an even number of bytes since these "samples" will be saved - # as 16-bit values. 
async def audio_data(): - yield b"wake word_" + yield make_10ms_chunk(b"wake word") # queued audio - yield b"part1_" - yield b"part2_" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -457,12 +460,16 @@ async def test_pipeline_save_audio( # Verify wake file with wave.open(str(wake_file), "rb") as wake_wav: wake_data = wake_wav.readframes(wake_wav.getnframes()) - assert wake_data == b"wake word_" + assert wake_data.startswith(b"wake word") # Verify stt file with wave.open(str(stt_file), "rb") as stt_wav: stt_data = stt_wav.readframes(stt_wav.getnframes()) - assert stt_data == b"queued audiopart1_part2_" + assert stt_data.startswith(b"queued audio") + stt_data = stt_data[len(b"queued audio") :] + assert stt_data.startswith(b"part1") + stt_data = stt_data[BYTES_PER_CHUNK:] + assert stt_data.startswith(b"part2") async def test_pipeline_saved_audio_with_device_id( @@ -645,10 +652,10 @@ async def test_wake_word_detection_aborted( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"silence!" - yield b"wake word!" - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"silence!") + yield make_10ms_chunk(b"wake word!") + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" pipeline_store = pipeline_data.pipeline_store diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index 7d4a9b18c12..2da914f4252 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -8,7 +8,12 @@ from unittest.mock import ANY, patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.assist_pipeline.const import DOMAIN +from homeassistant.components.assist_pipeline.const import ( + DOMAIN, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( DeviceAudioQueue, Pipeline, @@ -18,7 +23,13 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr -from .conftest import MockWakeWordEntity, MockWakeWordEntity2 +from .conftest import ( + BYTES_ONE_SECOND, + BYTES_PER_CHUNK, + MockWakeWordEntity, + MockWakeWordEntity2, + make_10ms_chunk, +) from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -205,7 +216,7 @@ async def test_audio_pipeline_with_wake_word_timeout( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "timeout": 1, }, } @@ -229,7 +240,7 @@ async def test_audio_pipeline_with_wake_word_timeout( events.append(msg["event"]) # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(16000 * 2 * 2)) + await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) # Time out error msg = await client.receive_json() @@ -259,7 +270,7 @@ async def test_audio_pipeline_with_wake_word_no_timeout( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "timeout": 0, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, } ) @@ -282,9 +293,10 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events.append(msg["event"]) # "audio" - await client.send_bytes(bytes([handler_id]) + b"wake word") + await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake 
word")) - msg = await client.receive_json() + async with asyncio.timeout(1): + msg = await client.receive_json() assert msg["event"]["type"] == "wake_word-end" assert msg["event"]["data"] == snapshot events.append(msg["event"]) @@ -365,7 +377,7 @@ async def test_audio_pipeline_no_wake_word_engine( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -402,7 +414,7 @@ async def test_audio_pipeline_no_wake_word_entity( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -1771,7 +1783,7 @@ async def test_audio_pipeline_with_enhancements( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, # Enhancements "noise_suppression_level": 2, "auto_gain_dbfs": 15, @@ -1801,7 +1813,7 @@ async def test_audio_pipeline_with_enhancements( # One second of silence. # This will pass through the audio enhancement pipeline, but we don't test # the actual output. - await client.send_bytes(bytes([handler_id]) + bytes(16000 * 2)) + await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) # End of audio stream (handler id + empty payload) await client.send_bytes(bytes([handler_id])) @@ -1871,7 +1883,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1880,7 +1892,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1914,8 +1926,8 @@ async def test_wake_word_cooldown_same_id( assert msg["event"]["data"] == snapshot # Wake both up at the same time - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -1954,7 +1966,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1963,7 +1975,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1997,8 +2009,8 @@ async def test_wake_word_cooldown_different_ids( assert msg["event"]["data"] == snapshot # Wake both up at the same time, but they will have different wake word ids - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events msg = await client_1.receive_json() @@ -2073,7 +2085,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_1, "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": 
True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2084,7 +2096,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_2, "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2119,8 +2131,8 @@ async def test_wake_word_cooldown_different_entities( # Wake both up at the same time. # They will have the same wake word id, but different entities. - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -2158,7 +2170,11 @@ async def test_device_capture( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start capture client_capture = await hass_ws_client(hass) @@ -2181,7 +2197,7 @@ async def test_device_capture( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2232,9 +2248,9 @@ async def test_device_capture( # Verify audio chunks for i, audio_chunk in enumerate(audio_chunks): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2259,7 +2275,11 @@ async def test_device_capture_override( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start first capture client_capture_1 = await hass_ws_client(hass) @@ -2282,7 +2302,7 @@ async def test_device_capture_override( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2365,9 +2385,9 @@ async def test_device_capture_override( # Verify all but first audio chunk for i, audio_chunk in enumerate(audio_chunks[1:]): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2427,7 +2447,7 @@ async def test_device_capture_queue_full( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2448,8 +2468,8 @@ async def test_device_capture_queue_full( assert msg["event"]["type"] == "stt-start" assert msg["event"]["data"] == snapshot - # Single sample 
will "overflow" the queue - await client_pipeline.send_bytes(bytes([handler_id, 0, 0])) + # Single chunk will "overflow" the queue + await client_pipeline.send_bytes(bytes([handler_id]) + bytes(BYTES_PER_CHUNK)) # End of audio stream await client_pipeline.send_bytes(bytes([handler_id])) @@ -2557,7 +2577,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2569,7 +2589,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2628,7 +2648,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2640,7 +2660,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "hey_jarvis", }, } From 262d778a383aa9b05523c57663d80189d62997f9 Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Thu, 1 Aug 2024 23:50:10 +0300 Subject: [PATCH 0274/1635] Anthropic Claude conversation integration (#122526) * Initial commit * Use add_suggested_values * Update homeassistant/components/anthropic/conversation.py Co-authored-by: Joost Lekkerkerker * Update strings.json * Update config_flow.py * Update config_flow.py * Fix tests * Update homeassistant/components/anthropic/conversation.py Co-authored-by: Paulus Schoutsen * Removed agent registration * Moved message_convert inline function outside --------- Co-authored-by: Joost Lekkerkerker Co-authored-by: Paulus Schoutsen Co-authored-by: Paulus Schoutsen --- CODEOWNERS | 2 + .../components/anthropic/__init__.py | 46 ++ .../components/anthropic/config_flow.py | 210 ++++++++ homeassistant/components/anthropic/const.py | 15 + .../components/anthropic/conversation.py | 301 +++++++++++ .../components/anthropic/manifest.json | 12 + .../components/anthropic/strings.json | 34 ++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/anthropic/__init__.py | 1 + tests/components/anthropic/conftest.py | 51 ++ .../snapshots/test_conversation.ambr | 34 ++ .../components/anthropic/test_config_flow.py | 239 +++++++++ .../components/anthropic/test_conversation.py | 487 ++++++++++++++++++ tests/components/anthropic/test_init.py | 64 +++ 17 files changed, 1509 insertions(+) create mode 100644 homeassistant/components/anthropic/__init__.py create mode 100644 homeassistant/components/anthropic/config_flow.py create mode 100644 homeassistant/components/anthropic/const.py create mode 100644 homeassistant/components/anthropic/conversation.py create mode 100644 homeassistant/components/anthropic/manifest.json create mode 100644 homeassistant/components/anthropic/strings.json create mode 100644 tests/components/anthropic/__init__.py create mode 100644 tests/components/anthropic/conftest.py create mode 100644 tests/components/anthropic/snapshots/test_conversation.ambr create mode 100644 tests/components/anthropic/test_config_flow.py create mode 100644 tests/components/anthropic/test_conversation.py create mode 100644 tests/components/anthropic/test_init.py diff --git a/CODEOWNERS b/CODEOWNERS index e90def993d2..b53e0a929bb 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ 
-108,6 +108,8 @@ build.json @home-assistant/supervisor /tests/components/anova/ @Lash-L /homeassistant/components/anthemav/ @hyralex /tests/components/anthemav/ @hyralex +/homeassistant/components/anthropic/ @Shulyaka +/tests/components/anthropic/ @Shulyaka /homeassistant/components/aosmith/ @bdr99 /tests/components/aosmith/ @bdr99 /homeassistant/components/apache_kafka/ @bachya diff --git a/homeassistant/components/anthropic/__init__.py b/homeassistant/components/anthropic/__init__.py new file mode 100644 index 00000000000..aa6cf509fa1 --- /dev/null +++ b/homeassistant/components/anthropic/__init__.py @@ -0,0 +1,46 @@ +"""The Anthropic integration.""" + +from __future__ import annotations + +import anthropic + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv + +from .const import DOMAIN, LOGGER + +PLATFORMS = (Platform.CONVERSATION,) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + +type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient] + + +async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool: + """Set up Anthropic from a config entry.""" + client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY]) + try: + await client.messages.create( + model="claude-3-haiku-20240307", + max_tokens=1, + messages=[{"role": "user", "content": "Hi"}], + timeout=10.0, + ) + except anthropic.AuthenticationError as err: + LOGGER.error("Invalid API key: %s", err) + return False + except anthropic.AnthropicError as err: + raise ConfigEntryNotReady(err) from err + + entry.runtime_data = client + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Anthropic.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py new file mode 100644 index 00000000000..01e16ec5350 --- /dev/null +++ b/homeassistant/components/anthropic/config_flow.py @@ -0,0 +1,210 @@ +"""Config flow for Anthropic integration.""" + +from __future__ import annotations + +import logging +from types import MappingProxyType +from typing import Any + +import anthropic +import voluptuous as vol + +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + TemplateSelector, +) + +from .const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_RECOMMENDED, + CONF_TEMPERATURE, + DOMAIN, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, + RECOMMENDED_TEMPERATURE, +) + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_API_KEY): str, + } +) + +RECOMMENDED_OPTIONS = { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: llm.LLM_API_ASSIST, + CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT, +} + + +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: + """Validate the user input 
allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. + """ + client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY]) + await client.messages.create( + model="claude-3-haiku-20240307", + max_tokens=1, + messages=[{"role": "user", "content": "Hi"}], + timeout=10.0, + ) + + +class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Anthropic.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors = {} + + if user_input is not None: + try: + await validate_input(self.hass, user_input) + except anthropic.APITimeoutError: + errors["base"] = "timeout_connect" + except anthropic.APIConnectionError: + errors["base"] = "cannot_connect" + except anthropic.APIStatusError as e: + if isinstance(e.body, dict): + errors["base"] = e.body.get("error", {}).get("type", "unknown") + else: + errors["base"] = "unknown" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title="Claude", + data=user_input, + options=RECOMMENDED_OPTIONS, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None + ) + + @staticmethod + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Create the options flow.""" + return AnthropicOptionsFlow(config_entry) + + +class AnthropicOptionsFlow(OptionsFlow): + """Anthropic config flow options handler.""" + + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + self.last_rendered_recommended = config_entry.options.get( + CONF_RECOMMENDED, False + ) + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options + + if user_input is not None: + if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended: + if user_input[CONF_LLM_HASS_API] == "none": + user_input.pop(CONF_LLM_HASS_API) + return self.async_create_entry(title="", data=user_input) + + # Re-render the options again, now with the recommended options shown/hidden + self.last_rendered_recommended = user_input[CONF_RECOMMENDED] + + options = { + CONF_RECOMMENDED: user_input[CONF_RECOMMENDED], + CONF_PROMPT: user_input[CONF_PROMPT], + CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API], + } + + suggested_values = options.copy() + if not suggested_values.get(CONF_PROMPT): + suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT + + schema = self.add_suggested_values_to_schema( + vol.Schema(anthropic_config_option_schema(self.hass, options)), + suggested_values, + ) + + return self.async_show_form( + step_id="init", + data_schema=schema, + ) + + +def anthropic_config_option_schema( + hass: HomeAssistant, + options: dict[str, Any] | MappingProxyType[str, Any], +) -> dict: + """Return a schema for Anthropic completion options.""" + hass_apis: list[SelectOptionDict] = [ + SelectOptionDict( + label="No control", + value="none", + ) + ] + hass_apis.extend( + SelectOptionDict( + label=api.name, + value=api.id, + ) + for api in llm.async_get_apis(hass) + ) + + schema = { + vol.Optional(CONF_PROMPT): TemplateSelector(), + vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector( + SelectSelectorConfig(options=hass_apis) + ), + vol.Required( + 
CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False) + ): bool, + } + + if options.get(CONF_RECOMMENDED): + return schema + + schema.update( + { + vol.Optional( + CONF_CHAT_MODEL, + default=RECOMMENDED_CHAT_MODEL, + ): str, + vol.Optional( + CONF_MAX_TOKENS, + default=RECOMMENDED_MAX_TOKENS, + ): int, + vol.Optional( + CONF_TEMPERATURE, + default=RECOMMENDED_TEMPERATURE, + ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)), + } + ) + return schema diff --git a/homeassistant/components/anthropic/const.py b/homeassistant/components/anthropic/const.py new file mode 100644 index 00000000000..4ccf2c88faa --- /dev/null +++ b/homeassistant/components/anthropic/const.py @@ -0,0 +1,15 @@ +"""Constants for the Anthropic integration.""" + +import logging + +DOMAIN = "anthropic" +LOGGER = logging.getLogger(__package__) + +CONF_RECOMMENDED = "recommended" +CONF_PROMPT = "prompt" +CONF_CHAT_MODEL = "chat_model" +RECOMMENDED_CHAT_MODEL = "claude-3-5-sonnet-20240620" +CONF_MAX_TOKENS = "max_tokens" +RECOMMENDED_MAX_TOKENS = 1024 +CONF_TEMPERATURE = "temperature" +RECOMMENDED_TEMPERATURE = 1.0 diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py new file mode 100644 index 00000000000..92a09ad8a10 --- /dev/null +++ b/homeassistant/components/anthropic/conversation.py @@ -0,0 +1,301 @@ +"""Conversation support for Anthropic.""" + +from collections.abc import Callable +import json +from typing import Any, Literal, cast + +import anthropic +from anthropic._types import NOT_GIVEN +from anthropic.types import ( + Message, + MessageParam, + TextBlock, + TextBlockParam, + ToolParam, + ToolResultBlockParam, + ToolUseBlock, + ToolUseBlockParam, +) +import voluptuous as vol +from voluptuous_openapi import convert + +from homeassistant.components import conversation +from homeassistant.components.conversation import trace +from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, TemplateError +from homeassistant.helpers import device_registry as dr, intent, llm, template +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import ulid + +from . 
import AnthropicConfigEntry +from .const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_TEMPERATURE, + DOMAIN, + LOGGER, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, + RECOMMENDED_TEMPERATURE, +) + +# Max number of back and forth with the LLM to generate a response +MAX_TOOL_ITERATIONS = 10 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: AnthropicConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up conversation entities.""" + agent = AnthropicConversationEntity(config_entry) + async_add_entities([agent]) + + +def _format_tool( + tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None +) -> ToolParam: + """Format tool specification.""" + return ToolParam( + name=tool.name, + description=tool.description or "", + input_schema=convert(tool.parameters, custom_serializer=custom_serializer), + ) + + +def _message_convert( + message: Message, +) -> MessageParam: + """Convert from class to TypedDict.""" + param_content: list[TextBlockParam | ToolUseBlockParam] = [] + + for message_content in message.content: + if isinstance(message_content, TextBlock): + param_content.append(TextBlockParam(type="text", text=message_content.text)) + elif isinstance(message_content, ToolUseBlock): + param_content.append( + ToolUseBlockParam( + type="tool_use", + id=message_content.id, + name=message_content.name, + input=message_content.input, + ) + ) + + return MessageParam(role=message.role, content=param_content) + + +class AnthropicConversationEntity( + conversation.ConversationEntity, conversation.AbstractConversationAgent +): + """Anthropic conversation agent.""" + + _attr_has_entity_name = True + _attr_name = None + + def __init__(self, entry: AnthropicConfigEntry) -> None: + """Initialize the agent.""" + self.entry = entry + self.history: dict[str, list[MessageParam]] = {} + self._attr_unique_id = entry.entry_id + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Anthropic", + model="Claude", + entry_type=dr.DeviceEntryType.SERVICE, + ) + if self.entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + + @property + def supported_languages(self) -> list[str] | Literal["*"]: + """Return a list of supported languages.""" + return MATCH_ALL + + async def async_process( + self, user_input: conversation.ConversationInput + ) -> conversation.ConversationResult: + """Process a sentence.""" + options = self.entry.options + intent_response = intent.IntentResponse(language=user_input.language) + llm_api: llm.APIInstance | None = None + tools: list[ToolParam] | None = None + user_name: str | None = None + llm_context = llm.LLMContext( + platform=DOMAIN, + context=user_input.context, + user_prompt=user_input.text, + language=user_input.language, + assistant=conversation.DOMAIN, + device_id=user_input.device_id, + ) + + if options.get(CONF_LLM_HASS_API): + try: + llm_api = await llm.async_get_api( + self.hass, + options[CONF_LLM_HASS_API], + llm_context, + ) + except HomeAssistantError as err: + LOGGER.error("Error getting LLM API: %s", err) + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Error preparing LLM API: {err}", + ) + return conversation.ConversationResult( + response=intent_response, conversation_id=user_input.conversation_id + ) + tools = [ + _format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools + ] + + if user_input.conversation_id is None: + 
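+            # No conversation ID was supplied: start a new conversation with an empty message history.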
conversation_id = ulid.ulid_now() + messages = [] + + elif user_input.conversation_id in self.history: + conversation_id = user_input.conversation_id + messages = self.history[conversation_id] + + else: + # Conversation IDs are ULIDs. We generate a new one if not provided. + # If an old OLID is passed in, we will generate a new one to indicate + # a new conversation was started. If the user picks their own, they + # want to track a conversation and we respect it. + try: + ulid.ulid_to_bytes(user_input.conversation_id) + conversation_id = ulid.ulid_now() + except ValueError: + conversation_id = user_input.conversation_id + + messages = [] + + if ( + user_input.context + and user_input.context.user_id + and ( + user := await self.hass.auth.async_get_user(user_input.context.user_id) + ) + ): + user_name = user.name + + try: + prompt_parts = [ + template.Template( + llm.BASE_PROMPT + + options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT), + self.hass, + ).async_render( + { + "ha_name": self.hass.config.location_name, + "user_name": user_name, + "llm_context": llm_context, + }, + parse_result=False, + ) + ] + + except TemplateError as err: + LOGGER.error("Error rendering prompt: %s", err) + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Sorry, I had a problem with my template: {err}", + ) + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) + + if llm_api: + prompt_parts.append(llm_api.api_prompt) + + prompt = "\n".join(prompt_parts) + + # Create a copy of the variable because we attach it to the trace + messages = [*messages, MessageParam(role="user", content=user_input.text)] + + LOGGER.debug("Prompt: %s", messages) + LOGGER.debug("Tools: %s", tools) + trace.async_conversation_trace_append( + trace.ConversationTraceEventType.AGENT_DETAIL, + {"system": prompt, "messages": messages}, + ) + + client = self.entry.runtime_data + + # To prevent infinite loops, we limit the number of iterations + for _iteration in range(MAX_TOOL_ITERATIONS): + try: + response = await client.messages.create( + model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), + messages=messages, + tools=tools or NOT_GIVEN, + max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS), + system=prompt, + temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE), + ) + except anthropic.AnthropicError as err: + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Sorry, I had a problem talking to Anthropic: {err}", + ) + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) + + LOGGER.debug("Response %s", response) + + messages.append(_message_convert(response)) + + if response.stop_reason != "tool_use" or not llm_api: + break + + tool_results: list[ToolResultBlockParam] = [] + for tool_call in response.content: + if isinstance(tool_call, TextBlock): + LOGGER.info(tool_call.text) + + if not isinstance(tool_call, ToolUseBlock): + continue + + tool_input = llm.ToolInput( + tool_name=tool_call.name, + tool_args=cast(dict[str, Any], tool_call.input), + ) + LOGGER.debug( + "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args + ) + + try: + tool_response = await llm_api.async_call_tool(tool_input) + except (HomeAssistantError, vol.Invalid) as e: + tool_response = {"error": type(e).__name__} + if str(e): + tool_response["error_text"] = str(e) + + LOGGER.debug("Tool response: %s", tool_response) + tool_results.append( + ToolResultBlockParam( + 
type="tool_result", + tool_use_id=tool_call.id, + content=json.dumps(tool_response), + ) + ) + + messages.append(MessageParam(role="user", content=tool_results)) + + self.history[conversation_id] = messages + + for content in response.content: + if isinstance(content, TextBlock): + intent_response.async_set_speech(content.text) + break + + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) diff --git a/homeassistant/components/anthropic/manifest.json b/homeassistant/components/anthropic/manifest.json new file mode 100644 index 00000000000..7d51c458e4d --- /dev/null +++ b/homeassistant/components/anthropic/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "anthropic", + "name": "Anthropic Conversation", + "after_dependencies": ["assist_pipeline", "intent"], + "codeowners": ["@Shulyaka"], + "config_flow": true, + "dependencies": ["conversation"], + "documentation": "https://www.home-assistant.io/integrations/anthropic", + "integration_type": "service", + "iot_class": "cloud_polling", + "requirements": ["anthropic==0.31.2"] +} diff --git a/homeassistant/components/anthropic/strings.json b/homeassistant/components/anthropic/strings.json new file mode 100644 index 00000000000..9550a1a6672 --- /dev/null +++ b/homeassistant/components/anthropic/strings.json @@ -0,0 +1,34 @@ +{ + "config": { + "step": { + "user": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", + "authentication_error": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "options": { + "step": { + "init": { + "data": { + "prompt": "Instructions", + "chat_model": "[%key:common::generic::model%]", + "max_tokens": "Maximum tokens to return in response", + "temperature": "Temperature", + "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", + "recommended": "Recommended model settings" + }, + "data_description": { + "prompt": "Instruct how the LLM should respond. This can be a template." 
+ } + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 0c37cf9c412..67cffd25f28 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -53,6 +53,7 @@ FLOWS = { "androidtv_remote", "anova", "anthemav", + "anthropic", "aosmith", "apcupsd", "apple_tv", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 3cc3ea71df9..25a78e30017 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -315,6 +315,12 @@ "config_flow": true, "iot_class": "local_push" }, + "anthropic": { + "name": "Anthropic Conversation", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "anwb_energie": { "name": "ANWB Energie", "integration_type": "virtual", diff --git a/requirements_all.txt b/requirements_all.txt index 0bf4b77e9d2..1df7cc12072 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -451,6 +451,9 @@ anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 +# homeassistant.components.anthropic +anthropic==0.31.2 + # homeassistant.components.weatherkit apple_weatherkit==1.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f9670987b70..5831f7c23cf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -424,6 +424,9 @@ anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 +# homeassistant.components.anthropic +anthropic==0.31.2 + # homeassistant.components.weatherkit apple_weatherkit==1.1.2 diff --git a/tests/components/anthropic/__init__.py b/tests/components/anthropic/__init__.py new file mode 100644 index 00000000000..99d7a5785a8 --- /dev/null +++ b/tests/components/anthropic/__init__.py @@ -0,0 +1 @@ +"""Tests for the Anthropic integration.""" diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py new file mode 100644 index 00000000000..0a5ad5e5ac6 --- /dev/null +++ b/tests/components/anthropic/conftest.py @@ -0,0 +1,51 @@ +"""Tests helpers.""" + +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry(hass): + """Mock a config entry.""" + entry = MockConfigEntry( + title="Claude", + domain="anthropic", + data={ + "api_key": "bla", + }, + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def mock_config_entry_with_assist(hass, mock_config_entry): + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} + ) + return mock_config_entry + + +@pytest.fixture +async def mock_init_component(hass, mock_config_entry): + """Initialize integration.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + assert await async_setup_component(hass, "anthropic", {}) + await hass.async_block_till_done() + + +@pytest.fixture(autouse=True) +async def setup_ha(hass: HomeAssistant) -> None: + """Set up Home Assistant.""" + assert await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/anthropic/snapshots/test_conversation.ambr b/tests/components/anthropic/snapshots/test_conversation.ambr new file mode 
100644 index 00000000000..e4dd7cd00bb --- /dev/null +++ b/tests/components/anthropic/snapshots/test_conversation.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_unknown_hass_api + dict({ + 'conversation_id': None, + 'response': IntentResponse( + card=dict({ + }), + error_code=, + failed_results=list([ + ]), + intent=None, + intent_targets=list([ + ]), + language='en', + matched_states=list([ + ]), + reprompt=dict({ + }), + response_type=, + speech=dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Error preparing LLM API: API non-existing not found', + }), + }), + speech_slots=dict({ + }), + success_results=list([ + ]), + unmatched_states=list([ + ]), + ), + }) +# --- diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py new file mode 100644 index 00000000000..df27352b7b2 --- /dev/null +++ b/tests/components/anthropic/test_config_flow.py @@ -0,0 +1,239 @@ +"""Test the Anthropic config flow.""" + +from unittest.mock import AsyncMock, patch + +from anthropic import ( + APIConnectionError, + APIResponseValidationError, + APITimeoutError, + AuthenticationError, + BadRequestError, + InternalServerError, +) +from httpx import URL, Request, Response +import pytest + +from homeassistant import config_entries +from homeassistant.components.anthropic.config_flow import RECOMMENDED_OPTIONS +from homeassistant.components.anthropic.const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_RECOMMENDED, + CONF_TEMPERATURE, + DOMAIN, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, +) +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant) -> None: + """Test we get the form.""" + # Pretend we already set up a config entry. 
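+    # An already-loaded entry ensures the user flow still works when the integration is set up.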
+ hass.config.components.add("anthropic") + MockConfigEntry( + domain=DOMAIN, + state=config_entries.ConfigEntryState.LOADED, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] is None + + with ( + patch( + "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + ), + patch( + "homeassistant.components.anthropic.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "api_key": "bla", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + "api_key": "bla", + } + assert result2["options"] == RECOMMENDED_OPTIONS + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test the options form.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + "prompt": "Speak like a pirate", + "max_tokens": 200, + }, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"]["prompt"] == "Speak like a pirate" + assert options["data"]["max_tokens"] == 200 + assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (APIConnectionError(request=None), "cannot_connect"), + (APITimeoutError(request=None), "timeout_connect"), + ( + BadRequestError( + message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", + response=Response( + status_code=400, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "invalid_request_error"}}, + ), + "invalid_request_error", + ), + ( + AuthenticationError( + message="invalid x-api-key", + response=Response( + status_code=401, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "authentication_error"}}, + ), + "authentication_error", + ), + ( + InternalServerError( + message=None, + response=Response( + status_code=500, + request=Request(method="POST", url=URL()), + ), + body=None, + ), + "unknown", + ), + ( + APIResponseValidationError( + response=Response( + status_code=200, + request=Request(method="POST", url=URL()), + ), + body=None, + ), + "unknown", + ), + ], +) +async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=side_effect, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "api_key": "bla", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": error} + + +@pytest.mark.parametrize( + ("current_options", "new_options", "expected_options"), + [ + ( + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "none", + CONF_PROMPT: "bla", + }, + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + }, + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + }, + ), + ( + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + }, + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "assist", + CONF_PROMPT: "", + }, + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "assist", + CONF_PROMPT: "", + }, + ), + ], +) +async def test_options_switching( + hass: HomeAssistant, + mock_config_entry, + mock_init_component, + current_options, + new_options, + expected_options, +) -> None: + """Test the options form.""" + hass.config_entries.async_update_entry(mock_config_entry, options=current_options) + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED): + options_flow = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + **current_options, + CONF_RECOMMENDED: new_options[CONF_RECOMMENDED], + }, + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + new_options, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"] == expected_options diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py new file mode 100644 index 00000000000..65ede877281 --- /dev/null +++ b/tests/components/anthropic/test_conversation.py @@ -0,0 +1,487 @@ +"""Tests for the Anthropic integration.""" + +from unittest.mock import AsyncMock, Mock, patch + 
+from anthropic import RateLimitError +from anthropic.types import Message, TextBlock, ToolUseBlock, Usage +from freezegun import freeze_time +from httpx import URL, Request, Response +from syrupy.assertion import SnapshotAssertion +import voluptuous as vol + +from homeassistant.components import conversation +from homeassistant.components.conversation import trace +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import Context, HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent, llm +from homeassistant.setup import async_setup_component +from homeassistant.util import ulid + +from tests.common import MockConfigEntry + + +async def test_entity( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test entity properties.""" + state = hass.states.get("conversation.claude") + assert state + assert state.attributes["supported_features"] == 0 + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "assist", + }, + ) + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + state = hass.states.get("conversation.claude") + assert state + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + + +async def test_error_handling( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component +) -> None: + """Test that the default prompt works.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=RateLimitError( + message=None, + response=Response( + status_code=429, request=Request(method="POST", url=URL()) + ), + body=None, + ), + ): + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.error_code == "unknown", result + + +async def test_template_error( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template error handling works.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.", + }, + ) + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.error_code == "unknown", result + + +async def test_template_variables( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template variables work.""" + context = Context(user_id="12345") + mock_user = Mock() + mock_user.id = "12345" + mock_user.name = "Test User" + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": ( + "The user name is {{ user_name }}. " + "The user id is {{ llm_context.context.user_id }}." 
+ ), + }, + ) + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ) as mock_create, + patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, context, agent_id="conversation.claude" + ) + + assert ( + result.response.response_type == intent.IntentResponseType.ACTION_DONE + ), result + assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"] + assert "The user id is 12345." in mock_create.mock_calls[1][2]["system"] + + +async def test_conversation_agent( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test Anthropic Agent.""" + agent = conversation.agent_manager.async_get_agent(hass, "conversation.claude") + assert agent.supported_languages == "*" + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_function_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call from the assistant.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock( + type="text", + text="I have successfully called the function", + ) + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock(type="text", text="Certainly, calling it now!"), + ToolUseBlock( + type="tool_use", + id="toolu_0123456789AbCdEfGhIjKlM", + name="test_tool", + input={"param1": "test_value"}, + ), + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-03 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert "Today's date is 2024-06-03." 
in mock_create.mock_calls[1][2]["system"] + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert mock_create.mock_calls[1][2]["messages"][2] == { + "role": "user", + "content": [ + { + "content": '"Test response"', + "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", + "type": "tool_result", + } + ], + } + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="anthropic", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + # Test Conversation tracing + traces = trace.async_get_traces() + assert traces + last_trace = traces[-1].as_dict() + trace_events = last_trace.get("events", []) + assert [event["event_type"] for event in trace_events] == [ + trace.ConversationTraceEventType.ASYNC_PROCESS, + trace.ConversationTraceEventType.AGENT_DETAIL, + trace.ConversationTraceEventType.TOOL_CALL, + ] + # AGENT_DETAIL event contains the raw prompt passed to the model + detail_event = trace_events[1] + assert "Answer in plain text" in detail_event["data"]["system"] + assert "Today's date is 2024-06-03." in trace_events[1]["data"]["system"] + + # Call it again, make sure we have updated prompt + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-04 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert "Today's date is 2024-06-04." in mock_create.mock_calls[1][2]["system"] + # Test old assert message not updated + assert "Today's date is 2024-06-03." 
in trace_events[1]["data"]["system"] + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_function_exception( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call with exception.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock( + type="text", + text="There was an error calling the function", + ) + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock(type="text", text="Certainly, calling it now!"), + ToolUseBlock( + type="tool_use", + id="toolu_0123456789AbCdEfGhIjKlM", + name="test_tool", + input={"param1": "test_value"}, + ), + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert mock_create.mock_calls[1][2]["messages"][2] == { + "role": "user", + "content": [ + { + "content": '{"error": "HomeAssistantError", "error_text": "Test tool exception"}', + "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", + "type": "tool_result", + } + ], + } + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="anthropic", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +async def test_assist_api_tools_conversion( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test that we are able to convert actual tools from Assist API.""" + for component in ( + "intent", + "todo", + "light", + "shopping_list", + "humidifier", + "climate", + "media_player", + "vacuum", + "cover", + "weather", + ): + assert await async_setup_component(hass, component, {}) + + agent_id = "conversation.claude" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[TextBlock(type="text", text="Hello, how can I help you?")], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ), + ) as mock_create: + 
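+        # The mocked client replies with plain text; the test only inspects the tools forwarded in the request.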
await conversation.async_converse( + hass, "hello", None, Context(), agent_id=agent_id + ) + + tools = mock_create.mock_calls[0][2]["tools"] + assert tools + + +async def test_unknown_hass_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + mock_init_component, +) -> None: + """Test when we reference an API that no longer exists.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "non-existing", + }, + ) + + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result == snapshot + + +@patch("anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock) +async def test_conversation_id( + mock_create, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test conversation ID is honored.""" + result = await conversation.async_converse( + hass, "hello", None, None, agent_id="conversation.claude" + ) + + conversation_id = result.conversation_id + + result = await conversation.async_converse( + hass, "hello", conversation_id, None, agent_id="conversation.claude" + ) + + assert result.conversation_id == conversation_id + + unknown_id = ulid.ulid() + + result = await conversation.async_converse( + hass, "hello", unknown_id, None, agent_id="conversation.claude" + ) + + assert result.conversation_id != unknown_id + + result = await conversation.async_converse( + hass, "hello", "koala", None, agent_id="conversation.claude" + ) + + assert result.conversation_id == "koala" diff --git a/tests/components/anthropic/test_init.py b/tests/components/anthropic/test_init.py new file mode 100644 index 00000000000..ee87bb708d0 --- /dev/null +++ b/tests/components/anthropic/test_init.py @@ -0,0 +1,64 @@ +"""Tests for the Anthropic integration.""" + +from unittest.mock import AsyncMock, patch + +from anthropic import ( + APIConnectionError, + APITimeoutError, + AuthenticationError, + BadRequestError, +) +from httpx import URL, Request, Response +import pytest + +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (APIConnectionError(request=None), "Connection error"), + (APITimeoutError(request=None), "Request timed out"), + ( + BadRequestError( + message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", + response=Response( + status_code=400, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "invalid_request_error"}}, + ), + "anthropic integration not ready yet: Your credit balance is too low to access the Claude API", + ), + ( + AuthenticationError( + message="invalid x-api-key", + response=Response( + status_code=401, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "authentication_error"}}, + ), + "Invalid API key", + ), + ], +) +async def test_init_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, + side_effect, + error, +) -> None: + """Test initialization errors.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=side_effect, + ): + assert await async_setup_component(hass, "anthropic", {}) + await hass.async_block_till_done() + assert error in caplog.text From ed6d6575d7c9d359bbf84a702f8f90fb466207bb Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Fri, 2 Aug 2024 09:05:06 +0300 Subject: [PATCH 0275/1635] Add aliases to script llm tool description (#122380) * Add aliases to script llm tool description * Also add name --- homeassistant/helpers/llm.py | 13 +++++++++++++ tests/helpers/test_llm.py | 36 +++++++++++++++++++++++++++++------- 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 4ddb00166b6..e37aa0c532d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -677,6 +677,19 @@ class ScriptTool(Tool): self.parameters = vol.Schema(schema) + aliases: list[str] = [] + if entity_entry.name: + aliases.append(entity_entry.name) + if entity_entry.aliases: + aliases.extend(entity_entry.aliases) + if aliases: + if self.description: + self.description = ( + self.description + ". 
Aliases: " + str(list(aliases)) + ) + else: + self.description = "Aliases: " + str(list(aliases)) + parameters_cache[entity_entry.unique_id] = ( self.description, self.parameters, diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index ea6e628d1d4..4d14abb9819 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -411,7 +411,9 @@ async def test_assist_api_prompt( ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) - def create_entity(device: dr.DeviceEntry, write_state=True) -> None: + def create_entity( + device: dr.DeviceEntry, write_state=True, aliases: set[str] | None = None + ) -> None: """Create an entity for a device and track entity_id.""" entity = entity_registry.async_get_or_create( "light", @@ -421,6 +423,8 @@ async def test_assist_api_prompt( original_name=str(device.name or "Unnamed Device"), suggested_object_id=str(device.name or "unnamed_device"), ) + if aliases: + entity_registry.async_update_entity(entity.entity_id, aliases=aliases) if write_state: entity.write_unavailable_state(hass) @@ -432,7 +436,8 @@ async def test_assist_api_prompt( manufacturer="Test Manufacturer", model="Test Model", suggested_area="Test Area", - ) + ), + aliases={"my test light"}, ) for i in range(3): create_entity( @@ -516,7 +521,7 @@ async def test_assist_api_prompt( domain: light state: 'on' areas: Test Area, Alternative name -- names: Test Device +- names: Test Device, my test light domain: light state: unavailable areas: Test Area, Alternative name @@ -616,6 +621,7 @@ async def test_assist_api_prompt( async def test_script_tool( hass: HomeAssistant, + entity_registry: er.EntityRegistry, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, ) -> None: @@ -659,6 +665,10 @@ async def test_script_tool( ) async_expose_entity(hass, "conversation", "script.test_script", True) + entity_registry.async_update_entity( + "script.test_script", name="script name", aliases={"script alias"} + ) + area = area_registry.async_create("Living room") floor = floor_registry.async_create("2") @@ -671,7 +681,10 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert tool.description == "This is a test script" + assert ( + tool.description + == "This is a test script. Aliases: ['script name', 'script alias']" + ) schema = { vol.Required("beer", description="Number of beers"): cv.string, vol.Optional("wine"): selector.NumberSelector({"min": 0, "max": 3}), @@ -684,7 +697,10 @@ async def test_script_tool( assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ("This is a test script", vol.Schema(schema)) + "test_script": ( + "This is a test script. Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) } tool_input = llm.ToolInput( @@ -754,12 +770,18 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert tool.description == "This is a new test script" + assert ( + tool.description + == "This is a new test script. Aliases: ['script name', 'script alias']" + ) schema = {vol.Required("beer", description="Number of beers"): cv.string} assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ("This is a new test script", vol.Schema(schema)) + "test_script": ( + "This is a new test script. 
Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) } From 8ec8aef02ecb9b215009ff772560f68b54e6a88d Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 2 Aug 2024 08:48:41 +0200 Subject: [PATCH 0276/1635] Use freezer in KNX tests (#123044) use freezer in tests --- tests/components/knx/test_binary_sensor.py | 27 ++++++++++++------- tests/components/knx/test_button.py | 9 ++++--- tests/components/knx/test_expose.py | 13 ++++----- tests/components/knx/test_interface_device.py | 10 ++++--- tests/components/knx/test_light.py | 11 ++++---- 5 files changed, 44 insertions(+), 26 deletions(-) diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index 1b304293a86..dbb8d2ee832 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -2,6 +2,8 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.knx.const import CONF_STATE_ADDRESS, CONF_SYNC_STATE from homeassistant.components.knx.schema import BinarySensorSchema from homeassistant.const import ( @@ -13,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -141,9 +142,12 @@ async def test_binary_sensor_ignore_internal_state( assert len(events) == 6 -async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_counter( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with context timeout.""" - async_fire_time_changed(hass, dt_util.utcnow()) context_timeout = 1 await knx.setup_integration( @@ -167,7 +171,8 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() # state changed twice after context timeout - once to ON with counter 1 and once to counter 0 state = hass.states.get("binary_sensor.test") @@ -188,7 +193,8 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON @@ -202,10 +208,12 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No assert event.get("old_state").attributes.get("counter") == 2 -async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_reset( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with reset_after function.""" - async_fire_time_changed(hass, dt_util.utcnow()) - await knx.setup_integration( { BinarySensorSchema.PLATFORM: [ @@ -223,7 +231,8 @@ async def test_binary_sensor_reset(hass: HomeAssistant, knx: 
KNXTestKit) -> None await knx.receive_write("2/2/2", True) state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1)) + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() # state reset after after timeout state = hass.states.get("binary_sensor.test") diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index 613208d5595..a05752eced1 100644 --- a/tests/components/knx/test_button.py +++ b/tests/components/knx/test_button.py @@ -3,20 +3,22 @@ from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed -async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_button_simple( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX button with default payload.""" await knx.setup_integration( { @@ -38,7 +40,8 @@ async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: # received telegrams on button GA are ignored by the entity old_state = hass.states.get("button.test") - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) + freezer.tick(timedelta(seconds=3)) + async_fire_time_changed(hass) await knx.receive_write("1/2/3", False) await knx.receive_write("1/2/3", True) new_state = hass.states.get("button.test") diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index 96b00241ab6..c4d0acf0ce2 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -3,6 +3,7 @@ from datetime import timedelta from freezegun import freeze_time +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx import CONF_KNX_EXPOSE, DOMAIN, KNX_ADDRESS @@ -14,11 +15,10 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit -from tests.common import async_fire_time_changed_exact +from tests.common import async_fire_time_changed async def test_binary_expose(hass: HomeAssistant, knx: KNXTestKit) -> None: @@ -206,7 +206,9 @@ async def test_expose_string(hass: HomeAssistant, knx: KNXTestKit) -> None: ) -async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_expose_cooldown( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test an expose with cooldown.""" cooldown_time = 2 entity_id = "fake.entity" @@ -234,9 +236,8 @@ async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_no_telegram() # Wait for cooldown to pass - async_fire_time_changed_exact( - hass, dt_util.utcnow() + timedelta(seconds=cooldown_time) - ) + freezer.tick(timedelta(seconds=cooldown_time)) + async_fire_time_changed(hass) await hass.async_block_till_done() await knx.assert_write("1/1/8", (3,)) diff --git 
a/tests/components/knx/test_interface_device.py b/tests/components/knx/test_interface_device.py index 8010496ef0d..79114d4ffd5 100644 --- a/tests/components/knx/test_interface_device.py +++ b/tests/components/knx/test_interface_device.py @@ -2,6 +2,7 @@ from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState, XknxConnectionType from xknx.telegram import IndividualAddress @@ -10,7 +11,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -19,7 +19,10 @@ from tests.typing import WebSocketGenerator async def test_diagnostic_entities( - hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test diagnostic entities.""" await knx.setup_integration({}) @@ -50,7 +53,8 @@ async def test_diagnostic_entities( knx.xknx.connection_manager.cemi_count_outgoing_error = 2 events = async_capture_events(hass, "state_changed") - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() assert len(events) == 3 # 5 polled sensors - 2 disabled diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index 8c966a77a0b..04f849bb555 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState from xknx.devices.light import Light as XknxLight @@ -19,7 +20,6 @@ from homeassistant.components.light import ( ) from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from . 
import KnxEntityGenerator from .conftest import KNXTestKit @@ -643,7 +643,9 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non await knx.assert_write(test_blue, (45,)) -async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_light_rgbw_individual( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX light with rgbw color in individual GAs.""" test_red = "1/1/3" test_red_state = "1/1/4" @@ -763,9 +765,8 @@ async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> No await knx.receive_write(test_green, (0,)) # # individual color debounce takes 0.2 seconds if not all 4 addresses received knx.assert_state("light.test", STATE_ON) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT) - ) + freezer.tick(timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() knx.assert_state("light.test", STATE_OFF) # turn ON from KNX From 4da385898bbc4c58814d3ce1f2f393abd502de2d Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 2 Aug 2024 08:50:19 +0200 Subject: [PATCH 0277/1635] Mitigate breaking change for KNX climate schema (#123043) --- homeassistant/components/knx/schema.py | 7 +++-- homeassistant/components/knx/validation.py | 34 ++++++++++++++++++++++ 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knx/schema.py b/homeassistant/components/knx/schema.py index 43037ad8188..c31b3d30ad0 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -56,6 +56,7 @@ from .const import ( ColorTempModes, ) from .validation import ( + backwards_compatible_xknx_climate_enum_member, dpt_base_type_validator, ga_list_validator, ga_validator, @@ -409,10 +410,12 @@ class ClimateSchema(KNXPlatformSchema): CONF_ON_OFF_INVERT, default=DEFAULT_ON_OFF_INVERT ): cv.boolean, vol.Optional(CONF_OPERATION_MODES): vol.All( - cv.ensure_list, [vol.All(vol.Upper, cv.enum(HVACOperationMode))] + cv.ensure_list, + [backwards_compatible_xknx_climate_enum_member(HVACOperationMode)], ), vol.Optional(CONF_CONTROLLER_MODES): vol.All( - cv.ensure_list, [vol.All(vol.Upper, cv.enum(HVACControllerMode))] + cv.ensure_list, + [backwards_compatible_xknx_climate_enum_member(HVACControllerMode)], ), vol.Optional( CONF_DEFAULT_CONTROLLER_MODE, default=HVACMode.HEAT diff --git a/homeassistant/components/knx/validation.py b/homeassistant/components/knx/validation.py index 422b8474fd9..0283b65f899 100644 --- a/homeassistant/components/knx/validation.py +++ b/homeassistant/components/knx/validation.py @@ -1,6 +1,7 @@ """Validation helpers for KNX config schemas.""" from collections.abc import Callable +from enum import Enum import ipaddress from typing import Any @@ -104,3 +105,36 @@ sync_state_validator = vol.Any( cv.boolean, cv.matches_regex(r"^(init|expire|every)( \d*)?$"), ) + + +def backwards_compatible_xknx_climate_enum_member(enumClass: type[Enum]) -> vol.All: + """Transform a string to an enum member. + + Backwards compatible with member names of xknx 2.x climate DPT Enums + due to unintentional breaking change in HA 2024.8. + """ + + def _string_transform(value: Any) -> str: + """Upper and slugify string and substitute old member names. + + Previously this was checked against Enum values instead of names. These + looked like `FAN_ONLY = "Fan only"`, therefore the upper & replace part. 
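To make the renaming concrete, here is a minimal stand-alone sketch of the normalization the docstring above describes. The enum and mapping below are simplified stand-ins, not the actual xknx classes, and the real validator additionally wraps the transform in voluptuous (vol.All / vol.In) as shown in the hunk that follows.

from enum import Enum

class HVACOperationMode(Enum):  # simplified stand-in for the xknx enum
    COMFORT = 1
    ECONOMY = 2
    BUILDING_PROTECTION = 3

_RENAMED_MEMBERS = {  # old xknx 2.x names mapped to current member names
    "NIGHT": "ECONOMY",
    "FROST_PROTECTION": "BUILDING_PROTECTION",
    "DRY": "DEHUMIDIFICATION",
}

def _normalize(value: str) -> str:
    # Old configs were checked against values such as "Frost protection",
    # hence the upper() and replace() before the rename lookup.
    name = value.upper().replace(" ", "_")
    return _RENAMED_MEMBERS.get(name, name)

assert HVACOperationMode[_normalize("Frost protection")] is HVACOperationMode.BUILDING_PROTECTION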
+ """ + if not isinstance(value, str): + raise vol.Invalid("value should be a string") + name = value.upper().replace(" ", "_") + match name: + case "NIGHT": + return "ECONOMY" + case "FROST_PROTECTION": + return "BUILDING_PROTECTION" + case "DRY": + return "DEHUMIDIFICATION" + case _: + return name + + return vol.All( + _string_transform, + vol.In(enumClass.__members__), + enumClass.__getitem__, + ) From 0058d42ca274d2e07e581ce1ff8b221bb2d4f046 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 2 Aug 2024 11:49:47 +0200 Subject: [PATCH 0278/1635] Correct squeezebox service (#123060) --- homeassistant/components/squeezebox/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 552b8ed800c..97d55c2f2ef 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -185,7 +185,7 @@ async def async_setup_entry( {vol.Required(ATTR_OTHER_PLAYER): cv.string}, "async_sync", ) - platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") + platform.async_register_entity_service(SERVICE_UNSYNC, {}, "async_unsync") # Start server discovery task if not already running entry.async_on_unload(async_at_start(hass, start_server_discovery)) From 449afe9e6fadcd1d798675ae0f51a466a2981493 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 2 Aug 2024 11:58:07 +0200 Subject: [PATCH 0279/1635] Correct type annotation for `EntityPlatform.async_register_entity_service` (#123054) Correct type annotation for EntityPlatform.async_register_entity_service Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- homeassistant/helpers/entity_platform.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index d868e582f8f..6774780f00f 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -985,7 +985,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType | None, + schema: VolDictType | VolSchemaType, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, From adf85156984e14b17cb0107e0078ae7c8285f794 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 2 Aug 2024 12:08:44 +0200 Subject: [PATCH 0280/1635] OpenAI make supported features reflect the config entry options (#123047) --- .../openai_conversation/conversation.py | 15 +++++++++++++++ .../openai_conversation/test_conversation.py | 2 ++ 2 files changed, 17 insertions(+) diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index 483b37945d6..b482126e27c 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -23,6 +23,7 @@ from voluptuous_openapi import convert from homeassistant.components import assist_pipeline, conversation from homeassistant.components.conversation import trace +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, TemplateError @@ -109,6 +110,9 @@ class 
OpenAIConversationEntity( self.hass, "conversation", self.entry.entry_id, self.entity_id ) conversation.async_set_agent(self.hass, self.entry, self) + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) async def async_will_remove_from_hass(self) -> None: """When entity will be removed from Home Assistant.""" @@ -319,3 +323,14 @@ class OpenAIConversationEntity( return conversation.ConversationResult( response=intent_response, conversation_id=conversation_id ) + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + if entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + else: + self._attr_supported_features = conversation.ConversationEntityFeature(0) diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index 3364d822245..e0665bc449f 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -521,6 +521,8 @@ async def test_unknown_hass_api( }, ) + await hass.async_block_till_done() + result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id ) From b2d5f9c742ff1002a841d69728782afed80a1795 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Fri, 2 Aug 2024 12:17:51 +0200 Subject: [PATCH 0281/1635] Update generator typing (#123052) --- tests/components/bluesound/conftest.py | 2 +- tests/components/bryant_evolution/conftest.py | 4 ++-- .../bryant_evolution/test_climate.py | 14 ++++++------- tests/components/elevenlabs/conftest.py | 6 +++--- tests/components/iotty/conftest.py | 20 +++++++++---------- tests/components/iron_os/conftest.py | 2 +- tests/components/iron_os/test_number.py | 2 +- tests/components/iron_os/test_sensor.py | 2 +- 8 files changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index 02c73bcd62f..5d81b6863c6 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -40,7 +40,7 @@ def sync_status() -> SyncStatus: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.bluesound.async_setup_entry", return_value=True diff --git a/tests/components/bryant_evolution/conftest.py b/tests/components/bryant_evolution/conftest.py index cc9dfbec1e1..fb12d7ebf29 100644 --- a/tests/components/bryant_evolution/conftest.py +++ b/tests/components/bryant_evolution/conftest.py @@ -15,7 +15,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.bryant_evolution.async_setup_entry", return_value=True @@ -31,7 +31,7 @@ for the Bryant Evolution integration. 
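The generator-typing change applied across these fixtures relies on the default type parameters that recent typeshed stubs (PEP 696) give collections.abc.Generator: the send and return types default to None, so a single argument is equivalent to the old three-argument spelling. A minimal sketch of the resulting fixture shape; the patch target string is a placeholder, not a real module path.

from collections.abc import Generator
from unittest.mock import AsyncMock, patch

import pytest

@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
    # Equivalent to Generator[AsyncMock, None, None]: the fixture only yields,
    # never receives sent values and returns nothing.
    with patch("example.integration.async_setup_entry", return_value=True) as mock:  # placeholder target
        yield mock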
@pytest.fixture(autouse=True) -def mock_evolution_client_factory() -> Generator[AsyncMock, None, None]: +def mock_evolution_client_factory() -> Generator[AsyncMock]: """Mock an Evolution client.""" with patch( "evolutionhttp.BryantEvolutionLocalClient.get_client", diff --git a/tests/components/bryant_evolution/test_climate.py b/tests/components/bryant_evolution/test_climate.py index 42944c32bc2..0b527e02a10 100644 --- a/tests/components/bryant_evolution/test_climate.py +++ b/tests/components/bryant_evolution/test_climate.py @@ -52,7 +52,7 @@ async def test_setup_integration_success( async def test_set_temperature_mode_cool( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, ) -> None: """Test setting the temperature in cool mode.""" @@ -83,7 +83,7 @@ async def test_set_temperature_mode_cool( async def test_set_temperature_mode_heat( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, ) -> None: """Test setting the temperature in heat mode.""" @@ -111,7 +111,7 @@ async def test_set_temperature_mode_heat( async def test_set_temperature_mode_heat_cool( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, ) -> None: """Test setting the temperature in heat_cool mode.""" @@ -147,7 +147,7 @@ async def test_set_temperature_mode_heat_cool( async def test_set_fan_mode( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], ) -> None: """Test that setting fan mode works.""" mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") @@ -175,7 +175,7 @@ async def test_set_fan_mode( async def test_set_hvac_mode( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], hvac_mode, evolution_mode, ) -> None: @@ -203,7 +203,7 @@ async def test_set_hvac_mode( async def test_read_hvac_action_heat_cool( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, curr_temp: int, expected_action: HVACAction, @@ -236,7 +236,7 @@ async def test_read_hvac_action_heat_cool( async def test_read_hvac_action( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, mode: str, active: bool, diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index 13eb022243f..c4d9a87b5ad 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -16,7 +16,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( 
"homeassistant.components.elevenlabs.async_setup_entry", return_value=True @@ -25,7 +25,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_async_client() -> Generator[AsyncMock, None, None]: +def mock_async_client() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" client_mock = AsyncMock() client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) @@ -37,7 +37,7 @@ def mock_async_client() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_async_client_fail() -> Generator[AsyncMock, None, None]: +def mock_async_client_fail() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" with patch( "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py index 7961a4ce3a1..9f858879cb9 100644 --- a/tests/components/iotty/conftest.py +++ b/tests/components/iotty/conftest.py @@ -65,7 +65,7 @@ def aiohttp_client_session() -> None: @pytest.fixture -def mock_aioclient() -> Generator[AiohttpClientMocker, None, None]: +def mock_aioclient() -> Generator[AiohttpClientMocker]: """Fixture to mock aioclient calls.""" with mock_aiohttp_client() as mock_session: yield mock_session @@ -96,7 +96,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock, None, None]: +def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock]: """Mock async_forward_entry_setup.""" with patch( "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups" @@ -105,7 +105,7 @@ def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock, None @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.iotty.async_setup_entry", return_value=True @@ -114,7 +114,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_iotty() -> Generator[None, MagicMock, None]: +def mock_iotty() -> Generator[MagicMock]: """Mock IottyProxy.""" with patch( "homeassistant.components.iotty.api.IottyProxy", autospec=True @@ -123,7 +123,7 @@ def mock_iotty() -> Generator[None, MagicMock, None]: @pytest.fixture -def mock_coordinator() -> Generator[None, MagicMock, None]: +def mock_coordinator() -> Generator[MagicMock]: """Mock IottyDataUpdateCoordinator.""" with patch( "homeassistant.components.iotty.coordinator.IottyDataUpdateCoordinator", @@ -133,7 +133,7 @@ def mock_coordinator() -> Generator[None, MagicMock, None]: @pytest.fixture -def mock_get_devices_nodevices() -> Generator[AsyncMock, None, None]: +def mock_get_devices_nodevices() -> Generator[AsyncMock]: """Mock for get_devices, returning two objects.""" with patch("iottycloud.cloudapi.CloudApi.get_devices") as mock_fn: @@ -141,7 +141,7 @@ def mock_get_devices_nodevices() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_get_devices_twolightswitches() -> Generator[AsyncMock, None, None]: +def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: """Mock for get_devices, returning two objects.""" with patch( @@ -151,7 +151,7 @@ def mock_get_devices_twolightswitches() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_command_fn() -> Generator[AsyncMock, None, None]: +def mock_command_fn() -> Generator[AsyncMock]: """Mock for command.""" with 
patch("iottycloud.cloudapi.CloudApi.command", return_value=None) as mock_fn: @@ -159,7 +159,7 @@ def mock_command_fn() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_get_status_filled_off() -> Generator[AsyncMock, None, None]: +def mock_get_status_filled_off() -> Generator[AsyncMock]: """Mock setting up a get_status.""" retval = {RESULT: {STATUS: STATUS_OFF}} @@ -170,7 +170,7 @@ def mock_get_status_filled_off() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_get_status_filled() -> Generator[AsyncMock, None, None]: +def mock_get_status_filled() -> Generator[AsyncMock]: """Mock setting up a get_status.""" retval = {RESULT: {STATUS: STATUS_ON}} diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index b6983074441..f489d7b7bb5 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -108,7 +108,7 @@ def mock_ble_device() -> Generator[MagicMock]: @pytest.fixture -def mock_pynecil() -> Generator[AsyncMock, None, None]: +def mock_pynecil() -> Generator[AsyncMock]: """Mock Pynecil library.""" with patch( "homeassistant.components.iron_os.Pynecil", autospec=True diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index c091040668c..781492987ee 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -22,7 +22,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None, None]: +async def sensor_only() -> AsyncGenerator[None]: """Enable only the number platform.""" with patch( "homeassistant.components.iron_os.PLATFORMS", diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py index 0c35193e400..2f79487a7fd 100644 --- a/tests/components/iron_os/test_sensor.py +++ b/tests/components/iron_os/test_sensor.py @@ -18,7 +18,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None, None]: +async def sensor_only() -> AsyncGenerator[None]: """Enable only the sensor platform.""" with patch( "homeassistant.components.iron_os.PLATFORMS", From 7670ad0a7217db5baf923af85e12b3fc07c1a630 Mon Sep 17 00:00:00 2001 From: Fabian <115155196+Fabiann2205@users.noreply.github.com> Date: Fri, 2 Aug 2024 12:19:55 +0200 Subject: [PATCH 0282/1635] Add device class (#123059) --- homeassistant/components/google_travel_time/sensor.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/google_travel_time/sensor.py b/homeassistant/components/google_travel_time/sensor.py index 6c45033eeb7..618dda50bd4 100644 --- a/homeassistant/components/google_travel_time/sensor.py +++ b/homeassistant/components/google_travel_time/sensor.py @@ -8,7 +8,11 @@ import logging from googlemaps import Client from googlemaps.distance_matrix import distance_matrix -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorStateClass, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, @@ -72,6 +76,8 @@ class GoogleTravelTimeSensor(SensorEntity): _attr_attribution = ATTRIBUTION _attr_native_unit_of_measurement = UnitOfTime.MINUTES + _attr_device_class = SensorDeviceClass.DURATION + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, 
config_entry, name, api_key, origin, destination, client): """Initialize the sensor.""" From 5446dd92a977615f3e83cdb9469c900f503a5824 Mon Sep 17 00:00:00 2001 From: "David F. Mulcahey" Date: Fri, 2 Aug 2024 06:22:36 -0400 Subject: [PATCH 0283/1635] Make ZHA load quirks earlier (#123027) --- homeassistant/components/zha/__init__.py | 4 +++- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/zha/test_repairs.py | 10 +++++----- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index 216261e3011..fc573b19ab1 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -117,6 +117,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ha_zha_data.config_entry = config_entry zha_lib_data: ZHAData = create_zha_config(hass, ha_zha_data) + zha_gateway = await Gateway.async_from_config(zha_lib_data) + # Load and cache device trigger information early device_registry = dr.async_get(hass) radio_mgr = ZhaRadioManager.from_config_entry(hass, config_entry) @@ -140,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b _LOGGER.debug("Trigger cache: %s", zha_lib_data.device_trigger_cache) try: - zha_gateway = await Gateway.async_from_config(zha_lib_data) + await zha_gateway.async_initialize() except NetworkSettingsInconsistent as exc: await warn_on_inconsistent_network_settings( hass, diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index d2d328cc84b..6e35339c53f 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.24"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.25"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index 1df7cc12072..602c03c879d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2989,7 +2989,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.24 +zha==0.0.25 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5831f7c23cf..623f0953721 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2363,7 +2363,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.24 +zha==0.0.25 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index 7f9b2b4a016..c2925161748 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -148,7 +148,7 @@ async def test_multipan_firmware_repair( autospec=True, ), patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), patch( @@ -199,7 +199,7 @@ async def test_multipan_firmware_no_repair_on_probe_failure( autospec=True, ), patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -236,7 +236,7 @@ async def test_multipan_firmware_retry_on_probe_ezsp( autospec=True, ), patch( - 
"homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -311,7 +311,7 @@ async def test_inconsistent_settings_keep_new( old_state = network_backup with patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, @@ -390,7 +390,7 @@ async def test_inconsistent_settings_restore_old( old_state = network_backup with patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, From ad26db7dc8d72203c79c5a0b64362be06eec3b99 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 2 Aug 2024 12:24:03 +0200 Subject: [PATCH 0284/1635] Replace pylint broad-exception-raised rule with ruff (#123021) --- homeassistant/components/fritz/coordinator.py | 3 +-- homeassistant/components/starline/config_flow.py | 3 +-- pyproject.toml | 2 +- .../templates/config_flow_helper/tests/test_config_flow.py | 2 +- .../components/bluetooth/test_passive_update_processor.py | 6 ++---- tests/components/notify/test_legacy.py | 2 +- tests/components/profiler/test_init.py | 2 +- tests/components/roon/test_config_flow.py | 2 +- tests/components/stt/test_legacy.py | 2 +- tests/components/system_health/test_init.py | 2 +- tests/components/system_log/test_init.py | 2 +- tests/components/tts/test_init.py | 4 ++-- tests/components/tts/test_legacy.py | 2 +- tests/helpers/test_dispatcher.py | 6 ++---- tests/test_config_entries.py | 2 +- tests/test_core.py | 4 ++-- tests/test_runner.py | 5 ++--- tests/test_setup.py | 7 +++---- tests/util/test_logging.py | 3 +-- 19 files changed, 26 insertions(+), 35 deletions(-) diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index 592bf37084e..a67f385f3e8 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -568,8 +568,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): self.fritz_hosts.get_mesh_topology ) ): - # pylint: disable-next=broad-exception-raised - raise Exception("Mesh supported but empty topology reported") + raise Exception("Mesh supported but empty topology reported") # noqa: TRY002 except FritzActionError: self.mesh_role = MeshRoles.SLAVE # Avoid duplicating device trackers diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index c13586d0bc3..6c38603a843 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -214,8 +214,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._captcha_image = data["captchaImg"] return self._async_form_auth_captcha(error) - # pylint: disable=broad-exception-raised - raise Exception(data) + raise Exception(data) # noqa: TRY002 except Exception as err: # noqa: BLE001 _LOGGER.error("Error auth user: %s", err) return self._async_form_auth_user(ERROR_AUTH_USER) diff --git a/pyproject.toml b/pyproject.toml index eadd529b3e6..eb4bfc4970d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -312,6 +312,7 @@ disable = [ "no-else-return", # RET505 "broad-except", # BLE001 "protected-access", # SLF001 + 
"broad-exception-raised", # TRY002 # "no-self-use", # PLR6301 # Optional plugin, not enabled # Handled by mypy @@ -823,7 +824,6 @@ ignore = [ "PYI024", # Use typing.NamedTuple instead of collections.namedtuple "RET503", "RET501", - "TRY002", "TRY301" ] diff --git a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py index 809902fa0dd..8e7854835d8 100644 --- a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py +++ b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py @@ -59,7 +59,7 @@ def get_suggested(schema, key): return None return k.description["suggested_value"] # Wanted key absent from schema - raise Exception + raise KeyError(f"Key `{key}` is missing from schema") @pytest.mark.parametrize("platform", ["sensor"]) diff --git a/tests/components/bluetooth/test_passive_update_processor.py b/tests/components/bluetooth/test_passive_update_processor.py index 079ac2200fc..d7a7a8ba08c 100644 --- a/tests/components/bluetooth/test_passive_update_processor.py +++ b/tests/components/bluetooth/test_passive_update_processor.py @@ -583,8 +583,7 @@ async def test_exception_from_update_method( nonlocal run_count run_count += 1 if run_count == 2: - # pylint: disable-next=broad-exception-raised - raise Exception("Test exception") + raise Exception("Test exception") # noqa: TRY002 return GENERIC_PASSIVE_BLUETOOTH_DATA_UPDATE coordinator = PassiveBluetoothProcessorCoordinator( @@ -1418,8 +1417,7 @@ async def test_exception_from_coordinator_update_method( nonlocal run_count run_count += 1 if run_count == 2: - # pylint: disable-next=broad-exception-raised - raise Exception("Test exception") + raise Exception("Test exception") # noqa: TRY002 return {"test": "data"} @callback diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index b499486b312..8be80650053 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -265,7 +265,7 @@ async def test_platform_setup_with_error( async def async_get_service(hass, config, discovery_info=None): """Return None for an invalid notify service.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_notify_platform( hass, tmp_path, "testnotify", async_get_service=async_get_service diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 2eca84b43fe..b172ae0e12d 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -181,7 +181,7 @@ async def test_dump_log_object( def __repr__(self): if self.fail: - raise Exception("failed") # pylint: disable=broad-exception-raised + raise Exception("failed") # noqa: TRY002 return "" obj1 = DumpLogDummy(False) diff --git a/tests/components/roon/test_config_flow.py b/tests/components/roon/test_config_flow.py index 9822c88fa48..9539a9c0f5b 100644 --- a/tests/components/roon/test_config_flow.py +++ b/tests/components/roon/test_config_flow.py @@ -48,7 +48,7 @@ class RoonApiMockException(RoonApiMock): @property def token(self): """Throw exception.""" - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 class RoonDiscoveryMock: diff --git a/tests/components/stt/test_legacy.py b/tests/components/stt/test_legacy.py index 04068b012f1..20fa86b4d20 100644 --- a/tests/components/stt/test_legacy.py +++ b/tests/components/stt/test_legacy.py @@ -41,7 
+41,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_stt_platform(hass, tmp_path, "bad_stt", async_get_engine=async_get_engine) diff --git a/tests/components/system_health/test_init.py b/tests/components/system_health/test_init.py index e51ab8fab99..b93dccffb92 100644 --- a/tests/components/system_health/test_init.py +++ b/tests/components/system_health/test_init.py @@ -110,7 +110,7 @@ async def test_info_endpoint_register_callback_exc( """Test that the info endpoint requires auth.""" async def mock_info(hass): - raise Exception("TEST ERROR") # pylint: disable=broad-exception-raised + raise Exception("TEST ERROR") # noqa: TRY002 async_register_info(hass, "lovelace", mock_info) assert await async_setup_component(hass, "system_health", {}) diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index fb46d120acf..1f1c4464c71 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -36,7 +36,7 @@ async def get_error_log(hass_ws_client): def _generate_and_log_exception(exception, log): try: - raise Exception(exception) # pylint: disable=broad-exception-raised + raise Exception(exception) # noqa: TRY002 except Exception: _LOGGER.exception(log) diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index bf44f120134..7a54ecc26b0 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1016,7 +1016,7 @@ class MockProviderBoom(MockProvider): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # pylint: disable=broad-exception-raised + raise Exception("Boom!") # noqa: TRY002 class MockEntityBoom(MockTTSEntity): @@ -1027,7 +1027,7 @@ class MockEntityBoom(MockTTSEntity): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # pylint: disable=broad-exception-raised + raise Exception("Boom!") # noqa: TRY002 @pytest.mark.parametrize("mock_provider", [MockProviderBoom(DEFAULT_LANG)]) diff --git a/tests/components/tts/test_legacy.py b/tests/components/tts/test_legacy.py index 05bb6dec10f..0d7f99e8cd1 100644 --- a/tests/components/tts/test_legacy.py +++ b/tests/components/tts/test_legacy.py @@ -123,7 +123,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_integration(hass, MockModule(domain="bad_tts")) mock_platform(hass, "bad_tts.tts", BadPlatform(mock_provider)) diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index c2c8663f47c..0350b2e6e3a 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -188,8 +188,7 @@ async def test_callback_exception_gets_logged( @callback def bad_handler(*args): """Record calls.""" - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad message callback") + raise Exception("This is a bad message callback") # noqa: TRY002 # wrap in partial to test message logging. 
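The pattern repeated throughout this patch: the per-line pylint pragma is swapped for ruff's equivalent rule code, since pyproject.toml now disables pylint's broad-exception-raised and stops ignoring TRY002 globally. A before/after sketch; the helper below is illustrative, not one of the touched tests.

def bad_handler(*args) -> None:
    """Deliberately raise a bare Exception to exercise error logging."""
    # Before: raise Exception("boom")  # pylint: disable=broad-exception-raised
    raise Exception("boom")  # noqa: TRY002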
async_dispatcher_connect(hass, "test", partial(bad_handler)) @@ -209,8 +208,7 @@ async def test_coro_exception_gets_logged( async def bad_async_handler(*args): """Record calls.""" - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad message in a coro") + raise Exception("This is a bad message in a coro") # noqa: TRY002 # wrap in partial to test message logging. async_dispatcher_connect(hass, "test", bad_async_handler) diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 2a5dff5c14a..9983886ce44 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -584,7 +584,7 @@ async def test_remove_entry_raises( async def mock_unload_entry(hass, entry): """Mock unload entry function.""" - raise Exception("BROKEN") # pylint: disable=broad-exception-raised + raise Exception("BROKEN") # noqa: TRY002 mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry)) diff --git a/tests/test_core.py b/tests/test_core.py index 8035236fd08..9ca57d1563f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -424,11 +424,11 @@ async def test_async_get_hass_can_be_called(hass: HomeAssistant) -> None: try: if ha.async_get_hass() is hass: return True - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 except HomeAssistantError: return False - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 # Test scheduling a coroutine which calls async_get_hass via hass.async_create_task async def _async_create_task() -> None: diff --git a/tests/test_runner.py b/tests/test_runner.py index 141af4f4bc7..c61b8ed5628 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -105,7 +105,7 @@ def test_run_does_not_block_forever_with_shielded_task( try: await asyncio.sleep(2) except asyncio.CancelledError: - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 async def async_shielded(*_): try: @@ -142,8 +142,7 @@ async def test_unhandled_exception_traceback( async def _unhandled_exception(): raised.set() - # pylint: disable-next=broad-exception-raised - raise Exception("This is unhandled") + raise Exception("This is unhandled") # noqa: TRY002 try: hass.loop.set_debug(True) diff --git a/tests/test_setup.py b/tests/test_setup.py index 3430c17960c..fae2b46f18d 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -338,7 +338,7 @@ async def test_component_exception_setup(hass: HomeAssistant) -> None: def exception_setup(hass, config): """Raise exception.""" - raise Exception("fail!") # pylint: disable=broad-exception-raised + raise Exception("fail!") # noqa: TRY002 mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -352,7 +352,7 @@ async def test_component_base_exception_setup(hass: HomeAssistant) -> None: def exception_setup(hass, config): """Raise exception.""" - raise BaseException("fail!") # pylint: disable=broad-exception-raised + raise BaseException("fail!") # noqa: TRY002 mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -372,8 +372,7 @@ async def test_component_setup_with_validation_and_dependency( """Test that config is passed in.""" if config.get("comp_a", {}).get("valid", False): return True - # pylint: disable-next=broad-exception-raised - raise Exception(f"Config not passed in: {config}") + raise Exception(f"Config not passed in: {config}") # noqa: TRY002 platform = MockPlatform() diff --git a/tests/util/test_logging.py b/tests/util/test_logging.py index 
4667dbcbec8..795444c89bd 100644 --- a/tests/util/test_logging.py +++ b/tests/util/test_logging.py @@ -80,8 +80,7 @@ async def test_async_create_catching_coro( """Test exception logging of wrapped coroutine.""" async def job(): - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad coroutine") + raise Exception("This is a bad coroutine") # noqa: TRY002 hass.async_create_task(logging_util.async_create_catching_coro(job())) await hass.async_block_till_done() From 4a06e20318ba50fddb4be1f3ea20a988075622f6 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 2 Aug 2024 12:31:31 +0200 Subject: [PATCH 0285/1635] Ollama implement CONTROL supported feature (#123049) --- .../components/ollama/conversation.py | 18 +++++++++++++ tests/components/ollama/test_conversation.py | 25 ++++++++++++++++++- 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index f59e268394b..9f66083f506 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -106,6 +106,10 @@ class OllamaConversationEntity( self._history: dict[str, MessageHistory] = {} self._attr_name = entry.title self._attr_unique_id = entry.entry_id + if self.entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) async def async_added_to_hass(self) -> None: """When entity is added to Home Assistant.""" @@ -114,6 +118,9 @@ class OllamaConversationEntity( self.hass, "conversation", self.entry.entry_id, self.entity_id ) conversation.async_set_agent(self.hass, self.entry, self) + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) async def async_will_remove_from_hass(self) -> None: """When entity will be removed from Home Assistant.""" @@ -334,3 +341,14 @@ class OllamaConversationEntity( message_history.messages = [ message_history.messages[0] ] + message_history.messages[drop_index:] + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + if entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + else: + self._attr_supported_features = conversation.ConversationEntityFeature(0) diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index b5a94cc6f57..c83dce3b565 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components import conversation, ollama from homeassistant.components.conversation import trace -from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -554,3 +554,26 @@ async def test_conversation_agent( mock_config_entry.entry_id ) assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 + + +async def test_conversation_agent_with_assist( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test 
OllamaConversationEntity.""" + agent = conversation.get_agent_manager(hass).async_get_agent( + mock_config_entry_with_assist.entry_id + ) + assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == conversation.ConversationEntityFeature.CONTROL + ) From 42234e6a09b67bf95f46ceb2690040ed55ef5deb Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 2 Aug 2024 12:53:39 +0200 Subject: [PATCH 0286/1635] Address post-merge reviews for KNX integration (#123038) --- homeassistant/components/knx/__init__.py | 2 +- homeassistant/components/knx/binary_sensor.py | 13 +++---- homeassistant/components/knx/button.py | 14 ++++---- homeassistant/components/knx/climate.py | 14 +++++--- homeassistant/components/knx/cover.py | 13 +++---- homeassistant/components/knx/date.py | 14 +++++--- homeassistant/components/knx/datetime.py | 12 ++++--- homeassistant/components/knx/fan.py | 13 +++---- homeassistant/components/knx/knx_entity.py | 36 ++++++++++++++++--- homeassistant/components/knx/light.py | 30 ++++++++-------- homeassistant/components/knx/notify.py | 14 +++++--- homeassistant/components/knx/number.py | 12 ++++--- homeassistant/components/knx/project.py | 6 ++-- homeassistant/components/knx/scene.py | 13 +++---- homeassistant/components/knx/select.py | 12 ++++--- homeassistant/components/knx/sensor.py | 17 +++++---- .../components/knx/storage/config_store.py | 28 ++++++++------- homeassistant/components/knx/switch.py | 30 ++++++++-------- homeassistant/components/knx/text.py | 12 ++++--- homeassistant/components/knx/time.py | 17 +++++---- homeassistant/components/knx/weather.py | 14 +++++--- 21 files changed, 211 insertions(+), 125 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index 709a82b31fd..fd46cad8489 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -302,7 +302,7 @@ class KNXModule: self.entry = entry self.project = KNXProject(hass=hass, entry=entry) - self.config_store = KNXConfigStore(hass=hass, entry=entry) + self.config_store = KNXConfigStore(hass=hass, config_entry=entry) self.xknx = XKNX( connection_config=self.connection_config(), diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index 0423c1d7b32..ff15f725fae 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from xknx import XKNX from xknx.devices import BinarySensor as XknxBinarySensor from homeassistant import config_entries @@ -23,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import ATTR_COUNTER, ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxEntity from .schema import BinarySensorSchema @@ -34,11 +34,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: ConfigType = hass.data[DATA_KNX_CONFIG] async_add_entities( - KNXBinarySensor(xknx, entity_config) + KNXBinarySensor(knx_module, entity_config) for entity_config in config[Platform.BINARY_SENSOR] ) @@ -48,11 +48,12 @@ class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): _device: XknxBinarySensor - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX binary sensor.""" super().__init__( + knx_module=knx_module, device=XknxBinarySensor( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address_state=config[BinarySensorSchema.CONF_STATE_ADDRESS], invert=config[BinarySensorSchema.CONF_INVERT], @@ -62,7 +63,7 @@ class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): ], context_timeout=config.get(BinarySensorSchema.CONF_CONTEXT_TIMEOUT), reset_after=config.get(BinarySensorSchema.CONF_RESET_AFTER), - ) + ), ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_device_class = config.get(CONF_DEVICE_CLASS) diff --git a/homeassistant/components/knx/button.py b/homeassistant/components/knx/button.py index a38d8ad1b6c..2eb68eebe43 100644 --- a/homeassistant/components/knx/button.py +++ b/homeassistant/components/knx/button.py @@ -2,7 +2,6 @@ from __future__ import annotations -from xknx import XKNX from xknx.devices import RawValue as XknxRawValue from homeassistant import config_entries @@ -12,6 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import CONF_PAYLOAD_LENGTH, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity @@ -22,11 +22,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: ConfigType = hass.data[DATA_KNX_CONFIG] async_add_entities( - KNXButton(xknx, entity_config) for entity_config in config[Platform.BUTTON] + KNXButton(knx_module, entity_config) + for entity_config in config[Platform.BUTTON] ) @@ -35,15 +36,16 @@ class KNXButton(KnxEntity, ButtonEntity): _device: XknxRawValue - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX button.""" super().__init__( + knx_module=knx_module, device=XknxRawValue( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], payload_length=config[CONF_PAYLOAD_LENGTH], group_address=config[KNX_ADDRESS], - ) + ), ) self._payload = config[CONF_PAYLOAD] self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 26be6a03a79..7470d60ef4b 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, @@ -48,10 +49,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up climate(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.CLIMATE] - async_add_entities(KNXClimate(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXClimate(knx_module, entity_config) for entity_config in config + ) def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: @@ -137,9 +140,12 @@ class KNXClimate(KnxEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _enable_turn_on_off_backwards_compatibility = False - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX climate device.""" - super().__init__(_create_climate(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_climate(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE if self._device.supports_on_off: diff --git a/homeassistant/components/knx/cover.py b/homeassistant/components/knx/cover.py index 9d86d6ac272..1962db0ad3f 100644 --- a/homeassistant/components/knx/cover.py +++ b/homeassistant/components/knx/cover.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable from typing import Any -from xknx import XKNX from xknx.devices import Cover as XknxCover from homeassistant import config_entries @@ -26,6 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxEntity from .schema import CoverSchema @@ -37,10 +37,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up cover(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.COVER] - async_add_entities(KNXCover(xknx, entity_config) for entity_config in config) + async_add_entities(KNXCover(knx_module, entity_config) for entity_config in config) class KNXCover(KnxEntity, CoverEntity): @@ -48,11 +48,12 @@ class KNXCover(KnxEntity, CoverEntity): _device: XknxCover - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize the cover.""" super().__init__( + knx_module=knx_module, device=XknxCover( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS), group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS), @@ -70,7 +71,7 @@ class KNXCover(KnxEntity, CoverEntity): invert_updown=config[CoverSchema.CONF_INVERT_UPDOWN], invert_position=config[CoverSchema.CONF_INVERT_POSITION], invert_angle=config[CoverSchema.CONF_INVERT_ANGLE], - ) + ), ) self._unsubscribe_auto_updater: Callable[[], None] | None = None diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index 98cd22e0751..80fea63d0a6 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -22,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -39,10 +40,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATE] - async_add_entities(KNXDateEntity(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXDateEntity(knx_module, entity_config) for entity_config in config + ) def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice: @@ -63,9 +66,12 @@ class KNXDateEntity(KnxEntity, DateEntity, RestoreEntity): _device: XknxDateDevice - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX time.""" - super().__init__(_create_xknx_device(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_xknx_device(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index d4a25b522eb..16ccb7474a7 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -23,6 +23,7 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util +from . 
import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -40,11 +41,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATETIME] async_add_entities( - KNXDateTimeEntity(xknx, entity_config) for entity_config in config + KNXDateTimeEntity(knx_module, entity_config) for entity_config in config ) @@ -66,9 +67,12 @@ class KNXDateTimeEntity(KnxEntity, DateTimeEntity, RestoreEntity): _device: XknxDateTimeDevice - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX time.""" - super().__init__(_create_xknx_device(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_xknx_device(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index 426a750f766..940e241ccda 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -5,7 +5,6 @@ from __future__ import annotations import math from typing import Any, Final -from xknx import XKNX from xknx.devices import Fan as XknxFan from homeassistant import config_entries @@ -20,6 +19,7 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range +from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity from .schema import FanSchema @@ -33,10 +33,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up fan(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.FAN] - async_add_entities(KNXFan(xknx, entity_config) for entity_config in config) + async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config) class KNXFan(KnxEntity, FanEntity): @@ -45,12 +45,13 @@ class KNXFan(KnxEntity, FanEntity): _device: XknxFan _enable_turn_on_off_backwards_compatibility = False - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX fan.""" max_step = config.get(FanSchema.CONF_MAX_STEP) super().__init__( + knx_module=knx_module, device=XknxFan( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address_speed=config.get(KNX_ADDRESS), group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS), @@ -61,7 +62,7 @@ class KNXFan(KnxEntity, FanEntity): FanSchema.CONF_OSCILLATION_STATE_ADDRESS ), max_step=max_step, - ) + ), ) # FanSpeedMode.STEP if max_step is set self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None diff --git a/homeassistant/components/knx/knx_entity.py b/homeassistant/components/knx/knx_entity.py index eebddbb0623..2b8d2e71186 100644 --- a/homeassistant/components/knx/knx_entity.py +++ b/homeassistant/components/knx/knx_entity.py @@ -2,23 +2,29 @@ from __future__ import annotations -from typing import cast +from typing import TYPE_CHECKING from xknx.devices import Device as XknxDevice +from homeassistant.helpers.dispatcher import async_dispatcher_connect from 
homeassistant.helpers.entity import Entity -from . import KNXModule from .const import DOMAIN +if TYPE_CHECKING: + from . import KNXModule + +SIGNAL_ENTITY_REMOVE = f"{DOMAIN}_entity_remove_signal.{{}}" + class KnxEntity(Entity): """Representation of a KNX entity.""" _attr_should_poll = False - def __init__(self, device: XknxDevice) -> None: + def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None: """Set up device.""" + self._knx_module = knx_module self._device = device @property @@ -29,8 +35,7 @@ class KnxEntity(Entity): @property def available(self) -> bool: """Return True if entity is available.""" - knx_module = cast(KNXModule, self.hass.data[DOMAIN]) - return knx_module.connected + return self._knx_module.connected async def async_update(self) -> None: """Request a state update from KNX bus.""" @@ -44,8 +49,29 @@ class KnxEntity(Entity): """Store register state change callback and start device object.""" self._device.register_device_updated_cb(self.after_update_callback) self._device.xknx.devices.async_add(self._device) + # super call needed to have methods of multi-inherited classes called + # e.g. for restoring state (like _KNXSwitch) + await super().async_added_to_hass() async def async_will_remove_from_hass(self) -> None: """Disconnect device object when removed.""" self._device.unregister_device_updated_cb(self.after_update_callback) self._device.xknx.devices.async_remove(self._device) + + +class KnxUIEntity(KnxEntity): + """Representation of a KNX UI entity.""" + + _attr_unique_id: str + + async def async_added_to_hass(self) -> None: + """Register callbacks when entity added to hass.""" + await super().async_added_to_hass() + self._knx_module.config_store.entities.add(self._attr_unique_id) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + SIGNAL_ENTITY_REMOVE.format(self._attr_unique_id), + self.async_remove, + ) + ) diff --git a/homeassistant/components/knx/light.py index 8ec42f3ee56..1197f09354b 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -27,7 +27,7 @@ import homeassistant.util.color as color_util from .
import KNXModule from .const import CONF_SYNC_STATE, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, ColorTempModes -from .knx_entity import KnxEntity +from .knx_entity import KnxEntity, KnxUIEntity from .schema import LightSchema from .storage.const import ( CONF_COLOR_TEMP_MAX, @@ -65,10 +65,10 @@ async def async_setup_entry( knx_module: KNXModule = hass.data[DOMAIN] entities: list[KnxEntity] = [] - if yaml_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): + if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): entities.extend( - KnxYamlLight(knx_module.xknx, entity_config) - for entity_config in yaml_config + KnxYamlLight(knx_module, entity_config) + for entity_config in yaml_platform_config ) if ui_config := knx_module.config_store.data["entities"].get(Platform.LIGHT): entities.extend( @@ -294,7 +294,7 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight ) -class _KnxLight(KnxEntity, LightEntity): +class _KnxLight(LightEntity): """Representation of a KNX light.""" _attr_max_color_temp_kelvin: int @@ -519,14 +519,17 @@ class _KnxLight(KnxEntity, LightEntity): await self._device.set_off() -class KnxYamlLight(_KnxLight): +class KnxYamlLight(_KnxLight, KnxEntity): """Representation of a KNX light.""" _device: XknxLight - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX light.""" - super().__init__(_create_yaml_light(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_yaml_light(knx_module.xknx, config), + ) self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN] self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN] self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) @@ -543,20 +546,21 @@ class KnxYamlLight(_KnxLight): ) -class KnxUiLight(_KnxLight): +class KnxUiLight(_KnxLight, KnxUIEntity): """Representation of a KNX light.""" - _device: XknxLight _attr_has_entity_name = True + _device: XknxLight def __init__( self, knx_module: KNXModule, unique_id: str, config: ConfigType ) -> None: """Initialize of KNX light.""" super().__init__( - _create_ui_light( + knx_module=knx_module, + device=_create_ui_light( knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] - ) + ), ) self._attr_max_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MAX] self._attr_min_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MIN] @@ -565,5 +569,3 @@ class KnxUiLight(_KnxLight): self._attr_unique_id = unique_id if device_info := config[CONF_ENTITY].get(CONF_DEVICE_INFO): self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) - - knx_module.config_store.entities[unique_id] = self diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 997bdb81057..b349681990c 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -18,6 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity @@ -44,7 +45,7 @@ async def async_get_service( class KNXNotificationService(BaseNotificationService): - """Implement demo notification service.""" + """Implement notification service.""" def __init__(self, devices: list[XknxNotification]) -> None: """Initialize the service.""" @@ -86,10 +87,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up notify(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NOTIFY] - async_add_entities(KNXNotify(xknx, entity_config) for entity_config in config) + async_add_entities(KNXNotify(knx_module, entity_config) for entity_config in config) def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotification: @@ -107,9 +108,12 @@ class KNXNotify(KnxEntity, NotifyEntity): _device: XknxNotification - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX notification.""" - super().__init__(_create_notification_instance(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_notification_instance(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/number.py b/homeassistant/components/knx/number.py index 8a9f1dea87c..3d4af503dff 100644 --- a/homeassistant/components/knx/number.py +++ b/homeassistant/components/knx/number.py @@ -22,6 +22,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -39,10 +40,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NUMBER] - async_add_entities(KNXNumber(xknx, entity_config) for entity_config in config) + async_add_entities(KNXNumber(knx_module, entity_config) for entity_config in config) def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue: @@ -62,9 +63,12 @@ class KNXNumber(KnxEntity, RestoreNumber): _device: NumericValue - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX number.""" - super().__init__(_create_numeric_value(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_numeric_value(knx_module.xknx, config), + ) self._attr_native_max_value = config.get( NumberSchema.CONF_MAX, self._device.sensor_value.dpt_class.value_max, diff --git a/homeassistant/components/knx/project.py b/homeassistant/components/knx/project.py index 3b3309dfc7d..b5bafe00724 100644 --- a/homeassistant/components/knx/project.py +++ b/homeassistant/components/knx/project.py @@ -8,9 +8,11 @@ from typing import Final from xknx import XKNX from xknx.dpt import DPTBase +from xknx.telegram.address import DeviceAddressableType from xknxproject import XKNXProj from xknxproject.models import ( Device, + DPTType, GroupAddress as GroupAddressModel, KNXProject as KNXProjectModel, ProjectInfo, @@ -89,7 +91,7 @@ class KNXProject: self.devices = project["devices"] self.info = project["info"] xknx.group_address_dpt.clear() - xknx_ga_dict = {} + xknx_ga_dict: dict[DeviceAddressableType, DPTType] = {} for ga_model in project["group_addresses"].values(): ga_info = _create_group_address_info(ga_model) @@ -97,7 +99,7 @@ class KNXProject: if (dpt_model := ga_model.get("dpt")) is not None: xknx_ga_dict[ga_model["address"]] = dpt_model - xknx.group_address_dpt.set(xknx_ga_dict) # type: ignore[arg-type] + xknx.group_address_dpt.set(xknx_ga_dict) _LOGGER.debug( "Loaded KNX project data with %s group addresses from storage", diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index 342d0f9eb83..fc37f36dd01 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from xknx import XKNX from xknx.devices import Scene as XknxScene from homeassistant import config_entries @@ -14,6 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity from .schema import SceneSchema @@ -25,10 +25,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up scene(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SCENE] - async_add_entities(KNXScene(xknx, entity_config) for entity_config in config) + async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) class KNXScene(KnxEntity, Scene): @@ -36,15 +36,16 @@ class KNXScene(KnxEntity, Scene): _device: XknxScene - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Init KNX scene.""" super().__init__( + knx_module=knx_module, device=XknxScene( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address=config[KNX_ADDRESS], scene_number=config[SceneSchema.CONF_SCENE_NUMBER], - ) + ), ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = ( diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index f338bf9feaf..1b862010c2a 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -20,6 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONF_PAYLOAD_LENGTH, CONF_RESPOND_TO_READ, @@ -39,10 +40,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SELECT] - async_add_entities(KNXSelect(xknx, entity_config) for entity_config in config) + async_add_entities(KNXSelect(knx_module, entity_config) for entity_config in config) def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue: @@ -63,9 +64,12 @@ class KNXSelect(KnxEntity, SelectEntity, RestoreEntity): _device: RawValue - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX select.""" - super().__init__(_create_raw_value(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_raw_value(knx_module.xknx, config), + ) self._option_payloads: dict[str, int] = { option[SelectSchema.CONF_OPTION]: option[CONF_PAYLOAD] for option in config[SelectSchema.CONF_OPTIONS] diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index 5a09a921901..ab363e2a35f 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -116,17 +116,17 @@ async def async_setup_entry( ) -> None: """Set up sensor(s) for KNX platform.""" knx_module: KNXModule = hass.data[DOMAIN] - - async_add_entities( + entities: list[SensorEntity] = [] + entities.extend( KNXSystemSensor(knx_module, description) for description in SYSTEM_ENTITY_DESCRIPTIONS ) - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG].get(Platform.SENSOR) if config: - async_add_entities( - KNXSensor(knx_module.xknx, entity_config) for entity_config in config + entities.extend( + KNXSensor(knx_module, entity_config) for 
entity_config in config ) + async_add_entities(entities) def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor: @@ -146,9 +146,12 @@ class KNXSensor(KnxEntity, SensorEntity): _device: XknxSensor - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX sensor.""" - super().__init__(_create_sensor(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_sensor(knx_module.xknx, config), + ) if device_class := config.get(CONF_DEVICE_CLASS): self._attr_device_class = device_class else: diff --git a/homeassistant/components/knx/storage/config_store.py b/homeassistant/components/knx/storage/config_store.py index 7ea61e1dd3e..876fe19a4b9 100644 --- a/homeassistant/components/knx/storage/config_store.py +++ b/homeassistant/components/knx/storage/config_store.py @@ -2,21 +2,20 @@ from collections.abc import Callable import logging -from typing import TYPE_CHECKING, Any, Final, TypedDict +from typing import Any, Final, TypedDict from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PLATFORM, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.storage import Store from homeassistant.util.ulid import ulid_now from ..const import DOMAIN +from ..knx_entity import SIGNAL_ENTITY_REMOVE from .const import CONF_DATA -if TYPE_CHECKING: - from ..knx_entity import KnxEntity - _LOGGER = logging.getLogger(__name__) STORAGE_VERSION: Final = 1 @@ -40,15 +39,16 @@ class KNXConfigStore: def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + config_entry: ConfigEntry, ) -> None: """Initialize config store.""" self.hass = hass + self.config_entry = config_entry self._store = Store[KNXConfigStoreModel](hass, STORAGE_VERSION, STORAGE_KEY) self.data = KNXConfigStoreModel(entities={}) - # entities and async_add_entity are filled by platform setups - self.entities: dict[str, KnxEntity] = {} # unique_id as key + # entities and async_add_entity are filled by platform / entity setups + self.entities: set[str] = set() # unique_id as values self.async_add_entity: dict[ Platform, Callable[[str, dict[str, Any]], None] ] = {} @@ -108,7 +108,7 @@ class KNXConfigStore: raise ConfigStoreException( f"Entity not found in storage: {entity_id} - {unique_id}" ) - await self.entities.pop(unique_id).async_remove() + async_dispatcher_send(self.hass, SIGNAL_ENTITY_REMOVE.format(unique_id)) self.async_add_entity[platform](unique_id, data) # store data after entity is added to make sure config doesn't raise exceptions self.data["entities"][platform][unique_id] = data @@ -126,7 +126,7 @@ class KNXConfigStore: f"Entity not found in {entry.domain}: {entry.unique_id}" ) from err try: - del self.entities[entry.unique_id] + self.entities.remove(entry.unique_id) except KeyError: _LOGGER.warning("Entity not initialized when deleted: %s", entity_id) entity_registry.async_remove(entity_id) @@ -134,10 +134,14 @@ class KNXConfigStore: def get_entity_entries(self) -> list[er.RegistryEntry]: """Get entity_ids of all configured entities by platform.""" + entity_registry = er.async_get(self.hass) + return [ - entity.registry_entry - for entity in self.entities.values() - if entity.registry_entry is not None + registry_entry + for registry_entry in er.async_entries_for_config_entry( + entity_registry, self.config_entry.entry_id 
+ ) + if registry_entry.unique_id in self.entities ] diff --git a/homeassistant/components/knx/switch.py b/homeassistant/components/knx/switch.py index 0a8a1dff964..a5f430e6157 100644 --- a/homeassistant/components/knx/switch.py +++ b/homeassistant/components/knx/switch.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from xknx import XKNX from xknx.devices import Switch as XknxSwitch from homeassistant import config_entries @@ -33,7 +32,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxEntity, KnxUIEntity from .schema import SwitchSchema from .storage.const import ( CONF_DEVICE_INFO, @@ -54,10 +53,10 @@ async def async_setup_entry( knx_module: KNXModule = hass.data[DOMAIN] entities: list[KnxEntity] = [] - if yaml_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): + if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): entities.extend( - KnxYamlSwitch(knx_module.xknx, entity_config) - for entity_config in yaml_config + KnxYamlSwitch(knx_module, entity_config) + for entity_config in yaml_platform_config ) if ui_config := knx_module.config_store.data["entities"].get(Platform.SWITCH): entities.extend( @@ -75,7 +74,7 @@ async def async_setup_entry( knx_module.config_store.async_add_entity[Platform.SWITCH] = add_new_ui_switch -class _KnxSwitch(KnxEntity, SwitchEntity, RestoreEntity): +class _KnxSwitch(SwitchEntity, RestoreEntity): """Base class for a KNX switch.""" _device: XknxSwitch @@ -103,36 +102,41 @@ class _KnxSwitch(KnxEntity, SwitchEntity, RestoreEntity): await self._device.set_off() -class KnxYamlSwitch(_KnxSwitch): +class KnxYamlSwitch(_KnxSwitch, KnxEntity): """Representation of a KNX switch configured from YAML.""" - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + _device: XknxSwitch + + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX switch.""" super().__init__( + knx_module=knx_module, device=XknxSwitch( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address=config[KNX_ADDRESS], group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS), respond_to_read=config[CONF_RESPOND_TO_READ], invert=config[SwitchSchema.CONF_INVERT], - ) + ), ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._attr_unique_id = str(self._device.switch.group_address) -class KnxUiSwitch(_KnxSwitch): +class KnxUiSwitch(_KnxSwitch, KnxUIEntity): """Representation of a KNX switch configured from UI.""" _attr_has_entity_name = True + _device: XknxSwitch def __init__( self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] ) -> None: """Initialize of KNX switch.""" super().__init__( + knx_module=knx_module, device=XknxSwitch( knx_module.xknx, name=config[CONF_ENTITY][CONF_NAME], @@ -144,11 +148,9 @@ class KnxUiSwitch(_KnxSwitch): respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ], sync_state=config[DOMAIN][CONF_SYNC_STATE], invert=config[DOMAIN][CONF_INVERT], - ) + ), ) self._attr_entity_category = config[CONF_ENTITY][CONF_ENTITY_CATEGORY] self._attr_unique_id = unique_id if device_info := config[CONF_ENTITY].get(CONF_DEVICE_INFO): self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) - - knx_module.config_store.entities[unique_id] = self diff --git a/homeassistant/components/knx/text.py b/homeassistant/components/knx/text.py index 22d008cd5ce..9bca37434ac 100644 --- a/homeassistant/components/knx/text.py +++ 
b/homeassistant/components/knx/text.py @@ -22,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -38,10 +39,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TEXT] - async_add_entities(KNXText(xknx, entity_config) for entity_config in config) + async_add_entities(KNXText(knx_module, entity_config) for entity_config in config) def _create_notification(xknx: XKNX, config: ConfigType) -> XknxNotification: @@ -62,9 +63,12 @@ class KNXText(KnxEntity, TextEntity, RestoreEntity): _device: XknxNotification _attr_native_max = 14 - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX text.""" - super().__init__(_create_notification(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_notification(knx_module.xknx, config), + ) self._attr_mode = config[CONF_MODE] self._attr_pattern = ( r"[\u0000-\u00ff]*" # Latin-1 diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index 28e1419233c..5d9225a1e41 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -3,7 +3,6 @@ from __future__ import annotations from datetime import time as dt_time -from typing import Final from xknx import XKNX from xknx.devices import TimeDevice as XknxTimeDevice @@ -23,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -33,8 +33,6 @@ from .const import ( ) from .knx_entity import KnxEntity -_TIME_TRANSLATION_FORMAT: Final = "%H:%M:%S" - async def async_setup_entry( hass: HomeAssistant, @@ -42,10 +40,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TIME] - async_add_entities(KNXTimeEntity(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXTimeEntity(knx_module, entity_config) for entity_config in config + ) def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice: @@ -66,9 +66,12 @@ class KNXTimeEntity(KnxEntity, TimeEntity, RestoreEntity): _device: XknxTimeDevice - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX time.""" - super().__init__(_create_xknx_device(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_xknx_device(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/weather.py b/homeassistant/components/knx/weather.py index 584c9fd3323..11dae452e2f 100644 --- a/homeassistant/components/knx/weather.py +++ b/homeassistant/components/knx/weather.py @@ -19,6 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxEntity from .schema import WeatherSchema @@ -30,10 +31,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch(es) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.WEATHER] - async_add_entities(KNXWeather(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXWeather(knx_module, entity_config) for entity_config in config + ) def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather: @@ -80,9 +83,12 @@ class KNXWeather(KnxEntity, WeatherEntity): _attr_native_temperature_unit = UnitOfTemperature.CELSIUS _attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX sensor.""" - super().__init__(_create_weather(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_weather(knx_module.xknx, config), + ) self._attr_unique_id = str(self._device._temperature.group_address_state) # noqa: SLF001 self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) From b610b29d28250777a2a58d33bf37dd4b87fe6818 Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:23:45 +0200 Subject: [PATCH 0287/1635] LinkPlay: Bump python-linkplay to 0.0.6 (#123062) Bump python-linkplay to 0.0.6 --- homeassistant/components/linkplay/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff 
--git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 0345d4ad727..9ac2a9e66e6 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/linkplay", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["python-linkplay==0.0.5"], + "requirements": ["python-linkplay==0.0.6"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 602c03c879d..c27e5831cf5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2301,7 +2301,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay -python-linkplay==0.0.5 +python-linkplay==0.0.6 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 623f0953721..bce4d3a8487 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1819,7 +1819,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay -python-linkplay==0.0.5 +python-linkplay==0.0.6 # homeassistant.components.matter python-matter-server==6.3.0 From 3de88283584b352f5d984bc660e624454fce155b Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:38:05 +0200 Subject: [PATCH 0288/1635] Add additional items to REPEAT_MAP in LinkPlay (#123063) * Upgrade python-linkplay, add items to REPEAT_MAP * Undo dependency bump --- homeassistant/components/linkplay/media_player.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 103b09f46da..398add235bd 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -58,6 +58,8 @@ REPEAT_MAP: dict[LoopMode, RepeatMode] = { LoopMode.CONTINUOUS_PLAYBACK: RepeatMode.ALL, LoopMode.RANDOM_PLAYBACK: RepeatMode.ALL, LoopMode.LIST_CYCLE: RepeatMode.ALL, + LoopMode.SHUFF_DISABLED_REPEAT_DISABLED: RepeatMode.OFF, + LoopMode.SHUFF_ENABLED_REPEAT_ENABLED_LOOP_ONCE: RepeatMode.ALL, } REPEAT_MAP_INV: dict[RepeatMode, LoopMode] = {v: k for k, v in REPEAT_MAP.items()} From d2dd5ba0e6206c9d0bc9ad293235f44d95ab5071 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 2 Aug 2024 13:38:56 +0200 Subject: [PATCH 0289/1635] Do not raise repair issue about missing integration in safe mode (#123066) --- homeassistant/setup.py | 27 ++++++++++++++------------- tests/test_setup.py | 11 ++++++++++- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 12dd17b289c..102c48e1d07 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -281,19 +281,20 @@ async def _async_setup_component( integration = await loader.async_get_integration(hass, domain) except loader.IntegrationNotFound: _log_error_setup_error(hass, domain, None, "Integration not found.") - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"integration_not_found.{domain}", - is_fixable=True, - issue_domain=HOMEASSISTANT_DOMAIN, - severity=IssueSeverity.ERROR, - translation_key="integration_not_found", - translation_placeholders={ - "domain": domain, - }, - data={"domain": domain}, - ) + if not hass.config.safe_mode: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + 
f"integration_not_found.{domain}", + is_fixable=True, + issue_domain=HOMEASSISTANT_DOMAIN, + severity=IssueSeverity.ERROR, + translation_key="integration_not_found", + translation_placeholders={ + "domain": domain, + }, + data={"domain": domain}, + ) return False log_error = partial(_log_error_setup_error, hass, domain, integration) diff --git a/tests/test_setup.py b/tests/test_setup.py index fae2b46f18d..4b7df9563ba 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -245,7 +245,7 @@ async def test_validate_platform_config_4(hass: HomeAssistant) -> None: async def test_component_not_found( hass: HomeAssistant, issue_registry: IssueRegistry ) -> None: - """setup_component should not crash if component doesn't exist.""" + """setup_component should raise a repair issue if component doesn't exist.""" assert await setup.async_setup_component(hass, "non_existing", {}) is False assert len(issue_registry.issues) == 1 issue = issue_registry.async_get_issue( @@ -255,6 +255,15 @@ async def test_component_not_found( assert issue.translation_key == "integration_not_found" +async def test_component_missing_not_raising_in_safe_mode( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should not raise an issue if component doesn't exist in safe.""" + hass.config.safe_mode = True + assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 0 + + async def test_component_not_double_initialized(hass: HomeAssistant) -> None: """Test we do not set up a component twice.""" mock_setup = Mock(return_value=True) From a40dce449f85f11aafd8ae104b0d726ca12af7ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?H=2E=20=C3=81rkosi=20R=C3=B3bert?= Date: Fri, 2 Aug 2024 14:25:43 +0200 Subject: [PATCH 0290/1635] Add LinkPlay models (#123056) * Add some LinkPlay models * Update utils.py * Update utils.py * Update utils.py * Update homeassistant/components/linkplay/utils.py * Update homeassistant/components/linkplay/utils.py * Update utils.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/linkplay/utils.py | 26 ++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 9ca76b3933d..7532c9b354a 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -3,9 +3,19 @@ from typing import Final MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" +MANUFACTURER_ARYLIC: Final[str] = "Arylic" +MANUFACTURER_IEAST: Final[str] = "iEAST" MANUFACTURER_GENERIC: Final[str] = "Generic" MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP" MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde" +MODELS_ARYLIC_S50: Final[str] = "S50+" +MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro" +MODELS_ARYLIC_A30: Final[str] = "A30" +MODELS_ARYLIC_A50S: Final[str] = "A50+" +MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3" +MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4" +MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3" +MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5" MODELS_GENERIC: Final[str] = "Generic" @@ -16,5 +26,21 @@ def get_info_from_project(project: str) -> tuple[str, str]: return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4 case "SMART_HYDE": return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE + case "ARYLIC_S50": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50 + case "RP0016_S50PRO_S": + return MANUFACTURER_ARYLIC, 
MODELS_ARYLIC_S50_PRO + case "RP0011_WB60_S": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30 + case "ARYLIC_A50S": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S + case "UP2STREAM_AMP_V3": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3 + case "UP2STREAM_AMP_V4": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4 + case "UP2STREAM_PRO_V3": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3 + case "iEAST-02": + return MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5 case _: return MANUFACTURER_GENERIC, MODELS_GENERIC From fb76e70c3fdf061cff0839e3231c4d986b0dba59 Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Fri, 2 Aug 2024 06:10:04 -0700 Subject: [PATCH 0291/1635] Use text/multiple selector for input_select.set_options (#122539) --- homeassistant/components/input_select/services.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/input_select/services.yaml b/homeassistant/components/input_select/services.yaml index 92279e58a54..04a09e5366a 100644 --- a/homeassistant/components/input_select/services.yaml +++ b/homeassistant/components/input_select/services.yaml @@ -48,6 +48,7 @@ set_options: required: true example: '["Item A", "Item B", "Item C"]' selector: - object: + text: + multiple: true reload: From db238a75e3079f23a35d7c0a104f8cf93327ac3c Mon Sep 17 00:00:00 2001 From: Ryan Mattson Date: Fri, 2 Aug 2024 08:13:56 -0500 Subject: [PATCH 0292/1635] Lyric: Properly tie room accessories to the data coordinator (#115902) * properly tie lyric accessories to the data coordinator so sensors receive updates * only check for accessories for LCC devices * revert: meant to give it its own branch and PR --- homeassistant/components/lyric/__init__.py | 22 ++++++++++++++++++++---- homeassistant/components/lyric/sensor.py | 3 +-- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lyric/__init__.py b/homeassistant/components/lyric/__init__.py index 7c002229741..e1eaed6602c 100644 --- a/homeassistant/components/lyric/__init__.py +++ b/homeassistant/components/lyric/__init__.py @@ -192,8 +192,8 @@ class LyricAccessoryEntity(LyricDeviceEntity): ) -> None: """Initialize the Honeywell Lyric accessory entity.""" super().__init__(coordinator, location, device, key) - self._room = room - self._accessory = accessory + self._room_id = room.id + self._accessory_id = accessory.id @property def device_info(self) -> DeviceInfo: @@ -202,11 +202,25 @@ class LyricAccessoryEntity(LyricDeviceEntity): identifiers={ ( f"{dr.CONNECTION_NETWORK_MAC}_room_accessory", - f"{self._mac_id}_room{self._room.id}_accessory{self._accessory.id}", + f"{self._mac_id}_room{self._room_id}_accessory{self._accessory_id}", ) }, manufacturer="Honeywell", model="RCHTSENSOR", - name=f"{self._room.roomName} Sensor", + name=f"{self.room.roomName} Sensor", via_device=(dr.CONNECTION_NETWORK_MAC, self._mac_id), ) + + @property + def room(self) -> LyricRoom: + """Get the Lyric Device.""" + return self.coordinator.data.rooms_dict[self._mac_id][self._room_id] + + @property + def accessory(self) -> LyricAccessories: + """Get the Lyric Device.""" + return next( + accessory + for accessory in self.room.accessories + if accessory.id == self._accessory_id + ) diff --git a/homeassistant/components/lyric/sensor.py b/homeassistant/components/lyric/sensor.py index 64f60fa6611..9f05354c399 100644 --- a/homeassistant/components/lyric/sensor.py +++ b/homeassistant/components/lyric/sensor.py @@ -244,7 +244,6 @@
class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): accessory, f"{parentDevice.macID}_room{room.id}_acc{accessory.id}_{description.key}", ) - self.room = room self.entity_description = description if description.device_class == SensorDeviceClass.TEMPERATURE: if parentDevice.units == "Fahrenheit": @@ -255,4 +254,4 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): @property def native_value(self) -> StateType | datetime: """Return the state.""" - return self.entity_description.value_fn(self._room, self._accessory) + return self.entity_description.value_fn(self.room, self.accessory) From b6c9fe86e1817f1bff307614b599a1d85f074c9f Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 2 Aug 2024 15:27:11 +0200 Subject: [PATCH 0293/1635] Ensure claude supported feature reflect latest config entry options (#123050) --- .../components/anthropic/conversation.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py index 92a09ad8a10..3d876bf3325 100644 --- a/homeassistant/components/anthropic/conversation.py +++ b/homeassistant/components/anthropic/conversation.py @@ -21,6 +21,7 @@ from voluptuous_openapi import convert from homeassistant.components import conversation from homeassistant.components.conversation import trace +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, TemplateError @@ -117,6 +118,13 @@ class AnthropicConversationEntity( """Return a list of supported languages.""" return MATCH_ALL + async def async_added_to_hass(self) -> None: + """When entity is added to Home Assistant.""" + await super().async_added_to_hass() + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) + async def async_process( self, user_input: conversation.ConversationInput ) -> conversation.ConversationResult: @@ -299,3 +307,14 @@ class AnthropicConversationEntity( return conversation.ConversationResult( response=intent_response, conversation_id=conversation_id ) + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + if entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + else: + self._attr_supported_features = conversation.ConversationEntityFeature(0) From e734971d3382223bb23ac73833d900114e3f5dac Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:28:32 +0200 Subject: [PATCH 0294/1635] Enable collections-named-tuple (PYI024) rule in ruff (#123019) --- homeassistant/components/ads/__init__.py | 2 +- homeassistant/components/asuswrt/bridge.py | 2 +- homeassistant/components/bbox/device_tracker.py | 2 +- homeassistant/components/bt_smarthub/device_tracker.py | 2 +- homeassistant/components/epic_games_store/calendar.py | 2 +- homeassistant/components/fints/sensor.py | 2 +- homeassistant/components/hitron_coda/device_tracker.py | 2 +- homeassistant/components/modbus/modbus.py | 4 ++-- homeassistant/components/modbus/validators.py | 4 ++-- homeassistant/components/ness_alarm/__init__.py | 2 +- homeassistant/components/netio/switch.py | 2 +- homeassistant/components/tellstick/sensor.py | 2 +- homeassistant/scripts/benchmark/__init__.py | 2 +- 
pyproject.toml | 1 - script/lint_and_test.py | 2 +- tests/components/hardware/test_websocket_api.py | 2 +- tests/components/jewish_calendar/__init__.py | 2 +- tests/components/logbook/test_init.py | 2 +- tests/components/russound_rio/const.py | 2 +- tests/components/tplink/__init__.py | 2 +- 20 files changed, 21 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/ads/__init__.py b/homeassistant/components/ads/__init__.py index 7041a757a42..f5742718b12 100644 --- a/homeassistant/components/ads/__init__.py +++ b/homeassistant/components/ads/__init__.py @@ -136,7 +136,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: # Tuple to hold data needed for notification -NotificationItem = namedtuple( +NotificationItem = namedtuple( # noqa: PYI024 "NotificationItem", "hnotify huser name plc_datatype callback" ) diff --git a/homeassistant/components/asuswrt/bridge.py b/homeassistant/components/asuswrt/bridge.py index b193787f500..4e928d63666 100644 --- a/homeassistant/components/asuswrt/bridge.py +++ b/homeassistant/components/asuswrt/bridge.py @@ -52,7 +52,7 @@ SENSORS_TYPE_LOAD_AVG = "sensors_load_avg" SENSORS_TYPE_RATES = "sensors_rates" SENSORS_TYPE_TEMPERATURES = "sensors_temperatures" -WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) +WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) # noqa: PYI024 _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/bbox/device_tracker.py b/homeassistant/components/bbox/device_tracker.py index 6ced2c73c9a..7157c47830c 100644 --- a/homeassistant/components/bbox/device_tracker.py +++ b/homeassistant/components/bbox/device_tracker.py @@ -39,7 +39,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> BboxDeviceScanner | return scanner if scanner.success_init else None -Device = namedtuple("Device", ["mac", "name", "ip", "last_update"]) +Device = namedtuple("Device", ["mac", "name", "ip", "last_update"]) # noqa: PYI024 class BboxDeviceScanner(DeviceScanner): diff --git a/homeassistant/components/bt_smarthub/device_tracker.py b/homeassistant/components/bt_smarthub/device_tracker.py index 10c8000fb93..4b52f38ff31 100644 --- a/homeassistant/components/bt_smarthub/device_tracker.py +++ b/homeassistant/components/bt_smarthub/device_tracker.py @@ -51,7 +51,7 @@ def _create_device(data): return _Device(ip_address, mac, host, status, name) -_Device = namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"]) +_Device = namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"]) # noqa: PYI024 class BTSmartHubScanner(DeviceScanner): diff --git a/homeassistant/components/epic_games_store/calendar.py b/homeassistant/components/epic_games_store/calendar.py index 75c448e8467..2ebb381341e 100644 --- a/homeassistant/components/epic_games_store/calendar.py +++ b/homeassistant/components/epic_games_store/calendar.py @@ -16,7 +16,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, CalendarType from .coordinator import EGSCalendarUpdateCoordinator -DateRange = namedtuple("DateRange", ["start", "end"]) +DateRange = namedtuple("DateRange", ["start", "end"]) # noqa: PYI024 async def async_setup_entry( diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 2f47fdc09eb..8a92850ad47 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -28,7 +28,7 @@ SCAN_INTERVAL = timedelta(hours=4) ICON = "mdi:currency-eur" 
-BankCredentials = namedtuple("BankCredentials", "blz login pin url") +BankCredentials = namedtuple("BankCredentials", "blz login pin url") # noqa: PYI024 CONF_BIN = "bank_identification_number" CONF_ACCOUNTS = "accounts" diff --git a/homeassistant/components/hitron_coda/device_tracker.py b/homeassistant/components/hitron_coda/device_tracker.py index 68d93e9719d..61199e4b2f7 100644 --- a/homeassistant/components/hitron_coda/device_tracker.py +++ b/homeassistant/components/hitron_coda/device_tracker.py @@ -42,7 +42,7 @@ def get_scanner( return scanner if scanner.success_init else None -Device = namedtuple("Device", ["mac", "name"]) +Device = namedtuple("Device", ["mac", "name"]) # noqa: PYI024 class HitronCODADeviceScanner(DeviceScanner): diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 82caa772ac4..e70b9de50f0 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -76,8 +76,8 @@ from .validators import check_config _LOGGER = logging.getLogger(__name__) -ConfEntry = namedtuple("ConfEntry", "call_type attr func_name") -RunEntry = namedtuple("RunEntry", "attr func") +ConfEntry = namedtuple("ConfEntry", "call_type attr func_name") # noqa: PYI024 +RunEntry = namedtuple("RunEntry", "attr func") # noqa: PYI024 PB_CALL = [ ConfEntry( CALL_TYPE_COIL, diff --git a/homeassistant/components/modbus/validators.py b/homeassistant/components/modbus/validators.py index 90ef0b5f083..e1120094d01 100644 --- a/homeassistant/components/modbus/validators.py +++ b/homeassistant/components/modbus/validators.py @@ -46,7 +46,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -ENTRY = namedtuple( +ENTRY = namedtuple( # noqa: PYI024 "ENTRY", [ "struct_id", @@ -60,7 +60,7 @@ ILLEGAL = "I" OPTIONAL = "O" DEMANDED = "D" -PARM_IS_LEGAL = namedtuple( +PARM_IS_LEGAL = namedtuple( # noqa: PYI024 "PARM_IS_LEGAL", [ "count", diff --git a/homeassistant/components/ness_alarm/__init__.py b/homeassistant/components/ness_alarm/__init__.py index a8202434ce5..730a9aff765 100644 --- a/homeassistant/components/ness_alarm/__init__.py +++ b/homeassistant/components/ness_alarm/__init__.py @@ -44,7 +44,7 @@ DEFAULT_INFER_ARMING_STATE = False SIGNAL_ZONE_CHANGED = "ness_alarm.zone_changed" SIGNAL_ARMING_STATE_CHANGED = "ness_alarm.arming_state_changed" -ZoneChangedData = namedtuple("ZoneChangedData", ["zone_id", "state"]) +ZoneChangedData = namedtuple("ZoneChangedData", ["zone_id", "state"]) # noqa: PYI024 DEFAULT_ZONE_TYPE = BinarySensorDeviceClass.MOTION ZONE_SCHEMA = vol.Schema( diff --git a/homeassistant/components/netio/switch.py b/homeassistant/components/netio/switch.py index f5627f5e56b..54bfef5e1da 100644 --- a/homeassistant/components/netio/switch.py +++ b/homeassistant/components/netio/switch.py @@ -38,7 +38,7 @@ CONF_OUTLETS = "outlets" DEFAULT_PORT = 1234 DEFAULT_USERNAME = "admin" -Device = namedtuple("Device", ["netio", "entities"]) +Device = namedtuple("Device", ["netio", "entities"]) # noqa: PYI024 DEVICES: dict[str, Device] = {} MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) diff --git a/homeassistant/components/tellstick/sensor.py b/homeassistant/components/tellstick/sensor.py index 2c304f259da..1e27511bd84 100644 --- a/homeassistant/components/tellstick/sensor.py +++ b/homeassistant/components/tellstick/sensor.py @@ -29,7 +29,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType _LOGGER = logging.getLogger(__name__) -DatatypeDescription = namedtuple( +DatatypeDescription = 
namedtuple( # noqa: PYI024 "DatatypeDescription", ["name", "unit", "device_class"] ) diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index 34bc536502f..d39b1b64861 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -322,7 +322,7 @@ def _create_state_changed_event_from_old_new( attributes_json = json.dumps(attributes, cls=JSONEncoder) if attributes_json == "null": attributes_json = "{}" - row = collections.namedtuple( + row = collections.namedtuple( # noqa: PYI024 "Row", [ "event_type" diff --git a/pyproject.toml b/pyproject.toml index eb4bfc4970d..93f8427ef7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -821,7 +821,6 @@ ignore = [ "PLE0605", # temporarily disabled - "PYI024", # Use typing.NamedTuple instead of collections.namedtuple "RET503", "RET501", "TRY301" diff --git a/script/lint_and_test.py b/script/lint_and_test.py index e23870364b6..ff3db8aa1ed 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -20,7 +20,7 @@ except ImportError: RE_ASCII = re.compile(r"\033\[[^m]*m") -Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) +Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) # noqa: PYI024 PASS = "green" FAIL = "bold_red" diff --git a/tests/components/hardware/test_websocket_api.py b/tests/components/hardware/test_websocket_api.py index e8099069a9c..1379bdba120 100644 --- a/tests/components/hardware/test_websocket_api.py +++ b/tests/components/hardware/test_websocket_api.py @@ -61,7 +61,7 @@ async def test_system_status_subscription( response = await client.receive_json() assert response["success"] - VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) + VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) # noqa: PYI024 vmem = VirtualMem(10 * 1024**2, 50, 30 * 1024**2) with ( diff --git a/tests/components/jewish_calendar/__init__.py b/tests/components/jewish_calendar/__init__.py index 60726fc3a3e..440bffc2256 100644 --- a/tests/components/jewish_calendar/__init__.py +++ b/tests/components/jewish_calendar/__init__.py @@ -8,7 +8,7 @@ from freezegun import freeze_time as alter_time # noqa: F401 from homeassistant.components import jewish_calendar import homeassistant.util.dt as dt_util -_LatLng = namedtuple("_LatLng", ["lat", "lng"]) +_LatLng = namedtuple("_LatLng", ["lat", "lng"]) # noqa: PYI024 HDATE_DEFAULT_ALTITUDE = 754 NYC_LATLNG = _LatLng(40.7128, -74.0060) diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 3534192a43e..34052cd8024 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -328,7 +328,7 @@ def create_state_changed_event_from_old_new( if new_state is not None: attributes = new_state.get("attributes") attributes_json = json.dumps(attributes, cls=JSONEncoder) - row = collections.namedtuple( + row = collections.namedtuple( # noqa: PYI024 "Row", [ "event_type", diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index d1f6aa7eead..527f4fe3377 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -12,5 +12,5 @@ MOCK_CONFIG = { "port": PORT, } -_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) +_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, 
controller_type=MODEL)} diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index c51a451c847..c63ca9139f1 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -39,7 +39,7 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_json_value_fixture -ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) +ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) # noqa: PYI024 MODULE = "homeassistant.components.tplink" MODULE_CONFIG_FLOW = "homeassistant.components.tplink.config_flow" From b609f8e96240c607d037ddc4c62562daf322f6ca Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:30:29 +0200 Subject: [PATCH 0295/1635] Fix implicit-return in macos script (#122945) --- homeassistant/scripts/macos/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/scripts/macos/__init__.py b/homeassistant/scripts/macos/__init__.py index f629492ec39..0bf88da81dc 100644 --- a/homeassistant/scripts/macos/__init__.py +++ b/homeassistant/scripts/macos/__init__.py @@ -44,7 +44,7 @@ def uninstall_osx(): print("Home Assistant has been uninstalled.") -def run(args): +def run(args: list[str]) -> int: """Handle OSX commandline script.""" commands = "install", "uninstall", "restart" if not args or args[0] not in commands: @@ -63,3 +63,5 @@ def run(args): time.sleep(0.5) install_osx() return 0 + + raise ValueError(f"Invalid command {args[0]}") From 1eadb00fce1bcf78a104ad58ad74e7261d680e3c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:31:09 +0200 Subject: [PATCH 0296/1635] Fix implicit-return in google_assistant (#123002) --- homeassistant/components/google_assistant/trait.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index e54684fbc64..2be20237a84 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -878,6 +878,8 @@ class StartStopTrait(_Trait): if domain in COVER_VALVE_DOMAINS: return {} + raise NotImplementedError(f"Unsupported domain {domain}") + def query_attributes(self): """Return StartStop query attributes.""" domain = self.state.domain @@ -898,13 +900,17 @@ class StartStopTrait(_Trait): ) } + raise NotImplementedError(f"Unsupported domain {domain}") + async def execute(self, command, data, params, challenge): """Execute a StartStop command.""" domain = self.state.domain if domain == vacuum.DOMAIN: - return await self._execute_vacuum(command, data, params, challenge) + await self._execute_vacuum(command, data, params, challenge) + return if domain in COVER_VALVE_DOMAINS: - return await self._execute_cover_or_valve(command, data, params, challenge) + await self._execute_cover_or_valve(command, data, params, challenge) + return async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" From 115303faf591a61fd80023846da7a84fdc405d08 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:44:19 +0200 Subject: [PATCH 0297/1635] Fix translation key for power exchange sensor in ViCare (#122339) --- homeassistant/components/vicare/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 7c0088d065f..0452a560cb8 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -319,8 +319,8 @@ "ess_discharge_total": { "name": "Battery discharge total" }, - "pcc_current_power_exchange": { - "name": "Grid power exchange" + "pcc_transfer_power_exchange": { + "name": "Power exchange with grid" }, "pcc_energy_consumption": { "name": "Energy import from grid" From a4aefe43dc77c4c5e4310d5f2de2a644aac4507e Mon Sep 17 00:00:00 2001 From: Matrix Date: Fri, 2 Aug 2024 21:57:15 +0800 Subject: [PATCH 0298/1635] Yolink device model adaptation (#122824) --- homeassistant/components/yolink/const.py | 4 ++++ homeassistant/components/yolink/sensor.py | 14 +++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 894c85d3f1b..686160d9248 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -17,5 +17,9 @@ YOLINK_OFFLINE_TIME = 32400 DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" +DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" +DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" +DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" +DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 4426602f133..77bbccb2f6a 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -48,7 +48,15 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage -from .const import DEV_MODEL_TH_SENSOR_YS8017_EC, DEV_MODEL_TH_SENSOR_YS8017_UC, DOMAIN +from .const import ( + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, + DEV_MODEL_TH_SENSOR_YS8017_EC, + DEV_MODEL_TH_SENSOR_YS8017_UC, + DOMAIN, +) from .coordinator import YoLinkCoordinator from .entity import YoLinkEntity @@ -109,6 +117,10 @@ MCU_DEV_TEMPERATURE_SENSOR = [ ] NONE_HUMIDITY_SENSOR_MODELS = [ + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, DEV_MODEL_TH_SENSOR_YS8017_UC, DEV_MODEL_TH_SENSOR_YS8017_EC, ] From b89a859f1484f15dbd53fec738ce1ba8e9fe62a2 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 2 Aug 2024 15:58:41 +0200 Subject: [PATCH 0299/1635] Fix and improve tedee lock states (#123022) Improve tedee lock states --- homeassistant/components/tedee/lock.py | 13 +++++++++++-- tests/components/tedee/test_lock.py | 22 ++++++++++++++++------ 2 files changed, 27 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index d11c873a94a..8d5fa028e12 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -55,8 +55,13 @@ class TedeeLockEntity(TedeeEntity, LockEntity): super().__init__(lock, coordinator, "lock") @property - def is_locked(self) -> bool: + def is_locked(self) -> bool | None: """Return true if lock is locked.""" + if self._lock.state in ( + TedeeLockState.HALF_OPEN, + TedeeLockState.UNKNOWN, + ): + return None return 
self._lock.state == TedeeLockState.LOCKED @property @@ -87,7 +92,11 @@ class TedeeLockEntity(TedeeEntity, LockEntity): @property def available(self) -> bool: """Return True if entity is available.""" - return super().available and self._lock.is_connected + return ( + super().available + and self._lock.is_connected + and self._lock.state != TedeeLockState.UNCALIBRATED + ) async def async_unlock(self, **kwargs: Any) -> None: """Unlock the door.""" diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index ffc4a8c30d6..741bc3156cb 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -25,7 +25,7 @@ from homeassistant.components.lock import ( STATE_UNLOCKING, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -276,10 +276,21 @@ async def test_new_lock( assert state +@pytest.mark.parametrize( + ("lib_state", "expected_state"), + [ + (TedeeLockState.LOCKED, STATE_LOCKED), + (TedeeLockState.HALF_OPEN, STATE_UNKNOWN), + (TedeeLockState.UNKNOWN, STATE_UNKNOWN), + (TedeeLockState.UNCALIBRATED, STATE_UNAVAILABLE), + ], +) async def test_webhook_update( hass: HomeAssistant, mock_tedee: MagicMock, hass_client_no_auth: ClientSessionGenerator, + lib_state: TedeeLockState, + expected_state: str, ) -> None: """Test updated data set through webhook.""" @@ -287,10 +298,9 @@ async def test_webhook_update( assert state assert state.state == STATE_UNLOCKED - webhook_data = {"dummystate": 6} - mock_tedee.locks_dict[ - 12345 - ].state = TedeeLockState.LOCKED # is updated in the lib, so mock and assert in L296 + webhook_data = {"dummystate": lib_state.value} + # is updated in the lib, so mock and assert below + mock_tedee.locks_dict[12345].state = lib_state client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -302,4 +312,4 @@ async def test_webhook_update( state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_LOCKED + assert state.state == expected_state From a18166e3f879ff358b366424edc0a0573e0c0c24 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 2 Aug 2024 16:48:37 +0200 Subject: [PATCH 0300/1635] Update frontend to 20240802.0 (#123072) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 60cfa0a26ff..95afe1221ec 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240731.0"] + "requirements": ["home-assistant-frontend==20240802.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 58f39907269..1cc6a0fa85d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 
hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240731.0 +home-assistant-frontend==20240802.0 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index c27e5831cf5..2805ad2c716 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1096,7 +1096,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240731.0 +home-assistant-frontend==20240802.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bce4d3a8487..c7934e7fc84 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -919,7 +919,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240731.0 +home-assistant-frontend==20240802.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From b0ece4bbaa16efee2f75a3898f9be6bc0be7790a Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Fri, 2 Aug 2024 17:07:23 +0200 Subject: [PATCH 0301/1635] Improve Bang olufsen media_player dispatcher formatting (#123065) * Avoid repeating almost the same command 8 times * Remove debugging --- .../components/bang_olufsen/media_player.py | 75 +++++-------------- 1 file changed, 18 insertions(+), 57 deletions(-) diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 5f8b7638125..8bc97858d0d 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable import json import logging from typing import Any, cast @@ -137,65 +138,25 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): """Turn on the dispatchers.""" await self._initialize() - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{CONNECTION_STATUS}", - self._async_update_connection_state, - ) - ) + signal_handlers: dict[str, Callable] = { + CONNECTION_STATUS: self._async_update_connection_state, + WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error, + WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata, + WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress, + WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state, + WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources, + WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change, + WebsocketNotification.VOLUME: self._async_update_volume, + } - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}", - self._async_update_playback_error, + for signal, signal_handler in signal_handlers.items(): + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{self._unique_id}_{signal}", + signal_handler, + ) ) - ) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}", - self._async_update_playback_metadata, - ) - ) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}", - self._async_update_playback_progress, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - 
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}", - self._async_update_playback_state, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", - self._async_update_sources, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}", - self._async_update_source_change, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.VOLUME}", - self._async_update_volume, - ) - ) async def _initialize(self) -> None: """Initialize connection dependent variables.""" From 2520fcd284e94f0eebee1165ef393966d0e9ede1 Mon Sep 17 00:00:00 2001 From: Ryan Mattson Date: Fri, 2 Aug 2024 08:13:56 -0500 Subject: [PATCH 0302/1635] Lyric: Properly tie room accessories to the data coordinator (#115902) * properly tie lyric accessories to the data coordinator so sensors recieve updates * only check for accessories for LCC devices * revert: meant to give it its own branch and PR --- homeassistant/components/lyric/__init__.py | 22 ++++++++++++++++++---- homeassistant/components/lyric/sensor.py | 3 +-- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lyric/__init__.py b/homeassistant/components/lyric/__init__.py index 7c002229741..e1eaed6602c 100644 --- a/homeassistant/components/lyric/__init__.py +++ b/homeassistant/components/lyric/__init__.py @@ -192,8 +192,8 @@ class LyricAccessoryEntity(LyricDeviceEntity): ) -> None: """Initialize the Honeywell Lyric accessory entity.""" super().__init__(coordinator, location, device, key) - self._room = room - self._accessory = accessory + self._room_id = room.id + self._accessory_id = accessory.id @property def device_info(self) -> DeviceInfo: @@ -202,11 +202,25 @@ class LyricAccessoryEntity(LyricDeviceEntity): identifiers={ ( f"{dr.CONNECTION_NETWORK_MAC}_room_accessory", - f"{self._mac_id}_room{self._room.id}_accessory{self._accessory.id}", + f"{self._mac_id}_room{self._room_id}_accessory{self._accessory_id}", ) }, manufacturer="Honeywell", model="RCHTSENSOR", - name=f"{self._room.roomName} Sensor", + name=f"{self.room.roomName} Sensor", via_device=(dr.CONNECTION_NETWORK_MAC, self._mac_id), ) + + @property + def room(self) -> LyricRoom: + """Get the Lyric Device.""" + return self.coordinator.data.rooms_dict[self._mac_id][self._room_id] + + @property + def accessory(self) -> LyricAccessories: + """Get the Lyric Device.""" + return next( + accessory + for accessory in self.room.accessories + if accessory.id == self._accessory_id + ) diff --git a/homeassistant/components/lyric/sensor.py b/homeassistant/components/lyric/sensor.py index 64f60fa6611..9f05354c399 100644 --- a/homeassistant/components/lyric/sensor.py +++ b/homeassistant/components/lyric/sensor.py @@ -244,7 +244,6 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): accessory, f"{parentDevice.macID}_room{room.id}_acc{accessory.id}_{description.key}", ) - self.room = room self.entity_description = description if description.device_class == SensorDeviceClass.TEMPERATURE: if parentDevice.units == "Fahrenheit": @@ -255,4 +254,4 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): @property def native_value(self) -> StateType | datetime: """Return the state.""" - return self.entity_description.value_fn(self._room, self._accessory) + return self.entity_description.value_fn(self.room, self.accessory) From 
1b1d86409cc4605485b129c930b07d6675a3e00c Mon Sep 17 00:00:00 2001 From: DeerMaximum <43999966+DeerMaximum@users.noreply.github.com> Date: Thu, 1 Aug 2024 14:32:37 +0200 Subject: [PATCH 0303/1635] Velux use node id as fallback for unique id (#117508) Co-authored-by: Robert Resch --- homeassistant/components/velux/__init__.py | 8 ++++++-- homeassistant/components/velux/cover.py | 6 +++--- homeassistant/components/velux/light.py | 2 +- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 4b89fc66a84..1b7cbd1ff93 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -108,10 +108,14 @@ class VeluxEntity(Entity): _attr_should_poll = False - def __init__(self, node: Node) -> None: + def __init__(self, node: Node, config_entry_id: str) -> None: """Initialize the Velux device.""" self.node = node - self._attr_unique_id = node.serial_number + self._attr_unique_id = ( + node.serial_number + if node.serial_number + else f"{config_entry_id}_{node.node_id}" + ) self._attr_name = node.name if node.name else f"#{node.node_id}" @callback diff --git a/homeassistant/components/velux/cover.py b/homeassistant/components/velux/cover.py index c8688e4d186..cd7564eee81 100644 --- a/homeassistant/components/velux/cover.py +++ b/homeassistant/components/velux/cover.py @@ -29,7 +29,7 @@ async def async_setup_entry( """Set up cover(s) for Velux platform.""" module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxCover(node) + VeluxCover(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, OpeningDevice) ) @@ -41,9 +41,9 @@ class VeluxCover(VeluxEntity, CoverEntity): _is_blind = False node: OpeningDevice - def __init__(self, node: OpeningDevice) -> None: + def __init__(self, node: OpeningDevice, config_entry_id: str) -> None: """Initialize VeluxCover.""" - super().__init__(node) + super().__init__(node, config_entry_id) self._attr_device_class = CoverDeviceClass.WINDOW if isinstance(node, Awning): self._attr_device_class = CoverDeviceClass.AWNING diff --git a/homeassistant/components/velux/light.py b/homeassistant/components/velux/light.py index bbe9822648e..e98632701f3 100644 --- a/homeassistant/components/velux/light.py +++ b/homeassistant/components/velux/light.py @@ -23,7 +23,7 @@ async def async_setup_entry( module = hass.data[DOMAIN][config.entry_id] async_add_entities( - VeluxLight(node) + VeluxLight(node, config.entry_id) for node in module.pyvlx.nodes if isinstance(node, LighteningDevice) ) From 804d7aa4c040b3d4ca0f97811d380128a90daafa Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 2 Aug 2024 15:44:19 +0200 Subject: [PATCH 0304/1635] Fix translation key for power exchange sensor in ViCare (#122339) --- homeassistant/components/vicare/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 7c0088d065f..0452a560cb8 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -319,8 +319,8 @@ "ess_discharge_total": { "name": "Battery discharge total" }, - "pcc_current_power_exchange": { - "name": "Grid power exchange" + "pcc_transfer_power_exchange": { + "name": "Power exchange with grid" }, "pcc_energy_consumption": { "name": "Energy import from grid" From 
1a7085b068a61f44802d129c2fc6573c0f2cacd6 Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Fri, 2 Aug 2024 09:05:06 +0300 Subject: [PATCH 0305/1635] Add aliases to script llm tool description (#122380) * Add aliases to script llm tool description * Also add name --- homeassistant/helpers/llm.py | 13 +++++++++++++ tests/helpers/test_llm.py | 36 +++++++++++++++++++++++++++++------- 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index 4ddb00166b6..e37aa0c532d 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -677,6 +677,19 @@ class ScriptTool(Tool): self.parameters = vol.Schema(schema) + aliases: list[str] = [] + if entity_entry.name: + aliases.append(entity_entry.name) + if entity_entry.aliases: + aliases.extend(entity_entry.aliases) + if aliases: + if self.description: + self.description = ( + self.description + ". Aliases: " + str(list(aliases)) + ) + else: + self.description = "Aliases: " + str(list(aliases)) + parameters_cache[entity_entry.unique_id] = ( self.description, self.parameters, diff --git a/tests/helpers/test_llm.py b/tests/helpers/test_llm.py index ea6e628d1d4..4d14abb9819 100644 --- a/tests/helpers/test_llm.py +++ b/tests/helpers/test_llm.py @@ -411,7 +411,9 @@ async def test_assist_api_prompt( ) hass.states.async_set(entry2.entity_id, "on", {"friendly_name": "Living Room"}) - def create_entity(device: dr.DeviceEntry, write_state=True) -> None: + def create_entity( + device: dr.DeviceEntry, write_state=True, aliases: set[str] | None = None + ) -> None: """Create an entity for a device and track entity_id.""" entity = entity_registry.async_get_or_create( "light", @@ -421,6 +423,8 @@ async def test_assist_api_prompt( original_name=str(device.name or "Unnamed Device"), suggested_object_id=str(device.name or "unnamed_device"), ) + if aliases: + entity_registry.async_update_entity(entity.entity_id, aliases=aliases) if write_state: entity.write_unavailable_state(hass) @@ -432,7 +436,8 @@ async def test_assist_api_prompt( manufacturer="Test Manufacturer", model="Test Model", suggested_area="Test Area", - ) + ), + aliases={"my test light"}, ) for i in range(3): create_entity( @@ -516,7 +521,7 @@ async def test_assist_api_prompt( domain: light state: 'on' areas: Test Area, Alternative name -- names: Test Device +- names: Test Device, my test light domain: light state: unavailable areas: Test Area, Alternative name @@ -616,6 +621,7 @@ async def test_assist_api_prompt( async def test_script_tool( hass: HomeAssistant, + entity_registry: er.EntityRegistry, area_registry: ar.AreaRegistry, floor_registry: fr.FloorRegistry, ) -> None: @@ -659,6 +665,10 @@ async def test_script_tool( ) async_expose_entity(hass, "conversation", "script.test_script", True) + entity_registry.async_update_entity( + "script.test_script", name="script name", aliases={"script alias"} + ) + area = area_registry.async_create("Living room") floor = floor_registry.async_create("2") @@ -671,7 +681,10 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert tool.description == "This is a test script" + assert ( + tool.description + == "This is a test script. 
Aliases: ['script name', 'script alias']" + ) schema = { vol.Required("beer", description="Number of beers"): cv.string, vol.Optional("wine"): selector.NumberSelector({"min": 0, "max": 3}), @@ -684,7 +697,10 @@ async def test_script_tool( assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ("This is a test script", vol.Schema(schema)) + "test_script": ( + "This is a test script. Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) } tool_input = llm.ToolInput( @@ -754,12 +770,18 @@ async def test_script_tool( tool = tools[0] assert tool.name == "test_script" - assert tool.description == "This is a new test script" + assert ( + tool.description + == "This is a new test script. Aliases: ['script name', 'script alias']" + ) schema = {vol.Required("beer", description="Number of beers"): cv.string} assert tool.parameters.schema == schema assert hass.data[llm.SCRIPT_PARAMETERS_CACHE] == { - "test_script": ("This is a new test script", vol.Schema(schema)) + "test_script": ( + "This is a new test script. Aliases: ['script name', 'script alias']", + vol.Schema(schema), + ) } From 6a6814af61216df481410f57ecf9d971037eccff Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Fri, 2 Aug 2024 06:10:04 -0700 Subject: [PATCH 0306/1635] Use text/multiple selector for input_select.set_options (#122539) --- homeassistant/components/input_select/services.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/input_select/services.yaml b/homeassistant/components/input_select/services.yaml index 92279e58a54..04a09e5366a 100644 --- a/homeassistant/components/input_select/services.yaml +++ b/homeassistant/components/input_select/services.yaml @@ -48,6 +48,7 @@ set_options: required: true example: '["Item A", "Item B", "Item C"]' selector: - object: + text: + multiple: true reload: From dfb4e9c159e57f9731d7fd3745676ece4a657de0 Mon Sep 17 00:00:00 2001 From: Matrix Date: Fri, 2 Aug 2024 21:57:15 +0800 Subject: [PATCH 0307/1635] Yolink device model adaptation (#122824) --- homeassistant/components/yolink/const.py | 4 ++++ homeassistant/components/yolink/sensor.py | 14 +++++++++++++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 894c85d3f1b..686160d9248 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -17,5 +17,9 @@ YOLINK_OFFLINE_TIME = 32400 DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" +DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" +DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" +DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" +DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 4426602f133..77bbccb2f6a 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -48,7 +48,15 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage -from .const import DEV_MODEL_TH_SENSOR_YS8017_EC, DEV_MODEL_TH_SENSOR_YS8017_UC, DOMAIN +from .const import ( + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + 
DEV_MODEL_TH_SENSOR_YS8014_UC, + DEV_MODEL_TH_SENSOR_YS8017_EC, + DEV_MODEL_TH_SENSOR_YS8017_UC, + DOMAIN, +) from .coordinator import YoLinkCoordinator from .entity import YoLinkEntity @@ -109,6 +117,10 @@ MCU_DEV_TEMPERATURE_SENSOR = [ ] NONE_HUMIDITY_SENSOR_MODELS = [ + DEV_MODEL_TH_SENSOR_YS8004_EC, + DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8014_EC, + DEV_MODEL_TH_SENSOR_YS8014_UC, DEV_MODEL_TH_SENSOR_YS8017_UC, DEV_MODEL_TH_SENSOR_YS8017_EC, ] From 3b462906d9982999d971c99b7841b19ce88a2964 Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Thu, 1 Aug 2024 03:59:19 -0700 Subject: [PATCH 0308/1635] Restrict nws.get_forecasts_extra selector to nws weather entities (#122986) --- homeassistant/components/nws/services.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/nws/services.yaml b/homeassistant/components/nws/services.yaml index 0d439a9d278..a3d241c775d 100644 --- a/homeassistant/components/nws/services.yaml +++ b/homeassistant/components/nws/services.yaml @@ -2,6 +2,7 @@ get_forecasts_extra: target: entity: domain: weather + integration: nws fields: type: required: true From cb37ae660843142bc8b951c3b91ccd0c5739863f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 1 Aug 2024 01:31:22 -0500 Subject: [PATCH 0309/1635] Update doorbird error notification to be a repair flow (#122987) --- homeassistant/components/doorbird/__init__.py | 37 ++++++----- .../components/doorbird/manifest.json | 2 +- homeassistant/components/doorbird/repairs.py | 55 +++++++++++++++++ .../components/doorbird/strings.json | 13 ++++ tests/components/doorbird/test_repairs.py | 61 +++++++++++++++++++ 5 files changed, 153 insertions(+), 15 deletions(-) create mode 100644 homeassistant/components/doorbird/repairs.py create mode 100644 tests/components/doorbird/test_repairs.py diff --git a/homeassistant/components/doorbird/__init__.py b/homeassistant/components/doorbird/__init__.py index 8989e0ec0be..113b8031d9b 100644 --- a/homeassistant/components/doorbird/__init__.py +++ b/homeassistant/components/doorbird/__init__.py @@ -3,11 +3,11 @@ from __future__ import annotations from http import HTTPStatus +import logging from aiohttp import ClientResponseError from doorbirdpy import DoorBird -from homeassistant.components import persistent_notification from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -17,6 +17,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType @@ -30,6 +31,8 @@ CONF_CUSTOM_URL = "hass_url_override" CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +_LOGGER = logging.getLogger(__name__) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the DoorBird component.""" @@ -68,7 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> door_bird_data = DoorBirdData(door_station, info, event_entity_ids) door_station.update_events(events) # Subscribe to doorbell or motion events - if not await _async_register_events(hass, door_station): + if not await _async_register_events(hass, door_station, entry): raise ConfigEntryNotReady entry.async_on_unload(entry.add_update_listener(_update_listener)) @@ -84,24 
+87,30 @@ async def async_unload_entry(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> async def _async_register_events( - hass: HomeAssistant, door_station: ConfiguredDoorBird + hass: HomeAssistant, door_station: ConfiguredDoorBird, entry: DoorBirdConfigEntry ) -> bool: """Register events on device.""" + issue_id = f"doorbird_schedule_error_{entry.entry_id}" try: await door_station.async_register_events() - except ClientResponseError: - persistent_notification.async_create( + except ClientResponseError as ex: + ir.async_create_issue( hass, - ( - "Doorbird configuration failed. Please verify that API " - "Operator permission is enabled for the Doorbird user. " - "A restart will be required once permissions have been " - "verified." - ), - title="Doorbird Configuration Failure", - notification_id="doorbird_schedule_error", + DOMAIN, + issue_id, + severity=ir.IssueSeverity.ERROR, + translation_key="error_registering_events", + data={"entry_id": entry.entry_id}, + is_fixable=True, + translation_placeholders={ + "error": str(ex), + "name": door_station.name or entry.data[CONF_NAME], + }, ) + _LOGGER.debug("Error registering DoorBird events", exc_info=True) return False + else: + ir.async_delete_issue(hass, DOMAIN, issue_id) return True @@ -111,4 +120,4 @@ async def _update_listener(hass: HomeAssistant, entry: DoorBirdConfigEntry) -> N door_station = entry.runtime_data.door_station door_station.update_events(entry.options[CONF_EVENTS]) # Subscribe to doorbell or motion events - await _async_register_events(hass, door_station) + await _async_register_events(hass, door_station, entry) diff --git a/homeassistant/components/doorbird/manifest.json b/homeassistant/components/doorbird/manifest.json index e77f9aaf0a4..0e9f03c8ef8 100644 --- a/homeassistant/components/doorbird/manifest.json +++ b/homeassistant/components/doorbird/manifest.json @@ -3,7 +3,7 @@ "name": "DoorBird", "codeowners": ["@oblogic7", "@bdraco", "@flacjacket"], "config_flow": true, - "dependencies": ["http"], + "dependencies": ["http", "repairs"], "documentation": "https://www.home-assistant.io/integrations/doorbird", "iot_class": "local_push", "loggers": ["doorbirdpy"], diff --git a/homeassistant/components/doorbird/repairs.py b/homeassistant/components/doorbird/repairs.py new file mode 100644 index 00000000000..c8f9b73ecbd --- /dev/null +++ b/homeassistant/components/doorbird/repairs.py @@ -0,0 +1,55 @@ +"""Repairs for DoorBird.""" + +from __future__ import annotations + +import voluptuous as vol + +from homeassistant import data_entry_flow +from homeassistant.components.repairs import RepairsFlow +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + + +class DoorBirdReloadConfirmRepairFlow(RepairsFlow): + """Handler to show doorbird error and reload.""" + + def __init__(self, entry_id: str) -> None: + """Initialize the flow.""" + self.entry_id = entry_id + + async def async_step_init( + self, user_input: dict[str, str] | None = None + ) -> data_entry_flow.FlowResult: + """Handle the first step of a fix flow.""" + return await self.async_step_confirm() + + async def async_step_confirm( + self, user_input: dict[str, str] | None = None + ) -> data_entry_flow.FlowResult: + """Handle the confirm step of a fix flow.""" + if user_input is not None: + self.hass.config_entries.async_schedule_reload(self.entry_id) + return self.async_create_entry(data={}) + + issue_registry = ir.async_get(self.hass) + description_placeholders = None + if issue := 
issue_registry.async_get_issue(self.handler, self.issue_id): + description_placeholders = issue.translation_placeholders + + return self.async_show_form( + step_id="confirm", + data_schema=vol.Schema({}), + description_placeholders=description_placeholders, + ) + + +async def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, +) -> RepairsFlow: + """Create flow.""" + assert data is not None + entry_id = data["entry_id"] + assert isinstance(entry_id, str) + return DoorBirdReloadConfirmRepairFlow(entry_id=entry_id) diff --git a/homeassistant/components/doorbird/strings.json b/homeassistant/components/doorbird/strings.json index 29c85ec7311..090ba4f161f 100644 --- a/homeassistant/components/doorbird/strings.json +++ b/homeassistant/components/doorbird/strings.json @@ -11,6 +11,19 @@ } } }, + "issues": { + "error_registering_events": { + "title": "DoorBird {name} configuration failure", + "fix_flow": { + "step": { + "confirm": { + "title": "[%key:component::doorbird::issues::error_registering_events::title%]", + "description": "Configuring DoorBird {name} failed with error: `{error}`. Please enable the API Operator permission for the DoorBird user and continue to reload the integration." + } + } + } + } + }, "config": { "step": { "user": { diff --git a/tests/components/doorbird/test_repairs.py b/tests/components/doorbird/test_repairs.py new file mode 100644 index 00000000000..7449250b718 --- /dev/null +++ b/tests/components/doorbird/test_repairs.py @@ -0,0 +1,61 @@ +"""Test repairs for doorbird.""" + +from __future__ import annotations + +from http import HTTPStatus + +from homeassistant.components.doorbird.const import DOMAIN +from homeassistant.components.repairs.issue_handler import ( + async_process_repairs_platforms, +) +from homeassistant.components.repairs.websocket_api import ( + RepairsFlowIndexView, + RepairsFlowResourceView, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir +from homeassistant.setup import async_setup_component + +from . 
import mock_not_found_exception +from .conftest import DoorbirdMockerType + +from tests.typing import ClientSessionGenerator + + +async def test_change_schedule_fails( + hass: HomeAssistant, + doorbird_mocker: DoorbirdMockerType, + hass_client: ClientSessionGenerator, +) -> None: + """Test a doorbird when change_schedule fails.""" + assert await async_setup_component(hass, "repairs", {}) + doorbird_entry = await doorbird_mocker( + favorites_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY + issue_reg = ir.async_get(hass) + assert len(issue_reg.issues) == 1 + issue = list(issue_reg.issues.values())[0] + issue_id = issue.issue_id + assert issue.domain == DOMAIN + + await async_process_repairs_platforms(hass) + client = await hass_client() + + url = RepairsFlowIndexView.url + resp = await client.post(url, json={"handler": DOMAIN, "issue_id": issue_id}) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + flow_id = data["flow_id"] + placeholders = data["description_placeholders"] + assert "404" in placeholders["error"] + assert data["step_id"] == "confirm" + + url = RepairsFlowResourceView.url.format(flow_id=flow_id) + resp = await client.post(url) + assert resp.status == HTTPStatus.OK + data = await resp.json() + + assert data["type"] == "create_entry" From 0216455137c86a28f8a91b57a81262cac125fad2 Mon Sep 17 00:00:00 2001 From: Matrix Date: Thu, 1 Aug 2024 14:32:16 +0800 Subject: [PATCH 0310/1635] Fix yolink protocol changed (#122989) --- homeassistant/components/yolink/valve.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/yolink/valve.py b/homeassistant/components/yolink/valve.py index a24ad7d385d..d8c199697c3 100644 --- a/homeassistant/components/yolink/valve.py +++ b/homeassistant/components/yolink/valve.py @@ -37,7 +37,7 @@ DEVICE_TYPES: tuple[YoLinkValveEntityDescription, ...] 
= ( key="valve_state", translation_key="meter_valve_state", device_class=ValveDeviceClass.WATER, - value=lambda value: value == "closed" if value is not None else None, + value=lambda value: value != "open" if value is not None else None, exists_fn=lambda device: device.device_type == ATTR_DEVICE_WATER_METER_CONTROLLER and not device.device_model_name.startswith(DEV_MODEL_WATER_METER_YS5007), From acf523b5fb75cae738ef77c35c2331381bf99abe Mon Sep 17 00:00:00 2001 From: amccook <30292381+amccook@users.noreply.github.com> Date: Thu, 1 Aug 2024 07:24:09 -0700 Subject: [PATCH 0311/1635] Fix handling of directory type playlists in Plex (#122990) Ignore type directory --- homeassistant/components/plex/media_browser.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/plex/media_browser.py b/homeassistant/components/plex/media_browser.py index e47e6145761..87e9f47af66 100644 --- a/homeassistant/components/plex/media_browser.py +++ b/homeassistant/components/plex/media_browser.py @@ -132,7 +132,11 @@ def browse_media( # noqa: C901 "children": [], } for playlist in plex_server.playlists(): - if playlist.playlistType != "audio" and platform == "sonos": + if ( + playlist.type != "directory" + and playlist.playlistType != "audio" + and platform == "sonos" + ): continue try: playlists_info["children"].append(item_payload(playlist)) From 55abe68a5fcb3e74726a0321f80b188a602d1125 Mon Sep 17 00:00:00 2001 From: Ivan Belokobylskiy Date: Thu, 1 Aug 2024 12:51:41 +0400 Subject: [PATCH 0312/1635] Bump aioymaps to 1.2.5 (#123005) Bump aiomaps, fix sessionId parsing --- homeassistant/components/yandex_transport/manifest.json | 2 +- homeassistant/components/yandex_transport/sensor.py | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/yandex_transport/manifest.json b/homeassistant/components/yandex_transport/manifest.json index c29b4d3dc98..1d1219d5a95 100644 --- a/homeassistant/components/yandex_transport/manifest.json +++ b/homeassistant/components/yandex_transport/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@rishatik92", "@devbis"], "documentation": "https://www.home-assistant.io/integrations/yandex_transport", "iot_class": "cloud_polling", - "requirements": ["aioymaps==1.2.4"] + "requirements": ["aioymaps==1.2.5"] } diff --git a/homeassistant/components/yandex_transport/sensor.py b/homeassistant/components/yandex_transport/sensor.py index 30227e3261e..95c4785a341 100644 --- a/homeassistant/components/yandex_transport/sensor.py +++ b/homeassistant/components/yandex_transport/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta import logging -from aioymaps import CaptchaError, YandexMapsRequester +from aioymaps import CaptchaError, NoSessionError, YandexMapsRequester import voluptuous as vol from homeassistant.components.sensor import ( @@ -88,7 +88,7 @@ class DiscoverYandexTransport(SensorEntity): closer_time = None try: yandex_reply = await self.requester.get_stop_info(self._stop_id) - except CaptchaError as ex: + except (CaptchaError, NoSessionError) as ex: _LOGGER.error( "%s. 
You may need to disable the integration for some time", ex, diff --git a/requirements_all.txt b/requirements_all.txt index ad08342230d..3bd6cfa8b2a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -407,7 +407,7 @@ aiowebostv==0.4.2 aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.4 +aioymaps==1.2.5 # homeassistant.components.airgradient airgradient==0.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5bdf58ff217..21c8fd5f677 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -389,7 +389,7 @@ aiowebostv==0.4.2 aiowithings==3.0.2 # homeassistant.components.yandex_transport -aioymaps==1.2.4 +aioymaps==1.2.5 # homeassistant.components.airgradient airgradient==0.7.1 From e9bfe82582f94f92c3db5597eb869cf7c0fcf678 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 1 Aug 2024 11:51:45 +0200 Subject: [PATCH 0313/1635] Make the Android timer notification high priority (#123006) --- homeassistant/components/mobile_app/timers.py | 2 ++ tests/components/mobile_app/test_timers.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/homeassistant/components/mobile_app/timers.py b/homeassistant/components/mobile_app/timers.py index 93b4ac53be5..e092298c5d7 100644 --- a/homeassistant/components/mobile_app/timers.py +++ b/homeassistant/components/mobile_app/timers.py @@ -39,6 +39,8 @@ def async_handle_timer_event( # Android "channel": "Timers", "importance": "high", + "ttl": 0, + "priority": "high", # iOS "push": { "interruption-level": "time-sensitive", diff --git a/tests/components/mobile_app/test_timers.py b/tests/components/mobile_app/test_timers.py index 0eba88f7328..9f7d4cebc58 100644 --- a/tests/components/mobile_app/test_timers.py +++ b/tests/components/mobile_app/test_timers.py @@ -61,6 +61,8 @@ async def test_timer_events( "channel": "Timers", "group": "timers", "importance": "high", + "ttl": 0, + "priority": "high", "push": { "interruption-level": "time-sensitive", }, From ecbff61332364059bf089b69d00248a63f4b4a2a Mon Sep 17 00:00:00 2001 From: Matrix Date: Thu, 1 Aug 2024 17:49:58 +0800 Subject: [PATCH 0314/1635] Bump yolink api to 0.4.6 (#123012) --- homeassistant/components/yolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index 5353d5d5b8c..ceb4e4ceff3 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.4"] + "requirements": ["yolink-api==0.4.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3bd6cfa8b2a..0bf4b77e9d2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2962,7 +2962,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.6 # homeassistant.components.youless youless-api==2.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 21c8fd5f677..f9670987b70 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2339,7 +2339,7 @@ yalexs==6.4.3 yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.4 +yolink-api==0.4.6 # homeassistant.components.youless youless-api==2.1.2 From 
a42615add0692b9752f2b1bfa395f8eeedd4e4cf Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 2 Aug 2024 15:58:41 +0200 Subject: [PATCH 0315/1635] Fix and improve tedee lock states (#123022) Improve tedee lock states --- homeassistant/components/tedee/lock.py | 13 +++++++++++-- tests/components/tedee/test_lock.py | 22 ++++++++++++++++------ 2 files changed, 27 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/tedee/lock.py b/homeassistant/components/tedee/lock.py index d11c873a94a..8d5fa028e12 100644 --- a/homeassistant/components/tedee/lock.py +++ b/homeassistant/components/tedee/lock.py @@ -55,8 +55,13 @@ class TedeeLockEntity(TedeeEntity, LockEntity): super().__init__(lock, coordinator, "lock") @property - def is_locked(self) -> bool: + def is_locked(self) -> bool | None: """Return true if lock is locked.""" + if self._lock.state in ( + TedeeLockState.HALF_OPEN, + TedeeLockState.UNKNOWN, + ): + return None return self._lock.state == TedeeLockState.LOCKED @property @@ -87,7 +92,11 @@ class TedeeLockEntity(TedeeEntity, LockEntity): @property def available(self) -> bool: """Return True if entity is available.""" - return super().available and self._lock.is_connected + return ( + super().available + and self._lock.is_connected + and self._lock.state != TedeeLockState.UNCALIBRATED + ) async def async_unlock(self, **kwargs: Any) -> None: """Unlock the door.""" diff --git a/tests/components/tedee/test_lock.py b/tests/components/tedee/test_lock.py index ffc4a8c30d6..741bc3156cb 100644 --- a/tests/components/tedee/test_lock.py +++ b/tests/components/tedee/test_lock.py @@ -25,7 +25,7 @@ from homeassistant.components.lock import ( STATE_UNLOCKING, ) from homeassistant.components.webhook import async_generate_url -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -276,10 +276,21 @@ async def test_new_lock( assert state +@pytest.mark.parametrize( + ("lib_state", "expected_state"), + [ + (TedeeLockState.LOCKED, STATE_LOCKED), + (TedeeLockState.HALF_OPEN, STATE_UNKNOWN), + (TedeeLockState.UNKNOWN, STATE_UNKNOWN), + (TedeeLockState.UNCALIBRATED, STATE_UNAVAILABLE), + ], +) async def test_webhook_update( hass: HomeAssistant, mock_tedee: MagicMock, hass_client_no_auth: ClientSessionGenerator, + lib_state: TedeeLockState, + expected_state: str, ) -> None: """Test updated data set through webhook.""" @@ -287,10 +298,9 @@ async def test_webhook_update( assert state assert state.state == STATE_UNLOCKED - webhook_data = {"dummystate": 6} - mock_tedee.locks_dict[ - 12345 - ].state = TedeeLockState.LOCKED # is updated in the lib, so mock and assert in L296 + webhook_data = {"dummystate": lib_state.value} + # is updated in the lib, so mock and assert below + mock_tedee.locks_dict[12345].state = lib_state client = await hass_client_no_auth() webhook_url = async_generate_url(hass, WEBHOOK_ID) @@ -302,4 +312,4 @@ async def test_webhook_update( state = hass.states.get("lock.lock_1a2b") assert state - assert state.state == STATE_LOCKED + assert state.state == expected_state From 5ce8a2d974a7cc80ac8e9a28640c5bb568fad9fc Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 1 Aug 2024 15:39:17 -0500 Subject: [PATCH 0316/1635] Standardize assist pipelines on 10ms chunk size (#123024) * Make chunk size always 10ms * Fix 
voip --- .../components/assist_pipeline/__init__.py | 8 ++ .../assist_pipeline/audio_enhancer.py | 18 +--- .../components/assist_pipeline/const.py | 4 +- .../components/assist_pipeline/pipeline.py | 75 +++---------- homeassistant/components/voip/voip.py | 22 ++-- tests/components/assist_pipeline/conftest.py | 13 +++ .../snapshots/test_websocket.ambr | 2 +- tests/components/assist_pipeline/test_init.py | 71 +++++++------ .../assist_pipeline/test_websocket.py | 100 +++++++++++------- 9 files changed, 154 insertions(+), 159 deletions(-) diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index f481411e551..8ee053162b0 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -16,6 +16,10 @@ from .const import ( DATA_LAST_WAKE_UP, DOMAIN, EVENT_RECORDING, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, + SAMPLES_PER_CHUNK, ) from .error import PipelineNotFound from .pipeline import ( @@ -53,6 +57,10 @@ __all__ = ( "PipelineNotFound", "WakeWordSettings", "EVENT_RECORDING", + "SAMPLES_PER_CHUNK", + "SAMPLE_RATE", + "SAMPLE_WIDTH", + "SAMPLE_CHANNELS", ) CONFIG_SCHEMA = vol.Schema( diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index e7a149bd00e..c9c60f421b1 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -6,6 +6,8 @@ import logging from pymicro_vad import MicroVad +from .const import BYTES_PER_CHUNK + _LOGGER = logging.getLogger(__name__) @@ -38,11 +40,6 @@ class AudioEnhancer(ABC): def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" - @property - @abstractmethod - def samples_per_chunk(self) -> int | None: - """Return number of samples per chunk or None if chunking isn't required.""" - class MicroVadEnhancer(AudioEnhancer): """Audio enhancer that just runs microVAD.""" @@ -61,22 +58,15 @@ class MicroVadEnhancer(AudioEnhancer): _LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold) def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk: - """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" + """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" is_speech: bool | None = None if self.vad is not None: # Run VAD + assert len(audio) == BYTES_PER_CHUNK speech_prob = self.vad.Process10ms(audio) is_speech = speech_prob > self.threshold return EnhancedAudioChunk( audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech ) - - @property - def samples_per_chunk(self) -> int | None: - """Return number of samples per chunk or None if chunking isn't required.""" - if self.is_vad_enabled: - return 160 # 10ms - - return None diff --git a/homeassistant/components/assist_pipeline/const.py b/homeassistant/components/assist_pipeline/const.py index 14b93a90372..f7306b89a54 100644 --- a/homeassistant/components/assist_pipeline/const.py +++ b/homeassistant/components/assist_pipeline/const.py @@ -19,4 +19,6 @@ EVENT_RECORDING = f"{DOMAIN}_recording" SAMPLE_RATE = 16000 # hertz SAMPLE_WIDTH = 2 # bytes SAMPLE_CHANNELS = 1 # mono -SAMPLES_PER_CHUNK = 240 # 20 ms @ 16Khz +MS_PER_CHUNK = 10 +SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK) # 10 ms @ 16Khz +BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS # 16-bit 
diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index af29888eb07..9fada934ca1 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -51,11 +51,13 @@ from homeassistant.util.limited_size_dict import LimitedSizeDict from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer from .const import ( + BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DATA_CONFIG, DATA_LAST_WAKE_UP, DATA_MIGRATIONS, DOMAIN, + MS_PER_CHUNK, SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH, @@ -502,9 +504,6 @@ class AudioSettings: is_vad_enabled: bool = True """True if VAD is used to determine the end of the voice command.""" - samples_per_chunk: int | None = None - """Number of samples that will be in each audio chunk (None for no chunking).""" - silence_seconds: float = 0.5 """Seconds of silence after voice command has ended.""" @@ -525,11 +524,6 @@ class AudioSettings: or (self.auto_gain_dbfs > 0) ) - @property - def is_chunking_enabled(self) -> bool: - """True if chunk size is set.""" - return self.samples_per_chunk is not None - @dataclass class PipelineRun: @@ -566,7 +560,9 @@ class PipelineRun: audio_enhancer: AudioEnhancer | None = None """VAD/noise suppression/auto gain""" - audio_chunking_buffer: AudioBuffer | None = None + audio_chunking_buffer: AudioBuffer = field( + default_factory=lambda: AudioBuffer(BYTES_PER_CHUNK) + ) """Buffer used when splitting audio into chunks for audio processing""" _device_id: str | None = None @@ -599,8 +595,6 @@ class PipelineRun: self.audio_settings.is_vad_enabled, ) - self.audio_chunking_buffer = AudioBuffer(self.samples_per_chunk * SAMPLE_WIDTH) - def __eq__(self, other: object) -> bool: """Compare pipeline runs by id.""" if isinstance(other, PipelineRun): @@ -608,14 +602,6 @@ class PipelineRun: return False - @property - def samples_per_chunk(self) -> int: - """Return number of samples expected in each audio chunk.""" - if self.audio_enhancer is not None: - return self.audio_enhancer.samples_per_chunk or SAMPLES_PER_CHUNK - - return self.audio_settings.samples_per_chunk or SAMPLES_PER_CHUNK - @callback def process_event(self, event: PipelineEvent) -> None: """Log an event and call listener.""" @@ -728,7 +714,7 @@ class PipelineRun: # after wake-word-detection. 
num_audio_chunks_to_buffer = int( (wake_word_settings.audio_seconds_to_buffer * SAMPLE_RATE) - / self.samples_per_chunk + / SAMPLES_PER_CHUNK ) stt_audio_buffer: deque[EnhancedAudioChunk] | None = None @@ -1216,60 +1202,31 @@ class PipelineRun: self.debug_recording_thread = None async def process_volume_only( - self, - audio_stream: AsyncIterable[bytes], - sample_rate: int = SAMPLE_RATE, - sample_width: int = SAMPLE_WIDTH, + self, audio_stream: AsyncIterable[bytes] ) -> AsyncGenerator[EnhancedAudioChunk]: """Apply volume transformation only (no VAD/audio enhancements) with optional chunking.""" - assert self.audio_chunking_buffer is not None - - bytes_per_chunk = self.samples_per_chunk * sample_width - ms_per_sample = sample_rate // 1000 - ms_per_chunk = self.samples_per_chunk // ms_per_sample timestamp_ms = 0 - async for chunk in audio_stream: if self.audio_settings.volume_multiplier != 1.0: chunk = _multiply_volume(chunk, self.audio_settings.volume_multiplier) - if self.audio_settings.is_chunking_enabled: - for sub_chunk in chunk_samples( - chunk, bytes_per_chunk, self.audio_chunking_buffer - ): - yield EnhancedAudioChunk( - audio=sub_chunk, - timestamp_ms=timestamp_ms, - is_speech=None, # no VAD - ) - timestamp_ms += ms_per_chunk - else: - # No chunking + for sub_chunk in chunk_samples( + chunk, BYTES_PER_CHUNK, self.audio_chunking_buffer + ): yield EnhancedAudioChunk( - audio=chunk, + audio=sub_chunk, timestamp_ms=timestamp_ms, is_speech=None, # no VAD ) - timestamp_ms += (len(chunk) // sample_width) // ms_per_sample + timestamp_ms += MS_PER_CHUNK async def process_enhance_audio( - self, - audio_stream: AsyncIterable[bytes], - sample_rate: int = SAMPLE_RATE, - sample_width: int = SAMPLE_WIDTH, + self, audio_stream: AsyncIterable[bytes] ) -> AsyncGenerator[EnhancedAudioChunk]: - """Split audio into 10 ms chunks and apply VAD/noise suppression/auto gain/volume transformation.""" + """Split audio into chunks and apply VAD/noise suppression/auto gain/volume transformation.""" assert self.audio_enhancer is not None - assert self.audio_enhancer.samples_per_chunk is not None - assert self.audio_chunking_buffer is not None - bytes_per_chunk = self.audio_enhancer.samples_per_chunk * sample_width - ms_per_sample = sample_rate // 1000 - ms_per_chunk = ( - self.audio_enhancer.samples_per_chunk // sample_width - ) // ms_per_sample timestamp_ms = 0 - async for dirty_samples in audio_stream: if self.audio_settings.volume_multiplier != 1.0: # Static gain @@ -1279,10 +1236,10 @@ class PipelineRun: # Split into chunks for audio enhancements/VAD for dirty_chunk in chunk_samples( - dirty_samples, bytes_per_chunk, self.audio_chunking_buffer + dirty_samples, BYTES_PER_CHUNK, self.audio_chunking_buffer ): yield self.audio_enhancer.enhance_chunk(dirty_chunk, timestamp_ms) - timestamp_ms += ms_per_chunk + timestamp_ms += MS_PER_CHUNK def _multiply_volume(chunk: bytes, volume_multiplier: float) -> bytes: diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 243909629cf..161e938a3b6 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -21,7 +21,7 @@ from voip_utils import ( VoipDatagramProtocol, ) -from homeassistant.components import stt, tts +from homeassistant.components import assist_pipeline, stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, PipelineEvent, @@ -331,15 +331,14 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await 
self._audio_queue.get() - assert audio_enhancer.samples_per_chunk is not None - vad_buffer = AudioBuffer(audio_enhancer.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: chunk_buffer.append(chunk) segmenter.process_with_vad( chunk, - audio_enhancer.samples_per_chunk, + assist_pipeline.SAMPLES_PER_CHUNK, lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, vad_buffer, ) @@ -371,13 +370,12 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): async with asyncio.timeout(self.audio_timeout): chunk = await self._audio_queue.get() - assert audio_enhancer.samples_per_chunk is not None - vad_buffer = AudioBuffer(audio_enhancer.samples_per_chunk * WIDTH) + vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) while chunk: if not segmenter.process_with_vad( chunk, - audio_enhancer.samples_per_chunk, + assist_pipeline.SAMPLES_PER_CHUNK, lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, vad_buffer, ): @@ -437,13 +435,13 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): sample_channels = wav_file.getnchannels() if ( - (sample_rate != 16000) - or (sample_width != 2) - or (sample_channels != 1) + (sample_rate != RATE) + or (sample_width != WIDTH) + or (sample_channels != CHANNELS) ): raise ValueError( - "Expected rate/width/channels as 16000/2/1," - " got {sample_rate}/{sample_width}/{sample_channels}}" + f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," + f" got {sample_rate}/{sample_width}/{sample_channels}" ) audio_bytes = wav_file.readframes(wav_file.getnframes()) diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index c041a54d8fa..b2eca1e7ce1 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -11,6 +11,12 @@ import pytest from homeassistant.components import stt, tts, wake_word from homeassistant.components.assist_pipeline import DOMAIN, select as assist_select +from homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( PipelineData, PipelineStorageCollection, @@ -33,6 +39,8 @@ from tests.common import ( _TRANSCRIPT = "test transcript" +BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS + @pytest.fixture(autouse=True) def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: @@ -462,3 +470,8 @@ def pipeline_data(hass: HomeAssistant, init_components) -> PipelineData: def pipeline_storage(pipeline_data) -> PipelineStorageCollection: """Return pipeline storage collection.""" return pipeline_data.pipeline_store + + +def make_10ms_chunk(header: bytes) -> bytes: + """Return 10ms of zeros with the given header.""" + return header + bytes(BYTES_PER_CHUNK - len(header)) diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index 0b04b67bb22..e5ae18d28f2 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -440,7 +440,7 @@ # --- # name: test_device_capture_override.2 dict({ - 'audio': 'Y2h1bmsx', + 'audio': 
'Y2h1bmsxAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=', 'channels': 1, 'rate': 16000, 'type': 'audio', diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 8fb7ce5b5a5..4206a288331 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -13,6 +13,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import assist_pipeline, media_source, stt, tts from homeassistant.components.assist_pipeline.const import ( + BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, DOMAIN, ) @@ -20,16 +21,16 @@ from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component from .conftest import ( + BYTES_ONE_SECOND, MockSttProvider, MockSttProviderEntity, MockTTSProvider, MockWakeWordEntity, + make_10ms_chunk, ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -BYTES_ONE_SECOND = 16000 * 2 - def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: """Process events to remove dynamic values.""" @@ -58,8 +59,8 @@ async def test_pipeline_from_audio_stream_auto( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -79,7 +80,9 @@ async def test_pipeline_from_audio_stream_auto( ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_legacy( @@ -98,8 +101,8 @@ async def test_pipeline_from_audio_stream_legacy( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -142,7 +145,9 @@ async def test_pipeline_from_audio_stream_legacy( ) assert process_events(events) == snapshot - assert mock_stt_provider.received == [b"part1", b"part2"] + assert len(mock_stt_provider.received) == 2 + assert mock_stt_provider.received[0].startswith(b"part1") + assert mock_stt_provider.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_entity( @@ -161,8 +166,8 @@ async def test_pipeline_from_audio_stream_entity( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline using an stt entity @@ -205,7 +210,9 @@ async def test_pipeline_from_audio_stream_entity( ) assert process_events(events) == snapshot - assert mock_stt_provider_entity.received == [b"part1", b"part2"] + assert len(mock_stt_provider_entity.received) == 2 + assert mock_stt_provider_entity.received[0].startswith(b"part1") + assert mock_stt_provider_entity.received[1].startswith(b"part2") async def 
test_pipeline_from_audio_stream_no_stt( @@ -224,8 +231,8 @@ async def test_pipeline_from_audio_stream_no_stt( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Create a pipeline without stt support @@ -285,8 +292,8 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" # Try to use the created pipeline @@ -327,7 +334,7 @@ async def test_pipeline_from_audio_stream_wake_word( # [0, 2, ...] wake_chunk_2 = bytes(it.islice(it.cycle(range(0, 256, 2)), BYTES_ONE_SECOND)) - samples_per_chunk = 160 + samples_per_chunk = 160 # 10ms @ 16Khz bytes_per_chunk = samples_per_chunk * 2 # 16-bit async def audio_data(): @@ -343,8 +350,8 @@ async def test_pipeline_from_audio_stream_wake_word( yield wake_chunk_2[i : i + bytes_per_chunk] i += bytes_per_chunk - for chunk in (b"wake word!", b"part1", b"part2"): - yield chunk + bytes(bytes_per_chunk - len(chunk)) + for header in (b"wake word!", b"part1", b"part2"): + yield make_10ms_chunk(header) yield b"" @@ -365,9 +372,7 @@ async def test_pipeline_from_audio_stream_wake_word( wake_word_settings=assist_pipeline.WakeWordSettings( audio_seconds_to_buffer=1.5 ), - audio_settings=assist_pipeline.AudioSettings( - is_vad_enabled=False, samples_per_chunk=samples_per_chunk - ), + audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False), ) assert process_events(events) == snapshot @@ -408,13 +413,11 @@ async def test_pipeline_save_audio( pipeline = assist_pipeline.async_get_pipeline(hass) events: list[assist_pipeline.PipelineEvent] = [] - # Pad out to an even number of bytes since these "samples" will be saved - # as 16-bit values. async def audio_data(): - yield b"wake word_" + yield make_10ms_chunk(b"wake word") # queued audio - yield b"part1_" - yield b"part2_" + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" await assist_pipeline.async_pipeline_from_audio_stream( @@ -457,12 +460,16 @@ async def test_pipeline_save_audio( # Verify wake file with wave.open(str(wake_file), "rb") as wake_wav: wake_data = wake_wav.readframes(wake_wav.getnframes()) - assert wake_data == b"wake word_" + assert wake_data.startswith(b"wake word") # Verify stt file with wave.open(str(stt_file), "rb") as stt_wav: stt_data = stt_wav.readframes(stt_wav.getnframes()) - assert stt_data == b"queued audiopart1_part2_" + assert stt_data.startswith(b"queued audio") + stt_data = stt_data[len(b"queued audio") :] + assert stt_data.startswith(b"part1") + stt_data = stt_data[BYTES_PER_CHUNK:] + assert stt_data.startswith(b"part2") async def test_pipeline_saved_audio_with_device_id( @@ -645,10 +652,10 @@ async def test_wake_word_detection_aborted( events: list[assist_pipeline.PipelineEvent] = [] async def audio_data(): - yield b"silence!" - yield b"wake word!" 
- yield b"part1" - yield b"part2" + yield make_10ms_chunk(b"silence!") + yield make_10ms_chunk(b"wake word!") + yield make_10ms_chunk(b"part1") + yield make_10ms_chunk(b"part2") yield b"" pipeline_store = pipeline_data.pipeline_store diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index 7d4a9b18c12..2da914f4252 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -8,7 +8,12 @@ from unittest.mock import ANY, patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.assist_pipeline.const import DOMAIN +from homeassistant.components.assist_pipeline.const import ( + DOMAIN, + SAMPLE_CHANNELS, + SAMPLE_RATE, + SAMPLE_WIDTH, +) from homeassistant.components.assist_pipeline.pipeline import ( DeviceAudioQueue, Pipeline, @@ -18,7 +23,13 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr -from .conftest import MockWakeWordEntity, MockWakeWordEntity2 +from .conftest import ( + BYTES_ONE_SECOND, + BYTES_PER_CHUNK, + MockWakeWordEntity, + MockWakeWordEntity2, + make_10ms_chunk, +) from tests.common import MockConfigEntry from tests.typing import WebSocketGenerator @@ -205,7 +216,7 @@ async def test_audio_pipeline_with_wake_word_timeout( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "timeout": 1, }, } @@ -229,7 +240,7 @@ async def test_audio_pipeline_with_wake_word_timeout( events.append(msg["event"]) # 2 seconds of silence - await client.send_bytes(bytes([1]) + bytes(16000 * 2 * 2)) + await client.send_bytes(bytes([1]) + bytes(2 * BYTES_ONE_SECOND)) # Time out error msg = await client.receive_json() @@ -259,7 +270,7 @@ async def test_audio_pipeline_with_wake_word_no_timeout( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "timeout": 0, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "timeout": 0, "no_vad": True}, } ) @@ -282,9 +293,10 @@ async def test_audio_pipeline_with_wake_word_no_timeout( events.append(msg["event"]) # "audio" - await client.send_bytes(bytes([handler_id]) + b"wake word") + await client.send_bytes(bytes([handler_id]) + make_10ms_chunk(b"wake word")) - msg = await client.receive_json() + async with asyncio.timeout(1): + msg = await client.receive_json() assert msg["event"]["type"] == "wake_word-end" assert msg["event"]["data"] == snapshot events.append(msg["event"]) @@ -365,7 +377,7 @@ async def test_audio_pipeline_no_wake_word_engine( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -402,7 +414,7 @@ async def test_audio_pipeline_no_wake_word_entity( "start_stage": "wake_word", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, }, } ) @@ -1771,7 +1783,7 @@ async def test_audio_pipeline_with_enhancements( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, # Enhancements "noise_suppression_level": 2, "auto_gain_dbfs": 15, @@ -1801,7 +1813,7 @@ async def test_audio_pipeline_with_enhancements( # One second of silence. # This will pass through the audio enhancement pipeline, but we don't test # the actual output. 
- await client.send_bytes(bytes([handler_id]) + bytes(16000 * 2)) + await client.send_bytes(bytes([handler_id]) + bytes(BYTES_ONE_SECOND)) # End of audio stream (handler id + empty payload) await client.send_bytes(bytes([handler_id])) @@ -1871,7 +1883,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1880,7 +1892,7 @@ async def test_wake_word_cooldown_same_id( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1914,8 +1926,8 @@ async def test_wake_word_cooldown_same_id( assert msg["event"]["data"] == snapshot # Wake both up at the same time - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -1954,7 +1966,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1963,7 +1975,7 @@ async def test_wake_word_cooldown_different_ids( "type": "assist_pipeline/run", "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -1997,8 +2009,8 @@ async def test_wake_word_cooldown_different_ids( assert msg["event"]["data"] == snapshot # Wake both up at the same time, but they will have different wake word ids - await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events msg = await client_1.receive_json() @@ -2073,7 +2085,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_1, "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2084,7 +2096,7 @@ async def test_wake_word_cooldown_different_entities( "pipeline": pipeline_id_2, "start_stage": "wake_word", "end_stage": "tts", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, } ) @@ -2119,8 +2131,8 @@ async def test_wake_word_cooldown_different_entities( # Wake both up at the same time. # They will have the same wake word id, but different entities. 
- await client_1.send_bytes(bytes([handler_id_1]) + b"wake word") - await client_2.send_bytes(bytes([handler_id_2]) + b"wake word") + await client_1.send_bytes(bytes([handler_id_1]) + make_10ms_chunk(b"wake word")) + await client_2.send_bytes(bytes([handler_id_2]) + make_10ms_chunk(b"wake word")) # Get response events error_data: dict[str, Any] | None = None @@ -2158,7 +2170,11 @@ async def test_device_capture( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start capture client_capture = await hass_ws_client(hass) @@ -2181,7 +2197,7 @@ async def test_device_capture( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2232,9 +2248,9 @@ async def test_device_capture( # Verify audio chunks for i, audio_chunk in enumerate(audio_chunks): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2259,7 +2275,11 @@ async def test_device_capture_override( identifiers={("demo", "satellite-1234")}, ) - audio_chunks = [b"chunk1", b"chunk2", b"chunk3"] + audio_chunks = [ + make_10ms_chunk(b"chunk1"), + make_10ms_chunk(b"chunk2"), + make_10ms_chunk(b"chunk3"), + ] # Start first capture client_capture_1 = await hass_ws_client(hass) @@ -2282,7 +2302,7 @@ async def test_device_capture_override( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2365,9 +2385,9 @@ async def test_device_capture_override( # Verify all but first audio chunk for i, audio_chunk in enumerate(audio_chunks[1:]): assert events[i]["type"] == "audio" - assert events[i]["rate"] == 16000 - assert events[i]["width"] == 2 - assert events[i]["channels"] == 1 + assert events[i]["rate"] == SAMPLE_RATE + assert events[i]["width"] == SAMPLE_WIDTH + assert events[i]["channels"] == SAMPLE_CHANNELS # Audio is base64 encoded assert events[i]["audio"] == base64.b64encode(audio_chunk).decode("ascii") @@ -2427,7 +2447,7 @@ async def test_device_capture_queue_full( "type": "assist_pipeline/run", "start_stage": "stt", "end_stage": "stt", - "input": {"sample_rate": 16000, "no_vad": True}, + "input": {"sample_rate": SAMPLE_RATE, "no_vad": True}, "device_id": satellite_device.id, } ) @@ -2448,8 +2468,8 @@ async def test_device_capture_queue_full( assert msg["event"]["type"] == "stt-start" assert msg["event"]["data"] == snapshot - # Single sample will "overflow" the queue - await client_pipeline.send_bytes(bytes([handler_id, 0, 0])) + # Single chunk will "overflow" the queue + await client_pipeline.send_bytes(bytes([handler_id]) + bytes(BYTES_PER_CHUNK)) # End of audio stream await client_pipeline.send_bytes(bytes([handler_id])) @@ -2557,7 +2577,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2569,7 
+2589,7 @@ async def test_stt_cooldown_same_id( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2628,7 +2648,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "ok_nabu", }, } @@ -2640,7 +2660,7 @@ async def test_stt_cooldown_different_ids( "start_stage": "stt", "end_stage": "tts", "input": { - "sample_rate": 16000, + "sample_rate": SAMPLE_RATE, "wake_word_phrase": "hey_jarvis", }, } From d87366b1e7154637811091de4c2523ad730aff3a Mon Sep 17 00:00:00 2001 From: "David F. Mulcahey" Date: Fri, 2 Aug 2024 06:22:36 -0400 Subject: [PATCH 0317/1635] Make ZHA load quirks earlier (#123027) --- homeassistant/components/zha/__init__.py | 4 +++- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/zha/test_repairs.py | 10 +++++----- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index 216261e3011..fc573b19ab1 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -117,6 +117,8 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b ha_zha_data.config_entry = config_entry zha_lib_data: ZHAData = create_zha_config(hass, ha_zha_data) + zha_gateway = await Gateway.async_from_config(zha_lib_data) + # Load and cache device trigger information early device_registry = dr.async_get(hass) radio_mgr = ZhaRadioManager.from_config_entry(hass, config_entry) @@ -140,7 +142,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b _LOGGER.debug("Trigger cache: %s", zha_lib_data.device_trigger_cache) try: - zha_gateway = await Gateway.async_from_config(zha_lib_data) + await zha_gateway.async_initialize() except NetworkSettingsInconsistent as exc: await warn_on_inconsistent_network_settings( hass, diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index d2d328cc84b..6e35339c53f 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.24"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.25"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index 0bf4b77e9d2..99a237efe02 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2986,7 +2986,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.24 +zha==0.0.25 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f9670987b70..4ee6914700f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2360,7 +2360,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.24 +zha==0.0.25 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/tests/components/zha/test_repairs.py b/tests/components/zha/test_repairs.py index 7f9b2b4a016..c2925161748 100644 --- a/tests/components/zha/test_repairs.py +++ b/tests/components/zha/test_repairs.py @@ -148,7 +148,7 @@ async def test_multipan_firmware_repair( autospec=True, ), patch( - 
"homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), patch( @@ -199,7 +199,7 @@ async def test_multipan_firmware_no_repair_on_probe_failure( autospec=True, ), patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -236,7 +236,7 @@ async def test_multipan_firmware_retry_on_probe_ezsp( autospec=True, ), patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=RuntimeError(), ), ): @@ -311,7 +311,7 @@ async def test_inconsistent_settings_keep_new( old_state = network_backup with patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, @@ -390,7 +390,7 @@ async def test_inconsistent_settings_restore_old( old_state = network_backup with patch( - "homeassistant.components.zha.Gateway.async_from_config", + "homeassistant.components.zha.Gateway.async_initialize", side_effect=NetworkSettingsInconsistent( message="Network settings are inconsistent", new_state=new_state, From a624ada8d66f15942a6995c4f1f7a9f9699f0f66 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 1 Aug 2024 15:37:26 -0500 Subject: [PATCH 0318/1635] Fix doorbird models are missing the schedule API (#123033) * Fix doorbird models are missing the schedule API fixes #122997 * cover --- homeassistant/components/doorbird/device.py | 14 +++++++--- tests/components/doorbird/__init__.py | 29 ++++++++++----------- tests/components/doorbird/conftest.py | 2 ++ tests/components/doorbird/test_init.py | 10 +++++++ 4 files changed, 37 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/doorbird/device.py b/homeassistant/components/doorbird/device.py index 866251f3d28..7cd45487464 100644 --- a/homeassistant/components/doorbird/device.py +++ b/homeassistant/components/doorbird/device.py @@ -5,9 +5,11 @@ from __future__ import annotations from collections import defaultdict from dataclasses import dataclass from functools import cached_property +from http import HTTPStatus import logging from typing import Any +from aiohttp import ClientResponseError from doorbirdpy import ( DoorBird, DoorBirdScheduleEntry, @@ -170,15 +172,21 @@ class ConfiguredDoorBird: ) -> DoorbirdEventConfig: """Get events and unconfigured favorites from http favorites.""" device = self.device - schedule = await device.schedule() + events: list[DoorbirdEvent] = [] + unconfigured_favorites: defaultdict[str, list[str]] = defaultdict(list) + try: + schedule = await device.schedule() + except ClientResponseError as ex: + if ex.status == HTTPStatus.NOT_FOUND: + # D301 models do not support schedules + return DoorbirdEventConfig(events, [], unconfigured_favorites) + raise favorite_input_type = { output.param: entry.input for entry in schedule for output in entry.output if output.event == HTTP_EVENT_TYPE } - events: list[DoorbirdEvent] = [] - unconfigured_favorites: defaultdict[str, list[str]] = defaultdict(list) default_event_types = { self._get_event_name(event): event_type for event, event_type in DEFAULT_EVENT_TYPES diff --git a/tests/components/doorbird/__init__.py b/tests/components/doorbird/__init__.py index 41def92f121..2d517dfcefe 100644 --- a/tests/components/doorbird/__init__.py +++ 
b/tests/components/doorbird/__init__.py @@ -47,31 +47,30 @@ def get_mock_doorbird_api( info: dict[str, Any] | None = None, info_side_effect: Exception | None = None, schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, favorites: dict[str, dict[str, Any]] | None = None, favorites_side_effect: Exception | None = None, change_schedule: tuple[bool, int] | None = None, ) -> DoorBird: """Return a mock DoorBirdAPI object with return values.""" doorbirdapi_mock = MagicMock(spec_set=DoorBird) - type(doorbirdapi_mock).info = AsyncMock( - side_effect=info_side_effect, return_value=info + api_mock_type = type(doorbirdapi_mock) + api_mock_type.info = AsyncMock(side_effect=info_side_effect, return_value=info) + api_mock_type.favorites = AsyncMock( + side_effect=favorites_side_effect, return_value=favorites ) - type(doorbirdapi_mock).favorites = AsyncMock( - side_effect=favorites_side_effect, - return_value=favorites, - ) - type(doorbirdapi_mock).change_favorite = AsyncMock(return_value=True) - type(doorbirdapi_mock).change_schedule = AsyncMock( + api_mock_type.change_favorite = AsyncMock(return_value=True) + api_mock_type.change_schedule = AsyncMock( return_value=change_schedule or (True, 200) ) - type(doorbirdapi_mock).schedule = AsyncMock(return_value=schedule) - type(doorbirdapi_mock).energize_relay = AsyncMock(return_value=True) - type(doorbirdapi_mock).turn_light_on = AsyncMock(return_value=True) - type(doorbirdapi_mock).delete_favorite = AsyncMock(return_value=True) - type(doorbirdapi_mock).get_image = AsyncMock(return_value=b"image") - type(doorbirdapi_mock).doorbell_state = AsyncMock( - side_effect=mock_unauthorized_exception() + api_mock_type.schedule = AsyncMock( + return_value=schedule, side_effect=schedule_side_effect ) + api_mock_type.energize_relay = AsyncMock(return_value=True) + api_mock_type.turn_light_on = AsyncMock(return_value=True) + api_mock_type.delete_favorite = AsyncMock(return_value=True) + api_mock_type.get_image = AsyncMock(return_value=b"image") + api_mock_type.doorbell_state = AsyncMock(side_effect=mock_unauthorized_exception()) return doorbirdapi_mock diff --git a/tests/components/doorbird/conftest.py b/tests/components/doorbird/conftest.py index 59ead250293..2e367e4e1d8 100644 --- a/tests/components/doorbird/conftest.py +++ b/tests/components/doorbird/conftest.py @@ -102,6 +102,7 @@ async def doorbird_mocker( info: dict[str, Any] | None = None, info_side_effect: Exception | None = None, schedule: list[DoorBirdScheduleEntry] | None = None, + schedule_side_effect: Exception | None = None, favorites: dict[str, dict[str, Any]] | None = None, favorites_side_effect: Exception | None = None, options: dict[str, Any] | None = None, @@ -118,6 +119,7 @@ async def doorbird_mocker( info=info or doorbird_info, info_side_effect=info_side_effect, schedule=schedule or doorbird_schedule, + schedule_side_effect=schedule_side_effect, favorites=favorites or doorbird_favorites, favorites_side_effect=favorites_side_effect, change_schedule=change_schedule, diff --git a/tests/components/doorbird/test_init.py b/tests/components/doorbird/test_init.py index fb8bad2fb46..31266c4acf0 100644 --- a/tests/components/doorbird/test_init.py +++ b/tests/components/doorbird/test_init.py @@ -56,6 +56,16 @@ async def test_http_favorites_request_fails( assert doorbird_entry.entry.state is ConfigEntryState.SETUP_RETRY +async def test_http_schedule_api_missing( + doorbird_mocker: DoorbirdMockerType, +) -> None: + """Test missing the schedule API is non-fatal as not 
all models support it.""" + doorbird_entry = await doorbird_mocker( + schedule_side_effect=mock_not_found_exception() + ) + assert doorbird_entry.entry.state is ConfigEntryState.LOADED + + async def test_events_changed( hass: HomeAssistant, doorbird_mocker: DoorbirdMockerType, From b06a5af069cbf13e96e452db23c0966b3b49b193 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 2 Aug 2024 12:53:39 +0200 Subject: [PATCH 0319/1635] Address post-merge reviews for KNX integration (#123038) --- homeassistant/components/knx/__init__.py | 2 +- homeassistant/components/knx/binary_sensor.py | 13 +++---- homeassistant/components/knx/button.py | 14 ++++---- homeassistant/components/knx/climate.py | 14 +++++--- homeassistant/components/knx/cover.py | 13 +++---- homeassistant/components/knx/date.py | 14 +++++--- homeassistant/components/knx/datetime.py | 12 ++++--- homeassistant/components/knx/fan.py | 13 +++---- homeassistant/components/knx/knx_entity.py | 36 ++++++++++++++++--- homeassistant/components/knx/light.py | 30 ++++++++-------- homeassistant/components/knx/notify.py | 14 +++++--- homeassistant/components/knx/number.py | 12 ++++--- homeassistant/components/knx/project.py | 6 ++-- homeassistant/components/knx/scene.py | 13 +++---- homeassistant/components/knx/select.py | 12 ++++--- homeassistant/components/knx/sensor.py | 17 +++++---- .../components/knx/storage/config_store.py | 28 ++++++++------- homeassistant/components/knx/switch.py | 30 ++++++++-------- homeassistant/components/knx/text.py | 12 ++++--- homeassistant/components/knx/time.py | 17 +++++---- homeassistant/components/knx/weather.py | 14 +++++--- 21 files changed, 211 insertions(+), 125 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index 709a82b31fd..fd46cad8489 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -302,7 +302,7 @@ class KNXModule: self.entry = entry self.project = KNXProject(hass=hass, entry=entry) - self.config_store = KNXConfigStore(hass=hass, entry=entry) + self.config_store = KNXConfigStore(hass=hass, config_entry=entry) self.xknx = XKNX( connection_config=self.connection_config(), diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index 0423c1d7b32..ff15f725fae 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from xknx import XKNX from xknx.devices import BinarySensor as XknxBinarySensor from homeassistant import config_entries @@ -23,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import ATTR_COUNTER, ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxEntity from .schema import BinarySensorSchema @@ -34,11 +34,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: ConfigType = hass.data[DATA_KNX_CONFIG] async_add_entities( - KNXBinarySensor(xknx, entity_config) + KNXBinarySensor(knx_module, entity_config) for entity_config in config[Platform.BINARY_SENSOR] ) @@ -48,11 +48,12 @@ class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): _device: XknxBinarySensor - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX binary sensor.""" super().__init__( + knx_module=knx_module, device=XknxBinarySensor( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address_state=config[BinarySensorSchema.CONF_STATE_ADDRESS], invert=config[BinarySensorSchema.CONF_INVERT], @@ -62,7 +63,7 @@ class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): ], context_timeout=config.get(BinarySensorSchema.CONF_CONTEXT_TIMEOUT), reset_after=config.get(BinarySensorSchema.CONF_RESET_AFTER), - ) + ), ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_device_class = config.get(CONF_DEVICE_CLASS) diff --git a/homeassistant/components/knx/button.py b/homeassistant/components/knx/button.py index a38d8ad1b6c..2eb68eebe43 100644 --- a/homeassistant/components/knx/button.py +++ b/homeassistant/components/knx/button.py @@ -2,7 +2,6 @@ from __future__ import annotations -from xknx import XKNX from xknx.devices import RawValue as XknxRawValue from homeassistant import config_entries @@ -12,6 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import CONF_PAYLOAD_LENGTH, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity @@ -22,11 +22,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: ConfigType = hass.data[DATA_KNX_CONFIG] async_add_entities( - KNXButton(xknx, entity_config) for entity_config in config[Platform.BUTTON] + KNXButton(knx_module, entity_config) + for entity_config in config[Platform.BUTTON] ) @@ -35,15 +36,16 @@ class KNXButton(KnxEntity, ButtonEntity): _device: XknxRawValue - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX button.""" super().__init__( + knx_module=knx_module, device=XknxRawValue( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], payload_length=config[CONF_PAYLOAD_LENGTH], group_address=config[KNX_ADDRESS], - ) + ), ) self._payload = config[CONF_PAYLOAD] self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 26be6a03a79..7470d60ef4b 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -27,6 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, @@ -48,10 +49,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up climate(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.CLIMATE] - async_add_entities(KNXClimate(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXClimate(knx_module, entity_config) for entity_config in config + ) def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: @@ -137,9 +140,12 @@ class KNXClimate(KnxEntity, ClimateEntity): _attr_temperature_unit = UnitOfTemperature.CELSIUS _enable_turn_on_off_backwards_compatibility = False - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX climate device.""" - super().__init__(_create_climate(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_climate(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE if self._device.supports_on_off: diff --git a/homeassistant/components/knx/cover.py b/homeassistant/components/knx/cover.py index 9d86d6ac272..1962db0ad3f 100644 --- a/homeassistant/components/knx/cover.py +++ b/homeassistant/components/knx/cover.py @@ -5,7 +5,6 @@ from __future__ import annotations from collections.abc import Callable from typing import Any -from xknx import XKNX from xknx.devices import Cover as XknxCover from homeassistant import config_entries @@ -26,6 +25,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxEntity from .schema import CoverSchema @@ -37,10 +37,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up cover(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.COVER] - async_add_entities(KNXCover(xknx, entity_config) for entity_config in config) + async_add_entities(KNXCover(knx_module, entity_config) for entity_config in config) class KNXCover(KnxEntity, CoverEntity): @@ -48,11 +48,12 @@ class KNXCover(KnxEntity, CoverEntity): _device: XknxCover - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize the cover.""" super().__init__( + knx_module=knx_module, device=XknxCover( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS), group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS), @@ -70,7 +71,7 @@ class KNXCover(KnxEntity, CoverEntity): invert_updown=config[CoverSchema.CONF_INVERT_UPDOWN], invert_position=config[CoverSchema.CONF_INVERT_POSITION], invert_angle=config[CoverSchema.CONF_INVERT_ANGLE], - ) + ), ) self._unsubscribe_auto_updater: Callable[[], None] | None = None diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index 98cd22e0751..80fea63d0a6 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -22,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -39,10 +40,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATE] - async_add_entities(KNXDateEntity(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXDateEntity(knx_module, entity_config) for entity_config in config + ) def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice: @@ -63,9 +66,12 @@ class KNXDateEntity(KnxEntity, DateEntity, RestoreEntity): _device: XknxDateDevice - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX time.""" - super().__init__(_create_xknx_device(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_xknx_device(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index d4a25b522eb..16ccb7474a7 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -23,6 +23,7 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType import homeassistant.util.dt as dt_util +from . 
import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -40,11 +41,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATETIME] async_add_entities( - KNXDateTimeEntity(xknx, entity_config) for entity_config in config + KNXDateTimeEntity(knx_module, entity_config) for entity_config in config ) @@ -66,9 +67,12 @@ class KNXDateTimeEntity(KnxEntity, DateTimeEntity, RestoreEntity): _device: XknxDateTimeDevice - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX time.""" - super().__init__(_create_xknx_device(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_xknx_device(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index 426a750f766..940e241ccda 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -5,7 +5,6 @@ from __future__ import annotations import math from typing import Any, Final -from xknx import XKNX from xknx.devices import Fan as XknxFan from homeassistant import config_entries @@ -20,6 +19,7 @@ from homeassistant.util.percentage import ( ) from homeassistant.util.scaling import int_states_in_range +from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity from .schema import FanSchema @@ -33,10 +33,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up fan(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.FAN] - async_add_entities(KNXFan(xknx, entity_config) for entity_config in config) + async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config) class KNXFan(KnxEntity, FanEntity): @@ -45,12 +45,13 @@ class KNXFan(KnxEntity, FanEntity): _device: XknxFan _enable_turn_on_off_backwards_compatibility = False - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX fan.""" max_step = config.get(FanSchema.CONF_MAX_STEP) super().__init__( + knx_module=knx_module, device=XknxFan( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address_speed=config.get(KNX_ADDRESS), group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS), @@ -61,7 +62,7 @@ class KNXFan(KnxEntity, FanEntity): FanSchema.CONF_OSCILLATION_STATE_ADDRESS ), max_step=max_step, - ) + ), ) # FanSpeedMode.STEP if max_step is set self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None diff --git a/homeassistant/components/knx/knx_entity.py b/homeassistant/components/knx/knx_entity.py index eebddbb0623..2b8d2e71186 100644 --- a/homeassistant/components/knx/knx_entity.py +++ b/homeassistant/components/knx/knx_entity.py @@ -2,23 +2,29 @@ from __future__ import annotations -from typing import cast +from typing import TYPE_CHECKING from xknx.devices import Device as XknxDevice +from homeassistant.helpers.dispatcher import async_dispatcher_connect from 
homeassistant.helpers.entity import Entity -from . import KNXModule from .const import DOMAIN +if TYPE_CHECKING: + from . import KNXModule + +SIGNAL_ENTITY_REMOVE = f"{DOMAIN}_entity_remove_signal.{{}}" + class KnxEntity(Entity): """Representation of a KNX entity.""" _attr_should_poll = False - def __init__(self, device: XknxDevice) -> None: + def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None: """Set up device.""" + self._knx_module = knx_module self._device = device @property @@ -29,8 +35,7 @@ class KnxEntity(Entity): @property def available(self) -> bool: """Return True if entity is available.""" - knx_module = cast(KNXModule, self.hass.data[DOMAIN]) - return knx_module.connected + return self._knx_module.connected async def async_update(self) -> None: """Request a state update from KNX bus.""" @@ -44,8 +49,29 @@ class KnxEntity(Entity): """Store register state change callback and start device object.""" self._device.register_device_updated_cb(self.after_update_callback) self._device.xknx.devices.async_add(self._device) + # super call needed to have methods of mulit-inherited classes called + # eg. for restoring state (like _KNXSwitch) + await super().async_added_to_hass() async def async_will_remove_from_hass(self) -> None: """Disconnect device object when removed.""" self._device.unregister_device_updated_cb(self.after_update_callback) self._device.xknx.devices.async_remove(self._device) + + +class KnxUIEntity(KnxEntity): + """Representation of a KNX UI entity.""" + + _attr_unique_id: str + + async def async_added_to_hass(self) -> None: + """Register callbacks when entity added to hass.""" + await super().async_added_to_hass() + self._knx_module.config_store.entities.add(self._attr_unique_id) + self.async_on_remove( + async_dispatcher_connect( + self.hass, + SIGNAL_ENTITY_REMOVE.format(self._attr_unique_id), + self.async_remove, + ) + ) diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index 8ec42f3ee56..1197f09354b 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -27,7 +27,7 @@ import homeassistant.util.color as color_util from . 
import KNXModule from .const import CONF_SYNC_STATE, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, ColorTempModes -from .knx_entity import KnxEntity +from .knx_entity import KnxEntity, KnxUIEntity from .schema import LightSchema from .storage.const import ( CONF_COLOR_TEMP_MAX, @@ -65,10 +65,10 @@ async def async_setup_entry( knx_module: KNXModule = hass.data[DOMAIN] entities: list[KnxEntity] = [] - if yaml_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): + if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): entities.extend( - KnxYamlLight(knx_module.xknx, entity_config) - for entity_config in yaml_config + KnxYamlLight(knx_module, entity_config) + for entity_config in yaml_platform_config ) if ui_config := knx_module.config_store.data["entities"].get(Platform.LIGHT): entities.extend( @@ -294,7 +294,7 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight ) -class _KnxLight(KnxEntity, LightEntity): +class _KnxLight(LightEntity): """Representation of a KNX light.""" _attr_max_color_temp_kelvin: int @@ -519,14 +519,17 @@ class _KnxLight(KnxEntity, LightEntity): await self._device.set_off() -class KnxYamlLight(_KnxLight): +class KnxYamlLight(_KnxLight, KnxEntity): """Representation of a KNX light.""" _device: XknxLight - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX light.""" - super().__init__(_create_yaml_light(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_yaml_light(knx_module.xknx, config), + ) self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN] self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN] self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) @@ -543,20 +546,21 @@ class KnxYamlLight(_KnxLight): ) -class KnxUiLight(_KnxLight): +class KnxUiLight(_KnxLight, KnxUIEntity): """Representation of a KNX light.""" - _device: XknxLight _attr_has_entity_name = True + _device: XknxLight def __init__( self, knx_module: KNXModule, unique_id: str, config: ConfigType ) -> None: """Initialize of KNX light.""" super().__init__( - _create_ui_light( + knx_module=knx_module, + device=_create_ui_light( knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] - ) + ), ) self._attr_max_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MAX] self._attr_min_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MIN] @@ -565,5 +569,3 @@ class KnxUiLight(_KnxLight): self._attr_unique_id = unique_id if device_info := config[CONF_ENTITY].get(CONF_DEVICE_INFO): self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) - - knx_module.config_store.entities[unique_id] = self diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 997bdb81057..b349681990c 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -18,6 +18,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity @@ -44,7 +45,7 @@ async def async_get_service( class KNXNotificationService(BaseNotificationService): - """Implement demo notification service.""" + """Implement notification service.""" def __init__(self, devices: list[XknxNotification]) -> None: """Initialize the service.""" @@ -86,10 +87,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up notify(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NOTIFY] - async_add_entities(KNXNotify(xknx, entity_config) for entity_config in config) + async_add_entities(KNXNotify(knx_module, entity_config) for entity_config in config) def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotification: @@ -107,9 +108,12 @@ class KNXNotify(KnxEntity, NotifyEntity): _device: XknxNotification - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX notification.""" - super().__init__(_create_notification_instance(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_notification_instance(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/number.py b/homeassistant/components/knx/number.py index 8a9f1dea87c..3d4af503dff 100644 --- a/homeassistant/components/knx/number.py +++ b/homeassistant/components/knx/number.py @@ -22,6 +22,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -39,10 +40,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NUMBER] - async_add_entities(KNXNumber(xknx, entity_config) for entity_config in config) + async_add_entities(KNXNumber(knx_module, entity_config) for entity_config in config) def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue: @@ -62,9 +63,12 @@ class KNXNumber(KnxEntity, RestoreNumber): _device: NumericValue - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX number.""" - super().__init__(_create_numeric_value(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_numeric_value(knx_module.xknx, config), + ) self._attr_native_max_value = config.get( NumberSchema.CONF_MAX, self._device.sensor_value.dpt_class.value_max, diff --git a/homeassistant/components/knx/project.py b/homeassistant/components/knx/project.py index 3b3309dfc7d..b5bafe00724 100644 --- a/homeassistant/components/knx/project.py +++ b/homeassistant/components/knx/project.py @@ -8,9 +8,11 @@ from typing import Final from xknx import XKNX from xknx.dpt import DPTBase +from xknx.telegram.address import DeviceAddressableType from xknxproject import XKNXProj from xknxproject.models import ( Device, + DPTType, GroupAddress as GroupAddressModel, KNXProject as KNXProjectModel, ProjectInfo, @@ -89,7 +91,7 @@ class KNXProject: self.devices = project["devices"] self.info = project["info"] xknx.group_address_dpt.clear() - xknx_ga_dict = {} + xknx_ga_dict: dict[DeviceAddressableType, DPTType] = {} for ga_model in project["group_addresses"].values(): ga_info = _create_group_address_info(ga_model) @@ -97,7 +99,7 @@ class KNXProject: if (dpt_model := ga_model.get("dpt")) is not None: xknx_ga_dict[ga_model["address"]] = dpt_model - xknx.group_address_dpt.set(xknx_ga_dict) # type: ignore[arg-type] + xknx.group_address_dpt.set(xknx_ga_dict) _LOGGER.debug( "Loaded KNX project data with %s group addresses from storage", diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index 342d0f9eb83..fc37f36dd01 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from xknx import XKNX from xknx.devices import Scene as XknxScene from homeassistant import config_entries @@ -14,6 +13,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS from .knx_entity import KnxEntity from .schema import SceneSchema @@ -25,10 +25,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up scene(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SCENE] - async_add_entities(KNXScene(xknx, entity_config) for entity_config in config) + async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) class KNXScene(KnxEntity, Scene): @@ -36,15 +36,16 @@ class KNXScene(KnxEntity, Scene): _device: XknxScene - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Init KNX scene.""" super().__init__( + knx_module=knx_module, device=XknxScene( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address=config[KNX_ADDRESS], scene_number=config[SceneSchema.CONF_SCENE_NUMBER], - ) + ), ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = ( diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index f338bf9feaf..1b862010c2a 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -20,6 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONF_PAYLOAD_LENGTH, CONF_RESPOND_TO_READ, @@ -39,10 +40,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SELECT] - async_add_entities(KNXSelect(xknx, entity_config) for entity_config in config) + async_add_entities(KNXSelect(knx_module, entity_config) for entity_config in config) def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue: @@ -63,9 +64,12 @@ class KNXSelect(KnxEntity, SelectEntity, RestoreEntity): _device: RawValue - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX select.""" - super().__init__(_create_raw_value(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_raw_value(knx_module.xknx, config), + ) self._option_payloads: dict[str, int] = { option[SelectSchema.CONF_OPTION]: option[CONF_PAYLOAD] for option in config[SelectSchema.CONF_OPTIONS] diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index 5a09a921901..ab363e2a35f 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -116,17 +116,17 @@ async def async_setup_entry( ) -> None: """Set up sensor(s) for KNX platform.""" knx_module: KNXModule = hass.data[DOMAIN] - - async_add_entities( + entities: list[SensorEntity] = [] + entities.extend( KNXSystemSensor(knx_module, description) for description in SYSTEM_ENTITY_DESCRIPTIONS ) - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG].get(Platform.SENSOR) if config: - async_add_entities( - KNXSensor(knx_module.xknx, entity_config) for entity_config in config + entities.extend( + KNXSensor(knx_module, entity_config) for 
entity_config in config ) + async_add_entities(entities) def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor: @@ -146,9 +146,12 @@ class KNXSensor(KnxEntity, SensorEntity): _device: XknxSensor - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX sensor.""" - super().__init__(_create_sensor(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_sensor(knx_module.xknx, config), + ) if device_class := config.get(CONF_DEVICE_CLASS): self._attr_device_class = device_class else: diff --git a/homeassistant/components/knx/storage/config_store.py b/homeassistant/components/knx/storage/config_store.py index 7ea61e1dd3e..876fe19a4b9 100644 --- a/homeassistant/components/knx/storage/config_store.py +++ b/homeassistant/components/knx/storage/config_store.py @@ -2,21 +2,20 @@ from collections.abc import Callable import logging -from typing import TYPE_CHECKING, Any, Final, TypedDict +from typing import Any, Final, TypedDict from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PLATFORM, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.storage import Store from homeassistant.util.ulid import ulid_now from ..const import DOMAIN +from ..knx_entity import SIGNAL_ENTITY_REMOVE from .const import CONF_DATA -if TYPE_CHECKING: - from ..knx_entity import KnxEntity - _LOGGER = logging.getLogger(__name__) STORAGE_VERSION: Final = 1 @@ -40,15 +39,16 @@ class KNXConfigStore: def __init__( self, hass: HomeAssistant, - entry: ConfigEntry, + config_entry: ConfigEntry, ) -> None: """Initialize config store.""" self.hass = hass + self.config_entry = config_entry self._store = Store[KNXConfigStoreModel](hass, STORAGE_VERSION, STORAGE_KEY) self.data = KNXConfigStoreModel(entities={}) - # entities and async_add_entity are filled by platform setups - self.entities: dict[str, KnxEntity] = {} # unique_id as key + # entities and async_add_entity are filled by platform / entity setups + self.entities: set[str] = set() # unique_id as values self.async_add_entity: dict[ Platform, Callable[[str, dict[str, Any]], None] ] = {} @@ -108,7 +108,7 @@ class KNXConfigStore: raise ConfigStoreException( f"Entity not found in storage: {entity_id} - {unique_id}" ) - await self.entities.pop(unique_id).async_remove() + async_dispatcher_send(self.hass, SIGNAL_ENTITY_REMOVE.format(unique_id)) self.async_add_entity[platform](unique_id, data) # store data after entity is added to make sure config doesn't raise exceptions self.data["entities"][platform][unique_id] = data @@ -126,7 +126,7 @@ class KNXConfigStore: f"Entity not found in {entry.domain}: {entry.unique_id}" ) from err try: - del self.entities[entry.unique_id] + self.entities.remove(entry.unique_id) except KeyError: _LOGGER.warning("Entity not initialized when deleted: %s", entity_id) entity_registry.async_remove(entity_id) @@ -134,10 +134,14 @@ class KNXConfigStore: def get_entity_entries(self) -> list[er.RegistryEntry]: """Get entity_ids of all configured entities by platform.""" + entity_registry = er.async_get(self.hass) + return [ - entity.registry_entry - for entity in self.entities.values() - if entity.registry_entry is not None + registry_entry + for registry_entry in er.async_entries_for_config_entry( + entity_registry, self.config_entry.entry_id 
+ ) + if registry_entry.unique_id in self.entities ] diff --git a/homeassistant/components/knx/switch.py b/homeassistant/components/knx/switch.py index 0a8a1dff964..a5f430e6157 100644 --- a/homeassistant/components/knx/switch.py +++ b/homeassistant/components/knx/switch.py @@ -4,7 +4,6 @@ from __future__ import annotations from typing import Any -from xknx import XKNX from xknx.devices import Switch as XknxSwitch from homeassistant import config_entries @@ -33,7 +32,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxEntity, KnxUIEntity from .schema import SwitchSchema from .storage.const import ( CONF_DEVICE_INFO, @@ -54,10 +53,10 @@ async def async_setup_entry( knx_module: KNXModule = hass.data[DOMAIN] entities: list[KnxEntity] = [] - if yaml_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): + if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): entities.extend( - KnxYamlSwitch(knx_module.xknx, entity_config) - for entity_config in yaml_config + KnxYamlSwitch(knx_module, entity_config) + for entity_config in yaml_platform_config ) if ui_config := knx_module.config_store.data["entities"].get(Platform.SWITCH): entities.extend( @@ -75,7 +74,7 @@ async def async_setup_entry( knx_module.config_store.async_add_entity[Platform.SWITCH] = add_new_ui_switch -class _KnxSwitch(KnxEntity, SwitchEntity, RestoreEntity): +class _KnxSwitch(SwitchEntity, RestoreEntity): """Base class for a KNX switch.""" _device: XknxSwitch @@ -103,36 +102,41 @@ class _KnxSwitch(KnxEntity, SwitchEntity, RestoreEntity): await self._device.set_off() -class KnxYamlSwitch(_KnxSwitch): +class KnxYamlSwitch(_KnxSwitch, KnxEntity): """Representation of a KNX switch configured from YAML.""" - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + _device: XknxSwitch + + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of KNX switch.""" super().__init__( + knx_module=knx_module, device=XknxSwitch( - xknx, + xknx=knx_module.xknx, name=config[CONF_NAME], group_address=config[KNX_ADDRESS], group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS), respond_to_read=config[CONF_RESPOND_TO_READ], invert=config[SwitchSchema.CONF_INVERT], - ) + ), ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_device_class = config.get(CONF_DEVICE_CLASS) self._attr_unique_id = str(self._device.switch.group_address) -class KnxUiSwitch(_KnxSwitch): +class KnxUiSwitch(_KnxSwitch, KnxUIEntity): """Representation of a KNX switch configured from UI.""" _attr_has_entity_name = True + _device: XknxSwitch def __init__( self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] ) -> None: """Initialize of KNX switch.""" super().__init__( + knx_module=knx_module, device=XknxSwitch( knx_module.xknx, name=config[CONF_ENTITY][CONF_NAME], @@ -144,11 +148,9 @@ class KnxUiSwitch(_KnxSwitch): respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ], sync_state=config[DOMAIN][CONF_SYNC_STATE], invert=config[DOMAIN][CONF_INVERT], - ) + ), ) self._attr_entity_category = config[CONF_ENTITY][CONF_ENTITY_CATEGORY] self._attr_unique_id = unique_id if device_info := config[CONF_ENTITY].get(CONF_DEVICE_INFO): self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_info)}) - - knx_module.config_store.entities[unique_id] = self diff --git a/homeassistant/components/knx/text.py b/homeassistant/components/knx/text.py index 22d008cd5ce..9bca37434ac 100644 --- a/homeassistant/components/knx/text.py +++ 
b/homeassistant/components/knx/text.py @@ -22,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -38,10 +39,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor(s) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TEXT] - async_add_entities(KNXText(xknx, entity_config) for entity_config in config) + async_add_entities(KNXText(knx_module, entity_config) for entity_config in config) def _create_notification(xknx: XKNX, config: ConfigType) -> XknxNotification: @@ -62,9 +63,12 @@ class KNXText(KnxEntity, TextEntity, RestoreEntity): _device: XknxNotification _attr_native_max = 14 - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX text.""" - super().__init__(_create_notification(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_notification(knx_module.xknx, config), + ) self._attr_mode = config[CONF_MODE] self._attr_pattern = ( r"[\u0000-\u00ff]*" # Latin-1 diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index 28e1419233c..5d9225a1e41 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -3,7 +3,6 @@ from __future__ import annotations from datetime import time as dt_time -from typing import Final from xknx import XKNX from xknx.devices import TimeDevice as XknxTimeDevice @@ -23,6 +22,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType +from . 
import KNXModule from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, @@ -33,8 +33,6 @@ from .const import ( ) from .knx_entity import KnxEntity -_TIME_TRANSLATION_FORMAT: Final = "%H:%M:%S" - async def async_setup_entry( hass: HomeAssistant, @@ -42,10 +40,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TIME] - async_add_entities(KNXTimeEntity(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXTimeEntity(knx_module, entity_config) for entity_config in config + ) def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice: @@ -66,9 +66,12 @@ class KNXTimeEntity(KnxEntity, TimeEntity, RestoreEntity): _device: XknxTimeDevice - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize a KNX time.""" - super().__init__(_create_xknx_device(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_xknx_device(knx_module.xknx, config), + ) self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) self._attr_unique_id = str(self._device.remote_value.group_address) diff --git a/homeassistant/components/knx/weather.py b/homeassistant/components/knx/weather.py index 584c9fd3323..11dae452e2f 100644 --- a/homeassistant/components/knx/weather.py +++ b/homeassistant/components/knx/weather.py @@ -19,6 +19,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType +from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxEntity from .schema import WeatherSchema @@ -30,10 +31,12 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch(es) for KNX platform.""" - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module: KNXModule = hass.data[DOMAIN] config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.WEATHER] - async_add_entities(KNXWeather(xknx, entity_config) for entity_config in config) + async_add_entities( + KNXWeather(knx_module, entity_config) for entity_config in config + ) def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather: @@ -80,9 +83,12 @@ class KNXWeather(KnxEntity, WeatherEntity): _attr_native_temperature_unit = UnitOfTemperature.CELSIUS _attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND - def __init__(self, xknx: XKNX, config: ConfigType) -> None: + def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: """Initialize of a KNX sensor.""" - super().__init__(_create_weather(xknx, config)) + super().__init__( + knx_module=knx_module, + device=_create_weather(knx_module.xknx, config), + ) self._attr_unique_id = str(self._device._temperature.group_address_state) # noqa: SLF001 self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY) From dcae2f35ce917516d0abb8c8d45bb6931a700c37 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 2 Aug 2024 08:50:19 +0200 Subject: [PATCH 0320/1635] Mitigate breaking change for KNX climate schema (#123043) --- homeassistant/components/knx/schema.py | 7 +++-- homeassistant/components/knx/validation.py | 34 ++++++++++++++++++++++ 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knx/schema.py 
b/homeassistant/components/knx/schema.py index 43037ad8188..c31b3d30ad0 100644 --- a/homeassistant/components/knx/schema.py +++ b/homeassistant/components/knx/schema.py @@ -56,6 +56,7 @@ from .const import ( ColorTempModes, ) from .validation import ( + backwards_compatible_xknx_climate_enum_member, dpt_base_type_validator, ga_list_validator, ga_validator, @@ -409,10 +410,12 @@ class ClimateSchema(KNXPlatformSchema): CONF_ON_OFF_INVERT, default=DEFAULT_ON_OFF_INVERT ): cv.boolean, vol.Optional(CONF_OPERATION_MODES): vol.All( - cv.ensure_list, [vol.All(vol.Upper, cv.enum(HVACOperationMode))] + cv.ensure_list, + [backwards_compatible_xknx_climate_enum_member(HVACOperationMode)], ), vol.Optional(CONF_CONTROLLER_MODES): vol.All( - cv.ensure_list, [vol.All(vol.Upper, cv.enum(HVACControllerMode))] + cv.ensure_list, + [backwards_compatible_xknx_climate_enum_member(HVACControllerMode)], ), vol.Optional( CONF_DEFAULT_CONTROLLER_MODE, default=HVACMode.HEAT diff --git a/homeassistant/components/knx/validation.py b/homeassistant/components/knx/validation.py index 422b8474fd9..0283b65f899 100644 --- a/homeassistant/components/knx/validation.py +++ b/homeassistant/components/knx/validation.py @@ -1,6 +1,7 @@ """Validation helpers for KNX config schemas.""" from collections.abc import Callable +from enum import Enum import ipaddress from typing import Any @@ -104,3 +105,36 @@ sync_state_validator = vol.Any( cv.boolean, cv.matches_regex(r"^(init|expire|every)( \d*)?$"), ) + + +def backwards_compatible_xknx_climate_enum_member(enumClass: type[Enum]) -> vol.All: + """Transform a string to an enum member. + + Backwards compatible with member names of xknx 2.x climate DPT Enums + due to unintentional breaking change in HA 2024.8. + """ + + def _string_transform(value: Any) -> str: + """Upper and slugify string and substitute old member names. + + Previously this was checked against Enum values instead of names. These + looked like `FAN_ONLY = "Fan only"`, therefore the upper & replace part. 
+ """ + if not isinstance(value, str): + raise vol.Invalid("value should be a string") + name = value.upper().replace(" ", "_") + match name: + case "NIGHT": + return "ECONOMY" + case "FROST_PROTECTION": + return "BUILDING_PROTECTION" + case "DRY": + return "DEHUMIDIFICATION" + case _: + return name + + return vol.All( + _string_transform, + vol.In(enumClass.__members__), + enumClass.__getitem__, + ) From bb597a908d4b4962175fbf4b5051e677e1da2031 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 2 Aug 2024 08:48:41 +0200 Subject: [PATCH 0321/1635] Use freezer in KNX tests (#123044) use freezer in tests --- tests/components/knx/test_binary_sensor.py | 27 ++++++++++++------- tests/components/knx/test_button.py | 9 ++++--- tests/components/knx/test_expose.py | 13 ++++----- tests/components/knx/test_interface_device.py | 10 ++++--- tests/components/knx/test_light.py | 11 ++++---- 5 files changed, 44 insertions(+), 26 deletions(-) diff --git a/tests/components/knx/test_binary_sensor.py b/tests/components/knx/test_binary_sensor.py index 1b304293a86..dbb8d2ee832 100644 --- a/tests/components/knx/test_binary_sensor.py +++ b/tests/components/knx/test_binary_sensor.py @@ -2,6 +2,8 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.knx.const import CONF_STATE_ADDRESS, CONF_SYNC_STATE from homeassistant.components.knx.schema import BinarySensorSchema from homeassistant.const import ( @@ -13,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers import entity_registry as er -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -141,9 +142,12 @@ async def test_binary_sensor_ignore_internal_state( assert len(events) == 6 -async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_counter( + hass: HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with context timeout.""" - async_fire_time_changed(hass, dt_util.utcnow()) context_timeout = 1 await knx.setup_integration( @@ -167,7 +171,8 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No state = hass.states.get("binary_sensor.test") assert state.state is STATE_OFF assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() # state changed twice after context timeout - once to ON with counter 1 and once to counter 0 state = hass.states.get("binary_sensor.test") @@ -188,7 +193,8 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON assert state.attributes.get("counter") == 0 - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=context_timeout)) + freezer.tick(timedelta(seconds=context_timeout)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON @@ -202,10 +208,12 @@ async def test_binary_sensor_counter(hass: HomeAssistant, knx: KNXTestKit) -> No assert event.get("old_state").attributes.get("counter") == 2 -async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_binary_sensor_reset( + hass: 
HomeAssistant, + knx: KNXTestKit, + freezer: FrozenDateTimeFactory, +) -> None: """Test KNX binary_sensor with reset_after function.""" - async_fire_time_changed(hass, dt_util.utcnow()) - await knx.setup_integration( { BinarySensorSchema.PLATFORM: [ @@ -223,7 +231,8 @@ async def test_binary_sensor_reset(hass: HomeAssistant, knx: KNXTestKit) -> None await knx.receive_write("2/2/2", True) state = hass.states.get("binary_sensor.test") assert state.state is STATE_ON - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1)) + freezer.tick(timedelta(seconds=1)) + async_fire_time_changed(hass) await hass.async_block_till_done() # state reset after after timeout state = hass.states.get("binary_sensor.test") diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index 613208d5595..a05752eced1 100644 --- a/tests/components/knx/test_button.py +++ b/tests/components/knx/test_button.py @@ -3,20 +3,22 @@ from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit from tests.common import async_capture_events, async_fire_time_changed -async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_button_simple( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX button with default payload.""" await knx.setup_integration( { @@ -38,7 +40,8 @@ async def test_button_simple(hass: HomeAssistant, knx: KNXTestKit) -> None: # received telegrams on button GA are ignored by the entity old_state = hass.states.get("button.test") - async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=3)) + freezer.tick(timedelta(seconds=3)) + async_fire_time_changed(hass) await knx.receive_write("1/2/3", False) await knx.receive_write("1/2/3", True) new_state = hass.states.get("button.test") diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index 96b00241ab6..c4d0acf0ce2 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -3,6 +3,7 @@ from datetime import timedelta from freezegun import freeze_time +from freezegun.api import FrozenDateTimeFactory import pytest from homeassistant.components.knx import CONF_KNX_EXPOSE, DOMAIN, KNX_ADDRESS @@ -14,11 +15,10 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, ) from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit -from tests.common import async_fire_time_changed_exact +from tests.common import async_fire_time_changed async def test_binary_expose(hass: HomeAssistant, knx: KNXTestKit) -> None: @@ -206,7 +206,9 @@ async def test_expose_string(hass: HomeAssistant, knx: KNXTestKit) -> None: ) -async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_expose_cooldown( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test an expose with cooldown.""" cooldown_time = 2 entity_id = "fake.entity" @@ -234,9 +236,8 @@ async def test_expose_cooldown(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_no_telegram() # 
Wait for cooldown to pass - async_fire_time_changed_exact( - hass, dt_util.utcnow() + timedelta(seconds=cooldown_time) - ) + freezer.tick(timedelta(seconds=cooldown_time)) + async_fire_time_changed(hass) await hass.async_block_till_done() await knx.assert_write("1/1/8", (3,)) diff --git a/tests/components/knx/test_interface_device.py b/tests/components/knx/test_interface_device.py index 8010496ef0d..79114d4ffd5 100644 --- a/tests/components/knx/test_interface_device.py +++ b/tests/components/knx/test_interface_device.py @@ -2,6 +2,7 @@ from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState, XknxConnectionType from xknx.telegram import IndividualAddress @@ -10,7 +11,6 @@ from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util from .conftest import KNXTestKit @@ -19,7 +19,10 @@ from tests.typing import WebSocketGenerator async def test_diagnostic_entities( - hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry + hass: HomeAssistant, + knx: KNXTestKit, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, ) -> None: """Test diagnostic entities.""" await knx.setup_integration({}) @@ -50,7 +53,8 @@ async def test_diagnostic_entities( knx.xknx.connection_manager.cemi_count_outgoing_error = 2 events = async_capture_events(hass, "state_changed") - async_fire_time_changed(hass, dt_util.utcnow() + SCAN_INTERVAL) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() assert len(events) == 3 # 5 polled sensors - 2 disabled diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index 8c966a77a0b..04f849bb555 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory from xknx.core import XknxConnectionState from xknx.devices.light import Light as XknxLight @@ -19,7 +20,6 @@ from homeassistant.components.light import ( ) from homeassistant.const import CONF_NAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant -from homeassistant.util import dt as dt_util from . 
import KnxEntityGenerator from .conftest import KNXTestKit @@ -643,7 +643,9 @@ async def test_light_rgb_individual(hass: HomeAssistant, knx: KNXTestKit) -> Non await knx.assert_write(test_blue, (45,)) -async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> None: +async def test_light_rgbw_individual( + hass: HomeAssistant, knx: KNXTestKit, freezer: FrozenDateTimeFactory +) -> None: """Test KNX light with rgbw color in individual GAs.""" test_red = "1/1/3" test_red_state = "1/1/4" @@ -763,9 +765,8 @@ async def test_light_rgbw_individual(hass: HomeAssistant, knx: KNXTestKit) -> No await knx.receive_write(test_green, (0,)) # # individual color debounce takes 0.2 seconds if not all 4 addresses received knx.assert_state("light.test", STATE_ON) - async_fire_time_changed( - hass, dt_util.utcnow() + timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT) - ) + freezer.tick(timedelta(seconds=XknxLight.DEBOUNCE_TIMEOUT)) + async_fire_time_changed(hass) await knx.xknx.task_registry.block_till_done() knx.assert_state("light.test", STATE_OFF) # turn ON from KNX From abeba39842d51bb5f66fafc1d11b9b008907ca0c Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 2 Aug 2024 12:08:44 +0200 Subject: [PATCH 0322/1635] OpenAI make supported features reflect the config entry options (#123047) --- .../openai_conversation/conversation.py | 15 +++++++++++++++ .../openai_conversation/test_conversation.py | 2 ++ 2 files changed, 17 insertions(+) diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index 483b37945d6..b482126e27c 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -23,6 +23,7 @@ from voluptuous_openapi import convert from homeassistant.components import assist_pipeline, conversation from homeassistant.components.conversation import trace +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, TemplateError @@ -109,6 +110,9 @@ class OpenAIConversationEntity( self.hass, "conversation", self.entry.entry_id, self.entity_id ) conversation.async_set_agent(self.hass, self.entry, self) + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) async def async_will_remove_from_hass(self) -> None: """When entity will be removed from Home Assistant.""" @@ -319,3 +323,14 @@ class OpenAIConversationEntity( return conversation.ConversationResult( response=intent_response, conversation_id=conversation_id ) + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + if entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + else: + self._attr_supported_features = conversation.ConversationEntityFeature(0) diff --git a/tests/components/openai_conversation/test_conversation.py b/tests/components/openai_conversation/test_conversation.py index 3364d822245..e0665bc449f 100644 --- a/tests/components/openai_conversation/test_conversation.py +++ b/tests/components/openai_conversation/test_conversation.py @@ -521,6 +521,8 @@ async def test_unknown_hass_api( }, ) + await hass.async_block_till_done() + result = await conversation.async_converse( hass, "hello", None, Context(), 
agent_id=mock_config_entry.entry_id ) From d1411220082fcd046423be8241bad7ad6fb7ad55 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 2 Aug 2024 12:31:31 +0200 Subject: [PATCH 0323/1635] Ollama implement CONTROL supported feature (#123049) --- .../components/ollama/conversation.py | 18 +++++++++++++ tests/components/ollama/test_conversation.py | 25 ++++++++++++++++++- 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index f59e268394b..9f66083f506 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -106,6 +106,10 @@ class OllamaConversationEntity( self._history: dict[str, MessageHistory] = {} self._attr_name = entry.title self._attr_unique_id = entry.entry_id + if self.entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) async def async_added_to_hass(self) -> None: """When entity is added to Home Assistant.""" @@ -114,6 +118,9 @@ class OllamaConversationEntity( self.hass, "conversation", self.entry.entry_id, self.entity_id ) conversation.async_set_agent(self.hass, self.entry, self) + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) async def async_will_remove_from_hass(self) -> None: """When entity will be removed from Home Assistant.""" @@ -334,3 +341,14 @@ class OllamaConversationEntity( message_history.messages = [ message_history.messages[0] ] + message_history.messages[drop_index:] + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + if entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + else: + self._attr_supported_features = conversation.ConversationEntityFeature(0) diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index b5a94cc6f57..c83dce3b565 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components import conversation, ollama from homeassistant.components.conversation import trace -from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API, MATCH_ALL from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -554,3 +554,26 @@ async def test_conversation_agent( mock_config_entry.entry_id ) assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0 + + +async def test_conversation_agent_with_assist( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test OllamaConversationEntity.""" + agent = conversation.get_agent_manager(hass).async_get_agent( + mock_config_entry_with_assist.entry_id + ) + assert agent.supported_languages == MATCH_ALL + + state = hass.states.get("conversation.mock_title") + assert state + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == conversation.ConversationEntityFeature.CONTROL + ) From c1043ada22a51246ba6f6644b7cc74cdf32820a9 Mon Sep 17 00:00:00 2001 From: Erik 
Montnemery Date: Fri, 2 Aug 2024 11:58:07 +0200 Subject: [PATCH 0324/1635] Correct type annotation for `EntityPlatform.async_register_entity_service` (#123054) Correct type annotation for EntityPlatform.async_register_entity_service Co-authored-by: Marc Mueller <30130371+cdce8p@users.noreply.github.com> --- homeassistant/helpers/entity_platform.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index d868e582f8f..6774780f00f 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -985,7 +985,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType | None, + schema: VolDictType | VolSchemaType, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, From 15ad6db1a72455a94958e13eef0ccd7484565855 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?H=2E=20=C3=81rkosi=20R=C3=B3bert?= Date: Fri, 2 Aug 2024 14:25:43 +0200 Subject: [PATCH 0325/1635] Add LinkPlay models (#123056) * Add some LinkPlay models * Update utils.py * Update utils.py * Update utils.py * Update homeassistant/components/linkplay/utils.py * Update homeassistant/components/linkplay/utils.py * Update utils.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/linkplay/utils.py | 26 ++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 9ca76b3933d..7532c9b354a 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -3,9 +3,19 @@ from typing import Final MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" +MANUFACTURER_ARYLIC: Final[str] = "Arylic" +MANUFACTURER_IEAST: Final[str] = "iEAST" MANUFACTURER_GENERIC: Final[str] = "Generic" MODELS_ARTSOUND_SMART_ZONE4: Final[str] = "Smart Zone 4 AMP" MODELS_ARTSOUND_SMART_HYDE: Final[str] = "Smart Hyde" +MODELS_ARYLIC_S50: Final[str] = "S50+" +MODELS_ARYLIC_S50_PRO: Final[str] = "S50 Pro" +MODELS_ARYLIC_A30: Final[str] = "A30" +MODELS_ARYLIC_A50S: Final[str] = "A50+" +MODELS_ARYLIC_UP2STREAM_AMP_V3: Final[str] = "Up2Stream Amp v3" +MODELS_ARYLIC_UP2STREAM_AMP_V4: Final[str] = "Up2Stream Amp v4" +MODELS_ARYLIC_UP2STREAM_PRO_V3: Final[str] = "Up2Stream Pro v3" +MODELS_IEAST_AUDIOCAST_M5: Final[str] = "AudioCast M5" MODELS_GENERIC: Final[str] = "Generic" @@ -16,5 +26,21 @@ def get_info_from_project(project: str) -> tuple[str, str]: return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_ZONE4 case "SMART_HYDE": return MANUFACTURER_ARTSOUND, MODELS_ARTSOUND_SMART_HYDE + case "ARYLIC_S50": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50 + case "RP0016_S50PRO_S": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_S50_PRO + case "RP0011_WB60_S": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A30 + case "ARYLIC_A50S": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_A50S + case "UP2STREAM_AMP_V3": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V3 + case "UP2STREAM_AMP_V4": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_AMP_V4 + case "UP2STREAM_PRO_V3": + return MANUFACTURER_ARYLIC, MODELS_ARYLIC_UP2STREAM_PRO_V3 + case "iEAST-02": + return MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5 case _: return MANUFACTURER_GENERIC, MODELS_GENERIC From f9276e28b03bed2e88d3653cec98cf2a26686792 Mon Sep 17 00:00:00 2001 From: Fabian 
<115155196+Fabiann2205@users.noreply.github.com> Date: Fri, 2 Aug 2024 12:19:55 +0200 Subject: [PATCH 0326/1635] Add device class (#123059) --- homeassistant/components/google_travel_time/sensor.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/google_travel_time/sensor.py b/homeassistant/components/google_travel_time/sensor.py index 6c45033eeb7..618dda50bd4 100644 --- a/homeassistant/components/google_travel_time/sensor.py +++ b/homeassistant/components/google_travel_time/sensor.py @@ -8,7 +8,11 @@ import logging from googlemaps import Client from googlemaps.distance_matrix import distance_matrix -from homeassistant.components.sensor import SensorEntity +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorStateClass, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, @@ -72,6 +76,8 @@ class GoogleTravelTimeSensor(SensorEntity): _attr_attribution = ATTRIBUTION _attr_native_unit_of_measurement = UnitOfTime.MINUTES + _attr_device_class = SensorDeviceClass.DURATION + _attr_state_class = SensorStateClass.MEASUREMENT def __init__(self, config_entry, name, api_key, origin, destination, client): """Initialize the sensor.""" From d7cc2a7e9a3df922e64d78ef5487e41f74b9e508 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 2 Aug 2024 11:49:47 +0200 Subject: [PATCH 0327/1635] Correct squeezebox service (#123060) --- homeassistant/components/squeezebox/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index aaf64c34ddf..c0a6dad7a47 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -185,7 +185,7 @@ async def async_setup_entry( {vol.Required(ATTR_OTHER_PLAYER): cv.string}, "async_sync", ) - platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") + platform.async_register_entity_service(SERVICE_UNSYNC, {}, "async_unsync") # Start server discovery task if not already running entry.async_on_unload(async_at_start(hass, start_server_discovery)) From 9c7134a86513e2f17b46767e01dbeff596e40125 Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:23:45 +0200 Subject: [PATCH 0328/1635] LinkPlay: Bump python-linkplay to 0.0.6 (#123062) Bump python-linkplay to 0.0.6 --- homeassistant/components/linkplay/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 0345d4ad727..9ac2a9e66e6 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/linkplay", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["python-linkplay==0.0.5"], + "requirements": ["python-linkplay==0.0.6"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 99a237efe02..747f6509604 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2298,7 +2298,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay -python-linkplay==0.0.5 +python-linkplay==0.0.6 # 
homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4ee6914700f..e85a748d13e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1816,7 +1816,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay -python-linkplay==0.0.5 +python-linkplay==0.0.6 # homeassistant.components.matter python-matter-server==6.3.0 From 13c9d69440813adce42ad1e5a0ac791e7e5c9f7c Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Fri, 2 Aug 2024 13:38:05 +0200 Subject: [PATCH 0329/1635] Add additional items to REPEAT_MAP in LinkPlay (#123063) * Upgrade python-linkplay, add items to REPEAT_MAP * Undo dependency bump --- homeassistant/components/linkplay/media_player.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 103b09f46da..398add235bd 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -58,6 +58,8 @@ REPEAT_MAP: dict[LoopMode, RepeatMode] = { LoopMode.CONTINUOUS_PLAYBACK: RepeatMode.ALL, LoopMode.RANDOM_PLAYBACK: RepeatMode.ALL, LoopMode.LIST_CYCLE: RepeatMode.ALL, + LoopMode.SHUFF_DISABLED_REPEAT_DISABLED: RepeatMode.OFF, + LoopMode.SHUFF_ENABLED_REPEAT_ENABLED_LOOP_ONCE: RepeatMode.ALL, } REPEAT_MAP_INV: dict[RepeatMode, LoopMode] = {v: k for k, v in REPEAT_MAP.items()} From b36059fc64c6fd27b33069df234fdea468467573 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 2 Aug 2024 13:38:56 +0200 Subject: [PATCH 0330/1635] Do not raise repair issue about missing integration in safe mode (#123066) --- homeassistant/setup.py | 27 ++++++++++++++------------- tests/test_setup.py | 11 ++++++++++- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/homeassistant/setup.py b/homeassistant/setup.py index 12dd17b289c..102c48e1d07 100644 --- a/homeassistant/setup.py +++ b/homeassistant/setup.py @@ -281,19 +281,20 @@ async def _async_setup_component( integration = await loader.async_get_integration(hass, domain) except loader.IntegrationNotFound: _log_error_setup_error(hass, domain, None, "Integration not found.") - ir.async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"integration_not_found.{domain}", - is_fixable=True, - issue_domain=HOMEASSISTANT_DOMAIN, - severity=IssueSeverity.ERROR, - translation_key="integration_not_found", - translation_placeholders={ - "domain": domain, - }, - data={"domain": domain}, - ) + if not hass.config.safe_mode: + ir.async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"integration_not_found.{domain}", + is_fixable=True, + issue_domain=HOMEASSISTANT_DOMAIN, + severity=IssueSeverity.ERROR, + translation_key="integration_not_found", + translation_placeholders={ + "domain": domain, + }, + data={"domain": domain}, + ) return False log_error = partial(_log_error_setup_error, hass, domain, integration) diff --git a/tests/test_setup.py b/tests/test_setup.py index 3430c17960c..4e7c23865da 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -245,7 +245,7 @@ async def test_validate_platform_config_4(hass: HomeAssistant) -> None: async def test_component_not_found( hass: HomeAssistant, issue_registry: IssueRegistry ) -> None: - """setup_component should not crash if component doesn't exist.""" + """setup_component should raise a repair issue if component doesn't exist.""" assert await 
setup.async_setup_component(hass, "non_existing", {}) is False assert len(issue_registry.issues) == 1 issue = issue_registry.async_get_issue( @@ -255,6 +255,15 @@ async def test_component_not_found( assert issue.translation_key == "integration_not_found" +async def test_component_missing_not_raising_in_safe_mode( + hass: HomeAssistant, issue_registry: IssueRegistry +) -> None: + """setup_component should not raise an issue if component doesn't exist in safe mode.""" + hass.config.safe_mode = True + assert await setup.async_setup_component(hass, "non_existing", {}) is False + assert len(issue_registry.issues) == 0 + + async def test_component_not_double_initialized(hass: HomeAssistant) -> None: """Test we do not set up a component twice.""" mock_setup = Mock(return_value=True) From 433c1a57e76b9206e4f201c4b9e42fbc9eccba25 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Fri, 2 Aug 2024 16:48:37 +0200 Subject: [PATCH 0331/1635] Update frontend to 20240802.0 (#123072) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 60cfa0a26ff..95afe1221ec 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240731.0"] + "requirements": ["home-assistant-frontend==20240802.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 58f39907269..1cc6a0fa85d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240731.0 +home-assistant-frontend==20240802.0 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 747f6509604..479e22a3bfc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240731.0 +home-assistant-frontend==20240802.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e85a748d13e..9d92bde7aa8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -916,7 +916,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240731.0 +home-assistant-frontend==20240802.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From fe82e7f24d1362a4a657c9561fd98880d0236ace Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 2 Aug 2024 17:46:01 +0200 Subject: [PATCH 0332/1635] Bump version to 2024.8.0b1 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index b315d8c2618..08ee0bb77f9 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = 
"0b0" +PATCH_VERSION: Final = "0b1" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index e705101e4ae..c60a01663da 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b0" +version = "2024.8.0b1" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From e32a48ac55bc8fb1b056e6d0a1f9e54bbd7853fa Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 2 Aug 2024 17:55:46 +0200 Subject: [PATCH 0333/1635] Improve type hints in google_assistant (#122895) --- .../components/google_assistant/trait.py | 120 +++++++++--------- 1 file changed, 60 insertions(+), 60 deletions(-) diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index 2be20237a84..145eb4b2935 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -294,7 +294,7 @@ class _Trait(ABC): self.state = state self.config = config - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" raise NotImplementedError @@ -302,7 +302,7 @@ class _Trait(ABC): """Add options for the sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" raise NotImplementedError @@ -337,11 +337,11 @@ class BrightnessTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return brightness attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return brightness query attributes.""" domain = self.state.domain response = {} @@ -388,7 +388,7 @@ class CameraStreamTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return stream attributes for a sync request.""" return { "cameraStreamSupportedProtocols": ["hls"], @@ -396,7 +396,7 @@ class CameraStreamTrait(_Trait): "cameraStreamNeedDrmEncryption": False, } - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return camera stream attributes.""" return self.stream_info or {} @@ -426,7 +426,7 @@ class ObjectDetection(_Trait): domain == event.DOMAIN and device_class == event.EventDeviceClass.DOORBELL ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return ObjectDetection attributes for a sync request.""" return {} @@ -434,7 +434,7 @@ class ObjectDetection(_Trait): """Add options for the sync request.""" return {"notificationSupportedByAgent": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return ObjectDetection query attributes.""" return {} @@ -498,13 +498,13 @@ class OnOffTrait(_Trait): humidifier.DOMAIN, ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return OnOff attributes for a sync request.""" if self.state.attributes.get(ATTR_ASSUMED_STATE, False): return {"commandOnlyOnOff": True} return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return OnOff query attributes.""" return {"on": 
self.state.state not in (STATE_OFF, STATE_UNKNOWN)} @@ -548,11 +548,11 @@ class ColorSettingTrait(_Trait): color_modes ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return color temperature attributes for a sync request.""" attrs = self.state.attributes color_modes = attrs.get(light.ATTR_SUPPORTED_COLOR_MODES) - response = {} + response: dict[str, Any] = {} if light.color_supported(color_modes): response["colorModel"] = "hsv" @@ -571,11 +571,11 @@ class ColorSettingTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return color temperature query attributes.""" color_mode = self.state.attributes.get(light.ATTR_COLOR_MODE) - color = {} + color: dict[str, Any] = {} if light.color_supported([color_mode]): color_hs = self.state.attributes.get(light.ATTR_HS_COLOR) @@ -684,12 +684,12 @@ class SceneTrait(_Trait): script.DOMAIN, ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return scene attributes for a sync request.""" # None of the supported domains can support sceneReversible return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return scene query attributes.""" return {} @@ -728,11 +728,11 @@ class DockTrait(_Trait): """Test if state is supported.""" return domain == vacuum.DOMAIN - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return dock attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return dock query attributes.""" return {"isDocked": self.state.state == vacuum.STATE_DOCKED} @@ -762,11 +762,11 @@ class LocatorTrait(_Trait): """Test if state is supported.""" return domain == vacuum.DOMAIN and features & VacuumEntityFeature.LOCATE - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return locator attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return locator query attributes.""" return {} @@ -802,14 +802,14 @@ class EnergyStorageTrait(_Trait): """Test if state is supported.""" return domain == vacuum.DOMAIN and features & VacuumEntityFeature.BATTERY - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return EnergyStorage attributes for a sync request.""" return { "isRechargeable": True, "queryOnlyEnergyStorage": True, } - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return EnergyStorage query attributes.""" battery_level = self.state.attributes.get(ATTR_BATTERY_LEVEL) if battery_level is None: @@ -866,7 +866,7 @@ class StartStopTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return StartStop attributes for a sync request.""" domain = self.state.domain if domain == vacuum.DOMAIN: @@ -880,7 +880,7 @@ class StartStopTrait(_Trait): raise NotImplementedError(f"Unsupported domain {domain}") - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return StartStop query attributes.""" domain = self.state.domain state = self.state.state @@ -1012,7 +1012,7 @@ class TemperatureControlTrait(_Trait): and device_class == sensor.SensorDeviceClass.TEMPERATURE ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return temperature attributes for a sync request.""" response = {} domain = self.state.domain @@ -1048,7 +1048,7 @@ class TemperatureControlTrait(_Trait): 
return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return temperature states.""" response = {} domain = self.state.domain @@ -1174,7 +1174,7 @@ class TemperatureSettingTrait(_Trait): return modes - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return temperature point and modes attributes for a sync request.""" response = {} attrs = self.state.attributes @@ -1217,9 +1217,9 @@ class TemperatureSettingTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return temperature point and modes query attributes.""" - response = {} + response: dict[str, Any] = {} attrs = self.state.attributes unit = self.hass.config.units.temperature_unit @@ -1432,9 +1432,9 @@ class HumiditySettingTrait(_Trait): and device_class == sensor.SensorDeviceClass.HUMIDITY ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return humidity attributes for a sync request.""" - response = {} + response: dict[str, Any] = {} attrs = self.state.attributes domain = self.state.domain @@ -1455,7 +1455,7 @@ class HumiditySettingTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return humidity query attributes.""" response = {} attrs = self.state.attributes @@ -1464,9 +1464,9 @@ class HumiditySettingTrait(_Trait): if domain == sensor.DOMAIN: device_class = attrs.get(ATTR_DEVICE_CLASS) if device_class == sensor.SensorDeviceClass.HUMIDITY: - current_humidity = self.state.state - if current_humidity not in (STATE_UNKNOWN, STATE_UNAVAILABLE): - response["humidityAmbientPercent"] = round(float(current_humidity)) + humidity_state = self.state.state + if humidity_state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + response["humidityAmbientPercent"] = round(float(humidity_state)) elif domain == humidifier.DOMAIN: target_humidity: int | None = attrs.get(humidifier.ATTR_HUMIDITY) @@ -1518,11 +1518,11 @@ class LockUnlockTrait(_Trait): """Return if the trait might ask for 2FA.""" return True - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return LockUnlock attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return LockUnlock query attributes.""" if self.state.state == STATE_JAMMED: return {"isJammed": True} @@ -1604,7 +1604,7 @@ class ArmDisArmTrait(_Trait): return states[0] - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return ArmDisarm attributes for a sync request.""" response = {} levels = [] @@ -1624,7 +1624,7 @@ class ArmDisArmTrait(_Trait): response["availableArmLevels"] = {"levels": levels, "ordered": True} return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return ArmDisarm query attributes.""" armed_state = self.state.attributes.get("next_state", self.state.state) @@ -1721,11 +1721,11 @@ class FanSpeedTrait(_Trait): return features & ClimateEntityFeature.FAN_MODE return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return speed point and modes attributes for a sync request.""" domain = self.state.domain speeds = [] - result = {} + result: dict[str, Any] = {} if domain == fan.DOMAIN: reversible = bool( @@ -1770,7 +1770,7 @@ class FanSpeedTrait(_Trait): return result - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return speed point and modes query attributes.""" attrs 
= self.state.attributes @@ -1916,7 +1916,7 @@ class ModesTrait(_Trait): ) return mode - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return mode attributes for a sync request.""" modes = [] @@ -1940,10 +1940,10 @@ class ModesTrait(_Trait): return {"availableModes": modes} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return current modes.""" attrs = self.state.attributes - response = {} + response: dict[str, Any] = {} mode_settings = {} if self.state.domain == fan.DOMAIN: @@ -2104,7 +2104,7 @@ class InputSelectorTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return mode attributes for a sync request.""" attrs = self.state.attributes sourcelist: list[str] = attrs.get(media_player.ATTR_INPUT_SOURCE_LIST) or [] @@ -2115,7 +2115,7 @@ class InputSelectorTrait(_Trait): return {"availableInputs": inputs, "orderedInputs": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return current modes.""" attrs = self.state.attributes return {"currentInput": attrs.get(media_player.ATTR_INPUT_SOURCE, "")} @@ -2185,7 +2185,7 @@ class OpenCloseTrait(_Trait): """Return if the trait might ask for 2FA.""" return domain == cover.DOMAIN and device_class in OpenCloseTrait.COVER_2FA - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return opening direction.""" response = {} features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) @@ -2221,10 +2221,10 @@ class OpenCloseTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return state query attributes.""" domain = self.state.domain - response = {} + response: dict[str, Any] = {} # When it's an assumed state, we will return empty state # This shouldn't happen because we set `commandOnlyOpenClose` @@ -2330,7 +2330,7 @@ class VolumeTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return volume attributes for a sync request.""" features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) return { @@ -2347,7 +2347,7 @@ class VolumeTrait(_Trait): "levelStepSize": 10, } - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return volume query attributes.""" response = {} @@ -2510,7 +2510,7 @@ class TransportControlTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return opening direction.""" response = {} @@ -2525,7 +2525,7 @@ class TransportControlTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" return {} @@ -2624,11 +2624,11 @@ class MediaStateTrait(_Trait): """Test if state is supported.""" return domain == media_player.DOMAIN - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" return {"supportActivityState": True, "supportPlaybackState": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" return { "activityState": self.activity_lookup.get(self.state.state, "INACTIVE"), @@ -2658,11 +2658,11 @@ class ChannelTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" return {"availableChannels": [], 
"commandOnlyChannels": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return channel query attributes.""" return {} @@ -2735,7 +2735,7 @@ class SensorStateTrait(_Trait): """Test if state is supported.""" return domain == sensor.DOMAIN and device_class in cls.sensor_types - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) data = self.sensor_types.get(device_class) @@ -2763,7 +2763,7 @@ class SensorStateTrait(_Trait): return {"sensorStatesSupported": [sensor_state]} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) data = self.sensor_types.get(device_class) From 34b561b21131fd5dc8c2c0593795576e82ee7b32 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 2 Aug 2024 19:04:00 +0200 Subject: [PATCH 0334/1635] Bump ruff to 0.5.6 (#123073) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 22e10d420d4..9c75edf780c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.5 + rev: v0.5.6 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index d57a005bb5d..a3d38c11a8d 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.5.5 +ruff==0.5.6 yamllint==1.35.1 From 8687c32c15fb3a577a993a2e32c0527ed36f838b Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Fri, 2 Aug 2024 21:56:49 +0300 Subject: [PATCH 0335/1635] Ignore Shelly IPv6 address in zeroconf (#123081) --- .../components/shelly/config_flow.py | 2 ++ homeassistant/components/shelly/strings.json | 3 ++- tests/components/shelly/test_config_flow.py | 19 +++++++++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index cb3bca6aa47..c80d1e84d6f 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -279,6 +279,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle zeroconf discovery.""" + if discovery_info.ip_address.version == 6: + return self.async_abort(reason="ipv6_not_supported") host = discovery_info.host # First try to get the mac address from the name # so we can avoid making another connection to the diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index 8ae4ff1f3e4..f76319eb08c 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -52,7 +52,8 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "reauth_unsuccessful": "Re-authentication was unsuccessful, please remove the integration and set it up again.", "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", - "another_device": "Re-configuration was unsuccessful, the IP 
address/hostname of another Shelly device was used." + "another_device": "Re-configuration was unsuccessful, the IP address/hostname of another Shelly device was used.", + "ipv6_not_supported": "IPv6 is not supported." } }, "device_automation": { diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index a3040fc2eb8..0c574a33e0c 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -1305,3 +1305,22 @@ async def test_reconfigure_with_exception( ) assert result["errors"] == {"base": base_error} + + +async def test_zeroconf_rejects_ipv6(hass: HomeAssistant) -> None: + """Test zeroconf discovery rejects ipv6.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), + ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], + hostname="mock_hostname", + name="shelly1pm-12345", + port=None, + properties={zeroconf.ATTR_PROPERTIES_ID: "shelly1pm-12345"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "ipv6_not_supported" From f6ad018f8fa3ccd4bf31df47388d3c26532ecf6d Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Sat, 3 Aug 2024 09:14:24 +0300 Subject: [PATCH 0336/1635] Change enum type to string for Google Generative AI Conversation (#123069) --- .../conversation.py | 14 ++++- .../test_conversation.py | 59 +++++++++++++++++++ 2 files changed, 70 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index a5c911bb757..6b3e87f8181 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -89,9 +89,9 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: key = "type_" val = val.upper() elif key == "format": - if (schema.get("type") == "string" and val != "enum") or ( - schema.get("type") not in ("number", "integer", "string") - ): + if schema.get("type") == "string" and val != "enum": + continue + if schema.get("type") not in ("number", "integer", "string"): continue key = "format_" elif key == "items": @@ -100,11 +100,19 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: val = {k: _format_schema(v) for k, v in val.items()} result[key] = val + if result.get("enum") and result.get("type_") != "STRING": + # enum is only allowed for STRING type. This is safe as long as the schema + # contains vol.Coerce for the respective type, for example: + # vol.All(vol.Coerce(int), vol.In([1, 2, 3])) + result["type_"] = "STRING" + result["enum"] = [str(item) for item in result["enum"]] + if result.get("type_") == "OBJECT" and not result.get("properties"): # An object with undefined properties is not supported by Gemini API. # Fallback to JSON string. This will probably fail for most tools that want it, # but we don't have a better fallback strategy so far. 
result["properties"] = {"json": {"type_": "STRING"}} + result["required"] = [] return result diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 41f96c7b0ac..df8472995a7 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -17,6 +17,7 @@ from homeassistant.components.google_generative_ai_conversation.const import ( ) from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, + _format_schema, ) from homeassistant.const import CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant @@ -622,3 +623,61 @@ async def test_escape_decode() -> None: "param2": "param2's value", "param3": {"param31": "Cheminée", "param32": "Cheminée"}, } + + +@pytest.mark.parametrize( + ("openapi", "protobuf"), + [ + ( + {"type": "string", "enum": ["a", "b", "c"]}, + {"type_": "STRING", "enum": ["a", "b", "c"]}, + ), + ( + {"type": "integer", "enum": [1, 2, 3]}, + {"type_": "STRING", "enum": ["1", "2", "3"]}, + ), + ({"anyOf": [{"type": "integer"}, {"type": "number"}]}, {"type_": "INTEGER"}), + ( + { + "anyOf": [ + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + ] + }, + {"type_": "INTEGER"}, + ), + ({"type": "string", "format": "lower"}, {"type_": "STRING"}), + ({"type": "boolean", "format": "bool"}, {"type_": "BOOLEAN"}), + ( + {"type": "number", "format": "percent"}, + {"type_": "NUMBER", "format_": "percent"}, + ), + ( + { + "type": "object", + "properties": {"var": {"type": "string"}}, + "required": [], + }, + { + "type_": "OBJECT", + "properties": {"var": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "object", "additionalProperties": True}, + { + "type_": "OBJECT", + "properties": {"json": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "array", "items": {"type": "string"}}, + {"type_": "ARRAY", "items": {"type_": "STRING"}}, + ), + ], +) +async def test_format_schema(openapi, protobuf) -> None: + """Test _format_schema.""" + assert _format_schema(openapi) == protobuf From aa6f0cd55af1c7dc64e6e93fc0df3eee8323b5ac Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sat, 3 Aug 2024 08:16:30 +0200 Subject: [PATCH 0337/1635] Add CONTROL supported feature to Google conversation when API access (#123046) * Add CONTROL supported feature to Google conversation when API access * Better function name * Handle entry update inline * Reload instead of update --- .../conversation.py | 14 ++++++++++++++ .../snapshots/test_conversation.ambr | 8 ++++---- .../test_conversation.py | 15 +++++++++++---- 3 files changed, 29 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index 6b3e87f8181..0d24ddbf39f 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -172,6 +172,10 @@ class GoogleGenerativeAIConversationEntity( model="Generative AI", entry_type=dr.DeviceEntryType.SERVICE, ) + if self.entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) @property def supported_languages(self) -> list[str] | Literal["*"]: @@ -185,6 
+189,9 @@ class GoogleGenerativeAIConversationEntity( self.hass, "conversation", self.entry.entry_id, self.entity_id ) conversation.async_set_agent(self.hass, self.entry, self) + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) async def async_will_remove_from_hass(self) -> None: """When entity will be removed from Home Assistant.""" @@ -405,3 +412,10 @@ class GoogleGenerativeAIConversationEntity( parts.append(llm_api.api_prompt) return "\n".join(parts) + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index abd3658e869..65238c5212a 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -215,7 +215,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-None] +# name: test_default_prompt[config_entry_options0-0-None] list([ tuple( '', @@ -263,7 +263,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options0-0-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -311,7 +311,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-None] +# name: test_default_prompt[config_entry_options1-1-None] list([ tuple( '', @@ -360,7 +360,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options1-1-conversation.google_generative_ai_conversation] list([ tuple( '', diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index df8472995a7..a8eae34e08b 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -19,7 +19,7 @@ from homeassistant.components.google_generative_ai_conversation.conversation imp _escape_decode, _format_schema, ) -from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -39,10 +39,13 @@ def freeze_the_time(): "agent_id", [None, "conversation.google_generative_ai_conversation"] ) @pytest.mark.parametrize( - "config_entry_options", + ("config_entry_options", "expected_features"), [ - {}, - {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + ({}, 0), + ( + {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + conversation.ConversationEntityFeature.CONTROL, + ), ], ) @pytest.mark.usefixtures("mock_init_component") @@ -52,6 +55,7 @@ async def test_default_prompt( snapshot: SnapshotAssertion, agent_id: str | None, config_entry_options: {}, + expected_features: conversation.ConversationEntityFeature, hass_ws_client: WebSocketGenerator, ) -> None: """Test that the default prompt works.""" @@ -98,6 +102,9 @@ async def test_default_prompt( assert 
[tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot assert mock_get_tools.called == (CONF_LLM_HASS_API in config_entry_options) + state = hass.states.get("conversation.google_generative_ai_conversation") + assert state.attributes[ATTR_SUPPORTED_FEATURES] == expected_features + @pytest.mark.parametrize( ("model_name", "supports_system_instruction"), From 6684f61a54a4fb11ec05451ae4485877a3672cd6 Mon Sep 17 00:00:00 2001 From: Chris Buckley Date: Sat, 3 Aug 2024 08:07:13 +0100 Subject: [PATCH 0338/1635] Add support for Todoist sections (#115671) * Add support for Todoist sections * ServiceValidationError & section name tweaks from PR comments * Remove whitespace Co-authored-by: Erik Montnemery * More natural error message Co-authored-by: Erik Montnemery --------- Co-authored-by: Erik Montnemery --- homeassistant/components/todoist/calendar.py | 31 +++++++++++++++++-- homeassistant/components/todoist/const.py | 2 ++ .../components/todoist/coordinator.py | 6 +++- .../components/todoist/services.yaml | 4 +++ homeassistant/components/todoist/strings.json | 9 ++++++ tests/components/todoist/conftest.py | 11 ++++++- tests/components/todoist/test_calendar.py | 29 ++++++++++++++++- 7 files changed, 86 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index 1c6f40005c1..f89c09451b6 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -21,7 +21,7 @@ from homeassistant.components.calendar import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, CONF_NAME, CONF_TOKEN, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -54,6 +54,7 @@ from .const import ( REMINDER_DATE, REMINDER_DATE_LANG, REMINDER_DATE_STRING, + SECTION_NAME, SERVICE_NEW_TASK, START, SUMMARY, @@ -68,6 +69,7 @@ NEW_TASK_SERVICE_SCHEMA = vol.Schema( vol.Required(CONTENT): cv.string, vol.Optional(DESCRIPTION): cv.string, vol.Optional(PROJECT_NAME, default="inbox"): vol.All(cv.string, vol.Lower), + vol.Optional(SECTION_NAME): vol.All(cv.string, vol.Lower), vol.Optional(LABELS): cv.ensure_list_csv, vol.Optional(ASSIGNEE): cv.string, vol.Optional(PRIORITY): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), @@ -201,7 +203,7 @@ async def async_setup_platform( async_register_services(hass, coordinator) -def async_register_services( +def async_register_services( # noqa: C901 hass: HomeAssistant, coordinator: TodoistCoordinator ) -> None: """Register services.""" @@ -211,7 +213,7 @@ def async_register_services( session = async_get_clientsession(hass) - async def handle_new_task(call: ServiceCall) -> None: + async def handle_new_task(call: ServiceCall) -> None: # noqa: C901 """Call when a user creates a new Todoist Task from Home Assistant.""" project_name = call.data[PROJECT_NAME].lower() projects = await coordinator.async_get_projects() @@ -222,12 +224,35 @@ def async_register_services( if project_id is None: raise HomeAssistantError(f"Invalid project name '{project_name}'") + # Optional section within project + section_id: str | None = None + if SECTION_NAME in 
call.data: + section_name = call.data[SECTION_NAME] + sections = await coordinator.async_get_sections(project_id) + for section in sections: + if section_name == section.name.lower(): + section_id = section.id + break + if section_id is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="section_invalid", + translation_placeholders={ + "section": section_name, + "project": project_name, + }, + ) + # Create the task content = call.data[CONTENT] data: dict[str, Any] = {"project_id": project_id} if description := call.data.get(DESCRIPTION): data["description"] = description + + if section_id is not None: + data["section_id"] = section_id + if task_labels := call.data.get(LABELS): data["labels"] = task_labels diff --git a/homeassistant/components/todoist/const.py b/homeassistant/components/todoist/const.py index 1a66fc9764f..be95d57dd2c 100644 --- a/homeassistant/components/todoist/const.py +++ b/homeassistant/components/todoist/const.py @@ -78,6 +78,8 @@ PROJECT_ID: Final = "project_id" PROJECT_NAME: Final = "project" # Todoist API: Fetch all Projects PROJECTS: Final = "projects" +# Section Name: What Section of the Project do you want to add the Task to? +SECTION_NAME: Final = "section" # Calendar Platform: When does a calendar event start? START: Final = "start" # Calendar Platform: What is the next calendar event about? diff --git a/homeassistant/components/todoist/coordinator.py b/homeassistant/components/todoist/coordinator.py index e01b4ecb35a..b55680907ac 100644 --- a/homeassistant/components/todoist/coordinator.py +++ b/homeassistant/components/todoist/coordinator.py @@ -4,7 +4,7 @@ from datetime import timedelta import logging from todoist_api_python.api_async import TodoistAPIAsync -from todoist_api_python.models import Label, Project, Task +from todoist_api_python.models import Label, Project, Section, Task from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -41,6 +41,10 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]): self._projects = await self.api.get_projects() return self._projects + async def async_get_sections(self, project_id: str) -> list[Section]: + """Return todoist sections for a given project ID.""" + return await self.api.get_sections(project_id=project_id) + async def async_get_labels(self) -> list[Label]: """Return todoist labels fetched at most once.""" if self._labels is None: diff --git a/homeassistant/components/todoist/services.yaml b/homeassistant/components/todoist/services.yaml index 1bd6320ebe3..17d877ea786 100644 --- a/homeassistant/components/todoist/services.yaml +++ b/homeassistant/components/todoist/services.yaml @@ -13,6 +13,10 @@ new_task: default: Inbox selector: text: + section: + example: Deliveries + selector: + text: labels: example: Chores,Delivieries selector: diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 0cc74c9c8c6..55b7ef62b58 100644 --- a/homeassistant/components/todoist/strings.json +++ b/homeassistant/components/todoist/strings.json @@ -20,6 +20,11 @@ "default": "[%key:common::config_flow::create_entry::authenticated%]" } }, + "exceptions": { + "section_invalid": { + "message": "Project \"{project}\" has no section \"{section}\"" + } + }, "services": { "new_task": { "name": "New task", @@ -37,6 +42,10 @@ "name": "Project", "description": "The name of the project this task should belong to." 
}, + "section": { + "name": "Section", + "description": "The name of a section within the project to add the task to." + }, "labels": { "name": "Labels", "description": "Any labels that you want to apply to this task, separated by a comma." diff --git a/tests/components/todoist/conftest.py b/tests/components/todoist/conftest.py index 45fda53ccc1..4b2bfea2e30 100644 --- a/tests/components/todoist/conftest.py +++ b/tests/components/todoist/conftest.py @@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, patch import pytest from requests.exceptions import HTTPError from requests.models import Response -from todoist_api_python.models import Collaborator, Due, Label, Project, Task +from todoist_api_python.models import Collaborator, Due, Label, Project, Section, Task from homeassistant.components.todoist import DOMAIN from homeassistant.const import CONF_TOKEN, Platform @@ -18,6 +18,7 @@ from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry PROJECT_ID = "project-id-1" +SECTION_ID = "section-id-1" SUMMARY = "A task" TOKEN = "some-token" TODAY = dt_util.now().strftime("%Y-%m-%d") @@ -98,6 +99,14 @@ def mock_api(tasks: list[Task]) -> AsyncMock: view_style="list", ) ] + api.get_sections.return_value = [ + Section( + id=SECTION_ID, + project_id=PROJECT_ID, + name="Section Name", + order=1, + ) + ] api.get_labels.return_value = [ Label(id="1", name="Label1", color="1", order=1, is_favorite=False) ] diff --git a/tests/components/todoist/test_calendar.py b/tests/components/todoist/test_calendar.py index d8123af3231..680406096cc 100644 --- a/tests/components/todoist/test_calendar.py +++ b/tests/components/todoist/test_calendar.py @@ -18,6 +18,7 @@ from homeassistant.components.todoist.const import ( DOMAIN, LABELS, PROJECT_NAME, + SECTION_NAME, SERVICE_NEW_TASK, ) from homeassistant.const import CONF_TOKEN, Platform @@ -26,7 +27,7 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util -from .conftest import PROJECT_ID, SUMMARY +from .conftest import PROJECT_ID, SECTION_ID, SUMMARY from tests.typing import ClientSessionGenerator @@ -269,6 +270,32 @@ async def test_create_task_service_call(hass: HomeAssistant, api: AsyncMock) -> ) +async def test_create_task_service_call_with_section( + hass: HomeAssistant, api: AsyncMock +) -> None: + """Test api is called correctly when section is included.""" + await hass.services.async_call( + DOMAIN, + SERVICE_NEW_TASK, + { + ASSIGNEE: "user", + CONTENT: "task", + LABELS: ["Label1"], + PROJECT_NAME: "Name", + SECTION_NAME: "Section Name", + }, + ) + await hass.async_block_till_done() + + api.add_task.assert_called_with( + "task", + project_id=PROJECT_ID, + section_id=SECTION_ID, + labels=["Label1"], + assignee_id="1", + ) + + @pytest.mark.parametrize( ("due"), [ From bb31fc1ec72e204ee153f400b9baaa1a71d564e2 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Sat, 3 Aug 2024 10:41:30 +0200 Subject: [PATCH 0339/1635] Test storage save and load for evohome (#122510) * test storage save and load * fix bug exposed by test * refactor test * add JSON for test account/location * create helpers to load JSON * refactor test * baseline refactor * tweak * update requiremenst * rationalise code * remove conditional in test * refactor test * mypy fix * tweak tests * working test * working test 4 * working test 5 * add typed dicts * working dtms * lint * fix dtm asserts * doc strings * list * tweak conditional * tweak test data sets 
to extend coverage * leverage conftest.py for subsequent tests * revert test storage * revert part two * rename symbols * remove anachronism * stop unwanted DNS lookup * Clean up type ignores * Format --------- Co-authored-by: Martin Hjelmare --- CODEOWNERS | 1 + requirements_test_all.txt | 3 + tests/components/evohome/__init__.py | 1 + tests/components/evohome/conftest.py | 111 ++++++ tests/components/evohome/const.py | 10 + .../evohome/fixtures/schedule_dhw.json | 81 ++++ .../evohome/fixtures/schedule_zone.json | 67 ++++ .../evohome/fixtures/status_2738909.json | 125 +++++++ .../evohome/fixtures/user_account.json | 11 + .../evohome/fixtures/user_locations.json | 346 ++++++++++++++++++ tests/components/evohome/test_storage.py | 208 +++++++++++ 11 files changed, 964 insertions(+) create mode 100644 tests/components/evohome/__init__.py create mode 100644 tests/components/evohome/conftest.py create mode 100644 tests/components/evohome/const.py create mode 100644 tests/components/evohome/fixtures/schedule_dhw.json create mode 100644 tests/components/evohome/fixtures/schedule_zone.json create mode 100644 tests/components/evohome/fixtures/status_2738909.json create mode 100644 tests/components/evohome/fixtures/user_account.json create mode 100644 tests/components/evohome/fixtures/user_locations.json create mode 100644 tests/components/evohome/test_storage.py diff --git a/CODEOWNERS b/CODEOWNERS index b53e0a929bb..c9b3a53184f 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -433,6 +433,7 @@ build.json @home-assistant/supervisor /homeassistant/components/evil_genius_labs/ @balloob /tests/components/evil_genius_labs/ @balloob /homeassistant/components/evohome/ @zxdavb +/tests/components/evohome/ @zxdavb /homeassistant/components/ezviz/ @RenierM26 @baqs /tests/components/ezviz/ @RenierM26 @baqs /homeassistant/components/faa_delays/ @ntilley905 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c7934e7fc84..e55efb78abb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -717,6 +717,9 @@ eternalegypt==0.0.16 # homeassistant.components.eufylife_ble eufylife-ble-client==0.1.8 +# homeassistant.components.evohome +evohome-async==0.4.20 + # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 diff --git a/tests/components/evohome/__init__.py b/tests/components/evohome/__init__.py new file mode 100644 index 00000000000..588e0f61746 --- /dev/null +++ b/tests/components/evohome/__init__.py @@ -0,0 +1 @@ +"""The tests for the evohome integration.""" diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py new file mode 100644 index 00000000000..260330896b7 --- /dev/null +++ b/tests/components/evohome/conftest.py @@ -0,0 +1,111 @@ +"""Fixtures and helpers for the evohome tests.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any, Final +from unittest.mock import MagicMock, patch + +from aiohttp import ClientSession +from evohomeasync2 import EvohomeClient +from evohomeasync2.broker import Broker +import pytest + +from homeassistant.components.evohome import CONF_PASSWORD, CONF_USERNAME, DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonArrayType, JsonObjectType + +from .const import ACCESS_TOKEN, REFRESH_TOKEN + +from tests.common import load_json_array_fixture, load_json_object_fixture + +TEST_CONFIG: Final = { + CONF_USERNAME: "username", + CONF_PASSWORD: "password", +} + + 
+def user_account_config_fixture() -> JsonObjectType: + """Load JSON for the config of a user's account.""" + return load_json_object_fixture("user_account.json", DOMAIN) + + +def user_locations_config_fixture() -> JsonArrayType: + """Load JSON for the config of a user's installation (a list of locations).""" + return load_json_array_fixture("user_locations.json", DOMAIN) + + +def location_status_fixture(loc_id: str) -> JsonObjectType: + """Load JSON for the status of a specific location.""" + return load_json_object_fixture(f"status_{loc_id}.json", DOMAIN) + + +def dhw_schedule_fixture() -> JsonObjectType: + """Load JSON for the schedule of a domesticHotWater zone.""" + return load_json_object_fixture("schedule_dhw.json", DOMAIN) + + +def zone_schedule_fixture() -> JsonObjectType: + """Load JSON for the schedule of a temperatureZone zone.""" + return load_json_object_fixture("schedule_zone.json", DOMAIN) + + +async def mock_get( + self: Broker, url: str, **kwargs: Any +) -> JsonArrayType | JsonObjectType: + """Return the JSON for a HTTP get of a given URL.""" + + # a proxy for the behaviour of the real web API + if self.refresh_token is None: + self.refresh_token = f"new_{REFRESH_TOKEN}" + + if self.access_token_expires is None or self.access_token_expires < datetime.now(): + self.access_token = f"new_{ACCESS_TOKEN}" + self.access_token_expires = datetime.now() + timedelta(minutes=30) + + # assume a valid GET, and return the JSON for that web API + if url == "userAccount": # userAccount + return user_account_config_fixture() + + if url.startswith("location"): + if "installationInfo" in url: # location/installationInfo?userId={id} + return user_locations_config_fixture() + if "location" in url: # location/{id}/status + return location_status_fixture("2738909") + + elif "schedule" in url: + if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule + return dhw_schedule_fixture() + if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule + return zone_schedule_fixture() + + pytest.xfail(f"Unexpected URL: {url}") + + +@patch("evohomeasync2.broker.Broker.get", mock_get) +async def setup_evohome(hass: HomeAssistant, test_config: dict[str, str]) -> MagicMock: + """Set up the evohome integration and return its client. + + The class is mocked here to check the client was instantiated with the correct args. + """ + + with ( + patch("homeassistant.components.evohome.evo.EvohomeClient") as mock_client, + patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), + ): + mock_client.side_effect = EvohomeClient + + assert await async_setup_component(hass, DOMAIN, {DOMAIN: test_config}) + await hass.async_block_till_done() + + mock_client.assert_called_once() + + assert mock_client.call_args.args[0] == test_config[CONF_USERNAME] + assert mock_client.call_args.args[1] == test_config[CONF_PASSWORD] + + assert isinstance(mock_client.call_args.kwargs["session"], ClientSession) + + assert mock_client.account_info is not None + + return mock_client diff --git a/tests/components/evohome/const.py b/tests/components/evohome/const.py new file mode 100644 index 00000000000..0b298db533a --- /dev/null +++ b/tests/components/evohome/const.py @@ -0,0 +1,10 @@ +"""Constants for the evohome tests.""" + +from __future__ import annotations + +from typing import Final + +ACCESS_TOKEN: Final = "at_1dc7z657UKzbhKA..." +REFRESH_TOKEN: Final = "rf_jg68ZCKYdxEI3fF..." +SESSION_ID: Final = "F7181186..." 
+USERNAME: Final = "test_user@gmail.com" diff --git a/tests/components/evohome/fixtures/schedule_dhw.json b/tests/components/evohome/fixtures/schedule_dhw.json new file mode 100644 index 00000000000..da9a225fb82 --- /dev/null +++ b/tests/components/evohome/fixtures/schedule_dhw.json @@ -0,0 +1,81 @@ +{ + "dailySchedules": [ + { + "dayOfWeek": "Monday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Tuesday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Wednesday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Thursday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Friday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Saturday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "09:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Sunday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "09:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "23:00:00" } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/schedule_zone.json b/tests/components/evohome/fixtures/schedule_zone.json new file mode 100644 index 00000000000..5030d92ff3d --- /dev/null +++ b/tests/components/evohome/fixtures/schedule_zone.json @@ -0,0 +1,67 @@ +{ + "dailySchedules": [ + { + "dayOfWeek": "Monday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Tuesday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 
18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Wednesday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Thursday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Friday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Saturday", + "switchpoints": [ + { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Sunday", + "switchpoints": [ + { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/status_2738909.json b/tests/components/evohome/fixtures/status_2738909.json new file mode 100644 index 00000000000..6d555ba4e3e --- /dev/null +++ b/tests/components/evohome/fixtures/status_2738909.json @@ -0,0 +1,125 @@ +{ + "locationId": "2738909", + "gateways": [ + { + "gatewayId": "2499896", + "temperatureControlSystems": [ + { + "systemId": "3432522", + "zones": [ + { + "zoneId": "3432521", + "name": "Dead Zone", + "temperatureStatus": { "isAvailable": false }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "activeFaults": [] + }, + { + "zoneId": "3432576", + "name": "Main Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "PermanentOverride" + }, + "activeFaults": [ + { + "faultType": "TempZoneActuatorCommunicationLost", + "since": "2022-03-02T15:56:01" + } + ] + }, + { + "zoneId": "3432577", + "name": "Front Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 21.0, + "setpointMode": "TemporaryOverride", + "until": "2022-03-07T19:00:00Z" + }, + "activeFaults": [ + { + "faultType": "TempZoneActuatorLowBattery", + "since": "2022-03-02T04:50:20" + } + ] + }, + { + "zoneId": "3432578", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kitchen" + }, + { + "zoneId": "3432579", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Bathroom Dn" + }, + { + "zoneId": "3432580", + "temperatureStatus": { "temperature": 21.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Main Bedroom" + }, + { + "zoneId": "3449703", + 
"temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kids Room" + }, + { + "zoneId": "3449740", + "temperatureStatus": { "temperature": 21.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.5, + "setpointMode": "FollowSchedule" + }, + "name": "" + }, + { + "zoneId": "3450733", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 14.0, + "setpointMode": "PermanentOverride" + }, + "name": "Spare Room" + } + ], + "dhw": { + "dhwId": "3933910", + "temperatureStatus": { "temperature": 23.0, "isAvailable": true }, + "stateStatus": { "state": "Off", "mode": "PermanentOverride" }, + "activeFaults": [] + }, + "activeFaults": [], + "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/user_account.json b/tests/components/evohome/fixtures/user_account.json new file mode 100644 index 00000000000..99a96a7961e --- /dev/null +++ b/tests/components/evohome/fixtures/user_account.json @@ -0,0 +1,11 @@ +{ + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith", + "streetAddress": "1 Main Street", + "city": "London", + "postcode": "E1 1AA", + "country": "UnitedKingdom", + "language": "enGB" +} diff --git a/tests/components/evohome/fixtures/user_locations.json b/tests/components/evohome/fixtures/user_locations.json new file mode 100644 index 00000000000..cf59aa9ae8a --- /dev/null +++ b/tests/components/evohome/fixtures/user_locations.json @@ -0,0 +1,346 @@ +[ + { + "locationInfo": { + "locationId": "2738909", + "name": "My Home", + "streetAddress": "1 Main Street", + "city": "London", + "country": "UnitedKingdom", + "postcode": "E1 1AA", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "2499896", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "3432522", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "3432521", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Dead Zone", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432576", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": 
"1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432577", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Front Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432578", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kitchen", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432579", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Bathroom Dn", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432580", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Bedroom", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449703", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kids Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449740", + "modelType": "Unknown", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + 
"FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "", + "zoneType": "Unknown" + }, + { + "zoneId": "3450733", + "modelType": "xx", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Spare Room", + "zoneType": "xx" + } + ], + "dhw": { + "dhwId": "3933910", + "dhwStateCapabilitiesResponse": { + "allowedStates": ["On", "Off"], + "allowedModes": [ + "FollowSchedule", + "PermanentOverride", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilitiesResponse": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00" + } + }, + "allowedSystemModes": [ + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithReset", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "DayOff", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "Custom", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py new file mode 100644 index 00000000000..e87b847a9ff --- /dev/null +++ b/tests/components/evohome/test_storage.py @@ -0,0 +1,208 @@ +"""The tests for evohome storage load & save.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any, Final, NotRequired, TypedDict + +import pytest + +from homeassistant.components.evohome import ( + CONF_PASSWORD, + CONF_USERNAME, + DOMAIN, + STORAGE_KEY, + STORAGE_VER, + dt_aware_to_naive, +) +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .conftest import setup_evohome +from .const import ACCESS_TOKEN, REFRESH_TOKEN, SESSION_ID, USERNAME + + +class _SessionDataT(TypedDict): + sessionId: str + + +class _TokenStoreT(TypedDict): + username: str + refresh_token: str + access_token: str + access_token_expires: str # 2024-07-27T23:57:30+01:00 + user_data: NotRequired[_SessionDataT] + + +class _EmptyStoreT(TypedDict): + pass + + +SZ_USERNAME: Final = "username" +SZ_REFRESH_TOKEN: Final = "refresh_token" +SZ_ACCESS_TOKEN: Final = "access_token" 
+SZ_ACCESS_TOKEN_EXPIRES: Final = "access_token_expires" +SZ_USER_DATA: Final = "user_data" + + +def dt_pair(dt_dtm: datetime) -> tuple[datetime, str]: + """Return a datetime without milliseconds and its string representation.""" + dt_str = dt_dtm.isoformat(timespec="seconds") # e.g. 2024-07-28T00:57:29+01:00 + return dt_util.parse_datetime(dt_str, raise_on_error=True), dt_str + + +ACCESS_TOKEN_EXP_DTM, ACCESS_TOKEN_EXP_STR = dt_pair(dt_util.now() + timedelta(hours=1)) + +USERNAME_DIFF: Final = f"not_{USERNAME}" +USERNAME_SAME: Final = USERNAME + +TEST_CONFIG: Final = { + CONF_USERNAME: USERNAME_SAME, + CONF_PASSWORD: "password", +} + +TEST_DATA: Final[dict[str, _TokenStoreT]] = { + "sans_session_id": { + SZ_USERNAME: USERNAME_SAME, + SZ_REFRESH_TOKEN: REFRESH_TOKEN, + SZ_ACCESS_TOKEN: ACCESS_TOKEN, + SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, + }, + "with_session_id": { + SZ_USERNAME: USERNAME_SAME, + SZ_REFRESH_TOKEN: REFRESH_TOKEN, + SZ_ACCESS_TOKEN: ACCESS_TOKEN, + SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR, + SZ_USER_DATA: {"sessionId": SESSION_ID}, + }, +} + +TEST_DATA_NULL: Final[dict[str, _EmptyStoreT | None]] = { + "store_is_absent": None, + "store_was_reset": {}, +} + +DOMAIN_STORAGE_BASE: Final = { + "version": STORAGE_VER, + "minor_version": 1, + "key": STORAGE_KEY, +} + + +@pytest.mark.parametrize("idx", TEST_DATA_NULL) +async def test_auth_tokens_null( + hass: HomeAssistant, + hass_storage: dict[str, Any], + idx: str, +) -> None: + """Test loading/saving authentication tokens when no cached tokens in the store.""" + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA_NULL[idx]} + + mock_client = await setup_evohome(hass, TEST_CONFIG) + + # Confirm client was instantiated without tokens, as cache was empty... + assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg + + # Confirm the expected tokens were cached to storage... + data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_SAME + assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) + + +@pytest.mark.parametrize("idx", TEST_DATA) +async def test_auth_tokens_same( + hass: HomeAssistant, hass_storage: dict[str, Any], idx: str +) -> None: + """Test loading/saving authentication tokens when matching username.""" + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} + + mock_client = await setup_evohome(hass, TEST_CONFIG) + + # Confirm client was instantiated with the cached tokens... + assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive( + ACCESS_TOKEN_EXP_DTM + ) + + # Confirm the expected tokens were cached to storage... 
+ data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_SAME + assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert data[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES]) == ACCESS_TOKEN_EXP_DTM + + +@pytest.mark.parametrize("idx", TEST_DATA) +async def test_auth_tokens_past( + hass: HomeAssistant, hass_storage: dict[str, Any], idx: str +) -> None: + """Test loading/saving authentication tokens with matching username, but expired.""" + + dt_dtm, dt_str = dt_pair(dt_util.now() - timedelta(hours=1)) + + # make this access token have expired in the past... + test_data = TEST_DATA[idx].copy() # shallow copy is OK here + test_data[SZ_ACCESS_TOKEN_EXPIRES] = dt_str + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": test_data} + + mock_client = await setup_evohome(hass, TEST_CONFIG) + + # Confirm client was instantiated with the cached tokens... + assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN + assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive( + dt_dtm + ) + + # Confirm the expected tokens were cached to storage... + data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_SAME + assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) + + +@pytest.mark.parametrize("idx", TEST_DATA) +async def test_auth_tokens_diff( + hass: HomeAssistant, hass_storage: dict[str, Any], idx: str +) -> None: + """Test loading/saving authentication tokens when unmatched username.""" + + hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} + + mock_client = await setup_evohome( + hass, TEST_CONFIG | {CONF_USERNAME: USERNAME_DIFF} + ) + + # Confirm client was instantiated without tokens, as username was different... + assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs + assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwarg + + # Confirm the expected tokens were cached to storage... + data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_DIFF + assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) From 61cbb770424fa27869c5484c56045521e8c083aa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 3 Aug 2024 06:26:32 -0500 Subject: [PATCH 0340/1635] Remove unneeded cast in logbook rest api (#123098) --- homeassistant/components/logbook/rest_api.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/logbook/rest_api.py b/homeassistant/components/logbook/rest_api.py index bd9efe7aba3..c7ba196275b 100644 --- a/homeassistant/components/logbook/rest_api.py +++ b/homeassistant/components/logbook/rest_api.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from datetime import timedelta from http import HTTPStatus -from typing import Any, cast +from typing import Any from aiohttp import web import voluptuous as vol @@ -109,13 +109,6 @@ class LogbookView(HomeAssistantView): def json_events() -> web.Response: """Fetch events and generate JSON.""" - return self.json( - event_processor.get_events( - start_day, - end_day, - ) - ) + return self.json(event_processor.get_events(start_day, end_day)) - return cast( - web.Response, await get_instance(hass).async_add_executor_job(json_events) - ) + return await get_instance(hass).async_add_executor_job(json_events) From 0fe23c82a4c8ef078095da221500f0e6844f6e2b Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 3 Aug 2024 06:26:54 -0500 Subject: [PATCH 0341/1635] Remove unused variables in logbook LazyEventPartialState (#123097) --- homeassistant/components/logbook/models.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/logbook/models.py b/homeassistant/components/logbook/models.py index 2f9b2c8e289..93fc8885f57 100644 --- a/homeassistant/components/logbook/models.py +++ b/homeassistant/components/logbook/models.py @@ -45,8 +45,6 @@ class LazyEventPartialState: ) -> None: """Init the lazy event.""" self.row = row - self._event_data: dict[str, Any] | None = None - self._event_data_cache = event_data_cache # We need to explicitly check for the row is EventAsRow as the unhappy path # to fetch row.data for Row is very expensive if type(row) is EventAsRow: @@ -60,10 +58,10 @@ class LazyEventPartialState: source = row.event_data if not source: self.data = {} - elif event_data := self._event_data_cache.get(source): + elif event_data := event_data_cache.get(source): self.data = event_data else: - self.data = self._event_data_cache[source] = cast( + self.data = event_data_cache[source] = cast( dict[str, Any], json_loads(source) ) From cdec43ec067868c1436fd18b117616607c807214 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 3 Aug 2024 08:03:28 -0500 Subject: [PATCH 0342/1635] Remove unreachable suppress in logbook (#123096) --- homeassistant/components/logbook/processor.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/logbook/processor.py b/homeassistant/components/logbook/processor.py index ed9888f83d0..28f98bc2ce9 100644 --- a/homeassistant/components/logbook/processor.py +++ b/homeassistant/components/logbook/processor.py @@ -3,7 +3,6 @@ from __future__ import annotations from collections.abc import Callable, Generator, Sequence -from contextlib import suppress from dataclasses import dataclass from datetime import datetime as dt import logging @@ -262,8 +261,7 @@ def _humanify( entry_domain = event_data.get(ATTR_DOMAIN) entry_entity_id = event_data.get(ATTR_ENTITY_ID) if entry_domain is None and entry_entity_id is not None: - with suppress(IndexError): - entry_domain = split_entity_id(str(entry_entity_id))[0] + entry_domain = split_entity_id(str(entry_entity_id))[0] data = { LOGBOOK_ENTRY_WHEN: format_time(row), LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME), From eb5ee1ffd1d41dc3515ea0112584c52366743702 Mon Sep 17 00:00:00 2001 From: Kim de Vos Date: Sat, 3 Aug 2024 15:08:01 +0200 Subject: [PATCH 0343/1635] Use slugify to create id for UniFi WAN latency (#123108) Use slugify to create id for latency --- homeassistant/components/unifi/sensor.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index d86b72d1b2f..08bd0ddb869 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -44,6 +44,7 @@ from homeassistant.core import Event as core_Event, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType +from homeassistant.util import slugify import homeassistant.util.dt as dt_util from . 
import UnifiConfigEntry @@ -247,8 +248,9 @@ def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: def make_wan_latency_entity_description( wan: Literal["WAN", "WAN2"], name: str, monitor_target: str ) -> UnifiSensorEntityDescription: + name_wan = f"{name} {wan}" return UnifiSensorEntityDescription[Devices, Device]( - key=f"{name} {wan} latency", + key=f"{name_wan} latency", entity_category=EntityCategory.DIAGNOSTIC, native_unit_of_measurement=UnitOfTime.MILLISECONDS, state_class=SensorStateClass.MEASUREMENT, @@ -257,13 +259,12 @@ def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: api_handler_fn=lambda api: api.devices, available_fn=async_device_available_fn, device_info_fn=async_device_device_info_fn, - name_fn=lambda _: f"{name} {wan} latency", + name_fn=lambda device: f"{name_wan} latency", object_fn=lambda api, obj_id: api.devices[obj_id], supported_fn=partial( async_device_wan_latency_supported_fn, wan, monitor_target ), - unique_id_fn=lambda hub, - obj_id: f"{name.lower}_{wan.lower}_latency-{obj_id}", + unique_id_fn=lambda hub, obj_id: f"{slugify(name_wan)}_latency-{obj_id}", value_fn=partial(async_device_wan_latency_value_fn, wan, monitor_target), ) From b7d56ad38a7dbb0a4b1d6fce7cd49995c026c04b Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 3 Aug 2024 17:21:12 +0200 Subject: [PATCH 0344/1635] Bump pyenphase to 1.22.0 (#123103) --- homeassistant/components/enphase_envoy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index 09c55fb23ac..aa06a1ff79f 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.20.6"], + "requirements": ["pyenphase==1.22.0"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." 
diff --git a/requirements_all.txt b/requirements_all.txt index 2805ad2c716..559290fd069 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1843,7 +1843,7 @@ pyeiscp==0.0.7 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.6 +pyenphase==1.22.0 # homeassistant.components.envisalink pyenvisalink==4.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e55efb78abb..4ff84200d77 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1475,7 +1475,7 @@ pyegps==0.2.5 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.6 +pyenphase==1.22.0 # homeassistant.components.everlights pyeverlights==0.1.0 From 02f81ec481870254497a963233858a2642a40fd8 Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Sat, 3 Aug 2024 22:32:47 +0200 Subject: [PATCH 0345/1635] Fix wrong DeviceInfo in bluesound integration (#123101) Fix bluesound device info --- homeassistant/components/bluesound/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 809ba293f89..dc09feaed63 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -255,7 +255,7 @@ class BluesoundPlayer(MediaPlayerEntity): self._attr_unique_id = format_unique_id(sync_status.mac, port) # there should always be one player with the default port per mac - if port is DEFAULT_PORT: + if port == DEFAULT_PORT: self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, format_mac(sync_status.mac))}, connections={(CONNECTION_NETWORK_MAC, format_mac(sync_status.mac))}, From b6de2cd741133cff15e9978ef478c3bbbba1f0b3 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 3 Aug 2024 17:33:46 -0500 Subject: [PATCH 0346/1635] Unpack non-performant any expressions in config flow discovery path (#123124) --- homeassistant/config_entries.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index aa0113cd7ce..75b0631339f 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -1245,10 +1245,10 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): @callback def _async_has_other_discovery_flows(self, flow_id: str) -> bool: """Check if there are any other discovery flows in progress.""" - return any( - flow.context["source"] in DISCOVERY_SOURCES and flow.flow_id != flow_id - for flow in self._progress.values() - ) + for flow in self._progress.values(): + if flow.flow_id != flow_id and flow.context["source"] in DISCOVERY_SOURCES: + return True + return False async def async_init( self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None @@ -1699,12 +1699,12 @@ class ConfigEntries: entries = self._entries.get_entries_for_domain(domain) if include_ignore and include_disabled: return bool(entries) - return any( - entry - for entry in entries - if (include_ignore or entry.source != SOURCE_IGNORE) - and (include_disabled or not entry.disabled_by) - ) + for entry in entries: + if (include_ignore or entry.source != SOURCE_IGNORE) and ( + include_disabled or not entry.disabled_by + ): + return True + return False @callback def async_entries( From 232f78e7b68e3e97713b998cd8fcab8d312253fe Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Sun, 4 Aug 2024 08:28:45 -0400 Subject: [PATCH 0347/1635] Restore old service worker URL (#123131) --- homeassistant/components/frontend/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index 5b462842e4a..c5df84cf549 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -398,6 +398,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: static_paths_configs: list[StaticPathConfig] = [] for path, should_cache in ( + ("service_worker.js", False), ("sw-modern.js", False), ("sw-modern.js.map", False), ("sw-legacy.js", False), From 70704f67d39605132e66c62b2869108861da9aeb Mon Sep 17 00:00:00 2001 From: c0ffeeca7 <38767475+c0ffeeca7@users.noreply.github.com> Date: Sun, 4 Aug 2024 15:17:54 +0200 Subject: [PATCH 0348/1635] Recorder system info: fix capitalization (#123141) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/recorder/strings.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index f891b4d18d2..2ded6be58d6 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -1,11 +1,11 @@ { "system_health": { "info": { - "oldest_recorder_run": "Oldest Run Start Time", - "current_recorder_run": "Current Run Start Time", - "estimated_db_size": "Estimated Database Size (MiB)", - "database_engine": "Database Engine", - "database_version": "Database Version" + "oldest_recorder_run": "Oldest run start time", + "current_recorder_run": "Current run start time", + "estimated_db_size": "Estimated database size (MiB)", + "database_engine": "Database engine", + 
"database_version": "Database version" } }, "issues": { From e682d8c6e2cb08074f167c8877f04da5602a2c30 Mon Sep 17 00:00:00 2001 From: Yaroslav Halchenko Date: Sun, 4 Aug 2024 04:39:41 -1000 Subject: [PATCH 0349/1635] Handle command_line missing discovery_info (#116873) * command_line: Do not lead to erroring out code indexing None or empty discovery_info * Apply suggestions from code review Co-authored-by: Franck Nijhof --------- Co-authored-by: Erik Montnemery Co-authored-by: Franck Nijhof --- homeassistant/components/command_line/switch.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index f8e9d21cf23..a3c3d0b3e9c 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -37,6 +37,8 @@ async def async_setup_platform( ) -> None: """Find and return switches controlled by shell commands.""" + if not discovery_info: + return switches = [] discovery_info = cast(DiscoveryInfoType, discovery_info) entities: dict[str, dict[str, Any]] = { From 30f4d1b9581827a26b480bec5b210093f5b93d62 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 4 Aug 2024 20:18:19 +0200 Subject: [PATCH 0350/1635] Fix implicit-return in overkiz (#123000) --- .../atlantic_pass_apc_zone_control_zone.py | 9 ++++++--- .../somfy_heating_temperature_interface.py | 3 ++- .../overkiz/water_heater_entities/hitachi_dhw.py | 3 ++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py index f18edd0cfe6..9027dcf8d03 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py +++ b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py @@ -234,7 +234,8 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Set new target hvac mode.""" if self.is_using_derogated_temperature_fallback: - return await super().async_set_hvac_mode(hvac_mode) + await super().async_set_hvac_mode(hvac_mode) + return # They are mainly managed by the Zone Control device # However, it make sense to map the OFF Mode to the Overkiz STOP Preset @@ -287,7 +288,8 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Set new preset mode.""" if self.is_using_derogated_temperature_fallback: - return await super().async_set_preset_mode(preset_mode) + await super().async_set_preset_mode(preset_mode) + return mode = PRESET_MODES_TO_OVERKIZ[preset_mode] @@ -361,7 +363,8 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Set new temperature.""" if self.is_using_derogated_temperature_fallback: - return await super().async_set_temperature(**kwargs) + await super().async_set_temperature(**kwargs) + return target_temperature = kwargs.get(ATTR_TEMPERATURE) target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW) diff --git a/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py b/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py index 85ce7ae57e3..acc761664ec 100644 --- a/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py +++ b/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py @@ -181,6 +181,7 @@ class 
SomfyHeatingTemperatureInterface(OverkizEntity, ClimateEntity): OverkizState.OVP_HEATING_TEMPERATURE_INTERFACE_SETPOINT_MODE ] ) and mode.value_as_str: - return await self.executor.async_execute_command( + await self.executor.async_execute_command( SETPOINT_MODE_TO_OVERKIZ_COMMAND[mode.value_as_str], temperature ) + return diff --git a/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py b/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py index 9f0a8798233..dc2a93a8d2f 100644 --- a/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py +++ b/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py @@ -87,9 +87,10 @@ class HitachiDHW(OverkizEntity, WaterHeaterEntity): """Set new target operation mode.""" # Turn water heater off if operation_mode == OverkizCommandParam.OFF: - return await self.executor.async_execute_command( + await self.executor.async_execute_command( OverkizCommand.SET_CONTROL_DHW, OverkizCommandParam.STOP ) + return # Turn water heater on, when off if self.current_operation == OverkizCommandParam.OFF: From a9d8e47979583a2d21f1ba8afea8dc13c072c561 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sun, 4 Aug 2024 23:02:41 +0200 Subject: [PATCH 0351/1635] Support `DeviceInfo.model_id` in MQTT integration (#123152) Add support for model_id --- homeassistant/components/mqtt/abbreviations.py | 1 + homeassistant/components/mqtt/mixins.py | 5 +++++ homeassistant/components/mqtt/schemas.py | 2 ++ homeassistant/const.py | 2 ++ tests/components/mqtt/test_common.py | 4 ++++ 5 files changed, 14 insertions(+) diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index c3efe5667ad..f4a32bbdf9d 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -267,6 +267,7 @@ DEVICE_ABBREVIATIONS = { "name": "name", "mf": "manufacturer", "mdl": "model", + "mdl_id": "model_id", "hw": "hw_version", "sw": "sw_version", "sa": "suggested_area", diff --git a/homeassistant/components/mqtt/mixins.py b/homeassistant/components/mqtt/mixins.py index aca88f2cb97..ce811e13a24 100644 --- a/homeassistant/components/mqtt/mixins.py +++ b/homeassistant/components/mqtt/mixins.py @@ -16,6 +16,7 @@ from homeassistant.const import ( ATTR_HW_VERSION, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, ATTR_SERIAL_NUMBER, ATTR_SUGGESTED_AREA, @@ -25,6 +26,7 @@ from homeassistant.const import ( CONF_ENTITY_CATEGORY, CONF_ICON, CONF_MODEL, + CONF_MODEL_ID, CONF_NAME, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -992,6 +994,9 @@ def device_info_from_specifications( if CONF_MODEL in specifications: info[ATTR_MODEL] = specifications[CONF_MODEL] + if CONF_MODEL_ID in specifications: + info[ATTR_MODEL_ID] = specifications[CONF_MODEL_ID] + if CONF_NAME in specifications: info[ATTR_NAME] = specifications[CONF_NAME] diff --git a/homeassistant/components/mqtt/schemas.py b/homeassistant/components/mqtt/schemas.py index bbc0194a1a5..67c6b447709 100644 --- a/homeassistant/components/mqtt/schemas.py +++ b/homeassistant/components/mqtt/schemas.py @@ -9,6 +9,7 @@ from homeassistant.const import ( CONF_ENTITY_CATEGORY, CONF_ICON, CONF_MODEL, + CONF_MODEL_ID, CONF_NAME, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -112,6 +113,7 @@ MQTT_ENTITY_DEVICE_INFO_SCHEMA = vol.All( ), vol.Optional(CONF_MANUFACTURER): cv.string, vol.Optional(CONF_MODEL): cv.string, + vol.Optional(CONF_MODEL_ID): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_HW_VERSION): 
cv.string, vol.Optional(CONF_SERIAL_NUMBER): cv.string, diff --git a/homeassistant/const.py b/homeassistant/const.py index 389aaf8fef3..891cc0cc023 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -222,6 +222,7 @@ CONF_METHOD: Final = "method" CONF_MINIMUM: Final = "minimum" CONF_MODE: Final = "mode" CONF_MODEL: Final = "model" +CONF_MODEL_ID: Final = "model_id" CONF_MONITORED_CONDITIONS: Final = "monitored_conditions" CONF_MONITORED_VARIABLES: Final = "monitored_variables" CONF_NAME: Final = "name" @@ -565,6 +566,7 @@ ATTR_CONNECTIONS: Final = "connections" ATTR_DEFAULT_NAME: Final = "default_name" ATTR_MANUFACTURER: Final = "manufacturer" ATTR_MODEL: Final = "model" +ATTR_MODEL_ID: Final = "model_id" ATTR_SERIAL_NUMBER: Final = "serial_number" ATTR_SUGGESTED_AREA: Final = "suggested_area" ATTR_SW_VERSION: Final = "sw_version" diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index 8d457d9da85..f7ebd039d1a 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -42,6 +42,7 @@ DEFAULT_CONFIG_DEVICE_INFO_ID = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", + "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -54,6 +55,7 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", + "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -999,6 +1001,7 @@ async def help_test_entity_device_info_with_identifier( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" + assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" @@ -1035,6 +1038,7 @@ async def help_test_entity_device_info_with_connection( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" + assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" From ccd157dc26cb3b514a6708bb162b1ca9c7b87783 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 4 Aug 2024 23:03:40 +0200 Subject: [PATCH 0352/1635] Use coordinator setup method in filesize (#123139) --- homeassistant/components/filesize/coordinator.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/filesize/coordinator.py b/homeassistant/components/filesize/coordinator.py index 37fba19fb4e..c0dbb14555e 100644 --- a/homeassistant/components/filesize/coordinator.py +++ b/homeassistant/components/filesize/coordinator.py @@ -46,14 +46,15 @@ class FileSizeCoordinator(DataUpdateCoordinator[dict[str, int | float | datetime def _update(self) -> os.stat_result: """Fetch file information.""" - if not hasattr(self, "path"): - self.path = self._get_full_path() - try: return self.path.stat() except OSError as error: raise UpdateFailed(f"Can not retrieve file statistics {error}") from error + async def _async_setup(self) -> None: + """Set up path.""" + self.path = await self.hass.async_add_executor_job(self._get_full_path) + async def _async_update_data(self) -> dict[str, float | int | datetime]: """Fetch file information.""" statinfo = await self.hass.async_add_executor_job(self._update) From 3353c3c20572113a1b756aebd2948c82a4009e2e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 4 Aug 2024 16:05:56 -0500 Subject: [PATCH 0353/1635] Remove unneeded formatter argument from logbook websocket_api (#123095) --- homeassistant/components/logbook/websocket_api.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/homeassistant/components/logbook/websocket_api.py b/homeassistant/components/logbook/websocket_api.py index cac58971cde..b776ad6303d 100644 --- a/homeassistant/components/logbook/websocket_api.py +++ b/homeassistant/components/logbook/websocket_api.py @@ -81,7 +81,6 @@ async def _async_send_historical_events( msg_id: int, start_time: dt, end_time: dt, - formatter: Callable[[int, Any], dict[str, Any]], event_processor: EventProcessor, partial: bool, force_send: bool = False, @@ -109,7 +108,6 @@ async def _async_send_historical_events( msg_id, start_time, end_time, - formatter, event_processor, partial, ) @@ -131,7 +129,6 @@ async def _async_send_historical_events( msg_id, recent_query_start, end_time, - formatter, event_processor, partial=True, ) @@ -143,7 +140,6 @@ async def _async_send_historical_events( msg_id, start_time, recent_query_start, - formatter, event_processor, partial, ) @@ -164,7 +160,6 @@ async def _async_get_ws_stream_events( msg_id: int, start_time: dt, end_time: dt, - formatter: Callable[[int, Any], dict[str, Any]], event_processor: EventProcessor, partial: bool, ) -> tuple[bytes, dt | None]: @@ -174,7 +169,6 @@ async def _async_get_ws_stream_events( msg_id, start_time, end_time, - formatter, event_processor, partial, ) @@ -195,7 +189,6 @@ def _ws_stream_get_events( msg_id: int, start_day: dt, end_day: dt, - formatter: Callable[[int, Any], dict[str, Any]], event_processor: EventProcessor, partial: bool, ) -> tuple[bytes, dt | None]: @@ -211,7 +204,7 @@ def _ws_stream_get_events( # data in case the UI needs to show that historical # data is still loading in the future message["partial"] = True - return json_bytes(formatter(msg_id, message)), last_time + return json_bytes(messages.event_message(msg_id, message)), last_time async def _async_events_consumer( @@ -318,7 +311,6 @@ async def ws_event_stream( msg_id, start_time, end_time, - messages.event_message, event_processor, partial=False, ) @@ -385,7 +377,6 @@ async def ws_event_stream( msg_id, start_time, subscriptions_setup_complete_time, - messages.event_message, event_processor, partial=True, # Force a send since the wait for the sync task @@ -431,7 +422,6 @@ async def ws_event_stream( # we could fetch the same event twice (last_event_time or start_time) + timedelta(microseconds=1), subscriptions_setup_complete_time, - messages.event_message, event_processor, partial=False, ) From b09dd95dbdb0a04294cff4fcb5edfb13f61cd8f1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 4 Aug 2024 16:09:10 -0500 Subject: [PATCH 0354/1635] Improve alignment of live logbook and historical logbook models (#123070) * Improve alignment of live logbook and historical logbook models - Make EventAsRow as NamedType which is better aligned with sqlalchemy Row - Use getitem to fetch results for both Row and EventAsRow since its an order of magnitude faster fetching sqlalchemy Row object values. 
* final * fix * unused * fix more tests * cleanup * reduce * tweak --- homeassistant/components/logbook/models.py | 100 ++++++++++++------ homeassistant/components/logbook/processor.py | 58 ++++++---- homeassistant/scripts/benchmark/__init__.py | 49 +-------- tests/components/logbook/common.py | 8 +- tests/components/logbook/test_init.py | 62 +++-------- tests/components/logbook/test_models.py | 16 ++- 6 files changed, 138 insertions(+), 155 deletions(-) diff --git a/homeassistant/components/logbook/models.py b/homeassistant/components/logbook/models.py index 93fc8885f57..8fd850b26fb 100644 --- a/homeassistant/components/logbook/models.py +++ b/homeassistant/components/logbook/models.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass from functools import cached_property -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any, Final, NamedTuple, cast from sqlalchemy.engine.row import Row @@ -46,16 +46,16 @@ class LazyEventPartialState: """Init the lazy event.""" self.row = row # We need to explicitly check for the row is EventAsRow as the unhappy path - # to fetch row.data for Row is very expensive + # to fetch row[DATA_POS] for Row is very expensive if type(row) is EventAsRow: # If its an EventAsRow we can avoid the whole # json decode process as we already have the data - self.data = row.data + self.data = row[DATA_POS] return if TYPE_CHECKING: - source = cast(str, row.event_data) + source = cast(str, row[EVENT_DATA_POS]) else: - source = row.event_data + source = row[EVENT_DATA_POS] if not source: self.data = {} elif event_data := event_data_cache.get(source): @@ -68,51 +68,73 @@ class LazyEventPartialState: @cached_property def event_type(self) -> EventType[Any] | str | None: """Return the event type.""" - return self.row.event_type + return self.row[EVENT_TYPE_POS] @cached_property def entity_id(self) -> str | None: """Return the entity id.""" - return self.row.entity_id + return self.row[ENTITY_ID_POS] @cached_property def state(self) -> str | None: """Return the state.""" - return self.row.state + return self.row[STATE_POS] @cached_property def context_id(self) -> str | None: """Return the context id.""" - return bytes_to_ulid_or_none(self.row.context_id_bin) + return bytes_to_ulid_or_none(self.row[CONTEXT_ID_BIN_POS]) @cached_property def context_user_id(self) -> str | None: """Return the context user id.""" - return bytes_to_uuid_hex_or_none(self.row.context_user_id_bin) + return bytes_to_uuid_hex_or_none(self.row[CONTEXT_USER_ID_BIN_POS]) @cached_property def context_parent_id(self) -> str | None: """Return the context parent id.""" - return bytes_to_ulid_or_none(self.row.context_parent_id_bin) + return bytes_to_ulid_or_none(self.row[CONTEXT_PARENT_ID_BIN_POS]) -@dataclass(slots=True, frozen=True) -class EventAsRow: - """Convert an event to a row.""" +# Row order must match the query order in queries/common.py +# --------------------------------------------------------- +ROW_ID_POS: Final = 0 +EVENT_TYPE_POS: Final = 1 +EVENT_DATA_POS: Final = 2 +TIME_FIRED_TS_POS: Final = 3 +CONTEXT_ID_BIN_POS: Final = 4 +CONTEXT_USER_ID_BIN_POS: Final = 5 +CONTEXT_PARENT_ID_BIN_POS: Final = 6 +STATE_POS: Final = 7 +ENTITY_ID_POS: Final = 8 +ICON_POS: Final = 9 +CONTEXT_ONLY_POS: Final = 10 +# - For EventAsRow, additional fields are: +DATA_POS: Final = 11 +CONTEXT_POS: Final = 12 + +class EventAsRow(NamedTuple): + """Convert an event to a row. 
+ + This much always match the order of the columns in queries/common.py + """ + + row_id: int + event_type: EventType[Any] | str | None + event_data: str | None + time_fired_ts: float + context_id_bin: bytes + context_user_id_bin: bytes | None + context_parent_id_bin: bytes | None + state: str | None + entity_id: str | None + icon: str | None + context_only: bool | None + + # Additional fields for EventAsRow data: Mapping[str, Any] context: Context - context_id_bin: bytes - time_fired_ts: float - row_id: int - event_data: str | None = None - entity_id: str | None = None - icon: str | None = None - context_user_id_bin: bytes | None = None - context_parent_id_bin: bytes | None = None - event_type: EventType[Any] | str | None = None - state: str | None = None - context_only: None = None @callback @@ -121,14 +143,19 @@ def async_event_to_row(event: Event) -> EventAsRow: if event.event_type != EVENT_STATE_CHANGED: context = event.context return EventAsRow( - data=event.data, - context=event.context, + row_id=hash(event), event_type=event.event_type, + event_data=None, + time_fired_ts=event.time_fired_timestamp, context_id_bin=ulid_to_bytes(context.id), context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), - time_fired_ts=event.time_fired_timestamp, - row_id=hash(event), + state=None, + entity_id=None, + icon=None, + context_only=None, + data=event.data, + context=context, ) # States are prefiltered so we never get states # that are missing new_state or old_state @@ -136,14 +163,17 @@ def async_event_to_row(event: Event) -> EventAsRow: new_state: State = event.data["new_state"] context = new_state.context return EventAsRow( - data=event.data, - context=event.context, - entity_id=new_state.entity_id, - state=new_state.state, + row_id=hash(event), + event_type=None, + event_data=None, + time_fired_ts=new_state.last_updated_timestamp, context_id_bin=ulid_to_bytes(context.id), context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), - time_fired_ts=new_state.last_updated_timestamp, - row_id=hash(event), + state=new_state.state, + entity_id=new_state.entity_id, icon=new_state.attributes.get(ATTR_ICON), + context_only=None, + data=event.data, + context=context, ) diff --git a/homeassistant/components/logbook/processor.py b/homeassistant/components/logbook/processor.py index 28f98bc2ce9..8d577089ea4 100644 --- a/homeassistant/components/logbook/processor.py +++ b/homeassistant/components/logbook/processor.py @@ -6,6 +6,7 @@ from collections.abc import Callable, Generator, Sequence from dataclasses import dataclass from datetime import datetime as dt import logging +import time from typing import Any from sqlalchemy.engine import Result @@ -17,7 +18,6 @@ from homeassistant.components.recorder.models import ( bytes_to_uuid_hex_or_none, extract_event_type_ids, extract_metadata_ids, - process_datetime_to_timestamp, process_timestamp_to_utc_isoformat, ) from homeassistant.components.recorder.util import ( @@ -62,7 +62,23 @@ from .const import ( LOGBOOK_ENTRY_WHEN, ) from .helpers import is_sensor_continuous -from .models import EventAsRow, LazyEventPartialState, LogbookConfig, async_event_to_row +from .models import ( + CONTEXT_ID_BIN_POS, + CONTEXT_ONLY_POS, + CONTEXT_PARENT_ID_BIN_POS, + CONTEXT_POS, + CONTEXT_USER_ID_BIN_POS, + ENTITY_ID_POS, + EVENT_TYPE_POS, + ICON_POS, + ROW_ID_POS, + STATE_POS, + TIME_FIRED_TS_POS, + EventAsRow, + LazyEventPartialState, + 
LogbookConfig, + async_event_to_row, +) from .queries import statement_for_request from .queries.common import PSEUDO_EVENT_STATE_CHANGED @@ -206,17 +222,17 @@ def _humanify( # Process rows for row in rows: - context_id_bin: bytes = row.context_id_bin + context_id_bin: bytes = row[CONTEXT_ID_BIN_POS] if memoize_new_contexts and context_id_bin not in context_lookup: context_lookup[context_id_bin] = row - if row.context_only: + if row[CONTEXT_ONLY_POS]: continue - event_type = row.event_type + event_type = row[EVENT_TYPE_POS] if event_type == EVENT_CALL_SERVICE: continue if event_type is PSEUDO_EVENT_STATE_CHANGED: - entity_id = row.entity_id + entity_id = row[ENTITY_ID_POS] assert entity_id is not None # Skip continuous sensors if ( @@ -229,12 +245,12 @@ def _humanify( data = { LOGBOOK_ENTRY_WHEN: format_time(row), - LOGBOOK_ENTRY_STATE: row.state, + LOGBOOK_ENTRY_STATE: row[STATE_POS], LOGBOOK_ENTRY_ENTITY_ID: entity_id, } if include_entity_name: data[LOGBOOK_ENTRY_NAME] = entity_name_cache.get(entity_id) - if icon := row.icon: + if icon := row[ICON_POS]: data[LOGBOOK_ENTRY_ICON] = icon context_augmenter.augment(data, row, context_id_bin) @@ -292,9 +308,11 @@ class ContextAugmenter: context_row := self.context_lookup.get(context_id_bin) ): return context_row - if (context := getattr(row, "context", None)) is not None and ( - origin_event := context.origin_event - ) is not None: + if ( + type(row) is EventAsRow + and (context := row[CONTEXT_POS]) is not None + and (origin_event := context.origin_event) is not None + ): return async_event_to_row(origin_event) return None @@ -302,7 +320,7 @@ class ContextAugmenter: self, data: dict[str, Any], row: Row | EventAsRow, context_id_bin: bytes | None ) -> None: """Augment data from the row and cache.""" - if context_user_id_bin := row.context_user_id_bin: + if context_user_id_bin := row[CONTEXT_USER_ID_BIN_POS]: data[CONTEXT_USER_ID] = bytes_to_uuid_hex_or_none(context_user_id_bin) if not (context_row := self._get_context_row(context_id_bin, row)): @@ -311,7 +329,7 @@ class ContextAugmenter: if _rows_match(row, context_row): # This is the first event with the given ID. Was it directly caused by # a parent event? - context_parent_id_bin = row.context_parent_id_bin + context_parent_id_bin = row[CONTEXT_PARENT_ID_BIN_POS] if ( not context_parent_id_bin or ( @@ -326,10 +344,10 @@ class ContextAugmenter: # this log entry. 
if _rows_match(row, context_row): return - event_type = context_row.event_type + event_type = context_row[EVENT_TYPE_POS] # State change - if context_entity_id := context_row.entity_id: - data[CONTEXT_STATE] = context_row.state + if context_entity_id := context_row[ENTITY_ID_POS]: + data[CONTEXT_STATE] = context_row[STATE_POS] data[CONTEXT_ENTITY_ID] = context_entity_id if self.include_entity_name: data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get( @@ -375,20 +393,22 @@ class ContextAugmenter: def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool: """Check of rows match by using the same method as Events __hash__.""" return bool( - row is other_row or (row_id := row.row_id) and row_id == other_row.row_id + row is other_row + or (row_id := row[ROW_ID_POS]) + and row_id == other_row[ROW_ID_POS] ) def _row_time_fired_isoformat(row: Row | EventAsRow) -> str: """Convert the row timed_fired to isoformat.""" return process_timestamp_to_utc_isoformat( - dt_util.utc_from_timestamp(row.time_fired_ts) or dt_util.utcnow() + dt_util.utc_from_timestamp(row[TIME_FIRED_TS_POS]) or dt_util.utcnow() ) def _row_time_fired_timestamp(row: Row | EventAsRow) -> float: """Convert the row timed_fired to timestamp.""" - return row.time_fired_ts or process_datetime_to_timestamp(dt_util.utcnow()) + return row[TIME_FIRED_TS_POS] or time.time() class EntityNameCache: diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index d39b1b64861..b769d385a4f 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -4,10 +4,8 @@ from __future__ import annotations import argparse import asyncio -import collections from collections.abc import Callable from contextlib import suppress -import json import logging from timeit import default_timer as timer @@ -18,7 +16,7 @@ from homeassistant.helpers.event import ( async_track_state_change, async_track_state_change_event, ) -from homeassistant.helpers.json import JSON_DUMP, JSONEncoder +from homeassistant.helpers.json import JSON_DUMP # mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs # mypy: no-warn-return-any @@ -310,48 +308,3 @@ async def json_serialize_states(hass): start = timer() JSON_DUMP(states) return timer() - start - - -def _create_state_changed_event_from_old_new( - entity_id, event_time_fired, old_state, new_state -): - """Create a state changed event from a old and new state.""" - attributes = {} - if new_state is not None: - attributes = new_state.get("attributes") - attributes_json = json.dumps(attributes, cls=JSONEncoder) - if attributes_json == "null": - attributes_json = "{}" - row = collections.namedtuple( # noqa: PYI024 - "Row", - [ - "event_type" - "event_data" - "time_fired" - "context_id" - "context_user_id" - "state" - "entity_id" - "domain" - "attributes" - "state_id", - "old_state_id", - ], - ) - - row.event_type = EVENT_STATE_CHANGED - row.event_data = "{}" - row.attributes = attributes_json - row.time_fired = event_time_fired - row.state = new_state and new_state.get("state") - row.entity_id = entity_id - row.domain = entity_id and core.split_entity_id(entity_id)[0] - row.context_id = None - row.context_user_id = None - row.old_state_id = old_state and 1 - row.state_id = new_state and 1 - - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import logbook - - return logbook.LazyEventPartialState(row, {}) diff --git a/tests/components/logbook/common.py 
b/tests/components/logbook/common.py index 67f12955581..c55b6230418 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components import logbook from homeassistant.components.logbook import processor -from homeassistant.components.logbook.models import LogbookConfig +from homeassistant.components.logbook.models import EventAsRow, LogbookConfig from homeassistant.components.recorder.models import ( process_timestamp_to_utc_isoformat, ulid_to_bytes_or_none, @@ -18,6 +18,8 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.json import JSONEncoder import homeassistant.util.dt as dt_util +IDX_TO_NAME = dict(enumerate(EventAsRow._fields)) + class MockRow: """Minimal row mock.""" @@ -48,6 +50,10 @@ class MockRow: self.attributes = None self.context_only = False + def __getitem__(self, idx: int) -> Any: + """Get item.""" + return getattr(self, IDX_TO_NAME[idx]) + @property def time_fired_minute(self): """Minute the event was fired.""" diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 34052cd8024..3a20aac2602 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -1,11 +1,9 @@ """The tests for the logbook component.""" import asyncio -import collections from collections.abc import Callable from datetime import datetime, timedelta from http import HTTPStatus -import json from unittest.mock import Mock from freezegun import freeze_time @@ -15,7 +13,7 @@ import voluptuous as vol from homeassistant.components import logbook, recorder from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED -from homeassistant.components.logbook.models import LazyEventPartialState +from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState from homeassistant.components.logbook.processor import EventProcessor from homeassistant.components.logbook.queries.common import PSEUDO_EVENT_STATE_CHANGED from homeassistant.components.recorder import Recorder @@ -44,7 +42,6 @@ import homeassistant.core as ha from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS -from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -324,50 +321,21 @@ def create_state_changed_event_from_old_new( entity_id, event_time_fired, old_state, new_state ): """Create a state changed event from a old and new state.""" - attributes = {} - if new_state is not None: - attributes = new_state.get("attributes") - attributes_json = json.dumps(attributes, cls=JSONEncoder) - row = collections.namedtuple( # noqa: PYI024 - "Row", - [ - "event_type", - "event_data", - "time_fired", - "time_fired_ts", - "context_id_bin", - "context_user_id_bin", - "context_parent_id_bin", - "state", - "entity_id", - "domain", - "attributes", - "state_id", - "old_state_id", - "shared_attrs", - "shared_data", - "context_only", - ], + row = EventAsRow( + row_id=1, + event_type=PSEUDO_EVENT_STATE_CHANGED, + event_data="{}", + time_fired_ts=dt_util.utc_to_timestamp(event_time_fired), + context_id_bin=None, + context_user_id_bin=None, + context_parent_id_bin=None, + state=new_state and new_state.get("state"), + 
entity_id=entity_id, + icon=None, + context_only=False, + data=None, + context=None, ) - - row.event_type = PSEUDO_EVENT_STATE_CHANGED - row.event_data = "{}" - row.shared_data = "{}" - row.attributes = attributes_json - row.shared_attrs = attributes_json - row.time_fired = event_time_fired - row.time_fired_ts = dt_util.utc_to_timestamp(event_time_fired) - row.state = new_state and new_state.get("state") - row.entity_id = entity_id - row.domain = entity_id and ha.split_entity_id(entity_id)[0] - row.context_only = False - row.context_id_bin = None - row.friendly_name = None - row.icon = None - row.context_user_id_bin = None - row.context_parent_id_bin = None - row.old_state_id = old_state and 1 - row.state_id = new_state and 1 return LazyEventPartialState(row, {}) diff --git a/tests/components/logbook/test_models.py b/tests/components/logbook/test_models.py index 7021711014f..cfdd7efc727 100644 --- a/tests/components/logbook/test_models.py +++ b/tests/components/logbook/test_models.py @@ -2,20 +2,26 @@ from unittest.mock import Mock -from homeassistant.components.logbook.models import LazyEventPartialState +from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState def test_lazy_event_partial_state_context() -> None: """Test we can extract context from a lazy event partial state.""" state = LazyEventPartialState( - Mock( + EventAsRow( + row_id=1, + event_type="event_type", + event_data={}, + time_fired_ts=1, context_id_bin=b"1234123412341234", context_user_id_bin=b"1234123412341234", context_parent_id_bin=b"4444444444444444", - event_data={}, - event_type="event_type", - entity_id="entity_id", state="state", + entity_id="entity_id", + icon="icon", + context_only=False, + data={}, + context=Mock(), ), {}, ) From 6b7307df81b6918a04c9167d57ae6a266552a7d7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 4 Aug 2024 16:43:43 -0500 Subject: [PATCH 0355/1635] Speed up logbook timestamp processing (#123126) --- homeassistant/components/logbook/processor.py | 36 ++++++++----------- tests/components/logbook/common.py | 2 +- 2 files changed, 16 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/logbook/processor.py b/homeassistant/components/logbook/processor.py index 8d577089ea4..05566e52509 100644 --- a/homeassistant/components/logbook/processor.py +++ b/homeassistant/components/logbook/processor.py @@ -97,7 +97,7 @@ class LogbookRun: event_cache: EventCache entity_name_cache: EntityNameCache include_entity_name: bool - format_time: Callable[[Row | EventAsRow], Any] + timestamp: bool memoize_new_contexts: bool = True @@ -126,16 +126,13 @@ class EventProcessor: self.context_id = context_id logbook_config: LogbookConfig = hass.data[DOMAIN] self.filters: Filters | None = logbook_config.sqlalchemy_filter - format_time = ( - _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat - ) self.logbook_run = LogbookRun( context_lookup={None: None}, external_events=logbook_config.external_events, event_cache=EventCache({}), entity_name_cache=EntityNameCache(self.hass), include_entity_name=include_entity_name, - format_time=format_time, + timestamp=timestamp, ) self.context_augmenter = ContextAugmenter(self.logbook_run) @@ -217,7 +214,7 @@ def _humanify( event_cache = logbook_run.event_cache entity_name_cache = logbook_run.entity_name_cache include_entity_name = logbook_run.include_entity_name - format_time = logbook_run.format_time + timestamp = logbook_run.timestamp memoize_new_contexts = logbook_run.memoize_new_contexts # Process rows @@ -231,6 +228,15 @@ def _humanify( if event_type == EVENT_CALL_SERVICE: continue + + time_fired_ts = row[TIME_FIRED_TS_POS] + if timestamp: + when = time_fired_ts or time.time() + else: + when = process_timestamp_to_utc_isoformat( + dt_util.utc_from_timestamp(time_fired_ts) or dt_util.utcnow() + ) + if event_type is PSEUDO_EVENT_STATE_CHANGED: entity_id = row[ENTITY_ID_POS] assert entity_id is not None @@ -244,7 +250,7 @@ def _humanify( continue data = { - LOGBOOK_ENTRY_WHEN: format_time(row), + LOGBOOK_ENTRY_WHEN: when, LOGBOOK_ENTRY_STATE: row[STATE_POS], LOGBOOK_ENTRY_ENTITY_ID: entity_id, } @@ -265,7 +271,7 @@ def _humanify( "Error with %s describe event for %s", domain, event_type ) continue - data[LOGBOOK_ENTRY_WHEN] = format_time(row) + data[LOGBOOK_ENTRY_WHEN] = when data[LOGBOOK_ENTRY_DOMAIN] = domain context_augmenter.augment(data, row, context_id_bin) yield data @@ -279,7 +285,7 @@ def _humanify( if entry_domain is None and entry_entity_id is not None: entry_domain = split_entity_id(str(entry_entity_id))[0] data = { - LOGBOOK_ENTRY_WHEN: format_time(row), + LOGBOOK_ENTRY_WHEN: when, LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME), LOGBOOK_ENTRY_MESSAGE: event_data.get(ATTR_MESSAGE), LOGBOOK_ENTRY_DOMAIN: entry_domain, @@ -399,18 +405,6 @@ def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool: ) -def _row_time_fired_isoformat(row: Row | EventAsRow) -> str: - """Convert the row timed_fired to isoformat.""" - return process_timestamp_to_utc_isoformat( - dt_util.utc_from_timestamp(row[TIME_FIRED_TS_POS]) or dt_util.utcnow() - ) - - -def _row_time_fired_timestamp(row: Row | EventAsRow) -> float: - """Convert the row timed_fired to timestamp.""" - return row[TIME_FIRED_TS_POS] or time.time() - - class EntityNameCache: """A cache to lookup the name for an entity. 
diff --git a/tests/components/logbook/common.py b/tests/components/logbook/common.py index c55b6230418..afa8b7fcde5 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -79,7 +79,7 @@ def mock_humanify(hass_, rows): event_cache, entity_name_cache, include_entity_name=True, - format_time=processor._row_time_fired_isoformat, + timestamp=False, ) context_augmenter = processor.ContextAugmenter(logbook_run) return list( From 4fd92c17f077bf842d054a48c2af6ecc48722b74 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 4 Aug 2024 17:06:32 -0500 Subject: [PATCH 0356/1635] Optimize logbook row matching (#123127) --- homeassistant/components/logbook/processor.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/logbook/processor.py b/homeassistant/components/logbook/processor.py index 05566e52509..426f33fadb2 100644 --- a/homeassistant/components/logbook/processor.py +++ b/homeassistant/components/logbook/processor.py @@ -332,7 +332,7 @@ class ContextAugmenter: if not (context_row := self._get_context_row(context_id_bin, row)): return - if _rows_match(row, context_row): + if row is context_row or _rows_ids_match(row, context_row): # This is the first event with the given ID. Was it directly caused by # a parent event? context_parent_id_bin = row[CONTEXT_PARENT_ID_BIN_POS] @@ -348,7 +348,7 @@ class ContextAugmenter: return # Ensure the (parent) context_event exists and is not the root cause of # this log entry. - if _rows_match(row, context_row): + if row is context_row or _rows_ids_match(row, context_row): return event_type = context_row[EVENT_TYPE_POS] # State change @@ -396,13 +396,9 @@ class ContextAugmenter: data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get(attr_entity_id) -def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool: +def _rows_ids_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool: """Check of rows match by using the same method as Events __hash__.""" - return bool( - row is other_row - or (row_id := row[ROW_ID_POS]) - and row_id == other_row[ROW_ID_POS] - ) + return bool((row_id := row[ROW_ID_POS]) and row_id == other_row[ROW_ID_POS]) class EntityNameCache: From 4d103c1fc24dc81011bf46cc3d0425686b47a67b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 5 Aug 2024 01:39:04 -0500 Subject: [PATCH 0357/1635] Bump aiohttp to 3.10.1 (#123159) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1cc6a0fa85d..3b251e91179 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.0 +aiohttp==3.10.1 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 93f8427ef7f..49346f90448 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.0", + "aiohttp==3.10.1", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index c851927f9c6..1beefe73914 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.0 +aiohttp==3.10.1 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From 7308912b391ad9feeea05bd97a7e782f3d1b32a1 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Mon, 5 Aug 2024 08:43:29 +0200 Subject: [PATCH 0358/1635] Catch exception in coordinator setup of IronOS integration (#123079) --- .../components/iron_os/coordinator.py | 6 ++++- tests/components/iron_os/test_init.py | 26 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 tests/components/iron_os/test_init.py diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index e8424478d86..aefb14b689b 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -46,4 +46,8 @@ class IronOSCoordinator(DataUpdateCoordinator[LiveDataResponse]): async def _async_setup(self) -> None: """Set up the coordinator.""" - self.device_info = await self.device.get_device_info() + try: + self.device_info = await self.device.get_device_info() + + except CommunicationError as e: + raise UpdateFailed("Cannot connect to device") from e diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py new file mode 100644 index 00000000000..fb0a782ea36 --- /dev/null +++ b/tests/components/iron_os/test_init.py @@ -0,0 +1,26 @@ +"""Test init of IronOS integration.""" + +from unittest.mock import AsyncMock + +from pynecil import CommunicationError +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("ble_device") +async def test_setup_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test config entry not ready.""" + mock_pynecil.get_device_info.side_effect = CommunicationError + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY From 31fd4efa368f5f05129c1493debcff1b5fb236c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Aug 2024 09:21:37 +0200 Subject: [PATCH 0359/1635] Bump 
actions/upload-artifact from 4.3.4 to 4.3.5 (#123170) --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 20 ++++++++++---------- .github/workflows/wheels.yml | 6 +++--- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index d0edc631762..7e3597e7289 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 23a8b10c0d3..8dfdb2fd7af 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -624,7 +624,7 @@ jobs: . venv/bin/activate pip-licenses --format=json --output-file=licenses.json - name: Upload licenses - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: licenses path: licenses.json @@ -834,7 +834,7 @@ jobs: . venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: pytest_buckets path: pytest_buckets.txt @@ -935,14 +935,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1061,7 +1061,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1069,7 +1069,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1188,7 +1188,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1196,7 +1196,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1330,14 +1330,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload 
pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 697535172d7..6c9cb07a180 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -82,14 +82,14 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: requirements_diff path: ./requirements_diff.txt @@ -101,7 +101,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.5 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt From d7e3df1974221957d3cbe7a36b1599e031e89a40 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Mon, 5 Aug 2024 17:59:33 +1000 Subject: [PATCH 0360/1635] Fix class attribute condition in Tesla Fleet (#123162) --- homeassistant/components/tesla_fleet/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 2c5ee1b5c75..8257bf75cd0 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -86,7 +86,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - vehicles: list[TeslaFleetVehicleData] = [] energysites: list[TeslaFleetEnergyData] = [] for product in products: - if "vin" in product and tesla.vehicle: + if "vin" in product and hasattr(tesla, "vehicle"): # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] @@ -111,7 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - device=device, ) ) - elif "energy_site_id" in product and tesla.energy: + elif "energy_site_id" in product and hasattr(tesla, "energy"): site_id = product["energy_site_id"] if not ( product["components"]["battery"] From b45fe0ec7351de8c463c699bcae1686cc4072360 Mon Sep 17 00:00:00 2001 From: Calvin Walton Date: Mon, 5 Aug 2024 04:02:15 -0400 Subject: [PATCH 0361/1635] Add Govee H612B to the Matter transition blocklist (#123163) --- homeassistant/components/matter/light.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 65c3a535216..d05a7c85f9d 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -54,6 +54,7 @@ TRANSITION_BLOCKLIST = ( (4488, 514, "1.0", "1.0.0"), (4488, 260, "1.0", "1.0.0"), (5010, 769, "3.0", "1.0.0"), + (4999, 24875, "1.0", "27.0"), (4999, 25057, "1.0", "27.0"), (4448, 36866, "V1", "V1.0.0.5"), (5009, 514, "1.0", "1.0.0"), From a7fbac51851a0481d7e5b4eb0e7390bde709909b Mon Sep 17 00:00:00 2001 From: dupondje Date: Mon, 5 
Aug 2024 10:32:58 +0200 Subject: [PATCH 0362/1635] dsmr: migrate hourly_gas_meter_reading to mbus device (#123149) --- homeassistant/components/dsmr/sensor.py | 4 +- tests/components/dsmr/test_mbus_migration.py | 100 +++++++++++++++++++ 2 files changed, 103 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index f794d1d05e9..b298ed5bfc0 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -439,7 +439,9 @@ def rename_old_gas_to_mbus( entries = er.async_entries_for_device(ent_reg, device_id) for entity in entries: - if entity.unique_id.endswith("belgium_5min_gas_meter_reading"): + if entity.unique_id.endswith( + "belgium_5min_gas_meter_reading" + ) or entity.unique_id.endswith("hourly_gas_meter_reading"): try: ent_reg.async_update_entity( entity.entity_id, diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index a28bc2c3a33..20b3d253f39 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -119,6 +119,106 @@ async def test_migrate_gas_to_mbus( ) +async def test_migrate_hourly_gas_to_mbus( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5", + "serial_id": "1234", + "serial_id_gas": "4730303738353635363037343639323231", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "4730303738353635363037343639323231_hourly_gas_meter_reading" + + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, mock_entry.entry_id)}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), + [{"value": "4730303738353635363037343639323231", "unit": ""}], + ), + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1722749707)}, + {"value": Decimal(778.963), "unit": "m3"}, + ], + ), + "MBUS_METER_READING", + ) + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + dev_entities = er.async_entries_for_device( + entity_registry, device.id, include_disabled_entities=True + ) + assert not dev_entities + + assert ( + 
entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "4730303738353635363037343639323231" + ) + == "sensor.gas_meter_reading" + ) + + async def test_migrate_gas_to_mbus_exists( hass: HomeAssistant, entity_registry: er.EntityRegistry, From d246d02ab8d028439ab56681657a6127c1589da5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20H=C3=B6rrle?= <7945681+CM000n@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:08:27 +0200 Subject: [PATCH 0363/1635] Add apsystems diagnostic binary sensors (#123045) * add diagnostic sensors * select output_data from data * split sensor and binary_sensor configurations * adjust module description * convert values to bool * add strings * add tests * add tests * update translations * remove already available _attr_has_entity_name * use dataclass instead of TypedDict * Update tests/components/apsystems/test_binary_sensor.py --------- Co-authored-by: Joost Lekkerkerker --- .../components/apsystems/__init__.py | 7 +- .../components/apsystems/binary_sensor.py | 102 +++++ .../components/apsystems/coordinator.py | 19 +- homeassistant/components/apsystems/sensor.py | 2 +- .../components/apsystems/strings.json | 14 + tests/components/apsystems/conftest.py | 8 +- .../snapshots/test_binary_sensor.ambr | 377 ++++++++++++++++++ .../apsystems/test_binary_sensor.py | 31 ++ 8 files changed, 553 insertions(+), 7 deletions(-) create mode 100644 homeassistant/components/apsystems/binary_sensor.py create mode 100644 tests/components/apsystems/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/apsystems/test_binary_sensor.py diff --git a/homeassistant/components/apsystems/__init__.py b/homeassistant/components/apsystems/__init__.py index 91650201a87..372ce52e049 100644 --- a/homeassistant/components/apsystems/__init__.py +++ b/homeassistant/components/apsystems/__init__.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from .const import DEFAULT_PORT from .coordinator import ApSystemsDataCoordinator -PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.NUMBER, + Platform.SENSOR, + Platform.SWITCH, +] @dataclass diff --git a/homeassistant/components/apsystems/binary_sensor.py b/homeassistant/components/apsystems/binary_sensor.py new file mode 100644 index 00000000000..528203dc2d9 --- /dev/null +++ b/homeassistant/components/apsystems/binary_sensor.py @@ -0,0 +1,102 @@ +"""The read-only binary sensors for APsystems local API integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from APsystemsEZ1 import ReturnAlarmInfo + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import ApSystemsConfigEntry, ApSystemsData +from .coordinator import ApSystemsDataCoordinator +from .entity import ApSystemsEntity + + +@dataclass(frozen=True, kw_only=True) +class ApsystemsLocalApiBinarySensorDescription(BinarySensorEntityDescription): + """Describes Apsystens Inverter binary sensor entity.""" + + is_on: Callable[[ReturnAlarmInfo], bool | None] + + +BINARY_SENSORS: tuple[ApsystemsLocalApiBinarySensorDescription, ...] = ( + ApsystemsLocalApiBinarySensorDescription( + key="off_grid_status", + translation_key="off_grid_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: bool(c.og), + ), + ApsystemsLocalApiBinarySensorDescription( + key="dc_1_short_circuit_error_status", + translation_key="dc_1_short_circuit_error_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: bool(c.isce1), + ), + ApsystemsLocalApiBinarySensorDescription( + key="dc_2_short_circuit_error_status", + translation_key="dc_2_short_circuit_error_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: bool(c.isce2), + ), + ApsystemsLocalApiBinarySensorDescription( + key="output_fault_status", + translation_key="output_fault_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: bool(c.oe), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ApSystemsConfigEntry, + add_entities: AddEntitiesCallback, +) -> None: + """Set up the binary sensor platform.""" + config = config_entry.runtime_data + + add_entities( + ApSystemsBinarySensorWithDescription( + data=config, + entity_description=desc, + ) + for desc in BINARY_SENSORS + ) + + +class ApSystemsBinarySensorWithDescription( + CoordinatorEntity[ApSystemsDataCoordinator], ApSystemsEntity, BinarySensorEntity +): + """Base binary sensor to be used with description.""" + + entity_description: ApsystemsLocalApiBinarySensorDescription + + def __init__( + self, + data: ApSystemsData, + entity_description: ApsystemsLocalApiBinarySensorDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(data.coordinator) + ApSystemsEntity.__init__(self, data) + self.entity_description = entity_description + self._attr_unique_id = f"{data.device_id}_{entity_description.key}" + + @property + def is_on(self) -> bool | None: + """Return value of sensor.""" + return self.entity_description.is_on(self.coordinator.data.alarm_info) diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index f2d076ce3fd..96956bafc3e 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -2,9 +2,10 @@ from __future__ import annotations +from dataclasses import dataclass from datetime import timedelta -from APsystemsEZ1 import APsystemsEZ1M, ReturnOutputData +from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator @@ -12,7 +13,15 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import LOGGER -class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]): +@dataclass +class ApSystemsSensorData: + """Representing different Apsystems sensor data.""" + + output_data: ReturnOutputData + alarm_info: 
ReturnAlarmInfo + + +class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): """Coordinator used for all sensors.""" def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None: @@ -25,5 +34,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]): ) self.api = api - async def _async_update_data(self) -> ReturnOutputData: - return await self.api.get_output_data() + async def _async_update_data(self) -> ApSystemsSensorData: + output_data = await self.api.get_output_data() + alarm_info = await self.api.get_alarm_info() + return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info) diff --git a/homeassistant/components/apsystems/sensor.py b/homeassistant/components/apsystems/sensor.py index 637def4e418..afeb9d071ab 100644 --- a/homeassistant/components/apsystems/sensor.py +++ b/homeassistant/components/apsystems/sensor.py @@ -148,4 +148,4 @@ class ApSystemsSensorWithDescription( @property def native_value(self) -> StateType: """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.output_data) diff --git a/homeassistant/components/apsystems/strings.json b/homeassistant/components/apsystems/strings.json index 18200f7b49d..e02f86c2730 100644 --- a/homeassistant/components/apsystems/strings.json +++ b/homeassistant/components/apsystems/strings.json @@ -19,6 +19,20 @@ } }, "entity": { + "binary_sensor": { + "off_grid_status": { + "name": "Off grid status" + }, + "dc_1_short_circuit_error_status": { + "name": "DC 1 short circuit error status" + }, + "dc_2_short_circuit_error_status": { + "name": "DC 2 short circuit error status" + }, + "output_fault_status": { + "name": "Output fault status" + } + }, "sensor": { "total_power": { "name": "Total power" diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index c191c7ca2dc..7e6140e8279 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData, Status +from APsystemsEZ1 import ReturnAlarmInfo, ReturnDeviceInfo, ReturnOutputData, Status import pytest from homeassistant.components.apsystems.const import DOMAIN @@ -52,6 +52,12 @@ def mock_apsystems() -> Generator[MagicMock]: e2=6.0, te2=7.0, ) + mock_api.get_alarm_info.return_value = ReturnAlarmInfo( + og=Status.normal, + isce1=Status.alarm, + isce2=Status.normal, + oe=Status.alarm, + ) mock_api.get_device_power_status.return_value = Status.normal yield mock_api diff --git a/tests/components/apsystems/snapshots/test_binary_sensor.ambr b/tests/components/apsystems/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..bb06b019f31 --- /dev/null +++ b/tests/components/apsystems/snapshots/test_binary_sensor.ambr @@ -0,0 +1,377 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC 1 short circuit error status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_1_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_1_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title DC 1 short circuit error status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC 2 short circuit error status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_2_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_2_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title DC 2 short circuit error status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_off_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_off_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off grid status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_status', + 'unique_id': 'MY_SERIAL_NUMBER_off_grid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_off_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Off grid status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_off_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_output_fault_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_output_fault_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Output fault status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'output_fault_status', + 'unique_id': 'MY_SERIAL_NUMBER_output_fault_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_output_fault_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Output fault status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_output_fault_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_status', + 'unique_id': 'MY_SERIAL_NUMBER_off_grid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_problem_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_1_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_1_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_problem_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_problem_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_2_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_2_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_problem_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_problem_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'output_fault_status', + 'unique_id': 'MY_SERIAL_NUMBER_output_fault_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_problem_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_problem_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/apsystems/test_binary_sensor.py b/tests/components/apsystems/test_binary_sensor.py new file mode 100644 index 00000000000..0c6fbffc93c --- /dev/null +++ b/tests/components/apsystems/test_binary_sensor.py @@ -0,0 +1,31 @@ +"""Test the APSystem binary sensor module.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.apsystems.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) From e9e357b12efcccf37e7a6b0ed6fd791fb8cb38e6 Mon Sep 17 00:00:00 2001 From: "Barry vd. 
Heuvel" Date: Mon, 5 Aug 2024 11:18:04 +0200 Subject: [PATCH 0364/1635] Add spaces for readability in licenses.py (#123173) --- script/licenses.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/script/licenses.py b/script/licenses.py index 3b9ec389b08..f6578bf3f65 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -208,7 +208,7 @@ def main() -> int: if previous_unapproved_version < package.version: if approved: print( - "Approved license detected for" + "Approved license detected for " f"{package.name}@{package.version}: {package.license}" ) print("Please remove the package from the TODO list.") @@ -222,14 +222,14 @@ def main() -> int: exit_code = 1 elif not approved and package.name not in EXCEPTIONS: print( - "We could not detect an OSI-approved license for" + "We could not detect an OSI-approved license for " f"{package.name}@{package.version}: {package.license}" ) print() exit_code = 1 elif approved and package.name in EXCEPTIONS: print( - "Approved license detected for" + "Approved license detected for " f"{package.name}@{package.version}: {package.license}" ) print(f"Please remove the package from the EXCEPTIONS list: {package.name}") From 1163cc7caba5c15ba1aa4580d3f6544bcb794c4d Mon Sep 17 00:00:00 2001 From: Clifford Roche <1007595+cmroche@users.noreply.github.com> Date: Mon, 5 Aug 2024 05:18:34 -0400 Subject: [PATCH 0365/1635] Update greeclimate to 2.0.0 (#121030) Co-authored-by: Joostlek --- homeassistant/components/gree/const.py | 2 + homeassistant/components/gree/coordinator.py | 65 ++++++- homeassistant/components/gree/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/gree/test_bridge.py | 35 +++- tests/components/gree/test_climate.py | 187 ++++++++++--------- 7 files changed, 190 insertions(+), 105 deletions(-) diff --git a/homeassistant/components/gree/const.py b/homeassistant/components/gree/const.py index 46479210921..f926eb1c53e 100644 --- a/homeassistant/components/gree/const.py +++ b/homeassistant/components/gree/const.py @@ -18,3 +18,5 @@ FAN_MEDIUM_HIGH = "medium high" MAX_ERRORS = 2 TARGET_TEMPERATURE_STEP = 1 + +UPDATE_INTERVAL = 60 diff --git a/homeassistant/components/gree/coordinator.py b/homeassistant/components/gree/coordinator.py index 1bccf3bbc48..ae8b22706ef 100644 --- a/homeassistant/components/gree/coordinator.py +++ b/homeassistant/components/gree/coordinator.py @@ -2,16 +2,20 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta import logging +from typing import Any from greeclimate.device import Device, DeviceInfo from greeclimate.discovery import Discovery, Listener from greeclimate.exceptions import DeviceNotBoundError, DeviceTimeoutError +from greeclimate.network import Response from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.json import json_dumps from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.dt import utcnow from .const import ( COORDINATORS, @@ -19,12 +23,13 @@ from .const import ( DISPATCH_DEVICE_DISCOVERED, DOMAIN, MAX_ERRORS, + UPDATE_INTERVAL, ) _LOGGER = logging.getLogger(__name__) -class DeviceDataUpdateCoordinator(DataUpdateCoordinator): +class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Manages polling for state changes from the device.""" def __init__(self, hass: HomeAssistant, device: Device) -> None: @@ -34,28 
+39,68 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator): hass, _LOGGER, name=f"{DOMAIN}-{device.device_info.name}", - update_interval=timedelta(seconds=60), + update_interval=timedelta(seconds=UPDATE_INTERVAL), + always_update=False, ) self.device = device - self._error_count = 0 + self.device.add_handler(Response.DATA, self.device_state_updated) + self.device.add_handler(Response.RESULT, self.device_state_updated) - async def _async_update_data(self): + self._error_count: int = 0 + self._last_response_time: datetime = utcnow() + self._last_error_time: datetime | None = None + + def device_state_updated(self, *args: Any) -> None: + """Handle device state updates.""" + _LOGGER.debug("Device state updated: %s", json_dumps(args)) + self._error_count = 0 + self._last_response_time = utcnow() + self.async_set_updated_data(self.device.raw_properties) + + async def _async_update_data(self) -> dict[str, Any]: """Update the state of the device.""" + _LOGGER.debug( + "Updating device state: %s, error count: %d", self.name, self._error_count + ) try: await self.device.update_state() except DeviceNotBoundError as error: - raise UpdateFailed(f"Device {self.name} is unavailable") from error + raise UpdateFailed( + f"Device {self.name} is unavailable, device is not bound." + ) from error except DeviceTimeoutError as error: self._error_count += 1 # Under normal conditions GREE units timeout every once in a while if self.last_update_success and self._error_count >= MAX_ERRORS: _LOGGER.warning( - "Device is unavailable: %s (%s)", - self.name, - self.device.device_info, + "Device %s is unavailable: %s", self.name, self.device.device_info ) - raise UpdateFailed(f"Device {self.name} is unavailable") from error + raise UpdateFailed( + f"Device {self.name} is unavailable, could not send update request" + ) from error + else: + # raise update failed if time for more than MAX_ERRORS has passed since last update + now = utcnow() + elapsed_success = now - self._last_response_time + if self.update_interval and elapsed_success >= self.update_interval: + if not self._last_error_time or ( + (now - self.update_interval) >= self._last_error_time + ): + self._last_error_time = now + self._error_count += 1 + + _LOGGER.warning( + "Device %s is unresponsive for %s seconds", + self.name, + elapsed_success, + ) + if self.last_update_success and self._error_count >= MAX_ERRORS: + raise UpdateFailed( + f"Device {self.name} is unresponsive for too long and now unavailable" + ) + + return self.device.raw_properties async def push_state_update(self): """Send state updates to the physical device.""" diff --git a/homeassistant/components/gree/manifest.json b/homeassistant/components/gree/manifest.json index a7c884c4042..ca1c4b5b754 100644 --- a/homeassistant/components/gree/manifest.json +++ b/homeassistant/components/gree/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/gree", "iot_class": "local_polling", "loggers": ["greeclimate"], - "requirements": ["greeclimate==1.4.6"] + "requirements": ["greeclimate==2.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 559290fd069..2c3889c4c0b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1010,7 +1010,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.6 +greeclimate==2.0.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4ff84200d77..38739c86a3b 100644 --- 
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -854,7 +854,7 @@ govee-local-api==1.5.1 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.6 +greeclimate==2.0.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index 37b0b0dc15e..32372bebf37 100644 --- a/tests/components/gree/test_bridge.py +++ b/tests/components/gree/test_bridge.py @@ -5,8 +5,12 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN -from homeassistant.components.gree.const import COORDINATORS, DOMAIN as GREE +from homeassistant.components.climate import DOMAIN, HVACMode +from homeassistant.components.gree.const import ( + COORDINATORS, + DOMAIN as GREE, + UPDATE_INTERVAL, +) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -69,3 +73,30 @@ async def test_discovery_after_setup( device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" assert device_infos[1].ip == "2.2.2.1" + + +async def test_coordinator_updates( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test gree devices update their state.""" + await async_setup_gree(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all(DOMAIN)) == 1 + + callback = device().add_handler.call_args_list[0][0][1] + + async def fake_update_state(*args) -> None: + """Fake update state.""" + device().power = True + callback() + + device().update_state.side_effect = fake_update_state + + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_1) + assert state is not None + assert state.state != HVACMode.OFF diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index e6f24ade1aa..1bf49bbca26 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -48,7 +48,12 @@ from homeassistant.components.gree.climate import ( HVAC_MODES_REVERSE, GreeClimateEntity, ) -from homeassistant.components.gree.const import FAN_MEDIUM_HIGH, FAN_MEDIUM_LOW +from homeassistant.components.gree.const import ( + DISCOVERY_SCAN_INTERVAL, + FAN_MEDIUM_HIGH, + FAN_MEDIUM_LOW, + UPDATE_INTERVAL, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -61,7 +66,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er -import homeassistant.util.dt as dt_util from .common import async_setup_gree, build_device_mock @@ -70,12 +74,6 @@ from tests.common import async_fire_time_changed ENTITY_ID = f"{DOMAIN}.fake_device_1" -@pytest.fixture -def mock_now(): - """Fixture for dtutil.now.""" - return dt_util.utcnow() - - async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: """Test discovery is only ever called once.""" await async_setup_gree(hass) @@ -104,7 +102,7 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: async def test_discovery_setup_connection_error( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test gree integration is setup.""" MockDevice1 = build_device_mock( @@ -126,7 +124,7 
@@ async def test_discovery_setup_connection_error( async def test_discovery_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices don't change after multiple discoveries.""" MockDevice1 = build_device_mock( @@ -142,8 +140,7 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert discovery.return_value.scan_count == 1 assert len(hass.states.async_all(DOMAIN)) == 2 @@ -152,9 +149,8 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -162,7 +158,7 @@ async def test_discovery_after_setup( async def test_discovery_add_device_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after initial setup.""" MockDevice1 = build_device_mock( @@ -178,6 +174,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.side_effect = [MockDevice1] + await async_setup_gree(hass) # Update 1 + await async_setup_gree(hass) await hass.async_block_till_done() @@ -188,9 +186,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice2] device.side_effect = [MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -198,7 +195,7 @@ async def test_discovery_add_device_after_setup( async def test_discovery_device_bind_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after a late device bind.""" MockDevice1 = build_device_mock( @@ -210,8 +207,7 @@ async def test_discovery_device_bind_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.return_value = MockDevice1 - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert len(hass.states.async_all(DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) @@ -222,9 +218,8 @@ async def test_discovery_device_bind_after_setup( MockDevice1.bind.side_effect = None MockDevice1.update_state.side_effect = None - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -232,7 +227,7 @@ async def test_discovery_device_bind_after_setup( async def test_update_connection_failure( 
- hass: HomeAssistant, freezer: FrozenDateTimeFactory, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure exception.""" device().update_state.side_effect = [ @@ -241,36 +236,32 @@ async def test_update_connection_failure( DeviceTimeoutError, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - # First update to make the device available + # Update 2 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + # Update 3 + await run_update() - next_update = mock_now + timedelta(minutes=15) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - # Then two more update failures to make the device unavailable + # Update 4 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE -async def test_update_connection_failure_recovery( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now +async def test_update_connection_send_failure_recovery( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure recovery.""" device().update_state.side_effect = [ @@ -279,31 +270,27 @@ async def test_update_connection_failure_recovery( DEFAULT_MOCK, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - # First update becomes unavailable - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() + await run_update() # Update 2 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE - # Second update restores the connection - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - + await run_update() # Update 3 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE async def test_update_unhandled_exception( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection unhandled response exception.""" device().update_state.side_effect = [DEFAULT_MOCK, Exception] @@ -314,9 +301,8 @@ async def test_update_unhandled_exception( assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -325,15 +311,13 @@ 
async def test_update_unhandled_exception( async def test_send_command_device_timeout( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test for sending power on command to the device with a device timeout.""" await async_setup_gree(hass) - # First update to make the device available - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -355,7 +339,40 @@ async def test_send_command_device_timeout( assert state.state != STATE_UNAVAILABLE -async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) -> None: +async def test_unresponsive_device( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test for unresponsive device.""" + await async_setup_gree(hass) + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Update 2 + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + # Update 3, 4, 5 + await run_update() + await run_update() + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state == STATE_UNAVAILABLE + + # Receiving update from device will reset the state to available again + device().device_state_updated("test") + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + +async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: """Test for sending power on command to the device.""" await async_setup_gree(hass) @@ -372,7 +389,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) - async def test_send_power_off_device_timeout( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test for sending power off command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -543,9 +560,7 @@ async def test_update_target_temperature( @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) -async def test_send_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset -) -> None: +async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -561,9 +576,7 @@ async def test_send_preset_mode( assert state.attributes.get(ATTR_PRESET_MODE) == preset -async def test_send_invalid_preset_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -584,7 +597,7 @@ async def test_send_invalid_preset_mode( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_send_preset_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> None: """Test for sending preset 
mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -607,7 +620,7 @@ async def test_send_preset_mode_device_timeout( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_update_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> None: """Test for updating preset mode from the device.""" device().steady_heat = preset == PRESET_AWAY @@ -634,7 +647,7 @@ async def test_update_preset_mode( ], ) async def test_send_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device.""" await async_setup_gree(hass) @@ -656,7 +669,7 @@ async def test_send_hvac_mode( [HVACMode.AUTO, HVACMode.COOL, HVACMode.DRY, HVACMode.FAN_ONLY, HVACMode.HEAT], ) async def test_send_hvac_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -687,7 +700,7 @@ async def test_send_hvac_mode_device_timeout( ], ) async def test_update_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for updating hvac mode from the device.""" device().power = hvac_mode != HVACMode.OFF @@ -704,9 +717,7 @@ async def test_update_hvac_mode( "fan_mode", [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) -async def test_send_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode -) -> None: +async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -722,9 +733,7 @@ async def test_send_fan_mode( assert state.attributes.get(ATTR_FAN_MODE) == fan_mode -async def test_send_invalid_fan_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -746,7 +755,7 @@ async def test_send_invalid_fan_mode( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_send_fan_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for sending fan mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -770,7 +779,7 @@ async def test_send_fan_mode_device_timeout( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_update_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for updating fan mode from the device.""" device().fan_speed = FAN_MODES_REVERSE.get(fan_mode) @@ -786,7 +795,7 @@ async def test_update_fan_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the 
device.""" await async_setup_gree(hass) @@ -803,9 +812,7 @@ async def test_send_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode -async def test_send_invalid_swing_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending swing mode command to the device.""" await async_setup_gree(hass) @@ -826,7 +833,7 @@ async def test_send_invalid_swing_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -849,7 +856,7 @@ async def test_send_swing_mode_device_timeout( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_update_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for updating swing mode from the device.""" device().horizontal_swing = ( From 3257bdeed2f4ab11a793fdc4237c17119e76ab25 Mon Sep 17 00:00:00 2001 From: "David F. Mulcahey" Date: Mon, 5 Aug 2024 05:19:57 -0400 Subject: [PATCH 0366/1635] Bump ZHA lib to 0.0.27 (#123125) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 6e35339c53f..d7dc53b5167 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.25"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.27"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index 2c3889c4c0b..9a0164f6c35 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2989,7 +2989,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.25 +zha==0.0.27 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 38739c86a3b..b419e6c6430 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2366,7 +2366,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.25 +zha==0.0.27 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 From 67e3139dcfb8b4a48e18e10b9036679658455363 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Mon, 5 Aug 2024 11:22:13 +0200 Subject: [PATCH 0367/1635] Clean up useless logging handler setting (#120974) Setting level of the handler does effectively nothing, because HomeAssistantQueueHandler ignores this setting. Also make the convention of getting the root logger uniform. 
--- homeassistant/bootstrap.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 43f4d451497..742a293e4c4 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -586,10 +586,10 @@ async def async_enable_logging( logging.getLogger("aiohttp.access").setLevel(logging.WARNING) logging.getLogger("httpx").setLevel(logging.WARNING) - sys.excepthook = lambda *args: logging.getLogger(None).exception( + sys.excepthook = lambda *args: logging.getLogger().exception( "Uncaught exception", exc_info=args ) - threading.excepthook = lambda args: logging.getLogger(None).exception( + threading.excepthook = lambda args: logging.getLogger().exception( "Uncaught thread exception", exc_info=( # type: ignore[arg-type] args.exc_type, @@ -616,10 +616,9 @@ async def async_enable_logging( _create_log_file, err_log_path, log_rotate_days ) - err_handler.setLevel(logging.INFO if verbose else logging.WARNING) err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME)) - logger = logging.getLogger("") + logger = logging.getLogger() logger.addHandler(err_handler) logger.setLevel(logging.INFO if verbose else logging.WARNING) From ab811f70b18d72a7a550b61b76e415a0551583e7 Mon Sep 17 00:00:00 2001 From: Chris Buckley Date: Mon, 5 Aug 2024 10:24:49 +0100 Subject: [PATCH 0368/1635] Todoist service validation error consistency (#123122) --- homeassistant/components/todoist/calendar.py | 13 +++++++++--- homeassistant/components/todoist/strings.json | 3 +++ tests/components/todoist/test_calendar.py | 20 +++++++++++++++++++ 3 files changed, 33 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index f89c09451b6..2acd4ea6dc6 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -21,7 +21,7 @@ from homeassistant.components.calendar import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, CONF_NAME, CONF_TOKEN, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -215,14 +215,21 @@ def async_register_services( # noqa: C901 async def handle_new_task(call: ServiceCall) -> None: # noqa: C901 """Call when a user creates a new Todoist Task from Home Assistant.""" - project_name = call.data[PROJECT_NAME].lower() + project_name = call.data[PROJECT_NAME] projects = await coordinator.async_get_projects() project_id: str | None = None for project in projects: if project_name == project.name.lower(): project_id = project.id + break if project_id is None: - raise HomeAssistantError(f"Invalid project name '{project_name}'") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="project_invalid", + translation_placeholders={ + "project": project_name, + }, + ) # Optional section within project section_id: str | None = None diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 55b7ef62b58..5b083ac58bf 100644 --- a/homeassistant/components/todoist/strings.json +++ 
b/homeassistant/components/todoist/strings.json @@ -21,6 +21,9 @@ } }, "exceptions": { + "project_invalid": { + "message": "Invalid project name \"{project}\"" + }, "section_invalid": { "message": "Project \"{project}\" has no section \"{section}\"" } diff --git a/tests/components/todoist/test_calendar.py b/tests/components/todoist/test_calendar.py index 680406096cc..071a14a70ae 100644 --- a/tests/components/todoist/test_calendar.py +++ b/tests/components/todoist/test_calendar.py @@ -23,6 +23,7 @@ from homeassistant.components.todoist.const import ( ) from homeassistant.const import CONF_TOKEN, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util @@ -270,6 +271,25 @@ async def test_create_task_service_call(hass: HomeAssistant, api: AsyncMock) -> ) +async def test_create_task_service_call_raises( + hass: HomeAssistant, api: AsyncMock +) -> None: + """Test adding an item to an invalid project raises an error.""" + + with pytest.raises(ServiceValidationError, match="project_invalid"): + await hass.services.async_call( + DOMAIN, + SERVICE_NEW_TASK, + { + ASSIGNEE: "user", + CONTENT: "task", + LABELS: ["Label1"], + PROJECT_NAME: "Missing Project", + }, + blocking=True, + ) + + async def test_create_task_service_call_with_section( hass: HomeAssistant, api: AsyncMock ) -> None: From 5b7fd29797881b56c08034e3edef30b5ffccc6c9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 5 Aug 2024 04:33:37 -0500 Subject: [PATCH 0369/1635] Improve performance of logbook processor humanify (#123157) --- homeassistant/components/logbook/processor.py | 101 ++++++++---------- 1 file changed, 45 insertions(+), 56 deletions(-) diff --git a/homeassistant/components/logbook/processor.py b/homeassistant/components/logbook/processor.py index 426f33fadb2..77aa71740f1 100644 --- a/homeassistant/components/logbook/processor.py +++ b/homeassistant/components/logbook/processor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from datetime import datetime as dt import logging import time -from typing import Any +from typing import TYPE_CHECKING, Any from sqlalchemy.engine import Result from sqlalchemy.engine.row import Row @@ -211,35 +211,30 @@ def _humanify( continuous_sensors: dict[str, bool] = {} context_lookup = logbook_run.context_lookup external_events = logbook_run.external_events - event_cache = logbook_run.event_cache - entity_name_cache = logbook_run.entity_name_cache + event_cache_get = logbook_run.event_cache.get + entity_name_cache_get = logbook_run.entity_name_cache.get include_entity_name = logbook_run.include_entity_name timestamp = logbook_run.timestamp memoize_new_contexts = logbook_run.memoize_new_contexts + get_context = context_augmenter.get_context + context_id_bin: bytes + data: dict[str, Any] # Process rows for row in rows: - context_id_bin: bytes = row[CONTEXT_ID_BIN_POS] + context_id_bin = row[CONTEXT_ID_BIN_POS] if memoize_new_contexts and context_id_bin not in context_lookup: context_lookup[context_id_bin] = row if row[CONTEXT_ONLY_POS]: continue event_type = row[EVENT_TYPE_POS] - if event_type == EVENT_CALL_SERVICE: continue - time_fired_ts = row[TIME_FIRED_TS_POS] - if timestamp: - when = time_fired_ts or time.time() - else: - when = process_timestamp_to_utc_isoformat( - dt_util.utc_from_timestamp(time_fired_ts) or dt_util.utcnow() - ) - if event_type is 
PSEUDO_EVENT_STATE_CHANGED: entity_id = row[ENTITY_ID_POS] - assert entity_id is not None + if TYPE_CHECKING: + assert entity_id is not None # Skip continuous sensors if ( is_continuous := continuous_sensors.get(entity_id) @@ -250,34 +245,27 @@ def _humanify( continue data = { - LOGBOOK_ENTRY_WHEN: when, LOGBOOK_ENTRY_STATE: row[STATE_POS], LOGBOOK_ENTRY_ENTITY_ID: entity_id, } if include_entity_name: - data[LOGBOOK_ENTRY_NAME] = entity_name_cache.get(entity_id) + data[LOGBOOK_ENTRY_NAME] = entity_name_cache_get(entity_id) if icon := row[ICON_POS]: data[LOGBOOK_ENTRY_ICON] = icon - context_augmenter.augment(data, row, context_id_bin) - yield data - elif event_type in external_events: domain, describe_event = external_events[event_type] try: - data = describe_event(event_cache.get(row)) + data = describe_event(event_cache_get(row)) except Exception: _LOGGER.exception( "Error with %s describe event for %s", domain, event_type ) continue - data[LOGBOOK_ENTRY_WHEN] = when data[LOGBOOK_ENTRY_DOMAIN] = domain - context_augmenter.augment(data, row, context_id_bin) - yield data elif event_type == EVENT_LOGBOOK_ENTRY: - event = event_cache.get(row) + event = event_cache_get(row) if not (event_data := event.data): continue entry_domain = event_data.get(ATTR_DOMAIN) @@ -285,14 +273,41 @@ def _humanify( if entry_domain is None and entry_entity_id is not None: entry_domain = split_entity_id(str(entry_entity_id))[0] data = { - LOGBOOK_ENTRY_WHEN: when, LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME), LOGBOOK_ENTRY_MESSAGE: event_data.get(ATTR_MESSAGE), LOGBOOK_ENTRY_DOMAIN: entry_domain, LOGBOOK_ENTRY_ENTITY_ID: entry_entity_id, } - context_augmenter.augment(data, row, context_id_bin) - yield data + + else: + continue + + time_fired_ts = row[TIME_FIRED_TS_POS] + if timestamp: + when = time_fired_ts or time.time() + else: + when = process_timestamp_to_utc_isoformat( + dt_util.utc_from_timestamp(time_fired_ts) or dt_util.utcnow() + ) + data[LOGBOOK_ENTRY_WHEN] = when + + if context_user_id_bin := row[CONTEXT_USER_ID_BIN_POS]: + data[CONTEXT_USER_ID] = bytes_to_uuid_hex_or_none(context_user_id_bin) + + # Augment context if its available but not if the context is the same as the row + # or if the context is the parent of the row + if (context_row := get_context(context_id_bin, row)) and not ( + (row is context_row or _rows_ids_match(row, context_row)) + and ( + not (context_parent := row[CONTEXT_PARENT_ID_BIN_POS]) + or not (context_row := get_context(context_parent, context_row)) + or row is context_row + or _rows_ids_match(row, context_row) + ) + ): + context_augmenter.augment(data, context_row) + + yield data class ContextAugmenter: @@ -306,8 +321,8 @@ class ContextAugmenter: self.event_cache = logbook_run.event_cache self.include_entity_name = logbook_run.include_entity_name - def _get_context_row( - self, context_id_bin: bytes | None, row: Row | EventAsRow + def get_context( + self, context_id_bin: bytes | None, row: Row | EventAsRow | None ) -> Row | EventAsRow | None: """Get the context row from the id or row context.""" if context_id_bin is not None and ( @@ -322,34 +337,8 @@ class ContextAugmenter: return async_event_to_row(origin_event) return None - def augment( - self, data: dict[str, Any], row: Row | EventAsRow, context_id_bin: bytes | None - ) -> None: + def augment(self, data: dict[str, Any], context_row: Row | EventAsRow) -> None: """Augment data from the row and cache.""" - if context_user_id_bin := row[CONTEXT_USER_ID_BIN_POS]: - data[CONTEXT_USER_ID] = 
bytes_to_uuid_hex_or_none(context_user_id_bin) - - if not (context_row := self._get_context_row(context_id_bin, row)): - return - - if row is context_row or _rows_ids_match(row, context_row): - # This is the first event with the given ID. Was it directly caused by - # a parent event? - context_parent_id_bin = row[CONTEXT_PARENT_ID_BIN_POS] - if ( - not context_parent_id_bin - or ( - context_row := self._get_context_row( - context_parent_id_bin, context_row - ) - ) - is None - ): - return - # Ensure the (parent) context_event exists and is not the root cause of - # this log entry. - if row is context_row or _rows_ids_match(row, context_row): - return event_type = context_row[EVENT_TYPE_POS] # State change if context_entity_id := context_row[ENTITY_ID_POS]: From 7b1bf82e3c0179637cb43d4fcc5cd3a704122632 Mon Sep 17 00:00:00 2001 From: Clifford Roche <1007595+cmroche@users.noreply.github.com> Date: Mon, 5 Aug 2024 05:18:34 -0400 Subject: [PATCH 0370/1635] Update greeclimate to 2.0.0 (#121030) Co-authored-by: Joostlek --- homeassistant/components/gree/const.py | 2 + homeassistant/components/gree/coordinator.py | 65 ++++++- homeassistant/components/gree/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/gree/test_bridge.py | 35 +++- tests/components/gree/test_climate.py | 187 ++++++++++--------- 7 files changed, 190 insertions(+), 105 deletions(-) diff --git a/homeassistant/components/gree/const.py b/homeassistant/components/gree/const.py index 46479210921..f926eb1c53e 100644 --- a/homeassistant/components/gree/const.py +++ b/homeassistant/components/gree/const.py @@ -18,3 +18,5 @@ FAN_MEDIUM_HIGH = "medium high" MAX_ERRORS = 2 TARGET_TEMPERATURE_STEP = 1 + +UPDATE_INTERVAL = 60 diff --git a/homeassistant/components/gree/coordinator.py b/homeassistant/components/gree/coordinator.py index 1bccf3bbc48..ae8b22706ef 100644 --- a/homeassistant/components/gree/coordinator.py +++ b/homeassistant/components/gree/coordinator.py @@ -2,16 +2,20 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta import logging +from typing import Any from greeclimate.device import Device, DeviceInfo from greeclimate.discovery import Discovery, Listener from greeclimate.exceptions import DeviceNotBoundError, DeviceTimeoutError +from greeclimate.network import Response from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.json import json_dumps from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.dt import utcnow from .const import ( COORDINATORS, @@ -19,12 +23,13 @@ from .const import ( DISPATCH_DEVICE_DISCOVERED, DOMAIN, MAX_ERRORS, + UPDATE_INTERVAL, ) _LOGGER = logging.getLogger(__name__) -class DeviceDataUpdateCoordinator(DataUpdateCoordinator): +class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Manages polling for state changes from the device.""" def __init__(self, hass: HomeAssistant, device: Device) -> None: @@ -34,28 +39,68 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator): hass, _LOGGER, name=f"{DOMAIN}-{device.device_info.name}", - update_interval=timedelta(seconds=60), + update_interval=timedelta(seconds=UPDATE_INTERVAL), + always_update=False, ) self.device = device - self._error_count = 0 + self.device.add_handler(Response.DATA, self.device_state_updated) + self.device.add_handler(Response.RESULT, self.device_state_updated) 
- async def _async_update_data(self): + self._error_count: int = 0 + self._last_response_time: datetime = utcnow() + self._last_error_time: datetime | None = None + + def device_state_updated(self, *args: Any) -> None: + """Handle device state updates.""" + _LOGGER.debug("Device state updated: %s", json_dumps(args)) + self._error_count = 0 + self._last_response_time = utcnow() + self.async_set_updated_data(self.device.raw_properties) + + async def _async_update_data(self) -> dict[str, Any]: """Update the state of the device.""" + _LOGGER.debug( + "Updating device state: %s, error count: %d", self.name, self._error_count + ) try: await self.device.update_state() except DeviceNotBoundError as error: - raise UpdateFailed(f"Device {self.name} is unavailable") from error + raise UpdateFailed( + f"Device {self.name} is unavailable, device is not bound." + ) from error except DeviceTimeoutError as error: self._error_count += 1 # Under normal conditions GREE units timeout every once in a while if self.last_update_success and self._error_count >= MAX_ERRORS: _LOGGER.warning( - "Device is unavailable: %s (%s)", - self.name, - self.device.device_info, + "Device %s is unavailable: %s", self.name, self.device.device_info ) - raise UpdateFailed(f"Device {self.name} is unavailable") from error + raise UpdateFailed( + f"Device {self.name} is unavailable, could not send update request" + ) from error + else: + # raise update failed if time for more than MAX_ERRORS has passed since last update + now = utcnow() + elapsed_success = now - self._last_response_time + if self.update_interval and elapsed_success >= self.update_interval: + if not self._last_error_time or ( + (now - self.update_interval) >= self._last_error_time + ): + self._last_error_time = now + self._error_count += 1 + + _LOGGER.warning( + "Device %s is unresponsive for %s seconds", + self.name, + elapsed_success, + ) + if self.last_update_success and self._error_count >= MAX_ERRORS: + raise UpdateFailed( + f"Device {self.name} is unresponsive for too long and now unavailable" + ) + + return self.device.raw_properties async def push_state_update(self): """Send state updates to the physical device.""" diff --git a/homeassistant/components/gree/manifest.json b/homeassistant/components/gree/manifest.json index a7c884c4042..ca1c4b5b754 100644 --- a/homeassistant/components/gree/manifest.json +++ b/homeassistant/components/gree/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/gree", "iot_class": "local_polling", "loggers": ["greeclimate"], - "requirements": ["greeclimate==1.4.6"] + "requirements": ["greeclimate==2.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 479e22a3bfc..b73248c5e16 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1007,7 +1007,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.6 +greeclimate==2.0.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9d92bde7aa8..a0b634d0e2f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -848,7 +848,7 @@ govee-local-api==1.5.1 gps3==0.33.3 # homeassistant.components.gree -greeclimate==1.4.6 +greeclimate==2.0.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index 37b0b0dc15e..32372bebf37 100644 --- a/tests/components/gree/test_bridge.py +++ 
b/tests/components/gree/test_bridge.py @@ -5,8 +5,12 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN -from homeassistant.components.gree.const import COORDINATORS, DOMAIN as GREE +from homeassistant.components.climate import DOMAIN, HVACMode +from homeassistant.components.gree.const import ( + COORDINATORS, + DOMAIN as GREE, + UPDATE_INTERVAL, +) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util @@ -69,3 +73,30 @@ async def test_discovery_after_setup( device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" assert device_infos[1].ip == "2.2.2.1" + + +async def test_coordinator_updates( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test gree devices update their state.""" + await async_setup_gree(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all(DOMAIN)) == 1 + + callback = device().add_handler.call_args_list[0][0][1] + + async def fake_update_state(*args) -> None: + """Fake update state.""" + device().power = True + callback() + + device().update_state.side_effect = fake_update_state + + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID_1) + assert state is not None + assert state.state != HVACMode.OFF diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index e6f24ade1aa..1bf49bbca26 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -48,7 +48,12 @@ from homeassistant.components.gree.climate import ( HVAC_MODES_REVERSE, GreeClimateEntity, ) -from homeassistant.components.gree.const import FAN_MEDIUM_HIGH, FAN_MEDIUM_LOW +from homeassistant.components.gree.const import ( + DISCOVERY_SCAN_INTERVAL, + FAN_MEDIUM_HIGH, + FAN_MEDIUM_LOW, + UPDATE_INTERVAL, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -61,7 +66,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er -import homeassistant.util.dt as dt_util from .common import async_setup_gree, build_device_mock @@ -70,12 +74,6 @@ from tests.common import async_fire_time_changed ENTITY_ID = f"{DOMAIN}.fake_device_1" -@pytest.fixture -def mock_now(): - """Fixture for dtutil.now.""" - return dt_util.utcnow() - - async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: """Test discovery is only ever called once.""" await async_setup_gree(hass) @@ -104,7 +102,7 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: async def test_discovery_setup_connection_error( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test gree integration is setup.""" MockDevice1 = build_device_mock( @@ -126,7 +124,7 @@ async def test_discovery_setup_connection_error( async def test_discovery_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices don't change after multiple discoveries.""" MockDevice1 = build_device_mock( @@ -142,8 +140,7 @@ async def test_discovery_after_setup( 
discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert discovery.return_value.scan_count == 1 assert len(hass.states.async_all(DOMAIN)) == 2 @@ -152,9 +149,8 @@ async def test_discovery_after_setup( discovery.return_value.mock_devices = [MockDevice1, MockDevice2] device.side_effect = [MockDevice1, MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -162,7 +158,7 @@ async def test_discovery_after_setup( async def test_discovery_add_device_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after initial setup.""" MockDevice1 = build_device_mock( @@ -178,6 +174,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.side_effect = [MockDevice1] + await async_setup_gree(hass) # Update 1 + await async_setup_gree(hass) await hass.async_block_till_done() @@ -188,9 +186,8 @@ async def test_discovery_add_device_after_setup( discovery.return_value.mock_devices = [MockDevice2] device.side_effect = [MockDevice2] - next_update = mock_now + timedelta(minutes=6) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 @@ -198,7 +195,7 @@ async def test_discovery_add_device_after_setup( async def test_discovery_device_bind_after_setup( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test gree devices can be added after a late device bind.""" MockDevice1 = build_device_mock( @@ -210,8 +207,7 @@ async def test_discovery_device_bind_after_setup( discovery.return_value.mock_devices = [MockDevice1] device.return_value = MockDevice1 - await async_setup_gree(hass) - await hass.async_block_till_done() + await async_setup_gree(hass) # Update 1 assert len(hass.states.async_all(DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) @@ -222,9 +218,8 @@ async def test_discovery_device_bind_after_setup( MockDevice1.bind.side_effect = None MockDevice1.update_state.side_effect = None - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=DISCOVERY_SCAN_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -232,7 +227,7 @@ async def test_discovery_device_bind_after_setup( async def test_update_connection_failure( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure exception.""" device().update_state.side_effect = [ @@ -241,36 +236,32 @@ async def test_update_connection_failure( DeviceTimeoutError, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + 
async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() - # First update to make the device available + # Update 2 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() + # Update 3 + await run_update() - next_update = mock_now + timedelta(minutes=15) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - - # Then two more update failures to make the device unavailable + # Update 4 + await run_update() state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE -async def test_update_connection_failure_recovery( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now +async def test_update_connection_send_failure_recovery( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection failure recovery.""" device().update_state.side_effect = [ @@ -279,31 +270,27 @@ async def test_update_connection_failure_recovery( DEFAULT_MOCK, ] - await async_setup_gree(hass) + await async_setup_gree(hass) # Update 1 + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) - # First update becomes unavailable - next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) await hass.async_block_till_done() + await run_update() # Update 2 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE - # Second update restores the connection - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) - await hass.async_block_till_done() - + await run_update() # Update 3 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE async def test_update_unhandled_exception( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Testing update hvac connection unhandled response exception.""" device().update_state.side_effect = [DEFAULT_MOCK, Exception] @@ -314,9 +301,8 @@ async def test_update_unhandled_exception( assert state.name == "fake-device-1" assert state.state != STATE_UNAVAILABLE - next_update = mock_now + timedelta(minutes=10) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -325,15 +311,13 @@ async def test_update_unhandled_exception( async def test_send_command_device_timeout( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device, mock_now + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device ) -> None: """Test for sending power on command to the device with a device timeout.""" await async_setup_gree(hass) - # First update to make the device available - 
next_update = mock_now + timedelta(minutes=5) - freezer.move_to(next_update) - async_fire_time_changed(hass, next_update) + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) await hass.async_block_till_done() state = hass.states.get(ENTITY_ID) @@ -355,7 +339,40 @@ async def test_send_command_device_timeout( assert state.state != STATE_UNAVAILABLE -async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) -> None: +async def test_unresponsive_device( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, discovery, device +) -> None: + """Test for unresponsive device.""" + await async_setup_gree(hass) + + async def run_update(): + freezer.tick(timedelta(seconds=UPDATE_INTERVAL)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # Update 2 + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + # Update 3, 4, 5 + await run_update() + await run_update() + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state == STATE_UNAVAILABLE + + # Receiving update from device will reset the state to available again + device().device_state_updated("test") + await run_update() + state = hass.states.get(ENTITY_ID) + assert state.name == "fake-device-1" + assert state.state != STATE_UNAVAILABLE + + +async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: """Test for sending power on command to the device.""" await async_setup_gree(hass) @@ -372,7 +389,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device, mock_now) - async def test_send_power_off_device_timeout( - hass: HomeAssistant, discovery, device, mock_now + hass: HomeAssistant, discovery, device ) -> None: """Test for sending power off command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -543,9 +560,7 @@ async def test_update_target_temperature( @pytest.mark.parametrize( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) -async def test_send_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset -) -> None: +async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -561,9 +576,7 @@ async def test_send_preset_mode( assert state.attributes.get(ATTR_PRESET_MODE) == preset -async def test_send_invalid_preset_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending preset mode command to the device.""" await async_setup_gree(hass) @@ -584,7 +597,7 @@ async def test_send_invalid_preset_mode( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_send_preset_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> None: """Test for sending preset mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -607,7 +620,7 @@ async def test_send_preset_mode_device_timeout( "preset", [PRESET_AWAY, PRESET_ECO, PRESET_SLEEP, PRESET_BOOST, PRESET_NONE] ) async def test_update_preset_mode( - hass: HomeAssistant, discovery, device, mock_now, preset + hass: HomeAssistant, discovery, device, preset ) -> 
None: """Test for updating preset mode from the device.""" device().steady_heat = preset == PRESET_AWAY @@ -634,7 +647,7 @@ async def test_update_preset_mode( ], ) async def test_send_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device.""" await async_setup_gree(hass) @@ -656,7 +669,7 @@ async def test_send_hvac_mode( [HVACMode.AUTO, HVACMode.COOL, HVACMode.DRY, HVACMode.FAN_ONLY, HVACMode.HEAT], ) async def test_send_hvac_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for sending hvac mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -687,7 +700,7 @@ async def test_send_hvac_mode_device_timeout( ], ) async def test_update_hvac_mode( - hass: HomeAssistant, discovery, device, mock_now, hvac_mode + hass: HomeAssistant, discovery, device, hvac_mode ) -> None: """Test for updating hvac mode from the device.""" device().power = hvac_mode != HVACMode.OFF @@ -704,9 +717,7 @@ async def test_update_hvac_mode( "fan_mode", [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) -async def test_send_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode -) -> None: +async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -722,9 +733,7 @@ async def test_send_fan_mode( assert state.attributes.get(ATTR_FAN_MODE) == fan_mode -async def test_send_invalid_fan_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending fan mode command to the device.""" await async_setup_gree(hass) @@ -746,7 +755,7 @@ async def test_send_invalid_fan_mode( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_send_fan_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for sending fan mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -770,7 +779,7 @@ async def test_send_fan_mode_device_timeout( [FAN_AUTO, FAN_LOW, FAN_MEDIUM_LOW, FAN_MEDIUM, FAN_MEDIUM_HIGH, FAN_HIGH], ) async def test_update_fan_mode( - hass: HomeAssistant, discovery, device, mock_now, fan_mode + hass: HomeAssistant, discovery, device, fan_mode ) -> None: """Test for updating fan mode from the device.""" device().fan_speed = FAN_MODES_REVERSE.get(fan_mode) @@ -786,7 +795,7 @@ async def test_update_fan_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the device.""" await async_setup_gree(hass) @@ -803,9 +812,7 @@ async def test_send_swing_mode( assert state.attributes.get(ATTR_SWING_MODE) == swing_mode -async def test_send_invalid_swing_mode( - hass: HomeAssistant, discovery, device, mock_now -) -> None: +async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) -> None: """Test for sending swing mode command to the device.""" await 
async_setup_gree(hass) @@ -826,7 +833,7 @@ async def test_send_invalid_swing_mode( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_send_swing_mode_device_timeout( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for sending swing mode command to the device with a device timeout.""" device().push_state_update.side_effect = DeviceTimeoutError @@ -849,7 +856,7 @@ async def test_send_swing_mode_device_timeout( "swing_mode", [SWING_OFF, SWING_BOTH, SWING_VERTICAL, SWING_HORIZONTAL] ) async def test_update_swing_mode( - hass: HomeAssistant, discovery, device, mock_now, swing_mode + hass: HomeAssistant, discovery, device, swing_mode ) -> None: """Test for updating swing mode from the device.""" device().horizontal_swing = ( From 50b7eb44d1706ad69ebc016e0fe6423932cbde93 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sat, 3 Aug 2024 08:16:30 +0200 Subject: [PATCH 0371/1635] Add CONTROL supported feature to Google conversation when API access (#123046) * Add CONTROL supported feature to Google conversation when API access * Better function name * Handle entry update inline * Reload instead of update --- .../conversation.py | 14 ++++++++++++++ .../snapshots/test_conversation.ambr | 8 ++++---- .../test_conversation.py | 15 +++++++++++---- 3 files changed, 29 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index a5c911bb757..1d8b46cde2f 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -164,6 +164,10 @@ class GoogleGenerativeAIConversationEntity( model="Generative AI", entry_type=dr.DeviceEntryType.SERVICE, ) + if self.entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) @property def supported_languages(self) -> list[str] | Literal["*"]: @@ -177,6 +181,9 @@ class GoogleGenerativeAIConversationEntity( self.hass, "conversation", self.entry.entry_id, self.entity_id ) conversation.async_set_agent(self.hass, self.entry, self) + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) async def async_will_remove_from_hass(self) -> None: """When entity will be removed from Home Assistant.""" @@ -397,3 +404,10 @@ class GoogleGenerativeAIConversationEntity( parts.append(llm_api.api_prompt) return "\n".join(parts) + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr index abd3658e869..65238c5212a 100644 --- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr +++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr @@ -215,7 +215,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options0-None] +# name: test_default_prompt[config_entry_options0-0-None] list([ tuple( '', @@ -263,7 +263,7 @@ ), ]) # --- -# name: 
test_default_prompt[config_entry_options0-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options0-0-conversation.google_generative_ai_conversation] list([ tuple( '', @@ -311,7 +311,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-None] +# name: test_default_prompt[config_entry_options1-1-None] list([ tuple( '', @@ -360,7 +360,7 @@ ), ]) # --- -# name: test_default_prompt[config_entry_options1-conversation.google_generative_ai_conversation] +# name: test_default_prompt[config_entry_options1-1-conversation.google_generative_ai_conversation] list([ tuple( '', diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 41f96c7b0ac..98f469643af 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -18,7 +18,7 @@ from homeassistant.components.google_generative_ai_conversation.const import ( from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, ) -from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import intent, llm @@ -38,10 +38,13 @@ def freeze_the_time(): "agent_id", [None, "conversation.google_generative_ai_conversation"] ) @pytest.mark.parametrize( - "config_entry_options", + ("config_entry_options", "expected_features"), [ - {}, - {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + ({}, 0), + ( + {CONF_LLM_HASS_API: llm.LLM_API_ASSIST}, + conversation.ConversationEntityFeature.CONTROL, + ), ], ) @pytest.mark.usefixtures("mock_init_component") @@ -51,6 +54,7 @@ async def test_default_prompt( snapshot: SnapshotAssertion, agent_id: str | None, config_entry_options: {}, + expected_features: conversation.ConversationEntityFeature, hass_ws_client: WebSocketGenerator, ) -> None: """Test that the default prompt works.""" @@ -97,6 +101,9 @@ async def test_default_prompt( assert [tuple(mock_call) for mock_call in mock_model.mock_calls] == snapshot assert mock_get_tools.called == (CONF_LLM_HASS_API in config_entry_options) + state = hass.states.get("conversation.google_generative_ai_conversation") + assert state.attributes[ATTR_SUPPORTED_FEATURES] == expected_features + @pytest.mark.parametrize( ("model_name", "supports_system_instruction"), From bee77041e83227d6a1097d1a4f860a9c12514e47 Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Sat, 3 Aug 2024 09:14:24 +0300 Subject: [PATCH 0372/1635] Change enum type to string for Google Generative AI Conversation (#123069) --- .../conversation.py | 14 ++++- .../test_conversation.py | 59 +++++++++++++++++++ 2 files changed, 70 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py index 1d8b46cde2f..0d24ddbf39f 100644 --- a/homeassistant/components/google_generative_ai_conversation/conversation.py +++ b/homeassistant/components/google_generative_ai_conversation/conversation.py @@ -89,9 +89,9 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: key = "type_" val = val.upper() elif key == "format": - if (schema.get("type") == "string" and val != "enum") or ( - schema.get("type") not in 
("number", "integer", "string") - ): + if schema.get("type") == "string" and val != "enum": + continue + if schema.get("type") not in ("number", "integer", "string"): continue key = "format_" elif key == "items": @@ -100,11 +100,19 @@ def _format_schema(schema: dict[str, Any]) -> dict[str, Any]: val = {k: _format_schema(v) for k, v in val.items()} result[key] = val + if result.get("enum") and result.get("type_") != "STRING": + # enum is only allowed for STRING type. This is safe as long as the schema + # contains vol.Coerce for the respective type, for example: + # vol.All(vol.Coerce(int), vol.In([1, 2, 3])) + result["type_"] = "STRING" + result["enum"] = [str(item) for item in result["enum"]] + if result.get("type_") == "OBJECT" and not result.get("properties"): # An object with undefined properties is not supported by Gemini API. # Fallback to JSON string. This will probably fail for most tools that want it, # but we don't have a better fallback strategy so far. result["properties"] = {"json": {"type_": "STRING"}} + result["required"] = [] return result diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 98f469643af..a8eae34e08b 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -17,6 +17,7 @@ from homeassistant.components.google_generative_ai_conversation.const import ( ) from homeassistant.components.google_generative_ai_conversation.conversation import ( _escape_decode, + _format_schema, ) from homeassistant.const import ATTR_SUPPORTED_FEATURES, CONF_LLM_HASS_API from homeassistant.core import Context, HomeAssistant @@ -629,3 +630,61 @@ async def test_escape_decode() -> None: "param2": "param2's value", "param3": {"param31": "Cheminée", "param32": "Cheminée"}, } + + +@pytest.mark.parametrize( + ("openapi", "protobuf"), + [ + ( + {"type": "string", "enum": ["a", "b", "c"]}, + {"type_": "STRING", "enum": ["a", "b", "c"]}, + ), + ( + {"type": "integer", "enum": [1, 2, 3]}, + {"type_": "STRING", "enum": ["1", "2", "3"]}, + ), + ({"anyOf": [{"type": "integer"}, {"type": "number"}]}, {"type_": "INTEGER"}), + ( + { + "anyOf": [ + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + {"anyOf": [{"type": "integer"}, {"type": "number"}]}, + ] + }, + {"type_": "INTEGER"}, + ), + ({"type": "string", "format": "lower"}, {"type_": "STRING"}), + ({"type": "boolean", "format": "bool"}, {"type_": "BOOLEAN"}), + ( + {"type": "number", "format": "percent"}, + {"type_": "NUMBER", "format_": "percent"}, + ), + ( + { + "type": "object", + "properties": {"var": {"type": "string"}}, + "required": [], + }, + { + "type_": "OBJECT", + "properties": {"var": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "object", "additionalProperties": True}, + { + "type_": "OBJECT", + "properties": {"json": {"type_": "STRING"}}, + "required": [], + }, + ), + ( + {"type": "array", "items": {"type": "string"}}, + {"type_": "ARRAY", "items": {"type_": "STRING"}}, + ), + ], +) +async def test_format_schema(openapi, protobuf) -> None: + """Test _format_schema.""" + assert _format_schema(openapi) == protobuf From fa241dcd0440336622413dd87216823a1238247c Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Mon, 5 Aug 2024 08:43:29 +0200 Subject: [PATCH 0373/1635] Catch exception in coordinator setup of IronOS integration (#123079) --- .../components/iron_os/coordinator.py | 6 ++++- tests/components/iron_os/test_init.py | 26 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 tests/components/iron_os/test_init.py diff --git a/homeassistant/components/iron_os/coordinator.py b/homeassistant/components/iron_os/coordinator.py index e8424478d86..aefb14b689b 100644 --- a/homeassistant/components/iron_os/coordinator.py +++ b/homeassistant/components/iron_os/coordinator.py @@ -46,4 +46,8 @@ class IronOSCoordinator(DataUpdateCoordinator[LiveDataResponse]): async def _async_setup(self) -> None: """Set up the coordinator.""" - self.device_info = await self.device.get_device_info() + try: + self.device_info = await self.device.get_device_info() + + except CommunicationError as e: + raise UpdateFailed("Cannot connect to device") from e diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py new file mode 100644 index 00000000000..fb0a782ea36 --- /dev/null +++ b/tests/components/iron_os/test_init.py @@ -0,0 +1,26 @@ +"""Test init of IronOS integration.""" + +from unittest.mock import AsyncMock + +from pynecil import CommunicationError +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("ble_device") +async def test_setup_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test config entry not ready.""" + mock_pynecil.get_device_info.side_effect = CommunicationError + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY From 7623ee49e49867c388e5a74debf97dc7f36b550a Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Fri, 2 Aug 2024 21:56:49 +0300 Subject: [PATCH 0374/1635] Ignore Shelly IPv6 address in zeroconf (#123081) --- .../components/shelly/config_flow.py | 2 ++ homeassistant/components/shelly/strings.json | 3 ++- tests/components/shelly/test_config_flow.py | 19 +++++++++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/shelly/config_flow.py b/homeassistant/components/shelly/config_flow.py index cb3bca6aa47..c80d1e84d6f 100644 --- a/homeassistant/components/shelly/config_flow.py +++ b/homeassistant/components/shelly/config_flow.py @@ -279,6 +279,8 @@ class ShellyConfigFlow(ConfigFlow, domain=DOMAIN): self, discovery_info: ZeroconfServiceInfo ) -> ConfigFlowResult: """Handle zeroconf discovery.""" + if discovery_info.ip_address.version == 6: + return self.async_abort(reason="ipv6_not_supported") host = discovery_info.host # First try to get the mac address from the name # so we can avoid making another connection to the diff --git a/homeassistant/components/shelly/strings.json b/homeassistant/components/shelly/strings.json index 8ae4ff1f3e4..f76319eb08c 100644 --- a/homeassistant/components/shelly/strings.json +++ b/homeassistant/components/shelly/strings.json @@ -52,7 +52,8 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "reauth_unsuccessful": "Re-authentication was unsuccessful, please remove the integration and set it up again.", "reconfigure_successful": 
"[%key:common::config_flow::abort::reconfigure_successful%]", - "another_device": "Re-configuration was unsuccessful, the IP address/hostname of another Shelly device was used." + "another_device": "Re-configuration was unsuccessful, the IP address/hostname of another Shelly device was used.", + "ipv6_not_supported": "IPv6 is not supported." } }, "device_automation": { diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index a3040fc2eb8..0c574a33e0c 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -1305,3 +1305,22 @@ async def test_reconfigure_with_exception( ) assert result["errors"] == {"base": base_error} + + +async def test_zeroconf_rejects_ipv6(hass: HomeAssistant) -> None: + """Test zeroconf discovery rejects ipv6.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_ZEROCONF}, + data=zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("fd00::b27c:63bb:cc85:4ea0"), + ip_addresses=[ip_address("fd00::b27c:63bb:cc85:4ea0")], + hostname="mock_hostname", + name="shelly1pm-12345", + port=None, + properties={zeroconf.ATTR_PROPERTIES_ID: "shelly1pm-12345"}, + type="mock_type", + ), + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "ipv6_not_supported" From fdb1baadbeed6e7f135c0323f229e3a85f32de4e Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Sat, 3 Aug 2024 22:32:47 +0200 Subject: [PATCH 0375/1635] Fix wrong DeviceInfo in bluesound integration (#123101) Fix bluesound device info --- homeassistant/components/bluesound/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 809ba293f89..dc09feaed63 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -255,7 +255,7 @@ class BluesoundPlayer(MediaPlayerEntity): self._attr_unique_id = format_unique_id(sync_status.mac, port) # there should always be one player with the default port per mac - if port is DEFAULT_PORT: + if port == DEFAULT_PORT: self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, format_mac(sync_status.mac))}, connections={(CONNECTION_NETWORK_MAC, format_mac(sync_status.mac))}, From eccce7017f21269245bb84be8b9ca2cf7f92464b Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sat, 3 Aug 2024 17:21:12 +0200 Subject: [PATCH 0376/1635] Bump pyenphase to 1.22.0 (#123103) --- homeassistant/components/enphase_envoy/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enphase_envoy/manifest.json b/homeassistant/components/enphase_envoy/manifest.json index 09c55fb23ac..aa06a1ff79f 100644 --- a/homeassistant/components/enphase_envoy/manifest.json +++ b/homeassistant/components/enphase_envoy/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/enphase_envoy", "iot_class": "local_polling", "loggers": ["pyenphase"], - "requirements": ["pyenphase==1.20.6"], + "requirements": ["pyenphase==1.22.0"], "zeroconf": [ { "type": "_enphase-envoy._tcp.local." 
diff --git a/requirements_all.txt b/requirements_all.txt index b73248c5e16..b48fdd28553 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1840,7 +1840,7 @@ pyeiscp==0.0.7 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.6 +pyenphase==1.22.0 # homeassistant.components.envisalink pyenvisalink==4.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0b634d0e2f..c9f3878dbdf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1469,7 +1469,7 @@ pyegps==0.2.5 pyemoncms==0.0.7 # homeassistant.components.enphase_envoy -pyenphase==1.20.6 +pyenphase==1.22.0 # homeassistant.components.everlights pyeverlights==0.1.0 From 832bac8c63ccb063ed74e17c6ffca83c07cb57bb Mon Sep 17 00:00:00 2001 From: Kim de Vos Date: Sat, 3 Aug 2024 15:08:01 +0200 Subject: [PATCH 0377/1635] Use slugify to create id for UniFi WAN latency (#123108) Use slugify to create id for latency --- homeassistant/components/unifi/sensor.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index d86b72d1b2f..08bd0ddb869 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -44,6 +44,7 @@ from homeassistant.core import Event as core_Event, HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType +from homeassistant.util import slugify import homeassistant.util.dt as dt_util from . import UnifiConfigEntry @@ -247,8 +248,9 @@ def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: def make_wan_latency_entity_description( wan: Literal["WAN", "WAN2"], name: str, monitor_target: str ) -> UnifiSensorEntityDescription: + name_wan = f"{name} {wan}" return UnifiSensorEntityDescription[Devices, Device]( - key=f"{name} {wan} latency", + key=f"{name_wan} latency", entity_category=EntityCategory.DIAGNOSTIC, native_unit_of_measurement=UnitOfTime.MILLISECONDS, state_class=SensorStateClass.MEASUREMENT, @@ -257,13 +259,12 @@ def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: api_handler_fn=lambda api: api.devices, available_fn=async_device_available_fn, device_info_fn=async_device_device_info_fn, - name_fn=lambda _: f"{name} {wan} latency", + name_fn=lambda device: f"{name_wan} latency", object_fn=lambda api, obj_id: api.devices[obj_id], supported_fn=partial( async_device_wan_latency_supported_fn, wan, monitor_target ), - unique_id_fn=lambda hub, - obj_id: f"{name.lower}_{wan.lower}_latency-{obj_id}", + unique_id_fn=lambda hub, obj_id: f"{slugify(name_wan)}_latency-{obj_id}", value_fn=partial(async_device_wan_latency_value_fn, wan, monitor_target), ) From c8a0e5228da591e521502890e8b7e43c514e8f0a Mon Sep 17 00:00:00 2001 From: "David F. 
Mulcahey" Date: Mon, 5 Aug 2024 05:19:57 -0400 Subject: [PATCH 0378/1635] Bump ZHA lib to 0.0.27 (#123125) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 6e35339c53f..d7dc53b5167 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.25"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.27"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index b48fdd28553..fb881f5a8c6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2986,7 +2986,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.25 +zha==0.0.27 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c9f3878dbdf..ac3d594a541 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2360,7 +2360,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.25 +zha==0.0.27 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 From 0b4d92176275865c2a627d09c65337862489d3e6 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Sun, 4 Aug 2024 08:28:45 -0400 Subject: [PATCH 0379/1635] Restore old service worker URL (#123131) --- homeassistant/components/frontend/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/frontend/__init__.py b/homeassistant/components/frontend/__init__.py index 5b462842e4a..c5df84cf549 100644 --- a/homeassistant/components/frontend/__init__.py +++ b/homeassistant/components/frontend/__init__.py @@ -398,6 +398,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: static_paths_configs: list[StaticPathConfig] = [] for path, should_cache in ( + ("service_worker.js", False), ("sw-modern.js", False), ("sw-modern.js.map", False), ("sw-legacy.js", False), From f6c4b6b0456cf8dd4b2bd4dce808e5c8b06caec2 Mon Sep 17 00:00:00 2001 From: dupondje Date: Mon, 5 Aug 2024 10:32:58 +0200 Subject: [PATCH 0380/1635] dsmr: migrate hourly_gas_meter_reading to mbus device (#123149) --- homeassistant/components/dsmr/sensor.py | 4 +- tests/components/dsmr/test_mbus_migration.py | 100 +++++++++++++++++++ 2 files changed, 103 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index f794d1d05e9..b298ed5bfc0 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -439,7 +439,9 @@ def rename_old_gas_to_mbus( entries = er.async_entries_for_device(ent_reg, device_id) for entity in entries: - if entity.unique_id.endswith("belgium_5min_gas_meter_reading"): + if entity.unique_id.endswith( + "belgium_5min_gas_meter_reading" + ) or entity.unique_id.endswith("hourly_gas_meter_reading"): try: ent_reg.async_update_entity( entity.entity_id, diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index a28bc2c3a33..20b3d253f39 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -119,6 +119,106 @@ async def test_migrate_gas_to_mbus( ) +async def 
test_migrate_hourly_gas_to_mbus( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5", + "serial_id": "1234", + "serial_id_gas": "4730303738353635363037343639323231", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "4730303738353635363037343639323231_hourly_gas_meter_reading" + + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, mock_entry.entry_id)}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), + [{"value": "4730303738353635363037343639323231", "unit": ""}], + ), + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1722749707)}, + {"value": Decimal(778.963), "unit": "m3"}, + ], + ), + "MBUS_METER_READING", + ) + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + dev_entities = er.async_entries_for_device( + entity_registry, device.id, include_disabled_entities=True + ) + assert not dev_entities + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "4730303738353635363037343639323231" + ) + == "sensor.gas_meter_reading" + ) + + async def test_migrate_gas_to_mbus_exists( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 73a2ad7304cd9d4b7d9b3b191dae64601e203bdc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 5 Aug 2024 01:39:04 -0500 Subject: [PATCH 0381/1635] Bump aiohttp to 3.10.1 (#123159) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1cc6a0fa85d..3b251e91179 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.0 +aiohttp==3.10.1 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index c60a01663da..b3da2cdb631 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.0", + "aiohttp==3.10.1", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index c851927f9c6..1beefe73914 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.0 +aiohttp==3.10.1 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From 85700fd80fe4229845ae7102ee22e95293d5dafe Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Mon, 5 Aug 2024 17:59:33 +1000 Subject: [PATCH 0382/1635] Fix class attribute condition in Tesla Fleet (#123162) --- homeassistant/components/tesla_fleet/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 2c5ee1b5c75..8257bf75cd0 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -86,7 +86,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - vehicles: list[TeslaFleetVehicleData] = [] energysites: list[TeslaFleetEnergyData] = [] for product in products: - if "vin" in product and tesla.vehicle: + if "vin" in product and hasattr(tesla, "vehicle"): # Remove the protobuff 'cached_data' that we do not use to save memory product.pop("cached_data", None) vin = product["vin"] @@ -111,7 +111,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - device=device, ) ) - elif "energy_site_id" in product and tesla.energy: + elif "energy_site_id" in product and hasattr(tesla, "energy"): site_id = product["energy_site_id"] if not ( product["components"]["battery"] From cdb378066c14e7cd554b42e07e3801cbddacfb90 Mon Sep 17 00:00:00 2001 From: Calvin Walton Date: Mon, 5 Aug 2024 04:02:15 -0400 Subject: [PATCH 0383/1635] Add Govee H612B to the Matter transition blocklist (#123163) --- homeassistant/components/matter/light.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 65c3a535216..d05a7c85f9d 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -54,6 +54,7 @@ TRANSITION_BLOCKLIST = ( (4488, 514, "1.0", "1.0.0"), (4488, 260, "1.0", "1.0.0"), (5010, 769, "3.0", "1.0.0"), + (4999, 24875, "1.0", "27.0"), (4999, 25057, "1.0", "27.0"), (4448, 36866, "V1", "V1.0.0.5"), (5009, 514, "1.0", "1.0.0"), From 35a3d2306c8b77d0cf13442cc6c2c8e5cf928297 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 12:22:03 +0200 Subject: [PATCH 0384/1635] Bump version to 
2024.8.0b2 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 08ee0bb77f9..7f27548a68c 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b1" +PATCH_VERSION: Final = "0b2" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index b3da2cdb631..f23c571feb5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b1" +version = "2024.8.0b2" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From f2d99cb059187f06dba3b26e04f1254fc42e23e0 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 5 Aug 2024 12:34:48 +0200 Subject: [PATCH 0385/1635] Use KNX UI entity platform controller class (#123128) --- homeassistant/components/knx/binary_sensor.py | 4 +- homeassistant/components/knx/button.py | 4 +- homeassistant/components/knx/climate.py | 4 +- homeassistant/components/knx/cover.py | 4 +- homeassistant/components/knx/date.py | 4 +- homeassistant/components/knx/datetime.py | 4 +- homeassistant/components/knx/fan.py | 4 +- homeassistant/components/knx/knx_entity.py | 76 +++++++++++++------ homeassistant/components/knx/light.py | 39 +++++----- homeassistant/components/knx/notify.py | 4 +- homeassistant/components/knx/number.py | 4 +- homeassistant/components/knx/scene.py | 4 +- homeassistant/components/knx/select.py | 4 +- homeassistant/components/knx/sensor.py | 4 +- .../components/knx/storage/config_store.py | 54 +++++++------ homeassistant/components/knx/switch.py | 59 +++++++------- homeassistant/components/knx/text.py | 4 +- homeassistant/components/knx/time.py | 4 +- homeassistant/components/knx/weather.py | 4 +- 19 files changed, 165 insertions(+), 123 deletions(-) diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index ff15f725fae..7d80ca55bf6 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -24,7 +24,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import ATTR_COUNTER, ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import BinarySensorSchema @@ -43,7 +43,7 @@ async def async_setup_entry( ) -class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): +class KNXBinarySensor(KnxYamlEntity, BinarySensorEntity, RestoreEntity): """Representation of a KNX binary sensor.""" _device: XknxBinarySensor diff --git a/homeassistant/components/knx/button.py b/homeassistant/components/knx/button.py index 2eb68eebe43..f6627fc527b 100644 --- a/homeassistant/components/knx/button.py +++ b/homeassistant/components/knx/button.py @@ -13,7 +13,7 @@ from homeassistant.helpers.typing import ConfigType from . 
import KNXModule from .const import CONF_PAYLOAD_LENGTH, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -31,7 +31,7 @@ async def async_setup_entry( ) -class KNXButton(KnxEntity, ButtonEntity): +class KNXButton(KnxYamlEntity, ButtonEntity): """Representation of a KNX button.""" _device: XknxRawValue diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 7470d60ef4b..9abc9023617 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -35,7 +35,7 @@ from .const import ( DOMAIN, PRESET_MODES, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import ClimateSchema ATTR_COMMAND_VALUE = "command_value" @@ -133,7 +133,7 @@ def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: ) -class KNXClimate(KnxEntity, ClimateEntity): +class KNXClimate(KnxYamlEntity, ClimateEntity): """Representation of a KNX climate device.""" _device: XknxClimate diff --git a/homeassistant/components/knx/cover.py b/homeassistant/components/knx/cover.py index 1962db0ad3f..408f746e094 100644 --- a/homeassistant/components/knx/cover.py +++ b/homeassistant/components/knx/cover.py @@ -27,7 +27,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import CoverSchema @@ -43,7 +43,7 @@ async def async_setup_entry( async_add_entities(KNXCover(knx_module, entity_config) for entity_config in config) -class KNXCover(KnxEntity, CoverEntity): +class KNXCover(KnxYamlEntity, CoverEntity): """Representation of a KNX cover.""" _device: XknxCover diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index 80fea63d0a6..9f04a4acd7e 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -31,7 +31,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -61,7 +61,7 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice: ) -class KNXDateEntity(KnxEntity, DateEntity, RestoreEntity): +class KNXDateEntity(KnxYamlEntity, DateEntity, RestoreEntity): """Representation of a KNX date.""" _device: XknxDateDevice diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index 16ccb7474a7..8f1a25e6e3c 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -32,7 +32,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -62,7 +62,7 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTimeDevice: ) -class KNXDateTimeEntity(KnxEntity, DateTimeEntity, RestoreEntity): +class KNXDateTimeEntity(KnxYamlEntity, DateTimeEntity, RestoreEntity): """Representation of a KNX datetime.""" _device: XknxDateTimeDevice diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index 940e241ccda..6fd87be97d1 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -21,7 +21,7 @@ from homeassistant.util.scaling import int_states_in_range from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import FanSchema DEFAULT_PERCENTAGE: Final = 50 @@ -39,7 +39,7 @@ async def async_setup_entry( async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config) -class KNXFan(KnxEntity, FanEntity): +class KNXFan(KnxYamlEntity, FanEntity): """Representation of a KNX fan.""" _device: XknxFan diff --git a/homeassistant/components/knx/knx_entity.py b/homeassistant/components/knx/knx_entity.py index 2b8d2e71186..c81a6ee06db 100644 --- a/homeassistant/components/knx/knx_entity.py +++ b/homeassistant/components/knx/knx_entity.py @@ -2,30 +2,55 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any from xknx.devices import Device as XknxDevice -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity - -from .const import DOMAIN +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.helpers.entity_registry import RegistryEntry if TYPE_CHECKING: from . import KNXModule -SIGNAL_ENTITY_REMOVE = f"{DOMAIN}_entity_remove_signal.{{}}" +from .storage.config_store import PlatformControllerBase -class KnxEntity(Entity): +class KnxUiEntityPlatformController(PlatformControllerBase): + """Class to manage dynamic adding and reloading of UI entities.""" + + def __init__( + self, + knx_module: KNXModule, + entity_platform: EntityPlatform, + entity_class: type[KnxUiEntity], + ) -> None: + """Initialize the UI platform.""" + self._knx_module = knx_module + self._entity_platform = entity_platform + self._entity_class = entity_class + + async def create_entity(self, unique_id: str, config: dict[str, Any]) -> None: + """Add a new UI entity.""" + await self._entity_platform.async_add_entities( + [self._entity_class(self._knx_module, unique_id, config)] + ) + + async def update_entity( + self, entity_entry: RegistryEntry, config: dict[str, Any] + ) -> None: + """Update an existing UI entities configuration.""" + await self._entity_platform.async_remove_entity(entity_entry.entity_id) + await self.create_entity(unique_id=entity_entry.unique_id, config=config) + + +class _KnxEntityBase(Entity): """Representation of a KNX entity.""" _attr_should_poll = False - - def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None: - """Set up device.""" - self._knx_module = knx_module - self._device = device + _knx_module: KNXModule + _device: XknxDevice @property def name(self) -> str: @@ -49,7 +74,7 @@ class KnxEntity(Entity): """Store register state change callback and start device object.""" self._device.register_device_updated_cb(self.after_update_callback) self._device.xknx.devices.async_add(self._device) - # super call needed to have methods of mulit-inherited classes called + # super call needed to have methods of multi-inherited classes called # eg. 
for restoring state (like _KNXSwitch) await super().async_added_to_hass() @@ -59,19 +84,22 @@ class KnxEntity(Entity): self._device.xknx.devices.async_remove(self._device) -class KnxUIEntity(KnxEntity): +class KnxYamlEntity(_KnxEntityBase): + """Representation of a KNX entity configured from YAML.""" + + def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None: + """Initialize the YAML entity.""" + self._knx_module = knx_module + self._device = device + + +class KnxUiEntity(_KnxEntityBase, ABC): """Representation of a KNX UI entity.""" _attr_unique_id: str - async def async_added_to_hass(self) -> None: - """Register callbacks when entity added to hass.""" - await super().async_added_to_hass() - self._knx_module.config_store.entities.add(self._attr_unique_id) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_ENTITY_REMOVE.format(self._attr_unique_id), - self.async_remove, - ) - ) + @abstractmethod + def __init__( + self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] + ) -> None: + """Initialize the UI entity.""" diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index 1197f09354b..a2ce8f8d2cb 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -19,15 +19,18 @@ from homeassistant.components.light import ( LightEntity, ) from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.helpers.typing import ConfigType import homeassistant.util.color as color_util from . 
import KNXModule from .const import CONF_SYNC_STATE, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, ColorTempModes -from .knx_entity import KnxEntity, KnxUIEntity +from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import LightSchema from .storage.const import ( CONF_COLOR_TEMP_MAX, @@ -63,8 +66,17 @@ async def async_setup_entry( ) -> None: """Set up light(s) for KNX platform.""" knx_module: KNXModule = hass.data[DOMAIN] + platform = async_get_current_platform() + knx_module.config_store.add_platform( + platform=Platform.LIGHT, + controller=KnxUiEntityPlatformController( + knx_module=knx_module, + entity_platform=platform, + entity_class=KnxUiLight, + ), + ) - entities: list[KnxEntity] = [] + entities: list[KnxYamlEntity | KnxUiEntity] = [] if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): entities.extend( KnxYamlLight(knx_module, entity_config) @@ -78,13 +90,6 @@ async def async_setup_entry( if entities: async_add_entities(entities) - @callback - def add_new_ui_light(unique_id: str, config: dict[str, Any]) -> None: - """Add KNX entity at runtime.""" - async_add_entities([KnxUiLight(knx_module, unique_id, config)]) - - knx_module.config_store.async_add_entity[Platform.LIGHT] = add_new_ui_light - def _create_yaml_light(xknx: XKNX, config: ConfigType) -> XknxLight: """Return a KNX Light device to be used within XKNX.""" @@ -519,7 +524,7 @@ class _KnxLight(LightEntity): await self._device.set_off() -class KnxYamlLight(_KnxLight, KnxEntity): +class KnxYamlLight(_KnxLight, KnxYamlEntity): """Representation of a KNX light.""" _device: XknxLight @@ -546,7 +551,7 @@ class KnxYamlLight(_KnxLight, KnxEntity): ) -class KnxUiLight(_KnxLight, KnxUIEntity): +class KnxUiLight(_KnxLight, KnxUiEntity): """Representation of a KNX light.""" _attr_has_entity_name = True @@ -556,11 +561,9 @@ class KnxUiLight(_KnxLight, KnxUIEntity): self, knx_module: KNXModule, unique_id: str, config: ConfigType ) -> None: """Initialize of KNX light.""" - super().__init__( - knx_module=knx_module, - device=_create_ui_light( - knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] - ), + self._knx_module = knx_module + self._device = _create_ui_light( + knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] ) self._attr_max_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MAX] self._attr_min_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MIN] diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index b349681990c..173ab3119a0 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -20,7 +20,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_get_service( @@ -103,7 +103,7 @@ def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotific ) -class KNXNotify(KnxEntity, NotifyEntity): +class KNXNotify(KnxYamlEntity, NotifyEntity): """Representation of a KNX notification entity.""" _device: XknxNotification diff --git a/homeassistant/components/knx/number.py b/homeassistant/components/knx/number.py index 3d4af503dff..cbbe91aba54 100644 --- a/homeassistant/components/knx/number.py +++ b/homeassistant/components/knx/number.py @@ -30,7 +30,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import NumberSchema @@ -58,7 +58,7 @@ def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue: ) -class KNXNumber(KnxEntity, RestoreNumber): +class KNXNumber(KnxYamlEntity, RestoreNumber): """Representation of a KNX number.""" _device: NumericValue diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index fc37f36dd01..2de832ae54a 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -15,7 +15,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import SceneSchema @@ -31,7 +31,7 @@ async def async_setup_entry( async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) -class KNXScene(KnxEntity, Scene): +class KNXScene(KnxYamlEntity, Scene): """Representation of a KNX scene.""" _device: XknxScene diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index 1b862010c2a..6c73bf8d573 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -30,7 +30,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import SelectSchema @@ -59,7 +59,7 @@ def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue: ) -class KNXSelect(KnxEntity, SelectEntity, RestoreEntity): +class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity): """Representation of a KNX select.""" _device: RawValue diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index ab363e2a35f..a28c1a339e6 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -35,7 +35,7 @@ from homeassistant.util.enum import try_parse_enum from . 
import KNXModule
 from .const import ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN
-from .knx_entity import KnxEntity
+from .knx_entity import KnxYamlEntity
 from .schema import SensorSchema
 
 SCAN_INTERVAL = timedelta(seconds=10)
@@ -141,7 +141,7 @@ def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor:
     )
 
 
-class KNXSensor(KnxEntity, SensorEntity):
+class KNXSensor(KnxYamlEntity, SensorEntity):
     """Representation of a KNX sensor."""
 
     _device: XknxSensor
diff --git a/homeassistant/components/knx/storage/config_store.py b/homeassistant/components/knx/storage/config_store.py
index 876fe19a4b9..ce7a705e629 100644
--- a/homeassistant/components/knx/storage/config_store.py
+++ b/homeassistant/components/knx/storage/config_store.py
@@ -1,6 +1,6 @@
 """KNX entity configuration store."""
 
-from collections.abc import Callable
+from abc import ABC, abstractmethod
 import logging
 from typing import Any, Final, TypedDict
 
@@ -8,12 +8,10 @@
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_PLATFORM, Platform
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.dispatcher import async_dispatcher_send
 from homeassistant.helpers.storage import Store
 from homeassistant.util.ulid import ulid_now
 
 from ..const import DOMAIN
-from ..knx_entity import SIGNAL_ENTITY_REMOVE
 from .const import CONF_DATA
 
 _LOGGER = logging.getLogger(__name__)
@@ -33,6 +31,20 @@ class KNXConfigStoreModel(TypedDict):
     entities: KNXEntityStoreModel
 
 
+class PlatformControllerBase(ABC):
+    """Entity platform controller base class."""
+
+    @abstractmethod
+    async def create_entity(self, unique_id: str, config: dict[str, Any]) -> None:
+        """Create a new entity."""
+
+    @abstractmethod
+    async def update_entity(
+        self, entity_entry: er.RegistryEntry, config: dict[str, Any]
+    ) -> None:
+        """Update an existing entity's configuration."""
+
+
 class KNXConfigStore:
     """Manage KNX config store data."""
 
@@ -46,12 +58,7 @@ class KNXConfigStore:
         self.config_entry = config_entry
         self._store = Store[KNXConfigStoreModel](hass, STORAGE_VERSION, STORAGE_KEY)
         self.data = KNXConfigStoreModel(entities={})
-
-        # entities and async_add_entity are filled by platform / entity setups
-        self.entities: set[str] = set()  # unique_id as values
-        self.async_add_entity: dict[
-            Platform, Callable[[str, dict[str, Any]], None]
-        ] = {}
+        self._platform_controllers: dict[Platform, PlatformControllerBase] = {}
 
     async def load_data(self) -> None:
         """Load config store data from storage."""
@@ -62,14 +69,19 @@ class KNXConfigStore:
             len(self.data["entities"]),
         )
 
+    def add_platform(
+        self, platform: Platform, controller: PlatformControllerBase
+    ) -> None:
+        """Add platform controller."""
+        self._platform_controllers[platform] = controller
+
     async def create_entity(
         self, platform: Platform, data: dict[str, Any]
     ) -> str | None:
         """Create a new entity."""
-        if platform not in self.async_add_entity:
-            raise ConfigStoreException(f"Entity platform not ready: {platform}")
+        platform_controller = self._platform_controllers[platform]
         unique_id = f"knx_es_{ulid_now()}"
-        self.async_add_entity[platform](unique_id, data)
+        await platform_controller.create_entity(unique_id, data)
         # store data after entity was added to be sure config didn't raise exceptions
         self.data["entities"].setdefault(platform, {})[unique_id] = data
         await self._store.async_save(self.data)
@@ -95,8 +107,7 @@ class KNXConfigStore:
         self, platform: Platform, entity_id: str, data: dict[str, Any]
     ) -> None:
"""Update an existing entity.""" - if platform not in self.async_add_entity: - raise ConfigStoreException(f"Entity platform not ready: {platform}") + platform_controller = self._platform_controllers[platform] entity_registry = er.async_get(self.hass) if (entry := entity_registry.async_get(entity_id)) is None: raise ConfigStoreException(f"Entity not found: {entity_id}") @@ -108,8 +119,7 @@ class KNXConfigStore: raise ConfigStoreException( f"Entity not found in storage: {entity_id} - {unique_id}" ) - async_dispatcher_send(self.hass, SIGNAL_ENTITY_REMOVE.format(unique_id)) - self.async_add_entity[platform](unique_id, data) + await platform_controller.update_entity(entry, data) # store data after entity is added to make sure config doesn't raise exceptions self.data["entities"][platform][unique_id] = data await self._store.async_save(self.data) @@ -125,23 +135,21 @@ class KNXConfigStore: raise ConfigStoreException( f"Entity not found in {entry.domain}: {entry.unique_id}" ) from err - try: - self.entities.remove(entry.unique_id) - except KeyError: - _LOGGER.warning("Entity not initialized when deleted: %s", entity_id) entity_registry.async_remove(entity_id) await self._store.async_save(self.data) def get_entity_entries(self) -> list[er.RegistryEntry]: - """Get entity_ids of all configured entities by platform.""" + """Get entity_ids of all UI configured entities.""" entity_registry = er.async_get(self.hass) - + unique_ids = { + uid for platform in self.data["entities"].values() for uid in platform + } return [ registry_entry for registry_entry in er.async_entries_for_config_entry( entity_registry, self.config_entry.entry_id ) - if registry_entry.unique_id in self.entities + if registry_entry.unique_id in unique_ids ] diff --git a/homeassistant/components/knx/switch.py b/homeassistant/components/knx/switch.py index a5f430e6157..ebe930957d6 100644 --- a/homeassistant/components/knx/switch.py +++ b/homeassistant/components/knx/switch.py @@ -17,9 +17,12 @@ from homeassistant.const import ( STATE_UNKNOWN, Platform, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType @@ -32,7 +35,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity, KnxUIEntity +from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import SwitchSchema from .storage.const import ( CONF_DEVICE_INFO, @@ -51,8 +54,17 @@ async def async_setup_entry( ) -> None: """Set up switch(es) for KNX platform.""" knx_module: KNXModule = hass.data[DOMAIN] + platform = async_get_current_platform() + knx_module.config_store.add_platform( + platform=Platform.SWITCH, + controller=KnxUiEntityPlatformController( + knx_module=knx_module, + entity_platform=platform, + entity_class=KnxUiSwitch, + ), + ) - entities: list[KnxEntity] = [] + entities: list[KnxYamlEntity | KnxUiEntity] = [] if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): entities.extend( KnxYamlSwitch(knx_module, entity_config) @@ -66,13 +78,6 @@ async def async_setup_entry( if entities: async_add_entities(entities) - @callback - def add_new_ui_switch(unique_id: str, config: dict[str, Any]) -> None: - 
"""Add KNX entity at runtime.""" - async_add_entities([KnxUiSwitch(knx_module, unique_id, config)]) - - knx_module.config_store.async_add_entity[Platform.SWITCH] = add_new_ui_switch - class _KnxSwitch(SwitchEntity, RestoreEntity): """Base class for a KNX switch.""" @@ -102,7 +107,7 @@ class _KnxSwitch(SwitchEntity, RestoreEntity): await self._device.set_off() -class KnxYamlSwitch(_KnxSwitch, KnxEntity): +class KnxYamlSwitch(_KnxSwitch, KnxYamlEntity): """Representation of a KNX switch configured from YAML.""" _device: XknxSwitch @@ -125,7 +130,7 @@ class KnxYamlSwitch(_KnxSwitch, KnxEntity): self._attr_unique_id = str(self._device.switch.group_address) -class KnxUiSwitch(_KnxSwitch, KnxUIEntity): +class KnxUiSwitch(_KnxSwitch, KnxUiEntity): """Representation of a KNX switch configured from UI.""" _attr_has_entity_name = True @@ -134,21 +139,19 @@ class KnxUiSwitch(_KnxSwitch, KnxUIEntity): def __init__( self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] ) -> None: - """Initialize of KNX switch.""" - super().__init__( - knx_module=knx_module, - device=XknxSwitch( - knx_module.xknx, - name=config[CONF_ENTITY][CONF_NAME], - group_address=config[DOMAIN][CONF_GA_SWITCH][CONF_GA_WRITE], - group_address_state=[ - config[DOMAIN][CONF_GA_SWITCH][CONF_GA_STATE], - *config[DOMAIN][CONF_GA_SWITCH][CONF_GA_PASSIVE], - ], - respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ], - sync_state=config[DOMAIN][CONF_SYNC_STATE], - invert=config[DOMAIN][CONF_INVERT], - ), + """Initialize KNX switch.""" + self._knx_module = knx_module + self._device = XknxSwitch( + knx_module.xknx, + name=config[CONF_ENTITY][CONF_NAME], + group_address=config[DOMAIN][CONF_GA_SWITCH][CONF_GA_WRITE], + group_address_state=[ + config[DOMAIN][CONF_GA_SWITCH][CONF_GA_STATE], + *config[DOMAIN][CONF_GA_SWITCH][CONF_GA_PASSIVE], + ], + respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ], + sync_state=config[DOMAIN][CONF_SYNC_STATE], + invert=config[DOMAIN][CONF_INVERT], ) self._attr_entity_category = config[CONF_ENTITY][CONF_ENTITY_CATEGORY] self._attr_unique_id = unique_id diff --git a/homeassistant/components/knx/text.py b/homeassistant/components/knx/text.py index 9bca37434ac..381cb95ad32 100644 --- a/homeassistant/components/knx/text.py +++ b/homeassistant/components/knx/text.py @@ -30,7 +30,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -57,7 +57,7 @@ def _create_notification(xknx: XKNX, config: ConfigType) -> XknxNotification: ) -class KNXText(KnxEntity, TextEntity, RestoreEntity): +class KNXText(KnxYamlEntity, TextEntity, RestoreEntity): """Representation of a KNX text.""" _device: XknxNotification diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index 5d9225a1e41..b4e562a8869 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -31,7 +31,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -61,7 +61,7 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice: ) -class KNXTimeEntity(KnxEntity, TimeEntity, RestoreEntity): +class KNXTimeEntity(KnxYamlEntity, TimeEntity, RestoreEntity): """Representation of a KNX time.""" _device: XknxTimeDevice diff --git a/homeassistant/components/knx/weather.py b/homeassistant/components/knx/weather.py index 11dae452e2f..99f4be962fe 100644 --- 
a/homeassistant/components/knx/weather.py +++ b/homeassistant/components/knx/weather.py @@ -21,7 +21,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import WeatherSchema @@ -75,7 +75,7 @@ def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather: ) -class KNXWeather(KnxEntity, WeatherEntity): +class KNXWeather(KnxYamlEntity, WeatherEntity): """Representation of a KNX weather device.""" _device: XknxWeather From a6dfa6d4e01945e86ea1ec9168f6333f127976ac Mon Sep 17 00:00:00 2001 From: musapinar Date: Mon, 5 Aug 2024 14:21:01 +0200 Subject: [PATCH 0386/1635] Add Matter Leedarson RGBTW Bulb to the transition blocklist (#123182) --- homeassistant/components/matter/light.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index d05a7c85f9d..6e9019c46fa 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -51,18 +51,19 @@ DEFAULT_TRANSITION = 0.2 # hw version (attributeKey 0/40/8) # sw version (attributeKey 0/40/10) TRANSITION_BLOCKLIST = ( - (4488, 514, "1.0", "1.0.0"), - (4488, 260, "1.0", "1.0.0"), - (5010, 769, "3.0", "1.0.0"), - (4999, 24875, "1.0", "27.0"), - (4999, 25057, "1.0", "27.0"), - (4448, 36866, "V1", "V1.0.0.5"), - (5009, 514, "1.0", "1.0.0"), (4107, 8475, "v1.0", "v1.0"), (4107, 8550, "v1.0", "v1.0"), (4107, 8551, "v1.0", "v1.0"), - (4107, 8656, "v1.0", "v1.0"), (4107, 8571, "v1.0", "v1.0"), + (4107, 8656, "v1.0", "v1.0"), + (4448, 36866, "V1", "V1.0.0.5"), + (4456, 1011, "1.0.0", "2.00.00"), + (4488, 260, "1.0", "1.0.0"), + (4488, 514, "1.0", "1.0.0"), + (4999, 24875, "1.0", "27.0"), + (4999, 25057, "1.0", "27.0"), + (5009, 514, "1.0", "1.0.0"), + (5010, 769, "3.0", "1.0.0"), ) From b73ca874bb24d40d94cefa946d478df2a90bafeb Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 5 Aug 2024 14:23:07 +0200 Subject: [PATCH 0387/1635] Bump uvcclient to 0.11.1 (#123185) --- homeassistant/components/uvc/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 3 --- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index 57e798c3fa6..0553eba320a 100644 --- a/homeassistant/components/uvc/manifest.json +++ b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], - "requirements": ["uvcclient==0.11.0"] + "requirements": ["uvcclient==0.11.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9a0164f6c35..2e49dfc914f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2845,7 +2845,7 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.0 +uvcclient==0.11.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b419e6c6430..63bdf5d3cc5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2243,7 +2243,7 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.0 +uvcclient==0.11.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 diff --git a/script/licenses.py 
b/script/licenses.py index f6578bf3f65..281f440a02e 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -186,9 +186,6 @@ TODO = { "0.3.0" ), # No license https://github.com/kk7ds/mficlient/issues/4 "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) - "uvcclient": AwesomeVersion( - "0.11.0" - ), # No License https://github.com/kk7ds/uvcclient/issues/7 } From 96364f04527ef564f3d44b314f382967273d92d8 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 5 Aug 2024 14:43:39 +0200 Subject: [PATCH 0388/1635] Remove deprecated asterisk_cdr integration (#123180) --- .strict-typing | 1 - homeassistant/brands/asterisk.json | 5 -- .../components/asterisk_cdr/__init__.py | 1 - .../components/asterisk_cdr/mailbox.py | 70 ------------------- .../components/asterisk_cdr/manifest.json | 8 --- homeassistant/generated/integrations.json | 21 ++---- mypy.ini | 10 --- 7 files changed, 5 insertions(+), 111 deletions(-) delete mode 100644 homeassistant/brands/asterisk.json delete mode 100644 homeassistant/components/asterisk_cdr/__init__.py delete mode 100644 homeassistant/components/asterisk_cdr/mailbox.py delete mode 100644 homeassistant/components/asterisk_cdr/manifest.json diff --git a/.strict-typing b/.strict-typing index 02d9968d247..169a361262c 100644 --- a/.strict-typing +++ b/.strict-typing @@ -95,7 +95,6 @@ homeassistant.components.aruba.* homeassistant.components.arwn.* homeassistant.components.aseko_pool_live.* homeassistant.components.assist_pipeline.* -homeassistant.components.asterisk_cdr.* homeassistant.components.asterisk_mbox.* homeassistant.components.asuswrt.* homeassistant.components.autarco.* diff --git a/homeassistant/brands/asterisk.json b/homeassistant/brands/asterisk.json deleted file mode 100644 index 1df3e660afe..00000000000 --- a/homeassistant/brands/asterisk.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "domain": "asterisk", - "name": "Asterisk", - "integrations": ["asterisk_cdr", "asterisk_mbox"] -} diff --git a/homeassistant/components/asterisk_cdr/__init__.py b/homeassistant/components/asterisk_cdr/__init__.py deleted file mode 100644 index d681a392c56..00000000000 --- a/homeassistant/components/asterisk_cdr/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The asterisk_cdr component.""" diff --git a/homeassistant/components/asterisk_cdr/mailbox.py b/homeassistant/components/asterisk_cdr/mailbox.py deleted file mode 100644 index fde4826fcee..00000000000 --- a/homeassistant/components/asterisk_cdr/mailbox.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Support for the Asterisk CDR interface.""" - -from __future__ import annotations - -import datetime -import hashlib -from typing import Any - -from homeassistant.components.asterisk_mbox import ( - DOMAIN as ASTERISK_DOMAIN, - SIGNAL_CDR_UPDATE, -) -from homeassistant.components.mailbox import Mailbox -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -MAILBOX_NAME = "asterisk_cdr" - - -async def async_get_handler( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> Mailbox: - """Set up the Asterix CDR platform.""" - return AsteriskCDR(hass, MAILBOX_NAME) - - -class AsteriskCDR(Mailbox): - """Asterisk VM Call Data Record mailbox.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Asterisk CDR.""" - super().__init__(hass, name) - self.cdr: list[dict[str, Any]] = [] - 
async_dispatcher_connect(self.hass, SIGNAL_CDR_UPDATE, self._update_callback) - - @callback - def _update_callback(self, msg: list[dict[str, Any]]) -> Any: - """Update the message count in HA, if needed.""" - self._build_message() - self.async_update() - - def _build_message(self) -> None: - """Build message structure.""" - cdr: list[dict[str, Any]] = [] - for entry in self.hass.data[ASTERISK_DOMAIN].cdr: - timestamp = datetime.datetime.strptime( - entry["time"], "%Y-%m-%d %H:%M:%S" - ).timestamp() - info = { - "origtime": timestamp, - "callerid": entry["callerid"], - "duration": entry["duration"], - } - sha = hashlib.sha256(str(entry).encode("utf-8")).hexdigest() - msg = ( - f"Destination: {entry['dest']}\n" - f"Application: {entry['application']}\n " - f"Context: {entry['context']}" - ) - cdr.append({"info": info, "sha": sha, "text": msg}) - self.cdr = cdr - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - if not self.cdr: - self._build_message() - return self.cdr diff --git a/homeassistant/components/asterisk_cdr/manifest.json b/homeassistant/components/asterisk_cdr/manifest.json deleted file mode 100644 index 581b9dfb9a5..00000000000 --- a/homeassistant/components/asterisk_cdr/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "asterisk_cdr", - "name": "Asterisk Call Detail Records", - "codeowners": [], - "dependencies": ["asterisk_mbox"], - "documentation": "https://www.home-assistant.io/integrations/asterisk_cdr", - "iot_class": "local_polling" -} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 25a78e30017..55e7a89c669 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -502,22 +502,11 @@ "config_flow": false, "iot_class": "local_push" }, - "asterisk": { - "name": "Asterisk", - "integrations": { - "asterisk_cdr": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "Asterisk Call Detail Records" - }, - "asterisk_mbox": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push", - "name": "Asterisk Voicemail" - } - } + "asterisk_mbox": { + "name": "Asterisk Voicemail", + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_push" }, "asuswrt": { "name": "ASUSWRT", diff --git a/mypy.ini b/mypy.ini index 0f4f8907612..67755595e69 100644 --- a/mypy.ini +++ b/mypy.ini @@ -705,16 +705,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.asterisk_cdr.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.asterisk_mbox.*] check_untyped_defs = true disallow_incomplete_defs = true From ef237a8431752655423c48b99f626c42ec60e212 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 5 Aug 2024 15:07:01 +0200 Subject: [PATCH 0389/1635] Fix MPD issue creation (#123187) --- homeassistant/components/mpd/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/mpd/media_player.py b/homeassistant/components/mpd/media_player.py index 3538b1c7973..92f0f5cfcc4 100644 --- a/homeassistant/components/mpd/media_player.py +++ b/homeassistant/components/mpd/media_player.py @@ -86,7 +86,7 @@ async def 
async_setup_platform( ) if ( result["type"] is FlowResultType.CREATE_ENTRY - or result["reason"] == "single_instance_allowed" + or result["reason"] == "already_configured" ): async_create_issue( hass, From 42ab8d0445fc9d0c1e647916e86ac17437deffd9 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 5 Aug 2024 15:28:49 +0200 Subject: [PATCH 0390/1635] Remove deprecated asterisk_mbox integration (#123174) --- .strict-typing | 1 - .../components/asterisk_mbox/__init__.py | 153 ------------------ .../components/asterisk_mbox/mailbox.py | 86 ---------- .../components/asterisk_mbox/manifest.json | 9 -- .../components/asterisk_mbox/strings.json | 8 - homeassistant/generated/integrations.json | 6 - mypy.ini | 10 -- requirements_all.txt | 3 - requirements_test_all.txt | 3 - script/licenses.py | 3 - tests/components/asterisk_mbox/__init__.py | 1 - tests/components/asterisk_mbox/const.py | 12 -- tests/components/asterisk_mbox/test_init.py | 36 ----- 13 files changed, 331 deletions(-) delete mode 100644 homeassistant/components/asterisk_mbox/__init__.py delete mode 100644 homeassistant/components/asterisk_mbox/mailbox.py delete mode 100644 homeassistant/components/asterisk_mbox/manifest.json delete mode 100644 homeassistant/components/asterisk_mbox/strings.json delete mode 100644 tests/components/asterisk_mbox/__init__.py delete mode 100644 tests/components/asterisk_mbox/const.py delete mode 100644 tests/components/asterisk_mbox/test_init.py diff --git a/.strict-typing b/.strict-typing index 169a361262c..1eec42ad209 100644 --- a/.strict-typing +++ b/.strict-typing @@ -95,7 +95,6 @@ homeassistant.components.aruba.* homeassistant.components.arwn.* homeassistant.components.aseko_pool_live.* homeassistant.components.assist_pipeline.* -homeassistant.components.asterisk_mbox.* homeassistant.components.asuswrt.* homeassistant.components.autarco.* homeassistant.components.auth.* diff --git a/homeassistant/components/asterisk_mbox/__init__.py b/homeassistant/components/asterisk_mbox/__init__.py deleted file mode 100644 index 3e3913b7d42..00000000000 --- a/homeassistant/components/asterisk_mbox/__init__.py +++ /dev/null @@ -1,153 +0,0 @@ -"""Support for Asterisk Voicemail interface.""" - -import logging -from typing import Any, cast - -from asterisk_mbox import Client as asteriskClient -from asterisk_mbox.commands import ( - CMD_MESSAGE_CDR, - CMD_MESSAGE_CDR_AVAILABLE, - CMD_MESSAGE_LIST, -) -import voluptuous as vol - -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import discovery -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_connect -from homeassistant.helpers.issue_registry import IssueSeverity, create_issue -from homeassistant.helpers.typing import ConfigType - -_LOGGER = logging.getLogger(__name__) - -DOMAIN = "asterisk_mbox" - -SIGNAL_DISCOVER_PLATFORM = "asterisk_mbox.discover_platform" -SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request" -SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated" -SIGNAL_CDR_UPDATE = "asterisk_mbox.message_updated" -SIGNAL_CDR_REQUEST = "asterisk_mbox.message_request" - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_PORT): cv.port, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -def setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up for the 
Asterisk Voicemail box.""" - conf: dict[str, Any] = config[DOMAIN] - - host: str = conf[CONF_HOST] - port: int = conf[CONF_PORT] - password: str = conf[CONF_PASSWORD] - - hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config) - create_issue( - hass, - DOMAIN, - "deprecated_integration", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_integration", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Asterisk Voicemail", - "mailbox": "mailbox", - }, - ) - - return True - - -class AsteriskData: - """Store Asterisk mailbox data.""" - - def __init__( - self, - hass: HomeAssistant, - host: str, - port: int, - password: str, - config: dict[str, Any], - ) -> None: - """Init the Asterisk data object.""" - - self.hass = hass - self.config = config - self.messages: list[dict[str, Any]] | None = None - self.cdr: list[dict[str, Any]] | None = None - - dispatcher_connect(self.hass, SIGNAL_MESSAGE_REQUEST, self._request_messages) - dispatcher_connect(self.hass, SIGNAL_CDR_REQUEST, self._request_cdr) - dispatcher_connect(self.hass, SIGNAL_DISCOVER_PLATFORM, self._discover_platform) - # Only connect after signal connection to ensure we don't miss any - self.client = asteriskClient(host, port, password, self.handle_data) - - @callback - def _discover_platform(self, component: str) -> None: - _LOGGER.debug("Adding mailbox %s", component) - self.hass.async_create_task( - discovery.async_load_platform( - self.hass, "mailbox", component, {}, self.config - ) - ) - - @callback - def handle_data( - self, command: int, msg: list[dict[str, Any]] | dict[str, Any] - ) -> None: - """Handle changes to the mailbox.""" - - if command == CMD_MESSAGE_LIST: - msg = cast(list[dict[str, Any]], msg) - _LOGGER.debug("AsteriskVM sent updated message list: Len %d", len(msg)) - old_messages = self.messages - self.messages = sorted( - msg, key=lambda item: item["info"]["origtime"], reverse=True - ) - if not isinstance(old_messages, list): - async_dispatcher_send(self.hass, SIGNAL_DISCOVER_PLATFORM, DOMAIN) - async_dispatcher_send(self.hass, SIGNAL_MESSAGE_UPDATE, self.messages) - elif command == CMD_MESSAGE_CDR: - msg = cast(dict[str, Any], msg) - _LOGGER.debug( - "AsteriskVM sent updated CDR list: Len %d", len(msg.get("entries", [])) - ) - self.cdr = msg["entries"] - async_dispatcher_send(self.hass, SIGNAL_CDR_UPDATE, self.cdr) - elif command == CMD_MESSAGE_CDR_AVAILABLE: - if not isinstance(self.cdr, list): - _LOGGER.debug("AsteriskVM adding CDR platform") - self.cdr = [] - async_dispatcher_send( - self.hass, SIGNAL_DISCOVER_PLATFORM, "asterisk_cdr" - ) - async_dispatcher_send(self.hass, SIGNAL_CDR_REQUEST) - else: - _LOGGER.debug( - "AsteriskVM sent unknown message '%d' len: %d", command, len(msg) - ) - - @callback - def _request_messages(self) -> None: - """Handle changes to the mailbox.""" - _LOGGER.debug("Requesting message list") - self.client.messages() - - @callback - def _request_cdr(self) -> None: - """Handle changes to the CDR.""" - _LOGGER.debug("Requesting CDR list") - self.client.get_cdr() diff --git a/homeassistant/components/asterisk_mbox/mailbox.py b/homeassistant/components/asterisk_mbox/mailbox.py deleted file mode 100644 index 14d54596eea..00000000000 --- a/homeassistant/components/asterisk_mbox/mailbox.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Support for the Asterisk Voicemail interface.""" - -from __future__ import annotations - -from functools import partial -import logging -from 
typing import Any - -from asterisk_mbox import ServerError - -from homeassistant.components.mailbox import CONTENT_TYPE_MPEG, Mailbox, StreamError -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . import DOMAIN as ASTERISK_DOMAIN, AsteriskData - -_LOGGER = logging.getLogger(__name__) - -SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request" -SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated" - - -async def async_get_handler( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> Mailbox: - """Set up the Asterix VM platform.""" - return AsteriskMailbox(hass, ASTERISK_DOMAIN) - - -class AsteriskMailbox(Mailbox): - """Asterisk VM Sensor.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Asterisk mailbox.""" - super().__init__(hass, name) - async_dispatcher_connect( - self.hass, SIGNAL_MESSAGE_UPDATE, self._update_callback - ) - - @callback - def _update_callback(self, msg: str) -> None: - """Update the message count in HA, if needed.""" - self.async_update() - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - - data: AsteriskData = self.hass.data[ASTERISK_DOMAIN] - client = data.client - try: - return await self.hass.async_add_executor_job( - partial(client.mp3, msgid, sync=True) - ) - except ServerError as err: - raise StreamError(err) from err - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - data: AsteriskData = self.hass.data[ASTERISK_DOMAIN] - return data.messages or [] - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - data: AsteriskData = self.hass.data[ASTERISK_DOMAIN] - client = data.client - _LOGGER.info("Deleting: %s", msgid) - await self.hass.async_add_executor_job(client.delete, msgid) - return True diff --git a/homeassistant/components/asterisk_mbox/manifest.json b/homeassistant/components/asterisk_mbox/manifest.json deleted file mode 100644 index 8348e40ba6b..00000000000 --- a/homeassistant/components/asterisk_mbox/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "asterisk_mbox", - "name": "Asterisk Voicemail", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/asterisk_mbox", - "iot_class": "local_push", - "loggers": ["asterisk_mbox"], - "requirements": ["asterisk_mbox==0.5.0"] -} diff --git a/homeassistant/components/asterisk_mbox/strings.json b/homeassistant/components/asterisk_mbox/strings.json deleted file mode 100644 index fb6c0637a64..00000000000 --- a/homeassistant/components/asterisk_mbox/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "deprecated_integration": { - "title": "The {integration_title} is being removed", - "description": "{integration_title} is being removed as the `{mailbox}` platform is being removed and {integration_title} supports no other platforms. Remove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." 
- } - } -} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 55e7a89c669..673e37b3c12 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -502,12 +502,6 @@ "config_flow": false, "iot_class": "local_push" }, - "asterisk_mbox": { - "name": "Asterisk Voicemail", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "asuswrt": { "name": "ASUSWRT", "integration_type": "hub", diff --git a/mypy.ini b/mypy.ini index 67755595e69..c5478689702 100644 --- a/mypy.ini +++ b/mypy.ini @@ -705,16 +705,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.asterisk_mbox.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.asuswrt.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 2e49dfc914f..f373c0fe7e3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -481,9 +481,6 @@ arris-tg2492lg==2.2.0 # homeassistant.components.ampio asmog==0.0.6 -# homeassistant.components.asterisk_mbox -asterisk_mbox==0.5.0 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 63bdf5d3cc5..529232f7847 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -445,9 +445,6 @@ aranet4==2.3.4 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 -# homeassistant.components.asterisk_mbox -asterisk_mbox==0.5.0 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv diff --git a/script/licenses.py b/script/licenses.py index 281f440a02e..e612b96794c 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -179,9 +179,6 @@ TODO = { "aiocache": AwesomeVersion( "0.12.2" ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? 
- "asterisk_mbox": AwesomeVersion( - "0.5.0" - ), # No license, integration is deprecated and scheduled for removal in 2024.9.0 "mficlient": AwesomeVersion( "0.3.0" ), # No license https://github.com/kk7ds/mficlient/issues/4 diff --git a/tests/components/asterisk_mbox/__init__.py b/tests/components/asterisk_mbox/__init__.py deleted file mode 100644 index 79e3675ad07..00000000000 --- a/tests/components/asterisk_mbox/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the asterisk component.""" diff --git a/tests/components/asterisk_mbox/const.py b/tests/components/asterisk_mbox/const.py deleted file mode 100644 index 945c6b28d30..00000000000 --- a/tests/components/asterisk_mbox/const.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Asterisk tests constants.""" - -from homeassistant.components.asterisk_mbox import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT - -CONFIG = { - DOMAIN: { - CONF_HOST: "localhost", - CONF_PASSWORD: "password", - CONF_PORT: 1234, - } -} diff --git a/tests/components/asterisk_mbox/test_init.py b/tests/components/asterisk_mbox/test_init.py deleted file mode 100644 index d7567ea3286..00000000000 --- a/tests/components/asterisk_mbox/test_init.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Test mailbox.""" - -from collections.abc import Generator -from unittest.mock import Mock, patch - -import pytest - -from homeassistant.components.asterisk_mbox import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .const import CONFIG - - -@pytest.fixture -def client() -> Generator[Mock]: - """Mock client.""" - with patch( - "homeassistant.components.asterisk_mbox.asteriskClient", autospec=True - ) as client: - yield client - - -async def test_repair_issue_is_created( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - client: Mock, -) -> None: - """Test repair issue is created.""" - assert await async_setup_component(hass, DOMAIN, CONFIG) - await hass.async_block_till_done() - assert ( - DOMAIN, - "deprecated_integration", - ) in issue_registry.issues From b223931ac0688d559a17cbd6e4d649d99dcbb709 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Mon, 5 Aug 2024 15:44:00 +0200 Subject: [PATCH 0391/1635] Remove deprecated proximity entity (#123158) --- .../components/proximity/__init__.py | 170 +---- homeassistant/components/proximity/helpers.py | 12 - .../components/proximity/strings.json | 11 - tests/components/proximity/test_init.py | 581 ++++-------------- 4 files changed, 119 insertions(+), 655 deletions(-) delete mode 100644 homeassistant/components/proximity/helpers.py diff --git a/homeassistant/components/proximity/__init__.py b/homeassistant/components/proximity/__init__.py index 813686789a2..763274243c5 100644 --- a/homeassistant/components/proximity/__init__.py +++ b/homeassistant/components/proximity/__init__.py @@ -3,137 +3,20 @@ from __future__ import annotations import logging -from typing import cast -import voluptuous as vol - -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_DEVICES, - CONF_NAME, - CONF_UNIT_OF_MEASUREMENT, - CONF_ZONE, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.config_validation as cv +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import 
HomeAssistant from homeassistant.helpers.event import ( async_track_entity_registry_updated_event, async_track_state_change_event, ) -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ( - ATTR_DIR_OF_TRAVEL, - ATTR_DIST_TO, - ATTR_NEAREST, - CONF_IGNORED_ZONES, - CONF_TOLERANCE, - CONF_TRACKED_ENTITIES, - DEFAULT_PROXIMITY_ZONE, - DEFAULT_TOLERANCE, - DOMAIN, - UNITS, -) +from .const import CONF_TRACKED_ENTITIES from .coordinator import ProximityConfigEntry, ProximityDataUpdateCoordinator -from .helpers import entity_used_in _LOGGER = logging.getLogger(__name__) -ZONE_SCHEMA = vol.Schema( - { - vol.Optional(CONF_ZONE, default=DEFAULT_PROXIMITY_ZONE): cv.string, - vol.Optional(CONF_DEVICES, default=[]): vol.All(cv.ensure_list, [cv.entity_id]), - vol.Optional(CONF_IGNORED_ZONES, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(CONF_TOLERANCE, default=DEFAULT_TOLERANCE): cv.positive_int, - vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.All(cv.string, vol.In(UNITS)), - } -) - -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - {DOMAIN: cv.schema_with_slug_keys(ZONE_SCHEMA)}, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def _async_setup_legacy( - hass: HomeAssistant, - entry: ProximityConfigEntry, - coordinator: ProximityDataUpdateCoordinator, -) -> None: - """Legacy proximity entity handling, can be removed in 2024.8.""" - friendly_name = entry.data[CONF_NAME] - proximity = Proximity(hass, friendly_name, coordinator) - await proximity.async_added_to_hass() - proximity.async_write_ha_state() - - if used_in := entity_used_in(hass, f"{DOMAIN}.{friendly_name}"): - async_create_issue( - hass, - DOMAIN, - f"deprecated_proximity_entity_{friendly_name}", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_proximity_entity", - translation_placeholders={ - "entity": f"{DOMAIN}.{friendly_name}", - "used_in": "\n- ".join([f"`{x}`" for x in used_in]), - }, - ) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Get the zones and offsets from configuration.yaml.""" - if DOMAIN in config: - for friendly_name, proximity_config in config[DOMAIN].items(): - _LOGGER.debug("import %s with config:%s", friendly_name, proximity_config) - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_NAME: friendly_name, - CONF_ZONE: f"zone.{proximity_config[CONF_ZONE]}", - CONF_TRACKED_ENTITIES: proximity_config[CONF_DEVICES], - CONF_IGNORED_ZONES: [ - f"zone.{zone}" - for zone in proximity_config[CONF_IGNORED_ZONES] - ], - CONF_TOLERANCE: proximity_config[CONF_TOLERANCE], - CONF_UNIT_OF_MEASUREMENT: proximity_config.get( - CONF_UNIT_OF_MEASUREMENT, hass.config.units.length_unit - ), - }, - ) - ) - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Proximity", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ProximityConfigEntry) -> bool: """Set up Proximity from a config entry.""" @@ -160,9 +43,6 @@ async def async_setup_entry(hass: 
HomeAssistant, entry: ProximityConfigEntry) -> await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator - if entry.source == SOURCE_IMPORT: - await _async_setup_legacy(hass, entry, coordinator) - await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR]) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) return True @@ -176,45 +56,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -class Proximity(CoordinatorEntity[ProximityDataUpdateCoordinator]): - """Representation of a Proximity.""" - - # This entity is legacy and does not have a platform. - # We can't fix this easily without breaking changes. - _no_platform_reported = True - - def __init__( - self, - hass: HomeAssistant, - friendly_name: str, - coordinator: ProximityDataUpdateCoordinator, - ) -> None: - """Initialize the proximity.""" - super().__init__(coordinator) - self.hass = hass - self.entity_id = f"{DOMAIN}.{friendly_name}" - - self._attr_name = friendly_name - self._attr_unit_of_measurement = self.coordinator.unit_of_measurement - - @property - def data(self) -> dict[str, str | int | None]: - """Get data from coordinator.""" - return self.coordinator.data.proximity - - @property - def state(self) -> str | float: - """Return the state.""" - if isinstance(distance := self.data[ATTR_DIST_TO], str): - return distance - return self.coordinator.convert_legacy(cast(int, distance)) - - @property - def extra_state_attributes(self) -> dict[str, str]: - """Return the state attributes.""" - return { - ATTR_DIR_OF_TRAVEL: str(self.data[ATTR_DIR_OF_TRAVEL] or STATE_UNKNOWN), - ATTR_NEAREST: str(self.data[ATTR_NEAREST]), - } diff --git a/homeassistant/components/proximity/helpers.py b/homeassistant/components/proximity/helpers.py deleted file mode 100644 index af3d6d2a3bb..00000000000 --- a/homeassistant/components/proximity/helpers.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Helper functions for proximity.""" - -from homeassistant.components.automation import automations_with_entity -from homeassistant.components.script import scripts_with_entity -from homeassistant.core import HomeAssistant - - -def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: - """Get list of related automations and scripts.""" - used_in = automations_with_entity(hass, entity_id) - used_in += scripts_with_entity(hass, entity_id) - return used_in diff --git a/homeassistant/components/proximity/strings.json b/homeassistant/components/proximity/strings.json index 72c95eeeeae..118004e908e 100644 --- a/homeassistant/components/proximity/strings.json +++ b/homeassistant/components/proximity/strings.json @@ -55,17 +55,6 @@ } }, "issues": { - "deprecated_proximity_entity": { - "title": "The proximity entity is deprecated", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::proximity::issues::deprecated_proximity_entity::title%]", - "description": "The proximity entity `{entity}` is deprecated and will be removed in `2024.8`. However it is used within the following configurations:\n- {used_in}\n\nPlease adjust any automations or scripts that use this deprecated Proximity entity.\nFor each tracked person or device one sensor for the distance and the direction of travel to/from the monitored zone is created. 
Additionally for each Proximity configuration one sensor which shows the nearest device or person to the monitored zone is created. With this you can use the Min/Max integration to determine the nearest and furthest distance."
-          }
-        }
-      }
-    },
     "tracked_entity_removed": {
       "title": "Tracked entity has been removed",
       "fix_flow": {
diff --git a/tests/components/proximity/test_init.py b/tests/components/proximity/test_init.py
index 6c2b54cae29..456d6577c04 100644
--- a/tests/components/proximity/test_init.py
+++ b/tests/components/proximity/test_init.py
@@ -2,15 +2,12 @@
 
 import pytest
 
-from homeassistant.components import automation, script
-from homeassistant.components.automation import automations_with_entity
 from homeassistant.components.proximity.const import (
     CONF_IGNORED_ZONES,
     CONF_TOLERANCE,
     CONF_TRACKED_ENTITIES,
     DOMAIN,
 )
-from homeassistant.components.script import scripts_with_entity
 from homeassistant.const import (
     ATTR_FRIENDLY_NAME,
     CONF_ZONE,
@@ -20,109 +17,81 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 import homeassistant.helpers.issue_registry as ir
-from homeassistant.setup import async_setup_component
 from homeassistant.util import slugify
 
 from tests.common import MockConfigEntry
 
 
+async def async_setup_single_entry(
+    hass: HomeAssistant,
+    zone: str,
+    tracked_entities: list[str],
+    ignored_zones: list[str],
+    tolerance: int,
+) -> MockConfigEntry:
+    """Set up the proximity component with a single entry."""
+    mock_config = MockConfigEntry(
+        domain=DOMAIN,
+        title="Home",
+        data={
+            CONF_ZONE: zone,
+            CONF_TRACKED_ENTITIES: tracked_entities,
+            CONF_IGNORED_ZONES: ignored_zones,
+            CONF_TOLERANCE: tolerance,
+        },
+    )
+    mock_config.add_to_hass(hass)
+    assert await hass.config_entries.async_setup(mock_config.entry_id)
+    await hass.async_block_till_done()
+    return mock_config
+
+
 @pytest.mark.parametrize(
-    ("friendly_name", "config"),
+    "config",
     [
-        (
-            "home",
-            {
-                "ignored_zones": ["work"],
-                "devices": ["device_tracker.test1", "device_tracker.test2"],
-                "tolerance": "1",
-            },
-        ),
-        (
-            "work",
-            {
-                "devices": ["device_tracker.test1"],
-                "tolerance": "1",
-                "zone": "work",
-            },
-        ),
+        {
+            CONF_IGNORED_ZONES: ["zone.work"],
+            CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"],
+            CONF_TOLERANCE: 1,
+            CONF_ZONE: "zone.home",
+        },
+        {
+            CONF_IGNORED_ZONES: [],
+            CONF_TRACKED_ENTITIES: ["device_tracker.test1"],
+            CONF_TOLERANCE: 1,
+            CONF_ZONE: "zone.work",
+        },
     ],
 )
-async def test_proximities(
-    hass: HomeAssistant, friendly_name: str, config: dict
-) -> None:
+async def test_proximities(hass: HomeAssistant, config: dict) -> None:
     """Test a list of proximities."""
-    assert await async_setup_component(
-        hass, DOMAIN, {"proximity": {friendly_name: config}}
+    title = hass.states.get(config[CONF_ZONE]).name
+    mock_config = MockConfigEntry(
+        domain=DOMAIN,
+        title=title,
+        data=config,
     )
+    mock_config.add_to_hass(hass)
+    assert await hass.config_entries.async_setup(mock_config.entry_id)
     await hass.async_block_till_done()
-    # proximity entity
-    state = hass.states.get(f"proximity.{friendly_name}")
-    assert state.state == "not set"
-    assert state.attributes.get("nearest") == "not set"
-    assert state.attributes.get("dir_of_travel") == "not set"
-    hass.states.async_set(f"proximity.{friendly_name}", "0")
-    await hass.async_block_till_done()
-    state = hass.states.get(f"proximity.{friendly_name}")
-    assert state.state == "0"
+    zone_name = slugify(title)
 
     # sensor
entities - state = hass.states.get(f"sensor.{friendly_name}_nearest_device") + state = hass.states.get(f"sensor.{zone_name}_nearest_device") assert state.state == STATE_UNKNOWN - for device in config["devices"]: - entity_base_name = f"sensor.{friendly_name}_{slugify(device.split('.')[-1])}" + for device in config[CONF_TRACKED_ENTITIES]: + entity_base_name = f"sensor.{zone_name}_{slugify(device.split('.')[-1])}" state = hass.states.get(f"{entity_base_name}_distance") assert state.state == STATE_UNAVAILABLE state = hass.states.get(f"{entity_base_name}_direction_of_travel") assert state.state == STATE_UNAVAILABLE -async def test_legacy_setup(hass: HomeAssistant) -> None: - """Test legacy setup only on imported entries.""" - config = { - "proximity": { - "home": { - "devices": ["device_tracker.test1"], - "tolerance": "1", - }, - } - } - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - assert hass.states.get("proximity.home") - - mock_config = MockConfigEntry( - domain=DOMAIN, - title="work", - data={ - CONF_ZONE: "zone.work", - CONF_TRACKED_ENTITIES: ["device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_work", - ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - - assert not hass.states.get("proximity.work") - - async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: """Test for tracker in zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -131,12 +100,6 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "0" - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "arrived" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -150,17 +113,7 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: """Test for tracker state away.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -170,11 +123,6 @@ async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -190,20 +138,7 @@ async def test_device_tracker_test1_awayfurther( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away further.""" - - await hass.async_block_till_done() - - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - 
"tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -212,11 +147,6 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -234,11 +164,6 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "away_from" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -254,19 +179,7 @@ async def test_device_tracker_test1_awaycloser( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away closer.""" - await hass.async_block_till_done() - - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -275,11 +188,6 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -297,11 +205,6 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "towards" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -315,27 +218,11 @@ async def test_device_tracker_test1_awaycloser( async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: """Test for tracker in ignored zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "not set" - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -349,28 +236,13 @@ async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: """Test for tracker with no coordinates.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": 
["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -384,19 +256,8 @@ async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> None: """Test for tracker states.""" - assert await async_setup_component( - hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": 1000, - "zone": "home", - } - } - }, + await async_setup_single_entry( + hass, "zone.home", ["device_tracker.test1"], ["zone.work"], 1000 ) hass.states.async_set( @@ -406,11 +267,6 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -428,11 +284,6 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "stationary" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -446,17 +297,13 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: """Test for trackers in zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry( + hass, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, + ) hass.states.async_set( "device_tracker.test1", @@ -471,14 +318,6 @@ async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "0" - assert (state.attributes.get("nearest") == "test1, test2") or ( - state.attributes.get("nearest") == "test2, test1" - ) - assert state.attributes.get("dir_of_travel") == "arrived" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1, test2" @@ -495,30 +334,18 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": 
"test2"} ) - await hass.async_block_till_done() - - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -528,11 +355,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -556,11 +378,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -582,28 +399,19 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - assert await async_setup_component( + + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -613,11 +421,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -641,11 +444,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -670,23 +468,15 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set("device_tracker.test2", "work", {"friendly_name": "test2"}) - await hass.async_block_till_done() - assert await async_setup_component( - hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, - ) + await async_setup_single_entry( + hass, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], 
+        ["zone.work"], +        1, +    ) hass.states.async_set( "device_tracker.test1", "not_home", @@ -694,11 +484,6 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( ) await hass.async_block_till_done() -    # proximity entity -    state = hass.states.get("proximity.home") -    assert state.attributes.get("nearest") == "test1" -    assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -720,29 +505,19 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state.""" -    await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) -    await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) -    await hass.async_block_till_done() - -    assert await async_setup_component( +    await async_setup_single_entry( hass, -        DOMAIN, -        { -            "proximity": { -                "home": { -                    "ignored_zones": ["work"], -                    "devices": ["device_tracker.test1", "device_tracker.test2"], -                    "zone": "home", -                } -            } -        }, +        "zone.home", +        ["device_tracker.test1", "device_tracker.test2"], +        ["zone.work"], +        1, ) hass.states.async_set( @@ -776,11 +551,6 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() -    # proximity entity -    state = hass.states.get("proximity.home") -    assert state.attributes.get("nearest") == "test2" -    assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -803,7 +573,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) -> None: """Test for tracker states.""" await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) @@ -813,20 +582,13 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() -    assert await async_setup_component( +    await async_setup_single_entry( hass, -        DOMAIN, -        { -            "proximity": { -                "home": { -                    "ignored_zones": ["work"], -                    "devices": ["device_tracker.test1", "device_tracker.test2"], -                    "zone": "home", -                } -            } -        }, +        "zone.home", +        ["device_tracker.test1", "device_tracker.test2"], +        ["zone.work"], +        1, ) hass.states.async_set( "device_tracker.test1", "not_home", @@ -834,11 +611,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() -    # proximity entity -    state = hass.states.get("proximity.home") -    assert state.attributes.get("nearest") == "test1" -    assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -862,11 +634,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() -    # proximity entity -    state = hass.states.get("proximity.home") -    assert state.attributes.get("nearest") == "test2" -    assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities
state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -890,11 +657,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -914,22 +676,10 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: """Test for nearest sensors.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + await async_setup_single_entry( + hass, "zone.home", ["device_tracker.test1", "device_tracker.test2"], [], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", @@ -1038,71 +788,6 @@ async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: assert state.state == STATE_UNKNOWN -async def test_create_deprecated_proximity_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we create an issue for deprecated proximity entities used in automations and scripts.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "alias": "test", - "trigger": {"platform": "state", "entity_id": "proximity.home"}, - "action": { - "service": "automation.turn_on", - "target": {"entity_id": "automation.test"}, - }, - } - }, - ) - assert await async_setup_component( - hass, - script.DOMAIN, - { - script.DOMAIN: { - "test": { - "sequence": [ - { - "condition": "state", - "entity_id": "proximity.home", - "state": "home", - }, - ], - } - } - }, - ) - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - }, - "work": {"tolerance": "1", "zone": "work"}, - } - } - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - automation_entities = automations_with_entity(hass, "proximity.home") - assert len(automation_entities) == 1 - assert automation_entities[0] == "automation.test" - - script_entites = scripts_with_entity(hass, "proximity.home") - - assert len(script_entites) == 1 - assert script_entites[0] == "script.test" - assert issue_registry.async_get_issue(DOMAIN, "deprecated_proximity_entity_home") - - assert not issue_registry.async_get_issue( - DOMAIN, "deprecated_proximity_entity_work" - ) - - async def test_create_removed_tracked_entity_issue( hass: HomeAssistant, issue_registry: ir.IssueRegistry, @@ -1119,22 +804,10 @@ async def test_create_removed_tracked_entity_issue( hass.states.async_set(t1.entity_id, "not_home") hass.states.async_set(t2.entity_id, "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id, t2.entity_id], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + await async_setup_single_entry( + hass, "zone.home", [t1.entity_id, t2.entity_id], [], 1 ) - 
mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" sensor_t2 = f"sensor.home_{t2.entity_id.split('.')[-1]}_distance" @@ -1168,22 +841,10 @@ async def test_track_renamed_tracked_entity( hass.states.async_set(t1.entity_id, "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + mock_config = await async_setup_single_entry( + hass, "zone.home", [t1.entity_id], ["zone.work"], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" entity = entity_registry.async_get(sensor_t1) @@ -1216,28 +877,16 @@ async def test_sensor_unique_ids( hass.states.async_set("device_tracker.test2", "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id, "device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + mock_config = await async_setup_single_entry( + hass, "zone.home", [t1.entity_id, "device_tracker.test2"], ["zone.work"], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = "sensor.home_test_tracker_1_distance" entity = entity_registry.async_get(sensor_t1) assert entity assert entity.unique_id == f"{mock_config.entry_id}_{t1.id}_dist_to_zone" state = hass.states.get(sensor_t1) - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "home Test tracker 1 Distance" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Home Test tracker 1 Distance" entity = entity_registry.async_get("sensor.home_test2_distance") assert entity From 94542d42fae3c11f400cdb6367d9bb24bdf40637 Mon Sep 17 00:00:00 2001 From: Marius <33354141+Mariusthvdb@users.noreply.github.com> Date: Mon, 5 Aug 2024 17:17:46 +0200 Subject: [PATCH 0392/1635] Fix state icon for closed valve entities (#123190) --- homeassistant/components/valve/icons.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 1261d1cc398..2c887ebf273 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -3,7 +3,7 @@ "_": { "default": "mdi:valve-open", "state": { - "off": "mdi:valve-closed" + "closed": "mdi:valve-closed" } }, "gas": { @@ -12,7 +12,7 @@ "water": { "default": "mdi:valve-open", "state": { - "off": "mdi:valve-closed" + "closed": "mdi:valve-closed" } } }, From 844ccf461f9585d5381cab07296f61815adb89a3 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Mon, 5 Aug 2024 17:44:14 +0200 Subject: [PATCH 0393/1635] Remove unnecessary config schema definitions (#123197) --- homeassistant/components/airvisual/__init__.py | 3 --- .../components/ambient_station/__init__.py | 3 --- .../components/android_ip_webcam/__init__.py | 4 ---- homeassistant/components/apcupsd/__init__.py | 3 --- homeassistant/components/arcam_fmj/__init__.py | 3 --- .../components/bmw_connected_drive/__init__.py | 2 -- homeassistant/components/cast/__init__.py | 3 +-- 
homeassistant/components/cloudflare/__init__.py | 3 --- homeassistant/components/coinbase/__init__.py | 5 +---- homeassistant/components/daikin/__init__.py | 3 --- homeassistant/components/directv/__init__.py | 3 --- homeassistant/components/glances/__init__.py | 2 -- .../hunterdouglas_powerview/__init__.py | 2 -- homeassistant/components/icloud/__init__.py | 2 -- .../components/islamic_prayer_times/__init__.py | 4 +--- homeassistant/components/local_ip/__init__.py | 5 +---- homeassistant/components/luftdaten/__init__.py | 3 --- homeassistant/components/mikrotik/__init__.py | 4 +--- homeassistant/components/mysensors/__init__.py | 4 ---- homeassistant/components/nexia/__init__.py | 3 --- homeassistant/components/nextcloud/__init__.py | 4 +--- homeassistant/components/notion/__init__.py | 7 +------ homeassistant/components/nuheat/__init__.py | 3 --- homeassistant/components/nzbget/__init__.py | 1 - homeassistant/components/pi_hole/__init__.py | 3 +-- homeassistant/components/powerwall/__init__.py | 3 --- .../components/pvpc_hourly_pricing/__init__.py | 2 -- homeassistant/components/rachio/__init__.py | 3 --- .../components/rainmachine/__init__.py | 1 - homeassistant/components/roku/__init__.py | 3 --- homeassistant/components/samsungtv/__init__.py | 7 +------ homeassistant/components/sentry/__init__.py | 5 +---- homeassistant/components/simplisafe/__init__.py | 2 -- homeassistant/components/solaredge/__init__.py | 5 +---- .../components/somfy_mylink/__init__.py | 3 --- .../components/synology_dsm/__init__.py | 5 +---- .../components/tankerkoenig/__init__.py | 3 --- .../components/totalconnect/__init__.py | 3 --- homeassistant/components/tradfri/__init__.py | 2 -- homeassistant/components/upnp/__init__.py | 3 +-- homeassistant/components/verisure/__init__.py | 3 --- homeassistant/components/vesync/__init__.py | 3 --- .../ambient_station/test_config_flow.py | 2 +- .../islamic_prayer_times/test_config_flow.py | 5 ++--- .../islamic_prayer_times/test_init.py | 17 ++++++++--------- tests/components/local_ip/test_init.py | 2 +- tests/components/nextcloud/test_config_flow.py | 2 +- tests/components/sentry/conftest.py | 2 +- 48 files changed, 27 insertions(+), 141 deletions(-) diff --git a/homeassistant/components/airvisual/__init__.py b/homeassistant/components/airvisual/__init__.py index 4d0563ddce8..60fdbf12ca1 100644 --- a/homeassistant/components/airvisual/__init__.py +++ b/homeassistant/components/airvisual/__init__.py @@ -31,7 +31,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import ( aiohttp_client, - config_validation as cv, device_registry as dr, entity_registry as er, ) @@ -62,8 +61,6 @@ PLATFORMS = [Platform.SENSOR] DEFAULT_ATTRIBUTION = "Data provided by AirVisual" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - @callback def async_get_cloud_api_update_interval( diff --git a/homeassistant/components/ambient_station/__init__.py b/homeassistant/components/ambient_station/__init__.py index d0b04e53e67..469ad7e6e06 100644 --- a/homeassistant/components/ambient_station/__init__.py +++ b/homeassistant/components/ambient_station/__init__.py @@ -17,7 +17,6 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv import homeassistant.helpers.device_registry as dr from homeassistant.helpers.dispatcher import 
async_dispatcher_send import homeassistant.helpers.entity_registry as er @@ -25,7 +24,6 @@ import homeassistant.helpers.entity_registry as er from .const import ( ATTR_LAST_DATA, CONF_APP_KEY, - DOMAIN, LOGGER, TYPE_SOLARRADIATION, TYPE_SOLARRADIATION_LX, @@ -37,7 +35,6 @@ DATA_CONFIG = "config" DEFAULT_SOCKET_MIN_RETRY = 15 -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) type AmbientStationConfigEntry = ConfigEntry[AmbientStation] diff --git a/homeassistant/components/android_ip_webcam/__init__.py b/homeassistant/components/android_ip_webcam/__init__.py index db50d6d3e1a..3772fe4642b 100644 --- a/homeassistant/components/android_ip_webcam/__init__.py +++ b/homeassistant/components/android_ip_webcam/__init__.py @@ -14,7 +14,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from .const import DOMAIN from .coordinator import AndroidIPCamDataUpdateCoordinator @@ -27,9 +26,6 @@ PLATFORMS: list[Platform] = [ ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Android IP Webcam from a config entry.""" websession = async_get_clientsession(hass) diff --git a/homeassistant/components/apcupsd/__init__.py b/homeassistant/components/apcupsd/__init__.py index 73ed721158d..7293a42f7e7 100644 --- a/homeassistant/components/apcupsd/__init__.py +++ b/homeassistant/components/apcupsd/__init__.py @@ -8,7 +8,6 @@ from typing import Final from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from .const import DOMAIN from .coordinator import APCUPSdCoordinator @@ -17,8 +16,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Use config values to set up a function enabling status retrieval.""" diff --git a/homeassistant/components/arcam_fmj/__init__.py b/homeassistant/components/arcam_fmj/__init__.py index e1a2ee0a046..71639ed8388 100644 --- a/homeassistant/components/arcam_fmj/__init__.py +++ b/homeassistant/components/arcam_fmj/__init__.py @@ -11,12 +11,10 @@ from arcam.fmj.client import Client from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( DEFAULT_SCAN_INTERVAL, - DOMAIN, SIGNAL_CLIENT_DATA, SIGNAL_CLIENT_STARTED, SIGNAL_CLIENT_STOPPED, @@ -26,7 +24,6 @@ type ArcamFmjConfigEntry = ConfigEntry[Client] _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [Platform.MEDIA_PLAYER] diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 495359ca314..9e43cfc4187 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -23,8 +23,6 @@ from .coordinator import BMWDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - SERVICE_SCHEMA = vol.Schema( vol.Any( {vol.Required(ATTR_VIN): cv.string}, diff --git a/homeassistant/components/cast/__init__.py b/homeassistant/components/cast/__init__.py index b41dc9ddb41..e72eb196b61 100644 --- a/homeassistant/components/cast/__init__.py +++ b/homeassistant/components/cast/__init__.py @@ -11,7 +11,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -19,7 +19,6 @@ from homeassistant.helpers.integration_platform import ( from . import home_assistant_cast from .const import DOMAIN -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [Platform.MEDIA_PLAYER] diff --git a/homeassistant/components/cloudflare/__init__.py b/homeassistant/components/cloudflare/__init__.py index 5934e43f8a2..bd27be71d18 100644 --- a/homeassistant/components/cloudflare/__init__.py +++ b/homeassistant/components/cloudflare/__init__.py @@ -18,7 +18,6 @@ from homeassistant.exceptions import ( HomeAssistantError, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.location import async_detect_location_info from homeassistant.util.network import is_ipv4_address @@ -27,8 +26,6 @@ from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Cloudflare from a config entry.""" diff --git a/homeassistant/components/coinbase/__init__.py b/homeassistant/components/coinbase/__init__.py index 0a34168b4ee..a231bb5cda0 100644 --- a/homeassistant/components/coinbase/__init__.py +++ b/homeassistant/components/coinbase/__init__.py @@ -11,7 +11,7 @@ from coinbase.wallet.error import AuthenticationError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.util import Throttle from .const import ( @@ -29,9 +29,6 @@ PLATFORMS = [Platform.SENSOR] MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Coinbase from a config entry.""" diff --git a/homeassistant/components/daikin/__init__.py b/homeassistant/components/daikin/__init__.py index 1bd833f354d..4da6bcee50b 100644 --- a/homeassistant/components/daikin/__init__.py +++ b/homeassistant/components/daikin/__init__.py @@ -23,7 +23,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession -import 
homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.util import Throttle @@ -36,8 +35,6 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Establish connection with Daikin.""" diff --git a/homeassistant/components/directv/__init__.py b/homeassistant/components/directv/__init__.py index 50eb6bc7959..e59fa4e9d0d 100644 --- a/homeassistant/components/directv/__init__.py +++ b/homeassistant/components/directv/__init__.py @@ -10,13 +10,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] SCAN_INTERVAL = timedelta(seconds=30) diff --git a/homeassistant/components/glances/__init__.py b/homeassistant/components/glances/__init__.py index f83b39d1cf9..0ddd8a86979 100644 --- a/homeassistant/components/glances/__init__.py +++ b/homeassistant/components/glances/__init__.py @@ -27,7 +27,6 @@ from homeassistant.exceptions import ( ConfigEntryNotReady, HomeAssistantError, ) -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -36,7 +35,6 @@ from .coordinator import GlancesDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/hunterdouglas_powerview/__init__.py b/homeassistant/components/hunterdouglas_powerview/__init__.py index 6f63641b722..f8c7ac43b94 100644 --- a/homeassistant/components/hunterdouglas_powerview/__init__.py +++ b/homeassistant/components/hunterdouglas_powerview/__init__.py @@ -13,7 +13,6 @@ from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from .const import DOMAIN, HUB_EXCEPTIONS from .coordinator import PowerviewShadeUpdateCoordinator @@ -22,7 +21,6 @@ from .shade_data import PowerviewShadeData PARALLEL_UPDATES = 1 -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.BUTTON, diff --git a/homeassistant/components/icloud/__init__.py b/homeassistant/components/icloud/__init__.py index 431a1abd2e1..5bdfd00dc60 100644 --- a/homeassistant/components/icloud/__init__.py +++ b/homeassistant/components/icloud/__init__.py @@ -69,8 +69,6 @@ SERVICE_SCHEMA_LOST_DEVICE = vol.Schema( } ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up an iCloud account from a config entry.""" diff --git a/homeassistant/components/islamic_prayer_times/__init__.py b/homeassistant/components/islamic_prayer_times/__init__.py index 
089afc88564..d61eba343ac 100644 --- a/homeassistant/components/islamic_prayer_times/__init__.py +++ b/homeassistant/components/islamic_prayer_times/__init__.py @@ -7,14 +7,12 @@ import logging from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .const import DOMAIN from .coordinator import IslamicPrayerDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/local_ip/__init__.py b/homeassistant/components/local_ip/__init__.py index 45ddbed7150..72f5d4f7a43 100644 --- a/homeassistant/components/local_ip/__init__.py +++ b/homeassistant/components/local_ip/__init__.py @@ -2,11 +2,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from .const import DOMAIN, PLATFORMS - -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +from .const import PLATFORMS async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/luftdaten/__init__.py b/homeassistant/components/luftdaten/__init__.py index 2ef7864566f..9079b056731 100644 --- a/homeassistant/components/luftdaten/__init__.py +++ b/homeassistant/components/luftdaten/__init__.py @@ -15,7 +15,6 @@ from luftdaten.exceptions import LuftdatenError from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import CONF_SENSOR_ID, DEFAULT_SCAN_INTERVAL, DOMAIN @@ -24,8 +23,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Sensor.Community as config entry.""" diff --git a/homeassistant/components/mikrotik/__init__.py b/homeassistant/components/mikrotik/__init__.py index 9f2b40bf1c8..cecf96a6c3e 100644 --- a/homeassistant/components/mikrotik/__init__.py +++ b/homeassistant/components/mikrotik/__init__.py @@ -4,14 +4,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from .const import ATTR_MANUFACTURER, DOMAIN from .coordinator import MikrotikDataUpdateCoordinator, get_api from .errors import CannotConnect, LoginError -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.DEVICE_TRACKER] type MikrotikConfigEntry = ConfigEntry[MikrotikDataUpdateCoordinator] diff --git a/homeassistant/components/mysensors/__init__.py b/homeassistant/components/mysensors/__init__.py index ed18b890a24..8ebcbe0e2fe 100644 --- a/homeassistant/components/mysensors/__init__.py +++ b/homeassistant/components/mysensors/__init__.py @@ -10,7 +10,6 @@ from mysensors import BaseAsyncGateway 
from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceEntry from .const import ( @@ -32,9 +31,6 @@ _LOGGER = logging.getLogger(__name__) DATA_HASS_CONFIG = "hass_config" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up an instance of the MySensors integration. diff --git a/homeassistant/components/nexia/__init__.py b/homeassistant/components/nexia/__init__.py index 4d0993d3569..9bc76fdcfdc 100644 --- a/homeassistant/components/nexia/__init__.py +++ b/homeassistant/components/nexia/__init__.py @@ -12,7 +12,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from .const import CONF_BRAND, DOMAIN, PLATFORMS from .coordinator import NexiaDataUpdateCoordinator @@ -21,8 +20,6 @@ from .util import is_invalid_auth_code _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: NexiaConfigEntry) -> bool: """Configure the base Nexia device for Home Assistant.""" diff --git a/homeassistant/components/nextcloud/__init__.py b/homeassistant/components/nextcloud/__init__.py index 9e328e8e58d..a487a3f1414 100644 --- a/homeassistant/components/nextcloud/__init__.py +++ b/homeassistant/components/nextcloud/__init__.py @@ -19,14 +19,12 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .const import DOMAIN from .coordinator import NextcloudDataUpdateCoordinator PLATFORMS = (Platform.SENSOR, Platform.BINARY_SENSOR, Platform.UPDATE) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/notion/__init__.py b/homeassistant/components/notion/__init__.py index 1793a0cfd47..00bded5c3a0 100644 --- a/homeassistant/components/notion/__init__.py +++ b/homeassistant/components/notion/__init__.py @@ -14,11 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -49,7 +45,6 @@ ATTR_SYSTEM_NAME = "system_name" DEFAULT_SCAN_INTERVAL = timedelta(minutes=1) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) # Define a map of old-API task types to new-API listener types: TASK_TYPE_TO_LISTENER_MAP: dict[str, ListenerKind] = { diff --git 
a/homeassistant/components/nuheat/__init__.py b/homeassistant/components/nuheat/__init__.py index 8eeee1f3f95..fdb49688eba 100644 --- a/homeassistant/components/nuheat/__init__.py +++ b/homeassistant/components/nuheat/__init__.py @@ -11,15 +11,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import CONF_SERIAL_NUMBER, DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - def _get_thermostat(api, serial_number): """Authenticate and create the thermostat object.""" diff --git a/homeassistant/components/nzbget/__init__.py b/homeassistant/components/nzbget/__init__.py index 61b3f98739c..d47ac78c9d0 100644 --- a/homeassistant/components/nzbget/__init__.py +++ b/homeassistant/components/nzbget/__init__.py @@ -23,7 +23,6 @@ from .coordinator import NZBGetDataUpdateCoordinator PLATFORMS = [Platform.SENSOR, Platform.SWITCH] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) SPEED_LIMIT_SCHEMA = vol.Schema( {vol.Optional(ATTR_SPEED, default=DEFAULT_SPEED_LIMIT): cv.positive_int} diff --git a/homeassistant/components/pi_hole/__init__.py b/homeassistant/components/pi_hole/__init__.py index ad36b664994..bf314e96dec 100644 --- a/homeassistant/components/pi_hole/__init__.py +++ b/homeassistant/components/pi_hole/__init__.py @@ -20,7 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import ( @@ -33,7 +33,6 @@ from .const import CONF_STATISTICS_ONLY, DOMAIN, MIN_TIME_BETWEEN_UPDATES _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.BINARY_SENSOR, diff --git a/homeassistant/components/powerwall/__init__.py b/homeassistant/components/powerwall/__init__.py index be09c729237..0b6f889b90a 100644 --- a/homeassistant/components/powerwall/__init__.py +++ b/homeassistant/components/powerwall/__init__.py @@ -22,7 +22,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_create_clientsession -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util.network import is_ip_address @@ -34,8 +33,6 @@ from .models import ( PowerwallRuntimeData, ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/pvpc_hourly_pricing/__init__.py b/homeassistant/components/pvpc_hourly_pricing/__init__.py index a92f159d172..6327164e3c8 100644 --- 
a/homeassistant/components/pvpc_hourly_pricing/__init__.py +++ b/homeassistant/components/pvpc_hourly_pricing/__init__.py @@ -3,7 +3,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv import homeassistant.helpers.entity_registry as er from .const import ATTR_POWER, ATTR_POWER_P3, DOMAIN @@ -11,7 +10,6 @@ from .coordinator import ElecPricesDataUpdateCoordinator from .helpers import get_enabled_sensor_keys PLATFORMS: list[Platform] = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/rachio/__init__.py b/homeassistant/components/rachio/__init__.py index a5922e0cb95..6976d3f5ba6 100644 --- a/homeassistant/components/rachio/__init__.py +++ b/homeassistant/components/rachio/__init__.py @@ -11,7 +11,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_WEBHOOK_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from .const import CONF_CLOUDHOOK_URL, CONF_MANUAL_RUN_MINS, DOMAIN from .device import RachioPerson @@ -25,8 +24,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.BINARY_SENSOR, Platform.CALENDAR, Platform.SWITCH] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" diff --git a/homeassistant/components/rainmachine/__init__.py b/homeassistant/components/rainmachine/__init__.py index cfbc95cf009..b10d562ac67 100644 --- a/homeassistant/components/rainmachine/__init__.py +++ b/homeassistant/components/rainmachine/__init__.py @@ -58,7 +58,6 @@ from .model import RainMachineEntityDescription DEFAULT_SSL = True -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.BINARY_SENSOR, diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index 0620207a8ee..7515f375054 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -5,13 +5,10 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from .const import DOMAIN from .coordinator import RokuDataUpdateCoordinator -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.MEDIA_PLAYER, diff --git a/homeassistant/components/samsungtv/__init__.py b/homeassistant/components/samsungtv/__init__.py index 992c86d5d7e..f3b967a485e 100644 --- a/homeassistant/components/samsungtv/__init__.py +++ b/homeassistant/components/samsungtv/__init__.py @@ -23,11 +23,7 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.debounce 
import Debouncer from .bridge import ( @@ -53,7 +49,6 @@ from .coordinator import SamsungTVDataUpdateCoordinator PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) SamsungTVConfigEntry = ConfigEntry[SamsungTVDataUpdateCoordinator] diff --git a/homeassistant/components/sentry/__init__.py b/homeassistant/components/sentry/__init__.py index 8c042621db6..904d493a863 100644 --- a/homeassistant/components/sentry/__init__.py +++ b/homeassistant/components/sentry/__init__.py @@ -17,7 +17,7 @@ from homeassistant.const import ( __version__ as current_version, ) from homeassistant.core import HomeAssistant, get_release_channel -from homeassistant.helpers import config_validation as cv, entity_platform, instance_id +from homeassistant.helpers import entity_platform, instance_id from homeassistant.helpers.event import async_call_later from homeassistant.helpers.system_info import async_get_system_info from homeassistant.loader import Integration, async_get_custom_components @@ -36,12 +36,9 @@ from .const import ( DEFAULT_LOGGING_EVENT_LEVEL, DEFAULT_LOGGING_LEVEL, DEFAULT_TRACING_SAMPLE_RATE, - DOMAIN, ENTITY_COMPONENTS, ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - LOGGER_INFO_REGEX = re.compile(r"^(\w+)\.?(\w+)?\.?(\w+)?\.?(\w+)?(?:\..*)?$") diff --git a/homeassistant/components/simplisafe/__init__.py b/homeassistant/components/simplisafe/__init__.py index 29f53eafffb..b23358c985f 100644 --- a/homeassistant/components/simplisafe/__init__.py +++ b/homeassistant/components/simplisafe/__init__.py @@ -212,8 +212,6 @@ WEBSOCKET_EVENTS_TO_FIRE_HASS_EVENT = [ EVENT_USER_INITIATED_TEST, ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - @callback def _async_get_system_for_service_call( diff --git a/homeassistant/components/solaredge/__init__.py b/homeassistant/components/solaredge/__init__.py index 41448bae98d..206a2499494 100644 --- a/homeassistant/components/solaredge/__init__.py +++ b/homeassistant/components/solaredge/__init__.py @@ -11,13 +11,10 @@ from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv -from .const import CONF_SITE_ID, DOMAIN, LOGGER +from .const import CONF_SITE_ID, LOGGER from .types import SolarEdgeConfigEntry, SolarEdgeData -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/somfy_mylink/__init__.py b/homeassistant/components/somfy_mylink/__init__.py index ed9652de55a..89796f5ce46 100644 --- a/homeassistant/components/somfy_mylink/__init__.py +++ b/homeassistant/components/somfy_mylink/__init__.py @@ -8,7 +8,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from .const import CONF_SYSTEM_ID, DATA_SOMFY_MYLINK, DOMAIN, MYLINK_STATUS, PLATFORMS @@ -16,8 +15,6 @@ UNDO_UPDATE_LISTENER = "undo_update_listener" _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Somfy MyLink from a config entry.""" diff --git 
a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index d42dacca638..3619619782e 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from .common import SynoApi, raise_config_entry_auth_error from .const import ( @@ -33,9 +33,6 @@ from .coordinator import ( from .models import SynologyDSMData from .service import async_setup_services -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/tankerkoenig/__init__.py b/homeassistant/components/tankerkoenig/__init__.py index 78bced05b36..a500549a648 100644 --- a/homeassistant/components/tankerkoenig/__init__.py +++ b/homeassistant/components/tankerkoenig/__init__.py @@ -4,15 +4,12 @@ from __future__ import annotations from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from .const import DEFAULT_SCAN_INTERVAL, DOMAIN from .coordinator import TankerkoenigConfigEntry, TankerkoenigDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry( hass: HomeAssistant, entry: TankerkoenigConfigEntry diff --git a/homeassistant/components/totalconnect/__init__.py b/homeassistant/components/totalconnect/__init__.py index bb19697b1e7..0d8b915770a 100644 --- a/homeassistant/components/totalconnect/__init__.py +++ b/homeassistant/components/totalconnect/__init__.py @@ -7,15 +7,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -import homeassistant.helpers.config_validation as cv from .const import AUTO_BYPASS, CONF_USERCODES, DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.BUTTON] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up upon config entry in user interface.""" diff --git a/homeassistant/components/tradfri/__init__.py b/homeassistant/components/tradfri/__init__.py index 2e267ffaa14..0060310e6c2 100644 --- a/homeassistant/components/tradfri/__init__.py +++ b/homeassistant/components/tradfri/__init__.py @@ -14,7 +14,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -import homeassistant.helpers.config_validation as cv import homeassistant.helpers.device_registry as dr from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, @@ -35,7 +34,6 @@ from .const import ( ) from .coordinator import TradfriDeviceDataUpdateCoordinator -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ 
Platform.COVER, Platform.FAN, diff --git a/homeassistant/components/upnp/__init__.py b/homeassistant/components/upnp/__init__.py index 9b51e548f80..214521ee9c0 100644 --- a/homeassistant/components/upnp/__init__.py +++ b/homeassistant/components/upnp/__init__.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from .const import ( CONFIG_ENTRY_FORCE_POLL, @@ -35,7 +35,6 @@ NOTIFICATION_TITLE = "UPnP/IGD Setup" PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) type UpnpConfigEntry = ConfigEntry[UpnpDataUpdateCoordinator] diff --git a/homeassistant/components/verisure/__init__.py b/homeassistant/components/verisure/__init__.py index 9e5f0ca2703..0f8c8d936ef 100644 --- a/homeassistant/components/verisure/__init__.py +++ b/homeassistant/components/verisure/__init__.py @@ -12,7 +12,6 @@ from homeassistant.const import CONF_EMAIL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.storage import STORAGE_DIR from .const import CONF_LOCK_DEFAULT_CODE, DOMAIN, LOGGER @@ -27,8 +26,6 @@ PLATFORMS = [ Platform.SWITCH, ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Verisure from a config entry.""" diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index 7dceb1b3f8f..04547d33dea 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -7,7 +7,6 @@ from pyvesync import VeSync from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .common import async_process_devices @@ -26,8 +25,6 @@ PLATFORMS = [Platform.FAN, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Vesync as config entry.""" diff --git a/tests/components/ambient_station/test_config_flow.py b/tests/components/ambient_station/test_config_flow.py index 19ae9828c22..e4c8efabc20 100644 --- a/tests/components/ambient_station/test_config_flow.py +++ b/tests/components/ambient_station/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from aioambient.errors import AmbientError import pytest -from homeassistant.components.ambient_station import CONF_APP_KEY, DOMAIN +from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant diff --git a/tests/components/islamic_prayer_times/test_config_flow.py b/tests/components/islamic_prayer_times/test_config_flow.py index 
cb37a6b147d..695be636a84 100644 --- a/tests/components/islamic_prayer_times/test_config_flow.py +++ b/tests/components/islamic_prayer_times/test_config_flow.py @@ -3,7 +3,6 @@ import pytest from homeassistant import config_entries -from homeassistant.components import islamic_prayer_times from homeassistant.components.islamic_prayer_times.const import ( CONF_CALC_METHOD, CONF_LAT_ADJ_METHOD, @@ -24,7 +23,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_flow_works(hass: HomeAssistant) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -76,7 +75,7 @@ async def test_integration_already_configured(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/islamic_prayer_times/test_init.py b/tests/components/islamic_prayer_times/test_init.py index 025a202e6da..7961b79676b 100644 --- a/tests/components/islamic_prayer_times/test_init.py +++ b/tests/components/islamic_prayer_times/test_init.py @@ -6,8 +6,7 @@ from unittest.mock import patch from freezegun import freeze_time import pytest -from homeassistant.components import islamic_prayer_times -from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD +from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD, DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE @@ -30,7 +29,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: """Test that Islamic Prayer Times is configured successfully.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -48,7 +47,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: async def test_unload_entry(hass: HomeAssistant) -> None: """Test removing Islamic Prayer Times.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -66,7 +65,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: async def test_options_listener(hass: HomeAssistant) -> None: """Ensure updating options triggers a coordinator refresh.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) with ( @@ -110,13 +109,13 @@ async def test_migrate_unique_id( old_unique_id: str, ) -> None: """Test unique id migration.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) entity: er.RegistryEntry = entity_registry.async_get_or_create( suggested_object_id=object_id, domain=SENSOR_DOMAIN, - platform=islamic_prayer_times.DOMAIN, + platform=DOMAIN, unique_id=old_unique_id, config_entry=entry, ) @@ -140,7 +139,7 @@ async def test_migrate_unique_id( async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: """Test migrating from 
version 1.1 to 1.2.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -164,7 +163,7 @@ async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: async def test_update_scheduling(hass: HomeAssistant) -> None: """Test that integration schedules update immediately after Islamic midnight.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) with ( diff --git a/tests/components/local_ip/test_init.py b/tests/components/local_ip/test_init.py index 51e0628a417..7f411ea9cd7 100644 --- a/tests/components/local_ip/test_init.py +++ b/tests/components/local_ip/test_init.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.components.local_ip import DOMAIN +from homeassistant.components.local_ip.const import DOMAIN from homeassistant.components.network import MDNS_TARGET_IP, async_get_source_ip from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index 9a881197cf9..c02516fdc99 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -10,7 +10,7 @@ from nextcloudmonitor import ( import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.nextcloud import DOMAIN +from homeassistant.components.nextcloud.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant diff --git a/tests/components/sentry/conftest.py b/tests/components/sentry/conftest.py index 781250b2753..663f8ee6aa6 100644 --- a/tests/components/sentry/conftest.py +++ b/tests/components/sentry/conftest.py @@ -6,7 +6,7 @@ from typing import Any import pytest -from homeassistant.components.sentry import DOMAIN +from homeassistant.components.sentry.const import DOMAIN from tests.common import MockConfigEntry From 537d7728a726816220be0ca7c7f68ecea7d835d1 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 5 Aug 2024 18:23:44 +0200 Subject: [PATCH 0394/1635] Update frontend to 20240805.1 (#123196) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 95afe1221ec..82dc9cdb83f 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240802.0"] + "requirements": ["home-assistant-frontend==20240805.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 3b251e91179..6fc0d6f535d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240802.0 +home-assistant-frontend==20240805.1 home-assistant-intents==2024.7.29 
httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index f373c0fe7e3..69c0f5b4c21 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240802.0 +home-assistant-frontend==20240805.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 529232f7847..4148ceced1c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -919,7 +919,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240802.0 +home-assistant-frontend==20240805.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From f51cc8fe12ea1bbc0987e0b0518700fbcc364b6d Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Mon, 5 Aug 2024 19:02:07 +0200 Subject: [PATCH 0395/1635] Change zha diagnostic to snapshot (#123198) --- .../zha/snapshots/test_diagnostics.ambr | 266 ++++++++++++++++++ tests/components/zha/test_diagnostics.py | 51 +--- 2 files changed, 275 insertions(+), 42 deletions(-) create mode 100644 tests/components/zha/snapshots/test_diagnostics.ambr diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..8899712b99d --- /dev/null +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -0,0 +1,266 @@ +# serializer version: 1 +# name: test_diagnostics_for_config_entry + dict({ + 'application_state': dict({ + 'broadcast_counters': dict({ + }), + 'counters': dict({ + 'ezsp_counters': dict({ + 'counter_1': dict({ + '__type': "", + 'repr': "Counter(name='counter_1', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + 'counter_2': dict({ + '__type': "", + 'repr': "Counter(name='counter_2', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + 'counter_3': dict({ + '__type': "", + 'repr': "Counter(name='counter_3', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + }), + }), + 'device_counters': dict({ + }), + 'group_counters': dict({ + }), + 'network_info': dict({ + 'channel': 15, + 'channel_mask': 0, + 'children': list([ + ]), + 'extended_pan_id': '**REDACTED**', + 'key_table': list([ + ]), + 'metadata': dict({ + }), + 'network_key': '**REDACTED**', + 'nwk_addresses': dict({ + }), + 'nwk_manager_id': 0, + 'nwk_update_id': 0, + 'pan_id': 4660, + 'security_level': 0, + 'source': None, + 'stack_specific': dict({ + }), + 'tc_link_key': dict({ + 'key': list([ + 90, + 105, + 103, + 66, + 101, + 101, + 65, + 108, + 108, + 105, + 97, + 110, + 99, + 101, + 48, + 57, + ]), + 'partner_ieee': '**REDACTED**', + 'rx_counter': 0, + 'seq': 0, + 'tx_counter': 0, + }), + }), + 'node_info': dict({ + 'ieee': '**REDACTED**', + 'logical_type': 2, + 'manufacturer': 'Coordinator Manufacturer', + 'model': 'Coordinator Model', + 'nwk': 0, + 'version': None, + }), + }), + 'config': dict({ + 'device_config': dict({ + }), + 'enable_quirks': False, + }), + 'config_entry': dict({ + 'data': dict({ + 'device': dict({ + 'baudrate': 115200, + 'flow_control': 'hardware', + 'path': '/dev/ttyUSB0', + }), + 'radio_type': 'ezsp', + }), + 'disabled_by': None, + 'domain': 'zha', + 'minor_version': 1, + 'options': dict({ + 'custom_configuration': dict({ + 'zha_alarm_options': dict({ + 'alarm_arm_requires_code': False, + 'alarm_failed_tries': 2, + 'alarm_master_code': '**REDACTED**', + }), + 'zha_options': dict({ + 
'enhanced_light_transition': True, + 'group_members_assume_state': False, + }), + }), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 4, + }), + 'devices': list([ + dict({ + 'logical_type': 'Coordinator', + 'manufacturer': 'Coordinator Manufacturer', + 'model': 'Coordinator Model', + }), + dict({ + 'logical_type': 'EndDevice', + 'manufacturer': 'FakeManufacturer', + 'model': 'FakeModel', + }), + ]), + 'energy_scan': dict({ + '11': 4.313725490196078, + '12': 4.705882352941177, + '13': 5.098039215686274, + '14': 5.490196078431373, + '15': 5.882352941176471, + '16': 6.2745098039215685, + '17': 6.666666666666667, + '18': 7.0588235294117645, + '19': 7.450980392156863, + '20': 7.8431372549019605, + '21': 8.235294117647058, + '22': 8.627450980392156, + '23': 9.019607843137255, + '24': 9.411764705882353, + '25': 9.803921568627452, + '26': 10.196078431372548, + }), + }) +# --- +# name: test_diagnostics_for_device + dict({ + 'active_coordinator': False, + 'area_id': None, + 'available': True, + 'cluster_details': dict({ + '1': dict({ + 'device_type': dict({ + 'id': 1025, + 'name': 'IAS_ANCILLARY_CONTROL', + }), + 'in_clusters': dict({ + '0x0500': dict({ + 'attributes': dict({ + '0x0010': dict({ + 'attribute_name': 'cie_addr', + 'value': list([ + 50, + 79, + 50, + 2, + 0, + 141, + 21, + 0, + ]), + }), + }), + 'endpoint_attribute': 'ias_zone', + 'unsupported_attributes': dict({ + '0x0012': dict({ + 'attribute_name': 'num_zone_sensitivity_levels_supported', + }), + '0x0013': dict({ + 'attribute_name': 'current_zone_sensitivity_level', + }), + }), + }), + '0x0501': dict({ + 'attributes': dict({ + }), + 'endpoint_attribute': 'ias_ace', + 'unsupported_attributes': dict({ + '0x1000': dict({ + }), + 'unknown_attribute_name': dict({ + }), + }), + }), + }), + 'out_clusters': dict({ + }), + 'profile_id': 260, + }), + }), + 'device_type': 'EndDevice', + 'endpoint_names': list([ + dict({ + 'name': 'IAS_ANCILLARY_CONTROL', + }), + ]), + 'entities': list([ + dict({ + 'entity_id': 'alarm_control_panel.fakemanufacturer_fakemodel_alarm_control_panel', + 'name': 'FakeManufacturer FakeModel', + }), + ]), + 'ieee': '**REDACTED**', + 'lqi': None, + 'manufacturer': 'FakeManufacturer', + 'manufacturer_code': 4098, + 'model': 'FakeModel', + 'name': 'FakeManufacturer FakeModel', + 'neighbors': list([ + ]), + 'nwk': 47004, + 'power_source': 'Mains', + 'quirk_applied': False, + 'quirk_class': 'zigpy.device.Device', + 'quirk_id': None, + 'routes': list([ + ]), + 'rssi': None, + 'signature': dict({ + 'endpoints': dict({ + '1': dict({ + 'device_type': '0x0401', + 'input_clusters': list([ + '0x0500', + '0x0501', + ]), + 'output_clusters': list([ + ]), + 'profile_id': '0x0104', + }), + }), + 'manufacturer': 'FakeManufacturer', + 'model': 'FakeModel', + 'node_descriptor': dict({ + 'aps_flags': 0, + 'complex_descriptor_available': 0, + 'descriptor_capability_field': 0, + 'frequency_band': 8, + 'logical_type': 2, + 'mac_capability_flags': 140, + 'manufacturer_code': 4098, + 'maximum_buffer_size': 82, + 'maximum_incoming_transfer_size': 82, + 'maximum_outgoing_transfer_size': 82, + 'reserved': 0, + 'server_mask': 0, + 'user_descriptor_available': 0, + }), + }), + 'user_given_name': None, + }) +# --- diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index bbdc6271207..ed3f83c0c36 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ 
-3,11 +3,11 @@ from unittest.mock import patch import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from zigpy.profiles import zha from zigpy.zcl.clusters import security -from homeassistant.components.diagnostics import REDACTED -from homeassistant.components.zha.diagnostics import KEYS_TO_REDACT from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, @@ -27,14 +27,6 @@ from tests.components.diagnostics import ( ) from tests.typing import ClientSessionGenerator -CONFIG_ENTRY_DIAGNOSTICS_KEYS = [ - "config", - "config_entry", - "application_state", - "versions", - "devices", -] - @pytest.fixture(autouse=True) def required_platforms_only(): @@ -51,6 +43,7 @@ async def test_diagnostics_for_config_entry( config_entry: MockConfigEntry, setup_zha, zigpy_device_mock, + snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for config entry.""" @@ -81,30 +74,9 @@ async def test_diagnostics_for_config_entry( hass, hass_client, config_entry ) - for key in CONFIG_ENTRY_DIAGNOSTICS_KEYS: - assert key in diagnostics_data - assert diagnostics_data[key] is not None - - # Energy scan results are presented as a percentage. JSON object keys also must be - # strings, not integers. - assert diagnostics_data["energy_scan"] == { - str(k): 100 * v / 255 for k, v in scan.items() - } - - assert isinstance(diagnostics_data["devices"], list) - assert len(diagnostics_data["devices"]) == 2 - assert diagnostics_data["devices"] == [ - { - "manufacturer": "Coordinator Manufacturer", - "model": "Coordinator Model", - "logical_type": "Coordinator", - }, - { - "manufacturer": "FakeManufacturer", - "model": "FakeModel", - "logical_type": "EndDevice", - }, - ] + assert diagnostics_data == snapshot( + exclude=props("created_at", "modified_at", "entry_id", "versions") + ) async def test_diagnostics_for_device( @@ -114,6 +86,7 @@ async def test_diagnostics_for_device( config_entry: MockConfigEntry, setup_zha, zigpy_device_mock, + snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for device.""" await setup_zha() @@ -161,11 +134,5 @@ async def test_diagnostics_for_device( diagnostics_data = await get_diagnostics_for_device( hass, hass_client, config_entry, device ) - assert diagnostics_data - device_info: dict = zha_device_proxy.zha_device_info - for key in device_info: - assert key in diagnostics_data - if key not in KEYS_TO_REDACT: - assert key in diagnostics_data - else: - assert diagnostics_data[key] == REDACTED + + assert diagnostics_data == snapshot(exclude=props("device_reg_id", "last_seen")) From 9c2ba9b157e0635231b69af3808d0cb590016384 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 20:40:44 +0200 Subject: [PATCH 0396/1635] Mark tag to be an entity component (#123200) --- homeassistant/components/tag/__init__.py | 1 - homeassistant/components/tag/icons.json | 8 +++----- homeassistant/components/tag/manifest.json | 1 + homeassistant/components/tag/strings.json | 18 ++++++++---------- homeassistant/generated/integrations.json | 5 ----- 5 files changed, 12 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/tag/__init__.py b/homeassistant/components/tag/__init__.py index 97307112f22..0462c5bec34 100644 --- a/homeassistant/components/tag/__init__.py +++ b/homeassistant/components/tag/__init__.py @@ -364,7 +364,6 @@ class TagEntity(Entity): """Representation of a Tag entity.""" _unrecorded_attributes = frozenset({TAG_ID}) - _attr_translation_key = DOMAIN _attr_should_poll = False def __init__( diff --git 
a/homeassistant/components/tag/icons.json b/homeassistant/components/tag/icons.json index d9532aadf73..c931ae8614c 100644 --- a/homeassistant/components/tag/icons.json +++ b/homeassistant/components/tag/icons.json @@ -1,9 +1,7 @@ { - "entity": { - "tag": { - "tag": { - "default": "mdi:tag-outline" - } + "entity_component": { + "_": { + "default": "mdi:tag-outline" } } } diff --git a/homeassistant/components/tag/manifest.json b/homeassistant/components/tag/manifest.json index 14701763573..738e7f7e744 100644 --- a/homeassistant/components/tag/manifest.json +++ b/homeassistant/components/tag/manifest.json @@ -3,5 +3,6 @@ "name": "Tags", "codeowners": ["@balloob", "@dmulcahey"], "documentation": "https://www.home-assistant.io/integrations/tag", + "integration_type": "entity", "quality_scale": "internal" } diff --git a/homeassistant/components/tag/strings.json b/homeassistant/components/tag/strings.json index 75cec1f9ef4..4adbf1d48fc 100644 --- a/homeassistant/components/tag/strings.json +++ b/homeassistant/components/tag/strings.json @@ -1,15 +1,13 @@ { "title": "Tag", - "entity": { - "tag": { - "tag": { - "state_attributes": { - "tag_id": { - "name": "Tag ID" - }, - "last_scanned_by_device_id": { - "name": "Last scanned by device ID" - } + "entity_component": { + "_": { + "state_attributes": { + "tag_id": { + "name": "Tag ID" + }, + "last_scanned_by_device_id": { + "name": "Last scanned by device ID" } } } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 673e37b3c12..c941cce64d4 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6002,10 +6002,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "tag": { - "integration_type": "hub", - "config_flow": false - }, "tailscale": { "name": "Tailscale", "integration_type": "hub", @@ -7354,7 +7350,6 @@ "shopping_list", "sun", "switch_as_x", - "tag", "threshold", "time_date", "tod", From bb1efe56b686f6a474b95a4eeef31b77953df000 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 21:06:58 +0200 Subject: [PATCH 0397/1635] Mark webhook as a system integration type (#123204) --- homeassistant/components/webhook/manifest.json | 1 + homeassistant/generated/integrations.json | 5 ----- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/homeassistant/components/webhook/manifest.json b/homeassistant/components/webhook/manifest.json index c2795e8ac17..43f5321d9f6 100644 --- a/homeassistant/components/webhook/manifest.json +++ b/homeassistant/components/webhook/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/webhook", + "integration_type": "system", "quality_scale": "internal" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index c941cce64d4..e8bdcc0bac9 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6805,11 +6805,6 @@ } } }, - "webhook": { - "name": "Webhook", - "integration_type": "hub", - "config_flow": false - }, "webmin": { "name": "Webmin", "integration_type": "device", From 5142cb5e981a754f711b39fc4ece822745420b78 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 21:07:11 +0200 Subject: [PATCH 0398/1635] Mark assist_pipeline as a system integration type (#123202) --- homeassistant/components/assist_pipeline/manifest.json | 1 + homeassistant/generated/integrations.json | 6 ------ 2 
files changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index dd3ec77f165..00950b138fd 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -4,6 +4,7 @@ "codeowners": ["@balloob", "@synesthesiam"], "dependencies": ["conversation", "stt", "tts", "wake_word"], "documentation": "https://www.home-assistant.io/integrations/assist_pipeline", + "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", "requirements": ["pymicro-vad==1.0.1"] diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index e8bdcc0bac9..2516f25e6f2 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -496,12 +496,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "assist_pipeline": { - "name": "Assist pipeline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "asuswrt": { "name": "ASUSWRT", "integration_type": "hub", From 4898ba932d5bfa4e6088b36f69e79c6eb4b4a7e6 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 5 Aug 2024 12:34:48 +0200 Subject: [PATCH 0399/1635] Use KNX UI entity platform controller class (#123128) --- homeassistant/components/knx/binary_sensor.py | 4 +- homeassistant/components/knx/button.py | 4 +- homeassistant/components/knx/climate.py | 4 +- homeassistant/components/knx/cover.py | 4 +- homeassistant/components/knx/date.py | 4 +- homeassistant/components/knx/datetime.py | 4 +- homeassistant/components/knx/fan.py | 4 +- homeassistant/components/knx/knx_entity.py | 76 +++++++++++++------ homeassistant/components/knx/light.py | 39 +++++----- homeassistant/components/knx/notify.py | 4 +- homeassistant/components/knx/number.py | 4 +- homeassistant/components/knx/scene.py | 4 +- homeassistant/components/knx/select.py | 4 +- homeassistant/components/knx/sensor.py | 4 +- .../components/knx/storage/config_store.py | 54 +++++++------ homeassistant/components/knx/switch.py | 59 +++++++------- homeassistant/components/knx/text.py | 4 +- homeassistant/components/knx/time.py | 4 +- homeassistant/components/knx/weather.py | 4 +- 19 files changed, 165 insertions(+), 123 deletions(-) diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index ff15f725fae..7d80ca55bf6 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -24,7 +24,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import ATTR_COUNTER, ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import BinarySensorSchema @@ -43,7 +43,7 @@ async def async_setup_entry( ) -class KNXBinarySensor(KnxEntity, BinarySensorEntity, RestoreEntity): +class KNXBinarySensor(KnxYamlEntity, BinarySensorEntity, RestoreEntity): """Representation of a KNX binary sensor.""" _device: XknxBinarySensor diff --git a/homeassistant/components/knx/button.py b/homeassistant/components/knx/button.py index 2eb68eebe43..f6627fc527b 100644 --- a/homeassistant/components/knx/button.py +++ b/homeassistant/components/knx/button.py @@ -13,7 +13,7 @@ from homeassistant.helpers.typing import ConfigType from . 
import KNXModule from .const import CONF_PAYLOAD_LENGTH, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -31,7 +31,7 @@ async def async_setup_entry( ) -class KNXButton(KnxEntity, ButtonEntity): +class KNXButton(KnxYamlEntity, ButtonEntity): """Representation of a KNX button.""" _device: XknxRawValue diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 7470d60ef4b..9abc9023617 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -35,7 +35,7 @@ from .const import ( DOMAIN, PRESET_MODES, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import ClimateSchema ATTR_COMMAND_VALUE = "command_value" @@ -133,7 +133,7 @@ def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate: ) -class KNXClimate(KnxEntity, ClimateEntity): +class KNXClimate(KnxYamlEntity, ClimateEntity): """Representation of a KNX climate device.""" _device: XknxClimate diff --git a/homeassistant/components/knx/cover.py b/homeassistant/components/knx/cover.py index 1962db0ad3f..408f746e094 100644 --- a/homeassistant/components/knx/cover.py +++ b/homeassistant/components/knx/cover.py @@ -27,7 +27,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import CoverSchema @@ -43,7 +43,7 @@ async def async_setup_entry( async_add_entities(KNXCover(knx_module, entity_config) for entity_config in config) -class KNXCover(KnxEntity, CoverEntity): +class KNXCover(KnxYamlEntity, CoverEntity): """Representation of a KNX cover.""" _device: XknxCover diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index 80fea63d0a6..9f04a4acd7e 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -31,7 +31,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -61,7 +61,7 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice: ) -class KNXDateEntity(KnxEntity, DateEntity, RestoreEntity): +class KNXDateEntity(KnxYamlEntity, DateEntity, RestoreEntity): """Representation of a KNX date.""" _device: XknxDateDevice diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index 16ccb7474a7..8f1a25e6e3c 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -32,7 +32,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -62,7 +62,7 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTimeDevice: ) -class KNXDateTimeEntity(KnxEntity, DateTimeEntity, RestoreEntity): +class KNXDateTimeEntity(KnxYamlEntity, DateTimeEntity, RestoreEntity): """Representation of a KNX datetime.""" _device: XknxDateTimeDevice diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index 940e241ccda..6fd87be97d1 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -21,7 +21,7 @@ from homeassistant.util.scaling import int_states_in_range from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import FanSchema DEFAULT_PERCENTAGE: Final = 50 @@ -39,7 +39,7 @@ async def async_setup_entry( async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config) -class KNXFan(KnxEntity, FanEntity): +class KNXFan(KnxYamlEntity, FanEntity): """Representation of a KNX fan.""" _device: XknxFan diff --git a/homeassistant/components/knx/knx_entity.py b/homeassistant/components/knx/knx_entity.py index 2b8d2e71186..c81a6ee06db 100644 --- a/homeassistant/components/knx/knx_entity.py +++ b/homeassistant/components/knx/knx_entity.py @@ -2,30 +2,55 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any from xknx.devices import Device as XknxDevice -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity - -from .const import DOMAIN +from homeassistant.helpers.entity_platform import EntityPlatform +from homeassistant.helpers.entity_registry import RegistryEntry if TYPE_CHECKING: from . import KNXModule -SIGNAL_ENTITY_REMOVE = f"{DOMAIN}_entity_remove_signal.{{}}" +from .storage.config_store import PlatformControllerBase -class KnxEntity(Entity): +class KnxUiEntityPlatformController(PlatformControllerBase): + """Class to manage dynamic adding and reloading of UI entities.""" + + def __init__( + self, + knx_module: KNXModule, + entity_platform: EntityPlatform, + entity_class: type[KnxUiEntity], + ) -> None: + """Initialize the UI platform.""" + self._knx_module = knx_module + self._entity_platform = entity_platform + self._entity_class = entity_class + + async def create_entity(self, unique_id: str, config: dict[str, Any]) -> None: + """Add a new UI entity.""" + await self._entity_platform.async_add_entities( + [self._entity_class(self._knx_module, unique_id, config)] + ) + + async def update_entity( + self, entity_entry: RegistryEntry, config: dict[str, Any] + ) -> None: + """Update an existing UI entities configuration.""" + await self._entity_platform.async_remove_entity(entity_entry.entity_id) + await self.create_entity(unique_id=entity_entry.unique_id, config=config) + + +class _KnxEntityBase(Entity): """Representation of a KNX entity.""" _attr_should_poll = False - - def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None: - """Set up device.""" - self._knx_module = knx_module - self._device = device + _knx_module: KNXModule + _device: XknxDevice @property def name(self) -> str: @@ -49,7 +74,7 @@ class KnxEntity(Entity): """Store register state change callback and start device object.""" self._device.register_device_updated_cb(self.after_update_callback) self._device.xknx.devices.async_add(self._device) - # super call needed to have methods of mulit-inherited classes called + # super call needed to have methods of multi-inherited classes called # eg. 
for restoring state (like _KNXSwitch) await super().async_added_to_hass() @@ -59,19 +84,22 @@ class KnxEntity(Entity): self._device.xknx.devices.async_remove(self._device) -class KnxUIEntity(KnxEntity): +class KnxYamlEntity(_KnxEntityBase): + """Representation of a KNX entity configured from YAML.""" + + def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None: + """Initialize the YAML entity.""" + self._knx_module = knx_module + self._device = device + + +class KnxUiEntity(_KnxEntityBase, ABC): """Representation of a KNX UI entity.""" _attr_unique_id: str - async def async_added_to_hass(self) -> None: - """Register callbacks when entity added to hass.""" - await super().async_added_to_hass() - self._knx_module.config_store.entities.add(self._attr_unique_id) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - SIGNAL_ENTITY_REMOVE.format(self._attr_unique_id), - self.async_remove, - ) - ) + @abstractmethod + def __init__( + self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] + ) -> None: + """Initialize the UI entity.""" diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index 1197f09354b..a2ce8f8d2cb 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -19,15 +19,18 @@ from homeassistant.components.light import ( LightEntity, ) from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.helpers.typing import ConfigType import homeassistant.util.color as color_util from . 
import KNXModule from .const import CONF_SYNC_STATE, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, ColorTempModes -from .knx_entity import KnxEntity, KnxUIEntity +from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import LightSchema from .storage.const import ( CONF_COLOR_TEMP_MAX, @@ -63,8 +66,17 @@ async def async_setup_entry( ) -> None: """Set up light(s) for KNX platform.""" knx_module: KNXModule = hass.data[DOMAIN] + platform = async_get_current_platform() + knx_module.config_store.add_platform( + platform=Platform.LIGHT, + controller=KnxUiEntityPlatformController( + knx_module=knx_module, + entity_platform=platform, + entity_class=KnxUiLight, + ), + ) - entities: list[KnxEntity] = [] + entities: list[KnxYamlEntity | KnxUiEntity] = [] if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): entities.extend( KnxYamlLight(knx_module, entity_config) @@ -78,13 +90,6 @@ async def async_setup_entry( if entities: async_add_entities(entities) - @callback - def add_new_ui_light(unique_id: str, config: dict[str, Any]) -> None: - """Add KNX entity at runtime.""" - async_add_entities([KnxUiLight(knx_module, unique_id, config)]) - - knx_module.config_store.async_add_entity[Platform.LIGHT] = add_new_ui_light - def _create_yaml_light(xknx: XKNX, config: ConfigType) -> XknxLight: """Return a KNX Light device to be used within XKNX.""" @@ -519,7 +524,7 @@ class _KnxLight(LightEntity): await self._device.set_off() -class KnxYamlLight(_KnxLight, KnxEntity): +class KnxYamlLight(_KnxLight, KnxYamlEntity): """Representation of a KNX light.""" _device: XknxLight @@ -546,7 +551,7 @@ class KnxYamlLight(_KnxLight, KnxEntity): ) -class KnxUiLight(_KnxLight, KnxUIEntity): +class KnxUiLight(_KnxLight, KnxUiEntity): """Representation of a KNX light.""" _attr_has_entity_name = True @@ -556,11 +561,9 @@ class KnxUiLight(_KnxLight, KnxUIEntity): self, knx_module: KNXModule, unique_id: str, config: ConfigType ) -> None: """Initialize of KNX light.""" - super().__init__( - knx_module=knx_module, - device=_create_ui_light( - knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] - ), + self._knx_module = knx_module + self._device = _create_ui_light( + knx_module.xknx, config[DOMAIN], config[CONF_ENTITY][CONF_NAME] ) self._attr_max_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MAX] self._attr_min_color_temp_kelvin: int = config[DOMAIN][CONF_COLOR_TEMP_MIN] diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index b349681990c..173ab3119a0 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -20,7 +20,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_get_service( @@ -103,7 +103,7 @@ def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotific ) -class KNXNotify(KnxEntity, NotifyEntity): +class KNXNotify(KnxYamlEntity, NotifyEntity): """Representation of a KNX notification entity.""" _device: XknxNotification diff --git a/homeassistant/components/knx/number.py b/homeassistant/components/knx/number.py index 3d4af503dff..cbbe91aba54 100644 --- a/homeassistant/components/knx/number.py +++ b/homeassistant/components/knx/number.py @@ -30,7 +30,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import NumberSchema @@ -58,7 +58,7 @@ def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue: ) -class KNXNumber(KnxEntity, RestoreNumber): +class KNXNumber(KnxYamlEntity, RestoreNumber): """Representation of a KNX number.""" _device: NumericValue diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index fc37f36dd01..2de832ae54a 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -15,7 +15,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import SceneSchema @@ -31,7 +31,7 @@ async def async_setup_entry( async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) -class KNXScene(KnxEntity, Scene): +class KNXScene(KnxYamlEntity, Scene): """Representation of a KNX scene.""" _device: XknxScene diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index 1b862010c2a..6c73bf8d573 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -30,7 +30,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import SelectSchema @@ -59,7 +59,7 @@ def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue: ) -class KNXSelect(KnxEntity, SelectEntity, RestoreEntity): +class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity): """Representation of a KNX select.""" _device: RawValue diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index ab363e2a35f..a28c1a339e6 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -35,7 +35,7 @@ from homeassistant.util.enum import try_parse_enum from . 
import KNXModule from .const import ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import SensorSchema SCAN_INTERVAL = timedelta(seconds=10) @@ -141,7 +141,7 @@ def _create_sensor(xknx: XKNX, config: ConfigType) -> XknxSensor: ) -class KNXSensor(KnxEntity, SensorEntity): +class KNXSensor(KnxYamlEntity, SensorEntity): """Representation of a KNX sensor.""" _device: XknxSensor diff --git a/homeassistant/components/knx/storage/config_store.py b/homeassistant/components/knx/storage/config_store.py index 876fe19a4b9..ce7a705e629 100644 --- a/homeassistant/components/knx/storage/config_store.py +++ b/homeassistant/components/knx/storage/config_store.py @@ -1,6 +1,6 @@ """KNX entity configuration store.""" -from collections.abc import Callable +from abc import ABC, abstractmethod import logging from typing import Any, Final, TypedDict @@ -8,12 +8,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PLATFORM, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.storage import Store from homeassistant.util.ulid import ulid_now from ..const import DOMAIN -from ..knx_entity import SIGNAL_ENTITY_REMOVE from .const import CONF_DATA _LOGGER = logging.getLogger(__name__) @@ -33,6 +31,20 @@ class KNXConfigStoreModel(TypedDict): entities: KNXEntityStoreModel +class PlatformControllerBase(ABC): + """Entity platform controller base class.""" + + @abstractmethod + async def create_entity(self, unique_id: str, config: dict[str, Any]) -> None: + """Create a new entity.""" + + @abstractmethod + async def update_entity( + self, entity_entry: er.RegistryEntry, config: dict[str, Any] + ) -> None: + """Update an existing entities configuration.""" + + class KNXConfigStore: """Manage KNX config store data.""" @@ -46,12 +58,7 @@ class KNXConfigStore: self.config_entry = config_entry self._store = Store[KNXConfigStoreModel](hass, STORAGE_VERSION, STORAGE_KEY) self.data = KNXConfigStoreModel(entities={}) - - # entities and async_add_entity are filled by platform / entity setups - self.entities: set[str] = set() # unique_id as values - self.async_add_entity: dict[ - Platform, Callable[[str, dict[str, Any]], None] - ] = {} + self._platform_controllers: dict[Platform, PlatformControllerBase] = {} async def load_data(self) -> None: """Load config store data from storage.""" @@ -62,14 +69,19 @@ class KNXConfigStore: len(self.data["entities"]), ) + def add_platform( + self, platform: Platform, controller: PlatformControllerBase + ) -> None: + """Add platform controller.""" + self._platform_controllers[platform] = controller + async def create_entity( self, platform: Platform, data: dict[str, Any] ) -> str | None: """Create a new entity.""" - if platform not in self.async_add_entity: - raise ConfigStoreException(f"Entity platform not ready: {platform}") + platform_controller = self._platform_controllers[platform] unique_id = f"knx_es_{ulid_now()}" - self.async_add_entity[platform](unique_id, data) + await platform_controller.create_entity(unique_id, data) # store data after entity was added to be sure config didn't raise exceptions self.data["entities"].setdefault(platform, {})[unique_id] = data await self._store.async_save(self.data) @@ -95,8 +107,7 @@ class KNXConfigStore: self, platform: Platform, entity_id: str, data: dict[str, Any] ) -> None: 
"""Update an existing entity.""" - if platform not in self.async_add_entity: - raise ConfigStoreException(f"Entity platform not ready: {platform}") + platform_controller = self._platform_controllers[platform] entity_registry = er.async_get(self.hass) if (entry := entity_registry.async_get(entity_id)) is None: raise ConfigStoreException(f"Entity not found: {entity_id}") @@ -108,8 +119,7 @@ class KNXConfigStore: raise ConfigStoreException( f"Entity not found in storage: {entity_id} - {unique_id}" ) - async_dispatcher_send(self.hass, SIGNAL_ENTITY_REMOVE.format(unique_id)) - self.async_add_entity[platform](unique_id, data) + await platform_controller.update_entity(entry, data) # store data after entity is added to make sure config doesn't raise exceptions self.data["entities"][platform][unique_id] = data await self._store.async_save(self.data) @@ -125,23 +135,21 @@ class KNXConfigStore: raise ConfigStoreException( f"Entity not found in {entry.domain}: {entry.unique_id}" ) from err - try: - self.entities.remove(entry.unique_id) - except KeyError: - _LOGGER.warning("Entity not initialized when deleted: %s", entity_id) entity_registry.async_remove(entity_id) await self._store.async_save(self.data) def get_entity_entries(self) -> list[er.RegistryEntry]: - """Get entity_ids of all configured entities by platform.""" + """Get entity_ids of all UI configured entities.""" entity_registry = er.async_get(self.hass) - + unique_ids = { + uid for platform in self.data["entities"].values() for uid in platform + } return [ registry_entry for registry_entry in er.async_entries_for_config_entry( entity_registry, self.config_entry.entry_id ) - if registry_entry.unique_id in self.entities + if registry_entry.unique_id in unique_ids ] diff --git a/homeassistant/components/knx/switch.py b/homeassistant/components/knx/switch.py index a5f430e6157..ebe930957d6 100644 --- a/homeassistant/components/knx/switch.py +++ b/homeassistant/components/knx/switch.py @@ -17,9 +17,12 @@ from homeassistant.const import ( STATE_UNKNOWN, Platform, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.entity_platform import ( + AddEntitiesCallback, + async_get_current_platform, +) from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType @@ -32,7 +35,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity, KnxUIEntity +from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import SwitchSchema from .storage.const import ( CONF_DEVICE_INFO, @@ -51,8 +54,17 @@ async def async_setup_entry( ) -> None: """Set up switch(es) for KNX platform.""" knx_module: KNXModule = hass.data[DOMAIN] + platform = async_get_current_platform() + knx_module.config_store.add_platform( + platform=Platform.SWITCH, + controller=KnxUiEntityPlatformController( + knx_module=knx_module, + entity_platform=platform, + entity_class=KnxUiSwitch, + ), + ) - entities: list[KnxEntity] = [] + entities: list[KnxYamlEntity | KnxUiEntity] = [] if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): entities.extend( KnxYamlSwitch(knx_module, entity_config) @@ -66,13 +78,6 @@ async def async_setup_entry( if entities: async_add_entities(entities) - @callback - def add_new_ui_switch(unique_id: str, config: dict[str, Any]) -> None: - 
"""Add KNX entity at runtime.""" - async_add_entities([KnxUiSwitch(knx_module, unique_id, config)]) - - knx_module.config_store.async_add_entity[Platform.SWITCH] = add_new_ui_switch - class _KnxSwitch(SwitchEntity, RestoreEntity): """Base class for a KNX switch.""" @@ -102,7 +107,7 @@ class _KnxSwitch(SwitchEntity, RestoreEntity): await self._device.set_off() -class KnxYamlSwitch(_KnxSwitch, KnxEntity): +class KnxYamlSwitch(_KnxSwitch, KnxYamlEntity): """Representation of a KNX switch configured from YAML.""" _device: XknxSwitch @@ -125,7 +130,7 @@ class KnxYamlSwitch(_KnxSwitch, KnxEntity): self._attr_unique_id = str(self._device.switch.group_address) -class KnxUiSwitch(_KnxSwitch, KnxUIEntity): +class KnxUiSwitch(_KnxSwitch, KnxUiEntity): """Representation of a KNX switch configured from UI.""" _attr_has_entity_name = True @@ -134,21 +139,19 @@ class KnxUiSwitch(_KnxSwitch, KnxUIEntity): def __init__( self, knx_module: KNXModule, unique_id: str, config: dict[str, Any] ) -> None: - """Initialize of KNX switch.""" - super().__init__( - knx_module=knx_module, - device=XknxSwitch( - knx_module.xknx, - name=config[CONF_ENTITY][CONF_NAME], - group_address=config[DOMAIN][CONF_GA_SWITCH][CONF_GA_WRITE], - group_address_state=[ - config[DOMAIN][CONF_GA_SWITCH][CONF_GA_STATE], - *config[DOMAIN][CONF_GA_SWITCH][CONF_GA_PASSIVE], - ], - respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ], - sync_state=config[DOMAIN][CONF_SYNC_STATE], - invert=config[DOMAIN][CONF_INVERT], - ), + """Initialize KNX switch.""" + self._knx_module = knx_module + self._device = XknxSwitch( + knx_module.xknx, + name=config[CONF_ENTITY][CONF_NAME], + group_address=config[DOMAIN][CONF_GA_SWITCH][CONF_GA_WRITE], + group_address_state=[ + config[DOMAIN][CONF_GA_SWITCH][CONF_GA_STATE], + *config[DOMAIN][CONF_GA_SWITCH][CONF_GA_PASSIVE], + ], + respond_to_read=config[DOMAIN][CONF_RESPOND_TO_READ], + sync_state=config[DOMAIN][CONF_SYNC_STATE], + invert=config[DOMAIN][CONF_INVERT], ) self._attr_entity_category = config[CONF_ENTITY][CONF_ENTITY_CATEGORY] self._attr_unique_id = unique_id diff --git a/homeassistant/components/knx/text.py b/homeassistant/components/knx/text.py index 9bca37434ac..381cb95ad32 100644 --- a/homeassistant/components/knx/text.py +++ b/homeassistant/components/knx/text.py @@ -30,7 +30,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -57,7 +57,7 @@ def _create_notification(xknx: XKNX, config: ConfigType) -> XknxNotification: ) -class KNXText(KnxEntity, TextEntity, RestoreEntity): +class KNXText(KnxYamlEntity, TextEntity, RestoreEntity): """Representation of a KNX text.""" _device: XknxNotification diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index 5d9225a1e41..b4e562a8869 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -31,7 +31,7 @@ from .const import ( DOMAIN, KNX_ADDRESS, ) -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity async def async_setup_entry( @@ -61,7 +61,7 @@ def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice: ) -class KNXTimeEntity(KnxEntity, TimeEntity, RestoreEntity): +class KNXTimeEntity(KnxYamlEntity, TimeEntity, RestoreEntity): """Representation of a KNX time.""" _device: XknxTimeDevice diff --git a/homeassistant/components/knx/weather.py b/homeassistant/components/knx/weather.py index 11dae452e2f..99f4be962fe 100644 --- 
a/homeassistant/components/knx/weather.py +++ b/homeassistant/components/knx/weather.py @@ -21,7 +21,7 @@ from homeassistant.helpers.typing import ConfigType from . import KNXModule from .const import DATA_KNX_CONFIG, DOMAIN -from .knx_entity import KnxEntity +from .knx_entity import KnxYamlEntity from .schema import WeatherSchema @@ -75,7 +75,7 @@ def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather: ) -class KNXWeather(KnxEntity, WeatherEntity): +class KNXWeather(KnxYamlEntity, WeatherEntity): """Representation of a KNX weather device.""" _device: XknxWeather From 0427aeccb0cdeb6757116441ae5db2ebe949ab72 Mon Sep 17 00:00:00 2001 From: musapinar Date: Mon, 5 Aug 2024 14:21:01 +0200 Subject: [PATCH 0400/1635] Add Matter Leedarson RGBTW Bulb to the transition blocklist (#123182) --- homeassistant/components/matter/light.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index d05a7c85f9d..6e9019c46fa 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -51,18 +51,19 @@ DEFAULT_TRANSITION = 0.2 # hw version (attributeKey 0/40/8) # sw version (attributeKey 0/40/10) TRANSITION_BLOCKLIST = ( - (4488, 514, "1.0", "1.0.0"), - (4488, 260, "1.0", "1.0.0"), - (5010, 769, "3.0", "1.0.0"), - (4999, 24875, "1.0", "27.0"), - (4999, 25057, "1.0", "27.0"), - (4448, 36866, "V1", "V1.0.0.5"), - (5009, 514, "1.0", "1.0.0"), (4107, 8475, "v1.0", "v1.0"), (4107, 8550, "v1.0", "v1.0"), (4107, 8551, "v1.0", "v1.0"), - (4107, 8656, "v1.0", "v1.0"), (4107, 8571, "v1.0", "v1.0"), + (4107, 8656, "v1.0", "v1.0"), + (4448, 36866, "V1", "V1.0.0.5"), + (4456, 1011, "1.0.0", "2.00.00"), + (4488, 260, "1.0", "1.0.0"), + (4488, 514, "1.0", "1.0.0"), + (4999, 24875, "1.0", "27.0"), + (4999, 25057, "1.0", "27.0"), + (5009, 514, "1.0", "1.0.0"), + (5010, 769, "3.0", "1.0.0"), ) From ea20c4b375c1ada418b42b13e172027124ed5c98 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 5 Aug 2024 15:07:01 +0200 Subject: [PATCH 0401/1635] Fix MPD issue creation (#123187) --- homeassistant/components/mpd/media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/mpd/media_player.py b/homeassistant/components/mpd/media_player.py index 3538b1c7973..92f0f5cfcc4 100644 --- a/homeassistant/components/mpd/media_player.py +++ b/homeassistant/components/mpd/media_player.py @@ -86,7 +86,7 @@ async def async_setup_platform( ) if ( result["type"] is FlowResultType.CREATE_ENTRY - or result["reason"] == "single_instance_allowed" + or result["reason"] == "already_configured" ): async_create_issue( hass, From 6b10dbb38c61c6a50dd3074aaa8f182094910a0e Mon Sep 17 00:00:00 2001 From: Marius <33354141+Mariusthvdb@users.noreply.github.com> Date: Mon, 5 Aug 2024 17:17:46 +0200 Subject: [PATCH 0402/1635] Fix state icon for closed valve entities (#123190) --- homeassistant/components/valve/icons.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 1261d1cc398..2c887ebf273 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -3,7 +3,7 @@ "_": { "default": "mdi:valve-open", "state": { - "off": "mdi:valve-closed" + "closed": "mdi:valve-closed" } }, "gas": { @@ -12,7 +12,7 @@ "water": { "default": "mdi:valve-open", "state": { - "off": "mdi:valve-closed" + "closed": 
"mdi:valve-closed" } } }, From b16bf2981907c68e1be67bf98d170c76922f9006 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Mon, 5 Aug 2024 18:23:44 +0200 Subject: [PATCH 0403/1635] Update frontend to 20240805.1 (#123196) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 95afe1221ec..82dc9cdb83f 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240802.0"] + "requirements": ["home-assistant-frontend==20240805.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 3b251e91179..6fc0d6f535d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240802.0 +home-assistant-frontend==20240805.1 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index fb881f5a8c6..2fd2c93a687 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240802.0 +home-assistant-frontend==20240805.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ac3d594a541..3a71321bc5b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -916,7 +916,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240802.0 +home-assistant-frontend==20240805.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From 859874487eed3c6593e3b3a0d965d4a3ad3794de Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 20:40:44 +0200 Subject: [PATCH 0404/1635] Mark tag to be an entity component (#123200) --- homeassistant/components/tag/__init__.py | 1 - homeassistant/components/tag/icons.json | 8 +++----- homeassistant/components/tag/manifest.json | 1 + homeassistant/components/tag/strings.json | 18 ++++++++---------- homeassistant/generated/integrations.json | 5 ----- 5 files changed, 12 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/tag/__init__.py b/homeassistant/components/tag/__init__.py index 97307112f22..0462c5bec34 100644 --- a/homeassistant/components/tag/__init__.py +++ b/homeassistant/components/tag/__init__.py @@ -364,7 +364,6 @@ class TagEntity(Entity): """Representation of a Tag entity.""" _unrecorded_attributes = frozenset({TAG_ID}) - _attr_translation_key = DOMAIN _attr_should_poll = False def __init__( diff --git a/homeassistant/components/tag/icons.json b/homeassistant/components/tag/icons.json index d9532aadf73..c931ae8614c 100644 --- a/homeassistant/components/tag/icons.json +++ b/homeassistant/components/tag/icons.json @@ -1,9 +1,7 @@ { - "entity": { - "tag": { - "tag": { - "default": "mdi:tag-outline" - } + "entity_component": { + "_": { + "default": "mdi:tag-outline" } } } diff --git 
a/homeassistant/components/tag/manifest.json b/homeassistant/components/tag/manifest.json index 14701763573..738e7f7e744 100644 --- a/homeassistant/components/tag/manifest.json +++ b/homeassistant/components/tag/manifest.json @@ -3,5 +3,6 @@ "name": "Tags", "codeowners": ["@balloob", "@dmulcahey"], "documentation": "https://www.home-assistant.io/integrations/tag", + "integration_type": "entity", "quality_scale": "internal" } diff --git a/homeassistant/components/tag/strings.json b/homeassistant/components/tag/strings.json index 75cec1f9ef4..4adbf1d48fc 100644 --- a/homeassistant/components/tag/strings.json +++ b/homeassistant/components/tag/strings.json @@ -1,15 +1,13 @@ { "title": "Tag", - "entity": { - "tag": { - "tag": { - "state_attributes": { - "tag_id": { - "name": "Tag ID" - }, - "last_scanned_by_device_id": { - "name": "Last scanned by device ID" - } + "entity_component": { + "_": { + "state_attributes": { + "tag_id": { + "name": "Tag ID" + }, + "last_scanned_by_device_id": { + "name": "Last scanned by device ID" } } } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 3cc3ea71df9..816241035e6 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6013,10 +6013,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "tag": { - "integration_type": "hub", - "config_flow": false - }, "tailscale": { "name": "Tailscale", "integration_type": "hub", @@ -7365,7 +7361,6 @@ "shopping_list", "sun", "switch_as_x", - "tag", "threshold", "time_date", "tod", From 62d38e786d21e3e80246ac5ff765a34c8061c49e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 21:07:11 +0200 Subject: [PATCH 0405/1635] Mark assist_pipeline as a system integration type (#123202) --- homeassistant/components/assist_pipeline/manifest.json | 1 + homeassistant/generated/integrations.json | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index dd3ec77f165..00950b138fd 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -4,6 +4,7 @@ "codeowners": ["@balloob", "@synesthesiam"], "dependencies": ["conversation", "stt", "tts", "wake_word"], "documentation": "https://www.home-assistant.io/integrations/assist_pipeline", + "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", "requirements": ["pymicro-vad==1.0.1"] diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 816241035e6..850c7d78bc0 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -490,12 +490,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "assist_pipeline": { - "name": "Assist pipeline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push" - }, "asterisk": { "name": "Asterisk", "integrations": { From 4f722e864c13f465dce8b46aadafc1add0646069 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 21:06:58 +0200 Subject: [PATCH 0406/1635] Mark webhook as a system integration type (#123204) --- homeassistant/components/webhook/manifest.json | 1 + homeassistant/generated/integrations.json | 5 ----- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/homeassistant/components/webhook/manifest.json b/homeassistant/components/webhook/manifest.json 
index c2795e8ac17..43f5321d9f6 100644 --- a/homeassistant/components/webhook/manifest.json +++ b/homeassistant/components/webhook/manifest.json @@ -4,5 +4,6 @@ "codeowners": ["@home-assistant/core"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/webhook", + "integration_type": "system", "quality_scale": "internal" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 850c7d78bc0..5107587ab89 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6810,11 +6810,6 @@ } } }, - "webhook": { - "name": "Webhook", - "integration_type": "hub", - "config_flow": false - }, "webmin": { "name": "Webmin", "integration_type": "device", From d530137bec3e23a194841e56d32246d8a6e741fa Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Mon, 5 Aug 2024 21:12:09 +0200 Subject: [PATCH 0407/1635] Bump version to 2024.8.0b3 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 7f27548a68c..981fc42fd36 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b2" +PATCH_VERSION: Final = "0b3" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index f23c571feb5..c29252b1ced 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b2" +version = "2024.8.0b3" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 54f8f24c2ce782a191fb76701493e03df3ec78ab Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 5 Aug 2024 15:51:15 -0500 Subject: [PATCH 0408/1635] Bump PyJWT to 2.9.0 (#123209) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6fc0d6f535d..9f327697a19 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -44,7 +44,7 @@ paho-mqtt==1.6.1 Pillow==10.4.0 pip>=21.3.1 psutil-home-assistant==0.0.1 -PyJWT==2.8.0 +PyJWT==2.9.0 pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 diff --git a/pyproject.toml b/pyproject.toml index 49346f90448..a0a98bc72be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ dependencies = [ "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.8.0", + "PyJWT==2.9.0", # PyJWT has loose dependency. We want the latest one. 
"cryptography==43.0.0", "Pillow==10.4.0", diff --git a/requirements.txt b/requirements.txt index 1beefe73914..f35fffe680a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,7 @@ home-assistant-bluetooth==1.12.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.8.0 +PyJWT==2.9.0 cryptography==43.0.0 Pillow==10.4.0 pyOpenSSL==24.2.1 From 1b73b2a12ab2abeb73c5505283c9c80347e41883 Mon Sep 17 00:00:00 2001 From: Clifford Roche <1007595+cmroche@users.noreply.github.com> Date: Tue, 6 Aug 2024 04:06:35 -0400 Subject: [PATCH 0409/1635] Update greeclimate to 2.1.0 (#123210) --- homeassistant/components/gree/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gree/manifest.json b/homeassistant/components/gree/manifest.json index ca1c4b5b754..dba8cd6077c 100644 --- a/homeassistant/components/gree/manifest.json +++ b/homeassistant/components/gree/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/gree", "iot_class": "local_polling", "loggers": ["greeclimate"], - "requirements": ["greeclimate==2.0.0"] + "requirements": ["greeclimate==2.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 69c0f5b4c21..d0d388e194a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1007,7 +1007,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==2.0.0 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4148ceced1c..4953419fe6d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -851,7 +851,7 @@ govee-local-api==1.5.1 gps3==0.33.3 # homeassistant.components.gree -greeclimate==2.0.0 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 From 21da79a249c0b1aa85610fa4e82a922f39cc628e Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 6 Aug 2024 20:11:08 +1200 Subject: [PATCH 0410/1635] Show project version as `sw_version` in ESPHome (#123183) --- homeassistant/components/esphome/manager.py | 6 +++--- tests/components/esphome/test_manager.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index e8d002fba9d..ef2a6862f10 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -654,12 +654,13 @@ def _async_setup_device_registry( if device_info.manufacturer: manufacturer = device_info.manufacturer model = device_info.model - hw_version = None if device_info.project_name: project_name = device_info.project_name.split(".") manufacturer = project_name[0] model = project_name[1] - hw_version = device_info.project_version + sw_version = ( + f"{device_info.project_version} (ESPHome {device_info.esphome_version})" + ) suggested_area = None if device_info.suggested_area: @@ -674,7 +675,6 @@ def _async_setup_device_registry( manufacturer=manufacturer, model=model, sw_version=sw_version, - hw_version=hw_version, suggested_area=suggested_area, ) return device_entry.id diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 01f267581f4..651c52cd083 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -1024,7 +1024,7 @@ async def 
test_esphome_device_with_project( ) assert dev.manufacturer == "mfr" assert dev.model == "model" - assert dev.hw_version == "2.2.2" + assert dev.sw_version == "2.2.2 (ESPHome 1.0.0)" async def test_esphome_device_with_manufacturer( From 164cfa85da6e5c1a85e004533da689982c053230 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 6 Aug 2024 20:12:31 +1200 Subject: [PATCH 0411/1635] Add support for ESPHome update entities to be checked on demand (#123161) --- homeassistant/components/esphome/manifest.json | 2 +- homeassistant/components/esphome/update.py | 11 +++++++++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/esphome/test_update.py | 14 ++++++++++++++ 5 files changed, 26 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index ff7569bbc5f..97724a12203 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==24.6.2", + "aioesphomeapi==25.0.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index e86c88ddf5b..b7905fb4fdb 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -8,6 +8,7 @@ from typing import Any from aioesphomeapi import ( DeviceInfo as ESPHomeDeviceInfo, EntityInfo, + UpdateCommand, UpdateInfo, UpdateState, ) @@ -259,9 +260,15 @@ class ESPHomeUpdateEntity(EsphomeEntity[UpdateInfo, UpdateState], UpdateEntity): """Return the title of the update.""" return self._state.title + @convert_api_error_ha_error + async def async_update(self) -> None: + """Command device to check for update.""" + if self.available: + self._client.update_command(key=self._key, command=UpdateCommand.CHECK) + @convert_api_error_ha_error async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: - """Update the current value.""" - self._client.update_command(key=self._key, install=True) + """Command device to install update.""" + self._client.update_command(key=self._key, command=UpdateCommand.INSTALL) diff --git a/requirements_all.txt b/requirements_all.txt index d0d388e194a..927b3d45310 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.2 +aioesphomeapi==25.0.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4953419fe6d..a75a4b6cec8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.2 +aioesphomeapi==25.0.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index c9826c3f347..83e89b1de00 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -8,6 +8,7 @@ from aioesphomeapi import ( APIClient, EntityInfo, EntityState, + UpdateCommand, UpdateInfo, UpdateState, UserService, @@ -15,6 +16,10 @@ from aioesphomeapi import ( import pytest from homeassistant.components.esphome.dashboard import 
async_get_dashboard +from homeassistant.components.homeassistant import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, +) from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, @@ -527,3 +532,12 @@ async def test_generic_device_update_entity_has_update( assert state is not None assert state.state == STATE_ON assert state.attributes["in_progress"] == 50 + + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: "update.test_myupdate"}, + blocking=True, + ) + + mock_client.update_command.assert_called_with(key=1, command=UpdateCommand.CHECK) From 0d4ca35784f6768b782f7bc9a3329015fa6e93b5 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 6 Aug 2024 10:13:12 +0200 Subject: [PATCH 0412/1635] Remove unused async_setup method in insteon (#123201) --- homeassistant/components/insteon/__init__.py | 10 +------ tests/components/insteon/const.py | 1 - tests/components/insteon/test_config_flow.py | 30 ++++++++------------ 3 files changed, 13 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/insteon/__init__.py b/homeassistant/components/insteon/__init__.py index 0ec2434bc82..ff72f90a87e 100644 --- a/homeassistant/components/insteon/__init__.py +++ b/homeassistant/components/insteon/__init__.py @@ -10,8 +10,7 @@ from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_PLATFORM, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers import device_registry as dr from . 
import api from .const import ( @@ -36,8 +35,6 @@ from .utils import ( _LOGGER = logging.getLogger(__name__) OPTIONS = "options" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_get_device_config(hass, config_entry): """Initiate the connection and services.""" @@ -77,11 +74,6 @@ async def close_insteon_connection(*args): await async_close() -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Insteon platform.""" - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up an Insteon entry.""" diff --git a/tests/components/insteon/const.py b/tests/components/insteon/const.py index c35db3b7092..a4e4e8a390d 100644 --- a/tests/components/insteon/const.py +++ b/tests/components/insteon/const.py @@ -79,5 +79,4 @@ PATCH_CONNECTION = "homeassistant.components.insteon.config_flow.async_connect" PATCH_CONNECTION_CLOSE = "homeassistant.components.insteon.config_flow.async_close" PATCH_DEVICES = "homeassistant.components.insteon.config_flow.devices" PATCH_USB_LIST = "homeassistant.components.insteon.config_flow.async_get_usb_ports" -PATCH_ASYNC_SETUP = "homeassistant.components.insteon.async_setup" PATCH_ASYNC_SETUP_ENTRY = "homeassistant.components.insteon.async_setup_entry" diff --git a/tests/components/insteon/test_config_flow.py b/tests/components/insteon/test_config_flow.py index 4d3fb815463..51fdd7a550d 100644 --- a/tests/components/insteon/test_config_flow.py +++ b/tests/components/insteon/test_config_flow.py @@ -25,7 +25,6 @@ from .const import ( MOCK_USER_INPUT_HUB_V2, MOCK_USER_INPUT_PLM, MOCK_USER_INPUT_PLM_MANUAL, - PATCH_ASYNC_SETUP, PATCH_ASYNC_SETUP_ENTRY, PATCH_CONNECTION, PATCH_USB_LIST, @@ -81,7 +80,6 @@ async def _device_form(hass, flow_id, connection, user_input): PATCH_CONNECTION, new=connection, ), - patch(PATCH_ASYNC_SETUP, return_value=True) as mock_setup, patch( PATCH_ASYNC_SETUP_ENTRY, return_value=True, @@ -89,7 +87,7 @@ async def _device_form(hass, flow_id, connection, user_input): ): result = await hass.config_entries.flow.async_configure(flow_id, user_input) await hass.async_block_till_done() - return result, mock_setup, mock_setup_entry + return result, mock_setup_entry async def test_form_select_modem(hass: HomeAssistant) -> None: @@ -125,13 +123,12 @@ async def test_form_select_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == MOCK_USER_INPUT_PLM - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -142,7 +139,7 @@ async def test_form_select_plm_no_usb(hass: HomeAssistant) -> None: USB_PORTS.clear() result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, None ) USB_PORTS.update(temp_usb_list) @@ -155,18 +152,17 @@ async def test_form_select_plm_manual(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, mock_setup, mock_setup_entry = await _device_form( + result3, mock_setup_entry = await _device_form( hass, 
result2["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["data"] == MOCK_USER_INPUT_PLM - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -175,7 +171,7 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V1) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V1 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -184,7 +180,6 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 1, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -193,7 +188,7 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -202,7 +197,6 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 2, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -233,7 +227,7 @@ async def test_failed_connection_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM @@ -245,10 +239,10 @@ async def test_failed_connection_plm_manually(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, _, _ = await _device_form( + result3, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result3["type"] is FlowResultType.FORM @@ -260,7 +254,7 @@ async def test_failed_connection_hub(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.FORM @@ -284,7 +278,7 @@ async def test_discovery_via_usb(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm_usb" - with patch(PATCH_CONNECTION), patch(PATCH_ASYNC_SETUP, return_value=True): + with patch(PATCH_CONNECTION): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) From b9251e94a910f0229e21d613131a473f50c83550 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Tue, 6 Aug 2024 10:15:48 +0200 Subject: [PATCH 0413/1635] Bump solarlog_cli to v0.1.6 (#123218) bump solarlog_cli to v0.1.6 --- homeassistant/components/solarlog/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/solarlog/manifest.json b/homeassistant/components/solarlog/manifest.json index 0878d652f43..0c097b7146d 100644 --- 
a/homeassistant/components/solarlog/manifest.json +++ b/homeassistant/components/solarlog/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/solarlog", "iot_class": "local_polling", "loggers": ["solarlog_cli"], - "requirements": ["solarlog_cli==0.1.5"] + "requirements": ["solarlog_cli==0.1.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 927b3d45310..79c4b327796 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2622,7 +2622,7 @@ soco==0.30.4 solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.1.5 +solarlog_cli==0.1.6 # homeassistant.components.solax solax==3.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a75a4b6cec8..7220510facd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2068,7 +2068,7 @@ snapcast==2.3.6 soco==0.30.4 # homeassistant.components.solarlog -solarlog_cli==0.1.5 +solarlog_cli==0.1.6 # homeassistant.components.solax solax==3.1.1 From bc02925630217105ee6a627169918e0ce6714f73 Mon Sep 17 00:00:00 2001 From: flopp999 <21694965+flopp999@users.noreply.github.com> Date: Tue, 6 Aug 2024 10:21:48 +0200 Subject: [PATCH 0414/1635] Fix growatt server tlx battery api key (#123191) --- .../components/growatt_server/sensor_types/tlx.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/growatt_server/sensor_types/tlx.py b/homeassistant/components/growatt_server/sensor_types/tlx.py index d8f158f2421..bf8746e08ac 100644 --- a/homeassistant/components/growatt_server/sensor_types/tlx.py +++ b/homeassistant/components/growatt_server/sensor_types/tlx.py @@ -327,14 +327,14 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = ( GrowattSensorEntityDescription( key="tlx_battery_2_discharge_w", translation_key="tlx_battery_2_discharge_w", - api_key="bdc1DischargePower", + api_key="bdc2DischargePower", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), GrowattSensorEntityDescription( key="tlx_battery_2_discharge_total", translation_key="tlx_battery_2_discharge_total", - api_key="bdc1DischargeTotal", + api_key="bdc2DischargeTotal", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, @@ -376,14 +376,14 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] 
= ( GrowattSensorEntityDescription( key="tlx_battery_2_charge_w", translation_key="tlx_battery_2_charge_w", - api_key="bdc1ChargePower", + api_key="bdc2ChargePower", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), GrowattSensorEntityDescription( key="tlx_battery_2_charge_total", translation_key="tlx_battery_2_charge_total", - api_key="bdc1ChargeTotal", + api_key="bdc2ChargeTotal", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, From 86d8c3b31ac9d0c36ca77ba8b191999827c9e357 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 6 Aug 2024 11:51:04 +0200 Subject: [PATCH 0415/1635] Update knx-frontend to 2024.8.6.85349 (#123226) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 0f96970f3ae..37206df4c83 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.0.0", "xknxproject==3.7.1", - "knx-frontend==2024.7.25.204106" + "knx-frontend==2024.8.6.85349" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 79c4b327796..aeee59b04ff 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1216,7 +1216,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.7.25.204106 +knx-frontend==2024.8.6.85349 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7220510facd..a67210ffe30 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1015,7 +1015,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.7.25.204106 +knx-frontend==2024.8.6.85349 # homeassistant.components.konnected konnected==1.2.0 From a2dd017229dd2ca0b5efb294c9199d35d47ca455 Mon Sep 17 00:00:00 2001 From: Guy Lowe Date: Tue, 6 Aug 2024 21:56:39 +1200 Subject: [PATCH 0416/1635] Add unit tests for SNMP integer Switches (#123094) * Add unit tests for SNMP Switches (integer only) * Add unit test for SNMP switches (integer unknown) * log a warning when SNMP response is not a recognised payload * Use a single configuration for all test_integer_switch tests * Tweak unknown SNMP response warning * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * import STATE_ consts * rename tests/components/snmp/test_integer_switch.py to test_switch.py * check that a warning is logged if the SNMP response payload is unknown --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/snmp/switch.py | 5 ++ tests/components/snmp/test_switch.py | 67 +++++++++++++++++++++++++ 2 files changed, 72 insertions(+) create mode 100644 tests/components/snmp/test_switch.py diff --git a/homeassistant/components/snmp/switch.py b/homeassistant/components/snmp/switch.py index e3ce09cbf48..92e27daed6c 100644 --- a/homeassistant/components/snmp/switch.py +++ b/homeassistant/components/snmp/switch.py @@ -277,6 +277,11 @@ class SnmpSwitch(SwitchEntity): ): self._state = False else: + _LOGGER.warning( + "Invalid payload '%s' received for entity %s, state is unknown", + resrow[-1], + self.entity_id, + ) self._state = None @property diff --git a/tests/components/snmp/test_switch.py 
b/tests/components/snmp/test_switch.py new file mode 100644 index 00000000000..adb9d1c59d0 --- /dev/null +++ b/tests/components/snmp/test_switch.py @@ -0,0 +1,67 @@ +"""SNMP switch tests.""" + +from unittest.mock import patch + +from pysnmp.hlapi import Integer32 +import pytest + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +config = { + SWITCH_DOMAIN: { + "platform": "snmp", + "host": "192.168.1.32", + # ippower-mib::ippoweroutlet1.0 + "baseoid": "1.3.6.1.4.1.38107.1.3.1.0", + "payload_on": 1, + "payload_off": 0, + }, +} + + +async def test_snmp_integer_switch_off(hass: HomeAssistant) -> None: + """Test snmp switch returning int 0 for off.""" + + mock_data = Integer32(0) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_OFF + + +async def test_snmp_integer_switch_on(hass: HomeAssistant) -> None: + """Test snmp switch returning int 1 for on.""" + + mock_data = Integer32(1) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_ON + + +async def test_snmp_integer_switch_unknown( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test snmp switch returning int 3 (not a configured payload) for unknown.""" + + mock_data = Integer32(3) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_UNKNOWN + assert "Invalid payload '3' received for entity" in caplog.text From 78d1cd79af3aec7b7e649f53477fd8b97380be7b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 6 Aug 2024 12:22:14 +0200 Subject: [PATCH 0417/1635] Bump yt-dlp to 2024.08.06 (#123229) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index cd312413db3..2285d7bce7d 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp==2024.07.16"], + "requirements": ["yt-dlp==2024.08.06"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index aeee59b04ff..234bfe664c6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2971,7 +2971,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.16 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a67210ffe30..7c5b7b47261 100644 --- a/requirements_test_all.txt +++
b/requirements_test_all.txt @@ -2351,7 +2351,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.16 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 From 1fc6ce3acd29a93982798110f65cb8237670f309 Mon Sep 17 00:00:00 2001 From: Petro31 <35082313+Petro31@users.noreply.github.com> Date: Tue, 6 Aug 2024 06:35:47 -0400 Subject: [PATCH 0418/1635] Fix yamaha legacy receivers (#122985) --- .../components/yamaha/media_player.py | 15 +++++--- tests/components/yamaha/test_media_player.py | 37 ++++++++++++++++++- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index 507f485fcc7..a8200ea3373 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +import contextlib import logging from typing import Any @@ -129,11 +130,15 @@ def _discovery(config_info): else: _LOGGER.debug("Config Zones") zones = None - for recv in rxv.find(): - if recv.ctrl_url == config_info.ctrl_url: - _LOGGER.debug("Config Zones Matched %s", config_info.ctrl_url) - zones = recv.zone_controllers() - break + + # Fix for upstream issues in rxv.find() with some hardware. + with contextlib.suppress(AttributeError): + for recv in rxv.find(): + if recv.ctrl_url == config_info.ctrl_url: + _LOGGER.debug("Config Zones Matched %s", config_info.ctrl_url) + zones = recv.zone_controllers() + break + if not zones: _LOGGER.debug("Config Zones Fallback") zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 66d0a42f256..804b800aaef 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -53,7 +53,20 @@ def device_fixture(main_zone): yield device -async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: +@pytest.fixture(name="device2") +def device2_fixture(main_zone): + """Mock the yamaha device.""" + device = FakeYamahaDevice( + "http://127.0.0.1:80/YamahaRemoteControl/ctrl", "Receiver 2", zones=[main_zone] + ) + with ( + patch("rxv.RXV", return_value=device), + patch("rxv.find", return_value=[device]), + ): + yield device + + +async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> None: """Test set up integration with host.""" assert await async_setup_component(hass, MP_DOMAIN, CONFIG) await hass.async_block_till_done() @@ -63,6 +76,28 @@ async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: assert state is not None assert state.state == "off" + with patch("rxv.find", return_value=[device2]): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + + +async def test_setup_attribute_error(hass: HomeAssistant, device, main_zone) -> None: + """Test set up integration encountering an Attribute Error.""" + + with patch("rxv.find", side_effect=AttributeError): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + async def test_setup_no_host(hass: HomeAssistant, device, main_zone) -> 
None: """Test set up integration without host.""" From bc380859e8b26141c02470f13140ed5052331069 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 12:48:15 +0200 Subject: [PATCH 0419/1635] Update frontend to 20240806.0 (#123230) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 82dc9cdb83f..a91feb82461 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240805.1"] + "requirements": ["home-assistant-frontend==20240806.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9f327697a19..9622c1acac0 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240805.1 +home-assistant-frontend==20240806.0 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 234bfe664c6..97419def421 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240805.1 +home-assistant-frontend==20240806.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7c5b7b47261..257ea036d3e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -919,7 +919,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240805.1 +home-assistant-frontend==20240806.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From fd5533d719ee82177579df92b225214b14ff7d9a Mon Sep 17 00:00:00 2001 From: Petro31 <35082313+Petro31@users.noreply.github.com> Date: Tue, 6 Aug 2024 06:35:47 -0400 Subject: [PATCH 0420/1635] Fix yamaha legacy receivers (#122985) --- .../components/yamaha/media_player.py | 15 +++++--- tests/components/yamaha/test_media_player.py | 37 ++++++++++++++++++- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index 507f485fcc7..a8200ea3373 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +import contextlib import logging from typing import Any @@ -129,11 +130,15 @@ def _discovery(config_info): else: _LOGGER.debug("Config Zones") zones = None - for recv in rxv.find(): - if recv.ctrl_url == config_info.ctrl_url: - _LOGGER.debug("Config Zones Matched %s", config_info.ctrl_url) - zones = recv.zone_controllers() - break + + # Fix for upstream issues in rxv.find() with some hardware. 
+ with contextlib.suppress(AttributeError): + for recv in rxv.find(): + if recv.ctrl_url == config_info.ctrl_url: + _LOGGER.debug("Config Zones Matched %s", config_info.ctrl_url) + zones = recv.zone_controllers() + break + if not zones: _LOGGER.debug("Config Zones Fallback") zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 66d0a42f256..804b800aaef 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -53,7 +53,20 @@ def device_fixture(main_zone): yield device -async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: +@pytest.fixture(name="device2") +def device2_fixture(main_zone): + """Mock the yamaha device.""" + device = FakeYamahaDevice( + "http://127.0.0.1:80/YamahaRemoteControl/ctrl", "Receiver 2", zones=[main_zone] + ) + with ( + patch("rxv.RXV", return_value=device), + patch("rxv.find", return_value=[device]), + ): + yield device + + +async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> None: """Test set up integration with host.""" assert await async_setup_component(hass, MP_DOMAIN, CONFIG) await hass.async_block_till_done() @@ -63,6 +76,28 @@ async def test_setup_host(hass: HomeAssistant, device, main_zone) -> None: assert state is not None assert state.state == "off" + with patch("rxv.find", return_value=[device2]): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + + +async def test_setup_attribute_error(hass: HomeAssistant, device, main_zone) -> None: + """Test set up integration encountering an Attribute Error.""" + + with patch("rxv.find", side_effect=AttributeError): + assert await async_setup_component(hass, MP_DOMAIN, CONFIG) + await hass.async_block_till_done() + + state = hass.states.get("media_player.yamaha_receiver_main_zone") + + assert state is not None + assert state.state == "off" + async def test_setup_no_host(hass: HomeAssistant, device, main_zone) -> None: """Test set up integration without host.""" From 6d47a4d7e4d927f1d61448b32f3c6d7a6dc81ecf Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 6 Aug 2024 20:12:31 +1200 Subject: [PATCH 0421/1635] Add support for ESPHome update entities to be checked on demand (#123161) --- homeassistant/components/esphome/manifest.json | 2 +- homeassistant/components/esphome/update.py | 11 +++++++++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/esphome/test_update.py | 14 ++++++++++++++ 5 files changed, 26 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index ff7569bbc5f..97724a12203 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==24.6.2", + "aioesphomeapi==25.0.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/homeassistant/components/esphome/update.py b/homeassistant/components/esphome/update.py index e86c88ddf5b..b7905fb4fdb 100644 --- a/homeassistant/components/esphome/update.py +++ b/homeassistant/components/esphome/update.py @@ -8,6 +8,7 @@ 
from typing import Any from aioesphomeapi import ( DeviceInfo as ESPHomeDeviceInfo, EntityInfo, + UpdateCommand, UpdateInfo, UpdateState, ) @@ -259,9 +260,15 @@ class ESPHomeUpdateEntity(EsphomeEntity[UpdateInfo, UpdateState], UpdateEntity): """Return the title of the update.""" return self._state.title + @convert_api_error_ha_error + async def async_update(self) -> None: + """Command device to check for update.""" + if self.available: + self._client.update_command(key=self._key, command=UpdateCommand.CHECK) + @convert_api_error_ha_error async def async_install( self, version: str | None, backup: bool, **kwargs: Any ) -> None: - """Update the current value.""" - self._client.update_command(key=self._key, install=True) + """Command device to install update.""" + self._client.update_command(key=self._key, command=UpdateCommand.INSTALL) diff --git a/requirements_all.txt b/requirements_all.txt index 2fd2c93a687..f54ae205864 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.2 +aioesphomeapi==25.0.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3a71321bc5b..2974d065380 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==24.6.2 +aioesphomeapi==25.0.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index c9826c3f347..83e89b1de00 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -8,6 +8,7 @@ from aioesphomeapi import ( APIClient, EntityInfo, EntityState, + UpdateCommand, UpdateInfo, UpdateState, UserService, @@ -15,6 +16,10 @@ from aioesphomeapi import ( import pytest from homeassistant.components.esphome.dashboard import async_get_dashboard +from homeassistant.components.homeassistant import ( + DOMAIN as HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, +) from homeassistant.components.update import ( DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, @@ -527,3 +532,12 @@ async def test_generic_device_update_entity_has_update( assert state is not None assert state.state == STATE_ON assert state.attributes["in_progress"] == 50 + + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: "update.test_myupdate"}, + blocking=True, + ) + + mock_client.update_command.assert_called_with(key=1, command=UpdateCommand.CHECK) From 6af1e25d7e2a83463b2ac33f9e61442dd7aeb6eb Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 6 Aug 2024 20:11:08 +1200 Subject: [PATCH 0422/1635] Show project version as `sw_version` in ESPHome (#123183) --- homeassistant/components/esphome/manager.py | 6 +++--- tests/components/esphome/test_manager.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index e8d002fba9d..ef2a6862f10 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -654,12 +654,13 @@ def _async_setup_device_registry( if device_info.manufacturer: manufacturer = device_info.manufacturer model = device_info.model - hw_version = None if device_info.project_name: project_name = 
device_info.project_name.split(".") manufacturer = project_name[0] model = project_name[1] - hw_version = device_info.project_version + sw_version = ( + f"{device_info.project_version} (ESPHome {device_info.esphome_version})" + ) suggested_area = None if device_info.suggested_area: @@ -674,7 +675,6 @@ def _async_setup_device_registry( manufacturer=manufacturer, model=model, sw_version=sw_version, - hw_version=hw_version, suggested_area=suggested_area, ) return device_entry.id diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 01f267581f4..651c52cd083 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -1024,7 +1024,7 @@ async def test_esphome_device_with_project( ) assert dev.manufacturer == "mfr" assert dev.model == "model" - assert dev.hw_version == "2.2.2" + assert dev.sw_version == "2.2.2 (ESPHome 1.0.0)" async def test_esphome_device_with_manufacturer( From 495fd946bc044d251b07500336490c58e84f093d Mon Sep 17 00:00:00 2001 From: flopp999 <21694965+flopp999@users.noreply.github.com> Date: Tue, 6 Aug 2024 10:21:48 +0200 Subject: [PATCH 0423/1635] Fix growatt server tlx battery api key (#123191) --- .../components/growatt_server/sensor_types/tlx.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/growatt_server/sensor_types/tlx.py b/homeassistant/components/growatt_server/sensor_types/tlx.py index d8f158f2421..bf8746e08ac 100644 --- a/homeassistant/components/growatt_server/sensor_types/tlx.py +++ b/homeassistant/components/growatt_server/sensor_types/tlx.py @@ -327,14 +327,14 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = ( GrowattSensorEntityDescription( key="tlx_battery_2_discharge_w", translation_key="tlx_battery_2_discharge_w", - api_key="bdc1DischargePower", + api_key="bdc2DischargePower", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), GrowattSensorEntityDescription( key="tlx_battery_2_discharge_total", translation_key="tlx_battery_2_discharge_total", - api_key="bdc1DischargeTotal", + api_key="bdc2DischargeTotal", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, @@ -376,14 +376,14 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] 
= ( GrowattSensorEntityDescription( key="tlx_battery_2_charge_w", translation_key="tlx_battery_2_charge_w", - api_key="bdc1ChargePower", + api_key="bdc2ChargePower", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), GrowattSensorEntityDescription( key="tlx_battery_2_charge_total", translation_key="tlx_battery_2_charge_total", - api_key="bdc1ChargeTotal", + api_key="bdc2ChargeTotal", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, From f796950493eeedd9be486684b4f1fdd4661229ab Mon Sep 17 00:00:00 2001 From: Clifford Roche <1007595+cmroche@users.noreply.github.com> Date: Tue, 6 Aug 2024 04:06:35 -0400 Subject: [PATCH 0424/1635] Update greeclimate to 2.1.0 (#123210) --- homeassistant/components/gree/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gree/manifest.json b/homeassistant/components/gree/manifest.json index ca1c4b5b754..dba8cd6077c 100644 --- a/homeassistant/components/gree/manifest.json +++ b/homeassistant/components/gree/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/gree", "iot_class": "local_polling", "loggers": ["greeclimate"], - "requirements": ["greeclimate==2.0.0"] + "requirements": ["greeclimate==2.1.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index f54ae205864..04daff1f122 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1007,7 +1007,7 @@ gpiozero==1.6.2 gps3==0.33.3 # homeassistant.components.gree -greeclimate==2.0.0 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2974d065380..617a239e388 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -848,7 +848,7 @@ govee-local-api==1.5.1 gps3==0.33.3 # homeassistant.components.gree -greeclimate==2.0.0 +greeclimate==2.1.0 # homeassistant.components.greeneye_monitor greeneye_monitor==3.0.3 From 01b54fe1a97faa0100c76282feb3d843c43ff632 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 6 Aug 2024 11:51:04 +0200 Subject: [PATCH 0425/1635] Update knx-frontend to 2024.8.6.85349 (#123226) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 0f96970f3ae..37206df4c83 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.0.0", "xknxproject==3.7.1", - "knx-frontend==2024.7.25.204106" + "knx-frontend==2024.8.6.85349" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 04daff1f122..f62e8ba616f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1216,7 +1216,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.7.25.204106 +knx-frontend==2024.8.6.85349 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 617a239e388..3475a4f9f9a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1012,7 +1012,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx 
-knx-frontend==2024.7.25.204106 +knx-frontend==2024.8.6.85349 # homeassistant.components.konnected konnected==1.2.0 From 97587fae08ffb0c0a9463daa345b2b3507466c29 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 6 Aug 2024 12:22:14 +0200 Subject: [PATCH 0426/1635] Bump yt-dlp to 2024.08.06 (#123229) --- homeassistant/components/media_extractor/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/media_extractor/manifest.json b/homeassistant/components/media_extractor/manifest.json index cd312413db3..2285d7bce7d 100644 --- a/homeassistant/components/media_extractor/manifest.json +++ b/homeassistant/components/media_extractor/manifest.json @@ -8,6 +8,6 @@ "iot_class": "calculated", "loggers": ["yt_dlp"], "quality_scale": "internal", - "requirements": ["yt-dlp==2024.07.16"], + "requirements": ["yt-dlp==2024.08.06"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index f62e8ba616f..e275d37d0cd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2971,7 +2971,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.16 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 39fff2f33c9..e9f7b37122c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2348,7 +2348,7 @@ youless-api==2.1.2 youtubeaio==1.1.5 # homeassistant.components.media_extractor -yt-dlp==2024.07.16 +yt-dlp==2024.08.06 # homeassistant.components.zamg zamg==0.3.6 From 77bcbbcf538abcd89aca33c142e20ad2cf778cfe Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 12:48:15 +0200 Subject: [PATCH 0427/1635] Update frontend to 20240806.0 (#123230) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 82dc9cdb83f..a91feb82461 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240805.1"] + "requirements": ["home-assistant-frontend==20240806.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6fc0d6f535d..96cf6d7624c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240805.1 +home-assistant-frontend==20240806.0 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index e275d37d0cd..4614d760a84 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240805.1 +home-assistant-frontend==20240806.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 39fff2f33c9..e9f7b37122c 100644 ---
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -916,7 +916,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240805.1 +home-assistant-frontend==20240806.0 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From d24a87145df931131c0fc3f4cf2ec46bebe82265 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 13:18:22 +0200 Subject: [PATCH 0428/1635] Mark Alexa integration as system type (#123232) --- homeassistant/components/alexa/manifest.json | 1 + homeassistant/generated/integrations.json | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/alexa/manifest.json b/homeassistant/components/alexa/manifest.json index 84a4e152c1d..de59d28925f 100644 --- a/homeassistant/components/alexa/manifest.json +++ b/homeassistant/components/alexa/manifest.json @@ -5,5 +5,6 @@ "codeowners": ["@home-assistant/cloud", "@ochlocracy", "@jbouwh"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/alexa", + "integration_type": "system", "iot_class": "cloud_push" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 2516f25e6f2..2e68bee3146 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -200,12 +200,6 @@ "amazon": { "name": "Amazon", "integrations": { - "alexa": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Amazon Alexa" - }, "amazon_polly": { "integration_type": "hub", "config_flow": false, From 9d2c2d90c83a5cbf77a261c440caaee209853057 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 13:20:32 +0200 Subject: [PATCH 0429/1635] Mark Google Assistant integration as system type (#123233) --- homeassistant/components/google_assistant/manifest.json | 1 + homeassistant/generated/integrations.json | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/google_assistant/manifest.json b/homeassistant/components/google_assistant/manifest.json index e36f6a1ca87..a38ea4f7cfb 100644 --- a/homeassistant/components/google_assistant/manifest.json +++ b/homeassistant/components/google_assistant/manifest.json @@ -5,5 +5,6 @@ "codeowners": ["@home-assistant/cloud"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/google_assistant", + "integration_type": "system", "iot_class": "cloud_push" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 2e68bee3146..69d65fd4bd8 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2225,12 +2225,6 @@ "google": { "name": "Google", "integrations": { - "google_assistant": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Google Assistant" - }, "google_assistant_sdk": { "integration_type": "service", "config_flow": true, From c1953e938d530f7dd1056ede09cc0ee32942551d Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 13:18:22 +0200 Subject: [PATCH 0430/1635] Mark Alexa integration as system type (#123232) --- homeassistant/components/alexa/manifest.json | 1 + homeassistant/generated/integrations.json | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/alexa/manifest.json b/homeassistant/components/alexa/manifest.json index 84a4e152c1d..de59d28925f 100644 --- 
a/homeassistant/components/alexa/manifest.json +++ b/homeassistant/components/alexa/manifest.json @@ -5,5 +5,6 @@ "codeowners": ["@home-assistant/cloud", "@ochlocracy", "@jbouwh"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/alexa", + "integration_type": "system", "iot_class": "cloud_push" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 5107587ab89..73c60c1373e 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -200,12 +200,6 @@ "amazon": { "name": "Amazon", "integrations": { - "alexa": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Amazon Alexa" - }, "amazon_polly": { "integration_type": "hub", "config_flow": false, From 5b2e188b528d2a3ae1bc3b8cb3a03badd028b4d3 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 13:20:32 +0200 Subject: [PATCH 0431/1635] Mark Google Assistant integration as system type (#123233) --- homeassistant/components/google_assistant/manifest.json | 1 + homeassistant/generated/integrations.json | 6 ------ 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/google_assistant/manifest.json b/homeassistant/components/google_assistant/manifest.json index e36f6a1ca87..a38ea4f7cfb 100644 --- a/homeassistant/components/google_assistant/manifest.json +++ b/homeassistant/components/google_assistant/manifest.json @@ -5,5 +5,6 @@ "codeowners": ["@home-assistant/cloud"], "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/google_assistant", + "integration_type": "system", "iot_class": "cloud_push" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 73c60c1373e..85369b238db 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2236,12 +2236,6 @@ "google": { "name": "Google", "integrations": { - "google_assistant": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push", - "name": "Google Assistant" - }, "google_assistant_sdk": { "integration_type": "service", "config_flow": true, From e9fe98f7f9bf2fd04d29c2145bd4719d06144db1 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 13:22:46 +0200 Subject: [PATCH 0432/1635] Bump version to 2024.8.0b4 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 981fc42fd36..ed0a44bb7cc 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b3" +PATCH_VERSION: Final = "0b4" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index c29252b1ced..86fa1c8bbc1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b3" +version = "2024.8.0b4" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 7cb94e6392326c495948b6874fbf637a31355d3b Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 6 Aug 2024 14:01:48 +0200 Subject: [PATCH 0433/1635] Bump uvcclient to 0.12.1 (#123237) --- homeassistant/components/uvc/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index 0553eba320a..c72b865b5ef 100644 --- a/homeassistant/components/uvc/manifest.json +++ b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], - "requirements": ["uvcclient==0.11.1"] + "requirements": ["uvcclient==0.12.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 97419def421..42d6f362e1f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2842,7 +2842,7 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.1 +uvcclient==0.12.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 257ea036d3e..bb61d52b171 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2240,7 +2240,7 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.1 +uvcclient==0.12.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 From 260642345d2a2b1f335846b14fc455bc2c2ed9eb Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 6 Aug 2024 14:55:14 +0200 Subject: [PATCH 0434/1635] Delete mobile_app cloudhook if not logged into the cloud (#123234) --- .../components/mobile_app/__init__.py | 18 ++++++--- tests/components/mobile_app/test_init.py | 39 +++++++++++++++++++ 2 files changed, 51 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mobile_app/__init__.py b/homeassistant/components/mobile_app/__init__.py index a8577cc596d..80893e0cbfa 100644 --- a/homeassistant/components/mobile_app/__init__.py +++ b/homeassistant/components/mobile_app/__init__.py @@ -124,12 +124,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ): await async_create_cloud_hook(hass, webhook_id, entry) - if ( - CONF_CLOUDHOOK_URL not in entry.data - and cloud.async_active_subscription(hass) - and cloud.async_is_connected(hass) - ): - await async_create_cloud_hook(hass, webhook_id, entry) + if cloud.async_is_logged_in(hass): + if ( + CONF_CLOUDHOOK_URL not in entry.data + and cloud.async_active_subscription(hass) + and cloud.async_is_connected(hass) + ): + await async_create_cloud_hook(hass, webhook_id, entry) + elif CONF_CLOUDHOOK_URL in entry.data: + # If we have a cloudhook but no longer logged in to the cloud, remove it from the entry + data = dict(entry.data) + data.pop(CONF_CLOUDHOOK_URL) + hass.config_entries.async_update_entry(entry, data=data) entry.async_on_unload(cloud.async_listen_connection_change(hass, manage_cloudhook)) diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index 15380a0d8d7..e1c7ed27cf9 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -89,6 +89,7 @@ async def _test_create_cloud_hook( "homeassistant.components.cloud.async_active_subscription", return_value=async_active_subscription_return_value, ), + 
patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), patch("homeassistant.components.cloud.async_is_connected", return_value=True), patch( "homeassistant.components.cloud.async_get_or_create_cloudhook", @@ -187,3 +188,41 @@ async def test_create_cloud_hook_after_connection( ) await _test_create_cloud_hook(hass, hass_admin_user, {}, False, additional_steps) + + +@pytest.mark.parametrize( + ("cloud_logged_in", "should_cloudhook_exist"), + [(True, True), (False, False)], +) +async def test_delete_cloud_hook( + hass: HomeAssistant, + hass_admin_user: MockUser, + cloud_logged_in: bool, + should_cloudhook_exist: bool, +) -> None: + """Test deleting the cloud hook only when logged out of the cloud.""" + + config_entry = MockConfigEntry( + data={ + **REGISTER_CLEARTEXT, + CONF_WEBHOOK_ID: "test-webhook-id", + ATTR_DEVICE_NAME: "Test", + ATTR_DEVICE_ID: "Test", + CONF_USER_ID: hass_admin_user.id, + CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", + }, + domain=DOMAIN, + title="Test", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.cloud.async_is_logged_in", + return_value=cloud_logged_in, + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist From f94bf51bb53ff66fbb340c33c091067a56a97808 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 14:55:37 +0200 Subject: [PATCH 0435/1635] Remove myself from DSMR codeowners (#123243) --- CODEOWNERS | 4 ++-- homeassistant/components/dsmr/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index c9b3a53184f..710846a7f42 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -349,8 +349,8 @@ build.json @home-assistant/supervisor /tests/components/dremel_3d_printer/ @tkdrob /homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer /tests/components/drop_connect/ @ChandlerSystems @pfrazer -/homeassistant/components/dsmr/ @Robbie1221 @frenck -/tests/components/dsmr/ @Robbie1221 @frenck +/homeassistant/components/dsmr/ @Robbie1221 +/tests/components/dsmr/ @Robbie1221 /homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna /tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna /homeassistant/components/duotecno/ @cereal2nd diff --git a/homeassistant/components/dsmr/manifest.json b/homeassistant/components/dsmr/manifest.json index 5490b2a6503..561f06d1bbe 100644 --- a/homeassistant/components/dsmr/manifest.json +++ b/homeassistant/components/dsmr/manifest.json @@ -1,7 +1,7 @@ { "domain": "dsmr", "name": "DSMR Smart Meter", - "codeowners": ["@Robbie1221", "@frenck"], + "codeowners": ["@Robbie1221"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/dsmr", "integration_type": "hub", From fe4e6f24f547f398a03b4ce60670a4c63aae6403 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 6 Aug 2024 08:32:58 -0500 Subject: [PATCH 0436/1635] Fix sense doing blocking I/O in the event loop (#123247) --- homeassistant/components/sense/__init__.py | 12 ++++++++++-- homeassistant/components/sense/config_flow.py | 12 ++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/sense/__init__.py b/homeassistant/components/sense/__init__.py index 28408c0cb7d..58e993ad6e0 100644 --- a/homeassistant/components/sense/__init__.py +++ b/homeassistant/components/sense/__init__.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from datetime import timedelta +from functools import partial import logging from typing import Any @@ -80,8 +81,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo client_session = async_get_clientsession(hass) - gateway = ASyncSenseable( - api_timeout=timeout, wss_timeout=timeout, client_session=client_session + # Creating the AsyncSenseable object loads + # ssl certificates which does blocking IO + gateway = await hass.async_add_executor_job( + partial( + ASyncSenseable, + api_timeout=timeout, + wss_timeout=timeout, + client_session=client_session, + ) ) gateway.rate_limit = ACTIVE_UPDATE_RATE diff --git a/homeassistant/components/sense/config_flow.py b/homeassistant/components/sense/config_flow.py index 25c6898aec8..dab80b99e1a 100644 --- a/homeassistant/components/sense/config_flow.py +++ b/homeassistant/components/sense/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Sense integration.""" from collections.abc import Mapping +from functools import partial import logging from typing import Any @@ -48,8 +49,15 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): timeout = self._auth_data[CONF_TIMEOUT] client_session = async_get_clientsession(self.hass) - self._gateway = ASyncSenseable( - api_timeout=timeout, wss_timeout=timeout, client_session=client_session + # Creating the AsyncSenseable object loads + # ssl certificates which does blocking IO + self._gateway = await self.hass.async_add_executor_job( + partial( + ASyncSenseable, + api_timeout=timeout, + wss_timeout=timeout, + client_session=client_session, + ) ) self._gateway.rate_limit = ACTIVE_UPDATE_RATE await self._gateway.authenticate( From 1eaaa00687e8d86c09c18d4b79f160856d991c2d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 6 Aug 2024 09:00:37 -0500 Subject: [PATCH 0437/1635] Detect blocking ssl context creation in the event loop (#123240) --- homeassistant/block_async_io.py | 19 +++++++++++++++++++ tests/test_block_async_io.py | 24 ++++++++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 5b8ba535b5a..6ea0925574e 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -8,6 +8,7 @@ import glob from http.client import HTTPConnection import importlib import os +from ssl import SSLContext import sys import threading import time @@ -143,6 +144,24 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] 
= ( strict_core=False, skip_for_tests=True, ), + BlockingCall( + original_func=SSLContext.load_default_certs, + object=SSLContext, + function="load_default_certs", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=SSLContext.load_verify_locations, + object=SSLContext, + function="load_verify_locations", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), ) diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index ef4f9df60f6..78b8711310b 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -5,6 +5,7 @@ import glob import importlib import os from pathlib import Path, PurePosixPath +import ssl import time from typing import Any from unittest.mock import Mock, patch @@ -330,6 +331,29 @@ async def test_protect_loop_walk( assert "Detected blocking call to walk with args" not in caplog.text +async def test_protect_loop_load_default_certs( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_default_certs calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + assert "Detected blocking call to load_default_certs" in caplog.text + assert context + + +async def test_protect_loop_load_verify_locations( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_verify_locations calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + with pytest.raises(OSError): + context.load_verify_locations("/dev/null") + assert "Detected blocking call to load_verify_locations" in caplog.text + + async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> None: """Test opening a file in tests is ignored.""" assert block_async_io._IN_TESTS From c612cf95a8ac15a43ada3d5cf267f38e4e514bb5 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 16:04:29 +0200 Subject: [PATCH 0438/1635] Mark FFmpeg integration as system type (#123241) --- homeassistant/components/ffmpeg/manifest.json | 1 + homeassistant/generated/integrations.json | 5 ----- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/homeassistant/components/ffmpeg/manifest.json b/homeassistant/components/ffmpeg/manifest.json index 8cd7b1f504d..ab9f3ed65c1 100644 --- a/homeassistant/components/ffmpeg/manifest.json +++ b/homeassistant/components/ffmpeg/manifest.json @@ -3,5 +3,6 @@ "name": "FFmpeg", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ffmpeg", + "integration_type": "system", "requirements": ["ha-ffmpeg==3.2.0"] } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 69d65fd4bd8..f59e0883dd4 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1787,11 +1787,6 @@ "ffmpeg": { "name": "FFmpeg", "integrations": { - "ffmpeg": { - "integration_type": "hub", - "config_flow": false, - "name": "FFmpeg" - }, "ffmpeg_motion": { "integration_type": "hub", "config_flow": false, From 4627a565d32d0b20197923ea0c6fb200cc37e2fc Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 6 Aug 2024 16:16:22 +0200 Subject: [PATCH 0439/1635] Bump deebot-client to 8.3.0 (#123249) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- 
requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 8838eb4f50a..560ee4d599c 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.2.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 42d6f362e1f..b683866f6d9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -706,7 +706,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.2.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bb61d52b171..a55edb474ce 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -599,7 +599,7 @@ dbus-fast==2.22.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.2.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 9414e6d47213eacf0391a631cab3fda4759afe64 Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Tue, 6 Aug 2024 10:17:54 -0400 Subject: [PATCH 0440/1635] Adapt static resource handler to aiohttp 3.10 (#123166) --- homeassistant/components/http/static.py | 79 +++++++------------------ tests/components/http/test_static.py | 35 +++-------- 2 files changed, 30 insertions(+), 84 deletions(-) diff --git a/homeassistant/components/http/static.py b/homeassistant/components/http/static.py index a7280fb9b2f..29c5840a4bf 100644 --- a/homeassistant/components/http/static.py +++ b/homeassistant/components/http/static.py @@ -3,81 +3,46 @@ from __future__ import annotations from collections.abc import Mapping -import mimetypes from pathlib import Path from typing import Final -from aiohttp import hdrs +from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE from aiohttp.web import FileResponse, Request, StreamResponse -from aiohttp.web_exceptions import HTTPForbidden, HTTPNotFound +from aiohttp.web_fileresponse import CONTENT_TYPES, FALLBACK_CONTENT_TYPE from aiohttp.web_urldispatcher import StaticResource from lru import LRU -from .const import KEY_HASS - CACHE_TIME: Final = 31 * 86400 # = 1 month CACHE_HEADER = f"public, max-age={CACHE_TIME}" -CACHE_HEADERS: Mapping[str, str] = {hdrs.CACHE_CONTROL: CACHE_HEADER} -PATH_CACHE: LRU[tuple[str, Path], tuple[Path | None, str | None]] = LRU(512) - - -def _get_file_path(rel_url: str, directory: Path) -> Path | None: - """Return the path to file on disk or None.""" - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden - filepath: Path = directory.joinpath(filename).resolve() - filepath.relative_to(directory) - # on opening a dir, load its contents if allowed - if filepath.is_dir(): - return None - if filepath.is_file(): - return filepath - raise FileNotFoundError +CACHE_HEADERS: Mapping[str, str] = {CACHE_CONTROL: CACHE_HEADER} +RESPONSE_CACHE: LRU[tuple[str, Path], tuple[Path, str]] = LRU(512) class CachingStaticResource(StaticResource): """Static Resource handler that will add cache 
headers.""" async def _handle(self, request: Request) -> StreamResponse: - """Return requested file from disk as a FileResponse.""" + """Wrap base handler to cache file path resolution and content type guess.""" rel_url = request.match_info["filename"] key = (rel_url, self._directory) - if (filepath_content_type := PATH_CACHE.get(key)) is None: - hass = request.app[KEY_HASS] - try: - filepath = await hass.async_add_executor_job(_get_file_path, *key) - except (ValueError, FileNotFoundError) as error: - # relatively safe - raise HTTPNotFound from error - except HTTPForbidden: - # forbidden - raise - except Exception as error: - # perm error or other kind! - request.app.logger.exception("Unexpected exception") - raise HTTPNotFound from error + response: StreamResponse - content_type: str | None = None - if filepath is not None: - content_type = (mimetypes.guess_type(rel_url))[ - 0 - ] or "application/octet-stream" - PATH_CACHE[key] = (filepath, content_type) + if key in RESPONSE_CACHE: + file_path, content_type = RESPONSE_CACHE[key] + response = FileResponse(file_path, chunk_size=self._chunk_size) + response.headers[CONTENT_TYPE] = content_type else: - filepath, content_type = filepath_content_type - - if filepath and content_type: - return FileResponse( - filepath, - chunk_size=self._chunk_size, - headers={ - hdrs.CACHE_CONTROL: CACHE_HEADER, - hdrs.CONTENT_TYPE: content_type, - }, + response = await super()._handle(request) + if not isinstance(response, FileResponse): + # Must be directory index; ignore caching + return response + file_path = response._path # noqa: SLF001 + response.content_type = ( + CONTENT_TYPES.guess_type(file_path)[0] or FALLBACK_CONTENT_TYPE ) + # Cache actual header after setter construction. + content_type = response.headers[CONTENT_TYPE] + RESPONSE_CACHE[key] = (file_path, content_type) - raise HTTPForbidden if filepath is None else HTTPNotFound + response.headers[CACHE_CONTROL] = CACHE_HEADER + return response diff --git a/tests/components/http/test_static.py b/tests/components/http/test_static.py index 52a5db5daa7..2ac7c6ded93 100644 --- a/tests/components/http/test_static.py +++ b/tests/components/http/test_static.py @@ -4,11 +4,10 @@ from http import HTTPStatus from pathlib import Path from aiohttp.test_utils import TestClient -from aiohttp.web_exceptions import HTTPForbidden import pytest from homeassistant.components.http import StaticPathConfig -from homeassistant.components.http.static import CachingStaticResource, _get_file_path +from homeassistant.components.http.static import CachingStaticResource from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import HomeAssistant from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS @@ -31,37 +30,19 @@ async def mock_http_client(hass: HomeAssistant, aiohttp_client: ClientSessionGen return await aiohttp_client(hass.http.app, server_kwargs={"skip_url_asserts": True}) -@pytest.mark.parametrize( - ("url", "canonical_url"), - [ - ("//a", "//a"), - ("///a", "///a"), - ("/c:\\a\\b", "/c:%5Ca%5Cb"), - ], -) -async def test_static_path_blocks_anchors( - hass: HomeAssistant, - mock_http_client: TestClient, - tmp_path: Path, - url: str, - canonical_url: str, +async def test_static_resource_show_index( + hass: HomeAssistant, mock_http_client: TestClient, tmp_path: Path ) -> None: - """Test static paths block anchors.""" + """Test static resource will return a directory index.""" app = hass.http.app - resource = CachingStaticResource(url, str(tmp_path)) - assert resource.canonical == 
canonical_url + resource = CachingStaticResource("/", tmp_path, show_index=True) app.router.register_resource(resource) app[KEY_ALLOW_CONFIGURED_CORS](resource) - resp = await mock_http_client.get(canonical_url, allow_redirects=False) - assert resp.status == 403 - - # Tested directly since aiohttp will block it before - # it gets here but we want to make sure if aiohttp ever - # changes we still block it. - with pytest.raises(HTTPForbidden): - _get_file_path(canonical_url, tmp_path) + resp = await mock_http_client.get("/") + assert resp.status == 200 + assert resp.content_type == "text/html" async def test_async_register_static_paths( From 2000db57c8470df49a53df41607cd218c0faf285 Mon Sep 17 00:00:00 2001 From: Yehazkel Date: Tue, 6 Aug 2024 17:21:34 +0300 Subject: [PATCH 0441/1635] Fix Tami4 device name is None (#123156) Co-authored-by: Robert Resch --- homeassistant/components/tami4/config_flow.py | 5 ++- tests/components/tami4/conftest.py | 25 ++++++++++++++ tests/components/tami4/test_config_flow.py | 33 +++++++++++++++++++ 3 files changed, 62 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tami4/config_flow.py b/homeassistant/components/tami4/config_flow.py index 8c1edbfb60f..0fa05bbebe4 100644 --- a/homeassistant/components/tami4/config_flow.py +++ b/homeassistant/components/tami4/config_flow.py @@ -82,8 +82,11 @@ class Tami4ConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: + device_name = api.device_metadata.name + if device_name is None: + device_name = "Tami4" return self.async_create_entry( - title=api.device_metadata.name, + title=device_name, data={CONF_REFRESH_TOKEN: refresh_token}, ) diff --git a/tests/components/tami4/conftest.py b/tests/components/tami4/conftest.py index 2f4201d9a9e..2b4acac0b3f 100644 --- a/tests/components/tami4/conftest.py +++ b/tests/components/tami4/conftest.py @@ -60,6 +60,31 @@ def mock__get_devices_metadata(request: pytest.FixtureRequest) -> Generator[None yield +@pytest.fixture +def mock__get_devices_metadata_no_name( + request: pytest.FixtureRequest, +) -> Generator[None]: + """Fixture to mock _get_devices which makes a call to the API.""" + + side_effect = getattr(request, "param", None) + + device_metadata = DeviceMetadata( + id=1, + name=None, + connected=True, + psn="psn", + type="type", + device_firmware="v1.1", + ) + + with patch( + "Tami4EdgeAPI.Tami4EdgeAPI.Tami4EdgeAPI._get_devices_metadata", + return_value=[device_metadata], + side_effect=side_effect, + ): + yield + + @pytest.fixture def mock_get_device( request: pytest.FixtureRequest, diff --git a/tests/components/tami4/test_config_flow.py b/tests/components/tami4/test_config_flow.py index 4210c391d70..4dfc27bba94 100644 --- a/tests/components/tami4/test_config_flow.py +++ b/tests/components/tami4/test_config_flow.py @@ -120,6 +120,39 @@ async def test_step_otp_valid( assert "refresh_token" in result["data"] +@pytest.mark.usefixtures( + "mock_setup_entry", + "mock_request_otp", + "mock_submit_otp", + "mock__get_devices_metadata_no_name", +) +async def test_step_otp_valid_device_no_name(hass: HomeAssistant) -> None: + """Test user step with valid phone number.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PHONE: 
"+972555555555"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"otp": "123456"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Tami4" + assert "refresh_token" in result["data"] + + @pytest.mark.parametrize( ("mock_submit_otp", "expected_error"), [ From f9f3c7fb51ad69340283fb8f9d8af4809c41abb9 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 6 Aug 2024 16:28:37 +0200 Subject: [PATCH 0442/1635] Bump mficlient to 0.5.0 (#123250) --- homeassistant/components/mfi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 3 --- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mfi/manifest.json b/homeassistant/components/mfi/manifest.json index db9cb547b28..b569009d400 100644 --- a/homeassistant/components/mfi/manifest.json +++ b/homeassistant/components/mfi/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/mfi", "iot_class": "local_polling", "loggers": ["mficlient"], - "requirements": ["mficlient==0.3.0"] + "requirements": ["mficlient==0.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index b683866f6d9..b8d8f7ec2a6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1327,7 +1327,7 @@ meteoalertapi==0.3.0 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a55edb474ce..721e90fd89d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1096,7 +1096,7 @@ melnor-bluetooth==0.0.25 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 diff --git a/script/licenses.py b/script/licenses.py index e612b96794c..9bcc7b54540 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -179,9 +179,6 @@ TODO = { "aiocache": AwesomeVersion( "0.12.2" ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? 
- "mficlient": AwesomeVersion( - "0.3.0" - ), # No license https://github.com/kk7ds/mficlient/issues/4 "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) } From abaac519c9ceb8a54bfd2bc9344841b6c0382673 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 17:54:40 +0200 Subject: [PATCH 0443/1635] Update frontend to 20240806.1 (#123252) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index a91feb82461..de423ee9ac6 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240806.0"] + "requirements": ["home-assistant-frontend==20240806.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9622c1acac0..ca37ffbf4a8 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240806.0 +home-assistant-frontend==20240806.1 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index b8d8f7ec2a6..e6509060764 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240806.0 +home-assistant-frontend==20240806.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 721e90fd89d..42219d16de2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -919,7 +919,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240806.0 +home-assistant-frontend==20240806.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From a09d0117b1a225d20f08a8d7d45f1c6443103baa Mon Sep 17 00:00:00 2001 From: Yehazkel Date: Tue, 6 Aug 2024 17:21:34 +0300 Subject: [PATCH 0444/1635] Fix Tami4 device name is None (#123156) Co-authored-by: Robert Resch --- homeassistant/components/tami4/config_flow.py | 5 ++- tests/components/tami4/conftest.py | 25 ++++++++++++++ tests/components/tami4/test_config_flow.py | 33 +++++++++++++++++++ 3 files changed, 62 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tami4/config_flow.py b/homeassistant/components/tami4/config_flow.py index 8c1edbfb60f..0fa05bbebe4 100644 --- a/homeassistant/components/tami4/config_flow.py +++ b/homeassistant/components/tami4/config_flow.py @@ -82,8 +82,11 @@ class Tami4ConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: + device_name = api.device_metadata.name + if device_name is None: + device_name = "Tami4" return self.async_create_entry( - title=api.device_metadata.name, + title=device_name, data={CONF_REFRESH_TOKEN: refresh_token}, ) diff --git a/tests/components/tami4/conftest.py b/tests/components/tami4/conftest.py index 2f4201d9a9e..2b4acac0b3f 100644 --- 
a/tests/components/tami4/conftest.py +++ b/tests/components/tami4/conftest.py @@ -60,6 +60,31 @@ def mock__get_devices_metadata(request: pytest.FixtureRequest) -> Generator[None yield +@pytest.fixture +def mock__get_devices_metadata_no_name( + request: pytest.FixtureRequest, +) -> Generator[None]: + """Fixture to mock _get_devices which makes a call to the API.""" + + side_effect = getattr(request, "param", None) + + device_metadata = DeviceMetadata( + id=1, + name=None, + connected=True, + psn="psn", + type="type", + device_firmware="v1.1", + ) + + with patch( + "Tami4EdgeAPI.Tami4EdgeAPI.Tami4EdgeAPI._get_devices_metadata", + return_value=[device_metadata], + side_effect=side_effect, + ): + yield + + @pytest.fixture def mock_get_device( request: pytest.FixtureRequest, diff --git a/tests/components/tami4/test_config_flow.py b/tests/components/tami4/test_config_flow.py index 4210c391d70..4dfc27bba94 100644 --- a/tests/components/tami4/test_config_flow.py +++ b/tests/components/tami4/test_config_flow.py @@ -120,6 +120,39 @@ async def test_step_otp_valid( assert "refresh_token" in result["data"] +@pytest.mark.usefixtures( + "mock_setup_entry", + "mock_request_otp", + "mock_submit_otp", + "mock__get_devices_metadata_no_name", +) +async def test_step_otp_valid_device_no_name(hass: HomeAssistant) -> None: + """Test user step with valid phone number.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_PHONE: "+972555555555"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "otp" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={"otp": "123456"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Tami4" + assert "refresh_token" in result["data"] + + @pytest.mark.parametrize( ("mock_submit_otp", "expected_error"), [ From 35a6679ae9350351da3497862b95f08d5c3bfec6 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 6 Aug 2024 14:55:14 +0200 Subject: [PATCH 0445/1635] Delete mobile_app cloudhook if not logged into the cloud (#123234) --- .../components/mobile_app/__init__.py | 18 ++++++--- tests/components/mobile_app/test_init.py | 39 +++++++++++++++++++ 2 files changed, 51 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mobile_app/__init__.py b/homeassistant/components/mobile_app/__init__.py index a8577cc596d..80893e0cbfa 100644 --- a/homeassistant/components/mobile_app/__init__.py +++ b/homeassistant/components/mobile_app/__init__.py @@ -124,12 +124,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ): await async_create_cloud_hook(hass, webhook_id, entry) - if ( - CONF_CLOUDHOOK_URL not in entry.data - and cloud.async_active_subscription(hass) - and cloud.async_is_connected(hass) - ): - await async_create_cloud_hook(hass, webhook_id, entry) + if cloud.async_is_logged_in(hass): + if ( + CONF_CLOUDHOOK_URL not in entry.data + and cloud.async_active_subscription(hass) + and cloud.async_is_connected(hass) + ): + await async_create_cloud_hook(hass, webhook_id, entry) + elif CONF_CLOUDHOOK_URL in entry.data: + # If we have a cloudhook but no longer logged in to the cloud, remove it from the entry + data = dict(entry.data) + 
data.pop(CONF_CLOUDHOOK_URL) + hass.config_entries.async_update_entry(entry, data=data) entry.async_on_unload(cloud.async_listen_connection_change(hass, manage_cloudhook)) diff --git a/tests/components/mobile_app/test_init.py b/tests/components/mobile_app/test_init.py index 15380a0d8d7..e1c7ed27cf9 100644 --- a/tests/components/mobile_app/test_init.py +++ b/tests/components/mobile_app/test_init.py @@ -89,6 +89,7 @@ async def _test_create_cloud_hook( "homeassistant.components.cloud.async_active_subscription", return_value=async_active_subscription_return_value, ), + patch("homeassistant.components.cloud.async_is_logged_in", return_value=True), patch("homeassistant.components.cloud.async_is_connected", return_value=True), patch( "homeassistant.components.cloud.async_get_or_create_cloudhook", @@ -187,3 +188,41 @@ async def test_create_cloud_hook_after_connection( ) await _test_create_cloud_hook(hass, hass_admin_user, {}, False, additional_steps) + + +@pytest.mark.parametrize( + ("cloud_logged_in", "should_cloudhook_exist"), + [(True, True), (False, False)], +) +async def test_delete_cloud_hook( + hass: HomeAssistant, + hass_admin_user: MockUser, + cloud_logged_in: bool, + should_cloudhook_exist: bool, +) -> None: + """Test deleting the cloud hook only when logged out of the cloud.""" + + config_entry = MockConfigEntry( + data={ + **REGISTER_CLEARTEXT, + CONF_WEBHOOK_ID: "test-webhook-id", + ATTR_DEVICE_NAME: "Test", + ATTR_DEVICE_ID: "Test", + CONF_USER_ID: hass_admin_user.id, + CONF_CLOUDHOOK_URL: "https://hook-url-already-exists", + }, + domain=DOMAIN, + title="Test", + ) + config_entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.cloud.async_is_logged_in", + return_value=cloud_logged_in, + ), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + assert (CONF_CLOUDHOOK_URL in config_entry.data) == should_cloudhook_exist From 870bb7efd4e52953ecd8d56184a152341ebd9cae Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 16:04:29 +0200 Subject: [PATCH 0446/1635] Mark FFmpeg integration as system type (#123241) --- homeassistant/components/ffmpeg/manifest.json | 1 + homeassistant/generated/integrations.json | 5 ----- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/homeassistant/components/ffmpeg/manifest.json b/homeassistant/components/ffmpeg/manifest.json index 8cd7b1f504d..ab9f3ed65c1 100644 --- a/homeassistant/components/ffmpeg/manifest.json +++ b/homeassistant/components/ffmpeg/manifest.json @@ -3,5 +3,6 @@ "name": "FFmpeg", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/ffmpeg", + "integration_type": "system", "requirements": ["ha-ffmpeg==3.2.0"] } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 85369b238db..13009fb58be 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1798,11 +1798,6 @@ "ffmpeg": { "name": "FFmpeg", "integrations": { - "ffmpeg": { - "integration_type": "hub", - "config_flow": false, - "name": "FFmpeg" - }, "ffmpeg_motion": { "integration_type": "hub", "config_flow": false, From 7aae9d9ad3e405cf233fef401fc473ca94844bc8 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 6 Aug 2024 08:32:58 -0500 Subject: [PATCH 0447/1635] Fix sense doing blocking I/O in the event loop (#123247) --- homeassistant/components/sense/__init__.py | 12 ++++++++++-- homeassistant/components/sense/config_flow.py | 12 ++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/sense/__init__.py b/homeassistant/components/sense/__init__.py index 28408c0cb7d..58e993ad6e0 100644 --- a/homeassistant/components/sense/__init__.py +++ b/homeassistant/components/sense/__init__.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from datetime import timedelta +from functools import partial import logging from typing import Any @@ -80,8 +81,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: SenseConfigEntry) -> boo client_session = async_get_clientsession(hass) - gateway = ASyncSenseable( - api_timeout=timeout, wss_timeout=timeout, client_session=client_session + # Creating the AsyncSenseable object loads + # ssl certificates which does blocking IO + gateway = await hass.async_add_executor_job( + partial( + ASyncSenseable, + api_timeout=timeout, + wss_timeout=timeout, + client_session=client_session, + ) ) gateway.rate_limit = ACTIVE_UPDATE_RATE diff --git a/homeassistant/components/sense/config_flow.py b/homeassistant/components/sense/config_flow.py index 25c6898aec8..dab80b99e1a 100644 --- a/homeassistant/components/sense/config_flow.py +++ b/homeassistant/components/sense/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Sense integration.""" from collections.abc import Mapping +from functools import partial import logging from typing import Any @@ -48,8 +49,15 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): timeout = self._auth_data[CONF_TIMEOUT] client_session = async_get_clientsession(self.hass) - self._gateway = ASyncSenseable( - api_timeout=timeout, wss_timeout=timeout, client_session=client_session + # Creating the AsyncSenseable object loads + # ssl certificates which does blocking IO + self._gateway = await self.hass.async_add_executor_job( + partial( + ASyncSenseable, + api_timeout=timeout, + wss_timeout=timeout, + client_session=client_session, + ) ) self._gateway.rate_limit = ACTIVE_UPDATE_RATE await self._gateway.authenticate( From 3d0a0cf376e880528aba6532503c02b853b95738 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 6 Aug 2024 16:16:22 +0200 Subject: [PATCH 0448/1635] Bump deebot-client to 8.3.0 (#123249) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 8838eb4f50a..560ee4d599c 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.2.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4614d760a84..64a93d91eff 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -706,7 +706,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.2.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git 
a/requirements_test_all.txt b/requirements_test_all.txt index e9f7b37122c..d6f0efa56ff 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -599,7 +599,7 @@ dbus-fast==2.22.1 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.2.0 +deebot-client==8.3.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From 3cf3780587be1e8a287507375de07b421dff31df Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 6 Aug 2024 16:28:37 +0200 Subject: [PATCH 0449/1635] Bump mficlient to 0.5.0 (#123250) --- homeassistant/components/mfi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 3 --- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/mfi/manifest.json b/homeassistant/components/mfi/manifest.json index db9cb547b28..b569009d400 100644 --- a/homeassistant/components/mfi/manifest.json +++ b/homeassistant/components/mfi/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/mfi", "iot_class": "local_polling", "loggers": ["mficlient"], - "requirements": ["mficlient==0.3.0"] + "requirements": ["mficlient==0.5.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 64a93d91eff..a8670fad05e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1327,7 +1327,7 @@ meteoalertapi==0.3.0 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d6f0efa56ff..310e2cc9431 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1093,7 +1093,7 @@ melnor-bluetooth==0.0.25 meteofrance-api==1.3.0 # homeassistant.components.mfi -mficlient==0.3.0 +mficlient==0.5.0 # homeassistant.components.xiaomi_miio micloud==0.5 diff --git a/script/licenses.py b/script/licenses.py index 3b9ec389b08..dc89cdad9a9 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -182,9 +182,6 @@ TODO = { "asterisk_mbox": AwesomeVersion( "0.5.0" ), # No license, integration is deprecated and scheduled for removal in 2024.9.0 - "mficlient": AwesomeVersion( - "0.3.0" - ), # No license https://github.com/kk7ds/mficlient/issues/4 "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) "uvcclient": AwesomeVersion( "0.11.0" From a243ed5b238deff82f84d918f591066e5f13f3d1 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 17:54:40 +0200 Subject: [PATCH 0450/1635] Update frontend to 20240806.1 (#123252) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index a91feb82461..de423ee9ac6 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240806.0"] + "requirements": ["home-assistant-frontend==20240806.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 96cf6d7624c..13fbcabf0d1 100644 --- a/homeassistant/package_constraints.txt +++ 
a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240806.0 +home-assistant-frontend==20240806.1 home-assistant-intents==2024.7.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index a8670fad05e..d5658b2903e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1093,7 +1093,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240806.0 +home-assistant-frontend==20240806.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 310e2cc9431..b37af69bfc5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -916,7 +916,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240806.0 +home-assistant-frontend==20240806.1 # homeassistant.components.conversation home-assistant-intents==2024.7.29 From b636096ac308d8739f8549af7d87dbd63a479959 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Tue, 6 Aug 2024 18:08:19 +0200 Subject: [PATCH 0451/1635] Bump version to 2024.8.0b5 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index ed0a44bb7cc..dd181d74655 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b4" +PATCH_VERSION: Final = "0b5" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 86fa1c8bbc1..1831a46a0af 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b4" +version = "2024.8.0b5" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From f4db9e09c86a1980cc67d4744181ba373dd61fe1 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 7 Aug 2024 00:16:57 +0200 Subject: [PATCH 0452/1635] Bump reolink-aio to 0.9.7 (#123263) --- homeassistant/components/reolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/reolink/test_select.py | 12 ++++++------ 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 7289dac682c..9671a4b4fc1 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.9.6"] + "requirements": ["reolink-aio==0.9.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index e6509060764..7914459aa4b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2483,7 +2483,7 @@ renault-api==0.2.5 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.6 +reolink-aio==0.9.7 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 42219d16de2..5e5da74232b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1968,7 +1968,7 @@ renault-api==0.2.5 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.6 +reolink-aio==0.9.7 # homeassistant.components.rflink rflink==0.0.66 diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 908c06dc16f..53c1e494b3d 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -110,15 +110,15 @@ async def test_chime_select( host=reolink_connect, dev_id=12345678, channel=0, - name="Test chime", - event_info={ - "md": {"switch": 0, "musicId": 0}, - "people": {"switch": 0, "musicId": 1}, - "visitor": {"switch": 1, "musicId": 2}, - }, ) + TEST_CHIME.name = "Test chime" TEST_CHIME.volume = 3 TEST_CHIME.led_state = True + TEST_CHIME.event_info = { + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + } reolink_connect.chime_list = [TEST_CHIME] From 78154f5daf37f186538048585d79b529b85eb0e2 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Tue, 6 Aug 2024 21:46:25 -0400 Subject: [PATCH 0453/1635] Bump ZHA to 0.0.28 (#123259) * Bump ZHA to 0.0.28 * Drop redundant radio schema conversion --- homeassistant/components/zha/manifest.json | 2 +- homeassistant/components/zha/radio_manager.py | 1 - requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index d7dc53b5167..4a597b0233c 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.27"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.28"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/radio_manager.py b/homeassistant/components/zha/radio_manager.py index 2b7a65f4997..82c30b7678a 100644 --- a/homeassistant/components/zha/radio_manager.py +++ 
b/homeassistant/components/zha/radio_manager.py @@ -178,7 +178,6 @@ class ZhaRadioManager: app_config[CONF_DEVICE] = self.device_settings app_config[CONF_NWK_BACKUP_ENABLED] = False app_config[CONF_USE_THREAD] = False - app_config = self.radio_type.controller.SCHEMA(app_config) app = await self.radio_type.controller.new( app_config, auto_form=False, start_radio=False diff --git a/requirements_all.txt b/requirements_all.txt index 7914459aa4b..e054715fddc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2986,7 +2986,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.27 +zha==0.0.28 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5e5da74232b..125d583c263 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2363,7 +2363,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.27 +zha==0.0.28 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 From 89337091b2e7b1d91902335de728fd6485ea1fc5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 09:04:48 +0200 Subject: [PATCH 0454/1635] Bump github/codeql-action from 3.25.15 to 3.26.0 (#123273) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7fe545e469c..68673455e1f 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.25.15 + uses: github/codeql-action/init@v3.26.0 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.25.15 + uses: github/codeql-action/analyze@v3.26.0 with: category: "/language:python" From 95f92ababf1b63312f64ad00a1d3e52f7b412a7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 7 Aug 2024 09:05:23 +0200 Subject: [PATCH 0455/1635] Bump actions/upload-artifact from 4.3.5 to 4.3.6 (#123272) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 20 ++++++++++---------- .github/workflows/wheels.yml | 6 +++--- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 7e3597e7289..5cb860a98ef 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8dfdb2fd7af..3df1bf5b6aa 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -624,7 +624,7 @@ jobs: . venv/bin/activate pip-licenses --format=json --output-file=licenses.json - name: Upload licenses - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: licenses path: licenses.json @@ -834,7 +834,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: pytest_buckets path: pytest_buckets.txt @@ -935,14 +935,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1061,7 +1061,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1069,7 +1069,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1188,7 +1188,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1196,7 +1196,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1330,14 +1330,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 6c9cb07a180..694208d30ac 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -82,14 +82,14 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: requirements_diff path: ./requirements_diff.txt @@ -101,7 +101,7 @@ jobs: python -m 
script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.3.5 + uses: actions/upload-artifact@v4.3.6 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt From 7ec0b8b331ec8d55baac0ef5c84cd8a977829071 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 7 Aug 2024 09:12:20 +0200 Subject: [PATCH 0456/1635] Update knx-frontend to 2024.8.6.211307 (#123261) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 37206df4c83..62364f641f4 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.0.0", "xknxproject==3.7.1", - "knx-frontend==2024.8.6.85349" + "knx-frontend==2024.8.6.211307" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index e054715fddc..daf614f2a3a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1216,7 +1216,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.6.85349 +knx-frontend==2024.8.6.211307 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 125d583c263..78ca83b9de6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1015,7 +1015,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.6.85349 +knx-frontend==2024.8.6.211307 # homeassistant.components.konnected konnected==1.2.0 From 185b6e5908f18f67b37ed000e1fd654ea2cffbfb Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 7 Aug 2024 02:13:23 -0500 Subject: [PATCH 0457/1635] Allow non-admins to subscribe to newer registry update events (#123267) --- homeassistant/auth/permissions/events.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/homeassistant/auth/permissions/events.py b/homeassistant/auth/permissions/events.py index 9f2fb45f9f0..cb0506769bf 100644 --- a/homeassistant/auth/permissions/events.py +++ b/homeassistant/auth/permissions/events.py @@ -18,9 +18,12 @@ from homeassistant.const import ( EVENT_THEMES_UPDATED, ) from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED +from homeassistant.helpers.category_registry import EVENT_CATEGORY_REGISTRY_UPDATED from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED +from homeassistant.helpers.floor_registry import EVENT_FLOOR_REGISTRY_UPDATED from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED +from homeassistant.helpers.label_registry import EVENT_LABEL_REGISTRY_UPDATED from homeassistant.util.event_type import EventType # These are events that do not contain any sensitive data @@ -41,4 +44,7 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = { EVENT_SHOPPING_LIST_UPDATED, EVENT_STATE_CHANGED, EVENT_THEMES_UPDATED, + EVENT_LABEL_REGISTRY_UPDATED, + EVENT_CATEGORY_REGISTRY_UPDATED, + EVENT_FLOOR_REGISTRY_UPDATED, } From be7f3ca439e5854234ad38a4bfd9e81d72020c7e Mon Sep 17 00:00:00 2001 From: Terence Honles Date: Wed, 7 Aug 2024 10:10:18 +0200 Subject: [PATCH 0458/1635] remove unneeded type attributes on WebsocketNotification (#123238) --- .../components/bang_olufsen/const.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 657bedcf4d7..748b4baf621 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -68,20 +68,20 @@ class BangOlufsenModel(StrEnum): class WebsocketNotification(StrEnum): """Enum for WebSocket notification types.""" - PLAYBACK_ERROR: Final[str] = "playback_error" - PLAYBACK_METADATA: Final[str] = "playback_metadata" - PLAYBACK_PROGRESS: Final[str] = "playback_progress" - PLAYBACK_SOURCE: Final[str] = "playback_source" - PLAYBACK_STATE: Final[str] = "playback_state" - SOFTWARE_UPDATE_STATE: Final[str] = "software_update_state" - SOURCE_CHANGE: Final[str] = "source_change" - VOLUME: Final[str] = "volume" + PLAYBACK_ERROR = "playback_error" + PLAYBACK_METADATA = "playback_metadata" + PLAYBACK_PROGRESS = "playback_progress" + PLAYBACK_SOURCE = "playback_source" + PLAYBACK_STATE = "playback_state" + SOFTWARE_UPDATE_STATE = "software_update_state" + SOURCE_CHANGE = "source_change" + VOLUME = "volume" # Sub-notifications - NOTIFICATION: Final[str] = "notification" - REMOTE_MENU_CHANGED: Final[str] = "remoteMenuChanged" + NOTIFICATION = "notification" + REMOTE_MENU_CHANGED = "remoteMenuChanged" - ALL: Final[str] = "all" + ALL = "all" DOMAIN: Final[str] = "bang_olufsen" From 27b9965b10093d2132cace438b44d8a256fa862d Mon Sep 17 00:00:00 2001 From: tronikos Date: Wed, 7 Aug 2024 01:16:07 -0700 Subject: [PATCH 0459/1635] Fix Google Cloud TTS not respecting config values (#123275) --- .../components/google_cloud/helpers.py | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/google_cloud/helpers.py 
b/homeassistant/components/google_cloud/helpers.py index 66dfbcf01eb..8ae6a456a4f 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -59,7 +59,10 @@ def tts_options_schema( vol.Optional( CONF_GENDER, description={"suggested_value": config_options.get(CONF_GENDER)}, - default=texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined] + default=config_options.get( + CONF_GENDER, + texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined] + ), ): vol.All( vol.Upper, SelectSelector( @@ -72,7 +75,7 @@ def tts_options_schema( vol.Optional( CONF_VOICE, description={"suggested_value": config_options.get(CONF_VOICE)}, - default=DEFAULT_VOICE, + default=config_options.get(CONF_VOICE, DEFAULT_VOICE), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -82,7 +85,10 @@ def tts_options_schema( vol.Optional( CONF_ENCODING, description={"suggested_value": config_options.get(CONF_ENCODING)}, - default=texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined] + default=config_options.get( + CONF_ENCODING, + texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined] + ), ): vol.All( vol.Upper, SelectSelector( @@ -95,22 +101,22 @@ def tts_options_schema( vol.Optional( CONF_SPEED, description={"suggested_value": config_options.get(CONF_SPEED)}, - default=1.0, + default=config_options.get(CONF_SPEED, 1.0), ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)), vol.Optional( CONF_PITCH, description={"suggested_value": config_options.get(CONF_PITCH)}, - default=0, + default=config_options.get(CONF_PITCH, 0), ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)), vol.Optional( CONF_GAIN, description={"suggested_value": config_options.get(CONF_GAIN)}, - default=0, + default=config_options.get(CONF_GAIN, 0), ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)), vol.Optional( CONF_PROFILES, description={"suggested_value": config_options.get(CONF_PROFILES)}, - default=[], + default=config_options.get(CONF_PROFILES, []), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -132,7 +138,7 @@ def tts_options_schema( vol.Optional( CONF_TEXT_TYPE, description={"suggested_value": config_options.get(CONF_TEXT_TYPE)}, - default="text", + default=config_options.get(CONF_TEXT_TYPE, "text"), ): vol.All( vol.Lower, SelectSelector( From bf28419851bfd5e926721902b306d5caf779ac5a Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Wed, 7 Aug 2024 20:17:01 +1200 Subject: [PATCH 0460/1635] Update ESPHome voice assistant pipeline log warning (#123269) --- homeassistant/components/esphome/manager.py | 2 +- tests/components/esphome/test_manager.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index ef2a6862f10..4b4537d147f 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -346,7 +346,7 @@ class ESPHomeManager: ) -> int | None: """Start a voice assistant pipeline.""" if self.voice_assistant_pipeline is not None: - _LOGGER.warning("Voice assistant UDP server was not stopped") + _LOGGER.warning("Previous Voice assistant pipeline was not stopped") self.voice_assistant_pipeline.stop() self.voice_assistant_pipeline = None diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 
651c52cd083..9d2a906466e 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -1229,7 +1229,7 @@ async def test_manager_voice_assistant_handlers_api( "", 0, None, None ) - assert "Voice assistant UDP server was not stopped" in caplog.text + assert "Previous Voice assistant pipeline was not stopped" in caplog.text await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) From 0270026f7cb6c76841deba32afda55798ee9dc3a Mon Sep 17 00:00:00 2001 From: Steve Repsher Date: Tue, 6 Aug 2024 10:17:54 -0400 Subject: [PATCH 0461/1635] Adapt static resource handler to aiohttp 3.10 (#123166) --- homeassistant/components/http/static.py | 79 +++++++------------------ tests/components/http/test_static.py | 35 +++-------- 2 files changed, 30 insertions(+), 84 deletions(-) diff --git a/homeassistant/components/http/static.py b/homeassistant/components/http/static.py index a7280fb9b2f..29c5840a4bf 100644 --- a/homeassistant/components/http/static.py +++ b/homeassistant/components/http/static.py @@ -3,81 +3,46 @@ from __future__ import annotations from collections.abc import Mapping -import mimetypes from pathlib import Path from typing import Final -from aiohttp import hdrs +from aiohttp.hdrs import CACHE_CONTROL, CONTENT_TYPE from aiohttp.web import FileResponse, Request, StreamResponse -from aiohttp.web_exceptions import HTTPForbidden, HTTPNotFound +from aiohttp.web_fileresponse import CONTENT_TYPES, FALLBACK_CONTENT_TYPE from aiohttp.web_urldispatcher import StaticResource from lru import LRU -from .const import KEY_HASS - CACHE_TIME: Final = 31 * 86400 # = 1 month CACHE_HEADER = f"public, max-age={CACHE_TIME}" -CACHE_HEADERS: Mapping[str, str] = {hdrs.CACHE_CONTROL: CACHE_HEADER} -PATH_CACHE: LRU[tuple[str, Path], tuple[Path | None, str | None]] = LRU(512) - - -def _get_file_path(rel_url: str, directory: Path) -> Path | None: - """Return the path to file on disk or None.""" - filename = Path(rel_url) - if filename.anchor: - # rel_url is an absolute name like - # /static/\\machine_name\c$ or /static/D:\path - # where the static dir is totally different - raise HTTPForbidden - filepath: Path = directory.joinpath(filename).resolve() - filepath.relative_to(directory) - # on opening a dir, load its contents if allowed - if filepath.is_dir(): - return None - if filepath.is_file(): - return filepath - raise FileNotFoundError +CACHE_HEADERS: Mapping[str, str] = {CACHE_CONTROL: CACHE_HEADER} +RESPONSE_CACHE: LRU[tuple[str, Path], tuple[Path, str]] = LRU(512) class CachingStaticResource(StaticResource): """Static Resource handler that will add cache headers.""" async def _handle(self, request: Request) -> StreamResponse: - """Return requested file from disk as a FileResponse.""" + """Wrap base handler to cache file path resolution and content type guess.""" rel_url = request.match_info["filename"] key = (rel_url, self._directory) - if (filepath_content_type := PATH_CACHE.get(key)) is None: - hass = request.app[KEY_HASS] - try: - filepath = await hass.async_add_executor_job(_get_file_path, *key) - except (ValueError, FileNotFoundError) as error: - # relatively safe - raise HTTPNotFound from error - except HTTPForbidden: - # forbidden - raise - except Exception as error: - # perm error or other kind! 
- request.app.logger.exception("Unexpected exception") - raise HTTPNotFound from error + response: StreamResponse - content_type: str | None = None - if filepath is not None: - content_type = (mimetypes.guess_type(rel_url))[ - 0 - ] or "application/octet-stream" - PATH_CACHE[key] = (filepath, content_type) + if key in RESPONSE_CACHE: + file_path, content_type = RESPONSE_CACHE[key] + response = FileResponse(file_path, chunk_size=self._chunk_size) + response.headers[CONTENT_TYPE] = content_type else: - filepath, content_type = filepath_content_type - - if filepath and content_type: - return FileResponse( - filepath, - chunk_size=self._chunk_size, - headers={ - hdrs.CACHE_CONTROL: CACHE_HEADER, - hdrs.CONTENT_TYPE: content_type, - }, + response = await super()._handle(request) + if not isinstance(response, FileResponse): + # Must be directory index; ignore caching + return response + file_path = response._path # noqa: SLF001 + response.content_type = ( + CONTENT_TYPES.guess_type(file_path)[0] or FALLBACK_CONTENT_TYPE ) + # Cache actual header after setter construction. + content_type = response.headers[CONTENT_TYPE] + RESPONSE_CACHE[key] = (file_path, content_type) - raise HTTPForbidden if filepath is None else HTTPNotFound + response.headers[CACHE_CONTROL] = CACHE_HEADER + return response diff --git a/tests/components/http/test_static.py b/tests/components/http/test_static.py index 52a5db5daa7..2ac7c6ded93 100644 --- a/tests/components/http/test_static.py +++ b/tests/components/http/test_static.py @@ -4,11 +4,10 @@ from http import HTTPStatus from pathlib import Path from aiohttp.test_utils import TestClient -from aiohttp.web_exceptions import HTTPForbidden import pytest from homeassistant.components.http import StaticPathConfig -from homeassistant.components.http.static import CachingStaticResource, _get_file_path +from homeassistant.components.http.static import CachingStaticResource from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import HomeAssistant from homeassistant.helpers.http import KEY_ALLOW_CONFIGURED_CORS @@ -31,37 +30,19 @@ async def mock_http_client(hass: HomeAssistant, aiohttp_client: ClientSessionGen return await aiohttp_client(hass.http.app, server_kwargs={"skip_url_asserts": True}) -@pytest.mark.parametrize( - ("url", "canonical_url"), - [ - ("//a", "//a"), - ("///a", "///a"), - ("/c:\\a\\b", "/c:%5Ca%5Cb"), - ], -) -async def test_static_path_blocks_anchors( - hass: HomeAssistant, - mock_http_client: TestClient, - tmp_path: Path, - url: str, - canonical_url: str, +async def test_static_resource_show_index( + hass: HomeAssistant, mock_http_client: TestClient, tmp_path: Path ) -> None: - """Test static paths block anchors.""" + """Test static resource will return a directory index.""" app = hass.http.app - resource = CachingStaticResource(url, str(tmp_path)) - assert resource.canonical == canonical_url + resource = CachingStaticResource("/", tmp_path, show_index=True) app.router.register_resource(resource) app[KEY_ALLOW_CONFIGURED_CORS](resource) - resp = await mock_http_client.get(canonical_url, allow_redirects=False) - assert resp.status == 403 - - # Tested directly since aiohttp will block it before - # it gets here but we want to make sure if aiohttp ever - # changes we still block it. 
- with pytest.raises(HTTPForbidden): - _get_file_path(canonical_url, tmp_path) + resp = await mock_http_client.get("/") + assert resp.status == 200 + assert resp.content_type == "text/html" async def test_async_register_static_paths( From 940327dccf8ad55a5bf368c5cd7ff09ab8b8a4f5 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Tue, 6 Aug 2024 21:46:25 -0400 Subject: [PATCH 0462/1635] Bump ZHA to 0.0.28 (#123259) * Bump ZHA to 0.0.28 * Drop redundant radio schema conversion --- homeassistant/components/zha/manifest.json | 2 +- homeassistant/components/zha/radio_manager.py | 1 - requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index d7dc53b5167..4a597b0233c 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.27"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.28"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/radio_manager.py b/homeassistant/components/zha/radio_manager.py index 2b7a65f4997..82c30b7678a 100644 --- a/homeassistant/components/zha/radio_manager.py +++ b/homeassistant/components/zha/radio_manager.py @@ -178,7 +178,6 @@ class ZhaRadioManager: app_config[CONF_DEVICE] = self.device_settings app_config[CONF_NWK_BACKUP_ENABLED] = False app_config[CONF_USE_THREAD] = False - app_config = self.radio_type.controller.SCHEMA(app_config) app = await self.radio_type.controller.new( app_config, auto_form=False, start_radio=False diff --git a/requirements_all.txt b/requirements_all.txt index d5658b2903e..42850c46083 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2986,7 +2986,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.27 +zha==0.0.28 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b37af69bfc5..bad3a851c88 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2360,7 +2360,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.27 +zha==0.0.28 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 From 9e75b639251359a3125bbd53d9cf31a4524fd1c0 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 7 Aug 2024 09:12:20 +0200 Subject: [PATCH 0463/1635] Update knx-frontend to 2024.8.6.211307 (#123261) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 37206df4c83..62364f641f4 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.0.0", "xknxproject==3.7.1", - "knx-frontend==2024.8.6.85349" + "knx-frontend==2024.8.6.211307" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 42850c46083..21270678ba6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1216,7 +1216,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.6.85349 +knx-frontend==2024.8.6.211307 # 
homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bad3a851c88..283c84d3b01 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1012,7 +1012,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.6.85349 +knx-frontend==2024.8.6.211307 # homeassistant.components.konnected konnected==1.2.0 From 1143efedc53ace8e28cef3875e31de1d93d39a38 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 7 Aug 2024 00:16:57 +0200 Subject: [PATCH 0464/1635] Bump reolink-aio to 0.9.7 (#123263) --- homeassistant/components/reolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/reolink/test_select.py | 12 ++++++------ 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 7289dac682c..9671a4b4fc1 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.9.6"] + "requirements": ["reolink-aio==0.9.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 21270678ba6..008253960d5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2483,7 +2483,7 @@ renault-api==0.2.5 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.6 +reolink-aio==0.9.7 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 283c84d3b01..737d30f7b25 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1965,7 +1965,7 @@ renault-api==0.2.5 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.6 +reolink-aio==0.9.7 # homeassistant.components.rflink rflink==0.0.66 diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 908c06dc16f..53c1e494b3d 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -110,15 +110,15 @@ async def test_chime_select( host=reolink_connect, dev_id=12345678, channel=0, - name="Test chime", - event_info={ - "md": {"switch": 0, "musicId": 0}, - "people": {"switch": 0, "musicId": 1}, - "visitor": {"switch": 1, "musicId": 2}, - }, ) + TEST_CHIME.name = "Test chime" TEST_CHIME.volume = 3 TEST_CHIME.led_state = True + TEST_CHIME.event_info = { + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + } reolink_connect.chime_list = [TEST_CHIME] From b0269faae4c40b69f40b33074019628068cad97b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 7 Aug 2024 02:13:23 -0500 Subject: [PATCH 0465/1635] Allow non-admins to subscribe to newer registry update events (#123267) --- homeassistant/auth/permissions/events.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/homeassistant/auth/permissions/events.py b/homeassistant/auth/permissions/events.py index 9f2fb45f9f0..cb0506769bf 100644 --- a/homeassistant/auth/permissions/events.py +++ b/homeassistant/auth/permissions/events.py @@ -18,9 +18,12 @@ from homeassistant.const import ( EVENT_THEMES_UPDATED, ) from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED +from homeassistant.helpers.category_registry import EVENT_CATEGORY_REGISTRY_UPDATED from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED +from homeassistant.helpers.floor_registry import EVENT_FLOOR_REGISTRY_UPDATED from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED +from homeassistant.helpers.label_registry import EVENT_LABEL_REGISTRY_UPDATED from homeassistant.util.event_type import EventType # These are events that do not contain any sensitive data @@ -41,4 +44,7 @@ SUBSCRIBE_ALLOWLIST: Final[set[EventType[Any] | str]] = { EVENT_SHOPPING_LIST_UPDATED, EVENT_STATE_CHANGED, EVENT_THEMES_UPDATED, + EVENT_LABEL_REGISTRY_UPDATED, + EVENT_CATEGORY_REGISTRY_UPDATED, + EVENT_FLOOR_REGISTRY_UPDATED, } From ad674a1c2bdb52360606c55f55a5ad44d5189e3c Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Wed, 7 Aug 2024 20:17:01 +1200 Subject: [PATCH 0466/1635] Update ESPHome voice assistant pipeline log warning (#123269) --- homeassistant/components/esphome/manager.py | 2 +- tests/components/esphome/test_manager.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index ef2a6862f10..4b4537d147f 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -346,7 +346,7 @@ class ESPHomeManager: ) -> int | None: """Start a voice assistant pipeline.""" if self.voice_assistant_pipeline is not None: - _LOGGER.warning("Voice assistant UDP server was not stopped") + _LOGGER.warning("Previous Voice assistant pipeline was not stopped") self.voice_assistant_pipeline.stop() self.voice_assistant_pipeline = None diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 651c52cd083..9d2a906466e 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -1229,7 +1229,7 @@ async def test_manager_voice_assistant_handlers_api( "", 0, None, None ) - assert "Voice assistant UDP server was not stopped" in caplog.text + assert "Previous Voice assistant pipeline was not stopped" in caplog.text await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) From cc5699bf0894f107a94af74c8928db47c1ebcbd5 Mon Sep 17 00:00:00 2001 From: tronikos Date: Wed, 7 Aug 2024 01:16:07 -0700 Subject: [PATCH 0467/1635] Fix Google Cloud TTS not respecting config values (#123275) --- .../components/google_cloud/helpers.py | 22 ++++++++++++------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index 66dfbcf01eb..8ae6a456a4f 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ 
b/homeassistant/components/google_cloud/helpers.py @@ -59,7 +59,10 @@ def tts_options_schema( vol.Optional( CONF_GENDER, description={"suggested_value": config_options.get(CONF_GENDER)}, - default=texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined] + default=config_options.get( + CONF_GENDER, + texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined] + ), ): vol.All( vol.Upper, SelectSelector( @@ -72,7 +75,7 @@ def tts_options_schema( vol.Optional( CONF_VOICE, description={"suggested_value": config_options.get(CONF_VOICE)}, - default=DEFAULT_VOICE, + default=config_options.get(CONF_VOICE, DEFAULT_VOICE), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -82,7 +85,10 @@ def tts_options_schema( vol.Optional( CONF_ENCODING, description={"suggested_value": config_options.get(CONF_ENCODING)}, - default=texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined] + default=config_options.get( + CONF_ENCODING, + texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined] + ), ): vol.All( vol.Upper, SelectSelector( @@ -95,22 +101,22 @@ def tts_options_schema( vol.Optional( CONF_SPEED, description={"suggested_value": config_options.get(CONF_SPEED)}, - default=1.0, + default=config_options.get(CONF_SPEED, 1.0), ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)), vol.Optional( CONF_PITCH, description={"suggested_value": config_options.get(CONF_PITCH)}, - default=0, + default=config_options.get(CONF_PITCH, 0), ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)), vol.Optional( CONF_GAIN, description={"suggested_value": config_options.get(CONF_GAIN)}, - default=0, + default=config_options.get(CONF_GAIN, 0), ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)), vol.Optional( CONF_PROFILES, description={"suggested_value": config_options.get(CONF_PROFILES)}, - default=[], + default=config_options.get(CONF_PROFILES, []), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -132,7 +138,7 @@ def tts_options_schema( vol.Optional( CONF_TEXT_TYPE, description={"suggested_value": config_options.get(CONF_TEXT_TYPE)}, - default="text", + default=config_options.get(CONF_TEXT_TYPE, "text"), ): vol.All( vol.Lower, SelectSelector( From a10fed9d728392aeba80a5312dc6e5f4ae3521d8 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 10:22:39 +0200 Subject: [PATCH 0468/1635] Bump version to 2024.8.0b6 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index dd181d74655..02dcc77d36a 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b5" +PATCH_VERSION: Final = "0b6" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 1831a46a0af..58922676286 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b5" +version = "2024.8.0b6" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From cba6273ac6f8ce9d3c5a13a5507b4e8e7dacd780 Mon Sep 17 00:00:00 2001 From: Erwin Douna Date: Wed, 7 Aug 2024 11:18:09 +0200 Subject: [PATCH 0469/1635] Tado change repair issue (#123256) --- homeassistant/components/tado/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index ab903dafb5b..39453cb5fe1 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -152,7 +152,7 @@ "issues": { "water_heater_fallback": { "title": "Tado Water Heater entities now support fallback options", - "description": "Due to added support for water heaters entities, these entities may use different overlay. Please configure integration entity and tado app water heater zone overlay options." + "description": "Due to added support for water heaters entities, these entities may use different overlay. Please configure integration entity and tado app water heater zone overlay options. Otherwise, please configure the integration entity and Tado app water heater zone overlay options (under Settings -> Rooms & Devices -> Hot Water)." } } } From 933fba84a9a16bb75b8cebff0e8726daacc267e0 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Wed, 7 Aug 2024 11:18:48 +0200 Subject: [PATCH 0470/1635] Reload conversation entries on update (#123279) --- homeassistant/components/ollama/conversation.py | 8 ++------ .../components/openai_conversation/conversation.py | 8 ++------ tests/components/ollama/conftest.py | 1 + tests/components/ollama/test_conversation.py | 1 + 4 files changed, 6 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index 9f66083f506..c0423b258f0 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -346,9 +346,5 @@ class OllamaConversationEntity( self, hass: HomeAssistant, entry: ConfigEntry ) -> None: """Handle options update.""" - if entry.options.get(CONF_LLM_HASS_API): - self._attr_supported_features = ( - conversation.ConversationEntityFeature.CONTROL - ) - else: - self._attr_supported_features = conversation.ConversationEntityFeature(0) + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index b482126e27c..a7109a6d6ec 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -328,9 +328,5 @@ class OpenAIConversationEntity( self, hass: HomeAssistant, entry: ConfigEntry ) -> None: """Handle options update.""" - if entry.options.get(CONF_LLM_HASS_API): - self._attr_supported_features = ( - conversation.ConversationEntityFeature.CONTROL - ) - else: - self._attr_supported_features = conversation.ConversationEntityFeature(0) + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index 0355a13eba7..b28b8850cd5 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -48,6 +48,7 @@ async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfig ): assert await 
async_setup_component(hass, ollama.DOMAIN, {}) await hass.async_block_till_done() + yield @pytest.fixture(autouse=True) diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index c83dce3b565..cb56b398342 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -312,6 +312,7 @@ async def test_unknown_hass_api( CONF_LLM_HASS_API: "non-existing", }, ) + await hass.async_block_till_done() result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id From 799888df2fc491dcb09c6a2ef111c88393451c5f Mon Sep 17 00:00:00 2001 From: lunmay <28674102+lunmay@users.noreply.github.com> Date: Wed, 7 Aug 2024 11:20:36 +0200 Subject: [PATCH 0471/1635] Fix typo on one of islamic_prayer_times calculation_method option (#123281) --- homeassistant/components/islamic_prayer_times/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/islamic_prayer_times/strings.json b/homeassistant/components/islamic_prayer_times/strings.json index 359d4626bd4..a90031c088d 100644 --- a/homeassistant/components/islamic_prayer_times/strings.json +++ b/homeassistant/components/islamic_prayer_times/strings.json @@ -45,7 +45,7 @@ "jakim": "Jabatan Kemajuan Islam Malaysia (JAKIM)", "tunisia": "Tunisia", "algeria": "Algeria", - "kemenag": "ementerian Agama Republik Indonesia", + "kemenag": "Kementerian Agama Republik Indonesia", "morocco": "Morocco", "portugal": "Comunidade Islamica de Lisboa", "jordan": "Ministry of Awqaf, Islamic Affairs and Holy Places, Jordan", From 270990fe3907229d3aad0dd327e83d0f97a7ea2a Mon Sep 17 00:00:00 2001 From: Erwin Douna Date: Wed, 7 Aug 2024 11:18:09 +0200 Subject: [PATCH 0472/1635] Tado change repair issue (#123256) --- homeassistant/components/tado/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/tado/strings.json b/homeassistant/components/tado/strings.json index ab903dafb5b..39453cb5fe1 100644 --- a/homeassistant/components/tado/strings.json +++ b/homeassistant/components/tado/strings.json @@ -152,7 +152,7 @@ "issues": { "water_heater_fallback": { "title": "Tado Water Heater entities now support fallback options", - "description": "Due to added support for water heaters entities, these entities may use different overlay. Please configure integration entity and tado app water heater zone overlay options." + "description": "Due to added support for water heaters entities, these entities may use different overlay. Please configure integration entity and tado app water heater zone overlay options. Otherwise, please configure the integration entity and Tado app water heater zone overlay options (under Settings -> Rooms & Devices -> Hot Water)." 
} } } From db32460f3baceb3bf914979715cabfe2999c28d3 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Wed, 7 Aug 2024 11:18:48 +0200 Subject: [PATCH 0473/1635] Reload conversation entries on update (#123279) --- homeassistant/components/ollama/conversation.py | 8 ++------ .../components/openai_conversation/conversation.py | 8 ++------ tests/components/ollama/conftest.py | 1 + tests/components/ollama/test_conversation.py | 1 + 4 files changed, 6 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index 9f66083f506..c0423b258f0 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -346,9 +346,5 @@ class OllamaConversationEntity( self, hass: HomeAssistant, entry: ConfigEntry ) -> None: """Handle options update.""" - if entry.options.get(CONF_LLM_HASS_API): - self._attr_supported_features = ( - conversation.ConversationEntityFeature.CONTROL - ) - else: - self._attr_supported_features = conversation.ConversationEntityFeature(0) + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py index b482126e27c..a7109a6d6ec 100644 --- a/homeassistant/components/openai_conversation/conversation.py +++ b/homeassistant/components/openai_conversation/conversation.py @@ -328,9 +328,5 @@ class OpenAIConversationEntity( self, hass: HomeAssistant, entry: ConfigEntry ) -> None: """Handle options update.""" - if entry.options.get(CONF_LLM_HASS_API): - self._attr_supported_features = ( - conversation.ConversationEntityFeature.CONTROL - ) - else: - self._attr_supported_features = conversation.ConversationEntityFeature(0) + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index 0355a13eba7..b28b8850cd5 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -48,6 +48,7 @@ async def mock_init_component(hass: HomeAssistant, mock_config_entry: MockConfig ): assert await async_setup_component(hass, ollama.DOMAIN, {}) await hass.async_block_till_done() + yield @pytest.fixture(autouse=True) diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index c83dce3b565..cb56b398342 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -312,6 +312,7 @@ async def test_unknown_hass_api( CONF_LLM_HASS_API: "non-existing", }, ) + await hass.async_block_till_done() result = await conversation.async_converse( hass, "hello", None, Context(), agent_id=mock_config_entry.entry_id From af6f78a784399e491209a8c03c09d005421bf81a Mon Sep 17 00:00:00 2001 From: lunmay <28674102+lunmay@users.noreply.github.com> Date: Wed, 7 Aug 2024 11:20:36 +0200 Subject: [PATCH 0474/1635] Fix typo on one of islamic_prayer_times calculation_method option (#123281) --- homeassistant/components/islamic_prayer_times/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/islamic_prayer_times/strings.json b/homeassistant/components/islamic_prayer_times/strings.json index 359d4626bd4..a90031c088d 100644 --- 
a/homeassistant/components/islamic_prayer_times/strings.json +++ b/homeassistant/components/islamic_prayer_times/strings.json @@ -45,7 +45,7 @@ "jakim": "Jabatan Kemajuan Islam Malaysia (JAKIM)", "tunisia": "Tunisia", "algeria": "Algeria", - "kemenag": "ementerian Agama Republik Indonesia", + "kemenag": "Kementerian Agama Republik Indonesia", "morocco": "Morocco", "portugal": "Comunidade Islamica de Lisboa", "jordan": "Ministry of Awqaf, Islamic Affairs and Holy Places, Jordan", From 782ff12e6eadbfc10342869351565a1e9f0e09be Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 11:26:03 +0200 Subject: [PATCH 0475/1635] Bump version to 2024.8.0b7 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 02dcc77d36a..8e49ccc1b0b 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b6" +PATCH_VERSION: Final = "0b7" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 58922676286..4e2c2ebb72a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b6" +version = "2024.8.0b7" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 45ce0fed0acc913597f7f3095c11c04fced2b326 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Wed, 7 Aug 2024 12:22:50 +0200 Subject: [PATCH 0476/1635] Reload config entry for anthropic on update (#123280) * Reload config entry for anthropic on update * Fix tests --- homeassistant/components/anthropic/conversation.py | 8 ++------ tests/components/anthropic/conftest.py | 1 + 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py index 3d876bf3325..20e555e9592 100644 --- a/homeassistant/components/anthropic/conversation.py +++ b/homeassistant/components/anthropic/conversation.py @@ -312,9 +312,5 @@ class AnthropicConversationEntity( self, hass: HomeAssistant, entry: ConfigEntry ) -> None: """Handle options update.""" - if entry.options.get(CONF_LLM_HASS_API): - self._attr_supported_features = ( - conversation.ConversationEntityFeature.CONTROL - ) - else: - self._attr_supported_features = conversation.ConversationEntityFeature(0) + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py index 0a5ad5e5ac6..fe3b20f15b8 100644 --- a/tests/components/anthropic/conftest.py +++ b/tests/components/anthropic/conftest.py @@ -43,6 +43,7 @@ async def mock_init_component(hass, mock_config_entry): ): assert await async_setup_component(hass, "anthropic", {}) await hass.async_block_till_done() + yield @pytest.fixture(autouse=True) From 764166342e97cda133e099cdb11148a09b6a0ee7 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Wed, 7 Aug 2024 21:11:03 +1000 Subject: [PATCH 0477/1635] Add missing application credential to Tesla Fleet (#123271) Co-authored-by: Franck Nijhof --- 
.../components/tesla_fleet/__init__.py | 10 ++++++- .../tesla_fleet/application_credentials.py | 27 ++++++++++--------- .../components/tesla_fleet/config_flow.py | 17 +++++++++++- homeassistant/components/tesla_fleet/const.py | 5 ++++ tests/components/tesla_fleet/__init__.py | 14 ++++++++++ tests/components/tesla_fleet/conftest.py | 19 ------------- .../tesla_fleet/test_config_flow.py | 5 ++-- 7 files changed, 61 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 8257bf75cd0..4eac1168674 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -13,6 +13,7 @@ from tesla_fleet_api.exceptions import ( TeslaFleetError, ) +from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform from homeassistant.core import HomeAssistant @@ -26,7 +27,9 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from .const import DOMAIN, LOGGER, MODELS +from .application_credentials import TeslaOAuth2Implementation +from .config_flow import OAuth2FlowHandler +from .const import CLIENT_ID, DOMAIN, LOGGER, MODELS, NAME from .coordinator import ( TeslaFleetEnergySiteInfoCoordinator, TeslaFleetEnergySiteLiveCoordinator, @@ -51,6 +54,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - scopes = token["scp"] region = token["ou_code"].lower() + OAuth2FlowHandler.async_register_implementation( + hass, + TeslaOAuth2Implementation(hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME)), + ) + implementation = await async_get_config_entry_implementation(hass, entry) oauth_session = OAuth2Session(hass, entry, implementation) refresh_lock = asyncio.Lock() diff --git a/homeassistant/components/tesla_fleet/application_credentials.py b/homeassistant/components/tesla_fleet/application_credentials.py index fda9fce8cec..32e16cc9244 100644 --- a/homeassistant/components/tesla_fleet/application_credentials.py +++ b/homeassistant/components/tesla_fleet/application_credentials.py @@ -5,15 +5,17 @@ import hashlib import secrets from typing import Any -from homeassistant.components.application_credentials import ClientCredential +from homeassistant.components.application_credentials import ( + AuthImplementation, + AuthorizationServer, + ClientCredential, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow -from .const import DOMAIN, SCOPES +from .const import AUTHORIZE_URL, DOMAIN, SCOPES, TOKEN_URL -CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d" -AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize" -TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token" +AUTH_SERVER = AuthorizationServer(AUTHORIZE_URL, TOKEN_URL) async def async_get_auth_implementation( @@ -23,15 +25,16 @@ async def async_get_auth_implementation( return TeslaOAuth2Implementation( hass, DOMAIN, + credential, ) -class TeslaOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementation): +class TeslaOAuth2Implementation(AuthImplementation): """Tesla Fleet API Open Source Oauth2 implementation.""" - _name = "Tesla Fleet API" - - def __init__(self, hass: HomeAssistant, domain: str) -> None: + def __init__( + self, hass: HomeAssistant, domain: str, 
credential: ClientCredential + ) -> None: """Initialize local auth implementation.""" self.hass = hass self._domain = domain @@ -45,10 +48,8 @@ class TeslaOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementati super().__init__( hass, domain, - CLIENT_ID, - "", # Implementation has no client secret - AUTHORIZE_URL, - TOKEN_URL, + credential, + AUTH_SERVER, ) @property diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index ad6ba8817c9..c09ea78177f 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -8,10 +8,12 @@ from typing import Any import jwt +from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow -from .const import DOMAIN, LOGGER +from .application_credentials import TeslaOAuth2Implementation +from .const import CLIENT_ID, DOMAIN, LOGGER, NAME class OAuth2FlowHandler( @@ -27,6 +29,19 @@ class OAuth2FlowHandler( """Return logger.""" return LOGGER + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow start.""" + self.async_register_implementation( + self.hass, + TeslaOAuth2Implementation( + self.hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME) + ), + ) + + return await super().async_step_user() + async def async_oauth_create_entry( self, data: dict[str, Any], diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index ae622d2266c..9d78716a13e 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -13,6 +13,11 @@ CONF_REFRESH_TOKEN = "refresh_token" LOGGER = logging.getLogger(__package__) +NAME = "Home Assistant" +CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d" +AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize" +TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token" + SCOPES = [ Scope.OPENID, Scope.OFFLINE_ACCESS, diff --git a/tests/components/tesla_fleet/__init__.py b/tests/components/tesla_fleet/__init__.py index d5df0d0a2ed..78159402bff 100644 --- a/tests/components/tesla_fleet/__init__.py +++ b/tests/components/tesla_fleet/__init__.py @@ -4,9 +4,15 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.tesla_fleet.const import CLIENT_ID, DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -18,6 +24,14 @@ async def setup_platform( ) -> None: """Set up the Tesla Fleet platform.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, "", "Home Assistant"), + DOMAIN, + ) + config_entry.add_to_hass(hass) if platforms is None: diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index ade2f6eb0a9..7d60ae5e174 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -10,14 +10,7 @@ from unittest.mock import AsyncMock, patch import jwt 
import pytest -from homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.tesla_fleet.application_credentials import CLIENT_ID from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE @@ -71,18 +64,6 @@ def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: ) -@pytest.fixture(autouse=True) -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, ""), - DOMAIN, - ) - - @pytest.fixture(autouse=True) def mock_products() -> Generator[AsyncMock]: """Mock Tesla Fleet Api products method.""" diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index 334d8902fc7..bd1c7d7c2b8 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -5,12 +5,13 @@ from urllib.parse import parse_qs, urlparse import pytest -from homeassistant.components.tesla_fleet.application_credentials import ( +from homeassistant.components.tesla_fleet.const import ( AUTHORIZE_URL, CLIENT_ID, + DOMAIN, + SCOPES, TOKEN_URL, ) -from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType From 9717a867a77c5416d8d5713664bebcae2f3386de Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 13:12:29 +0200 Subject: [PATCH 0478/1635] Update wled to 0.20.1 (#123283) --- homeassistant/components/wled/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index b19e5f16ccb..efeb414438d 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "quality_scale": "platinum", - "requirements": ["wled==0.20.0"], + "requirements": ["wled==0.20.1"], "zeroconf": ["_wled._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index daf614f2a3a..e4c8a93b438 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2915,7 +2915,7 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.20.0 +wled==0.20.1 # homeassistant.components.wolflink wolf-comm==0.0.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 78ca83b9de6..92e91a7f9b2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2301,7 +2301,7 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.20.0 +wled==0.20.1 # homeassistant.components.wolflink wolf-comm==0.0.9 From 6bb55ce79e2bf34edfc80ed1580e6decdb39e30c Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Wed, 7 Aug 2024 21:11:03 +1000 Subject: [PATCH 0479/1635] Add missing application credential to Tesla Fleet (#123271) Co-authored-by: Franck Nijhof --- .../components/tesla_fleet/__init__.py | 10 ++++++- 
.../tesla_fleet/application_credentials.py | 27 ++++++++++--------- .../components/tesla_fleet/config_flow.py | 17 +++++++++++- homeassistant/components/tesla_fleet/const.py | 5 ++++ tests/components/tesla_fleet/__init__.py | 14 ++++++++++ tests/components/tesla_fleet/conftest.py | 19 ------------- .../tesla_fleet/test_config_flow.py | 5 ++-- 7 files changed, 61 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 8257bf75cd0..4eac1168674 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -13,6 +13,7 @@ from tesla_fleet_api.exceptions import ( TeslaFleetError, ) +from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform from homeassistant.core import HomeAssistant @@ -26,7 +27,9 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from .const import DOMAIN, LOGGER, MODELS +from .application_credentials import TeslaOAuth2Implementation +from .config_flow import OAuth2FlowHandler +from .const import CLIENT_ID, DOMAIN, LOGGER, MODELS, NAME from .coordinator import ( TeslaFleetEnergySiteInfoCoordinator, TeslaFleetEnergySiteLiveCoordinator, @@ -51,6 +54,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - scopes = token["scp"] region = token["ou_code"].lower() + OAuth2FlowHandler.async_register_implementation( + hass, + TeslaOAuth2Implementation(hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME)), + ) + implementation = await async_get_config_entry_implementation(hass, entry) oauth_session = OAuth2Session(hass, entry, implementation) refresh_lock = asyncio.Lock() diff --git a/homeassistant/components/tesla_fleet/application_credentials.py b/homeassistant/components/tesla_fleet/application_credentials.py index fda9fce8cec..32e16cc9244 100644 --- a/homeassistant/components/tesla_fleet/application_credentials.py +++ b/homeassistant/components/tesla_fleet/application_credentials.py @@ -5,15 +5,17 @@ import hashlib import secrets from typing import Any -from homeassistant.components.application_credentials import ClientCredential +from homeassistant.components.application_credentials import ( + AuthImplementation, + AuthorizationServer, + ClientCredential, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow -from .const import DOMAIN, SCOPES +from .const import AUTHORIZE_URL, DOMAIN, SCOPES, TOKEN_URL -CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d" -AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize" -TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token" +AUTH_SERVER = AuthorizationServer(AUTHORIZE_URL, TOKEN_URL) async def async_get_auth_implementation( @@ -23,15 +25,16 @@ async def async_get_auth_implementation( return TeslaOAuth2Implementation( hass, DOMAIN, + credential, ) -class TeslaOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementation): +class TeslaOAuth2Implementation(AuthImplementation): """Tesla Fleet API Open Source Oauth2 implementation.""" - _name = "Tesla Fleet API" - - def __init__(self, hass: HomeAssistant, domain: str) -> None: + def __init__( + self, hass: HomeAssistant, domain: str, credential: ClientCredential + ) -> None: 
"""Initialize local auth implementation.""" self.hass = hass self._domain = domain @@ -45,10 +48,8 @@ class TeslaOAuth2Implementation(config_entry_oauth2_flow.LocalOAuth2Implementati super().__init__( hass, domain, - CLIENT_ID, - "", # Implementation has no client secret - AUTHORIZE_URL, - TOKEN_URL, + credential, + AUTH_SERVER, ) @property diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index ad6ba8817c9..c09ea78177f 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -8,10 +8,12 @@ from typing import Any import jwt +from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow -from .const import DOMAIN, LOGGER +from .application_credentials import TeslaOAuth2Implementation +from .const import CLIENT_ID, DOMAIN, LOGGER, NAME class OAuth2FlowHandler( @@ -27,6 +29,19 @@ class OAuth2FlowHandler( """Return logger.""" return LOGGER + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow start.""" + self.async_register_implementation( + self.hass, + TeslaOAuth2Implementation( + self.hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME) + ), + ) + + return await super().async_step_user() + async def async_oauth_create_entry( self, data: dict[str, Any], diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index ae622d2266c..9d78716a13e 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -13,6 +13,11 @@ CONF_REFRESH_TOKEN = "refresh_token" LOGGER = logging.getLogger(__package__) +NAME = "Home Assistant" +CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d" +AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize" +TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token" + SCOPES = [ Scope.OPENID, Scope.OFFLINE_ACCESS, diff --git a/tests/components/tesla_fleet/__init__.py b/tests/components/tesla_fleet/__init__.py index d5df0d0a2ed..78159402bff 100644 --- a/tests/components/tesla_fleet/__init__.py +++ b/tests/components/tesla_fleet/__init__.py @@ -4,9 +4,15 @@ from unittest.mock import patch from syrupy import SnapshotAssertion +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.tesla_fleet.const import CLIENT_ID, DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -18,6 +24,14 @@ async def setup_platform( ) -> None: """Set up the Tesla Fleet platform.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, "", "Home Assistant"), + DOMAIN, + ) + config_entry.add_to_hass(hass) if platforms is None: diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index ade2f6eb0a9..7d60ae5e174 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -10,14 +10,7 @@ from unittest.mock import AsyncMock, patch import jwt import pytest -from 
homeassistant.components.application_credentials import ( - ClientCredential, - async_import_client_credential, -) -from homeassistant.components.tesla_fleet.application_credentials import CLIENT_ID from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES -from homeassistant.core import HomeAssistant -from homeassistant.setup import async_setup_component from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE @@ -71,18 +64,6 @@ def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: ) -@pytest.fixture(autouse=True) -async def setup_credentials(hass: HomeAssistant) -> None: - """Fixture to setup credentials.""" - assert await async_setup_component(hass, "application_credentials", {}) - await async_import_client_credential( - hass, - DOMAIN, - ClientCredential(CLIENT_ID, ""), - DOMAIN, - ) - - @pytest.fixture(autouse=True) def mock_products() -> Generator[AsyncMock]: """Mock Tesla Fleet Api products method.""" diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index 334d8902fc7..bd1c7d7c2b8 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -5,12 +5,13 @@ from urllib.parse import parse_qs, urlparse import pytest -from homeassistant.components.tesla_fleet.application_credentials import ( +from homeassistant.components.tesla_fleet.const import ( AUTHORIZE_URL, CLIENT_ID, + DOMAIN, + SCOPES, TOKEN_URL, ) -from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType From 4a212791a26a8f0d5a38aff399a58fb7cad53fef Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 13:12:29 +0200 Subject: [PATCH 0480/1635] Update wled to 0.20.1 (#123283) --- homeassistant/components/wled/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json index b19e5f16ccb..efeb414438d 100644 --- a/homeassistant/components/wled/manifest.json +++ b/homeassistant/components/wled/manifest.json @@ -7,6 +7,6 @@ "integration_type": "device", "iot_class": "local_push", "quality_scale": "platinum", - "requirements": ["wled==0.20.0"], + "requirements": ["wled==0.20.1"], "zeroconf": ["_wled._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 008253960d5..b12da4becd3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2915,7 +2915,7 @@ wiffi==1.1.2 wirelesstagpy==0.8.1 # homeassistant.components.wled -wled==0.20.0 +wled==0.20.1 # homeassistant.components.wolflink wolf-comm==0.0.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 737d30f7b25..7ec92ee20e7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2298,7 +2298,7 @@ whois==0.9.27 wiffi==1.1.2 # homeassistant.components.wled -wled==0.20.0 +wled==0.20.1 # homeassistant.components.wolflink wolf-comm==0.0.9 From 082290b092985dcb4680655090f071692b911751 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 13:15:23 +0200 Subject: [PATCH 0481/1635] Bump version to 2024.8.0b8 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py 
b/homeassistant/const.py index 8e49ccc1b0b..987ee6572b6 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b7" +PATCH_VERSION: Final = "0b8" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 4e2c2ebb72a..29c58954c5b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b7" +version = "2024.8.0b8" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From 4cd3fce55556f9491de313613ccbf26c672433ec Mon Sep 17 00:00:00 2001 From: ashalita Date: Wed, 7 Aug 2024 15:19:05 +0300 Subject: [PATCH 0482/1635] Revert "Upgrade pycoolmasternet-async to 0.2.0" (#123286) --- homeassistant/components/coolmaster/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/coolmaster/manifest.json b/homeassistant/components/coolmaster/manifest.json index b405a82ad62..9488e068d44 100644 --- a/homeassistant/components/coolmaster/manifest.json +++ b/homeassistant/components/coolmaster/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/coolmaster", "iot_class": "local_polling", "loggers": ["pycoolmasternet_async"], - "requirements": ["pycoolmasternet-async==0.2.0"] + "requirements": ["pycoolmasternet-async==0.1.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index e4c8a93b438..30dbe77f5ff 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1777,7 +1777,7 @@ pycmus==0.1.1 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.2.0 +pycoolmasternet-async==0.1.5 # homeassistant.components.microsoft pycsspeechtts==1.0.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 92e91a7f9b2..3b92a57da5d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1430,7 +1430,7 @@ pycfdns==3.0.0 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.2.0 +pycoolmasternet-async==0.1.5 # homeassistant.components.microsoft pycsspeechtts==1.0.8 From f1029596d2ce66ad075aeefe74ebb4f2752cc0b3 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Wed, 7 Aug 2024 16:45:46 +0200 Subject: [PATCH 0483/1635] Drop Matter Microwave Oven Mode select entity (#123294) --- homeassistant/components/matter/select.py | 13 --------- tests/components/matter/test_select.py | 32 ----------------------- 2 files changed, 45 deletions(-) diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index 350712061ba..4a9ef3780d1 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -27,7 +27,6 @@ type SelectCluster = ( | clusters.RvcRunMode | clusters.RvcCleanMode | clusters.DishwasherMode - | clusters.MicrowaveOvenMode | clusters.EnergyEvseMode | clusters.DeviceEnergyManagementMode ) @@ -199,18 +198,6 @@ DISCOVERY_SCHEMAS = [ clusters.DishwasherMode.Attributes.SupportedModes, ), ), - MatterDiscoverySchema( - platform=Platform.SELECT, - 
entity_description=MatterSelectEntityDescription( - key="MatterMicrowaveOvenMode", - translation_key="mode", - ), - entity_class=MatterModeSelectEntity, - required_attributes=( - clusters.MicrowaveOvenMode.Attributes.CurrentMode, - clusters.MicrowaveOvenMode.Attributes.SupportedModes, - ), - ), MatterDiscoverySchema( platform=Platform.SELECT, entity_description=MatterSelectEntityDescription( diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py index 9b774f0430b..f84e5870392 100644 --- a/tests/components/matter/test_select.py +++ b/tests/components/matter/test_select.py @@ -25,16 +25,6 @@ async def dimmable_light_node_fixture( ) -@pytest.fixture(name="microwave_oven_node") -async def microwave_oven_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a microwave oven node.""" - return await setup_integration_with_node_fixture( - hass, "microwave-oven", matter_client - ) - - # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_mode_select_entities( @@ -87,28 +77,6 @@ async def test_mode_select_entities( # This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_microwave_select_entities( - hass: HomeAssistant, - matter_client: MagicMock, - microwave_oven_node: MatterNode, -) -> None: - """Test select entities are created for the MicrowaveOvenMode cluster attributes.""" - state = hass.states.get("select.microwave_oven_mode") - assert state - assert state.state == "Normal" - assert state.attributes["options"] == [ - "Normal", - "Defrost", - ] - # name should just be Mode (from the translation key) - assert state.attributes["friendly_name"] == "Microwave Oven Mode" - set_node_attribute(microwave_oven_node, 1, 94, 1, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("select.microwave_oven_mode") - assert state.state == "Defrost" - - @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_attribute_select_entities( hass: HomeAssistant, From f8fa6e43094c7288fcf1cb9fe6a260d71c4f3e1b Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 7 Aug 2024 10:42:59 -0500 Subject: [PATCH 0484/1635] Bump intents to 2024.8.7 (#123295) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 65c79cef187..d7a308b8b2b 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.29"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ca37ffbf4a8..10823557b94 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 home-assistant-frontend==20240806.1 -home-assistant-intents==2024.7.29 +home-assistant-intents==2024.8.7 
httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 30dbe77f5ff..6a0c68b8e25 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1096,7 +1096,7 @@ holidays==0.53 home-assistant-frontend==20240806.1 # homeassistant.components.conversation -home-assistant-intents==2024.7.29 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3b92a57da5d..21e9895187d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -922,7 +922,7 @@ holidays==0.53 home-assistant-frontend==20240806.1 # homeassistant.components.conversation -home-assistant-intents==2024.7.29 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect homeconnect==0.8.0 From ef564c537d2e3a23ee433ec8a96315038baf9a9f Mon Sep 17 00:00:00 2001 From: ashalita Date: Wed, 7 Aug 2024 15:19:05 +0300 Subject: [PATCH 0485/1635] Revert "Upgrade pycoolmasternet-async to 0.2.0" (#123286) --- homeassistant/components/coolmaster/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/coolmaster/manifest.json b/homeassistant/components/coolmaster/manifest.json index b405a82ad62..9488e068d44 100644 --- a/homeassistant/components/coolmaster/manifest.json +++ b/homeassistant/components/coolmaster/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/coolmaster", "iot_class": "local_polling", "loggers": ["pycoolmasternet_async"], - "requirements": ["pycoolmasternet-async==0.2.0"] + "requirements": ["pycoolmasternet-async==0.1.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index b12da4becd3..cf96a4363e5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1777,7 +1777,7 @@ pycmus==0.1.1 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.2.0 +pycoolmasternet-async==0.1.5 # homeassistant.components.microsoft pycsspeechtts==1.0.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7ec92ee20e7..8c215eedcea 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1427,7 +1427,7 @@ pycfdns==3.0.0 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.2.0 +pycoolmasternet-async==0.1.5 # homeassistant.components.microsoft pycsspeechtts==1.0.8 From 7a51d4ff62eaf3408662e76fe62de52d3e966ad9 Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Wed, 7 Aug 2024 16:45:46 +0200 Subject: [PATCH 0486/1635] Drop Matter Microwave Oven Mode select entity (#123294) --- homeassistant/components/matter/select.py | 13 --------- tests/components/matter/test_select.py | 32 ----------------------- 2 files changed, 45 deletions(-) diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index 350712061ba..4a9ef3780d1 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -27,7 +27,6 @@ type SelectCluster = ( | clusters.RvcRunMode | clusters.RvcCleanMode | clusters.DishwasherMode - | clusters.MicrowaveOvenMode | clusters.EnergyEvseMode | clusters.DeviceEnergyManagementMode ) @@ -199,18 +198,6 @@ DISCOVERY_SCHEMAS = [ clusters.DishwasherMode.Attributes.SupportedModes, ), ), - MatterDiscoverySchema( - platform=Platform.SELECT, - entity_description=MatterSelectEntityDescription( - key="MatterMicrowaveOvenMode", - 
translation_key="mode", - ), - entity_class=MatterModeSelectEntity, - required_attributes=( - clusters.MicrowaveOvenMode.Attributes.CurrentMode, - clusters.MicrowaveOvenMode.Attributes.SupportedModes, - ), - ), MatterDiscoverySchema( platform=Platform.SELECT, entity_description=MatterSelectEntityDescription( diff --git a/tests/components/matter/test_select.py b/tests/components/matter/test_select.py index 9b774f0430b..f84e5870392 100644 --- a/tests/components/matter/test_select.py +++ b/tests/components/matter/test_select.py @@ -25,16 +25,6 @@ async def dimmable_light_node_fixture( ) -@pytest.fixture(name="microwave_oven_node") -async def microwave_oven_node_fixture( - hass: HomeAssistant, matter_client: MagicMock -) -> MatterNode: - """Fixture for a microwave oven node.""" - return await setup_integration_with_node_fixture( - hass, "microwave-oven", matter_client - ) - - # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_mode_select_entities( @@ -87,28 +77,6 @@ async def test_mode_select_entities( # This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_microwave_select_entities( - hass: HomeAssistant, - matter_client: MagicMock, - microwave_oven_node: MatterNode, -) -> None: - """Test select entities are created for the MicrowaveOvenMode cluster attributes.""" - state = hass.states.get("select.microwave_oven_mode") - assert state - assert state.state == "Normal" - assert state.attributes["options"] == [ - "Normal", - "Defrost", - ] - # name should just be Mode (from the translation key) - assert state.attributes["friendly_name"] == "Microwave Oven Mode" - set_node_attribute(microwave_oven_node, 1, 94, 1, 1) - await trigger_subscription_callback(hass, matter_client) - state = hass.states.get("select.microwave_oven_mode") - assert state.state == "Defrost" - - @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_attribute_select_entities( hass: HomeAssistant, From 5367886732a5af1d5b07d4975480394612bc3f12 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 7 Aug 2024 10:42:59 -0500 Subject: [PATCH 0487/1635] Bump intents to 2024.8.7 (#123295) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 65c79cef187..d7a308b8b2b 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.7.29"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 13fbcabf0d1..472134fea37 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 home-assistant-frontend==20240806.1 -home-assistant-intents==2024.7.29 +home-assistant-intents==2024.8.7 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt 
b/requirements_all.txt index cf96a4363e5..b8f50d328f1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1096,7 +1096,7 @@ holidays==0.53 home-assistant-frontend==20240806.1 # homeassistant.components.conversation -home-assistant-intents==2024.7.29 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8c215eedcea..f6602bf082b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -919,7 +919,7 @@ holidays==0.53 home-assistant-frontend==20240806.1 # homeassistant.components.conversation -home-assistant-intents==2024.7.29 +home-assistant-intents==2024.8.7 # homeassistant.components.home_connect homeconnect==0.8.0 From ac6abb363c98ce5aef70c7b7eed38e4bcf787432 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 18:24:15 +0200 Subject: [PATCH 0488/1635] Bump version to 2024.8.0b9 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 987ee6572b6..6c63a980c5f 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b8" +PATCH_VERSION: Final = "0b9" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index 29c58954c5b..a4e95216896 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b8" +version = "2024.8.0b9" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" From be4810731a76c17a2c8206eb2e1bc90ec13b05cc Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Wed, 7 Aug 2024 19:04:33 +0200 Subject: [PATCH 0489/1635] Bump version to 2024.8.0 --- homeassistant/const.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index 6c63a980c5f..402f57a4f8b 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "0b9" +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) diff --git a/pyproject.toml b/pyproject.toml index a4e95216896..dc943b0832a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.0b9" +version = "2024.8.0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" From 2144a9a7b22e8f495bd15eac2d84d1e0fb95b82c Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 8 Aug 2024 02:56:02 -0400 Subject: [PATCH 0490/1635] Bump aiorussound to 2.2.2 (#123319) --- homeassistant/components/russound_rio/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index be5dd86793f..e7bb99010ee 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], - "requirements": ["aiorussound==2.2.0"] + "requirements": ["aiorussound==2.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6a0c68b8e25..51d321b9a19 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -350,7 +350,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.2.0 +aiorussound==2.2.2 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 21e9895187d..1d69137d94c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -332,7 +332,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.2.0 +aiorussound==2.2.2 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 From 135c80186fe5f9348eb64a43c16e4876aebdc568 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 8 Aug 2024 01:56:40 -0500 Subject: [PATCH 0491/1635] Fix doorbird with externally added events (#123313) --- homeassistant/components/doorbird/device.py | 2 +- tests/components/doorbird/fixtures/favorites.json | 4 ++++ tests/components/doorbird/test_button.py | 2 +- 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/doorbird/device.py b/homeassistant/components/doorbird/device.py index 7cd45487464..adcb441f458 100644 --- a/homeassistant/components/doorbird/device.py +++ b/homeassistant/components/doorbird/device.py @@ -195,7 +195,7 @@ class ConfiguredDoorBird: title: str | None = data.get("title") if not title or not title.startswith("Home Assistant"): continue - event = title.split("(")[1].strip(")") + event = title.partition("(")[2].strip(")") if input_type := favorite_input_type.get(identifier): events.append(DoorbirdEvent(event, input_type)) elif input_type := default_event_types.get(event): diff --git a/tests/components/doorbird/fixtures/favorites.json b/tests/components/doorbird/fixtures/favorites.json index c56f79c0300..50dddb850a5 100644 --- a/tests/components/doorbird/fixtures/favorites.json +++ b/tests/components/doorbird/fixtures/favorites.json @@ -7,6 +7,10 @@ "1": { "title": "Home Assistant (mydoorbird_motion)", "value": "http://127.0.0.1:8123/api/doorbird/mydoorbird_motion?token=01J2F4B97Y7P1SARXEJ6W07EKD" + }, + "2": { + "title": "externally added event", + "value": "http://127.0.0.1/" } } } diff --git a/tests/components/doorbird/test_button.py b/tests/components/doorbird/test_button.py index 2131e3d6133..cb4bab656ee 100644 --- a/tests/components/doorbird/test_button.py +++ b/tests/components/doorbird/test_button.py @@ -49,4 +49,4 @@ async def test_reset_favorites_button( DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: 
reset_entity_id}, blocking=True ) assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN - assert doorbird_entry.api.delete_favorite.call_count == 2 + assert doorbird_entry.api.delete_favorite.call_count == 3 From 7dea5d2fe6911c2ed516b1d752e9ef83aa11b1a4 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 8 Aug 2024 08:59:30 +0200 Subject: [PATCH 0492/1635] Refactor spc tests (#123287) --- homeassistant/components/spc/__init__.py | 4 +- tests/components/spc/conftest.py | 26 ++++++++ .../spc/test_alarm_control_panel.py | 34 ++++++++++ tests/components/spc/test_init.py | 65 ++----------------- 4 files changed, 69 insertions(+), 60 deletions(-) create mode 100644 tests/components/spc/conftest.py create mode 100644 tests/components/spc/test_alarm_control_panel.py diff --git a/homeassistant/components/spc/__init__.py b/homeassistant/components/spc/__init__.py index bb025d699fc..3d9467f2041 100644 --- a/homeassistant/components/spc/__init__.py +++ b/homeassistant/components/spc/__init__.py @@ -41,7 +41,7 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the SPC component.""" - async def async_upate_callback(spc_object): + async def async_update_callback(spc_object): if isinstance(spc_object, Area): async_dispatcher_send(hass, SIGNAL_UPDATE_ALARM.format(spc_object.id)) elif isinstance(spc_object, Zone): @@ -54,7 +54,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: session=session, api_url=config[DOMAIN].get(CONF_API_URL), ws_url=config[DOMAIN].get(CONF_WS_URL), - async_callback=async_upate_callback, + async_callback=async_update_callback, ) hass.data[DATA_API] = spc diff --git a/tests/components/spc/conftest.py b/tests/components/spc/conftest.py new file mode 100644 index 00000000000..1ccda31e314 --- /dev/null +++ b/tests/components/spc/conftest.py @@ -0,0 +1,26 @@ +"""Tests for Vanderbilt SPC component.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pyspcwebgw +import pytest + + +@pytest.fixture +def mock_client() -> Generator[AsyncMock]: + """Mock the SPC client.""" + + with patch( + "homeassistant.components.spc.SpcWebGateway", autospec=True + ) as mock_client: + client = mock_client.return_value + client.async_load_parameters.return_value = True + mock_area = AsyncMock(spec=pyspcwebgw.area.Area) + mock_area.id = "1" + mock_area.mode = pyspcwebgw.const.AreaMode.FULL_SET + mock_area.last_changed_by = "Sven" + mock_area.name = "House" + mock_area.verified_alarm = False + client.areas = {"1": mock_area} + yield mock_client diff --git a/tests/components/spc/test_alarm_control_panel.py b/tests/components/spc/test_alarm_control_panel.py new file mode 100644 index 00000000000..7b1ab4ff947 --- /dev/null +++ b/tests/components/spc/test_alarm_control_panel.py @@ -0,0 +1,34 @@ +"""Tests for Vanderbilt SPC component.""" + +from unittest.mock import AsyncMock + +from pyspcwebgw.const import AreaMode + +from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) -> None: + """Test that alarm panel state changes on incoming websocket data.""" + + config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} + assert await async_setup_component(hass, "spc", config) is True + + await hass.async_block_till_done() + + entity_id = 
"alarm_control_panel.house" + + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" + + mock_area = mock_client.return_value.areas["1"] + + mock_area.mode = AreaMode.UNSET + mock_area.last_changed_by = "Anna" + + await mock_client.call_args_list[0][1]["async_callback"](mock_area) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spc/test_init.py b/tests/components/spc/test_init.py index 4f335e2f980..dc407dc2c5b 100644 --- a/tests/components/spc/test_init.py +++ b/tests/components/spc/test_init.py @@ -1,73 +1,22 @@ """Tests for Vanderbilt SPC component.""" -from unittest.mock import Mock, PropertyMock, patch +from unittest.mock import AsyncMock -import pyspcwebgw -from pyspcwebgw.const import AreaMode - -from homeassistant.components.spc import DATA_API -from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -async def test_valid_device_config(hass: HomeAssistant) -> None: +async def test_valid_device_config(hass: HomeAssistant, mock_client: AsyncMock) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is True + assert await async_setup_component(hass, "spc", config) is True -async def test_invalid_device_config(hass: HomeAssistant) -> None: +async def test_invalid_device_config( + hass: HomeAssistant, mock_client: AsyncMock +) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/"}} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is False - - -async def test_update_alarm_device(hass: HomeAssistant) -> None: - """Test that alarm panel state changes on incoming websocket data.""" - - config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - - area_mock = Mock( - spec=pyspcwebgw.area.Area, - id="1", - mode=AreaMode.FULL_SET, - last_changed_by="Sven", - ) - area_mock.name = "House" - area_mock.verified_alarm = False - - with patch( - "homeassistant.components.spc.SpcWebGateway.areas", new_callable=PropertyMock - ) as mock_areas: - mock_areas.return_value = {"1": area_mock} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is True - - await hass.async_block_till_done() - - entity_id = "alarm_control_panel.house" - - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY - assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" - - area_mock.mode = AreaMode.UNSET - area_mock.last_changed_by = "Anna" - await hass.data[DATA_API]._async_callback(area_mock) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" + assert await async_setup_component(hass, "spc", config) is False From b7f5236a0a630aea594f1b2f9558aff908a99555 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Thu, 8 Aug 2024 10:09:36 +0200 Subject: [PATCH 0493/1635] Fix implicit-return in konnected (#122915) * Fix implicit-return in konnected * Adjust * Adjust * Adjust tests --- homeassistant/components/konnected/switch.py | 5 ++--- tests/components/konnected/test_panel.py | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/konnected/switch.py b/homeassistant/components/konnected/switch.py index 424a2d9164d..65b99d623f1 100644 --- a/homeassistant/components/konnected/switch.py +++ b/homeassistant/components/konnected/switch.py @@ -102,13 +102,12 @@ class KonnectedSwitch(SwitchEntity): if resp.get(ATTR_STATE) is not None: self._set_state(self._boolean_state(resp.get(ATTR_STATE))) - def _boolean_state(self, int_state): - if int_state is None: - return False + def _boolean_state(self, int_state: int | None) -> bool | None: if int_state == 0: return self._activation == STATE_LOW if int_state == 1: return self._activation == STATE_HIGH + return None def _set_state(self, state): self._attr_is_on = state diff --git a/tests/components/konnected/test_panel.py b/tests/components/konnected/test_panel.py index 64cc414cdd3..48ebea64161 100644 --- a/tests/components/konnected/test_panel.py +++ b/tests/components/konnected/test_panel.py @@ -700,4 +700,4 @@ async def test_connect_retry(hass: HomeAssistant, mock_panel) -> None: async_fire_time_changed(hass, utcnow() + timedelta(seconds=21)) await hass.async_block_till_done() await async_update_entity(hass, "switch.konnected_445566_actuator_6") - assert hass.states.get("switch.konnected_445566_actuator_6").state == "off" + assert hass.states.get("switch.konnected_445566_actuator_6").state == "unknown" From 984bbf60ef9c1b14832f3f5ac4e3fb946371c09a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 8 Aug 2024 10:25:32 +0200 Subject: [PATCH 0494/1635] Bump sigstore/cosign-installer from 3.5.0 to 3.6.0 (#123335) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 5cb860a98ef..522095ad041 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -323,7 +323,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Install Cosign - uses: sigstore/cosign-installer@v3.5.0 + uses: sigstore/cosign-installer@v3.6.0 with: cosign-release: "v2.2.3" From 8a8fac46e0206d2f58496569cee873cfeab8b9dd Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Thu, 8 Aug 2024 11:32:22 +0200 Subject: [PATCH 0495/1635] Remove unneeded logs from Yamaha (#123349) --- homeassistant/components/yamaha/media_player.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index a8200ea3373..f6434616cfa 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -238,19 +238,6 @@ class YamahaDeviceZone(MediaPlayerEntity): # the default name of the integration may not be changed # to avoid a breaking change. 
self._attr_unique_id = f"{self.zctrl.serial_number}_{self._zone}" - _LOGGER.debug( - "Receiver zone: %s zone %s uid %s", - self._name, - self._zone, - self._attr_unique_id, - ) - else: - _LOGGER.info( - "Receiver zone: %s zone %s no uid %s", - self._name, - self._zone, - self._attr_unique_id, - ) def update(self) -> None: """Get the latest details from the device.""" From 02a404081a2820302dd2333f89b5eec40a88b2f7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 8 Aug 2024 11:33:49 +0200 Subject: [PATCH 0496/1635] Fix implicit-return in yeelight (#122943) --- homeassistant/components/yeelight/scanner.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/yeelight/scanner.py b/homeassistant/components/yeelight/scanner.py index 6ca12e9bd01..ac482504880 100644 --- a/homeassistant/components/yeelight/scanner.py +++ b/homeassistant/components/yeelight/scanner.py @@ -67,7 +67,8 @@ class YeelightScanner: async def async_setup(self) -> None: """Set up the scanner.""" if self._setup_future is not None: - return await self._setup_future + await self._setup_future + return self._setup_future = self._hass.loop.create_future() connected_futures: list[asyncio.Future[None]] = [] From d08f4fbace01f528dbebb732d02fc8201c5fdbf0 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Thu, 8 Aug 2024 11:56:18 +0200 Subject: [PATCH 0497/1635] Enable Ruff RET501 (#115031) * Enable Ruff RET501 * add noqa and type hints * use Any for fixtures * Review comments, typing fixes * Review comments * fix new occurrences, clean up * Fix typing * clean up rebase * more cleanup * Remove old occurrences --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a0a98bc72be..9e4fe36243b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -822,7 +822,6 @@ ignore = [ # temporarily disabled "RET503", - "RET501", "TRY301" ] From baceb2a92ac6e0cb70639195c3ea205a378551ac Mon Sep 17 00:00:00 2001 From: Tom Brien Date: Thu, 8 Aug 2024 11:26:03 +0100 Subject: [PATCH 0498/1635] Add support for v3 Coinbase API (#116345) * Add support for v3 Coinbase API * Add deps * Move tests --- homeassistant/components/coinbase/__init__.py | 103 ++++++++++++++---- .../components/coinbase/config_flow.py | 45 +++++--- homeassistant/components/coinbase/const.py | 11 +- .../components/coinbase/manifest.json | 2 +- homeassistant/components/coinbase/sensor.py | 69 +++++++----- requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/coinbase/common.py | 68 +++++++++++- tests/components/coinbase/const.py | 28 +++++ .../coinbase/snapshots/test_diagnostics.ambr | 33 ++---- tests/components/coinbase/test_config_flow.py | 90 ++++++++++++++- 11 files changed, 359 insertions(+), 96 deletions(-) diff --git a/homeassistant/components/coinbase/__init__.py b/homeassistant/components/coinbase/__init__.py index a231bb5cda0..f5fd8fa1dc3 100644 --- a/homeassistant/components/coinbase/__init__.py +++ b/homeassistant/components/coinbase/__init__.py @@ -5,7 +5,9 @@ from __future__ import annotations from datetime import timedelta import logging -from coinbase.wallet.client import Client +from coinbase.rest import RESTClient +from coinbase.rest.rest_base import HTTPError +from coinbase.wallet.client import Client as LegacyClient from coinbase.wallet.error import AuthenticationError from homeassistant.config_entries import 
ConfigEntry @@ -15,8 +17,23 @@ from homeassistant.helpers import entity_registry as er from homeassistant.util import Throttle from .const import ( + ACCOUNT_IS_VAULT, + API_ACCOUNT_AMOUNT, + API_ACCOUNT_AVALIABLE, + API_ACCOUNT_BALANCE, + API_ACCOUNT_CURRENCY, + API_ACCOUNT_CURRENCY_CODE, + API_ACCOUNT_HOLD, API_ACCOUNT_ID, - API_ACCOUNTS_DATA, + API_ACCOUNT_NAME, + API_ACCOUNT_VALUE, + API_ACCOUNTS, + API_DATA, + API_RATES_CURRENCY, + API_RESOURCE_TYPE, + API_TYPE_VAULT, + API_V3_ACCOUNT_ID, + API_V3_TYPE_VAULT, CONF_CURRENCIES, CONF_EXCHANGE_BASE, CONF_EXCHANGE_RATES, @@ -56,9 +73,16 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: def create_and_update_instance(entry: ConfigEntry) -> CoinbaseData: """Create and update a Coinbase Data instance.""" - client = Client(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN]) + if "organizations" not in entry.data[CONF_API_KEY]: + client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN]) + version = "v2" + else: + client = RESTClient( + api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN] + ) + version = "v3" base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD") - instance = CoinbaseData(client, base_rate) + instance = CoinbaseData(client, base_rate, version) instance.update() return instance @@ -83,42 +107,83 @@ async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> Non registry.async_remove(entity.entity_id) -def get_accounts(client): +def get_accounts(client, version): """Handle paginated accounts.""" response = client.get_accounts() - accounts = response[API_ACCOUNTS_DATA] - next_starting_after = response.pagination.next_starting_after - - while next_starting_after: - response = client.get_accounts(starting_after=next_starting_after) - accounts += response[API_ACCOUNTS_DATA] + if version == "v2": + accounts = response[API_DATA] next_starting_after = response.pagination.next_starting_after - return accounts + while next_starting_after: + response = client.get_accounts(starting_after=next_starting_after) + accounts += response[API_DATA] + next_starting_after = response.pagination.next_starting_after + + return [ + { + API_ACCOUNT_ID: account[API_ACCOUNT_ID], + API_ACCOUNT_NAME: account[API_ACCOUNT_NAME], + API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][ + API_ACCOUNT_CURRENCY_CODE + ], + API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT], + ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT, + } + for account in accounts + ] + + accounts = response[API_ACCOUNTS] + while response["has_next"]: + response = client.get_accounts(cursor=response["cursor"]) + accounts += response["accounts"] + + return [ + { + API_ACCOUNT_ID: account[API_V3_ACCOUNT_ID], + API_ACCOUNT_NAME: account[API_ACCOUNT_NAME], + API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY], + API_ACCOUNT_AMOUNT: account[API_ACCOUNT_AVALIABLE][API_ACCOUNT_VALUE] + + account[API_ACCOUNT_HOLD][API_ACCOUNT_VALUE], + ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_V3_TYPE_VAULT, + } + for account in accounts + ] class CoinbaseData: """Get the latest data and update the states.""" - def __init__(self, client, exchange_base): + def __init__(self, client, exchange_base, version): """Init the coinbase data object.""" self.client = client self.accounts = None self.exchange_base = exchange_base self.exchange_rates = None - self.user_id = self.client.get_current_user()[API_ACCOUNT_ID] + if version == "v2": + self.user_id = 
self.client.get_current_user()[API_ACCOUNT_ID] + else: + self.user_id = ( + "v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID] + ) + self.api_version = version @Throttle(MIN_TIME_BETWEEN_UPDATES) def update(self): """Get the latest data from coinbase.""" try: - self.accounts = get_accounts(self.client) - self.exchange_rates = self.client.get_exchange_rates( - currency=self.exchange_base - ) - except AuthenticationError as coinbase_error: + self.accounts = get_accounts(self.client, self.api_version) + if self.api_version == "v2": + self.exchange_rates = self.client.get_exchange_rates( + currency=self.exchange_base + ) + else: + self.exchange_rates = self.client.get( + "/v2/exchange-rates", + params={API_RATES_CURRENCY: self.exchange_base}, + )[API_DATA] + except (AuthenticationError, HTTPError) as coinbase_error: _LOGGER.error( "Authentication error connecting to coinbase: %s", coinbase_error ) diff --git a/homeassistant/components/coinbase/config_flow.py b/homeassistant/components/coinbase/config_flow.py index 623d5cf6731..616fdaf8f7a 100644 --- a/homeassistant/components/coinbase/config_flow.py +++ b/homeassistant/components/coinbase/config_flow.py @@ -5,7 +5,9 @@ from __future__ import annotations import logging from typing import Any -from coinbase.wallet.client import Client +from coinbase.rest import RESTClient +from coinbase.rest.rest_base import HTTPError +from coinbase.wallet.client import Client as LegacyClient from coinbase.wallet.error import AuthenticationError import voluptuous as vol @@ -15,18 +17,17 @@ from homeassistant.config_entries import ( ConfigFlowResult, OptionsFlow, ) -from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError import homeassistant.helpers.config_validation as cv from . 
import get_accounts from .const import ( + ACCOUNT_IS_VAULT, API_ACCOUNT_CURRENCY, - API_ACCOUNT_CURRENCY_CODE, + API_DATA, API_RATES, - API_RESOURCE_TYPE, - API_TYPE_VAULT, CONF_CURRENCIES, CONF_EXCHANGE_BASE, CONF_EXCHANGE_PRECISION, @@ -49,8 +50,11 @@ STEP_USER_DATA_SCHEMA = vol.Schema( def get_user_from_client(api_key, api_token): """Get the user name from Coinbase API credentials.""" - client = Client(api_key, api_token) - return client.get_current_user() + if "organizations" not in api_key: + client = LegacyClient(api_key, api_token) + return client.get_current_user()["name"] + client = RESTClient(api_key=api_key, api_secret=api_token) + return client.get_portfolios()["portfolios"][0]["name"] async def validate_api(hass: HomeAssistant, data): @@ -60,11 +64,13 @@ async def validate_api(hass: HomeAssistant, data): user = await hass.async_add_executor_job( get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN] ) - except AuthenticationError as error: - if "api key" in str(error): + except (AuthenticationError, HTTPError) as error: + if "api key" in str(error) or " 401 Client Error" in str(error): _LOGGER.debug("Coinbase rejected API credentials due to an invalid API key") raise InvalidKey from error - if "invalid signature" in str(error): + if "invalid signature" in str( + error + ) or "'Could not deserialize key data" in str(error): _LOGGER.debug( "Coinbase rejected API credentials due to an invalid API secret" ) @@ -73,8 +79,8 @@ async def validate_api(hass: HomeAssistant, data): raise InvalidAuth from error except ConnectionError as error: raise CannotConnect from error - - return {"title": user["name"]} + api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2" + return {"title": user, "api_version": api_version} async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, options): @@ -82,14 +88,20 @@ async def validate_options(hass: HomeAssistant, config_entry: ConfigEntry, optio client = hass.data[DOMAIN][config_entry.entry_id].client - accounts = await hass.async_add_executor_job(get_accounts, client) + accounts = await hass.async_add_executor_job( + get_accounts, client, config_entry.data.get("api_version", "v2") + ) accounts_currencies = [ - account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] + account[API_ACCOUNT_CURRENCY] for account in accounts - if account[API_RESOURCE_TYPE] != API_TYPE_VAULT + if not account[ACCOUNT_IS_VAULT] ] - available_rates = await hass.async_add_executor_job(client.get_exchange_rates) + if config_entry.data.get("api_version", "v2") == "v2": + available_rates = await hass.async_add_executor_job(client.get_exchange_rates) + else: + resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates") + available_rates = resp[API_DATA] if CONF_CURRENCIES in options: for currency in options[CONF_CURRENCIES]: if currency not in accounts_currencies: @@ -134,6 +146,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: + user_input[CONF_API_VERSION] = info["api_version"] return self.async_create_entry(title=info["title"], data=user_input) return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors diff --git a/homeassistant/components/coinbase/const.py b/homeassistant/components/coinbase/const.py index f5c75e3f926..0f47d4bc208 100644 --- a/homeassistant/components/coinbase/const.py +++ b/homeassistant/components/coinbase/const.py @@ -1,5 +1,7 @@ """Constants used for Coinbase.""" +ACCOUNT_IS_VAULT = 
"is_vault" + CONF_CURRENCIES = "account_balance_currencies" CONF_EXCHANGE_BASE = "exchange_base" CONF_EXCHANGE_RATES = "exchange_rate_currencies" @@ -10,18 +12,25 @@ DOMAIN = "coinbase" # Constants for data returned by Coinbase API API_ACCOUNT_AMOUNT = "amount" +API_ACCOUNT_AVALIABLE = "available_balance" API_ACCOUNT_BALANCE = "balance" API_ACCOUNT_CURRENCY = "currency" API_ACCOUNT_CURRENCY_CODE = "code" +API_ACCOUNT_HOLD = "hold" API_ACCOUNT_ID = "id" API_ACCOUNT_NATIVE_BALANCE = "balance" API_ACCOUNT_NAME = "name" -API_ACCOUNTS_DATA = "data" +API_ACCOUNT_VALUE = "value" +API_ACCOUNTS = "accounts" +API_DATA = "data" API_RATES = "rates" +API_RATES_CURRENCY = "currency" API_RESOURCE_PATH = "resource_path" API_RESOURCE_TYPE = "type" API_TYPE_VAULT = "vault" API_USD = "USD" +API_V3_ACCOUNT_ID = "uuid" +API_V3_TYPE_VAULT = "ACCOUNT_TYPE_VAULT" WALLETS = { "1INCH": "1INCH", diff --git a/homeassistant/components/coinbase/manifest.json b/homeassistant/components/coinbase/manifest.json index 515fe9f9abb..be632b5e856 100644 --- a/homeassistant/components/coinbase/manifest.json +++ b/homeassistant/components/coinbase/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/coinbase", "iot_class": "cloud_polling", "loggers": ["coinbase"], - "requirements": ["coinbase==2.1.0"] + "requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"] } diff --git a/homeassistant/components/coinbase/sensor.py b/homeassistant/components/coinbase/sensor.py index 83c63fa55fb..d3f3c81fb0c 100644 --- a/homeassistant/components/coinbase/sensor.py +++ b/homeassistant/components/coinbase/sensor.py @@ -12,15 +12,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import CoinbaseData from .const import ( + ACCOUNT_IS_VAULT, API_ACCOUNT_AMOUNT, - API_ACCOUNT_BALANCE, API_ACCOUNT_CURRENCY, - API_ACCOUNT_CURRENCY_CODE, API_ACCOUNT_ID, API_ACCOUNT_NAME, API_RATES, - API_RESOURCE_TYPE, - API_TYPE_VAULT, CONF_CURRENCIES, CONF_EXCHANGE_PRECISION, CONF_EXCHANGE_PRECISION_DEFAULT, @@ -31,6 +28,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) ATTR_NATIVE_BALANCE = "Balance in native currency" +ATTR_API_VERSION = "API Version" CURRENCY_ICONS = { "BTC": "mdi:currency-btc", @@ -56,9 +54,9 @@ async def async_setup_entry( entities: list[SensorEntity] = [] provided_currencies: list[str] = [ - account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] + account[API_ACCOUNT_CURRENCY] for account in instance.accounts - if account[API_RESOURCE_TYPE] != API_TYPE_VAULT + if not account[ACCOUNT_IS_VAULT] ] desired_currencies: list[str] = [] @@ -73,6 +71,11 @@ async def async_setup_entry( ) for currency in desired_currencies: + _LOGGER.debug( + "Attempting to set up %s account sensor with %s API", + currency, + instance.api_version, + ) if currency not in provided_currencies: _LOGGER.warning( ( @@ -85,12 +88,17 @@ async def async_setup_entry( entities.append(AccountSensor(instance, currency)) if CONF_EXCHANGE_RATES in config_entry.options: - entities.extend( - ExchangeRateSensor( - instance, rate, exchange_base_currency, exchange_precision + for rate in config_entry.options[CONF_EXCHANGE_RATES]: + _LOGGER.debug( + "Attempting to set up %s account sensor with %s API", + rate, + instance.api_version, + ) + entities.append( + ExchangeRateSensor( + instance, rate, exchange_base_currency, exchange_precision + ) ) - for rate in config_entry.options[CONF_EXCHANGE_RATES] - ) async_add_entities(entities) @@ -105,26 +113,21 @@ class AccountSensor(SensorEntity): 
self._coinbase_data = coinbase_data self._currency = currency for account in coinbase_data.accounts: - if ( - account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] != currency - or account[API_RESOURCE_TYPE] == API_TYPE_VAULT - ): + if account[API_ACCOUNT_CURRENCY] != currency or account[ACCOUNT_IS_VAULT]: continue self._attr_name = f"Coinbase {account[API_ACCOUNT_NAME]}" self._attr_unique_id = ( f"coinbase-{account[API_ACCOUNT_ID]}-wallet-" - f"{account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE]}" + f"{account[API_ACCOUNT_CURRENCY]}" ) - self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT] - self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY][ - API_ACCOUNT_CURRENCY_CODE - ] + self._attr_native_value = account[API_ACCOUNT_AMOUNT] + self._attr_native_unit_of_measurement = account[API_ACCOUNT_CURRENCY] self._attr_icon = CURRENCY_ICONS.get( - account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE], + account[API_ACCOUNT_CURRENCY], DEFAULT_COIN_ICON, ) self._native_balance = round( - float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]) + float(account[API_ACCOUNT_AMOUNT]) / float(coinbase_data.exchange_rates[API_RATES][currency]), 2, ) @@ -144,21 +147,26 @@ class AccountSensor(SensorEntity): """Return the state attributes of the sensor.""" return { ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}", + ATTR_API_VERSION: self._coinbase_data.api_version, } def update(self) -> None: """Get the latest state of the sensor.""" + _LOGGER.debug( + "Updating %s account sensor with %s API", + self._currency, + self._coinbase_data.api_version, + ) self._coinbase_data.update() for account in self._coinbase_data.accounts: if ( - account[API_ACCOUNT_CURRENCY][API_ACCOUNT_CURRENCY_CODE] - != self._currency - or account[API_RESOURCE_TYPE] == API_TYPE_VAULT + account[API_ACCOUNT_CURRENCY] != self._currency + or account[ACCOUNT_IS_VAULT] ): continue - self._attr_native_value = account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT] + self._attr_native_value = account[API_ACCOUNT_AMOUNT] self._native_balance = round( - float(account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT]) + float(account[API_ACCOUNT_AMOUNT]) / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]), 2, ) @@ -202,8 +210,13 @@ class ExchangeRateSensor(SensorEntity): def update(self) -> None: """Get the latest state of the sensor.""" + _LOGGER.debug( + "Updating %s rate sensor with %s API", + self._currency, + self._coinbase_data.api_version, + ) self._coinbase_data.update() self._attr_native_value = round( - 1 / float(self._coinbase_data.exchange_rates.rates[self._currency]), + 1 / float(self._coinbase_data.exchange_rates[API_RATES][self._currency]), self._precision, ) diff --git a/requirements_all.txt b/requirements_all.txt index 51d321b9a19..1960107d88e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -660,6 +660,9 @@ clearpasspy==1.0.2 # homeassistant.components.sinch clx-sdk-xms==1.0.0 +# homeassistant.components.coinbase +coinbase-advanced-py==1.2.2 + # homeassistant.components.coinbase coinbase==2.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1d69137d94c..4559cd3dca0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -562,6 +562,9 @@ cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 +# homeassistant.components.coinbase +coinbase-advanced-py==1.2.2 + # homeassistant.components.coinbase coinbase==2.1.0 diff --git a/tests/components/coinbase/common.py 
b/tests/components/coinbase/common.py index 3421c4ce838..2768b6a2cd4 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -5,13 +5,14 @@ from homeassistant.components.coinbase.const import ( CONF_EXCHANGE_RATES, DOMAIN, ) -from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION from .const import ( GOOD_CURRENCY_2, GOOD_EXCHANGE_RATE, GOOD_EXCHANGE_RATE_2, MOCK_ACCOUNTS_RESPONSE, + MOCK_ACCOUNTS_RESPONSE_V3, ) from tests.common import MockConfigEntry @@ -54,6 +55,33 @@ def mocked_get_accounts(_, **kwargs): return MockGetAccounts(**kwargs) +class MockGetAccountsV3: + """Mock accounts with pagination.""" + + def __init__(self, cursor=""): + """Init mocked object, forced to return two at a time.""" + ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3] + start = ids.index(cursor) if cursor else 0 + + has_next = (target_end := start + 2) < len(MOCK_ACCOUNTS_RESPONSE_V3) + end = target_end if has_next else -1 + next_cursor = ids[end] if has_next else ids[-1] + self.accounts = { + "accounts": MOCK_ACCOUNTS_RESPONSE_V3[start:end], + "has_next": has_next, + "cursor": next_cursor, + } + + def __getitem__(self, item): + """Handle subscript request.""" + return self.accounts[item] + + +def mocked_get_accounts_v3(_, **kwargs): + """Return simplified accounts using mock.""" + return MockGetAccountsV3(**kwargs) + + def mock_get_current_user(): """Return a simplified mock user.""" return { @@ -74,6 +102,19 @@ def mock_get_exchange_rates(): } +def mock_get_portfolios(): + """Return a mocked list of Coinbase portfolios.""" + return { + "portfolios": [ + { + "name": "Default", + "uuid": "123456", + "type": "DEFAULT", + } + ] + } + + async def init_mock_coinbase(hass, currencies=None, rates=None): """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( @@ -93,3 +134,28 @@ async def init_mock_coinbase(hass, currencies=None, rates=None): await hass.async_block_till_done() return config_entry + + +async def init_mock_coinbase_v3(hass, currencies=None, rates=None): + """Init Coinbase integration for testing.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + entry_id="080272b77a4f80c41b94d7cdc86fd826", + unique_id=None, + title="Test User v3", + data={ + CONF_API_KEY: "organizations/123456", + CONF_API_TOKEN: "AbCDeF", + CONF_API_VERSION: "v3", + }, + options={ + CONF_CURRENCIES: currencies or [], + CONF_EXCHANGE_RATES: rates or [], + }, + ) + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/coinbase/const.py b/tests/components/coinbase/const.py index dcd14555ca3..5fbba11eb2d 100644 --- a/tests/components/coinbase/const.py +++ b/tests/components/coinbase/const.py @@ -31,3 +31,31 @@ MOCK_ACCOUNTS_RESPONSE = [ "type": "fiat", }, ] + +MOCK_ACCOUNTS_RESPONSE_V3 = [ + { + "uuid": "123456789", + "name": "BTC Wallet", + "currency": GOOD_CURRENCY, + "available_balance": {"value": "0.00001", "currency": GOOD_CURRENCY}, + "type": "ACCOUNT_TYPE_CRYPTO", + "hold": {"value": "0", "currency": GOOD_CURRENCY}, + }, + { + "uuid": "abcdefg", + "name": "BTC Vault", + "currency": GOOD_CURRENCY, + "available_balance": {"value": "100.00", "currency": GOOD_CURRENCY}, + "type": "ACCOUNT_TYPE_VAULT", + "hold": {"value": "0", "currency": GOOD_CURRENCY}, + }, + { + "uuid": "987654321", + "name": "USD Wallet", + "currency": GOOD_CURRENCY_2, + 
"available_balance": {"value": "9.90", "currency": GOOD_CURRENCY_2}, + "type": "ACCOUNT_TYPE_FIAT", + "ready": True, + "hold": {"value": "0", "currency": GOOD_CURRENCY_2}, + }, +] diff --git a/tests/components/coinbase/snapshots/test_diagnostics.ambr b/tests/components/coinbase/snapshots/test_diagnostics.ambr index 9079a7682c8..4f9e75dc38b 100644 --- a/tests/components/coinbase/snapshots/test_diagnostics.ambr +++ b/tests/components/coinbase/snapshots/test_diagnostics.ambr @@ -3,40 +3,25 @@ dict({ 'accounts': list([ dict({ - 'balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'BTC', - }), - 'currency': dict({ - 'code': 'BTC', - }), + 'amount': '**REDACTED**', + 'currency': 'BTC', 'id': '**REDACTED**', + 'is_vault': False, 'name': 'BTC Wallet', - 'type': 'wallet', }), dict({ - 'balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'BTC', - }), - 'currency': dict({ - 'code': 'BTC', - }), + 'amount': '**REDACTED**', + 'currency': 'BTC', 'id': '**REDACTED**', + 'is_vault': True, 'name': 'BTC Vault', - 'type': 'vault', }), dict({ - 'balance': dict({ - 'amount': '**REDACTED**', - 'currency': 'USD', - }), - 'currency': dict({ - 'code': 'USD', - }), + 'amount': '**REDACTED**', + 'currency': 'USD', 'id': '**REDACTED**', + 'is_vault': False, 'name': 'USD Wallet', - 'type': 'fiat', }), ]), 'entry': dict({ diff --git a/tests/components/coinbase/test_config_flow.py b/tests/components/coinbase/test_config_flow.py index f213392bb1e..aa2c6208e0f 100644 --- a/tests/components/coinbase/test_config_flow.py +++ b/tests/components/coinbase/test_config_flow.py @@ -14,15 +14,18 @@ from homeassistant.components.coinbase.const import ( CONF_EXCHANGE_RATES, DOMAIN, ) -from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN +from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .common import ( init_mock_coinbase, + init_mock_coinbase_v3, mock_get_current_user, mock_get_exchange_rates, + mock_get_portfolios, mocked_get_accounts, + mocked_get_accounts_v3, ) from .const import BAD_CURRENCY, BAD_EXCHANGE_RATE, GOOD_CURRENCY, GOOD_EXCHANGE_RATE @@ -53,16 +56,17 @@ async def test_form(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_API_KEY: "123456", - CONF_API_TOKEN: "AbCDeF", - }, + {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"}, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test User" - assert result2["data"] == {CONF_API_KEY: "123456", CONF_API_TOKEN: "AbCDeF"} + assert result2["data"] == { + CONF_API_KEY: "123456", + CONF_API_TOKEN: "AbCDeF", + CONF_API_VERSION: "v2", + } assert len(mock_setup_entry.mock_calls) == 1 @@ -314,3 +318,77 @@ async def test_option_catch_all_exception(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "unknown"} + + +async def test_form_v3(hass: HomeAssistant) -> None: + """Test we get the form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + with ( + patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3), + patch( + "coinbase.rest.RESTClient.get_portfolios", + return_value=mock_get_portfolios(), + ), + patch( + "coinbase.rest.RESTBase.get", + 
return_value={"data": mock_get_exchange_rates()}, + ), + patch( + "homeassistant.components.coinbase.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_API_KEY: "organizations/123456", CONF_API_TOKEN: "AbCDeF"}, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "Default" + assert result2["data"] == { + CONF_API_KEY: "organizations/123456", + CONF_API_TOKEN: "AbCDeF", + CONF_API_VERSION: "v3", + } + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_option_form_v3(hass: HomeAssistant) -> None: + """Test we handle a good wallet currency option.""" + + with ( + patch("coinbase.rest.RESTClient.get_accounts", new=mocked_get_accounts_v3), + patch( + "coinbase.rest.RESTClient.get_portfolios", + return_value=mock_get_portfolios(), + ), + patch( + "coinbase.rest.RESTBase.get", + return_value={"data": mock_get_exchange_rates()}, + ), + patch( + "homeassistant.components.coinbase.update_listener" + ) as mock_update_listener, + ): + config_entry = await init_mock_coinbase_v3(hass) + await hass.async_block_till_done() + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + result2 = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CURRENCIES: [GOOD_CURRENCY], + CONF_EXCHANGE_RATES: [GOOD_EXCHANGE_RATE], + CONF_EXCHANGE_PRECISION: 5, + }, + ) + assert result2["type"] is FlowResultType.CREATE_ENTRY + await hass.async_block_till_done() + assert len(mock_update_listener.mock_calls) == 1 From a406068f13022c7279ccf7c68c4553df60896124 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 8 Aug 2024 16:32:46 +0200 Subject: [PATCH 0499/1635] Fix implicit-return in homematic (#122922) --- homeassistant/components/homematic/__init__.py | 3 +++ homeassistant/components/homematic/climate.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/homeassistant/components/homematic/__init__.py b/homeassistant/components/homematic/__init__.py index 80345866b1f..f0fc2a40278 100644 --- a/homeassistant/components/homematic/__init__.py +++ b/homeassistant/components/homematic/__init__.py @@ -573,6 +573,8 @@ def _create_ha_id(name, channel, param, count): if count > 1 and param is not None: return f"{name} {channel} {param}" + raise ValueError(f"Unable to create unique id for count:{count} and param:{param}") + def _hm_event_handler(hass, interface, device, caller, attribute, value): """Handle all pyhomematic device events.""" @@ -621,3 +623,4 @@ def _device_from_servicecall(hass, service): for devices in hass.data[DATA_HOMEMATIC].devices.values(): if address in devices: return devices[address] + return None diff --git a/homeassistant/components/homematic/climate.py b/homeassistant/components/homematic/climate.py index bf1295df6be..2be28487cbb 100644 --- a/homeassistant/components/homematic/climate.py +++ b/homeassistant/components/homematic/climate.py @@ -125,6 +125,7 @@ class HMThermostat(HMDevice, ClimateEntity): for node in HM_HUMI_MAP: if node in self._data: return self._data[node] + return None @property def current_temperature(self): @@ -132,6 +133,7 @@ class HMThermostat(HMDevice, ClimateEntity): for node in HM_TEMP_MAP: if node in self._data: return self._data[node] + return None @property def target_temperature(self): From 60117ae1504efa973c238e174e7dc76001d0383f Mon Sep 17 
00:00:00 2001 From: Erik Montnemery Date: Thu, 8 Aug 2024 17:30:39 +0200 Subject: [PATCH 0500/1635] Revert "Fix blocking I/O while validating config schema" (#123377) --- homeassistant/config.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/homeassistant/config.py b/homeassistant/config.py index 18c833d4c75..948ab342e79 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -817,9 +817,7 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non This method is a coroutine. """ - # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir - # so we need to run it in an executor job. - config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) + config = CORE_CONFIG_SCHEMA(config) # Only load auth during startup. if not hasattr(hass, "auth"): @@ -1535,15 +1533,9 @@ async def async_process_component_config( return IntegrationConfigInfo(None, config_exceptions) # No custom config validator, proceed with schema validation - if config_schema := getattr(component, "CONFIG_SCHEMA", None): + if hasattr(component, "CONFIG_SCHEMA"): try: - if domain in config: - # cv.isdir, cv.isfile, cv.isdevice are not async - # friendly so we need to run this in executor - schema = await hass.async_add_executor_job(config_schema, config) - else: - schema = config_schema(config) - return IntegrationConfigInfo(schema, []) + return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), []) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, From c2f2a868c4973de3d2fac019f6b0468372b0e496 Mon Sep 17 00:00:00 2001 From: fustom Date: Thu, 8 Aug 2024 18:49:47 +0200 Subject: [PATCH 0501/1635] Fix limit and order property for transmission integration (#123305) --- homeassistant/components/transmission/coordinator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/transmission/coordinator.py b/homeassistant/components/transmission/coordinator.py index d6b5b695656..e0930bd9e9e 100644 --- a/homeassistant/components/transmission/coordinator.py +++ b/homeassistant/components/transmission/coordinator.py @@ -55,12 +55,12 @@ class TransmissionDataUpdateCoordinator(DataUpdateCoordinator[SessionStats]): @property def limit(self) -> int: """Return limit.""" - return self.config_entry.data.get(CONF_LIMIT, DEFAULT_LIMIT) + return self.config_entry.options.get(CONF_LIMIT, DEFAULT_LIMIT) @property def order(self) -> str: """Return order.""" - return self.config_entry.data.get(CONF_ORDER, DEFAULT_ORDER) + return self.config_entry.options.get(CONF_ORDER, DEFAULT_ORDER) async def _async_update_data(self) -> SessionStats: """Update transmission data.""" From ddc94030a6a7d6b422991d02b0ed909c348f5cd5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 8 Aug 2024 18:50:49 +0200 Subject: [PATCH 0502/1635] Fix raise-within-try in config validation helper (#123353) --- homeassistant/helpers/config_validation.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index cd6670dc597..01960b6c0c3 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -770,9 +770,9 @@ def socket_timeout(value: Any | None) -> object: float_value = float(value) if float_value > 0.0: return float_value - raise vol.Invalid("Invalid socket timeout value. 
float > 0.0 required.") except Exception as err: raise vol.Invalid(f"Invalid socket timeout: {err}") from err + raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.") def url( From b498c898606b550f7f585b5d56a794ae81f0d18e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 8 Aug 2024 18:51:19 +0200 Subject: [PATCH 0503/1635] Fix raise-within-try in language util (#123354) --- homeassistant/util/language.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/homeassistant/util/language.py b/homeassistant/util/language.py index 8644f8014b6..8a82de9065f 100644 --- a/homeassistant/util/language.py +++ b/homeassistant/util/language.py @@ -137,9 +137,6 @@ class Dialect: region_idx = pref_regions.index(self.region) elif dialect.region is not None: region_idx = pref_regions.index(dialect.region) - else: - # Can't happen, but mypy is not smart enough - raise ValueError # More preferred regions are at the front. # Add 1 to boost above a weak match where no regions are set. From 634a2b22dc09188a4ef02b2d93746efe71077e7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Thu, 8 Aug 2024 19:23:52 +0200 Subject: [PATCH 0504/1635] Improve Airzone Cloud sensors availability (#123383) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit airzone_cloud: improve sensors availability Make sensor entities unavailable instead of providing an unknown state. Signed-off-by: Álvaro Fernández Rojas --- homeassistant/components/airzone_cloud/binary_sensor.py | 5 +++++ homeassistant/components/airzone_cloud/sensor.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/homeassistant/components/airzone_cloud/binary_sensor.py b/homeassistant/components/airzone_cloud/binary_sensor.py index f22515155f1..3d6f6b42901 100644 --- a/homeassistant/components/airzone_cloud/binary_sensor.py +++ b/homeassistant/components/airzone_cloud/binary_sensor.py @@ -161,6 +161,11 @@ class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity): entity_description: AirzoneBinarySensorEntityDescription + @property + def available(self) -> bool: + """Return Airzone Cloud binary sensor availability.""" + return super().available and self.is_on is not None + @callback def _handle_coordinator_update(self) -> None: """Update attributes when the coordinator updates.""" diff --git a/homeassistant/components/airzone_cloud/sensor.py b/homeassistant/components/airzone_cloud/sensor.py index a3a456edd03..9f0ee01aca2 100644 --- a/homeassistant/components/airzone_cloud/sensor.py +++ b/homeassistant/components/airzone_cloud/sensor.py @@ -189,6 +189,11 @@ async def async_setup_entry( class AirzoneSensor(AirzoneEntity, SensorEntity): """Define an Airzone Cloud sensor.""" + @property + def available(self) -> bool: + """Return Airzone Cloud sensor availability.""" + return super().available and self.native_value is not None + @callback def _handle_coordinator_update(self) -> None: """Update attributes when the coordinator updates.""" From 2343f5e40fcc5ad60844e92843e1caa2fc9ce16e Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Thu, 8 Aug 2024 19:28:46 +0200 Subject: [PATCH 0505/1635] Reolink Chime online status and ability to remove (#123301) * Add chime available * allow removing a Reolink chime * Allow removal if doorbell itself removed * fix tests * Add tests * fix styling --- homeassistant/components/reolink/__init__.py | 40 ++++++++++ homeassistant/components/reolink/entity.py | 5 ++ tests/components/reolink/conftest.py | 32 
++++++++ tests/components/reolink/test_init.py | 82 ++++++++++++++++++-- tests/components/reolink/test_select.py | 37 ++++----- 5 files changed, 168 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index cc293d970b2..a319024633c 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -188,6 +188,46 @@ async def async_remove_config_entry_device( host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host (device_uid, ch, is_chime) = get_device_uid_and_ch(device, host) + if is_chime: + await host.api.get_state(cmd="GetDingDongList") + chime = host.api.chime(ch) + if ( + chime is None + or chime.connect_state is None + or chime.connect_state < 0 + or chime.channel not in host.api.channels + ): + _LOGGER.debug( + "Removing Reolink chime %s with id %s, " + "since it is not coupled to %s anymore", + device.name, + ch, + host.api.nvr_name, + ) + return True + + # remove the chime from the host + await chime.remove() + await host.api.get_state(cmd="GetDingDongList") + if chime.connect_state < 0: + _LOGGER.debug( + "Removed Reolink chime %s with id %s from %s", + device.name, + ch, + host.api.nvr_name, + ) + return True + + _LOGGER.warning( + "Cannot remove Reolink chime %s with id %s, because it is still connected " + "to %s, please first remove the chime " + "in the reolink app", + device.name, + ch, + host.api.nvr_name, + ) + return False + if not host.api.is_nvr or ch is None: _LOGGER.warning( "Cannot remove Reolink device %s, because it is not a camera connected " diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index 053792ad667..c47822e125c 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -190,3 +190,8 @@ class ReolinkChimeCoordinatorEntity(ReolinkChannelCoordinatorEntity): serial_number=str(chime.dev_id), configuration_url=self._conf_url, ) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self._chime.online and super().available diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index c74cac76192..981dcc30e60 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -4,6 +4,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from reolink_aio.api import Chime from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL @@ -107,6 +108,14 @@ def reolink_connect_class() -> Generator[MagicMock]: host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]} host_mock.checked_api_versions = {"GetEvents": 1} host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]} + + # enums + host_mock.whiteled_mode.return_value = 1 + host_mock.whiteled_mode_list.return_value = ["off", "auto"] + host_mock.doorbell_led.return_value = "Off" + host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] + host_mock.auto_track_method.return_value = 3 + host_mock.daynight_state.return_value = "Black&White" yield host_mock_class @@ -145,3 +154,26 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: ) config_entry.add_to_hass(hass) return config_entry + + +@pytest.fixture +def test_chime(reolink_connect: MagicMock) -> None: + """Mock a reolink chime.""" + TEST_CHIME = Chime( + 
host=reolink_connect, + dev_id=12345678, + channel=0, + ) + TEST_CHIME.name = "Test chime" + TEST_CHIME.volume = 3 + TEST_CHIME.connect_state = 2 + TEST_CHIME.led_state = True + TEST_CHIME.event_info = { + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + } + + reolink_connect.chime_list = [TEST_CHIME] + reolink_connect.chime.return_value = TEST_CHIME + return TEST_CHIME diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 4f745530b6b..5334e171e5e 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -6,6 +6,7 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest +from reolink_aio.api import Chime from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError from homeassistant.components.reolink import ( @@ -40,6 +41,8 @@ from tests.typing import WebSocketGenerator pytestmark = pytest.mark.usefixtures("reolink_connect", "reolink_platforms") +CHIME_MODEL = "Reolink Chime" + async def test_wait(*args, **key_args): """Ensure a mocked function takes a bit of time to be able to timeout in test.""" @@ -224,13 +227,9 @@ async def test_removing_disconnected_cams( device_models = [device.model for device in device_entries] assert sorted(device_models) == sorted([TEST_HOST_MODEL, TEST_CAM_MODEL]) - # reload integration after 'disconnecting' a camera. + # Try to remove the device after 'disconnecting' a camera. if attr is not None: setattr(reolink_connect, attr, value) - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_reload(config_entry.entry_id) - await hass.async_block_till_done() - expected_success = TEST_CAM_MODEL not in expected_models for device in device_entries: if device.model == TEST_CAM_MODEL: @@ -244,6 +243,79 @@ async def test_removing_disconnected_cams( assert sorted(device_models) == sorted(expected_models) +@pytest.mark.parametrize( + ("attr", "value", "expected_models"), + [ + ( + None, + None, + [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL], + ), + ( + "connect_state", + -1, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ( + "remove", + -1, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ], +) +async def test_removing_chime( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + attr: str | None, + value: Any, + expected_models: list[str], +) -> None: + """Test removing a chime.""" + reolink_connect.channels = [0] + assert await async_setup_component(hass, "config", {}) + client = await hass_ws_client(hass) + # setup CH 0 and NVR switch entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted( + [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL] + ) + + if attr == "remove": + + async def test_remove_chime(*args, **key_args): + """Remove chime.""" + test_chime.connect_state = -1 + + test_chime.remove = test_remove_chime + elif attr is not None: + setattr(test_chime, attr, value) + + # Try to remove the 
device after 'disconnecting' a chime. + expected_success = CHIME_MODEL not in expected_models + for device in device_entries: + if device.model == CHIME_MODEL: + response = await client.remove_device(device.id, config_entry.entry_id) + assert response["success"] == expected_success + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted(expected_models) + + @pytest.mark.parametrize( ( "original_id", diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 53c1e494b3d..5536797d7d3 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -33,8 +33,6 @@ async def test_floodlight_mode_select( entity_registry: er.EntityRegistry, ) -> None: """Test select entity with floodlight_mode.""" - reolink_connect.whiteled_mode.return_value = 1 - reolink_connect.whiteled_mode_list.return_value = ["off", "auto"] with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() @@ -72,6 +70,14 @@ async def test_floodlight_mode_select( blocking=True, ) + reolink_connect.whiteled_mode.return_value = -99 # invalid value + async_fire_time_changed( + hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) + ) + await hass.async_block_till_done() + + assert hass.states.is_state(entity_id, STATE_UNKNOWN) + async def test_play_quick_reply_message( hass: HomeAssistant, @@ -103,25 +109,10 @@ async def test_chime_select( hass: HomeAssistant, config_entry: MockConfigEntry, reolink_connect: MagicMock, + test_chime: Chime, entity_registry: er.EntityRegistry, ) -> None: """Test chime select entity.""" - TEST_CHIME = Chime( - host=reolink_connect, - dev_id=12345678, - channel=0, - ) - TEST_CHIME.name = "Test chime" - TEST_CHIME.volume = 3 - TEST_CHIME.led_state = True - TEST_CHIME.event_info = { - "md": {"switch": 0, "musicId": 0}, - "people": {"switch": 0, "musicId": 1}, - "visitor": {"switch": 1, "musicId": 2}, - } - - reolink_connect.chime_list = [TEST_CHIME] - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): assert await hass.config_entries.async_setup(config_entry.entry_id) is True await hass.async_block_till_done() @@ -131,16 +122,16 @@ async def test_chime_select( entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" assert hass.states.is_state(entity_id, "pianokey") - TEST_CHIME.set_tone = AsyncMock() + test_chime.set_tone = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, {ATTR_ENTITY_ID: entity_id, "option": "off"}, blocking=True, ) - TEST_CHIME.set_tone.assert_called_once() + test_chime.set_tone.assert_called_once() - TEST_CHIME.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) + test_chime.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) with pytest.raises(HomeAssistantError): await hass.services.async_call( SELECT_DOMAIN, @@ -149,7 +140,7 @@ async def test_chime_select( blocking=True, ) - TEST_CHIME.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) + test_chime.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) with pytest.raises(ServiceValidationError): await hass.services.async_call( SELECT_DOMAIN, @@ -158,7 +149,7 @@ async def test_chime_select( blocking=True, ) - TEST_CHIME.event_info = {} + test_chime.event_info = {} 
async_fire_time_changed( hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) ) From 69740e865c82c82fc27b79e9732787dc677feccd Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 8 Aug 2024 15:52:09 -0500 Subject: [PATCH 0506/1635] Reduce number of aiohttp.TCPConnector cleanup_closed checks to one per minute (#123268) --- homeassistant/helpers/aiohttp_client.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 6f52569c38c..d61f889d4b5 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -285,6 +285,21 @@ def _make_key( return (verify_ssl, family) +class HomeAssistantTCPConnector(aiohttp.TCPConnector): + """Home Assistant TCP Connector. + + Same as aiohttp.TCPConnector but with a longer cleanup_closed timeout. + + By default the cleanup_closed timeout is 2 seconds. This is too short + for Home Assistant since we churn through a lot of connections. We set + it to 60 seconds to reduce the overhead of aborting TLS connections + that are likely already closed. + """ + + # abort transport after 60 seconds (cleanup broken connections) + _cleanup_closed_period = 60.0 + + @callback def _async_get_connector( hass: HomeAssistant, @@ -306,7 +321,7 @@ def _async_get_connector( else: ssl_context = ssl_util.get_default_no_verify_context() - connector = aiohttp.TCPConnector( + connector = HomeAssistantTCPConnector( family=family, enable_cleanup_closed=ENABLE_CLEANUP_CLOSED, ssl=ssl_context, From 03ba8f6173a708e2dd800e2ee1ee5b0ed5697ccf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 8 Aug 2024 17:07:22 -0500 Subject: [PATCH 0507/1635] Bump aiohttp to 3.10.2 (#123394) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 10823557b94..7c0255b9eae 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.1 +aiohttp==3.10.2 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 9e4fe36243b..07af385dce4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.1", + "aiohttp==3.10.2", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index f35fffe680a..40f2165a61a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.1 +aiohttp==3.10.2 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From 00c1a3fd4e473d415d89447cb2b89326dd8eeac3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 8 Aug 2024 23:19:12 -0500 Subject: [PATCH 0508/1635] Ensure legacy event foreign key is removed from the states table when a previous rebuild failed (#123388) * Ensure legacy event foreign key is removed from the states table If the system ran out of disk space removing the FK, it would fail. #121938 fixed that to try again, however that PR was made ineffective by #122069 since it will never reach the check. 
To solve this, the migration version is incremented to 2, and the migration is no longer marked as done unless the rebuild /fk removal is successful. * fix logic for mysql * fix test * asserts * coverage * coverage * narrow test * fixes * split tests * should have skipped * fixture must be used --- .../components/recorder/migration.py | 24 +- tests/components/recorder/test_migrate.py | 14 +- .../components/recorder/test_v32_migration.py | 346 ++++++++++++++++++ 3 files changed, 368 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 2932ea484c9..a41de07e243 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -632,7 +632,7 @@ def _update_states_table_with_foreign_key_options( def _drop_foreign_key_constraints( session_maker: Callable[[], Session], engine: Engine, table: str, column: str -) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: +) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]: """Drop foreign key constraints for a table on specific columns.""" inspector = sqlalchemy.inspect(engine) dropped_constraints = [ @@ -649,6 +649,7 @@ def _drop_foreign_key_constraints( if foreign_key["name"] and foreign_key["constrained_columns"] == [column] ] + fk_remove_ok = True for drop in drops: with session_scope(session=session_maker()) as session: try: @@ -660,8 +661,9 @@ def _drop_foreign_key_constraints( TABLE_STATES, column, ) + fk_remove_ok = False - return dropped_constraints + return fk_remove_ok, dropped_constraints def _restore_foreign_key_constraints( @@ -1481,7 +1483,7 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): for column in columns for dropped_constraint in _drop_foreign_key_constraints( self.session_maker, self.engine, table, column - ) + )[1] ] _LOGGER.debug("Dropped foreign key constraints: %s", dropped_constraints) @@ -1956,14 +1958,15 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: if instance.dialect_name == SupportedDialect.SQLITE: # SQLite does not support dropping foreign key constraints # so we have to rebuild the table - rebuild_sqlite_table(session_maker, instance.engine, States) + fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States) else: - _drop_foreign_key_constraints( + fk_remove_ok, _ = _drop_foreign_key_constraints( session_maker, instance.engine, TABLE_STATES, "event_id" ) - _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) - instance.use_legacy_events_index = False - _mark_migration_done(session, EventIDPostMigration) + if fk_remove_ok: + _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + instance.use_legacy_events_index = False + _mark_migration_done(session, EventIDPostMigration) return True @@ -2419,6 +2422,7 @@ class EventIDPostMigration(BaseRunTimeMigration): migration_id = "event_id_post_migration" task = MigrationTask + migration_version = 2 @staticmethod def migrate_data(instance: Recorder) -> bool: @@ -2469,7 +2473,7 @@ def _mark_migration_done( def rebuild_sqlite_table( session_maker: Callable[[], Session], engine: Engine, table: type[Base] -) -> None: +) -> bool: """Rebuild an SQLite table. 
This must only be called after all migrations are complete @@ -2524,8 +2528,10 @@ def rebuild_sqlite_table( # Swallow the exception since we do not want to ever raise # an integrity error as it would cause the database # to be discarded and recreated from scratch + return False else: _LOGGER.warning("Rebuilding SQLite table %s finished", orig_name) + return True finally: with session_scope(session=session_maker()) as session: # Step 12 - Re-enable foreign keys diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index dc99ddefa3b..e55793caad7 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -748,7 +748,7 @@ def test_rebuild_sqlite_states_table(recorder_db_url: str) -> None: session.add(States(state="on")) session.commit() - migration.rebuild_sqlite_table(session_maker, engine, States) + assert migration.rebuild_sqlite_table(session_maker, engine, States) is True with session_scope(session=session_maker()) as session: assert session.query(States).count() == 1 @@ -776,13 +776,13 @@ def test_rebuild_sqlite_states_table_missing_fails( session.connection().execute(text("DROP TABLE states")) session.commit() - migration.rebuild_sqlite_table(session_maker, engine, States) + assert migration.rebuild_sqlite_table(session_maker, engine, States) is False assert "Error recreating SQLite table states" in caplog.text caplog.clear() # Now rebuild the events table to make sure the database did not # get corrupted - migration.rebuild_sqlite_table(session_maker, engine, Events) + assert migration.rebuild_sqlite_table(session_maker, engine, Events) is True with session_scope(session=session_maker()) as session: assert session.query(Events).count() == 1 @@ -812,7 +812,7 @@ def test_rebuild_sqlite_states_table_extra_columns( text("ALTER TABLE states ADD COLUMN extra_column TEXT") ) - migration.rebuild_sqlite_table(session_maker, engine, States) + assert migration.rebuild_sqlite_table(session_maker, engine, States) is True assert "Error recreating SQLite table states" not in caplog.text with session_scope(session=session_maker()) as session: @@ -905,7 +905,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: for table, column in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column - ) + )[1] ] assert dropped_constraints_1 == expected_dropped_constraints[db_engine] @@ -917,7 +917,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: for table, column in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column - ) + )[1] ] assert dropped_constraints_2 == [] @@ -936,7 +936,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: for table, column in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column - ) + )[1] ] assert dropped_constraints_3 == expected_dropped_constraints[db_engine] diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 9956fec8a09..5266e55851c 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -7,6 +7,7 @@ from unittest.mock import patch import pytest from sqlalchemy import create_engine, inspect +from sqlalchemy.exc import OperationalError, SQLAlchemyError from 
sqlalchemy.orm import Session from homeassistant.components import recorder @@ -444,3 +445,348 @@ async def test_migrate_can_resume_ix_states_event_id_removed( assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None await hass.async_stop() + + +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) +@pytest.mark.parametrize("enable_migrate_event_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_out_of_disk_space_while_rebuild_states_table( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test that we can recover from out of disk space while rebuilding the states table. + + This case tests the migration still happens if + ix_states_event_id is removed from the states table. + """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", + ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert 
instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_states_entity_id_last_updated_ts" in states_index_names + + # Simulate out of disk space while rebuilding the states table by + # - patching CreateTable to raise SQLAlchemyError for SQLite + # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL + with ( + patch( + "homeassistant.components.recorder.migration.CreateTable", + side_effect=SQLAlchemyError, + ), + patch( + "homeassistant.components.recorder.migration.DropConstraint", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert "Error recreating SQLite table states" in caplog.text + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) + + await hass.async_stop() + + # Now run it again to verify the table rebuild tries again + caplog.clear() + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is False + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert "Rebuilding SQLite table states finished" in caplog.text + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None + + await hass.async_stop() + + +@pytest.mark.usefixtures("skip_by_db_engine") +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.parametrize("enable_migrate_event_ids", [True]) +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_out_of_disk_space_while_removing_foreign_key( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + recorder_db_url: str, +) -> None: + """Test that we can recover from out of disk space while removing the foreign key. + + This case tests the migration still happens if + ix_states_event_id is removed from the states table. 
+ """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + one_second_past = now - timedelta(seconds=1) + mock_state = State( + "sensor.test", + "old", + {"last_reset": now.isoformat()}, + last_changed=one_second_past, + last_updated=now, + ) + state_changed_event = Event( + EVENT_STATE_CHANGED, + { + "entity_id": "sensor.test", + "old_state": None, + "new_state": mock_state, + }, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + custom_event = Event( + "custom_event", + {"entity_id": "sensor.custom"}, + EventOrigin.local, + time_fired_timestamp=now.timestamp(), + ) + number_of_migrations = 5 + + def _get_event_id_foreign_keys(): + assert instance.engine is not None + return next( + ( + fk # type: ignore[misc] + for fk in inspect(instance.engine).get_foreign_keys("states") + if fk["constrained_columns"] == ["event_id"] + ), + None, + ) + + def _get_states_index_names(): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes("states") + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), + patch.object(core, "StatesMeta", old_db_schema.StatesMeta), + patch.object(core, "EventTypes", old_db_schema.EventTypes), + patch.object(core, "EventData", old_db_schema.EventData), + patch.object(core, "States", old_db_schema.States), + patch.object(core, "Events", old_db_schema.Events), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), + patch( + "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.Events.from_event(custom_event)) + session.add(old_db_schema.States.from_event(state_changed_event)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + await instance.async_add_executor_job( + migration._drop_index, + instance.get_session, + "states", + "ix_states_event_id", + ) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert ( + await instance.async_add_executor_job(_get_event_id_foreign_keys) + is not None + ) + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_states_entity_id_last_updated_ts" in states_index_names + + # Simulate out of disk space while removing the foreign key from the states table by + # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL + with ( + patch( + "homeassistant.components.recorder.migration.DropConstraint", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. 
+ for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) + + await hass.async_stop() + + # Now run it again to verify the table rebuild tries again + caplog.clear() + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job(_get_states_index_names) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is False + assert "ix_states_entity_id_last_updated_ts" not in states_index_names + assert "ix_states_event_id" not in states_index_names + assert await instance.async_add_executor_job(_get_event_id_foreign_keys) is None + + await hass.async_stop() From f8e1c2cfd4bd393a376231e5273ac208bd6929c6 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Fri, 9 Aug 2024 16:38:12 +1000 Subject: [PATCH 0509/1635] Rework OAuth in Tesla Fleet (#123324) * Rework Oauth * Improve docstrings * Update homeassistant/components/tesla_fleet/oauth.py Co-authored-by: Martin Hjelmare * review feedback * Add tests for user creds --------- Co-authored-by: Martin Hjelmare --- .../components/tesla_fleet/__init__.py | 7 +- .../tesla_fleet/application_credentials.py | 62 +------------ .../components/tesla_fleet/config_flow.py | 9 +- homeassistant/components/tesla_fleet/const.py | 1 - homeassistant/components/tesla_fleet/oauth.py | 86 +++++++++++++++++++ .../tesla_fleet/test_config_flow.py | 86 ++++++++++++++++++- 6 files changed, 181 insertions(+), 70 deletions(-) create mode 100644 homeassistant/components/tesla_fleet/oauth.py diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 4eac1168674..45657b3d8fb 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -13,7 +13,6 @@ from tesla_fleet_api.exceptions import ( TeslaFleetError, ) -from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform from homeassistant.core import HomeAssistant @@ -27,15 +26,15 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from .application_credentials import TeslaOAuth2Implementation from .config_flow import OAuth2FlowHandler -from .const import CLIENT_ID, DOMAIN, LOGGER, MODELS, NAME +from .const import DOMAIN, LOGGER, MODELS from .coordinator import ( TeslaFleetEnergySiteInfoCoordinator, TeslaFleetEnergySiteLiveCoordinator, TeslaFleetVehicleDataCoordinator, ) from .models import TeslaFleetData, TeslaFleetEnergyData, TeslaFleetVehicleData +from .oauth import TeslaSystemImplementation PLATFORMS: Final = [Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.SENSOR] @@ -56,7 +55,7 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - OAuth2FlowHandler.async_register_implementation( hass, - TeslaOAuth2Implementation(hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME)), + TeslaSystemImplementation(hass), ) implementation = await async_get_config_entry_implementation(hass, entry) diff --git a/homeassistant/components/tesla_fleet/application_credentials.py b/homeassistant/components/tesla_fleet/application_credentials.py index 32e16cc9244..0ef38567b65 100644 --- a/homeassistant/components/tesla_fleet/application_credentials.py +++ b/homeassistant/components/tesla_fleet/application_credentials.py @@ -1,72 +1,18 @@ """Application Credentials platform the Tesla Fleet integration.""" -import base64 -import hashlib -import secrets -from typing import Any - -from homeassistant.components.application_credentials import ( - AuthImplementation, - AuthorizationServer, - ClientCredential, -) +from homeassistant.components.application_credentials import ClientCredential from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow -from .const import AUTHORIZE_URL, DOMAIN, SCOPES, TOKEN_URL - -AUTH_SERVER = AuthorizationServer(AUTHORIZE_URL, TOKEN_URL) +from .oauth import TeslaUserImplementation async def async_get_auth_implementation( hass: HomeAssistant, auth_domain: str, credential: ClientCredential ) -> config_entry_oauth2_flow.AbstractOAuth2Implementation: """Return auth implementation.""" - return TeslaOAuth2Implementation( + return TeslaUserImplementation( hass, - DOMAIN, + auth_domain, credential, ) - - -class TeslaOAuth2Implementation(AuthImplementation): - """Tesla Fleet API Open Source Oauth2 implementation.""" - - def __init__( - self, hass: HomeAssistant, domain: str, credential: ClientCredential - ) -> None: - """Initialize local auth implementation.""" - self.hass = hass - self._domain = domain - - # Setup PKCE - self.code_verifier = secrets.token_urlsafe(32) - hashed_verifier = hashlib.sha256(self.code_verifier.encode()).digest() - self.code_challenge = ( - base64.urlsafe_b64encode(hashed_verifier).decode().replace("=", "") - ) - super().__init__( - hass, - domain, - credential, - AUTH_SERVER, - ) - - @property - def extra_authorize_data(self) -> dict[str, Any]: - """Extra data that needs to be appended to the authorize url.""" - return { - "scope": " ".join(SCOPES), - "code_challenge": self.code_challenge, # PKCE - } - - async def async_resolve_external_data(self, external_data: Any) -> dict: - """Resolve the authorization code to tokens.""" - return await self._token_request( - { - "grant_type": "authorization_code", - "code": external_data["code"], - "redirect_uri": external_data["state"]["redirect_uri"], - "code_verifier": self.code_verifier, # PKCE - } - ) diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index c09ea78177f..0ffdca1aec6 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -8,12 +8,11 @@ from typing import Any import jwt -from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow -from .application_credentials import TeslaOAuth2Implementation -from .const import CLIENT_ID, DOMAIN, LOGGER, NAME +from .const import DOMAIN, LOGGER +from .oauth import TeslaSystemImplementation class OAuth2FlowHandler( @@ 
-35,9 +34,7 @@ class OAuth2FlowHandler( """Handle a flow start.""" self.async_register_implementation( self.hass, - TeslaOAuth2Implementation( - self.hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME) - ), + TeslaSystemImplementation(self.hass), ) return await super().async_step_user() diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index 9d78716a13e..081225c296c 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -13,7 +13,6 @@ CONF_REFRESH_TOKEN = "refresh_token" LOGGER = logging.getLogger(__package__) -NAME = "Home Assistant" CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d" AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize" TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token" diff --git a/homeassistant/components/tesla_fleet/oauth.py b/homeassistant/components/tesla_fleet/oauth.py new file mode 100644 index 00000000000..00976abf56f --- /dev/null +++ b/homeassistant/components/tesla_fleet/oauth.py @@ -0,0 +1,86 @@ +"""Provide oauth implementations for the Tesla Fleet integration.""" + +import base64 +import hashlib +import secrets +from typing import Any + +from homeassistant.components.application_credentials import ( + AuthImplementation, + AuthorizationServer, + ClientCredential, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import AUTHORIZE_URL, CLIENT_ID, DOMAIN, SCOPES, TOKEN_URL + + +class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementation): + """Tesla Fleet API open source Oauth2 implementation.""" + + code_verifier: str + code_challenge: str + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize open source Oauth2 implementation.""" + + # Setup PKCE + self.code_verifier = secrets.token_urlsafe(32) + hashed_verifier = hashlib.sha256(self.code_verifier.encode()).digest() + self.code_challenge = ( + base64.urlsafe_b64encode(hashed_verifier).decode().replace("=", "") + ) + super().__init__( + hass, + DOMAIN, + CLIENT_ID, + "", + AUTHORIZE_URL, + TOKEN_URL, + ) + + @property + def name(self) -> str: + """Name of the implementation.""" + return "Built-in open source client ID" + + @property + def extra_authorize_data(self) -> dict[str, Any]: + """Extra data that needs to be appended to the authorize url.""" + return { + "scope": " ".join(SCOPES), + "code_challenge": self.code_challenge, # PKCE + } + + async def async_resolve_external_data(self, external_data: Any) -> dict: + """Resolve the authorization code to tokens.""" + return await self._token_request( + { + "grant_type": "authorization_code", + "code": external_data["code"], + "redirect_uri": external_data["state"]["redirect_uri"], + "code_verifier": self.code_verifier, # PKCE + } + ) + + +class TeslaUserImplementation(AuthImplementation): + """Tesla Fleet API user Oauth2 implementation.""" + + def __init__( + self, hass: HomeAssistant, auth_domain: str, credential: ClientCredential + ) -> None: + """Initialize user Oauth2 implementation.""" + + super().__init__( + hass, + auth_domain, + credential, + AuthorizationServer(AUTHORIZE_URL, TOKEN_URL), + ) + + @property + def extra_authorize_data(self) -> dict[str, Any]: + """Extra data that needs to be appended to the authorize url.""" + return {"scope": " ".join(SCOPES)} diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index bd1c7d7c2b8..45dbe6ca598 100644 --- 
a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -5,6 +5,10 @@ from urllib.parse import parse_qs, urlparse import pytest +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) from homeassistant.components.tesla_fleet.const import ( AUTHORIZE_URL, CLIENT_ID, @@ -16,6 +20,7 @@ from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -26,7 +31,7 @@ UNIQUE_ID = "uid" @pytest.fixture -async def access_token(hass: HomeAssistant) -> dict[str, str | list[str]]: +async def access_token(hass: HomeAssistant) -> str: """Return a valid access token.""" return config_entry_oauth2_flow._encode_jwt( hass, @@ -111,6 +116,85 @@ async def test_full_flow( assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow_user_cred( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token, +) -> None: + """Check full flow.""" + + # Create user application credential + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("user_client_id", "user_client_secret"), + "user_cred", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "user_cred"} + ) + assert result["type"] is FlowResultType.EXTERNAL_STEP + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + + assert result["url"].startswith(AUTHORIZE_URL) + parsed_url = urlparse(result["url"]) + parsed_query = parse_qs(parsed_url.query) + assert parsed_query["response_type"][0] == "code" + assert parsed_query["client_id"][0] == "user_client_id" + assert parsed_query["redirect_uri"][0] == REDIRECT + assert parsed_query["state"][0] == state + assert parsed_query["scope"][0] == " ".join(SCOPES) + assert "code_challenge" not in parsed_query # Ensure not a PKCE flow + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + "expires_in": 60, + }, + ) + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == UNIQUE_ID + assert "result" in result + assert result["result"].unique_id == UNIQUE_ID + assert "token" in result["result"].data + assert 
result["result"].data["token"]["access_token"] == access_token + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + + @pytest.mark.usefixtures("current_request_with_host") async def test_reauthentication( hass: HomeAssistant, From 84d6f5ed071b39df6af3dddce29f558b0ef0b5e7 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Fri, 9 Aug 2024 21:43:02 +1200 Subject: [PATCH 0510/1635] Remove ESPHome legacy entity naming (#123436) * Remove ESPHome legacy entity naming * Update test entity_ids to use sanitized name instead of object_id --- homeassistant/components/esphome/entity.py | 20 +-- .../esphome/test_alarm_control_panel.py | 22 +-- .../components/esphome/test_binary_sensor.py | 10 +- tests/components/esphome/test_button.py | 8 +- tests/components/esphome/test_camera.py | 36 ++--- tests/components/esphome/test_climate.py | 36 ++--- tests/components/esphome/test_cover.py | 24 +-- tests/components/esphome/test_date.py | 6 +- tests/components/esphome/test_datetime.py | 6 +- tests/components/esphome/test_entity.py | 89 ++++------- tests/components/esphome/test_event.py | 2 +- tests/components/esphome/test_fan.py | 46 +++--- tests/components/esphome/test_light.py | 148 +++++++++--------- tests/components/esphome/test_lock.py | 14 +- tests/components/esphome/test_media_player.py | 24 +-- tests/components/esphome/test_number.py | 10 +- tests/components/esphome/test_select.py | 4 +- tests/components/esphome/test_sensor.py | 38 ++--- tests/components/esphome/test_switch.py | 6 +- tests/components/esphome/test_text.py | 8 +- tests/components/esphome/test_time.py | 6 +- tests/components/esphome/test_update.py | 10 +- tests/components/esphome/test_valve.py | 24 +-- 23 files changed, 275 insertions(+), 322 deletions(-) diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index 6e02f8de869..5f845f4665b 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -176,6 +176,7 @@ ENTITY_CATEGORIES: EsphomeEnumMapper[EsphomeEntityCategory, EntityCategory | Non class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): """Define a base esphome entity.""" + _attr_has_entity_name = True _attr_should_poll = False _static_info: _InfoT _state: _StateT @@ -200,25 +201,6 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)} ) - # - # If `friendly_name` is set, we use the Friendly naming rules, if - # `friendly_name` is not set we make an exception to the naming rules for - # backwards compatibility and use the Legacy naming rules. 
- # - # Friendly naming - # - Friendly name is prepended to entity names - # - Device Name is prepended to entity ids - # - Entity id is constructed from device name and object id - # - # Legacy naming - # - Device name is not prepended to entity names - # - Device name is not prepended to entity ids - # - Entity id is constructed from entity name - # - if not device_info.friendly_name: - return - self._attr_has_entity_name = True - self.entity_id = f"{domain}.{device_info.name}_{entity_info.object_id}" async def async_added_to_hass(self) -> None: """Register callbacks.""" diff --git a/tests/components/esphome/test_alarm_control_panel.py b/tests/components/esphome/test_alarm_control_panel.py index af717ac1b49..33c7be94736 100644 --- a/tests/components/esphome/test_alarm_control_panel.py +++ b/tests/components/esphome/test_alarm_control_panel.py @@ -58,7 +58,7 @@ async def test_generic_alarm_control_panel_requires_code( user_service=user_service, states=states, ) - state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") + state = hass.states.get("alarm_control_panel.test_my_alarm_control_panel") assert state is not None assert state.state == STATE_ALARM_ARMED_AWAY @@ -66,7 +66,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_AWAY, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -80,7 +80,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_CUSTOM_BYPASS, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -94,7 +94,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_HOME, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -108,7 +108,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_NIGHT, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -122,7 +122,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_VACATION, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -136,7 +136,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_TRIGGER, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -150,7 +150,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_DISARM, { - ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -193,14 +193,14 @@ async def test_generic_alarm_control_panel_no_code( user_service=user_service, states=states, ) - state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") + state = 
hass.states.get("alarm_control_panel.test_my_alarm_control_panel") assert state is not None assert state.state == STATE_ALARM_ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_DISARM, - {ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel"}, + {ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel"}, blocking=True, ) mock_client.alarm_control_panel_command.assert_has_calls( @@ -239,6 +239,6 @@ async def test_generic_alarm_control_panel_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") + state = hass.states.get("alarm_control_panel.test_my_alarm_control_panel") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_binary_sensor.py b/tests/components/esphome/test_binary_sensor.py index 3da8a54ff34..c07635eff3b 100644 --- a/tests/components/esphome/test_binary_sensor.py +++ b/tests/components/esphome/test_binary_sensor.py @@ -74,7 +74,7 @@ async def test_binary_sensor_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == hass_state @@ -105,7 +105,7 @@ async def test_status_binary_sensor( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON @@ -135,7 +135,7 @@ async def test_binary_sensor_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -165,12 +165,12 @@ async def test_binary_sensor_has_state_false( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_UNKNOWN mock_device.set_state(BinarySensorState(key=1, state=True, missing_state=False)) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON diff --git a/tests/components/esphome/test_button.py b/tests/components/esphome/test_button.py index 8c120949caa..d3fec2a56d2 100644 --- a/tests/components/esphome/test_button.py +++ b/tests/components/esphome/test_button.py @@ -29,22 +29,22 @@ async def test_button_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("button.test_mybutton") + state = hass.states.get("button.test_my_button") assert state is not None assert state.state == STATE_UNKNOWN await hass.services.async_call( BUTTON_DOMAIN, SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.test_mybutton"}, + {ATTR_ENTITY_ID: "button.test_my_button"}, blocking=True, ) mock_client.button_command.assert_has_calls([call(1)]) - state = hass.states.get("button.test_mybutton") + state = hass.states.get("button.test_my_button") assert state is not None assert state.state != STATE_UNKNOWN await mock_device.mock_disconnect(False) - state = hass.states.get("button.test_mybutton") + state = hass.states.get("button.test_my_button") assert state is not None assert 
state.state == STATE_UNAVAILABLE diff --git a/tests/components/esphome/test_camera.py b/tests/components/esphome/test_camera.py index c6a61cd18e8..680cda00944 100644 --- a/tests/components/esphome/test_camera.py +++ b/tests/components/esphome/test_camera.py @@ -53,7 +53,7 @@ async def test_camera_single_image( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE @@ -63,9 +63,9 @@ async def test_camera_single_image( mock_client.request_single_image = _mock_camera_image client = await hass_client() - resp = await client.get("/api/camera_proxy/camera.test_mycamera") + resp = await client.get("/api/camera_proxy/camera.test_my_camera") await hass.async_block_till_done() - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE @@ -101,15 +101,15 @@ async def test_camera_single_image_unavailable_before_requested( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE await mock_device.mock_disconnect(False) client = await hass_client() - resp = await client.get("/api/camera_proxy/camera.test_mycamera") + resp = await client.get("/api/camera_proxy/camera.test_my_camera") await hass.async_block_till_done() - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -142,7 +142,7 @@ async def test_camera_single_image_unavailable_during_request( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE @@ -152,9 +152,9 @@ async def test_camera_single_image_unavailable_during_request( mock_client.request_single_image = _mock_camera_image client = await hass_client() - resp = await client.get("/api/camera_proxy/camera.test_mycamera") + resp = await client.get("/api/camera_proxy/camera.test_my_camera") await hass.async_block_till_done() - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -187,7 +187,7 @@ async def test_camera_stream( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE remaining_responses = 3 @@ -203,9 +203,9 @@ async def test_camera_stream( mock_client.request_single_image = _mock_camera_image client = await hass_client() - resp = await client.get("/api/camera_proxy_stream/camera.test_mycamera") + resp = await client.get("/api/camera_proxy_stream/camera.test_my_camera") await hass.async_block_till_done() - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE @@ -247,16 +247,16 @@ async def test_camera_stream_unavailable( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE await mock_device.mock_disconnect(False) client = await 
hass_client() - await client.get("/api/camera_proxy_stream/camera.test_mycamera") + await client.get("/api/camera_proxy_stream/camera.test_my_camera") await hass.async_block_till_done() - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -287,7 +287,7 @@ async def test_camera_stream_with_disconnection( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_IDLE remaining_responses = 3 @@ -305,8 +305,8 @@ async def test_camera_stream_with_disconnection( mock_client.request_single_image = _mock_camera_image client = await hass_client() - await client.get("/api/camera_proxy_stream/camera.test_mycamera") + await client.get("/api/camera_proxy_stream/camera.test_my_camera") await hass.async_block_till_done() - state = hass.states.get("camera.test_mycamera") + state = hass.states.get("camera.test_my_camera") assert state is not None assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index 4ec7fee6447..a573128bef1 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -78,14 +78,14 @@ async def test_climate_entity( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_myclimate") + state = hass.states.get("climate.test_my_climate") assert state is not None assert state.state == HVACMode.COOL await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_TEMPERATURE: 25}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) @@ -130,14 +130,14 @@ async def test_climate_entity_with_step_and_two_point( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_myclimate") + state = hass.states.get("climate.test_my_climate") assert state is not None assert state.state == HVACMode.COOL await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_TEMPERATURE: 25}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) @@ -147,7 +147,7 @@ async def test_climate_entity_with_step_and_two_point( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - ATTR_ENTITY_ID: "climate.test_myclimate", + ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_HVAC_MODE: HVACMode.AUTO, ATTR_TARGET_TEMP_LOW: 20, ATTR_TARGET_TEMP_HIGH: 30, @@ -209,14 +209,14 @@ async def test_climate_entity_with_step_and_target_temp( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_myclimate") + state = hass.states.get("climate.test_my_climate") assert state is not None assert state.state == HVACMode.COOL await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_TEMPERATURE: 25}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) @@ -226,7 +226,7 @@ async def test_climate_entity_with_step_and_target_temp( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - 
ATTR_ENTITY_ID: "climate.test_myclimate", + ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_HVAC_MODE: HVACMode.AUTO, ATTR_TARGET_TEMP_LOW: 20, ATTR_TARGET_TEMP_HIGH: 30, @@ -249,7 +249,7 @@ async def test_climate_entity_with_step_and_target_temp( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - ATTR_ENTITY_ID: "climate.test_myclimate", + ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_HVAC_MODE: HVACMode.HEAT, }, blocking=True, @@ -267,7 +267,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_PRESET_MODE: "away"}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_PRESET_MODE: "away"}, blocking=True, ) mock_client.climate_command.assert_has_calls( @@ -283,7 +283,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_PRESET_MODE: "preset1"}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_PRESET_MODE: "preset1"}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, custom_preset="preset1")]) @@ -292,7 +292,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_FAN_MODE: FAN_HIGH}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_FAN_MODE: FAN_HIGH}, blocking=True, ) mock_client.climate_command.assert_has_calls( @@ -303,7 +303,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_FAN_MODE: "fan2"}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_FAN_MODE: "fan2"}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, custom_fan_mode="fan2")]) @@ -312,7 +312,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_SWING_MODE: SWING_BOTH}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_SWING_MODE: SWING_BOTH}, blocking=True, ) mock_client.climate_command.assert_has_calls( @@ -362,7 +362,7 @@ async def test_climate_entity_with_humidity( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_myclimate") + state = hass.states.get("climate.test_my_climate") assert state is not None assert state.state == HVACMode.AUTO attributes = state.attributes @@ -374,7 +374,7 @@ async def test_climate_entity_with_humidity( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, - {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HUMIDITY: 23}, + {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_HUMIDITY: 23}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_humidity=23)]) @@ -422,7 +422,7 @@ async def test_climate_entity_with_inf_value( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_myclimate") + state = hass.states.get("climate.test_my_climate") assert state is not None assert state.state == HVACMode.AUTO attributes = state.attributes @@ -484,7 +484,7 @@ async def test_climate_entity_attributes( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_myclimate") + state = hass.states.get("climate.test_my_climate") assert state is not None assert state.state == HVACMode.COOL assert 
state.attributes == snapshot(name="climate-entity-attributes") diff --git a/tests/components/esphome/test_cover.py b/tests/components/esphome/test_cover.py index b190d287198..59eadb3cfd9 100644 --- a/tests/components/esphome/test_cover.py +++ b/tests/components/esphome/test_cover.py @@ -72,7 +72,7 @@ async def test_cover_entity( user_service=user_service, states=states, ) - state = hass.states.get("cover.test_mycover") + state = hass.states.get("cover.test_my_cover") assert state is not None assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 @@ -81,7 +81,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: "cover.test_mycover"}, + {ATTR_ENTITY_ID: "cover.test_my_cover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, position=0.0)]) @@ -90,7 +90,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: "cover.test_mycover"}, + {ATTR_ENTITY_ID: "cover.test_my_cover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, position=1.0)]) @@ -99,7 +99,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: "cover.test_mycover", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: "cover.test_my_cover", ATTR_POSITION: 50}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, position=0.5)]) @@ -108,7 +108,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: "cover.test_mycover"}, + {ATTR_ENTITY_ID: "cover.test_my_cover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, stop=True)]) @@ -117,7 +117,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: "cover.test_mycover"}, + {ATTR_ENTITY_ID: "cover.test_my_cover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, tilt=1.0)]) @@ -126,7 +126,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, - {ATTR_ENTITY_ID: "cover.test_mycover"}, + {ATTR_ENTITY_ID: "cover.test_my_cover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, tilt=0.0)]) @@ -135,7 +135,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, - {ATTR_ENTITY_ID: "cover.test_mycover", ATTR_TILT_POSITION: 50}, + {ATTR_ENTITY_ID: "cover.test_my_cover", ATTR_TILT_POSITION: 50}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, tilt=0.5)]) @@ -145,7 +145,7 @@ async def test_cover_entity( CoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("cover.test_mycover") + state = hass.states.get("cover.test_my_cover") assert state is not None assert state.state == STATE_CLOSED @@ -153,7 +153,7 @@ async def test_cover_entity( CoverState(key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING) ) await hass.async_block_till_done() - state = hass.states.get("cover.test_mycover") + state = hass.states.get("cover.test_my_cover") assert state is not None assert state.state == STATE_CLOSING @@ -161,7 +161,7 @@ async def test_cover_entity( CoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("cover.test_mycover") + state 
= hass.states.get("cover.test_my_cover") assert state is not None assert state.state == STATE_OPEN @@ -201,7 +201,7 @@ async def test_cover_entity_without_position( user_service=user_service, states=states, ) - state = hass.states.get("cover.test_mycover") + state = hass.states.get("cover.test_my_cover") assert state is not None assert state.state == STATE_OPENING assert ATTR_CURRENT_TILT_POSITION not in state.attributes diff --git a/tests/components/esphome/test_date.py b/tests/components/esphome/test_date.py index 2deb92775fb..3b620a30461 100644 --- a/tests/components/esphome/test_date.py +++ b/tests/components/esphome/test_date.py @@ -35,14 +35,14 @@ async def test_generic_date_entity( user_service=user_service, states=states, ) - state = hass.states.get("date.test_mydate") + state = hass.states.get("date.test_my_date") assert state is not None assert state.state == "2024-12-31" await hass.services.async_call( DATE_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "date.test_mydate", ATTR_DATE: "1999-01-01"}, + {ATTR_ENTITY_ID: "date.test_my_date", ATTR_DATE: "1999-01-01"}, blocking=True, ) mock_client.date_command.assert_has_calls([call(1, 1999, 1, 1)]) @@ -71,6 +71,6 @@ async def test_generic_date_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("date.test_mydate") + state = hass.states.get("date.test_my_date") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_datetime.py b/tests/components/esphome/test_datetime.py index 3bdc196de95..95f0b7584ad 100644 --- a/tests/components/esphome/test_datetime.py +++ b/tests/components/esphome/test_datetime.py @@ -35,7 +35,7 @@ async def test_generic_datetime_entity( user_service=user_service, states=states, ) - state = hass.states.get("datetime.test_mydatetime") + state = hass.states.get("datetime.test_my_datetime") assert state is not None assert state.state == "2024-04-16T12:34:56+00:00" @@ -43,7 +43,7 @@ async def test_generic_datetime_entity( DATETIME_DOMAIN, SERVICE_SET_VALUE, { - ATTR_ENTITY_ID: "datetime.test_mydatetime", + ATTR_ENTITY_ID: "datetime.test_my_datetime", ATTR_DATETIME: "2000-01-01T01:23:45+00:00", }, blocking=True, @@ -74,6 +74,6 @@ async def test_generic_datetime_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("datetime.test_mydatetime") + state = hass.states.get("datetime.test_my_datetime") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_entity.py b/tests/components/esphome/test_entity.py index 296d61b664d..64b8d6101ac 100644 --- a/tests/components/esphome/test_entity.py +++ b/tests/components/esphome/test_entity.py @@ -69,10 +69,10 @@ async def test_entities_removed( entry = mock_device.entry entry_id = entry.entry_id storage_key = f"esphome.{entry_id}" - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is not None assert state.state == STATE_ON @@ -81,13 +81,13 @@ async def test_entities_removed( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 2 - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert 
state.attributes[ATTR_RESTORED] is True - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is not None reg_entry = entity_registry.async_get( - "binary_sensor.test_mybinary_sensor_to_be_removed" + "binary_sensor.test_my_binary_sensor_to_be_removed" ) assert reg_entry is not None assert state.attributes[ATTR_RESTORED] is True @@ -111,13 +111,13 @@ async def test_entities_removed( entry=entry, ) assert mock_device.entry.entry_id == entry_id - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is None reg_entry = entity_registry.async_get( - "binary_sensor.test_mybinary_sensor_to_be_removed" + "binary_sensor.test_my_binary_sensor_to_be_removed" ) assert reg_entry is None await hass.config_entries.async_unload(entry.entry_id) @@ -164,15 +164,15 @@ async def test_entities_removed_after_reload( entry = mock_device.entry entry_id = entry.entry_id storage_key = f"esphome.{entry_id}" - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is not None assert state.state == STATE_ON reg_entry = entity_registry.async_get( - "binary_sensor.test_mybinary_sensor_to_be_removed" + "binary_sensor.test_my_binary_sensor_to_be_removed" ) assert reg_entry is not None @@ -181,15 +181,15 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 2 - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.attributes[ATTR_RESTORED] is True - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is not None assert state.attributes[ATTR_RESTORED] is True reg_entry = entity_registry.async_get( - "binary_sensor.test_mybinary_sensor_to_be_removed" + "binary_sensor.test_my_binary_sensor_to_be_removed" ) assert reg_entry is not None @@ -198,14 +198,14 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 2 - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert ATTR_RESTORED not in state.attributes - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is not None assert ATTR_RESTORED not in state.attributes reg_entry = entity_registry.async_get( - "binary_sensor.test_mybinary_sensor_to_be_removed" + "binary_sensor.test_my_binary_sensor_to_be_removed" ) assert reg_entry is not None @@ -236,23 +236,23 @@ async def test_entities_removed_after_reload( on_future.set_result(None) async_track_state_change_event( - hass, ["binary_sensor.test_mybinary_sensor"], 
_async_wait_for_on + hass, ["binary_sensor.test_my_binary_sensor"], _async_wait_for_on ) await hass.async_block_till_done() async with asyncio.timeout(2): await on_future assert mock_device.entry.entry_id == entry_id - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") assert state is None await hass.async_block_till_done() reg_entry = entity_registry.async_get( - "binary_sensor.test_mybinary_sensor_to_be_removed" + "binary_sensor.test_my_binary_sensor_to_be_removed" ) assert reg_entry is None assert await hass.config_entries.async_unload(entry.entry_id) @@ -260,35 +260,6 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 1 -async def test_entity_info_object_ids( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test how object ids affect entity id.""" - entity_info = [ - BinarySensorInfo( - object_id="object_id_is_used", - key=1, - name="my binary_sensor", - unique_id="my_binary_sensor", - ) - ] - states = [] - user_service = [] - await mock_esphome_device( - mock_client=mock_client, - entity_info=entity_info, - user_service=user_service, - states=states, - ) - state = hass.states.get("binary_sensor.test_object_id_is_used") - assert state is not None - - async def test_deep_sleep_device( hass: HomeAssistant, mock_client: APIClient, @@ -326,7 +297,7 @@ async def test_deep_sleep_device( states=states, device_info={"has_deep_sleep": True}, ) - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON state = hass.states.get("sensor.test_my_sensor") @@ -335,7 +306,7 @@ async def test_deep_sleep_device( await mock_device.mock_disconnect(False) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_UNAVAILABLE state = hass.states.get("sensor.test_my_sensor") @@ -345,7 +316,7 @@ async def test_deep_sleep_device( await mock_device.mock_connect() await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON state = hass.states.get("sensor.test_my_sensor") @@ -359,7 +330,7 @@ async def test_deep_sleep_device( mock_device.set_state(BinarySensorState(key=1, state=False, missing_state=False)) mock_device.set_state(SensorState(key=3, state=56, missing_state=False)) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_OFF state = hass.states.get("sensor.test_my_sensor") @@ -368,7 +339,7 @@ async def test_deep_sleep_device( await mock_device.mock_disconnect(True) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = 
hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_OFF state = hass.states.get("sensor.test_my_sensor") @@ -379,7 +350,7 @@ async def test_deep_sleep_device( await hass.async_block_till_done() await mock_device.mock_disconnect(False) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_UNAVAILABLE state = hass.states.get("sensor.test_my_sensor") @@ -388,14 +359,14 @@ async def test_deep_sleep_device( await mock_device.mock_connect() await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() # Verify we do not dispatch any more state updates or # availability updates after the stop event is fired - state = hass.states.get("binary_sensor.test_mybinary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON @@ -430,6 +401,6 @@ async def test_esphome_device_without_friendly_name( states=states, device_info={"friendly_name": None}, ) - state = hass.states.get("binary_sensor.my_binary_sensor") + state = hass.states.get("binary_sensor.test_my_binary_sensor") assert state is not None assert state.state == STATE_ON diff --git a/tests/components/esphome/test_event.py b/tests/components/esphome/test_event.py index c17dc4d98a9..2daba94a3ca 100644 --- a/tests/components/esphome/test_event.py +++ b/tests/components/esphome/test_event.py @@ -32,7 +32,7 @@ async def test_generic_event_entity( user_service=user_service, states=states, ) - state = hass.states.get("event.test_myevent") + state = hass.states.get("event.test_my_event") assert state is not None assert state.state == "2024-04-24T00:00:00.000+00:00" assert state.attributes["event_type"] == "type1" diff --git a/tests/components/esphome/test_fan.py b/tests/components/esphome/test_fan.py index 064b37b1ec1..9aca36d79a4 100644 --- a/tests/components/esphome/test_fan.py +++ b/tests/components/esphome/test_fan.py @@ -62,14 +62,14 @@ async def test_fan_entity_with_all_features_old_api( user_service=user_service, states=states, ) - state = hass.states.get("fan.test_myfan") + state = hass.states.get("fan.test_my_fan") assert state is not None assert state.state == STATE_ON await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 20}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 20}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -80,7 +80,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 50}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 50}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -91,7 +91,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_DECREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -102,7 +102,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_INCREASE_SPEED, - 
{ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -113,7 +113,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) @@ -122,7 +122,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 100}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 100}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -166,14 +166,14 @@ async def test_fan_entity_with_all_features_new_api( user_service=user_service, states=states, ) - state = hass.states.get("fan.test_myfan") + state = hass.states.get("fan.test_my_fan") assert state is not None assert state.state == STATE_ON await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 20}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 20}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=1, state=True)]) @@ -182,7 +182,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 50}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 50}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=2, state=True)]) @@ -191,7 +191,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_DECREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=2, state=True)]) @@ -200,7 +200,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_INCREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=4, state=True)]) @@ -209,7 +209,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) @@ -218,7 +218,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 100}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 100}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=4, state=True)]) @@ -227,7 +227,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 0}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 0}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) @@ -236,7 +236,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_OSCILLATE, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_OSCILLATING: True}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_OSCILLATING: True}, 
blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, oscillating=True)]) @@ -245,7 +245,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_OSCILLATE, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_OSCILLATING: False}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_OSCILLATING: False}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, oscillating=False)]) @@ -254,7 +254,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_DIRECTION, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_DIRECTION: "forward"}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_DIRECTION: "forward"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -265,7 +265,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_DIRECTION, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_DIRECTION: "reverse"}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_DIRECTION: "reverse"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -276,7 +276,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PRESET_MODE: "Preset1"}, + {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PRESET_MODE: "Preset1"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, preset_mode="Preset1")]) @@ -308,14 +308,14 @@ async def test_fan_entity_with_no_features_new_api( user_service=user_service, states=states, ) - state = hass.states.get("fan.test_myfan") + state = hass.states.get("fan.test_my_fan") assert state is not None assert state.state == STATE_ON await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=True)]) @@ -324,7 +324,7 @@ async def test_fan_entity_with_no_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "fan.test_myfan"}, + {ATTR_ENTITY_ID: "fan.test_my_fan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) diff --git a/tests/components/esphome/test_light.py b/tests/components/esphome/test_light.py index 2324c73b16f..5dc563d9991 100644 --- a/tests/components/esphome/test_light.py +++ b/tests/components/esphome/test_light.py @@ -65,14 +65,14 @@ async def test_light_on_off( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -105,14 +105,14 @@ async def test_light_brightness( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -123,7 +123,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: 
"light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -141,7 +141,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_TRANSITION: 2}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_TRANSITION: 2}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -152,7 +152,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_FLASH: FLASH_LONG}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_FLASH: FLASH_LONG}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -163,7 +163,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_TRANSITION: 2}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_TRANSITION: 2}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -181,7 +181,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_FLASH: FLASH_SHORT}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_FLASH: FLASH_SHORT}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -223,14 +223,14 @@ async def test_light_brightness_on_off( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -248,7 +248,7 @@ async def test_light_brightness_on_off( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -293,14 +293,14 @@ async def test_light_legacy_white_converted_to_brightness( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -351,7 +351,7 @@ async def test_light_legacy_white_with_rgb( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ @@ -362,7 +362,7 @@ async def test_light_legacy_white_with_rgb( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_WHITE: 60}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_WHITE: 60}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -405,14 +405,14 @@ async def test_light_brightness_on_off_with_unknown_color_mode( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert 
state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -431,7 +431,7 @@ async def test_light_brightness_on_off_with_unknown_color_mode( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -476,14 +476,14 @@ async def test_light_on_and_brightness( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -526,14 +526,14 @@ async def test_rgb_color_temp_light( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -551,7 +551,7 @@ async def test_rgb_color_temp_light( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -570,7 +570,7 @@ async def test_rgb_color_temp_light( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -614,14 +614,14 @@ async def test_light_rgb( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -640,7 +640,7 @@ async def test_light_rgb( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -661,7 +661,7 @@ async def test_light_rgb( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_mylight", + ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -686,7 +686,7 @@ async def test_light_rgb( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -746,7 +746,7 @@ async def test_light_rgbw( user_service=user_service, states=states, ) - state = 
hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] @@ -755,7 +755,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -775,7 +775,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -797,7 +797,7 @@ async def test_light_rgbw( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_mylight", + ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -824,7 +824,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -847,7 +847,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -917,7 +917,7 @@ async def test_light_rgbww_with_cold_warm_white_support( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBWW] @@ -927,7 +927,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -949,7 +949,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -973,7 +973,7 @@ async def test_light_rgbww_with_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_mylight", + ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -1003,7 +1003,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1029,7 +1029,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, blocking=True, ) 
mock_client.light_command.assert_has_calls( @@ -1056,7 +1056,7 @@ async def test_light_rgbww_with_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_mylight", + ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBWW_COLOR: (255, 255, 255, 255, 255), }, blocking=True, @@ -1084,7 +1084,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1154,7 +1154,7 @@ async def test_light_rgbww_without_cold_warm_white_support( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBWW] @@ -1164,7 +1164,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1185,7 +1185,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1208,7 +1208,7 @@ async def test_light_rgbww_without_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_mylight", + ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -1237,7 +1237,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1262,7 +1262,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1288,7 +1288,7 @@ async def test_light_rgbww_without_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_mylight", + ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBWW_COLOR: (255, 255, 255, 255, 255), }, blocking=True, @@ -1315,7 +1315,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1374,7 +1374,7 @@ async def test_light_color_temp( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1387,7 +1387,7 @@ async def test_light_color_temp( await 
hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1406,7 +1406,7 @@ async def test_light_color_temp( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1449,7 +1449,7 @@ async def test_light_color_temp_no_mireds_set( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1462,7 +1462,7 @@ async def test_light_color_temp_no_mireds_set( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1481,7 +1481,7 @@ async def test_light_color_temp_no_mireds_set( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 6000}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 6000}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1501,7 +1501,7 @@ async def test_light_color_temp_no_mireds_set( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1551,7 +1551,7 @@ async def test_light_color_temp_legacy( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1566,7 +1566,7 @@ async def test_light_color_temp_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1585,7 +1585,7 @@ async def test_light_color_temp_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1637,7 +1637,7 @@ async def test_light_rgb_legacy( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1647,7 +1647,7 @@ async def test_light_rgb_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1663,7 +1663,7 @@ async def test_light_rgb_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1672,7 +1672,7 @@ async def test_light_rgb_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: 
"light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1715,7 +1715,7 @@ async def test_light_effects( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_EFFECT_LIST] == ["effect1", "effect2"] @@ -1723,7 +1723,7 @@ async def test_light_effects( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_EFFECT: "effect1"}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_EFFECT: "effect1"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1782,7 +1782,7 @@ async def test_only_cold_warm_white_support( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.COLOR_TEMP] @@ -1791,7 +1791,7 @@ async def test_only_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1802,7 +1802,7 @@ async def test_only_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1820,7 +1820,7 @@ async def test_only_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1861,7 +1861,7 @@ async def test_light_no_color_modes( user_service=user_service, states=states, ) - state = hass.states.get("light.test_mylight") + state = hass.states.get("light.test_my_light") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] @@ -1869,7 +1869,7 @@ async def test_light_no_color_modes( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_mylight"}, + {ATTR_ENTITY_ID: "light.test_my_light"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=True, color_mode=0)]) diff --git a/tests/components/esphome/test_lock.py b/tests/components/esphome/test_lock.py index 82c24b59a2c..259e990c5f7 100644 --- a/tests/components/esphome/test_lock.py +++ b/tests/components/esphome/test_lock.py @@ -39,14 +39,14 @@ async def test_lock_entity_no_open( user_service=user_service, states=states, ) - state = hass.states.get("lock.test_mylock") + state = hass.states.get("lock.test_my_lock") assert state is not None assert state.state == STATE_UNLOCKING await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, - {ATTR_ENTITY_ID: "lock.test_mylock"}, + {ATTR_ENTITY_ID: "lock.test_my_lock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.LOCK)]) @@ -73,7 +73,7 @@ async def test_lock_entity_start_locked( user_service=user_service, states=states, ) - state = 
hass.states.get("lock.test_mylock") + state = hass.states.get("lock.test_my_lock") assert state is not None assert state.state == STATE_LOCKED @@ -100,14 +100,14 @@ async def test_lock_entity_supports_open( user_service=user_service, states=states, ) - state = hass.states.get("lock.test_mylock") + state = hass.states.get("lock.test_my_lock") assert state is not None assert state.state == STATE_LOCKING await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, - {ATTR_ENTITY_ID: "lock.test_mylock"}, + {ATTR_ENTITY_ID: "lock.test_my_lock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.LOCK)]) @@ -116,7 +116,7 @@ async def test_lock_entity_supports_open( await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, - {ATTR_ENTITY_ID: "lock.test_mylock"}, + {ATTR_ENTITY_ID: "lock.test_my_lock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.UNLOCK, None)]) @@ -125,7 +125,7 @@ async def test_lock_entity_supports_open( await hass.services.async_call( LOCK_DOMAIN, SERVICE_OPEN, - {ATTR_ENTITY_ID: "lock.test_mylock"}, + {ATTR_ENTITY_ID: "lock.test_my_lock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.OPEN)]) diff --git a/tests/components/esphome/test_media_player.py b/tests/components/esphome/test_media_player.py index 3879129ccb6..c9fcecb5d55 100644 --- a/tests/components/esphome/test_media_player.py +++ b/tests/components/esphome/test_media_player.py @@ -62,7 +62,7 @@ async def test_media_player_entity( user_service=user_service, states=states, ) - state = hass.states.get("media_player.test_mymedia_player") + state = hass.states.get("media_player.test_my_media_player") assert state is not None assert state.state == "paused" @@ -70,7 +70,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", ATTR_MEDIA_VOLUME_MUTED: True, }, blocking=True, @@ -84,7 +84,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", ATTR_MEDIA_VOLUME_MUTED: True, }, blocking=True, @@ -98,7 +98,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_SET, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", ATTR_MEDIA_VOLUME_LEVEL: 0.5, }, blocking=True, @@ -110,7 +110,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PAUSE, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", }, blocking=True, ) @@ -123,7 +123,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PLAY, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", }, blocking=True, ) @@ -136,7 +136,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_STOP, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", }, blocking=True, ) @@ -207,7 +207,7 @@ async def test_media_player_entity_with_source( user_service=user_service, states=states, ) - state = hass.states.get("media_player.test_mymedia_player") + state = hass.states.get("media_player.test_my_media_player") assert state is not None assert state.state == "playing" @@ -216,7 +216,7 @@ async def 
test_media_player_entity_with_source( MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, ATTR_MEDIA_CONTENT_ID: "media-source://local/xz", }, @@ -240,7 +240,7 @@ async def test_media_player_entity_with_source( MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", ATTR_MEDIA_CONTENT_TYPE: "audio/mp3", ATTR_MEDIA_CONTENT_ID: "media-source://local/xy", }, @@ -256,7 +256,7 @@ async def test_media_player_entity_with_source( { "id": 1, "type": "media_player/browse_media", - "entity_id": "media_player.test_mymedia_player", + "entity_id": "media_player.test_my_media_player", } ) response = await client.receive_json() @@ -266,7 +266,7 @@ async def test_media_player_entity_with_source( MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_ENTITY_ID: "media_player.test_my_media_player", ATTR_MEDIA_CONTENT_TYPE: MediaType.URL, ATTR_MEDIA_CONTENT_ID: "media-source://tts?message=hello", ATTR_MEDIA_ANNOUNCE: True, diff --git a/tests/components/esphome/test_number.py b/tests/components/esphome/test_number.py index 557425052f3..91a21e670f5 100644 --- a/tests/components/esphome/test_number.py +++ b/tests/components/esphome/test_number.py @@ -48,14 +48,14 @@ async def test_generic_number_entity( user_service=user_service, states=states, ) - state = hass.states.get("number.test_mynumber") + state = hass.states.get("number.test_my_number") assert state is not None assert state.state == "50" await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "number.test_mynumber", ATTR_VALUE: 50}, + {ATTR_ENTITY_ID: "number.test_my_number", ATTR_VALUE: 50}, blocking=True, ) mock_client.number_command.assert_has_calls([call(1, 50)]) @@ -89,7 +89,7 @@ async def test_generic_number_nan( user_service=user_service, states=states, ) - state = hass.states.get("number.test_mynumber") + state = hass.states.get("number.test_my_number") assert state is not None assert state.state == STATE_UNKNOWN @@ -121,7 +121,7 @@ async def test_generic_number_with_unit_of_measurement_as_empty_string( user_service=user_service, states=states, ) - state = hass.states.get("number.test_mynumber") + state = hass.states.get("number.test_my_number") assert state is not None assert state.state == "42" assert ATTR_UNIT_OF_MEASUREMENT not in state.attributes @@ -160,7 +160,7 @@ async def test_generic_number_entity_set_when_disconnected( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "number.test_mynumber", ATTR_VALUE: 20}, + {ATTR_ENTITY_ID: "number.test_my_number", ATTR_VALUE: 20}, blocking=True, ) mock_client.number_command.reset_mock() diff --git a/tests/components/esphome/test_select.py b/tests/components/esphome/test_select.py index a433b1b0ab0..8df898ea3cf 100644 --- a/tests/components/esphome/test_select.py +++ b/tests/components/esphome/test_select.py @@ -59,14 +59,14 @@ async def test_select_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("select.test_myselect") + state = hass.states.get("select.test_my_select") assert state is not None assert state.state == "a" await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: "select.test_myselect", ATTR_OPTION: "b"}, + {ATTR_ENTITY_ID: "select.test_my_select", ATTR_OPTION: "b"}, 
blocking=True, ) mock_client.select_command.assert_has_calls([call(1, "b")]) diff --git a/tests/components/esphome/test_sensor.py b/tests/components/esphome/test_sensor.py index 76f71b53167..5f68dbb8660 100644 --- a/tests/components/esphome/test_sensor.py +++ b/tests/components/esphome/test_sensor.py @@ -62,35 +62,35 @@ async def test_generic_numeric_sensor( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "50" # Test updating state mock_device.set_state(SensorState(key=1, state=60)) await hass.async_block_till_done() - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "60" # Test sending the same state again mock_device.set_state(SensorState(key=1, state=60)) await hass.async_block_till_done() - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "60" # Test we can still update after the same state mock_device.set_state(SensorState(key=1, state=70)) await hass.async_block_till_done() - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "70" # Test invalid data from the underlying api does not crash us mock_device.set_state(SensorState(key=1, state=object())) await hass.async_block_till_done() - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "70" @@ -120,11 +120,11 @@ async def test_generic_numeric_sensor_with_entity_category_and_icon( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "50" assert state.attributes[ATTR_ICON] == "mdi:leaf" - entry = entity_registry.async_get("sensor.test_mysensor") + entry = entity_registry.async_get("sensor.test_my_sensor") assert entry is not None # Note that ESPHome includes the EntityInfo type in the unique id # as this is not a 1:1 mapping to the entity platform (ie. text_sensor) @@ -158,11 +158,11 @@ async def test_generic_numeric_sensor_state_class_measurement( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "50" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT - entry = entity_registry.async_get("sensor.test_mysensor") + entry = entity_registry.async_get("sensor.test_my_sensor") assert entry is not None # Note that ESPHome includes the EntityInfo type in the unique id # as this is not a 1:1 mapping to the entity platform (ie. 
text_sensor) @@ -193,7 +193,7 @@ async def test_generic_numeric_sensor_device_class_timestamp( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "2023-06-22T18:43:52+00:00" @@ -222,7 +222,7 @@ async def test_generic_numeric_sensor_legacy_last_reset_convert( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "50" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.TOTAL_INCREASING @@ -248,7 +248,7 @@ async def test_generic_numeric_sensor_no_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -273,7 +273,7 @@ async def test_generic_numeric_sensor_nan_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -298,7 +298,7 @@ async def test_generic_numeric_sensor_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -325,7 +325,7 @@ async def test_generic_text_sensor( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "i am a teapot" @@ -350,7 +350,7 @@ async def test_generic_text_sensor_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -378,7 +378,7 @@ async def test_generic_text_sensor_device_class_timestamp( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "2023-06-22T18:43:52+00:00" assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.TIMESTAMP @@ -407,7 +407,7 @@ async def test_generic_text_sensor_device_class_date( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "2023-06-22" assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.DATE @@ -434,7 +434,7 @@ async def test_generic_numeric_sensor_empty_string_uom( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_mysensor") + state = hass.states.get("sensor.test_my_sensor") assert state is not None assert state.state == "123" assert ATTR_UNIT_OF_MEASUREMENT not in state.attributes diff --git a/tests/components/esphome/test_switch.py b/tests/components/esphome/test_switch.py index 561ac0b369f..799290c931a 100644 --- a/tests/components/esphome/test_switch.py +++ b/tests/components/esphome/test_switch.py @@ -33,14 +33,14 @@ async def test_switch_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("switch.test_myswitch") + state = hass.states.get("switch.test_my_switch") assert state is not None assert 
state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_myswitch"}, + {ATTR_ENTITY_ID: "switch.test_my_switch"}, blocking=True, ) mock_client.switch_command.assert_has_calls([call(1, True)]) @@ -48,7 +48,7 @@ async def test_switch_generic_entity( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_myswitch"}, + {ATTR_ENTITY_ID: "switch.test_my_switch"}, blocking=True, ) mock_client.switch_command.assert_has_calls([call(1, False)]) diff --git a/tests/components/esphome/test_text.py b/tests/components/esphome/test_text.py index 07157d98ac6..e2b459be2d2 100644 --- a/tests/components/esphome/test_text.py +++ b/tests/components/esphome/test_text.py @@ -39,14 +39,14 @@ async def test_generic_text_entity( user_service=user_service, states=states, ) - state = hass.states.get("text.test_mytext") + state = hass.states.get("text.test_my_text") assert state is not None assert state.state == "hello world" await hass.services.async_call( TEXT_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "text.test_mytext", ATTR_VALUE: "goodbye"}, + {ATTR_ENTITY_ID: "text.test_my_text", ATTR_VALUE: "goodbye"}, blocking=True, ) mock_client.text_command.assert_has_calls([call(1, "goodbye")]) @@ -79,7 +79,7 @@ async def test_generic_text_entity_no_state( user_service=user_service, states=states, ) - state = hass.states.get("text.test_mytext") + state = hass.states.get("text.test_my_text") assert state is not None assert state.state == STATE_UNKNOWN @@ -110,6 +110,6 @@ async def test_generic_text_entity_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("text.test_mytext") + state = hass.states.get("text.test_my_text") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_time.py b/tests/components/esphome/test_time.py index aaa18c77a47..5277ca82e34 100644 --- a/tests/components/esphome/test_time.py +++ b/tests/components/esphome/test_time.py @@ -35,14 +35,14 @@ async def test_generic_time_entity( user_service=user_service, states=states, ) - state = hass.states.get("time.test_mytime") + state = hass.states.get("time.test_my_time") assert state is not None assert state.state == "12:34:56" await hass.services.async_call( TIME_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "time.test_mytime", ATTR_TIME: "01:23:45"}, + {ATTR_ENTITY_ID: "time.test_my_time", ATTR_TIME: "01:23:45"}, blocking=True, ) mock_client.time_command.assert_has_calls([call(1, 1, 23, 45)]) @@ -71,6 +71,6 @@ async def test_generic_time_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("time.test_mytime") + state = hass.states.get("time.test_my_time") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_update.py b/tests/components/esphome/test_update.py index 83e89b1de00..ad638add0a0 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -464,7 +464,7 @@ async def test_generic_device_update_entity( user_service=user_service, states=states, ) - state = hass.states.get("update.test_myupdate") + state = hass.states.get("update.test_my_update") assert state is not None assert state.state == STATE_OFF @@ -503,14 +503,14 @@ async def test_generic_device_update_entity_has_update( user_service=user_service, states=states, ) - state = hass.states.get("update.test_myupdate") + state = hass.states.get("update.test_my_update") assert state is not None 
assert state.state == STATE_ON await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_myupdate"}, + {ATTR_ENTITY_ID: "update.test_my_update"}, blocking=True, ) @@ -528,7 +528,7 @@ async def test_generic_device_update_entity_has_update( ) ) - state = hass.states.get("update.test_myupdate") + state = hass.states.get("update.test_my_update") assert state is not None assert state.state == STATE_ON assert state.attributes["in_progress"] == 50 @@ -536,7 +536,7 @@ async def test_generic_device_update_entity_has_update( await hass.services.async_call( HOMEASSISTANT_DOMAIN, SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: "update.test_myupdate"}, + {ATTR_ENTITY_ID: "update.test_my_update"}, blocking=True, ) diff --git a/tests/components/esphome/test_valve.py b/tests/components/esphome/test_valve.py index 5ba7bcbe187..413e4cfcb9f 100644 --- a/tests/components/esphome/test_valve.py +++ b/tests/components/esphome/test_valve.py @@ -65,7 +65,7 @@ async def test_valve_entity( user_service=user_service, states=states, ) - state = hass.states.get("valve.test_myvalve") + state = hass.states.get("valve.test_my_valve") assert state is not None assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 @@ -73,7 +73,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_CLOSE_VALVE, - {ATTR_ENTITY_ID: "valve.test_myvalve"}, + {ATTR_ENTITY_ID: "valve.test_my_valve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=0.0)]) @@ -82,7 +82,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_OPEN_VALVE, - {ATTR_ENTITY_ID: "valve.test_myvalve"}, + {ATTR_ENTITY_ID: "valve.test_my_valve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=1.0)]) @@ -91,7 +91,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_SET_VALVE_POSITION, - {ATTR_ENTITY_ID: "valve.test_myvalve", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: "valve.test_my_valve", ATTR_POSITION: 50}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=0.5)]) @@ -100,7 +100,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_STOP_VALVE, - {ATTR_ENTITY_ID: "valve.test_myvalve"}, + {ATTR_ENTITY_ID: "valve.test_my_valve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, stop=True)]) @@ -110,7 +110,7 @@ async def test_valve_entity( ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_myvalve") + state = hass.states.get("valve.test_my_valve") assert state is not None assert state.state == STATE_CLOSED @@ -118,7 +118,7 @@ async def test_valve_entity( ValveState(key=1, position=0.5, current_operation=ValveOperation.IS_CLOSING) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_myvalve") + state = hass.states.get("valve.test_my_valve") assert state is not None assert state.state == STATE_CLOSING @@ -126,7 +126,7 @@ async def test_valve_entity( ValveState(key=1, position=1.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_myvalve") + state = hass.states.get("valve.test_my_valve") assert state is not None assert state.state == STATE_OPEN @@ -164,7 +164,7 @@ async def test_valve_entity_without_position( user_service=user_service, states=states, ) - state = 
hass.states.get("valve.test_myvalve") + state = hass.states.get("valve.test_my_valve") assert state is not None assert state.state == STATE_OPENING assert ATTR_CURRENT_POSITION not in state.attributes @@ -172,7 +172,7 @@ async def test_valve_entity_without_position( await hass.services.async_call( VALVE_DOMAIN, SERVICE_CLOSE_VALVE, - {ATTR_ENTITY_ID: "valve.test_myvalve"}, + {ATTR_ENTITY_ID: "valve.test_my_valve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=0.0)]) @@ -181,7 +181,7 @@ async def test_valve_entity_without_position( await hass.services.async_call( VALVE_DOMAIN, SERVICE_OPEN_VALVE, - {ATTR_ENTITY_ID: "valve.test_myvalve"}, + {ATTR_ENTITY_ID: "valve.test_my_valve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=1.0)]) @@ -191,6 +191,6 @@ async def test_valve_entity_without_position( ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_myvalve") + state = hass.states.get("valve.test_my_valve") assert state is not None assert state.state == STATE_CLOSED From 55eb11055ca8a5fbb2109fda1f9319e7d8adbe99 Mon Sep 17 00:00:00 2001 From: Matrix Date: Fri, 9 Aug 2024 18:21:49 +0800 Subject: [PATCH 0511/1635] Bump YoLink API to 0.4.7 (#123441) --- homeassistant/components/yolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/yolink/manifest.json b/homeassistant/components/yolink/manifest.json index ceb4e4ceff3..78b553d7978 100644 --- a/homeassistant/components/yolink/manifest.json +++ b/homeassistant/components/yolink/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["auth", "application_credentials"], "documentation": "https://www.home-assistant.io/integrations/yolink", "iot_class": "cloud_push", - "requirements": ["yolink-api==0.4.6"] + "requirements": ["yolink-api==0.4.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1960107d88e..a4eba7932fe 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2965,7 +2965,7 @@ yeelight==0.7.14 yeelightsunflower==0.0.10 # homeassistant.components.yolink -yolink-api==0.4.6 +yolink-api==0.4.7 # homeassistant.components.youless youless-api==2.1.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4559cd3dca0..5d8c6ffb9d9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2345,7 +2345,7 @@ yalexs==6.4.3 yeelight==0.7.14 # homeassistant.components.yolink -yolink-api==0.4.6 +yolink-api==0.4.7 # homeassistant.components.youless youless-api==2.1.2 From aee5d5126f7cbfa68c33f30d8b49cb5e35edcfe8 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Fri, 9 Aug 2024 15:02:27 +0100 Subject: [PATCH 0512/1635] Add sensor platform for Mastodon (#123434) * Add account sensors * Sensor icons * Change sensors to use value_fn * Add native unit of measurement * Update native unit of measurement * Change toots to posts * Fix sensor icons * Add device entry type * Explain conditional naming * Fixes from review * Remove unnecessary constructor --- homeassistant/components/mastodon/__init__.py | 43 ++++- homeassistant/components/mastodon/const.py | 3 + .../components/mastodon/coordinator.py | 35 ++++ homeassistant/components/mastodon/entity.py | 48 ++++++ homeassistant/components/mastodon/icons.json | 15 ++ homeassistant/components/mastodon/sensor.py | 85 ++++++++++ .../components/mastodon/strings.json | 13 ++ 
.../mastodon/snapshots/test_sensor.ambr | 151 ++++++++++++++++++ tests/components/mastodon/test_sensor.py | 27 ++++ 9 files changed, 418 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/mastodon/coordinator.py create mode 100644 homeassistant/components/mastodon/entity.py create mode 100644 homeassistant/components/mastodon/icons.json create mode 100644 homeassistant/components/mastodon/sensor.py create mode 100644 tests/components/mastodon/snapshots/test_sensor.ambr create mode 100644 tests/components/mastodon/test_sensor.py diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index 2fe379702ee..0a7c93911b5 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -2,6 +2,9 @@ from __future__ import annotations +from dataclasses import dataclass +from typing import TYPE_CHECKING + from mastodon.Mastodon import Mastodon, MastodonError from homeassistant.config_entries import ConfigEntry @@ -17,14 +20,33 @@ from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import discovery from .const import CONF_BASE_URL, DOMAIN +from .coordinator import MastodonCoordinator from .utils import create_mastodon_client +PLATFORMS: list[Platform] = [Platform.NOTIFY, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +@dataclass +class MastodonData: + """Mastodon data type.""" + + client: Mastodon + instance: dict + account: dict + coordinator: MastodonCoordinator + + +type MastodonConfigEntry = ConfigEntry[MastodonData] + +if TYPE_CHECKING: + from . import MastodonConfigEntry + + +async def async_setup_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: """Set up Mastodon from a config entry.""" try: - client, _, _ = await hass.async_add_executor_job( + client, instance, account = await hass.async_add_executor_job( setup_mastodon, entry, ) @@ -34,6 +56,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: assert entry.unique_id + coordinator = MastodonCoordinator(hass, client) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = MastodonData(client, instance, account, coordinator) + await discovery.async_load_platform( hass, Platform.NOTIFY, @@ -42,9 +70,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: {}, ) + await hass.config_entries.async_forward_entry_setups( + entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY] + ) + return True +async def async_unload_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms( + entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY] + ) + + def setup_mastodon(entry: ConfigEntry) -> tuple[Mastodon, dict, dict]: """Get mastodon details.""" client = create_mastodon_client( diff --git a/homeassistant/components/mastodon/const.py b/homeassistant/components/mastodon/const.py index 3a9cf7462e6..e0593d15d2c 100644 --- a/homeassistant/components/mastodon/const.py +++ b/homeassistant/components/mastodon/const.py @@ -16,3 +16,6 @@ INSTANCE_VERSION: Final = "version" INSTANCE_URI: Final = "uri" INSTANCE_DOMAIN: Final = "domain" ACCOUNT_USERNAME: Final = "username" +ACCOUNT_FOLLOWERS_COUNT: Final = "followers_count" +ACCOUNT_FOLLOWING_COUNT: Final = "following_count" +ACCOUNT_STATUSES_COUNT: Final = "statuses_count" diff --git 
a/homeassistant/components/mastodon/coordinator.py b/homeassistant/components/mastodon/coordinator.py new file mode 100644 index 00000000000..f1332a0ea43 --- /dev/null +++ b/homeassistant/components/mastodon/coordinator.py @@ -0,0 +1,35 @@ +"""Define an object to manage fetching Mastodon data.""" + +from __future__ import annotations + +from datetime import timedelta +from typing import Any + +from mastodon import Mastodon +from mastodon.Mastodon import MastodonError + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + + +class MastodonCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Class to manage fetching Mastodon data.""" + + def __init__(self, hass: HomeAssistant, client: Mastodon) -> None: + """Initialize coordinator.""" + super().__init__( + hass, logger=LOGGER, name="Mastodon", update_interval=timedelta(hours=1) + ) + self.client = client + + async def _async_update_data(self) -> dict[str, Any]: + try: + account: dict = await self.hass.async_add_executor_job( + self.client.account_verify_credentials + ) + except MastodonError as ex: + raise UpdateFailed(ex) from ex + + return account diff --git a/homeassistant/components/mastodon/entity.py b/homeassistant/components/mastodon/entity.py new file mode 100644 index 00000000000..93d630627d7 --- /dev/null +++ b/homeassistant/components/mastodon/entity.py @@ -0,0 +1,48 @@ +"""Base class for Mastodon entities.""" + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import MastodonConfigEntry +from .const import DEFAULT_NAME, DOMAIN, INSTANCE_VERSION +from .coordinator import MastodonCoordinator +from .utils import construct_mastodon_username + + +class MastodonEntity(CoordinatorEntity[MastodonCoordinator]): + """Defines a base Mastodon entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: MastodonCoordinator, + entity_description: EntityDescription, + data: MastodonConfigEntry, + ) -> None: + """Initialize Mastodon entity.""" + super().__init__(coordinator) + unique_id = data.unique_id + assert unique_id is not None + self._attr_unique_id = f"{unique_id}_{entity_description.key}" + + # Legacy yaml config default title is Mastodon, don't make name Mastodon Mastodon + name = "Mastodon" + if data.title != DEFAULT_NAME: + name = f"Mastodon {data.title}" + + full_account_name = construct_mastodon_username( + data.runtime_data.instance, data.runtime_data.account + ) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="Mastodon gGmbH", + model=full_account_name, + entry_type=DeviceEntryType.SERVICE, + sw_version=data.runtime_data.instance[INSTANCE_VERSION], + name=name, + ) + + self.entity_description = entity_description diff --git a/homeassistant/components/mastodon/icons.json b/homeassistant/components/mastodon/icons.json new file mode 100644 index 00000000000..082e27a64c2 --- /dev/null +++ b/homeassistant/components/mastodon/icons.json @@ -0,0 +1,15 @@ +{ + "entity": { + "sensor": { + "followers": { + "default": "mdi:account-multiple" + }, + "following": { + "default": "mdi:account-multiple" + }, + "posts": { + "default": "mdi:message-text" + } + } + } +} diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py new file mode 100644 index 
00000000000..12acfc04743 --- /dev/null +++ b/homeassistant/components/mastodon/sensor.py @@ -0,0 +1,85 @@ +"""Mastodon platform for sensor components.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.sensor import ( + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import MastodonConfigEntry +from .const import ( + ACCOUNT_FOLLOWERS_COUNT, + ACCOUNT_FOLLOWING_COUNT, + ACCOUNT_STATUSES_COUNT, +) +from .entity import MastodonEntity + + +@dataclass(frozen=True, kw_only=True) +class MastodonSensorEntityDescription(SensorEntityDescription): + """Describes Mastodon sensor entity.""" + + value_fn: Callable[[dict[str, Any]], StateType] + + +ENTITY_DESCRIPTIONS = ( + MastodonSensorEntityDescription( + key="followers", + translation_key="followers", + native_unit_of_measurement="accounts", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.get(ACCOUNT_FOLLOWERS_COUNT), + ), + MastodonSensorEntityDescription( + key="following", + translation_key="following", + native_unit_of_measurement="accounts", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.get(ACCOUNT_FOLLOWING_COUNT), + ), + MastodonSensorEntityDescription( + key="posts", + translation_key="posts", + native_unit_of_measurement="posts", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.get(ACCOUNT_STATUSES_COUNT), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: MastodonConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensor platform for entity.""" + coordinator = entry.runtime_data.coordinator + + async_add_entities( + MastodonSensorEntity( + coordinator=coordinator, + entity_description=entity_description, + data=entry, + ) + for entity_description in ENTITY_DESCRIPTIONS + ) + + +class MastodonSensorEntity(MastodonEntity, SensorEntity): + """A Mastodon sensor entity.""" + + entity_description: MastodonSensorEntityDescription + + @property + def native_value(self) -> StateType: + """Return the native value of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/mastodon/strings.json b/homeassistant/components/mastodon/strings.json index e1124aad1a9..ed8162eb3df 100644 --- a/homeassistant/components/mastodon/strings.json +++ b/homeassistant/components/mastodon/strings.json @@ -35,5 +35,18 @@ "title": "YAML import failed with unknown error", "description": "Configuring {integration_title} using YAML is being removed but there was an unknown error while importing your existing configuration.\nPlease use the UI to configure Mastodon. Don't forget to delete the YAML configuration." 
} + }, + "entity": { + "sensor": { + "followers": { + "name": "Followers" + }, + "following": { + "name": "Following" + }, + "posts": { + "name": "Posts" + } + } } } diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..f94e34c00ab --- /dev/null +++ b/tests/components/mastodon/snapshots/test_sensor.ambr @@ -0,0 +1,151 @@ +# serializer version: 1 +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Followers', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'followers', + 'unique_id': 'client_id_followers', + 'unit_of_measurement': 'accounts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Followers', + 'state_class': , + 'unit_of_measurement': 'accounts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '821', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Following', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'following', + 'unique_id': 'client_id_following', + 'unit_of_measurement': 'accounts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Following', + 'state_class': , + 'unit_of_measurement': 'accounts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Posts', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'posts', + 'unique_id': 'client_id_posts', + 'unit_of_measurement': 'posts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Posts', + 'state_class': , + 'unit_of_measurement': 'posts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33120', + }) +# --- diff --git a/tests/components/mastodon/test_sensor.py b/tests/components/mastodon/test_sensor.py new file mode 100644 index 00000000000..343505260e2 --- /dev/null +++ b/tests/components/mastodon/test_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Mastodon sensors.""" + +from unittest.mock import AsyncMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + with patch("homeassistant.components.mastodon.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From e6e985af24bda84bbcba548d32ad71c5044e1bcd Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Fri, 9 Aug 2024 15:28:55 +0100 Subject: [PATCH 0513/1635] Remove type checking of config entry in Mastodon (#123467) Remove type checking of configentry --- homeassistant/components/mastodon/__init__.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index 0a7c93911b5..3c305ca655b 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import TYPE_CHECKING from mastodon.Mastodon import Mastodon, MastodonError @@ -38,9 +37,6 @@ class MastodonData: type MastodonConfigEntry = ConfigEntry[MastodonData] -if TYPE_CHECKING: - from . 
import MastodonConfigEntry - async def async_setup_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: """Set up Mastodon from a config entry.""" From 97410474f5514473cc07231dd287f758dc8fe044 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Fri, 9 Aug 2024 10:31:55 -0400 Subject: [PATCH 0514/1635] Bump ZHA library to 0.0.29 (#123464) * Bump zha to 0.0.29 * Pass the Core timezone to ZHA * Add a unit test --- homeassistant/components/zha/__init__.py | 19 ++++++++++++++++-- homeassistant/components/zha/helpers.py | 2 ++ homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/zha/test_init.py | 23 +++++++++++++++++++++- 6 files changed, 44 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/zha/__init__.py b/homeassistant/components/zha/__init__.py index fc573b19ab1..1897b741d87 100644 --- a/homeassistant/components/zha/__init__.py +++ b/homeassistant/components/zha/__init__.py @@ -2,6 +2,7 @@ import contextlib import logging +from zoneinfo import ZoneInfo import voluptuous as vol from zha.application.const import BAUD_RATES, RadioType @@ -12,8 +13,13 @@ from zigpy.config import CONF_DATABASE, CONF_DEVICE, CONF_DEVICE_PATH from zigpy.exceptions import NetworkSettingsInconsistent, TransientConnectionError from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_TYPE, EVENT_HOMEASSISTANT_STOP, Platform -from homeassistant.core import Event, HomeAssistant +from homeassistant.const import ( + CONF_TYPE, + EVENT_CORE_CONFIG_UPDATE, + EVENT_HOMEASSISTANT_STOP, + Platform, +) +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr import homeassistant.helpers.config_validation as cv @@ -204,6 +210,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, async_shutdown) ) + @callback + def update_config(event: Event) -> None: + """Handle Core config update.""" + zha_gateway.config.local_timezone = ZoneInfo(hass.config.time_zone) + + config_entry.async_on_unload( + hass.bus.async_listen(EVENT_CORE_CONFIG_UPDATE, update_config) + ) + await ha_zha_data.gateway_proxy.async_initialize_devices_and_entities() await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async_dispatcher_send(hass, SIGNAL_ADD_ENTITIES) diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index 0691e2429d1..35a794e8631 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -15,6 +15,7 @@ import re import time from types import MappingProxyType from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast +from zoneinfo import ZoneInfo import voluptuous as vol from zha.application.const import ( @@ -1273,6 +1274,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: quirks_configuration=quirks_config, device_overrides=overrides_config, ), + local_timezone=ZoneInfo(hass.config.time_zone), ) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 4a597b0233c..385b95c8058 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - 
"requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.28"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.29"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index a4eba7932fe..21a3ad0e3f6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2989,7 +2989,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.28 +zha==0.0.29 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5d8c6ffb9d9..c1254d775d1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2366,7 +2366,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.28 +zha==0.0.29 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/tests/components/zha/test_init.py b/tests/components/zha/test_init.py index aa68d688799..00fc3afd0ea 100644 --- a/tests/components/zha/test_init.py +++ b/tests/components/zha/test_init.py @@ -3,6 +3,7 @@ import asyncio import typing from unittest.mock import AsyncMock, Mock, patch +import zoneinfo import pytest from zigpy.application import ControllerApplication @@ -16,7 +17,7 @@ from homeassistant.components.zha.const import ( CONF_USB_PATH, DOMAIN, ) -from homeassistant.components.zha.helpers import get_zha_data +from homeassistant.components.zha.helpers import get_zha_data, get_zha_gateway from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, MAJOR_VERSION, @@ -288,3 +289,23 @@ async def test_shutdown_on_ha_stop( await hass.async_block_till_done() assert len(mock_shutdown.mock_calls) == 1 + + +async def test_timezone_update( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_zigpy_connect: ControllerApplication, +) -> None: + """Test that the ZHA gateway timezone is updated when HA timezone changes.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + gateway = get_zha_gateway(hass) + + assert hass.config.time_zone == "US/Pacific" + assert gateway.config.local_timezone == zoneinfo.ZoneInfo("US/Pacific") + + await hass.config.async_update(time_zone="America/New_York") + + assert hass.config.time_zone == "America/New_York" + assert gateway.config.local_timezone == zoneinfo.ZoneInfo("America/New_York") From 228db1c063c3c3e678df83a0e15953ece952f9db Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 9 Aug 2024 17:18:42 +0200 Subject: [PATCH 0515/1635] Support action YAML syntax in old-style notify groups (#123457) --- homeassistant/components/group/notify.py | 33 ++++++++++++++++++-- tests/components/group/test_notify.py | 39 ++++++++++++++++++++++-- 2 files changed, 67 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/group/notify.py b/homeassistant/components/group/notify.py index 8294b55be5e..ecbfec0bdb8 100644 --- a/homeassistant/components/group/notify.py +++ b/homeassistant/components/group/notify.py @@ -22,8 +22,9 @@ from homeassistant.components.notify import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_SERVICE, + CONF_ACTION, CONF_ENTITIES, + CONF_SERVICE, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant, callback @@ -36,11 +37,37 @@ from .entity import GroupEntity CONF_SERVICES = "services" + +def _backward_compat_schema(value: Any | None) -> Any: + """Backward compatibility for notify service schemas.""" + + if not isinstance(value, dict): + return value + + # `service` 
has been renamed to `action` + if CONF_SERVICE in value: + if CONF_ACTION in value: + raise vol.Invalid( + "Cannot specify both 'service' and 'action'. Please use 'action' only." + ) + value[CONF_ACTION] = value.pop(CONF_SERVICE) + + return value + + PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( { vol.Required(CONF_SERVICES): vol.All( cv.ensure_list, - [{vol.Required(ATTR_SERVICE): cv.slug, vol.Optional(ATTR_DATA): dict}], + [ + vol.All( + _backward_compat_schema, + { + vol.Required(CONF_ACTION): cv.slug, + vol.Optional(ATTR_DATA): dict, + }, + ) + ], ) } ) @@ -88,7 +115,7 @@ class GroupNotifyPlatform(BaseNotificationService): tasks.append( asyncio.create_task( self.hass.services.async_call( - DOMAIN, entity[ATTR_SERVICE], sending_payload, blocking=True + DOMAIN, entity[CONF_ACTION], sending_payload, blocking=True ) ) ) diff --git a/tests/components/group/test_notify.py b/tests/components/group/test_notify.py index 2595b211dae..bbf2d98b492 100644 --- a/tests/components/group/test_notify.py +++ b/tests/components/group/test_notify.py @@ -122,7 +122,7 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No "services": [ {"service": "test_service1"}, { - "service": "test_service2", + "action": "test_service2", "data": { "target": "unnamed device", "data": {"test": "message", "default": "default"}, @@ -202,6 +202,41 @@ async def test_send_message_with_data(hass: HomeAssistant, tmp_path: Path) -> No ) +async def test_invalid_configuration( + hass: HomeAssistant, tmp_path: Path, caplog: pytest.LogCaptureFixture +) -> None: + """Test failing to set up group with an invalid configuration.""" + assert await async_setup_component( + hass, + "group", + {}, + ) + await hass.async_block_till_done() + + group_setup = [ + { + "platform": "group", + "name": "My invalid notification group", + "services": [ + { + "service": "test_service1", + "action": "test_service2", + "data": { + "target": "unnamed device", + "data": {"test": "message", "default": "default"}, + }, + }, + ], + } + ] + await help_setup_notify(hass, tmp_path, {"service1": 1, "service2": 2}, group_setup) + assert not hass.services.has_service("notify", "my_invalid_notification_group") + assert ( + "Invalid config for 'notify' from integration 'group':" + " Cannot specify both 'service' and 'action'." 
in caplog.text + ) + + async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: """Verify we can reload the notify service.""" assert await async_setup_component( @@ -219,7 +254,7 @@ async def test_reload_notify(hass: HomeAssistant, tmp_path: Path) -> None: { "name": "group_notify", "platform": "group", - "services": [{"service": "test_service1"}], + "services": [{"action": "test_service1"}], } ], ) From 85cbc2437c78dce62f92c8d7bb1b6ea2fe01c79a Mon Sep 17 00:00:00 2001 From: David Knowles Date: Fri, 9 Aug 2024 11:25:25 -0400 Subject: [PATCH 0516/1635] Bump pydrawise to 2024.8.0 (#123461) --- homeassistant/components/hydrawise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index c6f4d7d8dcd..9b733cb73d0 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/hydrawise", "iot_class": "cloud_polling", "loggers": ["pydrawise"], - "requirements": ["pydrawise==2024.6.4"] + "requirements": ["pydrawise==2024.8.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 21a3ad0e3f6..25adc9fade9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1810,7 +1810,7 @@ pydiscovergy==3.0.1 pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.6.4 +pydrawise==2024.8.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c1254d775d1..a51ec2d936a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1451,7 +1451,7 @@ pydexcom==0.2.3 pydiscovergy==3.0.1 # homeassistant.components.hydrawise -pydrawise==2024.6.4 +pydrawise==2024.8.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 From 572293fb8b7ae229be1c6c64f057c2819cd93bc2 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 9 Aug 2024 10:27:16 -0500 Subject: [PATCH 0517/1635] Bump uiprotect to 6.0.0 (#123402) --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index afc4b9a06e6..2843faa52bc 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==5.4.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.0.0", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 25adc9fade9..96da7411d27 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2816,7 +2816,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==5.4.0 +uiprotect==6.0.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a51ec2d936a..bb44cfdeb79 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2220,7 +2220,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==5.4.0 +uiprotect==6.0.0 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From 6e1978971a7d9a2765e2d474b5cadf810fc08e27 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 9 Aug 2024 10:33:48 -0500 Subject: [PATCH 0518/1635] Bump PyYAML to 6.0.2 (#123466) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 7c0255b9eae..7a44cf27677 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -52,7 +52,7 @@ pyserial==3.5 python-slugify==8.0.4 PyTurboJPEG==1.7.1 pyudev==0.24.1 -PyYAML==6.0.1 +PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 diff --git a/pyproject.toml b/pyproject.toml index 07af385dce4..a0012520758 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,7 @@ dependencies = [ "pip>=21.3.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", - "PyYAML==6.0.1", + "PyYAML==6.0.2", "requests==2.32.3", "SQLAlchemy==2.0.31", "typing-extensions>=4.12.2,<5.0", diff --git a/requirements.txt b/requirements.txt index 40f2165a61a..a98b174be1e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,7 +32,7 @@ packaging>=23.1 pip>=21.3.1 psutil-home-assistant==0.0.1 python-slugify==8.0.4 -PyYAML==6.0.1 +PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 From b445517244f06c5589ea22f38c831cfbd9cc1fc6 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 9 Aug 2024 10:34:21 -0500 Subject: [PATCH 0519/1635] Bump orjson to 3.10.7 (#123465) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 7a44cf27677..0eedc043527 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -38,7 +38,7 @@ ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.6 +orjson==3.10.7 packaging>=23.1 paho-mqtt==1.6.1 Pillow==10.4.0 diff --git a/pyproject.toml b/pyproject.toml index a0012520758..cb928c736a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ dependencies = [ "cryptography==43.0.0", "Pillow==10.4.0", "pyOpenSSL==24.2.1", - "orjson==3.10.6", + "orjson==3.10.7", "packaging>=23.1", "pip>=21.3.1", "psutil-home-assistant==0.0.1", diff --git a/requirements.txt b/requirements.txt index a98b174be1e..7efddf5c765 100644 --- a/requirements.txt +++ b/requirements.txt @@ -27,7 +27,7 @@ PyJWT==2.9.0 cryptography==43.0.0 Pillow==10.4.0 pyOpenSSL==24.2.1 -orjson==3.10.6 +orjson==3.10.7 packaging>=23.1 pip>=21.3.1 psutil-home-assistant==0.0.1 From 618efdb3267e5f221828e6bc4810edd9de7f49e7 Mon Sep 17 00:00:00 2001 From: yangqian <5144644+yangqian@users.noreply.github.com> Date: Sat, 10 Aug 2024 00:25:53 +0800 Subject: [PATCH 0520/1635] Bump chacha20poly1305-reuseable to 0.13.2 (#123471) Co-authored-by: J. Nick Koston --- homeassistant/components/homekit/manifest.json | 1 + requirements_all.txt | 3 +++ requirements_test_all.txt | 3 +++ 3 files changed, 7 insertions(+) diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index 17d1237e579..8bbf6b677a9 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -10,6 +10,7 @@ "loggers": ["pyhap"], "requirements": [ "HAP-python==4.9.1", + "chacha20poly1305-reuseable==0.13.2", "fnv-hash-fast==0.5.0", "PyQRCode==1.2.1", "base36==0.1.1" diff --git a/requirements_all.txt b/requirements_all.txt index 96da7411d27..f064223e0a5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -651,6 +651,9 @@ cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 +# homeassistant.components.homekit +chacha20poly1305-reuseable==0.13.2 + # homeassistant.components.cisco_mobility_express ciscomobilityexpress==0.3.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bb44cfdeb79..bd916af36a7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -562,6 +562,9 @@ cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 +# homeassistant.components.homekit +chacha20poly1305-reuseable==0.13.2 + # homeassistant.components.coinbase coinbase-advanced-py==1.2.2 From acda7bc5c49269c4e530808f75012e4c9e3861c1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 9 Aug 2024 11:50:05 -0500 Subject: [PATCH 0521/1635] Bump uiprotect to 6.0.1 (#123481) --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 2843faa52bc..93536d1ad1b 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.0.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.0.1", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index f064223e0a5..51710d80f8c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2819,7 +2819,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.0.0 +uiprotect==6.0.1 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bd916af36a7..bf827fd9fad 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2223,7 +2223,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.0.0 +uiprotect==6.0.1 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From 1ad1a2d51e846c8d2509bce030f8f1b13b216f11 Mon Sep 17 00:00:00 2001 From: Steve Easley Date: Fri, 9 Aug 2024 12:51:50 -0400 Subject: [PATCH 0522/1635] Bump pyjvcprojector to 1.0.12 to fix blocking call (#123473) --- homeassistant/components/jvc_projector/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/jvc_projector/manifest.json b/homeassistant/components/jvc_projector/manifest.json index d3e1bf3d940..5d83e937494 100644 --- a/homeassistant/components/jvc_projector/manifest.json +++ b/homeassistant/components/jvc_projector/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["jvcprojector"], - "requirements": ["pyjvcprojector==1.0.11"] + "requirements": ["pyjvcprojector==1.0.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 51710d80f8c..e39f34e2c81 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1948,7 +1948,7 @@ pyisy==3.1.14 pyitachip2ir==0.0.7 # homeassistant.components.jvc_projector -pyjvcprojector==1.0.11 +pyjvcprojector==1.0.12 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bf827fd9fad..f5d00968f1c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1556,7 +1556,7 @@ pyiss==1.0.1 pyisy==3.1.14 # homeassistant.components.jvc_projector -pyjvcprojector==1.0.11 +pyjvcprojector==1.0.12 # homeassistant.components.kaleidescape pykaleidescape==1.0.1 From 8e34a0d3c71c54db163913b4bad20b39889c30fe Mon Sep 17 00:00:00 2001 From: Jake Martin Date: Fri, 9 Aug 2024 17:52:07 +0100 Subject: [PATCH 0523/1635] Bump monzopy to 1.3.2 (#123480) --- homeassistant/components/monzo/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/monzo/manifest.json b/homeassistant/components/monzo/manifest.json index 8b816457004..d9d17eb8abc 100644 --- a/homeassistant/components/monzo/manifest.json +++ b/homeassistant/components/monzo/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/monzo", "iot_class": "cloud_polling", - "requirements": ["monzopy==1.3.0"] + "requirements": ["monzopy==1.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index e39f34e2c81..ddae9993474 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1357,7 +1357,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.0 +monzopy==1.3.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f5d00968f1c..e20809eab88 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1126,7 +1126,7 @@ moat-ble==0.1.1 moehlenhoff-alpha2==1.3.1 # homeassistant.components.monzo -monzopy==1.3.0 +monzopy==1.3.2 # homeassistant.components.mopeka mopeka-iot-ble==0.8.0 From 57da71c537ee4d08b8089cca87e96a730dec2421 Mon Sep 17 00:00:00 2001 From: YogevBokobza Date: Fri, 9 Aug 2024 20:04:11 +0300 Subject: [PATCH 0524/1635] Bump aioswitcher to 4.0.0 (#123260) * Bump aioswitcher to 4.0.0 * switcher fix version * swithcer fix test * switcher fix tests --- homeassistant/components/switcher_kis/button.py | 1 + homeassistant/components/switcher_kis/climate.py | 1 + homeassistant/components/switcher_kis/cover.py | 3 ++- homeassistant/components/switcher_kis/manifest.json | 2 +- homeassistant/components/switcher_kis/switch.py | 1 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/switcher_kis/consts.py | 8 ++++++++ tests/components/switcher_kis/test_cover.py | 2 +- tests/components/switcher_kis/test_diagnostics.py | 3 ++- 10 files changed, 19 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/switcher_kis/button.py b/homeassistant/components/switcher_kis/button.py index b770c48c11c..2e559ba9f3b 100644 --- a/homeassistant/components/switcher_kis/button.py +++ b/homeassistant/components/switcher_kis/button.py @@ -137,6 +137,7 @@ class SwitcherThermostatButtonEntity( try: async with SwitcherType2Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index e6267e15305..511630251f2 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -169,6 +169,7 @@ class SwitcherClimateEntity( try: async with SwitcherType2Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index 258af3e1d5e..19c40d05e63 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -29,7 +29,7 @@ from .coordinator import SwitcherDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) API_SET_POSITON = "set_position" -API_STOP = "stop" +API_STOP = "stop_shutter" async def async_setup_entry( @@ -98,6 +98,7 @@ class SwitcherCoverEntity( try: async with SwitcherType2Api( + self.coordinator.data.device_type, 
self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 52b218fce9c..6aedd2f5670 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_push", "loggers": ["aioswitcher"], "quality_scale": "platinum", - "requirements": ["aioswitcher==3.4.3"], + "requirements": ["aioswitcher==4.0.0"], "single_config_entry": true } diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index aac5da10ae1..c667a6dd473 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -117,6 +117,7 @@ class SwitcherBaseSwitchEntity( try: async with SwitcherType1Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/requirements_all.txt b/requirements_all.txt index ddae9993474..9258a46aa23 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -374,7 +374,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==3.4.3 +aioswitcher==4.0.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e20809eab88..fabd949fff5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -356,7 +356,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==3.4.3 +aioswitcher==4.0.0 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index 3c5f3ff241e..ffeef64b5d7 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -38,6 +38,10 @@ DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" +DUMMY_TOKEN_NEEDED1 = False +DUMMY_TOKEN_NEEDED2 = False +DUMMY_TOKEN_NEEDED3 = False +DUMMY_TOKEN_NEEDED4 = False DUMMY_PHONE_ID = "1234" DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -60,6 +64,7 @@ DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DUMMY_IP_ADDRESS1, DUMMY_MAC_ADDRESS1, DUMMY_DEVICE_NAME1, + DUMMY_TOKEN_NEEDED1, DUMMY_POWER_CONSUMPTION1, DUMMY_ELECTRIC_CURRENT1, ) @@ -72,6 +77,7 @@ DUMMY_WATER_HEATER_DEVICE = SwitcherWaterHeater( DUMMY_IP_ADDRESS2, DUMMY_MAC_ADDRESS2, DUMMY_DEVICE_NAME2, + DUMMY_TOKEN_NEEDED2, DUMMY_POWER_CONSUMPTION2, DUMMY_ELECTRIC_CURRENT2, DUMMY_REMAINING_TIME, @@ -86,6 +92,7 @@ DUMMY_SHUTTER_DEVICE = SwitcherShutter( DUMMY_IP_ADDRESS4, DUMMY_MAC_ADDRESS4, DUMMY_DEVICE_NAME4, + DUMMY_TOKEN_NEEDED4, DUMMY_POSITION, DUMMY_DIRECTION, ) @@ -98,6 +105,7 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( DUMMY_IP_ADDRESS3, DUMMY_MAC_ADDRESS3, DUMMY_DEVICE_NAME3, + DUMMY_TOKEN_NEEDED3, DUMMY_THERMOSTAT_MODE, DUMMY_TEMPERATURE, DUMMY_TARGET_TEMPERATURE, diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 57e2f98915e..c228da6b556 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -105,7 +105,7 @@ async def test_cover( # Test stop with patch( - 
"homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop" + "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index c8df4dd0b83..89bcefa5138 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -40,7 +40,7 @@ async def test_diagnostics( "__type": "", "repr": ( ")>" + "1, , False)>" ), }, "electric_current": 12.8, @@ -50,6 +50,7 @@ async def test_diagnostics( "name": "Heater FE12", "power_consumption": 2780, "remaining_time": "01:29:32", + "token_needed": False, } ], "entry": { From 86c4ded4cde544b6345c060589c3184b6245f2d7 Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Fri, 9 Aug 2024 19:36:58 +0200 Subject: [PATCH 0525/1635] Fix startup blocked by bluesound integration (#123483) --- homeassistant/components/bluesound/media_player.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index dc09feaed63..c1b662fcddc 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -317,21 +317,24 @@ class BluesoundPlayer(MediaPlayerEntity): await self.async_update_status() except (TimeoutError, ClientError): - _LOGGER.error("Node %s:%s is offline, retrying later", self.name, self.port) + _LOGGER.error("Node %s:%s is offline, retrying later", self.host, self.port) await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) self.start_polling() except CancelledError: - _LOGGER.debug("Stopping the polling of node %s:%s", self.name, self.port) + _LOGGER.debug("Stopping the polling of node %s:%s", self.host, self.port) except Exception: - _LOGGER.exception("Unexpected error in %s:%s", self.name, self.port) + _LOGGER.exception("Unexpected error in %s:%s", self.host, self.port) raise async def async_added_to_hass(self) -> None: """Start the polling task.""" await super().async_added_to_hass() - self._polling_task = self.hass.async_create_task(self._start_poll_command()) + self._polling_task = self.hass.async_create_background_task( + self._start_poll_command(), + name=f"bluesound.polling_{self.host}:{self.port}", + ) async def async_will_remove_from_hass(self) -> None: """Stop the polling task.""" From 65f33f58e9e88d04a82a879333128d85bfdd76c5 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 9 Aug 2024 20:22:16 +0200 Subject: [PATCH 0526/1635] Bump motionblinds to 0.6.24 (#123395) --- homeassistant/components/motion_blinds/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/motion_blinds/manifest.json b/homeassistant/components/motion_blinds/manifest.json index 0f9241db7b4..e1e12cf6729 100644 --- a/homeassistant/components/motion_blinds/manifest.json +++ b/homeassistant/components/motion_blinds/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/motion_blinds", "iot_class": "local_push", "loggers": ["motionblinds"], - "requirements": ["motionblinds==0.6.23"] + "requirements": ["motionblinds==0.6.24"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9258a46aa23..0b760d074e0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1363,7 +1363,7 @@ 
monzopy==1.3.2 mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.23 +motionblinds==0.6.24 # homeassistant.components.motionblinds_ble motionblindsble==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fabd949fff5..6ee73516726 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1132,7 +1132,7 @@ monzopy==1.3.2 mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.23 +motionblinds==0.6.24 # homeassistant.components.motionblinds_ble motionblindsble==0.1.0 From ac28d34ad5a68e9a0c485c80f44c0319ee1b45ca Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Fri, 9 Aug 2024 20:23:00 +0200 Subject: [PATCH 0527/1635] Improve test coverage for AVM Fritz!Smarthome (#122974) --- homeassistant/components/fritzbox/light.py | 5 +--- tests/components/fritzbox/__init__.py | 13 +++++++++ tests/components/fritzbox/test_climate.py | 19 +++++++++++- tests/components/fritzbox/test_cover.py | 28 ++++++++++++++++-- tests/components/fritzbox/test_init.py | 30 +++++++++++++++++++ tests/components/fritzbox/test_light.py | 34 +++++++++++++++++----- 6 files changed, 115 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index 689e64c709a..65446dc3e04 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -72,11 +72,8 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): return self.data.level # type: ignore [no-any-return] @property - def hs_color(self) -> tuple[float, float] | None: + def hs_color(self) -> tuple[float, float]: """Return the hs color value.""" - if self.data.color_mode != COLOR_MODE: - return None - hue = self.data.hue saturation = self.data.saturation diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index 61312805e91..09e0aeaee51 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -115,6 +115,13 @@ class FritzDeviceClimateMock(FritzEntityBaseMock): scheduled_preset = PRESET_ECO +class FritzDeviceClimateWithoutTempSensorMock(FritzDeviceClimateMock): + """Mock of a AVM Fritz!Box climate device without exposing temperature sensor.""" + + temperature = None + has_temperature_sensor = False + + class FritzDeviceSensorMock(FritzEntityBaseMock): """Mock of a AVM Fritz!Box sensor device.""" @@ -187,3 +194,9 @@ class FritzDeviceCoverMock(FritzEntityBaseMock): has_thermostat = False has_blind = True levelpercentage = 0 + + +class FritzDeviceCoverUnknownPositionMock(FritzDeviceCoverMock): + """Mock of a AVM Fritz!Box cover device with unknown position.""" + + levelpercentage = None diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 853c09c534b..358eeaa714e 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -46,7 +46,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError import homeassistant.util.dt as dt_util -from . import FritzDeviceClimateMock, set_devices, setup_config_entry +from . 
import ( + FritzDeviceClimateMock, + FritzDeviceClimateWithoutTempSensorMock, + set_devices, + setup_config_entry, +) from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -162,6 +167,18 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.state == PRESET_COMFORT +async def test_hkr_wo_temperature_sensor(hass: HomeAssistant, fritz: Mock) -> None: + """Test hkr without exposing dedicated temperature sensor data block.""" + device = FritzDeviceClimateWithoutTempSensorMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 18.0 + + async def test_target_temperature_on(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device on.""" device = FritzDeviceClimateMock() diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index 6c301fc8f46..6626db2bccf 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -3,7 +3,12 @@ from datetime import timedelta from unittest.mock import Mock, call -from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN, + STATE_OPEN, +) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -12,11 +17,17 @@ from homeassistant.const import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, + STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from . import FritzDeviceCoverMock, set_devices, setup_config_entry +from . 
import ( + FritzDeviceCoverMock, + FritzDeviceCoverUnknownPositionMock, + set_devices, + setup_config_entry, +) from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -33,9 +44,22 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: state = hass.states.get(ENTITY_ID) assert state + assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 100 +async def test_unknown_position(hass: HomeAssistant, fritz: Mock) -> None: + """Test cover with unknown position.""" + device = FritzDeviceCoverUnknownPositionMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + assert state.state == STATE_UNKNOWN + + async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None: """Test opening the cover.""" device = FritzDeviceCoverMock() diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py index c84498b1560..56e3e7a5738 100644 --- a/tests/components/fritzbox/test_init.py +++ b/tests/components/fritzbox/test_init.py @@ -18,6 +18,7 @@ from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, + EVENT_HOMEASSISTANT_STOP, STATE_UNAVAILABLE, UnitOfTemperature, ) @@ -199,6 +200,35 @@ async def test_unload_remove(hass: HomeAssistant, fritz: Mock) -> None: assert state is None +async def test_logout_on_stop(hass: HomeAssistant, fritz: Mock) -> None: + """Test we log out from fritzbox when Home Assistants stops.""" + fritz().get_devices.return_value = [FritzDeviceSwitchMock()] + entity_id = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}" + + entry = MockConfigEntry( + domain=FB_DOMAIN, + data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], + unique_id=entity_id, + ) + entry.add_to_hass(hass) + + config_entries = hass.config_entries.async_entries(FB_DOMAIN) + assert len(config_entries) == 1 + assert entry is config_entries[0] + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + state = hass.states.get(entity_id) + assert state + + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await hass.async_block_till_done() + + assert fritz().logout.call_count == 1 + + async def test_remove_device( hass: HomeAssistant, device_registry: dr.DeviceRegistry, diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index 45920c7c3ee..3cafa933fa3 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -3,6 +3,7 @@ from datetime import timedelta from unittest.mock import Mock, call +import pytest from requests.exceptions import HTTPError from homeassistant.components.fritzbox.const import ( @@ -12,12 +13,14 @@ from homeassistant.components.fritzbox.const import ( ) from homeassistant.components.light import ( ATTR_BRIGHTNESS, + ATTR_COLOR_MODE, ATTR_COLOR_TEMP_KELVIN, ATTR_HS_COLOR, ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, DOMAIN, + ColorMode, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -56,9 +59,11 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2700 assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700 assert 
state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 + assert state.attributes[ATTR_HS_COLOR] == (28.395, 65.723) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -99,6 +104,9 @@ async def test_setup_non_color_non_level(hass: HomeAssistant, fritz: Mock) -> No assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" assert ATTR_BRIGHTNESS not in state.attributes assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["onoff"] + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.ONOFF + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None + assert state.attributes.get(ATTR_HS_COLOR) is None async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: @@ -120,6 +128,8 @@ async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.HS + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None assert state.attributes[ATTR_BRIGHTNESS] == 100 assert state.attributes[ATTR_HS_COLOR] == (100, 70) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -183,16 +193,16 @@ async def test_turn_on_color_unsupported_api_method( device.get_colors.return_value = { "Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")] } - mockresponse = Mock() - mockresponse.status_code = 400 - - error = HTTPError("Bad Request") - error.response = mockresponse - device.set_unmapped_color.side_effect = error - assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) + + # test fallback to `setcolor` + error = HTTPError("Bad Request") + error.response = Mock() + error.response.status_code = 400 + device.set_unmapped_color.side_effect = error + await hass.services.async_call( DOMAIN, SERVICE_TURN_ON, @@ -205,6 +215,16 @@ async def test_turn_on_color_unsupported_api_method( assert device.set_level.call_args_list == [call(100)] assert device.set_color.call_args_list == [call((100, 70))] + # test for unknown error + error.response.status_code = 500 + with pytest.raises(HTTPError, match="Bad Request"): + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, + True, + ) + async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device off.""" From eb1c2f5d9fe8e1381904f7c45b68f8a146bb1b82 Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Fri, 9 Aug 2024 20:30:39 +0200 Subject: [PATCH 0528/1635] Update frontend to 20240809.0 (#123485) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index de423ee9ac6..035b087e481 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240806.1"] + "requirements": ["home-assistant-frontend==20240809.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0eedc043527..9a23a5e42e0 100644 --- 
a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.1.3 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240806.1 +home-assistant-frontend==20240809.0 home-assistant-intents==2024.8.7 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 0b760d074e0..a4a7b576215 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1099,7 +1099,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240806.1 +home-assistant-frontend==20240809.0 # homeassistant.components.conversation home-assistant-intents==2024.8.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6ee73516726..a991cb4cc2a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -925,7 +925,7 @@ hole==0.8.0 holidays==0.53 # homeassistant.components.frontend -home-assistant-frontend==20240806.1 +home-assistant-frontend==20240809.0 # homeassistant.components.conversation home-assistant-intents==2024.8.7 From 2b95a642fc030157ec712044530acebd046d14d0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 9 Aug 2024 13:32:11 -0500 Subject: [PATCH 0529/1635] Remove august IPv6 workaround (#123408) --- homeassistant/components/august/util.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/homeassistant/components/august/util.py b/homeassistant/components/august/util.py index 47482100794..6972913ba22 100644 --- a/homeassistant/components/august/util.py +++ b/homeassistant/components/august/util.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import datetime, timedelta from functools import partial -import socket import aiohttp from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType @@ -26,14 +25,7 @@ def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSess # Create an aiohttp session instead of using the default one since the # default one is likely to trigger august's WAF if another integration # is also using Cloudflare - # - # The family is set to AF_INET because IPv6 keeps coming up as an issue - # see https://github.com/home-assistant/core/issues/97146 - # - # When https://github.com/aio-libs/aiohttp/issues/4451 is implemented - # we can allow IPv6 again - # - return aiohttp_client.async_create_clientsession(hass, family=socket.AF_INET) + return aiohttp_client.async_create_clientsession(hass) def retrieve_time_based_activity( From ec9944b92ae41c503932bde0d40c3e8b6d6b9ed6 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sat, 10 Aug 2024 04:33:13 +1000 Subject: [PATCH 0530/1635] Add missing logger to Tessie (#123413) --- homeassistant/components/tessie/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/tessie/manifest.json b/homeassistant/components/tessie/manifest.json index 6059072c239..c921921a0ca 100644 --- a/homeassistant/components/tessie/manifest.json +++ b/homeassistant/components/tessie/manifest.json @@ -5,7 +5,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/tessie", "iot_class": "cloud_polling", - "loggers": ["tessie"], + "loggers": ["tessie", "tesla-fleet-api"], "quality_scale": "platinum", "requirements": ["tessie-api==0.1.1", "tesla-fleet-api==0.7.3"] } From 94af95c95b1e5c24edcc754deee129ce0d778e94 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sat, 10 Aug 2024 
01:25:38 +0200 Subject: [PATCH 0531/1635] Post merge review for Proximity (#123500) remove commented code --- tests/components/proximity/test_init.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/tests/components/proximity/test_init.py b/tests/components/proximity/test_init.py index 456d6577c04..37573483b74 100644 --- a/tests/components/proximity/test_init.py +++ b/tests/components/proximity/test_init.py @@ -590,20 +590,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( 1, ) - # assert await async_setup_component( - # hass, - # DOMAIN, - # { - # "proximity": { - # "home": { - # "ignored_zones": ["zone.work"], - # "devices": ["device_tracker.test1", "device_tracker.test2"], - # "zone": "home", - # } - # } - # }, - # ) - hass.states.async_set( "device_tracker.test1", "not_home", From 750bce2b8659c7824326884d4f91c022a4815e5f Mon Sep 17 00:00:00 2001 From: dupondje Date: Sat, 10 Aug 2024 10:40:11 +0200 Subject: [PATCH 0532/1635] Also migrate dsmr entries for devices with correct serial (#123407) dsmr: also migrate entries for devices with correct serial When the dsmr code could not find the serial_nr for the gas meter, it creates the gas meter device with the entry_id as identifier. But when there is a correct serial_nr, it will use that as identifier for the dsmr gas device. Now the migration code did not take this into account, so migration to the new name failed since it didn't look for the device with correct serial_nr. This commit fixes this and adds a test for this. --- homeassistant/components/dsmr/sensor.py | 67 +++++++------- tests/components/dsmr/test_mbus_migration.py | 95 ++++++++++++++++++++ 2 files changed, 129 insertions(+), 33 deletions(-) diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index b298ed5bfc0..77c40c5c292 100644 --- a/homeassistant/components/dsmr/sensor.py +++ b/homeassistant/components/dsmr/sensor.py @@ -431,41 +431,42 @@ def rename_old_gas_to_mbus( ) -> None: """Rename old gas sensor to mbus variant.""" dev_reg = dr.async_get(hass) - device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, entry.entry_id)}) - if device_entry_v1 is not None: - device_id = device_entry_v1.id + for dev_id in (mbus_device_id, entry.entry_id): + device_entry_v1 = dev_reg.async_get_device(identifiers={(DOMAIN, dev_id)}) + if device_entry_v1 is not None: + device_id = device_entry_v1.id - ent_reg = er.async_get(hass) - entries = er.async_entries_for_device(ent_reg, device_id) + ent_reg = er.async_get(hass) + entries = er.async_entries_for_device(ent_reg, device_id) - for entity in entries: - if entity.unique_id.endswith( - "belgium_5min_gas_meter_reading" - ) or entity.unique_id.endswith("hourly_gas_meter_reading"): - try: - ent_reg.async_update_entity( - entity.entity_id, - new_unique_id=mbus_device_id, - device_id=mbus_device_id, - ) - except ValueError: - LOGGER.debug( - "Skip migration of %s because it already exists", - entity.entity_id, - ) - else: - LOGGER.debug( - "Migrated entity %s from unique id %s to %s", - entity.entity_id, - entity.unique_id, - mbus_device_id, - ) - # Cleanup old device - dev_entities = er.async_entries_for_device( - ent_reg, device_id, include_disabled_entities=True - ) - if not dev_entities: - dev_reg.async_remove_device(device_id) + for entity in entries: + if entity.unique_id.endswith( + "belgium_5min_gas_meter_reading" + ) or entity.unique_id.endswith("hourly_gas_meter_reading"): + try: + ent_reg.async_update_entity( + entity.entity_id, + 
new_unique_id=mbus_device_id, + device_id=mbus_device_id, + ) + except ValueError: + LOGGER.debug( + "Skip migration of %s because it already exists", + entity.entity_id, + ) + else: + LOGGER.debug( + "Migrated entity %s from unique id %s to %s", + entity.entity_id, + entity.unique_id, + mbus_device_id, + ) + # Cleanup old device + dev_entities = er.async_entries_for_device( + ent_reg, device_id, include_disabled_entities=True + ) + if not dev_entities: + dev_reg.async_remove_device(device_id) def is_supported_description( diff --git a/tests/components/dsmr/test_mbus_migration.py b/tests/components/dsmr/test_mbus_migration.py index 20b3d253f39..7c7d182aa97 100644 --- a/tests/components/dsmr/test_mbus_migration.py +++ b/tests/components/dsmr/test_mbus_migration.py @@ -219,6 +219,101 @@ async def test_migrate_hourly_gas_to_mbus( ) +async def test_migrate_gas_with_devid_to_mbus( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, + dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock], +) -> None: + """Test migration of unique_id.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + mock_entry = MockConfigEntry( + domain=DOMAIN, + unique_id="/dev/ttyUSB0", + data={ + "port": "/dev/ttyUSB0", + "dsmr_version": "5B", + "serial_id": "1234", + "serial_id_gas": "37464C4F32313139303333373331", + }, + options={ + "time_between_update": 0, + }, + ) + + mock_entry.add_to_hass(hass) + + old_unique_id = "37464C4F32313139303333373331_belgium_5min_gas_meter_reading" + + device = device_registry.async_get_or_create( + config_entry_id=mock_entry.entry_id, + identifiers={(DOMAIN, "37464C4F32313139303333373331")}, + name="Gas Meter", + ) + await hass.async_block_till_done() + + entity: er.RegistryEntry = entity_registry.async_get_or_create( + suggested_object_id="gas_meter_reading", + disabled_by=None, + domain=SENSOR_DOMAIN, + platform=DOMAIN, + device_id=device.id, + unique_id=old_unique_id, + config_entry=mock_entry, + ) + assert entity.unique_id == old_unique_id + await hass.async_block_till_done() + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "003", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_EQUIPMENT_IDENTIFIER, + CosemObject( + (0, 1), + [{"value": "37464C4F32313139303333373331", "unit": ""}], + ), + "MBUS_EQUIPMENT_IDENTIFIER", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "m3"}, + ], + ), + "MBUS_METER_READING", + ) + + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) + + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + assert ( + entity_registry.async_get_entity_id(SENSOR_DOMAIN, DOMAIN, old_unique_id) + is None + ) + assert ( + entity_registry.async_get_entity_id( + SENSOR_DOMAIN, DOMAIN, "37464C4F32313139303333373331" + ) + == "sensor.gas_meter_reading" + ) + + async def test_migrate_gas_to_mbus_exists( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 089d855c47e8783ecbba1d1d9dd740a583d8b615 Mon Sep 17 00:00:00 2001 From: Willem-Jan van Rootselaar Date: Sat, 10 Aug 2024 12:28:48 +0200 Subject: [PATCH 0533/1635] Bump bsblan to 
0.5.19 (#123515) * bump bsblan lib version * chore: Update bsblan diagnostics to use to_dict() instead of dict() method --- homeassistant/components/bsblan/diagnostics.py | 6 +++--- homeassistant/components/bsblan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/bsblan/conftest.py | 6 +++--- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/bsblan/diagnostics.py b/homeassistant/components/bsblan/diagnostics.py index 0bceed0bf23..a24082fd698 100644 --- a/homeassistant/components/bsblan/diagnostics.py +++ b/homeassistant/components/bsblan/diagnostics.py @@ -17,7 +17,7 @@ async def async_get_config_entry_diagnostics( """Return diagnostics for a config entry.""" data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id] return { - "info": data.info.dict(), - "device": data.device.dict(), - "state": data.coordinator.data.dict(), + "info": data.info.to_dict(), + "device": data.device.to_dict(), + "state": data.coordinator.data.to_dict(), } diff --git a/homeassistant/components/bsblan/manifest.json b/homeassistant/components/bsblan/manifest.json index 3f58fbe364c..fb3c9b49e4c 100644 --- a/homeassistant/components/bsblan/manifest.json +++ b/homeassistant/components/bsblan/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["bsblan"], - "requirements": ["python-bsblan==0.5.18"] + "requirements": ["python-bsblan==0.5.19"] } diff --git a/requirements_all.txt b/requirements_all.txt index a4a7b576215..c5daef2eda9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2253,7 +2253,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.5.18 +python-bsblan==0.5.19 # homeassistant.components.clementine python-clementine-remote==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a991cb4cc2a..9ee4e424f33 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1798,7 +1798,7 @@ python-MotionMount==2.0.0 python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==0.5.18 +python-bsblan==0.5.19 # homeassistant.components.ecobee python-ecobee-api==0.2.18 diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 862f3ae1d0c..07ca8b648f3 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -48,11 +48,11 @@ def mock_bsblan() -> Generator[MagicMock]: patch("homeassistant.components.bsblan.config_flow.BSBLAN", new=bsblan_mock), ): bsblan = bsblan_mock.return_value - bsblan.info.return_value = Info.parse_raw(load_fixture("info.json", DOMAIN)) - bsblan.device.return_value = Device.parse_raw( + bsblan.info.return_value = Info.from_json(load_fixture("info.json", DOMAIN)) + bsblan.device.return_value = Device.from_json( load_fixture("device.json", DOMAIN) ) - bsblan.state.return_value = State.parse_raw(load_fixture("state.json", DOMAIN)) + bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) yield bsblan From 5f03589d3e8444785ef3f045c67a9bf9ca9f63ff Mon Sep 17 00:00:00 2001 From: Matt Way Date: Sat, 10 Aug 2024 21:06:29 +1000 Subject: [PATCH 0534/1635] Bump pydaikin to 2.13.2 (#123519) --- homeassistant/components/daikin/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index 
827deb27add..c5cb6064d88 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.1"], + "requirements": ["pydaikin==2.13.2"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index c5daef2eda9..945e239a372 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1792,7 +1792,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.13.1 +pydaikin==2.13.2 # homeassistant.components.danfoss_air pydanfossair==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 9ee4e424f33..441c7aee91f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1442,7 +1442,7 @@ pycoolmasternet-async==0.1.5 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.1 +pydaikin==2.13.2 # homeassistant.components.deconz pydeconz==116 From 5f73c73a8856eabe992f4f0ae296d7ba395b53be Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sat, 10 Aug 2024 13:21:01 +0200 Subject: [PATCH 0535/1635] Improve test coverage for Proximity (#123523) * remove unneccessary type checking * remove unused method after #123158 * test when tracked zone is removed --- .../components/proximity/coordinator.py | 14 ------- tests/components/proximity/test_init.py | 41 +++++++++++++++++++ 2 files changed, 41 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/proximity/coordinator.py b/homeassistant/components/proximity/coordinator.py index 2d32926832a..a8dd85c1523 100644 --- a/homeassistant/components/proximity/coordinator.py +++ b/homeassistant/components/proximity/coordinator.py @@ -13,7 +13,6 @@ from homeassistant.const import ( ATTR_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE, - UnitOfLength, ) from homeassistant.core import ( Event, @@ -27,7 +26,6 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util.location import distance -from homeassistant.util.unit_conversion import DistanceConverter from .const import ( ATTR_DIR_OF_TRAVEL, @@ -145,18 +143,6 @@ class ProximityDataUpdateCoordinator(DataUpdateCoordinator[ProximityData]): }, ) - def convert_legacy(self, value: float | str) -> float | str: - """Round and convert given distance value.""" - if isinstance(value, str): - return value - return round( - DistanceConverter.convert( - value, - UnitOfLength.METERS, - self.unit_of_measurement, - ) - ) - def _calc_distance_to_zone( self, zone: State, diff --git a/tests/components/proximity/test_init.py b/tests/components/proximity/test_init.py index 37573483b74..eeb181e0670 100644 --- a/tests/components/proximity/test_init.py +++ b/tests/components/proximity/test_init.py @@ -879,3 +879,44 @@ async def test_sensor_unique_ids( assert ( entity.unique_id == f"{mock_config.entry_id}_device_tracker.test2_dist_to_zone" ) + + +async def test_tracked_zone_is_removed(hass: HomeAssistant) -> None: + """Test that tracked zone is removed.""" + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + + hass.states.async_set( + "device_tracker.test1", + "home", + {"friendly_name": "test1", "latitude": 2.1, "longitude": 1.1}, + ) + await 
hass.async_block_till_done() + + # check sensor entities + state = hass.states.get("sensor.home_nearest_device") + assert state.state == "test1" + + entity_base_name = "sensor.home_test1" + state = hass.states.get(f"{entity_base_name}_distance") + assert state.state == "0" + state = hass.states.get(f"{entity_base_name}_direction_of_travel") + assert state.state == "arrived" + + # remove tracked zone and move tracked entity + assert hass.states.async_remove("zone.home") + hass.states.async_set( + "device_tracker.test1", + "home", + {"friendly_name": "test1", "latitude": 2.2, "longitude": 1.2}, + ) + await hass.async_block_till_done() + + # check sensor entities + state = hass.states.get("sensor.home_nearest_device") + assert state.state == STATE_UNKNOWN + + entity_base_name = "sensor.home_test1" + state = hass.states.get(f"{entity_base_name}_distance") + assert state.state == STATE_UNAVAILABLE + state = hass.states.get(f"{entity_base_name}_direction_of_travel") + assert state.state == STATE_UNAVAILABLE From 9b678e474bbc36acf6709c9e33c4c90017863135 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Sat, 10 Aug 2024 14:04:27 +0200 Subject: [PATCH 0536/1635] Bump ruff to 0.5.7 (#123531) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9c75edf780c..ed28852fd81 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.6 + rev: v0.5.7 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index a3d38c11a8d..ba54a19da3e 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.5.6 +ruff==0.5.7 yamllint==1.35.1 From cfd2ca3abb597fc782f7b2be14f710051d87a095 Mon Sep 17 00:00:00 2001 From: "David F. 
Mulcahey" Date: Sat, 10 Aug 2024 08:07:08 -0400 Subject: [PATCH 0537/1635] Bump zha lib to 0.0.30 (#123499) --- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index 385b95c8058..bb1480b43e1 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.29"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.30"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index 945e239a372..222de4cd5d0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2992,7 +2992,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.29 +zha==0.0.30 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 441c7aee91f..c9043045431 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2369,7 +2369,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.29 +zha==0.0.30 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 From f02fceed5b6ae53f86aa0f0c4afa6c0fa9b255ae Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Sat, 10 Aug 2024 17:01:17 +0200 Subject: [PATCH 0538/1635] Bumb python-homewizard-energy to 6.2.0 (#123514) --- homeassistant/components/homewizard/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homewizard/manifest.json b/homeassistant/components/homewizard/manifest.json index 474d63e943d..dbad91b1fb8 100644 --- a/homeassistant/components/homewizard/manifest.json +++ b/homeassistant/components/homewizard/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_polling", "loggers": ["homewizard_energy"], "quality_scale": "platinum", - "requirements": ["python-homewizard-energy==v6.1.1"], + "requirements": ["python-homewizard-energy==v6.2.0"], "zeroconf": ["_hwenergy._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 222de4cd5d0..3f0d1f2d99f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2286,7 +2286,7 @@ python-gitlab==1.6.0 python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.1.1 +python-homewizard-energy==v6.2.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c9043045431..d7d98d3e057 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1813,7 +1813,7 @@ python-fullykiosk==0.0.14 python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.1.1 +python-homewizard-energy==v6.2.0 # homeassistant.components.izone python-izone==1.2.9 From 4f8a6979d9916714cb2a47141cd1615d91c7dc26 Mon Sep 17 00:00:00 2001 From: Evgeny <940893+freekode@users.noreply.github.com> Date: Sat, 10 Aug 2024 17:01:26 +0200 Subject: [PATCH 0539/1635] Bump OpenWeatherMap to 0.1.1 (#120178) * add owm modes * fix tests * fix modes * remove sensors * Update homeassistant/components/openweathermap/sensor.py Co-authored-by: Joost Lekkerkerker --------- 
Co-authored-by: Joost Lekkerkerker --- .../components/openweathermap/__init__.py | 7 +-- .../components/openweathermap/const.py | 13 +++-- .../components/openweathermap/coordinator.py | 16 +++++-- .../components/openweathermap/manifest.json | 2 +- .../components/openweathermap/sensor.py | 27 +++++++---- .../components/openweathermap/utils.py | 4 +- .../components/openweathermap/weather.py | 48 +++++++++++++------ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../openweathermap/test_config_flow.py | 26 +++++----- 10 files changed, 97 insertions(+), 50 deletions(-) diff --git a/homeassistant/components/openweathermap/__init__.py b/homeassistant/components/openweathermap/__init__.py index 7aea6aafe20..747b93179bc 100644 --- a/homeassistant/components/openweathermap/__init__.py +++ b/homeassistant/components/openweathermap/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations from dataclasses import dataclass import logging -from pyopenweathermap import OWMClient +from pyopenweathermap import create_owm_client from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -33,6 +33,7 @@ class OpenweathermapData: """Runtime data definition.""" name: str + mode: str coordinator: WeatherUpdateCoordinator @@ -52,7 +53,7 @@ async def async_setup_entry( else: async_delete_issue(hass, entry.entry_id) - owm_client = OWMClient(api_key, mode, lang=language) + owm_client = create_owm_client(api_key, mode, lang=language) weather_coordinator = WeatherUpdateCoordinator( owm_client, latitude, longitude, hass ) @@ -61,7 +62,7 @@ async def async_setup_entry( entry.async_on_unload(entry.add_update_listener(async_update_options)) - entry.runtime_data = OpenweathermapData(name, weather_coordinator) + entry.runtime_data = OpenweathermapData(name, mode, weather_coordinator) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/openweathermap/const.py b/homeassistant/components/openweathermap/const.py index 6c9997fc061..d34125a2405 100644 --- a/homeassistant/components/openweathermap/const.py +++ b/homeassistant/components/openweathermap/const.py @@ -58,10 +58,17 @@ FORECAST_MODE_DAILY = "daily" FORECAST_MODE_FREE_DAILY = "freedaily" FORECAST_MODE_ONECALL_HOURLY = "onecall_hourly" FORECAST_MODE_ONECALL_DAILY = "onecall_daily" -OWM_MODE_V25 = "v2.5" +OWM_MODE_FREE_CURRENT = "current" +OWM_MODE_FREE_FORECAST = "forecast" OWM_MODE_V30 = "v3.0" -OWM_MODES = [OWM_MODE_V30, OWM_MODE_V25] -DEFAULT_OWM_MODE = OWM_MODE_V30 +OWM_MODE_V25 = "v2.5" +OWM_MODES = [ + OWM_MODE_FREE_CURRENT, + OWM_MODE_FREE_FORECAST, + OWM_MODE_V30, + OWM_MODE_V25, +] +DEFAULT_OWM_MODE = OWM_MODE_FREE_CURRENT LANGUAGES = [ "af", diff --git a/homeassistant/components/openweathermap/coordinator.py b/homeassistant/components/openweathermap/coordinator.py index 0f99af5ad64..f7672a1290b 100644 --- a/homeassistant/components/openweathermap/coordinator.py +++ b/homeassistant/components/openweathermap/coordinator.py @@ -86,8 +86,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): """Format the weather response correctly.""" _LOGGER.debug("OWM weather response: %s", weather_report) + current_weather = ( + self._get_current_weather_data(weather_report.current) + if weather_report.current is not None + else {} + ) + return { - ATTR_API_CURRENT: self._get_current_weather_data(weather_report.current), + ATTR_API_CURRENT: current_weather, ATTR_API_HOURLY_FORECAST: [ self._get_hourly_forecast_weather_data(item) for item in 
weather_report.hourly_forecast @@ -122,6 +128,8 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): } def _get_hourly_forecast_weather_data(self, forecast: HourlyWeatherForecast): + uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None + return Forecast( datetime=forecast.date_time.isoformat(), condition=self._get_condition(forecast.condition.id), @@ -134,12 +142,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): wind_speed=forecast.wind_speed, native_wind_gust_speed=forecast.wind_gust, wind_bearing=forecast.wind_bearing, - uv_index=float(forecast.uv_index), + uv_index=uv_index, precipitation_probability=round(forecast.precipitation_probability * 100), precipitation=self._calc_precipitation(forecast.rain, forecast.snow), ) def _get_daily_forecast_weather_data(self, forecast: DailyWeatherForecast): + uv_index = float(forecast.uv_index) if forecast.uv_index is not None else None + return Forecast( datetime=forecast.date_time.isoformat(), condition=self._get_condition(forecast.condition.id), @@ -153,7 +163,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator): wind_speed=forecast.wind_speed, native_wind_gust_speed=forecast.wind_gust, wind_bearing=forecast.wind_bearing, - uv_index=float(forecast.uv_index), + uv_index=uv_index, precipitation_probability=round(forecast.precipitation_probability * 100), precipitation=round(forecast.rain + forecast.snow, 2), ) diff --git a/homeassistant/components/openweathermap/manifest.json b/homeassistant/components/openweathermap/manifest.json index e2c809cf385..199e750ad4f 100644 --- a/homeassistant/components/openweathermap/manifest.json +++ b/homeassistant/components/openweathermap/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/openweathermap", "iot_class": "cloud_polling", "loggers": ["pyopenweathermap"], - "requirements": ["pyopenweathermap==0.0.9"] + "requirements": ["pyopenweathermap==0.1.1"] } diff --git a/homeassistant/components/openweathermap/sensor.py b/homeassistant/components/openweathermap/sensor.py index 89905e99ed9..46789f4b3d2 100644 --- a/homeassistant/components/openweathermap/sensor.py +++ b/homeassistant/components/openweathermap/sensor.py @@ -19,6 +19,7 @@ from homeassistant.const import ( UnitOfVolumetricFlux, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType @@ -47,6 +48,7 @@ from .const import ( DEFAULT_NAME, DOMAIN, MANUFACTURER, + OWM_MODE_FREE_FORECAST, ) from .coordinator import WeatherUpdateCoordinator @@ -161,16 +163,23 @@ async def async_setup_entry( name = domain_data.name weather_coordinator = domain_data.coordinator - entities: list[AbstractOpenWeatherMapSensor] = [ - OpenWeatherMapSensor( - name, - f"{config_entry.unique_id}-{description.key}", - description, - weather_coordinator, + if domain_data.mode == OWM_MODE_FREE_FORECAST: + entity_registry = er.async_get(hass) + entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + for entry in entries: + entity_registry.async_remove(entry.entity_id) + else: + async_add_entities( + OpenWeatherMapSensor( + name, + f"{config_entry.unique_id}-{description.key}", + description, + weather_coordinator, + ) + for description in WEATHER_SENSOR_TYPES ) - for description in WEATHER_SENSOR_TYPES - ] - 
async_add_entities(entities) class AbstractOpenWeatherMapSensor(SensorEntity): diff --git a/homeassistant/components/openweathermap/utils.py b/homeassistant/components/openweathermap/utils.py index 7f2391b21a1..ba5378fb31c 100644 --- a/homeassistant/components/openweathermap/utils.py +++ b/homeassistant/components/openweathermap/utils.py @@ -2,7 +2,7 @@ from typing import Any -from pyopenweathermap import OWMClient, RequestError +from pyopenweathermap import RequestError, create_owm_client from homeassistant.const import CONF_LANGUAGE, CONF_MODE @@ -16,7 +16,7 @@ async def validate_api_key(api_key, mode): api_key_valid = None errors, description_placeholders = {}, {} try: - owm_client = OWMClient(api_key, mode) + owm_client = create_owm_client(api_key, mode) api_key_valid = await owm_client.validate_key() except RequestError as error: errors["base"] = "cannot_connect" diff --git a/homeassistant/components/openweathermap/weather.py b/homeassistant/components/openweathermap/weather.py index 62b15218233..3a134a0ee26 100644 --- a/homeassistant/components/openweathermap/weather.py +++ b/homeassistant/components/openweathermap/weather.py @@ -8,6 +8,7 @@ from homeassistant.components.weather import ( WeatherEntityFeature, ) from homeassistant.const import ( + UnitOfLength, UnitOfPrecipitationDepth, UnitOfPressure, UnitOfSpeed, @@ -29,6 +30,7 @@ from .const import ( ATTR_API_HUMIDITY, ATTR_API_PRESSURE, ATTR_API_TEMPERATURE, + ATTR_API_VISIBILITY_DISTANCE, ATTR_API_WIND_BEARING, ATTR_API_WIND_GUST, ATTR_API_WIND_SPEED, @@ -36,6 +38,9 @@ from .const import ( DEFAULT_NAME, DOMAIN, MANUFACTURER, + OWM_MODE_FREE_FORECAST, + OWM_MODE_V25, + OWM_MODE_V30, ) from .coordinator import WeatherUpdateCoordinator @@ -48,10 +53,11 @@ async def async_setup_entry( """Set up OpenWeatherMap weather entity based on a config entry.""" domain_data = config_entry.runtime_data name = domain_data.name + mode = domain_data.mode weather_coordinator = domain_data.coordinator unique_id = f"{config_entry.unique_id}" - owm_weather = OpenWeatherMapWeather(name, unique_id, weather_coordinator) + owm_weather = OpenWeatherMapWeather(name, unique_id, mode, weather_coordinator) async_add_entities([owm_weather], False) @@ -66,11 +72,13 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina _attr_native_pressure_unit = UnitOfPressure.HPA _attr_native_temperature_unit = UnitOfTemperature.CELSIUS _attr_native_wind_speed_unit = UnitOfSpeed.METERS_PER_SECOND + _attr_native_visibility_unit = UnitOfLength.METERS def __init__( self, name: str, unique_id: str, + mode: str, weather_coordinator: WeatherUpdateCoordinator, ) -> None: """Initialize the sensor.""" @@ -83,59 +91,71 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[WeatherUpdateCoordina manufacturer=MANUFACTURER, name=DEFAULT_NAME, ) - self._attr_supported_features = ( - WeatherEntityFeature.FORECAST_DAILY | WeatherEntityFeature.FORECAST_HOURLY - ) + + if mode in (OWM_MODE_V30, OWM_MODE_V25): + self._attr_supported_features = ( + WeatherEntityFeature.FORECAST_DAILY + | WeatherEntityFeature.FORECAST_HOURLY + ) + elif mode == OWM_MODE_FREE_FORECAST: + self._attr_supported_features = WeatherEntityFeature.FORECAST_HOURLY @property def condition(self) -> str | None: """Return the current condition.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CONDITION] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CONDITION) @property def cloud_coverage(self) -> float | None: """Return the Cloud coverage in %.""" - return 
self.coordinator.data[ATTR_API_CURRENT][ATTR_API_CLOUDS] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_CLOUDS) @property def native_apparent_temperature(self) -> float | None: """Return the apparent temperature.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_FEELS_LIKE_TEMPERATURE] + return self.coordinator.data[ATTR_API_CURRENT].get( + ATTR_API_FEELS_LIKE_TEMPERATURE + ) @property def native_temperature(self) -> float | None: """Return the temperature.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_TEMPERATURE] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_TEMPERATURE) @property def native_pressure(self) -> float | None: """Return the pressure.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_PRESSURE] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_PRESSURE) @property def humidity(self) -> float | None: """Return the humidity.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_HUMIDITY] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_HUMIDITY) @property def native_dew_point(self) -> float | None: """Return the dew point.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_DEW_POINT] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_DEW_POINT) @property def native_wind_gust_speed(self) -> float | None: """Return the wind gust speed.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_GUST] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_GUST) @property def native_wind_speed(self) -> float | None: """Return the wind speed.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_SPEED] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_SPEED) @property def wind_bearing(self) -> float | str | None: """Return the wind bearing.""" - return self.coordinator.data[ATTR_API_CURRENT][ATTR_API_WIND_BEARING] + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_WIND_BEARING) + + @property + def visibility(self) -> float | str | None: + """Return visibility.""" + return self.coordinator.data[ATTR_API_CURRENT].get(ATTR_API_VISIBILITY_DISTANCE) @callback def _async_forecast_daily(self) -> list[Forecast] | None: diff --git a/requirements_all.txt b/requirements_all.txt index 3f0d1f2d99f..a7f66c5c161 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2074,7 +2074,7 @@ pyombi==0.1.10 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.0.9 +pyopenweathermap==0.1.1 # homeassistant.components.opnsense pyopnsense==0.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d7d98d3e057..2bb4ecf5998 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1661,7 +1661,7 @@ pyoctoprintapi==0.1.12 pyopenuv==2023.02.0 # homeassistant.components.openweathermap -pyopenweathermap==0.0.9 +pyopenweathermap==0.1.1 # homeassistant.components.opnsense pyopnsense==0.4.0 diff --git a/tests/components/openweathermap/test_config_flow.py b/tests/components/openweathermap/test_config_flow.py index be02a6b01a9..f18aa432e2f 100644 --- a/tests/components/openweathermap/test_config_flow.py +++ b/tests/components/openweathermap/test_config_flow.py @@ -45,7 +45,7 @@ CONFIG = { VALID_YAML_CONFIG = {CONF_API_KEY: "foo"} -def _create_mocked_owm_client(is_valid: bool): +def _create_mocked_owm_factory(is_valid: bool): current_weather = CurrentWeather( date_time=datetime.fromtimestamp(1714063536, tz=UTC), temperature=6.84, @@ -118,18 +118,18 @@ def 
_create_mocked_owm_client(is_valid: bool): def mock_owm_client(): """Mock config_flow OWMClient.""" with patch( - "homeassistant.components.openweathermap.OWMClient", - ) as owm_client_mock: - yield owm_client_mock + "homeassistant.components.openweathermap.create_owm_client", + ) as mock: + yield mock @pytest.fixture(name="config_flow_owm_client_mock") def mock_config_flow_owm_client(): """Mock config_flow OWMClient.""" with patch( - "homeassistant.components.openweathermap.utils.OWMClient", - ) as config_flow_owm_client_mock: - yield config_flow_owm_client_mock + "homeassistant.components.openweathermap.utils.create_owm_client", + ) as mock: + yield mock async def test_successful_config_flow( @@ -138,7 +138,7 @@ async def test_successful_config_flow( config_flow_owm_client_mock, ) -> None: """Test that the form is served with valid input.""" - mock = _create_mocked_owm_client(True) + mock = _create_mocked_owm_factory(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -177,7 +177,7 @@ async def test_abort_config_flow( config_flow_owm_client_mock, ) -> None: """Test that the form is served with same data.""" - mock = _create_mocked_owm_client(True) + mock = _create_mocked_owm_factory(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -200,7 +200,7 @@ async def test_config_flow_options_change( config_flow_owm_client_mock, ) -> None: """Test that the options form.""" - mock = _create_mocked_owm_client(True) + mock = _create_mocked_owm_factory(True) owm_client_mock.return_value = mock config_flow_owm_client_mock.return_value = mock @@ -261,7 +261,7 @@ async def test_form_invalid_api_key( config_flow_owm_client_mock, ) -> None: """Test that the form is served with no input.""" - config_flow_owm_client_mock.return_value = _create_mocked_owm_client(False) + config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(False) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG ) @@ -269,7 +269,7 @@ async def test_form_invalid_api_key( assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_api_key"} - config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True) + config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=CONFIG ) @@ -282,7 +282,7 @@ async def test_form_api_call_error( config_flow_owm_client_mock, ) -> None: """Test setting up with api call error.""" - config_flow_owm_client_mock.return_value = _create_mocked_owm_client(True) + config_flow_owm_client_mock.return_value = _create_mocked_owm_factory(True) config_flow_owm_client_mock.side_effect = RequestError("oops") result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG From ace6385f5e96d00470fa21704345e2e64c9f0533 Mon Sep 17 00:00:00 2001 From: cnico Date: Sat, 10 Aug 2024 17:01:49 +0200 Subject: [PATCH 0540/1635] Upgrade chacon_dio_api to version 1.2.0 (#123528) Upgrade api version 1.2.0 with the first user feedback improvement --- homeassistant/components/chacon_dio/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/chacon_dio/manifest.json b/homeassistant/components/chacon_dio/manifest.json index d077b130da9..c0f4059e798 100644 --- 
a/homeassistant/components/chacon_dio/manifest.json +++ b/homeassistant/components/chacon_dio/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/chacon_dio", "iot_class": "cloud_push", "loggers": ["dio_chacon_api"], - "requirements": ["dio-chacon-wifi-api==1.1.0"] + "requirements": ["dio-chacon-wifi-api==1.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index a7f66c5c161..cbfab8d9812 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -738,7 +738,7 @@ devolo-home-control-api==0.18.3 devolo-plc-api==1.4.1 # homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.1.0 +dio-chacon-wifi-api==1.2.0 # homeassistant.components.directv directv==0.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2bb4ecf5998..9a980966cf6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -631,7 +631,7 @@ devolo-home-control-api==0.18.3 devolo-plc-api==1.4.1 # homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.1.0 +dio-chacon-wifi-api==1.2.0 # homeassistant.components.directv directv==0.4.0 From 257742de46d586e89ffeb6849e4d29126ec59bf4 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sat, 10 Aug 2024 18:01:15 +0200 Subject: [PATCH 0541/1635] Fix cleanup of old orphan device entries in AVM Fritz!Tools (#123516) fix cleanup of old orphan device entries --- homeassistant/components/fritz/coordinator.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index a67f385f3e8..13c442a1ace 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -652,8 +652,6 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): entities: list[er.RegistryEntry] = er.async_entries_for_config_entry( entity_reg, config_entry.entry_id ) - - orphan_macs: set[str] = set() for entity in entities: entry_mac = entity.unique_id.split("_")[0] if ( @@ -661,17 +659,16 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): or "_internet_access" in entity.unique_id ) and entry_mac not in device_hosts: _LOGGER.info("Removing orphan entity entry %s", entity.entity_id) - orphan_macs.add(entry_mac) entity_reg.async_remove(entity.entity_id) device_reg = dr.async_get(self.hass) - orphan_connections = { - (CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in orphan_macs + valid_connections = { + (CONNECTION_NETWORK_MAC, dr.format_mac(mac)) for mac in device_hosts } for device in dr.async_entries_for_config_entry( device_reg, config_entry.entry_id ): - if any(con in device.connections for con in orphan_connections): + if not any(con in device.connections for con in valid_connections): _LOGGER.debug("Removing obsolete device entry %s", device.name) device_reg.async_update_device( device.id, remove_config_entry_id=config_entry.entry_id From 13b12a7657f9976ecb723a02a4daef410c51dbc0 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 10 Aug 2024 18:28:01 +0200 Subject: [PATCH 0542/1635] Clean up codespell words (#123541) --- .pre-commit-config.yaml | 2 +- homeassistant/components/acer_projector/switch.py | 12 ++++++------ homeassistant/components/keba/sensor.py | 2 +- homeassistant/components/mysensors/binary_sensor.py | 4 ++-- homeassistant/components/mysensors/helpers.py | 10 ++++------ homeassistant/components/mysensors/sensor.py | 4 ++-- tests/helpers/test_config_validation.py 
| 4 ++-- 7 files changed, 18 insertions(+), 20 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ed28852fd81..f057931e2a8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,7 +12,7 @@ repos: hooks: - id: codespell args: - - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn,pres,ser,ue + - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn - --skip="./.*,*.csv,*.json,*.ambr" - --quiet-level=2 exclude_types: [csv, json, html] diff --git a/homeassistant/components/acer_projector/switch.py b/homeassistant/components/acer_projector/switch.py index 5c1c37df5d8..c1463cd9a08 100644 --- a/homeassistant/components/acer_projector/switch.py +++ b/homeassistant/components/acer_projector/switch.py @@ -81,7 +81,7 @@ class AcerSwitch(SwitchEntity): write_timeout: int, ) -> None: """Init of the Acer projector.""" - self.ser = serial.Serial( + self.serial = serial.Serial( port=serial_port, timeout=timeout, write_timeout=write_timeout ) self._serial_port = serial_port @@ -99,16 +99,16 @@ class AcerSwitch(SwitchEntity): # was disconnected during runtime. # This way the projector can be reconnected and will still work try: - if not self.ser.is_open: - self.ser.open() - self.ser.write(msg.encode("utf-8")) + if not self.serial.is_open: + self.serial.open() + self.serial.write(msg.encode("utf-8")) # Size is an experience value there is no real limit. # AFAIK there is no limit and no end character so we will usually # need to wait for timeout - ret = self.ser.read_until(size=20).decode("utf-8") + ret = self.serial.read_until(size=20).decode("utf-8") except serial.SerialException: _LOGGER.error("Problem communicating with %s", self._serial_port) - self.ser.close() + self.serial.close() return ret def _write_read_format(self, msg: str) -> str: diff --git a/homeassistant/components/keba/sensor.py b/homeassistant/components/keba/sensor.py index 74c08933cbe..1878a7f6e49 100644 --- a/homeassistant/components/keba/sensor.py +++ b/homeassistant/components/keba/sensor.py @@ -64,7 +64,7 @@ async def async_setup_platform( keba, "session_energy", SensorEntityDescription( - key="E pres", + key="E pres", # codespell:ignore pres name="Session Energy", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, diff --git a/homeassistant/components/mysensors/binary_sensor.py b/homeassistant/components/mysensors/binary_sensor.py index a0a1c92c682..b8a3769308a 100644 --- a/homeassistant/components/mysensors/binary_sensor.py +++ b/homeassistant/components/mysensors/binary_sensor.py @@ -104,8 +104,8 @@ class MySensorsBinarySensor(mysensors.device.MySensorsChildEntity, BinarySensorE def __init__(self, *args: Any, **kwargs: Any) -> None: """Set up the instance.""" super().__init__(*args, **kwargs) - pres = self.gateway.const.Presentation - self.entity_description = SENSORS[pres(self.child_type).name] + presentation = self.gateway.const.Presentation + self.entity_description = SENSORS[presentation(self.child_type).name] @property def is_on(self) -> bool: diff --git a/homeassistant/components/mysensors/helpers.py b/homeassistant/components/mysensors/helpers.py index f060f3313dc..74dc99e76d3 100644 --- a/homeassistant/components/mysensors/helpers.py +++ b/homeassistant/components/mysensors/helpers.py @@ -168,11 +168,9 @@ def invalid_msg( gateway: BaseAsyncGateway, child: ChildSensor, value_type_name: ValueType ) -> str: """Return a message for an invalid child during schema validation.""" - 
pres = gateway.const.Presentation + presentation = gateway.const.Presentation set_req = gateway.const.SetReq - return ( - f"{pres(child.type).name} requires value_type {set_req[value_type_name].name}" - ) + return f"{presentation(child.type).name} requires value_type {set_req[value_type_name].name}" def validate_set_msg( @@ -202,10 +200,10 @@ def validate_child( ) -> defaultdict[Platform, list[DevId]]: """Validate a child. Returns a dict mapping hass platform names to list of DevId.""" validated: defaultdict[Platform, list[DevId]] = defaultdict(list) - pres: type[IntEnum] = gateway.const.Presentation + presentation: type[IntEnum] = gateway.const.Presentation set_req: type[IntEnum] = gateway.const.SetReq child_type_name: SensorType | None = next( - (member.name for member in pres if member.value == child.type), None + (member.name for member in presentation if member.value == child.type), None ) if not child_type_name: _LOGGER.warning("Child type %s is not supported", child.type) diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index a6a91c12a81..9c6c0de89e6 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -318,9 +318,9 @@ class MySensorsSensor(mysensors.device.MySensorsChildEntity, SensorEntity): entity_description = SENSORS.get(set_req(self.value_type).name) if not entity_description: - pres = self.gateway.const.Presentation + presentation = self.gateway.const.Presentation entity_description = SENSORS.get( - f"{set_req(self.value_type).name}_{pres(self.child_type).name}" + f"{set_req(self.value_type).name}_{presentation(self.child_type).name}" ) return entity_description diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index cf72012a1f1..ac3af13949b 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -33,7 +33,7 @@ def test_boolean() -> None: "T", "negative", "lock", - "tr ue", + "tr ue", # codespell:ignore ue [], [1, 2], {"one": "two"}, @@ -1492,7 +1492,7 @@ def test_whitespace() -> None: "T", "negative", "lock", - "tr ue", + "tr ue", # codespell:ignore ue [], [1, 2], {"one": "two"}, From 778194f7a0a6a086cf92a7f56005521686bc5fcc Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 10 Aug 2024 18:31:17 +0200 Subject: [PATCH 0543/1635] Bump AirGradient to 0.8.0 (#123527) --- homeassistant/components/airgradient/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airgradient/manifest.json b/homeassistant/components/airgradient/manifest.json index efb18ae5752..fed4fafdc74 100644 --- a/homeassistant/components/airgradient/manifest.json +++ b/homeassistant/components/airgradient/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/airgradient", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["airgradient==0.7.1"], + "requirements": ["airgradient==0.8.0"], "zeroconf": ["_airgradient._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index cbfab8d9812..2e8089bbf9a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -410,7 +410,7 @@ aiowithings==3.0.2 aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.7.1 +airgradient==0.8.0 # homeassistant.components.airly airly==1.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 
9a980966cf6..aeffade42eb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -392,7 +392,7 @@ aiowithings==3.0.2 aioymaps==1.2.5 # homeassistant.components.airgradient -airgradient==0.7.1 +airgradient==0.8.0 # homeassistant.components.airly airly==1.1.0 From ef2ddbf86d9ed63fbf712092209e2f956206c733 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 10 Aug 2024 11:37:00 -0500 Subject: [PATCH 0544/1635] Revert "Bump chacha20poly1305-reuseable to 0.13.2" (#123505) --- homeassistant/components/homekit/manifest.json | 1 - requirements_all.txt | 3 --- requirements_test_all.txt | 3 --- 3 files changed, 7 deletions(-) diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index 8bbf6b677a9..17d1237e579 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -10,7 +10,6 @@ "loggers": ["pyhap"], "requirements": [ "HAP-python==4.9.1", - "chacha20poly1305-reuseable==0.13.2", "fnv-hash-fast==0.5.0", "PyQRCode==1.2.1", "base36==0.1.1" diff --git a/requirements_all.txt b/requirements_all.txt index 2e8089bbf9a..29146455616 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -651,9 +651,6 @@ cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 -# homeassistant.components.homekit -chacha20poly1305-reuseable==0.13.2 - # homeassistant.components.cisco_mobility_express ciscomobilityexpress==0.3.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index aeffade42eb..46889f670ee 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -562,9 +562,6 @@ cached_ipaddress==0.3.0 # homeassistant.components.caldav caldav==1.3.9 -# homeassistant.components.homekit -chacha20poly1305-reuseable==0.13.2 - # homeassistant.components.coinbase coinbase-advanced-py==1.2.2 From 0558c85b5dc4523772b4c7ab7171e412012f428e Mon Sep 17 00:00:00 2001 From: Franck Nijhof Date: Sat, 10 Aug 2024 18:38:20 +0200 Subject: [PATCH 0545/1635] Revert "Remove ESPHome legacy entity naming" (#123453) --- homeassistant/components/esphome/entity.py | 20 ++- .../esphome/test_alarm_control_panel.py | 22 +-- .../components/esphome/test_binary_sensor.py | 10 +- tests/components/esphome/test_button.py | 8 +- tests/components/esphome/test_camera.py | 36 ++--- tests/components/esphome/test_climate.py | 36 ++--- tests/components/esphome/test_cover.py | 24 +-- tests/components/esphome/test_date.py | 6 +- tests/components/esphome/test_datetime.py | 6 +- tests/components/esphome/test_entity.py | 89 +++++++---- tests/components/esphome/test_event.py | 2 +- tests/components/esphome/test_fan.py | 46 +++--- tests/components/esphome/test_light.py | 148 +++++++++--------- tests/components/esphome/test_lock.py | 14 +- tests/components/esphome/test_media_player.py | 24 +-- tests/components/esphome/test_number.py | 10 +- tests/components/esphome/test_select.py | 4 +- tests/components/esphome/test_sensor.py | 38 ++--- tests/components/esphome/test_switch.py | 6 +- tests/components/esphome/test_text.py | 8 +- tests/components/esphome/test_time.py | 6 +- tests/components/esphome/test_update.py | 10 +- tests/components/esphome/test_valve.py | 24 +-- 23 files changed, 322 insertions(+), 275 deletions(-) diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index 5f845f4665b..6e02f8de869 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -176,7 +176,6 @@ 
ENTITY_CATEGORIES: EsphomeEnumMapper[EsphomeEntityCategory, EntityCategory | Non class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): """Define a base esphome entity.""" - _attr_has_entity_name = True _attr_should_poll = False _static_info: _InfoT _state: _StateT @@ -201,6 +200,25 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): self._attr_device_info = DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)} ) + # + # If `friendly_name` is set, we use the Friendly naming rules, if + # `friendly_name` is not set we make an exception to the naming rules for + # backwards compatibility and use the Legacy naming rules. + # + # Friendly naming + # - Friendly name is prepended to entity names + # - Device Name is prepended to entity ids + # - Entity id is constructed from device name and object id + # + # Legacy naming + # - Device name is not prepended to entity names + # - Device name is not prepended to entity ids + # - Entity id is constructed from entity name + # + if not device_info.friendly_name: + return + self._attr_has_entity_name = True + self.entity_id = f"{domain}.{device_info.name}_{entity_info.object_id}" async def async_added_to_hass(self) -> None: """Register callbacks.""" diff --git a/tests/components/esphome/test_alarm_control_panel.py b/tests/components/esphome/test_alarm_control_panel.py index 33c7be94736..af717ac1b49 100644 --- a/tests/components/esphome/test_alarm_control_panel.py +++ b/tests/components/esphome/test_alarm_control_panel.py @@ -58,7 +58,7 @@ async def test_generic_alarm_control_panel_requires_code( user_service=user_service, states=states, ) - state = hass.states.get("alarm_control_panel.test_my_alarm_control_panel") + state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None assert state.state == STATE_ALARM_ARMED_AWAY @@ -66,7 +66,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_AWAY, { - ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -80,7 +80,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_CUSTOM_BYPASS, { - ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -94,7 +94,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_HOME, { - ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -108,7 +108,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_NIGHT, { - ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -122,7 +122,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_ARM_VACATION, { - ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -136,7 +136,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_TRIGGER, { - ATTR_ENTITY_ID: 
"alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -150,7 +150,7 @@ async def test_generic_alarm_control_panel_requires_code( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_DISARM, { - ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel", + ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel", ATTR_CODE: 1234, }, blocking=True, @@ -193,14 +193,14 @@ async def test_generic_alarm_control_panel_no_code( user_service=user_service, states=states, ) - state = hass.states.get("alarm_control_panel.test_my_alarm_control_panel") + state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None assert state.state == STATE_ALARM_ARMED_AWAY await hass.services.async_call( ALARM_CONTROL_PANEL_DOMAIN, SERVICE_ALARM_DISARM, - {ATTR_ENTITY_ID: "alarm_control_panel.test_my_alarm_control_panel"}, + {ATTR_ENTITY_ID: "alarm_control_panel.test_myalarm_control_panel"}, blocking=True, ) mock_client.alarm_control_panel_command.assert_has_calls( @@ -239,6 +239,6 @@ async def test_generic_alarm_control_panel_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("alarm_control_panel.test_my_alarm_control_panel") + state = hass.states.get("alarm_control_panel.test_myalarm_control_panel") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_binary_sensor.py b/tests/components/esphome/test_binary_sensor.py index c07635eff3b..3da8a54ff34 100644 --- a/tests/components/esphome/test_binary_sensor.py +++ b/tests/components/esphome/test_binary_sensor.py @@ -74,7 +74,7 @@ async def test_binary_sensor_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == hass_state @@ -105,7 +105,7 @@ async def test_status_binary_sensor( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON @@ -135,7 +135,7 @@ async def test_binary_sensor_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -165,12 +165,12 @@ async def test_binary_sensor_has_state_false( user_service=user_service, states=states, ) - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_UNKNOWN mock_device.set_state(BinarySensorState(key=1, state=True, missing_state=False)) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON diff --git a/tests/components/esphome/test_button.py b/tests/components/esphome/test_button.py index d3fec2a56d2..8c120949caa 100644 --- a/tests/components/esphome/test_button.py +++ b/tests/components/esphome/test_button.py @@ -29,22 +29,22 @@ async def test_button_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("button.test_my_button") + state = 
hass.states.get("button.test_mybutton") assert state is not None assert state.state == STATE_UNKNOWN await hass.services.async_call( BUTTON_DOMAIN, SERVICE_PRESS, - {ATTR_ENTITY_ID: "button.test_my_button"}, + {ATTR_ENTITY_ID: "button.test_mybutton"}, blocking=True, ) mock_client.button_command.assert_has_calls([call(1)]) - state = hass.states.get("button.test_my_button") + state = hass.states.get("button.test_mybutton") assert state is not None assert state.state != STATE_UNKNOWN await mock_device.mock_disconnect(False) - state = hass.states.get("button.test_my_button") + state = hass.states.get("button.test_mybutton") assert state is not None assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/esphome/test_camera.py b/tests/components/esphome/test_camera.py index 680cda00944..c6a61cd18e8 100644 --- a/tests/components/esphome/test_camera.py +++ b/tests/components/esphome/test_camera.py @@ -53,7 +53,7 @@ async def test_camera_single_image( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE @@ -63,9 +63,9 @@ async def test_camera_single_image( mock_client.request_single_image = _mock_camera_image client = await hass_client() - resp = await client.get("/api/camera_proxy/camera.test_my_camera") + resp = await client.get("/api/camera_proxy/camera.test_mycamera") await hass.async_block_till_done() - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE @@ -101,15 +101,15 @@ async def test_camera_single_image_unavailable_before_requested( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE await mock_device.mock_disconnect(False) client = await hass_client() - resp = await client.get("/api/camera_proxy/camera.test_my_camera") + resp = await client.get("/api/camera_proxy/camera.test_mycamera") await hass.async_block_till_done() - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -142,7 +142,7 @@ async def test_camera_single_image_unavailable_during_request( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE @@ -152,9 +152,9 @@ async def test_camera_single_image_unavailable_during_request( mock_client.request_single_image = _mock_camera_image client = await hass_client() - resp = await client.get("/api/camera_proxy/camera.test_my_camera") + resp = await client.get("/api/camera_proxy/camera.test_mycamera") await hass.async_block_till_done() - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -187,7 +187,7 @@ async def test_camera_stream( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE remaining_responses = 3 @@ -203,9 +203,9 @@ async def test_camera_stream( mock_client.request_single_image = _mock_camera_image client = await hass_client() - resp = 
await client.get("/api/camera_proxy_stream/camera.test_my_camera") + resp = await client.get("/api/camera_proxy_stream/camera.test_mycamera") await hass.async_block_till_done() - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE @@ -247,16 +247,16 @@ async def test_camera_stream_unavailable( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE await mock_device.mock_disconnect(False) client = await hass_client() - await client.get("/api/camera_proxy_stream/camera.test_my_camera") + await client.get("/api/camera_proxy_stream/camera.test_mycamera") await hass.async_block_till_done() - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_UNAVAILABLE @@ -287,7 +287,7 @@ async def test_camera_stream_with_disconnection( user_service=user_service, states=states, ) - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_IDLE remaining_responses = 3 @@ -305,8 +305,8 @@ async def test_camera_stream_with_disconnection( mock_client.request_single_image = _mock_camera_image client = await hass_client() - await client.get("/api/camera_proxy_stream/camera.test_my_camera") + await client.get("/api/camera_proxy_stream/camera.test_mycamera") await hass.async_block_till_done() - state = hass.states.get("camera.test_my_camera") + state = hass.states.get("camera.test_mycamera") assert state is not None assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/esphome/test_climate.py b/tests/components/esphome/test_climate.py index a573128bef1..4ec7fee6447 100644 --- a/tests/components/esphome/test_climate.py +++ b/tests/components/esphome/test_climate.py @@ -78,14 +78,14 @@ async def test_climate_entity( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_my_climate") + state = hass.states.get("climate.test_myclimate") assert state is not None assert state.state == HVACMode.COOL await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) @@ -130,14 +130,14 @@ async def test_climate_entity_with_step_and_two_point( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_my_climate") + state = hass.states.get("climate.test_myclimate") assert state is not None assert state.state == HVACMode.COOL await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) @@ -147,7 +147,7 @@ async def test_climate_entity_with_step_and_two_point( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - ATTR_ENTITY_ID: "climate.test_my_climate", + ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HVAC_MODE: HVACMode.AUTO, ATTR_TARGET_TEMP_LOW: 20, ATTR_TARGET_TEMP_HIGH: 30, @@ -209,14 +209,14 @@ async def 
test_climate_entity_with_step_and_target_temp( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_my_climate") + state = hass.states.get("climate.test_myclimate") assert state is not None assert state.state == HVACMode.COOL await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_TEMPERATURE: 25}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_TEMPERATURE: 25}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_temperature=25.0)]) @@ -226,7 +226,7 @@ async def test_climate_entity_with_step_and_target_temp( CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - ATTR_ENTITY_ID: "climate.test_my_climate", + ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HVAC_MODE: HVACMode.AUTO, ATTR_TARGET_TEMP_LOW: 20, ATTR_TARGET_TEMP_HIGH: 30, @@ -249,7 +249,7 @@ async def test_climate_entity_with_step_and_target_temp( CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, { - ATTR_ENTITY_ID: "climate.test_my_climate", + ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HVAC_MODE: HVACMode.HEAT, }, blocking=True, @@ -267,7 +267,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_PRESET_MODE: "away"}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_PRESET_MODE: "away"}, blocking=True, ) mock_client.climate_command.assert_has_calls( @@ -283,7 +283,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_PRESET_MODE: "preset1"}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_PRESET_MODE: "preset1"}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, custom_preset="preset1")]) @@ -292,7 +292,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_FAN_MODE: FAN_HIGH}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_FAN_MODE: FAN_HIGH}, blocking=True, ) mock_client.climate_command.assert_has_calls( @@ -303,7 +303,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_FAN_MODE: "fan2"}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_FAN_MODE: "fan2"}, blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, custom_fan_mode="fan2")]) @@ -312,7 +312,7 @@ async def test_climate_entity_with_step_and_target_temp( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_SWING_MODE: SWING_BOTH}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_SWING_MODE: SWING_BOTH}, blocking=True, ) mock_client.climate_command.assert_has_calls( @@ -362,7 +362,7 @@ async def test_climate_entity_with_humidity( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_my_climate") + state = hass.states.get("climate.test_myclimate") assert state is not None assert state.state == HVACMode.AUTO attributes = state.attributes @@ -374,7 +374,7 @@ async def test_climate_entity_with_humidity( await hass.services.async_call( CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, - {ATTR_ENTITY_ID: "climate.test_my_climate", ATTR_HUMIDITY: 23}, + {ATTR_ENTITY_ID: "climate.test_myclimate", ATTR_HUMIDITY: 23}, 
blocking=True, ) mock_client.climate_command.assert_has_calls([call(key=1, target_humidity=23)]) @@ -422,7 +422,7 @@ async def test_climate_entity_with_inf_value( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_my_climate") + state = hass.states.get("climate.test_myclimate") assert state is not None assert state.state == HVACMode.AUTO attributes = state.attributes @@ -484,7 +484,7 @@ async def test_climate_entity_attributes( user_service=user_service, states=states, ) - state = hass.states.get("climate.test_my_climate") + state = hass.states.get("climate.test_myclimate") assert state is not None assert state.state == HVACMode.COOL assert state.attributes == snapshot(name="climate-entity-attributes") diff --git a/tests/components/esphome/test_cover.py b/tests/components/esphome/test_cover.py index 59eadb3cfd9..b190d287198 100644 --- a/tests/components/esphome/test_cover.py +++ b/tests/components/esphome/test_cover.py @@ -72,7 +72,7 @@ async def test_cover_entity( user_service=user_service, states=states, ) - state = hass.states.get("cover.test_my_cover") + state = hass.states.get("cover.test_mycover") assert state is not None assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 @@ -81,7 +81,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: "cover.test_my_cover"}, + {ATTR_ENTITY_ID: "cover.test_mycover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, position=0.0)]) @@ -90,7 +90,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: "cover.test_my_cover"}, + {ATTR_ENTITY_ID: "cover.test_mycover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, position=1.0)]) @@ -99,7 +99,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: "cover.test_my_cover", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: "cover.test_mycover", ATTR_POSITION: 50}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, position=0.5)]) @@ -108,7 +108,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: "cover.test_my_cover"}, + {ATTR_ENTITY_ID: "cover.test_mycover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, stop=True)]) @@ -117,7 +117,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, - {ATTR_ENTITY_ID: "cover.test_my_cover"}, + {ATTR_ENTITY_ID: "cover.test_mycover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, tilt=1.0)]) @@ -126,7 +126,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, - {ATTR_ENTITY_ID: "cover.test_my_cover"}, + {ATTR_ENTITY_ID: "cover.test_mycover"}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, tilt=0.0)]) @@ -135,7 +135,7 @@ async def test_cover_entity( await hass.services.async_call( COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, - {ATTR_ENTITY_ID: "cover.test_my_cover", ATTR_TILT_POSITION: 50}, + {ATTR_ENTITY_ID: "cover.test_mycover", ATTR_TILT_POSITION: 50}, blocking=True, ) mock_client.cover_command.assert_has_calls([call(key=1, tilt=0.5)]) @@ -145,7 +145,7 @@ async def test_cover_entity( CoverState(key=1, position=0.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() - state 
= hass.states.get("cover.test_my_cover") + state = hass.states.get("cover.test_mycover") assert state is not None assert state.state == STATE_CLOSED @@ -153,7 +153,7 @@ async def test_cover_entity( CoverState(key=1, position=0.5, current_operation=CoverOperation.IS_CLOSING) ) await hass.async_block_till_done() - state = hass.states.get("cover.test_my_cover") + state = hass.states.get("cover.test_mycover") assert state is not None assert state.state == STATE_CLOSING @@ -161,7 +161,7 @@ async def test_cover_entity( CoverState(key=1, position=1.0, current_operation=CoverOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("cover.test_my_cover") + state = hass.states.get("cover.test_mycover") assert state is not None assert state.state == STATE_OPEN @@ -201,7 +201,7 @@ async def test_cover_entity_without_position( user_service=user_service, states=states, ) - state = hass.states.get("cover.test_my_cover") + state = hass.states.get("cover.test_mycover") assert state is not None assert state.state == STATE_OPENING assert ATTR_CURRENT_TILT_POSITION not in state.attributes diff --git a/tests/components/esphome/test_date.py b/tests/components/esphome/test_date.py index 3b620a30461..2deb92775fb 100644 --- a/tests/components/esphome/test_date.py +++ b/tests/components/esphome/test_date.py @@ -35,14 +35,14 @@ async def test_generic_date_entity( user_service=user_service, states=states, ) - state = hass.states.get("date.test_my_date") + state = hass.states.get("date.test_mydate") assert state is not None assert state.state == "2024-12-31" await hass.services.async_call( DATE_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "date.test_my_date", ATTR_DATE: "1999-01-01"}, + {ATTR_ENTITY_ID: "date.test_mydate", ATTR_DATE: "1999-01-01"}, blocking=True, ) mock_client.date_command.assert_has_calls([call(1, 1999, 1, 1)]) @@ -71,6 +71,6 @@ async def test_generic_date_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("date.test_my_date") + state = hass.states.get("date.test_mydate") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_datetime.py b/tests/components/esphome/test_datetime.py index 95f0b7584ad..3bdc196de95 100644 --- a/tests/components/esphome/test_datetime.py +++ b/tests/components/esphome/test_datetime.py @@ -35,7 +35,7 @@ async def test_generic_datetime_entity( user_service=user_service, states=states, ) - state = hass.states.get("datetime.test_my_datetime") + state = hass.states.get("datetime.test_mydatetime") assert state is not None assert state.state == "2024-04-16T12:34:56+00:00" @@ -43,7 +43,7 @@ async def test_generic_datetime_entity( DATETIME_DOMAIN, SERVICE_SET_VALUE, { - ATTR_ENTITY_ID: "datetime.test_my_datetime", + ATTR_ENTITY_ID: "datetime.test_mydatetime", ATTR_DATETIME: "2000-01-01T01:23:45+00:00", }, blocking=True, @@ -74,6 +74,6 @@ async def test_generic_datetime_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("datetime.test_my_datetime") + state = hass.states.get("datetime.test_mydatetime") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_entity.py b/tests/components/esphome/test_entity.py index 64b8d6101ac..296d61b664d 100644 --- a/tests/components/esphome/test_entity.py +++ b/tests/components/esphome/test_entity.py @@ -69,10 +69,10 @@ async def test_entities_removed( entry = mock_device.entry entry_id = entry.entry_id storage_key = f"esphome.{entry_id}" - state = 
hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is not None assert state.state == STATE_ON @@ -81,13 +81,13 @@ async def test_entities_removed( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 2 - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.attributes[ATTR_RESTORED] is True - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is not None reg_entry = entity_registry.async_get( - "binary_sensor.test_my_binary_sensor_to_be_removed" + "binary_sensor.test_mybinary_sensor_to_be_removed" ) assert reg_entry is not None assert state.attributes[ATTR_RESTORED] is True @@ -111,13 +111,13 @@ async def test_entities_removed( entry=entry, ) assert mock_device.entry.entry_id == entry_id - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is None reg_entry = entity_registry.async_get( - "binary_sensor.test_my_binary_sensor_to_be_removed" + "binary_sensor.test_mybinary_sensor_to_be_removed" ) assert reg_entry is None await hass.config_entries.async_unload(entry.entry_id) @@ -164,15 +164,15 @@ async def test_entities_removed_after_reload( entry = mock_device.entry entry_id = entry.entry_id storage_key = f"esphome.{entry_id}" - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is not None assert state.state == STATE_ON reg_entry = entity_registry.async_get( - "binary_sensor.test_my_binary_sensor_to_be_removed" + "binary_sensor.test_mybinary_sensor_to_be_removed" ) assert reg_entry is not None @@ -181,15 +181,15 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 2 - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.attributes[ATTR_RESTORED] is True - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is not None assert state.attributes[ATTR_RESTORED] is True reg_entry = entity_registry.async_get( - "binary_sensor.test_my_binary_sensor_to_be_removed" + "binary_sensor.test_mybinary_sensor_to_be_removed" ) assert reg_entry is not None @@ -198,14 +198,14 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 2 - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = 
hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert ATTR_RESTORED not in state.attributes - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is not None assert ATTR_RESTORED not in state.attributes reg_entry = entity_registry.async_get( - "binary_sensor.test_my_binary_sensor_to_be_removed" + "binary_sensor.test_mybinary_sensor_to_be_removed" ) assert reg_entry is not None @@ -236,23 +236,23 @@ async def test_entities_removed_after_reload( on_future.set_result(None) async_track_state_change_event( - hass, ["binary_sensor.test_my_binary_sensor"], _async_wait_for_on + hass, ["binary_sensor.test_mybinary_sensor"], _async_wait_for_on ) await hass.async_block_till_done() async with asyncio.timeout(2): await on_future assert mock_device.entry.entry_id == entry_id - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON - state = hass.states.get("binary_sensor.test_my_binary_sensor_to_be_removed") + state = hass.states.get("binary_sensor.test_mybinary_sensor_to_be_removed") assert state is None await hass.async_block_till_done() reg_entry = entity_registry.async_get( - "binary_sensor.test_my_binary_sensor_to_be_removed" + "binary_sensor.test_mybinary_sensor_to_be_removed" ) assert reg_entry is None assert await hass.config_entries.async_unload(entry.entry_id) @@ -260,6 +260,35 @@ async def test_entities_removed_after_reload( assert len(hass_storage[storage_key]["data"]["binary_sensor"]) == 1 +async def test_entity_info_object_ids( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test how object ids affect entity id.""" + entity_info = [ + BinarySensorInfo( + object_id="object_id_is_used", + key=1, + name="my binary_sensor", + unique_id="my_binary_sensor", + ) + ] + states = [] + user_service = [] + await mock_esphome_device( + mock_client=mock_client, + entity_info=entity_info, + user_service=user_service, + states=states, + ) + state = hass.states.get("binary_sensor.test_object_id_is_used") + assert state is not None + + async def test_deep_sleep_device( hass: HomeAssistant, mock_client: APIClient, @@ -297,7 +326,7 @@ async def test_deep_sleep_device( states=states, device_info={"has_deep_sleep": True}, ) - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON state = hass.states.get("sensor.test_my_sensor") @@ -306,7 +335,7 @@ async def test_deep_sleep_device( await mock_device.mock_disconnect(False) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_UNAVAILABLE state = hass.states.get("sensor.test_my_sensor") @@ -316,7 +345,7 @@ async def test_deep_sleep_device( await mock_device.mock_connect() await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON state = hass.states.get("sensor.test_my_sensor") @@ -330,7 +359,7 
@@ async def test_deep_sleep_device( mock_device.set_state(BinarySensorState(key=1, state=False, missing_state=False)) mock_device.set_state(SensorState(key=3, state=56, missing_state=False)) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_OFF state = hass.states.get("sensor.test_my_sensor") @@ -339,7 +368,7 @@ async def test_deep_sleep_device( await mock_device.mock_disconnect(True) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_OFF state = hass.states.get("sensor.test_my_sensor") @@ -350,7 +379,7 @@ async def test_deep_sleep_device( await hass.async_block_till_done() await mock_device.mock_disconnect(False) await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_UNAVAILABLE state = hass.states.get("sensor.test_my_sensor") @@ -359,14 +388,14 @@ async def test_deep_sleep_device( await mock_device.mock_connect() await hass.async_block_till_done() - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) await hass.async_block_till_done() # Verify we do not dispatch any more state updates or # availability updates after the stop event is fired - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.test_mybinary_sensor") assert state is not None assert state.state == STATE_ON @@ -401,6 +430,6 @@ async def test_esphome_device_without_friendly_name( states=states, device_info={"friendly_name": None}, ) - state = hass.states.get("binary_sensor.test_my_binary_sensor") + state = hass.states.get("binary_sensor.my_binary_sensor") assert state is not None assert state.state == STATE_ON diff --git a/tests/components/esphome/test_event.py b/tests/components/esphome/test_event.py index 2daba94a3ca..c17dc4d98a9 100644 --- a/tests/components/esphome/test_event.py +++ b/tests/components/esphome/test_event.py @@ -32,7 +32,7 @@ async def test_generic_event_entity( user_service=user_service, states=states, ) - state = hass.states.get("event.test_my_event") + state = hass.states.get("event.test_myevent") assert state is not None assert state.state == "2024-04-24T00:00:00.000+00:00" assert state.attributes["event_type"] == "type1" diff --git a/tests/components/esphome/test_fan.py b/tests/components/esphome/test_fan.py index 9aca36d79a4..064b37b1ec1 100644 --- a/tests/components/esphome/test_fan.py +++ b/tests/components/esphome/test_fan.py @@ -62,14 +62,14 @@ async def test_fan_entity_with_all_features_old_api( user_service=user_service, states=states, ) - state = hass.states.get("fan.test_my_fan") + state = hass.states.get("fan.test_myfan") assert state is not None assert state.state == STATE_ON await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 20}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 20}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -80,7 +80,7 @@ async def test_fan_entity_with_all_features_old_api( 
await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 50}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 50}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -91,7 +91,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_DECREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -102,7 +102,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_INCREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -113,7 +113,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) @@ -122,7 +122,7 @@ async def test_fan_entity_with_all_features_old_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 100}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 100}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -166,14 +166,14 @@ async def test_fan_entity_with_all_features_new_api( user_service=user_service, states=states, ) - state = hass.states.get("fan.test_my_fan") + state = hass.states.get("fan.test_myfan") assert state is not None assert state.state == STATE_ON await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 20}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 20}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=1, state=True)]) @@ -182,7 +182,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 50}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 50}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=2, state=True)]) @@ -191,7 +191,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_DECREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=2, state=True)]) @@ -200,7 +200,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_INCREASE_SPEED, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, speed_level=4, state=True)]) @@ -209,7 +209,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) @@ -218,7 +218,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 100}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 100}, blocking=True, ) 
mock_client.fan_command.assert_has_calls([call(key=1, speed_level=4, state=True)]) @@ -227,7 +227,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PERCENTAGE, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PERCENTAGE: 0}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PERCENTAGE: 0}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) @@ -236,7 +236,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_OSCILLATE, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_OSCILLATING: True}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_OSCILLATING: True}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, oscillating=True)]) @@ -245,7 +245,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_OSCILLATE, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_OSCILLATING: False}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_OSCILLATING: False}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, oscillating=False)]) @@ -254,7 +254,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_DIRECTION, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_DIRECTION: "forward"}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_DIRECTION: "forward"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -265,7 +265,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_DIRECTION, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_DIRECTION: "reverse"}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_DIRECTION: "reverse"}, blocking=True, ) mock_client.fan_command.assert_has_calls( @@ -276,7 +276,7 @@ async def test_fan_entity_with_all_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_SET_PRESET_MODE, - {ATTR_ENTITY_ID: "fan.test_my_fan", ATTR_PRESET_MODE: "Preset1"}, + {ATTR_ENTITY_ID: "fan.test_myfan", ATTR_PRESET_MODE: "Preset1"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, preset_mode="Preset1")]) @@ -308,14 +308,14 @@ async def test_fan_entity_with_no_features_new_api( user_service=user_service, states=states, ) - state = hass.states.get("fan.test_my_fan") + state = hass.states.get("fan.test_myfan") assert state is not None assert state.state == STATE_ON await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=True)]) @@ -324,7 +324,7 @@ async def test_fan_entity_with_no_features_new_api( await hass.services.async_call( FAN_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "fan.test_my_fan"}, + {ATTR_ENTITY_ID: "fan.test_myfan"}, blocking=True, ) mock_client.fan_command.assert_has_calls([call(key=1, state=False)]) diff --git a/tests/components/esphome/test_light.py b/tests/components/esphome/test_light.py index 5dc563d9991..2324c73b16f 100644 --- a/tests/components/esphome/test_light.py +++ b/tests/components/esphome/test_light.py @@ -65,14 +65,14 @@ async def test_light_on_off( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: 
"light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -105,14 +105,14 @@ async def test_light_brightness( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -123,7 +123,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -141,7 +141,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_TRANSITION: 2}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_TRANSITION: 2}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -152,7 +152,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_FLASH: FLASH_LONG}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_FLASH: FLASH_LONG}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -163,7 +163,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_TRANSITION: 2}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_TRANSITION: 2}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -181,7 +181,7 @@ async def test_light_brightness( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_FLASH: FLASH_SHORT}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_FLASH: FLASH_SHORT}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -223,14 +223,14 @@ async def test_light_brightness_on_off( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -248,7 +248,7 @@ async def test_light_brightness_on_off( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -293,14 +293,14 @@ async def test_light_legacy_white_converted_to_brightness( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -351,7 +351,7 @@ async def test_light_legacy_white_with_rgb( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") 
assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ @@ -362,7 +362,7 @@ async def test_light_legacy_white_with_rgb( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_WHITE: 60}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_WHITE: 60}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -405,14 +405,14 @@ async def test_light_brightness_on_off_with_unknown_color_mode( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -431,7 +431,7 @@ async def test_light_brightness_on_off_with_unknown_color_mode( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -476,14 +476,14 @@ async def test_light_on_and_brightness( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -526,14 +526,14 @@ async def test_rgb_color_temp_light( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -551,7 +551,7 @@ async def test_rgb_color_temp_light( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -570,7 +570,7 @@ async def test_rgb_color_temp_light( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -614,14 +614,14 @@ async def test_light_rgb( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -640,7 +640,7 @@ async def test_light_rgb( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) 
mock_client.light_command.assert_has_calls( @@ -661,7 +661,7 @@ async def test_light_rgb( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_my_light", + ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -686,7 +686,7 @@ async def test_light_rgb( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -746,7 +746,7 @@ async def test_light_rgbw( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBW] @@ -755,7 +755,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -775,7 +775,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -797,7 +797,7 @@ async def test_light_rgbw( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_my_light", + ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -824,7 +824,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -847,7 +847,7 @@ async def test_light_rgbw( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -917,7 +917,7 @@ async def test_light_rgbww_with_cold_warm_white_support( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBWW] @@ -927,7 +927,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -949,7 +949,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -973,7 +973,7 @@ async def test_light_rgbww_with_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_my_light", + ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -1003,7 +1003,7 @@ async 
def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1029,7 +1029,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1056,7 +1056,7 @@ async def test_light_rgbww_with_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_my_light", + ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBWW_COLOR: (255, 255, 255, 255, 255), }, blocking=True, @@ -1084,7 +1084,7 @@ async def test_light_rgbww_with_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1154,7 +1154,7 @@ async def test_light_rgbww_without_cold_warm_white_support( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.RGBWW] @@ -1164,7 +1164,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1185,7 +1185,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1208,7 +1208,7 @@ async def test_light_rgbww_without_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_my_light", + ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127, ATTR_HS_COLOR: (100, 100), }, @@ -1237,7 +1237,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1262,7 +1262,7 @@ async def test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBW_COLOR: (255, 255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1288,7 +1288,7 @@ async def test_light_rgbww_without_cold_warm_white_support( LIGHT_DOMAIN, SERVICE_TURN_ON, { - ATTR_ENTITY_ID: "light.test_my_light", + ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGBWW_COLOR: (255, 255, 255, 255, 255), }, blocking=True, @@ -1315,7 +1315,7 @@ async def 
test_light_rgbww_without_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1374,7 +1374,7 @@ async def test_light_color_temp( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1387,7 +1387,7 @@ async def test_light_color_temp( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1406,7 +1406,7 @@ async def test_light_color_temp( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1449,7 +1449,7 @@ async def test_light_color_temp_no_mireds_set( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1462,7 +1462,7 @@ async def test_light_color_temp_no_mireds_set( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1481,7 +1481,7 @@ async def test_light_color_temp_no_mireds_set( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 6000}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 6000}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1501,7 +1501,7 @@ async def test_light_color_temp_no_mireds_set( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1551,7 +1551,7 @@ async def test_light_color_temp_legacy( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1566,7 +1566,7 @@ async def test_light_color_temp_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1585,7 +1585,7 @@ async def test_light_color_temp_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1637,7 +1637,7 @@ async def test_light_rgb_legacy( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON attributes = state.attributes @@ -1647,7 +1647,7 @@ async def 
test_light_rgb_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1663,7 +1663,7 @@ async def test_light_rgb_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=False)]) @@ -1672,7 +1672,7 @@ async def test_light_rgb_legacy( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_RGB_COLOR: (255, 255, 255)}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_RGB_COLOR: (255, 255, 255)}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1715,7 +1715,7 @@ async def test_light_effects( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_EFFECT_LIST] == ["effect1", "effect2"] @@ -1723,7 +1723,7 @@ async def test_light_effects( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_EFFECT: "effect1"}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_EFFECT: "effect1"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1782,7 +1782,7 @@ async def test_only_cold_warm_white_support( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.COLOR_TEMP] @@ -1791,7 +1791,7 @@ async def test_only_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1802,7 +1802,7 @@ async def test_only_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_BRIGHTNESS: 127}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_BRIGHTNESS: 127}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1820,7 +1820,7 @@ async def test_only_cold_warm_white_support( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light", ATTR_COLOR_TEMP_KELVIN: 2500}, + {ATTR_ENTITY_ID: "light.test_mylight", ATTR_COLOR_TEMP_KELVIN: 2500}, blocking=True, ) mock_client.light_command.assert_has_calls( @@ -1861,7 +1861,7 @@ async def test_light_no_color_modes( user_service=user_service, states=states, ) - state = hass.states.get("light.test_my_light") + state = hass.states.get("light.test_mylight") assert state is not None assert state.state == STATE_ON assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] @@ -1869,7 +1869,7 @@ async def test_light_no_color_modes( await hass.services.async_call( LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "light.test_my_light"}, + {ATTR_ENTITY_ID: "light.test_mylight"}, blocking=True, ) mock_client.light_command.assert_has_calls([call(key=1, state=True, color_mode=0)]) diff --git a/tests/components/esphome/test_lock.py b/tests/components/esphome/test_lock.py index 259e990c5f7..82c24b59a2c 100644 --- 
a/tests/components/esphome/test_lock.py +++ b/tests/components/esphome/test_lock.py @@ -39,14 +39,14 @@ async def test_lock_entity_no_open( user_service=user_service, states=states, ) - state = hass.states.get("lock.test_my_lock") + state = hass.states.get("lock.test_mylock") assert state is not None assert state.state == STATE_UNLOCKING await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, - {ATTR_ENTITY_ID: "lock.test_my_lock"}, + {ATTR_ENTITY_ID: "lock.test_mylock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.LOCK)]) @@ -73,7 +73,7 @@ async def test_lock_entity_start_locked( user_service=user_service, states=states, ) - state = hass.states.get("lock.test_my_lock") + state = hass.states.get("lock.test_mylock") assert state is not None assert state.state == STATE_LOCKED @@ -100,14 +100,14 @@ async def test_lock_entity_supports_open( user_service=user_service, states=states, ) - state = hass.states.get("lock.test_my_lock") + state = hass.states.get("lock.test_mylock") assert state is not None assert state.state == STATE_LOCKING await hass.services.async_call( LOCK_DOMAIN, SERVICE_LOCK, - {ATTR_ENTITY_ID: "lock.test_my_lock"}, + {ATTR_ENTITY_ID: "lock.test_mylock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.LOCK)]) @@ -116,7 +116,7 @@ async def test_lock_entity_supports_open( await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, - {ATTR_ENTITY_ID: "lock.test_my_lock"}, + {ATTR_ENTITY_ID: "lock.test_mylock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.UNLOCK, None)]) @@ -125,7 +125,7 @@ async def test_lock_entity_supports_open( await hass.services.async_call( LOCK_DOMAIN, SERVICE_OPEN, - {ATTR_ENTITY_ID: "lock.test_my_lock"}, + {ATTR_ENTITY_ID: "lock.test_mylock"}, blocking=True, ) mock_client.lock_command.assert_has_calls([call(1, LockCommand.OPEN)]) diff --git a/tests/components/esphome/test_media_player.py b/tests/components/esphome/test_media_player.py index c9fcecb5d55..3879129ccb6 100644 --- a/tests/components/esphome/test_media_player.py +++ b/tests/components/esphome/test_media_player.py @@ -62,7 +62,7 @@ async def test_media_player_entity( user_service=user_service, states=states, ) - state = hass.states.get("media_player.test_my_media_player") + state = hass.states.get("media_player.test_mymedia_player") assert state is not None assert state.state == "paused" @@ -70,7 +70,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", ATTR_MEDIA_VOLUME_MUTED: True, }, blocking=True, @@ -84,7 +84,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", ATTR_MEDIA_VOLUME_MUTED: True, }, blocking=True, @@ -98,7 +98,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_SET, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", ATTR_MEDIA_VOLUME_LEVEL: 0.5, }, blocking=True, @@ -110,7 +110,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PAUSE, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", }, blocking=True, ) @@ -123,7 +123,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PLAY, { - ATTR_ENTITY_ID: 
"media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", }, blocking=True, ) @@ -136,7 +136,7 @@ async def test_media_player_entity( MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_STOP, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", }, blocking=True, ) @@ -207,7 +207,7 @@ async def test_media_player_entity_with_source( user_service=user_service, states=states, ) - state = hass.states.get("media_player.test_my_media_player") + state = hass.states.get("media_player.test_mymedia_player") assert state is not None assert state.state == "playing" @@ -216,7 +216,7 @@ async def test_media_player_entity_with_source( MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, ATTR_MEDIA_CONTENT_ID: "media-source://local/xz", }, @@ -240,7 +240,7 @@ async def test_media_player_entity_with_source( MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", ATTR_MEDIA_CONTENT_TYPE: "audio/mp3", ATTR_MEDIA_CONTENT_ID: "media-source://local/xy", }, @@ -256,7 +256,7 @@ async def test_media_player_entity_with_source( { "id": 1, "type": "media_player/browse_media", - "entity_id": "media_player.test_my_media_player", + "entity_id": "media_player.test_mymedia_player", } ) response = await client.receive_json() @@ -266,7 +266,7 @@ async def test_media_player_entity_with_source( MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, { - ATTR_ENTITY_ID: "media_player.test_my_media_player", + ATTR_ENTITY_ID: "media_player.test_mymedia_player", ATTR_MEDIA_CONTENT_TYPE: MediaType.URL, ATTR_MEDIA_CONTENT_ID: "media-source://tts?message=hello", ATTR_MEDIA_ANNOUNCE: True, diff --git a/tests/components/esphome/test_number.py b/tests/components/esphome/test_number.py index 91a21e670f5..557425052f3 100644 --- a/tests/components/esphome/test_number.py +++ b/tests/components/esphome/test_number.py @@ -48,14 +48,14 @@ async def test_generic_number_entity( user_service=user_service, states=states, ) - state = hass.states.get("number.test_my_number") + state = hass.states.get("number.test_mynumber") assert state is not None assert state.state == "50" await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "number.test_my_number", ATTR_VALUE: 50}, + {ATTR_ENTITY_ID: "number.test_mynumber", ATTR_VALUE: 50}, blocking=True, ) mock_client.number_command.assert_has_calls([call(1, 50)]) @@ -89,7 +89,7 @@ async def test_generic_number_nan( user_service=user_service, states=states, ) - state = hass.states.get("number.test_my_number") + state = hass.states.get("number.test_mynumber") assert state is not None assert state.state == STATE_UNKNOWN @@ -121,7 +121,7 @@ async def test_generic_number_with_unit_of_measurement_as_empty_string( user_service=user_service, states=states, ) - state = hass.states.get("number.test_my_number") + state = hass.states.get("number.test_mynumber") assert state is not None assert state.state == "42" assert ATTR_UNIT_OF_MEASUREMENT not in state.attributes @@ -160,7 +160,7 @@ async def test_generic_number_entity_set_when_disconnected( await hass.services.async_call( NUMBER_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "number.test_my_number", ATTR_VALUE: 20}, + {ATTR_ENTITY_ID: "number.test_mynumber", ATTR_VALUE: 20}, blocking=True, ) mock_client.number_command.reset_mock() diff --git 
a/tests/components/esphome/test_select.py b/tests/components/esphome/test_select.py index 8df898ea3cf..a433b1b0ab0 100644 --- a/tests/components/esphome/test_select.py +++ b/tests/components/esphome/test_select.py @@ -59,14 +59,14 @@ async def test_select_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("select.test_my_select") + state = hass.states.get("select.test_myselect") assert state is not None assert state.state == "a" await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, - {ATTR_ENTITY_ID: "select.test_my_select", ATTR_OPTION: "b"}, + {ATTR_ENTITY_ID: "select.test_myselect", ATTR_OPTION: "b"}, blocking=True, ) mock_client.select_command.assert_has_calls([call(1, "b")]) diff --git a/tests/components/esphome/test_sensor.py b/tests/components/esphome/test_sensor.py index 5f68dbb8660..76f71b53167 100644 --- a/tests/components/esphome/test_sensor.py +++ b/tests/components/esphome/test_sensor.py @@ -62,35 +62,35 @@ async def test_generic_numeric_sensor( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "50" # Test updating state mock_device.set_state(SensorState(key=1, state=60)) await hass.async_block_till_done() - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "60" # Test sending the same state again mock_device.set_state(SensorState(key=1, state=60)) await hass.async_block_till_done() - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "60" # Test we can still update after the same state mock_device.set_state(SensorState(key=1, state=70)) await hass.async_block_till_done() - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "70" # Test invalid data from the underlying api does not crash us mock_device.set_state(SensorState(key=1, state=object())) await hass.async_block_till_done() - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "70" @@ -120,11 +120,11 @@ async def test_generic_numeric_sensor_with_entity_category_and_icon( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "50" assert state.attributes[ATTR_ICON] == "mdi:leaf" - entry = entity_registry.async_get("sensor.test_my_sensor") + entry = entity_registry.async_get("sensor.test_mysensor") assert entry is not None # Note that ESPHome includes the EntityInfo type in the unique id # as this is not a 1:1 mapping to the entity platform (ie. 
text_sensor) @@ -158,11 +158,11 @@ async def test_generic_numeric_sensor_state_class_measurement( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "50" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.MEASUREMENT - entry = entity_registry.async_get("sensor.test_my_sensor") + entry = entity_registry.async_get("sensor.test_mysensor") assert entry is not None # Note that ESPHome includes the EntityInfo type in the unique id # as this is not a 1:1 mapping to the entity platform (ie. text_sensor) @@ -193,7 +193,7 @@ async def test_generic_numeric_sensor_device_class_timestamp( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "2023-06-22T18:43:52+00:00" @@ -222,7 +222,7 @@ async def test_generic_numeric_sensor_legacy_last_reset_convert( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "50" assert state.attributes[ATTR_STATE_CLASS] == SensorStateClass.TOTAL_INCREASING @@ -248,7 +248,7 @@ async def test_generic_numeric_sensor_no_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -273,7 +273,7 @@ async def test_generic_numeric_sensor_nan_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -298,7 +298,7 @@ async def test_generic_numeric_sensor_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -325,7 +325,7 @@ async def test_generic_text_sensor( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "i am a teapot" @@ -350,7 +350,7 @@ async def test_generic_text_sensor_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == STATE_UNKNOWN @@ -378,7 +378,7 @@ async def test_generic_text_sensor_device_class_timestamp( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "2023-06-22T18:43:52+00:00" assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.TIMESTAMP @@ -407,7 +407,7 @@ async def test_generic_text_sensor_device_class_date( user_service=user_service, states=states, ) - state = hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "2023-06-22" assert state.attributes[ATTR_DEVICE_CLASS] == SensorDeviceClass.DATE @@ -434,7 +434,7 @@ async def test_generic_numeric_sensor_empty_string_uom( user_service=user_service, states=states, ) - state = 
hass.states.get("sensor.test_my_sensor") + state = hass.states.get("sensor.test_mysensor") assert state is not None assert state.state == "123" assert ATTR_UNIT_OF_MEASUREMENT not in state.attributes diff --git a/tests/components/esphome/test_switch.py b/tests/components/esphome/test_switch.py index 799290c931a..561ac0b369f 100644 --- a/tests/components/esphome/test_switch.py +++ b/tests/components/esphome/test_switch.py @@ -33,14 +33,14 @@ async def test_switch_generic_entity( user_service=user_service, states=states, ) - state = hass.states.get("switch.test_my_switch") + state = hass.states.get("switch.test_myswitch") assert state is not None assert state.state == STATE_ON await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: "switch.test_my_switch"}, + {ATTR_ENTITY_ID: "switch.test_myswitch"}, blocking=True, ) mock_client.switch_command.assert_has_calls([call(1, True)]) @@ -48,7 +48,7 @@ async def test_switch_generic_entity( await hass.services.async_call( SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: "switch.test_my_switch"}, + {ATTR_ENTITY_ID: "switch.test_myswitch"}, blocking=True, ) mock_client.switch_command.assert_has_calls([call(1, False)]) diff --git a/tests/components/esphome/test_text.py b/tests/components/esphome/test_text.py index e2b459be2d2..07157d98ac6 100644 --- a/tests/components/esphome/test_text.py +++ b/tests/components/esphome/test_text.py @@ -39,14 +39,14 @@ async def test_generic_text_entity( user_service=user_service, states=states, ) - state = hass.states.get("text.test_my_text") + state = hass.states.get("text.test_mytext") assert state is not None assert state.state == "hello world" await hass.services.async_call( TEXT_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "text.test_my_text", ATTR_VALUE: "goodbye"}, + {ATTR_ENTITY_ID: "text.test_mytext", ATTR_VALUE: "goodbye"}, blocking=True, ) mock_client.text_command.assert_has_calls([call(1, "goodbye")]) @@ -79,7 +79,7 @@ async def test_generic_text_entity_no_state( user_service=user_service, states=states, ) - state = hass.states.get("text.test_my_text") + state = hass.states.get("text.test_mytext") assert state is not None assert state.state == STATE_UNKNOWN @@ -110,6 +110,6 @@ async def test_generic_text_entity_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("text.test_my_text") + state = hass.states.get("text.test_mytext") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_time.py b/tests/components/esphome/test_time.py index 5277ca82e34..aaa18c77a47 100644 --- a/tests/components/esphome/test_time.py +++ b/tests/components/esphome/test_time.py @@ -35,14 +35,14 @@ async def test_generic_time_entity( user_service=user_service, states=states, ) - state = hass.states.get("time.test_my_time") + state = hass.states.get("time.test_mytime") assert state is not None assert state.state == "12:34:56" await hass.services.async_call( TIME_DOMAIN, SERVICE_SET_VALUE, - {ATTR_ENTITY_ID: "time.test_my_time", ATTR_TIME: "01:23:45"}, + {ATTR_ENTITY_ID: "time.test_mytime", ATTR_TIME: "01:23:45"}, blocking=True, ) mock_client.time_command.assert_has_calls([call(1, 1, 23, 45)]) @@ -71,6 +71,6 @@ async def test_generic_time_missing_state( user_service=user_service, states=states, ) - state = hass.states.get("time.test_my_time") + state = hass.states.get("time.test_mytime") assert state is not None assert state.state == STATE_UNKNOWN diff --git a/tests/components/esphome/test_update.py 
b/tests/components/esphome/test_update.py index ad638add0a0..83e89b1de00 100644 --- a/tests/components/esphome/test_update.py +++ b/tests/components/esphome/test_update.py @@ -464,7 +464,7 @@ async def test_generic_device_update_entity( user_service=user_service, states=states, ) - state = hass.states.get("update.test_my_update") + state = hass.states.get("update.test_myupdate") assert state is not None assert state.state == STATE_OFF @@ -503,14 +503,14 @@ async def test_generic_device_update_entity_has_update( user_service=user_service, states=states, ) - state = hass.states.get("update.test_my_update") + state = hass.states.get("update.test_myupdate") assert state is not None assert state.state == STATE_ON await hass.services.async_call( UPDATE_DOMAIN, SERVICE_INSTALL, - {ATTR_ENTITY_ID: "update.test_my_update"}, + {ATTR_ENTITY_ID: "update.test_myupdate"}, blocking=True, ) @@ -528,7 +528,7 @@ async def test_generic_device_update_entity_has_update( ) ) - state = hass.states.get("update.test_my_update") + state = hass.states.get("update.test_myupdate") assert state is not None assert state.state == STATE_ON assert state.attributes["in_progress"] == 50 @@ -536,7 +536,7 @@ async def test_generic_device_update_entity_has_update( await hass.services.async_call( HOMEASSISTANT_DOMAIN, SERVICE_UPDATE_ENTITY, - {ATTR_ENTITY_ID: "update.test_my_update"}, + {ATTR_ENTITY_ID: "update.test_myupdate"}, blocking=True, ) diff --git a/tests/components/esphome/test_valve.py b/tests/components/esphome/test_valve.py index 413e4cfcb9f..5ba7bcbe187 100644 --- a/tests/components/esphome/test_valve.py +++ b/tests/components/esphome/test_valve.py @@ -65,7 +65,7 @@ async def test_valve_entity( user_service=user_service, states=states, ) - state = hass.states.get("valve.test_my_valve") + state = hass.states.get("valve.test_myvalve") assert state is not None assert state.state == STATE_OPENING assert state.attributes[ATTR_CURRENT_POSITION] == 50 @@ -73,7 +73,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_CLOSE_VALVE, - {ATTR_ENTITY_ID: "valve.test_my_valve"}, + {ATTR_ENTITY_ID: "valve.test_myvalve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=0.0)]) @@ -82,7 +82,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_OPEN_VALVE, - {ATTR_ENTITY_ID: "valve.test_my_valve"}, + {ATTR_ENTITY_ID: "valve.test_myvalve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=1.0)]) @@ -91,7 +91,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_SET_VALVE_POSITION, - {ATTR_ENTITY_ID: "valve.test_my_valve", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: "valve.test_myvalve", ATTR_POSITION: 50}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=0.5)]) @@ -100,7 +100,7 @@ async def test_valve_entity( await hass.services.async_call( VALVE_DOMAIN, SERVICE_STOP_VALVE, - {ATTR_ENTITY_ID: "valve.test_my_valve"}, + {ATTR_ENTITY_ID: "valve.test_myvalve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, stop=True)]) @@ -110,7 +110,7 @@ async def test_valve_entity( ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_my_valve") + state = hass.states.get("valve.test_myvalve") assert state is not None assert state.state == STATE_CLOSED @@ -118,7 +118,7 @@ async def test_valve_entity( ValveState(key=1, position=0.5, 
current_operation=ValveOperation.IS_CLOSING) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_my_valve") + state = hass.states.get("valve.test_myvalve") assert state is not None assert state.state == STATE_CLOSING @@ -126,7 +126,7 @@ async def test_valve_entity( ValveState(key=1, position=1.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_my_valve") + state = hass.states.get("valve.test_myvalve") assert state is not None assert state.state == STATE_OPEN @@ -164,7 +164,7 @@ async def test_valve_entity_without_position( user_service=user_service, states=states, ) - state = hass.states.get("valve.test_my_valve") + state = hass.states.get("valve.test_myvalve") assert state is not None assert state.state == STATE_OPENING assert ATTR_CURRENT_POSITION not in state.attributes @@ -172,7 +172,7 @@ async def test_valve_entity_without_position( await hass.services.async_call( VALVE_DOMAIN, SERVICE_CLOSE_VALVE, - {ATTR_ENTITY_ID: "valve.test_my_valve"}, + {ATTR_ENTITY_ID: "valve.test_myvalve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=0.0)]) @@ -181,7 +181,7 @@ async def test_valve_entity_without_position( await hass.services.async_call( VALVE_DOMAIN, SERVICE_OPEN_VALVE, - {ATTR_ENTITY_ID: "valve.test_my_valve"}, + {ATTR_ENTITY_ID: "valve.test_myvalve"}, blocking=True, ) mock_client.valve_command.assert_has_calls([call(key=1, position=1.0)]) @@ -191,6 +191,6 @@ async def test_valve_entity_without_position( ValveState(key=1, position=0.0, current_operation=ValveOperation.IDLE) ) await hass.async_block_till_done() - state = hass.states.get("valve.test_my_valve") + state = hass.states.get("valve.test_myvalve") assert state is not None assert state.state == STATE_CLOSED From f53da62026cc508d86703f45788418dfd8225597 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Sat, 10 Aug 2024 19:25:21 +0200 Subject: [PATCH 0546/1635] Extend ZHA attribute diagnostic information (#123199) * Include full attribute representation in in data * Extend attribute diagnostics for zha --- homeassistant/components/zha/diagnostics.py | 22 ++------ .../zha/snapshots/test_diagnostics.ambr | 50 ++++++++++++++----- 2 files changed, 42 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index bc4738d032a..3e598c4d5f8 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -7,7 +7,7 @@ from importlib.metadata import version from typing import Any from zha.application.const import ( - ATTR_ATTRIBUTE_NAME, + ATTR_ATTRIBUTE, ATTR_DEVICE_TYPE, ATTR_IEEE, ATTR_IN_CLUSTERS, @@ -158,27 +158,13 @@ def get_endpoint_cluster_attr_data(zha_device: Device) -> dict: def get_cluster_attr_data(cluster: Cluster) -> dict: """Return cluster attribute data.""" - unsupported_attributes = {} - for u_attr in cluster.unsupported_attributes: - try: - u_attr_def = cluster.find_attribute(u_attr) - unsupported_attributes[f"0x{u_attr_def.id:04x}"] = { - ATTR_ATTRIBUTE_NAME: u_attr_def.name - } - except KeyError: - if isinstance(u_attr, int): - unsupported_attributes[f"0x{u_attr:04x}"] = {} - else: - unsupported_attributes[u_attr] = {} - return { ATTRIBUTES: { f"0x{attr_id:04x}": { - ATTR_ATTRIBUTE_NAME: attr_def.name, - ATTR_VALUE: attr_value, + ATTR_ATTRIBUTE: repr(attr_def), + ATTR_VALUE: cluster.get(attr_def.name), } for attr_id, attr_def in cluster.attributes.items() - if (attr_value := 
cluster.get(attr_def.name)) is not None }, - UNSUPPORTED_ATTRIBUTES: unsupported_attributes, + UNSUPPORTED_ATTRIBUTES: cluster.unsupported_attributes, } diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index 8899712b99d..67655aebc8c 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -161,8 +161,20 @@ 'in_clusters': dict({ '0x0500': dict({ 'attributes': dict({ + '0x0000': dict({ + 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0001': dict({ + 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0002': dict({ + 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), '0x0010': dict({ - 'attribute_name': 'cie_addr', + 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': list([ 50, 79, @@ -174,27 +186,41 @@ 0, ]), }), - }), - 'endpoint_attribute': 'ias_zone', - 'unsupported_attributes': dict({ + '0x0011': dict({ + 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), '0x0012': dict({ - 'attribute_name': 'num_zone_sensitivity_levels_supported', + 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, }), '0x0013': dict({ - 'attribute_name': 'current_zone_sensitivity_level', + 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, }), }), + 'endpoint_attribute': 'ias_zone', + 'unsupported_attributes': list([ + 18, + 'current_zone_sensitivity_level', + ]), }), '0x0501': dict({ 'attributes': dict({ + '0xfffd': dict({ + 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0xfffe': dict({ + 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), }), 'endpoint_attribute': 'ias_ace', - 'unsupported_attributes': dict({ - '0x1000': dict({ - }), - 'unknown_attribute_name': dict({ - }), - }), + 'unsupported_attributes': list([ + 4096, + 'unknown_attribute_name', + ]), }), }), 'out_clusters': dict({ From f69507527b75eccc01dd2e9ba448b5533db0b1f0 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Sat, 10 Aug 2024 20:55:31 +0200 Subject: [PATCH 0547/1635] Make sure diagnostic data is output in deterministic order ZHA (#123551) Make sure diagnostic data is output in deterministic order Sets are not ordered, so the tests for this failed sporadically since it is converted into a list when converted to json. 
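A minimal sketch of the sort-key trick this patch applies, using the mixed int/str values seen in the ZHA snapshot above (the set literal itself is illustrative, not part of the patch):

```python
# ZHA tracks unsupported attributes as a set mixing numeric IDs and attribute names.
# These example members are borrowed from the snapshot above; the literal is illustrative.
unsupported = {4096, "unknown_attribute_name", 18, "current_zone_sensitivity_level"}

# A plain sorted(unsupported) raises TypeError because int and str do not compare.
# Keying on (isinstance(v, str), v) puts ints (False) before strs (True) and then
# orders each group by value, so the JSON-serialized list is identical on every run.
ordered = sorted(unsupported, key=lambda v: (isinstance(v, str), v))
print(ordered)  # [18, 4096, 'current_zone_sensitivity_level', 'unknown_attribute_name']
```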
--- homeassistant/components/zha/diagnostics.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index 3e598c4d5f8..f276630dfee 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -166,5 +166,7 @@ def get_cluster_attr_data(cluster: Cluster) -> dict: } for attr_id, attr_def in cluster.attributes.items() }, - UNSUPPORTED_ATTRIBUTES: cluster.unsupported_attributes, + UNSUPPORTED_ATTRIBUTES: sorted( + cluster.unsupported_attributes, key=lambda v: (isinstance(v, str), v) + ), } From 7aed35b3f0043bb802aadc9f258ac37d0a743e0f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 10 Aug 2024 15:09:18 -0500 Subject: [PATCH 0548/1635] Bump aiohttp to 3.10.3 (#123549) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 9a23a5e42e0..1f7c6bd61cc 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.2 +aiohttp==3.10.3 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index cb928c736a2..e94c9e96225 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.2", + "aiohttp==3.10.3", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index 7efddf5c765..556f9013cee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.2 +aiohttp==3.10.3 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From 9be8616cc072e34eb624cc289eb6d5f854202310 Mon Sep 17 00:00:00 2001 From: Pavel Skuratovich Date: Sun, 11 Aug 2024 13:54:31 +0300 Subject: [PATCH 0549/1635] Add state_class to starline sensors to generate long-term statistics (#123540) * starline: Add state_class to sensors to generate long-term statistics * starline: Add 'errors' unit to 'errors' sensor --- homeassistant/components/starline/sensor.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/homeassistant/components/starline/sensor.py b/homeassistant/components/starline/sensor.py index a53751a3b23..f9bd304c1e1 100644 --- a/homeassistant/components/starline/sensor.py +++ b/homeassistant/components/starline/sensor.py @@ -6,6 +6,7 @@ from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -30,47 +31,57 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] 
= ( translation_key="battery", device_class=SensorDeviceClass.VOLTAGE, native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="balance", translation_key="balance", + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="ctemp", translation_key="interior_temperature", device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="etemp", translation_key="engine_temperature", device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="gsm_lvl", translation_key="gsm_signal", native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="fuel", translation_key="fuel", + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="errors", translation_key="errors", + native_unit_of_measurement="errors", entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="mileage", translation_key="mileage", native_unit_of_measurement=UnitOfLength.KILOMETERS, device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.TOTAL_INCREASING, ), SensorEntityDescription( key="gps_count", translation_key="gps_count", native_unit_of_measurement="satellites", + state_class=SensorStateClass.MEASUREMENT, ), ) From e93d0dfdfc43df4240b1e2d6918e3377b73f2bd2 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 11 Aug 2024 14:15:20 +0200 Subject: [PATCH 0550/1635] Use setup method in coordinator for Trafikverket Train (#123138) * Use setup method in coordinator for Trafikverket Train * Overwrite types --- .../components/trafikverket_train/__init__.py | 28 ++-------------- .../trafikverket_train/coordinator.py | 32 +++++++++++++------ .../components/trafikverket_train/conftest.py | 2 +- .../trafikverket_train/test_config_flow.py | 2 +- .../trafikverket_train/test_init.py | 8 ++--- 5 files changed, 31 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/trafikverket_train/__init__.py b/homeassistant/components/trafikverket_train/__init__.py index 4bf1f681807..3e807df9301 100644 --- a/homeassistant/components/trafikverket_train/__init__.py +++ b/homeassistant/components/trafikverket_train/__init__.py @@ -2,21 +2,11 @@ from __future__ import annotations -from pytrafikverket import TrafikverketTrain -from pytrafikverket.exceptions import ( - InvalidAuthentication, - MultipleTrainStationsFound, - NoTrainStationFound, -) - from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import CONF_FROM, CONF_TO, PLATFORMS +from .const import PLATFORMS from .coordinator import TVDataUpdateCoordinator TVTrainConfigEntry = ConfigEntry[TVDataUpdateCoordinator] @@ -25,21 +15,7 @@ TVTrainConfigEntry = ConfigEntry[TVDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: TVTrainConfigEntry) -> bool: """Set up Trafikverket Train from a config entry.""" - http_session = async_get_clientsession(hass) - train_api = TrafikverketTrain(http_session, 
entry.data[CONF_API_KEY]) - - try: - to_station = await train_api.async_get_train_station(entry.data[CONF_TO]) - from_station = await train_api.async_get_train_station(entry.data[CONF_FROM]) - except InvalidAuthentication as error: - raise ConfigEntryAuthFailed from error - except (NoTrainStationFound, MultipleTrainStationsFound) as error: - raise ConfigEntryNotReady( - f"Problem when trying station {entry.data[CONF_FROM]} to" - f" {entry.data[CONF_TO]}. Error: {error} " - ) from error - - coordinator = TVDataUpdateCoordinator(hass, to_station, from_station) + coordinator = TVDataUpdateCoordinator(hass) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/trafikverket_train/coordinator.py b/homeassistant/components/trafikverket_train/coordinator.py index 66ef3e6a1d2..16a7a649b85 100644 --- a/homeassistant/components/trafikverket_train/coordinator.py +++ b/homeassistant/components/trafikverket_train/coordinator.py @@ -10,7 +10,9 @@ from typing import TYPE_CHECKING from pytrafikverket import TrafikverketTrain from pytrafikverket.exceptions import ( InvalidAuthentication, + MultipleTrainStationsFound, NoTrainAnnouncementFound, + NoTrainStationFound, UnknownError, ) from pytrafikverket.models import StationInfoModel, TrainStopModel @@ -22,7 +24,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt as dt_util -from .const import CONF_FILTER_PRODUCT, CONF_TIME, DOMAIN +from .const import CONF_FILTER_PRODUCT, CONF_FROM, CONF_TIME, CONF_TO, DOMAIN from .util import next_departuredate if TYPE_CHECKING: @@ -69,13 +71,10 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): """A Trafikverket Data Update Coordinator.""" config_entry: TVTrainConfigEntry + from_station: StationInfoModel + to_station: StationInfoModel - def __init__( - self, - hass: HomeAssistant, - to_station: StationInfoModel, - from_station: StationInfoModel, - ) -> None: + def __init__(self, hass: HomeAssistant) -> None: """Initialize the Trafikverket coordinator.""" super().__init__( hass, @@ -86,14 +85,29 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): self._train_api = TrafikverketTrain( async_get_clientsession(hass), self.config_entry.data[CONF_API_KEY] ) - self.from_station: StationInfoModel = from_station - self.to_station: StationInfoModel = to_station self._time: time | None = dt_util.parse_time(self.config_entry.data[CONF_TIME]) self._weekdays: list[str] = self.config_entry.data[CONF_WEEKDAY] self._filter_product: str | None = self.config_entry.options.get( CONF_FILTER_PRODUCT ) + async def _async_setup(self) -> None: + """Initiate stations.""" + try: + self.to_station = await self._train_api.async_get_train_station( + self.config_entry.data[CONF_TO] + ) + self.from_station = await self._train_api.async_get_train_station( + self.config_entry.data[CONF_FROM] + ) + except InvalidAuthentication as error: + raise ConfigEntryAuthFailed from error + except (NoTrainStationFound, MultipleTrainStationsFound) as error: + raise UpdateFailed( + f"Problem when trying station {self.config_entry.data[CONF_FROM]} to" + f" {self.config_entry.data[CONF_TO]}. 
Error: {error} " + ) from error + async def _async_update_data(self) -> TrainData: """Fetch data from Trafikverket.""" diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 4915635e316..14671d27252 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -38,7 +38,7 @@ async def load_integration_from_entry( return_value=get_train_stop, ), patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), ): await hass.config_entries.async_setup(config_entry_id) diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 400f396d355..83cc5a89016 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -499,7 +499,7 @@ async def test_options_flow( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index 06598297dd1..c8fea174e83 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -34,7 +34,7 @@ async def test_unload_entry( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -69,7 +69,7 @@ async def test_auth_failed( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", side_effect=InvalidAuthentication, ): await hass.config_entries.async_setup(entry.entry_id) @@ -99,7 +99,7 @@ async def test_no_stations( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", side_effect=NoTrainStationFound, ): await hass.config_entries.async_setup(entry.entry_id) @@ -135,7 +135,7 @@ async def test_migrate_entity_unique_id( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", From be3e720c574cf77146bff8b72a739c9eb225ad97 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Sun, 11 Aug 2024 13:53:44 +0100 Subject: [PATCH 0551/1635] Add diagnostics platform to Mastodon (#123592) Diagnostics --- .../components/mastodon/diagnostics.py | 35 +++ .../mastodon/snapshots/test_diagnostics.ambr | 247 ++++++++++++++++++ tests/components/mastodon/test_diagnostics.py | 28 ++ 3 files changed, 310 
insertions(+) create mode 100644 homeassistant/components/mastodon/diagnostics.py create mode 100644 tests/components/mastodon/snapshots/test_diagnostics.ambr create mode 100644 tests/components/mastodon/test_diagnostics.py diff --git a/homeassistant/components/mastodon/diagnostics.py b/homeassistant/components/mastodon/diagnostics.py new file mode 100644 index 00000000000..7246ae9cf63 --- /dev/null +++ b/homeassistant/components/mastodon/diagnostics.py @@ -0,0 +1,35 @@ +"""Diagnostics support for the Mastodon integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import MastodonConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: MastodonConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + instance, account = await hass.async_add_executor_job( + get_diagnostics, + config_entry, + ) + + return { + "instance": instance, + "account": account, + } + + +def get_diagnostics(config_entry: MastodonConfigEntry) -> tuple[dict, dict]: + """Get mastodon diagnostics.""" + client = config_entry.runtime_data.client + + instance = client.instance() + account = client.account_verify_credentials() + + return instance, account diff --git a/tests/components/mastodon/snapshots/test_diagnostics.ambr b/tests/components/mastodon/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..982ecee7ee2 --- /dev/null +++ b/tests/components/mastodon/snapshots/test_diagnostics.ambr @@ -0,0 +1,247 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'account': dict({ + 'acct': 'trwnh', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', + 'bot': False, + 'created_at': '2016-11-24T10:02:12.085Z', + 'display_name': 'infinite love ⴳ', + 'emojis': list([ + dict({ + 'shortcode': 'fatyoshi', + 'static_url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png', + 'url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png', + 'visible_in_picker': True, + }), + ]), + 'fields': list([ + dict({ + 'name': 'Website', + 'value': 'trwnh.com', + 'verified_at': '2019-08-29T04:14:55.571+00:00', + }), + dict({ + 'name': 'Sponsor', + 'value': 'liberapay.com/at', + 'verified_at': '2019-11-15T10:06:15.557+00:00', + }), + dict({ + 'name': 'Fan of:', + 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', + 'verified_at': None, + }), + dict({ + 'name': 'Main topics:', + 'value': 'systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!', + 'verified_at': None, + }), + ]), + 'followers_count': 821, + 'following_count': 178, + 'header': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'header_static': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'id': '14715', + 'last_status_at': '2019-11-24T15:49:42.251Z', + 'locked': False, + 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they)

xmpp/email: a@trwnh.com
trwnh.com
help me live: liberapay.com/at or paypal.me/trwnh

- my triggers are moths and glitter
- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise
- dm me if i did something wrong, so i can improve
- purest person on fedi, do not lewd in my presence
- #1 ami cole fan account

:fatyoshi:

', + 'source': dict({ + 'fields': list([ + dict({ + 'name': 'Website', + 'value': 'https://trwnh.com', + 'verified_at': '2019-08-29T04:14:55.571+00:00', + }), + dict({ + 'name': 'Sponsor', + 'value': 'https://liberapay.com/at', + 'verified_at': '2019-11-15T10:06:15.557+00:00', + }), + dict({ + 'name': 'Fan of:', + 'value': "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + 'verified_at': None, + }), + dict({ + 'name': 'Main topics:', + 'value': "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", + 'verified_at': None, + }), + ]), + 'follow_requests_count': 0, + 'language': '', + 'note': ''' + i have approximate knowledge of many things. perpetual student. (nb/ace/they) + + xmpp/email: a@trwnh.com + https://trwnh.com + help me live: https://liberapay.com/at or https://paypal.me/trwnh + + - my triggers are moths and glitter + - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise + - dm me if i did something wrong, so i can improve + - purest person on fedi, do not lewd in my presence + - #1 ami cole fan account + + :fatyoshi: + ''', + 'privacy': 'public', + 'sensitive': False, + }), + 'statuses_count': 33120, + 'url': 'https://mastodon.social/@trwnh', + 'username': 'trwnh', + }), + 'instance': dict({ + 'configuration': dict({ + 'accounts': dict({ + 'max_featured_tags': 10, + 'max_pinned_statuses': 4, + }), + 'media_attachments': dict({ + 'image_matrix_limit': 16777216, + 'image_size_limit': 10485760, + 'supported_mime_types': list([ + 'image/jpeg', + 'image/png', + 'image/gif', + 'image/heic', + 'image/heif', + 'image/webp', + 'video/webm', + 'video/mp4', + 'video/quicktime', + 'video/ogg', + 'audio/wave', + 'audio/wav', + 'audio/x-wav', + 'audio/x-pn-wave', + 'audio/vnd.wave', + 'audio/ogg', + 'audio/vorbis', + 'audio/mpeg', + 'audio/mp3', + 'audio/webm', + 'audio/flac', + 'audio/aac', + 'audio/m4a', + 'audio/x-m4a', + 'audio/mp4', + 'audio/3gpp', + 'video/x-ms-asf', + ]), + 'video_frame_rate_limit': 60, + 'video_matrix_limit': 2304000, + 'video_size_limit': 41943040, + }), + 'polls': dict({ + 'max_characters_per_option': 50, + 'max_expiration': 2629746, + 'max_options': 4, + 'min_expiration': 300, + }), + 'statuses': dict({ + 'characters_reserved_per_url': 23, + 'max_characters': 500, + 'max_media_attachments': 4, + }), + 'translation': dict({ + 'enabled': True, + }), + 'urls': dict({ + 'streaming': 'wss://mastodon.social', + }), + 'vapid': dict({ + 'public_key': 'BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=', + }), + }), + 'contact': dict({ + 'account': dict({ + 'acct': 'Gargron', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', + 'bot': False, + 'created_at': '2016-03-16T00:00:00.000Z', + 'discoverable': True, + 'display_name': 'Eugen 💀', + 'emojis': list([ + ]), + 'fields': list([ + dict({ + 'name': 'Patreon', + 'value': 'patreon.com/mastodon', + 'verified_at': None, + }), + ]), + 'followers_count': 133026, + 'following_count': 311, + 'group': False, + 'header': 
'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', + 'header_static': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', + 'id': '1', + 'last_status_at': '2022-10-31', + 'locked': False, + 'noindex': False, + 'note': '

Founder, CEO and lead developer @Mastodon, Germany.

', + 'statuses_count': 72605, + 'url': 'https://mastodon.social/@Gargron', + 'username': 'Gargron', + }), + 'email': 'staff@mastodon.social', + }), + 'description': 'The original server operated by the Mastodon gGmbH non-profit', + 'domain': 'mastodon.social', + 'languages': list([ + 'en', + ]), + 'registrations': dict({ + 'approval_required': False, + 'enabled': False, + 'message': None, + }), + 'rules': list([ + dict({ + 'id': '1', + 'text': 'Sexually explicit or violent media must be marked as sensitive when posting', + }), + dict({ + 'id': '2', + 'text': 'No racism, sexism, homophobia, transphobia, xenophobia, or casteism', + }), + dict({ + 'id': '3', + 'text': 'No incitement of violence or promotion of violent ideologies', + }), + dict({ + 'id': '4', + 'text': 'No harassment, dogpiling or doxxing of other users', + }), + dict({ + 'id': '5', + 'text': 'No content illegal in Germany', + }), + dict({ + 'id': '7', + 'text': 'Do not share intentionally false or misleading information', + }), + ]), + 'source_url': 'https://github.com/mastodon/mastodon', + 'thumbnail': dict({ + 'blurhash': 'UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$', + 'url': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', + 'versions': dict({ + '@1x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', + '@2x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png', + }), + }), + 'title': 'Mastodon', + 'usage': dict({ + 'users': dict({ + 'active_month': 123122, + }), + }), + 'version': '4.0.0rc1', + }), + }) +# --- diff --git a/tests/components/mastodon/test_diagnostics.py b/tests/components/mastodon/test_diagnostics.py new file mode 100644 index 00000000000..c2de15d1a51 --- /dev/null +++ b/tests/components/mastodon/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Mastodon diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) From b392d6139131aac2895ae1c8e43f9642474a1684 Mon Sep 17 00:00:00 2001 From: Carlos Gustavo Sarmiento Date: Sun, 11 Aug 2024 11:50:11 -0500 Subject: [PATCH 0552/1635] Update MPD Player to use HOST and PORT to detect duplicate configs (#123410) * Allow Monetary device_class to accept `Measurement` state_class * Update MPD Player to use HOST and PORT to detect duplicate configs --- homeassistant/components/mpd/config_flow.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mpd/config_flow.py b/homeassistant/components/mpd/config_flow.py index 619fb8936e2..f37ebe5e5e8 100644 --- a/homeassistant/components/mpd/config_flow.py +++ b/homeassistant/components/mpd/config_flow.py @@ -32,7 +32,9 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by the user.""" errors = {} if user_input: - self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + self._async_abort_entries_match( + {CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]} + ) client = MPDClient() client.timeout = 30 client.idletimeout = 10 From 766733b3b26104552700365ef2f51b32f60d4f4d Mon Sep 17 00:00:00 2001 From: wittypluck Date: Sun, 11 Aug 2024 19:14:43 +0200 Subject: [PATCH 0553/1635] Avoid Exception on Glances missing key (#114628) * Handle case of sensors removed server side * Update available state on value update * Set uptime to None if key is missing * Replace _attr_available by _data_valid --- .../components/glances/coordinator.py | 10 ++--- homeassistant/components/glances/sensor.py | 25 ++++++------ tests/components/glances/test_sensor.py | 38 +++++++++++++++++++ 3 files changed, 56 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/glances/coordinator.py b/homeassistant/components/glances/coordinator.py index 4e5bdcc1543..8882b097ba9 100644 --- a/homeassistant/components/glances/coordinator.py +++ b/homeassistant/components/glances/coordinator.py @@ -45,15 +45,13 @@ class GlancesDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): except exceptions.GlancesApiError as err: raise UpdateFailed from err # Update computed values - uptime: datetime | None = self.data["computed"]["uptime"] if self.data else None + uptime: datetime | None = None up_duration: timedelta | None = None - if up_duration := parse_duration(data.get("uptime")): + if "uptime" in data and (up_duration := parse_duration(data["uptime"])): + uptime = self.data["computed"]["uptime"] if self.data else None # Update uptime if previous value is None or previous uptime is bigger than # new uptime (i.e. 
server restarted) - if ( - self.data is None - or self.data["computed"]["uptime_duration"] > up_duration - ): + if uptime is None or self.data["computed"]["uptime_duration"] > up_duration: uptime = utcnow() - up_duration data["computed"] = {"uptime_duration": up_duration, "uptime": uptime} return data or {} diff --git a/homeassistant/components/glances/sensor.py b/homeassistant/components/glances/sensor.py index a1cb8e47b9d..59eba69d60a 100644 --- a/homeassistant/components/glances/sensor.py +++ b/homeassistant/components/glances/sensor.py @@ -325,6 +325,7 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit entity_description: GlancesSensorEntityDescription _attr_has_entity_name = True + _data_valid: bool = False def __init__( self, @@ -351,14 +352,7 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit @property def available(self) -> bool: """Set sensor unavailable when native value is invalid.""" - if super().available: - return ( - not self._numeric_state_expected - or isinstance(value := self.native_value, (int, float)) - or isinstance(value, str) - and value.isnumeric() - ) - return False + return super().available and self._data_valid @callback def _handle_coordinator_update(self) -> None: @@ -368,10 +362,19 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit def _update_native_value(self) -> None: """Update sensor native value from coordinator data.""" - data = self.coordinator.data[self.entity_description.type] - if dict_val := data.get(self._sensor_label): + data = self.coordinator.data.get(self.entity_description.type) + if data and (dict_val := data.get(self._sensor_label)): self._attr_native_value = dict_val.get(self.entity_description.key) - elif self.entity_description.key in data: + elif data and (self.entity_description.key in data): self._attr_native_value = data.get(self.entity_description.key) else: self._attr_native_value = None + self._update_data_valid() + + def _update_data_valid(self) -> None: + self._data_valid = self._attr_native_value is not None and ( + not self._numeric_state_expected + or isinstance(self._attr_native_value, (int, float)) + or isinstance(self._attr_native_value, str) + and self._attr_native_value.isnumeric() + ) diff --git a/tests/components/glances/test_sensor.py b/tests/components/glances/test_sensor.py index 7dee47680ed..8e0367a712c 100644 --- a/tests/components/glances/test_sensor.py +++ b/tests/components/glances/test_sensor.py @@ -7,6 +7,7 @@ from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion from homeassistant.components.glances.const import DOMAIN +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -71,3 +72,40 @@ async def test_uptime_variation( async_fire_time_changed(hass) await hass.async_block_till_done() assert hass.states.get("sensor.0_0_0_0_uptime").state == "2024-02-15T12:49:52+00:00" + + +async def test_sensor_removed( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_api: AsyncMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test sensor removed server side.""" + + # Init with reference time + freezer.move_to(MOCK_REFERENCE_DATE) + entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_INPUT, entry_id="test") + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert 
hass.states.get("sensor.0_0_0_0_ssl_disk_used").state != STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_memory_use").state != STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_uptime").state != STATE_UNAVAILABLE + + # Remove some sensors from Glances API data + mock_data = HA_SENSOR_DATA.copy() + mock_data.pop("fs") + mock_data.pop("mem") + mock_data.pop("uptime") + mock_api.return_value.get_ha_sensor_data = AsyncMock(return_value=mock_data) + + # Server stops providing some sensors, so state should switch to Unavailable + freezer.move_to(MOCK_REFERENCE_DATE + timedelta(minutes=2)) + freezer.tick(delta=timedelta(seconds=120)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get("sensor.0_0_0_0_ssl_disk_used").state == STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_memory_use").state == STATE_UNAVAILABLE + assert hass.states.get("sensor.0_0_0_0_uptime").state == STATE_UNAVAILABLE From a040f1a9d15621c8ae6f13fddea35e1ad6ede95f Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Sun, 11 Aug 2024 19:56:12 +0200 Subject: [PATCH 0554/1635] Bump `aioshelly` to version 11.2.0 (#123602) Bump aioshelly to version 11.2.0 --- homeassistant/components/shelly/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index 1e65a51733d..c742b45632c 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.1.0"], + "requirements": ["aioshelly==11.2.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 29146455616..f1900dea123 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -359,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.1.0 +aioshelly==11.2.0 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 46889f670ee..63e63f48e56 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -341,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.1.0 +aioshelly==11.2.0 # homeassistant.components.skybell aioskybell==22.7.0 From 4daefe0b6e9b7f83e907da2adcaa7d165f5a9098 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 11 Aug 2024 22:37:33 +0200 Subject: [PATCH 0555/1635] Remove deprecated fan as light in lutron (#123607) * Remove deprecated fan as light in lutron * Remove more --- homeassistant/components/lutron/__init__.py | 2 - homeassistant/components/lutron/light.py | 84 +------------------- homeassistant/components/lutron/strings.json | 14 ---- 3 files changed, 4 insertions(+), 96 deletions(-) diff --git a/homeassistant/components/lutron/__init__.py b/homeassistant/components/lutron/__init__.py index 1521a05df8e..45a51eb6df8 100644 --- a/homeassistant/components/lutron/__init__.py +++ b/homeassistant/components/lutron/__init__.py @@ -82,8 +82,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b elif output.type == "CEILING_FAN_TYPE": entry_data.fans.append((area.name, output)) platform = Platform.FAN - # Deprecated, should be removed in 2024.8 - entry_data.lights.append((area.name, 
output)) elif output.is_dimmable: entry_data.lights.append((area.name, output)) platform = Platform.LIGHT diff --git a/homeassistant/components/lutron/light.py b/homeassistant/components/lutron/light.py index eb003fd431a..7e8829b231c 100644 --- a/homeassistant/components/lutron/light.py +++ b/homeassistant/components/lutron/light.py @@ -3,12 +3,10 @@ from __future__ import annotations from collections.abc import Mapping -import logging from typing import Any from pylutron import Output -from homeassistant.components.automation import automations_with_entity from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_FLASH, @@ -17,23 +15,13 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - create_issue, -) from . import DOMAIN, LutronData from .entity import LutronDevice -_LOGGER = logging.getLogger(__name__) - async def async_setup_entry( hass: HomeAssistant, @@ -45,50 +33,13 @@ async def async_setup_entry( Adds dimmers from the Main Repeater associated with the config_entry as light entities. """ - ent_reg = er.async_get(hass) entry_data: LutronData = hass.data[DOMAIN][config_entry.entry_id] - lights = [] - - for area_name, device in entry_data.lights: - if device.type == "CEILING_FAN_TYPE": - # If this is a fan, check to see if this entity already exists. - # If not, do not create a new one. - entity_id = ent_reg.async_get_entity_id( - Platform.LIGHT, - DOMAIN, - f"{entry_data.client.guid}_{device.uuid}", - ) - if entity_id: - entity_entry = ent_reg.async_get(entity_id) - assert entity_entry - if entity_entry.disabled: - # If the entity exists and is disabled then we want to remove - # the entity so that the user is using the new fan entity instead. 
- ent_reg.async_remove(entity_id) - else: - lights.append(LutronLight(area_name, device, entry_data.client)) - entity_automations = automations_with_entity(hass, entity_id) - entity_scripts = scripts_with_entity(hass, entity_id) - for item in entity_automations + entity_scripts: - async_create_issue( - hass, - DOMAIN, - f"deprecated_light_fan_{entity_id}_{item}", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_light_fan_entity", - translation_placeholders={ - "entity": entity_id, - "info": item, - }, - ) - else: - lights.append(LutronLight(area_name, device, entry_data.client)) async_add_entities( - lights, + ( + LutronLight(area_name, device, entry_data.client) + for area_name, device in entry_data.lights + ), True, ) @@ -113,24 +64,8 @@ class LutronLight(LutronDevice, LightEntity): _prev_brightness: int | None = None _attr_name = None - def __init__(self, area_name, lutron_device, controller) -> None: - """Initialize the light.""" - super().__init__(area_name, lutron_device, controller) - self._is_fan = lutron_device.type == "CEILING_FAN_TYPE" - def turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - if self._is_fan: - create_issue( - self.hass, - DOMAIN, - "deprecated_light_fan_on", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_light_fan_on", - ) if flash := kwargs.get(ATTR_FLASH): self._lutron_device.flash(0.5 if flash == "short" else 1.5) else: @@ -148,17 +83,6 @@ class LutronLight(LutronDevice, LightEntity): def turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - if self._is_fan: - create_issue( - self.hass, - DOMAIN, - "deprecated_light_fan_off", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_light_fan_off", - ) args = {"new_level": 0} if ATTR_TRANSITION in kwargs: args["fade_time_seconds"] = kwargs[ATTR_TRANSITION] diff --git a/homeassistant/components/lutron/strings.json b/homeassistant/components/lutron/strings.json index d5197375dc1..770a453eb9e 100644 --- a/homeassistant/components/lutron/strings.json +++ b/homeassistant/components/lutron/strings.json @@ -36,19 +36,5 @@ } } } - }, - "issues": { - "deprecated_light_fan_entity": { - "title": "Detected Lutron fan entity created as a light", - "description": "Fan entities have been added to the Lutron integration.\nWe detected that entity `{entity}` is being used in `{info}`\n\nWe have created a new fan entity and you should migrate `{info}` to use this new entity.\n\nWhen you are done migrating `{info}` and are ready to have the deprecated light entity removed, disable the entity and restart Home Assistant." - }, - "deprecated_light_fan_on": { - "title": "The Lutron integration deprecated fan turned on", - "description": "Fan entities have been added to the Lutron integration.\nPreviously fans were created as lights; this behavior is now deprecated.\n\nYour configuration just turned on a fan created as a light. You should migrate your scenes and automations to use the new fan entity.\n\nWhen you are done migrating your automations and are ready to have the deprecated light entity removed, disable the entity and restart Home Assistant.\n\nAn issue will be created each time the incorrect entity is used to remind you to migrate." 
- }, - "deprecated_light_fan_off": { - "title": "The Lutron integration deprecated fan turned off", - "description": "Fan entities have been added to the Lutron integration.\nPreviously fans were created as lights; this behavior is now deprecated.\n\nYour configuration just turned off a fan created as a light. You should migrate your scenes and automations to use the new fan entity.\n\nWhen you are done migrating your automations and are ready to have the deprecated light entity removed, disable the entity and restart Home Assistant.\n\nAn issue will be created each time the incorrect entity is used to remind you to migrate." - } } } From 4a099ab9423f4bc8b445bf437df0ae8739d7f248 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 11 Aug 2024 22:38:20 +0200 Subject: [PATCH 0556/1635] Remove deprecated yaml import from lupusec (#123606) --- homeassistant/components/lupusec/__init__.py | 83 +----------------- .../components/lupusec/config_flow.py | 39 +-------- homeassistant/components/lupusec/const.py | 4 - homeassistant/components/lupusec/strings.json | 10 --- tests/components/lupusec/test_config_flow.py | 85 ------------------- 5 files changed, 4 insertions(+), 217 deletions(-) diff --git a/homeassistant/components/lupusec/__init__.py b/homeassistant/components/lupusec/__init__.py index 51bba44aef0..c0593674972 100644 --- a/homeassistant/components/lupusec/__init__.py +++ b/homeassistant/components/lupusec/__init__.py @@ -5,24 +5,10 @@ import logging import lupupy from lupupy.exceptions import LupusecException -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_IP_ADDRESS, - CONF_NAME, - CONF_PASSWORD, - CONF_USERNAME, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType - -from .const import INTEGRATION_TITLE, ISSUE_PLACEHOLDER +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant _LOGGER = logging.getLogger(__name__) @@ -31,19 +17,6 @@ DOMAIN = "lupusec" NOTIFICATION_ID = "lupusec_notification" NOTIFICATION_TITLE = "Lupusec Security Setup" -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_IP_ADDRESS): cv.string, - vol.Optional(CONF_NAME): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) PLATFORMS: list[Platform] = [ Platform.ALARM_CONTROL_PANEL, @@ -52,56 +25,6 @@ PLATFORMS: list[Platform] = [ ] -async def handle_async_init_result(hass: HomeAssistant, domain: str, conf: dict): - """Handle the result of the async_init to issue deprecated warnings.""" - flow = hass.config_entries.flow - result = await flow.async_init(domain, context={"source": SOURCE_IMPORT}, data=conf) - - if ( - result["type"] == FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - 
"integration_title": INTEGRATION_TITLE, - }, - ) - else: - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders=ISSUE_PLACEHOLDER, - ) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the lupusec integration.""" - - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - hass.async_create_task(handle_async_init_result(hass, DOMAIN, conf)) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up this integration using UI.""" diff --git a/homeassistant/components/lupusec/config_flow.py b/homeassistant/components/lupusec/config_flow.py index 82162bccf80..45b2b2b0cd8 100644 --- a/homeassistant/components/lupusec/config_flow.py +++ b/homeassistant/components/lupusec/config_flow.py @@ -8,13 +8,7 @@ import lupupy import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import ( - CONF_HOST, - CONF_IP_ADDRESS, - CONF_NAME, - CONF_PASSWORD, - CONF_USERNAME, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -66,37 +60,6 @@ class LupusecConfigFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - self._async_abort_entries_match( - { - CONF_HOST: user_input[CONF_IP_ADDRESS], - CONF_USERNAME: user_input[CONF_USERNAME], - CONF_PASSWORD: user_input[CONF_PASSWORD], - } - ) - host = user_input[CONF_IP_ADDRESS] - username = user_input[CONF_USERNAME] - password = user_input[CONF_PASSWORD] - try: - await test_host_connection(self.hass, host, username, password) - except CannotConnect: - return self.async_abort(reason="cannot_connect") - except JSONDecodeError: - return self.async_abort(reason="cannot_connect") - except Exception: - _LOGGER.exception("Unexpected exception") - return self.async_abort(reason="unknown") - - return self.async_create_entry( - title=user_input.get(CONF_NAME, host), - data={ - CONF_HOST: host, - CONF_USERNAME: username, - CONF_PASSWORD: password, - }, - ) - async def test_host_connection( hass: HomeAssistant, host: str, username: str, password: str diff --git a/homeassistant/components/lupusec/const.py b/homeassistant/components/lupusec/const.py index 489d878306d..4904bc481a7 100644 --- a/homeassistant/components/lupusec/const.py +++ b/homeassistant/components/lupusec/const.py @@ -18,10 +18,6 @@ from lupupy.constants import ( DOMAIN = "lupusec" -INTEGRATION_TITLE = "Lupus Electronics LUPUSEC" -ISSUE_PLACEHOLDER = {"url": "/config/integrations/dashboard/add?domain=lupusec"} - - TYPE_TRANSLATION = { TYPE_WINDOW: "Fensterkontakt", TYPE_DOOR: "Türkontakt", diff --git a/homeassistant/components/lupusec/strings.json b/homeassistant/components/lupusec/strings.json index 6fa59aaeb3d..907232e0665 100644 --- a/homeassistant/components/lupusec/strings.json +++ b/homeassistant/components/lupusec/strings.json @@ -17,15 +17,5 @@ "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } - }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - 
"title": "The Lupus Electronics LUPUSEC YAML configuration import failed", - "description": "Configuring Lupus Electronics LUPUSEC using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Lupus Electronics LUPUSEC works and restart Home Assistant to try again or remove the Lupus Electronics LUPUSEC YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Lupus Electronics LUPUSEC YAML configuration import failed", - "description": "Configuring Lupus Electronics LUPUSEC using YAML is being removed but there was an unknown error when trying to import the YAML configuration.\n\nEnsure the imported configuration is correct and remove the Lupus Electronics LUPUSEC YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - } } } diff --git a/tests/components/lupusec/test_config_flow.py b/tests/components/lupusec/test_config_flow.py index e106bbd5001..f354eaf0644 100644 --- a/tests/components/lupusec/test_config_flow.py +++ b/tests/components/lupusec/test_config_flow.py @@ -153,88 +153,3 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("mock_import_step", "mock_title"), - [ - (MOCK_IMPORT_STEP, MOCK_IMPORT_STEP[CONF_IP_ADDRESS]), - (MOCK_IMPORT_STEP_NAME, MOCK_IMPORT_STEP_NAME[CONF_NAME]), - ], -) -async def test_flow_source_import( - hass: HomeAssistant, mock_import_step, mock_title -) -> None: - """Test configuration import from YAML.""" - with ( - patch( - "homeassistant.components.lupusec.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", - ) as mock_initialize_lupusec, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=mock_import_step, - ) - - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == mock_title - assert result["data"] == MOCK_DATA_STEP - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_initialize_lupusec.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("raise_error", "text_error"), - [ - (LupusecException("Test lupusec exception"), "cannot_connect"), - (JSONDecodeError("Test JSONDecodeError", "test", 1), "cannot_connect"), - (Exception("Test unknown exception"), "unknown"), - ], -) -async def test_flow_source_import_error_and_recover( - hass: HomeAssistant, raise_error, text_error -) -> None: - """Test exceptions and recovery.""" - - with patch( - "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", - side_effect=raise_error, - ) as mock_initialize_lupusec: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=MOCK_IMPORT_STEP, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == text_error - assert len(mock_initialize_lupusec.mock_calls) == 1 - - -async def test_flow_source_import_already_configured(hass: HomeAssistant) -> None: - """Test duplicate config entry..""" - - entry = MockConfigEntry( - domain=DOMAIN, - title=MOCK_DATA_STEP[CONF_HOST], - data=MOCK_DATA_STEP, - ) - - 
entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=MOCK_IMPORT_STEP, - ) - - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" From ca34bac479c08e678a5c3729d351147928b47689 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 11 Aug 2024 22:38:59 +0200 Subject: [PATCH 0557/1635] Remove deprecated horn switch in starline (#123608) --- homeassistant/components/starline/strings.json | 9 --------- homeassistant/components/starline/switch.py | 16 ---------------- 2 files changed, 25 deletions(-) diff --git a/homeassistant/components/starline/strings.json b/homeassistant/components/starline/strings.json index 6f0c42f0882..14a8ed5a035 100644 --- a/homeassistant/components/starline/strings.json +++ b/homeassistant/components/starline/strings.json @@ -114,9 +114,6 @@ "additional_channel": { "name": "Additional channel" }, - "horn": { - "name": "Horn" - }, "service_mode": { "name": "Service mode" } @@ -127,12 +124,6 @@ } } }, - "issues": { - "deprecated_horn_switch": { - "title": "The Starline Horn switch entity is being removed", - "description": "Using the Horn switch is now deprecated and will be removed in a future version of Home Assistant.\n\nPlease adjust any automations or scripts that use Horn switch entity to instead use the Horn button entity." - } - }, "services": { "update_state": { "name": "Update state", diff --git a/homeassistant/components/starline/switch.py b/homeassistant/components/starline/switch.py index 8ca736d2ac5..1b48a72c732 100644 --- a/homeassistant/components/starline/switch.py +++ b/homeassistant/components/starline/switch.py @@ -8,7 +8,6 @@ from homeassistant.components.switch import SwitchEntity, SwitchEntityDescriptio from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, create_issue from .account import StarlineAccount, StarlineDevice from .const import DOMAIN @@ -27,11 +26,6 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] 
= ( key="out", translation_key="additional_channel", ), - # Deprecated and should be removed in 2024.8 - SwitchEntityDescription( - key="poke", - translation_key="horn", - ), SwitchEntityDescription( key="valet", translation_key="service_mode", @@ -90,16 +84,6 @@ class StarlineSwitch(StarlineEntity, SwitchEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - if self._key == "poke": - create_issue( - self.hass, - DOMAIN, - "deprecated_horn_switch", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_horn_switch", - ) self._account.api.set_car_state(self._device.device_id, self._key, True) def turn_off(self, **kwargs: Any) -> None: From e1336a197508ef8ec5e4908be266ec80b4a1eb74 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 12 Aug 2024 08:55:24 +0200 Subject: [PATCH 0558/1635] Update knx-frontend to 2024.8.9.225351 (#123557) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 62364f641f4..6974ee300f5 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.0.0", "xknxproject==3.7.1", - "knx-frontend==2024.8.6.211307" + "knx-frontend==2024.8.9.225351" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index f1900dea123..f1bad6ec1b4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1219,7 +1219,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.6.211307 +knx-frontend==2024.8.9.225351 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 63e63f48e56..c4d10f58d83 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1018,7 +1018,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.6.211307 +knx-frontend==2024.8.9.225351 # homeassistant.components.konnected konnected==1.2.0 From 5b6bfa9ac8114e458c6ee6e829f6cffbd2804b95 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 12 Aug 2024 09:04:12 +0200 Subject: [PATCH 0559/1635] Remove Spotify scope check (#123545) --- homeassistant/components/spotify/media_player.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/homeassistant/components/spotify/media_player.py b/homeassistant/components/spotify/media_player.py index bd1bcdfd43e..3653bdb149a 100644 --- a/homeassistant/components/spotify/media_player.py +++ b/homeassistant/components/spotify/media_player.py @@ -31,7 +31,7 @@ from homeassistant.util.dt import utcnow from . 
import SpotifyConfigEntry from .browse_media import async_browse_media_internal -from .const import DOMAIN, MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES, SPOTIFY_SCOPES +from .const import DOMAIN, MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES from .models import HomeAssistantSpotifyData from .util import fetch_image_url @@ -138,10 +138,6 @@ class SpotifyMediaPlayer(MediaPlayerEntity): entry_type=DeviceEntryType.SERVICE, configuration_url="https://open.spotify.com", ) - - self._scope_ok = set(data.session.token["scope"].split(" ")).issuperset( - SPOTIFY_SCOPES - ) self._currently_playing: dict | None = {} self._playlist: dict | None = None self._restricted_device: bool = False @@ -459,13 +455,6 @@ class SpotifyMediaPlayer(MediaPlayerEntity): ) -> BrowseMedia: """Implement the websocket media browsing helper.""" - if not self._scope_ok: - _LOGGER.debug( - "Spotify scopes are not set correctly, this can impact features such as" - " media browsing" - ) - raise NotImplementedError - return await async_browse_media_internal( self.hass, self.data.client, From 6343a086e497659bd0bdceeff9359b9bfc696eae Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 09:08:40 +0200 Subject: [PATCH 0560/1635] Remove deprecated process sensor from System monitor (#123616) --- .../components/systemmonitor/__init__.py | 14 +- .../components/systemmonitor/config_flow.py | 2 +- .../components/systemmonitor/repairs.py | 72 ------- .../components/systemmonitor/sensor.py | 63 +----- .../components/systemmonitor/strings.json | 13 -- .../snapshots/test_diagnostics.ambr | 2 +- .../systemmonitor/snapshots/test_sensor.ambr | 18 -- tests/components/systemmonitor/test_init.py | 46 +++- .../components/systemmonitor/test_repairs.py | 199 ------------------ tests/components/systemmonitor/test_sensor.py | 61 +----- 10 files changed, 60 insertions(+), 430 deletions(-) delete mode 100644 homeassistant/components/systemmonitor/repairs.py delete mode 100644 tests/components/systemmonitor/test_repairs.py diff --git a/homeassistant/components/systemmonitor/__init__.py b/homeassistant/components/systemmonitor/__init__.py index 3fbc9edec2a..4a794a00432 100644 --- a/homeassistant/components/systemmonitor/__init__.py +++ b/homeassistant/components/systemmonitor/__init__.py @@ -73,7 +73,11 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Migrate old entry.""" - if entry.version == 1: + if entry.version > 1: + # This means the user has downgraded from a future version + return False + + if entry.version == 1 and entry.minor_version < 3: new_options = {**entry.options} if entry.minor_version == 1: # Migration copies process sensors to binary sensors @@ -84,6 +88,14 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, options=new_options, version=1, minor_version=2 ) + if entry.minor_version == 2: + new_options = {**entry.options} + if SENSOR_DOMAIN in new_options: + new_options.pop(SENSOR_DOMAIN) + hass.config_entries.async_update_entry( + entry, options=new_options, version=1, minor_version=3 + ) + _LOGGER.debug( "Migration to version %s.%s successful", entry.version, entry.minor_version ) diff --git a/homeassistant/components/systemmonitor/config_flow.py b/homeassistant/components/systemmonitor/config_flow.py index 0ff882d89da..34b28a1d47a 100644 --- a/homeassistant/components/systemmonitor/config_flow.py +++ b/homeassistant/components/systemmonitor/config_flow.py @@ -95,7 +95,7 @@ class 
SystemMonitorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): config_flow = CONFIG_FLOW options_flow = OPTIONS_FLOW VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 3 def async_config_entry_title(self, options: Mapping[str, Any]) -> str: """Return config entry title.""" diff --git a/homeassistant/components/systemmonitor/repairs.py b/homeassistant/components/systemmonitor/repairs.py deleted file mode 100644 index 10b5d18830d..00000000000 --- a/homeassistant/components/systemmonitor/repairs.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Repairs platform for the System Monitor integration.""" - -from __future__ import annotations - -from typing import Any, cast - -from homeassistant import data_entry_flow -from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow -from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - - -class ProcessFixFlow(RepairsFlow): - """Handler for an issue fixing flow.""" - - def __init__(self, entry: ConfigEntry, processes: list[str]) -> None: - """Create flow.""" - super().__init__() - self.entry = entry - self._processes = processes - - async def async_step_init( - self, user_input: dict[str, str] | None = None - ) -> data_entry_flow.FlowResult: - """Handle the first step of a fix flow.""" - return await self.async_step_migrate_process_sensor() - - async def async_step_migrate_process_sensor( - self, user_input: dict[str, Any] | None = None - ) -> data_entry_flow.FlowResult: - """Handle the options step of a fix flow.""" - if user_input is None: - return self.async_show_form( - step_id="migrate_process_sensor", - description_placeholders={"processes": ", ".join(self._processes)}, - ) - - # Migration has copied the sensors to binary sensors - # Pop the sensors to repair and remove entities - new_options: dict[str, Any] = self.entry.options.copy() - new_options.pop(SENSOR_DOMAIN) - - entity_reg = er.async_get(self.hass) - entries = er.async_entries_for_config_entry(entity_reg, self.entry.entry_id) - for entry in entries: - if entry.entity_id.startswith("sensor.") and entry.unique_id.startswith( - "process_" - ): - entity_reg.async_remove(entry.entity_id) - - self.hass.config_entries.async_update_entry(self.entry, options=new_options) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_create_entry(data={}) - - -async def async_create_fix_flow( - hass: HomeAssistant, - issue_id: str, - data: dict[str, Any] | None, -) -> RepairsFlow: - """Create flow.""" - entry = None - if data and (entry_id := data.get("entry_id")): - entry_id = cast(str, entry_id) - processes: list[str] = data["processes"] - entry = hass.config_entries.async_get_entry(entry_id) - assert entry - return ProcessFixFlow(entry, processes) - - return ConfirmRepairFlow() diff --git a/homeassistant/components/systemmonitor/sensor.py b/homeassistant/components/systemmonitor/sensor.py index bad4c3be0b5..ef1153f09e8 100644 --- a/homeassistant/components/systemmonitor/sensor.py +++ b/homeassistant/components/systemmonitor/sensor.py @@ -14,8 +14,6 @@ import sys import time from typing import Any, Literal -from psutil import NoSuchProcess - from homeassistant.components.sensor import ( DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, @@ -25,8 +23,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( PERCENTAGE, - STATE_OFF, - STATE_ON, EntityCategory, 
UnitOfDataRate, UnitOfInformation, @@ -36,13 +32,12 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import slugify from . import SystemMonitorConfigEntry -from .const import CONF_PROCESS, DOMAIN, NET_IO_TYPES +from .const import DOMAIN, NET_IO_TYPES from .coordinator import SystemMonitorCoordinator from .util import get_all_disk_mounts, get_all_network_interfaces, read_cpu_temperature @@ -68,24 +63,6 @@ def get_cpu_icon() -> Literal["mdi:cpu-64-bit", "mdi:cpu-32-bit"]: return "mdi:cpu-32-bit" -def get_process(entity: SystemMonitorSensor) -> str: - """Return process.""" - state = STATE_OFF - for proc in entity.coordinator.data.processes: - try: - _LOGGER.debug("process %s for argument %s", proc.name(), entity.argument) - if entity.argument == proc.name(): - state = STATE_ON - break - except NoSuchProcess as err: - _LOGGER.warning( - "Failed to load process with ID: %s, old name: %s", - err.pid, - err.name, - ) - return state - - def get_network(entity: SystemMonitorSensor) -> float | None: """Return network in and out.""" counters = entity.coordinator.data.io_counters @@ -341,15 +318,6 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = { value_fn=get_throughput, add_to_update=lambda entity: ("io_counters", ""), ), - "process": SysMonitorSensorEntityDescription( - key="process", - translation_key="process", - placeholder="process", - icon=get_cpu_icon(), - mandatory_arg=True, - value_fn=get_process, - add_to_update=lambda entity: ("processes", ""), - ), "processor_use": SysMonitorSensorEntityDescription( key="processor_use", translation_key="processor_use", @@ -551,35 +519,6 @@ async def async_setup_entry( ) continue - if _type == "process": - _entry = entry.options.get(SENSOR_DOMAIN, {}) - for argument in _entry.get(CONF_PROCESS, []): - loaded_resources.add(slugify(f"{_type}_{argument}")) - entities.append( - SystemMonitorSensor( - coordinator, - sensor_description, - entry.entry_id, - argument, - True, - ) - ) - async_create_issue( - hass, - DOMAIN, - "process_sensor", - breaks_in_ha_version="2024.9.0", - is_fixable=True, - is_persistent=False, - severity=IssueSeverity.WARNING, - translation_key="process_sensor", - data={ - "entry_id": entry.entry_id, - "processes": _entry[CONF_PROCESS], - }, - ) - continue - if _type == "processor_use": argument = "" is_enabled = check_legacy_resource(f"{_type}_{argument}", legacy_resources) diff --git a/homeassistant/components/systemmonitor/strings.json b/homeassistant/components/systemmonitor/strings.json index aae2463c9da..dde97918bc3 100644 --- a/homeassistant/components/systemmonitor/strings.json +++ b/homeassistant/components/systemmonitor/strings.json @@ -22,19 +22,6 @@ } } }, - "issues": { - "process_sensor": { - "title": "Process sensors are deprecated and will be removed", - "fix_flow": { - "step": { - "migrate_process_sensor": { - "title": "Process sensors have been setup as binary sensors", - "description": "Process sensors `{processes}` have been created as binary sensors and the sensors will be removed in 2024.9.0.\n\nPlease update all automations, scripts, dashboards or other things 
depending on these sensors to use the newly created binary sensors instead and press **Submit** to fix this issue." - } - } - } - } - }, "entity": { "binary_sensor": { "process": { diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index b50e051c816..328065f6098 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -35,7 +35,7 @@ }), 'disabled_by': None, 'domain': 'systemmonitor', - 'minor_version': 2, + 'minor_version': 3, 'options': dict({ 'binary_sensor': dict({ 'process': list([ diff --git a/tests/components/systemmonitor/snapshots/test_sensor.ambr b/tests/components/systemmonitor/snapshots/test_sensor.ambr index 3fe9ae7e809..1ee9067a528 100644 --- a/tests/components/systemmonitor/snapshots/test_sensor.ambr +++ b/tests/components/systemmonitor/snapshots/test_sensor.ambr @@ -300,24 +300,6 @@ # name: test_sensor[System Monitor Packets out eth1 - state] '150' # --- -# name: test_sensor[System Monitor Process pip - attributes] - ReadOnlyDict({ - 'friendly_name': 'System Monitor Process pip', - 'icon': 'mdi:cpu-64-bit', - }) -# --- -# name: test_sensor[System Monitor Process pip - state] - 'on' -# --- -# name: test_sensor[System Monitor Process python3 - attributes] - ReadOnlyDict({ - 'friendly_name': 'System Monitor Process python3', - 'icon': 'mdi:cpu-64-bit', - }) -# --- -# name: test_sensor[System Monitor Process python3 - state] - 'on' -# --- # name: test_sensor[System Monitor Processor temperature - attributes] ReadOnlyDict({ 'device_class': 'temperature', diff --git a/tests/components/systemmonitor/test_init.py b/tests/components/systemmonitor/test_init.py index 97f4a41b96c..6c1e4e6316c 100644 --- a/tests/components/systemmonitor/test_init.py +++ b/tests/components/systemmonitor/test_init.py @@ -95,9 +95,49 @@ async def test_migrate_process_sensor_to_binary_sensors( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON process_sensor = hass.states.get("binary_sensor.system_monitor_process_python3") assert process_sensor is not None assert process_sensor.state == STATE_ON + + assert mock_config_entry.minor_version == 3 + assert mock_config_entry.options == { + "binary_sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + } + + +async def test_migration_from_future_version( + hass: HomeAssistant, + mock_psutil: Mock, + mock_os: Mock, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test migration from future version.""" + mock_config_entry = MockConfigEntry( + title="System Monitor", + domain=DOMAIN, + version=2, + data={}, + options={ + "sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + }, + ) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/systemmonitor/test_repairs.py 
b/tests/components/systemmonitor/test_repairs.py deleted file mode 100644 index 6c1ff9dfd16..00000000000 --- a/tests/components/systemmonitor/test_repairs.py +++ /dev/null @@ -1,199 +0,0 @@ -"""Test repairs for System Monitor.""" - -from __future__ import annotations - -from http import HTTPStatus -from unittest.mock import Mock - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) -from homeassistant.components.systemmonitor.const import DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.common import ANY, MockConfigEntry -from tests.typing import ClientSessionGenerator, WebSocketGenerator - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_migrate_process_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_psutil: Mock, - mock_os: Mock, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test migrating process sensor to binary sensor.""" - mock_config_entry = MockConfigEntry( - title="System Monitor", - domain=DOMAIN, - data={}, - options={ - "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert hass.config_entries.async_entries(DOMAIN) == snapshot( - name="before_migration" - ) - - assert await async_setup_component(hass, "repairs", {}) - await hass.async_block_till_done() - - entity = "sensor.system_monitor_process_python3" - state = hass.states.get(entity) - assert state - - assert entity_registry.async_get(entity) - - ws_client = await hass_ws_client(hass) - client = await hass_client() - - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - assert len(msg["result"]["issues"]) > 0 - issue = None - for i in msg["result"]["issues"]: - if i["issue_id"] == "process_sensor": - issue = i - assert issue is not None - - url = RepairsFlowIndexView.url - resp = await client.post( - url, json={"handler": DOMAIN, "issue_id": "process_sensor"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "migrate_process_sensor" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={}) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - # Cannot use identity `is` check here as the value is parsed from JSON - assert data["type"] == FlowResultType.CREATE_ENTRY.value - await hass.async_block_till_done() - - state = hass.states.get("binary_sensor.system_monitor_process_python3") - assert state - - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - issue = None - for i in msg["result"]["issues"]: - if i["issue_id"] == "migrate_process_sensor": - issue = i - 
assert not issue - - entity = "sensor.system_monitor_process_python3" - state = hass.states.get(entity) - assert not state - - assert not entity_registry.async_get(entity) - - assert hass.config_entries.async_entries(DOMAIN) == snapshot(name="after_migration") - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_other_fixable_issues( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, - mock_added_config_entry: ConfigEntry, -) -> None: - """Test fixing other issues.""" - assert await async_setup_component(hass, "repairs", {}) - await hass.async_block_till_done() - - ws_client = await hass_ws_client(hass) - client = await hass_client() - - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - - issue = { - "breaks_in_ha_version": "2022.9.0dev0", - "domain": DOMAIN, - "issue_id": "issue_1", - "is_fixable": True, - "learn_more_url": "", - "severity": "error", - "translation_key": "issue_1", - } - ir.async_create_issue( - hass, - issue["domain"], - issue["issue_id"], - breaks_in_ha_version=issue["breaks_in_ha_version"], - is_fixable=issue["is_fixable"], - is_persistent=False, - learn_more_url=None, - severity=issue["severity"], - translation_key=issue["translation_key"], - ) - - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - results = msg["result"]["issues"] - assert { - "breaks_in_ha_version": "2022.9.0dev0", - "created": ANY, - "dismissed_version": None, - "domain": DOMAIN, - "is_fixable": True, - "issue_domain": None, - "issue_id": "issue_1", - "learn_more_url": None, - "severity": "error", - "translation_key": "issue_1", - "translation_placeholders": None, - "ignored": False, - } in results - - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "issue_1"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - # Cannot use identity `is` check here as the value is parsed from JSON - assert data["type"] == FlowResultType.CREATE_ENTRY.value - await hass.async_block_till_done() diff --git a/tests/components/systemmonitor/test_sensor.py b/tests/components/systemmonitor/test_sensor.py index ce15083da67..6d22c5354a4 100644 --- a/tests/components/systemmonitor/test_sensor.py +++ b/tests/components/systemmonitor/test_sensor.py @@ -14,12 +14,10 @@ from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory from homeassistant.components.systemmonitor.sensor import get_cpu_icon from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockProcess - from tests.common import MockConfigEntry, async_fire_time_changed @@ -38,7 +36,6 @@ async def test_sensor( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", 
"disk_use_percent_/home/notexist/", @@ -62,10 +59,6 @@ async def test_sensor( "friendly_name": "System Monitor Memory free", } - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - for entity in er.async_entries_for_config_entry( entity_registry, mock_config_entry.entry_id ): @@ -154,7 +147,6 @@ async def test_sensor_updating( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -172,10 +164,6 @@ async def test_sensor_updating( assert memory_sensor is not None assert memory_sensor.state == "40.0" - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - mock_psutil.virtual_memory.side_effect = Exception("Failed to update") freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) @@ -202,53 +190,6 @@ async def test_sensor_updating( assert memory_sensor.state == "25.0" -async def test_sensor_process_fails( - hass: HomeAssistant, - mock_psutil: Mock, - mock_os: Mock, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test process not exist failure.""" - mock_config_entry = MockConfigEntry( - title="System Monitor", - domain=DOMAIN, - data={}, - options={ - "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - - _process = MockProcess("python3", True) - - mock_psutil.process_iter.return_value = [_process] - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_OFF - - assert "Failed to load process with ID: 1, old name: python3" in caplog.text - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_network_sensors( freezer: FrozenDateTimeFactory, From 401e36b8857c7f3370f057bab281752a7afaf70f Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 09:09:51 +0200 Subject: [PATCH 0561/1635] Remove deprecated yaml import from Ecovacs (#123605) --- homeassistant/components/ecovacs/__init__.py | 33 +---- .../components/ecovacs/config_flow.py | 104 +-------------- homeassistant/components/ecovacs/strings.json | 26 ---- tests/components/ecovacs/test_config_flow.py | 124 +----------------- tests/components/ecovacs/test_init.py | 32 +---- 5 files changed, 8 insertions(+), 311 deletions(-) diff --git a/homeassistant/components/ecovacs/__init__.py b/homeassistant/components/ecovacs/__init__.py index d13a337057d..f8abf87ef27 100644 --- a/homeassistant/components/ecovacs/__init__.py +++ b/homeassistant/components/ecovacs/__init__.py @@ -1,31 +1,13 @@ """Support for Ecovacs Deebot vacuums.""" from sucks import VacBot -import voluptuous as vol -from homeassistant.config_entries 
import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from .const import CONF_CONTINENT, DOMAIN from .controller import EcovacsController -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_COUNTRY): vol.All(vol.Lower, cv.string), - vol.Required(CONF_CONTINENT): vol.All(vol.Lower, cv.string), - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -41,17 +23,6 @@ PLATFORMS = [ type EcovacsConfigEntry = ConfigEntry[EcovacsController] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Ecovacs component.""" - if DOMAIN in config: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] - ) - ) - return True - - async def async_setup_entry(hass: HomeAssistant, entry: EcovacsConfigEntry) -> bool: """Set up this integration using UI.""" controller = EcovacsController(hass, entry.data) diff --git a/homeassistant/components/ecovacs/config_flow.py b/homeassistant/components/ecovacs/config_flow.py index a254731a946..e19d9994f9e 100644 --- a/homeassistant/components/ecovacs/config_flow.py +++ b/homeassistant/components/ecovacs/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import ssl -from typing import Any, cast +from typing import Any from urllib.parse import urlparse from aiohttp import ClientError @@ -13,21 +13,16 @@ from deebot_client.const import UNDEFINED, UndefinedType from deebot_client.exceptions import InvalidAuthenticationError, MqttError from deebot_client.mqtt_client import MqttClient, create_mqtt_config from deebot_client.util import md5 -from deebot_client.util.continents import COUNTRIES_TO_CONTINENTS, get_continent import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_COUNTRY, CONF_MODE, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import AbortFlow +from homeassistant.core import HomeAssistant from homeassistant.helpers import aiohttp_client, selector -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import VolDictType -from homeassistant.loader import async_get_issue_tracker from homeassistant.util.ssl import get_default_no_verify_context from .const import ( - CONF_CONTINENT, CONF_OVERRIDE_MQTT_URL, CONF_OVERRIDE_REST_URL, CONF_VERIFY_MQTT_CERTIFICATE, @@ -218,98 +213,3 @@ class EcovacsConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, last_step=True, ) - - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import configuration from yaml.""" - - def create_repair( - error: str | None = None, placeholders: dict[str, Any] | None = None - ) -> None: - if placeholders is None: - placeholders = {} - if error: - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - 
severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{error}", - translation_placeholders=placeholders - | {"url": "/config/integrations/dashboard/add?domain=ecovacs"}, - ) - else: - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders=placeholders - | { - "domain": DOMAIN, - "integration_title": "Ecovacs", - }, - ) - - # We need to validate the imported country and continent - # as the YAML configuration allows any string for them. - # The config flow allows only valid alpha-2 country codes - # through the CountrySelector. - # The continent will be calculated with the function get_continent - # from the country code and there is no need to specify the continent anymore. - # As the YAML configuration includes the continent, - # we check if both the entered continent and the calculated continent match. - # If not we will inform the user about the mismatch. - error = None - placeholders = None - - # Convert the country to upper case as ISO 3166-1 alpha-2 country codes are upper case - user_input[CONF_COUNTRY] = user_input[CONF_COUNTRY].upper() - - if len(user_input[CONF_COUNTRY]) != 2: - error = "invalid_country_length" - placeholders = {"countries_url": "https://www.iso.org/obp/ui/#search/code/"} - elif len(user_input[CONF_CONTINENT]) != 2: - error = "invalid_continent_length" - placeholders = { - "continent_list": ",".join( - sorted(set(COUNTRIES_TO_CONTINENTS.values())) - ) - } - elif user_input[CONF_CONTINENT].lower() != ( - continent := get_continent(user_input[CONF_COUNTRY]) - ): - error = "continent_not_match" - placeholders = { - "continent": continent, - "github_issue_url": cast( - str, async_get_issue_tracker(self.hass, integration_domain=DOMAIN) - ), - } - - if error: - create_repair(error, placeholders) - return self.async_abort(reason=error) - - # Remove the continent from the user input as it is not needed anymore - user_input.pop(CONF_CONTINENT) - try: - result = await self.async_step_auth(user_input) - except AbortFlow as ex: - if ex.reason == "already_configured": - create_repair() - raise - - if errors := result.get("errors"): - error = errors["base"] - create_repair(error) - return self.async_abort(reason=error) - - create_repair() - return result diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index ea216f11694..8222cabed07 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -237,32 +237,6 @@ "message": "Getting the positions of the chargers and the device itself is not supported" } }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there was a connection error when trying to import the YAML configuration.\n\nPlease verify that you have a stable internet connection and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
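Editorial note on the Ecovacs import-flow removal above: the deleted code validated the YAML-supplied continent against the one derived from the country code, which is why a standalone continent setting is no longer needed. A minimal sketch of that derivation, using the same deebot_client helper the removed code imported; the example country code and the exact return value are assumptions, not taken from the patch.

from deebot_client.util.continents import get_continent

# The config flow only asks for the ISO 3166-1 alpha-2 country code and
# derives the continent itself; a European country code is expected to
# map to a lowercase continent code such as "eu".
continent = get_continent("IT")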
- }, - "deprecated_yaml_import_issue_invalid_auth": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there was an authentication error when trying to import the YAML configuration.\n\nCorrect the YAML configuration and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there was an unknown error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_invalid_country_length": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there is an invalid country specified in the YAML configuration.\n\nPlease change the country to the [Alpha-2 code of your country]({countries_url}) and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_invalid_continent_length": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there is an invalid continent specified in the YAML configuration.\n\nPlease correct the continent to be one of {continent_list} and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_continent_not_match": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there is an unexpected continent specified in the YAML configuration.\n\nFrom the given country, the continent \"{continent}\" is expected. Change the continent and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually.\n\nIf the contintent \"{continent}\" is not applicable, please open an issue on [GitHub]({github_issue_url})." 
- } - }, "selector": { "installation_mode": { "options": { diff --git a/tests/components/ecovacs/test_config_flow.py b/tests/components/ecovacs/test_config_flow.py index 0a161f88baa..5bf1144db0b 100644 --- a/tests/components/ecovacs/test_config_flow.py +++ b/tests/components/ecovacs/test_config_flow.py @@ -11,28 +11,23 @@ from deebot_client.mqtt_client import create_mqtt_config import pytest from homeassistant.components.ecovacs.const import ( - CONF_CONTINENT, CONF_OVERRIDE_MQTT_URL, CONF_OVERRIDE_REST_URL, CONF_VERIFY_MQTT_CERTIFICATE, DOMAIN, InstanceMode, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_COUNTRY, CONF_MODE, CONF_USERNAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_MODE, CONF_USERNAME +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir from .const import ( - IMPORT_DATA, VALID_ENTRY_DATA_CLOUD, VALID_ENTRY_DATA_SELF_HOSTED, VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ) -from tests.common import MockConfigEntry - _USER_STEP_SELF_HOSTED = {CONF_MODE: InstanceMode.SELF_HOSTED} _TEST_FN_AUTH_ARG = "user_input_auth" @@ -303,116 +298,3 @@ async def test_user_flow_self_hosted_error( mock_setup_entry.assert_called() mock_authenticator_authenticate.assert_called() mock_mqtt_client.verify_config.assert_called() - - -async def test_import_flow( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_setup_entry: AsyncMock, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, -) -> None: - """Test importing yaml config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=IMPORT_DATA.copy(), - ) - mock_authenticator_authenticate.assert_called() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == VALID_ENTRY_DATA_CLOUD[CONF_USERNAME] - assert result["data"] == VALID_ENTRY_DATA_CLOUD - assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues - mock_setup_entry.assert_called() - mock_mqtt_client.verify_config.assert_called() - - -async def test_import_flow_already_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test importing yaml config where entry already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=VALID_ENTRY_DATA_CLOUD) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=IMPORT_DATA.copy(), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues - - -@pytest.mark.parametrize("show_advanced_options", [True, False]) -@pytest.mark.parametrize( - ("side_effect", "reason"), - [ - (ClientError, "cannot_connect"), - (InvalidAuthenticationError, "invalid_auth"), - (Exception, "unknown"), - ], -) -async def test_import_flow_error( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, - side_effect: Exception, - reason: str, - show_advanced_options: bool, -) -> None: - """Test handling invalid connection.""" - mock_authenticator_authenticate.side_effect = side_effect - - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_IMPORT, - "show_advanced_options": show_advanced_options, - }, - data=IMPORT_DATA.copy(), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert ( - DOMAIN, - f"deprecated_yaml_import_issue_{reason}", - ) in issue_registry.issues - mock_authenticator_authenticate.assert_called() - - -@pytest.mark.parametrize("show_advanced_options", [True, False]) -@pytest.mark.parametrize( - ("reason", "user_input"), - [ - ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "too_long"}), - ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "a"}), # too short - ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "too_long"}), - ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "a"}), # too short - ("continent_not_match", IMPORT_DATA | {CONF_CONTINENT: "AA"}), - ], -) -async def test_import_flow_invalid_data( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - reason: str, - user_input: dict[str, Any], - show_advanced_options: bool, -) -> None: - """Test handling invalid connection.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_IMPORT, - "show_advanced_options": show_advanced_options, - }, - data=user_input, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert ( - DOMAIN, - f"deprecated_yaml_import_issue_{reason}", - ) in issue_registry.issues diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index ac4d5661a83..2185ae4c9eb 100644 --- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -1,7 +1,6 @@ """Test init of ecovacs.""" -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch from deebot_client.exceptions import DeebotError, InvalidAuthenticationError import pytest @@ -12,9 +11,6 @@ from homeassistant.components.ecovacs.controller import EcovacsController from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component - -from .const import IMPORT_DATA from tests.common import MockConfigEntry @@ -88,32 +84,6 @@ async def test_invalid_auth( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR -@pytest.mark.parametrize( - ("config", "config_entries_expected"), - [ - ({}, 0), - ({DOMAIN: IMPORT_DATA.copy()}, 1), - ], - ids=["no_config", "import_config"], -) -async def test_async_setup_import( - hass: HomeAssistant, - config: dict[str, Any], - config_entries_expected: int, - mock_setup_entry: AsyncMock, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, -) -> None: - """Test async_setup config import.""" - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == config_entries_expected - assert mock_setup_entry.call_count == config_entries_expected - assert mock_authenticator_authenticate.call_count == config_entries_expected - assert mock_mqtt_client.verify_config.call_count == config_entries_expected - - async def test_devices_in_dr( device_registry: dr.DeviceRegistry, controller: EcovacsController, From b19758ff71297061f8942ff063ac38688e90b477 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 
12 Aug 2024 09:11:44 +0200 Subject: [PATCH 0562/1635] Change WoL to be secondary on device info (#123591) --- homeassistant/components/wake_on_lan/button.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/wake_on_lan/button.py b/homeassistant/components/wake_on_lan/button.py index 39c4511868d..87135a61380 100644 --- a/homeassistant/components/wake_on_lan/button.py +++ b/homeassistant/components/wake_on_lan/button.py @@ -15,8 +15,6 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN - _LOGGER = logging.getLogger(__name__) @@ -62,9 +60,8 @@ class WolButton(ButtonEntity): self._attr_unique_id = dr.format_mac(mac_address) self._attr_device_info = dr.DeviceInfo( connections={(dr.CONNECTION_NETWORK_MAC, self._attr_unique_id)}, - identifiers={(DOMAIN, self._attr_unique_id)}, - manufacturer="Wake on LAN", - name=name, + default_manufacturer="Wake on LAN", + default_name=name, ) async def async_press(self) -> None: From bbefe47aeb6eb176d7a0c08f364dcfeed2736d5c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 09:12:26 +0200 Subject: [PATCH 0563/1635] Add unique id to Manual alarm (#123588) --- homeassistant/components/demo/alarm_control_panel.py | 9 ++------- homeassistant/components/manual/alarm_control_panel.py | 5 +++++ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/demo/alarm_control_panel.py b/homeassistant/components/demo/alarm_control_panel.py index f95042f2cc7..d1b558842b6 100644 --- a/homeassistant/components/demo/alarm_control_panel.py +++ b/homeassistant/components/demo/alarm_control_panel.py @@ -30,9 +30,10 @@ async def async_setup_entry( """Set up the Demo config entry.""" async_add_entities( [ - DemoAlarm( # type:ignore[no-untyped-call] + ManualAlarm( # type:ignore[no-untyped-call] hass, "Security", + "demo_alarm_control_panel", "1234", None, True, @@ -74,9 +75,3 @@ async def async_setup_entry( ) ] ) - - -class DemoAlarm(ManualAlarm): - """Demo Alarm Control Panel.""" - - _attr_unique_id = "demo_alarm_control_panel" diff --git a/homeassistant/components/manual/alarm_control_panel.py b/homeassistant/components/manual/alarm_control_panel.py index 5b344dd01ac..422a9726e81 100644 --- a/homeassistant/components/manual/alarm_control_panel.py +++ b/homeassistant/components/manual/alarm_control_panel.py @@ -21,6 +21,7 @@ from homeassistant.const import ( CONF_NAME, CONF_PLATFORM, CONF_TRIGGER_TIME, + CONF_UNIQUE_ID, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_ARMED_HOME, @@ -122,6 +123,7 @@ PLATFORM_SCHEMA = vol.Schema( { vol.Required(CONF_PLATFORM): "manual", vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string, + vol.Optional(CONF_UNIQUE_ID): cv.string, vol.Exclusive(CONF_CODE, "code validation"): cv.string, vol.Exclusive(CONF_CODE_TEMPLATE, "code validation"): cv.template, vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean, @@ -179,6 +181,7 @@ def setup_platform( ManualAlarm( hass, config[CONF_NAME], + config.get(CONF_UNIQUE_ID), config.get(CONF_CODE), config.get(CONF_CODE_TEMPLATE), config.get(CONF_CODE_ARM_REQUIRED), @@ -205,6 +208,7 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): self, hass, name, + unique_id, code, code_template, code_arm_required, @@ -215,6 +219,7 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): self._state = STATE_ALARM_DISARMED self._hass = 
hass self._attr_name = name + self._attr_unique_id = unique_id if code_template: self._code = code_template self._code.hass = hass From 4527de18d579d87c2336ce1c5d3983aa01184401 Mon Sep 17 00:00:00 2001 From: Amit Finkelstein Date: Mon, 12 Aug 2024 10:13:52 +0300 Subject: [PATCH 0564/1635] Bump pycoolmasternet-async to 0.2.2 (#123634) --- homeassistant/components/coolmaster/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/coolmaster/manifest.json b/homeassistant/components/coolmaster/manifest.json index 9488e068d44..8775d7f72b8 100644 --- a/homeassistant/components/coolmaster/manifest.json +++ b/homeassistant/components/coolmaster/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/coolmaster", "iot_class": "local_polling", "loggers": ["pycoolmasternet_async"], - "requirements": ["pycoolmasternet-async==0.1.5"] + "requirements": ["pycoolmasternet-async==0.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index f1bad6ec1b4..8cca616dc16 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1780,7 +1780,7 @@ pycmus==0.1.1 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.1.5 +pycoolmasternet-async==0.2.2 # homeassistant.components.microsoft pycsspeechtts==1.0.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c4d10f58d83..f63b962b489 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1433,7 +1433,7 @@ pycfdns==3.0.0 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.1.5 +pycoolmasternet-async==0.2.2 # homeassistant.components.microsoft pycsspeechtts==1.0.8 From 86df43879c6e310c458085b38fc0f956a0af48fe Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 09:14:48 +0200 Subject: [PATCH 0565/1635] Define Manual alarm as a helper (#123587) --- homeassistant/components/manual/manifest.json | 1 + homeassistant/generated/integrations.json | 12 ++++++------ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/manual/manifest.json b/homeassistant/components/manual/manifest.json index 7406ab26830..37ba45c2dda 100644 --- a/homeassistant/components/manual/manifest.json +++ b/homeassistant/components/manual/manifest.json @@ -3,6 +3,7 @@ "name": "Manual Alarm Control Panel", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/manual", + "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index f59e0883dd4..3d3344c1f60 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3449,12 +3449,6 @@ "config_flow": true, "iot_class": "cloud_push" }, - "manual": { - "name": "Manual Alarm Control Panel", - "integration_type": "hub", - "config_flow": false, - "iot_class": "calculated" - }, "marantz": { "name": "Marantz", "integration_type": "virtual", @@ -7218,6 +7212,12 @@ "config_flow": true, "iot_class": "local_push" }, + "manual": { + "name": "Manual Alarm Control Panel", + "integration_type": "helper", + "config_flow": false, + "iot_class": "calculated" + }, "min_max": { "integration_type": "helper", "config_flow": true, From b15ea588510601789af555b6beb3abf6639f1a25 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 12 Aug 2024 02:15:33 -0500 Subject: [PATCH 0566/1635] Relocate code to get scheduled TimerHandles (#123546) --- homeassistant/core.py | 4 ++-- homeassistant/util/async_.py | 16 +++++++++++++++- tests/common.py | 3 ++- tests/conftest.py | 4 ++-- tests/util/test_async.py | 14 ++++++++++++++ 5 files changed, 35 insertions(+), 6 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 5d223b9f19f..1050d25ee71 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -101,6 +101,7 @@ from .util import dt as dt_util, location from .util.async_ import ( cancelling, create_eager_task, + get_scheduled_timer_handles, run_callback_threadsafe, shutdown_run_callback_threadsafe, ) @@ -1227,8 +1228,7 @@ class HomeAssistant: def _cancel_cancellable_timers(self) -> None: """Cancel timer handles marked as cancellable.""" - handles: Iterable[asyncio.TimerHandle] = self.loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 - for handle in handles: + for handle in get_scheduled_timer_handles(self.loop): if ( not handle.cancelled() and (args := handle._args) # noqa: SLF001 diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index f2dc1291324..dcb788f0685 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -2,7 +2,15 @@ from __future__ import annotations -from asyncio import AbstractEventLoop, Future, Semaphore, Task, gather, get_running_loop +from asyncio import ( + AbstractEventLoop, + Future, + Semaphore, + Task, + TimerHandle, + gather, + get_running_loop, +) from collections.abc import Awaitable, Callable, Coroutine import concurrent.futures import logging @@ -124,3 +132,9 @@ def shutdown_run_callback_threadsafe(loop: AbstractEventLoop) -> None: python is going to exit. 
""" setattr(loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE, True) + + +def get_scheduled_timer_handles(loop: AbstractEventLoop) -> list[TimerHandle]: + """Return a list of scheduled TimerHandles.""" + handles: list[TimerHandle] = loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 + return handles diff --git a/tests/common.py b/tests/common.py index 64e11ee7b51..d36df509142 100644 --- a/tests/common.py +++ b/tests/common.py @@ -93,6 +93,7 @@ from homeassistant.helpers.json import JSONEncoder, _orjson_default_encoder, jso from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.async_ import ( _SHUTDOWN_RUN_CALLBACK_THREADSAFE, + get_scheduled_timer_handles, run_callback_threadsafe, ) import homeassistant.util.dt as dt_util @@ -531,7 +532,7 @@ def _async_fire_time_changed( hass: HomeAssistant, utc_datetime: datetime | None, fire_all: bool ) -> None: timestamp = dt_util.utc_to_timestamp(utc_datetime) - for task in list(hass.loop._scheduled): + for task in list(get_scheduled_timer_handles(hass.loop)): if not isinstance(task, asyncio.TimerHandle): continue if task.cancelled(): diff --git a/tests/conftest.py b/tests/conftest.py index 0667edf4be2..ea0453e7450 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -84,7 +84,7 @@ from homeassistant.helpers.translation import _TranslationsCacheData from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util, location -from homeassistant.util.async_ import create_eager_task +from homeassistant.util.async_ import create_eager_task, get_scheduled_timer_handles from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS @@ -372,7 +372,7 @@ def verify_cleanup( if tasks: event_loop.run_until_complete(asyncio.wait(tasks)) - for handle in event_loop._scheduled: # type: ignore[attr-defined] + for handle in get_scheduled_timer_handles(event_loop): if not handle.cancelled(): with long_repr_strings(): if expected_lingering_timers: diff --git a/tests/util/test_async.py b/tests/util/test_async.py index 373768788b7..17349cf6ff9 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -199,3 +199,17 @@ async def test_create_eager_task_from_thread_in_integration( "from a thread at homeassistant/components/hue/light.py, line 23: " "self.light.is_on" ) in caplog.text + + +async def test_get_scheduled_timer_handles(hass: HomeAssistant) -> None: + """Test get_scheduled_timer_handles returns all scheduled timer handles.""" + loop = hass.loop + timer_handle = loop.call_later(10, lambda: None) + timer_handle2 = loop.call_later(5, lambda: None) + timer_handle3 = loop.call_later(15, lambda: None) + + handles = hasync.get_scheduled_timer_handles(loop) + assert set(handles).issuperset({timer_handle, timer_handle2, timer_handle3}) + timer_handle.cancel() + timer_handle2.cancel() + timer_handle3.cancel() From 0bb8c4832d8b8a3296cebbbdca54d0c7909a9d28 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 12 Aug 2024 09:16:33 +0200 Subject: [PATCH 0567/1635] Enable raise-within-try (TRY301) rule in ruff (#123351) --- homeassistant/components/alexa/smart_home.py | 2 +- homeassistant/components/amcrest/camera.py | 2 +- homeassistant/components/camera/__init__.py | 2 +- homeassistant/components/control4/config_flow.py | 4 ++-- homeassistant/components/currencylayer/sensor.py | 2 +- homeassistant/components/device_tracker/legacy.py | 2 +- 
homeassistant/components/dunehd/config_flow.py | 2 +- homeassistant/components/egardia/__init__.py | 2 +- homeassistant/components/elmax/config_flow.py | 2 +- homeassistant/components/generic_thermostat/climate.py | 2 +- homeassistant/components/group/sensor.py | 2 +- homeassistant/components/icloud/account.py | 2 +- homeassistant/components/icloud/config_flow.py | 6 +++--- homeassistant/components/idasen_desk/__init__.py | 2 +- homeassistant/components/jvc_projector/config_flow.py | 2 +- homeassistant/components/kaleidescape/config_flow.py | 4 ++-- homeassistant/components/knx/config_flow.py | 2 +- homeassistant/components/mailbox/__init__.py | 2 +- homeassistant/components/mqtt/sensor.py | 4 ++-- homeassistant/components/notify/legacy.py | 2 +- homeassistant/components/python_script/__init__.py | 2 +- homeassistant/components/raincloud/__init__.py | 2 +- homeassistant/components/roborock/__init__.py | 2 +- homeassistant/components/signal_messenger/notify.py | 4 ++-- homeassistant/components/starline/config_flow.py | 2 +- homeassistant/components/tami4/config_flow.py | 2 +- homeassistant/components/template/vacuum.py | 2 +- homeassistant/components/websocket_api/connection.py | 2 +- homeassistant/components/websocket_api/http.py | 4 ++-- homeassistant/components/wyoming/data.py | 2 +- pyproject.toml | 3 +-- tests/components/system_log/test_init.py | 6 +++--- 32 files changed, 41 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/alexa/smart_home.py b/homeassistant/components/alexa/smart_home.py index 57c1ba791ba..d7bcfa5698e 100644 --- a/homeassistant/components/alexa/smart_home.py +++ b/homeassistant/components/alexa/smart_home.py @@ -194,7 +194,7 @@ async def async_handle_message( try: if not enabled: - raise AlexaBridgeUnreachableError( + raise AlexaBridgeUnreachableError( # noqa: TRY301 "Alexa API not enabled in Home Assistant configuration" ) diff --git a/homeassistant/components/amcrest/camera.py b/homeassistant/components/amcrest/camera.py index b9b2701eac6..0bf02b604f1 100644 --- a/homeassistant/components/amcrest/camera.py +++ b/homeassistant/components/amcrest/camera.py @@ -499,7 +499,7 @@ class AmcrestCam(Camera): await getattr(self, f"_async_set_{func}")(value) new_value = await getattr(self, f"_async_get_{func}")() if new_value != value: - raise AmcrestCommandFailed + raise AmcrestCommandFailed # noqa: TRY301 except (AmcrestError, AmcrestCommandFailed) as error: if tries == 1: log_update_error(_LOGGER, action, self.name, description, error) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index d8fa4bfbc7a..cbcf08cb7c2 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -862,7 +862,7 @@ class CameraMjpegStream(CameraView): # Compose camera stream from stills interval = float(interval_str) if interval < MIN_STREAM_INTERVAL: - raise ValueError(f"Stream interval must be > {MIN_STREAM_INTERVAL}") + raise ValueError(f"Stream interval must be > {MIN_STREAM_INTERVAL}") # noqa: TRY301 return await camera.handle_async_still_stream(request, interval) except ValueError as err: raise web.HTTPBadRequest from err diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index f6d746c9cb4..f6eb410cbf2 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -105,9 +105,9 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): ) try: if not await 
hub.authenticate(): - raise InvalidAuth + raise InvalidAuth # noqa: TRY301 if not await hub.connect_to_director(): - raise CannotConnect + raise CannotConnect # noqa: TRY301 except InvalidAuth: errors["base"] = "invalid_auth" except CannotConnect: diff --git a/homeassistant/components/currencylayer/sensor.py b/homeassistant/components/currencylayer/sensor.py index 2ad0f88a2ab..01dec10efe0 100644 --- a/homeassistant/components/currencylayer/sensor.py +++ b/homeassistant/components/currencylayer/sensor.py @@ -108,7 +108,7 @@ class CurrencylayerData: try: result = requests.get(self._resource, params=self._parameters, timeout=10) if "error" in result.json(): - raise ValueError(result.json()["error"]["info"]) + raise ValueError(result.json()["error"]["info"]) # noqa: TRY301 self.data = result.json()["quotes"] _LOGGER.debug("Currencylayer data updated: %s", result.json()["timestamp"]) except ValueError as err: diff --git a/homeassistant/components/device_tracker/legacy.py b/homeassistant/components/device_tracker/legacy.py index ac168c06fb1..15cb67f5ee8 100644 --- a/homeassistant/components/device_tracker/legacy.py +++ b/homeassistant/components/device_tracker/legacy.py @@ -350,7 +350,7 @@ class DeviceTrackerPlatform: discovery_info, ) else: - raise HomeAssistantError("Invalid legacy device_tracker platform.") + raise HomeAssistantError("Invalid legacy device_tracker platform.") # noqa: TRY301 if scanner is not None: async_setup_scanner_platform( diff --git a/homeassistant/components/dunehd/config_flow.py b/homeassistant/components/dunehd/config_flow.py index 8a0f3eec4a0..33ffd4a812a 100644 --- a/homeassistant/components/dunehd/config_flow.py +++ b/homeassistant/components/dunehd/config_flow.py @@ -39,7 +39,7 @@ class DuneHDConfigFlow(ConfigFlow, domain=DOMAIN): try: if self.host_already_configured(host): - raise AlreadyConfigured + raise AlreadyConfigured # noqa: TRY301 await self.init_device(host) except CannotConnect: errors[CONF_HOST] = "cannot_connect" diff --git a/homeassistant/components/egardia/__init__.py b/homeassistant/components/egardia/__init__.py index 9ff4b9af94f..89dae7d23c9 100644 --- a/homeassistant/components/egardia/__init__.py +++ b/homeassistant/components/egardia/__init__.py @@ -113,7 +113,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: server = egardiaserver.EgardiaServer("", rs_port) bound = server.bind() if not bound: - raise OSError( + raise OSError( # noqa: TRY301 "Binding error occurred while starting EgardiaServer." ) hass.data[EGARDIA_SERVER] = server diff --git a/homeassistant/components/elmax/config_flow.py b/homeassistant/components/elmax/config_flow.py index 2971a425663..69f69a5fd31 100644 --- a/homeassistant/components/elmax/config_flow.py +++ b/homeassistant/components/elmax/config_flow.py @@ -424,7 +424,7 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): if p.hash == self._entry.data[CONF_ELMAX_PANEL_ID] ] if len(panels) < 1: - raise NoOnlinePanelsError + raise NoOnlinePanelsError # noqa: TRY301 # Verify the pin is still valid. 
await client.get_panel_status( diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index c142d15f9e5..2a118b70879 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -485,7 +485,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity): try: cur_temp = float(state.state) if not math.isfinite(cur_temp): - raise ValueError(f"Sensor has illegal state {state.state}") + raise ValueError(f"Sensor has illegal state {state.state}") # noqa: TRY301 self._cur_temp = cur_temp except ValueError as ex: _LOGGER.error("Unable to update from sensor: %s", ex) diff --git a/homeassistant/components/group/sensor.py b/homeassistant/components/group/sensor.py index eaaedcf0e46..a99ed9dad63 100644 --- a/homeassistant/components/group/sensor.py +++ b/homeassistant/components/group/sensor.py @@ -406,7 +406,7 @@ class SensorGroup(GroupEntity, SensorEntity): and (uom := state.attributes["unit_of_measurement"]) not in self._valid_units ): - raise HomeAssistantError("Not a valid unit") + raise HomeAssistantError("Not a valid unit") # noqa: TRY301 sensor_values.append((entity_id, numeric_state, state)) if entity_id in self._state_incorrect: diff --git a/homeassistant/components/icloud/account.py b/homeassistant/components/icloud/account.py index 988073384f8..9536cd9ee5c 100644 --- a/homeassistant/components/icloud/account.py +++ b/homeassistant/components/icloud/account.py @@ -117,7 +117,7 @@ class IcloudAccount: if self.api.requires_2fa: # Trigger a new log in to ensure the user enters the 2FA code again. - raise PyiCloudFailedLoginException + raise PyiCloudFailedLoginException # noqa: TRY301 except PyiCloudFailedLoginException: self.api = None diff --git a/homeassistant/components/icloud/config_flow.py b/homeassistant/components/icloud/config_flow.py index 36fe880ec79..30942ce6727 100644 --- a/homeassistant/components/icloud/config_flow.py +++ b/homeassistant/components/icloud/config_flow.py @@ -141,7 +141,7 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): getattr, self.api, "devices" ) if not devices: - raise PyiCloudNoDevicesException + raise PyiCloudNoDevicesException # noqa: TRY301 except (PyiCloudServiceNotActivatedException, PyiCloudNoDevicesException): _LOGGER.error("No device found in the iCloud account: %s", self._username) self.api = None @@ -264,13 +264,13 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): if not await self.hass.async_add_executor_job( self.api.validate_2fa_code, self._verification_code ): - raise PyiCloudException("The code you entered is not valid.") + raise PyiCloudException("The code you entered is not valid.") # noqa: TRY301 elif not await self.hass.async_add_executor_job( self.api.validate_verification_code, self._trusted_device, self._verification_code, ): - raise PyiCloudException("The code you entered is not valid.") + raise PyiCloudException("The code you entered is not valid.") # noqa: TRY301 except PyiCloudException as error: # Reset to the initial 2FA state to allow the user to retry _LOGGER.error("Failed to verify verification code: %s", error) diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index f0d8013cb50..56a377ac2df 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -54,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: if not await 
coordinator.async_connect(): - raise ConfigEntryNotReady(f"Unable to connect to desk {address}") + raise ConfigEntryNotReady(f"Unable to connect to desk {address}") # noqa: TRY301 except (AuthFailedError, TimeoutError, BleakError, Exception) as ex: raise ConfigEntryNotReady(f"Unable to connect to desk {address}") from ex diff --git a/homeassistant/components/jvc_projector/config_flow.py b/homeassistant/components/jvc_projector/config_flow.py index 7564d571d3b..7fbfb17a976 100644 --- a/homeassistant/components/jvc_projector/config_flow.py +++ b/homeassistant/components/jvc_projector/config_flow.py @@ -37,7 +37,7 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): try: if not is_host_valid(host): - raise InvalidHost + raise InvalidHost # noqa: TRY301 mac = await get_mac_address(host, port, password) except InvalidHost: diff --git a/homeassistant/components/kaleidescape/config_flow.py b/homeassistant/components/kaleidescape/config_flow.py index bb9f47ec1e8..e4a562dc00b 100644 --- a/homeassistant/components/kaleidescape/config_flow.py +++ b/homeassistant/components/kaleidescape/config_flow.py @@ -38,7 +38,7 @@ class KaleidescapeConfigFlow(ConfigFlow, domain=DOMAIN): try: info = await validate_host(host) if info.server_only: - raise UnsupportedError + raise UnsupportedError # noqa: TRY301 except ConnectionError: errors["base"] = ERROR_CANNOT_CONNECT except UnsupportedError: @@ -73,7 +73,7 @@ class KaleidescapeConfigFlow(ConfigFlow, domain=DOMAIN): try: self.discovered_device = await validate_host(host) if self.discovered_device.server_only: - raise UnsupportedError + raise UnsupportedError # noqa: TRY301 except ConnectionError: return self.async_abort(reason=ERROR_CANNOT_CONNECT) except UnsupportedError: diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index 2fc1f49800c..7e4db1f889b 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -445,7 +445,7 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): try: key_bytes = bytes.fromhex(user_input[CONF_KNX_ROUTING_BACKBONE_KEY]) if len(key_bytes) != 16: - raise ValueError + raise ValueError # noqa: TRY301 except ValueError: errors[CONF_KNX_ROUTING_BACKBONE_KEY] = "invalid_backbone_key" if not errors: diff --git a/homeassistant/components/mailbox/__init__.py b/homeassistant/components/mailbox/__init__.py index b446ba3704e..e0438342a54 100644 --- a/homeassistant/components/mailbox/__init__.py +++ b/homeassistant/components/mailbox/__init__.py @@ -92,7 +92,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: platform.get_handler, hass, p_config, discovery_info ) else: - raise HomeAssistantError("Invalid mailbox platform.") + raise HomeAssistantError("Invalid mailbox platform.") # noqa: TRY301 if mailbox is None: _LOGGER.error("Failed to initialize mailbox platform %s", p_type) diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index 4a41f486831..e983f1b66f3 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -260,7 +260,7 @@ class MqttSensor(MqttEntity, RestoreSensor): return try: if (payload_datetime := dt_util.parse_datetime(payload)) is None: - raise ValueError + raise ValueError # noqa: TRY301 except ValueError: _LOGGER.warning("Invalid state message '%s' from '%s'", payload, msg.topic) self._attr_native_value = None @@ -280,7 +280,7 @@ class MqttSensor(MqttEntity, RestoreSensor): try: last_reset = 
dt_util.parse_datetime(str(payload)) if last_reset is None: - raise ValueError + raise ValueError # noqa: TRY301 self._attr_last_reset = last_reset except ValueError: _LOGGER.warning( diff --git a/homeassistant/components/notify/legacy.py b/homeassistant/components/notify/legacy.py index b3871d858e8..81444a36296 100644 --- a/homeassistant/components/notify/legacy.py +++ b/homeassistant/components/notify/legacy.py @@ -105,7 +105,7 @@ def async_setup_legacy( platform.get_service, hass, p_config, discovery_info ) else: - raise HomeAssistantError("Invalid notify platform.") + raise HomeAssistantError("Invalid notify platform.") # noqa: TRY301 if notify_service is None: # Platforms can decide not to create a service based diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index ab8cf17daa0..70e9c5b0d29 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -277,7 +277,7 @@ def execute(hass, filename, source, data=None, return_response=False): if not isinstance(restricted_globals["output"], dict): output_type = type(restricted_globals["output"]) restricted_globals["output"] = {} - raise ScriptError( + raise ScriptError( # noqa: TRY301 f"Expected `output` to be a dictionary, was {output_type}" ) except ScriptError as err: diff --git a/homeassistant/components/raincloud/__init__.py b/homeassistant/components/raincloud/__init__.py index e6f5d2ecf8d..a805024357c 100644 --- a/homeassistant/components/raincloud/__init__.py +++ b/homeassistant/components/raincloud/__init__.py @@ -102,7 +102,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: raincloud = RainCloudy(username=username, password=password) if not raincloud.is_connected: - raise HTTPError + raise HTTPError # noqa: TRY301 hass.data[DATA_RAINCLOUD] = RainCloudHub(raincloud) except (ConnectTimeout, HTTPError) as ex: _LOGGER.error("Unable to connect to Rain Cloud service: %s", str(ex)) diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index 3743faa32d8..d107a0bee8b 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -174,7 +174,7 @@ async def setup_device_v1( if networking is None: # If the api does not return an error but does return None for # get_networking - then we need to go through cache checking. - raise RoborockException("Networking request returned None.") + raise RoborockException("Networking request returned None.") # noqa: TRY301 except RoborockException as err: _LOGGER.warning( "Not setting up %s because we could not get the network information of the device. " diff --git a/homeassistant/components/signal_messenger/notify.py b/homeassistant/components/signal_messenger/notify.py index 21d42f8912f..9321bc3232f 100644 --- a/homeassistant/components/signal_messenger/notify.py +++ b/homeassistant/components/signal_messenger/notify.py @@ -166,7 +166,7 @@ class SignalNotificationService(BaseNotificationService): and int(str(resp.headers.get("Content-Length"))) > attachment_size_limit ): - raise ValueError( + raise ValueError( # noqa: TRY301 "Attachment too large (Content-Length reports {}). 
Max size: {}" " bytes".format( int(str(resp.headers.get("Content-Length"))), @@ -179,7 +179,7 @@ class SignalNotificationService(BaseNotificationService): for chunk in resp.iter_content(1024): size += len(chunk) if size > attachment_size_limit: - raise ValueError( + raise ValueError( # noqa: TRY301 f"Attachment too large (Stream reports {size}). " f"Max size: {CONF_MAX_ALLOWED_DOWNLOAD_SIZE_BYTES} bytes" ) diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index 6c38603a843..fbb7fa9acdc 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -214,7 +214,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._captcha_image = data["captchaImg"] return self._async_form_auth_captcha(error) - raise Exception(data) # noqa: TRY002 + raise Exception(data) # noqa: TRY002, TRY301 except Exception as err: # noqa: BLE001 _LOGGER.error("Error auth user: %s", err) return self._async_form_auth_user(ERROR_AUTH_USER) diff --git a/homeassistant/components/tami4/config_flow.py b/homeassistant/components/tami4/config_flow.py index 0fa05bbebe4..72b19470f45 100644 --- a/homeassistant/components/tami4/config_flow.py +++ b/homeassistant/components/tami4/config_flow.py @@ -42,7 +42,7 @@ class Tami4ConfigFlow(ConfigFlow, domain=DOMAIN): if m := _PHONE_MATCHER.match(phone): self.phone = f"+972{m.group('number')}" else: - raise InvalidPhoneNumber + raise InvalidPhoneNumber # noqa: TRY301 await self.hass.async_add_executor_job( Tami4EdgeAPI.request_otp, self.phone ) diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index 9062f71d818..e512ce2eb04 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -318,7 +318,7 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): try: battery_level_int = int(battery_level) if not 0 <= battery_level_int <= 100: - raise ValueError + raise ValueError # noqa: TRY301 except ValueError: _LOGGER.error( "Received invalid battery level: %s for entity %s. Expected: 0-100", diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index ef70df4a123..6c0c6f0c587 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -223,7 +223,7 @@ class ActiveConnection: try: if schema is False: if len(msg) > 2: - raise vol.Invalid("extra keys not allowed") + raise vol.Invalid("extra keys not allowed") # noqa: TRY301 handler(self.hass, self, msg) else: handler(self.hass, self, schema(msg)) diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index c65c4c65988..8ed3469d7ed 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -339,11 +339,11 @@ class WebSocketHandler: raise Disconnect from err if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - raise Disconnect + raise Disconnect # noqa: TRY301 if msg.type != WSMsgType.TEXT: disconnect_warn = "Received non-Text message." 
- raise Disconnect + raise Disconnect # noqa: TRY301 try: auth_msg_data = json_loads(msg.data) diff --git a/homeassistant/components/wyoming/data.py b/homeassistant/components/wyoming/data.py index e333a740741..1ee0f24f805 100644 --- a/homeassistant/components/wyoming/data.py +++ b/homeassistant/components/wyoming/data.py @@ -100,7 +100,7 @@ async def load_wyoming_info( while True: event = await client.read_event() if event is None: - raise WyomingError( + raise WyomingError( # noqa: TRY301 "Connection closed unexpectedly", ) diff --git a/pyproject.toml b/pyproject.toml index e94c9e96225..1cccc155d34 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -821,8 +821,7 @@ ignore = [ "PLE0605", # temporarily disabled - "RET503", - "TRY301" + "RET503" ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index 1f1c4464c71..83adab8200b 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -36,7 +36,7 @@ async def get_error_log(hass_ws_client): def _generate_and_log_exception(exception, log): try: - raise Exception(exception) # noqa: TRY002 + raise Exception(exception) # noqa: TRY002, TRY301 except Exception: _LOGGER.exception(log) @@ -461,7 +461,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: in a test because the test is not a component. """ try: - raise ValueError("test") + raise ValueError("test") # noqa: TRY301 except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( @@ -486,7 +486,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: async def test_formatting_exception(hass: HomeAssistant) -> None: """Test that exceptions are formatted correctly.""" try: - raise ValueError("test") + raise ValueError("test") # noqa: TRY301 except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( From e64ca7c274fd12208c0e0ecc4058cb01dad9a301 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 12 Aug 2024 10:04:16 +0200 Subject: [PATCH 0568/1635] Enable implicit-return (RET503) rule in ruff (#122771) --- homeassistant/components/control4/__init__.py | 3 ++- homeassistant/components/recorder/pool.py | 2 +- homeassistant/components/stream/fmp4utils.py | 3 ++- pyproject.toml | 5 +---- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index c9a6eab5c62..a3d0cebd1fc 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -50,7 +50,8 @@ PLATFORMS = [Platform.LIGHT, Platform.MEDIA_PLAYER] async def call_c4_api_retry(func, *func_args): """Call C4 API function and retry on failure.""" - for i in range(API_RETRY_TIMES): + # Ruff doesn't understand this loop - the exception is always raised after the retries + for i in range(API_RETRY_TIMES): # noqa: RET503 try: return await func(*func_args) except client_exceptions.ClientError as exception: diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index 0fa0e82e98b..30f8fa8d07a 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -100,7 +100,7 @@ class RecorderPool(SingletonThreadPool, NullPool): # which is allowed but discouraged since its much slower return self._do_get_db_connection_protected() # In the event loop, 
raise an exception - raise_for_blocking_call( + raise_for_blocking_call( # noqa: RET503 self._do_get_db_connection_protected, strict=True, advise_msg=ADVISE_MSG, diff --git a/homeassistant/components/stream/fmp4utils.py b/homeassistant/components/stream/fmp4utils.py index 255d75e3b79..5080678e3ca 100644 --- a/homeassistant/components/stream/fmp4utils.py +++ b/homeassistant/components/stream/fmp4utils.py @@ -149,7 +149,8 @@ def get_codec_string(mp4_bytes: bytes) -> str: def find_moov(mp4_io: BufferedIOBase) -> int: """Find location of moov atom in a BufferedIOBase mp4.""" index = 0 - while 1: + # Ruff doesn't understand this loop - the exception is always raised at the end + while 1: # noqa: RET503 mp4_io.seek(index) box_header = mp4_io.read(8) if len(box_header) != 8 or box_header[0:4] == b"\x00\x00\x00\x00": diff --git a/pyproject.toml b/pyproject.toml index 1cccc155d34..5f6324bbac5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -818,10 +818,7 @@ ignore = [ "ISC001", # Disabled because ruff does not understand type of __all__ generated by a function - "PLE0605", - - # temporarily disabled - "RET503" + "PLE0605" ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] From 8cfac68317288fcb88888e369d3bd4302037b77b Mon Sep 17 00:00:00 2001 From: Willem-Jan van Rootselaar Date: Mon, 12 Aug 2024 10:57:51 +0200 Subject: [PATCH 0569/1635] Bump BSBLan to 0.6.2 (#123594) * chore: Update bsblan library to version 0.6.1 * add dataclass BSBLANConfig remove session as bsblan has it's own session * Update temperature unit handling in BSBLANClimate * chore: Remove unused constant in bsblan/const.py * chore: Update python-bsblan library to version 0.6.2 * feat: Add async_get_clientsession to BSBLAN initialization This commit adds the `async_get_clientsession` function to the initialization of the `BSBLAN` class in both `__init__.py` and `config_flow.py` files. This allows the `BSBLAN` instance to have its own session for making HTTP requests. This change improves the performance and reliability of the BSBLAN integration. 
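A minimal usage sketch of the new initialization pattern described above, assuming the library is used outside Home Assistant with a plain aiohttp ClientSession; the BSBLANConfig field names, the BSBLAN(config, session) constructor and the device() call mirror the diff below, while the host value is a placeholder and not taken from the patch:

import asyncio

from aiohttp import ClientSession
from bsblan import BSBLAN, BSBLANConfig


async def read_device_info(
    host: str,
    port: int = 80,
    passkey: str | None = None,
    username: str | None = None,
    password: str | None = None,
) -> None:
    """Build the config object once, then hand BSBLAN an externally managed session."""
    config = BSBLANConfig(
        host=host,
        passkey=passkey,
        port=port,
        username=username,
        password=password,
    )
    async with ClientSession() as session:
        client = BSBLAN(config, session)  # the client reuses the shared session
        device = await client.device()  # same data call the integration makes
        print(device.MAC)


if __name__ == "__main__":
    # "192.168.1.100" is a placeholder; point this at a real BSB-LAN device.
    asyncio.run(read_device_info("192.168.1.100"))
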
--- homeassistant/components/bsblan/__init__.py | 16 +++++++++++----- homeassistant/components/bsblan/climate.py | 2 +- homeassistant/components/bsblan/config_flow.py | 12 ++++++------ homeassistant/components/bsblan/const.py | 2 -- homeassistant/components/bsblan/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 7 files changed, 21 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/bsblan/__init__.py b/homeassistant/components/bsblan/__init__.py index 9a471329ba9..113a582f403 100644 --- a/homeassistant/components/bsblan/__init__.py +++ b/homeassistant/components/bsblan/__init__.py @@ -2,7 +2,7 @@ import dataclasses -from bsblan import BSBLAN, Device, Info, StaticState +from bsblan import BSBLAN, BSBLANConfig, Device, Info, StaticState from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -35,22 +35,28 @@ class HomeAssistantBSBLANData: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up BSB-Lan from a config entry.""" - session = async_get_clientsession(hass) - bsblan = BSBLAN( - entry.data[CONF_HOST], + # create config using BSBLANConfig + config = BSBLANConfig( + host=entry.data[CONF_HOST], passkey=entry.data[CONF_PASSKEY], port=entry.data[CONF_PORT], username=entry.data.get(CONF_USERNAME), password=entry.data.get(CONF_PASSWORD), - session=session, ) + # create BSBLAN client + session = async_get_clientsession(hass) + bsblan = BSBLAN(config, session) + + # Create and perform first refresh of the coordinator coordinator = BSBLanUpdateCoordinator(hass, entry, bsblan) await coordinator.async_config_entry_first_refresh() + # Fetch all required data concurrently device = await bsblan.device() info = await bsblan.info() static = await bsblan.static_values() + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantBSBLANData( client=bsblan, coordinator=coordinator, diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index 1b300e1e738..4d6514251cb 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -103,7 +103,7 @@ class BSBLANClimate( self._attr_min_temp = float(static.min_temp.value) self._attr_max_temp = float(static.max_temp.value) # check if self.coordinator.data.current_temperature.unit is "°C" or "°C" - if self.coordinator.data.current_temperature.unit in ("°C", "°C"): + if static.min_temp.unit in ("°C", "°C"): self._attr_temperature_unit = UnitOfTemperature.CELSIUS else: self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT diff --git a/homeassistant/components/bsblan/config_flow.py b/homeassistant/components/bsblan/config_flow.py index 9732f0a77a9..a1d7d6d403a 100644 --- a/homeassistant/components/bsblan/config_flow.py +++ b/homeassistant/components/bsblan/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from bsblan import BSBLAN, BSBLANError +from bsblan import BSBLAN, BSBLANConfig, BSBLANError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -80,15 +80,15 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN): async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None: """Get device information from an BSBLAN device.""" - session = async_get_clientsession(self.hass) - bsblan = BSBLAN( + config = BSBLANConfig( host=self.host, - username=self.username, - password=self.password, passkey=self.passkey, port=self.port, - session=session, + 
username=self.username, + password=self.password, ) + session = async_get_clientsession(self.hass) + bsblan = BSBLAN(config, session) device = await bsblan.device() self.mac = device.MAC diff --git a/homeassistant/components/bsblan/const.py b/homeassistant/components/bsblan/const.py index 5bca20cb4d4..25d9dec865b 100644 --- a/homeassistant/components/bsblan/const.py +++ b/homeassistant/components/bsblan/const.py @@ -21,6 +21,4 @@ ATTR_OUTSIDE_TEMPERATURE: Final = "outside_temperature" CONF_PASSKEY: Final = "passkey" -CONF_DEVICE_IDENT: Final = "RVS21.831F/127" - DEFAULT_PORT: Final = 80 diff --git a/homeassistant/components/bsblan/manifest.json b/homeassistant/components/bsblan/manifest.json index fb3c9b49e4c..6cd8608c42d 100644 --- a/homeassistant/components/bsblan/manifest.json +++ b/homeassistant/components/bsblan/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["bsblan"], - "requirements": ["python-bsblan==0.5.19"] + "requirements": ["python-bsblan==0.6.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 8cca616dc16..d0bd63bd826 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2250,7 +2250,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.5.19 +python-bsblan==0.6.2 # homeassistant.components.clementine python-clementine-remote==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f63b962b489..b6c15fc8942 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1795,7 +1795,7 @@ python-MotionMount==2.0.0 python-awair==0.2.4 # homeassistant.components.bsblan -python-bsblan==0.5.19 +python-bsblan==0.6.2 # homeassistant.components.ecobee python-ecobee-api==0.2.18 From 0803ac9b0ba7a3a75a5e3b473eef9eada342c700 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Mon, 12 Aug 2024 11:26:42 +0200 Subject: [PATCH 0570/1635] Add Swiss public transport fetch connections service (#114671) * add service to fetch more connections * improve error messages * better errors * wip * fix service register * add working tests * improve tests * temp availability * test availability * remove availability test * change error type for coordinator update * fix missed coverage * convert from entity service to integration service * cleanup changes * add more tests for the service --- .../swiss_public_transport/__init__.py | 16 +- .../swiss_public_transport/const.py | 9 +- .../swiss_public_transport/coordinator.py | 20 +- .../swiss_public_transport/icons.json | 3 + .../swiss_public_transport/sensor.py | 4 +- .../swiss_public_transport/services.py | 89 +++++++ .../swiss_public_transport/services.yaml | 14 ++ .../swiss_public_transport/strings.json | 25 ++ script/hassfest/translations.py | 1 + .../swiss_public_transport/__init__.py | 12 + .../fixtures/connections.json | 130 ++++++++++ .../swiss_public_transport/test_init.py | 2 +- .../swiss_public_transport/test_service.py | 226 ++++++++++++++++++ 13 files changed, 541 insertions(+), 10 deletions(-) create mode 100644 homeassistant/components/swiss_public_transport/services.py create mode 100644 homeassistant/components/swiss_public_transport/services.yaml create mode 100644 tests/components/swiss_public_transport/fixtures/connections.json create mode 100644 tests/components/swiss_public_transport/test_service.py diff --git a/homeassistant/components/swiss_public_transport/__init__.py b/homeassistant/components/swiss_public_transport/__init__.py index 1242c95269e..3e29fb9c746 100644 --- 
a/homeassistant/components/swiss_public_transport/__init__.py +++ b/homeassistant/components/swiss_public_transport/__init__.py @@ -11,18 +11,32 @@ from opendata_transport.exceptions import ( from homeassistant import config_entries, core from homeassistant.const import Platform from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType from .const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, PLACEHOLDERS from .coordinator import SwissPublicTransportDataUpdateCoordinator from .helper import unique_id_from_config +from .services import setup_services _LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.SENSOR] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: core.HomeAssistant, config: ConfigType) -> bool: + """Set up the Swiss public transport component.""" + setup_services(hass) + return True + async def async_setup_entry( hass: core.HomeAssistant, entry: config_entries.ConfigEntry diff --git a/homeassistant/components/swiss_public_transport/const.py b/homeassistant/components/swiss_public_transport/const.py index 32b6427ced5..c02f36f2f25 100644 --- a/homeassistant/components/swiss_public_transport/const.py +++ b/homeassistant/components/swiss_public_transport/const.py @@ -9,12 +9,19 @@ CONF_START: Final = "from" CONF_VIA: Final = "via" DEFAULT_NAME = "Next Destination" +DEFAULT_UPDATE_TIME = 90 MAX_VIA = 5 -SENSOR_CONNECTIONS_COUNT = 3 +CONNECTIONS_COUNT = 3 +CONNECTIONS_MAX = 15 PLACEHOLDERS = { "stationboard_url": "http://transport.opendata.ch/examples/stationboard.html", "opendata_url": "http://transport.opendata.ch", } + +ATTR_CONFIG_ENTRY_ID: Final = "config_entry_id" +ATTR_LIMIT: Final = "limit" + +SERVICE_FETCH_CONNECTIONS = "fetch_connections" diff --git a/homeassistant/components/swiss_public_transport/coordinator.py b/homeassistant/components/swiss_public_transport/coordinator.py index ae7e1b2366d..114215520ac 100644 --- a/homeassistant/components/swiss_public_transport/coordinator.py +++ b/homeassistant/components/swiss_public_transport/coordinator.py @@ -7,14 +7,17 @@ import logging from typing import TypedDict from opendata_transport import OpendataTransport -from opendata_transport.exceptions import OpendataTransportError +from opendata_transport.exceptions import ( + OpendataTransportConnectionError, + OpendataTransportError, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util -from .const import DOMAIN, SENSOR_CONNECTIONS_COUNT +from .const import CONNECTIONS_COUNT, DEFAULT_UPDATE_TIME, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -54,7 +57,7 @@ class SwissPublicTransportDataUpdateCoordinator( hass, _LOGGER, name=DOMAIN, - update_interval=timedelta(seconds=90), + update_interval=timedelta(seconds=DEFAULT_UPDATE_TIME), ) self._opendata = opendata @@ -74,14 +77,21 @@ class SwissPublicTransportDataUpdateCoordinator( return None async def _async_update_data(self) -> list[DataConnection]: + return await self.fetch_connections(limit=CONNECTIONS_COUNT) + + async def fetch_connections(self, 
limit: int) -> list[DataConnection]: + """Fetch connections using the opendata api.""" + self._opendata.limit = limit try: await self._opendata.async_get_data() + except OpendataTransportConnectionError as e: + _LOGGER.warning("Connection to transport.opendata.ch cannot be established") + raise UpdateFailed from e except OpendataTransportError as e: _LOGGER.warning( "Unable to connect and retrieve data from transport.opendata.ch" ) raise UpdateFailed from e - connections = self._opendata.connections return [ DataConnection( @@ -95,6 +105,6 @@ class SwissPublicTransportDataUpdateCoordinator( remaining_time=str(self.remaining_time(connections[i]["departure"])), delay=connections[i]["delay"], ) - for i in range(SENSOR_CONNECTIONS_COUNT) + for i in range(limit) if len(connections) > i and connections[i] is not None ] diff --git a/homeassistant/components/swiss_public_transport/icons.json b/homeassistant/components/swiss_public_transport/icons.json index 10573b8f5c3..7c2e5436834 100644 --- a/homeassistant/components/swiss_public_transport/icons.json +++ b/homeassistant/components/swiss_public_transport/icons.json @@ -23,5 +23,8 @@ "default": "mdi:clock-plus" } } + }, + "services": { + "fetch_connections": "mdi:bus-clock" } } diff --git a/homeassistant/components/swiss_public_transport/sensor.py b/homeassistant/components/swiss_public_transport/sensor.py index 88a6dbecae4..c186b963705 100644 --- a/homeassistant/components/swiss_public_transport/sensor.py +++ b/homeassistant/components/swiss_public_transport/sensor.py @@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, SENSOR_CONNECTIONS_COUNT +from .const import CONNECTIONS_COUNT, DOMAIN from .coordinator import DataConnection, SwissPublicTransportDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -46,7 +46,7 @@ SENSORS: tuple[SwissPublicTransportSensorEntityDescription, ...] 
= ( value_fn=lambda data_connection: data_connection["departure"], index=i, ) - for i in range(SENSOR_CONNECTIONS_COUNT) + for i in range(CONNECTIONS_COUNT) ], SwissPublicTransportSensorEntityDescription( key="duration", diff --git a/homeassistant/components/swiss_public_transport/services.py b/homeassistant/components/swiss_public_transport/services.py new file mode 100644 index 00000000000..e8b7c6bd458 --- /dev/null +++ b/homeassistant/components/swiss_public_transport/services.py @@ -0,0 +1,89 @@ +"""Define services for the Swiss public transport integration.""" + +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, +) +from homeassistant.helpers.update_coordinator import UpdateFailed + +from .const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_LIMIT, + CONNECTIONS_COUNT, + CONNECTIONS_MAX, + DOMAIN, + SERVICE_FETCH_CONNECTIONS, +) + +SERVICE_FETCH_CONNECTIONS_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Optional(ATTR_LIMIT, default=CONNECTIONS_COUNT): NumberSelector( + NumberSelectorConfig( + min=1, max=CONNECTIONS_MAX, mode=NumberSelectorMode.BOX + ) + ), + } +) + + +def async_get_entry( + hass: HomeAssistant, config_entry_id: str +) -> config_entries.ConfigEntry: + """Get the Swiss public transport config entry.""" + if not (entry := hass.config_entries.async_get_entry(config_entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="config_entry_not_found", + translation_placeholders={"target": config_entry_id}, + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_loaded", + translation_placeholders={"target": entry.title}, + ) + return entry + + +def setup_services(hass: HomeAssistant) -> None: + """Set up the services for the Swiss public transport integration.""" + + async def async_fetch_connections( + call: ServiceCall, + ) -> ServiceResponse: + """Fetch a set of connections.""" + config_entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID]) + limit = call.data.get(ATTR_LIMIT) or CONNECTIONS_COUNT + coordinator = hass.data[DOMAIN][config_entry.entry_id] + try: + connections = await coordinator.fetch_connections(limit=int(limit)) + except UpdateFailed as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "error": str(e), + }, + ) from e + return {"connections": connections} + + hass.services.async_register( + DOMAIN, + SERVICE_FETCH_CONNECTIONS, + async_fetch_connections, + schema=SERVICE_FETCH_CONNECTIONS_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/swiss_public_transport/services.yaml b/homeassistant/components/swiss_public_transport/services.yaml new file mode 100644 index 00000000000..d88dad2ca1f --- /dev/null +++ b/homeassistant/components/swiss_public_transport/services.yaml @@ -0,0 +1,14 @@ +fetch_connections: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: swiss_public_transport + limit: + example: 3 + selector: + number: + min: 1 + max: 15 + step: 1 diff --git 
a/homeassistant/components/swiss_public_transport/strings.json b/homeassistant/components/swiss_public_transport/strings.json index 4f4bc0522fc..29e73978538 100644 --- a/homeassistant/components/swiss_public_transport/strings.json +++ b/homeassistant/components/swiss_public_transport/strings.json @@ -49,12 +49,37 @@ } } }, + "services": { + "fetch_connections": { + "name": "Fetch Connections", + "description": "Fetch a list of connections from the swiss public transport.", + "fields": { + "config_entry_id": { + "name": "Instance", + "description": "Swiss public transport instance to fetch connections for." + }, + "limit": { + "name": "Limit", + "description": "Number of connections to fetch from [1-15]" + } + } + } + }, "exceptions": { "invalid_data": { "message": "Setup failed for entry {config_title} with invalid data, check at the [stationboard]({stationboard_url}) if your station names are valid.\n{error}" }, "request_timeout": { "message": "Timeout while connecting for entry {config_title}.\n{error}" + }, + "cannot_connect": { + "message": "Cannot connect to server.\n{error}" + }, + "not_loaded": { + "message": "{target} is not loaded." + }, + "config_entry_not_found": { + "message": "Swiss public transport integration instance \"{target}\" not found." } } } diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index c39c070eba2..c5efd05948f 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -41,6 +41,7 @@ ALLOW_NAME_TRANSLATION = { "local_todo", "nmap_tracker", "rpi_power", + "swiss_public_transport", "waze_travel_time", "zodiac", } diff --git a/tests/components/swiss_public_transport/__init__.py b/tests/components/swiss_public_transport/__init__.py index 3859a630c31..98262324b11 100644 --- a/tests/components/swiss_public_transport/__init__.py +++ b/tests/components/swiss_public_transport/__init__.py @@ -1 +1,13 @@ """Tests for the swiss_public_transport integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json new file mode 100644 index 00000000000..4edead56f14 --- /dev/null +++ b/tests/components/swiss_public_transport/fixtures/connections.json @@ -0,0 +1,130 @@ +[ + { + "departure": "2024-01-06T18:03:00+0100", + "number": 0, + "platform": 0, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:04:00+0100", + "number": 1, + "platform": 1, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:05:00+0100", + "number": 2, + "platform": 2, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:06:00+0100", + "number": 3, + "platform": 3, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:07:00+0100", + "number": 4, + "platform": 4, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:08:00+0100", + "number": 5, + "platform": 5, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:09:00+0100", + "number": 6, + "platform": 6, + "transfers": 0, + 
"duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:10:00+0100", + "number": 7, + "platform": 7, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:11:00+0100", + "number": 8, + "platform": 8, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:12:00+0100", + "number": 9, + "platform": 9, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:13:00+0100", + "number": 10, + "platform": 10, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:14:00+0100", + "number": 11, + "platform": 11, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:15:00+0100", + "number": 12, + "platform": 12, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:16:00+0100", + "number": 13, + "platform": 13, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:17:00+0100", + "number": 14, + "platform": 14, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:18:00+0100", + "number": 15, + "platform": 15, + "transfers": 0, + "duration": "10", + "delay": 0 + } +] diff --git a/tests/components/swiss_public_transport/test_init.py b/tests/components/swiss_public_transport/test_init.py index 47360f93cf2..7ee8b696499 100644 --- a/tests/components/swiss_public_transport/test_init.py +++ b/tests/components/swiss_public_transport/test_init.py @@ -1,4 +1,4 @@ -"""Test the swiss_public_transport config flow.""" +"""Test the swiss_public_transport integration.""" from unittest.mock import AsyncMock, patch diff --git a/tests/components/swiss_public_transport/test_service.py b/tests/components/swiss_public_transport/test_service.py new file mode 100644 index 00000000000..34640de9f21 --- /dev/null +++ b/tests/components/swiss_public_transport/test_service.py @@ -0,0 +1,226 @@ +"""Test the swiss_public_transport service.""" + +import json +import logging +from unittest.mock import AsyncMock, patch + +from opendata_transport.exceptions import ( + OpendataTransportConnectionError, + OpendataTransportError, +) +import pytest +from voluptuous import error as vol_er + +from homeassistant.components.swiss_public_transport.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_LIMIT, + CONF_DESTINATION, + CONF_START, + CONNECTIONS_COUNT, + CONNECTIONS_MAX, + DOMAIN, + SERVICE_FETCH_CONNECTIONS, +) +from homeassistant.components.swiss_public_transport.helper import unique_id_from_config +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . 
import setup_integration + +from tests.common import MockConfigEntry, load_fixture + +_LOGGER = logging.getLogger(__name__) + +MOCK_DATA_STEP_BASE = { + CONF_START: "test_start", + CONF_DESTINATION: "test_destination", +} + + +@pytest.mark.parametrize( + ("limit", "config_data"), + [ + (1, MOCK_DATA_STEP_BASE), + (2, MOCK_DATA_STEP_BASE), + (3, MOCK_DATA_STEP_BASE), + (CONNECTIONS_MAX, MOCK_DATA_STEP_BASE), + (None, MOCK_DATA_STEP_BASE), + ], +) +async def test_service_call_fetch_connections_success( + hass: HomeAssistant, + limit: int, + config_data, +) -> None: + """Test the fetch_connections service.""" + + unique_id = unique_id_from_config(config_data) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config_data, + title=f"Service test call with limit={limit}", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN))[ + 0 : (limit or CONNECTIONS_COUNT) + 2 + ] + + await setup_integration(hass, config_entry) + + data = {ATTR_CONFIG_ENTRY_ID: config_entry.entry_id} + if limit is not None: + data[ATTR_LIMIT] = limit + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + response = await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data=data, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + assert response["connections"] is not None + assert len(response["connections"]) == (limit or CONNECTIONS_COUNT) + + +@pytest.mark.parametrize( + ("limit", "config_data", "expected_result", "raise_error"), + [ + (-1, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), + (0, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), + ( + CONNECTIONS_MAX + 1, + MOCK_DATA_STEP_BASE, + pytest.raises(vol_er.MultipleInvalid), + None, + ), + ( + 1, + MOCK_DATA_STEP_BASE, + pytest.raises(HomeAssistantError), + OpendataTransportConnectionError(), + ), + ( + 2, + MOCK_DATA_STEP_BASE, + pytest.raises(HomeAssistantError), + OpendataTransportError(), + ), + ], +) +async def test_service_call_fetch_connections_error( + hass: HomeAssistant, + limit, + config_data, + expected_result, + raise_error, +) -> None: + """Test service call with standard error.""" + + unique_id = unique_id_from_config(config_data) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config_data, + title=f"Service test call with limit={limit} and error={raise_error}", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) + + await setup_integration(hass, config_entry) + + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + mock().async_get_data.side_effect = raise_error + with expected_result: + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + ATTR_LIMIT: limit, + }, + blocking=True, + return_response=True, + ) + + +async def test_service_call_load_unload( + hass: HomeAssistant, +) -> None: + """Test service call with integration error.""" + + unique_id = unique_id_from_config(MOCK_DATA_STEP_BASE) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=MOCK_DATA_STEP_BASE, + 
title="Service test call for unloaded entry", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + bad_entry_id = "bad_entry_id" + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) + + await setup_integration(hass, config_entry) + + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + response = await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + assert response["connections"] is not None + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + ServiceValidationError, match=f"{config_entry.title} is not loaded" + ): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + with pytest.raises( + ServiceValidationError, + match=f'Swiss public transport integration instance "{bad_entry_id}" not found', + ): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: bad_entry_id, + }, + blocking=True, + return_response=True, + ) From 8c5748dcc12cbc7101fa16ac9c0246a1f0cee153 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Mon, 12 Aug 2024 13:23:10 +0200 Subject: [PATCH 0571/1635] Remove regex constraint (#123650) --- homeassistant/package_constraints.txt | 5 ----- script/gen_requirements_all.py | 5 ----- 2 files changed, 10 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1f7c6bd61cc..e3f80f1b82d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -98,11 +98,6 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 -# regex causes segfault with version 2021.8.27 -# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error -# This is fixed in 2021.8.28 -regex==2021.8.28 - # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index f887f8113a7..aa92cacb237 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -120,11 +120,6 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 -# regex causes segfault with version 2021.8.27 -# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error -# This is fixed in 2021.8.28 -regex==2021.8.28 - # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. 
As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these From e8d7eb05ae875ad5a02930335917613dac0f3e82 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 12 Aug 2024 13:28:09 +0200 Subject: [PATCH 0572/1635] Delete unused snapshots (#123656) * Delete unused snapshots * Delete unused snapshots --- .../accuweather/snapshots/test_weather.ambr | 81 - .../aemet/snapshots/test_weather.ambr | 490 - .../snapshots/test_binary_sensor.ambr | 188 - .../snapshots/test_websocket.ambr | 3 - .../autarco/snapshots/test_sensor.ambr | 402 - .../axis/snapshots/test_binary_sensor.ambr | 825 -- .../azure_devops/snapshots/test_sensor.ambr | 759 - .../calendar/snapshots/test_init.ambr | 30 - .../snapshots/test_default_agent.ambr | 190 - .../deconz/snapshots/test_light.ambr | 1485 -- .../deconz/snapshots/test_number.ambr | 100 - .../deconz/snapshots/test_sensor.ambr | 96 - .../google_tasks/snapshots/test_todo.ambr | 3 - .../snapshots/test_exposed_entities.ambr | 10 - .../snapshots/test_init.ambr | 318 - .../snapshots/test_sensor.ambr | 58 - .../snapshots/test_binary_sensor.ambr | 98 - .../imgw_pib/snapshots/test_sensor.ambr | 110 - .../ipma/snapshots/test_weather.ambr | 115 - .../israel_rail/snapshots/test_sensor.ambr | 285 - .../ista_ecotrend/snapshots/test_sensor.ambr | 60 - .../mastodon/snapshots/test_init.ambr | 33 - .../mealie/snapshots/test_todo.ambr | 14 - .../media_extractor/snapshots/test_init.ambr | 45 - .../met_eireann/snapshots/test_weather.ambr | 100 - .../metoffice/snapshots/test_weather.ambr | 654 - .../components/nam/snapshots/test_sensor.ambr | 47 - .../nextcloud/snapshots/test_config_flow.ambr | 8 - .../nextdns/snapshots/test_binary_sensor.ambr | 2182 --- .../nextdns/snapshots/test_sensor.ambr | 3498 ----- .../nextdns/snapshots/test_switch.ambr | 1390 -- .../nibe_heatpump/snapshots/test_climate.ambr | 6 - .../ping/snapshots/test_binary_sensor.ambr | 60 - .../pyload/snapshots/test_switch.ambr | 47 - .../samsungtv/snapshots/test_init.ambr | 8 + .../smhi/snapshots/test_weather.ambr | 136 - .../solarlog/snapshots/test_sensor.ambr | 1091 -- .../template/snapshots/test_select.ambr | 2 +- .../template/snapshots/test_weather.ambr | 254 - .../tesla_fleet/snapshots/test_sensor.ambr | 61 - .../tessie/snapshots/test_cover.ambr | 33 - .../tomorrowio/snapshots/test_weather.ambr | 1120 -- .../tplink_omada/snapshots/test_switch.ambr | 289 - .../snapshots/test_binary_sensor.ambr | 47 - .../snapshots/test_device_tracker.ambr | 51 - .../tractive/snapshots/test_switch.ambr | 138 - .../unifi/snapshots/test_button.ambr | 94 - .../unifi/snapshots/test_image.ambr | 6 - .../unifi/snapshots/test_sensor.ambr | 135 - .../uptime/snapshots/test_sensor.ambr | 22 - .../snapshots/test_sensor.ambr | 750 - .../weatherkit/snapshots/test_weather.ambr | 12262 ---------------- .../wyoming/snapshots/test_config_flow.ambr | 38 - .../wyoming/snapshots/test_tts.ambr | 22 - .../zeversolar/snapshots/test_sensor.ambr | 20 - 55 files changed, 9 insertions(+), 30360 deletions(-) delete mode 100644 tests/components/mastodon/snapshots/test_init.ambr diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index 49bf4008884..cbe1891d216 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -1,85 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - 
dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'humidity': 60, - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'humidity': 52, - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': '2020-07-30T05:00:00+00:00', - 'humidity': 55, - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 'wind_speed': 18.5, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/aemet/snapshots/test_weather.ambr b/tests/components/aemet/snapshots/test_weather.ambr index f19f95a6e80..58c854dcda9 100644 --- a/tests/components/aemet/snapshots/test_weather.ambr +++ b/tests/components/aemet/snapshots/test_weather.ambr @@ -1,494 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-08T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 2.0, - 'templow': -1.0, - 'wind_bearing': 90.0, - 'wind_speed': 0.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T23:00:00+00:00', - 'precipitation_probability': 30, - 'temperature': 4.0, - 'templow': -4.0, - 'wind_bearing': 45.0, - 'wind_speed': 20.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 3.0, - 'templow': -7.0, - 'wind_bearing': 0.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-11T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': -1.0, - 'templow': -13.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-01-12T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 6.0, - 'templow': -11.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-13T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 6.0, - 'templow': -7.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-14T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 
5.0, - 'templow': -4.0, - 'wind_bearing': None, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T12:00:00+00:00', - 'precipitation': 2.7, - 'precipitation_probability': 100, - 'temperature': 0.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 22.0, - 'wind_speed': 15.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T13:00:00+00:00', - 'precipitation': 0.6, - 'precipitation_probability': 100, - 'temperature': 0.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 14.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T14:00:00+00:00', - 'precipitation': 0.8, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 20.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T15:00:00+00:00', - 'precipitation': 1.4, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 14.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T16:00:00+00:00', - 'precipitation': 1.2, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 9.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T17:00:00+00:00', - 'precipitation': 0.4, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 7.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T18:00:00+00:00', - 'precipitation': 0.3, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T19:00:00+00:00', - 'precipitation': 0.1, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-09T20:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 8.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-09T21:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 9.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T22:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T23:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T00:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 10.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T01:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T02:00:00+00:00', - 
'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 9.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T03:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T04:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': -1.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 9.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T06:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 18.0, - 'wind_speed': 13.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T07:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T08:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 31.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T09:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': 2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 22.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T12:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 20.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T13:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T14:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 4.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 28.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T15:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T16:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': 
'2021-01-10T17:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T18:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T19:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T20:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T21:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 27.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T22:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T23:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-11T00:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 27.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T01:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 22.0, - 'wind_speed': 12.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T02:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 17.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T03:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 11.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T04:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -4.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T05:00:00+00:00', - 'precipitation_probability': None, - 'temperature': -4.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 10.0, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.aemet': dict({ diff --git a/tests/components/apsystems/snapshots/test_binary_sensor.ambr b/tests/components/apsystems/snapshots/test_binary_sensor.ambr index bb06b019f31..0875c88976b 100644 --- a/tests/components/apsystems/snapshots/test_binary_sensor.ambr +++ b/tests/components/apsystems/snapshots/test_binary_sensor.ambr @@ -187,191 +187,3 @@ 'state': 'on', }) # --- -# name: 
test_all_entities[binary_sensor.mock_title_problem-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_problem', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'off_grid_status', - 'unique_id': 'MY_SERIAL_NUMBER_off_grid_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_problem-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_problem', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_problem_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_problem_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dc_1_short_circuit_error_status', - 'unique_id': 'MY_SERIAL_NUMBER_dc_1_short_circuit_error_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_problem_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_problem_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_problem_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_problem_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dc_2_short_circuit_error_status', - 'unique_id': 'MY_SERIAL_NUMBER_dc_2_short_circuit_error_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_problem_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_problem_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: 
test_all_entities[binary_sensor.mock_title_problem_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.mock_title_problem_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Problem', - 'platform': 'apsystems', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'output_fault_status', - 'unique_id': 'MY_SERIAL_NUMBER_output_fault_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[binary_sensor.mock_title_problem_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'problem', - 'friendly_name': 'Mock Title Problem', - }), - 'context': , - 'entity_id': 'binary_sensor.mock_title_problem_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index e5ae18d28f2..fb1ca6db121 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -663,9 +663,6 @@ # name: test_stt_stream_failed.2 None # --- -# name: test_text_only_pipeline.3 - None -# --- # name: test_text_only_pipeline[extra_msg0] dict({ 'language': 'en', diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr index 2ff0236a59f..0aa093d6a6d 100644 --- a/tests/components/autarco/snapshots/test_sensor.ambr +++ b/tests/components/autarco/snapshots/test_sensor.ambr @@ -401,405 +401,3 @@ 'state': '200', }) # --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy AC output total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_energy_total', - 'unique_id': 'test-serial-1_out_ac_energy_total', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter test-serial-1 Energy AC output total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC output', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_power', - 'unique_id': 'test-serial-1_out_ac_power', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter test-serial-1 Power AC output', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '200', - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy AC output total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_energy_total', - 'unique_id': 'test-serial-2_out_ac_energy_total', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter test-serial-2 Energy AC output total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC output', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_power', - 'unique_id': 'test-serial-2_out_ac_power', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter test-serial-2 Power AC output', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 
'sensor.inverter_test_serial_2_power_ac_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '500', - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production month', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_month', - 'unique_id': '1_solar_energy_production_month', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '58', - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production today', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_today', - 'unique_id': '1_solar_energy_production_today', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production today', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_total', - 'unique_id': '1_solar_energy_production_total', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'energy', - 'friendly_name': 'Solar Energy production total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_solar_sensors[sensor.solar_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power production', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_production', - 'unique_id': '1_solar_power_production', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Solar Power production', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '200', - }) -# --- diff --git a/tests/components/axis/snapshots/test_binary_sensor.ambr b/tests/components/axis/snapshots/test_binary_sensor.ambr index 94b1cc2fc2e..ab860489d55 100644 --- a/tests/components/axis/snapshots/test_binary_sensor.ambr +++ b/tests/components/axis/snapshots/test_binary_sensor.ambr @@ -1,79 +1,4 @@ # serializer version: 1 -# name: test_binary_sensors[event0-binary_sensor.name_daynight_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'name DayNight 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event0-daynight_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'name DayNight 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event0-daynight_1][binary_sensor.home_daynight_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_daynight_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DayNight 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:VideoSource/tnsaxis:DayNightVision-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event0-daynight_1][binary_sensor.home_daynight_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'home DayNight 1', - }), - 'context': , - 'entity_id': 
'binary_sensor.home_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event0][binary_sensor.home_daynight_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -121,156 +46,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event1-binary_sensor.name_sound_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'name Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event1-sound_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'name Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event1-sound_1][binary_sensor.home_sound_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_sound_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sound 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:AudioSource/tnsaxis:TriggerLevel-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event1-sound_1][binary_sensor.home_sound_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'home Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event10-binary_sensor.name_object_analytics_device1scenario8] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event10-object_analytics_device1scenario8] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event10-object_analytics_device1scenario8][binary_sensor.home_object_analytics_device1scenario8-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 
'original_name': 'Object Analytics Device1Scenario8', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8-Device1Scenario8', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event10-object_analytics_device1scenario8][binary_sensor.home_object_analytics_device1scenario8-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -365,81 +140,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[event2-binary_sensor.name_pir_sensor] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'name PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event2-pir_sensor] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'name PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event2-pir_sensor][binary_sensor.home_pir_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_pir_sensor', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PIR sensor', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:IO/Port-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event2-pir_sensor][binary_sensor.home_pir_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'home PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.home_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -487,81 +187,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[event3-binary_sensor.name_pir_0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event3-pir_0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_0', - 'last_changed': 
, - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event3-pir_0][binary_sensor.home_pir_0-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_pir_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PIR 0', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Sensor/PIR-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event3-pir_0][binary_sensor.home_pir_0-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.home_pir_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[event3][binary_sensor.home_pir_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -609,81 +234,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[event4-binary_sensor.name_fence_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event4-fence_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event4-fence_guard_profile_1][binary_sensor.home_fence_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_fence_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fence Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event4-fence_guard_profile_1][binary_sensor.home_fence_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ 
-731,81 +281,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event5-binary_sensor.name_motion_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event5-motion_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event5-motion_guard_profile_1][binary_sensor.home_motion_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_motion_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event5-motion_guard_profile_1][binary_sensor.home_motion_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -853,81 +328,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event6-binary_sensor.name_loitering_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_loitering_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event6-loitering_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_loitering_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event6-loitering_guard_profile_1][binary_sensor.home_loitering_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Loitering Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event6-loitering_guard_profile_1][binary_sensor.home_loitering_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -975,81 +375,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event7-binary_sensor.name_vmd4_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event7-vmd4_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event7-vmd4_profile_1][binary_sensor.home_vmd4_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_vmd4_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VMD4 Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event7-vmd4_profile_1][binary_sensor.home_vmd4_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1097,81 +422,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event8-binary_sensor.name_object_analytics_scenario_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_binary_sensors[event8-object_analytics_scenario_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event8-object_analytics_scenario_1][binary_sensor.home_object_analytics_scenario_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Object Analytics Scenario 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1-Device1Scenario1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event8-object_analytics_scenario_1][binary_sensor.home_object_analytics_scenario_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1219,81 +469,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event9-binary_sensor.name_vmd4_camera1profile9] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event9-vmd4_camera1profile9] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event9-vmd4_camera1profile9][binary_sensor.home_vmd4_camera1profile9-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VMD4 Camera1Profile9', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9-Camera1Profile9', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensors[event9-vmd4_camera1profile9][binary_sensor.home_vmd4_camera1profile9-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/azure_devops/snapshots/test_sensor.ambr b/tests/components/azure_devops/snapshots/test_sensor.ambr index 0ce82cae1e8..aa8d1d9e7e0 100644 --- a/tests/components/azure_devops/snapshots/test_sensor.ambr +++ b/tests/components/azure_devops/snapshots/test_sensor.ambr @@ -1,467 +1,4 @@ # serializer version: 1 -# name: test_sensors[sensor.testproject_ci_build_finish_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build finish time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'finish_time', - 'unique_id': 'testorg_1234_9876_finish_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_finish_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build finish time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_id-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_id', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build id', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'build_id', - 'unique_id': 'testorg_1234_9876_build_id', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_id-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_queue_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'has_entity_name': True, - 
'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build queue time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'queue_time', - 'unique_id': 'testorg_1234_9876_queue_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_queue_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build queue time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_reason-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_reason', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build reason', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reason', - 'unique_id': 'testorg_1234_9876_reason', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_reason-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build reason', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'manual', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_result-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_result', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build result', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'result', - 'unique_id': 'testorg_1234_9876_result', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_result-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build result', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_result', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'succeeded', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_branch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build source branch', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'source_branch', - 'unique_id': 'testorg_1234_9876_source_branch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_branch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source branch', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'main', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_version-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build source version', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'source_version', - 'unique_id': 'testorg_1234_9876_source_version', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_version-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source version', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '123', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_start_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build start time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start_time', - 'unique_id': 'testorg_1234_9876_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_start_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build start time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_status', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build status', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'status', - 'unique_id': 'testorg_1234_9876_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'completed', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_url-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_url', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build url', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'url', - 'unique_id': 'testorg_1234_9876_url', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_url-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build url', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_url', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors[sensor.testproject_ci_latest_build-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -891,52 +428,6 @@ 'state': '2021-01-01T00:00:00+00:00', }) # --- -# name: test_sensors[sensor.testproject_ci_latest_build_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI latest build status', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'status', - 'unique_id': 'testorg_1234_9876_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_latest_build_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'completed', - }) -# --- # name: test_sensors[sensor.testproject_ci_latest_build_url-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -983,243 +474,6 @@ 'state': 'unknown', }) # --- -# name: test_sensors[sensor.testproject_test_build_build_id-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.testproject_test_build_build_id', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Test Build build id', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'build_id', - 'unique_id': 'testorg_1234_9876_build_id', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_test_build_build_id-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject Test Build build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_test_build_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678', - }) -# --- -# name: test_sensors[sensor.testproject_test_build_latest_build-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_test_build_latest_build', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Test Build latest build', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'latest_build', - 'unique_id': 'testorg_1234_9876_latest_build', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_test_build_latest_build-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'definition_id': 9876, - 'definition_name': 'Test Build', - 'finish_time': '2021-01-01T00:00:00Z', - 'friendly_name': 'testproject Test Build latest build', - 'id': 5678, - 'queue_time': '2021-01-01T00:00:00Z', - 'reason': 'manual', - 'result': 'succeeded', - 'source_branch': 'main', - 'source_version': '123', - 'start_time': '2021-01-01T00:00:00Z', - 'status': 'completed', - 'url': None, - }), - 'context': , - 'entity_id': 'sensor.testproject_test_build_latest_build', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_finish_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build finish time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_id-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6789', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_queue_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build queue time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_sensors_missing_data[sensor.testproject_ci_build_reason-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build reason', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_result-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build result', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_result', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_source_branch-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source branch', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_source_version-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source version', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_start_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build start time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_status-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_url-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build url', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_url', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -1352,19 +606,6 @@ 'state': 'unknown', }) # --- -# name: test_sensors_missing_data[sensor.testproject_ci_latest_build_status-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_url-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/calendar/snapshots/test_init.ambr b/tests/components/calendar/snapshots/test_init.ambr index fe23c5dbac9..1b2bb9f0196 100644 --- a/tests/components/calendar/snapshots/test_init.ambr +++ b/tests/components/calendar/snapshots/test_init.ambr @@ -7,12 
+7,6 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-00:15:00-list_events] - dict({ - 'events': list([ - ]), - }) -# --- # name: test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-get_events] dict({ 'calendar.calendar_1': dict({ @@ -28,19 +22,6 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-list_events] - dict({ - 'events': list([ - dict({ - 'description': 'Future Description', - 'end': '2023-10-19T09:20:05-06:00', - 'location': 'Future Location', - 'start': '2023-10-19T08:20:05-06:00', - 'summary': 'Future Event', - }), - ]), - }) -# --- # name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-get_events] dict({ 'calendar.calendar_2': dict({ @@ -54,14 +35,3 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-list_events] - dict({ - 'events': list([ - dict({ - 'end': '2023-10-19T08:20:05-06:00', - 'start': '2023-10-19T07:20:05-06:00', - 'summary': 'Current Event', - }), - ]), - }) -# --- diff --git a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index d015b19ddc1..051613f0300 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -344,126 +344,6 @@ }), }) # --- -# name: test_intent_entity_exposed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called my cool light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called my cool light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.5 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- # name: 
test_intent_entity_fail_if_unexposed dict({ 'conversation_id': None, @@ -614,73 +494,3 @@ }), }) # --- -# name: test_intent_entity_renamed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_renamed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_renamed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', - }), - }), - }), - }) -# --- diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr index 46b6611dcbe..b5a9f7b5543 100644 --- a/tests/components/deconz/snapshots/test_light.ambr +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -1,308 +1,4 @@ # serializer version: 1 -# name: test_groups[input0-expected0-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 
'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 255, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 
'00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_groups[input0-light_payload0][light.dimmable_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -607,310 +303,6 @@ 'state': 'on', }) # --- -# name: test_groups[input1-expected1-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 50, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 
'friendly_name': 'Group', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': 
tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_groups[input1-light_payload0][light.dimmable_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1215,310 +607,6 @@ 'state': 'on', }) # --- -# name: test_groups[input2-expected2-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 50, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': 
None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_groups[input2-light_payload0][light.dimmable_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1823,97 +911,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload0-expected0][light.hue_go-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 
'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_go', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Go', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload0-expected0][light.hue_go-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': 375, - 'color_temp_kelvin': 2666, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Hue Go', - 'hs_color': tuple( - 28.47, - 66.821, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 165, - 84, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.53, - 0.388, - ), - }), - 'context': , - 'entity_id': 'light.hue_go', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload0][light.hue_go-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2005,97 +1002,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload1-expected1][light.hue_ensis-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 7142, - 'max_mireds': 650, - 'min_color_temp_kelvin': 1538, - 'min_mireds': 140, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_ensis', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Ensis', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload1-expected1][light.hue_ensis-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Hue Ensis', - 'hs_color': tuple( - 29.691, - 38.039, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 7142, - 'max_mireds': 650, - 'min_color_temp_kelvin': 1538, - 'min_mireds': 140, - 'rgb_color': tuple( - 255, - 206, - 158, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.427, - 0.373, - ), - }), - 'context': , - 'entity_id': 'light.hue_ensis', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload1][light.hue_ensis-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2187,113 +1093,6 @@ 'state': 'on', }) # --- -# name: 
test_lights[light_payload2-expected2][light.lidl_xmas_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'carnival', - 'collide', - 'fading', - 'fireworks', - 'flag', - 'glow', - 'rainbow', - 'snake', - 'snow', - 'sparkles', - 'steady', - 'strobe', - 'twinkle', - 'updown', - 'vintage', - 'waves', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.lidl_xmas_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'LIDL xmas light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '58:8e:81:ff:fe:db:7b:be-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload2-expected2][light.lidl_xmas_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 25, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'carnival', - 'collide', - 'fading', - 'fireworks', - 'flag', - 'glow', - 'rainbow', - 'snake', - 'snow', - 'sparkles', - 'steady', - 'strobe', - 'twinkle', - 'updown', - 'vintage', - 'waves', - ]), - 'friendly_name': 'LIDL xmas light', - 'hs_color': tuple( - 294.938, - 55.294, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 243, - 113, - 255, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.357, - 0.188, - ), - }), - 'context': , - 'entity_id': 'light.lidl_xmas_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload2][light.lidl_xmas_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2401,86 +1200,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload3-expected3][light.hue_white_ambiance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_white_ambiance', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue White Ambiance', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-02', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload3-expected3][light.hue_white_ambiance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': 396, - 'color_temp_kelvin': 2525, - 'friendly_name': 'Hue White Ambiance', - 'hs_color': tuple( - 28.809, - 71.624, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 160, - 72, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 
'xy_color': tuple( - 0.544, - 0.389, - ), - }), - 'context': , - 'entity_id': 'light.hue_white_ambiance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload3][light.hue_white_ambiance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2561,63 +1280,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload4-expected4][light.hue_filament-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_filament', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Filament', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-03', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload4-expected4][light.hue_filament-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'friendly_name': 'Hue Filament', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.hue_filament', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload4][light.hue_filament-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2675,62 +1337,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload5-expected5][light.simple_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.simple_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Simple Light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:23:45:67-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload5-expected5][light.simple_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': , - 'friendly_name': 'Simple Light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.simple_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload5][light.simple_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2787,97 +1393,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload6-expected6][light.gradient_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , 
- , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.gradient_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Gradient light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:0b:0c:0d:0e-0f', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload6-expected6][light.gradient_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 184, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Gradient light', - 'hs_color': tuple( - 98.095, - 74.118, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 135, - 255, - 66, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.2727, - 0.6226, - ), - }), - 'context': , - 'entity_id': 'light.gradient_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload6][light.gradient_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/snapshots/test_number.ambr b/tests/components/deconz/snapshots/test_number.ambr index 5311addc7a1..26e044e1d31 100644 --- a/tests/components/deconz/snapshots/test_number.ambr +++ b/tests/components/deconz/snapshots/test_number.ambr @@ -1,54 +1,4 @@ # serializer version: 1 -# name: test_number_entities[sensor_payload0-expected0][binary_sensor.presence_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.presence_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[sensor_payload0-expected0][binary_sensor.presence_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Presence sensor', - 'on': True, - 'temperature': 0.1, - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -104,56 +54,6 @@ 'state': '0', }) # --- -# name: test_number_entities[sensor_payload1-expected1][binary_sensor.presence_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.presence_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[sensor_payload1-expected1][binary_sensor.presence_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Presence sensor', - 'on': True, - 'temperature': 0.1, - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr index 7f12292abbd..dd097ea1c9a 100644 --- a/tests/components/deconz/snapshots/test_sensor.ambr +++ b/tests/components/deconz/snapshots/test_sensor.ambr @@ -548,53 +548,6 @@ 'state': '100', }) # --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][binary_sensor.soil_sensor_low_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.soil_sensor_low_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Soil Sensor Low Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-low_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][binary_sensor.soil_sensor_low_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Soil Sensor Low Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.soil_sensor_low_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1224,55 +1177,6 @@ 'state': '40', }) # --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][binary_sensor.alarm_10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.alarm_10', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alarm 10', - 'platform': 'deconz', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][binary_sensor.alarm_10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'safety', - 'friendly_name': 'Alarm 10', - 'on': True, - 'temperature': 26.0, - }), - 'context': , - 'entity_id': 'binary_sensor.alarm_10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index af8dec6a182..76611ba4a31 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -79,9 +79,6 @@ }), ]) # --- -# name: test_move_todo_item[api_responses0].4 - None -# --- # name: test_parent_child_ordering[api_responses0] list([ dict({ diff --git a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr index 55b95186b49..9c93655cd4e 100644 --- a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr +++ b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr @@ -13,13 +13,3 @@ dict({ }) # --- -# name: test_listeners - dict({ - 'light.kitchen': dict({ - 'should_expose': True, - }), - 'switch.test_unique1': mappingproxy({ - 'should_expose': True, - }), - }) -# --- diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 2e96295a0ab..078ef792a55 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -7010,324 +7010,6 @@ }), ]) # --- -# name: test_snapshots[haa_fan] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'José A. 
Jiménez Campos', - 'model': 'RavenSystem HAA', - 'name': 'HAA-C718B3', - 'name_by_user': None, - 'serial_number': 'C718B3-1', - 'suggested_area': None, - 'sw_version': '5.0.18', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'HAA-C718B3 Identify', - }), - 'entity_id': 'button.haa_c718b3_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_setup', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3 Setup', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'setup', - 'unique_id': '00:00:00:00:00:00_1_1010_1012', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3 Setup', - }), - 'entity_id': 'button.haa_c718b3_setup', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_update', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Update', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1010_1011', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'update', - 'friendly_name': 'HAA-C718B3 Update', - }), - 'entity_id': 'button.haa_c718b3_update', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': None, - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.haa_c718b3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 
'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3', - 'percentage': 66, - 'percentage_step': 33.333333333333336, - 'preset_mode': None, - 'preset_modes': None, - 'supported_features': , - }), - 'entity_id': 'fan.haa_c718b3', - 'state': 'on', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:2', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'José A. Jiménez Campos', - 'model': 'RavenSystem HAA', - 'name': 'HAA-C718B3', - 'name_by_user': None, - 'serial_number': 'C718B3-2', - 'suggested_area': None, - 'sw_version': '5.0.18', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'HAA-C718B3 Identify', - }), - 'entity_id': 'button.haa_c718b3_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.haa_c718b3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3', - }), - 'entity_id': 'switch.haa_c718b3', - 'state': 'off', - }), - }), - ]), - }), - ]) -# --- # name: test_snapshots[home_assistant_bridge_basic_cover] list([ dict({ diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index c727a49b71a..c260e6beba6 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -551,64 +551,6 @@ 'state': '2023-06-05T17:00:00+00:00', }) # --- -# name: test_sensor_snapshot[sensor.test_mower_1_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - ]), - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_work_area', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Mower 1 None', - 'options': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_mower_1_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Front lawn', - }) -# --- # name: test_sensor_snapshot[sensor.test_mower_1_number_of_charging_cycles-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr index f314a4be590..c5ae6880022 100644 --- a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr @@ -95,101 +95,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.station_name_flood_alarm', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flood alarm', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flood_alarm', - 'unique_id': '123_flood_alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'alarm_level': 630.0, - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'safety', - 'friendly_name': 'Station Name Flood alarm', - }), - 'context': , - 'entity_id': 'binary_sensor.station_name_flood_alarm', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_warning-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.station_name_flood_warning', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flood warning', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flood_warning', - 'unique_id': '123_flood_warning', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensor[binary_sensor.station_name_flood_warning-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'safety', - 'friendly_name': 'Station Name Flood warning', - 'warning_level': 590.0, - }), - 'context': , - 'entity_id': 'binary_sensor.station_name_flood_warning', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/imgw_pib/snapshots/test_sensor.ambr b/tests/components/imgw_pib/snapshots/test_sensor.ambr index 2638e468d92..6c69b890842 100644 --- a/tests/components/imgw_pib/snapshots/test_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_sensor.ambr @@ -213,113 +213,3 @@ 'state': '10.8', }) # --- -# name: test_sensor[sensor.station_name_water_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.station_name_water_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water level', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_level', - 'unique_id': '123_water_level', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.station_name_water_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'distance', - 'friendly_name': 'Station Name Water level', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.station_name_water_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '526.0', - }) -# --- -# name: test_sensor[sensor.station_name_water_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.station_name_water_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water temperature', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_temperature', - 'unique_id': '123_water_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.station_name_water_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'temperature', - 'friendly_name': 'Station Name Water temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.station_name_water_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.8', - }) -# --- diff --git a/tests/components/ipma/snapshots/test_weather.ambr b/tests/components/ipma/snapshots/test_weather.ambr index 1142cb7cfe5..80f385546d1 100644 
--- a/tests/components/ipma/snapshots/test_weather.ambr +++ b/tests/components/ipma/snapshots/test_weather.ambr @@ -1,119 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': '100.0', - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }) -# --- -# name: test_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.hometown': dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': '100.0', - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[forecast].1 - dict({ - 'weather.hometown': dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': 100.0, - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.hometown': dict({ diff --git a/tests/components/israel_rail/snapshots/test_sensor.ambr b/tests/components/israel_rail/snapshots/test_sensor.ambr index 9806ecb1fae..f851f1cd726 100644 --- a/tests/components/israel_rail/snapshots/test_sensor.ambr +++ b/tests/components/israel_rail/snapshots/test_sensor.ambr @@ -143,147 +143,6 @@ 'state': '2021-10-10T10:30:10+00:00', }) # --- -# name: test_valid_config[sensor.mock_title_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_none', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'platform', - 'unique_id': 'באר יעקב אשקלון_platform', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title None', - }), - 'context': , - 'entity_id': 'sensor.mock_title_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_valid_config[sensor.mock_title_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'trains', - 'unique_id': 'באר יעקב אשקלון_trains', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title None', - }), - 'context': , - 'entity_id': 'sensor.mock_title_none_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_valid_config[sensor.mock_title_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'train_number', - 'unique_id': 'באר יעקב אשקלון_train_number', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title None', - }), - 'context': , - 'entity_id': 'sensor.mock_title_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1234', - }) -# --- # name: test_valid_config[sensor.mock_title_platform-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -331,150 +190,6 @@ 'state': '1', }) # --- -# name: test_valid_config[sensor.mock_title_timestamp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_timestamp', - 'has_entity_name': True, - 'hidden_by': None, - 
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure0', - 'unique_id': 'באר יעקב אשקלון_departure', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Timestamp', - }), - 'context': , - 'entity_id': 'sensor.mock_title_timestamp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:10:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_timestamp_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure1', - 'unique_id': 'באר יעקב אשקלון_departure1', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Timestamp', - }), - 'context': , - 'entity_id': 'sensor.mock_title_timestamp_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:20:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_timestamp_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure2', - 'unique_id': 'באר יעקב אשקלון_departure2', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Timestamp', - }), - 'context': , - 'entity_id': 'sensor.mock_title_timestamp_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:30:10+00:00', - }) -# --- # name: test_valid_config[sensor.mock_title_train_number-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr index f9ab7a54b63..b5056019c74 100644 --- a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr +++ 
b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr @@ -1,64 +1,4 @@ # serializer version: 1 -# name: test_setup.32 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://ecotrend.ista.de/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'ista_ecotrend', - '26e93f1a-c828-11ea-87d0-0242ac130003', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'ista SE', - 'model': 'ista EcoTrend', - 'name': 'Luxemburger Str. 1', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_setup.33 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://ecotrend.ista.de/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'ista_ecotrend', - 'eaf5c5c8-889f-4a3c-b68c-e9a676505762', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'ista SE', - 'model': 'ista EcoTrend', - 'name': 'Bahnhofsstr. 1A', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- # name: test_setup[sensor.bahnhofsstr_1a_heating-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr deleted file mode 100644 index f0b650076be..00000000000 --- a/tests/components/mastodon/snapshots/test_init.ambr +++ /dev/null @@ -1,33 +0,0 @@ -# serializer version: 1 -# name: test_device_info - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'mastodon', - 'client_id', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'Mastodon gGmbH', - 'model': '@trwnh@mastodon.social', - 'model_id': None, - 'name': 'Mastodon', - 'name_by_user': None, - 'primary_config_entry': , - 'serial_number': None, - 'suggested_area': None, - 'sw_version': '4.0.0rc1', - 'via_device_id': None, - }) -# --- diff --git a/tests/components/mealie/snapshots/test_todo.ambr b/tests/components/mealie/snapshots/test_todo.ambr index a580862535e..4c58a839f57 100644 --- a/tests/components/mealie/snapshots/test_todo.ambr +++ b/tests/components/mealie/snapshots/test_todo.ambr @@ -140,17 +140,3 @@ 'state': '3', }) # --- -# name: test_get_todo_list_items - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Supermarket', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.mealie_supermarket', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- diff --git a/tests/components/media_extractor/snapshots/test_init.ambr b/tests/components/media_extractor/snapshots/test_init.ambr index ed56f40af73..9731a415c00 100644 --- a/tests/components/media_extractor/snapshots/test_init.ambr +++ b/tests/components/media_extractor/snapshots/test_init.ambr @@ -30,15 +30,6 @@ 'media_content_type': 'VIDEO', }) # --- -# name: test_play_media_service - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694794256/ei/sC0EZYCPHbuZx_AP3bGz0Ac/ip/84.31.234.146/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr2---sn-5hnekn7k.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hnekn7k,sn-5hne6nzy/ms/au,rdu/mv/m/mvi/2/pl/14/initcwndbps/2267500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694772337/fvip/3/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350018/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhAIC0iobMnRschmQ3QaYsytXg9eg7l9B_-UNvMciis4bmAiEAg-3jr6SwOfAGCCU-JyTyxcXmraug-hPcjjJzm__43ug%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIhAOlqbgmuueNhIuGENYKCsdwiNAUPheXw-RMUqsiaB7YuAiANN43FxJl14Ve_H_c9K-aDoXG4sI7PDCqKDhov6Qro_g%3D%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -57,24 +48,6 @@ 'media_content_type': 'AUDIO', }) # --- -# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-audio_media_extractor_config] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk5MTc5fX19XX0_&Signature=JtF8BXxTCElhjCrhnSAq3W6z960VmdVXx7BPhQvI0MCxr~J43JFGO8CVw9-VBM2oEf14mqWo63-C0FO29DvUuBZnmLD3dhDfryVfWJsrix7voimoRDaNFE~3zntDbg7O2S8uWYyZK8OZC9anzwokvjH7jbmviWqK4~2IM9dwgejGgzrQU1aadV2Yro7NJZnF7SD~7tVjkM-hBg~X5zDYVxmGrdzN3tFoLwRmUch6RNDL~1DcWBk0AveBKQFAdBrFBjDDUeIyDz9Idhw2aG9~fjfckcf95KwqrVQxz1N5XEzfNDDo8xkUgDt0eb9dtXdwxLJ0swC6e5VLS8bsH91GMg__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', - 'media_content_type': 'VIDEO', - }) -# --- -# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-empty_media_extractor_config] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk4NTkzfX19XX0_&Signature=flALJvEBnzS0ZOOhf0-07Ap~NURw2Gn2OqkeKKTTMX5HRGJw9eXFay79tcC4GsMMXWUgWoCx-n3yelpyilE2MOEIufBNUbjqRfMSJaX5YhYxjQdoDYuiU~gqBzJyPw9pKzr6P8~5HNKL3Idr0CNhUzdV6FQLaUPKMMibq9ghV833mUmdyvdk1~GZBc8MOg9GrTdcigGgpPzd-vrIMICMvFzFnwBOeOotxX2Vfqf9~wVekBKGlvB9A~7TlZ71lv9Fl9u4m8rse9E-mByweVc1M784ehJV3~tRPjuF~FXXWKP8x0nGJmoq7RAnG7iFIt~fQFmsfOq2o~PG7dHMRPh7hw__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://test.com/abc-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -93,15 +66,6 @@ 'media_content_type': 'AUDIO', }) # --- -# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config-] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805268/ei/tFgEZcu0DoOD-gaqg47wBA/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hne6nzy,sn-5hnekn7k/ms/au,rdu/mv/m/mvi/3/pl/22/initcwndbps/1957500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783146/fvip/2/short_key/1/keepalive/yes/fexp/24007246/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRQIhALAASH0_ZDQQoMA82qWNCXSHPZ0bb9TQldIs7AAxktiiAiASA5bQy7IAa6NwdGIOpfye5OgcY_BNuo0WgSdh84tosw%3D%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRgIhAIsDcLGH8KJpQpBgyJ5VWlDxfr75HyO8hMSVS9v7nRu4AiEA2xjtLZOzeNFoJlxwCsH3YqsUQt-BF_4gikhi_P4FbBc%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -111,15 +75,6 @@ 'media_content_type': 'VIDEO', }) # --- -# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config-] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805294/ei/zlgEZcCPFpqOx_APj42f2Ao/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,26/mn/sn-5hne6nzy,sn-aigzrnld/ms/au,onr/mv/m/mvi/3/pl/22/initcwndbps/2095000/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783390/fvip/1/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350017/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhANCPwWNfq6wBp1Xo1L8bRJpDrzOyv7kfH_J65cZ_PRZLAiEAwo-0wQgeIjPe7OgyAAvMCx_A9wd1h8Qyh7VntKwGJUs%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIgIqS9Ub_6L9ScKXr0T9bkeu6TZsEsyNApYfF_MqeukqECIQCMSeJ1sSEw5QGMgHAW8Fhsir4TYHEK5KVg-PzJbrT6hw%3D%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', diff --git a/tests/components/met_eireann/snapshots/test_weather.ambr b/tests/components/met_eireann/snapshots/test_weather.ambr index 90f36d09d25..de8b69de18a 100644 --- a/tests/components/met_eireann/snapshots/test_weather.ambr +++ b/tests/components/met_eireann/snapshots/test_weather.ambr @@ -1,104 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: 
test_forecast_service[forecast] - dict({ - 'weather.somewhere': dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[forecast].1 - dict({ - 'weather.somewhere': dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.somewhere': dict({ diff --git a/tests/components/metoffice/snapshots/test_weather.ambr b/tests/components/metoffice/snapshots/test_weather.ambr index a6991a8631b..0bbc0e06a0a 100644 --- a/tests/components/metoffice/snapshots/test_weather.ambr +++ b/tests/components/metoffice/snapshots/test_weather.ambr @@ -1,658 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 13.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-25T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 19.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T18:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 17.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 14.0, - 'wind_bearing': 'NW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T00:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 13.0, - 'wind_bearing': 'WSW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T03:00:00+00:00', - 
'precipitation_probability': 2, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T09:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T15:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T18:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T00:00:00+00:00', - 'precipitation_probability': 11, - 'temperature': 9.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T03:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 8.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T06:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 8.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 4, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T18:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-27T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T00:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 8.0, - 'wind_bearing': 'NNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 7.0, - 'wind_bearing': 'W', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-28T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 6.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-28T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': 
'2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T15:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T18:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NNE', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T00:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'E', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-29T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 8.0, - 'wind_bearing': 'SSE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T06:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 8.0, - 'wind_bearing': 'SE', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T09:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 10.0, - 'wind_bearing': 'SE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 47, - 'temperature': 12.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'pouring', - 'datetime': '2020-04-29T15:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T18:00:00+00:00', - 'precipitation_probability': 39, - 'temperature': 12.0, - 'wind_bearing': 'SSE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T21:00:00+00:00', - 'precipitation_probability': 19, - 'temperature': 11.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 13.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].3 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-25T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 19.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T18:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 17.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 
'partlycloudy', - 'datetime': '2020-04-25T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 14.0, - 'wind_bearing': 'NW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T00:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 13.0, - 'wind_bearing': 'WSW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T03:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T09:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T15:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T18:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T00:00:00+00:00', - 'precipitation_probability': 11, - 'temperature': 9.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T03:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 8.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T06:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 8.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 4, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T18:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-27T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T00:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 8.0, - 'wind_bearing': 'NNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 7.0, - 'wind_bearing': 'W', - 'wind_speed': 
6.44, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-28T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 6.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-28T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T15:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T18:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NNE', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T00:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'E', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-29T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 8.0, - 'wind_bearing': 'SSE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T06:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 8.0, - 'wind_bearing': 'SE', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T09:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 10.0, - 'wind_bearing': 'SE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 47, - 'temperature': 12.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'pouring', - 'datetime': '2020-04-29T15:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T18:00:00+00:00', - 'precipitation_probability': 39, - 'temperature': 12.0, - 'wind_bearing': 'SSE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T21:00:00+00:00', - 'precipitation_probability': 19, - 'temperature': 11.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].4 - dict({ - 'forecast': list([ - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.met_office_wavertree_daily': dict({ diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index 426b2ff2e03..16129c5d7ce 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -1,51 +1,4 @@ # serializer version: 1 -# name: test_sensor[button.nettigo_air_monitor_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.nettigo_air_monitor_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'nam', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aa:bb:cc:dd:ee:ff-restart', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[button.nettigo_air_monitor_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'Nettigo Air Monitor Restart', - }), - 'context': , - 'entity_id': 'button.nettigo_air_monitor_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nextcloud/snapshots/test_config_flow.ambr b/tests/components/nextcloud/snapshots/test_config_flow.ambr index 3334478ba24..06c4ce216db 100644 --- a/tests/components/nextcloud/snapshots/test_config_flow.ambr +++ b/tests/components/nextcloud/snapshots/test_config_flow.ambr @@ -1,12 +1,4 @@ # serializer version: 1 -# name: test_import - dict({ - 'password': 'nc_pass', - 'url': 'nc_url', - 'username': 'nc_user', - 'verify_ssl': True, - }) -# --- # name: test_reauth dict({ 'password': 'other_password', diff --git a/tests/components/nextdns/snapshots/test_binary_sensor.ambr b/tests/components/nextdns/snapshots/test_binary_sensor.ambr index bd4ecbba084..814b4c1ac16 100644 --- a/tests/components/nextdns/snapshots/test_binary_sensor.ambr +++ b/tests/components/nextdns/snapshots/test_binary_sensor.ambr @@ -1,1095 +1,4 @@ # serializer version: 1 -# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking 
links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake 
Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 
'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 
'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1184,1094 +93,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 
'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 
'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 
'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nextdns/snapshots/test_sensor.ambr b/tests/components/nextdns/snapshots/test_sensor.ambr index 34b40433e3b..14bebea53f8 100644 --- a/tests/components/nextdns/snapshots/test_sensor.ambr +++ b/tests/components/nextdns/snapshots/test_sensor.ambr @@ -1,144 +1,4 @@ # serializer version: 1 -# name: test_sensor[binary_sensor.fake_profile_device_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_connection_status', - 'unique_id': 'xyz12_this_device_nextdns_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.fake_profile_device_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, 
- 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device profile connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_profile_connection_status', - 'unique_id': 'xyz12_this_device_profile_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device profile connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[button.fake_profile_clear_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.fake_profile_clear_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clear logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'clear_logs', - 'unique_id': 'xyz12_clear_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[button.fake_profile_clear_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Clear logs', - }), - 'context': , - 'entity_id': 'button.fake_profile_clear_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1389,3361 +1249,3 @@ 'state': '40', }) # --- -# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 
'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_9gag-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block 9GAG', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_amazon-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_amazon', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Amazon', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_amazon', - 'unique_id': 'xyz12_block_amazon', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_amazon-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Amazon', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_amazon', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_bereal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bereal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block BeReal', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bereal', - 'unique_id': 'xyz12_block_bereal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_bereal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block BeReal', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bereal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_blizzard-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_blizzard', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Blizzard', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_blizzard', - 'unique_id': 'xyz12_block_blizzard', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_blizzard-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Blizzard', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_blizzard', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass 
methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_chatgpt-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_chatgpt', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block ChatGPT', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_chatgpt', - 'unique_id': 'xyz12_block_chatgpt', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_chatgpt-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block ChatGPT', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_chatgpt', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dailymotion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dailymotion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Dailymotion', - 'platform': 'nextdns', - 'previous_unique_id': None, - 
'supported_features': 0, - 'translation_key': 'block_dailymotion', - 'unique_id': 'xyz12_block_dailymotion', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dailymotion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Dailymotion', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dailymotion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dating', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_dating', - 'unique_id': 'xyz12_block_dating', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dating', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_discord-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_discord', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Discord', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_discord', - 'unique_id': 'xyz12_block_discord', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_discord-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Discord', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_discord', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_disney_plus-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disney_plus', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Disney Plus', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disneyplus', - 'unique_id': 'xyz12_block_disneyplus', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_disney_plus-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Disney Plus', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disney_plus', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_ebay-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_ebay', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block eBay', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ebay', - 'unique_id': 'xyz12_block_ebay', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_ebay-state] - StateSnapshot({ 
- 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block eBay', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_ebay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_facebook-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_facebook', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Facebook', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_facebook', - 'unique_id': 'xyz12_block_facebook', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_facebook-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Facebook', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_facebook', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_fortnite-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_fortnite', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Fortnite', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_fortnite', - 'unique_id': 'xyz12_block_fortnite', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_fortnite-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Fortnite', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_fortnite', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_gambling-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_gambling', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block gambling', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_gambling', - 'unique_id': 'xyz12_block_gambling', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_gambling-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block gambling', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_gambling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_google_chat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_google_chat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Google Chat', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_google_chat', - 'unique_id': 'xyz12_block_google_chat', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_google_chat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Google Chat', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_google_chat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_hbo_max-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_hbo_max', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block HBO Max', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_hbomax', - 'unique_id': 'xyz12_block_hbomax', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_hbo_max-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block HBO Max', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_hbo_max', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_hulu-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_hulu', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Hulu', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xyz12_block_hulu', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_hulu-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Hulu', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_hulu', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_imgur-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_imgur', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Imgur', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_imgur', - 'unique_id': 'xyz12_block_imgur', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_imgur-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Imgur', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_imgur', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_instagram-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_instagram', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Instagram', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_instagram', - 'unique_id': 'xyz12_block_instagram', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_instagram-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Instagram', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_instagram', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_league_of_legends-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_league_of_legends', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block League of Legends', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_leagueoflegends', - 'unique_id': 'xyz12_block_leagueoflegends', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_league_of_legends-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block League of Legends', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_league_of_legends', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_mastodon-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_mastodon', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Mastodon', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_mastodon', - 'unique_id': 'xyz12_block_mastodon', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_mastodon-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Mastodon', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_mastodon', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_messenger-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_messenger', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Messenger', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_messenger', - 'unique_id': 'xyz12_block_messenger', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_messenger-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Messenger', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_messenger', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_minecraft-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_minecraft', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Minecraft', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_minecraft', - 'unique_id': 'xyz12_block_minecraft', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_minecraft-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Minecraft', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_minecraft', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_netflix-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_netflix', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Netflix', - 
'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_netflix', - 'unique_id': 'xyz12_block_netflix', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_netflix-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Netflix', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_netflix', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_online_gaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_online_gaming', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block online gaming', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_online_gaming', - 'unique_id': 'xyz12_block_online_gaming', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_online_gaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block online gaming', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_online_gaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 
'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_pinterest-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_pinterest', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Pinterest', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_pinterest', - 'unique_id': 'xyz12_block_pinterest', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_pinterest-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Pinterest', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_pinterest', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_piracy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_piracy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block piracy', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_piracy', - 'unique_id': 'xyz12_block_piracy', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_piracy-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block piracy', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_piracy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_playstation_network-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_playstation_network', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block PlayStation Network', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_playstation_network', - 'unique_id': 'xyz12_block_playstation_network', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_playstation_network-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block PlayStation Network', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_playstation_network', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_porn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_porn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block porn', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_porn', - 'unique_id': 'xyz12_block_porn', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_porn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block porn', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_porn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_prime_video-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_prime_video', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Prime Video', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_primevideo', - 'unique_id': 'xyz12_block_primevideo', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_prime_video-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Prime Video', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_block_prime_video', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_reddit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_reddit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Reddit', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_reddit', - 'unique_id': 'xyz12_block_reddit', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_reddit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Reddit', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_reddit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_roblox-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_roblox', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Roblox', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_roblox', - 'unique_id': 'xyz12_block_roblox', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_roblox-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Roblox', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_roblox', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_signal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_signal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Signal', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_signal', - 'unique_id': 'xyz12_block_signal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_signal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Signal', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_signal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_skype-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, 
- 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_skype', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Skype', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_skype', - 'unique_id': 'xyz12_block_skype', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_skype-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Skype', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_skype', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_snapchat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_snapchat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Snapchat', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_snapchat', - 'unique_id': 'xyz12_block_snapchat', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_snapchat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Snapchat', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_snapchat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_social_networks-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_social_networks', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block social networks', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_social_networks', - 'unique_id': 'xyz12_block_social_networks', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_social_networks-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block social networks', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_social_networks', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_spotify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 
'switch.fake_profile_block_spotify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Spotify', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_spotify', - 'unique_id': 'xyz12_block_spotify', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_spotify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Spotify', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_spotify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_steam-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_steam', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Steam', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_steam', - 'unique_id': 'xyz12_block_steam', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_steam-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Steam', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_steam', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_telegram-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_telegram', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Telegram', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_telegram', - 'unique_id': 'xyz12_block_telegram', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_telegram-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Telegram', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_telegram', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tiktok-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tiktok', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'Block TikTok', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tiktok', - 'unique_id': 'xyz12_block_tiktok', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tiktok-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block TikTok', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tiktok', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tinder-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tinder', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Tinder', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tinder', - 'unique_id': 'xyz12_block_tinder', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tinder-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Tinder', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tinder', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tumblr-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tumblr', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Tumblr', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tumblr', - 'unique_id': 'xyz12_block_tumblr', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tumblr-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Tumblr', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tumblr', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_twitch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_twitch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Twitch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_twitch', - 'unique_id': 'xyz12_block_twitch', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensor[switch.fake_profile_block_twitch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Twitch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_twitch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_video_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_video_streaming', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block video streaming', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_video_streaming', - 'unique_id': 'xyz12_block_video_streaming', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_video_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block video streaming', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_video_streaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_vimeo-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_vimeo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Vimeo', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_vimeo', - 'unique_id': 'xyz12_block_vimeo', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_vimeo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Vimeo', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_vimeo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_vk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_vk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block VK', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_vk', - 'unique_id': 'xyz12_block_vk', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_vk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block VK', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_vk', - 
'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_whatsapp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_whatsapp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block WhatsApp', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_whatsapp', - 'unique_id': 'xyz12_block_whatsapp', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_whatsapp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block WhatsApp', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_whatsapp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block X (formerly Twitter)', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_twitter', - 'unique_id': 'xyz12_block_twitter', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block X (formerly Twitter)', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_xbox_live-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_xbox_live', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Xbox Live', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_xboxlive', - 'unique_id': 'xyz12_block_xboxlive', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_xbox_live-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Xbox Live', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_xbox_live', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_youtube-entry] 
- EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_youtube', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block YouTube', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_youtube', - 'unique_id': 'xyz12_block_youtube', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_youtube-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block YouTube', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_youtube', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_zoom-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_zoom', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Zoom', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_zoom', - 'unique_id': 'xyz12_block_zoom', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_zoom-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Zoom', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_zoom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 
'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 
'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 
'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- diff --git a/tests/components/nextdns/snapshots/test_switch.ambr b/tests/components/nextdns/snapshots/test_switch.ambr index 8472f02e8c5..3328e341a2e 100644 --- a/tests/components/nextdns/snapshots/test_switch.ambr +++ b/tests/components/nextdns/snapshots/test_switch.ambr @@ -1,1394 +1,4 @@ # serializer version: 1 -# name: test_switch[binary_sensor.fake_profile_device_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_connection_status', - 'unique_id': 'xyz12_this_device_nextdns_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device profile connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_profile_connection_status', - 'unique_id': 'xyz12_this_device_profile_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device profile connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[button.fake_profile_clear_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.fake_profile_clear_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clear logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'clear_logs', - 'unique_id': 'xyz12_clear_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[button.fake_profile_clear_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Clear logs', - }), - 'context': , - 'entity_id': 'button.fake_profile_clear_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: 
test_switch[sensor.fake_profile_dns_over_http_3_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTP/3 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh3_queries', - 'unique_id': 'xyz12_doh3_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTP/3 queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh3_queries_ratio', - 'unique_id': 'xyz12_doh3_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '13.0', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTPS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh_queries', - 'unique_id': 'xyz12_doh_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', - 'state_class': 
, - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTPS queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh_queries_ratio', - 'unique_id': 'xyz12_doh_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '17.4', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-QUIC queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doq_queries', - 'unique_id': 'xyz12_doq_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-QUIC queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-QUIC queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doq_queries_ratio', - 'unique_id': 'xyz12_doq_queries_ratio', - 
'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.7', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-TLS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dot_queries', - 'unique_id': 'xyz12_dot_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-TLS queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-TLS queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dot_queries_ratio', - 'unique_id': 'xyz12_dot_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26.1', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries', - 
'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'all_queries', - 'unique_id': 'xyz12_all_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries blocked', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'blocked_queries', - 'unique_id': 'xyz12_blocked_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries blocked', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries blocked ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'blocked_queries_ratio', - 'unique_id': 'xyz12_blocked_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries blocked ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.0', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_relayed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_relayed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 
'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries relayed', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relayed_queries', - 'unique_id': 'xyz12_relayed_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_relayed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries relayed', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_relayed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC not validated queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'not_validated_queries', - 'unique_id': 'xyz12_not_validated_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC not validated queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC validated queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'validated_queries', - 'unique_id': 'xyz12_validated_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC validated queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '75', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC validated queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'validated_queries_ratio', - 'unique_id': 'xyz12_validated_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '75.0', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_encrypted_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Encrypted queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'encrypted_queries', - 'unique_id': 'xyz12_encrypted_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Encrypted queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_encrypted_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Encrypted queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'encrypted_queries_ratio', - 'unique_id': 'xyz12_encrypted_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Encrypted queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60.0', - }) -# --- -# name: 
test_switch[sensor.fake_profile_ipv4_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv4_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv4 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv4_queries', - 'unique_id': 'xyz12_ipv4_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv4_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv4 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv4_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv6_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv6 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv6_queries', - 'unique_id': 'xyz12_ipv6_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv6 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv6_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv6 queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv6_queries_ratio', - 'unique_id': 'xyz12_ipv6_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv6 queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'10.0', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_tcp_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'TCP queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tcp_queries', - 'unique_id': 'xyz12_tcp_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile TCP queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_tcp_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'TCP queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tcp_queries_ratio', - 'unique_id': 'xyz12_tcp_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile TCP queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_udp_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'UDP queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'udp_queries', - 'unique_id': 'xyz12_udp_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile UDP queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_udp_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 
'state': '40', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_udp_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'UDP queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'udp_queries_ratio', - 'unique_id': 'xyz12_udp_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile UDP queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_udp_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '34.8', - }) -# --- -# name: test_switch[sensor.fake_profile_unencrypted_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_unencrypted_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Unencrypted queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'unencrypted_queries', - 'unique_id': 'xyz12_unencrypted_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_unencrypted_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Unencrypted queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_unencrypted_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- # name: test_switch[switch.fake_profile_ai_driven_threat_detection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nibe_heatpump/snapshots/test_climate.ambr b/tests/components/nibe_heatpump/snapshots/test_climate.ambr index fb3e2d1003b..2db9a813bff 100644 --- a/tests/components/nibe_heatpump/snapshots/test_climate.ambr +++ b/tests/components/nibe_heatpump/snapshots/test_climate.ambr @@ -97,12 +97,6 @@ 'state': 'unavailable', }) # --- -# name: test_active_accessory[Model.S320-s2-climate.climate_system_21][initial] - None -# --- -# name: test_active_accessory[Model.S320-s2-climate.climate_system_s1][initial] - None -# --- # name: test_basic[Model.F1155-s2-climate.climate_system_s2][cooling] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/ping/snapshots/test_binary_sensor.ambr b/tests/components/ping/snapshots/test_binary_sensor.ambr index 98ea9a8a847..24717938874 100644 --- a/tests/components/ping/snapshots/test_binary_sensor.ambr +++ b/tests/components/ping/snapshots/test_binary_sensor.ambr @@ -1,64 
+1,4 @@
 # serializer version: 1
-# name: test_sensor
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'binary_sensor',
-    'entity_category': None,
-    'entity_id': 'binary_sensor.10_10_10_10',
-    'has_entity_name': False,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': ,
-    'original_icon': None,
-    'original_name': '10.10.10.10',
-    'platform': 'ping',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': None,
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_sensor.1
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'connectivity',
-      'friendly_name': '10.10.10.10',
-      'round_trip_time_avg': 4.333,
-      'round_trip_time_max': 10,
-      'round_trip_time_mdev': '',
-      'round_trip_time_min': 1,
-    }),
-    'context': ,
-    'entity_id': 'binary_sensor.10_10_10_10',
-    'last_changed': ,
-    'last_updated': ,
-    'state': 'on',
-  })
-# ---
-# name: test_sensor.2
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'connectivity',
-      'friendly_name': '10.10.10.10',
-    }),
-    'context': ,
-    'entity_id': 'binary_sensor.10_10_10_10',
-    'last_changed': ,
-    'last_updated': ,
-    'state': 'off',
-  })
-# ---
 # name: test_setup_and_update
   EntityRegistryEntrySnapshot({
     'aliases': set({
diff --git a/tests/components/pyload/snapshots/test_switch.ambr b/tests/components/pyload/snapshots/test_switch.ambr
index b6465341b0a..0fcc45f8586 100644
--- a/tests/components/pyload/snapshots/test_switch.ambr
+++ b/tests/components/pyload/snapshots/test_switch.ambr
@@ -93,50 +93,3 @@
     'state': 'on',
   })
 # ---
-# name: test_state[switch.pyload_reconnect-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'switch',
-    'entity_category': None,
-    'entity_id': 'switch.pyload_reconnect',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': ,
-    'original_icon': None,
-    'original_name': 'Reconnect',
-    'platform': 'pyload',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': ,
-    'unique_id': 'XXXXXXXXXXXXXX_reconnect',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_state[switch.pyload_reconnect-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'switch',
-      'friendly_name': 'pyLoad Reconnect',
-    }),
-    'context': ,
-    'entity_id': 'switch.pyload_reconnect',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'off',
-  })
-# ---
diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr
index 42a3f4fb396..061b5bc1836 100644
--- a/tests/components/samsungtv/snapshots/test_init.ambr
+++ b/tests/components/samsungtv/snapshots/test_init.ambr
@@ -30,8 +30,10 @@
     }),
     'manufacturer': None,
     'model': '82GXARRS',
+    'model_id': None,
     'name': 'fake',
     'name_by_user': None,
+    'primary_config_entry': ,
     'serial_number': None,
     'suggested_area': None,
     'sw_version': None,
@@ -50,6 +52,10 @@
         'mac',
         'aa:bb:cc:dd:ee:ff',
       ),
+      tuple(
+        'mac',
+        'none',
+      ),
     }),
     'disabled_by': None,
     'entry_type': None,
@@ -66,8 +72,10 @@
     }),
     'manufacturer': None,
     'model': '82GXARRS',
+    'model_id': None,
     'name': 'fake',
     'name_by_user':
None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index d825e22d470..9ab0375df83 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -80,142 +80,6 @@ 'wind_speed_unit': , }) # --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', - 'humidity': 96, - 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 18.0, - 'templow': 15.0, - 'wind_bearing': 114, - 'wind_gust_speed': 32.76, - 'wind_speed': 10.08, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00', - 'humidity': 97, - 'precipitation': 10.6, - 'pressure': 984.0, - 'temperature': 15.0, - 'templow': 11.0, - 'wind_bearing': 183, - 'wind_gust_speed': 27.36, - 'wind_speed': 11.16, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00', - 'humidity': 95, - 'precipitation': 6.3, - 'pressure': 1001.0, - 'temperature': 12.0, - 'templow': 11.0, - 'wind_bearing': 166, - 'wind_gust_speed': 48.24, - 'wind_speed': 18.0, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00', - 'humidity': 75, - 'precipitation': 4.8, - 'pressure': 1011.0, - 'temperature': 14.0, - 'templow': 10.0, - 'wind_bearing': 174, - 'wind_gust_speed': 29.16, - 'wind_speed': 11.16, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00', - 'humidity': 69, - 'precipitation': 0.6, - 'pressure': 1015.0, - 'temperature': 18.0, - 'templow': 12.0, - 'wind_bearing': 197, - 'wind_gust_speed': 27.36, - 'wind_speed': 10.08, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-12T12:00:00', - 'humidity': 82, - 'precipitation': 0.0, - 'pressure': 1014.0, - 'temperature': 17.0, - 'templow': 12.0, - 'wind_bearing': 225, - 'wind_gust_speed': 28.08, - 'wind_speed': 8.64, - }), - dict({ - 'cloud_coverage': 75, - 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', - 'humidity': 59, - 'precipitation': 0.0, - 'pressure': 1013.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 234, - 'wind_gust_speed': 35.64, - 'wind_speed': 14.76, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00', - 'humidity': 56, - 'precipitation': 0.0, - 'pressure': 1015.0, - 'temperature': 21.0, - 'templow': 14.0, - 'wind_bearing': 216, - 'wind_gust_speed': 33.12, - 'wind_speed': 13.68, - }), - dict({ - 'cloud_coverage': 88, - 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00', - 'humidity': 64, - 'precipitation': 3.6, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 226, - 'wind_gust_speed': 33.12, - 'wind_speed': 13.68, - }), - dict({ - 'cloud_coverage': 75, - 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00', - 'humidity': 61, - 'precipitation': 2.4, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 233, - 'wind_gust_speed': 33.48, - 'wind_speed': 14.04, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.smhi_test': dict({ diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 
5fb369bc3b6..df154a5eb9b 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -745,1097 +745,6 @@ 'state': '545', }) # --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alternator loss', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'alternator_loss', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_alternator_loss', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Alternator loss', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Capacity', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'capacity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_capacity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Capacity', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '85.0', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_ac', - 
'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Consumption AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '54.87', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption day', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_day', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption day', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.005', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption month', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.758', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 
'original_icon': None, - 'original_name': 'Consumption total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '354.687', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption year', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.587', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption yesterday', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_yesterday', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption yesterday', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.007', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Efficiency', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'efficiency', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_efficiency', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Efficiency', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '98.0', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Installed peak power', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_total_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Installed peak power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '120', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last update', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_update', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_last_updated', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'solarlog_test_1_2_3 Last update', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power available', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_available', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_available', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power available', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '45.13', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power DC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_dc', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power DC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 
'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '102', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Usage', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'usage', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_usage', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Usage', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '54.9', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'solarlog_test_1_2_3 Voltage AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage DC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_dc', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'solarlog_test_1_2_3 Voltage DC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield day', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_day', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield day', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.004', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield month', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.515', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_total', - 
'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '56.513', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield year', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.023', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield yesterday', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_yesterday', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield yesterday', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.005', - }) -# --- # name: test_all_entities[sensor.solarlog_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/template/snapshots/test_select.ambr b/tests/components/template/snapshots/test_select.ambr index d4cabb2900f..e2142394cba 100644 --- a/tests/components/template/snapshots/test_select.ambr +++ b/tests/components/template/snapshots/test_select.ambr @@ -16,4 +16,4 @@ 'last_updated': , 'state': 'on', }) -# --- \ No newline at end of file +# --- diff --git a/tests/components/template/snapshots/test_weather.ambr b/tests/components/template/snapshots/test_weather.ambr index 9b0cf2b9471..bdda5b44e94 100644 
--- a/tests/components/template/snapshots/test_weather.ambr +++ b/tests/components/template/snapshots/test_weather.ambr @@ -1,87 +1,4 @@ # serializer version: 1 -# name: test_forecasts[config0-1-weather-forecast] - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].1 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].2 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].3 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 16.9, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast] - dict({ - 'forecast': list([ - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].1 - dict({ - 'forecast': list([ - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].3 - dict({ - 'forecast': list([ - ]), - }) -# --- # name: test_forecasts[config0-1-weather-get_forecasts] dict({ 'weather.forecast': dict({ @@ -120,51 +37,6 @@ }), }) # --- -# name: test_forecasts[config0-1-weather] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].3 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 16.9, - }), - ]), - }) -# --- # name: test_restore_weather_save_state dict({ 'last_apparent_temperature': None, @@ -180,92 +52,6 @@ 'last_wind_speed': None, }) # --- -# name: test_trigger_weather_services[config0-1-template-forecast] - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-forecast].1 - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-forecast].2 - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': 
'2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- # name: test_trigger_weather_services[config0-1-template-get_forecasts] dict({ 'weather.test': dict({ @@ -312,43 +98,3 @@ }), }) # --- -# name: test_trigger_weather_services[config0-1-template] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr index e4c4c3d96c2..c6a4860056a 100644 --- a/tests/components/tesla_fleet/snapshots/test_sensor.ambr +++ b/tests/components/tesla_fleet/snapshots/test_sensor.ambr @@ -437,67 +437,6 @@ 'state': '6.245', }) # --- -# name: test_sensors[sensor.energy_site_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'island_status', - 'unique_id': '123456-island_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.energy_site_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site None', - }), - 'context': , - 'entity_id': 'sensor.energy_site_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- -# name: test_sensors[sensor.energy_site_none-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site None', - }), - 'context': , - 'entity_id': 
'sensor.energy_site_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- # name: test_sensors[sensor.energy_site_percentage_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/snapshots/test_cover.ambr b/tests/components/tessie/snapshots/test_cover.ambr index 8c8c9a48c11..6338758afb7 100644 --- a/tests/components/tessie/snapshots/test_cover.ambr +++ b/tests/components/tessie/snapshots/test_cover.ambr @@ -95,39 +95,6 @@ 'state': 'closed', }) # --- -# name: test_covers[cover.test_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- # name: test_covers[cover.test_sunroof-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tomorrowio/snapshots/test_weather.ambr b/tests/components/tomorrowio/snapshots/test_weather.ambr index fe65925e4c7..6278b50b7f7 100644 --- a/tests/components/tomorrowio/snapshots/test_weather.ambr +++ b/tests/components/tomorrowio/snapshots/test_weather.ambr @@ -735,1126 +735,6 @@ }), ]) # --- -# name: test_v4_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, 
- 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }) -# --- -# name: test_v4_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - 
dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }) -# --- -# name: test_v4_forecast_service[forecast] - dict({ - 'weather.tomorrow_io_daily': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 
0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }), - }) -# --- -# name: test_v4_forecast_service[forecast].1 - dict({ - 'weather.tomorrow_io_daily': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': 
'2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 
'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }), - }) -# --- -# name: test_v4_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 
'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }) -# --- -# name: test_v4_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 
'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }) -# --- # name: test_v4_forecast_service[get_forecasts] dict({ 'weather.tomorrow_io_daily': dict({ diff --git a/tests/components/tplink_omada/snapshots/test_switch.ambr b/tests/components/tplink_omada/snapshots/test_switch.ambr index 282d2a4a6a5..a13d386e721 100644 --- a/tests/components/tplink_omada/snapshots/test_switch.ambr +++ b/tests/components/tplink_omada/snapshots/test_switch.ambr @@ -25,19 +25,6 @@ 'state': 'on', }) # --- -# name: test_gateway_disappear_disables_switches - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Router Port 4 Internet Connected', - 'icon': 
'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.test_router_port_4_internet_connected', - 'last_changed': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_gateway_port_change_disables_switch_entities StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -110,144 +97,6 @@ 'unit_of_measurement': None, }) # --- -# name: test_poe_switches.10 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 6 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_6_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.11 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_6_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 6 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000006_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.12 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 7 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_7_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.13 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_7_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 7 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000007_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.14 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 8 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_8_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.15 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_8_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 8 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000008_poe', - 'unit_of_measurement': None, - }) -# --- # name: 
test_poe_switches.2 StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -294,141 +143,3 @@ 'unit_of_measurement': None, }) # --- -# name: test_poe_switches.4 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 3 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_3_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.5 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_3_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 3 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000003_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.6 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 4 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.7 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 4 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000004_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.8 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 5 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_5_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.9 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_5_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 5 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000005_poe', - 'unit_of_measurement': None, - }) -# --- diff --git a/tests/components/tractive/snapshots/test_binary_sensor.ambr b/tests/components/tractive/snapshots/test_binary_sensor.ambr index c6d50fb0fbb..4b610e927d5 100644 --- 
a/tests/components/tractive/snapshots/test_binary_sensor.ambr +++ b/tests/components/tractive/snapshots/test_binary_sensor.ambr @@ -46,50 +46,3 @@ 'state': 'on', }) # --- -# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tracker battery charging', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_battery_charging', - 'unique_id': 'pet_id_123_battery_charging', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Test Pet Tracker battery charging', - }), - 'context': , - 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/tractive/snapshots/test_device_tracker.ambr b/tests/components/tractive/snapshots/test_device_tracker.ambr index 3a145a48b5a..4e7c5bfe173 100644 --- a/tests/components/tractive/snapshots/test_device_tracker.ambr +++ b/tests/components/tractive/snapshots/test_device_tracker.ambr @@ -50,54 +50,3 @@ 'state': 'not_home', }) # --- -# name: test_sensor[device_tracker.test_pet_tracker-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.test_pet_tracker', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker', - 'unique_id': 'pet_id_123', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[device_tracker.test_pet_tracker-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'battery_level': 88, - 'friendly_name': 'Test Pet Tracker', - 'gps_accuracy': 99, - 'latitude': 22.333, - 'longitude': 44.555, - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.test_pet_tracker', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_home', - }) -# --- diff --git a/tests/components/tractive/snapshots/test_switch.ambr b/tests/components/tractive/snapshots/test_switch.ambr index ea9ea9d9e48..08e0c984d0c 100644 --- a/tests/components/tractive/snapshots/test_switch.ambr +++ b/tests/components/tractive/snapshots/test_switch.ambr @@ -1,142 +1,4 @@ # serializer version: 1 -# name: test_sensor[switch.test_pet_live_tracking-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 
'entity_category': , - 'entity_id': 'switch.test_pet_live_tracking', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Live tracking', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'live_tracking', - 'unique_id': 'pet_id_123_live_tracking', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_live_tracking-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Live tracking', - }), - 'context': , - 'entity_id': 'switch.test_pet_live_tracking', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.test_pet_tracker_buzzer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_tracker_buzzer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker buzzer', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_buzzer', - 'unique_id': 'pet_id_123_buzzer', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_tracker_buzzer-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Tracker buzzer', - }), - 'context': , - 'entity_id': 'switch.test_pet_tracker_buzzer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.test_pet_tracker_led-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_tracker_led', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker LED', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_led', - 'unique_id': 'pet_id_123_led', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_tracker_led-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Tracker LED', - }), - 'context': , - 'entity_id': 'switch.test_pet_tracker_led', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_switch[switch.test_pet_live_tracking-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/unifi/snapshots/test_button.ambr b/tests/components/unifi/snapshots/test_button.ambr index 51a37620268..de305aee7eb 100644 --- a/tests/components/unifi/snapshots/test_button.ambr +++ b/tests/components/unifi/snapshots/test_button.ambr @@ -1,98 +1,4 @@ # serializer version: 1 -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_port_1_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 
'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.switch_port_1_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-00:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_port_1_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'switch Port 1 Power Cycle', - }), - 'context': , - 'entity_id': 'button.switch_port_1_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.switch_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_restart-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'switch Restart', - }), - 'context': , - 'entity_id': 'button.switch_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/unifi/snapshots/test_image.ambr b/tests/components/unifi/snapshots/test_image.ambr index e33ec678217..0922320ed4d 100644 --- a/tests/components/unifi/snapshots/test_image.ambr +++ b/tests/components/unifi/snapshots/test_image.ambr @@ -47,12 +47,6 @@ 'state': '2021-01-01T01:01:00+00:00', }) # --- -# name: test_wlan_qr_code - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5 -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].3 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].4 - '1234.0' -# --- # 
name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0] 'rx-00:00:00:00:00:01' # --- @@ -35,63 +20,6 @@ # name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].6 '1234.0' # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].3 - 'Wired client Uptime' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].6 - '2020-09-14T14:41:45+00:00' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].3 - 'Wired client RX' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].6 - '1234.0' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0] - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].1 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].2 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].3 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].4 - '5678.0' -# --- # name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0] 'tx-00:00:00:00:00:01' # --- @@ -113,27 +41,6 @@ # name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].6 '5678.0' # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].3 - 'Wired client TX' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].6 - '5678.0' -# --- # name: 
test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0] 'uptime-00:00:00:00:00:01' # --- @@ -155,27 +62,6 @@ # name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].6 '2020-09-14T14:41:45+00:00' # --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:02' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].3 - 'Wireless client RX' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].6 - '2345.0' -# --- # name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0] 'rx-00:00:00:00:00:01' # --- @@ -197,27 +83,6 @@ # name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].6 '2345.0' # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:02' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].3 - 'Wireless client TX' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].6 - '6789.0' -# --- # name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0] 'tx-00:00:00:00:00:01' # --- diff --git a/tests/components/uptime/snapshots/test_sensor.ambr b/tests/components/uptime/snapshots/test_sensor.ambr index fa0cb6bf8a9..561e4b83320 100644 --- a/tests/components/uptime/snapshots/test_sensor.ambr +++ b/tests/components/uptime/snapshots/test_sensor.ambr @@ -71,25 +71,3 @@ 'via_device_id': None, }) # --- -# name: test_uptime_sensor.3 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'is_new': False, - 'manufacturer': None, - 'model': None, - 'name': 'Uptime', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr index f7b635eb4fa..95be86664a2 100644 --- a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr +++ b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr @@ -53,122 +53,6 @@ 'state': '0.96139', }) # --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_atmospheric_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Atmospheric pressure', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'barometric_pressure', - 'unique_id': '24432_barometric_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'My Home Station Atmospheric pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_atmospheric_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '782.8', - }) -# --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_atmospheric_pressure_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Atmospheric pressure', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sea_level_pressure', - 'unique_id': '24432_sea_level_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'My Home Station Atmospheric pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_atmospheric_pressure_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1006.2', - }) -# --- # name: test_all_entities[sensor.my_home_station_dew_point-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -224,58 +108,6 @@ 'state': '-10.4', }) # --- -# name: test_all_entities[sensor.my_home_station_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_distance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Distance', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_last_distance', - 'unique_id': '24432_lightning_strike_last_distance', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_distance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'distance', - 'friendly_name': 'My Home Station Distance', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_distance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26', - }) -# --- # name: test_all_entities[sensor.my_home_station_feels_like-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -636,210 +468,6 @@ 'state': '2024-02-07T23:01:15+00:00', }) # --- -# name: test_all_entities[sensor.my_home_station_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 5, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'air_density', - 'unique_id': '24432_air_density', - 'unit_of_measurement': 'kg/m³', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - 'unit_of_measurement': 'kg/m³', - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.96139', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count', - 'unique_id': '24432_lightning_strike_count', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none_2', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count_last_1hr', - 'unique_id': '24432_lightning_strike_count_last_1hr', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count_last_3hr', - 'unique_id': '24432_lightning_strike_count_last_3hr', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- # name: test_all_entities[sensor.my_home_station_pressure_barometric-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1011,384 +639,6 @@ 'state': '10.5', }) # --- -# name: test_all_entities[sensor.my_home_station_temperature_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dew_point', - 
'unique_id': '24432_dew_point', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-10.4', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'feels_like', - 'unique_id': '24432_feels_like', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'heat_index', - 'unique_id': '24432_heat_index', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 
'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wind_chill', - 'unique_id': '24432_wind_chill', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wet_bulb_temperature', - 'unique_id': '24432_wet_bulb_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.1', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_7-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_7', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wet_bulb_globe_temperature', - 'unique_id': '24432_wet_bulb_globe_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_7-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_7', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.6', - }) -# --- -# name: test_all_entities[sensor.my_home_station_timestamp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_timestamp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_last_epoch', - 'unique_id': '24432_lightning_strike_last_epoch', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_timestamp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'timestamp', - 'friendly_name': 'My Home Station Timestamp', - }), - 'context': , - 'entity_id': 'sensor.my_home_station_timestamp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-07T23:01:15+00:00', - }) -# --- # name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/weatherkit/snapshots/test_weather.ambr b/tests/components/weatherkit/snapshots/test_weather.ambr index 1fbe5389e98..f6fa2f1514b 100644 --- a/tests/components/weatherkit/snapshots/test_weather.ambr +++ b/tests/components/weatherkit/snapshots/test_weather.ambr @@ -1,294 +1,4 @@ # serializer version: 1 -# name: test_daily_forecast - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - 
dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }) -# --- -# name: test_daily_forecast[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }), - }) -# --- -# name: test_daily_forecast[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, 
- 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }) -# --- # name: test_daily_forecast[get_forecasts] dict({ 'weather.home': dict({ @@ -387,11978 +97,6 @@ }), }) # --- -# name: test_hourly_forecast - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 
'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 
'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 
'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 
'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 
'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', 
- 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 
'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 
'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 
'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': 
'2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 
'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 
'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 
'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': 
'2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 
'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 
'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': 
'2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, 
- 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 
'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 
28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 
'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 
'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 
'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }) -# --- -# name: test_hourly_forecast[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 
'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', 
- 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 
21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 
'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 
0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 
'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-11T21:00:00Z',
-          'dew_point': 21.4,
-          'humidity': 88,
-          'precipitation': 0.0,
-          'precipitation_probability': 0.0,
-          'pressure': 1011.71,
-          'temperature': 23.4,
-          'uv_index': 0,
-          'wind_bearing': 138,
-          'wind_gust_speed': 16.66,
-          'wind_speed': 10.68,
-        }),
[... removed hourly forecast snapshot entries for '2023-09-11T22:00:00Z' through '2023-09-18T22:00:00Z' omitted here; each is a dict({...}) with the same per-hour keys: apparent_temperature, cloud_coverage, condition, datetime, dew_point, humidity, precipitation, precipitation_probability, pressure, temperature, uv_index, wind_bearing, wind_gust_speed, wind_speed ...]
-      ]),
-    }),
-  })
-# ---
-# name: test_hourly_forecast[get_forecast]
-  dict({
-    'forecast': list([
-      dict({
-        'apparent_temperature': 24.6,
-        'cloud_coverage': 79.0,
-        'condition': 'cloudy',
-        'datetime': '2023-09-08T14:00:00Z',
-        'dew_point': 21.5,
-        'humidity': 94,
-        'precipitation': 0.0,
-        'precipitation_probability': 0.0,
-        'pressure': 1009.24,
-        'temperature': 22.6,
-        'uv_index': 0,
-        'wind_bearing': 264,
-        'wind_gust_speed': 13.44,
-        'wind_speed': 6.62,
-      }),
[... removed hourly forecast snapshot entries for '2023-09-08T15:00:00Z' through '2023-09-09T08:00:00Z' omitted here; same per-hour keys as above ...]
-      dict({
-        'apparent_temperature': 27.9,
-        'cloud_coverage': 72.0,
-        'condition': 'cloudy',
-        'datetime': '2023-09-09T09:00:00Z',
-        'dew_point': 22.0,
-        'humidity': 82,
-        'precipitation': 0.0,
-        'precipitation_probability': 0.0,
-        'pressure': 1010.38,
-        'temperature': 25.4,
-        'uv_index': 0,
-        'wind_bearing': 
344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': 
'2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 
5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 
'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 
'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 
'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 
1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 
'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 
'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 
'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 
'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 
'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 
25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 
'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 
23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 
'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 
'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 
'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 
14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - 
dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 
'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 
'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 
'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }) -# --- # name: test_hourly_forecast[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index a0e0c7c5011..ee4c5533254 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -1,42 +1,4 @@ # serializer version: 1 -# name: test_hassio_addon_discovery - FlowResultSnapshot({ - 'context': dict({ - 'source': 'hassio', - 'unique_id': '1234', - }), - 'data': dict({ - 'host': 'mock-piper', - 'port': 10200, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': , - 'handler': 'wyoming', - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'mock-piper', - 'port': 10200, - }), - 'disabled_by': None, - 'domain': 'wyoming', - 'entry_id': , - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'hassio', - 'title': 'Piper', - 'unique_id': '1234', - 'version': 1, - }), - 'title': 'Piper', - 'type': , - 'version': 1, - }) -# --- # name: test_hassio_addon_discovery[info0] FlowResultSnapshot({ 'context': dict({ diff --git a/tests/components/wyoming/snapshots/test_tts.ambr b/tests/components/wyoming/snapshots/test_tts.ambr index 299bddb07e5..7ca5204e66c 100644 --- a/tests/components/wyoming/snapshots/test_tts.ambr +++ b/tests/components/wyoming/snapshots/test_tts.ambr @@ -32,28 +32,6 @@ }), ]) # --- -# name: test_get_tts_audio_mp3 - list([ - dict({ - 'data': dict({ - 'text': 'Hello world', - }), - 'payload': None, - 'type': 'synthesize', - }), - ]) -# --- -# name: test_get_tts_audio_raw - list([ - dict({ - 'data': dict({ - 'text': 'Hello world', - }), - 'payload': None, - 'type': 'synthesize', - }), - ]) -# --- # name: test_voice_speaker list([ dict({ diff --git a/tests/components/zeversolar/snapshots/test_sensor.ambr b/tests/components/zeversolar/snapshots/test_sensor.ambr index bee522133a5..aaef2c43d79 100644 --- a/tests/components/zeversolar/snapshots/test_sensor.ambr +++ b/tests/components/zeversolar/snapshots/test_sensor.ambr @@ -1,24 +1,4 @@ # serializer version: 1 -# name: test_sensors - ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'zeversolar-fake-host', - 'port': 10200, - }), - 'disabled_by': None, - 'domain': 'zeversolar', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }) -# --- # name: test_sensors[sensor.zeversolar_sensor_energy_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From ecf22e4c4f8d28d1198a1f38c40d320b9086e858 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 12 Aug 2024 13:29:38 +0200 Subject: [PATCH 0573/1635] Improve type hints in logbook tests (#123652) --- 
tests/components/logbook/test_init.py | 154 +++++++++++------- .../components/logbook/test_websocket_api.py | 6 - 2 files changed, 93 insertions(+), 67 deletions(-) diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 3a20aac2602..9dc96410166 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -65,12 +65,12 @@ async def hass_(recorder_mock: Recorder, hass: HomeAssistant) -> HomeAssistant: @pytest.fixture -async def set_utc(hass): +async def set_utc(hass: HomeAssistant) -> None: """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") -async def test_service_call_create_logbook_entry(hass_) -> None: +async def test_service_call_create_logbook_entry(hass_: HomeAssistant) -> None: """Test if service call create log book entry.""" calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY) @@ -123,8 +123,9 @@ async def test_service_call_create_logbook_entry(hass_) -> None: assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook" +@pytest.mark.usefixtures("recorder_mock") async def test_service_call_create_logbook_entry_invalid_entity_id( - recorder_mock: Recorder, hass: HomeAssistant + hass: HomeAssistant, ) -> None: """Test if service call create log book entry with an invalid entity id.""" await async_setup_component(hass, "logbook", {}) @@ -153,7 +154,9 @@ async def test_service_call_create_logbook_entry_invalid_entity_id( assert events[0][logbook.ATTR_MESSAGE] == "is triggered" -async def test_service_call_create_log_book_entry_no_message(hass_) -> None: +async def test_service_call_create_log_book_entry_no_message( + hass_: HomeAssistant, +) -> None: """Test if service call create log book entry without message.""" calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY) @@ -169,7 +172,7 @@ async def test_service_call_create_log_book_entry_no_message(hass_) -> None: async def test_filter_sensor( - hass_: ha.HomeAssistant, hass_client: ClientSessionGenerator + hass_: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test numeric sensors are filtered.""" @@ -217,7 +220,7 @@ async def test_filter_sensor( _assert_entry(entries[2], name="ble", entity_id=entity_id4, state="10") -async def test_home_assistant_start_stop_not_grouped(hass_) -> None: +async def test_home_assistant_start_stop_not_grouped(hass_: HomeAssistant) -> None: """Test if HA start and stop events are no longer grouped.""" await async_setup_component(hass_, "homeassistant", {}) await hass_.async_block_till_done() @@ -234,7 +237,7 @@ async def test_home_assistant_start_stop_not_grouped(hass_) -> None: assert_entry(entries[1], name="Home Assistant", message="started", domain=ha.DOMAIN) -async def test_home_assistant_start(hass_) -> None: +async def test_home_assistant_start(hass_: HomeAssistant) -> None: """Test if HA start is not filtered or converted into a restart.""" await async_setup_component(hass_, "homeassistant", {}) await hass_.async_block_till_done() @@ -254,7 +257,7 @@ async def test_home_assistant_start(hass_) -> None: assert_entry(entries[1], pointA, "bla", entity_id=entity_id) -def test_process_custom_logbook_entries(hass_) -> None: +def test_process_custom_logbook_entries(hass_: HomeAssistant) -> None: """Test if custom log book entries get added as an entry.""" name = "Nice name" message = "has a custom entry" @@ -339,8 +342,9 @@ def create_state_changed_event_from_old_new( return LazyEventPartialState(row, {}) +@pytest.mark.usefixtures("recorder_mock") async def 
test_logbook_view( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view.""" await async_setup_component(hass, "logbook", {}) @@ -350,8 +354,9 @@ async def test_logbook_view( assert response.status == HTTPStatus.OK +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_invalid_start_date_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with an invalid date time.""" await async_setup_component(hass, "logbook", {}) @@ -361,8 +366,9 @@ async def test_logbook_view_invalid_start_date_time( assert response.status == HTTPStatus.BAD_REQUEST +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_invalid_end_date_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view.""" await async_setup_component(hass, "logbook", {}) @@ -374,11 +380,10 @@ async def test_logbook_view_invalid_end_date_time( assert response.status == HTTPStatus.BAD_REQUEST +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_logbook_view_period_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test the logbook view with period and entity.""" await async_setup_component(hass, "logbook", {}) @@ -460,8 +465,9 @@ async def test_logbook_view_period_entity( assert response_json[0]["entity_id"] == entity_id_test +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_describe_event( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test teaching logbook about a new event.""" @@ -508,8 +514,9 @@ async def test_logbook_describe_event( assert event["domain"] == "test_domain" +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_described_event( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test exclusions of events that are described by another integration.""" name = "My Automation Rule" @@ -579,8 +586,9 @@ async def test_exclude_described_event( assert event["entity_id"] == "automation.included_rule" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_view_end_time_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity.""" await async_setup_component(hass, "logbook", {}) @@ -639,8 +647,9 @@ async def test_logbook_view_end_time_entity( assert response_json[0]["entity_id"] == entity_id_test +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_filter_with_automations( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -725,8 +734,9 @@ async def test_logbook_entity_filter_with_automations( assert json_dict[0]["entity_id"] == entity_id_second +@pytest.mark.usefixtures("recorder_mock") async def 
test_logbook_entity_no_longer_in_state_machine( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with an entity that hass been removed from the state machine.""" await async_setup_component(hass, "logbook", {}) @@ -764,11 +774,10 @@ async def test_logbook_entity_no_longer_in_state_machine( assert json_dict[0]["name"] == "area 001" +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_filter_continuous_sensor_values( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test remove continuous sensor events from logbook.""" await async_setup_component(hass, "logbook", {}) @@ -808,11 +817,10 @@ async def test_filter_continuous_sensor_values( assert response_json[1]["entity_id"] == entity_id_third +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_new_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test if events are excluded on first update.""" await asyncio.gather( @@ -850,11 +858,10 @@ async def test_exclude_new_entities( assert response_json[1]["message"] == "started" +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_removed_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test if events are excluded on last update.""" await asyncio.gather( @@ -899,11 +906,10 @@ async def test_exclude_removed_entities( assert response_json[2]["entity_id"] == entity_id2 +@pytest.mark.usefixtures("recorder_mock", "set_utc") async def test_exclude_attribute_changes( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator, - set_utc, ) -> None: """Test if events of attribute changes are filtered.""" await asyncio.gather( @@ -944,8 +950,9 @@ async def test_exclude_attribute_changes( assert response_json[2]["entity_id"] == "light.kitchen" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1097,8 +1104,9 @@ async def test_logbook_entity_context_id( assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_id_automation_script_started_manually( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook populates context_ids for scripts and automations started manually.""" await asyncio.gather( @@ -1189,8 +1197,9 @@ async def test_logbook_context_id_automation_script_started_manually( assert json_dict[4]["context_domain"] == "script" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_parent_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view links events via context parent_id.""" await asyncio.gather( @@ -1371,8 +1380,9 @@ async def test_logbook_entity_context_parent_id( assert json_dict[8]["context_user_id"] == 
"485cacf93ef84d25a99ced3126b921d2" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_from_template( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1461,8 +1471,9 @@ async def test_logbook_context_from_template( assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a single entity and .""" await async_setup_component(hass, "logbook", {}) @@ -1532,8 +1543,9 @@ async def test_logbook_( assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_many_entities_multiple_calls( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a many entities called multiple times.""" await async_setup_component(hass, "logbook", {}) @@ -1604,8 +1616,9 @@ async def test_logbook_many_entities_multiple_calls( assert len(json_dict) == 0 +@pytest.mark.usefixtures("recorder_mock") async def test_custom_log_entry_discoverable_via_( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if a custom log entry is later discoverable via .""" await async_setup_component(hass, "logbook", {}) @@ -1641,8 +1654,9 @@ async def test_custom_log_entry_discoverable_via_( assert json_dict[0]["entity_id"] == "switch.test_switch" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_multiple_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a multiple entities.""" await async_setup_component(hass, "logbook", {}) @@ -1767,8 +1781,9 @@ async def test_logbook_multiple_entities( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_invalid_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with requesting an invalid entity.""" await async_setup_component(hass, "logbook", {}) @@ -1787,8 +1802,9 @@ async def test_logbook_invalid_entity( assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR +@pytest.mark.usefixtures("recorder_mock") async def test_icon_and_state( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test to ensure state and custom icons are returned.""" await asyncio.gather( @@ -1832,8 +1848,9 @@ async def test_icon_and_state( assert response_json[2]["state"] == STATE_OFF +@pytest.mark.usefixtures("recorder_mock") async def test_fire_logbook_entries( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test many logbook 
entry calls.""" await async_setup_component(hass, "logbook", {}) @@ -1870,8 +1887,9 @@ async def test_fire_logbook_entries( assert len(response_json) == 11 +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is excluded in config.""" entity_id = "switch.bla" @@ -1906,8 +1924,9 @@ async def test_exclude_events_domain( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain_glob( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is excluded in config.""" entity_id = "switch.bla" @@ -1951,8 +1970,9 @@ async def test_exclude_events_domain_glob( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is included in config.""" entity_id = "sensor.bla" @@ -1993,8 +2013,9 @@ async def test_include_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is excluded in config.""" entity_id = "sensor.bla" @@ -2029,8 +2050,9 @@ async def test_exclude_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2073,8 +2095,9 @@ async def test_include_events_domain( _assert_entry(entries[2], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain_glob( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2132,8 +2155,9 @@ async def test_include_events_domain_glob( _assert_entry(entries[3], name="included", entity_id=entity_id3) +@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_no_globs( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2190,8 +2214,9 @@ async def test_include_exclude_events_no_globs( _assert_entry(entries[5], name="keep", entity_id=entity_id4, state="10") +@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_with_glob_filters( - recorder_mock: Recorder, hass: 
HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2256,8 +2281,9 @@ async def test_include_exclude_events_with_glob_filters( _assert_entry(entries[6], name="included", entity_id=entity_id5, state="30") +@pytest.mark.usefixtures("recorder_mock") async def test_empty_config( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can handle an empty entity filter.""" entity_id = "sensor.blu" @@ -2290,8 +2316,9 @@ async def test_empty_config( _assert_entry(entries[1], name="blu", entity_id=entity_id) +@pytest.mark.usefixtures("recorder_mock") async def test_context_filter( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can filter by context.""" assert await async_setup_component(hass, "logbook", {}) @@ -2367,8 +2394,9 @@ def _assert_entry( assert state == entry["state"] +@pytest.mark.usefixtures("recorder_mock") async def test_get_events( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events.""" now = dt_util.utcnow() @@ -2487,8 +2515,9 @@ async def test_get_events( assert isinstance(results[0]["when"], float) +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_future_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events with a future start time.""" await async_setup_component(hass, "logbook", {}) @@ -2512,8 +2541,9 @@ async def test_get_events_future_start_time( assert len(results) == 0 +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad start time.""" await async_setup_component(hass, "logbook", {}) @@ -2532,8 +2562,9 @@ async def test_get_events_bad_start_time( assert response["error"]["code"] == "invalid_start_time" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_end_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad end time.""" now = dt_util.utcnow() @@ -2554,8 +2585,9 @@ async def test_get_events_bad_end_time( assert response["error"]["code"] == "invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_invalid_filters( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events invalid filters.""" await async_setup_component(hass, "logbook", {}) @@ -2584,8 +2616,8 @@ async def test_get_events_invalid_filters( assert response["error"]["code"] == "invalid_format" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_device_ids( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, @@ -2725,8 +2757,9 @@ async def test_get_events_with_device_ids( assert 
isinstance(results[3]["when"], float) +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_select_entities_context_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -2860,8 +2893,9 @@ async def test_logbook_select_entities_context_id( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_context_state( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events with a context state.""" now = dt_util.utcnow() @@ -2925,9 +2959,8 @@ async def test_get_events_with_context_state( assert "context_event_type" not in results[3] -async def test_logbook_with_empty_config( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_logbook_with_empty_config(hass: HomeAssistant) -> None: """Test we handle a empty configuration.""" assert await async_setup_component( hass, @@ -2940,9 +2973,8 @@ async def test_logbook_with_empty_config( await hass.async_block_till_done() -async def test_logbook_with_non_iterable_entity_filter( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_logbook_with_non_iterable_entity_filter(hass: HomeAssistant) -> None: """Test we handle a non-iterable entity filter.""" assert await async_setup_component( hass, diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py index 9b1a6bb44cc..e5649564f94 100644 --- a/tests/components/logbook/test_websocket_api.py +++ b/tests/components/logbook/test_websocket_api.py @@ -48,12 +48,6 @@ from tests.components.recorder.common import ( from tests.typing import RecorderInstanceGenerator, WebSocketGenerator -@pytest.fixture -async def set_utc(hass): - """Set timezone to UTC.""" - await hass.config.async_set_time_zone("UTC") - - def listeners_without_writes(listeners: dict[str, int]) -> dict[str, int]: """Return listeners without final write listeners since we are not testing for these.""" return { From 81faf1b5820f23b8c11a6eab40a0dae54ca42ef2 Mon Sep 17 00:00:00 2001 From: "Barry vd. 
Heuvel" Date: Mon, 12 Aug 2024 14:01:12 +0200 Subject: [PATCH 0574/1635] Add homematicip_cloud service set cooling home (#121943) * [homematicip_cloud] Add service to set cooling mode * Create seperate test for cooling * Rename service to set_home_cooling_mode * Raise exception when accesspoint not found --- .../components/homematicip_cloud/icons.json | 3 +- .../homematicip_cloud/manifest.json | 2 +- .../components/homematicip_cloud/services.py | 40 +++++++++++- .../homematicip_cloud/services.yaml | 11 ++++ .../components/homematicip_cloud/strings.json | 19 ++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../homematicip_cloud/test_climate.py | 65 ++++++++++++++++--- .../components/homematicip_cloud/test_init.py | 6 +- 9 files changed, 133 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/homematicip_cloud/icons.json b/homeassistant/components/homematicip_cloud/icons.json index 2e9f6158c35..73c60ea8cdd 100644 --- a/homeassistant/components/homematicip_cloud/icons.json +++ b/homeassistant/components/homematicip_cloud/icons.json @@ -7,6 +7,7 @@ "deactivate_vacation": "mdi:compass-off", "set_active_climate_profile": "mdi:home-thermometer", "dump_hap_config": "mdi:database-export", - "reset_energy_counter": "mdi:reload" + "reset_energy_counter": "mdi:reload", + "set_home_cooling_mode": "mdi:snowflake" } } diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index 024cb2d9f21..b3e7eb9a72a 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_push", "loggers": ["homematicip"], "quality_scale": "silver", - "requirements": ["homematicip==1.1.1"] + "requirements": ["homematicip==1.1.2"] } diff --git a/homeassistant/components/homematicip_cloud/services.py b/homeassistant/components/homematicip_cloud/services.py index 37cda9e7683..4c04e4a858b 100644 --- a/homeassistant/components/homematicip_cloud/services.py +++ b/homeassistant/components/homematicip_cloud/services.py @@ -13,6 +13,7 @@ import voluptuous as vol from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import comp_entity_ids from homeassistant.helpers.service import ( @@ -31,6 +32,7 @@ ATTR_CONFIG_OUTPUT_FILE_PREFIX = "config_output_file_prefix" ATTR_CONFIG_OUTPUT_PATH = "config_output_path" ATTR_DURATION = "duration" ATTR_ENDTIME = "endtime" +ATTR_COOLING = "cooling" DEFAULT_CONFIG_FILE_PREFIX = "hmip-config" @@ -42,6 +44,7 @@ SERVICE_DEACTIVATE_VACATION = "deactivate_vacation" SERVICE_DUMP_HAP_CONFIG = "dump_hap_config" SERVICE_RESET_ENERGY_COUNTER = "reset_energy_counter" SERVICE_SET_ACTIVE_CLIMATE_PROFILE = "set_active_climate_profile" +SERVICE_SET_HOME_COOLING_MODE = "set_home_cooling_mode" HMIPC_SERVICES = [ SERVICE_ACTIVATE_ECO_MODE_WITH_DURATION, @@ -52,6 +55,7 @@ HMIPC_SERVICES = [ SERVICE_DUMP_HAP_CONFIG, SERVICE_RESET_ENERGY_COUNTER, SERVICE_SET_ACTIVE_CLIMATE_PROFILE, + SERVICE_SET_HOME_COOLING_MODE, ] SCHEMA_ACTIVATE_ECO_MODE_WITH_DURATION = vol.Schema( @@ -107,6 +111,13 @@ SCHEMA_RESET_ENERGY_COUNTER = vol.Schema( {vol.Required(ATTR_ENTITY_ID): comp_entity_ids} ) +SCHEMA_SET_HOME_COOLING_MODE = vol.Schema( + { + vol.Optional(ATTR_COOLING, default=True): cv.boolean, + 
vol.Optional(ATTR_ACCESSPOINT_ID): vol.All(str, vol.Length(min=24, max=24)), + } +) + async def async_setup_services(hass: HomeAssistant) -> None: """Set up the HomematicIP Cloud services.""" @@ -135,6 +146,8 @@ async def async_setup_services(hass: HomeAssistant) -> None: await _async_reset_energy_counter(hass, service) elif service_name == SERVICE_SET_ACTIVE_CLIMATE_PROFILE: await _set_active_climate_profile(hass, service) + elif service_name == SERVICE_SET_HOME_COOLING_MODE: + await _async_set_home_cooling_mode(hass, service) hass.services.async_register( domain=HMIPC_DOMAIN, @@ -194,6 +207,14 @@ async def async_setup_services(hass: HomeAssistant) -> None: schema=SCHEMA_RESET_ENERGY_COUNTER, ) + async_register_admin_service( + hass=hass, + domain=HMIPC_DOMAIN, + service=SERVICE_SET_HOME_COOLING_MODE, + service_func=async_call_hmipc_service, + schema=SCHEMA_SET_HOME_COOLING_MODE, + ) + async def async_unload_services(hass: HomeAssistant): """Unload HomematicIP Cloud services.""" @@ -324,10 +345,25 @@ async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall) await device.reset_energy_counter() +async def _async_set_home_cooling_mode(hass: HomeAssistant, service: ServiceCall): + """Service to set the cooling mode.""" + cooling = service.data[ATTR_COOLING] + + if hapid := service.data.get(ATTR_ACCESSPOINT_ID): + if home := _get_home(hass, hapid): + await home.set_cooling(cooling) + else: + for hap in hass.data[HMIPC_DOMAIN].values(): + await hap.home.set_cooling(cooling) + + def _get_home(hass: HomeAssistant, hapid: str) -> AsyncHome | None: """Return a HmIP home.""" if hap := hass.data[HMIPC_DOMAIN].get(hapid): return hap.home - _LOGGER.info("No matching access point found for access point id %s", hapid) - return None + raise ServiceValidationError( + translation_domain=HMIPC_DOMAIN, + translation_key="access_point_not_found", + translation_placeholders={"id": hapid}, + ) diff --git a/homeassistant/components/homematicip_cloud/services.yaml b/homeassistant/components/homematicip_cloud/services.yaml index 9e831339787..aced5c838a6 100644 --- a/homeassistant/components/homematicip_cloud/services.yaml +++ b/homeassistant/components/homematicip_cloud/services.yaml @@ -98,3 +98,14 @@ reset_energy_counter: example: switch.livingroom selector: text: + +set_home_cooling_mode: + fields: + cooling: + default: true + selector: + boolean: + accesspoint_id: + example: 3014xxxxxxxxxxxxxxxxxxxx + selector: + text: diff --git a/homeassistant/components/homematicip_cloud/strings.json b/homeassistant/components/homematicip_cloud/strings.json index 3795508d75d..a7c795c81f6 100644 --- a/homeassistant/components/homematicip_cloud/strings.json +++ b/homeassistant/components/homematicip_cloud/strings.json @@ -26,6 +26,11 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "exceptions": { + "access_point_not_found": { + "message": "No matching access point found for access point id {id}" + } + }, "services": { "activate_eco_mode_with_duration": { "name": "Activate eco mode with duration", @@ -134,6 +139,20 @@ "description": "The ID of the measuring entity. Use 'all' keyword to reset all energy counters." 
} } + }, + "set_home_cooling_mode": { + "name": "Set home cooling mode", + "description": "Set the heating/cooling mode for the entire home", + "fields": { + "accesspoint_id": { + "name": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::fields::accesspoint_id::name%]", + "description": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::fields::accesspoint_id::description%]" + }, + "cooling": { + "name": "Cooling", + "description": "Enable for cooling mode, disable for heating mode" + } + } } } } diff --git a/requirements_all.txt b/requirements_all.txt index d0bd63bd826..9898da0e19e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1105,7 +1105,7 @@ home-assistant-intents==2024.8.7 homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.1 +homematicip==1.1.2 # homeassistant.components.horizon horimote==0.4.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b6c15fc8942..572e70bc2b8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -931,7 +931,7 @@ home-assistant-intents==2024.8.7 homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.1 +homematicip==1.1.2 # homeassistant.components.remember_the_milk httplib2==0.20.4 diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index f175e2060df..2b4d023baf8 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ b/tests/components/homematicip_cloud/test_climate.py @@ -622,18 +622,67 @@ async def test_hmip_climate_services( assert len(home._connection.mock_calls) == 10 not_existing_hap_id = "5555F7110000000000000001" - await hass.services.async_call( - "homematicip_cloud", - "deactivate_vacation", - {"accesspoint_id": not_existing_hap_id}, - blocking=True, - ) - assert home.mock_calls[-1][0] == "deactivate_vacation" - assert home.mock_calls[-1][1] == () + with pytest.raises(ServiceValidationError) as excinfo: + await hass.services.async_call( + "homematicip_cloud", + "deactivate_vacation", + {"accesspoint_id": not_existing_hap_id}, + blocking=True, + ) + assert excinfo.value.translation_domain == HMIPC_DOMAIN + assert excinfo.value.translation_key == "access_point_not_found" # There is no further call on connection. 
assert len(home._connection.mock_calls) == 10 +async def test_hmip_set_home_cooling_mode( + hass: HomeAssistant, mock_hap_with_service +) -> None: + """Test HomematicipSetHomeCoolingMode.""" + + home = mock_hap_with_service.home + + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": HAPID, "cooling": False}, + blocking=True, + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] == (False,) + assert len(home._connection.mock_calls) == 1 + + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": HAPID, "cooling": True}, + blocking=True, + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] + assert len(home._connection.mock_calls) == 2 + + await hass.services.async_call( + "homematicip_cloud", "set_home_cooling_mode", blocking=True + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] + assert len(home._connection.mock_calls) == 3 + + not_existing_hap_id = "5555F7110000000000000001" + with pytest.raises(ServiceValidationError) as excinfo: + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": not_existing_hap_id, "cooling": True}, + blocking=True, + ) + assert excinfo.value.translation_domain == HMIPC_DOMAIN + assert excinfo.value.translation_key == "access_point_not_found" + # There is no further call on connection. + assert len(home._connection.mock_calls) == 3 + + async def test_hmip_heating_group_services( hass: HomeAssistant, default_mock_hap_factory ) -> None: diff --git a/tests/components/homematicip_cloud/test_init.py b/tests/components/homematicip_cloud/test_init.py index 9303a755e89..ad1c8140aea 100644 --- a/tests/components/homematicip_cloud/test_init.py +++ b/tests/components/homematicip_cloud/test_init.py @@ -199,7 +199,7 @@ async def test_setup_services_and_unload_services(hass: HomeAssistant) -> None: # Check services are created hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 1 @@ -232,7 +232,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: assert await async_setup_component(hass, HMIPC_DOMAIN, {}) hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 2 @@ -241,7 +241,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: # services still exists hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 # unload the second AP await hass.config_entries.async_unload(config_entries[1].entry_id) From 840d9a0923c086bc704c8af654761a8a030656bb Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:30:35 +0200 Subject: [PATCH 0575/1635] Remove unnecessary assignment of Template.hass from arest (#123662) --- homeassistant/components/arest/sensor.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/arest/sensor.py b/homeassistant/components/arest/sensor.py index ab502fa275a..8c68c13018b 100644 --- a/homeassistant/components/arest/sensor.py +++ b/homeassistant/components/arest/sensor.py @@ -87,8 +87,6 @@ def setup_platform( if 
value_template is None: return lambda value: value - value_template.hass = hass - def _render(value): try: return value_template.async_render({"value": value}, parse_result=False) From ecc308c32622098d35ef420b9e35c001c277f67e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:30:48 +0200 Subject: [PATCH 0576/1635] Remove unnecessary assignment of Template.hass from command_line (#123664) --- homeassistant/components/command_line/binary_sensor.py | 4 +--- homeassistant/components/command_line/cover.py | 5 +---- homeassistant/components/command_line/sensor.py | 4 +--- homeassistant/components/command_line/switch.py | 5 +---- 4 files changed, 4 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/command_line/binary_sensor.py b/homeassistant/components/command_line/binary_sensor.py index 2ff17e86efd..20deddcf14e 100644 --- a/homeassistant/components/command_line/binary_sensor.py +++ b/homeassistant/components/command_line/binary_sensor.py @@ -51,9 +51,7 @@ async def async_setup_platform( scan_interval: timedelta = binary_sensor_config.get( CONF_SCAN_INTERVAL, SCAN_INTERVAL ) - - if value_template := binary_sensor_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass + value_template: Template | None = binary_sensor_config.get(CONF_VALUE_TEMPLATE) data = CommandSensorData(hass, command, command_timeout) diff --git a/homeassistant/components/command_line/cover.py b/homeassistant/components/command_line/cover.py index 2c6ec78b689..344fddabc3b 100644 --- a/homeassistant/components/command_line/cover.py +++ b/homeassistant/components/command_line/cover.py @@ -45,9 +45,6 @@ async def async_setup_platform( } for device_name, cover_config in entities.items(): - if value_template := cover_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass - trigger_entity_config = { CONF_NAME: Template(cover_config.get(CONF_NAME, device_name), hass), **{k: v for k, v in cover_config.items() if k in TRIGGER_ENTITY_OPTIONS}, @@ -60,7 +57,7 @@ async def async_setup_platform( cover_config[CONF_COMMAND_CLOSE], cover_config[CONF_COMMAND_STOP], cover_config.get(CONF_COMMAND_STATE), - value_template, + cover_config.get(CONF_VALUE_TEMPLATE), cover_config[CONF_COMMAND_TIMEOUT], cover_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL), ) diff --git a/homeassistant/components/command_line/sensor.py b/homeassistant/components/command_line/sensor.py index 14edbb55ed0..786afc8f3a7 100644 --- a/homeassistant/components/command_line/sensor.py +++ b/homeassistant/components/command_line/sensor.py @@ -57,11 +57,9 @@ async def async_setup_platform( json_attributes: list[str] | None = sensor_config.get(CONF_JSON_ATTRIBUTES) json_attributes_path: str | None = sensor_config.get(CONF_JSON_ATTRIBUTES_PATH) scan_interval: timedelta = sensor_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) + value_template: Template | None = sensor_config.get(CONF_VALUE_TEMPLATE) data = CommandSensorData(hass, command, command_timeout) - if value_template := sensor_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass - trigger_entity_config = { CONF_NAME: Template(sensor_config[CONF_NAME], hass), **{k: v for k, v in sensor_config.items() if k in TRIGGER_ENTITY_OPTIONS}, diff --git a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index a3c3d0b3e9c..b02fb6dcd4a 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -46,9 +46,6 @@ async def async_setup_platform( } for object_id, switch_config in 
entities.items(): - if value_template := switch_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass - trigger_entity_config = { CONF_NAME: Template(switch_config.get(CONF_NAME, object_id), hass), **{k: v for k, v in switch_config.items() if k in TRIGGER_ENTITY_OPTIONS}, @@ -61,7 +58,7 @@ async def async_setup_platform( switch_config[CONF_COMMAND_ON], switch_config[CONF_COMMAND_OFF], switch_config.get(CONF_COMMAND_STATE), - value_template, + switch_config.get(CONF_VALUE_TEMPLATE), switch_config[CONF_COMMAND_TIMEOUT], switch_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL), ) From 32d2218ff0d6eaa7a7cd4eedbf0bc8723379da0f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:31:11 +0200 Subject: [PATCH 0577/1635] Remove unnecessary assignment of Template.hass from doods (#123666) --- homeassistant/components/doods/image_processing.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/doods/image_processing.py b/homeassistant/components/doods/image_processing.py index 7ffb6655bb6..acd9d7fe71b 100644 --- a/homeassistant/components/doods/image_processing.py +++ b/homeassistant/components/doods/image_processing.py @@ -207,8 +207,6 @@ class Doods(ImageProcessingEntity): ] self._covers = area_config[CONF_COVERS] - template.attach(hass, self._file_out) - self._dconfig = dconfig self._matches = {} self._total_matches = 0 From 64ceb11f8c7f623bce388fb6754f4fec7f12c598 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Mon, 12 Aug 2024 14:44:52 +0200 Subject: [PATCH 0578/1635] Remove libcst constraint (#123661) --- homeassistant/package_constraints.txt | 5 ----- script/gen_requirements_all.py | 5 ----- 2 files changed, 10 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e3f80f1b82d..6c93e6b5f1c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -79,11 +79,6 @@ grpcio==1.59.0 grpcio-status==1.59.0 grpcio-reflection==1.59.0 -# libcst >=0.4.0 requires a newer Rust than we currently have available, -# thus our wheels builds fail. This pins it to the last working version, -# which at this point satisfies our needs. -libcst==0.3.23 - # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index aa92cacb237..f9bd379b7ce 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -101,11 +101,6 @@ grpcio==1.59.0 grpcio-status==1.59.0 grpcio-reflection==1.59.0 -# libcst >=0.4.0 requires a newer Rust than we currently have available, -# thus our wheels builds fail. This pins it to the last working version, -# which at this point satisfies our needs. 
-libcst==0.3.23 - # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 From 27d76f59531b8734378cf4d22ce2bc78217ed0a0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:52:34 +0200 Subject: [PATCH 0579/1635] Remove unnecessary assignment of Template.hass from history_stats (#123671) --- homeassistant/components/history_stats/sensor.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/homeassistant/components/history_stats/sensor.py b/homeassistant/components/history_stats/sensor.py index 99e953ff9dd..4558da8722c 100644 --- a/homeassistant/components/history_stats/sensor.py +++ b/homeassistant/components/history_stats/sensor.py @@ -103,10 +103,6 @@ async def async_setup_platform( name: str = config[CONF_NAME] unique_id: str | None = config.get(CONF_UNIQUE_ID) - for template in (start, end): - if template is not None: - template.hass = hass - history_stats = HistoryStats(hass, entity_id, entity_states, start, end, duration) coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, name) await coordinator.async_refresh() From b9010e96a0ca41285002d7e22666dc00d1270b15 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:52:51 +0200 Subject: [PATCH 0580/1635] Remove unnecessary assignment of Template.hass from emoncms (#123668) --- homeassistant/components/emoncms/sensor.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index c299c5a1b9f..3c448391974 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -87,9 +87,6 @@ async def async_setup_platform( sensor_names = config.get(CONF_SENSOR_NAMES) scan_interval = config.get(CONF_SCAN_INTERVAL, timedelta(seconds=30)) - if value_template is not None: - value_template.hass = hass - emoncms_client = EmoncmsClient(url, apikey, session=async_get_clientsession(hass)) coordinator = EmoncmsCoordinator(hass, emoncms_client, scan_interval) await coordinator.async_refresh() From 77e9acd864c1f9b9315dd0e5768205d258ca0dbf Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:53:22 +0200 Subject: [PATCH 0581/1635] Remove unnecessary assignment of Template.hass from emulated_kasa (#123670) --- homeassistant/components/emulated_kasa/__init__.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/emulated_kasa/__init__.py b/homeassistant/components/emulated_kasa/__init__.py index d5fc8af1aa4..408d8c4eff8 100644 --- a/homeassistant/components/emulated_kasa/__init__.py +++ b/homeassistant/components/emulated_kasa/__init__.py @@ -95,8 +95,6 @@ async def validate_configs(hass, entity_configs): power_val = entity_config[CONF_POWER] if isinstance(power_val, str) and is_template_string(power_val): entity_config[CONF_POWER] = Template(power_val, hass) - elif isinstance(power_val, Template): - entity_config[CONF_POWER].hass = hass elif CONF_POWER_ENTITY in entity_config: power_val = entity_config[CONF_POWER_ENTITY] if hass.states.get(power_val) is None: From 33a22ae2080fbfa0a3363585bc665f8f8c57cdc2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:54:03 +0200 Subject: [PATCH 0582/1635] Remove unnecessary assignment of Template.hass from triggers (#123672) --- homeassistant/components/homeassistant/triggers/event.py | 3 --- .../components/homeassistant/triggers/numeric_state.py | 4 ---- homeassistant/components/homeassistant/triggers/state.py | 1 - 
homeassistant/components/hp_ilo/sensor.py | 3 --- 4 files changed, 11 deletions(-) diff --git a/homeassistant/components/homeassistant/triggers/event.py b/homeassistant/components/homeassistant/triggers/event.py index 98363de1f8d..985e4819b24 100644 --- a/homeassistant/components/homeassistant/triggers/event.py +++ b/homeassistant/components/homeassistant/triggers/event.py @@ -60,7 +60,6 @@ async def async_attach_trigger( trigger_data = trigger_info["trigger_data"] variables = trigger_info["variables"] - template.attach(hass, config[CONF_EVENT_TYPE]) event_types = template.render_complex( config[CONF_EVENT_TYPE], variables, limited=True ) @@ -72,7 +71,6 @@ async def async_attach_trigger( event_data_items: ItemsView | None = None if CONF_EVENT_DATA in config: # Render the schema input - template.attach(hass, config[CONF_EVENT_DATA]) event_data = {} event_data.update( template.render_complex(config[CONF_EVENT_DATA], variables, limited=True) @@ -94,7 +92,6 @@ async def async_attach_trigger( event_context_items: ItemsView | None = None if CONF_EVENT_CONTEXT in config: # Render the schema input - template.attach(hass, config[CONF_EVENT_CONTEXT]) event_context = {} event_context.update( template.render_complex(config[CONF_EVENT_CONTEXT], variables, limited=True) diff --git a/homeassistant/components/homeassistant/triggers/numeric_state.py b/homeassistant/components/homeassistant/triggers/numeric_state.py index bc2c95675ad..dac250792ea 100644 --- a/homeassistant/components/homeassistant/triggers/numeric_state.py +++ b/homeassistant/components/homeassistant/triggers/numeric_state.py @@ -108,7 +108,6 @@ async def async_attach_trigger( below = config.get(CONF_BELOW) above = config.get(CONF_ABOVE) time_delta = config.get(CONF_FOR) - template.attach(hass, time_delta) value_template = config.get(CONF_VALUE_TEMPLATE) unsub_track_same: dict[str, Callable[[], None]] = {} armed_entities: set[str] = set() @@ -119,9 +118,6 @@ async def async_attach_trigger( trigger_data = trigger_info["trigger_data"] _variables = trigger_info["variables"] or {} - if value_template is not None: - value_template.hass = hass - def variables(entity_id: str) -> dict[str, Any]: """Return a dict with trigger variables.""" trigger_info = { diff --git a/homeassistant/components/homeassistant/triggers/state.py b/homeassistant/components/homeassistant/triggers/state.py index e0cbbf09610..53372cb479e 100644 --- a/homeassistant/components/homeassistant/triggers/state.py +++ b/homeassistant/components/homeassistant/triggers/state.py @@ -117,7 +117,6 @@ async def async_attach_trigger( match_to_state = process_state_match(MATCH_ALL) time_delta = config.get(CONF_FOR) - template.attach(hass, time_delta) # If neither CONF_FROM or CONF_TO are specified, # fire on all changes to the state or an attribute match_all = all( diff --git a/homeassistant/components/hp_ilo/sensor.py b/homeassistant/components/hp_ilo/sensor.py index 85908a45af4..0eeb443cf2d 100644 --- a/homeassistant/components/hp_ilo/sensor.py +++ b/homeassistant/components/hp_ilo/sensor.py @@ -131,9 +131,6 @@ class HpIloSensor(SensorEntity): self._unit_of_measurement = unit_of_measurement self._ilo_function = SENSOR_TYPES[sensor_type][1] self.hp_ilo_data = hp_ilo_data - - if sensor_value_template is not None: - sensor_value_template.hass = hass self._sensor_value_template = sensor_value_template self._state = None From 82bedb1ab5ef85aed935313eb3c6bb09322376c0 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:54:31 +0200 Subject: [PATCH 0583/1635] Remove 
unnecessary assignment of Template.hass from dweet (#123667) --- homeassistant/components/dweet/sensor.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/dweet/sensor.py b/homeassistant/components/dweet/sensor.py index 01e0567ac8d..10109189eb0 100644 --- a/homeassistant/components/dweet/sensor.py +++ b/homeassistant/components/dweet/sensor.py @@ -51,8 +51,6 @@ def setup_platform( device = config.get(CONF_DEVICE) value_template = config.get(CONF_VALUE_TEMPLATE) unit = config.get(CONF_UNIT_OF_MEASUREMENT) - if value_template is not None: - value_template.hass = hass try: content = json.dumps(dweepy.get_latest_dweet_for(device)[0]["content"]) @@ -60,7 +58,7 @@ def setup_platform( _LOGGER.error("Device/thing %s could not be found", device) return - if value_template.render_with_possible_json_value(content) == "": + if value_template and value_template.render_with_possible_json_value(content) == "": _LOGGER.error("%s was not found", value_template) return From f6e82ae0ba18c9cc561df45b988305f95d1a93ca Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:54:41 +0200 Subject: [PATCH 0584/1635] Remove unnecessary assignment of Template.hass from camera (#123663) --- homeassistant/components/camera/__init__.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index cbcf08cb7c2..ff11bc04da7 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -992,7 +992,6 @@ async def async_handle_snapshot_service( """Handle snapshot services calls.""" hass = camera.hass filename: Template = service_call.data[ATTR_FILENAME] - filename.hass = hass snapshot_file = filename.async_render(variables={ATTR_ENTITY_ID: camera}) @@ -1069,9 +1068,7 @@ async def async_handle_record_service( if not stream: raise HomeAssistantError(f"{camera.entity_id} does not support record service") - hass = camera.hass filename = service_call.data[CONF_FILENAME] - filename.hass = hass video_path = filename.async_render(variables={ATTR_ENTITY_ID: camera}) await stream.async_record( From b20623447e069979168af86f008c8a5cab0980bc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 12 Aug 2024 07:54:57 -0500 Subject: [PATCH 0585/1635] Ensure HomeKit connection is kept alive for devices that timeout too quickly (#123601) --- .../homekit_controller/connection.py | 36 ++++++++++++++----- .../homekit_controller/test_connection.py | 15 ++++---- 2 files changed, 36 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/homekit_controller/connection.py b/homeassistant/components/homekit_controller/connection.py index 0d21ff9ba1d..4da907daf3e 100644 --- a/homeassistant/components/homekit_controller/connection.py +++ b/homeassistant/components/homekit_controller/connection.py @@ -845,21 +845,41 @@ class HKDevice: async def async_update(self, now: datetime | None = None) -> None: """Poll state of all entities attached to this bridge/accessory.""" + to_poll = self.pollable_characteristics + accessories = self.entity_map.accessories + if ( - len(self.entity_map.accessories) == 1 + len(accessories) == 1 and self.available - and not (self.pollable_characteristics - self.watchable_characteristics) + and not (to_poll - self.watchable_characteristics) and self.pairing.is_available and await self.pairing.controller.async_reachable( self.unique_id, timeout=5.0 ) ): # If its a single accessory and all chars are watchable, - # we don't need to poll. - _LOGGER.debug("Accessory is reachable, skip polling: %s", self.unique_id) - return + # only poll the firmware version to keep the connection alive + # https://github.com/home-assistant/core/issues/123412 + # + # Firmware revision is used here since iOS does this to keep camera + # connections alive, and the goal is to not regress + # https://github.com/home-assistant/core/issues/116143 + # by polling characteristics that are not normally polled frequently + # and may not be tested by the device vendor. + # + _LOGGER.debug( + "Accessory is reachable, limiting poll to firmware version: %s", + self.unique_id, + ) + first_accessory = accessories[0] + accessory_info = first_accessory.services.first( + service_type=ServicesTypes.ACCESSORY_INFORMATION + ) + assert accessory_info is not None + firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid + to_poll = {(first_accessory.aid, firmware_iid)} - if not self.pollable_characteristics: + if not to_poll: self.async_update_available_state() _LOGGER.debug( "HomeKit connection not polling any characteristics: %s", self.unique_id @@ -892,9 +912,7 @@ class HKDevice: _LOGGER.debug("Starting HomeKit device update: %s", self.unique_id) try: - new_values_dict = await self.get_characteristics( - self.pollable_characteristics - ) + new_values_dict = await self.get_characteristics(to_poll) except AccessoryNotFoundError: # Not only did the connection fail, but also the accessory is not # visible on the network. 
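The connection.py hunk above changes the polling strategy for single-accessory pairings whose characteristics are all watchable: rather than skipping the poll entirely, the coordinator keeps polling exactly one (accessory id, characteristic instance id) pair, the firmware-revision characteristic, purely as a keep-alive. Below is a minimal, self-contained sketch of that selection step. The dataclasses and string constants are illustrative stand-ins (not aiohomekit's real Accessory/Service/Characteristic classes or the ServicesTypes/CharacteristicsTypes constants the patch uses), and the ids are chosen to mirror the {(1, 7)} pair asserted in the test diff that follows.

from dataclasses import dataclass, field

# Illustrative stand-ins for aiohomekit's ServicesTypes / CharacteristicsTypes.
ACCESSORY_INFORMATION = "accessory-information"
FIRMWARE_REVISION = "firmware.revision"


@dataclass
class Characteristic:
    type: str
    iid: int  # characteristic instance id within the accessory


@dataclass
class Service:
    type: str
    characteristics: list[Characteristic] = field(default_factory=list)

    def __getitem__(self, char_type: str) -> Characteristic:
        # Look up a characteristic of this service by type.
        return next(c for c in self.characteristics if c.type == char_type)


@dataclass
class Accessory:
    aid: int  # accessory id on the pairing
    services: list[Service] = field(default_factory=list)

    def first_service(self, service_type: str) -> Service:
        # Return the first service of the given type.
        return next(s for s in self.services if s.type == service_type)


def keep_alive_poll_set(accessory: Accessory) -> set[tuple[int, int]]:
    """Return the single (aid, iid) pair polled only to keep the connection alive."""
    info = accessory.first_service(ACCESSORY_INFORMATION)
    return {(accessory.aid, info[FIRMWARE_REVISION].iid)}


# Mirrors the {(1, 7)} expectation asserted in the test changes below.
camera = Accessory(
    aid=1,
    services=[
        Service(ACCESSORY_INFORMATION, [Characteristic(FIRMWARE_REVISION, iid=7)])
    ],
)
assert keep_alive_poll_set(camera) == {(1, 7)}

Polling one benign characteristic that vendors already expect to be read (per the comment in the hunk, iOS does the same with firmware revision to keep camera connections alive) keeps the session active without repeatedly reading characteristics the device firmware may not have been tested against.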
diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 60ef0b1c547..8d3cc02fab9 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -344,10 +344,10 @@ async def test_thread_provision_migration_failed(hass: HomeAssistant) -> None: assert config_entry.data["Connection"] == "BLE" -async def test_skip_polling_all_watchable_accessory_mode( +async def test_poll_firmware_version_only_all_watchable_accessory_mode( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: - """Test that we skip polling if available and all chars are watchable accessory mode.""" + """Test that we only poll firmware if available and all chars are watchable accessory mode.""" def _create_accessory(accessory): service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") @@ -370,7 +370,10 @@ async def test_skip_polling_all_watchable_accessory_mode( # Initial state is that the light is off state = await helper.poll_and_get_state() assert state.state == STATE_OFF - assert mock_get_characteristics.call_count == 0 + assert mock_get_characteristics.call_count == 2 + # Verify only firmware version is polled + assert mock_get_characteristics.call_args_list[0][0][0] == {(1, 7)} + assert mock_get_characteristics.call_args_list[1][0][0] == {(1, 7)} # Test device goes offline helper.pairing.available = False @@ -382,16 +385,16 @@ async def test_skip_polling_all_watchable_accessory_mode( state = await helper.poll_and_get_state() assert state.state == STATE_UNAVAILABLE # Tries twice before declaring unavailable - assert mock_get_characteristics.call_count == 2 + assert mock_get_characteristics.call_count == 4 # Test device comes back online helper.pairing.available = True state = await helper.poll_and_get_state() assert state.state == STATE_OFF - assert mock_get_characteristics.call_count == 3 + assert mock_get_characteristics.call_count == 6 # Next poll should not happen because its a single # accessory, available, and all chars are watchable state = await helper.poll_and_get_state() assert state.state == STATE_OFF - assert mock_get_characteristics.call_count == 3 + assert mock_get_characteristics.call_count == 8 From 5b5b9ac4ef49c3d118ca46c74e8cc2a4c761bc49 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 14:56:29 +0200 Subject: [PATCH 0586/1635] Remove unnecessary assignment of Template.hass from influxdb (#123673) --- homeassistant/components/influxdb/sensor.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/influxdb/sensor.py b/homeassistant/components/influxdb/sensor.py index 03b6acb204c..a1a9e618cb8 100644 --- a/homeassistant/components/influxdb/sensor.py +++ b/homeassistant/components/influxdb/sensor.py @@ -205,28 +205,24 @@ class InfluxSensor(SensorEntity): self._attr_unique_id = query.get(CONF_UNIQUE_ID) if query[CONF_LANGUAGE] == LANGUAGE_FLUX: - query_clause = query.get(CONF_QUERY) - query_clause.hass = hass self.data = InfluxFluxSensorData( influx, query.get(CONF_BUCKET), query.get(CONF_RANGE_START), query.get(CONF_RANGE_STOP), - query_clause, + query.get(CONF_QUERY), query.get(CONF_IMPORTS), query.get(CONF_GROUP_FUNCTION), ) else: - where_clause = query.get(CONF_WHERE) - where_clause.hass = hass self.data = InfluxQLSensorData( influx, query.get(CONF_DB_NAME), query.get(CONF_GROUP_FUNCTION), query.get(CONF_FIELD), query.get(CONF_MEASUREMENT_NAME), - where_clause, + 
query.get(CONF_WHERE), ) @property From 11fd1086afd797286bfe9c26a28250a43f99374e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:39:18 +0200 Subject: [PATCH 0587/1635] Remove unnecessary assignment of Template.hass from logbook (#123677) --- homeassistant/components/logbook/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/logbook/__init__.py b/homeassistant/components/logbook/__init__.py index d520cafb80e..239a52ff7a1 100644 --- a/homeassistant/components/logbook/__init__.py +++ b/homeassistant/components/logbook/__init__.py @@ -112,7 +112,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # away so we use the "logbook" domain domain = DOMAIN - message.hass = hass message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id, service.context) From 78b6cdb2012e91dcb5580588140cbac3f9ac8319 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:39:34 +0200 Subject: [PATCH 0588/1635] Remove unnecessary assignment of Template.hass from logi_circle (#123678) --- homeassistant/components/logi_circle/camera.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/logi_circle/camera.py b/homeassistant/components/logi_circle/camera.py index ad31713d734..04f12586679 100644 --- a/homeassistant/components/logi_circle/camera.py +++ b/homeassistant/components/logi_circle/camera.py @@ -166,7 +166,6 @@ class LogiCam(Camera): async def download_livestream(self, filename, duration): """Download a recording from the camera's livestream.""" # Render filename from template. - filename.hass = self.hass stream_file = filename.async_render(variables={ATTR_ENTITY_ID: self.entity_id}) # Respect configured allowed paths. @@ -183,7 +182,6 @@ class LogiCam(Camera): async def livestream_snapshot(self, filename): """Download a still frame from the camera's livestream.""" # Render filename from template. 
- filename.hass = self.hass snapshot_file = filename.async_render( variables={ATTR_ENTITY_ID: self.entity_id} ) From 4639e7d5c7693a3a48b8f784bb2ba4acac9ad716 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:49:37 +0200 Subject: [PATCH 0589/1635] Remove unnecessary assignment of Template.hass from tcp (#123691) --- homeassistant/components/tcp/common.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/tcp/common.py b/homeassistant/components/tcp/common.py index d6a7fb28f11..263fc416026 100644 --- a/homeassistant/components/tcp/common.py +++ b/homeassistant/components/tcp/common.py @@ -25,7 +25,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import TemplateError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType from .const import ( @@ -63,10 +62,6 @@ class TcpEntity(Entity): def __init__(self, hass: HomeAssistant, config: ConfigType) -> None: """Set all the config values if they exist and get initial state.""" - value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - value_template.hass = hass - self._hass = hass self._config: TcpSensorConfig = { CONF_NAME: config[CONF_NAME], @@ -75,7 +70,7 @@ class TcpEntity(Entity): CONF_TIMEOUT: config[CONF_TIMEOUT], CONF_PAYLOAD: config[CONF_PAYLOAD], CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT), - CONF_VALUE_TEMPLATE: value_template, + CONF_VALUE_TEMPLATE: config.get(CONF_VALUE_TEMPLATE), CONF_VALUE_ON: config.get(CONF_VALUE_ON), CONF_BUFFER_SIZE: config[CONF_BUFFER_SIZE], CONF_SSL: config[CONF_SSL], From 533e383d5de6318c8a991be76020618cbe8b5c31 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:51:02 +0200 Subject: [PATCH 0590/1635] Remove unnecessary assignment of Template.hass from sql (#123690) --- homeassistant/components/sql/sensor.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/sql/sensor.py b/homeassistant/components/sql/sensor.py index f09f7ae95cf..1d033728c0d 100644 --- a/homeassistant/components/sql/sensor.py +++ b/homeassistant/components/sql/sensor.py @@ -81,9 +81,6 @@ async def async_setup_platform( unique_id: str | None = conf.get(CONF_UNIQUE_ID) db_url: str = resolve_db_url(hass, conf.get(CONF_DB_URL)) - if value_template is not None: - value_template.hass = hass - trigger_entity_config = {CONF_NAME: name} for key in TRIGGER_ENTITY_OPTIONS: if key not in conf: @@ -117,12 +114,10 @@ async def async_setup_entry( value_template: Template | None = None if template is not None: try: - value_template = Template(template) + value_template = Template(template, hass) value_template.ensure_valid() except TemplateError: value_template = None - if value_template is not None: - value_template.hass = hass name_template = Template(name, hass) trigger_entity_config = {CONF_NAME: name_template, CONF_UNIQUE_ID: entry.entry_id} From efa3f228a55a146522ccdd01838e2d580ffb003c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:51:26 +0200 Subject: [PATCH 0591/1635] Remove unnecessary assignment of Template.hass from slack (#123688) --- homeassistant/components/slack/notify.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/slack/notify.py b/homeassistant/components/slack/notify.py index a18b211962a..28f9dd203ff 100644 --- 
a/homeassistant/components/slack/notify.py +++ b/homeassistant/components/slack/notify.py @@ -291,7 +291,6 @@ class SlackNotificationService(BaseNotificationService): if ATTR_FILE not in data: if ATTR_BLOCKS_TEMPLATE in data: value = cv.template_complex(data[ATTR_BLOCKS_TEMPLATE]) - template.attach(self._hass, value) blocks = template.render_complex(value) elif ATTR_BLOCKS in data: blocks = data[ATTR_BLOCKS] From 1c9a1c71d3dd41729312a68b9beb4429fd77b3bf Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:51:37 +0200 Subject: [PATCH 0592/1635] Remove unnecessary assignment of Template.hass from scrape (#123685) --- homeassistant/components/scrape/sensor.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/homeassistant/components/scrape/sensor.py b/homeassistant/components/scrape/sensor.py index ceaf1e63a9d..dd84767ad41 100644 --- a/homeassistant/components/scrape/sensor.py +++ b/homeassistant/components/scrape/sensor.py @@ -67,10 +67,6 @@ async def async_setup_platform( entities: list[ScrapeSensor] = [] for sensor_config in sensors_config: - value_template: Template | None = sensor_config.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - value_template.hass = hass - trigger_entity_config = {CONF_NAME: sensor_config[CONF_NAME]} for key in TRIGGER_ENTITY_OPTIONS: if key not in sensor_config: @@ -85,7 +81,7 @@ async def async_setup_platform( sensor_config[CONF_SELECT], sensor_config.get(CONF_ATTRIBUTE), sensor_config[CONF_INDEX], - value_template, + sensor_config.get(CONF_VALUE_TEMPLATE), True, ) ) From ecd061d46fc27f521e5e8320e40d629764a0ebe8 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:52:03 +0200 Subject: [PATCH 0593/1635] Remove unnecessary assignment of Template.hass from rest (#123682) --- homeassistant/components/rest/__init__.py | 5 ----- homeassistant/components/rest/binary_sensor.py | 2 -- homeassistant/components/rest/notify.py | 1 - homeassistant/components/rest/sensor.py | 2 -- homeassistant/components/rest/switch.py | 5 ----- homeassistant/components/rest_command/__init__.py | 4 ---- 6 files changed, 19 deletions(-) diff --git a/homeassistant/components/rest/__init__.py b/homeassistant/components/rest/__init__.py index b7cdee2e039..59239ad6744 100644 --- a/homeassistant/components/rest/__init__.py +++ b/homeassistant/components/rest/__init__.py @@ -202,19 +202,14 @@ def create_rest_data_from_config(hass: HomeAssistant, config: ConfigType) -> Res timeout: int = config[CONF_TIMEOUT] encoding: str = config[CONF_ENCODING] if resource_template is not None: - resource_template.hass = hass resource = resource_template.async_render(parse_result=False) if payload_template is not None: - payload_template.hass = hass payload = payload_template.async_render(parse_result=False) if not resource: raise HomeAssistantError("Resource not set for RestData") - template.attach(hass, headers) - template.attach(hass, params) - auth: httpx.DigestAuth | tuple[str, str] | None = None if username and password: if config.get(CONF_AUTHENTICATION) == HTTP_DIGEST_AUTHENTICATION: diff --git a/homeassistant/components/rest/binary_sensor.py b/homeassistant/components/rest/binary_sensor.py index e8119a40f8c..c976506d1ba 100644 --- a/homeassistant/components/rest/binary_sensor.py +++ b/homeassistant/components/rest/binary_sensor.py @@ -133,8 +133,6 @@ class RestBinarySensor(ManualTriggerEntity, RestEntity, BinarySensorEntity): ) self._previous_data = None self._value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if 
(value_template := self._value_template) is not None: - value_template.hass = hass @property def available(self) -> bool: diff --git a/homeassistant/components/rest/notify.py b/homeassistant/components/rest/notify.py index c8314d18707..1ca3c55e2b2 100644 --- a/homeassistant/components/rest/notify.py +++ b/homeassistant/components/rest/notify.py @@ -172,7 +172,6 @@ class RestNotificationService(BaseNotificationService): } if not isinstance(value, Template): return value - value.hass = self._hass return value.async_render(kwargs, parse_result=False) if self._data: diff --git a/homeassistant/components/rest/sensor.py b/homeassistant/components/rest/sensor.py index d7bb0ea33fb..fc6ce8c6749 100644 --- a/homeassistant/components/rest/sensor.py +++ b/homeassistant/components/rest/sensor.py @@ -139,8 +139,6 @@ class RestSensor(ManualTriggerSensorEntity, RestEntity): config[CONF_FORCE_UPDATE], ) self._value_template = config.get(CONF_VALUE_TEMPLATE) - if (value_template := self._value_template) is not None: - value_template.hass = hass self._json_attrs = config.get(CONF_JSON_ATTRS) self._json_attrs_path = config.get(CONF_JSON_ATTRS_PATH) self._attr_extra_state_attributes = {} diff --git a/homeassistant/components/rest/switch.py b/homeassistant/components/rest/switch.py index d01aab2cf9f..219084ea683 100644 --- a/homeassistant/components/rest/switch.py +++ b/homeassistant/components/rest/switch.py @@ -153,11 +153,6 @@ class RestSwitch(ManualTriggerEntity, SwitchEntity): self._body_on.hass = hass self._body_off.hass = hass - if (is_on_template := self._is_on_template) is not None: - is_on_template.hass = hass - - template.attach(hass, self._headers) - template.attach(hass, self._params) async def async_added_to_hass(self) -> None: """Handle adding to Home Assistant.""" diff --git a/homeassistant/components/rest_command/__init__.py b/homeassistant/components/rest_command/__init__.py index b6945c5ce98..ee93fde35fa 100644 --- a/homeassistant/components/rest_command/__init__.py +++ b/homeassistant/components/rest_command/__init__.py @@ -96,7 +96,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: method = command_config[CONF_METHOD] template_url = command_config[CONF_URL] - template_url.hass = hass auth = None if CONF_USERNAME in command_config: @@ -107,11 +106,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: template_payload = None if CONF_PAYLOAD in command_config: template_payload = command_config[CONF_PAYLOAD] - template_payload.hass = hass template_headers = command_config.get(CONF_HEADERS, {}) - for template_header in template_headers.values(): - template_header.hass = hass content_type = command_config.get(CONF_CONTENT_TYPE) From b04e3dc6fd6cb266dd58ca9746c762bb9c98f9cb Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:52:41 +0200 Subject: [PATCH 0594/1635] Remove unnecessary assignment of Template.hass from serial (#123686) --- homeassistant/components/serial/sensor.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/serial/sensor.py b/homeassistant/components/serial/sensor.py index e3fee36c09e..e7c39d97f6a 100644 --- a/homeassistant/components/serial/sensor.py +++ b/homeassistant/components/serial/sensor.py @@ -93,9 +93,7 @@ async def async_setup_platform( xonxoff = config.get(CONF_XONXOFF) rtscts = config.get(CONF_RTSCTS) dsrdtr = config.get(CONF_DSRDTR) - - if (value_template := config.get(CONF_VALUE_TEMPLATE)) is not None: - value_template.hass = hass + value_template = 
config.get(CONF_VALUE_TEMPLATE) sensor = SerialSensor( name, From 268044cd015207d370ec8bce31a9a266d5590a89 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:53:19 +0200 Subject: [PATCH 0595/1635] Remove unnecessary assignment of Template.hass from notify (#123680) --- homeassistant/components/notify/__init__.py | 2 -- homeassistant/components/notify/legacy.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/homeassistant/components/notify/__init__.py b/homeassistant/components/notify/__init__.py index 1fc7836ecd8..31c7b8e4d70 100644 --- a/homeassistant/components/notify/__init__.py +++ b/homeassistant/components/notify/__init__.py @@ -91,14 +91,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def persistent_notification(service: ServiceCall) -> None: """Send notification via the built-in persistent_notify integration.""" message: Template = service.data[ATTR_MESSAGE] - message.hass = hass check_templates_warn(hass, message) title = None title_tpl: Template | None if title_tpl := service.data.get(ATTR_TITLE): check_templates_warn(hass, title_tpl) - title_tpl.hass = hass title = title_tpl.async_render(parse_result=False) notification_id = None diff --git a/homeassistant/components/notify/legacy.py b/homeassistant/components/notify/legacy.py index 81444a36296..dcb148a99f5 100644 --- a/homeassistant/components/notify/legacy.py +++ b/homeassistant/components/notify/legacy.py @@ -259,7 +259,6 @@ class BaseNotificationService: title: Template | None if title := service.data.get(ATTR_TITLE): check_templates_warn(self.hass, title) - title.hass = self.hass kwargs[ATTR_TITLE] = title.async_render(parse_result=False) if self.registered_targets.get(service.service) is not None: @@ -268,7 +267,6 @@ class BaseNotificationService: kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET) check_templates_warn(self.hass, message) - message.hass = self.hass kwargs[ATTR_MESSAGE] = message.async_render(parse_result=False) kwargs[ATTR_DATA] = service.data.get(ATTR_DATA) From 81788790dfcd6b058b5b145cfc89366186f5210f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:53:32 +0200 Subject: [PATCH 0596/1635] Remove unnecessary assignment of Template.hass from rss_feed_template (#123683) --- .../components/rss_feed_template/__init__.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/homeassistant/components/rss_feed_template/__init__.py b/homeassistant/components/rss_feed_template/__init__.py index debff5a6e96..89624c922e6 100644 --- a/homeassistant/components/rss_feed_template/__init__.py +++ b/homeassistant/components/rss_feed_template/__init__.py @@ -49,18 +49,8 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: requires_auth: bool = feedconfig["requires_api_password"] - title: Template | None - if (title := feedconfig.get("title")) is not None: - title.hass = hass - items: list[dict[str, Template]] = feedconfig["items"] - for item in items: - if "title" in item: - item["title"].hass = hass - if "description" in item: - item["description"].hass = hass - - rss_view = RssView(url, requires_auth, title, items) + rss_view = RssView(url, requires_auth, feedconfig.get("title"), items) hass.http.register_view(rss_view) return True From 4dadf0ea1b2d4f0561f6743e87e9e6f9d18d5591 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:54:06 +0200 Subject: [PATCH 0597/1635] Remove unnecessary assignment of Template.hass from snmp (#123689) --- homeassistant/components/snmp/sensor.py | 2 -- 1 
file changed, 2 deletions(-) diff --git a/homeassistant/components/snmp/sensor.py b/homeassistant/components/snmp/sensor.py index fb7b87403cb..4586d0600e9 100644 --- a/homeassistant/components/snmp/sensor.py +++ b/homeassistant/components/snmp/sensor.py @@ -174,8 +174,6 @@ async def async_setup_platform( trigger_entity_config[key] = config[key] value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - value_template.hass = hass data = SnmpData(request_args, baseoid, accept_errors, default_value) async_add_entities([SnmpSensor(hass, data, trigger_entity_config, value_template)]) From c47fdf70746a8effe205955aa34bb6a6617f123f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:54:20 +0200 Subject: [PATCH 0598/1635] Remove unnecessary assignment of Template.hass from intent_script (#123676) --- homeassistant/components/intent_script/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/intent_script/__init__.py b/homeassistant/components/intent_script/__init__.py index d6fbb1edd80..371163ce00a 100644 --- a/homeassistant/components/intent_script/__init__.py +++ b/homeassistant/components/intent_script/__init__.py @@ -90,7 +90,6 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None: def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None: """Load YAML intents into the intent system.""" - template.attach(hass, intents) hass.data[DOMAIN] = intents for intent_type, conf in intents.items(): From 2d41723cfecccec262edc06ac86bd77033fb3000 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 15:54:35 +0200 Subject: [PATCH 0599/1635] Remove unnecessary assignment of Template.hass from minio (#123679) --- homeassistant/components/minio/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/minio/__init__.py b/homeassistant/components/minio/__init__.py index e2cbcdf9ed1..e5470cc3313 100644 --- a/homeassistant/components/minio/__init__.py +++ b/homeassistant/components/minio/__init__.py @@ -127,7 +127,6 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def _render_service_value(service, key): value = service.data[key] - value.hass = hass return value.async_render(parse_result=False) def put_file(service: ServiceCall) -> None: From 7985974a58d792a7402cf435e4b83f6f129269b5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 12 Aug 2024 09:02:23 -0500 Subject: [PATCH 0600/1635] Bump aiohomekit to 3.2.2 (#123669) --- homeassistant/components/homekit_controller/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json index 476d17d3515..007153aceaf 100644 --- a/homeassistant/components/homekit_controller/manifest.json +++ b/homeassistant/components/homekit_controller/manifest.json @@ -14,6 +14,6 @@ "documentation": "https://www.home-assistant.io/integrations/homekit_controller", "iot_class": "local_push", "loggers": ["aiohomekit", "commentjson"], - "requirements": ["aiohomekit==3.2.1"], + "requirements": ["aiohomekit==3.2.2"], "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 9898da0e19e..59fb5d23a1e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -255,7 +255,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.homekit_controller -aiohomekit==3.2.1 +aiohomekit==3.2.2 # homeassistant.components.hue aiohue==4.7.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 572e70bc2b8..032a09fb699 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -240,7 +240,7 @@ aioguardian==2022.07.0 aioharmony==0.2.10 # homeassistant.components.homekit_controller -aiohomekit==3.2.1 +aiohomekit==3.2.2 # homeassistant.components.hue aiohue==4.7.2 From 342ba1b5996b6330744d6bd62a58212cb039f7cc Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 16:06:19 +0200 Subject: [PATCH 0601/1635] Remove unnecessary assignment of Template.hass from telegram_bot (#123693) --- homeassistant/components/telegram_bot/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index fed9021a46e..9d1a5398055 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -408,7 +408,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ): data[attribute] = attribute_templ else: - attribute_templ.hass = hass try: data[attribute] = attribute_templ.async_render( parse_result=False From 5cb990113494da4e21276c176592369c31b6e0c4 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Mon, 12 Aug 2024 16:19:36 +0200 Subject: [PATCH 0602/1635] Cleaner unit tests for Swiss public transport (#123660) cleaner unit tests --- .../swiss_public_transport/test_service.py | 24 +++++++++---------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/tests/components/swiss_public_transport/test_service.py b/tests/components/swiss_public_transport/test_service.py index 34640de9f21..4009327e77d 100644 --- a/tests/components/swiss_public_transport/test_service.py +++ b/tests/components/swiss_public_transport/test_service.py @@ -38,18 +38,18 @@ MOCK_DATA_STEP_BASE = { @pytest.mark.parametrize( - ("limit", "config_data"), + ("data", "config_data"), [ - (1, MOCK_DATA_STEP_BASE), - (2, MOCK_DATA_STEP_BASE), - (3, MOCK_DATA_STEP_BASE), - (CONNECTIONS_MAX, MOCK_DATA_STEP_BASE), - (None, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 1}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 2}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 3}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: CONNECTIONS_MAX}, MOCK_DATA_STEP_BASE), + ({}, 
MOCK_DATA_STEP_BASE), ], ) async def test_service_call_fetch_connections_success( hass: HomeAssistant, - limit: int, + data: dict, config_data, ) -> None: """Test the fetch_connections service.""" @@ -59,7 +59,7 @@ async def test_service_call_fetch_connections_success( config_entry = MockConfigEntry( domain=DOMAIN, data=config_data, - title=f"Service test call with limit={limit}", + title=f"Service test call with data={data}", unique_id=unique_id, entry_id=f"entry_{unique_id}", ) @@ -69,14 +69,12 @@ async def test_service_call_fetch_connections_success( return_value=AsyncMock(), ) as mock: mock().connections = json.loads(load_fixture("connections.json", DOMAIN))[ - 0 : (limit or CONNECTIONS_COUNT) + 2 + 0 : data.get(ATTR_LIMIT, CONNECTIONS_COUNT) + 2 ] await setup_integration(hass, config_entry) - data = {ATTR_CONFIG_ENTRY_ID: config_entry.entry_id} - if limit is not None: - data[ATTR_LIMIT] = limit + data[ATTR_CONFIG_ENTRY_ID] = config_entry.entry_id assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) response = await hass.services.async_call( domain=DOMAIN, @@ -87,7 +85,7 @@ async def test_service_call_fetch_connections_success( ) await hass.async_block_till_done() assert response["connections"] is not None - assert len(response["connections"]) == (limit or CONNECTIONS_COUNT) + assert len(response["connections"]) == data.get(ATTR_LIMIT, CONNECTIONS_COUNT) @pytest.mark.parametrize( From 26a69458b0f1cfccdcd382b924c8c29bde92a8f2 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Mon, 12 Aug 2024 17:52:37 +0300 Subject: [PATCH 0603/1635] Bump aioswitcher to 4.0.1 (#123697) --- homeassistant/components/switcher_kis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 6aedd2f5670..1ec32a26e4e 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_push", "loggers": ["aioswitcher"], "quality_scale": "platinum", - "requirements": ["aioswitcher==4.0.0"], + "requirements": ["aioswitcher==4.0.1"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 59fb5d23a1e..3e868a419eb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -374,7 +374,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==4.0.0 +aioswitcher==4.0.1 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 032a09fb699..af61773fa16 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -356,7 +356,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==4.0.0 +aioswitcher==4.0.1 # homeassistant.components.syncthing aiosyncthing==0.5.1 From 200f04bf21cda2c546ca6efb927e80228af8cba3 Mon Sep 17 00:00:00 2001 From: Cyrill Raccaud Date: Mon, 12 Aug 2024 17:01:06 +0200 Subject: [PATCH 0604/1635] Fix startup block from Swiss public transport (#123704) --- homeassistant/components/swiss_public_transport/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/swiss_public_transport/__init__.py b/homeassistant/components/swiss_public_transport/__init__.py index 3e29fb9c746..dc1d0eb236c 100644 --- a/homeassistant/components/swiss_public_transport/__init__.py 
+++ b/homeassistant/components/swiss_public_transport/__init__.py @@ -58,7 +58,7 @@ async def async_setup_entry( translation_key="request_timeout", translation_placeholders={ "config_title": entry.title, - "error": e, + "error": str(e), }, ) from e except OpendataTransportError as e: @@ -68,7 +68,7 @@ async def async_setup_entry( translation_placeholders={ **PLACEHOLDERS, "config_title": entry.title, - "error": e, + "error": str(e), }, ) from e From 5fb6c65d23e4af9af3bd9080017772d5623162d6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 17:01:31 +0200 Subject: [PATCH 0605/1635] Remove unnecessary assignment of Template.hass from alexa (#123699) --- homeassistant/components/alexa/flash_briefings.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/alexa/flash_briefings.py b/homeassistant/components/alexa/flash_briefings.py index eed700602ce..0d75ee04b7a 100644 --- a/homeassistant/components/alexa/flash_briefings.py +++ b/homeassistant/components/alexa/flash_briefings.py @@ -52,7 +52,6 @@ class AlexaFlashBriefingView(http.HomeAssistantView): """Initialize Alexa view.""" super().__init__() self.flash_briefings = flash_briefings - template.attach(hass, self.flash_briefings) @callback def get( From b7a0bf152b4e27e01134df8e9ad50eef358d938e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 12 Aug 2024 18:56:54 +0200 Subject: [PATCH 0606/1635] Update aioqsw to v0.4.1 (#123721) --- homeassistant/components/qnap_qsw/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/qnap_qsw/manifest.json b/homeassistant/components/qnap_qsw/manifest.json index b8c62133193..d34848346b7 100644 --- a/homeassistant/components/qnap_qsw/manifest.json +++ b/homeassistant/components/qnap_qsw/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/qnap_qsw", "iot_class": "local_polling", "loggers": ["aioqsw"], - "requirements": ["aioqsw==0.4.0"] + "requirements": ["aioqsw==0.4.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3e868a419eb..f8f30eba714 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -335,7 +335,7 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.4.0 +aioqsw==0.4.1 # homeassistant.components.rainforest_raven aioraven==0.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index af61773fa16..270a4126979 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -317,7 +317,7 @@ aiopvpc==4.2.2 aiopyarr==23.4.0 # homeassistant.components.qnap_qsw -aioqsw==0.4.0 +aioqsw==0.4.1 # homeassistant.components.rainforest_raven aioraven==0.7.0 From 93eb74d970b2eab057ae274fc31cdaf3b06b3d86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 12 Aug 2024 18:57:34 +0200 Subject: [PATCH 0607/1635] Update aioairzone-cloud to v0.6.2 (#123719) --- homeassistant/components/airzone_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 362973ae833..b691770e934 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.1"] + "requirements": ["aioairzone-cloud==0.6.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index f8f30eba714..a56599587d6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -176,7 +176,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.1 +aioairzone-cloud==0.6.2 # homeassistant.components.airzone aioairzone==0.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 270a4126979..83e9b41145e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -164,7 +164,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.1 +aioairzone-cloud==0.6.2 # homeassistant.components.airzone aioairzone==0.8.1 From 138d229fef307554ac0942b08a0d392ad698e900 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 12 Aug 2024 18:59:31 +0200 Subject: [PATCH 0608/1635] Update AEMET-OpenData to v0.5.4 (#123716) --- homeassistant/components/aemet/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aemet/manifest.json b/homeassistant/components/aemet/manifest.json index d2e5c5fdc5a..3696e16b437 100644 --- a/homeassistant/components/aemet/manifest.json +++ b/homeassistant/components/aemet/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/aemet", "iot_class": "cloud_polling", "loggers": ["aemet_opendata"], - "requirements": ["AEMET-OpenData==0.5.3"] + "requirements": ["AEMET-OpenData==0.5.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index a56599587d6..0dca62e06f0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -4,7 +4,7 @@ -r requirements.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.3 +AEMET-OpenData==0.5.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 83e9b41145e..ec32098b7bb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -4,7 +4,7 @@ -r requirements_test.txt # homeassistant.components.aemet -AEMET-OpenData==0.5.3 +AEMET-OpenData==0.5.4 # homeassistant.components.honeywell AIOSomecomfort==0.0.25 From 74a09073c2bcf5cf73bd43b6dfca45bd21f65d64 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Mon, 12 Aug 2024 13:01:07 -0400 Subject: [PATCH 0609/1635] Bump pyschlage to 2024.8.0 (#123714) --- homeassistant/components/schlage/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/schlage/manifest.json b/homeassistant/components/schlage/manifest.json index c6dfc443bb8..5619cf7b312 100644 --- a/homeassistant/components/schlage/manifest.json +++ b/homeassistant/components/schlage/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/schlage", "iot_class": "cloud_polling", - "requirements": ["pyschlage==2024.6.0"] + "requirements": ["pyschlage==2024.8.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0dca62e06f0..5414c205ac7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2160,7 +2160,7 @@ pysabnzbd==1.1.1 pysaj==0.0.16 # 
homeassistant.components.schlage -pyschlage==2024.6.0 +pyschlage==2024.8.0 # homeassistant.components.sensibo pysensibo==1.0.36 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ec32098b7bb..8a1abbfa769 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1726,7 +1726,7 @@ pyrympro==0.0.8 pysabnzbd==1.1.1 # homeassistant.components.schlage -pyschlage==2024.6.0 +pyschlage==2024.8.0 # homeassistant.components.sensibo pysensibo==1.0.36 From 21987a67e7c4d3c526ddfbd4866c64f93c2569f0 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Mon, 12 Aug 2024 19:20:21 +0200 Subject: [PATCH 0610/1635] Cleanup unneeded assignment of hass property on MQTT Template objects (#123706) * Cleanup unneeded assignment of hass property on MQTT Template objects * Commented out code and unneeded checks * Consistent assign hass to Template in mqtt tests * Remove unused hass attribute * Missed line --- homeassistant/components/mqtt/__init__.py | 5 ++-- homeassistant/components/mqtt/models.py | 19 +------------- homeassistant/components/mqtt/tag.py | 6 ++--- homeassistant/components/mqtt/trigger.py | 5 ++-- tests/components/mqtt/test_client.py | 2 +- tests/components/mqtt/test_init.py | 30 +++++++++++------------ 6 files changed, 23 insertions(+), 44 deletions(-) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 5f7f1b1d330..013bd26e49c 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -303,8 +303,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # has been deprecated with HA Core 2024.8.0 # and will be removed with HA Core 2025.2.0 rendered_topic: Any = MqttCommandTemplate( - template.Template(msg_topic_template), - hass=hass, + template.Template(msg_topic_template, hass), ).async_render() ir.async_create_issue( hass, @@ -353,7 +352,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: }, ) payload = MqttCommandTemplate( - template.Template(payload_template), hass=hass + template.Template(payload_template, hass) ).async_render() if TYPE_CHECKING: diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index c355510a5c2..f2b3165f66c 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -12,7 +12,7 @@ import logging from typing import TYPE_CHECKING, Any, TypedDict from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME, Platform -from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, callback from homeassistant.exceptions import ServiceValidationError, TemplateError from homeassistant.helpers import template from homeassistant.helpers.entity import Entity @@ -159,22 +159,13 @@ class MqttCommandTemplate: self, command_template: template.Template | None, *, - hass: HomeAssistant | None = None, entity: Entity | None = None, ) -> None: """Instantiate a command template.""" self._template_state: template.TemplateStateFromEntityId | None = None self._command_template = command_template - if command_template is None: - return - self._entity = entity - command_template.hass = hass - - if entity: - command_template.hass = entity.hass - @callback def async_render( self, @@ -270,7 +261,6 @@ class MqttValueTemplate: self, value_template: template.Template | None, *, - hass: HomeAssistant | None = None, entity: Entity | None = None, config_attributes: TemplateVarsType = 
None, ) -> None: @@ -278,15 +268,8 @@ class MqttValueTemplate: self._template_state: template.TemplateStateFromEntityId | None = None self._value_template = value_template self._config_attributes = config_attributes - if value_template is None: - return - - value_template.hass = hass self._entity = entity - if entity: - value_template.hass = entity.hass - @callback def async_render_with_possible_json_value( self, diff --git a/homeassistant/components/mqtt/tag.py b/homeassistant/components/mqtt/tag.py index fbb0ea813c2..d5f5371c357 100644 --- a/homeassistant/components/mqtt/tag.py +++ b/homeassistant/components/mqtt/tag.py @@ -118,8 +118,7 @@ class MQTTTagScanner(MqttDiscoveryDeviceUpdateMixin): self.hass = hass self._sub_state: dict[str, EntitySubscription] | None = None self._value_template = MqttValueTemplate( - config.get(CONF_VALUE_TEMPLATE), - hass=self.hass, + config.get(CONF_VALUE_TEMPLATE) ).async_render_with_possible_json_value MqttDiscoveryDeviceUpdateMixin.__init__( @@ -136,8 +135,7 @@ class MQTTTagScanner(MqttDiscoveryDeviceUpdateMixin): return self._config = config self._value_template = MqttValueTemplate( - config.get(CONF_VALUE_TEMPLATE), - hass=self.hass, + config.get(CONF_VALUE_TEMPLATE) ).async_render_with_possible_json_value update_device(self.hass, self._config_entry, config) await self.subscribe_topics() diff --git a/homeassistant/components/mqtt/trigger.py b/homeassistant/components/mqtt/trigger.py index 91ac404a07a..3f7f03d7f19 100644 --- a/homeassistant/components/mqtt/trigger.py +++ b/homeassistant/components/mqtt/trigger.py @@ -60,10 +60,10 @@ async def async_attach_trigger( trigger_data: TriggerData = trigger_info["trigger_data"] command_template: Callable[ [PublishPayloadType, TemplateVarsType], PublishPayloadType - ] = MqttCommandTemplate(config.get(CONF_PAYLOAD), hass=hass).async_render + ] = MqttCommandTemplate(config.get(CONF_PAYLOAD)).async_render value_template: Callable[[ReceivePayloadType, str], ReceivePayloadType] value_template = MqttValueTemplate( - config.get(CONF_VALUE_TEMPLATE), hass=hass + config.get(CONF_VALUE_TEMPLATE) ).async_render_with_possible_json_value encoding: str | None = config[CONF_ENCODING] or None qos: int = config[CONF_QOS] @@ -75,7 +75,6 @@ async def async_attach_trigger( wanted_payload = command_template(None, variables) topic_template: Template = config[CONF_TOPIC] - topic_template.hass = hass topic = topic_template.async_render(variables, limited=True, parse_result=False) mqtt.util.valid_subscribe_topic(topic) diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index cd02d805e1c..c5887016f2e 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -225,7 +225,7 @@ async def test_publish( async def test_convert_outgoing_payload(hass: HomeAssistant) -> None: """Test the converting of outgoing MQTT payloads without template.""" - command_template = mqtt.MqttCommandTemplate(None, hass=hass) + command_template = mqtt.MqttCommandTemplate(None) assert command_template.async_render(b"\xde\xad\xbe\xef") == b"\xde\xad\xbe\xef" assert ( command_template.async_render("b'\\xde\\xad\\xbe\\xef'") diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 51379dc8508..f495c5ca585 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -89,12 +89,12 @@ async def test_command_template_value(hass: HomeAssistant) -> None: # test rendering value tpl = template.Template("{{ value + 1 }}", hass=hass) - 
cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) assert cmd_tpl.async_render(4321) == "4322" # test variables at rendering tpl = template.Template("{{ some_var }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) assert cmd_tpl.async_render(None, variables=variables) == "beer" @@ -161,8 +161,8 @@ async def test_command_template_variables( async def test_command_template_fails(hass: HomeAssistant) -> None: """Test the exception handling of an MQTT command template.""" - tpl = template.Template("{{ value * 2 }}") - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + tpl = template.Template("{{ value * 2 }}", hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) with pytest.raises(MqttCommandTemplateException) as exc: cmd_tpl.async_render(None) assert "unsupported operand type(s) for *: 'NoneType' and 'int'" in str(exc.value) @@ -174,13 +174,13 @@ async def test_value_template_value(hass: HomeAssistant) -> None: variables = {"id": 1234, "some_var": "beer"} # test rendering value - tpl = template.Template("{{ value_json.id }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) + tpl = template.Template("{{ value_json.id }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test variables at rendering - tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, config_attributes={"code": 1234}) + tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl, config_attributes={"code": 1234}) assert ( val_tpl.async_render_with_possible_json_value( '{"id": 4321}', variables=variables @@ -189,8 +189,8 @@ async def test_value_template_value(hass: HomeAssistant) -> None: ) # test with default value if an error occurs due to an invalid template - tpl = template.Template("{{ value_json.id | as_datetime }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) + tpl = template.Template("{{ value_json.id | as_datetime }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl) assert ( val_tpl.async_render_with_possible_json_value('{"otherid": 4321}', "my default") == "my default" @@ -200,19 +200,19 @@ async def test_value_template_value(hass: HomeAssistant) -> None: entity = Entity() entity.hass = hass entity.entity_id = "select.test" - tpl = template.Template("{{ value_json.id }}") + tpl = template.Template("{{ value_json.id }}", hass=hass) val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test this object in a template - tpl2 = template.Template("{{ this.entity_id }}") + tpl2 = template.Template("{{ this.entity_id }}", hass=hass) val_tpl2 = mqtt.MqttValueTemplate(tpl2, entity=entity) assert val_tpl2.async_render_with_possible_json_value("bla") == "select.test" with patch( "homeassistant.helpers.template.TemplateStateFromEntityId", MagicMock() ) as template_state_calls: - tpl3 = template.Template("{{ this.entity_id }}") + tpl3 = template.Template("{{ this.entity_id }}", hass=hass) val_tpl3 = mqtt.MqttValueTemplate(tpl3, entity=entity) val_tpl3.async_render_with_possible_json_value("call1") val_tpl3.async_render_with_possible_json_value("call2") @@ -223,8 +223,8 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: """Test the rendering of MQTT value template fails.""" entity = 
MockEntity(entity_id="sensor.test") entity.hass = hass - tpl = template.Template("{{ value_json.some_var * 2 }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, entity=entity) + tpl = template.Template("{{ value_json.some_var * 2 }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value('{"some_var": null }') assert str(exc.value) == ( From a4f0234841ceee6a526c3a7c804bd9b088309f1b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 20:42:39 +0200 Subject: [PATCH 0611/1635] Reduce logging in command_line (#123723) --- homeassistant/components/command_line/cover.py | 4 ++-- homeassistant/components/command_line/switch.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/command_line/cover.py b/homeassistant/components/command_line/cover.py index 344fddabc3b..2bcbb610296 100644 --- a/homeassistant/components/command_line/cover.py +++ b/homeassistant/components/command_line/cover.py @@ -110,7 +110,7 @@ class CommandCover(ManualTriggerEntity, CoverEntity): async def _async_move_cover(self, command: str) -> bool: """Execute the actual commands.""" - LOGGER.info("Running command: %s", command) + LOGGER.debug("Running command: %s", command) returncode = await async_call_shell_with_timeout(command, self._timeout) success = returncode == 0 @@ -140,7 +140,7 @@ class CommandCover(ManualTriggerEntity, CoverEntity): async def _async_query_state(self) -> str | None: """Query for the state.""" if self._command_state: - LOGGER.info("Running state value command: %s", self._command_state) + LOGGER.debug("Running state value command: %s", self._command_state) return await async_check_output_or_log(self._command_state, self._timeout) return None diff --git a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index b02fb6dcd4a..33b38ab9115 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -111,7 +111,7 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): async def _switch(self, command: str) -> bool: """Execute the actual commands.""" - LOGGER.info("Running command: %s", command) + LOGGER.debug("Running command: %s", command) success = await async_call_shell_with_timeout(command, self._timeout) == 0 @@ -122,12 +122,12 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): async def _async_query_state_value(self, command: str) -> str | None: """Execute state command for return value.""" - LOGGER.info("Running state value command: %s", command) + LOGGER.debug("Running state value command: %s", command) return await async_check_output_or_log(command, self._timeout) async def _async_query_state_code(self, command: str) -> bool: """Execute state command for return code.""" - LOGGER.info("Running state code command: %s", command) + LOGGER.debug("Running state code command: %s", command) return ( await async_call_shell_with_timeout( command, self._timeout, log_return_code=False From ff0a44cc123522736a8f1e0c41de1649c1900bf1 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Mon, 12 Aug 2024 22:28:39 +0300 Subject: [PATCH 0612/1635] Bump aioswitcher to 4.0.2 (#123734) --- homeassistant/components/switcher_kis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switcher_kis/manifest.json 
b/homeassistant/components/switcher_kis/manifest.json index 1ec32a26e4e..75ace60e942 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_push", "loggers": ["aioswitcher"], "quality_scale": "platinum", - "requirements": ["aioswitcher==4.0.1"], + "requirements": ["aioswitcher==4.0.2"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 5414c205ac7..76dce213ebf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -374,7 +374,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==4.0.1 +aioswitcher==4.0.2 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8a1abbfa769..d88d5607050 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -356,7 +356,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==4.0.1 +aioswitcher==4.0.2 # homeassistant.components.syncthing aiosyncthing==0.5.1 From 05c4b1a6a9a505e883bba41651bd0f67d3f28690 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 21:31:10 +0200 Subject: [PATCH 0613/1635] Remove deprecated logi_circle integration (#123727) --- CODEOWNERS | 2 - .../components/logi_circle/__init__.py | 271 ------------------ .../components/logi_circle/camera.py | 200 ------------- .../components/logi_circle/config_flow.py | 206 ------------- homeassistant/components/logi_circle/const.py | 22 -- .../components/logi_circle/icons.json | 7 - .../components/logi_circle/manifest.json | 11 - .../components/logi_circle/sensor.py | 164 ----------- .../components/logi_circle/services.yaml | 53 ---- .../components/logi_circle/strings.json | 105 ------- homeassistant/generated/config_flows.py | 1 - homeassistant/generated/integrations.json | 6 - requirements_all.txt | 3 - requirements_test_all.txt | 3 - tests/components/logi_circle/__init__.py | 1 - .../logi_circle/test_config_flow.py | 227 --------------- tests/components/logi_circle/test_init.py | 68 ----- 17 files changed, 1350 deletions(-) delete mode 100644 homeassistant/components/logi_circle/__init__.py delete mode 100644 homeassistant/components/logi_circle/camera.py delete mode 100644 homeassistant/components/logi_circle/config_flow.py delete mode 100644 homeassistant/components/logi_circle/const.py delete mode 100644 homeassistant/components/logi_circle/icons.json delete mode 100644 homeassistant/components/logi_circle/manifest.json delete mode 100644 homeassistant/components/logi_circle/sensor.py delete mode 100644 homeassistant/components/logi_circle/services.yaml delete mode 100644 homeassistant/components/logi_circle/strings.json delete mode 100644 tests/components/logi_circle/__init__.py delete mode 100644 tests/components/logi_circle/test_config_flow.py delete mode 100644 tests/components/logi_circle/test_init.py diff --git a/CODEOWNERS b/CODEOWNERS index 710846a7f42..a3b38498f68 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -826,8 +826,6 @@ build.json @home-assistant/supervisor /tests/components/logbook/ @home-assistant/core /homeassistant/components/logger/ @home-assistant/core /tests/components/logger/ @home-assistant/core -/homeassistant/components/logi_circle/ @evanjd -/tests/components/logi_circle/ @evanjd /homeassistant/components/london_underground/ @jpbede /tests/components/london_underground/ @jpbede /homeassistant/components/lookin/ @ANMalko @bdraco 
diff --git a/homeassistant/components/logi_circle/__init__.py b/homeassistant/components/logi_circle/__init__.py deleted file mode 100644 index 0713bcc438e..00000000000 --- a/homeassistant/components/logi_circle/__init__.py +++ /dev/null @@ -1,271 +0,0 @@ -"""Support for Logi Circle devices.""" - -import asyncio - -from aiohttp.client_exceptions import ClientResponseError -from logi_circle import LogiCircle -from logi_circle.exception import AuthorizationFailed -import voluptuous as vol - -from homeassistant import config_entries -from homeassistant.components import persistent_notification -from homeassistant.components.camera import ATTR_FILENAME -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_MODE, - CONF_API_KEY, - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_MONITORED_CONDITIONS, - CONF_SENSORS, - EVENT_HOMEASSISTANT_STOP, - Platform, -) -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.typing import ConfigType - -from . import config_flow -from .const import ( - CONF_REDIRECT_URI, - DATA_LOGI, - DEFAULT_CACHEDB, - DOMAIN, - LED_MODE_KEY, - RECORDING_MODE_KEY, - SIGNAL_LOGI_CIRCLE_RECONFIGURE, - SIGNAL_LOGI_CIRCLE_RECORD, - SIGNAL_LOGI_CIRCLE_SNAPSHOT, -) -from .sensor import SENSOR_TYPES - -NOTIFICATION_ID = "logi_circle_notification" -NOTIFICATION_TITLE = "Logi Circle Setup" - -_TIMEOUT = 15 # seconds - -SERVICE_SET_CONFIG = "set_config" -SERVICE_LIVESTREAM_SNAPSHOT = "livestream_snapshot" -SERVICE_LIVESTREAM_RECORD = "livestream_record" - -ATTR_VALUE = "value" -ATTR_DURATION = "duration" - -PLATFORMS = [Platform.CAMERA, Platform.SENSOR] - -SENSOR_KEYS = [desc.key for desc in SENSOR_TYPES] - -SENSOR_SCHEMA = vol.Schema( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_KEYS): vol.All( - cv.ensure_list, [vol.In(SENSOR_KEYS)] - ) - } -) - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CLIENT_ID): cv.string, - vol.Required(CONF_CLIENT_SECRET): cv.string, - vol.Required(CONF_API_KEY): cv.string, - vol.Required(CONF_REDIRECT_URI): cv.string, - vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - -LOGI_CIRCLE_SERVICE_SET_CONFIG = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_MODE): vol.In([LED_MODE_KEY, RECORDING_MODE_KEY]), - vol.Required(ATTR_VALUE): cv.boolean, - } -) - -LOGI_CIRCLE_SERVICE_SNAPSHOT = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_FILENAME): cv.template, - } -) - -LOGI_CIRCLE_SERVICE_RECORD = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_FILENAME): cv.template, - vol.Required(ATTR_DURATION): cv.positive_int, - } -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up configured Logi Circle component.""" - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - config_flow.register_flow_implementation( - hass, - DOMAIN, - client_id=conf[CONF_CLIENT_ID], - client_secret=conf[CONF_CLIENT_SECRET], - api_key=conf[CONF_API_KEY], - redirect_uri=conf[CONF_REDIRECT_URI], - sensors=conf[CONF_SENSORS], - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT} - ) - ) - - return True - - -async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Logi Circle from a config entry.""" - ir.async_create_issue( - hass, - DOMAIN, - DOMAIN, - breaks_in_ha_version="2024.9.0", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="integration_removed", - translation_placeholders={ - "entries": "/config/integrations/integration/logi_circle", - }, - ) - - logi_circle = LogiCircle( - client_id=entry.data[CONF_CLIENT_ID], - client_secret=entry.data[CONF_CLIENT_SECRET], - api_key=entry.data[CONF_API_KEY], - redirect_uri=entry.data[CONF_REDIRECT_URI], - cache_file=hass.config.path(DEFAULT_CACHEDB), - ) - - if not logi_circle.authorized: - persistent_notification.create( - hass, - ( - "Error: The cached access tokens are missing from" - f" {DEFAULT_CACHEDB}.
<br />Please unload then re-add the Logi Circle" - " integration to resolve." - ), - title=NOTIFICATION_TITLE, - notification_id=NOTIFICATION_ID, - ) - return False - - try: - async with asyncio.timeout(_TIMEOUT): - # Ensure the cameras property returns the same Camera objects for - # all devices. Performs implicit login and session validation. - await logi_circle.synchronize_cameras() - except AuthorizationFailed: - persistent_notification.create( - hass, - ( - "Error: Failed to obtain an access token from the cached " - "refresh token.<br />
" - "Token may have expired or been revoked.<br />
" - "Please unload then re-add the Logi Circle integration to resolve" - ), - title=NOTIFICATION_TITLE, - notification_id=NOTIFICATION_ID, - ) - return False - except TimeoutError: - # The TimeoutError exception object returns nothing when casted to a - # string, so we'll handle it separately. - err = f"{_TIMEOUT}s timeout exceeded when connecting to Logi Circle API" - persistent_notification.create( - hass, - f"Error: {err}
<br />You will need to restart hass after fixing.", - title=NOTIFICATION_TITLE, - notification_id=NOTIFICATION_ID, - ) - return False - except ClientResponseError as ex: - persistent_notification.create( - hass, - f"Error: {ex}<br />
You will need to restart hass after fixing.", - title=NOTIFICATION_TITLE, - notification_id=NOTIFICATION_ID, - ) - return False - - hass.data[DATA_LOGI] = logi_circle - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - async def service_handler(service: ServiceCall) -> None: - """Dispatch service calls to target entities.""" - params = dict(service.data) - - if service.service == SERVICE_SET_CONFIG: - async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_RECONFIGURE, params) - if service.service == SERVICE_LIVESTREAM_SNAPSHOT: - async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_SNAPSHOT, params) - if service.service == SERVICE_LIVESTREAM_RECORD: - async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_RECORD, params) - - hass.services.async_register( - DOMAIN, - SERVICE_SET_CONFIG, - service_handler, - schema=LOGI_CIRCLE_SERVICE_SET_CONFIG, - ) - - hass.services.async_register( - DOMAIN, - SERVICE_LIVESTREAM_SNAPSHOT, - service_handler, - schema=LOGI_CIRCLE_SERVICE_SNAPSHOT, - ) - - hass.services.async_register( - DOMAIN, - SERVICE_LIVESTREAM_RECORD, - service_handler, - schema=LOGI_CIRCLE_SERVICE_RECORD, - ) - - async def shut_down(event=None): - """Close Logi Circle aiohttp session.""" - await logi_circle.auth_provider.close() - - entry.async_on_unload( - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shut_down) - ) - - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - if all( - config_entry.state is config_entries.ConfigEntryState.NOT_LOADED - for config_entry in hass.config_entries.async_entries(DOMAIN) - if config_entry.entry_id != entry.entry_id - ): - ir.async_delete_issue(hass, DOMAIN, DOMAIN) - - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - logi_circle = hass.data.pop(DATA_LOGI) - - # Tell API wrapper to close all aiohttp sessions, invalidate WS connections - # and clear all locally cached tokens - await logi_circle.auth_provider.clear_authorization() - - return unload_ok diff --git a/homeassistant/components/logi_circle/camera.py b/homeassistant/components/logi_circle/camera.py deleted file mode 100644 index 04f12586679..00000000000 --- a/homeassistant/components/logi_circle/camera.py +++ /dev/null @@ -1,200 +0,0 @@ -"""Support to the Logi Circle cameras.""" - -from __future__ import annotations - -from datetime import timedelta -import logging - -from homeassistant.components.camera import Camera, CameraEntityFeature -from homeassistant.components.ffmpeg import get_ffmpeg_manager -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_CHARGING, - ATTR_BATTERY_LEVEL, - ATTR_ENTITY_ID, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from .const import ( - ATTRIBUTION, - DEVICE_BRAND, - DOMAIN as LOGI_CIRCLE_DOMAIN, - LED_MODE_KEY, - RECORDING_MODE_KEY, - SIGNAL_LOGI_CIRCLE_RECONFIGURE, - SIGNAL_LOGI_CIRCLE_RECORD, - SIGNAL_LOGI_CIRCLE_SNAPSHOT, -) - -_LOGGER = logging.getLogger(__name__) - -SCAN_INTERVAL = timedelta(seconds=60) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) 
-> None: - """Set up a Logi Circle Camera. Obsolete.""" - _LOGGER.warning("Logi Circle no longer works with camera platform configuration") - - -async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up a Logi Circle Camera based on a config entry.""" - devices = await hass.data[LOGI_CIRCLE_DOMAIN].cameras - ffmpeg = get_ffmpeg_manager(hass) - - cameras = [LogiCam(device, ffmpeg) for device in devices] - - async_add_entities(cameras, True) - - -class LogiCam(Camera): - """An implementation of a Logi Circle camera.""" - - _attr_attribution = ATTRIBUTION - _attr_should_poll = True # Cameras default to False - _attr_supported_features = CameraEntityFeature.ON_OFF - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, camera, ffmpeg): - """Initialize Logi Circle camera.""" - super().__init__() - self._camera = camera - self._has_battery = camera.supports_feature("battery_level") - self._ffmpeg = ffmpeg - self._listeners = [] - self._attr_unique_id = camera.mac_address - self._attr_device_info = DeviceInfo( - identifiers={(LOGI_CIRCLE_DOMAIN, camera.id)}, - manufacturer=DEVICE_BRAND, - model=camera.model_name, - name=camera.name, - sw_version=camera.firmware, - ) - - async def async_added_to_hass(self) -> None: - """Connect camera methods to signals.""" - - def _dispatch_proxy(method): - """Expand parameters & filter entity IDs.""" - - async def _call(params): - entity_ids = params.get(ATTR_ENTITY_ID) - filtered_params = { - k: v for k, v in params.items() if k != ATTR_ENTITY_ID - } - if entity_ids is None or self.entity_id in entity_ids: - await method(**filtered_params) - - return _call - - self._listeners.extend( - [ - async_dispatcher_connect( - self.hass, - SIGNAL_LOGI_CIRCLE_RECONFIGURE, - _dispatch_proxy(self.set_config), - ), - async_dispatcher_connect( - self.hass, - SIGNAL_LOGI_CIRCLE_SNAPSHOT, - _dispatch_proxy(self.livestream_snapshot), - ), - async_dispatcher_connect( - self.hass, - SIGNAL_LOGI_CIRCLE_RECORD, - _dispatch_proxy(self.download_livestream), - ), - ] - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect dispatcher listeners when removed.""" - for detach in self._listeners: - detach() - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - state = { - "battery_saving_mode": ( - STATE_ON if self._camera.battery_saving else STATE_OFF - ), - "microphone_gain": self._camera.microphone_gain, - } - - # Add battery attributes if camera is battery-powered - if self._has_battery: - state[ATTR_BATTERY_CHARGING] = self._camera.charging - state[ATTR_BATTERY_LEVEL] = self._camera.battery_level - - return state - - async def async_camera_image( - self, width: int | None = None, height: int | None = None - ) -> bytes | None: - """Return a still image from the camera.""" - return await self._camera.live_stream.download_jpeg() - - async def async_turn_off(self) -> None: - """Disable streaming mode for this camera.""" - await self._camera.set_config("streaming", False) - - async def async_turn_on(self) -> None: - """Enable streaming mode for this camera.""" - await self._camera.set_config("streaming", True) - - async def set_config(self, mode, value): - """Set an configuration property for the target camera.""" - if mode == LED_MODE_KEY: - await self._camera.set_config("led", value) - if mode == RECORDING_MODE_KEY: - await self._camera.set_config("recording_disabled", not value) - - async def download_livestream(self, filename, duration): 
- """Download a recording from the camera's livestream.""" - # Render filename from template. - stream_file = filename.async_render(variables={ATTR_ENTITY_ID: self.entity_id}) - - # Respect configured allowed paths. - if not self.hass.config.is_allowed_path(stream_file): - _LOGGER.error("Can't write %s, no access to path!", stream_file) - return - - await self._camera.live_stream.download_rtsp( - filename=stream_file, - duration=timedelta(seconds=duration), - ffmpeg_bin=self._ffmpeg.binary, - ) - - async def livestream_snapshot(self, filename): - """Download a still frame from the camera's livestream.""" - # Render filename from template. - snapshot_file = filename.async_render( - variables={ATTR_ENTITY_ID: self.entity_id} - ) - - # Respect configured allowed paths. - if not self.hass.config.is_allowed_path(snapshot_file): - _LOGGER.error("Can't write %s, no access to path!", snapshot_file) - return - - await self._camera.live_stream.download_jpeg( - filename=snapshot_file, refresh=True - ) - - async def async_update(self) -> None: - """Update camera entity and refresh attributes.""" - await self._camera.update() diff --git a/homeassistant/components/logi_circle/config_flow.py b/homeassistant/components/logi_circle/config_flow.py deleted file mode 100644 index 6c1a549aa04..00000000000 --- a/homeassistant/components/logi_circle/config_flow.py +++ /dev/null @@ -1,206 +0,0 @@ -"""Config flow to configure Logi Circle component.""" - -import asyncio -from collections import OrderedDict -from http import HTTPStatus - -from logi_circle import LogiCircle -from logi_circle.exception import AuthorizationFailed -import voluptuous as vol - -from homeassistant.components.http import KEY_HASS, HomeAssistantView -from homeassistant.config_entries import ConfigFlow -from homeassistant.const import ( - CONF_API_KEY, - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_SENSORS, -) -from homeassistant.core import callback - -from .const import CONF_REDIRECT_URI, DEFAULT_CACHEDB, DOMAIN - -_TIMEOUT = 15 # seconds - -DATA_FLOW_IMPL = "logi_circle_flow_implementation" -EXTERNAL_ERRORS = "logi_errors" -AUTH_CALLBACK_PATH = "/api/logi_circle" -AUTH_CALLBACK_NAME = "api:logi_circle" - - -@callback -def register_flow_implementation( - hass, domain, client_id, client_secret, api_key, redirect_uri, sensors -): - """Register a flow implementation. - - domain: Domain of the component responsible for the implementation. - client_id: Client ID. - client_secret: Client secret. - api_key: API key issued by Logitech. - redirect_uri: Auth callback redirect URI. - sensors: Sensor config. 
- """ - if DATA_FLOW_IMPL not in hass.data: - hass.data[DATA_FLOW_IMPL] = OrderedDict() - - hass.data[DATA_FLOW_IMPL][domain] = { - CONF_CLIENT_ID: client_id, - CONF_CLIENT_SECRET: client_secret, - CONF_API_KEY: api_key, - CONF_REDIRECT_URI: redirect_uri, - CONF_SENSORS: sensors, - EXTERNAL_ERRORS: None, - } - - -class LogiCircleFlowHandler(ConfigFlow, domain=DOMAIN): - """Config flow for Logi Circle component.""" - - VERSION = 1 - - def __init__(self) -> None: - """Initialize flow.""" - self.flow_impl = None - - async def async_step_import(self, user_input=None): - """Handle external yaml configuration.""" - self._async_abort_entries_match() - - self.flow_impl = DOMAIN - - return await self.async_step_auth() - - async def async_step_user(self, user_input=None): - """Handle a flow start.""" - flows = self.hass.data.get(DATA_FLOW_IMPL, {}) - - self._async_abort_entries_match() - - if not flows: - return self.async_abort(reason="missing_configuration") - - if len(flows) == 1: - self.flow_impl = list(flows)[0] - return await self.async_step_auth() - - if user_input is not None: - self.flow_impl = user_input["flow_impl"] - return await self.async_step_auth() - - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required("flow_impl"): vol.In(list(flows))}), - ) - - async def async_step_auth(self, user_input=None): - """Create an entry for auth.""" - if self._async_current_entries(): - return self.async_abort(reason="external_setup") - - external_error = self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] - errors = {} - if external_error: - # Handle error from another flow - errors["base"] = external_error - self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] = None - elif user_input is not None: - errors["base"] = "follow_link" - - url = self._get_authorization_url() - - return self.async_show_form( - step_id="auth", - description_placeholders={"authorization_url": url}, - errors=errors, - ) - - def _get_authorization_url(self): - """Create temporary Circle session and generate authorization url.""" - flow = self.hass.data[DATA_FLOW_IMPL][self.flow_impl] - client_id = flow[CONF_CLIENT_ID] - client_secret = flow[CONF_CLIENT_SECRET] - api_key = flow[CONF_API_KEY] - redirect_uri = flow[CONF_REDIRECT_URI] - - logi_session = LogiCircle( - client_id=client_id, - client_secret=client_secret, - api_key=api_key, - redirect_uri=redirect_uri, - ) - - self.hass.http.register_view(LogiCircleAuthCallbackView()) - - return logi_session.authorize_url - - async def async_step_code(self, code=None): - """Received code for authentication.""" - self._async_abort_entries_match() - - return await self._async_create_session(code) - - async def _async_create_session(self, code): - """Create Logi Circle session and entries.""" - flow = self.hass.data[DATA_FLOW_IMPL][DOMAIN] - client_id = flow[CONF_CLIENT_ID] - client_secret = flow[CONF_CLIENT_SECRET] - api_key = flow[CONF_API_KEY] - redirect_uri = flow[CONF_REDIRECT_URI] - sensors = flow[CONF_SENSORS] - - logi_session = LogiCircle( - client_id=client_id, - client_secret=client_secret, - api_key=api_key, - redirect_uri=redirect_uri, - cache_file=self.hass.config.path(DEFAULT_CACHEDB), - ) - - try: - async with asyncio.timeout(_TIMEOUT): - await logi_session.authorize(code) - except AuthorizationFailed: - (self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS]) = "invalid_auth" - return self.async_abort(reason="external_error") - except TimeoutError: - ( - self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] - ) = 
"authorize_url_timeout" - return self.async_abort(reason="external_error") - - account_id = (await logi_session.account)["accountId"] - await logi_session.close() - return self.async_create_entry( - title=f"Logi Circle ({account_id})", - data={ - CONF_CLIENT_ID: client_id, - CONF_CLIENT_SECRET: client_secret, - CONF_API_KEY: api_key, - CONF_REDIRECT_URI: redirect_uri, - CONF_SENSORS: sensors, - }, - ) - - -class LogiCircleAuthCallbackView(HomeAssistantView): - """Logi Circle Authorization Callback View.""" - - requires_auth = False - url = AUTH_CALLBACK_PATH - name = AUTH_CALLBACK_NAME - - async def get(self, request): - """Receive authorization code.""" - hass = request.app[KEY_HASS] - if "code" in request.query: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": "code"}, data=request.query["code"] - ) - ) - return self.json_message("Authorisation code saved") - return self.json_message( - "Authorisation code missing from query string", - status_code=HTTPStatus.BAD_REQUEST, - ) diff --git a/homeassistant/components/logi_circle/const.py b/homeassistant/components/logi_circle/const.py deleted file mode 100644 index e144f47ce4e..00000000000 --- a/homeassistant/components/logi_circle/const.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Constants in Logi Circle component.""" - -from __future__ import annotations - -DOMAIN = "logi_circle" -DATA_LOGI = DOMAIN - -CONF_REDIRECT_URI = "redirect_uri" - -DEFAULT_CACHEDB = ".logi_cache.pickle" - - -LED_MODE_KEY = "LED" -RECORDING_MODE_KEY = "RECORDING_MODE" - -SIGNAL_LOGI_CIRCLE_RECONFIGURE = "logi_circle_reconfigure" -SIGNAL_LOGI_CIRCLE_SNAPSHOT = "logi_circle_snapshot" -SIGNAL_LOGI_CIRCLE_RECORD = "logi_circle_record" - -# Attribution -ATTRIBUTION = "Data provided by circle.logi.com" -DEVICE_BRAND = "Logitech" diff --git a/homeassistant/components/logi_circle/icons.json b/homeassistant/components/logi_circle/icons.json deleted file mode 100644 index 9289746d375..00000000000 --- a/homeassistant/components/logi_circle/icons.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "services": { - "set_config": "mdi:cog", - "livestream_snapshot": "mdi:camera", - "livestream_record": "mdi:record-rec" - } -} diff --git a/homeassistant/components/logi_circle/manifest.json b/homeassistant/components/logi_circle/manifest.json deleted file mode 100644 index f4f65b22505..00000000000 --- a/homeassistant/components/logi_circle/manifest.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "domain": "logi_circle", - "name": "Logi Circle", - "codeowners": ["@evanjd"], - "config_flow": true, - "dependencies": ["ffmpeg", "http"], - "documentation": "https://www.home-assistant.io/integrations/logi_circle", - "iot_class": "cloud_polling", - "loggers": ["logi_circle"], - "requirements": ["logi-circle==0.2.3"] -} diff --git a/homeassistant/components/logi_circle/sensor.py b/homeassistant/components/logi_circle/sensor.py deleted file mode 100644 index 121cb8848ae..00000000000 --- a/homeassistant/components/logi_circle/sensor.py +++ /dev/null @@ -1,164 +0,0 @@ -"""Support for Logi Circle sensors.""" - -from __future__ import annotations - -import logging -from typing import Any - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_CHARGING, - CONF_MONITORED_CONDITIONS, - CONF_SENSORS, - PERCENTAGE, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant -from 
homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.dt import as_local - -from .const import ATTRIBUTION, DEVICE_BRAND, DOMAIN as LOGI_CIRCLE_DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( - SensorEntityDescription( - key="battery_level", - native_unit_of_measurement=PERCENTAGE, - device_class=SensorDeviceClass.BATTERY, - ), - SensorEntityDescription( - key="last_activity_time", - translation_key="last_activity", - icon="mdi:history", - ), - SensorEntityDescription( - key="recording", - translation_key="recording_mode", - icon="mdi:eye", - ), - SensorEntityDescription( - key="signal_strength_category", - translation_key="wifi_signal_category", - icon="mdi:wifi", - ), - SensorEntityDescription( - key="signal_strength_percentage", - translation_key="wifi_signal_strength", - native_unit_of_measurement=PERCENTAGE, - icon="mdi:wifi", - ), - SensorEntityDescription( - key="streaming", - translation_key="streaming_mode", - icon="mdi:camera", - ), -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Logi Circle device. Obsolete.""" - _LOGGER.warning("Logi Circle no longer works with sensor platform configuration") - - -async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up a Logi Circle sensor based on a config entry.""" - devices = await hass.data[LOGI_CIRCLE_DOMAIN].cameras - time_zone = str(hass.config.time_zone) - - monitored_conditions = entry.data[CONF_SENSORS].get(CONF_MONITORED_CONDITIONS) - entities = [ - LogiSensor(device, time_zone, description) - for description in SENSOR_TYPES - if description.key in monitored_conditions - for device in devices - if device.supports_feature(description.key) - ] - - async_add_entities(entities, True) - - -class LogiSensor(SensorEntity): - """A sensor implementation for a Logi Circle camera.""" - - _attr_attribution = ATTRIBUTION - _attr_has_entity_name = True - - def __init__(self, camera, time_zone, description: SensorEntityDescription) -> None: - """Initialize a sensor for Logi Circle camera.""" - self.entity_description = description - self._camera = camera - self._attr_unique_id = f"{camera.mac_address}-{description.key}" - self._activity: dict[Any, Any] = {} - self._tz = time_zone - self._attr_device_info = DeviceInfo( - identifiers={(LOGI_CIRCLE_DOMAIN, camera.id)}, - manufacturer=DEVICE_BRAND, - model=camera.model_name, - name=camera.name, - sw_version=camera.firmware, - ) - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - state = { - "battery_saving_mode": ( - STATE_ON if self._camera.battery_saving else STATE_OFF - ), - "microphone_gain": self._camera.microphone_gain, - } - - if self.entity_description.key == "battery_level": - state[ATTR_BATTERY_CHARGING] = self._camera.charging - - return state - - @property - def icon(self): - """Icon to use in the frontend, if any.""" - sensor_type = self.entity_description.key - if sensor_type == "recording_mode" and self._attr_native_value is not None: - return "mdi:eye" if self._attr_native_value == STATE_ON else "mdi:eye-off" - if sensor_type == "streaming_mode" and self._attr_native_value is not 
None: - return ( - "mdi:camera" - if self._attr_native_value == STATE_ON - else "mdi:camera-off" - ) - return self.entity_description.icon - - async def async_update(self) -> None: - """Get the latest data and updates the state.""" - _LOGGER.debug("Pulling data from %s sensor", self.name) - await self._camera.update() - - if self.entity_description.key == "last_activity_time": - last_activity = await self._camera.get_last_activity(force_refresh=True) - if last_activity is not None: - last_activity_time = as_local(last_activity.end_time_utc) - self._attr_native_value = ( - f"{last_activity_time.hour:0>2}:{last_activity_time.minute:0>2}" - ) - else: - state = getattr(self._camera, self.entity_description.key, None) - if isinstance(state, bool): - self._attr_native_value = STATE_ON if state is True else STATE_OFF - else: - self._attr_native_value = state diff --git a/homeassistant/components/logi_circle/services.yaml b/homeassistant/components/logi_circle/services.yaml deleted file mode 100644 index cb855a953a6..00000000000 --- a/homeassistant/components/logi_circle/services.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# Describes the format for available Logi Circle services - -set_config: - fields: - entity_id: - selector: - entity: - integration: logi_circle - domain: camera - mode: - required: true - selector: - select: - options: - - "LED" - - "RECORDING_MODE" - value: - required: true - selector: - boolean: - -livestream_snapshot: - fields: - entity_id: - selector: - entity: - integration: logi_circle - domain: camera - filename: - required: true - example: "/tmp/snapshot_{{ entity_id }}.jpg" - selector: - text: - -livestream_record: - fields: - entity_id: - selector: - entity: - integration: logi_circle - domain: camera - filename: - required: true - example: "/tmp/snapshot_{{ entity_id }}.mp4" - selector: - text: - duration: - required: true - selector: - number: - min: 1 - max: 3600 - unit_of_measurement: seconds diff --git a/homeassistant/components/logi_circle/strings.json b/homeassistant/components/logi_circle/strings.json deleted file mode 100644 index be0f4632c25..00000000000 --- a/homeassistant/components/logi_circle/strings.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "Authentication Provider", - "description": "Pick via which authentication provider you want to authenticate with Logi Circle.", - "data": { - "flow_impl": "Provider" - } - }, - "auth": { - "title": "Authenticate with Logi Circle", - "description": "Please follow the link below and **Accept** access to your Logi Circle account, then come back and press **Submit** below.\n\n[Link]({authorization_url})" - } - }, - "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", - "follow_link": "Please follow the link and authenticate before pressing Submit." 
- }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "external_error": "Exception occurred from another flow.", - "external_setup": "Logi Circle successfully configured from another flow.", - "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]" - } - }, - "entity": { - "sensor": { - "last_activity": { - "name": "Last activity" - }, - "recording_mode": { - "name": "Recording mode" - }, - "wifi_signal_category": { - "name": "Wi-Fi signal category" - }, - "wifi_signal_strength": { - "name": "Wi-Fi signal strength" - }, - "streaming_mode": { - "name": "Streaming mode" - } - } - }, - "issues": { - "integration_removed": { - "title": "The Logi Circle integration has been deprecated and will be removed", - "description": "Logitech stopped accepting applications for access to the Logi Circle API in May 2022, and the Logi Circle integration will be removed from Home Assistant.\n\nTo resolve this issue, please remove the integration entries from your Home Assistant setup. [Click here to see your existing Logi Circle integration entries]({entries})." - } - }, - "services": { - "set_config": { - "name": "Set config", - "description": "Sets a configuration property.", - "fields": { - "entity_id": { - "name": "Entity", - "description": "Name(s) of entities to apply the operation mode to." - }, - "mode": { - "name": "[%key:common::config_flow::data::mode%]", - "description": "Operation mode. Allowed values: LED, RECORDING_MODE." - }, - "value": { - "name": "Value", - "description": "Operation value." - } - } - }, - "livestream_snapshot": { - "name": "Livestream snapshot", - "description": "Takes a snapshot from the camera's livestream. Will wake the camera from sleep if required.", - "fields": { - "entity_id": { - "name": "Entity", - "description": "Name(s) of entities to create snapshots from." - }, - "filename": { - "name": "File name", - "description": "Template of a Filename. Variable is entity_id." - } - } - }, - "livestream_record": { - "name": "Livestream record", - "description": "Takes a video recording from the camera's livestream.", - "fields": { - "entity_id": { - "name": "Entity", - "description": "Name(s) of entities to create recordings from." - }, - "filename": { - "name": "File name", - "description": "[%key:component::logi_circle::services::livestream_snapshot::fields::filename::description%]" - }, - "duration": { - "name": "Duration", - "description": "Recording duration." 
- } - } - } - } -} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 67cffd25f28..24e151d2902 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -325,7 +325,6 @@ FLOWS = { "local_ip", "local_todo", "locative", - "logi_circle", "lookin", "loqed", "luftdaten", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 3d3344c1f60..1415ab51a75 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3339,12 +3339,6 @@ "config_flow": false, "iot_class": "cloud_push" }, - "logi_circle": { - "name": "Logi Circle", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "logitech": { "name": "Logitech", "integrations": { diff --git a/requirements_all.txt b/requirements_all.txt index 76dce213ebf..1c4cd900cde 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1278,9 +1278,6 @@ lmcloud==1.1.13 # homeassistant.components.google_maps locationsharinglib==5.0.1 -# homeassistant.components.logi_circle -logi-circle==0.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d88d5607050..e252e804be0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1056,9 +1056,6 @@ linear-garage-door==0.2.9 # homeassistant.components.lamarzocco lmcloud==1.1.13 -# homeassistant.components.logi_circle -logi-circle==0.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 diff --git a/tests/components/logi_circle/__init__.py b/tests/components/logi_circle/__init__.py deleted file mode 100644 index d2e2fbb8fdb..00000000000 --- a/tests/components/logi_circle/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the Logi Circle component.""" diff --git a/tests/components/logi_circle/test_config_flow.py b/tests/components/logi_circle/test_config_flow.py deleted file mode 100644 index ab4bae02ad6..00000000000 --- a/tests/components/logi_circle/test_config_flow.py +++ /dev/null @@ -1,227 +0,0 @@ -"""Tests for Logi Circle config flow.""" - -import asyncio -from collections.abc import Generator -from http import HTTPStatus -from typing import Any -from unittest.mock import AsyncMock, MagicMock, Mock, patch - -import pytest - -from homeassistant import config_entries -from homeassistant.components.http import KEY_HASS -from homeassistant.components.logi_circle import config_flow -from homeassistant.components.logi_circle.config_flow import ( - DOMAIN, - AuthorizationFailed, - LogiCircleAuthCallbackView, -) -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import AbortFlow, FlowResultType -from homeassistant.setup import async_setup_component - -from tests.common import MockConfigEntry - - -class MockRequest: - """Mock request passed to HomeAssistantView.""" - - def __init__(self, hass: HomeAssistant, query: dict[str, Any]) -> None: - """Init request object.""" - self.app = {KEY_HASS: hass} - self.query = query - - -def init_config_flow(hass: HomeAssistant) -> config_flow.LogiCircleFlowHandler: - """Init a configuration flow.""" - config_flow.register_flow_implementation( - hass, - DOMAIN, - client_id="id", - client_secret="secret", - api_key="123", - redirect_uri="http://example.com", - sensors=None, - ) - flow = config_flow.LogiCircleFlowHandler() - flow._get_authorization_url = Mock(return_value="http://example.com") - 
flow.hass = hass - return flow - - -@pytest.fixture -def mock_logi_circle() -> Generator[MagicMock]: - """Mock logi_circle.""" - with patch( - "homeassistant.components.logi_circle.config_flow.LogiCircle" - ) as logi_circle: - future = asyncio.Future() - future.set_result({"accountId": "testId"}) - LogiCircle = logi_circle() - LogiCircle.authorize = AsyncMock(return_value=True) - LogiCircle.close = AsyncMock(return_value=True) - LogiCircle.account = future - LogiCircle.authorize_url = "http://authorize.url" - yield LogiCircle - - -@pytest.mark.usefixtures("mock_logi_circle") -async def test_step_import(hass: HomeAssistant) -> None: - """Test that we trigger import when configuring with client.""" - flow = init_config_flow(hass) - - result = await flow.async_step_import() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -@pytest.mark.usefixtures("mock_logi_circle") -async def test_full_flow_implementation(hass: HomeAssistant) -> None: - """Test registering an implementation and finishing flow works.""" - config_flow.register_flow_implementation( - hass, - "test-other", - client_id=None, - client_secret=None, - api_key=None, - redirect_uri=None, - sensors=None, - ) - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await flow.async_step_user({"flow_impl": "test-other"}) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - assert result["description_placeholders"] == { - "authorization_url": "http://example.com" - } - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Logi Circle ({})".format("testId") - - -async def test_we_reprompt_user_to_follow_link(hass: HomeAssistant) -> None: - """Test we prompt user to follow link if previously prompted.""" - flow = init_config_flow(hass) - - result = await flow.async_step_auth("dummy") - assert result["errors"]["base"] == "follow_link" - - -async def test_abort_if_no_implementation_registered(hass: HomeAssistant) -> None: - """Test we abort if no implementation is registered.""" - flow = config_flow.LogiCircleFlowHandler() - flow.hass = hass - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "missing_configuration" - - -async def test_abort_if_already_setup(hass: HomeAssistant) -> None: - """Test we abort if Logi Circle is already setup.""" - flow = init_config_flow(hass) - MockConfigEntry(domain=config_flow.DOMAIN).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": config_entries.SOURCE_USER}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - with pytest.raises(AbortFlow): - result = await flow.async_step_code() - - result = await flow.async_step_auth() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "external_setup" - - -@pytest.mark.parametrize( - ("side_effect", "error"), - [ - (TimeoutError, "authorize_url_timeout"), - (AuthorizationFailed, "invalid_auth"), - ], -) -async def test_abort_if_authorize_fails( - hass: HomeAssistant, - 
mock_logi_circle: MagicMock, - side_effect: type[Exception], - error: str, -) -> None: - """Test we abort if authorizing fails.""" - flow = init_config_flow(hass) - mock_logi_circle.authorize.side_effect = side_effect - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "external_error" - - result = await flow.async_step_auth() - assert result["errors"]["base"] == error - - -async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: - """Test we bypass picking implementation if we have one flow_imp.""" - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -@pytest.mark.usefixtures("mock_logi_circle") -async def test_gen_auth_url(hass: HomeAssistant) -> None: - """Test generating authorize URL from Logi Circle API.""" - config_flow.register_flow_implementation( - hass, - "test-auth-url", - client_id="id", - client_secret="secret", - api_key="123", - redirect_uri="http://example.com", - sensors=None, - ) - flow = config_flow.LogiCircleFlowHandler() - flow.hass = hass - flow.flow_impl = "test-auth-url" - await async_setup_component(hass, "http", {}) - - result = flow._get_authorization_url() - assert result == "http://authorize.url" - - -async def test_callback_view_rejects_missing_code(hass: HomeAssistant) -> None: - """Test the auth callback view rejects requests with no code.""" - view = LogiCircleAuthCallbackView() - resp = await view.get(MockRequest(hass, {})) - - assert resp.status == HTTPStatus.BAD_REQUEST - - -async def test_callback_view_accepts_code( - hass: HomeAssistant, mock_logi_circle: MagicMock -) -> None: - """Test the auth callback view handles requests with auth code.""" - init_config_flow(hass) - view = LogiCircleAuthCallbackView() - - resp = await view.get(MockRequest(hass, {"code": "456"})) - assert resp.status == HTTPStatus.OK - - await hass.async_block_till_done() - mock_logi_circle.authorize.assert_called_with("456") diff --git a/tests/components/logi_circle/test_init.py b/tests/components/logi_circle/test_init.py deleted file mode 100644 index d953acdf744..00000000000 --- a/tests/components/logi_circle/test_init.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Tests for the Logi Circle integration.""" - -import asyncio -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, Mock, patch - -import pytest - -from homeassistant.components.logi_circle import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.common import MockConfigEntry - - -@pytest.fixture(name="disable_platforms") -def disable_platforms_fixture() -> Generator[None]: - """Disable logi_circle platforms.""" - with patch("homeassistant.components.logi_circle.PLATFORMS", []): - yield - - -@pytest.fixture -def mock_logi_circle() -> Generator[MagicMock]: - """Mock logi_circle.""" - - auth_provider_mock = Mock() - auth_provider_mock.close = AsyncMock() - auth_provider_mock.clear_authorization = AsyncMock() - - with patch("homeassistant.components.logi_circle.LogiCircle") as logi_circle: - future = asyncio.Future() - future.set_result({"accountId": "testId"}) - LogiCircle = logi_circle() - LogiCircle.auth_provider = auth_provider_mock - LogiCircle.synchronize_cameras = AsyncMock() - yield LogiCircle - - -@pytest.mark.usefixtures("disable_platforms", 
"mock_logi_circle") -async def test_repair_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the LogiCircle configuration entry loading/unloading handles the repair.""" - config_entry = MockConfigEntry( - title="Example 1", - domain=DOMAIN, - data={ - "api_key": "blah", - "client_id": "blah", - "client_secret": "blah", - "redirect_uri": "blah", - }, - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the entry - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None From d8b13c8c02ae79e7fa763ec17754fe35fd4f1e76 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 21:31:42 +0200 Subject: [PATCH 0614/1635] Remove deprecated yaml import for gpsd (#123725) --- homeassistant/components/gpsd/config_flow.py | 4 -- homeassistant/components/gpsd/sensor.py | 60 +------------------- tests/components/gpsd/test_config_flow.py | 22 +------ 3 files changed, 4 insertions(+), 82 deletions(-) diff --git a/homeassistant/components/gpsd/config_flow.py b/homeassistant/components/gpsd/config_flow.py index 59c95d0ddbf..ac41324f857 100644 --- a/homeassistant/components/gpsd/config_flow.py +++ b/homeassistant/components/gpsd/config_flow.py @@ -39,10 +39,6 @@ class GPSDConfigFlow(ConfigFlow, domain=DOMAIN): else: return True - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_data) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/gpsd/sensor.py b/homeassistant/components/gpsd/sensor.py index e67287ae134..63f8ac4f28c 100644 --- a/homeassistant/components/gpsd/sensor.py +++ b/homeassistant/components/gpsd/sensor.py @@ -7,35 +7,17 @@ from dataclasses import dataclass import logging from typing import Any -from gps3.agps3threaded import ( - GPSD_PORT as DEFAULT_PORT, - HOST as DEFAULT_HOST, - AGPS3mechanism, -) -import voluptuous as vol +from gps3.agps3threaded import AGPS3mechanism from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import SOURCE_IMPORT -from homeassistant.const import ( - ATTR_LATITUDE, - ATTR_LONGITUDE, - ATTR_MODE, - CONF_HOST, - CONF_NAME, - CONF_PORT, - EntityCategory, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.config_validation as cv +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, ATTR_MODE, EntityCategory +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import GPSDConfigEntry from .const import DOMAIN @@ -71,14 +53,6 @@ SENSOR_TYPES: tuple[GpsdSensorDescription, ...] 
= ( ), ) -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - } -) - async def async_setup_entry( hass: HomeAssistant, @@ -98,34 +72,6 @@ async def async_setup_entry( ) -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Initialize gpsd import from config.""" - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - breaks_in_ha_version="2024.9.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "GPSD", - }, - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - ) - - class GpsdSensor(SensorEntity): """Representation of a GPS receiver available via GPSD.""" diff --git a/tests/components/gpsd/test_config_flow.py b/tests/components/gpsd/test_config_flow.py index 2d68a704119..4d832e120e4 100644 --- a/tests/components/gpsd/test_config_flow.py +++ b/tests/components/gpsd/test_config_flow.py @@ -6,7 +6,7 @@ from gps3.agps3threaded import GPSD_PORT as DEFAULT_PORT from homeassistant import config_entries from homeassistant.components.gpsd.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -52,23 +52,3 @@ async def test_connection_error(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - - -async def test_import(hass: HomeAssistant) -> None: - """Test import step.""" - with patch("homeassistant.components.gpsd.config_flow.socket") as mock_socket: - mock_connect = mock_socket.return_value.connect - mock_connect.return_value = None - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: 1234, CONF_NAME: "MyGPS"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "MyGPS" - assert result["data"] == { - CONF_HOST: HOST, - CONF_NAME: "MyGPS", - CONF_PORT: 1234, - } From f46fe7eeb2c6ed898c43297cd8269e48ef2f9825 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 21:32:28 +0200 Subject: [PATCH 0615/1635] Remove deprecated yaml import for velux (#123724) --- homeassistant/components/velux/__init__.py | 36 +----------- homeassistant/components/velux/config_flow.py | 57 ------------------- homeassistant/components/velux/strings.json | 10 ---- tests/components/velux/test_config_flow.py | 40 ++----------- 4 files changed, 5 insertions(+), 138 deletions(-) diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 1b7cbd1ff93..614ed810429 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -1,48 +1,14 @@ """Support for VELUX KLF 200 devices.""" from pyvlx import Node, PyVLX, PyVLXException -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import 
CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant, ServiceCall, callback -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity -from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER, PLATFORMS -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - } - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the velux component.""" - if DOMAIN not in config: - return True - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config[DOMAIN], - ) - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up the velux component.""" diff --git a/homeassistant/components/velux/config_flow.py b/homeassistant/components/velux/config_flow.py index c0d4ec8035b..f4bfa13b4d5 100644 --- a/homeassistant/components/velux/config_flow.py +++ b/homeassistant/components/velux/config_flow.py @@ -1,15 +1,11 @@ """Config flow for Velux integration.""" -from typing import Any - from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, LOGGER @@ -24,59 +20,6 @@ DATA_SCHEMA = vol.Schema( class VeluxConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for velux.""" - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry.""" - - def create_repair(error: str | None = None) -> None: - if error: - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{error}", - ) - else: - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Velux", - }, - ) - - for entry in self._async_current_entries(): - if entry.data[CONF_HOST] == config[CONF_HOST]: - create_repair() - return self.async_abort(reason="already_configured") - - pyvlx = PyVLX(host=config[CONF_HOST], password=config[CONF_PASSWORD]) - try: - await pyvlx.connect() - await pyvlx.disconnect() - except (PyVLXException, ConnectionError): - create_repair("cannot_connect") - return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 - create_repair("unknown") - return self.async_abort(reason="unknown") - - create_repair() - return self.async_create_entry( - title=config[CONF_HOST], - data=config, - ) - async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/velux/strings.json b/homeassistant/components/velux/strings.json index 3964c22efe2..5b7b459a3f7 100644 --- 
a/homeassistant/components/velux/strings.json +++ b/homeassistant/components/velux/strings.json @@ -17,16 +17,6 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Velux YAML configuration import cannot connect to server", - "description": "Configuring Velux using YAML is being removed but there was an connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the KLF 200." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Velux YAML configuration import failed with unknown error raised by pyvlx", - "description": "Configuring Velux using YAML is being removed but there was an unknown error importing your YAML configuration.\n\nCheck your configuration or have a look at the documentation of the integration." - } - }, "services": { "reboot_gateway": { "name": "Reboot gateway", diff --git a/tests/components/velux/test_config_flow.py b/tests/components/velux/test_config_flow.py index 8021ad52810..5f7932d358a 100644 --- a/tests/components/velux/test_config_flow.py +++ b/tests/components/velux/test_config_flow.py @@ -10,7 +10,7 @@ import pytest from pyvlx import PyVLXException from homeassistant.components.velux import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -69,22 +69,8 @@ async def test_user_errors( assert result["errors"] == {"base": error_name} -async def test_import_valid_config(hass: HomeAssistant) -> None: - """Test import initialized flow with valid config.""" - with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=DUMMY_DATA, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DUMMY_DATA[CONF_HOST] - assert result["data"] == DUMMY_DATA - - -@pytest.mark.parametrize("flow_source", [SOURCE_IMPORT, SOURCE_USER]) -async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> None: - """Test import initialized flow with a duplicate entry.""" +async def test_flow_duplicate_entry(hass: HomeAssistant) -> None: + """Test initialized flow with a duplicate entry.""" with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): conf_entry: MockConfigEntry = MockConfigEntry( domain=DOMAIN, title=DUMMY_DATA[CONF_HOST], data=DUMMY_DATA @@ -94,26 +80,8 @@ async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> No result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": flow_source}, + context={"source": SOURCE_USER}, data=DUMMY_DATA, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize(("error", "error_name"), error_types_to_test) -async def test_import_errors( - hass: HomeAssistant, error: Exception, error_name: str -) -> None: - """Test import initialized flow with exceptions.""" - with patch( - PYVLX_CONFIG_FLOW_CONNECT_FUNCTION_PATH, - side_effect=error, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=DUMMY_DATA, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error_name From 
d1dff95ac897200ce3f280bd3a3e8f6f8a34a005 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 12 Aug 2024 21:33:56 +0200 Subject: [PATCH 0616/1635] Update aioairzone to v0.8.2 (#123718) --- homeassistant/components/airzone/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index 0c32787d8ae..31ff7423ad6 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.8.1"] + "requirements": ["aioairzone==0.8.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1c4cd900cde..7f947776f09 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -179,7 +179,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.2 # homeassistant.components.airzone -aioairzone==0.8.1 +aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e252e804be0..b4f9fc0b103 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -167,7 +167,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.2 # homeassistant.components.airzone -aioairzone==0.8.1 +aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station From 178cb0659a5f6cb969526c996b09c75250e753da Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 21:35:02 +0200 Subject: [PATCH 0617/1635] Guard for no discovery info in command_line (#123717) --- .../components/command_line/binary_sensor.py | 2 ++ homeassistant/components/command_line/cover.py | 2 ++ .../components/command_line/notify.py | 4 +++- .../components/command_line/sensor.py | 2 ++ .../components/command_line/switch.py | 2 +- .../command_line/test_binary_sensor.py | 18 ++++++++++++++++++ tests/components/command_line/test_cover.py | 18 ++++++++++++++++++ tests/components/command_line/test_notify.py | 18 ++++++++++++++++++ tests/components/command_line/test_sensor.py | 18 ++++++++++++++++++ tests/components/command_line/test_switch.py | 18 ++++++++++++++++++ 10 files changed, 100 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/command_line/binary_sensor.py b/homeassistant/components/command_line/binary_sensor.py index 20deddcf14e..f5d9ad9d63d 100644 --- a/homeassistant/components/command_line/binary_sensor.py +++ b/homeassistant/components/command_line/binary_sensor.py @@ -40,6 +40,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Command line Binary Sensor.""" + if not discovery_info: + return discovery_info = cast(DiscoveryInfoType, discovery_info) binary_sensor_config = discovery_info diff --git a/homeassistant/components/command_line/cover.py b/homeassistant/components/command_line/cover.py index 2bcbb610296..d848237467b 100644 --- a/homeassistant/components/command_line/cover.py +++ b/homeassistant/components/command_line/cover.py @@ -37,6 +37,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up cover controlled by shell commands.""" + if not discovery_info: + return covers = [] discovery_info = cast(DiscoveryInfoType, 
discovery_info) diff --git a/homeassistant/components/command_line/notify.py b/homeassistant/components/command_line/notify.py index 14245b72288..4f5a4e4b499 100644 --- a/homeassistant/components/command_line/notify.py +++ b/homeassistant/components/command_line/notify.py @@ -21,8 +21,10 @@ def get_service( hass: HomeAssistant, config: ConfigType, discovery_info: DiscoveryInfoType | None = None, -) -> CommandLineNotificationService: +) -> CommandLineNotificationService | None: """Get the Command Line notification service.""" + if not discovery_info: + return None discovery_info = cast(DiscoveryInfoType, discovery_info) notify_config = discovery_info diff --git a/homeassistant/components/command_line/sensor.py b/homeassistant/components/command_line/sensor.py index 786afc8f3a7..7c31af165f9 100644 --- a/homeassistant/components/command_line/sensor.py +++ b/homeassistant/components/command_line/sensor.py @@ -48,6 +48,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Command Sensor.""" + if not discovery_info: + return discovery_info = cast(DiscoveryInfoType, discovery_info) sensor_config = discovery_info diff --git a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index 33b38ab9115..6d4670106ba 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -36,9 +36,9 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Find and return switches controlled by shell commands.""" - if not discovery_info: return + switches = [] discovery_info = cast(DiscoveryInfoType, discovery_info) entities: dict[str, dict[str, Any]] = { diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index fd726ab77a4..5d1cd845e27 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ b/tests/components/command_line/test_binary_sensor.py @@ -56,6 +56,24 @@ async def test_setup_integration_yaml( assert entity_state.name == "Test" +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "binary_sensor", + { + "binary_sensor": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index 7ed48909d79..b81d915c6d5 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -36,6 +36,24 @@ from . 
import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "cover", + { + "cover": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + async def test_no_poll_when_cover_has_no_command_state(hass: HomeAssistant) -> None: """Test that the cover does not polls when there's no state command.""" diff --git a/tests/components/command_line/test_notify.py b/tests/components/command_line/test_notify.py index c775d87fedb..6898b44f062 100644 --- a/tests/components/command_line/test_notify.py +++ b/tests/components/command_line/test_notify.py @@ -16,6 +16,24 @@ from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN from homeassistant.core import HomeAssistant +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "notify", + { + "notify": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_sensor.py b/tests/components/command_line/test_sensor.py index eeccf2c358e..f7879b334cd 100644 --- a/tests/components/command_line/test_sensor.py +++ b/tests/components/command_line/test_sensor.py @@ -27,6 +27,24 @@ from . import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "sensor", + { + "sensor": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py index c464ded34fb..549e729892c 100644 --- a/tests/components/command_line/test_switch.py +++ b/tests/components/command_line/test_switch.py @@ -37,6 +37,24 @@ from . 
import mock_asyncio_subprocess_run from tests.common import async_fire_time_changed +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "switch", + { + "switch": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + async def test_state_integration_yaml(hass: HomeAssistant) -> None: """Test with none state.""" with tempfile.TemporaryDirectory() as tempdirname: From b09c6654ecda68711d7f3cefe984b3e7d6c7138f Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 21:42:00 +0200 Subject: [PATCH 0618/1635] Replace not needed guard in command_line with type check (#123722) --- homeassistant/components/command_line/cover.py | 10 +++++----- homeassistant/components/command_line/switch.py | 12 ++++++------ 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/command_line/cover.py b/homeassistant/components/command_line/cover.py index d848237467b..8ddfd399ba8 100644 --- a/homeassistant/components/command_line/cover.py +++ b/homeassistant/components/command_line/cover.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from datetime import datetime, timedelta -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from homeassistant.components.cover import CoverEntity from homeassistant.const import ( @@ -141,10 +141,10 @@ class CommandCover(ManualTriggerEntity, CoverEntity): async def _async_query_state(self) -> str | None: """Query for the state.""" - if self._command_state: - LOGGER.debug("Running state value command: %s", self._command_state) - return await async_check_output_or_log(self._command_state, self._timeout) - return None + if TYPE_CHECKING: + assert self._command_state + LOGGER.debug("Running state value command: %s", self._command_state) + return await async_check_output_or_log(self._command_state, self._timeout) async def _update_entity_state(self, now: datetime | None = None) -> None: """Update the state of the entity.""" diff --git a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index 6d4670106ba..e42c2226cf2 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from datetime import datetime, timedelta -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity from homeassistant.const import ( @@ -142,11 +142,11 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): async def _async_query_state(self) -> str | int | None: """Query for state.""" - if self._command_state: - if self._value_template: - return await self._async_query_state_value(self._command_state) - return await self._async_query_state_code(self._command_state) - return None + if TYPE_CHECKING: + assert self._command_state + if self._value_template: + return await self._async_query_state_value(self._command_state) + return await self._async_query_state_code(self._command_state) async def _update_entity_state(self, now: datetime | None = None) -> None: """Update the state of the entity.""" From 7cf5d12ec09d075733ee5c5bd917d27049affc88 Mon Sep 17 00:00:00 2001 From: Noah Husby 
<32528627+noahhusby@users.noreply.github.com> Date: Mon, 12 Aug 2024 15:45:05 -0400 Subject: [PATCH 0619/1635] Fix secondary russound controller discovery failure (#123590) --- .../components/russound_rio/manifest.json | 2 +- .../components/russound_rio/media_player.py | 17 ++++++++++++++--- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 17 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index e7bb99010ee..67a01239615 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], - "requirements": ["aiorussound==2.2.2"] + "requirements": ["aiorussound==2.2.3"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 1489f12e59c..ff0d9e006c0 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -128,11 +128,18 @@ class RussoundZoneDevice(MediaPlayerEntity): self._zone = zone self._sources = sources self._attr_name = zone.name - self._attr_unique_id = f"{self._controller.mac_address}-{zone.device_str()}" + primary_mac_address = ( + self._controller.mac_address + or self._controller.parent_controller.mac_address + ) + self._attr_unique_id = f"{primary_mac_address}-{zone.device_str()}" + device_identifier = ( + self._controller.mac_address + or f"{primary_mac_address}-{self._controller.controller_id}" + ) self._attr_device_info = DeviceInfo( # Use MAC address of Russound device as identifier - identifiers={(DOMAIN, self._controller.mac_address)}, - connections={(CONNECTION_NETWORK_MAC, self._controller.mac_address)}, + identifiers={(DOMAIN, device_identifier)}, manufacturer="Russound", name=self._controller.controller_type, model=self._controller.controller_type, @@ -143,6 +150,10 @@ class RussoundZoneDevice(MediaPlayerEntity): DOMAIN, self._controller.parent_controller.mac_address, ) + else: + self._attr_device_info["connections"] = { + (CONNECTION_NETWORK_MAC, self._controller.mac_address) + } for flag, feature in MP_FEATURES_BY_FLAG.items(): if flag in zone.instance.supported_features: self._attr_supported_features |= feature diff --git a/requirements_all.txt b/requirements_all.txt index 7f947776f09..f6106738de4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -350,7 +350,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.2.2 +aiorussound==2.2.3 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b4f9fc0b103..82f62a21a52 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -332,7 +332,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.2.2 +aiorussound==2.2.3 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 From 3c864322f7dfa68f4c4a07bf656611de9701edc1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 12 Aug 2024 21:49:10 +0200 Subject: [PATCH 0620/1635] Combine requirements files in CI (#123687) --- .github/workflows/ci.yaml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml 
b/.github/workflows/ci.yaml index 3df1bf5b6aa..0f0850ade1a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -37,7 +37,7 @@ on: type: boolean env: - CACHE_VERSION: 9 + CACHE_VERSION: 10 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 8 HA_SHORT_VERSION: "2024.9" @@ -514,8 +514,7 @@ jobs: uv pip install -U "pip>=21.3.1" setuptools wheel uv pip install -r requirements.txt python -m script.gen_requirements_all ci - uv pip install -r requirements_all_pytest.txt - uv pip install -r requirements_test.txt + uv pip install -r requirements_all_pytest.txt -r requirements_test.txt uv pip install -e . --config-settings editable_mode=compat hassfest: From b62f216c53ee832f4eeebe6ae74f3803c03b727f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:49:46 +0200 Subject: [PATCH 0621/1635] Remove unnecessary assignment of Template.hass from telnet (#123694) --- homeassistant/components/telnet/switch.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/telnet/switch.py b/homeassistant/components/telnet/switch.py index 3b4b9e137d1..82d8905a775 100644 --- a/homeassistant/components/telnet/switch.py +++ b/homeassistant/components/telnet/switch.py @@ -67,11 +67,6 @@ def setup_platform( switches = [] for object_id, device_config in devices.items(): - value_template: Template | None = device_config.get(CONF_VALUE_TEMPLATE) - - if value_template is not None: - value_template.hass = hass - switches.append( TelnetSwitch( object_id, @@ -81,7 +76,7 @@ def setup_platform( device_config[CONF_COMMAND_ON], device_config[CONF_COMMAND_OFF], device_config.get(CONF_COMMAND_STATE), - value_template, + device_config.get(CONF_VALUE_TEMPLATE), device_config[CONF_TIMEOUT], ) ) From 9d67956fc83d9fccd03f84f5de25f3664880b049 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:49:58 +0200 Subject: [PATCH 0622/1635] Remove unnecessary assignment of Template.hass from tensorflow (#123695) --- homeassistant/components/tensorflow/image_processing.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/tensorflow/image_processing.py b/homeassistant/components/tensorflow/image_processing.py index 85fe6439f1c..f13c0b24d0b 100644 --- a/homeassistant/components/tensorflow/image_processing.py +++ b/homeassistant/components/tensorflow/image_processing.py @@ -261,8 +261,6 @@ class TensorFlowImageProcessor(ImageProcessingEntity): area_config.get(CONF_RIGHT), ] - template.attach(hass, self._file_out) - self._matches = {} self._total_matches = 0 self._last_image = None From bf55cc605ac31c5e4607747988695fc32c25dd7c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:50:11 +0200 Subject: [PATCH 0623/1635] Remove unnecessary assignment of Template.hass from velbus (#123696) --- homeassistant/components/velbus/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index d47444e3994..685f8b49500 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -119,7 +119,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def set_memo_text(call: ServiceCall) -> None: """Handle Memo Text service call.""" memo_text = call.data[CONF_MEMO_TEXT] - memo_text.hass = hass await ( hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] .get_module(call.data[CONF_ADDRESS]) From 6bde80ad65f6ef39d2ee7ec0a0a4b25bae1a5d11 Mon Sep 17 00:00:00 2001 
From: Erik Montnemery Date: Mon, 12 Aug 2024 21:50:23 +0200 Subject: [PATCH 0624/1635] Remove unnecessary assignment of Template.hass from esphome (#123701) --- homeassistant/components/esphome/manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index 4b4537d147f..7629d1fa9cd 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -197,9 +197,9 @@ class ESPHomeManager: if service.data_template: try: data_template = { - key: Template(value) for key, value in service.data_template.items() + key: Template(value, hass) + for key, value in service.data_template.items() } - template.attach(hass, data_template) service_data.update( template.render_complex(data_template, service.variables) ) From 5e75c5faff9fc4f029496571b6c92dfae0f6f8ac Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:50:34 +0200 Subject: [PATCH 0625/1635] Remove unnecessary assignment of Template.hass from mobile_app (#123702) --- homeassistant/components/mobile_app/device_action.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/mobile_app/device_action.py b/homeassistant/components/mobile_app/device_action.py index bebdef0e917..dccff926b34 100644 --- a/homeassistant/components/mobile_app/device_action.py +++ b/homeassistant/components/mobile_app/device_action.py @@ -64,7 +64,6 @@ async def async_call_action_from_config( continue value_template = config[key] - template.attach(hass, value_template) try: service_data[key] = template.render_complex(value_template, variables) From 6caec897931ad03bc2982599be3512b18c027708 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:50:50 +0200 Subject: [PATCH 0626/1635] Remove unnecessary assignment of Template.hass from trigger entity helper (#123709) --- homeassistant/helpers/trigger_template_entity.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/helpers/trigger_template_entity.py b/homeassistant/helpers/trigger_template_entity.py index 7b1c4ab8078..7f8ad41d7bb 100644 --- a/homeassistant/helpers/trigger_template_entity.py +++ b/homeassistant/helpers/trigger_template_entity.py @@ -30,7 +30,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . 
import config_validation as cv from .entity import Entity -from .template import attach as template_attach, render_complex +from .template import render_complex from .typing import ConfigType CONF_AVAILABILITY = "availability" @@ -157,11 +157,6 @@ class TriggerBaseEntity(Entity): """Return extra attributes.""" return self._rendered.get(CONF_ATTRIBUTES) - async def async_added_to_hass(self) -> None: - """Handle being added to Home Assistant.""" - await super().async_added_to_hass() - template_attach(self.hass, self._config) - def _set_unique_id(self, unique_id: str | None) -> None: """Set unique id.""" self._unique_id = unique_id From c5e87108895995542c8e71869b8bef048b177b29 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:51:02 +0200 Subject: [PATCH 0627/1635] Remove unnecessary assignment of Template.hass from service helper (#123710) --- homeassistant/helpers/service.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 58cd4657301..be4974906bb 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -365,7 +365,6 @@ def async_prepare_call_from_config( if isinstance(domain_service, template.Template): try: - domain_service.hass = hass domain_service = domain_service.async_render(variables) domain_service = cv.service(domain_service) except TemplateError as ex: @@ -384,10 +383,8 @@ def async_prepare_call_from_config( conf = config[CONF_TARGET] try: if isinstance(conf, template.Template): - conf.hass = hass target.update(conf.async_render(variables)) else: - template.attach(hass, conf) target.update(template.render_complex(conf, variables)) if CONF_ENTITY_ID in target: @@ -413,7 +410,6 @@ def async_prepare_call_from_config( if conf not in config: continue try: - template.attach(hass, config[conf]) render = template.render_complex(config[conf], variables) if not isinstance(render, dict): raise HomeAssistantError( From c49a31e0deb10409f1e28226b008a57ddfddea77 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 12 Aug 2024 21:51:24 +0200 Subject: [PATCH 0628/1635] Remove unnecessary assignment of Template.hass from script variables helper (#123712) --- homeassistant/helpers/script_variables.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/helpers/script_variables.py b/homeassistant/helpers/script_variables.py index 043101b9b86..2b4507abd64 100644 --- a/homeassistant/helpers/script_variables.py +++ b/homeassistant/helpers/script_variables.py @@ -36,7 +36,6 @@ class ScriptVariables: """ if self._has_template is None: self._has_template = template.is_complex(self.variables) - template.attach(hass, self.variables) if not self._has_template: if render_as_defaults: From 31dcc6f685778fd1630d15344121193e192ad2ef Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 12 Aug 2024 14:51:45 -0500 Subject: [PATCH 0629/1635] Bump protobuf to 4.25.4 (#123675) --- homeassistant/package_constraints.txt | 2 +- script/gen_requirements_all.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 6c93e6b5f1c..8cad4d2037a 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -142,7 +142,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.1 +protobuf==4.25.4 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index f9bd379b7ce..522a626754d 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -164,7 +164,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.1 +protobuf==4.25.4 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder From b0d1d7bdb23376f5e9f0a5f597c337130bc3a7b8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 12 Aug 2024 21:53:32 +0200 Subject: [PATCH 0630/1635] Improve type hints in lcn tests (#123648) --- tests/components/lcn/conftest.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 2884bc833c2..24447abf77a 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -1,5 +1,6 @@ """Test configuration and mocks for LCN component.""" +from collections.abc import AsyncGenerator import json from unittest.mock import AsyncMock, patch @@ -10,8 +11,9 @@ from pypck.module import GroupConnection, ModuleConnection import pytest from homeassistant.components.lcn.const import DOMAIN -from homeassistant.components.lcn.helpers import generate_unique_id +from homeassistant.components.lcn.helpers import AddressType, generate_unique_id from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -42,13 +44,13 @@ class MockGroupConnection(GroupConnection): class MockPchkConnectionManager(PchkConnectionManager): """Fake connection handler.""" - async def async_connect(self, timeout=30): + async def async_connect(self, timeout: int = 30) -> None: """Mock establishing a connection to PCHK.""" self.authentication_completed_future.set_result(True) self.license_error_future.set_result(True) self.segment_scan_completed_event.set() - async def async_close(self): + async def async_close(self) -> None: """Mock closing a connection to PCHK.""" @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) @@ -60,7 +62,7 @@ class MockPchkConnectionManager(PchkConnectionManager): send_command = AsyncMock() -def create_config_entry(name): +def create_config_entry(name: str) -> MockConfigEntry: """Set up config entries with configuration data.""" fixture_filename = f"lcn/config_entry_{name}.json" entry_data = json.loads(load_fixture(fixture_filename)) @@ -78,19 +80,21 @@ def create_config_entry(name): @pytest.fixture(name="entry") -def create_config_entry_pchk(): +def 
create_config_entry_pchk() -> MockConfigEntry: """Return one specific config entry.""" return create_config_entry("pchk") @pytest.fixture(name="entry2") -def create_config_entry_myhome(): +def create_config_entry_myhome() -> MockConfigEntry: """Return one specific config entry.""" return create_config_entry("myhome") @pytest.fixture(name="lcn_connection") -async def init_integration(hass, entry): +async def init_integration( + hass: HomeAssistant, entry: MockConfigEntry +) -> AsyncGenerator[MockPchkConnectionManager]: """Set up the LCN integration in Home Assistant.""" lcn_connection = None @@ -109,7 +113,7 @@ async def init_integration(hass, entry): yield lcn_connection -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up the LCN component.""" fixture_filename = "lcn/config.json" config_data = json.loads(load_fixture(fixture_filename)) @@ -118,7 +122,9 @@ async def setup_component(hass): await hass.async_block_till_done() -def get_device(hass, entry, address): +def get_device( + hass: HomeAssistant, entry: MockConfigEntry, address: AddressType +) -> dr.DeviceEntry: """Get LCN device for specified address.""" device_registry = dr.async_get(hass) identifiers = {(DOMAIN, generate_unique_id(entry.entry_id, address))} From 416d2fb82a2e09a337cafbc857ac81a7ab10222b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 12 Aug 2024 21:55:44 +0200 Subject: [PATCH 0631/1635] Improve type hints in locative tests (#123643) --- tests/components/locative/test_init.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/tests/components/locative/test_init.py b/tests/components/locative/test_init.py index 305497ebbd6..8fd239ee398 100644 --- a/tests/components/locative/test_init.py +++ b/tests/components/locative/test_init.py @@ -38,7 +38,7 @@ async def locative_client( @pytest.fixture -async def webhook_id(hass, locative_client): +async def webhook_id(hass: HomeAssistant, locative_client: TestClient) -> str: """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -56,7 +56,7 @@ async def webhook_id(hass, locative_client): return result["result"].data["webhook_id"] -async def test_missing_data(locative_client, webhook_id) -> None: +async def test_missing_data(locative_client: TestClient, webhook_id: str) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -116,7 +116,9 @@ async def test_missing_data(locative_client, webhook_id) -> None: assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY -async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_enter_and_exit( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -186,7 +188,7 @@ async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) async def test_exit_after_enter( - hass: HomeAssistant, locative_client, webhook_id + hass: HomeAssistant, locative_client: TestClient, webhook_id: str ) -> None: """Test when an exit message comes after an enter message.""" url = f"/api/webhook/{webhook_id}" @@ -229,7 +231,9 @@ async def test_exit_after_enter( assert state.state == "work" -async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_exit_first( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test when an exit message is 
sent first on a new device.""" url = f"/api/webhook/{webhook_id}" @@ -250,7 +254,9 @@ async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> N assert state.state == "not_home" -async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_two_devices( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test updating two different devices.""" url = f"/api/webhook/{webhook_id}" @@ -294,7 +300,7 @@ async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> reason="The device_tracker component does not support unloading yet." ) async def test_load_unload_entry( - hass: HomeAssistant, locative_client, webhook_id + hass: HomeAssistant, locative_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" From 52f52394d5f1888f1f03f7ed74a1a30e415f69ac Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 12 Aug 2024 22:36:36 +0200 Subject: [PATCH 0632/1635] Remove demo mailbox (#123741) --- homeassistant/components/demo/__init__.py | 1 - homeassistant/components/demo/mailbox.py | 95 ----------------------- 2 files changed, 96 deletions(-) delete mode 100644 homeassistant/components/demo/mailbox.py diff --git a/homeassistant/components/demo/__init__.py b/homeassistant/components/demo/__init__.py index 371b783b653..d088dfb140b 100644 --- a/homeassistant/components/demo/__init__.py +++ b/homeassistant/components/demo/__init__.py @@ -55,7 +55,6 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [ COMPONENTS_WITH_DEMO_PLATFORM = [ Platform.TTS, - Platform.MAILBOX, Platform.IMAGE_PROCESSING, Platform.DEVICE_TRACKER, ] diff --git a/homeassistant/components/demo/mailbox.py b/homeassistant/components/demo/mailbox.py deleted file mode 100644 index e0cdd05782d..00000000000 --- a/homeassistant/components/demo/mailbox.py +++ /dev/null @@ -1,95 +0,0 @@ -"""Support for a demo mailbox.""" - -from __future__ import annotations - -from hashlib import sha1 -import logging -import os -from typing import Any - -from homeassistant.components.mailbox import CONTENT_TYPE_MPEG, Mailbox, StreamError -from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import dt as dt_util - -_LOGGER = logging.getLogger(__name__) - -MAILBOX_NAME = "DemoMailbox" - - -async def async_get_handler( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> Mailbox: - """Set up the Demo mailbox.""" - return DemoMailbox(hass, MAILBOX_NAME) - - -class DemoMailbox(Mailbox): - """Demo Mailbox.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Demo mailbox.""" - super().__init__(hass, name) - self._messages: dict[str, dict[str, Any]] = {} - txt = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - for idx in range(10): - msgtime = int( - dt_util.as_timestamp(dt_util.utcnow()) - 3600 * 24 * (10 - idx) - ) - msgtxt = f"Message {idx + 1}. 
{txt * (1 + idx * (idx % 2))}" - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - msg = { - "info": { - "origtime": msgtime, - "callerid": "John Doe <212-555-1212>", - "duration": "10", - }, - "text": msgtxt, - "sha": msgsha, - } - self._messages[msgsha] = msg - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - def _get_media(self) -> bytes: - """Return the media blob for the msgid.""" - audio_path = os.path.join(os.path.dirname(__file__), "tts.mp3") - with open(audio_path, "rb") as file: - return file.read() - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - if msgid not in self._messages: - raise StreamError("Message not found") - return await self.hass.async_add_executor_job(self._get_media) - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - return sorted( - self._messages.values(), - key=lambda item: item["info"]["origtime"], - reverse=True, - ) - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - if msgid in self._messages: - _LOGGER.info("Deleting: %s", msgid) - del self._messages[msgid] - self.async_update() - return True From 732b4b95dba76ac547bd71c8d08c9bb0df1f9acd Mon Sep 17 00:00:00 2001 From: "David F. Mulcahey" Date: Mon, 12 Aug 2024 16:38:59 -0400 Subject: [PATCH 0633/1635] Bump ZHA lib to 0.0.31 (#123743) --- homeassistant/components/zha/entity.py | 2 +- homeassistant/components/zha/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zha/entity.py b/homeassistant/components/zha/entity.py index 6db0ffad964..348e545f1c4 100644 --- a/homeassistant/components/zha/entity.py +++ b/homeassistant/components/zha/entity.py @@ -62,7 +62,7 @@ class ZHAEntity(LogMixin, RestoreEntity, Entity): @property def available(self) -> bool: """Return entity availability.""" - return self.entity_data.device_proxy.device.available + return self.entity_data.entity.available @property def device_info(self) -> DeviceInfo: diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index bb1480b43e1..a5e57fcb1ec 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.30"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.31"], "usb": [ { "vid": "10C4", diff --git a/requirements_all.txt b/requirements_all.txt index f6106738de4..eb79831820c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2986,7 +2986,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.30 +zha==0.0.31 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 82f62a21a52..43764d7518b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2363,7 +2363,7 @@ zeroconf==0.132.2 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.30 +zha==0.0.31 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 From 
831c28e890b1aa89e5c689ae9f22e7dbefee1af1 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 12 Aug 2024 15:40:35 -0500
Subject: [PATCH 0634/1635] Bump yalexs to 6.5.0 (#123739)

---
 homeassistant/components/august/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json
index 293c94c9629..5a911eee5e5 100644
--- a/homeassistant/components/august/manifest.json
+++ b/homeassistant/components/august/manifest.json
@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==6.4.3", "yalexs-ble==2.4.3"]
+  "requirements": ["yalexs==6.5.0", "yalexs-ble==2.4.3"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index eb79831820c..8c57eabb585 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -2953,7 +2953,7 @@ yalesmartalarmclient==0.3.9
 yalexs-ble==2.4.3
 
 # homeassistant.components.august
-yalexs==6.4.3
+yalexs==6.5.0
 
 # homeassistant.components.yeelight
 yeelight==0.7.14
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 43764d7518b..8d2a53745a6 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -2336,7 +2336,7 @@ yalesmartalarmclient==0.3.9
 yalexs-ble==2.4.3
 
 # homeassistant.components.august
-yalexs==6.4.3
+yalexs==6.5.0
 
 # homeassistant.components.yeelight
 yeelight==0.7.14

From e06e9bb39c5c95ff1b4cd331960e898c825474cb Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 12 Aug 2024 15:41:19 -0500
Subject: [PATCH 0635/1635] Bump pyatv to 0.15.0 (#123674)

---
 homeassistant/components/apple_tv/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/apple_tv/manifest.json b/homeassistant/components/apple_tv/manifest.json
index 1f7ac45372e..9a053829516 100644
--- a/homeassistant/components/apple_tv/manifest.json
+++ b/homeassistant/components/apple_tv/manifest.json
@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/apple_tv",
   "iot_class": "local_push",
   "loggers": ["pyatv", "srptools"],
-  "requirements": ["pyatv==0.14.3"],
+  "requirements": ["pyatv==0.15.0"],
   "zeroconf": [
     "_mediaremotetv._tcp.local.",
     "_companion-link._tcp.local.",
diff --git a/requirements_all.txt b/requirements_all.txt
index 8c57eabb585..02369d191cb 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1732,7 +1732,7 @@ pyatag==0.3.5.3
 pyatmo==8.0.3
 
 # homeassistant.components.apple_tv
-pyatv==0.14.3
+pyatv==0.15.0
 
 # homeassistant.components.aussie_broadband
 pyaussiebb==0.0.15
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 8d2a53745a6..997b44c252a 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1403,7 +1403,7 @@ pyatag==0.3.5.3
 pyatmo==8.0.3
 
 # homeassistant.components.apple_tv
-pyatv==0.14.3
+pyatv==0.15.0
 
 # homeassistant.components.aussie_broadband
 pyaussiebb==0.0.15

From 7eccb38851b1e328172d8fa4cd9e92e23bf4c6a6 Mon Sep 17 00:00:00 2001
From: Franck Nijhof
Date: Mon, 12 Aug 2024 23:23:34 +0200
Subject: [PATCH 0636/1635] Update wled to 0.20.2 (#123746)

---
 homeassistant/components/wled/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/wled/manifest.json b/homeassistant/components/wled/manifest.json
index efeb414438d..71939127356 100644
--- a/homeassistant/components/wled/manifest.json
+++ b/homeassistant/components/wled/manifest.json
@@ -7,6 +7,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "quality_scale": "platinum",
-  "requirements": ["wled==0.20.1"],
+  "requirements": ["wled==0.20.2"],
   "zeroconf": ["_wled._tcp.local."]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 02369d191cb..70edbeec552 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -2915,7 +2915,7 @@ wiffi==1.1.2
 wirelesstagpy==0.8.1
 
 # homeassistant.components.wled
-wled==0.20.1
+wled==0.20.2
 
 # homeassistant.components.wolflink
 wolf-comm==0.0.9
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 997b44c252a..fdaa9696d44 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -2301,7 +2301,7 @@ whois==0.9.27
 wiffi==1.1.2
 
 # homeassistant.components.wled
-wled==0.20.1
+wled==0.20.2
 
 # homeassistant.components.wolflink
 wolf-comm==0.0.9

From dee06d57770b4724337d013946b651554fd458b9 Mon Sep 17 00:00:00 2001
From: Allen Porter
Date: Mon, 12 Aug 2024 23:47:47 -0700
Subject: [PATCH 0637/1635] Bump python-nest-sdm to 4.0.6 (#123762)

---
 homeassistant/components/nest/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json
index d3ba571e65a..fbe5ddb6534 100644
--- a/homeassistant/components/nest/manifest.json
+++ b/homeassistant/components/nest/manifest.json
@@ -20,5 +20,5 @@
   "iot_class": "cloud_push",
   "loggers": ["google_nest_sdm"],
   "quality_scale": "platinum",
-  "requirements": ["google-nest-sdm==4.0.5"]
+  "requirements": ["google-nest-sdm==4.0.6"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 70edbeec552..0d7fa9d6710 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -986,7 +986,7 @@ google-cloud-texttospeech==2.16.3
 google-generativeai==0.6.0
 
 # homeassistant.components.nest
-google-nest-sdm==4.0.5
+google-nest-sdm==4.0.6
 
 # homeassistant.components.google_travel_time
 googlemaps==2.5.1
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index fdaa9696d44..6ccc0145295 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -836,7 +836,7 @@ google-cloud-pubsub==2.13.11
 google-generativeai==0.6.0
 
 # homeassistant.components.nest
-google-nest-sdm==4.0.5
+google-nest-sdm==4.0.6
 
 # homeassistant.components.google_travel_time
 googlemaps==2.5.1

From a988cd050bdba74dd47d403cd0b2327ff8b64895 Mon Sep 17 00:00:00 2001
From: Michael <35783820+mib1185@users.noreply.github.com>
Date: Tue, 13 Aug 2024 08:50:02 +0200
Subject: [PATCH 0638/1635] Fix error message in html5 (#123749)

---
 homeassistant/components/html5/notify.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/homeassistant/components/html5/notify.py b/homeassistant/components/html5/notify.py
index 798589d2807..8082ca37aa3 100644
--- a/homeassistant/components/html5/notify.py
+++ b/homeassistant/components/html5/notify.py
@@ -533,7 +533,7 @@ class HTML5NotificationService(BaseNotificationService):
             elif response.status_code > 399:
                 _LOGGER.error(
                     "There was an issue sending the notification %s: %s",
-                    response.status,
+                    response.status_code,
                     response.text,
                 )
 

From 6406065e1f43cc92635619eb44d453eeafa03067 Mon Sep 17 00:00:00 2001
From: Ian
Date: Tue, 13 Aug 2024 02:51:41 -0400
Subject: [PATCH 0639/1635] Bump py-nextbusnext to 2.0.4 (#123750)

---
 homeassistant/components/nextbus/manifest.json | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/nextbus/manifest.json b/homeassistant/components/nextbus/manifest.json
index 27fec1bfba9..d22ba66d860 100644
--- a/homeassistant/components/nextbus/manifest.json
+++ b/homeassistant/components/nextbus/manifest.json
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/nextbus",
   "iot_class": "cloud_polling",
   "loggers": ["py_nextbus"],
-  "requirements": ["py-nextbusnext==2.0.3"]
+  "requirements": ["py-nextbusnext==2.0.4"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 0d7fa9d6710..8e574d22837 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1650,7 +1650,7 @@ py-madvr2==1.6.29
 py-melissa-climate==2.1.4
 
 # homeassistant.components.nextbus
-py-nextbusnext==2.0.3
+py-nextbusnext==2.0.4
 
 # homeassistant.components.nightscout
 py-nightscout==1.2.2
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 6ccc0145295..956e945d92d 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1345,7 +1345,7 @@ py-madvr2==1.6.29
 py-melissa-climate==2.1.4
 
 # homeassistant.components.nextbus
-py-nextbusnext==2.0.3
+py-nextbusnext==2.0.4
 
 # homeassistant.components.nightscout
 py-nightscout==1.2.2

From 86322973d0de9f220215548c049ebabacd9720e7 Mon Sep 17 00:00:00 2001
From: Jan Rieger
Date: Tue, 13 Aug 2024 10:37:43 +0200
Subject: [PATCH 0640/1635] Migrate GPSD extra state attributes to separate states (#122193)

* Migrate GPSD extra state attributes to separate states
* Use common translations
* Address feedback
---
 homeassistant/components/gpsd/icons.json | 9 +++
 homeassistant/components/gpsd/sensor.py | 85 ++++++++++++++++++++--
 homeassistant/components/gpsd/strings.json | 26 +++++--
 3 files changed, 108 insertions(+), 12 deletions(-)

diff --git a/homeassistant/components/gpsd/icons.json b/homeassistant/components/gpsd/icons.json
index b29640e0001..59d904f918c 100644
--- a/homeassistant/components/gpsd/icons.json
+++ b/homeassistant/components/gpsd/icons.json
@@ -7,6 +7,15 @@
           "2d_fix": "mdi:crosshairs-gps",
           "3d_fix": "mdi:crosshairs-gps"
         }
+      },
+      "latitude": {
+        "default": "mdi:latitude"
+      },
+      "longitude": {
+        "default": "mdi:longitude"
+      },
+      "elevation": {
+        "default": "mdi:arrow-up-down"
       }
     }
   }
diff --git a/homeassistant/components/gpsd/sensor.py b/homeassistant/components/gpsd/sensor.py
index 63f8ac4f28c..1bac41ecaae 100644
--- a/homeassistant/components/gpsd/sensor.py
+++ b/homeassistant/components/gpsd/sensor.py
@@ -4,6 +4,7 @@ from __future__ import annotations
 
 from collections.abc import Callable
 from dataclasses import dataclass
+from datetime import datetime
 import logging
 from typing import Any
 
@@ -14,10 +15,20 @@ from homeassistant.components.sensor import (
     SensorEntity,
     SensorEntityDescription,
 )
-from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, ATTR_MODE, EntityCategory
+from homeassistant.const import (
+    ATTR_LATITUDE,
+    ATTR_LONGITUDE,
+    ATTR_MODE,
+    ATTR_TIME,
+    EntityCategory,
+    UnitOfLength,
+    UnitOfSpeed,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
+from homeassistant.helpers.typing import StateType
+from homeassistant.util import dt as dt_util
 
 from .
import GPSDConfigEntry from .const import DOMAIN @@ -38,19 +49,73 @@ _MODE_VALUES = {2: "2d_fix", 3: "3d_fix"} class GpsdSensorDescription(SensorEntityDescription): """Class describing GPSD sensor entities.""" - value_fn: Callable[[AGPS3mechanism], str | None] + value_fn: Callable[[AGPS3mechanism], StateType | datetime] SENSOR_TYPES: tuple[GpsdSensorDescription, ...] = ( GpsdSensorDescription( - key="mode", - translation_key="mode", + key=ATTR_MODE, + translation_key=ATTR_MODE, name=None, entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.ENUM, options=list(_MODE_VALUES.values()), value_fn=lambda agps_thread: _MODE_VALUES.get(agps_thread.data_stream.mode), ), + GpsdSensorDescription( + key=ATTR_LATITUDE, + translation_key=ATTR_LATITUDE, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda agps_thread: agps_thread.data_stream.lat, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_LONGITUDE, + translation_key=ATTR_LONGITUDE, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda agps_thread: agps_thread.data_stream.lon, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_ELEVATION, + translation_key=ATTR_ELEVATION, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.METERS, + value_fn=lambda agps_thread: agps_thread.data_stream.alt, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_TIME, + translation_key=ATTR_TIME, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda agps_thread: dt_util.parse_datetime( + agps_thread.data_stream.time + ), + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_SPEED, + translation_key=ATTR_SPEED, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + value_fn=lambda agps_thread: agps_thread.data_stream.speed, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_CLIMB, + translation_key=ATTR_CLIMB, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + value_fn=lambda agps_thread: agps_thread.data_stream.climb, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), ) @@ -96,13 +161,19 @@ class GpsdSensor(SensorEntity): self.agps_thread = agps_thread @property - def native_value(self) -> str | None: + def native_value(self) -> StateType | datetime: """Return the state of GPSD.""" - return self.entity_description.value_fn(self.agps_thread) + value = self.entity_description.value_fn(self.agps_thread) + return None if value == "n/a" else value + # Deprecated since Home Assistant 2024.9.0 + # Can be removed completely in 2025.3.0 @property - def extra_state_attributes(self) -> dict[str, Any]: + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the GPS.""" + if self.entity_description.key != ATTR_MODE: + return None + return { ATTR_LATITUDE: self.agps_thread.data_stream.lat, ATTR_LONGITUDE: self.agps_thread.data_stream.lon, diff --git a/homeassistant/components/gpsd/strings.json b/homeassistant/components/gpsd/strings.json index 20dc283a8bb..867edf0b5a8 100644 --- a/homeassistant/components/gpsd/strings.json +++ 
b/homeassistant/components/gpsd/strings.json @@ -18,7 +18,15 @@ }, "entity": { "sensor": { + "latitude": { "name": "[%key:common::config_flow::data::latitude%]" }, + "longitude": { "name": "[%key:common::config_flow::data::longitude%]" }, + "elevation": { "name": "[%key:common::config_flow::data::elevation%]" }, + "time": { + "name": "[%key:component::time_date::selector::display_options::options::time%]" + }, + "climb": { "name": "Climb" }, "mode": { + "name": "[%key:common::config_flow::data::mode%]", "state": { "2d_fix": "2D Fix", "3d_fix": "3D Fix" @@ -28,11 +36,19 @@ "longitude": { "name": "[%key:common::config_flow::data::longitude%]" }, - "elevation": { "name": "Elevation" }, - "gps_time": { "name": "Time" }, - "speed": { "name": "Speed" }, - "climb": { "name": "Climb" }, - "mode": { "name": "Mode" } + "elevation": { + "name": "[%key:common::config_flow::data::elevation%]" + }, + "gps_time": { + "name": "[%key:component::time_date::selector::display_options::options::time%]" + }, + "speed": { + "name": "[%key:component::sensor::entity_component::speed::name%]" + }, + "climb": { + "name": "[%key:component::gpsd::entity::sensor::climb::name%]" + }, + "mode": { "name": "[%key:common::config_flow::data::mode%]" } } } } From 04570edb3fe2127a699c5e7e826d157df5737632 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:00:33 +0200 Subject: [PATCH 0641/1635] Remove unnecessary assignment of Template.hass from generic camera (#123767) --- homeassistant/components/generic/camera.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/generic/camera.py b/homeassistant/components/generic/camera.py index 80971760b85..3aac5145ca5 100644 --- a/homeassistant/components/generic/camera.py +++ b/homeassistant/components/generic/camera.py @@ -28,10 +28,10 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv, template as template_helper from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.template import Template from . 
import DOMAIN from .const import ( @@ -91,18 +91,11 @@ class GenericCamera(Camera): self._password = device_info.get(CONF_PASSWORD) self._name = device_info.get(CONF_NAME, title) self._still_image_url = device_info.get(CONF_STILL_IMAGE_URL) - if ( - not isinstance(self._still_image_url, template_helper.Template) - and self._still_image_url - ): - self._still_image_url = cv.template(self._still_image_url) if self._still_image_url: - self._still_image_url.hass = hass + self._still_image_url = Template(self._still_image_url, hass) self._stream_source = device_info.get(CONF_STREAM_SOURCE) if self._stream_source: - if not isinstance(self._stream_source, template_helper.Template): - self._stream_source = cv.template(self._stream_source) - self._stream_source.hass = hass + self._stream_source = Template(self._stream_source, hass) self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE] self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE] if self._stream_source: From 6317053cc61cc9b706da7b89965c2a75e339c18a Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:52:27 +0200 Subject: [PATCH 0642/1635] Remove unnecessary assignment of Template.hass from condition helper (#123775) --- homeassistant/helpers/condition.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index 3438336dbfa..629cdeef942 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -60,7 +60,7 @@ import homeassistant.util.dt as dt_util from . import config_validation as cv, entity_registry as er from .sun import get_astral_event_date -from .template import Template, attach as template_attach, render_complex +from .template import Template, render_complex from .trace import ( TraceElement, trace_append_element, @@ -510,9 +510,6 @@ def async_numeric_state_from_config(config: ConfigType) -> ConditionCheckerType: hass: HomeAssistant, variables: TemplateVarsType = None ) -> bool: """Test numeric state condition.""" - if value_template is not None: - value_template.hass = hass - errors = [] for index, entity_id in enumerate(entity_ids): try: @@ -630,7 +627,6 @@ def state_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def if_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Test if condition.""" - template_attach(hass, for_period) errors = [] result: bool = match != ENTITY_MATCH_ANY for index, entity_id in enumerate(entity_ids): @@ -792,8 +788,6 @@ def async_template_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def template_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Validate template based if-condition.""" - value_template.hass = hass - return async_template(hass, value_template, variables) return template_if From a6f3e587bcc9e48c69f9f984141309bd6c909ae6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:52:46 +0200 Subject: [PATCH 0643/1635] Remove unnecessary assignment of Template.hass from manual (#123770) --- homeassistant/components/manual/alarm_control_panel.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/manual/alarm_control_panel.py b/homeassistant/components/manual/alarm_control_panel.py index 422a9726e81..055e79867ab 100644 --- a/homeassistant/components/manual/alarm_control_panel.py +++ b/homeassistant/components/manual/alarm_control_panel.py @@ -222,7 +222,6 @@ class 
ManualAlarm(AlarmControlPanelEntity, RestoreEntity): self._attr_unique_id = unique_id if code_template: self._code = code_template - self._code.hass = hass else: self._code = code or None self._attr_code_arm_required = code_arm_required From 314ee9c74cfc8ff2ef7ac7df0d97b8cacf9cffd4 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:53:06 +0200 Subject: [PATCH 0644/1635] Remove unnecessary assignment of Template.hass from manual_mqtt (#123771) --- homeassistant/components/manual_mqtt/alarm_control_panel.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/manual_mqtt/alarm_control_panel.py b/homeassistant/components/manual_mqtt/alarm_control_panel.py index 26946a2a45c..8d447bbc8ac 100644 --- a/homeassistant/components/manual_mqtt/alarm_control_panel.py +++ b/homeassistant/components/manual_mqtt/alarm_control_panel.py @@ -273,7 +273,6 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): self._attr_name = name if code_template: self._code = code_template - self._code.hass = hass else: self._code = code or None self._disarm_after_trigger = disarm_after_trigger From f97fc8a90794c49edf5a30eaa8e4983bf52a6b0d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:53:24 +0200 Subject: [PATCH 0645/1635] Remove unnecessary assignment of Template.hass from rest (#123772) --- homeassistant/components/rest/switch.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/homeassistant/components/rest/switch.py b/homeassistant/components/rest/switch.py index 219084ea683..e4bb1f797d9 100644 --- a/homeassistant/components/rest/switch.py +++ b/homeassistant/components/rest/switch.py @@ -151,9 +151,6 @@ class RestSwitch(ManualTriggerEntity, SwitchEntity): self._timeout: int = config[CONF_TIMEOUT] self._verify_ssl: bool = config[CONF_VERIFY_SSL] - self._body_on.hass = hass - self._body_off.hass = hass - async def async_added_to_hass(self) -> None: """Handle adding to Home Assistant.""" await super().async_added_to_hass() From 5837450a05f4f1be0aae39951a2467e2cbffe043 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:53:39 +0200 Subject: [PATCH 0646/1635] Remove unnecessary assignment of Template.hass from influxdb (#123768) --- homeassistant/components/influxdb/sensor.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/homeassistant/components/influxdb/sensor.py b/homeassistant/components/influxdb/sensor.py index a1a9e618cb8..cc601888f56 100644 --- a/homeassistant/components/influxdb/sensor.py +++ b/homeassistant/components/influxdb/sensor.py @@ -194,12 +194,7 @@ class InfluxSensor(SensorEntity): """Initialize the sensor.""" self._name = query.get(CONF_NAME) self._unit_of_measurement = query.get(CONF_UNIT_OF_MEASUREMENT) - value_template = query.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - self._value_template = value_template - self._value_template.hass = hass - else: - self._value_template = None + self._value_template = query.get(CONF_VALUE_TEMPLATE) self._state = None self._hass = hass self._attr_unique_id = query.get(CONF_UNIQUE_ID) From dc462aa52950b7ad651f89adf3dcfae4cf3ad726 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:54:36 +0200 Subject: [PATCH 0647/1635] Remove unnecessary assignment of Template.hass from template (#123773) --- .../components/template/alarm_control_panel.py | 2 +- .../components/template/binary_sensor.py | 12 ++++++++---- homeassistant/components/template/config.py | 2 +- homeassistant/components/template/cover.py | 2 +- 
homeassistant/components/template/fan.py | 2 +- homeassistant/components/template/light.py | 2 +- homeassistant/components/template/lock.py | 2 +- homeassistant/components/template/sensor.py | 12 ++++++++---- homeassistant/components/template/switch.py | 2 +- .../components/template/template_entity.py | 17 +++++++++-------- homeassistant/components/template/trigger.py | 2 -- homeassistant/components/template/vacuum.py | 2 +- homeassistant/components/template/weather.py | 2 +- .../template/test_alarm_control_panel.py | 2 +- .../components/template/test_template_entity.py | 8 ++++---- 15 files changed, 39 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/template/alarm_control_panel.py b/homeassistant/components/template/alarm_control_panel.py index 2ac91d39858..7c23fdcebcc 100644 --- a/homeassistant/components/template/alarm_control_panel.py +++ b/homeassistant/components/template/alarm_control_panel.py @@ -108,7 +108,7 @@ async def _async_create_entities(hass, config): alarm_control_panels = [] for object_id, entity_config in config[CONF_ALARM_CONTROL_PANELS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) alarm_control_panels.append( diff --git a/homeassistant/components/template/binary_sensor.py b/homeassistant/components/template/binary_sensor.py index 68b3cd6d35a..187c7079f59 100644 --- a/homeassistant/components/template/binary_sensor.py +++ b/homeassistant/components/template/binary_sensor.py @@ -119,17 +119,21 @@ LEGACY_BINARY_SENSOR_SCHEMA = vol.All( ) -def rewrite_legacy_to_modern_conf(cfg: dict[str, dict]) -> list[dict]: +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, cfg: dict[str, dict] +) -> list[dict]: """Rewrite legacy binary sensor definitions to modern ones.""" sensors = [] for object_id, entity_cfg in cfg.items(): entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id} - entity_cfg = rewrite_common_legacy_to_modern_conf(entity_cfg, LEGACY_FIELDS) + entity_cfg = rewrite_common_legacy_to_modern_conf( + hass, entity_cfg, LEGACY_FIELDS + ) if CONF_NAME not in entity_cfg: - entity_cfg[CONF_NAME] = template.Template(object_id) + entity_cfg[CONF_NAME] = template.Template(object_id, hass) sensors.append(entity_cfg) @@ -183,7 +187,7 @@ async def async_setup_platform( _async_create_template_tracking_entities( async_add_entities, hass, - rewrite_legacy_to_modern_conf(config[CONF_SENSORS]), + rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]), None, ) return diff --git a/homeassistant/components/template/config.py b/homeassistant/components/template/config.py index 42a57cfc4aa..e2015743a0e 100644 --- a/homeassistant/components/template/config.py +++ b/homeassistant/components/template/config.py @@ -115,7 +115,7 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf ) definitions = list(cfg[new_key]) if new_key in cfg else [] - definitions.extend(transform(cfg[old_key])) + definitions.extend(transform(hass, cfg[old_key])) cfg = {**cfg, new_key: definitions} config_sections.append(cfg) diff --git a/homeassistant/components/template/cover.py b/homeassistant/components/template/cover.py index d50067f6278..2c84387ed64 100644 --- a/homeassistant/components/template/cover.py +++ b/homeassistant/components/template/cover.py @@ -106,7 +106,7 @@ async def _async_create_entities(hass, config): covers = [] for object_id, entity_config in config[CONF_COVERS].items(): - entity_config 
= rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) diff --git a/homeassistant/components/template/fan.py b/homeassistant/components/template/fan.py index 20a2159e378..cedd7d0d725 100644 --- a/homeassistant/components/template/fan.py +++ b/homeassistant/components/template/fan.py @@ -94,7 +94,7 @@ async def _async_create_entities(hass, config): fans = [] for object_id, entity_config in config[CONF_FANS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index ba6b8ce846b..cae6c0cebc1 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -126,7 +126,7 @@ async def _async_create_entities(hass, config): lights = [] for object_id, entity_config in config[CONF_LIGHTS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) lights.append( diff --git a/homeassistant/components/template/lock.py b/homeassistant/components/template/lock.py index 0fa219fcd9b..5c0b67a23dc 100644 --- a/homeassistant/components/template/lock.py +++ b/homeassistant/components/template/lock.py @@ -59,7 +59,7 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( async def _async_create_entities(hass, config): """Create the Template lock.""" - config = rewrite_common_legacy_to_modern_conf(config) + config = rewrite_common_legacy_to_modern_conf(hass, config) return [TemplateLock(hass, config, config.get(CONF_UNIQUE_ID))] diff --git a/homeassistant/components/template/sensor.py b/homeassistant/components/template/sensor.py index 70a2d5dd650..ee24407699d 100644 --- a/homeassistant/components/template/sensor.py +++ b/homeassistant/components/template/sensor.py @@ -142,17 +142,21 @@ def extra_validation_checks(val): return val -def rewrite_legacy_to_modern_conf(cfg: dict[str, dict]) -> list[dict]: +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, cfg: dict[str, dict] +) -> list[dict]: """Rewrite legacy sensor definitions to modern ones.""" sensors = [] for object_id, entity_cfg in cfg.items(): entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id} - entity_cfg = rewrite_common_legacy_to_modern_conf(entity_cfg, LEGACY_FIELDS) + entity_cfg = rewrite_common_legacy_to_modern_conf( + hass, entity_cfg, LEGACY_FIELDS + ) if CONF_NAME not in entity_cfg: - entity_cfg[CONF_NAME] = template.Template(object_id) + entity_cfg[CONF_NAME] = template.Template(object_id, hass) sensors.append(entity_cfg) @@ -210,7 +214,7 @@ async def async_setup_platform( _async_create_template_tracking_entities( async_add_entities, hass, - rewrite_legacy_to_modern_conf(config[CONF_SENSORS]), + rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]), None, ) return diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index fbb35399ef8..9145625f706 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -76,7 +76,7 @@ async def _async_create_entities(hass, config): switches = [] for object_id, entity_config in config[CONF_SWITCHES].items(): - entity_config = 
rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) switches.append( diff --git a/homeassistant/components/template/template_entity.py b/homeassistant/components/template/template_entity.py index b5d2ab6fff3..a074f828284 100644 --- a/homeassistant/components/template/template_entity.py +++ b/homeassistant/components/template/template_entity.py @@ -123,7 +123,9 @@ LEGACY_FIELDS = { def rewrite_common_legacy_to_modern_conf( - entity_cfg: dict[str, Any], extra_legacy_fields: dict[str, str] | None = None + hass: HomeAssistant, + entity_cfg: dict[str, Any], + extra_legacy_fields: dict[str, str] | None = None, ) -> dict[str, Any]: """Rewrite legacy config.""" entity_cfg = {**entity_cfg} @@ -138,11 +140,11 @@ def rewrite_common_legacy_to_modern_conf( val = entity_cfg.pop(from_key) if isinstance(val, str): - val = Template(val) + val = Template(val, hass) entity_cfg[to_key] = val if CONF_NAME in entity_cfg and isinstance(entity_cfg[CONF_NAME], str): - entity_cfg[CONF_NAME] = Template(entity_cfg[CONF_NAME]) + entity_cfg[CONF_NAME] = Template(entity_cfg[CONF_NAME], hass) return entity_cfg @@ -310,7 +312,6 @@ class TemplateEntity(Entity): # Try to render the name as it can influence the entity ID self._attr_name = fallback_name if self._friendly_name_template: - self._friendly_name_template.hass = hass with contextlib.suppress(TemplateError): self._attr_name = self._friendly_name_template.async_render( variables=variables, parse_result=False @@ -319,14 +320,12 @@ class TemplateEntity(Entity): # Templates will not render while the entity is unavailable, try to render the # icon and picture templates. if self._entity_picture_template: - self._entity_picture_template.hass = hass with contextlib.suppress(TemplateError): self._attr_entity_picture = self._entity_picture_template.async_render( variables=variables, parse_result=False ) if self._icon_template: - self._icon_template.hass = hass with contextlib.suppress(TemplateError): self._attr_icon = self._icon_template.async_render( variables=variables, parse_result=False @@ -388,8 +387,10 @@ class TemplateEntity(Entity): If True, the attribute will be set to None if the template errors. 
""" - assert self.hass is not None, "hass cannot be None" - template.hass = self.hass + if self.hass is None: + raise ValueError("hass cannot be None") + if template.hass is None: + raise ValueError("template.hass cannot be None") template_attribute = _TemplateAttribute( self, attribute, template, validator, on_update, none_on_template_error ) diff --git a/homeassistant/components/template/trigger.py b/homeassistant/components/template/trigger.py index 09ad0754634..44ac2d93051 100644 --- a/homeassistant/components/template/trigger.py +++ b/homeassistant/components/template/trigger.py @@ -49,9 +49,7 @@ async def async_attach_trigger( """Listen for state changes based on configuration.""" trigger_data = trigger_info["trigger_data"] value_template: Template = config[CONF_VALUE_TEMPLATE] - value_template.hass = hass time_delta = config.get(CONF_FOR) - template.attach(hass, time_delta) delay_cancel = None job = HassJob(action) armed = False diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index e512ce2eb04..1d021bcb571 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -100,7 +100,7 @@ async def _async_create_entities(hass, config): vacuums = [] for object_id, entity_config in config[CONF_VACUUMS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) vacuums.append( diff --git a/homeassistant/components/template/weather.py b/homeassistant/components/template/weather.py index 5c3e4107b2c..ec6d1f08dd3 100644 --- a/homeassistant/components/template/weather.py +++ b/homeassistant/components/template/weather.py @@ -153,7 +153,7 @@ async def async_setup_platform( ) return - config = rewrite_common_legacy_to_modern_conf(config) + config = rewrite_common_legacy_to_modern_conf(hass, config) unique_id = config.get(CONF_UNIQUE_ID) async_add_entities( diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index 6a2a95a64eb..ea63d7b9926 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -244,7 +244,7 @@ async def test_template_syntax_error( "platform": "template", "panels": { "test_template_panel": { - "name": "Template Alarm Panel", + "name": '{{ "Template Alarm Panel" }}', "value_template": "disarmed", **OPTIMISTIC_TEMPLATE_ALARM_CONFIG, } diff --git a/tests/components/template/test_template_entity.py b/tests/components/template/test_template_entity.py index dcceea95181..c09a09750fe 100644 --- a/tests/components/template/test_template_entity.py +++ b/tests/components/template/test_template_entity.py @@ -11,14 +11,14 @@ async def test_template_entity_requires_hass_set(hass: HomeAssistant) -> None: """Test template entity requires hass to be set before accepting templates.""" entity = template_entity.TemplateEntity(hass) - with pytest.raises(AssertionError): + with pytest.raises(ValueError, match="^hass cannot be None"): entity.add_template_attribute("_hello", template.Template("Hello")) entity.hass = object() - entity.add_template_attribute("_hello", template.Template("Hello", None)) + with pytest.raises(ValueError, match="^template.hass cannot be None"): + entity.add_template_attribute("_hello", template.Template("Hello", None)) tpl_with_hass = template.Template("Hello", entity.hass) 
entity.add_template_attribute("_hello", tpl_with_hass) - # Because hass is set in `add_template_attribute`, both templates match `tpl_with_hass` - assert len(entity._template_attrs.get(tpl_with_hass, [])) == 2 + assert len(entity._template_attrs.get(tpl_with_hass, [])) == 1 From e9682fe003b9ef1826166fb0bb13ce5baae2f812 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:54:56 +0200 Subject: [PATCH 0648/1635] Remove unnecessary assignment of Template.hass from xiaomi (#123774) --- homeassistant/components/xiaomi/camera.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/xiaomi/camera.py b/homeassistant/components/xiaomi/camera.py index 323a0f8a157..8ab15f85147 100644 --- a/homeassistant/components/xiaomi/camera.py +++ b/homeassistant/components/xiaomi/camera.py @@ -80,7 +80,6 @@ class XiaomiCamera(Camera): self._manager = get_ffmpeg_manager(hass) self._name = config[CONF_NAME] self.host = config[CONF_HOST] - self.host.hass = hass self._model = config[CONF_MODEL] self.port = config[CONF_PORT] self.path = config[CONF_PATH] From 992de497f25a93c2ff907e0df909c405485aa241 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 11:55:37 +0200 Subject: [PATCH 0649/1635] Remove unnecessary assignment of Template.hass from script helper (#123780) --- homeassistant/helpers/script.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index a1b885d0c52..26a9b6e069e 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -669,7 +669,6 @@ class _ScriptRun: trace_set_result(wait=self._variables["wait"]) wait_template = self._action[CONF_WAIT_TEMPLATE] - wait_template.hass = self._hass # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): @@ -1429,7 +1428,6 @@ class Script: self._hass = hass self.sequence = sequence - template.attach(hass, self.sequence) self.name = name self.unique_id = f"{domain}.{name}-{id(self)}" self.domain = domain @@ -1459,8 +1457,6 @@ class Script: self._sequence_scripts: dict[int, Script] = {} self.variables = variables self._variables_dynamic = template.is_complex(variables) - if self._variables_dynamic: - template.attach(hass, variables) self._copy_variables_on_run = copy_variables @property From 5f694d9a8449e25aa3aff0ff252fd83c5afe54c3 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 11:56:18 +0200 Subject: [PATCH 0650/1635] Improve type hints in mochad tests (#123794) --- tests/components/mochad/test_light.py | 2 +- tests/components/mochad/test_switch.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/mochad/test_light.py b/tests/components/mochad/test_light.py index 872bd3a9d61..49beebbaec6 100644 --- a/tests/components/mochad/test_light.py +++ b/tests/components/mochad/test_light.py @@ -18,7 +18,7 @@ def pymochad_mock(): @pytest.fixture -def light_mock(hass, brightness): +def light_mock(hass: HomeAssistant, brightness: int) -> mochad.MochadLight: """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} diff --git a/tests/components/mochad/test_switch.py b/tests/components/mochad/test_switch.py index 750dd48296e..9fea3b5c14c 100644 --- a/tests/components/mochad/test_switch.py +++ b/tests/components/mochad/test_switch.py @@ -21,7 +21,7 @@ def pymochad_mock(): @pytest.fixture -def 
switch_mock(hass): +def switch_mock(hass: HomeAssistant) -> mochad.MochadSwitch: """Mock switch.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_switch"} From 2b968dfd9aab9d33fbb2ddc873b4a9498b8f233b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 11:56:55 +0200 Subject: [PATCH 0651/1635] Improve type hints in mfi tests (#123792) --- tests/components/mfi/test_sensor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/mfi/test_sensor.py b/tests/components/mfi/test_sensor.py index 49efdd5dc71..37512ca78f8 100644 --- a/tests/components/mfi/test_sensor.py +++ b/tests/components/mfi/test_sensor.py @@ -116,13 +116,13 @@ async def test_setup_adds_proper_devices(hass: HomeAssistant) -> None: @pytest.fixture(name="port") -def port_fixture(): +def port_fixture() -> mock.MagicMock: """Port fixture.""" return mock.MagicMock() @pytest.fixture(name="sensor") -def sensor_fixture(hass, port): +def sensor_fixture(hass: HomeAssistant, port: mock.MagicMock) -> mfi.MfiSensor: """Sensor fixture.""" sensor = mfi.MfiSensor(port, hass) sensor.hass = hass From 3660c2dbb46476ac625e0dce8d512a6498fe9a41 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 11:59:22 +0200 Subject: [PATCH 0652/1635] Improve type hints in mailgun tests (#123789) --- tests/components/mailgun/test_init.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/mailgun/test_init.py b/tests/components/mailgun/test_init.py index 908e98ae31e..2e60c56faa4 100644 --- a/tests/components/mailgun/test_init.py +++ b/tests/components/mailgun/test_init.py @@ -10,7 +10,7 @@ from homeassistant import config_entries from homeassistant.components import mailgun, webhook from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_API_KEY, CONF_DOMAIN -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -29,7 +29,7 @@ async def http_client( @pytest.fixture -async def webhook_id_with_api_key(hass): +async def webhook_id_with_api_key(hass: HomeAssistant) -> str: """Initialize the Mailgun component and get the webhook_id.""" await async_setup_component( hass, @@ -53,7 +53,7 @@ async def webhook_id_with_api_key(hass): @pytest.fixture -async def webhook_id_without_api_key(hass): +async def webhook_id_without_api_key(hass: HomeAssistant) -> str: """Initialize the Mailgun component and get the webhook_id w/o API key.""" await async_setup_component(hass, mailgun.DOMAIN, {}) @@ -73,7 +73,7 @@ async def webhook_id_without_api_key(hass): @pytest.fixture -async def mailgun_events(hass): +async def mailgun_events(hass: HomeAssistant) -> list[Event]: """Return a list of mailgun_events triggered.""" events = [] From 4ceb9b9dbfc63bc7263fa626b7070a30d2a3767a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 11:59:48 +0200 Subject: [PATCH 0653/1635] Improve type hints in anthropic tests (#123784) --- tests/components/anthropic/conftest.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py index fe3b20f15b8..ce6b98c480c 100644 --- a/tests/components/anthropic/conftest.py +++ 
b/tests/components/anthropic/conftest.py @@ -1,5 +1,6 @@ """Tests helpers.""" +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, patch import pytest @@ -13,7 +14,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( title="Claude", @@ -27,7 +28,9 @@ def mock_config_entry(hass): @pytest.fixture -def mock_config_entry_with_assist(hass, mock_config_entry): +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -36,7 +39,9 @@ def mock_config_entry_with_assist(hass, mock_config_entry): @pytest.fixture -async def mock_init_component(hass, mock_config_entry): +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> AsyncGenerator[None]: """Initialize integration.""" with patch( "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock From 78f7b3340dcd294d4176d8787ad6669f977a2843 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 12:09:30 +0200 Subject: [PATCH 0654/1635] Remove unnecessary assignment of Template.hass from event helper (#123777) --- homeassistant/helpers/event.py | 2 -- tests/helpers/test_event.py | 25 +++++++++++++------------ 2 files changed, 13 insertions(+), 14 deletions(-) diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 207dd024b6a..38f461d8d7a 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -976,8 +976,6 @@ class TrackTemplateResultInfo: self.hass = hass self._job = HassJob(action, f"track template result {track_templates}") - for track_template_ in track_templates: - track_template_.template.hass = hass self._track_templates = track_templates self._has_super_template = has_super_template diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index 4bb4c1a1967..6c71f1d8a7c 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -1476,7 +1476,7 @@ async def test_track_template_result_super_template_2( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template) + template_availability = Template(availability_template, hass) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -1628,7 +1628,7 @@ async def test_track_template_result_super_template_2_initially_false( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template) + template_availability = Template(availability_template, hass) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -3124,11 +3124,11 @@ async def test_async_track_template_result_multiple_templates( ) -> None: """Test tracking multiple templates.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ states.switch.test.state == 'on' }}") - template_3 = Template("{{ states.switch.test.state == 'off' }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ states.switch.test.state == 'on' }}", 
hass) + template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) template_4 = Template( - "{{ states.binary_sensor | map(attribute='entity_id') | list }}" + "{{ states.binary_sensor | map(attribute='entity_id') | list }}", hass ) refresh_runs = [] @@ -3188,11 +3188,12 @@ async def test_async_track_template_result_multiple_templates_mixing_domain( ) -> None: """Test tracking multiple templates when tracking entities and an entire domain.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ states.switch.test.state == 'on' }}") - template_3 = Template("{{ states.switch.test.state == 'off' }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) template_4 = Template( - "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}" + "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}", + hass, ) refresh_runs = [] @@ -3417,8 +3418,8 @@ async def test_async_track_template_result_multiple_templates_mixing_listeners( ) -> None: """Test tracking multiple templates with mixing listener types.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ now() and True }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ now() and True }}", hass) refresh_runs = [] From e15ac2fbe0d7f67112b7d81e6153419dbc227505 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 12:10:15 +0200 Subject: [PATCH 0655/1635] Improve type hints in elevenlabs tests (#123786) --- tests/components/elevenlabs/test_tts.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index 7fa289f24ed..5eee4084452 100644 --- a/tests/components/elevenlabs/test_tts.py +++ b/tests/components/elevenlabs/test_tts.py @@ -4,7 +4,7 @@ from __future__ import annotations from http import HTTPStatus from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from elevenlabs.core import ApiError from elevenlabs.types import GetVoicesResponse @@ -29,7 +29,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock): +def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: """Mock writing tags.""" From 193a7b7360c068eae55ee3596927bf97b77d99e0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 12:18:59 +0200 Subject: [PATCH 0656/1635] Improve type hints in dsmr tests (#123785) --- tests/components/dsmr/test_sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index b93dd8d18d2..c2c6d48b007 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -1521,7 +1521,7 @@ async def test_gas_meter_providing_energy_reading( ) -def test_all_obis_references_exists(): +def test_all_obis_references_exists() -> None: """Verify that all attributes exist by name in database.""" for sensor in SENSORS: assert hasattr(obis_references, sensor.obis_reference) From 30994710e6a2001ff16d8355d8e3e67b0126fabd Mon Sep 17 
00:00:00 2001 From: Louis Christ Date: Tue, 13 Aug 2024 12:55:01 +0200 Subject: [PATCH 0657/1635] Fix status update loop in bluesound integration (#123790) * Fix retry loop for status update * Use 'available' instead of _is_online * Fix tests --- .../components/bluesound/media_player.py | 37 ++++++++++--------- tests/components/bluesound/conftest.py | 35 ++++++++++++++++-- .../components/bluesound/test_config_flow.py | 4 +- 3 files changed, 55 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index c1b662fcddc..92f47977ee5 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -244,7 +244,6 @@ class BluesoundPlayer(MediaPlayerEntity): self._status: Status | None = None self._inputs: list[Input] = [] self._presets: list[Preset] = [] - self._is_online = False self._muted = False self._master: BluesoundPlayer | None = None self._is_master = False @@ -312,20 +311,24 @@ class BluesoundPlayer(MediaPlayerEntity): async def _start_poll_command(self): """Loop which polls the status of the player.""" - try: - while True: + while True: + try: await self.async_update_status() - - except (TimeoutError, ClientError): - _LOGGER.error("Node %s:%s is offline, retrying later", self.host, self.port) - await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) - self.start_polling() - - except CancelledError: - _LOGGER.debug("Stopping the polling of node %s:%s", self.host, self.port) - except Exception: - _LOGGER.exception("Unexpected error in %s:%s", self.host, self.port) - raise + except (TimeoutError, ClientError): + _LOGGER.error( + "Node %s:%s is offline, retrying later", self.host, self.port + ) + await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) + except CancelledError: + _LOGGER.debug( + "Stopping the polling of node %s:%s", self.host, self.port + ) + return + except Exception: + _LOGGER.exception( + "Unexpected error in %s:%s, retrying later", self.host, self.port + ) + await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) async def async_added_to_hass(self) -> None: """Start the polling task.""" @@ -348,7 +351,7 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_update(self) -> None: """Update internal status of the entity.""" - if not self._is_online: + if not self.available: return with suppress(TimeoutError): @@ -365,7 +368,7 @@ class BluesoundPlayer(MediaPlayerEntity): try: status = await self._player.status(etag=etag, poll_timeout=120, timeout=125) - self._is_online = True + self._attr_available = True self._last_status_update = dt_util.utcnow() self._status = status @@ -394,7 +397,7 @@ class BluesoundPlayer(MediaPlayerEntity): self.async_write_ha_state() except (TimeoutError, ClientError): - self._is_online = False + self._attr_available = False self._last_status_update = None self._status = None self.async_write_ha_state() diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index 5d81b6863c6..155d6b66e4e 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch -from pyblu import SyncStatus +from pyblu import Status, SyncStatus import pytest from homeassistant.components.bluesound.const import DOMAIN @@ -39,6 +39,35 @@ def sync_status() -> SyncStatus: ) +@pytest.fixture +def status() -> Status: + """Return a status object.""" + return Status( + etag="etag", + 
input_id=None, + service=None, + state="playing", + shuffle=False, + album=None, + artist=None, + name=None, + image=None, + volume=10, + volume_db=22.3, + mute=False, + mute_volume=None, + mute_volume_db=None, + seconds=2, + total_seconds=123.1, + can_seek=False, + sleep=0, + group_name=None, + group_volume=None, + indexing=False, + stream_url=None, + ) + + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" @@ -65,7 +94,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -def mock_player() -> Generator[AsyncMock]: +def mock_player(status: Status) -> Generator[AsyncMock]: """Mock the player.""" with ( patch( @@ -78,7 +107,7 @@ def mock_player() -> Generator[AsyncMock]: ): player = mock_player.return_value player.__aenter__.return_value = player - player.status.return_value = None + player.status.return_value = status player.sync_status.return_value = SyncStatus( etag="etag", id="1.1.1.1:11000", diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py index 32f36fcea58..8fecba7017d 100644 --- a/tests/components/bluesound/test_config_flow.py +++ b/tests/components/bluesound/test_config_flow.py @@ -41,7 +41,7 @@ async def test_user_flow_success( async def test_user_flow_cannot_connect( - hass: HomeAssistant, mock_player: AsyncMock + hass: HomeAssistant, mock_player: AsyncMock, mock_setup_entry: AsyncMock ) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( @@ -76,6 +76,8 @@ async def test_user_flow_cannot_connect( CONF_PORT: 11000, } + mock_setup_entry.assert_called_once() + async def test_user_flow_aleady_configured( hass: HomeAssistant, From 8e0dfbcd1339fa0c178d2058037d3bfebbee011d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 13:15:35 +0200 Subject: [PATCH 0658/1635] Improve type hints in modbus tests (#123795) --- tests/components/modbus/conftest.py | 4 +++- tests/components/modbus/test_sensor.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 6741504585a..28f8eae5a0b 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -192,7 +192,9 @@ async def mock_test_state_fixture( @pytest.fixture(name="mock_modbus_ha") -async def mock_modbus_ha_fixture(hass, mock_modbus): +async def mock_modbus_ha_fixture( + hass: HomeAssistant, mock_modbus: mock.AsyncMock +) -> mock.AsyncMock: """Load homeassistant to allow service calls.""" assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() diff --git a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index 20ff558fce6..87015fa634c 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -1335,7 +1335,7 @@ async def test_wrap_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None @pytest.fixture(name="mock_restore") -async def mock_restore(hass): +async def mock_restore(hass: HomeAssistant) -> None: """Mock restore cache.""" mock_restore_cache_with_extra_data( hass, From 71e23e78493fb516bf51773763b8b70d8307462f Mon Sep 17 00:00:00 2001 From: Aidan Timson Date: Tue, 13 Aug 2024 12:15:58 +0100 Subject: [PATCH 0659/1635] System Bridge package updates (#123657) --- homeassistant/components/system_bridge/manifest.json | 2 +- requirements_all.txt | 4 ++-- 
requirements_test_all.txt | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/system_bridge/manifest.json b/homeassistant/components/system_bridge/manifest.json index 80527de75cd..e886bcad150 100644 --- a/homeassistant/components/system_bridge/manifest.json +++ b/homeassistant/components/system_bridge/manifest.json @@ -10,6 +10,6 @@ "iot_class": "local_push", "loggers": ["systembridgeconnector"], "quality_scale": "silver", - "requirements": ["systembridgeconnector==4.1.0", "systembridgemodels==4.1.0"], + "requirements": ["systembridgeconnector==4.1.5", "systembridgemodels==4.2.4"], "zeroconf": ["_system-bridge._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 8e574d22837..c97d0fe527e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2700,10 +2700,10 @@ switchbot-api==2.2.1 synology-srm==0.2.0 # homeassistant.components.system_bridge -systembridgeconnector==4.1.0 +systembridgeconnector==4.1.5 # homeassistant.components.system_bridge -systembridgemodels==4.1.0 +systembridgemodels==4.2.4 # homeassistant.components.tailscale tailscale==0.6.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 956e945d92d..f04c0425af9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2137,10 +2137,10 @@ surepy==0.9.0 switchbot-api==2.2.1 # homeassistant.components.system_bridge -systembridgeconnector==4.1.0 +systembridgeconnector==4.1.5 # homeassistant.components.system_bridge -systembridgemodels==4.1.0 +systembridgemodels==4.2.4 # homeassistant.components.tailscale tailscale==0.6.1 From b3d1d79a49c610ef5e788e3d7715b4076d81faf2 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Tue, 13 Aug 2024 13:28:37 +0200 Subject: [PATCH 0660/1635] Update xknx to 3.1.0 and fix climate read only mode (#123776) --- homeassistant/components/knx/climate.py | 29 ++++++-- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/knx/test_climate.py | 84 ++++++++++++++++++++++ 5 files changed, 109 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 9abc9023617..abce143c760 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -5,7 +5,11 @@ from __future__ import annotations from typing import Any from xknx import XKNX -from xknx.devices import Climate as XknxClimate, ClimateMode as XknxClimateMode +from xknx.devices import ( + Climate as XknxClimate, + ClimateMode as XknxClimateMode, + Device as XknxDevice, +) from xknx.dpt.dpt_20 import HVACControllerMode from homeassistant import config_entries @@ -241,12 +245,9 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): if self._device.supports_on_off and not self._device.is_on: return HVACMode.OFF if self._device.mode is not None and self._device.mode.supports_controller_mode: - hvac_mode = CONTROLLER_MODES.get( + return CONTROLLER_MODES.get( self._device.mode.controller_mode, self.default_hvac_mode ) - if hvac_mode is not HVACMode.OFF: - self._last_hvac_mode = hvac_mode - return hvac_mode return self.default_hvac_mode @property @@ -261,11 +262,15 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): if self._device.supports_on_off: if not ha_controller_modes: - ha_controller_modes.append(self.default_hvac_mode) + ha_controller_modes.append(self._last_hvac_mode) ha_controller_modes.append(HVACMode.OFF) hvac_modes = list(set(filter(None, ha_controller_modes))) 
- return hvac_modes if hvac_modes else [self.default_hvac_mode] + return ( + hvac_modes + if hvac_modes + else [self.hvac_mode] # mode read-only -> fall back to only current mode + ) @property def hvac_action(self) -> HVACAction | None: @@ -354,3 +359,13 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): self._device.mode.unregister_device_updated_cb(self.after_update_callback) self._device.mode.xknx.devices.async_remove(self._device.mode) await super().async_will_remove_from_hass() + + def after_update_callback(self, _device: XknxDevice) -> None: + """Call after device was updated.""" + if self._device.mode is not None and self._device.mode.supports_controller_mode: + hvac_mode = CONTROLLER_MODES.get( + self._device.mode.controller_mode, self.default_hvac_mode + ) + if hvac_mode is not HVACMode.OFF: + self._last_hvac_mode = hvac_mode + super().after_update_callback(_device) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 6974ee300f5..9ecf687d6b9 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -11,7 +11,7 @@ "loggers": ["xknx", "xknxproject"], "quality_scale": "platinum", "requirements": [ - "xknx==3.0.0", + "xknx==3.1.0", "xknxproject==3.7.1", "knx-frontend==2024.8.9.225351" ], diff --git a/requirements_all.txt b/requirements_all.txt index c97d0fe527e..e19819dafde 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2930,7 +2930,7 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==3.0.0 +xknx==3.1.0 # homeassistant.components.knx xknxproject==3.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f04c0425af9..575fd7757be 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2316,7 +2316,7 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==3.0.0 +xknx==3.1.0 # homeassistant.components.knx xknxproject==3.7.1 diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 77eeeef3559..9f198b48bd4 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -231,6 +231,90 @@ async def test_climate_hvac_mode( assert hass.states.get("climate.test").state == "cool" +async def test_climate_heat_cool_read_only( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX climate hvac mode.""" + heat_cool_state_ga = "3/3/3" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS: heat_cool_state_ga, + } + } + ) + # read states state updater + # StateUpdater semaphore allows 2 concurrent requests + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) + await knx.receive_response("1/2/5", RAW_FLOAT_20_0) + await knx.assert_read(heat_cool_state_ga) + await knx.receive_response(heat_cool_state_ga, True) # heat + + state = hass.states.get("climate.test") + assert state.state == "heat" + assert state.attributes["hvac_modes"] == ["heat"] + assert state.attributes["hvac_action"] == "heating" + + await knx.receive_write(heat_cool_state_ga, False) # cool + state = hass.states.get("climate.test") + assert state.state == "cool" + assert state.attributes["hvac_modes"] == 
["cool"] + assert state.attributes["hvac_action"] == "cooling" + + +async def test_climate_heat_cool_read_only_on_off( + hass: HomeAssistant, knx: KNXTestKit +) -> None: + """Test KNX climate hvac mode.""" + on_off_ga = "2/2/2" + heat_cool_state_ga = "3/3/3" + await knx.setup_integration( + { + ClimateSchema.PLATFORM: { + CONF_NAME: "test", + ClimateSchema.CONF_TEMPERATURE_ADDRESS: "1/2/3", + ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS: "1/2/4", + ClimateSchema.CONF_TARGET_TEMPERATURE_STATE_ADDRESS: "1/2/5", + ClimateSchema.CONF_ON_OFF_ADDRESS: on_off_ga, + ClimateSchema.CONF_HEAT_COOL_STATE_ADDRESS: heat_cool_state_ga, + } + } + ) + # read states state updater + # StateUpdater semaphore allows 2 concurrent requests + await knx.assert_read("1/2/3") + await knx.assert_read("1/2/5") + # StateUpdater initialize state + await knx.receive_response("1/2/3", RAW_FLOAT_20_0) + await knx.receive_response("1/2/5", RAW_FLOAT_20_0) + await knx.assert_read(heat_cool_state_ga) + await knx.receive_response(heat_cool_state_ga, True) # heat + + state = hass.states.get("climate.test") + assert state.state == "off" + assert set(state.attributes["hvac_modes"]) == {"off", "heat"} + assert state.attributes["hvac_action"] == "off" + + await knx.receive_write(heat_cool_state_ga, False) # cool + state = hass.states.get("climate.test") + assert state.state == "off" + assert set(state.attributes["hvac_modes"]) == {"off", "cool"} + assert state.attributes["hvac_action"] == "off" + + await knx.receive_write(on_off_ga, True) + state = hass.states.get("climate.test") + assert state.state == "cool" + assert set(state.attributes["hvac_modes"]) == {"off", "cool"} + assert state.attributes["hvac_action"] == "cooling" + + async def test_climate_preset_mode( hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry ) -> None: From 2c4b7c2577b628899f71bda15789f315bae96461 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 13:32:44 +0200 Subject: [PATCH 0661/1635] Improve type hints in knx tests (#123787) --- tests/components/knx/test_knx_selectors.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tests/components/knx/test_knx_selectors.py b/tests/components/knx/test_knx_selectors.py index 432a0fb9f80..7b2f09af84b 100644 --- a/tests/components/knx/test_knx_selectors.py +++ b/tests/components/knx/test_knx_selectors.py @@ -1,5 +1,7 @@ """Test KNX selectors.""" +from typing import Any + import pytest import voluptuous as vol @@ -111,7 +113,11 @@ INVALID = "invalid" ), ], ) -def test_ga_selector(selector_config, data, expected): +def test_ga_selector( + selector_config: dict[str, Any], + data: dict[str, Any], + expected: str | dict[str, Any], +) -> None: """Test GASelector.""" selector = GASelector(**selector_config) if expected == INVALID: From 2859dde69757e095ad191ed25cdc20bf9625caca Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 13 Aug 2024 06:56:10 -0500 Subject: [PATCH 0662/1635] Remove unifiprotect deprecate_package_sensor repair (#123807) --- .../components/unifiprotect/migrate.py | 20 ++++++------------- .../components/unifiprotect/strings.json | 4 ---- 2 files changed, 6 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/unifiprotect/migrate.py b/homeassistant/components/unifiprotect/migrate.py index e469b684518..2c631489217 100644 --- a/homeassistant/components/unifiprotect/migrate.py +++ b/homeassistant/components/unifiprotect/migrate.py @@ -107,20 +107,18 @@ async def async_migrate_data( ) -> None: """Run all valid UniFi Protect data migrations.""" - _LOGGER.debug("Start Migrate: async_deprecate_hdr_package") - async_deprecate_hdr_package(hass, entry) - _LOGGER.debug("Completed Migrate: async_deprecate_hdr_package") + _LOGGER.debug("Start Migrate: async_deprecate_hdr") + async_deprecate_hdr(hass, entry) + _LOGGER.debug("Completed Migrate: async_deprecate_hdr") @callback -def async_deprecate_hdr_package(hass: HomeAssistant, entry: UFPConfigEntry) -> None: - """Check for usages of hdr_mode switch and package sensor and raise repair if it is used. +def async_deprecate_hdr(hass: HomeAssistant, entry: UFPConfigEntry) -> None: + """Check for usages of hdr_mode switch and raise repair if it is used. UniFi Protect v3.0.22 changed how HDR works so it is no longer a simple on/off toggle. There is Always On, Always Off and Auto. So it has been migrated to a select. The old switch is now deprecated. - Additionally, the Package sensor is no longer functional due to how events work so a repair to notify users. - Added in 2024.4.0 """ @@ -128,11 +126,5 @@ def async_deprecate_hdr_package(hass: HomeAssistant, entry: UFPConfigEntry) -> N hass, entry, "2024.10.0", - { - "hdr_switch": {"id": "hdr_mode", "platform": Platform.SWITCH}, - "package_sensor": { - "id": "smart_obj_package", - "platform": Platform.BINARY_SENSOR, - }, - }, + {"hdr_switch": {"id": "hdr_mode", "platform": Platform.SWITCH}}, ) diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index f785498c005..aaef111a351 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -124,10 +124,6 @@ "deprecate_hdr_switch": { "title": "HDR Mode Switch Deprecated", "description": "UniFi Protect v3 added a new state for HDR (auto). As a result, the HDR Mode Switch has been replaced with an HDR Mode Select, and it is deprecated.\n\nBelow are the detected automations or scripts that use one or more of the deprecated entities:\n{items}\nThe above list may be incomplete and it does not include any template usages inside of dashboards. Please update any templates, automations or scripts accordingly." - }, - "deprecate_package_sensor": { - "title": "Package Event Sensor Deprecated", - "description": "The package event sensor never tripped because of the way events are reported in UniFi Protect. As a result, the sensor is deprecated and will be removed.\n\nBelow are the detected automations or scripts that use one or more of the deprecated entities:\n{items}\nThe above list may be incomplete and it does not include any template usages inside of dashboards. Please update any templates, automations or scripts accordingly." 
} }, "entity": { From f0247e942e1e6f3d2161518a090bafba2d6c6553 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 13 Aug 2024 14:31:12 +0200 Subject: [PATCH 0663/1635] Remove unnecessary assignment of Template.hass from alert (#123766) --- homeassistant/components/alert/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/homeassistant/components/alert/__init__.py b/homeassistant/components/alert/__init__.py index 1ffeb7c73ac..f49a962fa87 100644 --- a/homeassistant/components/alert/__init__.py +++ b/homeassistant/components/alert/__init__.py @@ -162,16 +162,8 @@ class Alert(Entity): self._data = data self._message_template = message_template - if self._message_template is not None: - self._message_template.hass = hass - self._done_message_template = done_message_template - if self._done_message_template is not None: - self._done_message_template.hass = hass - self._title_template = title_template - if self._title_template is not None: - self._title_template.hass = hass self._notifiers = notifiers self._can_ack = can_ack From ae74fdf252df515965224804a250340e65975663 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 14:57:24 +0200 Subject: [PATCH 0664/1635] Improve type hints in nzbget tests (#123798) --- tests/components/nzbget/conftest.py | 3 ++- tests/components/nzbget/test_init.py | 4 +++- tests/components/nzbget/test_sensor.py | 7 ++++--- tests/components/nzbget/test_switch.py | 8 ++++++-- 4 files changed, 15 insertions(+), 7 deletions(-) diff --git a/tests/components/nzbget/conftest.py b/tests/components/nzbget/conftest.py index 8f48a4306c7..8a980d3ddb0 100644 --- a/tests/components/nzbget/conftest.py +++ b/tests/components/nzbget/conftest.py @@ -1,5 +1,6 @@ """Define fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest @@ -8,7 +9,7 @@ from . import MOCK_HISTORY, MOCK_STATUS, MOCK_VERSION @pytest.fixture -def nzbget_api(hass): +def nzbget_api() -> Generator[MagicMock]: """Mock NZBGetApi for easier testing.""" with patch("homeassistant.components.nzbget.coordinator.NZBGetAPI") as mock_api: instance = mock_api.return_value diff --git a/tests/components/nzbget/test_init.py b/tests/components/nzbget/test_init.py index a119bb953ce..baf0a37546d 100644 --- a/tests/components/nzbget/test_init.py +++ b/tests/components/nzbget/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import patch from pynzbgetapi import NZBGetAPIException +import pytest from homeassistant.components.nzbget.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -13,7 +14,8 @@ from . import ENTRY_CONFIG, _patch_version, init_integration from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, nzbget_api) -> None: +@pytest.mark.usefixtures("nzbget_api") +async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) diff --git a/tests/components/nzbget/test_sensor.py b/tests/components/nzbget/test_sensor.py index 30a7f262b0b..38f7d8a68c3 100644 --- a/tests/components/nzbget/test_sensor.py +++ b/tests/components/nzbget/test_sensor.py @@ -3,6 +3,8 @@ from datetime import timedelta from unittest.mock import patch +import pytest + from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -16,9 +18,8 @@ from homeassistant.util import dt as dt_util from . 
import init_integration -async def test_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api -) -> None: +@pytest.mark.usefixtures("nzbget_api") +async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test the creation and values of the sensors.""" now = dt_util.utcnow().replace(microsecond=0) with patch("homeassistant.components.nzbget.sensor.utcnow", return_value=now): diff --git a/tests/components/nzbget/test_switch.py b/tests/components/nzbget/test_switch.py index 1c518486b9f..afb88a7be82 100644 --- a/tests/components/nzbget/test_switch.py +++ b/tests/components/nzbget/test_switch.py @@ -1,5 +1,7 @@ """Test the NZBGet switches.""" +from unittest.mock import MagicMock + from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -16,7 +18,7 @@ from . import init_integration async def test_download_switch( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api + hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api: MagicMock ) -> None: """Test the creation and values of the download switch.""" instance = nzbget_api.return_value @@ -44,7 +46,9 @@ async def test_download_switch( assert state.state == STATE_OFF -async def test_download_switch_services(hass: HomeAssistant, nzbget_api) -> None: +async def test_download_switch_services( + hass: HomeAssistant, nzbget_api: MagicMock +) -> None: """Test download switch services.""" instance = nzbget_api.return_value From 04b1d2414da4fb44ffdef8fd222db2e89c406a93 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 15:19:08 +0200 Subject: [PATCH 0665/1635] Improve type hints in mobile_app tests (#123793) --- tests/components/mobile_app/conftest.py | 7 +- .../mobile_app/test_binary_sensor.py | 22 ++-- .../mobile_app/test_device_tracker.py | 11 +- tests/components/mobile_app/test_sensor.py | 68 ++++++----- tests/components/mobile_app/test_webhook.py | 112 ++++++++++++------ 5 files changed, 147 insertions(+), 73 deletions(-) diff --git a/tests/components/mobile_app/conftest.py b/tests/components/mobile_app/conftest.py index 9f0681d41f7..53e90cb61ae 100644 --- a/tests/components/mobile_app/conftest.py +++ b/tests/components/mobile_app/conftest.py @@ -1,6 +1,7 @@ """Tests for mobile_app component.""" from http import HTTPStatus +from typing import Any from aiohttp.test_utils import TestClient import pytest @@ -15,7 +16,9 @@ from tests.typing import ClientSessionGenerator @pytest.fixture -async def create_registrations(hass, webhook_client): +async def create_registrations( + hass: HomeAssistant, webhook_client: TestClient +) -> tuple[dict[str, Any], dict[str, Any]]: """Return two new registrations.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -37,7 +40,7 @@ async def create_registrations(hass, webhook_client): @pytest.fixture -async def push_registration(hass, webhook_client): +async def push_registration(hass: HomeAssistant, webhook_client: TestClient): """Return registration with push notifications enabled.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) diff --git a/tests/components/mobile_app/test_binary_sensor.py b/tests/components/mobile_app/test_binary_sensor.py index acebd8796b7..9ffb61f92ab 100644 --- a/tests/components/mobile_app/test_binary_sensor.py +++ b/tests/components/mobile_app/test_binary_sensor.py @@ -1,7 +1,9 @@ """Entity tests for mobile_app.""" from http import HTTPStatus +from typing import Any +from 
aiohttp.test_utils import TestClient import pytest from homeassistant.const import STATE_UNKNOWN @@ -12,8 +14,8 @@ from homeassistant.helpers import device_registry as dr async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -98,7 +100,9 @@ async def test_sensor( async def test_sensor_must_register( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -122,8 +126,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -185,7 +189,9 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -244,7 +250,9 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_device_tracker.py b/tests/components/mobile_app/test_device_tracker.py index e3e2ce3227a..d1cbc21c36b 100644 --- a/tests/components/mobile_app/test_device_tracker.py +++ b/tests/components/mobile_app/test_device_tracker.py @@ -1,12 +1,17 @@ """Test mobile app device tracker.""" from http import HTTPStatus +from typing import Any + +from aiohttp.test_utils import TestClient from homeassistant.core import HomeAssistant async def test_sending_location( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( @@ -76,7 +81,9 @@ async def test_sending_location( async def test_restoring_location( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( diff --git a/tests/components/mobile_app/test_sensor.py b/tests/components/mobile_app/test_sensor.py index a7fb0ffc183..6411274fc4e 100644 --- a/tests/components/mobile_app/test_sensor.py +++ b/tests/components/mobile_app/test_sensor.py @@ -1,8 +1,10 @@ """Entity tests for mobile_app.""" from http import HTTPStatus +from typing import Any from unittest.mock import 
patch +from aiohttp.test_utils import TestClient import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -14,7 +16,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) @pytest.mark.parametrize( @@ -28,12 +34,12 @@ async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, - unit_system, - state_unit, - state1, - state2, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + unit_system: UnitSystem, + state_unit: UnitOfTemperature, + state1: str, + state2: str, ) -> None: """Test that sensors can be registered and updated.""" hass.config.units = unit_system @@ -149,13 +155,13 @@ async def test_sensor( ) async def test_sensor_migration( hass: HomeAssistant, - create_registrations, - webhook_client, - unique_id, - unit_system, - state_unit, - state1, - state2, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + unique_id: str, + unit_system: UnitSystem, + state_unit: UnitOfTemperature, + state1: str, + state2: str, ) -> None: """Test migration to RestoreSensor.""" hass.config.units = unit_system @@ -243,7 +249,9 @@ async def test_sensor_migration( async def test_sensor_must_register( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -265,8 +273,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -331,7 +339,9 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -390,7 +400,9 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -464,11 +476,11 @@ async def test_update_sensor_no_state( ) async def test_sensor_datetime( hass: HomeAssistant, - create_registrations, - webhook_client, - device_class, - native_value, - state_value, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + device_class: SensorDeviceClass, + native_value: str, + state_value: str, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = 
create_registrations[1]["webhook_id"] @@ -505,8 +517,8 @@ async def test_sensor_datetime( async def test_default_disabling_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be disabled by default upon registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -543,8 +555,8 @@ async def test_default_disabling_entity( async def test_updating_disabled_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors return error if disabled in instance.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index 77798c57f10..61e342a45ce 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -1,10 +1,13 @@ """Webhook tests for mobile_app.""" from binascii import unhexlify +from collections.abc import Callable from http import HTTPStatus import json +from typing import Any from unittest.mock import ANY, patch +from aiohttp.test_utils import TestClient from nacl.encoding import Base64Encoder from nacl.secret import SecretBox import pytest @@ -31,7 +34,7 @@ from tests.components.conversation import MockAgent @pytest.fixture -async def homeassistant(hass): +async def homeassistant(hass: HomeAssistant) -> None: """Load the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @@ -93,7 +96,8 @@ def decrypt_payload_legacy(secret_key, encrypted_data): async def test_webhook_handle_render_template( - create_registrations, webhook_client + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we render templates properly.""" resp = await webhook_client.post( @@ -121,7 +125,9 @@ async def test_webhook_handle_render_template( async def test_webhook_handle_call_services( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we call services properly.""" calls = async_mock_service(hass, "test", "mobile_app") @@ -137,7 +143,9 @@ async def test_webhook_handle_call_services( async def test_webhook_handle_fire_event( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can fire events.""" events = [] @@ -161,7 +169,7 @@ async def test_webhook_handle_fire_event( assert events[0].data["hello"] == "yo world" -async def test_webhook_update_registration(webhook_client) -> None: +async def test_webhook_update_registration(webhook_client: TestClient) -> None: """Test that a we can update an existing registration via webhook.""" register_resp = await webhook_client.post( "/api/mobile_app/registrations", json=REGISTER_CLEARTEXT @@ -186,7 +194,9 @@ async def test_webhook_update_registration(webhook_client) -> None: async def test_webhook_handle_get_zones( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, 
) -> None: """Test that we can get zones properly.""" # Zone is already loaded as part of the fixture, @@ -238,7 +248,9 @@ async def test_webhook_handle_get_zones( async def test_webhook_handle_get_config( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can get config properly.""" webhook_id = create_registrations[1]["webhook_id"] @@ -299,7 +311,9 @@ async def test_webhook_handle_get_config( async def test_webhook_returns_error_incorrect_json( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that an error is returned when JSON is invalid.""" resp = await webhook_client.post( @@ -323,7 +337,11 @@ async def test_webhook_returns_error_incorrect_json( ], ) async def test_webhook_handle_decryption( - hass: HomeAssistant, webhook_client, create_registrations, msg, generate_response + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + msg: dict[str, Any], + generate_response: Callable[[HomeAssistant], dict[str, Any]], ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -346,7 +364,8 @@ async def test_webhook_handle_decryption( async def test_webhook_handle_decryption_legacy( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -369,7 +388,9 @@ async def test_webhook_handle_decryption_legacy( async def test_webhook_handle_decryption_fail( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -412,7 +433,9 @@ async def test_webhook_handle_decryption_fail( async def test_webhook_handle_decryption_legacy_fail( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -455,7 +478,8 @@ async def test_webhook_handle_decryption_legacy_fail( async def test_webhook_handle_decryption_legacy_upgrade( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -510,7 +534,8 @@ async def test_webhook_handle_decryption_legacy_upgrade( async def test_webhook_requires_encryption( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that encrypted registrations only accept encrypted data.""" resp = await webhook_client.post( @@ -527,7 +552,9 @@ async def test_webhook_requires_encryption( async def test_webhook_update_location_without_locations( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + 
create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" @@ -564,7 +591,9 @@ async def test_webhook_update_location_without_locations( async def test_webhook_update_location_with_gps( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( @@ -586,7 +615,9 @@ async def test_webhook_update_location_with_gps( async def test_webhook_update_location_with_gps_without_accuracy( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( @@ -604,7 +635,9 @@ async def test_webhook_update_location_with_gps_without_accuracy( async def test_webhook_update_location_with_location_name( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" @@ -666,7 +699,9 @@ async def test_webhook_update_location_with_location_name( async def test_webhook_enable_encryption( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that encryption can be added to a reg initially created without.""" webhook_id = create_registrations[1]["webhook_id"] @@ -717,7 +752,9 @@ async def test_webhook_enable_encryption( async def test_webhook_camera_stream_non_existent( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for a non-existent camera.""" webhook_id = create_registrations[1]["webhook_id"] @@ -736,7 +773,9 @@ async def test_webhook_camera_stream_non_existent( async def test_webhook_camera_stream_non_hls( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for a non-HLS/stream-supporting camera.""" hass.states.async_set("camera.non_stream_camera", "idle", {"supported_features": 0}) @@ -761,7 +800,9 @@ async def test_webhook_camera_stream_non_hls( async def test_webhook_camera_stream_stream_available( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for an HLS/stream-supporting camera.""" hass.states.async_set( @@ -791,7 +832,9 @@ async def test_webhook_camera_stream_stream_available( async def test_webhook_camera_stream_stream_available_but_errors( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for an HLS/stream-supporting camera but that streaming errors.""" hass.states.async_set( @@ -823,8 +866,8 @@ async def 
test_webhook_camera_stream_stream_available_but_errors( async def test_webhook_handle_scan_tag( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can scan tags.""" device = device_registry.async_get_device(identifiers={(DOMAIN, "mock-device-id")}) @@ -847,7 +890,9 @@ async def test_webhook_handle_scan_tag( async def test_register_sensor_limits_state_class( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we limit state classes to sensors only.""" webhook_id = create_registrations[1]["webhook_id"] @@ -890,8 +935,8 @@ async def test_register_sensor_limits_state_class( async def test_reregister_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can add more info in re-registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -992,11 +1037,11 @@ async def test_reregister_sensor( assert entry.original_icon is None +@pytest.mark.usefixtures("homeassistant") async def test_webhook_handle_conversation_process( hass: HomeAssistant, - homeassistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, mock_conversation_agent: MockAgent, ) -> None: """Test that we can converse.""" @@ -1042,9 +1087,8 @@ async def test_webhook_handle_conversation_process( async def test_sending_sensor_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, - caplog: pytest.LogCaptureFixture, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can register and send sensor state as number and None.""" webhook_id = create_registrations[1]["webhook_id"] From 135f15fdc3dfc17812c02260799a5c6b521ab126 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 16:01:24 +0200 Subject: [PATCH 0666/1635] Improve type hints in openai_conversation tests (#123811) --- tests/components/openai_conversation/conftest.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/components/openai_conversation/conftest.py b/tests/components/openai_conversation/conftest.py index 6d770b51ce9..4639d0dc8e0 100644 --- a/tests/components/openai_conversation/conftest.py +++ b/tests/components/openai_conversation/conftest.py @@ -13,7 +13,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( title="OpenAI", @@ -27,7 +27,9 @@ def mock_config_entry(hass): @pytest.fixture -def mock_config_entry_with_assist(hass, mock_config_entry): +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -36,7 +38,9 @@ def mock_config_entry_with_assist(hass, mock_config_entry): @pytest.fixture -async def mock_init_component(hass, mock_config_entry): 
+async def mock_init_component( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: """Initialize integration.""" with patch( "openai.resources.models.AsyncModels.list", From 4cc3f7211bf249b2776b6c2e70bd7a5f311f0a83 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 16:07:17 +0200 Subject: [PATCH 0667/1635] Improve type hints in openuv tests (#123813) --- tests/components/openuv/conftest.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/tests/components/openuv/conftest.py b/tests/components/openuv/conftest.py index cc344d25ccb..9bb1970bc2f 100644 --- a/tests/components/openuv/conftest.py +++ b/tests/components/openuv/conftest.py @@ -2,6 +2,7 @@ from collections.abc import Generator import json +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -13,6 +14,7 @@ from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, ) +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -41,7 +43,9 @@ def client_fixture(data_protection_window, data_uv_index): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -54,7 +58,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_API_KEY: TEST_API_KEY, @@ -89,7 +93,9 @@ async def mock_pyopenuv_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_pyopenuv): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pyopenuv: None +) -> None: """Define a fixture to set up openuv.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() From e8157ed9a2d2b3d38492ccb9c885db699a145e37 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 16:08:15 +0200 Subject: [PATCH 0668/1635] Improve type hints in otbr tests (#123814) --- tests/components/otbr/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index ba0f43c4a71..56f29bdc79b 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -19,7 +19,7 @@ from tests.common import MockConfigEntry @pytest.fixture(name="otbr_config_entry_multipan") -async def otbr_config_entry_multipan_fixture(hass): +async def otbr_config_entry_multipan_fixture(hass: HomeAssistant) -> None: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, @@ -46,7 +46,7 @@ async def otbr_config_entry_multipan_fixture(hass): @pytest.fixture(name="otbr_config_entry_thread") -async def otbr_config_entry_thread_fixture(hass): +async def otbr_config_entry_thread_fixture(hass: HomeAssistant) -> None: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_THREAD, From 679baddd3d2050362d348d521c438691357ef2db Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 16:08:36 +0200 Subject: [PATCH 0669/1635] Improve type hints in openalpr_cloud 
tests (#123812) --- tests/components/openalpr_cloud/test_image_processing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/openalpr_cloud/test_image_processing.py b/tests/components/openalpr_cloud/test_image_processing.py index 7115c3e7bf0..143513f9852 100644 --- a/tests/components/openalpr_cloud/test_image_processing.py +++ b/tests/components/openalpr_cloud/test_image_processing.py @@ -6,7 +6,7 @@ import pytest from homeassistant.components import camera, image_processing as ip from homeassistant.components.openalpr_cloud.image_processing import OPENALPR_API_URL -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_capture_events, load_fixture @@ -15,13 +15,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant): +async def setup_homeassistant(hass: HomeAssistant) -> None: """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @pytest.fixture -async def setup_openalpr_cloud(hass): +async def setup_openalpr_cloud(hass: HomeAssistant) -> None: """Set up openalpr cloud.""" config = { ip.DOMAIN: { @@ -43,7 +43,7 @@ async def setup_openalpr_cloud(hass): @pytest.fixture -async def alpr_events(hass): +async def alpr_events(hass: HomeAssistant) -> list[Event]: """Listen for events.""" return async_capture_events(hass, "image_processing.found_plate") From 995ed778498a857587b844d3aeaa25ff6b37efb8 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Tue, 13 Aug 2024 10:23:13 -0400 Subject: [PATCH 0670/1635] Add error handling for Russound RIO async calls (#123756) Add better error handling to Russound RIO --- .../components/russound_rio/__init__.py | 8 +++-- .../components/russound_rio/media_player.py | 30 ++++++++++++++++++- 2 files changed, 34 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 1560a4cd332..e36cedecfe3 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -8,7 +8,7 @@ from aiorussound import Russound from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryNotReady from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS @@ -22,13 +22,15 @@ type RussoundConfigEntry = ConfigEntry[Russound] async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: """Set up a config entry.""" - russ = Russound(hass.loop, entry.data[CONF_HOST], entry.data[CONF_PORT]) + host = entry.data[CONF_HOST] + port = entry.data[CONF_PORT] + russ = Russound(hass.loop, host, port) try: async with asyncio.timeout(CONNECT_TIMEOUT): await russ.connect() except RUSSOUND_RIO_EXCEPTIONS as err: - raise ConfigEntryError(err) from err + raise ConfigEntryNotReady(f"Error while connecting to {host}:{port}") from err entry.runtime_data = russ diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index ff0d9e006c0..8cf07e23cc8 100644 --- a/homeassistant/components/russound_rio/media_player.py 
+++ b/homeassistant/components/russound_rio/media_player.py @@ -2,7 +2,10 @@ from __future__ import annotations +from collections.abc import Awaitable, Callable, Coroutine +from functools import wraps import logging +from typing import Any, Concatenate from aiorussound import Source, Zone @@ -17,12 +20,13 @@ from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import RussoundConfigEntry +from . import RUSSOUND_RIO_EXCEPTIONS, RussoundConfigEntry from .const import DOMAIN, MP_FEATURES_BY_FLAG _LOGGER = logging.getLogger(__name__) @@ -107,6 +111,24 @@ async def async_setup_entry( async_add_entities(entities) +def command[_T: RussoundZoneDevice, **_P]( + func: Callable[Concatenate[_T, _P], Awaitable[None]], +) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]: + """Wrap async calls to raise on request error.""" + + @wraps(func) + async def decorator(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Wrap all command methods.""" + try: + await func(self, *args, **kwargs) + except RUSSOUND_RIO_EXCEPTIONS as exc: + raise HomeAssistantError( + f"Error executing {func.__name__} on entity {self.entity_id}," + ) from exc + + return decorator + + class RussoundZoneDevice(MediaPlayerEntity): """Representation of a Russound Zone.""" @@ -221,19 +243,23 @@ class RussoundZoneDevice(MediaPlayerEntity): """ return float(self._zone.volume or "0") / 50.0 + @command async def async_turn_off(self) -> None: """Turn off the zone.""" await self._zone.zone_off() + @command async def async_turn_on(self) -> None: """Turn on the zone.""" await self._zone.zone_on() + @command async def async_set_volume_level(self, volume: float) -> None: """Set the volume level.""" rvol = int(volume * 50.0) await self._zone.set_volume(rvol) + @command async def async_select_source(self, source: str) -> None: """Select the source input for this zone.""" for source_id, src in self._sources.items(): @@ -242,10 +268,12 @@ class RussoundZoneDevice(MediaPlayerEntity): await self._zone.select_source(source_id) break + @command async def async_volume_up(self) -> None: """Step the volume up.""" await self._zone.volume_up() + @command async def async_volume_down(self) -> None: """Step the volume down.""" await self._zone.volume_down() From ba54a19d4b9819f2f35ad628089370c6c2a37c7f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 13 Aug 2024 18:01:06 +0200 Subject: [PATCH 0671/1635] Simplify mock_tts_cache_dir_autouse fixture (#123783) --- tests/components/assist_pipeline/conftest.py | 3 +-- tests/components/cloud/conftest.py | 3 +-- tests/components/elevenlabs/test_tts.py | 4 ++-- tests/components/google_translate/test_tts.py | 3 +-- tests/components/marytts/test_tts.py | 3 +-- tests/components/microsoft/test_tts.py | 3 +-- tests/components/voicerss/test_tts.py | 3 +-- tests/components/voip/test_voip.py | 3 +-- tests/components/wyoming/conftest.py | 3 +-- tests/components/yandextts/test_tts.py | 3 +-- 10 files changed, 
11 insertions(+), 20 deletions(-) diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index b2eca1e7ce1..141baaa9870 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -43,9 +43,8 @@ BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir class BaseProvider: diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index 3a5d333f9b8..2edd9571bdd 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -187,9 +187,8 @@ def set_cloud_prefs_fixture( @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index 5eee4084452..8b14ab26487 100644 --- a/tests/components/elevenlabs/test_tts.py +++ b/tests/components/elevenlabs/test_tts.py @@ -3,6 +3,7 @@ from __future__ import annotations from http import HTTPStatus +from pathlib import Path from typing import Any from unittest.mock import AsyncMock, MagicMock, patch @@ -34,9 +35,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir): +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 41cecd8cd98..95313df6140 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -30,9 +30,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/marytts/test_tts.py b/tests/components/marytts/test_tts.py index 75784bb56c5..0ad27cde29b 100644 --- a/tests/components/marytts/test_tts.py +++ b/tests/components/marytts/test_tts.py @@ -34,9 +34,8 @@ def get_empty_wav() -> bytes: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index dca760230ac..0f11501843e 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -20,9 +20,8 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return 
mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/voicerss/test_tts.py b/tests/components/voicerss/test_tts.py index 1a2ad002586..776c0ac153a 100644 --- a/tests/components/voicerss/test_tts.py +++ b/tests/components/voicerss/test_tts.py @@ -36,9 +36,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index c2978afc17f..aab35bfd029 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -19,9 +19,8 @@ _MEDIA_ID = "12345" @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir def _empty_wav() -> bytes: diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index f6093e34261..770186d92aa 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -19,9 +19,8 @@ from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/yandextts/test_tts.py b/tests/components/yandextts/test_tts.py index 496c187469a..77878c2be51 100644 --- a/tests/components/yandextts/test_tts.py +++ b/tests/components/yandextts/test_tts.py @@ -29,9 +29,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: From 493859e5898b2fa8696205208f6a2e88f0cd9731 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Tue, 13 Aug 2024 18:44:12 +0200 Subject: [PATCH 0672/1635] Add update platform to AirGradient (#123534) --- .../components/airgradient/__init__.py | 1 + .../components/airgradient/update.py | 55 +++++++++++++++ tests/components/airgradient/conftest.py | 1 + .../airgradient/snapshots/test_update.ambr | 58 ++++++++++++++++ tests/components/airgradient/test_update.py | 69 +++++++++++++++++++ 5 files changed, 184 insertions(+) create mode 100644 homeassistant/components/airgradient/update.py create mode 100644 tests/components/airgradient/snapshots/test_update.ambr create mode 100644 tests/components/airgradient/test_update.py diff --git a/homeassistant/components/airgradient/__init__.py b/homeassistant/components/airgradient/__init__.py index 69f1e70c6af..7ee8ac6a3c7 100644 --- a/homeassistant/components/airgradient/__init__.py +++ b/homeassistant/components/airgradient/__init__.py @@ -21,6 +21,7 @@ PLATFORMS: list[Platform] = [ Platform.SELECT, Platform.SENSOR, Platform.SWITCH, + Platform.UPDATE, ] diff --git a/homeassistant/components/airgradient/update.py b/homeassistant/components/airgradient/update.py new file mode 
100644 index 00000000000..95e64930ea6 --- /dev/null +++ b/homeassistant/components/airgradient/update.py @@ -0,0 +1,55 @@ +"""Airgradient Update platform.""" + +from datetime import timedelta +from functools import cached_property + +from homeassistant.components.update import UpdateDeviceClass, UpdateEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import AirGradientConfigEntry, AirGradientMeasurementCoordinator +from .entity import AirGradientEntity + +SCAN_INTERVAL = timedelta(hours=1) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: AirGradientConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Airgradient update platform.""" + + data = config_entry.runtime_data + + async_add_entities([AirGradientUpdate(data.measurement)], True) + + +class AirGradientUpdate(AirGradientEntity, UpdateEntity): + """Representation of Airgradient Update.""" + + _attr_device_class = UpdateDeviceClass.FIRMWARE + coordinator: AirGradientMeasurementCoordinator + + def __init__(self, coordinator: AirGradientMeasurementCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.serial_number}-update" + + @cached_property + def should_poll(self) -> bool: + """Return True because we need to poll the latest version.""" + return True + + @property + def installed_version(self) -> str: + """Return the installed version of the entity.""" + return self.coordinator.data.firmware_version + + async def async_update(self) -> None: + """Update the entity.""" + self._attr_latest_version = ( + await self.coordinator.client.get_latest_firmware_version( + self.coordinator.serial_number + ) + ) diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index a6ee85ecbdd..1899e12c8ae 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -44,6 +44,7 @@ def mock_airgradient_client() -> Generator[AsyncMock]: client.get_config.return_value = Config.from_json( load_fixture("get_config_local.json", DOMAIN) ) + client.get_latest_firmware_version.return_value = "3.1.4" yield client diff --git a/tests/components/airgradient/snapshots/test_update.ambr b/tests/components/airgradient/snapshots/test_update.ambr new file mode 100644 index 00000000000..c639a97d5dd --- /dev/null +++ b/tests/components/airgradient/snapshots/test_update.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_all_entities[update.airgradient_firmware-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.airgradient_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '84fce612f5b8-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[update.airgradient_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 
'https://brands.home-assistant.io/_/airgradient/icon.png', + 'friendly_name': 'Airgradient Firmware', + 'in_progress': False, + 'installed_version': '3.1.1', + 'latest_version': '3.1.4', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.airgradient_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/airgradient/test_update.py b/tests/components/airgradient/test_update.py new file mode 100644 index 00000000000..020a9a82a71 --- /dev/null +++ b/tests/components/airgradient/test_update.py @@ -0,0 +1,69 @@ +"""Tests for the AirGradient update platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.UPDATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_mechanism( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update entity.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_ON + assert state.attributes["installed_version"] == "3.1.1" + assert state.attributes["latest_version"] == "3.1.4" + mock_airgradient_client.get_latest_firmware_version.assert_called_once() + mock_airgradient_client.get_latest_firmware_version.reset_mock() + + mock_airgradient_client.get_current_measures.return_value.firmware_version = "3.1.4" + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "3.1.4" + assert state.attributes["latest_version"] == "3.1.4" + + mock_airgradient_client.get_latest_firmware_version.return_value = "3.1.5" + + freezer.tick(timedelta(minutes=59)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_airgradient_client.get_latest_firmware_version.assert_called_once() + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_ON + assert state.attributes["installed_version"] == "3.1.4" + assert state.attributes["latest_version"] == "3.1.5" From f14d5ba5f28da23d86b6b8eedd6c8443f09d2272 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 13 Aug 2024 14:06:38 -0500 Subject: [PATCH 0673/1635] Bump yalexs to 8.0.2 (#123817) --- homeassistant/components/august/config_flow.py | 2 +- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/august/mocks.py | 2 +- tests/components/august/test_config_flow.py | 2 +- tests/components/august/test_gateway.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/august/config_flow.py b/homeassistant/components/august/config_flow.py index 18c15ad61a1..3523a4f7c39 100644 --- a/homeassistant/components/august/config_flow.py +++ b/homeassistant/components/august/config_flow.py @@ -8,7 +8,7 @@ from typing import Any import aiohttp import voluptuous as vol -from yalexs.authenticator import ValidationResult +from yalexs.authenticator_common import ValidationResult from yalexs.const import BRANDS, DEFAULT_BRAND from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 5a911eee5e5..13035d68dfe 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==6.5.0", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.0.2", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index e19819dafde..1c6163c4bb9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2953,7 +2953,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.5.0 +yalexs==8.0.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 575fd7757be..496789466d2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2336,7 +2336,7 @@ yalesmartalarmclient==0.3.9 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==6.5.0 +yalexs==8.0.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index 30be50e75c9..a0f5b55a607 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -25,7 +25,7 @@ from yalexs.activity import ( DoorOperationActivity, LockOperationActivity, ) -from yalexs.authenticator import AuthenticationState +from yalexs.authenticator_common import AuthenticationState from yalexs.const import Brand from yalexs.doorbell import Doorbell, DoorbellDetail from yalexs.lock import Lock, LockDetail diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index aec08864c65..fdebb8d5c46 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from yalexs.authenticator import ValidationResult +from yalexs.authenticator_common import ValidationResult from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from homeassistant import config_entries diff --git a/tests/components/august/test_gateway.py b/tests/components/august/test_gateway.py index e605fd74f0a..74266397ed5 100644 --- a/tests/components/august/test_gateway.py +++ b/tests/components/august/test_gateway.py @@ -50,5 +50,5 @@ async def 
_patched_refresh_access_token( ) await august_gateway.async_refresh_access_token_if_needed() refresh_access_token_mock.assert_called() - assert august_gateway.access_token == new_token + assert await august_gateway.async_get_access_token() == new_token assert august_gateway.authentication.access_token_expires == new_token_expire_time From f8bc662620943c8f3441d7ebeb13374342e55978 Mon Sep 17 00:00:00 2001 From: Paarth Shah Date: Tue, 13 Aug 2024 12:31:59 -0700 Subject: [PATCH 0674/1635] Bump `matrix-nio` to 0.25.0 (#123832) Bump matrix-nio to 0.25.0 Co-authored-by: J. Nick Koston --- homeassistant/components/matrix/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 7e854a85434..3c465c44f24 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.24.0", "Pillow==10.4.0"] + "requirements": ["matrix-nio==0.25.0", "Pillow==10.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1c6163c4bb9..a87cf317b7b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1297,7 +1297,7 @@ lw12==0.9.2 lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.24.0 +matrix-nio==0.25.0 # homeassistant.components.maxcube maxcube-api==0.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 496789466d2..1fb0f027ede 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1072,7 +1072,7 @@ lupupy==0.3.2 lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.24.0 +matrix-nio==0.25.0 # homeassistant.components.maxcube maxcube-api==0.4.3 From 4a6e81296303cf9b3e72f9d4daffa8a2f22bd4a6 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 13 Aug 2024 22:21:48 +0200 Subject: [PATCH 0675/1635] Bump py-synologydsm-api to 2.4.5 (#123815) bump py-synologydsm-api to 2.4.5 --- homeassistant/components/synology_dsm/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/synology_dsm/manifest.json b/homeassistant/components/synology_dsm/manifest.json index b1133fd61ad..9d977609d14 100644 --- a/homeassistant/components/synology_dsm/manifest.json +++ b/homeassistant/components/synology_dsm/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/synology_dsm", "iot_class": "local_polling", "loggers": ["synology_dsm"], - "requirements": ["py-synologydsm-api==2.4.4"], + "requirements": ["py-synologydsm-api==2.4.5"], "ssdp": [ { "manufacturer": "Synology", diff --git a/requirements_all.txt b/requirements_all.txt index a87cf317b7b..535fc6a95ba 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1662,7 +1662,7 @@ py-schluter==0.1.7 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.4 +py-synologydsm-api==2.4.5 # homeassistant.components.zabbix py-zabbix==1.1.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1fb0f027ede..0fd598c84c2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1354,7 +1354,7 @@ py-nightscout==1.2.2 py-sucks==0.9.10 # 
homeassistant.components.synology_dsm -py-synologydsm-api==2.4.4 +py-synologydsm-api==2.4.5 # homeassistant.components.hdmi_cec pyCEC==0.5.2 From 2b6949f3c7dd86ce5d1a729be1ef787975108f32 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 13 Aug 2024 16:29:26 -0500 Subject: [PATCH 0676/1635] Bump uiprotect to 6.0.2 (#123808) changelog: https://github.com/uilibs/uiprotect/compare/v6.0.1...v6.0.2 --- homeassistant/components/unifiprotect/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index 93536d1ad1b..4483a5990eb 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==6.0.1", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.0.2", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/requirements_all.txt b/requirements_all.txt index 535fc6a95ba..cadd93c2b1f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2813,7 +2813,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.0.1 +uiprotect==6.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0fd598c84c2..2b5581f3181 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2217,7 +2217,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==6.0.1 +uiprotect==6.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 From 29887c2a17d40c7e413919ef09557ea0f3dfb6ef Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Tue, 13 Aug 2024 17:40:51 -0400 Subject: [PATCH 0677/1635] Add base entity to Russound RIO integration (#123842) * Add base entity to Russound RIO integration * Set entity back to primary mac addr * Switch to type shorthand --- .../components/russound_rio/entity.py | 70 +++++++++++++++++++ .../components/russound_rio/media_player.py | 59 ++-------------- 2 files changed, 75 insertions(+), 54 deletions(-) create mode 100644 homeassistant/components/russound_rio/entity.py diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py new file mode 100644 index 00000000000..3430a77108b --- /dev/null +++ b/homeassistant/components/russound_rio/entity.py @@ -0,0 +1,70 @@ +"""Base entity for Russound RIO integration.""" + +from collections.abc import Awaitable, Callable, Coroutine +from functools import wraps +from typing import Any, Concatenate + +from aiorussound import Controller + +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS + + +def command[_EntityT: RussoundBaseEntity, **_P]( + func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Wrap async calls to raise on request error.""" + + @wraps(func) + async def decorator(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Wrap all command 
methods.""" + try: + await func(self, *args, **kwargs) + except RUSSOUND_RIO_EXCEPTIONS as exc: + raise HomeAssistantError( + f"Error executing {func.__name__} on entity {self.entity_id}," + ) from exc + + return decorator + + +class RussoundBaseEntity(Entity): + """Russound Base Entity.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__( + self, + controller: Controller, + ) -> None: + """Initialize the entity.""" + self._instance = controller.instance + self._controller = controller + self._primary_mac_address = ( + controller.mac_address or controller.parent_controller.mac_address + ) + self._device_identifier = ( + self._controller.mac_address + or f"{self._primary_mac_address}-{self._controller.controller_id}" + ) + self._attr_device_info = DeviceInfo( + # Use MAC address of Russound device as identifier + identifiers={(DOMAIN, self._device_identifier)}, + manufacturer="Russound", + name=controller.controller_type, + model=controller.controller_type, + sw_version=controller.firmware_version, + ) + if controller.parent_controller: + self._attr_device_info["via_device"] = ( + DOMAIN, + controller.parent_controller.mac_address, + ) + else: + self._attr_device_info["connections"] = { + (CONNECTION_NETWORK_MAC, controller.mac_address) + } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 8cf07e23cc8..5f11227ef53 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -2,10 +2,7 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable, Coroutine -from functools import wraps import logging -from typing import Any, Concatenate from aiorussound import Source, Zone @@ -20,14 +17,13 @@ from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from . import RUSSOUND_RIO_EXCEPTIONS, RussoundConfigEntry +from . 
import RussoundConfigEntry from .const import DOMAIN, MP_FEATURES_BY_FLAG +from .entity import RussoundBaseEntity, command _LOGGER = logging.getLogger(__name__) @@ -111,31 +107,11 @@ async def async_setup_entry( async_add_entities(entities) -def command[_T: RussoundZoneDevice, **_P]( - func: Callable[Concatenate[_T, _P], Awaitable[None]], -) -> Callable[Concatenate[_T, _P], Coroutine[Any, Any, None]]: - """Wrap async calls to raise on request error.""" - - @wraps(func) - async def decorator(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> None: - """Wrap all command methods.""" - try: - await func(self, *args, **kwargs) - except RUSSOUND_RIO_EXCEPTIONS as exc: - raise HomeAssistantError( - f"Error executing {func.__name__} on entity {self.entity_id}," - ) from exc - - return decorator - - -class RussoundZoneDevice(MediaPlayerEntity): +class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): """Representation of a Russound Zone.""" _attr_device_class = MediaPlayerDeviceClass.SPEAKER _attr_media_content_type = MediaType.MUSIC - _attr_should_poll = False - _attr_has_entity_name = True _attr_supported_features = ( MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_STEP @@ -146,36 +122,11 @@ class RussoundZoneDevice(MediaPlayerEntity): def __init__(self, zone: Zone, sources: dict[int, Source]) -> None: """Initialize the zone device.""" - self._controller = zone.controller + super().__init__(zone.controller) self._zone = zone self._sources = sources self._attr_name = zone.name - primary_mac_address = ( - self._controller.mac_address - or self._controller.parent_controller.mac_address - ) - self._attr_unique_id = f"{primary_mac_address}-{zone.device_str()}" - device_identifier = ( - self._controller.mac_address - or f"{primary_mac_address}-{self._controller.controller_id}" - ) - self._attr_device_info = DeviceInfo( - # Use MAC address of Russound device as identifier - identifiers={(DOMAIN, device_identifier)}, - manufacturer="Russound", - name=self._controller.controller_type, - model=self._controller.controller_type, - sw_version=self._controller.firmware_version, - ) - if self._controller.parent_controller: - self._attr_device_info["via_device"] = ( - DOMAIN, - self._controller.parent_controller.mac_address, - ) - else: - self._attr_device_info["connections"] = { - (CONNECTION_NETWORK_MAC, self._controller.mac_address) - } + self._attr_unique_id = f"{self._primary_mac_address}-{zone.device_str()}" for flag, feature in MP_FEATURES_BY_FLAG.items(): if flag in zone.instance.supported_features: self._attr_supported_features |= feature From b7bbc938d348b2ae82766f434d508ca9f75b1522 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 09:31:37 +0200 Subject: [PATCH 0678/1635] Drop violating rows before adding foreign constraints in DB schema 44 migration (#123454) * Drop violating rows before adding foreign constraints * Don't delete rows with null-references * Only delete rows when integrityerror is caught * Move restore of dropped foreign key constraints to a separate migration step * Use aliases for tables * Update homeassistant/components/recorder/migration.py * Update test * Don't use alias for table we're deleting from, improve test * Fix MySQL * Update instead of deleting in case of self references * Improve log messages * Batch updates * Add workaround for unsupported LIMIT in PostgreSQL * Simplify --------- Co-authored-by: J. 
Nick Koston --- .../components/recorder/db_schema.py | 2 +- .../components/recorder/migration.py | 239 +++++++++++++++--- tests/components/recorder/test_migrate.py | 115 +++++++-- 3 files changed, 304 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index 8d4cc29d9be..dd293ed6bc2 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 44 +SCHEMA_VERSION = 45 _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index a41de07e243..55856dcf449 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -669,33 +669,177 @@ def _drop_foreign_key_constraints( def _restore_foreign_key_constraints( session_maker: Callable[[], Session], engine: Engine, - dropped_constraints: list[tuple[str, str, ReflectedForeignKeyConstraint]], + foreign_columns: list[tuple[str, str, str | None, str | None]], ) -> None: """Restore foreign key constraints.""" - for table, column, dropped_constraint in dropped_constraints: + for table, column, foreign_table, foreign_column in foreign_columns: constraints = Base.metadata.tables[table].foreign_key_constraints for constraint in constraints: if constraint.column_keys == [column]: break else: - _LOGGER.info( - "Did not find a matching constraint for %s", dropped_constraint - ) + _LOGGER.info("Did not find a matching constraint for %s.%s", table, column) continue + if TYPE_CHECKING: + assert foreign_table is not None + assert foreign_column is not None + # AddConstraint mutates the constraint passed to it, we need to # undo that to avoid changing the behavior of the table schema. # https://github.com/sqlalchemy/sqlalchemy/blob/96f1172812f858fead45cdc7874abac76f45b339/lib/sqlalchemy/sql/ddl.py#L746-L748 create_rule = constraint._create_rule # noqa: SLF001 add_constraint = AddConstraint(constraint) # type: ignore[no-untyped-call] constraint._create_rule = create_rule # noqa: SLF001 + try: + _add_constraint(session_maker, add_constraint, table, column) + except IntegrityError: + _LOGGER.exception( + ( + "Could not update foreign options in %s table, will delete " + "violations and try again" + ), + table, + ) + _delete_foreign_key_violations( + session_maker, engine, table, column, foreign_table, foreign_column + ) + _add_constraint(session_maker, add_constraint, table, column) - with session_scope(session=session_maker()) as session: - try: - connection = session.connection() - connection.execute(add_constraint) - except (InternalError, OperationalError): - _LOGGER.exception("Could not update foreign options in %s table", table) + +def _add_constraint( + session_maker: Callable[[], Session], + add_constraint: AddConstraint, + table: str, + column: str, +) -> None: + """Add a foreign key constraint.""" + _LOGGER.warning( + "Adding foreign key constraint to %s.%s. " + "Note: this can take several minutes on large databases and slow " + "machines. 
Please be patient!", + table, + column, + ) + with session_scope(session=session_maker()) as session: + try: + connection = session.connection() + connection.execute(add_constraint) + except (InternalError, OperationalError): + _LOGGER.exception("Could not update foreign options in %s table", table) + + +def _delete_foreign_key_violations( + session_maker: Callable[[], Session], + engine: Engine, + table: str, + column: str, + foreign_table: str, + foreign_column: str, +) -> None: + """Remove rows which violate the constraints.""" + if engine.dialect.name not in (SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL): + raise RuntimeError( + f"_delete_foreign_key_violations not supported for {engine.dialect.name}" + ) + + _LOGGER.warning( + "Rows in table %s where %s references non existing %s.%s will be %s. " + "Note: this can take several minutes on large databases and slow " + "machines. Please be patient!", + table, + column, + foreign_table, + foreign_column, + "set to NULL" if table == foreign_table else "deleted", + ) + + result: CursorResult | None = None + if table == foreign_table: + # In case of a foreign reference to the same table, we set invalid + # references to NULL instead of deleting as deleting rows may + # cause additional invalid references to be created. This is to handle + # old_state_id referencing a missing state. + if engine.dialect.name == SupportedDialect.MYSQL: + while result is None or result.rowcount > 0: + with session_scope(session=session_maker()) as session: + # The subquery (SELECT {foreign_column} from {foreign_table}) is + # to be compatible with old MySQL versions which do not allow + # referencing the table being updated in the WHERE clause. + result = session.connection().execute( + text( + f"UPDATE {table} as t1 " # noqa: S608 + f"SET {column} = NULL " + "WHERE (" + f"t1.{column} IS NOT NULL AND " + "NOT EXISTS " + "(SELECT 1 " + f"FROM (SELECT {foreign_column} from {foreign_table}) AS t2 " + f"WHERE t2.{foreign_column} = t1.{column})) " + "LIMIT 100000;" + ) + ) + elif engine.dialect.name == SupportedDialect.POSTGRESQL: + while result is None or result.rowcount > 0: + with session_scope(session=session_maker()) as session: + # PostgreSQL does not support LIMIT in UPDATE clauses, so we + # update matches from a limited subquery instead. + result = session.connection().execute( + text( + f"UPDATE {table} " # noqa: S608 + f"SET {column} = NULL " + f"WHERE {column} in " + f"(SELECT {column} from {table} as t1 " + "WHERE (" + f"t1.{column} IS NOT NULL AND " + "NOT EXISTS " + "(SELECT 1 " + f"FROM {foreign_table} AS t2 " + f"WHERE t2.{foreign_column} = t1.{column})) " + "LIMIT 100000);" + ) + ) + return + + if engine.dialect.name == SupportedDialect.MYSQL: + while result is None or result.rowcount > 0: + with session_scope(session=session_maker()) as session: + result = session.connection().execute( + # We don't use an alias for the table we're deleting from, + # support of the form `DELETE FROM table AS t1` was added in + # MariaDB 11.6 and is not supported by MySQL. Those engines + # instead support the from `DELETE t1 from table AS t1` which + # is not supported by PostgreSQL and undocumented for MariaDB. 
+ text( + f"DELETE FROM {table} " # noqa: S608 + "WHERE (" + f"{table}.{column} IS NOT NULL AND " + "NOT EXISTS " + "(SELECT 1 " + f"FROM {foreign_table} AS t2 " + f"WHERE t2.{foreign_column} = {table}.{column})) " + "LIMIT 100000;" + ) + ) + elif engine.dialect.name == SupportedDialect.POSTGRESQL: + while result is None or result.rowcount > 0: + with session_scope(session=session_maker()) as session: + # PostgreSQL does not support LIMIT in DELETE clauses, so we + # delete matches from a limited subquery instead. + result = session.connection().execute( + text( + f"DELETE FROM {table} " # noqa: S608 + f"WHERE {column} in " + f"(SELECT {column} from {table} as t1 " + "WHERE (" + f"t1.{column} IS NOT NULL AND " + "NOT EXISTS " + "(SELECT 1 " + f"FROM {foreign_table} AS t2 " + f"WHERE t2.{foreign_column} = t1.{column})) " + "LIMIT 100000);" + ) + ) @database_job_retry_wrapper("Apply migration update", 10) @@ -1459,6 +1603,38 @@ class _SchemaVersion43Migrator(_SchemaVersionMigrator, target_version=43): ) +FOREIGN_COLUMNS = ( + ( + "events", + ("data_id", "event_type_id"), + ( + ("data_id", "event_data", "data_id"), + ("event_type_id", "event_types", "event_type_id"), + ), + ), + ( + "states", + ("event_id", "old_state_id", "attributes_id", "metadata_id"), + ( + ("event_id", None, None), + ("old_state_id", "states", "state_id"), + ("attributes_id", "state_attributes", "attributes_id"), + ("metadata_id", "states_meta", "metadata_id"), + ), + ), + ( + "statistics", + ("metadata_id",), + (("metadata_id", "statistics_meta", "id"),), + ), + ( + "statistics_short_term", + ("metadata_id",), + (("metadata_id", "statistics_meta", "id"),), + ), +) + + class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): def _apply_update(self) -> None: """Version specific update method.""" @@ -1471,24 +1647,14 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): else "" ) # First drop foreign key constraints - foreign_columns = ( - ("events", ("data_id", "event_type_id")), - ("states", ("event_id", "old_state_id", "attributes_id", "metadata_id")), - ("statistics", ("metadata_id",)), - ("statistics_short_term", ("metadata_id",)), - ) - dropped_constraints = [ - dropped_constraint - for table, columns in foreign_columns - for column in columns - for dropped_constraint in _drop_foreign_key_constraints( - self.session_maker, self.engine, table, column - )[1] - ] - _LOGGER.debug("Dropped foreign key constraints: %s", dropped_constraints) + for table, columns, _ in FOREIGN_COLUMNS: + for column in columns: + _drop_foreign_key_constraints( + self.session_maker, self.engine, table, column + ) # Then modify the constrained columns - for table, columns in foreign_columns: + for table, columns, _ in FOREIGN_COLUMNS: _modify_columns( self.session_maker, self.engine, @@ -1518,9 +1684,24 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): table, [f"{column} {BIG_INTEGER_SQL} {identity_sql}"], ) - # Finally restore dropped constraints + + +class _SchemaVersion45Migrator(_SchemaVersionMigrator, target_version=45): + def _apply_update(self) -> None: + """Version specific update method.""" + # We skip this step for SQLITE, it doesn't have differently sized integers + if self.engine.dialect.name == SupportedDialect.SQLITE: + return + + # Restore constraints dropped in migration to schema version 44 _restore_foreign_key_constraints( - self.session_maker, self.engine, dropped_constraints + self.session_maker, + self.engine, + [ + (table, column, foreign_table, 
foreign_column) + for table, _, foreign_mappings in FOREIGN_COLUMNS + for column, foreign_table, foreign_column in foreign_mappings + ], ) diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index e55793caad7..988eade29b6 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -831,9 +831,9 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: """ constraints_to_recreate = ( - ("events", "data_id"), - ("states", "event_id"), # This won't be found - ("states", "old_state_id"), + ("events", "data_id", "event_data", "data_id"), + ("states", "event_id", None, None), # This won't be found + ("states", "old_state_id", "states", "state_id"), ) db_engine = recorder_db_url.partition("://")[0] @@ -902,7 +902,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: session_maker = Mock(return_value=session) dropped_constraints_1 = [ dropped_constraint - for table, column in constraints_to_recreate + for table, column, _, _ in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column )[1] @@ -914,7 +914,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: session_maker = Mock(return_value=session) dropped_constraints_2 = [ dropped_constraint - for table, column in constraints_to_recreate + for table, column, _, _ in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column )[1] @@ -925,7 +925,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: with Session(engine) as session: session_maker = Mock(return_value=session) migration._restore_foreign_key_constraints( - session_maker, engine, dropped_constraints_1 + session_maker, engine, constraints_to_recreate ) # Check we do find the constrained columns again (they are restored) @@ -933,7 +933,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: session_maker = Mock(return_value=session) dropped_constraints_3 = [ dropped_constraint - for table, column in constraints_to_recreate + for table, column, _, _ in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column )[1] @@ -951,21 +951,7 @@ def test_restore_foreign_key_constraints_with_error( This is not supported on SQLite """ - constraints_to_restore = [ - ( - "events", - "data_id", - { - "comment": None, - "constrained_columns": ["data_id"], - "name": "events_data_id_fkey", - "options": {}, - "referred_columns": ["data_id"], - "referred_schema": None, - "referred_table": "event_data", - }, - ), - ] + constraints_to_restore = [("events", "data_id", "event_data", "data_id")] connection = Mock() connection.execute = Mock(side_effect=InternalError(None, None, None)) @@ -981,3 +967,88 @@ def test_restore_foreign_key_constraints_with_error( ) assert "Could not update foreign options in events table" in caplog.text + + +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_restore_foreign_key_constraints_with_integrity_error( + recorder_db_url: str, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test we can drop and then restore foreign keys. 
+ + This is not supported on SQLite + """ + + constraints = ( + ("events", "data_id", "event_data", "data_id", Events), + ("states", "old_state_id", "states", "state_id", States), + ) + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + # Drop constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + for table, column, _, _, _ in constraints: + migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + + # Add rows violating the constraints + with Session(engine) as session: + for _, column, _, _, table_class in constraints: + session.add(table_class(**{column: 123})) + session.add(table_class()) + # Insert a States row referencing the row with an invalid foreign reference + session.add(States(old_state_id=1)) + session.commit() + + # Check we could insert the rows + with Session(engine) as session: + assert session.query(Events).count() == 2 + assert session.query(States).count() == 3 + + # Restore constraints + to_restore = [ + (table, column, foreign_table, foreign_column) + for table, column, foreign_table, foreign_column, _ in constraints + ] + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints(session_maker, engine, to_restore) + + # Check the violating row has been deleted from the Events table + with Session(engine) as session: + assert session.query(Events).count() == 1 + assert session.query(States).count() == 3 + + engine.dispose() + + assert ( + "Could not update foreign options in events table, " + "will delete violations and try again" + ) in caplog.text + + +def test_delete_foreign_key_violations_unsupported_engine( + caplog: pytest.LogCaptureFixture, +) -> None: + """Test calling _delete_foreign_key_violations with an unsupported engine.""" + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + engine.dialect = Mock() + engine.dialect.name = "sqlite" + + session_maker = Mock(return_value=session) + with pytest.raises( + RuntimeError, match="_delete_foreign_key_violations not supported for sqlite" + ): + migration._delete_foreign_key_violations(session_maker, engine, "", "", "", "") From 5f967fdee25588d50b15bee5429548d5eb096b4a Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Wed, 14 Aug 2024 09:11:11 +0100 Subject: [PATCH 0679/1635] Correct case of config strings in Mastodon (#123859) Fix string casing --- homeassistant/components/mastodon/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mastodon/strings.json b/homeassistant/components/mastodon/strings.json index ed8162eb3df..906b67dd481 100644 --- a/homeassistant/components/mastodon/strings.json +++ b/homeassistant/components/mastodon/strings.json @@ -4,8 +4,8 @@ "user": { "data": { "base_url": "[%key:common::config_flow::data::url%]", - "client_id": "Client Key", - "client_secret": "Client Secret", + "client_id": "Client key", + "client_secret": "Client secret", "access_token": "[%key:common::config_flow::data::access_token%]" }, "data_description": { From 706354173318437d8b25d83d3a41c6d968f81163 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 10:46:29 +0200 Subject: [PATCH 0680/1635] Support None schema in EntityPlatform.async_register_entity_service (#123064) --- 
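A note on the recorder schema migration above (#123454): its core technique is to retry adding a foreign key constraint only after removing (or, for self-references, nulling) the rows that would violate it, in bounded batches so large databases are not locked by one huge statement. The sketch below condenses that batching loop for the MySQL/MariaDB case only; the function name, the session factory and the placeholder table/column arguments are illustrative assumptions, not part of the actual patch in homeassistant/components/recorder/migration.py.

from collections.abc import Callable

from sqlalchemy import text
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session


def delete_orphaned_rows(
    session_maker: Callable[[], Session],
    table: str,
    column: str,
    foreign_table: str,
    foreign_column: str,
) -> None:
    """Delete rows whose foreign reference no longer exists, 100000 at a time.

    MySQL/MariaDB flavour: LIMIT is accepted directly in DELETE. The real
    migration instead deletes via a limited subquery on PostgreSQL, and sets
    the column to NULL rather than deleting when a table references itself.
    """
    result: CursorResult | None = None
    while result is None or result.rowcount > 0:
        with session_maker() as session:
            # Identifiers are trusted schema constants here, not user input.
            result = session.connection().execute(
                text(
                    f"DELETE FROM {table} "  # noqa: S608
                    f"WHERE {table}.{column} IS NOT NULL AND NOT EXISTS "
                    "(SELECT 1 "
                    f"FROM {foreign_table} AS t2 "
                    f"WHERE t2.{foreign_column} = {table}.{column}) "
                    "LIMIT 100000;"
                )
            )
            session.commit()

Keeping each statement bounded to 100000 rows mirrors the migration's goal of making progress incrementally instead of holding one long-running transaction open.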
homeassistant/helpers/config_validation.py | 4 ++-- homeassistant/helpers/entity_platform.py | 4 ++-- tests/helpers/test_entity_platform.py | 28 ++++++++++++++++++++++ 3 files changed, 32 insertions(+), 4 deletions(-) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 01960b6c0c3..ed3eca6e316 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1275,7 +1275,7 @@ BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) def make_entity_service_schema( - schema: dict, *, extra: int = vol.PREVENT_EXTRA + schema: dict | None, *, extra: int = vol.PREVENT_EXTRA ) -> vol.Schema: """Create an entity service schema.""" if not schema and extra == vol.PREVENT_EXTRA: @@ -1283,7 +1283,7 @@ def make_entity_service_schema( # the base schema and avoid compiling a new schema which is the case # for ~50% of services. return BASE_ENTITY_SCHEMA - return _make_entity_service_schema(schema, extra) + return _make_entity_service_schema(schema or {}, extra) SCRIPT_CONVERSATION_RESPONSE_SCHEMA = vol.Any(template, None) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 6774780f00f..f3d5f5b076a 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -985,7 +985,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType, + schema: VolDictType | VolSchemaType | None, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, @@ -997,7 +997,7 @@ class EntityPlatform: if self.hass.services.has_service(self.platform_name, name): return - if isinstance(schema, dict): + if schema is None or isinstance(schema, dict): schema = cv.make_entity_service_schema(schema) service_func: str | HassJob[..., Any] diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 75a41945a91..be8ba998481 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -1760,6 +1760,34 @@ async def test_register_entity_service_limited_to_matching_platforms( } +async def test_register_entity_service_none_schema( + hass: HomeAssistant, +) -> None: + """Test registering a service with schema set to None.""" + entity_platform = MockEntityPlatform( + hass, domain="mock_integration", platform_name="mock_platform", platform=None + ) + entity1 = SlowEntity(name="entity_1") + entity2 = SlowEntity(name="entity_1") + await entity_platform.async_add_entities([entity1, entity2]) + + entities = [] + + @callback + def handle_service(entity, *_): + entities.append(entity) + + entity_platform.async_register_entity_service("hello", None, handle_service) + + await hass.services.async_call( + "mock_platform", "hello", {"entity_id": "all"}, blocking=True + ) + + assert len(entities) == 2 + assert entity1 in entities + assert entity2 in entities + + @pytest.mark.parametrize("update_before_add", [True, False]) async def test_invalid_entity_id( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, update_before_add: bool From e1a0a855d52478de2d1b8eb65a8e22bf2fe54afa Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 11:44:38 +0200 Subject: [PATCH 0681/1635] Support None schema in EntityComponent.async_register_entity_service (#123867) --- homeassistant/helpers/entity_component.py | 4 +-- 
tests/helpers/test_entity_component.py | 30 +++++++++++++++-------- 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 0034eb1c6fc..c8bcda0eef2 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -258,13 +258,13 @@ class EntityComponent(Generic[_EntityT]): def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType, + schema: VolDictType | VolSchemaType | None, func: str | Callable[..., Any], required_features: list[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, ) -> None: """Register an entity service.""" - if isinstance(schema, dict): + if schema is None or isinstance(schema, dict): schema = cv.make_entity_service_schema(schema) service_func: str | HassJob[..., Any] diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 3f34305b39d..0c09c9d75f7 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -495,7 +495,19 @@ async def test_extract_all_use_match_all( ) not in caplog.text -async def test_register_entity_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("schema", "service_data"), + [ + ({"some": str}, {"some": "data"}), + ({}, {}), + (None, {}), + ], +) +async def test_register_entity_service( + hass: HomeAssistant, + schema: dict | None, + service_data: dict, +) -> None: """Test registering an enttiy service and calling it.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") calls = [] @@ -510,9 +522,7 @@ async def test_register_entity_service(hass: HomeAssistant) -> None: await component.async_setup({}) await component.async_add_entities([entity]) - component.async_register_entity_service( - "hello", {"some": str}, "async_called_by_service" - ) + component.async_register_entity_service("hello", schema, "async_called_by_service") with pytest.raises(vol.Invalid): await hass.services.async_call( @@ -524,24 +534,24 @@ async def test_register_entity_service(hass: HomeAssistant) -> None: assert len(calls) == 0 await hass.services.async_call( - DOMAIN, "hello", {"entity_id": entity.entity_id, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": entity.entity_id} | service_data, blocking=True ) assert len(calls) == 1 - assert calls[0] == {"some": "data"} + assert calls[0] == service_data await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL} | service_data, blocking=True ) assert len(calls) == 2 - assert calls[1] == {"some": "data"} + assert calls[1] == service_data await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE} | service_data, blocking=True ) assert len(calls) == 2 await hass.services.async_call( - DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True + DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE} | service_data, blocking=True ) assert len(calls) == 2 From 82c705e188d9c0a618817319f13ab95e7fb7df19 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:09:46 +0200 Subject: [PATCH 0682/1635] Fix translation for integration not found repair issue (#123868) * correct setp id in strings * add issue_ignored string --- 
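The two entity service patches above (#123064 and #123867) make async_register_entity_service on both EntityPlatform and EntityComponent accept schema=None as shorthand for the shared empty entity service schema, where previously a platform had to pass at least an empty dict or a voluptuous schema. A minimal usage sketch from a platform's perspective follows; the service name, handler method and platform are hypothetical and only illustrate the call shape exercised by the new tests.

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_platform
from homeassistant.helpers.entity_platform import AddEntitiesCallback


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up a hypothetical platform and register an entity service."""
    platform = entity_platform.async_get_current_platform()

    # schema=None now resolves to cv.make_entity_service_schema(None), i.e. the
    # base schema that only validates the standard entity targeting fields.
    platform.async_register_entity_service(
        "restart",  # hypothetical service name
        None,  # no service-specific fields
        "async_restart",  # method looked up on each targeted entity
    )

Service calls registered this way only accept the usual targeting data (entity_id, device_id or area_id); any extra keys are rejected by the base entity service schema.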
homeassistant/components/homeassistant/strings.json | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/homeassistant/strings.json b/homeassistant/components/homeassistant/strings.json index e3e1464077a..69a3e26ad79 100644 --- a/homeassistant/components/homeassistant/strings.json +++ b/homeassistant/components/homeassistant/strings.json @@ -60,8 +60,11 @@ "integration_not_found": { "title": "Integration {domain} not found", "fix_flow": { + "abort": { + "issue_ignored": "Not existing integration {domain} ignored." + }, "step": { - "remove_entries": { + "init": { "title": "[%key:component::homeassistant::issues::integration_not_found::title%]", "description": "The integration `{domain}` could not be found. This happens when a (custom) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.", "menu_options": { From bd509469ab5a2e69ba418f342170b390e7019df9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:16:38 +0200 Subject: [PATCH 0683/1635] Improve type hints in reolink tests (#123883) --- tests/components/reolink/test_init.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 5334e171e5e..1c93114217c 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -44,7 +44,7 @@ pytestmark = pytest.mark.usefixtures("reolink_connect", "reolink_platforms") CHIME_MODEL = "Reolink Chime" -async def test_wait(*args, **key_args): +async def test_wait(*args, **key_args) -> None: """Ensure a mocked function takes a bit of time to be able to timeout in test.""" await asyncio.sleep(0) From cd382bcddaa345d88d22b2aaa4eabe0e33aa63e8 Mon Sep 17 00:00:00 2001 From: kingy444 Date: Wed, 14 Aug 2024 20:31:18 +1000 Subject: [PATCH 0684/1635] Bump pydaikin to 2.13.4 (#123623) * bump pydaikin to 2.13.3 * bump pydaikin to 2.13.4 --- homeassistant/components/daikin/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index c5cb6064d88..0d93c0e25ad 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.2"], + "requirements": ["pydaikin==2.13.4"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index cadd93c2b1f..58058e40e10 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1786,7 +1786,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.13.2 +pydaikin==2.13.4 # homeassistant.components.danfoss_air pydanfossair==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2b5581f3181..56a16273f3e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1436,7 +1436,7 @@ pycoolmasternet-async==0.2.2 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.2 +pydaikin==2.13.4 # homeassistant.components.deconz pydeconz==116 From 36f9b69923f7af396e9005459631ff309360fe12 Mon Sep 17 
00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:33:26 +0200 Subject: [PATCH 0685/1635] Improve type hints in rfxtrx tests (#123885) --- tests/components/rfxtrx/conftest.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/tests/components/rfxtrx/conftest.py b/tests/components/rfxtrx/conftest.py index 88450638d6c..be5c72e6483 100644 --- a/tests/components/rfxtrx/conftest.py +++ b/tests/components/rfxtrx/conftest.py @@ -2,7 +2,9 @@ from __future__ import annotations -from unittest.mock import Mock, patch +from collections.abc import Callable, Coroutine, Generator +from typing import Any +from unittest.mock import MagicMock, Mock, patch from freezegun import freeze_time import pytest @@ -67,7 +69,7 @@ async def setup_rfx_test_cfg( @pytest.fixture(autouse=True) -async def transport_mock(hass): +def transport_mock() -> Generator[Mock]: """Fixture that make sure all transports are fake.""" transport = Mock(spec=RFXtrxTransport) with ( @@ -78,14 +80,14 @@ async def transport_mock(hass): @pytest.fixture(autouse=True) -async def connect_mock(hass): +def connect_mock() -> Generator[MagicMock]: """Fixture that make sure connect class is mocked.""" with patch("RFXtrx.Connect") as connect: yield connect @pytest.fixture(autouse=True, name="rfxtrx") -def rfxtrx_fixture(hass, connect_mock): +def rfxtrx_fixture(hass: HomeAssistant, connect_mock: MagicMock) -> Mock: """Fixture that cleans up threads from integration.""" rfx = Mock(spec=Connect) @@ -114,19 +116,21 @@ def rfxtrx_fixture(hass, connect_mock): @pytest.fixture(name="rfxtrx_automatic") -async def rfxtrx_automatic_fixture(hass, rfxtrx): +async def rfxtrx_automatic_fixture(hass: HomeAssistant, rfxtrx: Mock) -> Mock: """Fixture that starts up with automatic additions.""" await setup_rfx_test_cfg(hass, automatic_add=True, devices={}) return rfxtrx @pytest.fixture -async def timestep(hass): +def timestep( + hass: HomeAssistant, +) -> Generator[Callable[[int], Coroutine[Any, Any, None]]]: """Step system time forward.""" with freeze_time(utcnow()) as frozen_time: - async def delay(seconds): + async def delay(seconds: int) -> None: """Trigger delay in system.""" frozen_time.tick(delta=seconds) async_fire_time_changed(hass) From a712eca70af6352f55436465026ed706bd72e444 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:47:38 +0200 Subject: [PATCH 0686/1635] Improve type hints in stream tests (#123894) --- tests/components/stream/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 0142d71a805..6aab3c06d13 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -60,7 +60,7 @@ class WorkerSync: @pytest.fixture -def stream_worker_sync(hass): +def stream_worker_sync() -> Generator[WorkerSync]: """Patch StreamOutput to allow test to synchronize worker stream end.""" sync = WorkerSync() with patch( From 165ec62405de083fde4d5eb193347e1b8d585f0d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:48:10 +0200 Subject: [PATCH 0687/1635] Improve type hints in ssdp tests (#123892) --- tests/components/ssdp/conftest.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/components/ssdp/conftest.py b/tests/components/ssdp/conftest.py index 8b06163cd95..ac0ac7298a8 100644 --- a/tests/components/ssdp/conftest.py +++ 
b/tests/components/ssdp/conftest.py @@ -1,11 +1,14 @@ """Configuration for SSDP tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from async_upnp_client.server import UpnpServer from async_upnp_client.ssdp_listener import SsdpListener import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(autouse=True) async def silent_ssdp_listener(): @@ -32,7 +35,7 @@ async def disabled_upnp_server(): @pytest.fixture -def mock_flow_init(hass): +def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]: """Mock hass.config_entries.flow.async_init.""" with patch.object( hass.config_entries.flow, "async_init", return_value=AsyncMock() From 24a8060f43576e715d928646bd0c7534c1c440ab Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:48:36 +0200 Subject: [PATCH 0688/1635] Improve type hints in sonos tests (#123891) --- tests/components/sonos/conftest.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index bbec7a2308c..4f14a2aa132 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -1,9 +1,10 @@ """Configuration for Sonos tests.""" import asyncio -from collections.abc import Callable, Generator +from collections.abc import Callable, Coroutine, Generator from copy import copy from ipaddress import ip_address +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -120,7 +121,9 @@ async def async_autosetup_sonos(async_setup_sonos): @pytest.fixture -def async_setup_sonos(hass, config_entry, fire_zgs_event): +def async_setup_sonos( + hass: HomeAssistant, config_entry: MockConfigEntry, fire_zgs_event +) -> Callable[[], Coroutine[Any, Any, None]]: """Return a coroutine to set up a Sonos integration instance on demand.""" async def _wrapper(): @@ -136,7 +139,7 @@ def async_setup_sonos(hass, config_entry, fire_zgs_event): @pytest.fixture(name="config_entry") -def config_entry_fixture(): +def config_entry_fixture() -> MockConfigEntry: """Create a mock Sonos config entry.""" return MockConfigEntry(domain=DOMAIN, title="Sonos") @@ -650,7 +653,9 @@ def zgs_discovery_fixture(): @pytest.fixture(name="fire_zgs_event") -def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): +def zgs_event_fixture( + hass: HomeAssistant, soco: SoCo, zgs_discovery: str +) -> Callable[[], Coroutine[Any, Any, None]]: """Create alarm_event fixture.""" variables = {"ZoneGroupState": zgs_discovery} From 57902fed22722e1873885a3a2187c430b27a6572 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:49:27 +0200 Subject: [PATCH 0689/1635] Improve type hints in smart_meter_texas tests (#123890) --- tests/components/smart_meter_texas/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/components/smart_meter_texas/conftest.py b/tests/components/smart_meter_texas/conftest.py index d06571fe05e..9c0301037a9 100644 --- a/tests/components/smart_meter_texas/conftest.py +++ b/tests/components/smart_meter_texas/conftest.py @@ -19,6 +19,7 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.smart_meter_texas.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, 
load_fixture @@ -91,7 +92,7 @@ def mock_connection( @pytest.fixture(name="config_entry") -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return a mock config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, From f8879a51fede3229f71ba9f4583c464c4396205b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:49:53 +0200 Subject: [PATCH 0690/1635] Improve type hints in sma tests (#123889) --- tests/components/sma/conftest.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/components/sma/conftest.py b/tests/components/sma/conftest.py index a98eda673e4..a54f478a31d 100644 --- a/tests/components/sma/conftest.py +++ b/tests/components/sma/conftest.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.sma.const import DOMAIN +from homeassistant.core import HomeAssistant from . import MOCK_DEVICE, MOCK_USER_INPUT @@ -16,7 +17,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(): +def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -28,7 +29,9 @@ def mock_config_entry(): @pytest.fixture -async def init_integration(hass, mock_config_entry): +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Create a fake SMA Config Entry.""" mock_config_entry.add_to_hass(hass) From 7ff368fe0d655a28cc14c55faeb4dc361637ca15 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:50:08 +0200 Subject: [PATCH 0691/1635] Improve type hints in sharkiq tests (#123888) --- tests/components/sharkiq/test_vacuum.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index e5154008f56..3748cfd6dc4 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -141,7 +141,7 @@ class MockShark(SharkIqVacuum): @pytest.fixture(autouse=True) @patch("sharkiq.ayla_api.AylaApi", MockAyla) -async def setup_integration(hass): +async def setup_integration(hass: HomeAssistant) -> None: """Build the mock integration.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_USERNAME, data=CONFIG, entry_id=ENTRY_ID From 13b071fd72cee862fd665da22bdc37289d5c324c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 12:50:29 +0200 Subject: [PATCH 0692/1635] Improve type hints in risco tests (#123887) --- tests/components/risco/conftest.py | 20 ++++++++++++++------ tests/components/risco/test_sensor.py | 2 +- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/tests/components/risco/conftest.py b/tests/components/risco/conftest.py index ab3b64b245d..3961d85d694 100644 --- a/tests/components/risco/conftest.py +++ b/tests/components/risco/conftest.py @@ -1,7 +1,10 @@ """Fixtures for Risco tests.""" +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import MagicMock, PropertyMock, patch +from pyrisco.cloud.event import Event import pytest from homeassistant.components.risco.const import DOMAIN, TYPE_LOCAL @@ -13,6 +16,7 @@ from homeassistant.const import ( CONF_TYPE, CONF_USERNAME, ) +from homeassistant.core import HomeAssistant from .util import TEST_SITE_NAME, TEST_SITE_UUID, system_mock, zone_mock @@ -116,19 

 @pytest.fixture
-def options():
+def options() -> dict[str, Any]:
     """Fixture for default (empty) options."""
     return {}


 @pytest.fixture
-def events():
+def events() -> list[Event]:
     """Fixture for default (empty) events."""
     return []


 @pytest.fixture
-def cloud_config_entry(hass, options):
+def cloud_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry:
     """Fixture for a cloud config entry."""
     config_entry = MockConfigEntry(
         domain=DOMAIN,
@@ -151,7 +155,9 @@ def login_with_error(exception):


 @pytest.fixture
-async def setup_risco_cloud(hass, cloud_config_entry, events):
+async def setup_risco_cloud(
+    hass: HomeAssistant, cloud_config_entry: MockConfigEntry, events: list[Event]
+) -> AsyncGenerator[MockConfigEntry]:
     """Set up a Risco integration for testing."""
     with (
         patch(
@@ -181,7 +187,7 @@ async def setup_risco_cloud(hass, cloud_config_entry, events):


 @pytest.fixture
-def local_config_entry(hass, options):
+def local_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry:
     """Fixture for a local config entry."""
     config_entry = MockConfigEntry(
         domain=DOMAIN, data=TEST_LOCAL_CONFIG, options=options
@@ -201,7 +207,9 @@ def connect_with_error(exception):


 @pytest.fixture
-async def setup_risco_local(hass, local_config_entry):
+async def setup_risco_local(
+    hass: HomeAssistant, local_config_entry: MockConfigEntry
+) -> AsyncGenerator[MockConfigEntry]:
     """Set up a local Risco integration for testing."""
     with (
         patch(
diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py
index 4c8f7bb4180..2b1094554ae 100644
--- a/tests/components/risco/test_sensor.py
+++ b/tests/components/risco/test_sensor.py
@@ -160,7 +160,7 @@ def _check_state(hass, category, entity_id):


 @pytest.fixture
-async def _set_utc_time_zone(hass):
+async def _set_utc_time_zone(hass: HomeAssistant) -> None:
     await hass.config.async_set_time_zone("UTC")

From 7fe2f175aae3dba7e66bc1af0a322548313756bd Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 12:50:51 +0200
Subject: [PATCH 0693/1635] Improve type hints in ridwell tests (#123886)

---
 tests/components/ridwell/conftest.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/tests/components/ridwell/conftest.py b/tests/components/ridwell/conftest.py
index 32907ac8037..6ea9d91f8e9 100644
--- a/tests/components/ridwell/conftest.py
+++ b/tests/components/ridwell/conftest.py
@@ -1,6 +1,8 @@
 """Define test fixtures for Ridwell."""

+from collections.abc import Generator
 from datetime import date
+from typing import Any
 from unittest.mock import AsyncMock, Mock, patch

 from aioridwell.model import EventState, RidwellPickup, RidwellPickupEvent
@@ -8,6 +10,7 @@ import pytest

 from homeassistant.components.ridwell.const import DOMAIN
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
+from homeassistant.core import HomeAssistant

 from tests.common import MockConfigEntry

@@ -19,7 +22,7 @@ TEST_USER_ID = "12345"


 @pytest.fixture(name="account")
-def account_fixture():
+def account_fixture() -> Mock:
     """Define a Ridwell account."""
     return Mock(
         account_id=TEST_ACCOUNT_ID,
@@ -44,7 +47,7 @@ def account_fixture():


 @pytest.fixture(name="client")
-def client_fixture(account):
+def client_fixture(account: Mock) -> Mock:
     """Define an aioridwell client."""
     return Mock(
         async_authenticate=AsyncMock(),
@@ -55,7 +58,9 @@ def client_fixture(account):


 @pytest.fixture(name="config_entry")
-def config_entry_fixture(hass, config):
+def config_entry_fixture(
+    hass: HomeAssistant, config: dict[str, Any]
+) -> MockConfigEntry:
     """Define a config entry fixture."""
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -68,7 +73,7 @@ def config_entry_fixture(hass, config):


 @pytest.fixture(name="config")
-def config_fixture(hass):
+def config_fixture() -> dict[str, Any]:
     """Define a config entry data fixture."""
     return {
         CONF_USERNAME: TEST_USERNAME,
@@ -77,7 +82,7 @@ def config_fixture(hass):


 @pytest.fixture(name="mock_aioridwell")
-async def mock_aioridwell_fixture(hass, client, config):
+def mock_aioridwell_fixture(client: Mock, config: dict[str, Any]) -> Generator[None]:
     """Define a fixture to patch aioridwell."""
     with (
         patch(
@@ -93,7 +98,9 @@ async def mock_aioridwell_fixture(hass, client, config):


 @pytest.fixture(name="setup_config_entry")
-async def setup_config_entry_fixture(hass, config_entry, mock_aioridwell):
+async def setup_config_entry_fixture(
+    hass: HomeAssistant, config_entry: MockConfigEntry, mock_aioridwell: None
+) -> None:
     """Define a fixture to set up ridwell."""
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()

From 6626c63bb516d2165c8cd7e8855c74add93cae1d Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 12:51:52 +0200
Subject: [PATCH 0694/1635] Improve type hints in recollect_waste tests
 (#123882)

---
 tests/components/recollect_waste/conftest.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/tests/components/recollect_waste/conftest.py b/tests/components/recollect_waste/conftest.py
index 360dd8aac98..8384da3f388 100644
--- a/tests/components/recollect_waste/conftest.py
+++ b/tests/components/recollect_waste/conftest.py
@@ -1,6 +1,7 @@
 """Define test fixtures for ReCollect Waste."""

 from datetime import date
+from typing import Any
 from unittest.mock import AsyncMock, Mock, patch

 from aiorecollect.client import PickupEvent, PickupType
@@ -11,6 +12,7 @@ from homeassistant.components.recollect_waste.const import (
     CONF_SERVICE_ID,
     DOMAIN,
 )
+from homeassistant.core import HomeAssistant

 from tests.common import MockConfigEntry

@@ -25,7 +27,9 @@ def client_fixture(pickup_events):


 @pytest.fixture(name="config_entry")
-def config_entry_fixture(hass, config):
+def config_entry_fixture(
+    hass: HomeAssistant, config: dict[str, Any]
+) -> MockConfigEntry:
     """Define a config entry fixture."""
     entry = MockConfigEntry(
         domain=DOMAIN, unique_id=f"{TEST_PLACE_ID}, {TEST_SERVICE_ID}", data=config
@@ -35,7 +39,7 @@ def config_entry_fixture(hass, config):


 @pytest.fixture(name="config")
-def config_fixture():
+def config_fixture() -> dict[str, Any]:
     """Define a config entry data fixture."""
     return {
         CONF_PLACE_ID: TEST_PLACE_ID,
@@ -54,7 +58,7 @@ def pickup_events_fixture():


 @pytest.fixture(name="mock_aiorecollect")
-async def mock_aiorecollect_fixture(client):
+def mock_aiorecollect_fixture(client):
     """Define a fixture to patch aiorecollect."""
     with (
         patch(
@@ -70,7 +74,9 @@ def mock_aiorecollect_fixture(client):


 @pytest.fixture(name="setup_config_entry")
-async def setup_config_entry_fixture(hass, config_entry, mock_aiorecollect):
+async def setup_config_entry_fixture(
+    hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiorecollect: None
+) -> None:
     """Define a fixture to set up recollect_waste."""
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()

From d50bac3b3e0ec758f76a030d9256287bbfdb2443 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 12:52:09 +0200
Subject: [PATCH 0695/1635] Improve type hints in rainmachine tests (#123881)

---
 tests/components/rainmachine/conftest.py | 16 +++++++++++-----
 1 file changed, 11 insertions(+), 5 deletions(-)

diff --git a/tests/components/rainmachine/conftest.py b/tests/components/rainmachine/conftest.py
index 717d74b421b..22ee807d187 100644
--- a/tests/components/rainmachine/conftest.py
+++ b/tests/components/rainmachine/conftest.py
@@ -1,5 +1,6 @@
 """Define test fixtures for RainMachine."""

+from collections.abc import AsyncGenerator
 import json
 from typing import Any
 from unittest.mock import AsyncMock, patch
@@ -8,19 +9,20 @@ import pytest

 from homeassistant.components.rainmachine import DOMAIN
 from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SSL
+from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component

 from tests.common import MockConfigEntry, load_fixture


 @pytest.fixture(name="client")
-def client_fixture(controller, controller_mac):
+def client_fixture(controller: AsyncMock, controller_mac: str) -> AsyncMock:
     """Define a regenmaschine client."""
     return AsyncMock(load_local=AsyncMock(), controllers={controller_mac: controller})


 @pytest.fixture(name="config")
-def config_fixture(hass):
+def config_fixture() -> dict[str, Any]:
     """Define a config entry data fixture."""
     return {
         CONF_IP_ADDRESS: "192.168.1.100",
@@ -31,7 +33,9 @@ def config_fixture(hass):


 @pytest.fixture(name="config_entry")
-def config_entry_fixture(hass, config, controller_mac):
+def config_entry_fixture(
+    hass: HomeAssistant, config: dict[str, Any], controller_mac: str
+) -> MockConfigEntry:
     """Define a config entry fixture."""
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -78,7 +82,7 @@ def controller_fixture(


 @pytest.fixture(name="controller_mac")
-def controller_mac_fixture():
+def controller_mac_fixture() -> str:
     """Define a controller MAC address."""
     return "aa:bb:cc:dd:ee:ff"

@@ -145,7 +149,9 @@ def data_zones_fixture():


 @pytest.fixture(name="setup_rainmachine")
-async def setup_rainmachine_fixture(hass, client, config):
+async def setup_rainmachine_fixture(
+    hass: HomeAssistant, client: AsyncMock, config: dict[str, Any]
+) -> AsyncGenerator[None]:
     """Define a fixture to set up RainMachine."""
     with (
         patch("homeassistant.components.rainmachine.Client", return_value=client),

From ac223e64f9cfc48069c24c50b56f01415841b29d Mon Sep 17 00:00:00 2001
From: Andrew Jackson
Date: Wed, 14 Aug 2024 11:55:59 +0100
Subject: [PATCH 0696/1635] Migrate Mastodon unique id (#123877)

* Migrate unique id

* Fix unique id check

* Switch to minor version and other fixes

---
 homeassistant/components/mastodon/__init__.py | 38 ++++++++++++-
 .../components/mastodon/config_flow.py        | 12 ++--
 tests/components/mastodon/conftest.py         |  4 +-
 .../mastodon/snapshots/test_init.ambr         | 33 +++++++++++
 .../mastodon/snapshots/test_sensor.ambr       |  6 +-
 tests/components/mastodon/test_config_flow.py |  2 +-
 tests/components/mastodon/test_init.py        | 57 +++++++++++++++++++
 7 files changed, 139 insertions(+), 13 deletions(-)
 create mode 100644 tests/components/mastodon/snapshots/test_init.ambr

diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py
index 3c305ca655b..77e66b6e45c 100644
--- a/homeassistant/components/mastodon/__init__.py
+++ b/homeassistant/components/mastodon/__init__.py
@@ -17,10 +17,11 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import discovery
+from homeassistant.util import slugify

-from .const import CONF_BASE_URL, DOMAIN
+from .const import CONF_BASE_URL, DOMAIN, LOGGER
 from .coordinator import MastodonCoordinator
-from .utils import create_mastodon_client
+from .utils import construct_mastodon_username, create_mastodon_client

 PLATFORMS: list[Platform] = [Platform.NOTIFY, Platform.SENSOR]

@@ -80,6 +81,39 @@ async def async_unload_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -
     )


+async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+    """Migrate old config."""
+
+    if entry.version == 1 and entry.minor_version == 1:
+        # Version 1.1 had the unique_id as client_id, this isn't necessarily unique
+        LOGGER.debug("Migrating config entry from version %s", entry.version)
+
+        try:
+            _, instance, account = await hass.async_add_executor_job(
+                setup_mastodon,
+                entry,
+            )
+        except MastodonError as ex:
+            LOGGER.error("Migration failed with error %s", ex)
+            return False
+
+        entry.minor_version = 2
+
+        hass.config_entries.async_update_entry(
+            entry,
+            unique_id=slugify(construct_mastodon_username(instance, account)),
+        )
+
+        LOGGER.info(
+            "Entry %s successfully migrated to version %s.%s",
+            entry.entry_id,
+            entry.version,
+            entry.minor_version,
+        )
+
+    return True
+
+
 def setup_mastodon(entry: ConfigEntry) -> tuple[Mastodon, dict, dict]:
     """Get mastodon details."""
     client = create_mastodon_client(
diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py
index 7d1c9396cbb..4e856275736 100644
--- a/homeassistant/components/mastodon/config_flow.py
+++ b/homeassistant/components/mastodon/config_flow.py
@@ -20,6 +20,7 @@ from homeassistant.helpers.selector import (
     TextSelectorType,
 )
 from homeassistant.helpers.typing import ConfigType
+from homeassistant.util import slugify

 from .const import CONF_BASE_URL, DEFAULT_URL, DOMAIN, LOGGER
 from .utils import construct_mastodon_username, create_mastodon_client
@@ -47,6 +48,7 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow."""

     VERSION = 1
+    MINOR_VERSION = 2
     config_entry: ConfigEntry

     def check_connection(
@@ -105,10 +107,6 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
         """Handle a flow initialized by the user."""
         errors: dict[str, str] | None = None
         if user_input:
-            self._async_abort_entries_match(
-                {CONF_CLIENT_ID: user_input[CONF_CLIENT_ID]}
-            )
-
             instance, account, errors = await self.hass.async_add_executor_job(
                 self.check_connection,
                 user_input[CONF_BASE_URL],
@@ -119,7 +117,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):

             if not errors:
                 name = construct_mastodon_username(instance, account)
-                await self.async_set_unique_id(user_input[CONF_CLIENT_ID])
+                await self.async_set_unique_id(slugify(name))
+                self._abort_if_unique_id_configured()
                 return self.async_create_entry(
                     title=name,
                     data=user_input,
@@ -148,7 +147,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
         )

         if not errors:
-            await self.async_set_unique_id(client_id)
+            name = construct_mastodon_username(instance, account)
+            await self.async_set_unique_id(slugify(name))
             self._abort_if_unique_id_configured()

             if not name:
diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py
index 03c3e754c11..c64de44d496 100644
--- a/tests/components/mastodon/conftest.py
+++ b/tests/components/mastodon/conftest.py
@@ -53,5 +53,7 @@ def mock_config_entry() -> MockConfigEntry:
             CONF_ACCESS_TOKEN: "access_token",
         },
         entry_id="01J35M4AH9HYRC2V0G6RNVNWJH",
-        unique_id="client_id",
+        unique_id="trwnh_mastodon_social",
+        version=1,
+        minor_version=2,
     )
diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr
new file mode 100644
index 00000000000..37fa765acea
--- /dev/null
+++ b/tests/components/mastodon/snapshots/test_init.ambr
@@ -0,0 +1,33 @@
+# serializer version: 1
+# name: test_device_info
+  DeviceRegistryEntrySnapshot({
+    'area_id': None,
+    'config_entries': <ANY>,
+    'configuration_url': None,
+    'connections': set({
+    }),
+    'disabled_by': None,
+    'entry_type': <DeviceEntryType.SERVICE: 'service'>,
+    'hw_version': None,
+    'id': <ANY>,
+    'identifiers': set({
+      tuple(
+        'mastodon',
+        'trwnh_mastodon_social',
+      ),
+    }),
+    'is_new': False,
+    'labels': set({
+    }),
+    'manufacturer': 'Mastodon gGmbH',
+    'model': '@trwnh@mastodon.social',
+    'model_id': None,
+    'name': 'Mastodon @trwnh@mastodon.social',
+    'name_by_user': None,
+    'primary_config_entry': <ANY>,
+    'serial_number': None,
+    'suggested_area': None,
+    'sw_version': '4.0.0rc1',
+    'via_device_id': None,
+  })
+# ---
diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr
index f94e34c00ab..c8df8cdab19 100644
--- a/tests/components/mastodon/snapshots/test_sensor.ambr
+++ b/tests/components/mastodon/snapshots/test_sensor.ambr
@@ -30,7 +30,7 @@
     'previous_unique_id': None,
     'supported_features': 0,
     'translation_key': 'followers',
-    'unique_id': 'client_id_followers',
+    'unique_id': 'trwnh_mastodon_social_followers',
     'unit_of_measurement': 'accounts',
   })
# ---
@@ -80,7 +80,7 @@
     'previous_unique_id': None,
     'supported_features': 0,
     'translation_key': 'following',
-    'unique_id': 'client_id_following',
+    'unique_id': 'trwnh_mastodon_social_following',
     'unit_of_measurement': 'accounts',
   })
# ---
@@ -130,7 +130,7 @@
     'previous_unique_id': None,
     'supported_features': 0,
     'translation_key': 'posts',
-    'unique_id': 'client_id_posts',
+    'unique_id': 'trwnh_mastodon_social_posts',
     'unit_of_measurement': 'posts',
   })
# ---
diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py
index 01cdc061d3e..073a6534d7d 100644
--- a/tests/components/mastodon/test_config_flow.py
+++ b/tests/components/mastodon/test_config_flow.py
@@ -44,7 +44,7 @@ async def test_full_flow(
             CONF_CLIENT_SECRET: "client_secret",
             CONF_ACCESS_TOKEN: "access_token",
         }
-    assert result["result"].unique_id == "client_id"
+    assert result["result"].unique_id == "trwnh_mastodon_social"


 @pytest.mark.parametrize(
diff --git a/tests/components/mastodon/test_init.py b/tests/components/mastodon/test_init.py
index 53796e39782..c3d0728fe08 100644
--- a/tests/components/mastodon/test_init.py
+++ b/tests/components/mastodon/test_init.py
@@ -3,15 +3,36 @@
 from unittest.mock import AsyncMock

 from mastodon.Mastodon import MastodonError
+from syrupy.assertion import SnapshotAssertion

+from homeassistant.components.mastodon.config_flow import MastodonConfigFlow
+from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN
 from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr

 from . import setup_integration

 from tests.common import MockConfigEntry


+async def test_device_info(
+    hass: HomeAssistant,
+    snapshot: SnapshotAssertion,
+    mock_mastodon_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    device_registry: dr.DeviceRegistry,
+) -> None:
+    """Test device registry integration."""
+    await setup_integration(hass, mock_config_entry)
+    device_entry = device_registry.async_get_device(
+        identifiers={(DOMAIN, mock_config_entry.unique_id)}
+    )
+    assert device_entry is not None
+    assert device_entry == snapshot
+
+
 async def test_initialization_failure(
     hass: HomeAssistant,
     mock_mastodon_client: AsyncMock,
@@ -23,3 +44,39 @@ async def test_initialization_failure(
     await setup_integration(hass, mock_config_entry)

     assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
+
+
+async def test_migrate(
+    hass: HomeAssistant,
+    mock_mastodon_client: AsyncMock,
+) -> None:
+    """Test migration."""
+    # Setup the config entry
+    config_entry = MockConfigEntry(
+        domain=DOMAIN,
+        data={
+            CONF_BASE_URL: "https://mastodon.social",
+            CONF_CLIENT_ID: "client_id",
+            CONF_CLIENT_SECRET: "client_secret",
+            CONF_ACCESS_TOKEN: "access_token",
+        },
+        title="@trwnh@mastodon.social",
+        unique_id="client_id",
+        version=1,
+        minor_version=1,
+    )
+    config_entry.add_to_hass(hass)
+    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.async_block_till_done()
+
+    # Check migration was successful
+    assert config_entry.state is ConfigEntryState.LOADED
+    assert config_entry.data == {
+        CONF_BASE_URL: "https://mastodon.social",
+        CONF_CLIENT_ID: "client_id",
+        CONF_CLIENT_SECRET: "client_secret",
+        CONF_ACCESS_TOKEN: "access_token",
+    }
+    assert config_entry.version == MastodonConfigFlow.VERSION
+    assert config_entry.minor_version == MastodonConfigFlow.MINOR_VERSION
+    assert config_entry.unique_id == "trwnh_mastodon_social"

From 8117532cc7be91fea7801dfdb678f24bb851fec4 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 12:58:49 +0200
Subject: [PATCH 0697/1635] Improve type hints in rainforest_eagle tests
 (#123880)

---
 tests/components/rainforest_eagle/conftest.py | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/tests/components/rainforest_eagle/conftest.py b/tests/components/rainforest_eagle/conftest.py
index 1aff693e61f..c3790a12e86 100644
--- a/tests/components/rainforest_eagle/conftest.py
+++ b/tests/components/rainforest_eagle/conftest.py
@@ -1,6 +1,7 @@
 """Conftest for rainforest_eagle."""

-from unittest.mock import AsyncMock, Mock, patch
+from collections.abc import AsyncGenerator
+from unittest.mock import AsyncMock, MagicMock, Mock, patch

 import pytest

@@ -13,6 +14,7 @@ from homeassistant.components.rainforest_eagle.const import (
     TYPE_EAGLE_200,
 )
 from homeassistant.const import CONF_HOST, CONF_TYPE
+from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component

 from . import MOCK_200_RESPONSE_WITHOUT_PRICE, MOCK_CLOUD_ID
@@ -21,7 +23,7 @@ from tests.common import MockConfigEntry


 @pytest.fixture
-def config_entry_200(hass):
+def config_entry_200(hass: HomeAssistant) -> MockConfigEntry:
     """Return a config entry."""
     entry = MockConfigEntry(
         domain="rainforest_eagle",
@@ -38,7 +40,9 @@ def config_entry_200(hass):


 @pytest.fixture
-async def setup_rainforest_200(hass, config_entry_200):
+async def setup_rainforest_200(
+    hass: HomeAssistant, config_entry_200: MockConfigEntry
+) -> AsyncGenerator[Mock]:
     """Set up rainforest."""
     with patch(
         "aioeagle.ElectricMeter.create_instance",
@@ -53,7 +57,7 @@ async def setup_rainforest_200(hass, config_entry_200):


 @pytest.fixture
-async def setup_rainforest_100(hass):
+async def setup_rainforest_100(hass: HomeAssistant) -> AsyncGenerator[MagicMock]:
     """Set up rainforest."""
     MockConfigEntry(
         domain="rainforest_eagle",

From f414f5d77a876ef2fa8a0ae0ae83614a675ccd53 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:00:07 +0200
Subject: [PATCH 0698/1635] Improve type hints in person tests (#123871)

---
 tests/components/person/conftest.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/components/person/conftest.py b/tests/components/person/conftest.py
index ecec42b003d..a6dc95ccc9e 100644
--- a/tests/components/person/conftest.py
+++ b/tests/components/person/conftest.py
@@ -18,7 +18,7 @@ DEVICE_TRACKER_2 = "device_tracker.test_tracker_2"


 @pytest.fixture
-def storage_collection(hass):
+def storage_collection(hass: HomeAssistant) -> person.PersonStorageCollection:
     """Return an empty storage collection."""
     id_manager = collection.IDManager()
     return person.PersonStorageCollection(

From 1af6528f4f30910a99eb79f4937f6c37de82506d Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:05:43 +0200
Subject: [PATCH 0699/1635] Improve type hints in prusalink tests (#123873)

---
 tests/components/prusalink/conftest.py | 37 +++++++++++++++-----------
 1 file changed, 21 insertions(+), 16 deletions(-)

diff --git a/tests/components/prusalink/conftest.py b/tests/components/prusalink/conftest.py
index 104e4d47afa..9bcf45056cd 100644
--- a/tests/components/prusalink/conftest.py
+++ b/tests/components/prusalink/conftest.py
@@ -1,16 +1,19 @@
 """Fixtures for PrusaLink."""

+from collections.abc import Generator
+from typing import Any
 from unittest.mock import patch

 import pytest

 from homeassistant.components.prusalink import DOMAIN
+from homeassistant.core import HomeAssistant

 from tests.common import MockConfigEntry


 @pytest.fixture
-def mock_config_entry(hass):
+def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
     """Mock a PrusaLink config entry."""
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -23,7 +26,7 @@ def mock_config_entry(hass):


 @pytest.fixture
-def mock_version_api(hass):
+def mock_version_api() -> Generator[dict[str, str]]:
     """Mock PrusaLink version API."""
     resp = {
         "api": "2.0.0",
@@ -36,7 +39,7 @@ def mock_version_api(hass):


 @pytest.fixture
-def mock_info_api(hass):
+def mock_info_api() -> Generator[dict[str, Any]]:
     """Mock PrusaLink info API."""
     resp = {
         "nozzle_diameter": 0.40,
@@ -50,7 +53,7 @@ def mock_info_api(hass):


 @pytest.fixture
-def mock_get_legacy_printer(hass):
+def mock_get_legacy_printer() -> Generator[dict[str, Any]]:
     """Mock PrusaLink printer API."""
     resp = {"telemetry": {"material": "PLA"}}
     with patch("pyprusalink.PrusaLink.get_legacy_printer", return_value=resp):
@@ -58,7 +61,7 @@ def mock_get_legacy_printer(hass):

 @pytest.fixture
-def mock_get_status_idle(hass):
+def mock_get_status_idle() -> Generator[dict[str, Any]]:
     """Mock PrusaLink printer API."""
     resp = {
         "storage": {
@@ -86,7 +89,7 @@ def mock_get_status_idle(hass):


 @pytest.fixture
-def mock_get_status_printing(hass):
+def mock_get_status_printing() -> Generator[dict[str, Any]]:
     """Mock PrusaLink printer API."""
     resp = {
         "job": {
@@ -114,7 +117,7 @@ def mock_get_status_printing(hass):


 @pytest.fixture
-def mock_job_api_idle(hass):
+def mock_job_api_idle() -> Generator[dict[str, Any]]:
     """Mock PrusaLink job API having no job."""
     resp = {}
     with patch("pyprusalink.PrusaLink.get_job", return_value=resp):
@@ -122,7 +125,7 @@ def mock_job_api_idle(hass):


 @pytest.fixture
-def mock_job_api_idle_mk3(hass):
+def mock_job_api_idle_mk3() -> Generator[dict[str, Any]]:
     """Mock PrusaLink job API having a job with idle state (MK3)."""
     resp = {
         "id": 129,
@@ -148,7 +151,7 @@ def mock_job_api_idle_mk3(hass):


 @pytest.fixture
-def mock_job_api_printing(hass):
+def mock_job_api_printing() -> Generator[dict[str, Any]]:
     """Mock PrusaLink printing."""
     resp = {
         "id": 129,
@@ -174,7 +177,9 @@ def mock_job_api_printing(hass):


 @pytest.fixture
-def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing):
+def mock_job_api_paused(
+    mock_get_status_printing: dict[str, Any], mock_job_api_printing: dict[str, Any]
+) -> None:
     """Mock PrusaLink paused printing."""
     mock_job_api_printing["state"] = "PAUSED"
     mock_get_status_printing["printer"]["state"] = "PAUSED"
@@ -182,10 +187,10 @@ def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing):

 @pytest.fixture
 def mock_api(
-    mock_version_api,
-    mock_info_api,
-    mock_get_legacy_printer,
-    mock_get_status_idle,
-    mock_job_api_idle,
-):
+    mock_version_api: dict[str, str],
+    mock_info_api: dict[str, Any],
+    mock_get_legacy_printer: dict[str, Any],
+    mock_get_status_idle: dict[str, Any],
+    mock_job_api_idle: dict[str, Any],
+) -> None:
     """Mock PrusaLink API."""

From 5f1d7e55662b732e9c97c2f7921f097009d089df Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:05:52 +0200
Subject: [PATCH 0700/1635] Improve type hints in purpleair tests (#123874)

---
 tests/components/purpleair/conftest.py | 23 ++++++++++++++++-------
 1 file changed, 16 insertions(+), 7 deletions(-)

diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py
index 1305c98308d..3d6776dd12e 100644
--- a/tests/components/purpleair/conftest.py
+++ b/tests/components/purpleair/conftest.py
@@ -1,5 +1,7 @@
 """Define fixtures for PurpleAir tests."""

+from collections.abc import Generator
+from typing import Any
 from unittest.mock import AsyncMock, Mock, patch

 from aiopurpleair.endpoints.sensors import NearbySensorResult
@@ -7,6 +9,7 @@ from aiopurpleair.models.sensors import GetSensorsResponse
 import pytest

 from homeassistant.components.purpleair import DOMAIN
+from homeassistant.core import HomeAssistant

 from tests.common import MockConfigEntry, load_fixture

@@ -16,7 +19,7 @@ TEST_SENSOR_INDEX2 = 567890


 @pytest.fixture(name="api")
-def api_fixture(get_sensors_response):
+def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock:
     """Define a fixture to return a mocked aiopurple API object."""
     return Mock(
         async_check_api_key=AsyncMock(),
@@ -34,7 +37,11 @@ def api_fixture(get_sensors_response):


 @pytest.fixture(name="config_entry")
-def config_entry_fixture(hass, config_entry_data, config_entry_options):
+def config_entry_fixture(
+    hass: HomeAssistant,
+    config_entry_data: dict[str, Any],
+    config_entry_options: dict[str, Any],
+) -> MockConfigEntry:
     """Define a config entry fixture."""
     entry = MockConfigEntry(
         domain=DOMAIN,
@@ -48,7 +55,7 @@ def config_entry_fixture(hass, config_entry_data, config_entry_options):


 @pytest.fixture(name="config_entry_data")
-def config_entry_data_fixture():
+def config_entry_data_fixture() -> dict[str, Any]:
     """Define a config entry data fixture."""
     return {
         "api_key": TEST_API_KEY,
@@ -56,7 +63,7 @@ def config_entry_data_fixture():


 @pytest.fixture(name="config_entry_options")
-def config_entry_options_fixture():
+def config_entry_options_fixture() -> dict[str, Any]:
     """Define a config entry options fixture."""
     return {
         "sensor_indices": [TEST_SENSOR_INDEX1],
@@ -64,7 +71,7 @@ def config_entry_options_fixture():


 @pytest.fixture(name="get_sensors_response", scope="package")
-def get_sensors_response_fixture():
+def get_sensors_response_fixture() -> GetSensorsResponse:
     """Define a fixture to mock an aiopurpleair GetSensorsResponse object."""
     return GetSensorsResponse.parse_raw(
         load_fixture("get_sensors_response.json", "purpleair")
@@ -72,7 +79,7 @@ def get_sensors_response_fixture():


 @pytest.fixture(name="mock_aiopurpleair")
-async def mock_aiopurpleair_fixture(api):
+def mock_aiopurpleair_fixture(api: Mock) -> Generator[Mock]:
     """Define a fixture to patch aiopurpleair."""
     with (
         patch("homeassistant.components.purpleair.config_flow.API", return_value=api),
@@ -82,7 +89,9 @@ def mock_aiopurpleair_fixture(api):


 @pytest.fixture(name="setup_config_entry")
-async def setup_config_entry_fixture(hass, config_entry, mock_aiopurpleair):
+async def setup_config_entry_fixture(
+    hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiopurpleair: Mock
+) -> None:
     """Define a fixture to set up purpleair."""
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()

From 903342b394a96ce44a2b05b9fee7af0747334bf0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?=
Date: Wed, 14 Aug 2024 13:06:52 +0200
Subject: [PATCH 0701/1635] Handle timeouts on Airzone DHCP config flow
 (#123869)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

airzone: config_flow: dhcp: catch timeout exception

Signed-off-by: Álvaro Fernández Rojas
---
 homeassistant/components/airzone/config_flow.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/homeassistant/components/airzone/config_flow.py b/homeassistant/components/airzone/config_flow.py
index 24ee37bbcb4..406fd72a6db 100644
--- a/homeassistant/components/airzone/config_flow.py
+++ b/homeassistant/components/airzone/config_flow.py
@@ -114,7 +114,7 @@ class AirZoneConfigFlow(ConfigFlow, domain=DOMAIN):
         )
         try:
             await airzone.get_version()
-        except AirzoneError as err:
+        except (AirzoneError, TimeoutError) as err:
             raise AbortFlow("cannot_connect") from err

         return await self.async_step_discovered_connection()

From d4082aee5ad2a41ea277c554a2c054f51a5c4c13 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:08:54 +0200
Subject: [PATCH 0702/1635] Improve type hints in owntracks tests (#123866)

---
 .../owntracks/test_device_tracker.py          | 147 +++++++++++-------
 1 file changed, 94 insertions(+), 53 deletions(-)

diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py
index 0648a94c70b..b4b5c00880c 100644
--- a/tests/components/owntracks/test_device_tracker.py
+++ b/tests/components/owntracks/test_device_tracker.py
@@ -1,6 +1,7 @@
 """The tests for the Owntracks device tracker."""

 import base64
+from collections.abc import Callable
 import json
 import pickle
 from unittest.mock import patch
@@ -18,6 +19,8 @@ from homeassistant.setup import async_setup_component
 from tests.common import MockConfigEntry, async_fire_mqtt_message
 from tests.typing import ClientSessionGenerator, MqttMockHAClient

+type OwnTracksContextFactory = Callable[[], owntracks.OwnTracksContext]
+
 USER = "greg"
 DEVICE = "phone"

@@ -314,7 +317,7 @@ async def setup_owntracks(hass, config, ctx_cls=owntracks.OwnTracksContext):


 @pytest.fixture
-def context(hass, setup_comp):
+def context(hass: HomeAssistant, setup_comp: None) -> OwnTracksContextFactory:
     """Set up the mocked context."""
     orig_context = owntracks.OwnTracksContext
     context = None
@@ -407,14 +410,16 @@ def assert_mobile_tracker_accuracy(hass, accuracy, beacon=IBEACON_DEVICE):
     assert state.attributes.get("gps_accuracy") == accuracy


-async def test_location_invalid_devid(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_location_invalid_devid(hass: HomeAssistant) -> None:
     """Test the update of a location."""
     await send_message(hass, "owntracks/paulus/nexus-5x", LOCATION_MESSAGE)
     state = hass.states.get("device_tracker.paulus_nexus_5x")
     assert state.state == "outer"


-async def test_location_update(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_location_update(hass: HomeAssistant) -> None:
     """Test the update of a location."""
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)

@@ -424,7 +429,8 @@ async def test_location_update(hass: HomeAssistant, context) -> None:
     assert_location_state(hass, "outer")


-async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_location_update_no_t_key(hass: HomeAssistant) -> None:
     """Test the update of a location when message does not contain 't'."""
     message = LOCATION_MESSAGE.copy()
     message.pop("t")
@@ -436,7 +442,8 @@ async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None:
     assert_location_state(hass, "outer")


-async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_location_inaccurate_gps(hass: HomeAssistant) -> None:
     """Test the location for inaccurate GPS information."""
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_INACCURATE)
@@ -446,7 +453,8 @@ async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None:
     assert_location_longitude(hass, LOCATION_MESSAGE["lon"])


-async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_location_zero_accuracy_gps(hass: HomeAssistant) -> None:
     """Ignore the location for zero accuracy GPS information."""
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_ZERO_ACCURACY)
@@ -458,7 +466,9 @@ async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None:

 # ------------------------------------------------------------------------
 # GPS based event entry / exit testing
-async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None:
+async def test_event_gps_entry_exit(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the entry event."""
     # Entering the owntracks circular region named "inner"
     await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE)
@@ -496,7 +506,9 @@ async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None:
     assert_location_accuracy(hass, LOCATION_MESSAGE["acc"])


-async def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None:
+async def test_event_gps_with_spaces(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the entry event."""
     message = build_message({"desc": "inner 2"}, REGION_GPS_ENTER_MESSAGE)
     await send_message(hass, EVENT_TOPIC, message)
@@ -509,7 +521,8 @@ async def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None:
     assert not context().regions_entered[USER]


-async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_gps_entry_inaccurate(hass: HomeAssistant) -> None:
     """Test the event for inaccurate entry."""
     # Set location to the outer zone.
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
@@ -522,7 +535,9 @@ async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None:
     assert_location_state(hass, "inner")


-async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) -> None:
+async def test_event_gps_entry_exit_inaccurate(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the event for inaccurate exit."""
     await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE)

@@ -542,7 +557,9 @@ async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) ->
     assert not context().regions_entered[USER]


-async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context) -> None:
+async def test_event_gps_entry_exit_zero_accuracy(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test entry/exit events with accuracy zero."""
     await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE_ZERO)

@@ -562,9 +579,8 @@ async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context)
     assert not context().regions_entered[USER]


-async def test_event_gps_exit_outside_zone_sets_away(
-    hass: HomeAssistant, context
-) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_gps_exit_outside_zone_sets_away(hass: HomeAssistant) -> None:
     """Test the event for exit zone."""
     await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE)
     assert_location_state(hass, "inner")
@@ -577,7 +593,8 @@ async def test_event_gps_exit_outside_zone_sets_away(
     assert_location_state(hass, STATE_NOT_HOME)


-async def test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_gps_entry_exit_right_order(hass: HomeAssistant) -> None:
     """Test the event for ordering."""
     # Enter inner zone
     # Set location to the outer zone.
@@ -602,7 +619,8 @@ async def test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -
     assert_location_state(hass, "outer")


-async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant) -> None:
     """Test the event for wrong order."""
     # Enter inner zone
     await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE)
@@ -625,7 +643,8 @@ async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) ->
     assert_location_state(hass, "outer")


-async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_gps_entry_unknown_zone(hass: HomeAssistant) -> None:
     """Test the event for unknown zone."""
     # Just treat as location update
     message = build_message({"desc": "unknown"}, REGION_GPS_ENTER_MESSAGE)
@@ -634,7 +653,8 @@ async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> Non
     assert_location_state(hass, "inner")


-async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_gps_exit_unknown_zone(hass: HomeAssistant) -> None:
     """Test the event for unknown zone."""
     # Just treat as location update
     message = build_message({"desc": "unknown"}, REGION_GPS_LEAVE_MESSAGE)
@@ -643,7 +663,8 @@ async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None
     assert_location_state(hass, "outer")


-async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_entry_zone_loading_dash(hass: HomeAssistant) -> None:
     """Test the event for zone landing."""
     # Make sure the leading - is ignored
     # Owntracks uses this to switch on hold
@@ -652,7 +673,9 @@ async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> No
     assert_location_state(hass, "inner")


-async def test_events_only_on(hass: HomeAssistant, context) -> None:
+async def test_events_only_on(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test events_only config suppresses location updates."""
     # Sending a location message that is not home
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME)
@@ -673,7 +696,9 @@ async def test_events_only_on(hass: HomeAssistant, context) -> None:
     assert_location_state(hass, STATE_NOT_HOME)


-async def test_events_only_off(hass: HomeAssistant, context) -> None:
+async def test_events_only_off(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test when events_only is False."""
     # Sending a location message that is not home
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME)
@@ -694,7 +719,8 @@ async def test_events_only_off(hass: HomeAssistant, context) -> None:
     assert_location_state(hass, "outer")


-async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_source_type_entry_exit(hass: HomeAssistant) -> None:
     """Test the entry and exit events of source type."""
     # Entering the owntracks circular region named "inner"
     await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE)
@@ -724,7 +750,9 @@ async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> Non

 # Region Beacon based event entry / exit testing
-async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None:
+async def test_event_region_entry_exit(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the entry event."""
     # Seeing a beacon named "inner"
     await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE)
@@ -763,7 +791,9 @@ async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None:
     assert_location_accuracy(hass, LOCATION_MESSAGE["acc"])


-async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None:
+async def test_event_region_with_spaces(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the entry event."""
     message = build_message({"desc": "inner 2"}, REGION_BEACON_ENTER_MESSAGE)
     await send_message(hass, EVENT_TOPIC, message)
@@ -776,9 +806,8 @@ async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None:
     assert not context().regions_entered[USER]


-async def test_event_region_entry_exit_right_order(
-    hass: HomeAssistant, context
-) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_region_entry_exit_right_order(hass: HomeAssistant) -> None:
     """Test the event for ordering."""
     # Enter inner zone
     # Set location to the outer zone.
@@ -809,9 +838,8 @@ async def test_event_region_entry_exit_right_order(
     assert_location_state(hass, "inner")


-async def test_event_region_entry_exit_wrong_order(
-    hass: HomeAssistant, context
-) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_region_entry_exit_wrong_order(hass: HomeAssistant) -> None:
     """Test the event for wrong order."""
     # Enter inner zone
     await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE)
@@ -838,9 +866,8 @@ async def test_event_region_entry_exit_wrong_order(
     assert_location_state(hass, "inner_2")


-async def test_event_beacon_unknown_zone_no_location(
-    hass: HomeAssistant, context
-) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_beacon_unknown_zone_no_location(hass: HomeAssistant) -> None:
     """Test the event for unknown zone."""
     # A beacon which does not match a HA zone is the
     # definition of a mobile beacon. In this case, "unknown"
@@ -865,7 +892,8 @@ async def test_event_beacon_unknown_zone_no_location(
     assert_mobile_tracker_state(hass, "unknown", "unknown")


-async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_beacon_unknown_zone(hass: HomeAssistant) -> None:
     """Test the event for unknown zone."""
     # A beacon which does not match a HA zone is the
     # definition of a mobile beacon. In this case, "unknown"
@@ -885,9 +913,8 @@ async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None:
     assert_mobile_tracker_state(hass, "outer", "unknown")


-async def test_event_beacon_entry_zone_loading_dash(
-    hass: HomeAssistant, context
-) -> None:
+@pytest.mark.usefixtures("context")
+async def test_event_beacon_entry_zone_loading_dash(hass: HomeAssistant) -> None:
     """Test the event for beacon zone landing."""
     # Make sure the leading - is ignored
     # Owntracks uses this to switch on hold
@@ -899,7 +926,8 @@ async def test_event_beacon_entry_zone_loading_dash(

 # ------------------------------------------------------------------------
 # Mobile Beacon based event entry / exit testing
-async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_mobile_enter_move_beacon(hass: HomeAssistant) -> None:
     """Test the movement of a beacon."""
     # I am in the outer zone.
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
@@ -923,7 +951,8 @@ async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None:
     assert_mobile_tracker_latitude(hass, not_home_lat)


-async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant) -> None:
     """Test the enter and the exit of a mobile beacon."""
     # I am in the outer zone.
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
@@ -946,7 +975,8 @@ async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) ->
     assert_mobile_tracker_state(hass, "outer")


-async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_mobile_exit_move_beacon(hass: HomeAssistant) -> None:
     """Test the exit move of a beacon."""
     # I am in the outer zone.
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
@@ -968,7 +998,9 @@ async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None:
     assert_mobile_tracker_state(hass, "outer")


-async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> None:
+async def test_mobile_multiple_async_enter_exit(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the multiple entering."""
     # Test race condition
     for _ in range(20):
@@ -988,7 +1020,9 @@ async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) ->
     assert len(context().mobile_beacons_active["greg_phone"]) == 0


-async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None:
+async def test_mobile_multiple_enter_exit(
+    hass: HomeAssistant, context: OwnTracksContextFactory
+) -> None:
     """Test the multiple entering."""
     await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE)
     await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE)
@@ -997,7 +1031,8 @@ async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None:
     assert len(context().mobile_beacons_active["greg_phone"]) == 0


-async def test_complex_movement(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_complex_movement(hass: HomeAssistant) -> None:
     """Test a complex sequence representative of real-world use."""
     # I am in the outer zone.
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
@@ -1119,9 +1154,8 @@ async def test_complex_movement(hass: HomeAssistant, context) -> None:
     assert_mobile_tracker_state(hass, "outer")


-async def test_complex_movement_sticky_keys_beacon(
-    hass: HomeAssistant, context
-) -> None:
+@pytest.mark.usefixtures("context")
+async def test_complex_movement_sticky_keys_beacon(hass: HomeAssistant) -> None:
     """Test a complex sequence which was previously broken."""
     # I am not_home
     await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE)
@@ -1233,7 +1267,8 @@ async def test_complex_movement_sticky_keys_beacon(
     assert_mobile_tracker_latitude(hass, INNER_ZONE["latitude"])


-async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_waypoint_import_simple(hass: HomeAssistant) -> None:
     """Test a simple import of list of waypoints."""
     waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy()
     await send_message(hass, WAYPOINTS_TOPIC, waypoints_message)
@@ -1244,7 +1279,8 @@ async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None:
     assert wayp is not None


-async def test_waypoint_import_block(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_waypoint_import_block(hass: HomeAssistant) -> None:
     """Test import of list of waypoints for blocked user."""
     waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy()
     await send_message(hass, WAYPOINTS_TOPIC_BLOCKED, waypoints_message)
@@ -1275,7 +1311,8 @@ async def test_waypoint_import_no_whitelist(hass: HomeAssistant, setup_comp) ->
     assert wayp is not None


-async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_waypoint_import_bad_json(hass: HomeAssistant) -> None:
     """Test importing a bad JSON payload."""
     waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy()
     await send_message(hass, WAYPOINTS_TOPIC, waypoints_message, True)
@@ -1286,7 +1323,8 @@ async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None:
     assert wayp is None


-async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_waypoint_import_existing(hass: HomeAssistant) -> None:
     """Test importing a zone that exists."""
     waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy()
     await send_message(hass, WAYPOINTS_TOPIC, waypoints_message)
@@ -1299,7 +1337,8 @@ async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None:
     assert wayp == new_wayp


-async def test_single_waypoint_import(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_single_waypoint_import(hass: HomeAssistant) -> None:
     """Test single waypoint message."""
     waypoint_message = WAYPOINT_MESSAGE.copy()
     await send_message(hass, WAYPOINT_TOPIC, waypoint_message)
     wayp = hass.states.get(WAYPOINT_ENTITY_NAMES[0])
     assert wayp is not None


-async def test_not_implemented_message(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context")
+async def test_not_implemented_message(hass: HomeAssistant) -> None:
     """Handle not implemented message type."""
     patch_handler = patch(
         "homeassistant.components.owntracks.messages.async_handle_not_impl_msg",
@@ -1318,7 +1358,8 @@ async def test_not_implemented_message(hass: HomeAssistant, context) -> None:
     patch_handler.stop()


-async def test_unsupported_message(hass: HomeAssistant, context) -> None:
+@pytest.mark.usefixtures("context") +async def test_unsupported_message(hass: HomeAssistant) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_unsupported_msg", From 1ddc7232745c8c66c95fc0483c679de78b056ef2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 13:12:24 +0200 Subject: [PATCH 0703/1635] Improve type hints in powerwall tests (#123872) --- tests/components/powerwall/test_switch.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/powerwall/test_switch.py b/tests/components/powerwall/test_switch.py index b01f60210a6..b4ff0ca724e 100644 --- a/tests/components/powerwall/test_switch.py +++ b/tests/components/powerwall/test_switch.py @@ -1,6 +1,6 @@ """Test for Powerwall off-grid switch.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from tesla_powerwall import GridStatus, PowerwallError @@ -24,7 +24,7 @@ ENTITY_ID = "switch.mysite_off_grid_operation" @pytest.fixture(name="mock_powerwall") -async def mock_powerwall_fixture(hass): +async def mock_powerwall_fixture(hass: HomeAssistant) -> MagicMock: """Set up base powerwall fixture.""" mock_powerwall = await _mock_powerwall_with_fixtures(hass) From dc2886d9b12078867cd3d2771869c3e29539892a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 14 Aug 2024 13:27:21 +0200 Subject: [PATCH 0704/1635] Use coordinator setup method in yale_smart_alarm (#123819) --- .../components/yale_smart_alarm/__init__.py | 4 --- .../yale_smart_alarm/coordinator.py | 25 ++++++++++--------- 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py index 1ef68d98a13..3c853afb6fd 100644 --- a/homeassistant/components/yale_smart_alarm/__init__.py +++ b/homeassistant/components/yale_smart_alarm/__init__.py @@ -6,7 +6,6 @@ from homeassistant.components.lock import CONF_DEFAULT_CODE, DOMAIN as LOCK_DOMA from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import entity_registry as er from .const import LOGGER, PLATFORMS @@ -19,9 +18,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool """Set up Yale from a config entry.""" coordinator = YaleDataUpdateCoordinator(hass, entry) - if not await hass.async_add_executor_job(coordinator.get_updates): - raise ConfigEntryAuthFailed - await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 5307e166e17..328558d0aba 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -20,10 +20,11 @@ from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, YALE_BASE_ERRORS class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """A Yale Data Update Coordinator.""" + yale: YaleSmartAlarmClient + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: """Initialize the Yale hub.""" self.entry = entry - self.yale: YaleSmartAlarmClient | None = None super().__init__( hass, LOGGER, @@ -32,6 +33,17 @@ class 
             always_update=False,
         )

+    async def _async_setup(self) -> None:
+        """Set up connection to Yale."""
+        try:
+            self.yale = YaleSmartAlarmClient(
+                self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD]
+            )
+        except AuthenticationError as error:
+            raise ConfigEntryAuthFailed from error
+        except YALE_BASE_ERRORS as error:
+            raise UpdateFailed from error
+
     async def _async_update_data(self) -> dict[str, Any]:
         """Fetch data from Yale."""

@@ -132,7 +144,6 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):

     def get_updates(self) -> dict[str, Any]:
         """Fetch data from Yale."""
-
-        if self.yale is None:
-            try:
-                self.yale = YaleSmartAlarmClient(
-                    self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD]
-                )
-            except AuthenticationError as error:
-                raise ConfigEntryAuthFailed from error
-            except YALE_BASE_ERRORS as error:
-                raise UpdateFailed from error
-
         try:
             arm_status = self.yale.get_armed_status()
             data = self.yale.get_all()

From 3b1b600606a379facc372ece209d4a279d7023de Mon Sep 17 00:00:00 2001
From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com>
Date: Wed, 14 Aug 2024 13:47:49 +0200
Subject: [PATCH 0705/1635] Bump aioautomower to 2024.8.0 (#123826)

---
 homeassistant/components/husqvarna_automower/manifest.json     | 2 +-
 requirements_all.txt                                           | 2 +-
 requirements_test_all.txt                                      | 2 +-
 tests/components/husqvarna_automower/fixtures/mower.json       | 1 +
 .../husqvarna_automower/snapshots/test_diagnostics.ambr        | 3 +++
 5 files changed, 7 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json
index bb03806e417..7326408e403 100644
--- a/homeassistant/components/husqvarna_automower/manifest.json
+++ b/homeassistant/components/husqvarna_automower/manifest.json
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower",
   "iot_class": "cloud_push",
   "loggers": ["aioautomower"],
-  "requirements": ["aioautomower==2024.7.3"]
+  "requirements": ["aioautomower==2024.8.0"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 58058e40e10..782d41171f7 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -198,7 +198,7 @@ aioaseko==0.2.0
 aioasuswrt==1.4.0

 # homeassistant.components.husqvarna_automower
-aioautomower==2024.7.3
+aioautomower==2024.8.0

 # homeassistant.components.azure_devops
 aioazuredevops==2.1.1
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 56a16273f3e..bf39d8c33e5 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -186,7 +186,7 @@ aioaseko==0.2.0
 aioasuswrt==1.4.0

 # homeassistant.components.husqvarna_automower
-aioautomower==2024.7.3
+aioautomower==2024.8.0

 # homeassistant.components.azure_devops
 aioazuredevops==2.1.1
diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json
index a5cae68f47c..aa8ea2cbef4 100644
--- a/tests/components/husqvarna_automower/fixtures/mower.json
+++ b/tests/components/husqvarna_automower/fixtures/mower.json
@@ -13,6 +13,7 @@
       "batteryPercent": 100
     },
     "capabilities": {
+      "canConfirmError": true,
       "headlights": true,
       "workAreas": true,
       "position": true,
diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr
index 212be85ce51..3838f2eb960 100644
--- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr
--- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr
+++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr
@@ -9,6 +9,7 @@
         dict({
           'end': '2024-03-02T00:00:00',
           'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR',
+          'schedule_no': 1,
           'start': '2024-03-01T19:00:00',
           'uid': '1140_300_MO,WE,FR',
           'work_area_id': None,
@@ -17,6 +18,7 @@
         dict({
           'end': '2024-03-02T08:00:00',
           'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA',
+          'schedule_no': 2,
           'start': '2024-03-02T00:00:00',
           'uid': '0_480_TU,TH,SA',
           'work_area_id': None,
@@ -53,6 +55,7 @@
       ]),
     }),
     'capabilities': dict({
+      'can_confirm_error': True,
      'headlights': True,
      'position': True,
      'stay_out_zones': True,

From b698dd8f32ce63be2e3a12f331d9986e11705a60 Mon Sep 17 00:00:00 2001
From: Joost Lekkerkerker
Date: Wed, 14 Aug 2024 13:49:10 +0200
Subject: [PATCH 0706/1635] Bump pyflic to 2.0.4 (#123895)

---
 homeassistant/components/flic/manifest.json | 2 +-
 requirements_all.txt                        | 2 +-
 requirements_test_all.txt                   | 2 +-
 script/licenses.py                          | 1 -
 4 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/homeassistant/components/flic/manifest.json b/homeassistant/components/flic/manifest.json
index 8fc146ded6a..0442e4a7b7b 100644
--- a/homeassistant/components/flic/manifest.json
+++ b/homeassistant/components/flic/manifest.json
@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/flic",
   "iot_class": "local_push",
   "loggers": ["pyflic"],
-  "requirements": ["pyflic==2.0.3"]
+  "requirements": ["pyflic==2.0.4"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 782d41171f7..ba19291607d 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1867,7 +1867,7 @@ pyfido==2.1.2
 pyfireservicerota==0.0.43
 
 # homeassistant.components.flic
-pyflic==2.0.3
+pyflic==2.0.4
 
 # homeassistant.components.futurenow
 pyfnip==0.2
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index bf39d8c33e5..c6785ede042 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1493,7 +1493,7 @@ pyfido==2.1.2
 pyfireservicerota==0.0.43
 
 # homeassistant.components.flic
-pyflic==2.0.3
+pyflic==2.0.4
 
 # homeassistant.components.forked_daapd
 pyforked-daapd==0.1.14
diff --git a/script/licenses.py b/script/licenses.py
index 9bcc7b54540..0c2870aebd8 100644
--- a/script/licenses.py
+++ b/script/licenses.py
@@ -179,7 +179,6 @@ TODO = {
     "aiocache": AwesomeVersion(
         "0.12.2"
     ),  # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved?
- "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) } From 80f5683cd66182dbae87f956d70a3dc59b9960d2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 13:59:06 +0200 Subject: [PATCH 0707/1635] Raise on database error in recorder.migration._add_constraint (#123646) * Raise on database error in recorder.migration._add_constraint * Fix test --- homeassistant/components/recorder/migration.py | 1 + tests/components/recorder/test_migrate.py | 7 ++++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 55856dcf449..e96cef14cf4 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -727,6 +727,7 @@ def _add_constraint( connection.execute(add_constraint) except (InternalError, OperationalError): _LOGGER.exception("Could not update foreign options in %s table", table) + raise def _delete_foreign_key_violations( diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 988eade29b6..2a33f08050c 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -962,9 +962,10 @@ def test_restore_foreign_key_constraints_with_error( engine = Mock() session_maker = Mock(return_value=session) - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_restore - ) + with pytest.raises(InternalError): + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_restore + ) assert "Could not update foreign options in events table" in caplog.text From ea7e88d000b8550ac01315dab78cd71ef0091227 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 14:04:29 +0200 Subject: [PATCH 0708/1635] Pass None instead of empty dict when registering entity services (#123878) --- homeassistant/components/agent_dvr/camera.py | 2 +- homeassistant/components/alert/__init__.py | 6 +++--- .../components/androidtv/media_player.py | 2 +- homeassistant/components/automation/__init__.py | 4 ++-- homeassistant/components/blink/camera.py | 4 ++-- homeassistant/components/bond/light.py | 2 +- homeassistant/components/button/__init__.py | 2 +- homeassistant/components/camera/__init__.py | 8 ++++---- .../components/channels/media_player.py | 4 ++-- homeassistant/components/climate/__init__.py | 6 +++--- homeassistant/components/counter/__init__.py | 6 +++--- homeassistant/components/cover/__init__.py | 16 ++++++++-------- .../components/denonavr/media_player.py | 2 +- homeassistant/components/ecovacs/vacuum.py | 2 +- homeassistant/components/elgato/light.py | 2 +- homeassistant/components/elkm1/sensor.py | 4 ++-- homeassistant/components/ezviz/camera.py | 2 +- homeassistant/components/fan/__init__.py | 4 ++-- homeassistant/components/flo/switch.py | 6 +++--- homeassistant/components/harmony/remote.py | 2 +- homeassistant/components/hive/climate.py | 2 +- homeassistant/components/humidifier/__init__.py | 6 +++--- .../components/hydrawise/binary_sensor.py | 2 +- .../components/input_boolean/__init__.py | 6 +++--- .../components/input_button/__init__.py | 2 +- .../components/input_number/__init__.py | 4 ++-- .../components/input_select/__init__.py | 4 ++-- homeassistant/components/kef/media_player.py | 2 +- homeassistant/components/lawn_mower/__init__.py | 6 +++--- 29 files changed, 60 insertions(+), 60 deletions(-) diff --git a/homeassistant/components/agent_dvr/camera.py 
b/homeassistant/components/agent_dvr/camera.py index 4438bf72a1a..933d0c6b40b 100644 --- a/homeassistant/components/agent_dvr/camera.py +++ b/homeassistant/components/agent_dvr/camera.py @@ -59,7 +59,7 @@ async def async_setup_entry( platform = async_get_current_platform() for service, method in CAMERA_SERVICES.items(): - platform.async_register_entity_service(service, {}, method) + platform.async_register_entity_service(service, None, method) class AgentCamera(MjpegCamera): diff --git a/homeassistant/components/alert/__init__.py b/homeassistant/components/alert/__init__.py index f49a962fa87..c49e14f2c6f 100644 --- a/homeassistant/components/alert/__init__.py +++ b/homeassistant/components/alert/__init__.py @@ -124,9 +124,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if not entities: return False - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") await component.async_add_entities(entities) diff --git a/homeassistant/components/androidtv/media_player.py b/homeassistant/components/androidtv/media_player.py index 884b5f60f57..75cf6ead6c3 100644 --- a/homeassistant/components/androidtv/media_player.py +++ b/homeassistant/components/androidtv/media_player.py @@ -87,7 +87,7 @@ async def async_setup_entry( "adb_command", ) platform.async_register_entity_service( - SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent" + SERVICE_LEARN_SENDEVENT, None, "learn_sendevent" ) platform.async_register_entity_service( SERVICE_DOWNLOAD, diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index f2ef404ab34..8ab9c478bc4 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -322,8 +322,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: }, trigger_service_handler, ) - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") component.async_register_entity_service( SERVICE_TURN_OFF, {vol.Optional(CONF_STOP_ACTIONS, default=DEFAULT_STOP_ACTIONS): cv.boolean}, diff --git a/homeassistant/components/blink/camera.py b/homeassistant/components/blink/camera.py index fcf19adf71e..cce9100a0bd 100644 --- a/homeassistant/components/blink/camera.py +++ b/homeassistant/components/blink/camera.py @@ -51,8 +51,8 @@ async def async_setup_entry( async_add_entities(entities) platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service(SERVICE_RECORD, {}, "record") - platform.async_register_entity_service(SERVICE_TRIGGER, {}, "trigger_camera") + platform.async_register_entity_service(SERVICE_RECORD, None, "record") + platform.async_register_entity_service(SERVICE_TRIGGER, None, "trigger_camera") platform.async_register_entity_service( SERVICE_SAVE_RECENT_CLIPS, {vol.Required(CONF_FILE_PATH): cv.string}, diff --git 
a/homeassistant/components/bond/light.py b/homeassistant/components/bond/light.py index 3bff7fe754e..c3cf23e4fad 100644 --- a/homeassistant/components/bond/light.py +++ b/homeassistant/components/bond/light.py @@ -52,7 +52,7 @@ async def async_setup_entry( for service in ENTITY_SERVICES: platform.async_register_entity_service( service, - {}, + None, f"async_{service}", ) diff --git a/homeassistant/components/button/__init__.py b/homeassistant/components/button/__init__.py index 323f9eddd77..3955fabdf00 100644 --- a/homeassistant/components/button/__init__.py +++ b/homeassistant/components/button/__init__.py @@ -54,7 +54,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_PRESS, - {}, + None, "_async_press_action", ) diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index ff11bc04da7..859ced1ba86 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -431,13 +431,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, unsub_track_time_interval) component.async_register_entity_service( - SERVICE_ENABLE_MOTION, {}, "async_enable_motion_detection" + SERVICE_ENABLE_MOTION, None, "async_enable_motion_detection" ) component.async_register_entity_service( - SERVICE_DISABLE_MOTION, {}, "async_disable_motion_detection" + SERVICE_DISABLE_MOTION, None, "async_disable_motion_detection" ) - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") component.async_register_entity_service( SERVICE_SNAPSHOT, CAMERA_SERVICE_SNAPSHOT, async_handle_snapshot_service ) diff --git a/homeassistant/components/channels/media_player.py b/homeassistant/components/channels/media_player.py index 07ed8ce7d66..f6de35a4156 100644 --- a/homeassistant/components/channels/media_player.py +++ b/homeassistant/components/channels/media_player.py @@ -49,12 +49,12 @@ async def async_setup_platform( platform.async_register_entity_service( SERVICE_SEEK_FORWARD, - {}, + None, "seek_forward", ) platform.async_register_entity_service( SERVICE_SEEK_BACKWARD, - {}, + None, "seek_backward", ) platform.async_register_entity_service( diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index f546ae0e671..6097e4f1346 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -156,19 +156,19 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_TURN_ON, - {}, + None, "async_turn_on", [ClimateEntityFeature.TURN_ON], ) component.async_register_entity_service( SERVICE_TURN_OFF, - {}, + None, "async_turn_off", [ClimateEntityFeature.TURN_OFF], ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [ClimateEntityFeature.TURN_OFF, ClimateEntityFeature.TURN_ON], ) diff --git a/homeassistant/components/counter/__init__.py b/homeassistant/components/counter/__init__.py index 324668a63e2..f0a14aa7951 100644 --- a/homeassistant/components/counter/__init__.py +++ b/homeassistant/components/counter/__init__.py @@ -122,9 +122,9 @@ async 
def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS ).async_setup(hass) - component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment") - component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement") - component.async_register_entity_service(SERVICE_RESET, {}, "async_reset") + component.async_register_entity_service(SERVICE_INCREMENT, None, "async_increment") + component.async_register_entity_service(SERVICE_DECREMENT, None, "async_decrement") + component.async_register_entity_service(SERVICE_RESET, None, "async_reset") component.async_register_entity_service( SERVICE_SET_VALUE, {vol.Required(VALUE): cv.positive_int}, diff --git a/homeassistant/components/cover/__init__.py b/homeassistant/components/cover/__init__.py index 645bd88de7a..90d2b644810 100644 --- a/homeassistant/components/cover/__init__.py +++ b/homeassistant/components/cover/__init__.py @@ -158,11 +158,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_OPEN_COVER, {}, "async_open_cover", [CoverEntityFeature.OPEN] + SERVICE_OPEN_COVER, None, "async_open_cover", [CoverEntityFeature.OPEN] ) component.async_register_entity_service( - SERVICE_CLOSE_COVER, {}, "async_close_cover", [CoverEntityFeature.CLOSE] + SERVICE_CLOSE_COVER, None, "async_close_cover", [CoverEntityFeature.CLOSE] ) component.async_register_entity_service( @@ -177,33 +177,33 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( - SERVICE_STOP_COVER, {}, "async_stop_cover", [CoverEntityFeature.STOP] + SERVICE_STOP_COVER, None, "async_stop_cover", [CoverEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE], ) component.async_register_entity_service( SERVICE_OPEN_COVER_TILT, - {}, + None, "async_open_cover_tilt", [CoverEntityFeature.OPEN_TILT], ) component.async_register_entity_service( SERVICE_CLOSE_COVER_TILT, - {}, + None, "async_close_cover_tilt", [CoverEntityFeature.CLOSE_TILT], ) component.async_register_entity_service( SERVICE_STOP_COVER_TILT, - {}, + None, "async_stop_cover_tilt", [CoverEntityFeature.STOP_TILT], ) @@ -221,7 +221,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_TOGGLE_COVER_TILT, - {}, + None, "async_toggle_tilt", [CoverEntityFeature.OPEN_TILT | CoverEntityFeature.CLOSE_TILT], ) diff --git a/homeassistant/components/denonavr/media_player.py b/homeassistant/components/denonavr/media_player.py index 8d6df72a67e..a7d8565d6a4 100644 --- a/homeassistant/components/denonavr/media_player.py +++ b/homeassistant/components/denonavr/media_player.py @@ -152,7 +152,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_UPDATE_AUDYSSEY, - {}, + None, f"async_{SERVICE_UPDATE_AUDYSSEY}", ) diff --git a/homeassistant/components/ecovacs/vacuum.py b/homeassistant/components/ecovacs/vacuum.py index e690038ff71..0d14267e08d 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -65,7 +65,7 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( SERVICE_RAW_GET_POSITIONS, - {}, + None, "async_raw_get_positions", 
supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/elgato/light.py b/homeassistant/components/elgato/light.py index 339bed97f6f..a62a26f21d3 100644 --- a/homeassistant/components/elgato/light.py +++ b/homeassistant/components/elgato/light.py @@ -40,7 +40,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_IDENTIFY, - {}, + None, ElgatoLight.async_identify.__name__, ) diff --git a/homeassistant/components/elkm1/sensor.py b/homeassistant/components/elkm1/sensor.py index 7d3601f0bd0..16f877719a7 100644 --- a/homeassistant/components/elkm1/sensor.py +++ b/homeassistant/components/elkm1/sensor.py @@ -56,7 +56,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_SENSOR_COUNTER_REFRESH, - {}, + None, "async_counter_refresh", ) platform.async_register_entity_service( @@ -71,7 +71,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_SENSOR_ZONE_TRIGGER, - {}, + None, "async_zone_trigger", ) diff --git a/homeassistant/components/ezviz/camera.py b/homeassistant/components/ezviz/camera.py index 455c41b385f..3c4a5f70ff4 100644 --- a/homeassistant/components/ezviz/camera.py +++ b/homeassistant/components/ezviz/camera.py @@ -112,7 +112,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( - SERVICE_WAKE_DEVICE, {}, "perform_wake_device" + SERVICE_WAKE_DEVICE, None, "perform_wake_device" ) diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index 6ecc675a45e..5a15ece665a 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -139,11 +139,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: [FanEntityFeature.TURN_ON], ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [FanEntityFeature.TURN_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [FanEntityFeature.TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [FanEntityFeature.TURN_OFF, FanEntityFeature.TURN_ON], ) diff --git a/homeassistant/components/flo/switch.py b/homeassistant/components/flo/switch.py index ab201dfb906..f0460839837 100644 --- a/homeassistant/components/flo/switch.py +++ b/homeassistant/components/flo/switch.py @@ -42,13 +42,13 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( - SERVICE_SET_AWAY_MODE, {}, "async_set_mode_away" + SERVICE_SET_AWAY_MODE, None, "async_set_mode_away" ) platform.async_register_entity_service( - SERVICE_SET_HOME_MODE, {}, "async_set_mode_home" + SERVICE_SET_HOME_MODE, None, "async_set_mode_home" ) platform.async_register_entity_service( - SERVICE_RUN_HEALTH_TEST, {}, "async_run_health_test" + SERVICE_RUN_HEALTH_TEST, None, "async_run_health_test" ) platform.async_register_entity_service( SERVICE_SET_SLEEP_MODE, diff --git a/homeassistant/components/harmony/remote.py b/homeassistant/components/harmony/remote.py index d30aa475944..efbd4b2ac02 100644 --- a/homeassistant/components/harmony/remote.py +++ b/homeassistant/components/harmony/remote.py @@ -75,7 +75,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_SYNC, - {}, + None, "sync", ) platform.async_register_entity_service( diff --git a/homeassistant/components/hive/climate.py b/homeassistant/components/hive/climate.py index 
f4c8e678702..87d93eea95f 100644 --- a/homeassistant/components/hive/climate.py +++ b/homeassistant/components/hive/climate.py @@ -83,7 +83,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_BOOST_HEATING_OFF, - {}, + None, "async_heating_boost_off", ) diff --git a/homeassistant/components/humidifier/__init__.py b/homeassistant/components/humidifier/__init__.py index ce94eaaf5a0..37e2bd3e3ba 100644 --- a/homeassistant/components/humidifier/__init__.py +++ b/homeassistant/components/humidifier/__init__.py @@ -92,9 +92,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) await component.async_setup(config) - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") component.async_register_entity_service( SERVICE_SET_MODE, {vol.Required(ATTR_MODE): cv.string}, diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index 0e00d237fae..9b6dcadf95f 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -110,7 +110,7 @@ async def async_setup_entry( ) async_add_entities(entities) platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service(SERVICE_RESUME, {}, "resume") + platform.async_register_entity_service(SERVICE_RESUME, None, "resume") platform.async_register_entity_service( SERVICE_START_WATERING, SCHEMA_START_WATERING, "start_watering" ) diff --git a/homeassistant/components/input_boolean/__init__.py b/homeassistant/components/input_boolean/__init__.py index 57165c5508a..54457ab2fb7 100644 --- a/homeassistant/components/input_boolean/__init__.py +++ b/homeassistant/components/input_boolean/__init__.py @@ -138,11 +138,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: schema=RELOAD_SERVICE_SCHEMA, ) - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") return True diff --git a/homeassistant/components/input_button/__init__.py b/homeassistant/components/input_button/__init__.py index 1488d80a1f5..6584b40fb55 100644 --- a/homeassistant/components/input_button/__init__.py +++ b/homeassistant/components/input_button/__init__.py @@ -123,7 +123,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: schema=RELOAD_SERVICE_SCHEMA, ) - component.async_register_entity_service(SERVICE_PRESS, {}, "_async_press_action") + component.async_register_entity_service(SERVICE_PRESS, None, "_async_press_action") return True diff --git a/homeassistant/components/input_number/__init__.py b/homeassistant/components/input_number/__init__.py index f55ceabc6f0..d52bfedfe77 100644 --- 
a/homeassistant/components/input_number/__init__.py
+++ b/homeassistant/components/input_number/__init__.py
@@ -157,9 +157,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         "async_set_value",
     )
 
-    component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment")
+    component.async_register_entity_service(SERVICE_INCREMENT, None, "async_increment")
 
-    component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement")
+    component.async_register_entity_service(SERVICE_DECREMENT, None, "async_decrement")
 
     return True
 
diff --git a/homeassistant/components/input_select/__init__.py b/homeassistant/components/input_select/__init__.py
index 44d2df02a92..6efe16240cb 100644
--- a/homeassistant/components/input_select/__init__.py
+++ b/homeassistant/components/input_select/__init__.py
@@ -183,13 +183,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
     component.async_register_entity_service(
         SERVICE_SELECT_FIRST,
-        {},
+        None,
         InputSelect.async_first.__name__,
     )
 
     component.async_register_entity_service(
         SERVICE_SELECT_LAST,
-        {},
+        None,
         InputSelect.async_last.__name__,
     )
 
diff --git a/homeassistant/components/kef/media_player.py b/homeassistant/components/kef/media_player.py
index ad335499ba4..1c5188b1a6f 100644
--- a/homeassistant/components/kef/media_player.py
+++ b/homeassistant/components/kef/media_player.py
@@ -161,7 +161,7 @@ async def async_setup_platform(
         },
         "set_mode",
     )
-    platform.async_register_entity_service(SERVICE_UPDATE_DSP, {}, "update_dsp")
+    platform.async_register_entity_service(SERVICE_UPDATE_DSP, None, "update_dsp")
 
     def add_service(name, which, option):
         options = DSP_OPTION_MAPPING[which]
diff --git a/homeassistant/components/lawn_mower/__init__.py b/homeassistant/components/lawn_mower/__init__.py
index 27765d207d8..9eef6ad8343 100644
--- a/homeassistant/components/lawn_mower/__init__.py
+++ b/homeassistant/components/lawn_mower/__init__.py
@@ -39,15 +39,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
     component.async_register_entity_service(
         SERVICE_START_MOWING,
-        {},
+        None,
         "async_start_mowing",
         [LawnMowerEntityFeature.START_MOWING],
     )
     component.async_register_entity_service(
-        SERVICE_PAUSE, {}, "async_pause", [LawnMowerEntityFeature.PAUSE]
+        SERVICE_PAUSE, None, "async_pause", [LawnMowerEntityFeature.PAUSE]
     )
     component.async_register_entity_service(
-        SERVICE_DOCK, {}, "async_dock", [LawnMowerEntityFeature.DOCK]
+        SERVICE_DOCK, None, "async_dock", [LawnMowerEntityFeature.DOCK]
     )
 
     return True

From e050d187c4c4b43fc1ff3c6133baae22541fdab5 Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Wed, 14 Aug 2024 14:04:53 +0200
Subject: [PATCH 0709/1635] Clarify SQLite can't drop foreign key constraints (#123898)

---
 .../components/recorder/migration.py      | 48 ++++++++++++++++---
 tests/components/recorder/test_migrate.py | 48 +++++++++++++++++++
 2 files changed, 90 insertions(+), 6 deletions(-)

diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py
index e96cef14cf4..ccdaf3082e0 100644
--- a/homeassistant/components/recorder/migration.py
+++ b/homeassistant/components/recorder/migration.py
@@ -585,7 +585,18 @@ def _modify_columns(
 
 def _update_states_table_with_foreign_key_options(
     session_maker: Callable[[], Session], engine: Engine
 ) -> None:
-    """Add the options to foreign key constraints."""
+    """Add the options to foreign key constraints.
+
+    This is not supported for SQLite because it does not support
+    dropping constraints.
+    """
+
+    if engine.dialect.name not in (SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL):
+        raise RuntimeError(
+            "_update_states_table_with_foreign_key_options not supported for "
+            f"{engine.dialect.name}"
+        )
+
     inspector = sqlalchemy.inspect(engine)
     tmp_states_table = Table(TABLE_STATES, MetaData())
     alters = [
@@ -633,7 +644,17 @@ def _update_states_table_with_foreign_key_options(
 def _drop_foreign_key_constraints(
     session_maker: Callable[[], Session], engine: Engine, table: str, column: str
 ) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]:
-    """Drop foreign key constraints for a table on specific columns."""
+    """Drop foreign key constraints for a table on specific columns.
+
+    This is not supported for SQLite because it does not support
+    dropping constraints.
+    """
+
+    if engine.dialect.name not in (SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL):
+        raise RuntimeError(
+            f"_drop_foreign_key_constraints not supported for {engine.dialect.name}"
+        )
+
     inspector = sqlalchemy.inspect(engine)
     dropped_constraints = [
         (table, column, foreign_key)
@@ -1026,7 +1047,17 @@ class _SchemaVersion11Migrator(_SchemaVersionMigrator, target_version=11):
     def _apply_update(self) -> None:
         """Version specific update method."""
         _create_index(self.session_maker, "states", "ix_states_old_state_id")
-        _update_states_table_with_foreign_key_options(self.session_maker, self.engine)
+
+        # _update_states_table_with_foreign_key_options first drops foreign
+        # key constraints, and then re-adds them with the correct settings.
+        # This is not supported by SQLite
+        if self.engine.dialect.name in (
+            SupportedDialect.MYSQL,
+            SupportedDialect.POSTGRESQL,
+        ):
+            _update_states_table_with_foreign_key_options(
+                self.session_maker, self.engine
+            )
 
 
 class _SchemaVersion12Migrator(_SchemaVersionMigrator, target_version=12):
@@ -1080,9 +1111,14 @@ class _SchemaVersion15Migrator(_SchemaVersionMigrator, target_version=15):
 class _SchemaVersion16Migrator(_SchemaVersionMigrator, target_version=16):
     def _apply_update(self) -> None:
         """Version specific update method."""
-        _drop_foreign_key_constraints(
-            self.session_maker, self.engine, TABLE_STATES, "old_state_id"
-        )
+        # Dropping foreign key constraints is not supported by SQLite
+        if self.engine.dialect.name in (
+            SupportedDialect.MYSQL,
+            SupportedDialect.POSTGRESQL,
+        ):
+            _drop_foreign_key_constraints(
+                self.session_maker, self.engine, TABLE_STATES, "old_state_id"
+            )
 
 
 class _SchemaVersion17Migrator(_SchemaVersionMigrator, target_version=17):
diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py
index 2a33f08050c..625a5023287 100644
--- a/tests/components/recorder/test_migrate.py
+++ b/tests/components/recorder/test_migrate.py
@@ -1053,3 +1053,51 @@ def test_delete_foreign_key_violations_unsupported_engine(
         RuntimeError, match="_delete_foreign_key_violations not supported for sqlite"
     ):
         migration._delete_foreign_key_violations(session_maker, engine, "", "", "", "")
+
+
+def test_drop_foreign_key_constraints_unsupported_engine(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test calling _drop_foreign_key_constraints with an unsupported engine."""
+
+    connection = Mock()
+    connection.execute = Mock(side_effect=InternalError(None, None, None))
+    session = Mock()
+    session.connection = Mock(return_value=connection)
+    instance = Mock()
+    instance.get_session = Mock(return_value=session)
+    engine = Mock()
+    engine.dialect = Mock()
+    engine.dialect.name = "sqlite"
+
+    session_maker = Mock(return_value=session)
+    with pytest.raises(
+        RuntimeError, match="_drop_foreign_key_constraints not supported for sqlite"
+    ):
+        migration._drop_foreign_key_constraints(session_maker, engine, "", "")
+
+
+def test_update_states_table_with_foreign_key_options_unsupported_engine(
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test calling function with an unsupported engine.
+
+    This tests _update_states_table_with_foreign_key_options.
+    """
+
+    connection = Mock()
+    connection.execute = Mock(side_effect=InternalError(None, None, None))
+    session = Mock()
+    session.connection = Mock(return_value=connection)
+    instance = Mock()
+    instance.get_session = Mock(return_value=session)
+    engine = Mock()
+    engine.dialect = Mock()
+    engine.dialect.name = "sqlite"
+
+    session_maker = Mock(return_value=session)
+    with pytest.raises(
+        RuntimeError,
+        match="_update_states_table_with_foreign_key_options not supported for sqlite",
+    ):
+        migration._update_states_table_with_foreign_key_options(session_maker, engine)

From f6cb28eb5b3ca0a1c71d8a5d2281f9be9c24e501 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Wed, 14 Aug 2024 07:42:20 -0500
Subject: [PATCH 0710/1635] Bump aioesphomeapi to 25.1.0 (#123851)

---
 homeassistant/components/esphome/manifest.json | 2 +-
 requirements_all.txt                           | 2 +-
 requirements_test_all.txt                      | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json
index 97724a12203..b2647709e8e 100644
--- a/homeassistant/components/esphome/manifest.json
+++ b/homeassistant/components/esphome/manifest.json
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==25.0.0",
+    "aioesphomeapi==25.1.0",
     "esphome-dashboard-api==1.2.3",
     "bleak-esphome==1.0.0"
   ],
diff --git a/requirements_all.txt b/requirements_all.txt
index ba19291607d..beae1629b79 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0
 aioemonitor==1.0.5
 
 # homeassistant.components.esphome
-aioesphomeapi==25.0.0
+aioesphomeapi==25.1.0
 
 # homeassistant.components.flo
 aioflo==2021.11.0
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index c6785ede042..86be5c9e14a 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0
 aioemonitor==1.0.5
 
 # homeassistant.components.esphome
-aioesphomeapi==25.0.0
+aioesphomeapi==25.1.0
 
 # homeassistant.components.flo
 aioflo==2021.11.0

From ccde51da855fd035f0e79dfcb2c2c5fc2d2c9392 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Wed, 14 Aug 2024 14:56:03 +0200
Subject: [PATCH 0711/1635] Improve type hints in tasmota tests (#123913)

---
 tests/components/tasmota/conftest.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/components/tasmota/conftest.py b/tests/components/tasmota/conftest.py
index 48cd4012f07..0de0788d7d9 100644
--- a/tests/components/tasmota/conftest.py
+++ b/tests/components/tasmota/conftest.py
@@ -10,6 +10,7 @@ from homeassistant.components.tasmota.const import (
     DEFAULT_PREFIX,
     DOMAIN,
 )
+from homeassistant.core import HomeAssistant
 
 from tests.common import MockConfigEntry
 from tests.components.light.conftest import mock_light_profiles  # noqa: F401
@@ -55,6 +56,6 @@ async def setup_tasmota_helper(hass):
 
 
 @pytest.fixture
-async def setup_tasmota(hass):
+async def setup_tasmota(hass: HomeAssistant) -> None: """Set up Tasmota.""" await setup_tasmota_helper(hass) From 324b6529e862143050657dafa93bb2e86aeb1501 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 14:56:58 +0200 Subject: [PATCH 0712/1635] Improve type hints in telegram_bot tests (#123914) --- tests/components/telegram_bot/conftest.py | 29 +++++++++++++---------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/tests/components/telegram_bot/conftest.py b/tests/components/telegram_bot/conftest.py index 6ea5d1446dd..1afe70dcb8a 100644 --- a/tests/components/telegram_bot/conftest.py +++ b/tests/components/telegram_bot/conftest.py @@ -1,6 +1,8 @@ """Tests for the telegram_bot integration.""" +from collections.abc import AsyncGenerator, Generator from datetime import datetime +from typing import Any from unittest.mock import patch import pytest @@ -18,11 +20,12 @@ from homeassistant.const import ( CONF_URL, EVENT_HOMEASSISTANT_START, ) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture -def config_webhooks(): +def config_webhooks() -> dict[str, Any]: """Fixture for a webhooks platform configuration.""" return { DOMAIN: [ @@ -43,7 +46,7 @@ def config_webhooks(): @pytest.fixture -def config_polling(): +def config_polling() -> dict[str, Any]: """Fixture for a polling platform configuration.""" return { DOMAIN: [ @@ -62,7 +65,7 @@ def config_polling(): @pytest.fixture -def mock_register_webhook(): +def mock_register_webhook() -> Generator[None]: """Mock calls made by telegram_bot when (de)registering webhook.""" with ( patch( @@ -78,7 +81,7 @@ def mock_register_webhook(): @pytest.fixture -def mock_external_calls(): +def mock_external_calls() -> Generator[None]: """Mock calls that make calls to the live Telegram API.""" test_user = User(123456, "Testbot", True) message = Message( @@ -109,7 +112,7 @@ def mock_external_calls(): @pytest.fixture -def mock_generate_secret_token(): +def mock_generate_secret_token() -> Generator[str]: """Mock secret token generated for webhook.""" mock_secret_token = "DEADBEEF12345678DEADBEEF87654321" with patch( @@ -217,12 +220,12 @@ def update_callback_query(): @pytest.fixture async def webhook_platform( - hass, - config_webhooks, - mock_register_webhook, - mock_external_calls, - mock_generate_secret_token, -): + hass: HomeAssistant, + config_webhooks: dict[str, Any], + mock_register_webhook: None, + mock_external_calls: None, + mock_generate_secret_token: str, +) -> AsyncGenerator[None]: """Fixture for setting up the webhooks platform using appropriate config and mocks.""" await async_setup_component( hass, @@ -235,7 +238,9 @@ async def webhook_platform( @pytest.fixture -async def polling_platform(hass, config_polling, mock_external_calls): +async def polling_platform( + hass: HomeAssistant, config_polling: dict[str, Any], mock_external_calls: None +) -> None: """Fixture for setting up the polling platform using appropriate config and mocks.""" await async_setup_component( hass, From 67f761c0e9538c28a63d7102f8150c7f597be64f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 14:58:16 +0200 Subject: [PATCH 0713/1635] Improve type hints in template tests (#123915) --- tests/components/template/test_light.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index 
ad97146d0fb..065a1488dc9 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -1,5 +1,7 @@ """The tests for the Template light platform.""" +from typing import Any + import pytest from homeassistant.components import light @@ -152,7 +154,9 @@ OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { } -async def async_setup_light(hass, count, light_config): +async def async_setup_light( + hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: """Do setup of light integration.""" config = {"light": {"platform": "template", "lights": light_config}} @@ -169,7 +173,9 @@ async def async_setup_light(hass, count, light_config): @pytest.fixture -async def setup_light(hass, count, light_config): +async def setup_light( + hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: """Do setup of light integration.""" await async_setup_light(hass, count, light_config) From 7f6bf95aa6976da2c03ffdce9060f68c54874460 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:02:52 +0200 Subject: [PATCH 0714/1635] Improve type hints in universal tests (#123920) --- tests/components/universal/test_media_player.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 187b62a93a1..69d4c8666cf 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -220,7 +220,7 @@ class MockMediaPlayer(media_player.MediaPlayerEntity): @pytest.fixture -async def mock_states(hass): +async def mock_states(hass: HomeAssistant) -> Mock: """Set mock states used in tests.""" result = Mock() From 1e5762fbf7a55590d903100fb41963db8a7326b9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:03:38 +0200 Subject: [PATCH 0715/1635] Improve type hints in tod tests (#123917) --- tests/components/tod/test_binary_sensor.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/tod/test_binary_sensor.py b/tests/components/tod/test_binary_sensor.py index c4b28b527cb..b4b6b13d8e3 100644 --- a/tests/components/tod/test_binary_sensor.py +++ b/tests/components/tod/test_binary_sensor.py @@ -1,6 +1,6 @@ """Test Times of the Day Binary Sensor.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, tzinfo from freezegun.api import FrozenDateTimeFactory import pytest @@ -16,13 +16,13 @@ from tests.common import assert_setup_component, async_fire_time_changed @pytest.fixture -def hass_time_zone(): +def hass_time_zone() -> str: """Return default hass timezone.""" return "US/Pacific" @pytest.fixture(autouse=True) -async def setup_fixture(hass, hass_time_zone): +async def setup_fixture(hass: HomeAssistant, hass_time_zone: str) -> None: """Set up things to be run when tests are started.""" hass.config.latitude = 50.27583 hass.config.longitude = 18.98583 @@ -30,7 +30,7 @@ async def setup_fixture(hass, hass_time_zone): @pytest.fixture -def hass_tz_info(hass): +def hass_tz_info(hass: HomeAssistant) -> tzinfo | None: """Return timezone info for the hass timezone.""" return dt_util.get_time_zone(hass.config.time_zone) From 78c868c0754303a433296a4e0773d23507fd4e0e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:03:51 +0200 Subject: [PATCH 0716/1635] Improve type hints in tile tests (#123916) --- 
tests/components/tile/conftest.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/components/tile/conftest.py b/tests/components/tile/conftest.py index e3b55c49ae7..01a711d9261 100644 --- a/tests/components/tile/conftest.py +++ b/tests/components/tile/conftest.py @@ -1,6 +1,8 @@ """Define test fixtures for Tile.""" +from collections.abc import Generator import json +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -8,6 +10,7 @@ from pytile.tile import Tile from homeassistant.components.tile.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -16,7 +19,7 @@ TEST_USERNAME = "user@host.com" @pytest.fixture(name="api") -def api_fixture(hass, data_tile_details): +def api_fixture(data_tile_details: dict[str, Any]) -> Mock: """Define a pytile API object.""" tile = Tile(None, data_tile_details) tile.async_update = AsyncMock() @@ -29,7 +32,9 @@ def api_fixture(hass, data_tile_details): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry(domain=DOMAIN, unique_id=config[CONF_USERNAME], data=config) entry.add_to_hass(hass) @@ -37,7 +42,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -52,7 +57,7 @@ def data_tile_details_fixture(): @pytest.fixture(name="mock_pytile") -async def mock_pytile_fixture(api): +def mock_pytile_fixture(api: Mock) -> Generator[None]: """Define a fixture to patch pytile.""" with ( patch( @@ -64,7 +69,9 @@ async def mock_pytile_fixture(api): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_pytile): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pytile: None +) -> None: """Define a fixture to set up tile.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() From 99b1fc75d33a8ee61e89d3ffa5454d17608f2352 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:04:51 +0200 Subject: [PATCH 0717/1635] Improve type hints in traccar tests (#123919) --- tests/components/traccar/test_init.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index b25ab6a0a34..49127aec347 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -63,7 +63,7 @@ async def setup_zones(hass: HomeAssistant) -> None: @pytest.fixture(name="webhook_id") -async def webhook_id_fixture(hass, client): +async def webhook_id_fixture(hass: HomeAssistant, client: TestClient) -> str: """Initialize the Traccar component and get the webhook_id.""" await async_process_ha_core_config( hass, From 2c99bd178ce6ce1ab710d99af1aef0964a21346c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:06:33 +0200 Subject: [PATCH 0718/1635] Improve type hints in subaru tests (#123911) --- tests/components/subaru/conftest.py | 8 +++++--- tests/components/subaru/test_config_flow.py | 17 
++++++++++++----- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/tests/components/subaru/conftest.py b/tests/components/subaru/conftest.py index f769eba252c..e18ea8fd398 100644 --- a/tests/components/subaru/conftest.py +++ b/tests/components/subaru/conftest.py @@ -100,7 +100,7 @@ TEST_DEVICE_NAME = "test_vehicle_2" TEST_ENTITY_ID = f"sensor.{TEST_DEVICE_NAME}_odometer" -def advance_time_to_next_fetch(hass): +def advance_time_to_next_fetch(hass: HomeAssistant) -> None: """Fast forward time to next fetch.""" future = dt_util.utcnow() + timedelta(seconds=FETCH_INTERVAL + 30) async_fire_time_changed(hass, future) @@ -181,7 +181,7 @@ async def setup_subaru_config_entry( @pytest.fixture -async def subaru_config_entry(hass): +async def subaru_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Create a Subaru config entry prior to setup.""" await async_setup_component(hass, HA_DOMAIN, {}) config_entry = MockConfigEntry(**TEST_CONFIG_ENTRY) @@ -190,7 +190,9 @@ async def subaru_config_entry(hass): @pytest.fixture -async def ev_entry(hass, subaru_config_entry): +async def ev_entry( + hass: HomeAssistant, subaru_config_entry: MockConfigEntry +) -> MockConfigEntry: """Create a Subaru entry representing an EV vehicle with full STARLINK subscription.""" await setup_subaru_config_entry(hass, subaru_config_entry) assert DOMAIN in hass.config_entries.async_domains() diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 9bddeeee051..6abc544c92a 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -10,6 +10,7 @@ from subarulink.exceptions import InvalidCredentials, InvalidPIN, SubaruExceptio from homeassistant import config_entries from homeassistant.components.subaru import config_flow from homeassistant.components.subaru.const import CONF_UPDATE_ENABLED, DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_DEVICE_ID, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -389,7 +390,7 @@ async def test_option_flow(hass: HomeAssistant, options_form) -> None: @pytest.fixture -async def user_form(hass): +async def user_form(hass: HomeAssistant) -> ConfigFlowResult: """Return initial form for Subaru config flow.""" return await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -397,7 +398,9 @@ async def user_form(hass): @pytest.fixture -async def two_factor_start_form(hass, user_form): +async def two_factor_start_form( + hass: HomeAssistant, user_form: ConfigFlowResult +) -> ConfigFlowResult: """Return two factor form for Subaru config flow.""" with ( patch(MOCK_API_CONNECT, return_value=True), @@ -410,7 +413,9 @@ async def two_factor_start_form(hass, user_form): @pytest.fixture -async def two_factor_verify_form(hass, two_factor_start_form): +async def two_factor_verify_form( + hass: HomeAssistant, two_factor_start_form: ConfigFlowResult +) -> ConfigFlowResult: """Return two factor form for Subaru config flow.""" with ( patch( @@ -427,7 +432,9 @@ async def two_factor_verify_form(hass, two_factor_start_form): @pytest.fixture -async def pin_form(hass, two_factor_verify_form): +async def pin_form( + hass: HomeAssistant, two_factor_verify_form: ConfigFlowResult +) -> ConfigFlowResult: """Return PIN input form for Subaru config flow.""" with ( patch( @@ -443,7 +450,7 @@ async def pin_form(hass, two_factor_verify_form): 
@pytest.fixture -async def options_form(hass): +async def options_form(hass: HomeAssistant) -> ConfigFlowResult: """Return options form for Subaru config flow.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) From fa8f86b672144b335f381d781975e3f39558dcb0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:07:55 +0200 Subject: [PATCH 0719/1635] Improve type hints in smartthings tests (#123912) --- tests/components/smartthings/conftest.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 17e2c781989..70fd9db0744 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -91,7 +91,7 @@ async def setup_component( await async_setup_component(hass, "smartthings", {}) -def _create_location(): +def _create_location() -> Mock: loc = Mock(Location) loc.name = "Test Location" loc.location_id = str(uuid4()) @@ -99,19 +99,19 @@ def _create_location(): @pytest.fixture(name="location") -def location_fixture(): +def location_fixture() -> Mock: """Fixture for a single location.""" return _create_location() @pytest.fixture(name="locations") -def locations_fixture(location): +def locations_fixture(location: Mock) -> list[Mock]: """Fixture for 2 locations.""" return [location, _create_location()] @pytest.fixture(name="app") -async def app_fixture(hass, config_file): +async def app_fixture(hass: HomeAssistant, config_file: dict[str, str]) -> Mock: """Fixture for a single app.""" app = Mock(AppEntity) app.app_name = APP_NAME_PREFIX + str(uuid4()) @@ -133,7 +133,7 @@ async def app_fixture(hass, config_file): @pytest.fixture(name="app_oauth_client") -def app_oauth_client_fixture(): +def app_oauth_client_fixture() -> Mock: """Fixture for a single app's oauth.""" client = Mock(AppOAuthClient) client.client_id = str(uuid4()) @@ -150,7 +150,7 @@ def app_settings_fixture(app, config_file): return settings -def _create_installed_app(location_id, app_id): +def _create_installed_app(location_id: str, app_id: str) -> Mock: item = Mock(InstalledApp) item.installed_app_id = str(uuid4()) item.installed_app_status = InstalledAppStatus.AUTHORIZED @@ -161,7 +161,7 @@ def _create_installed_app(location_id, app_id): @pytest.fixture(name="installed_app") -def installed_app_fixture(location, app): +def installed_app_fixture(location: Mock, app: Mock) -> Mock: """Fixture for a single installed app.""" return _create_installed_app(location.location_id, app.app_id) @@ -222,7 +222,7 @@ def device_fixture(location): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, installed_app, location): +def config_entry_fixture(installed_app: Mock, location: Mock) -> MockConfigEntry: """Fixture representing a config entry.""" data = { CONF_ACCESS_TOKEN: str(uuid4()), From 3e5d0eb632313523ce4cfc637eb542294516d80d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:08:10 +0200 Subject: [PATCH 0720/1635] Improve type hints in owntracks tests (#123905) --- tests/components/owntracks/test_device_tracker.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index b4b5c00880c..bc2ae7ce4d8 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py 
@@ -1,7 +1,7 @@ """The tests for the Owntracks device tracker.""" import base64 -from collections.abc import Callable +from collections.abc import Callable, Generator import json import pickle from unittest.mock import patch @@ -294,7 +294,7 @@ def setup_comp( hass: HomeAssistant, mock_device_tracker_conf: list[Device], mqtt_mock: MqttMockHAClient, -): +) -> None: """Initialize components.""" hass.loop.run_until_complete(async_setup_component(hass, "device_tracker", {})) @@ -1426,7 +1426,7 @@ def mock_cipher(): @pytest.fixture -def config_context(hass, setup_comp): +def config_context(setup_comp: None) -> Generator[None]: """Set up the mocked context.""" patch_load = patch( "homeassistant.components.device_tracker.async_load_config", From f7e017aa73d912393cc95d7c9c488b6c858ca970 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:08:55 +0200 Subject: [PATCH 0721/1635] Improve type hints in sia tests (#123909) --- tests/components/sia/test_config_flow.py | 30 ++++++++++++++---------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/tests/components/sia/test_config_flow.py b/tests/components/sia/test_config_flow.py index 95de53d7fbe..b0d83855a25 100644 --- a/tests/components/sia/test_config_flow.py +++ b/tests/components/sia/test_config_flow.py @@ -1,5 +1,6 @@ """Test the sia config flow.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -16,6 +17,7 @@ from homeassistant.components.sia.const import ( CONF_ZONES, DOMAIN, ) +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PORT, CONF_PROTOCOL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -105,7 +107,7 @@ ADDITIONAL_OPTIONS = { @pytest.fixture -async def flow_at_user_step(hass): +async def flow_at_user_step(hass: HomeAssistant) -> ConfigFlowResult: """Return a initialized flow.""" return await hass.config_entries.flow.async_init( DOMAIN, @@ -114,7 +116,9 @@ async def flow_at_user_step(hass): @pytest.fixture -async def entry_with_basic_config(hass, flow_at_user_step): +async def entry_with_basic_config( + hass: HomeAssistant, flow_at_user_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a entry with a basic config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -123,7 +127,9 @@ async def entry_with_basic_config(hass, flow_at_user_step): @pytest.fixture -async def flow_at_add_account_step(hass, flow_at_user_step): +async def flow_at_add_account_step( + hass: HomeAssistant, flow_at_user_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a initialized flow at the additional account step.""" return await hass.config_entries.flow.async_configure( flow_at_user_step["flow_id"], BASIC_CONFIG_ADDITIONAL @@ -131,7 +137,9 @@ async def flow_at_add_account_step(hass, flow_at_user_step): @pytest.fixture -async def entry_with_additional_account_config(hass, flow_at_add_account_step): +async def entry_with_additional_account_config( + hass: HomeAssistant, flow_at_add_account_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a entry with a two account config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -139,7 +147,7 @@ async def entry_with_additional_account_config(hass, flow_at_add_account_step): ) -async def setup_sia(hass: HomeAssistant, 
config_entry: MockConfigEntry): +async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Add mock config to HASS.""" assert await async_setup_component(hass, DOMAIN, {}) config_entry.add_to_hass(hass) @@ -147,23 +155,21 @@ async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry): await hass.async_block_till_done() -async def test_form_start_user(hass: HomeAssistant, flow_at_user_step) -> None: +async def test_form_start_user(flow_at_user_step: ConfigFlowResult) -> None: """Start the form and check if you get the right id and schema for the user step.""" assert flow_at_user_step["step_id"] == "user" assert flow_at_user_step["errors"] is None assert flow_at_user_step["data_schema"] == HUB_SCHEMA -async def test_form_start_account( - hass: HomeAssistant, flow_at_add_account_step -) -> None: +async def test_form_start_account(flow_at_add_account_step: ConfigFlowResult) -> None: """Start the form and check if you get the right id and schema for the additional account step.""" assert flow_at_add_account_step["step_id"] == "add_account" assert flow_at_add_account_step["errors"] is None assert flow_at_add_account_step["data_schema"] == ACCOUNT_SCHEMA -async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: +async def test_create(entry_with_basic_config: ConfigFlowResult) -> None: """Test we create a entry through the form.""" assert entry_with_basic_config["type"] is FlowResultType.CREATE_ENTRY assert ( @@ -175,7 +181,7 @@ async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: async def test_create_additional_account( - hass: HomeAssistant, entry_with_additional_account_config + entry_with_additional_account_config: ConfigFlowResult, ) -> None: """Test we create a config with two accounts.""" assert entry_with_additional_account_config["type"] is FlowResultType.CREATE_ENTRY @@ -210,7 +216,7 @@ async def test_abort_form(hass: HomeAssistant) -> None: @pytest.fixture(autouse=True) -def mock_sia(): +def mock_sia() -> Generator[None]: """Mock SIAClient.""" with patch("homeassistant.components.sia.hub.SIAClient", autospec=True): yield From 17f0d9ce45c9eb7b65fec9f2da4991d02b1c352b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 14 Aug 2024 08:36:49 -0500 Subject: [PATCH 0722/1635] Map pre-heating and defrosting hvac actions in homekit (#123907) closes #123864 --- .../components/homekit/type_thermostats.py | 8 +++-- .../homekit/test_type_thermostats.py | 34 +++++++++++++++++++ 2 files changed, 40 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/homekit/type_thermostats.py b/homeassistant/components/homekit/type_thermostats.py index 5dc520e8568..a97f26b7abb 100644 --- a/homeassistant/components/homekit/type_thermostats.py +++ b/homeassistant/components/homekit/type_thermostats.py @@ -150,6 +150,8 @@ HC_HASS_TO_HOMEKIT_ACTION = { HVACAction.COOLING: HC_HEAT_COOL_COOL, HVACAction.DRYING: HC_HEAT_COOL_COOL, HVACAction.FAN: HC_HEAT_COOL_COOL, + HVACAction.PREHEATING: HC_HEAT_COOL_HEAT, + HVACAction.DEFROSTING: HC_HEAT_COOL_HEAT, } FAN_STATE_INACTIVE = 0 @@ -623,8 +625,10 @@ class Thermostat(HomeAccessory): ) # Set current operation mode for supported thermostats - if hvac_action := attributes.get(ATTR_HVAC_ACTION): - homekit_hvac_action = HC_HASS_TO_HOMEKIT_ACTION[hvac_action] + if (hvac_action := attributes.get(ATTR_HVAC_ACTION)) and ( + homekit_hvac_action := HC_HASS_TO_HOMEKIT_ACTION.get(hvac_action), + HC_HEAT_COOL_OFF, + ): self.char_current_heat_cool.set_value(homekit_hvac_action) # Update current temperature diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index 3a32e94e491..8454610566b 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -161,6 +161,40 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) - assert acc.char_current_temp.value == 23.0 assert acc.char_display_units.value == 0 + hass.states.async_set( + entity_id, + HVACMode.HEAT, + { + **base_attrs, + ATTR_TEMPERATURE: 22.2, + ATTR_CURRENT_TEMPERATURE: 17.8, + ATTR_HVAC_ACTION: HVACAction.PREHEATING, + }, + ) + await hass.async_block_till_done() + assert acc.char_target_temp.value == 22.2 + assert acc.char_current_heat_cool.value == 1 + assert acc.char_target_heat_cool.value == 1 + assert acc.char_current_temp.value == 17.8 + assert acc.char_display_units.value == 0 + + hass.states.async_set( + entity_id, + HVACMode.HEAT, + { + **base_attrs, + ATTR_TEMPERATURE: 22.2, + ATTR_CURRENT_TEMPERATURE: 17.8, + ATTR_HVAC_ACTION: HVACAction.DEFROSTING, + }, + ) + await hass.async_block_till_done() + assert acc.char_target_temp.value == 22.2 + assert acc.char_current_heat_cool.value == 1 + assert acc.char_target_heat_cool.value == 1 + assert acc.char_current_temp.value == 17.8 + assert acc.char_display_units.value == 0 + hass.states.async_set( entity_id, HVACMode.FAN_ONLY, From e6fc34325de46f85a5bd98856228c5989d050da4 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:39:52 +0200 Subject: [PATCH 0723/1635] Improve type hints in zha tests (#123926) --- tests/components/zha/test_config_flow.py | 62 +++++++++++++++++------- 1 file changed, 44 insertions(+), 18 deletions(-) diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index f3104141269..af6f2d9af0c 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -1,14 +1,16 @@ """Tests for ZHA config flow.""" +from collections.abc import Callable, Coroutine, Generator import copy from datetime import timedelta from ipaddress import ip_address import json +from typing 
import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch import uuid import pytest -import serial.tools.list_ports +from serial.tools.list_ports_common import ListPortInfo from zha.application.const import RadioType from zigpy.backups import BackupManager import zigpy.config @@ -36,6 +38,7 @@ from homeassistant.config_entries import ( SOURCE_USER, SOURCE_ZEROCONF, ConfigEntryState, + ConfigFlowResult, ) from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant @@ -43,6 +46,9 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +type RadioPicker = Callable[ + [RadioType], Coroutine[Any, Any, tuple[ConfigFlowResult, ListPortInfo]] +] PROBE_FUNCTION_PATH = "zigbee.application.ControllerApplication.probe" @@ -70,7 +76,7 @@ def mock_multipan_platform(): @pytest.fixture(autouse=True) -def mock_app(): +def mock_app() -> Generator[AsyncMock]: """Mock zigpy app interface.""" mock_app = AsyncMock() mock_app.backups = create_autospec(BackupManager, instance=True) @@ -130,9 +136,9 @@ def mock_detect_radio_type( return detect -def com_port(device="/dev/ttyUSB1234"): +def com_port(device="/dev/ttyUSB1234") -> ListPortInfo: """Mock of a serial port.""" - port = serial.tools.list_ports_common.ListPortInfo("/dev/ttyUSB1234") + port = ListPortInfo("/dev/ttyUSB1234") port.serial_number = "1234" port.manufacturer = "Virtual serial port" port.device = device @@ -1038,10 +1044,12 @@ def test_prevent_overwrite_ezsp_ieee() -> None: @pytest.fixture -def pick_radio(hass): +def pick_radio( + hass: HomeAssistant, +) -> Generator[RadioPicker]: """Fixture for the first step of the config flow (where a radio is picked).""" - async def wrapper(radio_type): + async def wrapper(radio_type: RadioType) -> tuple[ConfigFlowResult, ListPortInfo]: port = com_port() port_select = f"{port}, s/n: {port.serial_number} - {port.manufacturer}" @@ -1070,7 +1078,7 @@ def pick_radio(hass): async def test_strategy_no_network_settings( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test formation strategy when no network settings are present.""" mock_app.load_network_info = MagicMock(side_effect=NetworkNotFormed()) @@ -1083,7 +1091,7 @@ async def test_strategy_no_network_settings( async def test_formation_strategy_form_new_network( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test forming a new network.""" result, port = await pick_radio(RadioType.ezsp) @@ -1101,7 +1109,7 @@ async def test_formation_strategy_form_new_network( async def test_formation_strategy_form_initial_network( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test forming a new network, with no previous settings on the radio.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1122,7 +1130,7 @@ async def test_formation_strategy_form_initial_network( @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_onboarding_auto_formation_new_hardware( - mock_app, hass: HomeAssistant + mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test auto network formation with new hardware during onboarding.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1157,7 
+1165,7 @@ async def test_onboarding_auto_formation_new_hardware( async def test_formation_strategy_reuse_settings( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test reusing existing network settings.""" result, port = await pick_radio(RadioType.ezsp) @@ -1190,7 +1198,10 @@ def test_parse_uploaded_backup(process_mock) -> None: @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_non_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on non-EZSP coordinators.""" result, port = await pick_radio(RadioType.znp) @@ -1222,7 +1233,11 @@ async def test_formation_strategy_restore_manual_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + backup, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on EZSP coordinators (overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1262,7 +1277,10 @@ async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on EZSP coordinators (don't overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1303,7 +1321,7 @@ async def test_formation_strategy_restore_manual_backup_ezsp( async def test_formation_strategy_restore_manual_backup_invalid_upload( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test restoring a manual backup but an invalid file is uploaded.""" result, port = await pick_radio(RadioType.ezsp) @@ -1355,7 +1373,7 @@ def test_format_backup_choice() -> None: ) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_formation_strategy_restore_automatic_backup_ezsp( - pick_radio, mock_app, make_backup, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, make_backup, hass: HomeAssistant ) -> None: """Test restoring an automatic backup (EZSP radio).""" mock_app.backups.backups = [ @@ -1404,7 +1422,11 @@ async def test_formation_strategy_restore_automatic_backup_ezsp( @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @pytest.mark.parametrize("is_advanced", [True, False]) async def test_formation_strategy_restore_automatic_backup_non_ezsp( - is_advanced, pick_radio, mock_app, make_backup, hass: HomeAssistant + is_advanced, + pick_radio: RadioPicker, + mock_app: AsyncMock, + make_backup, + hass: HomeAssistant, ) -> None: """Test restoring an automatic backup (non-EZSP radio).""" mock_app.backups.backups = [ @@ -1457,7 +1479,11 @@ async def test_formation_strategy_restore_automatic_backup_non_ezsp( 
@patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_ezsp_restore_without_settings_change_ieee( - allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + backup, + hass: HomeAssistant, ) -> None: """Test a manual backup on EZSP coordinators without settings (no IEEE write).""" # Fail to load settings From c65f84532994210e3257ab09b1b9a018603fdda1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:40:57 +0200 Subject: [PATCH 0724/1635] Improve type hints in wemo tests (#123923) * Improve type hints in wemo tests * typo --- tests/components/wemo/conftest.py | 34 ++++++++++++++++++++++--------- 1 file changed, 24 insertions(+), 10 deletions(-) diff --git a/tests/components/wemo/conftest.py b/tests/components/wemo/conftest.py index 1316c37b62b..64bd89f4793 100644 --- a/tests/components/wemo/conftest.py +++ b/tests/components/wemo/conftest.py @@ -1,13 +1,15 @@ """Fixtures for pywemo.""" +from collections.abc import Generator import contextlib -from unittest.mock import create_autospec, patch +from unittest.mock import MagicMock, create_autospec, patch import pytest import pywemo from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -22,13 +24,13 @@ MOCK_INSIGHT_STATE_THRESHOLD_POWER = 8.0 @pytest.fixture(name="pywemo_model") -def pywemo_model_fixture(): +def pywemo_model_fixture() -> str: """Fixture containing a pywemo class name used by pywemo_device_fixture.""" return "LightSwitch" @pytest.fixture(name="pywemo_registry", autouse=True) -async def async_pywemo_registry_fixture(): +def async_pywemo_registry_fixture() -> Generator[MagicMock]: """Fixture for SubscriptionRegistry instances.""" registry = create_autospec(pywemo.SubscriptionRegistry, instance=True) @@ -52,7 +54,9 @@ def pywemo_discovery_responder_fixture(): @contextlib.contextmanager -def create_pywemo_device(pywemo_registry, pywemo_model): +def create_pywemo_device( + pywemo_registry: MagicMock, pywemo_model: str +) -> pywemo.WeMoDevice: """Create a WeMoDevice instance.""" cls = getattr(pywemo, pywemo_model) device = create_autospec(cls, instance=True) @@ -90,14 +94,18 @@ def create_pywemo_device(pywemo_registry, pywemo_model): @pytest.fixture(name="pywemo_device") -def pywemo_device_fixture(pywemo_registry, pywemo_model): +def pywemo_device_fixture( + pywemo_registry: MagicMock, pywemo_model: str +) -> Generator[pywemo.WeMoDevice]: """Fixture for WeMoDevice instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_device: yield pywemo_device @pytest.fixture(name="pywemo_dli_device") -def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): +def pywemo_dli_device_fixture( + pywemo_registry: MagicMock, pywemo_model: str +) -> Generator[pywemo.WeMoDevice]: """Fixture for Digital Loggers emulated instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_dli_device: pywemo_dli_device.model_name = "DLI emulated Belkin Socket" @@ -106,12 +114,14 @@ def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): @pytest.fixture(name="wemo_entity_suffix") -def wemo_entity_suffix_fixture(): +def wemo_entity_suffix_fixture() -> str: """Fixture to select a specific entity for 
wemo_entity.""" return "" -async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): +async def async_create_wemo_entity( + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Create a hass entity for a wemo device.""" assert await async_setup_component( hass, @@ -134,12 +144,16 @@ async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): @pytest.fixture(name="wemo_entity") -async def async_wemo_entity_fixture(hass, pywemo_device, wemo_entity_suffix): +async def async_wemo_entity_fixture( + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix) @pytest.fixture(name="wemo_dli_entity") -async def async_wemo_dli_entity_fixture(hass, pywemo_dli_device, wemo_entity_suffix): +async def async_wemo_dli_entity_fixture( + hass: HomeAssistant, pywemo_dli_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_dli_device, wemo_entity_suffix) From 1227cd8693da0a216942e878ebb642f6e9338602 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:41:15 +0200 Subject: [PATCH 0725/1635] Improve type hints in zerproc tests (#123925) --- tests/components/zerproc/test_light.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/zerproc/test_light.py b/tests/components/zerproc/test_light.py index c47f960b182..6e00cfbde4c 100644 --- a/tests/components/zerproc/test_light.py +++ b/tests/components/zerproc/test_light.py @@ -35,13 +35,13 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry(hass): +async def mock_entry() -> MockConfigEntry: """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light(hass, mock_entry): +async def mock_light(hass: HomeAssistant, mock_entry: MockConfigEntry) -> MagicMock: """Create a mock light entity.""" mock_entry.add_to_hass(hass) From ff4dac8f3a336f57927b448bef0f1dad5a964b07 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:42:53 +0200 Subject: [PATCH 0726/1635] Improve type hints in watttime tests (#123921) --- tests/components/watttime/conftest.py | 38 ++++++++++++++++++--------- 1 file changed, 25 insertions(+), 13 deletions(-) diff --git a/tests/components/watttime/conftest.py b/tests/components/watttime/conftest.py index 0b7403d45fc..650d07b36a1 100644 --- a/tests/components/watttime/conftest.py +++ b/tests/components/watttime/conftest.py @@ -1,6 +1,7 @@ """Define test fixtures for WattTime.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -20,13 +21,17 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, ) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture(name="client") -def client_fixture(get_grid_region, data_realtime_emissions): +def client_fixture( + get_grid_region: AsyncMock, 
data_realtime_emissions: JsonObjectType +) -> Mock: """Define an aiowatttime client.""" client = Mock() client.emissions.async_get_grid_region = get_grid_region @@ -37,7 +42,7 @@ def client_fixture(get_grid_region, data_realtime_emissions): @pytest.fixture(name="config_auth") -def config_auth_fixture(hass): +def config_auth_fixture() -> dict[str, Any]: """Define an auth config entry data fixture.""" return { CONF_USERNAME: "user", @@ -46,7 +51,7 @@ def config_auth_fixture(hass): @pytest.fixture(name="config_coordinates") -def config_coordinates_fixture(hass): +def config_coordinates_fixture() -> dict[str, Any]: """Define a coordinates config entry data fixture.""" return { CONF_LATITUDE: 32.87336, @@ -55,7 +60,7 @@ def config_coordinates_fixture(hass): @pytest.fixture(name="config_location_type") -def config_location_type_fixture(hass): +def config_location_type_fixture() -> dict[str, Any]: """Define a location type config entry data fixture.""" return { CONF_LOCATION_TYPE: LOCATION_TYPE_COORDINATES, @@ -63,7 +68,9 @@ def config_location_type_fixture(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config_auth, config_coordinates): +def config_entry_fixture( + hass: HomeAssistant, config_auth: dict[str, Any], config_coordinates: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -82,25 +89,30 @@ def config_entry_fixture(hass, config_auth, config_coordinates): @pytest.fixture(name="data_grid_region", scope="package") -def data_grid_region_fixture(): +def data_grid_region_fixture() -> JsonObjectType: """Define grid region data.""" - return json.loads(load_fixture("grid_region_data.json", "watttime")) + return load_json_object_fixture("grid_region_data.json", "watttime") @pytest.fixture(name="data_realtime_emissions", scope="package") -def data_realtime_emissions_fixture(): +def data_realtime_emissions_fixture() -> JsonObjectType: """Define realtime emissions data.""" - return json.loads(load_fixture("realtime_emissions_data.json", "watttime")) + return load_json_object_fixture("realtime_emissions_data.json", "watttime") @pytest.fixture(name="get_grid_region") -def get_grid_region_fixture(data_grid_region): +def get_grid_region_fixture(data_grid_region: JsonObjectType) -> AsyncMock: """Define an aiowatttime method to get grid region data.""" return AsyncMock(return_value=data_grid_region) @pytest.fixture(name="setup_watttime") -async def setup_watttime_fixture(hass, client, config_auth, config_coordinates): +async def setup_watttime_fixture( + hass: HomeAssistant, + client: Mock, + config_auth: dict[str, Any], + config_coordinates: dict[str, Any], +) -> AsyncGenerator[None]: """Define a fixture to set up WattTime.""" with ( patch( From 04598c6fb12d0f20f1b3774924a80f3b41d7f1ef Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Wed, 14 Aug 2024 15:45:08 +0200 Subject: [PATCH 0727/1635] Use more snapshot in UniFi sensor tests (#122875) * Use more snapshot in UniFi sensor tests * Fix comment --- .../unifi/snapshots/test_sensor.ambr | 2121 ++++++++++++++++- tests/components/unifi/test_sensor.py | 452 ++-- 2 files changed, 2195 insertions(+), 378 deletions(-) diff --git a/tests/components/unifi/snapshots/test_sensor.ambr b/tests/components/unifi/snapshots/test_sensor.ambr index 496c386e2cc..3053f69d616 100644 --- a/tests/components/unifi/snapshots/test_sensor.ambr +++ b/tests/components/unifi/snapshots/test_sensor.ambr @@ -1,127 +1,2080 @@ # serializer version: 1 -# name: 
test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].1 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.device_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].2 - 'data_rate' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].3 - 'Wired client RX' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Device State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.device_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].4 - +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_temperature-20:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].5 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Device Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) # --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].6 - '1234.0' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Device Uptime', + }), + 'context': , + 'entity_id': 'sensor.device_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:00:00+00:00', + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].1 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC Power Budget', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ac_power_budget-01:02:03:04:05:ff', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].2 - 'data_rate' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro AC Power Budget', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1875.000', + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].3 - 'Wired client TX' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC Power Consumption', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ac_power_conumption-01:02:03:04:05:ff', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].4 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro AC Power Consumption', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '201.683', + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].5 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].6 - '5678.0' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU utilization', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cpu_utilization-01:02:03:04:05:ff', + 'unit_of_measurement': '%', + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].1 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro CPU utilization', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.4', + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].2 - 'timestamp' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory utilization', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'memory_utilization-01:02:03:04:05:ff', + 'unit_of_measurement': '%', + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].3 - 'Wired client Uptime' +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro Memory utilization', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.9', + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].4 - None +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2 Outlet Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet_power-01:02:03:04:05:ff_2', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].5 - None +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2 Outlet Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '73.827', + }) # --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].6 - '2020-09-14T14:41:45+00:00' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Dummy USP-PDU-Pro State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].1 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].2 - 'data_rate' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Dummy USP-PDU-Pro Uptime', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-12-18T05:36:58+00:00', + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].3 - 'Wireless client RX' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].4 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock-name Clients', + 'state_class': , + }), + 'context': , + 
'entity_id': 'sensor.mock_name_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].5 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloudflare WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cloudflare_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].6 - '2345.0' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Cloudflare WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloudflare WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cloudflare_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].1 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Cloudflare WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].2 - 'data_rate' +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_google_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Google WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'google_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].3 - 'Wireless client TX' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Google WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_google_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].4 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_google_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Google WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'google_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].5 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Google WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_google_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '53', + }) # --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].6 - '6789.0' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Microsoft WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'microsoft_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Microsoft WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].1 - +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_microsoft_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Microsoft WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'microsoft_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].2 - 'timestamp' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Microsoft WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_microsoft_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].3 - 'Wireless client Uptime' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].4 - None +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 1 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.56', + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].5 - None +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 1 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) # --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].6 - '2021-01-01T01:00:00+00:00' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 1 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 1 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 1 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 2 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 2 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.56', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 2 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 2 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 2 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 2 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_3_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 3 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 3 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_3_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_3_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 3 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_3', + 'unit_of_measurement': , + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 3 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_3_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 4 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 4 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 4 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 4 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 4 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 4 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'mock-name State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.mock_name_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', 
+ 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock-name Uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_name_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan_clients-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SSID 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'rx-00:00:00:00:00:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wired client RX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wired_client_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.wired_client_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'tx-00:00:00:00:00:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wired client TX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wired_client_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5678.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uptime-00:00:00:00:00:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Wired client Uptime', + }), + 'context': , + 'entity_id': 'sensor.wired_client_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-09-14T14:41:45+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'rx-00:00:00:00:00:02', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wireless client RX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wireless_client_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2345.0', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'tx-00:00:00:00:00:02', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wireless client TX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wireless_client_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6789.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uptime-00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Wireless client Uptime', + }), + 'context': , + 'entity_id': 'sensor.wireless_client_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:00:00+00:00', + }) # --- diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index 5af4b297847..cc51c31fc8b 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -10,14 +10,12 @@ from aiounifi.models.device import DeviceState from aiounifi.models.message import MessageKey from freezegun.api import FrozenDateTimeFactory, freeze_time import pytest -from syrupy.assertion import SnapshotAssertion +from syrupy import SnapshotAssertion from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN, SCAN_INTERVAL, SensorDeviceClass, - SensorStateClass, ) from homeassistant.components.unifi.const import ( CONF_ALLOW_BANDWIDTH_SENSORS, @@ -29,13 +27,7 @@ from homeassistant.components.unifi.const import ( DEVICE_STATES, ) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import ( - ATTR_DEVICE_CLASS, 
- ATTR_FRIENDLY_NAME, - ATTR_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, - EntityCategory, -) +from homeassistant.const import ATTR_DEVICE_CLASS, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler @@ -47,7 +39,26 @@ from .conftest import ( WebsocketStateManager, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +WIRED_CLIENT = { + "hostname": "Wired client", + "is_wired": True, + "mac": "00:00:00:00:00:01", + "oui": "Producer", + "wired-rx_bytes-r": 1234000000, + "wired-tx_bytes-r": 5678000000, + "uptime": 1600094505, +} +WIRELESS_CLIENT = { + "is_wired": False, + "mac": "00:00:00:00:00:02", + "name": "Wireless client", + "oui": "Producer", + "rx_bytes-r": 2345000000.0, + "tx_bytes-r": 6789000000.0, + "uptime": 60, +} DEVICE_1 = { "board_rev": 2, @@ -321,6 +332,114 @@ PDU_OUTLETS_UPDATE_DATA = [ ] +@pytest.mark.parametrize( + "config_entry_options", + [ + { + CONF_ALLOW_BANDWIDTH_SENSORS: True, + CONF_ALLOW_UPTIME_SENSORS: True, + } + ], +) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) +@pytest.mark.parametrize( + "device_payload", + [ + [ + DEVICE_1, + PDU_DEVICE_1, + { # Temperature + "board_rev": 3, + "device_id": "mock-id", + "general_temperature": 30, + "has_fan": True, + "has_temperature": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "20:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + }, + { # Latency monitors + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "uptime_stats": { + "WAN": { + "availability": 100.0, + "latency_average": 39, + "monitors": [ + { + "availability": 100.0, + "latency_average": 56, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 53, + "target": "google.com", + "type": "icmp", + }, + { + "availability": 100.0, + "latency_average": 30, + "target": "1.1.1.1", + "type": "icmp", + }, + ], + }, + "WAN2": { + "monitors": [ + { + "availability": 0.0, + "target": "www.microsoft.com", + "type": "icmp", + }, + { + "availability": 0.0, + "target": "google.com", + "type": "icmp", + }, + {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"}, + ], + }, + }, + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + }, + ] + ], +) +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2021-01-01 01:01:00") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory, + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SENSOR]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + @pytest.mark.parametrize( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], @@ -342,29 +461,7 @@ async def test_no_clients(hass: HomeAssistant) -> None: 
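The new test_entity_and_device_data test added above replaces dozens of hand-written attribute assertions with syrupy snapshots: the snapshot_platform helper imported from tests.common walks every entity registry entry created by the config entry and compares both the registry entry and its state against the stored .ambr snapshots shown earlier in this patch, which is why those snapshots are keyed as "<entity_id>-entry" and "<entity_id>-state". A rough sketch of that pattern follows; it assumes the helper's internals rather than quoting them, and the function name snapshot_platform_sketch is illustrative only:

    # Hedged sketch of a snapshot_platform-style helper; the real
    # tests.common.snapshot_platform may differ in details, but this shape
    # explains the "<entity_id>-entry" / "<entity_id>-state" keys in the .ambr file.
    from homeassistant.core import HomeAssistant
    from homeassistant.helpers import entity_registry as er
    from syrupy import SnapshotAssertion


    async def snapshot_platform_sketch(
        hass: HomeAssistant,
        entity_registry: er.EntityRegistry,
        snapshot: SnapshotAssertion,
        config_entry_id: str,
    ) -> None:
        """Snapshot every registry entry and state created by a config entry."""
        entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
        assert entries, "the platform under test created no entities"
        for entry in entries:
            # One named snapshot per registry entry, one per state object.
            assert entry == snapshot(name=f"{entry.entity_id}-entry")
            assert hass.states.get(entry.entity_id) == snapshot(
                name=f"{entry.entity_id}-state"
            )

After an intentional behavior change, the snapshots are regenerated with syrupy's update flag (for example: pytest tests/components/unifi/test_sensor.py --snapshot-update) and the resulting .ambr diff is reviewed like any other code change.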
} ], ) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes-r": 1234000000, - "wired-tx_bytes-r": 5678000000, - }, - { - "is_wired": False, - "mac": "00:00:00:00:00:02", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes-r": 2345000000.0, - "tx_bytes-r": 6789000000.0, - }, - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) async def test_bandwidth_sensors( hass: HomeAssistant, mock_websocket_message: WebsocketMessageMock, @@ -373,33 +470,8 @@ async def test_bandwidth_sensors( client_payload: list[dict[str, Any]], ) -> None: """Verify that bandwidth sensors are working as expected.""" - assert len(hass.states.async_all()) == 5 - assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4 - - # Verify sensor attributes and state - - wrx_sensor = hass.states.get("sensor.wired_client_rx") - assert wrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wrx_sensor.state == "1234.0" - - wtx_sensor = hass.states.get("sensor.wired_client_tx") - assert wtx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wtx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wtx_sensor.state == "5678.0" - - wlrx_sensor = hass.states.get("sensor.wireless_client_rx") - assert wlrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wlrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wlrx_sensor.state == "2345.0" - - wltx_sensor = hass.states.get("sensor.wireless_client_tx") - assert wltx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert wltx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert wltx_sensor.state == "6789.0" - # Verify state update - wireless_client = client_payload[1] + wireless_client = deepcopy(client_payload[1]) wireless_client["rx_bytes-r"] = 3456000000 wireless_client["tx_bytes-r"] = 7891000000 @@ -468,31 +540,7 @@ async def test_bandwidth_sensors( "config_entry_options", [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}], ) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes": 1234000000, - "wired-tx_bytes": 5678000000, - "uptime": 1600094505, - }, - { - "is_wired": False, - "mac": "00:00:00:00:00:02", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes": 2345000000, - "tx_bytes": 6789000000, - "uptime": 60, - }, - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_remove_sensors( @@ -535,7 +583,6 @@ async def test_poe_port_switches( ent_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_poe_power") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -600,7 +647,6 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_client_sensors( hass: HomeAssistant, - entity_registry: er.EntityRegistry, config_entry_factory: 
ConfigEntryFactoryType, mock_websocket_message: WebsocketMessageMock, mock_websocket_state: WebsocketStateManager, @@ -633,14 +679,8 @@ async def test_wlan_client_sensors( assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("sensor.ssid_1") - assert ent_reg_entry.unique_id == "wlan_clients-012345678910111213141516" - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - # Validate state object - ssid_1 = hass.states.get("sensor.ssid_1") - assert ssid_1 is not None - assert ssid_1.state == "1" + assert hass.states.get("sensor.ssid_1").state == "1" # Verify state update - increasing number wireless_client_1 = client_payload[0] @@ -709,7 +749,6 @@ async def test_wlan_client_sensors( @pytest.mark.parametrize( ( "entity_id", - "expected_unique_id", "expected_value", "changed_data", "expected_update_value", @@ -717,21 +756,18 @@ async def test_wlan_client_sensors( [ ( "dummy_usp_pdu_pro_outlet_2_outlet_power", - "outlet_power-01:02:03:04:05:ff_2", "73.827", {"outlet_table": PDU_OUTLETS_UPDATE_DATA}, "123.45", ), ( "dummy_usp_pdu_pro_ac_power_budget", - "ac_power_budget-01:02:03:04:05:ff", "1875.000", None, None, ), ( "dummy_usp_pdu_pro_ac_power_consumption", - "ac_power_conumption-01:02:03:04:05:ff", "201.683", {"outlet_ac_power_consumption": "456.78"}, "456.78", @@ -742,26 +778,18 @@ async def test_wlan_client_sensors( @pytest.mark.usefixtures("config_entry_setup") async def test_outlet_power_readings( hass: HomeAssistant, - entity_registry: er.EntityRegistry, mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], entity_id: str, - expected_unique_id: str, - expected_value: Any, - changed_data: dict | None, - expected_update_value: Any, + expected_value: str, + changed_data: dict[str, Any] | None, + expected_update_value: str | None, ) -> None: """Test the outlet power reporting on PDU devices.""" assert len(hass.states.async_all()) == 13 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 7 - ent_reg_entry = entity_registry.async_get(f"sensor.{entity_id}") - assert ent_reg_entry.unique_id == expected_unique_id - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - - sensor_data = hass.states.get(f"sensor.{entity_id}") - assert sensor_data.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER - assert sensor_data.state == expected_value + assert hass.states.get(f"sensor.{entity_id}").state == expected_value if changed_data is not None: updated_device_data = deepcopy(device_payload[0]) @@ -770,8 +798,7 @@ async def test_outlet_power_readings( mock_websocket_message(message=MessageKey.DEVICE, data=updated_device_data) await hass.async_block_till_done() - sensor_data = hass.states.get(f"sensor.{entity_id}") - assert sensor_data.state == expected_update_value + assert hass.states.get(f"sensor.{entity_id}").state == expected_update_value @pytest.mark.parametrize( @@ -804,17 +831,12 @@ async def test_outlet_power_readings( @pytest.mark.usefixtures("config_entry_setup") async def test_device_temperature( hass: HomeAssistant, - entity_registry: er.EntityRegistry, mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Verify that temperature sensors are working as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 assert hass.states.get("sensor.device_temperature").state == "30" - assert ( - entity_registry.async_get("sensor.device_temperature").entity_category - is EntityCategory.DIAGNOSTIC - ) # Verify new event 
change temperature device = device_payload[0] @@ -859,10 +881,6 @@ async def test_device_state( ) -> None: """Verify that state sensors are working as expected.""" assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 - assert ( - entity_registry.async_get("sensor.device_state").entity_category - is EntityCategory.DIAGNOSTIC - ) device = device_payload[0] for i in list(map(int, DeviceState)): @@ -890,7 +908,6 @@ async def test_device_state( @pytest.mark.usefixtures("config_entry_setup") async def test_device_system_stats( hass: HomeAssistant, - entity_registry: er.EntityRegistry, mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: @@ -901,16 +918,6 @@ async def test_device_system_stats( assert hass.states.get("sensor.device_cpu_utilization").state == "5.8" assert hass.states.get("sensor.device_memory_utilization").state == "31.1" - assert ( - entity_registry.async_get("sensor.device_cpu_utilization").entity_category - is EntityCategory.DIAGNOSTIC - ) - - assert ( - entity_registry.async_get("sensor.device_memory_utilization").entity_category - is EntityCategory.DIAGNOSTIC - ) - # Verify new event change system-stats device = device_payload[0] device["system-stats"] = {"cpu": 7.7, "mem": 33.3, "uptime": 7316} @@ -997,11 +1004,9 @@ async def test_bandwidth_port_sensors( p1rx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_rx") assert p1rx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert p1rx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC p1tx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_tx") assert p1tx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert p1tx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity entity_registry.async_update_entity( @@ -1028,26 +1033,11 @@ async def test_bandwidth_port_sensors( assert len(hass.states.async_all()) == 9 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6 - # Verify sensor attributes and state - p1rx_sensor = hass.states.get("sensor.mock_name_port_1_rx") - assert p1rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p1rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p1rx_sensor.state == "0.00921" - - p1tx_sensor = hass.states.get("sensor.mock_name_port_1_tx") - assert p1tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p1tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p1tx_sensor.state == "0.04089" - - p2rx_sensor = hass.states.get("sensor.mock_name_port_2_rx") - assert p2rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p2rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p2rx_sensor.state == "0.01229" - - p2tx_sensor = hass.states.get("sensor.mock_name_port_2_tx") - assert p2tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE - assert p2tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - assert p2tx_sensor.state == "0.02892" + # Verify sensor state + assert hass.states.get("sensor.mock_name_port_1_rx").state == "0.00921" + assert hass.states.get("sensor.mock_name_port_1_tx").state == "0.04089" + assert hass.states.get("sensor.mock_name_port_2_rx").state == "0.01229" + assert hass.states.get("sensor.mock_name_port_2_tx").state == "0.02892" # Verify state update device_1 = device_payload[0] @@ -1141,13 +1131,9 @@ async def 
test_device_client_sensors( ent_reg_entry = entity_registry.async_get("sensor.wired_device_clients") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - assert ent_reg_entry.unique_id == "device_clients-01:00:00:00:00:00" ent_reg_entry = entity_registry.async_get("sensor.wireless_device_clients") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC - assert ent_reg_entry.unique_id == "device_clients-02:00:00:00:00:00" # Enable entity entity_registry.async_update_entity( @@ -1184,72 +1170,6 @@ async def test_device_client_sensors( assert hass.states.get("sensor.wireless_device_clients").state == "0" -WIRED_CLIENT = { - "hostname": "Wired client", - "is_wired": True, - "mac": "00:00:00:00:00:01", - "oui": "Producer", - "wired-rx_bytes-r": 1234000000, - "wired-tx_bytes-r": 5678000000, - "uptime": 1600094505, -} -WIRELESS_CLIENT = { - "is_wired": False, - "mac": "00:00:00:00:00:01", - "name": "Wireless client", - "oui": "Producer", - "rx_bytes-r": 2345000000.0, - "tx_bytes-r": 6789000000.0, - "uptime": 60, -} - - -@pytest.mark.parametrize( - "config_entry_options", - [ - { - CONF_ALLOW_BANDWIDTH_SENSORS: True, - CONF_ALLOW_UPTIME_SENSORS: True, - CONF_TRACK_CLIENTS: False, - CONF_TRACK_DEVICES: False, - } - ], -) -@pytest.mark.parametrize( - ("client_payload", "entity_id", "unique_id_prefix"), - [ - ([WIRED_CLIENT], "sensor.wired_client_rx", "rx-"), - ([WIRED_CLIENT], "sensor.wired_client_tx", "tx-"), - ([WIRED_CLIENT], "sensor.wired_client_uptime", "uptime-"), - ([WIRELESS_CLIENT], "sensor.wireless_client_rx", "rx-"), - ([WIRELESS_CLIENT], "sensor.wireless_client_tx", "tx-"), - ([WIRELESS_CLIENT], "sensor.wireless_client_uptime", "uptime-"), - ], -) -@pytest.mark.usefixtures("config_entry_setup") -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -@pytest.mark.freeze_time("2021-01-01 01:01:00") -async def test_sensor_sources( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - snapshot: SnapshotAssertion, - entity_id: str, - unique_id_prefix: str, -) -> None: - """Test sensor sources and the entity description.""" - ent_reg_entry = entity_registry.async_get(entity_id) - assert ent_reg_entry.unique_id.startswith(unique_id_prefix) - assert ent_reg_entry.unique_id == snapshot - assert ent_reg_entry.entity_category == snapshot - - state = hass.states.get(entity_id) - assert state.attributes.get(ATTR_DEVICE_CLASS) == snapshot - assert state.attributes.get(ATTR_FRIENDLY_NAME) == snapshot - assert state.attributes.get(ATTR_STATE_CLASS) == snapshot - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == snapshot - assert state.state == snapshot - - async def _test_uptime_entity( hass: HomeAssistant, freezer: FrozenDateTimeFactory, @@ -1306,19 +1226,7 @@ async def _test_uptime_entity( @pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_UPTIME_SENSORS: True}]) -@pytest.mark.parametrize( - "client_payload", - [ - [ - { - "mac": "00:00:00:00:00:01", - "name": "client1", - "oui": "Producer", - "uptime": 0, - } - ] - ], -) +@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT]]) @pytest.mark.parametrize( ("initial_uptime", "event_uptime", "small_variation_uptime", "new_uptime"), [ @@ -1331,7 +1239,6 @@ async def _test_uptime_entity( @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_client_uptime( hass: HomeAssistant, - entity_registry: er.EntityRegistry, freezer: 
FrozenDateTimeFactory, config_entry_options: MappingProxyType[str, Any], config_entry_factory: ConfigEntryFactoryType, @@ -1349,7 +1256,7 @@ async def test_client_uptime( mock_websocket_message, config_entry_factory, payload=client_payload[0], - entity_id="sensor.client1_uptime", + entity_id="sensor.wired_client_uptime", message_key=MessageKey.CLIENT, initial_uptime=initial_uptime, event_uptime=event_uptime, @@ -1357,18 +1264,13 @@ async def test_client_uptime( new_uptime=new_uptime, ) - assert ( - entity_registry.async_get("sensor.client1_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - # Disable option options = deepcopy(config_entry_options) options[CONF_ALLOW_UPTIME_SENSORS] = False hass.config_entries.async_update_entry(config_entry, options=options) await hass.async_block_till_done() - assert hass.states.get("sensor.client1_uptime") is None + assert hass.states.get("sensor.wired_client_uptime") is None # Enable option options = deepcopy(config_entry_options) @@ -1376,34 +1278,10 @@ async def test_client_uptime( hass.config_entries.async_update_entry(config_entry, options=options) await hass.async_block_till_done() - assert hass.states.get("sensor.client1_uptime") + assert hass.states.get("sensor.wired_client_uptime") -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - } - ] - ], -) +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) async def test_device_uptime( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -1419,7 +1297,7 @@ async def test_device_uptime( mock_websocket_message, config_entry_factory, payload=device_payload[0], - entity_id="sensor.device_uptime", + entity_id="sensor.mock_name_uptime", message_key=MessageKey.DEVICE, initial_uptime=60, event_uptime=240, @@ -1427,11 +1305,6 @@ async def test_device_uptime( new_uptime=60, ) - assert ( - entity_registry.async_get("sensor.device_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - @pytest.mark.parametrize( "device_payload", @@ -1495,7 +1368,7 @@ async def test_device_uptime( ], ) @pytest.mark.parametrize( - ("entity_id", "state", "updated_state", "index_to_update"), + ("monitor_id", "state", "updated_state", "index_to_update"), [ # Microsoft ("microsoft_wan", "56", "20", 0), @@ -1511,24 +1384,22 @@ async def test_wan_monitor_latency( entity_registry: er.EntityRegistry, mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], - entity_id: str, + monitor_id: str, state: str, updated_state: str, index_to_update: int, ) -> None: """Verify that wan latency sensors are working as expected.""" + entity_id = f"sensor.mock_name_{monitor_id}_latency" assert len(hass.states.async_all()) == 6 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - latency_entry = entity_registry.async_get(f"sensor.mock_name_{entity_id}_latency") + latency_entry = entity_registry.async_get(entity_id) assert latency_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert latency_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity - entity_registry.async_update_entity( - entity_id=f"sensor.mock_name_{entity_id}_latency", disabled_by=None - ) + entity_registry.async_update_entity(entity_id=entity_id, 
disabled_by=None) await hass.async_block_till_done() @@ -1541,13 +1412,8 @@ async def test_wan_monitor_latency( assert len(hass.states.async_all()) == 7 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 - # Verify sensor attributes and state - latency_entry = hass.states.get(f"sensor.mock_name_{entity_id}_latency") - assert latency_entry.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DURATION - assert ( - latency_entry.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT - ) - assert latency_entry.state == state + # Verify sensor state + assert hass.states.get(entity_id).state == state # Verify state update device = device_payload[0] @@ -1557,9 +1423,7 @@ async def test_wan_monitor_latency( mock_websocket_message(message=MessageKey.DEVICE, data=device) - assert ( - hass.states.get(f"sensor.mock_name_{entity_id}_latency").state == updated_state - ) + assert hass.states.get(entity_id).state == updated_state @pytest.mark.parametrize( From e33a7ecefa0921a2d54bed25a5b1d343b7de86cb Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 15:54:00 +0200 Subject: [PATCH 0728/1635] Improve type hints in websocket_api tests (#123922) --- tests/components/websocket_api/test_auth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 62298098adc..20a728cf3cd 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -26,7 +26,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture -def track_connected(hass): +def track_connected(hass: HomeAssistant) -> dict[str, list[int]]: """Track connected and disconnected events.""" connected_evt = [] From c761d755501bbc6992697ccfe41cf7cc8c51d0ac Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 14 Aug 2024 15:55:59 +0200 Subject: [PATCH 0729/1635] Fix blocking I/O of SSLContext.load_default_certs in Ecovacs (#123856) --- .../components/ecovacs/config_flow.py | 14 +++++--- .../components/ecovacs/controller.py | 36 +++++++++++-------- 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/ecovacs/config_flow.py b/homeassistant/components/ecovacs/config_flow.py index e19d9994f9e..2637dbbddf8 100644 --- a/homeassistant/components/ecovacs/config_flow.py +++ b/homeassistant/components/ecovacs/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from functools import partial import logging import ssl from typing import Any @@ -100,11 +101,14 @@ async def _validate_input( if not user_input.get(CONF_VERIFY_MQTT_CERTIFICATE, True) and mqtt_url: ssl_context = get_default_no_verify_context() - mqtt_config = create_mqtt_config( - device_id=device_id, - country=country, - override_mqtt_url=mqtt_url, - ssl_context=ssl_context, + mqtt_config = await hass.async_add_executor_job( + partial( + create_mqtt_config, + device_id=device_id, + country=country, + override_mqtt_url=mqtt_url, + ssl_context=ssl_context, + ) ) client = MqttClient(mqtt_config, authenticator) diff --git a/homeassistant/components/ecovacs/controller.py b/homeassistant/components/ecovacs/controller.py index c22fb240536..ec67845cf9f 100644 --- a/homeassistant/components/ecovacs/controller.py +++ b/homeassistant/components/ecovacs/controller.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Mapping +from functools import partial import logging import ssl from typing import Any @@ 
-64,32 +65,28 @@ class EcovacsController: if not config.get(CONF_VERIFY_MQTT_CERTIFICATE, True) and mqtt_url: ssl_context = get_default_no_verify_context() - self._mqtt = MqttClient( - create_mqtt_config( - device_id=self._device_id, - country=country, - override_mqtt_url=mqtt_url, - ssl_context=ssl_context, - ), - self._authenticator, + self._mqtt_config_fn = partial( + create_mqtt_config, + device_id=self._device_id, + country=country, + override_mqtt_url=mqtt_url, + ssl_context=ssl_context, ) + self._mqtt_client: MqttClient | None = None self._added_legacy_entities: set[str] = set() async def initialize(self) -> None: """Init controller.""" - mqtt_config_verfied = False try: devices = await self._api_client.get_devices() credentials = await self._authenticator.authenticate() for device_config in devices: if isinstance(device_config, DeviceInfo): # MQTT device - if not mqtt_config_verfied: - await self._mqtt.verify_config() - mqtt_config_verfied = True device = Device(device_config, self._authenticator) - await device.initialize(self._mqtt) + mqtt = await self._get_mqtt_client() + await device.initialize(mqtt) self._devices.append(device) else: # Legacy device @@ -116,7 +113,8 @@ class EcovacsController: await device.teardown() for legacy_device in self._legacy_devices: await self._hass.async_add_executor_job(legacy_device.disconnect) - await self._mqtt.disconnect() + if self._mqtt_client is not None: + await self._mqtt_client.disconnect() await self._authenticator.teardown() def add_legacy_entity(self, device: VacBot, component: str) -> None: @@ -127,6 +125,16 @@ class EcovacsController: """Check if legacy entity is added.""" return f"{device.vacuum['did']}_{component}" in self._added_legacy_entities + async def _get_mqtt_client(self) -> MqttClient: + """Return validated MQTT client.""" + if self._mqtt_client is None: + config = await self._hass.async_add_executor_job(self._mqtt_config_fn) + mqtt = MqttClient(config, self._authenticator) + await mqtt.verify_config() + self._mqtt_client = mqtt + + return self._mqtt_client + @property def devices(self) -> list[Device]: """Return devices.""" From ae6ac31d02ee25994cc7dd623de8859f560b2831 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 16:02:47 +0200 Subject: [PATCH 0730/1635] Improve type hints in smarttub tests (#123910) --- tests/components/smarttub/conftest.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/components/smarttub/conftest.py b/tests/components/smarttub/conftest.py index c05762a903d..06780f8fb1e 100644 --- a/tests/components/smarttub/conftest.py +++ b/tests/components/smarttub/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for smarttub tests.""" +from typing import Any from unittest.mock import create_autospec, patch import pytest @@ -7,19 +8,20 @@ import smarttub from homeassistant.components.smarttub.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @pytest.fixture -def config_data(): +def config_data() -> dict[str, Any]: """Provide configuration data for tests.""" return {CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password"} @pytest.fixture -def config_entry(config_data): +def config_entry(config_data: dict[str, Any]) -> MockConfigEntry: """Create a mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -29,7 +31,7 @@ def config_entry(config_data): 
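Stepping back to the Ecovacs fix above: building the MQTT configuration triggers SSLContext.load_default_certs, which reads certificate files from disk, so it must not run on the event loop. The patch therefore binds the keyword arguments with functools.partial, defers the call to hass.async_add_executor_job, and builds the MQTT client lazily only when the first cloud device needs it. Below is a minimal, self-contained sketch of that executor pattern; build_config is an assumed stand-in for any blocking callable, not a Home Assistant or deebot-client API:

    # Hedged sketch: run a blocking, keyword-argument callable off the event loop.
    # hass.async_add_executor_job and loop.run_in_executor only forward positional
    # arguments, which is why the patch wraps the call in functools.partial.
    import asyncio
    from functools import partial


    def build_config(device_id: str, country: str) -> dict[str, str]:
        """Stand-in for blocking work such as loading default SSL certificates."""
        return {"device_id": device_id, "country": country}


    async def async_build_config(device_id: str, country: str) -> dict[str, str]:
        """Build the config in the default executor instead of on the event loop."""
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(
            None, partial(build_config, device_id=device_id, country=country)
        )


    if __name__ == "__main__":
        print(asyncio.run(async_build_config("device-1", "it")))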
@pytest.fixture -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up the component.""" assert await async_setup_component(hass, DOMAIN, {}) is True @@ -162,7 +164,7 @@ def mock_api(account, spa): @pytest.fixture -async def setup_entry(hass, config_entry): +async def setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Initialize the config entry.""" config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) From bba298a44d6850f762028a88fe982ad8bfa05524 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 14 Aug 2024 16:08:34 +0200 Subject: [PATCH 0731/1635] Add favorite position buttons to Motion Blinds (#123489) --- .../components/motion_blinds/button.py | 71 +++++++++++++++++++ .../components/motion_blinds/const.py | 2 +- .../components/motion_blinds/cover.py | 56 +-------------- .../components/motion_blinds/entity.py | 53 ++++++++++++++ .../components/motion_blinds/icons.json | 10 +++ .../components/motion_blinds/strings.json | 8 +++ 6 files changed, 145 insertions(+), 55 deletions(-) create mode 100644 homeassistant/components/motion_blinds/button.py diff --git a/homeassistant/components/motion_blinds/button.py b/homeassistant/components/motion_blinds/button.py new file mode 100644 index 00000000000..30f1cd53e6f --- /dev/null +++ b/homeassistant/components/motion_blinds/button.py @@ -0,0 +1,71 @@ +"""Support for Motionblinds button entity using their WLAN API.""" + +from __future__ import annotations + +from motionblinds.motion_blinds import LimitStatus, MotionBlind + +from homeassistant.components.button import ButtonEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN, KEY_COORDINATOR, KEY_GATEWAY +from .coordinator import DataUpdateCoordinatorMotionBlinds +from .entity import MotionCoordinatorEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Perform the setup for Motionblinds.""" + entities: list[ButtonEntity] = [] + motion_gateway = hass.data[DOMAIN][config_entry.entry_id][KEY_GATEWAY] + coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR] + + for blind in motion_gateway.device_list.values(): + if blind.limit_status == LimitStatus.Limit3Detected.name: + entities.append(MotionGoFavoriteButton(coordinator, blind)) + entities.append(MotionSetFavoriteButton(coordinator, blind)) + + async_add_entities(entities) + + +class MotionGoFavoriteButton(MotionCoordinatorEntity, ButtonEntity): + """Button entity to go to the favorite position of a blind.""" + + _attr_translation_key = "go_favorite" + + def __init__( + self, coordinator: DataUpdateCoordinatorMotionBlinds, blind: MotionBlind + ) -> None: + """Initialize the Motion Button.""" + super().__init__(coordinator, blind) + self._attr_unique_id = f"{blind.mac}-go-favorite" + + async def async_press(self) -> None: + """Execute the button action.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Go_favorite_position) + await self.async_request_position_till_stop() + + +class MotionSetFavoriteButton(MotionCoordinatorEntity, ButtonEntity): + """Button entity to set the favorite position of a blind to the current position.""" + + _attr_entity_category = EntityCategory.CONFIG + 
_attr_translation_key = "set_favorite" + + def __init__( + self, coordinator: DataUpdateCoordinatorMotionBlinds, blind: MotionBlind + ) -> None: + """Initialize the Motion Button.""" + super().__init__(coordinator, blind) + self._attr_unique_id = f"{blind.mac}-set-favorite" + + async def async_press(self) -> None: + """Execute the button action.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_favorite_position) diff --git a/homeassistant/components/motion_blinds/const.py b/homeassistant/components/motion_blinds/const.py index e089fd17943..96067d7ceb0 100644 --- a/homeassistant/components/motion_blinds/const.py +++ b/homeassistant/components/motion_blinds/const.py @@ -6,7 +6,7 @@ DOMAIN = "motion_blinds" MANUFACTURER = "Motionblinds, Coulisse B.V." DEFAULT_GATEWAY_NAME = "Motionblinds Gateway" -PLATFORMS = [Platform.COVER, Platform.SENSOR] +PLATFORMS = [Platform.BUTTON, Platform.COVER, Platform.SENSOR] CONF_WAIT_FOR_PUSH = "wait_for_push" CONF_INTERFACE = "interface" diff --git a/homeassistant/components/motion_blinds/cover.py b/homeassistant/components/motion_blinds/cover.py index 2cbee96adb7..72b78915bad 100644 --- a/homeassistant/components/motion_blinds/cover.py +++ b/homeassistant/components/motion_blinds/cover.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any -from motionblinds import DEVICE_TYPES_WIFI, BlindType +from motionblinds import BlindType import voluptuous as vol from homeassistant.components.cover import ( @@ -16,10 +16,9 @@ from homeassistant.components.cover import ( CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import CALLBACK_TYPE, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import VolDictType from .const import ( @@ -31,8 +30,6 @@ from .const import ( KEY_GATEWAY, SERVICE_SET_ABSOLUTE_POSITION, UPDATE_DELAY_STOP, - UPDATE_INTERVAL_MOVING, - UPDATE_INTERVAL_MOVING_WIFI, ) from .entity import MotionCoordinatorEntity @@ -179,14 +176,6 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity): """Initialize the blind.""" super().__init__(coordinator, blind) - self._requesting_position: CALLBACK_TYPE | None = None - self._previous_positions = [] - - if blind.device_type in DEVICE_TYPES_WIFI: - self._update_interval_moving = UPDATE_INTERVAL_MOVING_WIFI - else: - self._update_interval_moving = UPDATE_INTERVAL_MOVING - self._attr_device_class = device_class self._attr_unique_id = blind.mac @@ -218,47 +207,6 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity): return None return self._blind.position == 100 - async def async_scheduled_update_request(self, *_): - """Request a state update from the blind at a scheduled point in time.""" - # add the last position to the list and keep the list at max 2 items - self._previous_positions.append(self.current_cover_position) - if len(self._previous_positions) > 2: - del self._previous_positions[: len(self._previous_positions) - 2] - - async with self._api_lock: - await self.hass.async_add_executor_job(self._blind.Update_trigger) - - self.async_write_ha_state() - - if len(self._previous_positions) < 2 or not all( - self.current_cover_position == prev_position - for prev_position in self._previous_positions - ): - # keep updating the 
position @self._update_interval_moving until the position does not change. - self._requesting_position = async_call_later( - self.hass, - self._update_interval_moving, - self.async_scheduled_update_request, - ) - else: - self._previous_positions = [] - self._requesting_position = None - - async def async_request_position_till_stop(self, delay=None): - """Request the position of the blind every self._update_interval_moving seconds until it stops moving.""" - if delay is None: - delay = self._update_interval_moving - - self._previous_positions = [] - if self.current_cover_position is None: - return - if self._requesting_position is not None: - self._requesting_position() - - self._requesting_position = async_call_later( - self.hass, delay, self.async_scheduled_update_request - ) - async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" async with self._api_lock: diff --git a/homeassistant/components/motion_blinds/entity.py b/homeassistant/components/motion_blinds/entity.py index 4734d4d9a65..483a638a0eb 100644 --- a/homeassistant/components/motion_blinds/entity.py +++ b/homeassistant/components/motion_blinds/entity.py @@ -5,8 +5,10 @@ from __future__ import annotations from motionblinds import DEVICE_TYPES_GATEWAY, DEVICE_TYPES_WIFI, MotionGateway from motionblinds.motion_blinds import MotionBlind +from homeassistant.core import CALLBACK_TYPE from homeassistant.helpers import device_registry as dr from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.event import async_call_later from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( @@ -15,6 +17,8 @@ from .const import ( DOMAIN, KEY_GATEWAY, MANUFACTURER, + UPDATE_INTERVAL_MOVING, + UPDATE_INTERVAL_MOVING_WIFI, ) from .coordinator import DataUpdateCoordinatorMotionBlinds from .gateway import device_name @@ -36,6 +40,14 @@ class MotionCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinatorMotionBlind self._blind = blind self._api_lock = coordinator.api_lock + self._requesting_position: CALLBACK_TYPE | None = None + self._previous_positions: list[int | dict | None] = [] + + if blind.device_type in DEVICE_TYPES_WIFI: + self._update_interval_moving = UPDATE_INTERVAL_MOVING_WIFI + else: + self._update_interval_moving = UPDATE_INTERVAL_MOVING + if blind.device_type in DEVICE_TYPES_GATEWAY: gateway = blind else: @@ -95,3 +107,44 @@ class MotionCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinatorMotionBlind """Unsubscribe when removed.""" self._blind.Remove_callback(self.unique_id) await super().async_will_remove_from_hass() + + async def async_scheduled_update_request(self, *_) -> None: + """Request a state update from the blind at a scheduled point in time.""" + # add the last position to the list and keep the list at max 2 items + self._previous_positions.append(self._blind.position) + if len(self._previous_positions) > 2: + del self._previous_positions[: len(self._previous_positions) - 2] + + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Update_trigger) + + self.coordinator.async_update_listeners() + + if len(self._previous_positions) < 2 or not all( + self._blind.position == prev_position + for prev_position in self._previous_positions + ): + # keep updating the position @self._update_interval_moving until the position does not change. 
+ self._requesting_position = async_call_later( + self.hass, + self._update_interval_moving, + self.async_scheduled_update_request, + ) + else: + self._previous_positions = [] + self._requesting_position = None + + async def async_request_position_till_stop(self, delay: int | None = None) -> None: + """Request the position of the blind every self._update_interval_moving seconds until it stops moving.""" + if delay is None: + delay = self._update_interval_moving + + self._previous_positions = [] + if self._blind.position is None: + return + if self._requesting_position is not None: + self._requesting_position() + + self._requesting_position = async_call_later( + self.hass, delay, self.async_scheduled_update_request + ) diff --git a/homeassistant/components/motion_blinds/icons.json b/homeassistant/components/motion_blinds/icons.json index a61c36e3f00..9e1cd613e5b 100644 --- a/homeassistant/components/motion_blinds/icons.json +++ b/homeassistant/components/motion_blinds/icons.json @@ -1,4 +1,14 @@ { + "entity": { + "button": { + "go_favorite": { + "default": "mdi:star" + }, + "set_favorite": { + "default": "mdi:star-cog" + } + } + }, "services": { "set_absolute_position": "mdi:set-square" } diff --git a/homeassistant/components/motion_blinds/strings.json b/homeassistant/components/motion_blinds/strings.json index cb9468c3a27..ddbf928462a 100644 --- a/homeassistant/components/motion_blinds/strings.json +++ b/homeassistant/components/motion_blinds/strings.json @@ -62,6 +62,14 @@ } }, "entity": { + "button": { + "go_favorite": { + "name": "Go to favorite position" + }, + "set_favorite": { + "name": "Set current position as favorite" + } + }, "cover": { "top": { "name": "Top" From bb889619680254270c7c17ff09b699f852d5d3a0 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 14 Aug 2024 10:10:59 -0400 Subject: [PATCH 0732/1635] Bump aiorussound to 2.3.1 (#123929) --- homeassistant/components/russound_rio/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 67a01239615..42e9dc2df8a 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], - "requirements": ["aiorussound==2.2.3"] + "requirements": ["aiorussound==2.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index beae1629b79..21e4a9b7404 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -350,7 +350,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.2.3 +aiorussound==2.3.1 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 86be5c9e14a..6cf59077ffd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -332,7 +332,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.2.3 +aiorussound==2.3.1 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 From faacfe3f90d3d3dceb244edc080fb78ccc30422c Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 14 Aug 2024 10:38:40 -0400 Subject: [PATCH 0733/1635] Set available property 
in russound base entity (#123933) * Set available property in Russound base entity * Fix * Fix --- homeassistant/components/russound_rio/__init__.py | 15 ++++++++++++++- homeassistant/components/russound_rio/entity.py | 15 +++++++++++++++ .../components/russound_rio/media_player.py | 6 ++++++ 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index e36cedecfe3..8627c636ef2 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -7,7 +7,7 @@ from aiorussound import Russound from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS @@ -26,6 +26,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> port = entry.data[CONF_PORT] russ = Russound(hass.loop, host, port) + @callback + def is_connected_updated(connected: bool) -> None: + if connected: + _LOGGER.warning("Reconnected to controller at %s:%s", host, port) + else: + _LOGGER.warning( + "Disconnected from controller at %s:%s", + host, + port, + ) + + russ.add_connection_callback(is_connected_updated) + try: async with asyncio.timeout(CONNECT_TIMEOUT): await russ.connect() diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py index 3430a77108b..150c4e285d1 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -6,6 +6,7 @@ from typing import Any, Concatenate from aiorussound import Controller +from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity import Entity @@ -68,3 +69,17 @@ class RussoundBaseEntity(Entity): self._attr_device_info["connections"] = { (CONNECTION_NETWORK_MAC, controller.mac_address) } + + @callback + def _is_connected_updated(self, connected: bool) -> None: + """Update the state when the device is ready to receive commands or is unavailable.""" + self._attr_available = connected + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + self._instance.add_connection_callback(self._is_connected_updated) + + async def async_will_remove_from_hass(self) -> None: + """Remove callbacks.""" + self._instance.remove_connection_callback(self._is_connected_updated) diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 5f11227ef53..20aaf0f3c08 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -140,8 +140,14 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): async def async_added_to_hass(self) -> None: """Register callback handlers.""" + await super().async_added_to_hass() self._zone.add_callback(self._callback_handler) + async def async_will_remove_from_hass(self) -> None: + """Remove callbacks.""" + await super().async_will_remove_from_hass() + self._zone.remove_callback(self._callback_handler) + def _current_source(self) -> Source: return 
self._zone.fetch_current_source() From 56083011789c8eea9531e5a439f55057a3f6b89d Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 16:48:52 +0200 Subject: [PATCH 0734/1635] Add missing return type in test __init__ methods (#123932) * Add missing return type in test __init__ methods * Adjust --- tests/components/androidtv/patchers.py | 3 ++- tests/components/apple_tv/common.py | 4 +++- tests/components/aws/test_init.py | 3 ++- tests/components/blackbird/test_media_player.py | 2 +- tests/components/bluetooth/__init__.py | 2 +- tests/components/bluetooth/test_init.py | 5 +++-- tests/components/bluetooth/test_scanner.py | 3 ++- .../components/bluetooth_le_tracker/test_device_tracker.py | 3 ++- tests/components/demo/test_media_player.py | 2 +- tests/components/device_tracker/common.py | 4 ++-- tests/components/esphome/conftest.py | 2 +- tests/components/fan/test_init.py | 2 +- tests/components/file_upload/test_init.py | 3 ++- tests/components/hyperion/test_config_flow.py | 2 +- tests/components/lcn/conftest.py | 3 ++- tests/components/lifx/__init__.py | 7 ++++--- tests/components/lifx/conftest.py | 3 ++- tests/components/lifx/test_config_flow.py | 3 ++- tests/components/lifx/test_init.py | 5 +++-- tests/components/lifx/test_migration.py | 3 ++- tests/components/monoprice/test_media_player.py | 2 +- tests/components/nest/common.py | 2 +- tests/components/nest/conftest.py | 2 +- tests/components/numato/numato_mock.py | 2 +- tests/components/number/test_init.py | 6 +++--- tests/components/oralb/conftest.py | 3 ++- tests/components/plex/test_config_flow.py | 2 +- tests/components/plex/test_init.py | 2 +- tests/components/profiler/test_init.py | 4 ++-- tests/components/stream/conftest.py | 7 ++++--- tests/components/stream/test_worker.py | 4 ++-- tests/components/websocket_api/test_commands.py | 2 +- tests/components/whois/conftest.py | 3 ++- tests/components/xiaomi_ble/conftest.py | 2 +- tests/test_util/aiohttp.py | 4 ++-- 35 files changed, 64 insertions(+), 47 deletions(-) diff --git a/tests/components/androidtv/patchers.py b/tests/components/androidtv/patchers.py index 1c32e1770e0..500b9e75cb3 100644 --- a/tests/components/androidtv/patchers.py +++ b/tests/components/androidtv/patchers.py @@ -1,5 +1,6 @@ """Define patches used for androidtv tests.""" +from typing import Any from unittest.mock import patch from androidtv.adb_manager.adb_manager_async import DeviceAsync @@ -25,7 +26,7 @@ PROPS_DEV_MAC = "ether ab:cd:ef:gh:ij:kl brd" class AdbDeviceTcpAsyncFake: """A fake of the `adb_shell.adb_device_async.AdbDeviceTcpAsync` class.""" - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a fake `adb_shell.adb_device_async.AdbDeviceTcpAsync` instance.""" self.available = False diff --git a/tests/components/apple_tv/common.py b/tests/components/apple_tv/common.py index ddb8c1348d9..8a81536c792 100644 --- a/tests/components/apple_tv/common.py +++ b/tests/components/apple_tv/common.py @@ -1,5 +1,7 @@ """Test code shared between test files.""" +from typing import Any + from pyatv import conf, const, interface from pyatv.const import Protocol @@ -7,7 +9,7 @@ from pyatv.const import Protocol class MockPairingHandler(interface.PairingHandler): """Mock for PairingHandler in pyatv.""" - def __init__(self, *args): + def __init__(self, *args: Any) -> None: """Initialize a new MockPairingHandler.""" super().__init__(*args) self.pin_code = None diff --git a/tests/components/aws/test_init.py 
b/tests/components/aws/test_init.py index 9589ad6c037..820b08e51b4 100644 --- a/tests/components/aws/test_init.py +++ b/tests/components/aws/test_init.py @@ -1,6 +1,7 @@ """Tests for the aws component config and setup.""" import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch as async_patch from homeassistant.core import HomeAssistant @@ -10,7 +11,7 @@ from homeassistant.setup import async_setup_component class MockAioSession: """Mock AioSession.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init a mock session.""" self.get_user = AsyncMock() self.invoke = AsyncMock() diff --git a/tests/components/blackbird/test_media_player.py b/tests/components/blackbird/test_media_player.py index ec5a37f72ad..db92dddcc77 100644 --- a/tests/components/blackbird/test_media_player.py +++ b/tests/components/blackbird/test_media_player.py @@ -35,7 +35,7 @@ class AttrDict(dict): class MockBlackbird: """Mock for pyblackbird object.""" - def __init__(self): + def __init__(self) -> None: """Init mock object.""" self.zones = defaultdict(lambda: AttrDict(power=True, av=1)) diff --git a/tests/components/bluetooth/__init__.py b/tests/components/bluetooth/__init__.py index eae867b96d5..8794d808718 100644 --- a/tests/components/bluetooth/__init__.py +++ b/tests/components/bluetooth/__init__.py @@ -271,7 +271,7 @@ async def _async_setup_with_adapter( class MockBleakClient(BleakClient): """Mock bleak client.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock init.""" super().__init__(*args, **kwargs) self._device_path = "/dev/test" diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index bd38c9cfbae..8e7d604f794 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -3,6 +3,7 @@ import asyncio from datetime import timedelta import time +from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from bleak import BleakError @@ -100,7 +101,7 @@ async def test_setup_and_stop_passive( init_kwargs = None class MockPassiveBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs @@ -151,7 +152,7 @@ async def test_setup_and_stop_old_bluez( init_kwargs = None class MockBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth/test_scanner.py b/tests/components/bluetooth/test_scanner.py index dc25f29111c..ecd1ee58692 100644 --- a/tests/components/bluetooth/test_scanner.py +++ b/tests/components/bluetooth/test_scanner.py @@ -3,6 +3,7 @@ import asyncio from datetime import timedelta import time +from typing import Any from unittest.mock import ANY, MagicMock, patch from bleak import BleakError @@ -631,7 +632,7 @@ async def test_setup_and_stop_macos( init_kwargs = None class MockBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py index f183f987cde..452297e38c2 100644 --- a/tests/components/bluetooth_le_tracker/test_device_tracker.py +++ 
b/tests/components/bluetooth_le_tracker/test_device_tracker.py @@ -1,6 +1,7 @@ """Test Bluetooth LE device tracker.""" from datetime import timedelta +from typing import Any from unittest.mock import patch from bleak import BleakError @@ -31,7 +32,7 @@ from tests.components.bluetooth import generate_advertisement_data, generate_ble class MockBleakClient: """Mock BleakClient.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/demo/test_media_player.py b/tests/components/demo/test_media_player.py index a6669fa705c..7487a4c13e3 100644 --- a/tests/components/demo/test_media_player.py +++ b/tests/components/demo/test_media_player.py @@ -497,7 +497,7 @@ async def test_media_image_proxy( class MockResponse: """Test response.""" - def __init__(self): + def __init__(self) -> None: """Test response init.""" self.status = 200 self.headers = {"Content-Type": "sometype"} diff --git a/tests/components/device_tracker/common.py b/tests/components/device_tracker/common.py index d30db984a66..b6341443d36 100644 --- a/tests/components/device_tracker/common.py +++ b/tests/components/device_tracker/common.py @@ -61,7 +61,7 @@ def async_see( class MockScannerEntity(ScannerEntity): """Test implementation of a ScannerEntity.""" - def __init__(self): + def __init__(self) -> None: """Init.""" self.connected = False self._hostname = "test.hostname.org" @@ -110,7 +110,7 @@ class MockScannerEntity(ScannerEntity): class MockScanner(DeviceScanner): """Mock device scanner.""" - def __init__(self): + def __init__(self) -> None: """Initialize the MockScanner.""" self.devices_home = [] diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index 75be231558f..ea4099560cd 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -421,7 +421,7 @@ async def _mock_generic_device_entry( class MockReconnectLogic(BaseMockReconnectLogic): """Mock ReconnectLogic.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the mock.""" super().__init__(*args, **kwargs) mock_device.set_on_disconnect(kwargs["on_disconnect"]) diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index a72ad5e48f6..a7dc544a97a 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -38,7 +38,7 @@ from tests.common import ( class BaseFan(FanEntity): """Implementation of the abstract FanEntity.""" - def __init__(self): + def __init__(self) -> None: """Initialize the fan.""" diff --git a/tests/components/file_upload/test_init.py b/tests/components/file_upload/test_init.py index 149bbb7ee2f..22ad9323f05 100644 --- a/tests/components/file_upload/test_init.py +++ b/tests/components/file_upload/test_init.py @@ -3,6 +3,7 @@ from contextlib import contextmanager from pathlib import Path from random import getrandbits +from typing import Any from unittest.mock import patch import pytest @@ -141,7 +142,7 @@ async def test_upload_large_file_fails( yield MockPathOpen() class MockPathOpen: - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: pass def write(self, data: bytes) -> None: diff --git a/tests/components/hyperion/test_config_flow.py b/tests/components/hyperion/test_config_flow.py index 57749f5eedc..fb4fa1fe671 100644 --- a/tests/components/hyperion/test_config_flow.py +++ 
b/tests/components/hyperion/test_config_flow.py @@ -427,7 +427,7 @@ async def test_auth_create_token_approval_declined_task_canceled( class CanceledAwaitableMock(AsyncMock): """A canceled awaitable mock.""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.done = Mock(return_value=False) self.cancel = Mock() diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 24447abf77a..e29a7076430 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -2,6 +2,7 @@ from collections.abc import AsyncGenerator import json +from typing import Any from unittest.mock import AsyncMock, patch import pypck @@ -29,7 +30,7 @@ class MockModuleConnection(ModuleConnection): request_name = AsyncMock(return_value="TestModule") send_command = AsyncMock(return_value=True) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Construct ModuleConnection instance.""" super().__init__(*args, **kwargs) self.serials_request_handler.serial_known.set() diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 4834e486ec0..1d37496fbcb 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from contextlib import contextmanager +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiolifx.aiolifx import Light @@ -212,7 +213,7 @@ def _patch_device(device: Light | None = None, no_device: bool = False): class MockLifxConnecton: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" if no_device: self.device = _mocked_failing_bulb() @@ -240,7 +241,7 @@ def _patch_discovery(device: Light | None = None, no_device: bool = False): class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" if no_device: self.lights = {} @@ -276,7 +277,7 @@ def _patch_config_flow_try_connect( class MockLifxConnection: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" if no_device: self.device = _mocked_failing_bulb() diff --git a/tests/components/lifx/conftest.py b/tests/components/lifx/conftest.py index 5cb7c702f43..e4a5f303f61 100644 --- a/tests/components/lifx/conftest.py +++ b/tests/components/lifx/conftest.py @@ -1,5 +1,6 @@ """Tests for the lifx integration.""" +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -21,7 +22,7 @@ def mock_effect_conductor(): """Mock the effect conductor.""" class MockConductor: - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock the conductor.""" self.start = AsyncMock() self.stop = AsyncMock() diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index 59b7090788a..735fc4bf6f6 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -2,6 +2,7 @@ from ipaddress import ip_address import socket +from typing import Any from unittest.mock import patch import pytest @@ -288,7 +289,7 @@ async def test_manual_dns_error(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + 
def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" self.device = _mocked_failing_bulb() diff --git a/tests/components/lifx/test_init.py b/tests/components/lifx/test_init.py index 42ece68a2c5..66adc54704e 100644 --- a/tests/components/lifx/test_init.py +++ b/tests/components/lifx/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta import socket +from typing import Any from unittest.mock import patch import pytest @@ -37,7 +38,7 @@ async def test_configuring_lifx_causes_discovery(hass: HomeAssistant) -> None: class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" discovered = _mocked_bulb() self.lights = {discovered.mac_addr: discovered} @@ -137,7 +138,7 @@ async def test_dns_error_at_startup(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx connection with a dns error.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" self.device = bulb diff --git a/tests/components/lifx/test_migration.py b/tests/components/lifx/test_migration.py index e5b2f9f8167..f984acce238 100644 --- a/tests/components/lifx/test_migration.py +++ b/tests/components/lifx/test_migration.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from unittest.mock import patch from homeassistant import setup @@ -114,7 +115,7 @@ async def test_discovery_is_more_frequent_during_migration( class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" self.bulb = bulb self.lights = {} diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index f7d88692cf5..c4ba998261b 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -58,7 +58,7 @@ class AttrDict(dict): class MockMonoprice: """Mock for pymonoprice object.""" - def __init__(self): + def __init__(self) -> None: """Init mock object.""" self.zones = defaultdict( lambda: AttrDict(power=True, volume=0, mute=True, source=1) diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index 0a553f9c114..9c8de0224f0 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -92,7 +92,7 @@ class FakeSubscriber(GoogleNestSubscriber): stop_calls = 0 - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 4b64e80543b..85c64aff379 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -53,7 +53,7 @@ class FakeAuth(AbstractAuth): from the API. """ - def __init__(self): + def __init__(self) -> None: """Initialize FakeAuth.""" super().__init__(None, None) # Tests can set fake responses here. 
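(Aside, not part of the patch content: a minimal sketch of the usual motivation for the explicit "-> None" annotations added throughout this series. Under mypy's default settings a function whose signature carries no annotations at all is treated as untyped and its body is skipped, and a bare "__init__(self)" only counts as annotated once the return type is spelled out. The class names below are invented for illustration only.)

class MockClientUntyped:
    def __init__(self):  # untyped signature: mypy skips checking this body
        self.connected = "yes"  # a str where a bool was intended; not flagged


class MockClientTyped:
    def __init__(self) -> None:  # typed signature: the body is now checked
        self.connected: bool = True  # a bad assignment here would be reported


if __name__ == "__main__":
    print(MockClientTyped().connected)
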
diff --git a/tests/components/numato/numato_mock.py b/tests/components/numato/numato_mock.py index 097a785beb1..3f2fecb4a58 100644 --- a/tests/components/numato/numato_mock.py +++ b/tests/components/numato/numato_mock.py @@ -8,7 +8,7 @@ class NumatoModuleMock: NumatoGpioError = NumatoGpioError - def __init__(self): + def __init__(self) -> None: """Initialize the numato_gpio module mockup class.""" self.devices = {} diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 55dad2506f1..721b531e8cd 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -121,7 +121,7 @@ class MockNumberEntityDescr(NumberEntity): Step is calculated based on the smaller max_value and min_value. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -145,7 +145,7 @@ class MockNumberEntityAttrWithDescription(NumberEntity): members take precedence over the entity description. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -223,7 +223,7 @@ class MockNumberEntityDescrDeprecated(NumberEntity): Step is calculated based on the smaller max_value and min_value. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", diff --git a/tests/components/oralb/conftest.py b/tests/components/oralb/conftest.py index c757d79a78e..3e5f38ffb73 100644 --- a/tests/components/oralb/conftest.py +++ b/tests/components/oralb/conftest.py @@ -1,6 +1,7 @@ """OralB session fixtures.""" from collections.abc import Generator +from typing import Any from unittest import mock import pytest @@ -19,7 +20,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 08733a7dd17..202d62d70e0 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -537,7 +537,7 @@ async def test_manual_config(hass: HomeAssistant, mock_plex_calls) -> None: class WrongCertValidaitionException(requests.exceptions.SSLError): """Mock the exception showing an unmatched error.""" - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( "some random message that doesn't match" ) diff --git a/tests/components/plex/test_init.py b/tests/components/plex/test_init.py index 15af78faf65..490091998ff 100644 --- a/tests/components/plex/test_init.py +++ b/tests/components/plex/test_init.py @@ -209,7 +209,7 @@ async def test_setup_when_certificate_changed( class WrongCertHostnameException(requests.exceptions.SSLError): """Mock the exception showing a mismatched hostname.""" - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( f"hostname '{old_domain}' doesn't match" ) diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index b172ae0e12d..1f8f8baf02b 100644 --- a/tests/components/profiler/test_init.py +++ 
b/tests/components/profiler/test_init.py @@ -284,14 +284,14 @@ async def test_lru_stats(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) return 1 class DomainData: - def __init__(self): + def __init__(self) -> None: self._data = LRU(1) domain_data = DomainData() assert hass.services.has_service(DOMAIN, SERVICE_LRU_STATS) class LRUCache: - def __init__(self): + def __init__(self) -> None: self._data = {"sqlalchemy_test": 1} sqlalchemy_lru_cache = LRUCache() diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 6aab3c06d13..39e4de13fed 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -16,6 +16,7 @@ import asyncio from collections.abc import Generator import logging import threading +from typing import Any from unittest.mock import Mock, patch from aiohttp import web @@ -32,7 +33,7 @@ TEST_TIMEOUT = 7.0 # Lower than 9s home assistant timeout class WorkerSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self): + def __init__(self) -> None: """Initialize WorkerSync.""" self._event = None self._original = StreamState.discontinuity @@ -74,7 +75,7 @@ def stream_worker_sync() -> Generator[WorkerSync]: class HLSSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self): + def __init__(self) -> None: """Initialize HLSSync.""" self._request_event = asyncio.Event() self._original_recv = StreamOutput.recv @@ -91,7 +92,7 @@ class HLSSync: self.check_requests_ready() class SyncResponse(web.Response): - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) on_resp() diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index a96866eac4b..d0f89364778 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -160,7 +160,7 @@ class PacketSequence: class FakePacket(bytearray): # Be a bytearray so that memoryview works - def __init__(self): + def __init__(self) -> None: super().__init__(3) time_base = VIDEO_TIME_BASE @@ -209,7 +209,7 @@ class FakePyAvContainer: class FakePyAvBuffer: """Holds outputs of the decoded stream for tests to assert on results.""" - def __init__(self): + def __init__(self) -> None: """Initialize the FakePyAvBuffer.""" self.segments = [] self.audio_packets = [] diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index 10a9c4876b9..772a8ee793e 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -920,7 +920,7 @@ async def test_subscribe_entities_with_unserializable_state( class CannotSerializeMe: """Cannot serialize this.""" - def __init__(self): + def __init__(self) -> None: """Init cannot serialize this.""" hass.states.async_set("light.permitted", "off", {"color": "red"}) diff --git a/tests/components/whois/conftest.py b/tests/components/whois/conftest.py index 1c779cce671..4bb18581c1a 100644 --- a/tests/components/whois/conftest.py +++ b/tests/components/whois/conftest.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Generator from datetime import datetime +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -74,7 +75,7 @@ def mock_whois_missing_some_attrs() -> Generator[Mock]: class LimitedWhoisMock: """A limited mock of whois_query.""" - def 
__init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock only attributes the library always sets being available.""" self.creation_date = datetime(2019, 1, 1, 0, 0, 0) self.dnssec = True diff --git a/tests/components/xiaomi_ble/conftest.py b/tests/components/xiaomi_ble/conftest.py index 8994aec813c..d4864cbe2f8 100644 --- a/tests/components/xiaomi_ble/conftest.py +++ b/tests/components/xiaomi_ble/conftest.py @@ -19,7 +19,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index d0bd7fbeb2f..b5eda374f01 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -37,7 +37,7 @@ def mock_stream(data): class AiohttpClientMocker: """Mock Aiohttp client requests.""" - def __init__(self): + def __init__(self) -> None: """Initialize the request mocker.""" self._mocks = [] self._cookies = {} @@ -327,7 +327,7 @@ class MockLongPollSideEffect: If queue is empty, will await until done. """ - def __init__(self): + def __init__(self) -> None: """Initialize the queue.""" self.semaphore = asyncio.Semaphore(0) self.response_list = [] From 5958ef363f7f0a2b5cfd36ac465d1f7146d46a8a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 14 Aug 2024 10:02:44 -0500 Subject: [PATCH 0735/1635] Bump pylutron_caseta to 0.21.1 (#123924) --- homeassistant/components/lutron_caseta/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 1 - tests/components/lutron_caseta/test_device_trigger.py | 5 +++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/lutron_caseta/manifest.json b/homeassistant/components/lutron_caseta/manifest.json index 48445f645aa..3c6348ed4da 100644 --- a/homeassistant/components/lutron_caseta/manifest.json +++ b/homeassistant/components/lutron_caseta/manifest.json @@ -9,7 +9,7 @@ }, "iot_class": "local_push", "loggers": ["pylutron_caseta"], - "requirements": ["pylutron-caseta==0.20.0"], + "requirements": ["pylutron-caseta==0.21.1"], "zeroconf": [ { "type": "_lutron._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 21e4a9b7404..c00200df958 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1990,7 +1990,7 @@ pylitejet==0.6.2 pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.20.0 +pylutron-caseta==0.21.1 # homeassistant.components.lutron pylutron==0.2.15 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6cf59077ffd..6cd024839f8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1592,7 +1592,7 @@ pylitejet==0.6.2 pylitterbot==2023.5.0 # homeassistant.components.lutron_caseta -pylutron-caseta==0.20.0 +pylutron-caseta==0.21.1 # homeassistant.components.lutron pylutron==0.2.15 diff --git a/script/licenses.py b/script/licenses.py index 0c2870aebd8..1e01fb6111b 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -159,7 +159,6 @@ EXCEPTIONS = { "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 - "pylutron-caseta", # https://github.com/gurumitts/pylutron-caseta/pull/168 "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 "pyvera", # 
https://github.com/maximvelichko/pyvera/pull/164 "pyxeoma", # https://github.com/jeradM/pyxeoma/pull/11 diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 405c504dee1..9353b897602 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -487,8 +487,9 @@ async def test_if_fires_on_button_event_late_setup( }, ) - await hass.config_entries.async_setup(config_entry_id) - await hass.async_block_till_done() + with patch("homeassistant.components.lutron_caseta.Smartbridge.create_tls"): + await hass.config_entries.async_setup(config_entry_id) + await hass.async_block_till_done() message = { ATTR_SERIAL: device.get("serial"), From 5e6f8373e15d6c805fe1e29f7e86f233105454b6 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Wed, 14 Aug 2024 11:22:36 -0400 Subject: [PATCH 0736/1635] Set quality scale to silver for Russound RIO (#123937) --- homeassistant/components/russound_rio/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 42e9dc2df8a..7180c3be84f 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], + "quality_scale": "silver", "requirements": ["aiorussound==2.3.1"] } From 178482068dfd376c27041c55bbf5c92b92f41dad Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 17:38:30 +0200 Subject: [PATCH 0737/1635] Add missing return type in test __init__ method (part 3) (#123940) --- tests/components/alexa/test_common.py | 4 ++-- tests/components/application_credentials/test_init.py | 7 ++++++- tests/components/media_player/test_async_helpers.py | 2 +- tests/components/nest/test_config_flow.py | 7 ++++++- tests/components/universal/test_media_player.py | 2 +- 5 files changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index 9fdcc1c89c1..f93d9483b43 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.alexa import config, smart_home from homeassistant.components.alexa.const import CONF_ENDPOINT, CONF_FILTER, CONF_LOCALE -from homeassistant.core import Context, callback +from homeassistant.core import Context, HomeAssistant, callback from homeassistant.helpers import entityfilter from tests.common import async_mock_service @@ -28,7 +28,7 @@ class MockConfig(smart_home.AlexaConfig): "camera.test": {"display_categories": "CAMERA"}, } - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Mock Alexa config.""" super().__init__( hass, diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index e6fdf568bcc..134d996865d 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -124,7 +124,12 @@ def config_flow_handler( class OAuthFixture: """Fixture to facilitate testing an OAuth flow.""" - def __init__(self, hass, hass_client, aioclient_mock): + def __init__( + self, + hass: HomeAssistant, + 
hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + ) -> None: """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index 783846d8857..750d2861f21 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant class SimpleMediaPlayer(mp.MediaPlayerEntity): """Media player test class.""" - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Initialize the test media player.""" self.hass = hass self._volume = 0 diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index 5c8f01c8e39..b6e84ce358f 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -56,7 +56,12 @@ def nest_test_config() -> NestTestConfig: class OAuthFixture: """Simulate the oauth flow used by the config flow.""" - def __init__(self, hass, hass_client_no_auth, aioclient_mock): + def __init__( + self, + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + ) -> None: """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client_no_auth diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 69d4c8666cf..7c992814cfe 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -55,7 +55,7 @@ def validate_config(config): class MockMediaPlayer(media_player.MediaPlayerEntity): """Mock media player for testing.""" - def __init__(self, hass, name): + def __init__(self, hass: HomeAssistant, name: str) -> None: """Initialize the media player.""" self.hass = hass self._name = name From 3322fa0294d3f4abcb8bf1b1803075b9674e1af1 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Wed, 14 Aug 2024 11:39:23 -0400 Subject: [PATCH 0738/1635] Bump LaCrosse View to 1.0.2, fixes blocking call (#123935) --- homeassistant/components/lacrosse_view/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lacrosse_view/manifest.json b/homeassistant/components/lacrosse_view/manifest.json index 1236f63ddad..1cf8794237d 100644 --- a/homeassistant/components/lacrosse_view/manifest.json +++ b/homeassistant/components/lacrosse_view/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lacrosse_view", "iot_class": "cloud_polling", "loggers": ["lacrosse_view"], - "requirements": ["lacrosse-view==1.0.1"] + "requirements": ["lacrosse-view==1.0.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index c00200df958..c5fe1e2171a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1228,7 +1228,7 @@ konnected==1.2.0 krakenex==2.1.0 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.1 +lacrosse-view==1.0.2 # homeassistant.components.eufy lakeside==0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6cd024839f8..b84660fb0b0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1027,7 +1027,7 @@ konnected==1.2.0 krakenex==2.1.0 # homeassistant.components.lacrosse_view -lacrosse-view==1.0.1 
+lacrosse-view==1.0.2 # homeassistant.components.laundrify laundrify-aio==1.2.2 From 3e967700fd4e51c8bf86a799e93b5594f1afffc3 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 14 Aug 2024 17:59:15 +0200 Subject: [PATCH 0739/1635] Add missing return type in test __init__ method (part 2) (#123939) * Add missing return type in test __init__ method (part 2) * Adjust * One more * One more * More --- tests/components/alexa/test_common.py | 2 +- tests/components/androidtv/test_config_flow.py | 2 +- tests/components/application_credentials/test_init.py | 2 +- tests/components/assist_pipeline/conftest.py | 4 ++-- tests/components/baf/__init__.py | 2 +- tests/components/bluetooth/test_scanner.py | 2 +- tests/components/coinbase/common.py | 6 +++--- tests/components/fido/test_sensor.py | 2 +- tests/components/flic/test_binary_sensor.py | 2 +- tests/components/folder_watcher/test_init.py | 4 ++-- tests/components/fritz/conftest.py | 2 +- tests/components/google/test_calendar.py | 2 +- tests/components/hddtemp/test_sensor.py | 2 +- tests/components/hdmi_cec/__init__.py | 2 +- tests/components/hlk_sw16/test_config_flow.py | 2 +- tests/components/homematicip_cloud/helper.py | 2 +- tests/components/http/test_cors.py | 2 +- tests/components/insteon/mock_devices.py | 2 +- tests/components/keymitt_ble/__init__.py | 4 ++-- tests/components/knx/test_config_flow.py | 2 +- tests/components/kodi/util.py | 4 ++-- tests/components/lifx/__init__.py | 6 +++--- tests/components/lifx/test_config_flow.py | 2 +- tests/components/lifx/test_light.py | 7 ++++--- tests/components/lutron_caseta/__init__.py | 2 +- tests/components/modbus/conftest.py | 2 +- tests/components/nsw_fuel_station/test_sensor.py | 8 +++++--- tests/components/numato/numato_mock.py | 2 +- tests/components/pilight/test_init.py | 2 +- tests/components/plex/mock_classes.py | 2 +- tests/components/plex/test_playback.py | 2 +- tests/components/profiler/test_init.py | 2 +- tests/components/recorder/db_schema_16.py | 2 +- tests/components/recorder/db_schema_18.py | 2 +- tests/components/recorder/db_schema_22.py | 2 +- tests/components/recorder/db_schema_23.py | 2 +- .../recorder/db_schema_23_with_newer_columns.py | 2 +- tests/components/recorder/test_util.py | 2 +- tests/components/reddit/test_sensor.py | 2 +- tests/components/samsungtv/conftest.py | 2 +- tests/components/sonos/conftest.py | 2 +- tests/components/stream/test_hls.py | 2 +- tests/components/stream/test_worker.py | 10 +++++----- tests/components/tomato/test_device_tracker.py | 2 +- tests/components/venstar/__init__.py | 2 +- tests/components/vera/common.py | 2 +- tests/components/vizio/conftest.py | 2 +- tests/components/ws66i/test_media_player.py | 2 +- tests/components/yamaha/test_media_player.py | 2 +- tests/components/yeelight/__init__.py | 2 +- tests/test_util/aiohttp.py | 2 +- 51 files changed, 70 insertions(+), 67 deletions(-) diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index f93d9483b43..43e7d77ce71 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -213,7 +213,7 @@ async def reported_properties(hass, endpoint, return_full_response=False): class ReportedProperties: """Class to help assert reported properties.""" - def __init__(self, properties): + def __init__(self, properties) -> None: """Initialize class.""" self.properties = properties diff --git a/tests/components/androidtv/test_config_flow.py b/tests/components/androidtv/test_config_flow.py index 
e2b5207c590..b73fee9fb10 100644 --- a/tests/components/androidtv/test_config_flow.py +++ b/tests/components/androidtv/test_config_flow.py @@ -73,7 +73,7 @@ CONNECT_METHOD = ( class MockConfigDevice: """Mock class to emulate Android device.""" - def __init__(self, eth_mac=ETH_MAC, wifi_mac=None): + def __init__(self, eth_mac=ETH_MAC, wifi_mac=None) -> None: """Initialize a fake device to test config flow.""" self.available = True self.device_properties = {PROP_ETHMAC: eth_mac, PROP_WIFIMAC: wifi_mac} diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index 134d996865d..d90084fa7c9 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -189,7 +189,7 @@ async def oauth_fixture( class Client: """Test client with helper methods for application credentials websocket.""" - def __init__(self, client): + def __init__(self, client) -> None: """Initialize Client.""" self.client = client self.id = 0 diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index 141baaa9870..b7bf83a7ed0 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -153,7 +153,7 @@ class MockTTSPlatform(MockPlatform): PLATFORM_SCHEMA = tts.PLATFORM_SCHEMA - def __init__(self, *, async_get_engine, **kwargs): + def __init__(self, *, async_get_engine, **kwargs: Any) -> None: """Initialize the tts platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -180,7 +180,7 @@ def mock_stt_provider_entity() -> MockSttProviderEntity: class MockSttPlatform(MockPlatform): """Provide a fake STT platform.""" - def __init__(self, *, async_get_engine, **kwargs): + def __init__(self, *, async_get_engine, **kwargs: Any) -> None: """Initialize the stt platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine diff --git a/tests/components/baf/__init__.py b/tests/components/baf/__init__.py index f1074a87cee..a047029f9a0 100644 --- a/tests/components/baf/__init__.py +++ b/tests/components/baf/__init__.py @@ -12,7 +12,7 @@ class MockBAFDevice(Device): """A simple mock for a BAF Device.""" # pylint: disable-next=super-init-not-called - def __init__(self, async_wait_available_side_effect=None): + def __init__(self, async_wait_available_side_effect=None) -> None: """Init simple mock.""" self._async_wait_available_side_effect = async_wait_available_side_effect diff --git a/tests/components/bluetooth/test_scanner.py b/tests/components/bluetooth/test_scanner.py index ecd1ee58692..6acb86476e7 100644 --- a/tests/components/bluetooth/test_scanner.py +++ b/tests/components/bluetooth/test_scanner.py @@ -212,7 +212,7 @@ async def test_recovery_from_dbus_restart(hass: HomeAssistant) -> None: mock_discovered = [] class MockBleakScanner: - def __init__(self, detection_callback, *args, **kwargs): + def __init__(self, detection_callback, *args: Any, **kwargs: Any) -> None: nonlocal _callback _callback = detection_callback diff --git a/tests/components/coinbase/common.py b/tests/components/coinbase/common.py index 2768b6a2cd4..1a141c88bc3 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -21,7 +21,7 @@ from tests.common import MockConfigEntry class MockPagination: """Mock pagination result.""" - def __init__(self, value=None): + def __init__(self, value=None) -> None: """Load simple pagination for tests.""" 
self.next_starting_after = value @@ -29,7 +29,7 @@ class MockPagination: class MockGetAccounts: """Mock accounts with pagination.""" - def __init__(self, starting_after=0): + def __init__(self, starting_after=0) -> None: """Init mocked object, forced to return two at a time.""" if (target_end := starting_after + 2) >= ( max_end := len(MOCK_ACCOUNTS_RESPONSE) @@ -58,7 +58,7 @@ def mocked_get_accounts(_, **kwargs): class MockGetAccountsV3: """Mock accounts with pagination.""" - def __init__(self, cursor=""): + def __init__(self, cursor="") -> None: """Init mocked object, forced to return two at a time.""" ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3] start = ids.index(cursor) if cursor else 0 diff --git a/tests/components/fido/test_sensor.py b/tests/components/fido/test_sensor.py index d47c7ce8e9f..654221cfacd 100644 --- a/tests/components/fido/test_sensor.py +++ b/tests/components/fido/test_sensor.py @@ -18,7 +18,7 @@ CONTRACT = "123456789" class FidoClientMock: """Fake Fido client.""" - def __init__(self, username, password, timeout=None, httpsession=None): + def __init__(self, username, password, timeout=None, httpsession=None) -> None: """Fake Fido client init.""" def get_phone_numbers(self): diff --git a/tests/components/flic/test_binary_sensor.py b/tests/components/flic/test_binary_sensor.py index 44db1d6ea1b..cdc1d64db41 100644 --- a/tests/components/flic/test_binary_sensor.py +++ b/tests/components/flic/test_binary_sensor.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component class _MockFlicClient: - def __init__(self, button_addresses): + def __init__(self, button_addresses) -> None: self.addresses = button_addresses self.get_info_callback = None self.scan_wizard = None diff --git a/tests/components/folder_watcher/test_init.py b/tests/components/folder_watcher/test_init.py index 8309988931a..965ae33c4f8 100644 --- a/tests/components/folder_watcher/test_init.py +++ b/tests/components/folder_watcher/test_init.py @@ -36,7 +36,7 @@ def test_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns): + def __init__(self, patterns) -> None: pass with patch( @@ -66,7 +66,7 @@ def test_move_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns): + def __init__(self, patterns) -> None: pass with patch( diff --git a/tests/components/fritz/conftest.py b/tests/components/fritz/conftest.py index bb049f067b4..fa92fa37c04 100644 --- a/tests/components/fritz/conftest.py +++ b/tests/components/fritz/conftest.py @@ -30,7 +30,7 @@ class FritzServiceMock(Service): class FritzConnectionMock: """FritzConnection mocking.""" - def __init__(self, services): + def __init__(self, services) -> None: """Init Mocking class.""" self.modelname = MOCK_MODELNAME self.call_action = self._call_action diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 903b68a5cf2..11d4ec46bd1 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -74,7 +74,7 @@ def upcoming_event_url(entity: str = TEST_ENTITY) -> str: class Client: """Test client with helper methods for calendar websocket.""" - def __init__(self, client): + def __init__(self, client) -> None: """Initialize Client.""" self.client = client self.id = 0 diff --git a/tests/components/hddtemp/test_sensor.py b/tests/components/hddtemp/test_sensor.py index 
2bd0519c12c..15740ffa0ea 100644 --- a/tests/components/hddtemp/test_sensor.py +++ b/tests/components/hddtemp/test_sensor.py @@ -60,7 +60,7 @@ REFERENCE = { class TelnetMock: """Mock class for the telnetlib.Telnet object.""" - def __init__(self, host, port, timeout=0): + def __init__(self, host, port, timeout=0) -> None: """Initialize Telnet object.""" self.host = host self.port = port diff --git a/tests/components/hdmi_cec/__init__.py b/tests/components/hdmi_cec/__init__.py index 5cf8ed18b6a..1d51fa0cc50 100644 --- a/tests/components/hdmi_cec/__init__.py +++ b/tests/components/hdmi_cec/__init__.py @@ -8,7 +8,7 @@ from homeassistant.components.hdmi_cec import KeyPressCommand, KeyReleaseCommand class MockHDMIDevice: """Mock of a HDMIDevice.""" - def __init__(self, *, logical_address, **values): + def __init__(self, *, logical_address, **values) -> None: """Mock of a HDMIDevice.""" self.set_update_callback = Mock(side_effect=self._set_update_callback) self.logical_address = logical_address diff --git a/tests/components/hlk_sw16/test_config_flow.py b/tests/components/hlk_sw16/test_config_flow.py index 6a758ec5066..2225ea1b79a 100644 --- a/tests/components/hlk_sw16/test_config_flow.py +++ b/tests/components/hlk_sw16/test_config_flow.py @@ -12,7 +12,7 @@ from homeassistant.data_entry_flow import FlowResultType class MockSW16Client: """Class to mock the SW16Client client.""" - def __init__(self, fail): + def __init__(self, fail) -> None: """Initialise client with failure modes.""" self.fail = fail self.disconnect_callback = None diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index e7d7350f98e..bf20d37f2a3 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -132,7 +132,7 @@ class HomeTemplate(Home): def __init__( self, connection=None, home_name="", test_devices=None, test_groups=None - ): + ) -> None: """Init template with connection.""" super().__init__(connection=connection) self.name = home_name diff --git a/tests/components/http/test_cors.py b/tests/components/http/test_cors.py index 1188131cc0f..c0256abb25d 100644 --- a/tests/components/http/test_cors.py +++ b/tests/components/http/test_cors.py @@ -119,7 +119,7 @@ async def test_cors_middleware_with_cors_allowed_view(hass: HomeAssistant) -> No requires_auth = False cors_allowed = True - def __init__(self, url, name): + def __init__(self, url, name) -> None: """Initialize test view.""" self.url = url self.name = name diff --git a/tests/components/insteon/mock_devices.py b/tests/components/insteon/mock_devices.py index 6b5f5cf5e09..2c385c337fd 100644 --- a/tests/components/insteon/mock_devices.py +++ b/tests/components/insteon/mock_devices.py @@ -30,7 +30,7 @@ class MockSwitchLinc(SwitchedLightingControl_SwitchLinc02): class MockDevices: """Mock devices class.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Init the MockDevices class.""" self._devices = {} self.modem = None diff --git a/tests/components/keymitt_ble/__init__.py b/tests/components/keymitt_ble/__init__.py index 1e717b805c5..6fa608ad3b4 100644 --- a/tests/components/keymitt_ble/__init__.py +++ b/tests/components/keymitt_ble/__init__.py @@ -53,7 +53,7 @@ SERVICE_INFO = BluetoothServiceInfoBleak( class MockMicroBotApiClient: """Mock MicroBotApiClient.""" - def __init__(self, device, token): + def __init__(self, device, token) -> None: """Mock init.""" async def connect(self, init): @@ -70,7 +70,7 @@ class 
MockMicroBotApiClient: class MockMicroBotApiClientFail: """Mock MicroBotApiClient.""" - def __init__(self, device, token): + def __init__(self, device, token) -> None: """Mock init.""" async def connect(self, init): diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index d82e5ae33c7..78751c7e641 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -126,7 +126,7 @@ def _gateway_descriptor( class GatewayScannerMock: """Mock GatewayScanner.""" - def __init__(self, gateways=None): + def __init__(self, gateways=None) -> None: """Initialize GatewayScannerMock.""" # Key is a HPAI instance in xknx, but not used in HA anyway. self.found_gateways = ( diff --git a/tests/components/kodi/util.py b/tests/components/kodi/util.py index 6217a77903b..e56ba03b7e5 100644 --- a/tests/components/kodi/util.py +++ b/tests/components/kodi/util.py @@ -63,7 +63,7 @@ def get_kodi_connection( class MockConnection: """A mock kodi connection.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Mock the Kodi connection.""" self._connected = connected @@ -92,7 +92,7 @@ class MockConnection: class MockWSConnection: """A mock kodi websocket connection.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Mock the websocket connection.""" self._connected = connected diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 1d37496fbcb..432e7673db6 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -26,7 +26,7 @@ DEFAULT_ENTRY_TITLE = LABEL class MockMessage: """Mock a lifx message.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """Init message.""" self.target_addr = SERIAL self.count = 9 @@ -38,7 +38,7 @@ class MockMessage: class MockFailingLifxCommand: """Mock a lifx command that fails.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] @@ -61,7 +61,7 @@ class MockLifxCommand: """Return name.""" return "mock_lifx_command" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index 735fc4bf6f6..29324d0d19a 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -575,7 +575,7 @@ async def test_suggested_area( class MockLifxCommandGetGroup: """Mock the get_group method that gets the group name from the bulb.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.lifx_group = kwargs.get("lifx_group") diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 9972bc1021a..a642347b4e6 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -1,6 +1,7 @@ """Tests for the lifx integration light platform.""" from datetime import timedelta +from typing import Any from unittest.mock import patch import aiolifx_effects @@ -1299,7 +1300,7 @@ async def test_config_zoned_light_strip_fails( class MockFailingLifxCommand: """Mock a lifx command that fails on the 2nd try.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb 
self.call_count = 0 @@ -1338,7 +1339,7 @@ async def test_legacy_zoned_light_strip( class MockPopulateLifxZonesCommand: """Mock populating the number of zones.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1845,7 +1846,7 @@ async def test_color_bulb_is_actually_off(hass: HomeAssistant) -> None: class MockLifxCommandActuallyOff: """Mock a lifx command that will update our power level state.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] diff --git a/tests/components/lutron_caseta/__init__.py b/tests/components/lutron_caseta/__init__.py index 9b25e2a0164..b27d30ac31f 100644 --- a/tests/components/lutron_caseta/__init__.py +++ b/tests/components/lutron_caseta/__init__.py @@ -101,7 +101,7 @@ async def async_setup_integration(hass: HomeAssistant, mock_bridge) -> MockConfi class MockBridge: """Mock Lutron bridge that emulates configured connected status.""" - def __init__(self, can_connect=True): + def __init__(self, can_connect=True) -> None: """Initialize MockBridge instance with configured mock connectivity.""" self.can_connect = can_connect self.is_currently_connected = False diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 28f8eae5a0b..5c612f9f8ad 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -37,7 +37,7 @@ TEST_PORT_SERIAL = "usb01" class ReadResult: """Storage class for register read results.""" - def __init__(self, register_words): + def __init__(self, register_words) -> None: """Init.""" self.registers = register_words self.bits = register_words diff --git a/tests/components/nsw_fuel_station/test_sensor.py b/tests/components/nsw_fuel_station/test_sensor.py index 898d5757870..dbf52d937f0 100644 --- a/tests/components/nsw_fuel_station/test_sensor.py +++ b/tests/components/nsw_fuel_station/test_sensor.py @@ -23,7 +23,9 @@ VALID_CONFIG_EXPECTED_ENTITY_IDS = ["my_fake_station_p95", "my_fake_station_e10" class MockPrice: """Mock Price implementation.""" - def __init__(self, price, fuel_type, last_updated, price_unit, station_code): + def __init__( + self, price, fuel_type, last_updated, price_unit, station_code + ) -> None: """Initialize a mock price instance.""" self.price = price self.fuel_type = fuel_type @@ -35,7 +37,7 @@ class MockPrice: class MockStation: """Mock Station implementation.""" - def __init__(self, name, code): + def __init__(self, name, code) -> None: """Initialize a mock Station instance.""" self.name = name self.code = code @@ -44,7 +46,7 @@ class MockStation: class MockGetFuelPricesResponse: """Mock GetFuelPricesResponse implementation.""" - def __init__(self, prices, stations): + def __init__(self, prices, stations) -> None: """Initialize a mock GetFuelPricesResponse instance.""" self.prices = prices self.stations = stations diff --git a/tests/components/numato/numato_mock.py b/tests/components/numato/numato_mock.py index 3f2fecb4a58..208beffe83f 100644 --- a/tests/components/numato/numato_mock.py +++ b/tests/components/numato/numato_mock.py @@ -15,7 +15,7 @@ class NumatoModuleMock: class NumatoDeviceMock: """Mockup for the numato_gpio.NumatoUsbGpio class.""" - def __init__(self, device): + def __init__(self, device) -> None: """Initialize numato device mockup.""" self.device = device self.callbacks = {} diff --git a/tests/components/pilight/test_init.py 
b/tests/components/pilight/test_init.py index c48135f59eb..dfc62d30619 100644 --- a/tests/components/pilight/test_init.py +++ b/tests/components/pilight/test_init.py @@ -40,7 +40,7 @@ class PilightDaemonSim: "message": {"id": 0, "unit": 0, "off": 1}, } - def __init__(self, host, port): + def __init__(self, host, port) -> None: """Init pilight client, ignore parameters.""" def send_code(self, call): diff --git a/tests/components/plex/mock_classes.py b/tests/components/plex/mock_classes.py index c6f1aeda9b7..92844f755d6 100644 --- a/tests/components/plex/mock_classes.py +++ b/tests/components/plex/mock_classes.py @@ -67,7 +67,7 @@ GDM_CLIENT_PAYLOAD = [ class MockGDM: """Mock a GDM instance.""" - def __init__(self, disabled=False): + def __init__(self, disabled=False) -> None: """Initialize the object.""" self.entries = [] self.disabled = disabled diff --git a/tests/components/plex/test_playback.py b/tests/components/plex/test_playback.py index 183a779c940..c4206bd5f3e 100644 --- a/tests/components/plex/test_playback.py +++ b/tests/components/plex/test_playback.py @@ -28,7 +28,7 @@ class MockPlexMedia: viewOffset = 333 _server = Mock(_baseurl=PLEX_DIRECT_URL) - def __init__(self, title, mediatype): + def __init__(self, title, mediatype) -> None: """Initialize the instance.""" self.listType = mediatype self.title = title diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 1f8f8baf02b..3f0e0b92056 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -176,7 +176,7 @@ async def test_dump_log_object( await hass.async_block_till_done() class DumpLogDummy: - def __init__(self, fail): + def __init__(self, fail) -> None: self.fail = fail def __repr__(self): diff --git a/tests/components/recorder/db_schema_16.py b/tests/components/recorder/db_schema_16.py index 24786b1ad44..ffee438f2e9 100644 --- a/tests/components/recorder/db_schema_16.py +++ b/tests/components/recorder/db_schema_16.py @@ -356,7 +356,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_18.py b/tests/components/recorder/db_schema_18.py index db6fbb78f56..09cd41d9e33 100644 --- a/tests/components/recorder/db_schema_18.py +++ b/tests/components/recorder/db_schema_18.py @@ -369,7 +369,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_22.py b/tests/components/recorder/db_schema_22.py index cd0dc52a927..d05cb48ff6f 100644 --- a/tests/components/recorder/db_schema_22.py +++ b/tests/components/recorder/db_schema_22.py @@ -488,7 +488,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23.py b/tests/components/recorder/db_schema_23.py index 9187d271216..9dffadaa0cc 100644 --- a/tests/components/recorder/db_schema_23.py +++ b/tests/components/recorder/db_schema_23.py @@ -478,7 
+478,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23_with_newer_columns.py b/tests/components/recorder/db_schema_23_with_newer_columns.py index 9f902523c64..4343f53d00d 100644 --- a/tests/components/recorder/db_schema_23_with_newer_columns.py +++ b/tests/components/recorder/db_schema_23_with_newer_columns.py @@ -602,7 +602,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 04fe762c780..d850778d214 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -990,7 +990,7 @@ async def test_execute_stmt_lambda_element( all_calls = 0 class MockExecutor: - def __init__(self, stmt): + def __init__(self, stmt) -> None: assert isinstance(stmt, StatementLambdaElement) def all(self): diff --git a/tests/components/reddit/test_sensor.py b/tests/components/reddit/test_sensor.py index 52dac07d621..98cf2b79db3 100644 --- a/tests/components/reddit/test_sensor.py +++ b/tests/components/reddit/test_sensor.py @@ -66,7 +66,7 @@ INVALID_SORT_BY_CONFIG = { class ObjectView: """Use dict properties as attributes.""" - def __init__(self, d): + def __init__(self, d) -> None: """Set dict as internal dict.""" self.__dict__ = d diff --git a/tests/components/samsungtv/conftest.py b/tests/components/samsungtv/conftest.py index 752bce3b960..ec12031ef96 100644 --- a/tests/components/samsungtv/conftest.py +++ b/tests/components/samsungtv/conftest.py @@ -179,7 +179,7 @@ def rest_api_fixture_non_ssl_only() -> Mock: class MockSamsungTVAsyncRest: """Mock for a MockSamsungTVAsyncRest.""" - def __init__(self, host, session, port, timeout): + def __init__(self, host, session, port, timeout) -> None: """Mock a MockSamsungTVAsyncRest.""" self.port = port self.host = host diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 4f14a2aa132..840fcb4dcdb 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -80,7 +80,7 @@ class SonosMockService: class SonosMockEvent: """Mock a sonos Event used in callbacks.""" - def __init__(self, soco, service, variables): + def __init__(self, soco, service, variables) -> None: """Initialize the instance.""" self.sid = f"{soco.uid}_sub0000000001" self.seq = "0" diff --git a/tests/components/stream/test_hls.py b/tests/components/stream/test_hls.py index ce66848a2b1..babd7c0b748 100644 --- a/tests/components/stream/test_hls.py +++ b/tests/components/stream/test_hls.py @@ -54,7 +54,7 @@ async def setup_component(hass: HomeAssistant) -> None: class HlsClient: """Test fixture for fetching the hls stream.""" - def __init__(self, http_client, parsed_url): + def __init__(self, http_client, parsed_url) -> None: """Initialize HlsClient.""" self.http_client = http_client self.parsed_url = parsed_url diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index d0f89364778..d61530f9076 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -100,7 
+100,7 @@ def mock_stream_settings(hass: HomeAssistant) -> None: class FakeAvInputStream: """A fake pyav Stream.""" - def __init__(self, name, time_base): + def __init__(self, name, time_base) -> None: """Initialize the stream.""" self.name = name self.time_base = time_base @@ -142,7 +142,7 @@ class PacketSequence: exercise corner cases. """ - def __init__(self, num_packets): + def __init__(self, num_packets) -> None: """Initialize the sequence with the number of packets it provides.""" self.packet = 0 self.num_packets = num_packets @@ -181,7 +181,7 @@ class PacketSequence: class FakePyAvContainer: """A fake container returned by mock av.open for a stream.""" - def __init__(self, video_stream, audio_stream): + def __init__(self, video_stream, audio_stream) -> None: """Initialize the fake container.""" # Tests can override this to trigger different worker behavior self.packets = PacketSequence(0) @@ -220,7 +220,7 @@ class FakePyAvBuffer: """Create an output buffer that captures packets for test to examine.""" class FakeAvOutputStream: - def __init__(self, capture_packets): + def __init__(self, capture_packets) -> None: self.capture_packets = capture_packets self.type = "ignored-type" @@ -266,7 +266,7 @@ class FakePyAvBuffer: class MockPyAv: """Mocks out av.open.""" - def __init__(self, video=True, audio=False): + def __init__(self, video=True, audio=False) -> None: """Initialize the MockPyAv.""" video_stream = VIDEO_STREAM if video else None audio_stream = AUDIO_STREAM if audio else None diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index 099a2c2b40a..9484d3393d7 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -25,7 +25,7 @@ def mock_session_response(*args, **kwargs): """Mock data generation for session response.""" class MockSessionResponse: - def __init__(self, text, status_code): + def __init__(self, text, status_code) -> None: self.text = text self.status_code = status_code diff --git a/tests/components/venstar/__init__.py b/tests/components/venstar/__init__.py index 116a3be0925..6a40212b793 100644 --- a/tests/components/venstar/__init__.py +++ b/tests/components/venstar/__init__.py @@ -15,7 +15,7 @@ class VenstarColorTouchMock: pin=None, proto="http", SSLCert=False, - ): + ) -> None: """Initialize the Venstar library.""" self.status = {} self.model = "COLORTOUCH" diff --git a/tests/components/vera/common.py b/tests/components/vera/common.py index 5e0fac6c84a..c5e3a5d4931 100644 --- a/tests/components/vera/common.py +++ b/tests/components/vera/common.py @@ -83,7 +83,7 @@ def new_simple_controller_config( class ComponentFactory: """Factory class.""" - def __init__(self, vera_controller_class_mock): + def __init__(self, vera_controller_class_mock) -> None: """Initialize the factory.""" self.vera_controller_class_mock = vera_controller_class_mock diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index f33c7839c72..923509dea2c 100644 --- a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -30,7 +30,7 @@ from .const import ( class MockInput: """Mock Vizio device input.""" - def __init__(self, name): + def __init__(self, name) -> None: """Initialize mock Vizio device input.""" self.meta_name = name self.name = name diff --git a/tests/components/ws66i/test_media_player.py b/tests/components/ws66i/test_media_player.py index a66e79bf9e0..aa67ea24b63 100644 --- 
a/tests/components/ws66i/test_media_player.py +++ b/tests/components/ws66i/test_media_player.py @@ -73,7 +73,7 @@ class AttrDict(dict): class MockWs66i: """Mock for pyws66i object.""" - def __init__(self, fail_open=False, fail_zone_check=None): + def __init__(self, fail_open=False, fail_zone_check=None) -> None: """Init mock object.""" self.zones = defaultdict( lambda: AttrDict( diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 804b800aaef..d96da8f6854 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -25,7 +25,7 @@ def _create_zone_mock(name, url): class FakeYamahaDevice: """A fake Yamaha device.""" - def __init__(self, ctrl_url, name, zones=None): + def __init__(self, ctrl_url, name, zones=None) -> None: """Initialize the fake Yamaha device.""" self.ctrl_url = ctrl_url self.name = name diff --git a/tests/components/yeelight/__init__.py b/tests/components/yeelight/__init__.py index 2de064cf567..bdd8cdda312 100644 --- a/tests/components/yeelight/__init__.py +++ b/tests/components/yeelight/__init__.py @@ -109,7 +109,7 @@ CONFIG_ENTRY_DATA = {CONF_ID: ID} class MockAsyncBulb: """A mock for yeelight.aio.AsyncBulb.""" - def __init__(self, model, bulb_type, cannot_connect): + def __init__(self, model, bulb_type, cannot_connect) -> None: """Init the mock.""" self.model = model self.bulb_type = bulb_type diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index b5eda374f01..de1db0e4847 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -174,7 +174,7 @@ class AiohttpClientMockResponse: headers=None, side_effect=None, closing=None, - ): + ) -> None: """Initialize a fake response.""" if json is not None: text = json_dumps(json) From 9b33d2f17ee688d3468462bc9e98f8e9634b4440 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 14 Aug 2024 11:00:11 -0500 Subject: [PATCH 0740/1635] Fix paste error in homekit climate update (#123943) --- homeassistant/components/homekit/type_thermostats.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/homekit/type_thermostats.py b/homeassistant/components/homekit/type_thermostats.py index a97f26b7abb..91bab2d470a 100644 --- a/homeassistant/components/homekit/type_thermostats.py +++ b/homeassistant/components/homekit/type_thermostats.py @@ -625,11 +625,10 @@ class Thermostat(HomeAccessory): ) # Set current operation mode for supported thermostats - if (hvac_action := attributes.get(ATTR_HVAC_ACTION)) and ( - homekit_hvac_action := HC_HASS_TO_HOMEKIT_ACTION.get(hvac_action), - HC_HEAT_COOL_OFF, - ): - self.char_current_heat_cool.set_value(homekit_hvac_action) + if hvac_action := attributes.get(ATTR_HVAC_ACTION): + self.char_current_heat_cool.set_value( + HC_HASS_TO_HOMEKIT_ACTION.get(hvac_action, HC_HEAT_COOL_OFF) + ) # Update current temperature current_temp = _get_current_temperature(new_state, self._unit) From 0790611b93b506c864810d0b7ba363eac40a9d13 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Wed, 14 Aug 2024 20:39:15 +0200 Subject: [PATCH 0741/1635] Fix PI-Hole update entity when no update available (#123930) show installed version when no update available --- homeassistant/components/pi_hole/update.py | 8 +++- tests/components/pi_hole/__init__.py | 23 +++++++++-- tests/components/pi_hole/test_config_flow.py | 2 +- tests/components/pi_hole/test_update.py | 43 +++++++++++++++++++- 4 files changed, 70 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/pi_hole/update.py b/homeassistant/components/pi_hole/update.py index db78d3ab0a5..c1a435f628c 100644 --- a/homeassistant/components/pi_hole/update.py +++ b/homeassistant/components/pi_hole/update.py @@ -22,6 +22,7 @@ class PiHoleUpdateEntityDescription(UpdateEntityDescription): installed_version: Callable[[dict], str | None] = lambda api: None latest_version: Callable[[dict], str | None] = lambda api: None + has_update: Callable[[dict], bool | None] = lambda api: None release_base_url: str | None = None title: str | None = None @@ -34,6 +35,7 @@ UPDATE_ENTITY_TYPES: tuple[PiHoleUpdateEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, installed_version=lambda versions: versions.get("core_current"), latest_version=lambda versions: versions.get("core_latest"), + has_update=lambda versions: versions.get("core_update"), release_base_url="https://github.com/pi-hole/pi-hole/releases/tag", ), PiHoleUpdateEntityDescription( @@ -43,6 +45,7 @@ UPDATE_ENTITY_TYPES: tuple[PiHoleUpdateEntityDescription, ...] = ( entity_category=EntityCategory.DIAGNOSTIC, installed_version=lambda versions: versions.get("web_current"), latest_version=lambda versions: versions.get("web_latest"), + has_update=lambda versions: versions.get("web_update"), release_base_url="https://github.com/pi-hole/AdminLTE/releases/tag", ), PiHoleUpdateEntityDescription( @@ -52,6 +55,7 @@ UPDATE_ENTITY_TYPES: tuple[PiHoleUpdateEntityDescription, ...] 
= ( entity_category=EntityCategory.DIAGNOSTIC, installed_version=lambda versions: versions.get("FTL_current"), latest_version=lambda versions: versions.get("FTL_latest"), + has_update=lambda versions: versions.get("FTL_update"), release_base_url="https://github.com/pi-hole/FTL/releases/tag", ), ) @@ -110,7 +114,9 @@ class PiHoleUpdateEntity(PiHoleEntity, UpdateEntity): def latest_version(self) -> str | None: """Latest version available for install.""" if isinstance(self.api.versions, dict): - return self.entity_description.latest_version(self.api.versions) + if self.entity_description.has_update(self.api.versions): + return self.entity_description.latest_version(self.api.versions) + return self.installed_version return None @property diff --git a/tests/components/pi_hole/__init__.py b/tests/components/pi_hole/__init__.py index 38231778624..993f6a2571c 100644 --- a/tests/components/pi_hole/__init__.py +++ b/tests/components/pi_hole/__init__.py @@ -33,7 +33,7 @@ ZERO_DATA = { "unique_domains": 0, } -SAMPLE_VERSIONS = { +SAMPLE_VERSIONS_WITH_UPDATES = { "core_current": "v5.5", "core_latest": "v5.6", "core_update": True, @@ -45,6 +45,18 @@ SAMPLE_VERSIONS = { "FTL_update": True, } +SAMPLE_VERSIONS_NO_UPDATES = { + "core_current": "v5.5", + "core_latest": "v5.5", + "core_update": False, + "web_current": "v5.7", + "web_latest": "v5.7", + "web_update": False, + "FTL_current": "v5.10", + "FTL_latest": "v5.10", + "FTL_update": False, +} + HOST = "1.2.3.4" PORT = 80 LOCATION = "location" @@ -103,7 +115,9 @@ CONFIG_ENTRY_WITHOUT_API_KEY = { SWITCH_ENTITY_ID = "switch.pi_hole" -def _create_mocked_hole(raise_exception=False, has_versions=True, has_data=True): +def _create_mocked_hole( + raise_exception=False, has_versions=True, has_update=True, has_data=True +): mocked_hole = MagicMock() type(mocked_hole).get_data = AsyncMock( side_effect=HoleError("") if raise_exception else None @@ -118,7 +132,10 @@ def _create_mocked_hole(raise_exception=False, has_versions=True, has_data=True) else: mocked_hole.data = [] if has_versions: - mocked_hole.versions = SAMPLE_VERSIONS + if has_update: + mocked_hole.versions = SAMPLE_VERSIONS_WITH_UPDATES + else: + mocked_hole.versions = SAMPLE_VERSIONS_NO_UPDATES else: mocked_hole.versions = None return mocked_hole diff --git a/tests/components/pi_hole/test_config_flow.py b/tests/components/pi_hole/test_config_flow.py index 326b01b9a7a..d13712d6f76 100644 --- a/tests/components/pi_hole/test_config_flow.py +++ b/tests/components/pi_hole/test_config_flow.py @@ -96,7 +96,7 @@ async def test_flow_user_without_api_key(hass: HomeAssistant) -> None: async def test_flow_user_invalid(hass: HomeAssistant) -> None: """Test user initialized flow with invalid server.""" - mocked_hole = _create_mocked_hole(True) + mocked_hole = _create_mocked_hole(raise_exception=True) with _patch_config_flow_hole(mocked_hole): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER}, data=CONFIG_FLOW_USER diff --git a/tests/components/pi_hole/test_update.py b/tests/components/pi_hole/test_update.py index 091b553c475..705e9f9c08d 100644 --- a/tests/components/pi_hole/test_update.py +++ b/tests/components/pi_hole/test_update.py @@ -1,7 +1,7 @@ """Test pi_hole component.""" from homeassistant.components import pi_hole -from homeassistant.const import STATE_ON, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant from . 
import CONFIG_DATA_DEFAULTS, _create_mocked_hole, _patch_init_hole @@ -80,3 +80,44 @@ async def test_update_no_versions(hass: HomeAssistant) -> None: assert state.attributes["installed_version"] is None assert state.attributes["latest_version"] is None assert state.attributes["release_url"] is None + + +async def test_update_no_updates(hass: HomeAssistant) -> None: + """Tests update entity when no latest data available.""" + mocked_hole = _create_mocked_hole(has_versions=True, has_update=False) + entry = MockConfigEntry(domain=pi_hole.DOMAIN, data=CONFIG_DATA_DEFAULTS) + entry.add_to_hass(hass) + with _patch_init_hole(mocked_hole): + assert await hass.config_entries.async_setup(entry.entry_id) + + await hass.async_block_till_done() + + state = hass.states.get("update.pi_hole_core_update_available") + assert state.name == "Pi-Hole Core update available" + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "v5.5" + assert state.attributes["latest_version"] == "v5.5" + assert ( + state.attributes["release_url"] + == "https://github.com/pi-hole/pi-hole/releases/tag/v5.5" + ) + + state = hass.states.get("update.pi_hole_ftl_update_available") + assert state.name == "Pi-Hole FTL update available" + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "v5.10" + assert state.attributes["latest_version"] == "v5.10" + assert ( + state.attributes["release_url"] + == "https://github.com/pi-hole/FTL/releases/tag/v5.10" + ) + + state = hass.states.get("update.pi_hole_web_update_available") + assert state.name == "Pi-Hole Web update available" + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "v5.7" + assert state.attributes["latest_version"] == "v5.7" + assert ( + state.attributes["release_url"] + == "https://github.com/pi-hole/AdminLTE/releases/tag/v5.7" + ) From 9c4677a3c650997660be99a898546794dd040c9c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 21:22:06 +0200 Subject: [PATCH 0742/1635] Add comment clarifying recorder migration to schema version 16 (#123902) --- homeassistant/components/recorder/migration.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index ccdaf3082e0..cbe0dcd7258 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -1116,6 +1116,10 @@ class _SchemaVersion16Migrator(_SchemaVersionMigrator, target_version=16): SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL, ): + # Version 16 changes settings for the foreign key constraint on + # states.old_state_id. Dropping the constraint is not really correct + # we should have recreated it instead. Recreating the constraint now + # happens in the migration to schema version 45. 
_drop_foreign_key_constraints( self.session_maker, self.engine, TABLE_STATES, "old_state_id" ) From 58851f0048cadbca502938aaa0f1bbba9997153a Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Wed, 14 Aug 2024 21:36:11 +0200 Subject: [PATCH 0743/1635] Bump pypck to 0.7.20 (#123948) --- homeassistant/components/lcn/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 6153ecf4540..44a4d683c81 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.17"] + "requirements": ["pypck==0.7.20"] } diff --git a/requirements_all.txt b/requirements_all.txt index c5fe1e2171a..6908004c2d1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2097,7 +2097,7 @@ pyownet==0.10.0.post1 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.17 +pypck==0.7.20 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b84660fb0b0..189577764c1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1678,7 +1678,7 @@ pyoverkiz==1.13.14 pyownet==0.10.0.post1 # homeassistant.components.lcn -pypck==0.7.17 +pypck==0.7.20 # homeassistant.components.pjlink pypjlink2==1.2.1 From aee1be1e643df46ea22bcb2ede8db25cde9e1a7e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 14 Aug 2024 21:47:47 +0200 Subject: [PATCH 0744/1635] Use `elif` in alexa handlers code to avoid additional checks (#123853) --- homeassistant/components/alexa/handlers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 53bf6702138..3571f436ff6 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -1206,7 +1206,7 @@ async def async_api_set_mode( raise AlexaInvalidValueError(msg) # Remote Activity - if instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}": + elif instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}": activity = mode.split(".")[1] activities: list[str] | None = entity.attributes.get(remote.ATTR_ACTIVITY_LIST) if activity != PRESET_MODE_NA and activities and activity in activities: From 392f64d33e23c5087ac5f26a3fa0677ea338c069 Mon Sep 17 00:00:00 2001 From: ilan <31193909+iloveicedgreentea@users.noreply.github.com> Date: Wed, 14 Aug 2024 16:06:57 -0400 Subject: [PATCH 0745/1635] Fix Madvr sensor values on startup (#122479) * fix: add startup values * fix: update snap * fix: use native value to show None --- homeassistant/components/madvr/sensor.py | 13 ++++++++++++- tests/components/madvr/test_sensors.py | 13 +++++++++++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/madvr/sensor.py b/homeassistant/components/madvr/sensor.py index 6f0933ac879..047b8bb83e6 100644 --- a/homeassistant/components/madvr/sensor.py +++ b/homeassistant/components/madvr/sensor.py @@ -277,4 +277,15 @@ class MadvrSensor(MadVREntity, SensorEntity): @property def native_value(self) -> float | str | None: """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator) + val = self.entity_description.value_fn(self.coordinator) + # check if sensor 
is enum + if self.entity_description.device_class == SensorDeviceClass.ENUM: + if ( + self.entity_description.options + and val in self.entity_description.options + ): + return val + # return None for values that are not in the options + return None + + return val diff --git a/tests/components/madvr/test_sensors.py b/tests/components/madvr/test_sensors.py index 25dcc1cdcca..ddc01fc737a 100644 --- a/tests/components/madvr/test_sensors.py +++ b/tests/components/madvr/test_sensors.py @@ -93,3 +93,16 @@ async def test_sensor_setup_and_states( # test get_temperature ValueError assert get_temperature(None, "temp_key") is None + + # test startup placeholder values + update_callback({"outgoing_bit_depth": "0bit"}) + await hass.async_block_till_done() + assert ( + hass.states.get("sensor.madvr_envy_outgoing_bit_depth").state == STATE_UNKNOWN + ) + + update_callback({"outgoing_color_space": "?"}) + await hass.async_block_till_done() + assert ( + hass.states.get("sensor.madvr_envy_outgoing_color_space").state == STATE_UNKNOWN + ) From e6ed3c8c5ccbecb31003845a596a968aa15898a7 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 14 Aug 2024 22:37:23 +0200 Subject: [PATCH 0746/1635] Raise on database error in recorder.migration function (#123644) * Raise on database error in recorder.migration._update_states_table_with_foreign_key_options * Improve test coverage * Fix test * Fix test --- .../components/recorder/migration.py | 1 + tests/components/recorder/test_migrate.py | 58 +++++++++++++++++++ 2 files changed, 59 insertions(+) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index cbe0dcd7258..ebec536dc48 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -639,6 +639,7 @@ def _update_states_table_with_foreign_key_options( _LOGGER.exception( "Could not update foreign options in %s table", TABLE_STATES ) + raise def _drop_foreign_key_constraints( diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 625a5023287..76387f56e9f 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -199,6 +199,64 @@ async def test_database_migration_failed( assert len(mock_dismiss.mock_calls) == expected_pn_dismiss +@pytest.mark.parametrize( + ( + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + ("DropConstraint", False, 1, 0), # This makes migration to step 11 fail + ], +) +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_database_migration_failed_step_11( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, +) -> None: + """Test we notify if the migration fails.""" + assert recorder.util.async_migration_in_progress(hass) is False + + with ( + patch( + "homeassistant.components.recorder.core.create_engine", + new=create_engine_test, + ), + patch( + f"homeassistant.components.recorder.migration.{func_to_patch}", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + patch( + "homeassistant.components.persistent_notification.create", + side_effect=pn.create, + ) as mock_create, + patch( + "homeassistant.components.persistent_notification.dismiss", + side_effect=pn.dismiss, + ) as mock_dismiss, + ): + await 
async_setup_recorder_instance( + hass, wait_recorder=False, expected_setup_result=expected_setup_result + ) + hass.states.async_set("my.entity", "on", {}) + hass.states.async_set("my.entity", "off", {}) + await hass.async_block_till_done() + await hass.async_add_executor_job(recorder.get_instance(hass).join) + await hass.async_block_till_done() + + assert recorder.util.async_migration_in_progress(hass) is False + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss + + @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") async def test_live_database_migration_encounters_corruption( From e6b3d35cdf67d9d3410b336c0bce5728898b2754 Mon Sep 17 00:00:00 2001 From: Lars Date: Wed, 14 Aug 2024 22:53:29 +0200 Subject: [PATCH 0747/1635] Remove unnecessary check in fritz light (#123829) * Remove unnecessary check in fritz light * Revert remove SUPPORTED_COLOR_MODES --- homeassistant/components/fritzbox/light.py | 7 ++----- tests/components/fritzbox/__init__.py | 1 + 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index 65446dc3e04..1009b0fb368 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity -from .const import COLOR_MODE, COLOR_TEMP_MODE, LOGGER +from .const import COLOR_MODE, LOGGER from .coordinator import FritzboxConfigEntry SUPPORTED_COLOR_MODES = {ColorMode.COLOR_TEMP, ColorMode.HS} @@ -80,11 +80,8 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): return (hue, float(saturation) * 100.0 / 255.0) @property - def color_temp_kelvin(self) -> int | None: + def color_temp_kelvin(self) -> int: """Return the CT color value.""" - if self.data.color_mode != COLOR_TEMP_MODE: - return None - return self.data.color_temp # type: ignore [no-any-return] @property diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index 09e0aeaee51..bd68615212d 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -180,6 +180,7 @@ class FritzDeviceLightMock(FritzEntityBaseMock): level = 100 present = True state = True + color_temp = None class FritzDeviceCoverMock(FritzEntityBaseMock): From 667414a457360c9c21d14594a5e357eb2e8ed906 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 08:25:18 +0200 Subject: [PATCH 0748/1635] Raise on database error in recorder.migration._drop_foreign_key_constraints (#123645) * Raise on database error in recorder.migration._drop_foreign_key_constraints * Fix test * Fix test * Revert "Fix test" This reverts commit 940b8cb506e912826d43d09d7697c10888bdf685. 
* Update test * Improve test coverage * Disable test for SQLite --- .../components/recorder/migration.py | 18 ++-- tests/components/recorder/conftest.py | 90 ++++++++++++++++--- tests/components/recorder/test_migrate.py | 78 +++++++++++++++- .../components/recorder/test_v32_migration.py | 75 ++++++++++------ 4 files changed, 209 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index ebec536dc48..c34d05b3ade 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -644,7 +644,7 @@ def _update_states_table_with_foreign_key_options( def _drop_foreign_key_constraints( session_maker: Callable[[], Session], engine: Engine, table: str, column: str -) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]: +) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: """Drop foreign key constraints for a table on specific columns. This is not supported for SQLite because it does not support @@ -671,7 +671,6 @@ def _drop_foreign_key_constraints( if foreign_key["name"] and foreign_key["constrained_columns"] == [column] ] - fk_remove_ok = True for drop in drops: with session_scope(session=session_maker()) as session: try: @@ -683,9 +682,9 @@ def _drop_foreign_key_constraints( TABLE_STATES, column, ) - fk_remove_ok = False + raise - return fk_remove_ok, dropped_constraints + return dropped_constraints def _restore_foreign_key_constraints( @@ -2183,9 +2182,14 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: # so we have to rebuild the table fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States) else: - fk_remove_ok, _ = _drop_foreign_key_constraints( - session_maker, instance.engine, TABLE_STATES, "event_id" - ) + try: + _drop_foreign_key_constraints( + session_maker, instance.engine, TABLE_STATES, "event_id" + ) + except (InternalError, OperationalError): + fk_remove_ok = False + else: + fk_remove_ok = True if fk_remove_ok: _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) instance.use_legacy_events_index = False diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index f562ba163ba..d66cf4fe2ec 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -1,12 +1,15 @@ """Fixtures for the recorder component tests.""" -from collections.abc import AsyncGenerator, Generator +from collections.abc import Callable, Generator +from contextlib import contextmanager from dataclasses import dataclass from functools import partial import threading from unittest.mock import Mock, patch import pytest +from sqlalchemy.engine import Engine +from sqlalchemy.orm.session import Session from homeassistant.components import recorder from homeassistant.components.recorder import db_schema @@ -57,31 +60,69 @@ def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None class InstrumentedMigration: """Container to aid controlling migration progress.""" - migration_done: threading.Event + live_migration_done: threading.Event + live_migration_done_stall: threading.Event migration_stall: threading.Event migration_started: threading.Event migration_version: int | None + non_live_migration_done: threading.Event + non_live_migration_done_stall: threading.Event apply_update_mock: Mock + stall_on_schema_version: int + apply_update_stalled: threading.Event -@pytest.fixture -async def instrument_migration( 
+@pytest.fixture(name="instrument_migration") +def instrument_migration_fixture( hass: HomeAssistant, -) -> AsyncGenerator[InstrumentedMigration]: +) -> Generator[InstrumentedMigration]: + """Instrument recorder migration.""" + with instrument_migration(hass) as instrumented_migration: + yield instrumented_migration + + +@contextmanager +def instrument_migration( + hass: HomeAssistant, +) -> Generator[InstrumentedMigration]: """Instrument recorder migration.""" real_migrate_schema_live = recorder.migration.migrate_schema_live real_migrate_schema_non_live = recorder.migration.migrate_schema_non_live real_apply_update = recorder.migration._apply_update - def _instrument_migrate_schema(real_func, *args): + def _instrument_migrate_schema_live(real_func, *args): + """Control migration progress and check results.""" + return _instrument_migrate_schema( + real_func, + args, + instrumented_migration.live_migration_done, + instrumented_migration.live_migration_done_stall, + ) + + def _instrument_migrate_schema_non_live(real_func, *args): + """Control migration progress and check results.""" + return _instrument_migrate_schema( + real_func, + args, + instrumented_migration.non_live_migration_done, + instrumented_migration.non_live_migration_done_stall, + ) + + def _instrument_migrate_schema( + real_func, + args, + migration_done: threading.Event, + migration_done_stall: threading.Event, + ): """Control migration progress and check results.""" instrumented_migration.migration_started.set() try: migration_result = real_func(*args) except Exception: - instrumented_migration.migration_done.set() + migration_done.set() + migration_done_stall.wait() raise # Check and report the outcome of the migration; if migration fails @@ -93,22 +134,36 @@ async def instrument_migration( .first() ) instrumented_migration.migration_version = res.schema_version - instrumented_migration.migration_done.set() + migration_done.set() + migration_done_stall.wait() return migration_result - def _instrument_apply_update(*args): + def _instrument_apply_update( + instance: recorder.Recorder, + hass: HomeAssistant, + engine: Engine, + session_maker: Callable[[], Session], + new_version: int, + old_version: int, + ): """Control migration progress.""" - instrumented_migration.migration_stall.wait() - real_apply_update(*args) + if new_version == instrumented_migration.stall_on_schema_version: + instrumented_migration.apply_update_stalled.set() + instrumented_migration.migration_stall.wait() + real_apply_update( + instance, hass, engine, session_maker, new_version, old_version + ) with ( patch( "homeassistant.components.recorder.migration.migrate_schema_live", - wraps=partial(_instrument_migrate_schema, real_migrate_schema_live), + wraps=partial(_instrument_migrate_schema_live, real_migrate_schema_live), ), patch( "homeassistant.components.recorder.migration.migrate_schema_non_live", - wraps=partial(_instrument_migrate_schema, real_migrate_schema_non_live), + wraps=partial( + _instrument_migrate_schema_non_live, real_migrate_schema_non_live + ), ), patch( "homeassistant.components.recorder.migration._apply_update", @@ -116,11 +171,18 @@ async def instrument_migration( ) as apply_update_mock, ): instrumented_migration = InstrumentedMigration( - migration_done=threading.Event(), + live_migration_done=threading.Event(), + live_migration_done_stall=threading.Event(), migration_stall=threading.Event(), migration_started=threading.Event(), migration_version=None, + non_live_migration_done=threading.Event(), + 
non_live_migration_done_stall=threading.Event(), apply_update_mock=apply_update_mock, + stall_on_schema_version=1, + apply_update_stalled=threading.Event(), ) + instrumented_migration.live_migration_done_stall.set() + instrumented_migration.non_live_migration_done_stall.set() yield instrumented_migration diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 76387f56e9f..eadcd2bd9ad 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -257,6 +257,76 @@ async def test_database_migration_failed_step_11( assert len(mock_dismiss.mock_calls) == expected_pn_dismiss +@pytest.mark.parametrize( + ( + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + ("DropConstraint", False, 2, 1), # This makes migration to step 44 fail + ], +) +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_database_migration_failed_step_44( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, +) -> None: + """Test we notify if the migration fails.""" + assert recorder.util.async_migration_in_progress(hass) is False + instrument_migration.stall_on_schema_version = 44 + + with ( + patch( + "homeassistant.components.recorder.core.create_engine", + new=create_engine_test, + ), + patch( + "homeassistant.components.persistent_notification.create", + side_effect=pn.create, + ) as mock_create, + patch( + "homeassistant.components.persistent_notification.dismiss", + side_effect=pn.dismiss, + ) as mock_dismiss, + ): + await async_setup_recorder_instance( + hass, + wait_recorder=False, + wait_recorder_setup=False, + expected_setup_result=expected_setup_result, + ) + # Wait for migration to reach schema version 44 + await hass.async_add_executor_job( + instrument_migration.apply_update_stalled.wait + ) + + # Make it fail + with patch( + f"homeassistant.components.recorder.migration.{func_to_patch}", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ): + instrument_migration.migration_stall.set() + hass.states.async_set("my.entity", "on", {}) + hass.states.async_set("my.entity", "off", {}) + await hass.async_block_till_done() + await hass.async_add_executor_job(recorder.get_instance(hass).join) + await hass.async_block_till_done() + + assert recorder.util.async_migration_in_progress(hass) is False + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss + + @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") async def test_live_database_migration_encounters_corruption( @@ -611,7 +681,7 @@ async def test_schema_migrate( assert recorder.util.async_migration_is_live(hass) == live instrument_migration.migration_stall.set() await hass.async_block_till_done() - await hass.async_add_executor_job(instrument_migration.migration_done.wait) + await hass.async_add_executor_job(instrument_migration.live_migration_done.wait) await async_wait_recording_done(hass) assert instrument_migration.migration_version == db_schema.SCHEMA_VERSION assert setup_run.called @@ -963,7 +1033,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: for table, column, _, _ in constraints_to_recreate for 
dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column - )[1] + ) ] assert dropped_constraints_1 == expected_dropped_constraints[db_engine] @@ -975,7 +1045,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: for table, column, _, _ in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column - )[1] + ) ] assert dropped_constraints_2 == [] @@ -994,7 +1064,7 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: for table, column, _, _ in constraints_to_recreate for dropped_constraint in migration._drop_foreign_key_constraints( session_maker, engine, table, column - )[1] + ) ] assert dropped_constraints_3 == expected_dropped_constraints[db_engine] diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 5266e55851c..c9ba330b758 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -19,6 +19,7 @@ from homeassistant.core import Event, EventOrigin, State import homeassistant.util.dt as dt_util from .common import async_wait_recording_done +from .conftest import instrument_migration from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -637,6 +638,10 @@ async def test_out_of_disk_space_while_removing_foreign_key( This case tests the migration still happens if ix_states_event_id is removed from the states table. + + Note that the test is somewhat forced; the states.event_id foreign key constraint is + removed when migrating to schema version 44, inspecting the schema in + cleanup_legacy_states_event_ids is not likely to fail. """ importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -737,36 +742,52 @@ async def test_out_of_disk_space_while_removing_foreign_key( assert "ix_states_entity_id_last_updated_ts" in states_index_names - # Simulate out of disk space while removing the foreign key from the states table by - # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL - with ( - patch( - "homeassistant.components.recorder.migration.DropConstraint", - side_effect=OperationalError( - None, None, OSError("No space left on device") - ), - ), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() + async with async_test_home_assistant() as hass: + with instrument_migration(hass) as instrumented_migration: + # Allow migration to start, but stall when live migration is completed + instrumented_migration.migration_stall.set() + instrumented_migration.live_migration_done_stall.clear() - # We need to wait for all the migration tasks to complete - # before we can check the database. 
- for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) + async with async_test_recorder(hass, wait_recorder=False) as instance: + await hass.async_block_till_done() - states_indexes = await instance.async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - assert await instance.async_add_executor_job(_get_event_id_foreign_keys) + # Wait for live migration to complete + await hass.async_add_executor_job( + instrumented_migration.live_migration_done.wait + ) - await hass.async_stop() + # Simulate out of disk space while removing the foreign key from the states table by + # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL + with ( + patch( + "homeassistant.components.recorder.migration.sqlalchemy.inspect", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + ): + instrumented_migration.live_migration_done_stall.set() + # We need to wait for all the migration tasks to complete + # before we can check the database. + for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + # The states.event_id foreign key constraint was removed when + # migration to schema version 44 + assert ( + await instance.async_add_executor_job( + _get_event_id_foreign_keys + ) + is None + ) + + await hass.async_stop() # Now run it again to verify the table rebuild tries again caplog.clear() From 9911aa4ede2e81bf9df5ff924518555bc8c625a8 Mon Sep 17 00:00:00 2001 From: Thomas55555 <59625598+Thomas55555@users.noreply.github.com> Date: Thu, 15 Aug 2024 08:29:06 +0200 Subject: [PATCH 0749/1635] Enable confirm error button in Husqvarna Automower by default (#123927) --- homeassistant/components/husqvarna_automower/button.py | 5 +++-- tests/components/husqvarna_automower/test_button.py | 1 - 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/husqvarna_automower/button.py b/homeassistant/components/husqvarna_automower/button.py index a9747108393..810dd4df92d 100644 --- a/homeassistant/components/husqvarna_automower/button.py +++ b/homeassistant/components/husqvarna_automower/button.py @@ -25,7 +25,9 @@ async def async_setup_entry( """Set up button platform.""" coordinator = entry.runtime_data async_add_entities( - AutomowerButtonEntity(mower_id, coordinator) for mower_id in coordinator.data + AutomowerButtonEntity(mower_id, coordinator) + for mower_id in coordinator.data + if coordinator.data[mower_id].capabilities.can_confirm_error ) @@ -33,7 +35,6 @@ class AutomowerButtonEntity(AutomowerAvailableEntity, ButtonEntity): """Defining the AutomowerButtonEntity.""" _attr_translation_key = "confirm_error" - _attr_entity_registry_enabled_default = False def __init__( self, diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index 6cc465df74b..5cbb9b893a8 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -34,7 +34,6 @@ from tests.common import ( @pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) 
-@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_button_states_and_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, From b042ebe4ff577a53ebc24611f34fd1ed45eebf7e Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Thu, 15 Aug 2024 08:37:10 +0200 Subject: [PATCH 0750/1635] Rename KNX Climate preset modes according to specification (#123964) * Rename KNX Climate preset modes according to specification * change icon for "standby" --- homeassistant/components/knx/climate.py | 45 ++++++++--------------- homeassistant/components/knx/const.py | 21 +---------- homeassistant/components/knx/icons.json | 14 +++++++ homeassistant/components/knx/strings.json | 16 ++++++++ tests/components/knx/test_climate.py | 17 ++++----- 5 files changed, 54 insertions(+), 59 deletions(-) diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index abce143c760..4932df55087 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -10,11 +10,10 @@ from xknx.devices import ( ClimateMode as XknxClimateMode, Device as XknxDevice, ) -from xknx.dpt.dpt_20 import HVACControllerMode +from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode from homeassistant import config_entries from homeassistant.components.climate import ( - PRESET_AWAY, ClimateEntity, ClimateEntityFeature, HVACAction, @@ -32,19 +31,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import ( - CONTROLLER_MODES, - CURRENT_HVAC_ACTIONS, - DATA_KNX_CONFIG, - DOMAIN, - PRESET_MODES, -) +from .const import CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxYamlEntity from .schema import ClimateSchema ATTR_COMMAND_VALUE = "command_value" CONTROLLER_MODES_INV = {value: key for key, value in CONTROLLER_MODES.items()} -PRESET_MODES_INV = {value: key for key, value in PRESET_MODES.items()} async def async_setup_entry( @@ -142,6 +134,7 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): _device: XknxClimate _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "knx_climate" _enable_turn_on_off_backwards_compatibility = False def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: @@ -165,8 +158,14 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - if self.preset_modes: + if ( + self._device.mode is not None + and self._device.mode.operation_modes # empty list when not writable + ): self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE + self._attr_preset_modes = [ + mode.name.lower() for mode in self._device.mode.operation_modes + ] self._attr_target_temperature_step = self._device.temperature_step self._attr_unique_id = ( f"{self._device.temperature.group_address_state}_" @@ -309,32 +308,18 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): Requires ClimateEntityFeature.PRESET_MODE. """ if self._device.mode is not None and self._device.mode.supports_operation_mode: - return PRESET_MODES.get(self._device.mode.operation_mode, PRESET_AWAY) + return self._device.mode.operation_mode.name.lower() return None - @property - def preset_modes(self) -> list[str] | None: - """Return a list of available preset modes. - - Requires ClimateEntityFeature.PRESET_MODE. 
- """ - if self._device.mode is None: - return None - - presets = [ - PRESET_MODES.get(operation_mode) - for operation_mode in self._device.mode.operation_modes - ] - return list(filter(None, presets)) - async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" if ( self._device.mode is not None - and self._device.mode.supports_operation_mode - and (knx_operation_mode := PRESET_MODES_INV.get(preset_mode)) is not None + and self._device.mode.operation_modes # empty list when not writable ): - await self._device.mode.set_operation_mode(knx_operation_mode) + await self._device.mode.set_operation_mode( + HVACOperationMode[preset_mode.upper()] + ) self.async_write_ha_state() @property diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index 6400f0fe466..9ceb18385cb 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -6,18 +6,10 @@ from collections.abc import Awaitable, Callable from enum import Enum from typing import Final, TypedDict -from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode +from xknx.dpt.dpt_20 import HVACControllerMode from xknx.telegram import Telegram -from homeassistant.components.climate import ( - PRESET_AWAY, - PRESET_COMFORT, - PRESET_ECO, - PRESET_NONE, - PRESET_SLEEP, - HVACAction, - HVACMode, -) +from homeassistant.components.climate import HVACAction, HVACMode from homeassistant.const import Platform DOMAIN: Final = "knx" @@ -174,12 +166,3 @@ CURRENT_HVAC_ACTIONS: Final = { HVACMode.FAN_ONLY: HVACAction.FAN, HVACMode.DRY: HVACAction.DRYING, } - -PRESET_MODES: Final = { - # Map DPT 20.102 HVAC operating modes to HA presets - HVACOperationMode.AUTO: PRESET_NONE, - HVACOperationMode.BUILDING_PROTECTION: PRESET_ECO, - HVACOperationMode.ECONOMY: PRESET_SLEEP, - HVACOperationMode.STANDBY: PRESET_AWAY, - HVACOperationMode.COMFORT: PRESET_COMFORT, -} diff --git a/homeassistant/components/knx/icons.json b/homeassistant/components/knx/icons.json index 736923375ee..2aee34219f6 100644 --- a/homeassistant/components/knx/icons.json +++ b/homeassistant/components/knx/icons.json @@ -1,5 +1,19 @@ { "entity": { + "climate": { + "knx_climate": { + "state_attributes": { + "preset_mode": { + "state": { + "comfort": "mdi:sofa", + "standby": "mdi:home-export-outline", + "economy": "mdi:leaf", + "building_protection": "mdi:sun-snowflake-variant" + } + } + } + } + }, "sensor": { "individual_address": { "default": "mdi:router-network" diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index d6e1e2f49f0..8d8692f6b7a 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -267,6 +267,22 @@ } }, "entity": { + "climate": { + "knx_climate": { + "state_attributes": { + "preset_mode": { + "name": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::name%]", + "state": { + "auto": "Auto", + "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", + "standby": "Standby", + "economy": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]", + "building_protection": "Building protection" + } + } + } + } + }, "sensor": { "individual_address": { "name": "[%key:component::knx::config::step::routing::data::individual_address%]" diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 9f198b48bd4..ec0498dc447 100644 --- 
a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -2,7 +2,7 @@ import pytest -from homeassistant.components.climate import PRESET_ECO, PRESET_SLEEP, HVACMode +from homeassistant.components.climate import HVACMode from homeassistant.components.knx.schema import ClimateSchema from homeassistant.const import CONF_NAME, STATE_IDLE from homeassistant.core import HomeAssistant @@ -331,7 +331,6 @@ async def test_climate_preset_mode( } } ) - events = async_capture_events(hass, "state_changed") # StateUpdater initialize state # StateUpdater semaphore allows 2 concurrent requests @@ -340,30 +339,28 @@ async def test_climate_preset_mode( await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) - events.clear() + await knx.receive_response("1/2/7", (0x01,)) # comfort + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="comfort") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": PRESET_ECO}, + {"entity_id": "climate.test", "preset_mode": "building_protection"}, blocking=True, ) await knx.assert_write("1/2/6", (0x04,)) - assert len(events) == 1 - events.pop() + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="building_protection") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": PRESET_SLEEP}, + {"entity_id": "climate.test", "preset_mode": "economy"}, blocking=True, ) await knx.assert_write("1/2/6", (0x03,)) - assert len(events) == 1 - events.pop() + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="economy") assert len(knx.xknx.devices) == 2 assert len(knx.xknx.devices[0].device_updated_cbs) == 2 From 1f684330e06afeb9b213fa28978fb6f8a4aa0869 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Aug 2024 09:18:20 +0200 Subject: [PATCH 0751/1635] Bump github/codeql-action from 3.26.0 to 3.26.2 (#123966) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 68673455e1f..45c2b31d772 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.26.0 + uses: github/codeql-action/init@v3.26.2 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.26.0 + uses: github/codeql-action/analyze@v3.26.2 with: category: "/language:python" From ac30efb5ac5012ee7ba9f336239cbe4c73810b3c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Aug 2024 09:31:42 +0200 Subject: [PATCH 0752/1635] Bump home-assistant/builder from 2024.03.5 to 2024.08.1 (#123967) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 522095ad041..7f3c0b0e66e 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -197,7 +197,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build base 
image - uses: home-assistant/builder@2024.03.5 + uses: home-assistant/builder@2024.08.1 with: args: | $BUILD_ARGS \ @@ -263,7 +263,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2024.03.5 + uses: home-assistant/builder@2024.08.1 with: args: | $BUILD_ARGS \ From dde1ecbf5b03fb945851e4e265606e7bf4b7a637 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Thu, 15 Aug 2024 17:59:08 +1000 Subject: [PATCH 0753/1635] Improve code quality of Tesla Fleet tests (#123959) --- tests/components/tesla_fleet/conftest.py | 4 +-- .../tesla_fleet/test_binary_sensors.py | 8 +++-- .../tesla_fleet/test_config_flow.py | 8 ++--- .../tesla_fleet/test_device_tracker.py | 6 ++-- tests/components/tesla_fleet/test_init.py | 30 +++++++++---------- tests/components/tesla_fleet/test_sensor.py | 6 ++-- 6 files changed, 34 insertions(+), 28 deletions(-) diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 7d60ae5e174..071fd7b02f1 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -106,7 +106,7 @@ def mock_wake_up() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) def mock_live_status() -> Generator[AsyncMock]: - """Mock Teslemetry Energy Specific live_status method.""" + """Mock Tesla Fleet API Energy Specific live_status method.""" with patch( "homeassistant.components.tesla_fleet.EnergySpecific.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), @@ -116,7 +116,7 @@ def mock_live_status() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) def mock_site_info() -> Generator[AsyncMock]: - """Mock Teslemetry Energy Specific site_info method.""" + """Mock Tesla Fleet API Energy Specific site_info method.""" with patch( "homeassistant.components.tesla_fleet.EnergySpecific.site_info", side_effect=lambda: deepcopy(SITE_INFO), diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensors.py index ffbaac5e6d8..a759e5ced70 100644 --- a/tests/components/tesla_fleet/test_binary_sensors.py +++ b/tests/components/tesla_fleet/test_binary_sensors.py @@ -1,8 +1,10 @@ """Test the Tesla Fleet binary sensor platform.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL @@ -34,7 +36,7 @@ async def test_binary_sensor_refresh( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, normal_config_entry: MockConfigEntry, ) -> None: @@ -53,7 +55,7 @@ async def test_binary_sensor_refresh( async def test_binary_sensor_offline( hass: HomeAssistant, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, normal_config_entry: MockConfigEntry, ) -> None: """Tests that the binary sensor entities are correct when offline.""" diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index 45dbe6ca598..81ba92f1e9c 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -57,7 +57,7 @@ async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: 
str, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -121,7 +121,7 @@ async def test_full_flow_user_cred( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: str, ) -> None: """Check full flow.""" @@ -200,7 +200,7 @@ async def test_reauthentication( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: str, ) -> None: """Test Tesla Fleet reauthentication.""" old_entry = MockConfigEntry( @@ -261,7 +261,7 @@ async def test_reauth_account_mismatch( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: str, ) -> None: """Test Tesla Fleet reauthentication with different account.""" old_entry = MockConfigEntry(domain=DOMAIN, unique_id="baduid", version=1, data={}) diff --git a/tests/components/tesla_fleet/test_device_tracker.py b/tests/components/tesla_fleet/test_device_tracker.py index 66a0c06de7f..e6f483d7953 100644 --- a/tests/components/tesla_fleet/test_device_tracker.py +++ b/tests/components/tesla_fleet/test_device_tracker.py @@ -1,6 +1,8 @@ """Test the Tesla Fleet device tracker platform.""" -from syrupy import SnapshotAssertion +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.const import STATE_UNKNOWN, Platform @@ -26,7 +28,7 @@ async def test_device_tracker( async def test_device_tracker_offline( hass: HomeAssistant, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, normal_config_entry: MockConfigEntry, ) -> None: """Tests that the device tracker entities are correct when offline.""" diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index 20bb6c66906..a22d91ee531 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( InvalidToken, LoginRequired, @@ -59,9 +59,9 @@ async def test_load_unload( async def test_init_error( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_products, - side_effect, - state, + mock_products: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test init with errors.""" @@ -91,8 +91,8 @@ async def test_devices( async def test_vehicle_refresh_offline( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_vehicle_state, - mock_vehicle_data, + mock_vehicle_state: AsyncMock, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh with an error.""" @@ -148,7 +148,7 @@ async def test_vehicle_refresh_error( async def test_vehicle_refresh_ratelimited( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh handles 429.""" @@ -179,7 +179,7 @@ async def test_vehicle_refresh_ratelimited( async def test_vehicle_sleep( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh with an error.""" @@ 
-241,9 +241,9 @@ async def test_vehicle_sleep( async def test_energy_live_refresh_error( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_live_status, - side_effect, - state, + mock_live_status: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_live_status.side_effect = side_effect @@ -256,9 +256,9 @@ async def test_energy_live_refresh_error( async def test_energy_site_refresh_error( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_site_info, - side_effect, - state, + mock_site_info: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_site_info.side_effect = side_effect @@ -300,7 +300,7 @@ async def test_energy_live_refresh_ratelimited( async def test_energy_info_refresh_ratelimited( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_site_info, + mock_site_info: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh handles 429.""" diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py index 2133194e2a0..377179ca26a 100644 --- a/tests/components/tesla_fleet/test_sensor.py +++ b/tests/components/tesla_fleet/test_sensor.py @@ -1,8 +1,10 @@ """Test the Tesla Fleet sensor platform.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL from homeassistant.const import Platform @@ -22,7 +24,7 @@ async def test_sensors( normal_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the sensor entities are correct.""" From 2c3d97d3733389c36a96f99843dd3aa33cf61cbe Mon Sep 17 00:00:00 2001 From: "Phill (pssc)" Date: Thu, 15 Aug 2024 09:03:03 +0100 Subject: [PATCH 0754/1635] Handle Yamaha ValueError (#123547) * fix yamaha remove info logging * ruff * fix yamnaha supress rxv.find UnicodeDecodeError * fix formatting * make more realistic * make more realistic and use parms * add value error after more feedback * ruff format * Update homeassistant/components/yamaha/media_player.py Co-authored-by: Martin Hjelmare * remove unused method * add more debugging * Increase discovery timeout add more debug allow config to overrite dicovery for name --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/yamaha/const.py | 1 + .../components/yamaha/media_player.py | 28 +++++++++++++++---- tests/components/yamaha/test_media_player.py | 20 +++++++++---- 3 files changed, 37 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/yamaha/const.py b/homeassistant/components/yamaha/const.py index 492babe9657..1cdb619b6ef 100644 --- a/homeassistant/components/yamaha/const.py +++ b/homeassistant/components/yamaha/const.py @@ -1,6 +1,7 @@ """Constants for the Yamaha component.""" DOMAIN = "yamaha" +DISCOVER_TIMEOUT = 3 KNOWN_ZONES = "known_zones" CURSOR_TYPE_DOWN = "down" CURSOR_TYPE_LEFT = "left" diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index f6434616cfa..58f501b99be 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -31,6 +31,7 @@ from .const import ( 
CURSOR_TYPE_RIGHT, CURSOR_TYPE_SELECT, CURSOR_TYPE_UP, + DISCOVER_TIMEOUT, DOMAIN, KNOWN_ZONES, SERVICE_ENABLE_OUTPUT, @@ -125,18 +126,33 @@ def _discovery(config_info): elif config_info.host is None: _LOGGER.debug("Config No Host Supplied Zones") zones = [] - for recv in rxv.find(): + for recv in rxv.find(DISCOVER_TIMEOUT): zones.extend(recv.zone_controllers()) else: _LOGGER.debug("Config Zones") zones = None # Fix for upstream issues in rxv.find() with some hardware. - with contextlib.suppress(AttributeError): - for recv in rxv.find(): + with contextlib.suppress(AttributeError, ValueError): + for recv in rxv.find(DISCOVER_TIMEOUT): + _LOGGER.debug( + "Found Serial %s %s %s", + recv.serial_number, + recv.ctrl_url, + recv.zone, + ) if recv.ctrl_url == config_info.ctrl_url: - _LOGGER.debug("Config Zones Matched %s", config_info.ctrl_url) - zones = recv.zone_controllers() + _LOGGER.debug( + "Config Zones Matched Serial %s: %s", + recv.ctrl_url, + recv.serial_number, + ) + zones = rxv.RXV( + config_info.ctrl_url, + friendly_name=config_info.name, + serial_number=recv.serial_number, + model_name=recv.model_name, + ).zone_controllers() break if not zones: @@ -170,7 +186,7 @@ async def async_setup_platform( entities = [] for zctrl in zone_ctrls: - _LOGGER.debug("Receiver zone: %s", zctrl.zone) + _LOGGER.debug("Receiver zone: %s serial %s", zctrl.zone, zctrl.serial_number) if config_info.zone_ignore and zctrl.zone in config_info.zone_ignore: _LOGGER.debug("Ignore receiver zone: %s %s", config_info.name, zctrl.zone) continue diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index d96da8f6854..2375e7d07f4 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -86,17 +86,25 @@ async def test_setup_host(hass: HomeAssistant, device, device2, main_zone) -> No assert state.state == "off" -async def test_setup_attribute_error(hass: HomeAssistant, device, main_zone) -> None: - """Test set up integration encountering an Attribute Error.""" +@pytest.mark.parametrize( + ("error"), + [ + AttributeError, + ValueError, + UnicodeDecodeError("", b"", 1, 0, ""), + ], +) +async def test_setup_find_errors(hass: HomeAssistant, device, main_zone, error) -> None: + """Test set up integration encountering an Error.""" - with patch("rxv.find", side_effect=AttributeError): + with patch("rxv.find", side_effect=error): assert await async_setup_component(hass, MP_DOMAIN, CONFIG) await hass.async_block_till_done() - state = hass.states.get("media_player.yamaha_receiver_main_zone") + state = hass.states.get("media_player.yamaha_receiver_main_zone") - assert state is not None - assert state.state == "off" + assert state is not None + assert state.state == "off" async def test_setup_no_host(hass: HomeAssistant, device, main_zone) -> None: From 5836f8edb55a40652fc65bdddbcf4b118053b232 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 10:11:43 +0200 Subject: [PATCH 0755/1635] Pass None instead of empty dict when registering entity services (#123879) * Pass None instead of empty dict when registering entity services * Update rainmachine --- .../components/media_player/__init__.py | 26 +++++++++---------- homeassistant/components/modern_forms/fan.py | 2 +- .../components/modern_forms/light.py | 2 +- homeassistant/components/motioneye/camera.py | 2 +- homeassistant/components/netatmo/climate.py | 2 +- homeassistant/components/prosegur/camera.py | 2 +- .../components/rainmachine/switch.py | 16 
++++++------ homeassistant/components/roborock/vacuum.py | 2 +- homeassistant/components/select/__init__.py | 4 +-- homeassistant/components/siren/__init__.py | 4 +-- .../components/snapcast/media_player.py | 6 ++--- .../components/sonos/media_player.py | 4 ++- .../components/squeezebox/media_player.py | 2 +- homeassistant/components/switch/__init__.py | 6 ++--- homeassistant/components/timer/__init__.py | 6 ++--- homeassistant/components/todo/__init__.py | 2 +- .../totalconnect/alarm_control_panel.py | 4 +-- homeassistant/components/upb/light.py | 2 +- homeassistant/components/upb/scene.py | 4 +-- homeassistant/components/update/__init__.py | 4 +-- homeassistant/components/vacuum/__init__.py | 12 ++++----- homeassistant/components/valve/__init__.py | 11 +++++--- homeassistant/components/verisure/camera.py | 2 +- homeassistant/components/verisure/lock.py | 4 +-- .../components/water_heater/__init__.py | 4 +-- homeassistant/components/wemo/fan.py | 2 +- .../components/xiaomi_miio/remote.py | 4 +-- .../components/xiaomi_miio/vacuum.py | 4 +-- 28 files changed, 75 insertions(+), 70 deletions(-) diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index d499ee8d6d3..beb672a1e58 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -274,59 +274,59 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_TURN_ON, {}, "async_turn_on", [MediaPlayerEntityFeature.TURN_ON] + SERVICE_TURN_ON, None, "async_turn_on", [MediaPlayerEntityFeature.TURN_ON] ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [MediaPlayerEntityFeature.TURN_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [MediaPlayerEntityFeature.TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON], ) component.async_register_entity_service( SERVICE_VOLUME_UP, - {}, + None, "async_volume_up", [MediaPlayerEntityFeature.VOLUME_SET, MediaPlayerEntityFeature.VOLUME_STEP], ) component.async_register_entity_service( SERVICE_VOLUME_DOWN, - {}, + None, "async_volume_down", [MediaPlayerEntityFeature.VOLUME_SET, MediaPlayerEntityFeature.VOLUME_STEP], ) component.async_register_entity_service( SERVICE_MEDIA_PLAY_PAUSE, - {}, + None, "async_media_play_pause", [MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PAUSE], ) component.async_register_entity_service( - SERVICE_MEDIA_PLAY, {}, "async_media_play", [MediaPlayerEntityFeature.PLAY] + SERVICE_MEDIA_PLAY, None, "async_media_play", [MediaPlayerEntityFeature.PLAY] ) component.async_register_entity_service( - SERVICE_MEDIA_PAUSE, {}, "async_media_pause", [MediaPlayerEntityFeature.PAUSE] + SERVICE_MEDIA_PAUSE, None, "async_media_pause", [MediaPlayerEntityFeature.PAUSE] ) component.async_register_entity_service( - SERVICE_MEDIA_STOP, {}, "async_media_stop", [MediaPlayerEntityFeature.STOP] + SERVICE_MEDIA_STOP, None, "async_media_stop", [MediaPlayerEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_MEDIA_NEXT_TRACK, - {}, + None, "async_media_next_track", [MediaPlayerEntityFeature.NEXT_TRACK], ) component.async_register_entity_service( SERVICE_MEDIA_PREVIOUS_TRACK, - {}, + None, "async_media_previous_track", [MediaPlayerEntityFeature.PREVIOUS_TRACK], ) component.async_register_entity_service( 
SERVICE_CLEAR_PLAYLIST, - {}, + None, "async_clear_playlist", [MediaPlayerEntityFeature.CLEAR_PLAYLIST], ) @@ -423,7 +423,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: [MediaPlayerEntityFeature.SHUFFLE_SET], ) component.async_register_entity_service( - SERVICE_UNJOIN, {}, "async_unjoin_player", [MediaPlayerEntityFeature.GROUPING] + SERVICE_UNJOIN, None, "async_unjoin_player", [MediaPlayerEntityFeature.GROUPING] ) component.async_register_entity_service( diff --git a/homeassistant/components/modern_forms/fan.py b/homeassistant/components/modern_forms/fan.py index c00549c327a..e34038c7be7 100644 --- a/homeassistant/components/modern_forms/fan.py +++ b/homeassistant/components/modern_forms/fan.py @@ -56,7 +56,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_CLEAR_FAN_SLEEP_TIMER, - {}, + None, "async_clear_fan_sleep_timer", ) diff --git a/homeassistant/components/modern_forms/light.py b/homeassistant/components/modern_forms/light.py index e758a50e77e..4c210038694 100644 --- a/homeassistant/components/modern_forms/light.py +++ b/homeassistant/components/modern_forms/light.py @@ -61,7 +61,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_CLEAR_LIGHT_SLEEP_TIMER, - {}, + None, "async_clear_light_sleep_timer", ) diff --git a/homeassistant/components/motioneye/camera.py b/homeassistant/components/motioneye/camera.py index da5eb36d494..d84f7b43c04 100644 --- a/homeassistant/components/motioneye/camera.py +++ b/homeassistant/components/motioneye/camera.py @@ -136,7 +136,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_SNAPSHOT, - {}, + None, "async_request_snapshot", ) diff --git a/homeassistant/components/netatmo/climate.py b/homeassistant/components/netatmo/climate.py index e257c7a89ea..c2953b9d49d 100644 --- a/homeassistant/components/netatmo/climate.py +++ b/homeassistant/components/netatmo/climate.py @@ -174,7 +174,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_CLEAR_TEMPERATURE_SETTING, - {}, + None, "_async_service_clear_temperature_setting", ) diff --git a/homeassistant/components/prosegur/camera.py b/homeassistant/components/prosegur/camera.py index fd911fa5898..2df6ff62038 100644 --- a/homeassistant/components/prosegur/camera.py +++ b/homeassistant/components/prosegur/camera.py @@ -31,7 +31,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_REQUEST_IMAGE, - {}, + None, "async_request_image", ) diff --git a/homeassistant/components/rainmachine/switch.py b/homeassistant/components/rainmachine/switch.py index d4c0064219e..8368db47d61 100644 --- a/homeassistant/components/rainmachine/switch.py +++ b/homeassistant/components/rainmachine/switch.py @@ -6,7 +6,7 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass from datetime import datetime -from typing import Any, Concatenate, cast +from typing import Any, Concatenate from regenmaschine.errors import RainMachineError import voluptuous as vol @@ -184,8 +184,8 @@ async def async_setup_entry( """Set up RainMachine switches based on a config entry.""" platform = entity_platform.async_get_current_platform() - for service_name, schema, method in ( - ("start_program", {}, "async_start_program"), + services: tuple[tuple[str, VolDictType | None, str], ...] 
= ( + ("start_program", None, "async_start_program"), ( "start_zone", { @@ -195,11 +195,11 @@ async def async_setup_entry( }, "async_start_zone", ), - ("stop_program", {}, "async_stop_program"), - ("stop_zone", {}, "async_stop_zone"), - ): - schema_dict = cast(VolDictType, schema) - platform.async_register_entity_service(service_name, schema_dict, method) + ("stop_program", None, "async_stop_program"), + ("stop_zone", None, "async_stop_zone"), + ) + for service_name, schema, method in services: + platform.async_register_entity_service(service_name, schema, method) data = entry.runtime_data entities: list[RainMachineBaseSwitch] = [] diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index f7fc58161a8..81a10e26415 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -69,7 +69,7 @@ async def async_setup_entry( platform.async_register_entity_service( GET_MAPS_SERVICE_NAME, - {}, + None, RoborockVacuum.get_maps.__name__, supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/select/__init__.py b/homeassistant/components/select/__init__.py index 27d41dafcd1..24f7d8bffea 100644 --- a/homeassistant/components/select/__init__.py +++ b/homeassistant/components/select/__init__.py @@ -66,13 +66,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SELECT_FIRST, - {}, + None, SelectEntity.async_first.__name__, ) component.async_register_entity_service( SERVICE_SELECT_LAST, - {}, + None, SelectEntity.async_last.__name__, ) diff --git a/homeassistant/components/siren/__init__.py b/homeassistant/components/siren/__init__.py index 216e111b7db..801ca4f2bee 100644 --- a/homeassistant/components/siren/__init__.py +++ b/homeassistant/components/siren/__init__.py @@ -129,11 +129,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: [SirenEntityFeature.TURN_ON], ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [SirenEntityFeature.TURN_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [SirenEntityFeature.TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [SirenEntityFeature.TURN_ON | SirenEntityFeature.TURN_OFF], ) diff --git a/homeassistant/components/snapcast/media_player.py b/homeassistant/components/snapcast/media_player.py index 0918d6465ad..bda411acde3 100644 --- a/homeassistant/components/snapcast/media_player.py +++ b/homeassistant/components/snapcast/media_player.py @@ -42,12 +42,12 @@ def register_services(): """Register snapcast services.""" platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service(SERVICE_SNAPSHOT, {}, "snapshot") - platform.async_register_entity_service(SERVICE_RESTORE, {}, "async_restore") + platform.async_register_entity_service(SERVICE_SNAPSHOT, None, "snapshot") + platform.async_register_entity_service(SERVICE_RESTORE, None, "async_restore") platform.async_register_entity_service( SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, handle_async_join ) - platform.async_register_entity_service(SERVICE_UNJOIN, {}, handle_async_unjoin) + platform.async_register_entity_service(SERVICE_UNJOIN, None, handle_async_unjoin) platform.async_register_entity_service( SERVICE_SET_LATENCY, {vol.Required(ATTR_LATENCY): cv.positive_int}, diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py 
index 4125466bd99..590761752c5 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -162,7 +162,9 @@ async def async_setup_entry( "set_sleep_timer", ) - platform.async_register_entity_service(SERVICE_CLEAR_TIMER, {}, "clear_sleep_timer") + platform.async_register_entity_service( + SERVICE_CLEAR_TIMER, None, "clear_sleep_timer" + ) platform.async_register_entity_service( SERVICE_UPDATE_ALARM, diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 97d55c2f2ef..552b8ed800c 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -185,7 +185,7 @@ async def async_setup_entry( {vol.Required(ATTR_OTHER_PLAYER): cv.string}, "async_sync", ) - platform.async_register_entity_service(SERVICE_UNSYNC, {}, "async_unsync") + platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # Start server discovery task if not already running entry.async_on_unload(async_at_start(hass, start_server_discovery)) diff --git a/homeassistant/components/switch/__init__.py b/homeassistant/components/switch/__init__.py index 55e0a7a767e..43971741e51 100644 --- a/homeassistant/components/switch/__init__.py +++ b/homeassistant/components/switch/__init__.py @@ -79,9 +79,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) await component.async_setup(config) - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") return True diff --git a/homeassistant/components/timer/__init__.py b/homeassistant/components/timer/__init__.py index 3f2b4bd7f43..c2057551239 100644 --- a/homeassistant/components/timer/__init__.py +++ b/homeassistant/components/timer/__init__.py @@ -159,9 +159,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: {vol.Optional(ATTR_DURATION, default=DEFAULT_DURATION): cv.time_period}, "async_start", ) - component.async_register_entity_service(SERVICE_PAUSE, {}, "async_pause") - component.async_register_entity_service(SERVICE_CANCEL, {}, "async_cancel") - component.async_register_entity_service(SERVICE_FINISH, {}, "async_finish") + component.async_register_entity_service(SERVICE_PAUSE, None, "async_pause") + component.async_register_entity_service(SERVICE_CANCEL, None, "async_cancel") + component.async_register_entity_service(SERVICE_FINISH, None, "async_finish") component.async_register_entity_service( SERVICE_CHANGE, {vol.Optional(ATTR_DURATION, default=DEFAULT_DURATION): cv.time_period}, diff --git a/homeassistant/components/todo/__init__.py b/homeassistant/components/todo/__init__.py index 5febc9561c4..d35d9d6bbea 100644 --- a/homeassistant/components/todo/__init__.py +++ b/homeassistant/components/todo/__init__.py @@ -183,7 +183,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( TodoServices.REMOVE_COMPLETED_ITEMS, - {}, + None, _async_remove_completed_items, required_features=[TodoListEntityFeature.DELETE_TODO_ITEM], ) diff --git 
a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index 17a16674dd5..49d97e45e00 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -55,13 +55,13 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_ALARM_ARM_AWAY_INSTANT, - {}, + None, "async_alarm_arm_away_instant", ) platform.async_register_entity_service( SERVICE_ALARM_ARM_HOME_INSTANT, - {}, + None, "async_alarm_arm_home_instant", ) diff --git a/homeassistant/components/upb/light.py b/homeassistant/components/upb/light.py index eb20fc949dc..881eda3525f 100644 --- a/homeassistant/components/upb/light.py +++ b/homeassistant/components/upb/light.py @@ -42,7 +42,7 @@ async def async_setup_entry( SERVICE_LIGHT_FADE_START, UPB_BRIGHTNESS_RATE_SCHEMA, "async_light_fade_start" ) platform.async_register_entity_service( - SERVICE_LIGHT_FADE_STOP, {}, "async_light_fade_stop" + SERVICE_LIGHT_FADE_STOP, None, "async_light_fade_stop" ) platform.async_register_entity_service( SERVICE_LIGHT_BLINK, UPB_BLINK_RATE_SCHEMA, "async_light_blink" diff --git a/homeassistant/components/upb/scene.py b/homeassistant/components/upb/scene.py index 9cf6788de4f..276b620d5b5 100644 --- a/homeassistant/components/upb/scene.py +++ b/homeassistant/components/upb/scene.py @@ -31,10 +31,10 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( - SERVICE_LINK_DEACTIVATE, {}, "async_link_deactivate" + SERVICE_LINK_DEACTIVATE, None, "async_link_deactivate" ) platform.async_register_entity_service( - SERVICE_LINK_FADE_STOP, {}, "async_link_fade_stop" + SERVICE_LINK_FADE_STOP, None, "async_link_fade_stop" ) platform.async_register_entity_service( SERVICE_LINK_GOTO, UPB_BRIGHTNESS_RATE_SCHEMA, "async_link_goto" diff --git a/homeassistant/components/update/__init__.py b/homeassistant/components/update/__init__.py index e7813b354c1..cd52de6550f 100644 --- a/homeassistant/components/update/__init__.py +++ b/homeassistant/components/update/__init__.py @@ -95,12 +95,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SKIP, - {}, + None, async_skip, ) component.async_register_entity_service( "clear_skipped", - {}, + None, async_clear_skipped, ) diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index 90018e2d8cc..867e25d4b2a 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -116,37 +116,37 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_START, - {}, + None, "async_start", [VacuumEntityFeature.START], ) component.async_register_entity_service( SERVICE_PAUSE, - {}, + None, "async_pause", [VacuumEntityFeature.PAUSE], ) component.async_register_entity_service( SERVICE_RETURN_TO_BASE, - {}, + None, "async_return_to_base", [VacuumEntityFeature.RETURN_HOME], ) component.async_register_entity_service( SERVICE_CLEAN_SPOT, - {}, + None, "async_clean_spot", [VacuumEntityFeature.CLEAN_SPOT], ) component.async_register_entity_service( SERVICE_LOCATE, - {}, + None, "async_locate", [VacuumEntityFeature.LOCATE], ) component.async_register_entity_service( SERVICE_STOP, - {}, + None, "async_stop", [VacuumEntityFeature.STOP], ) diff --git 
a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index 3814275b703..04ce12e8a8f 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -71,11 +71,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_OPEN_VALVE, {}, "async_handle_open_valve", [ValveEntityFeature.OPEN] + SERVICE_OPEN_VALVE, None, "async_handle_open_valve", [ValveEntityFeature.OPEN] ) component.async_register_entity_service( - SERVICE_CLOSE_VALVE, {}, "async_handle_close_valve", [ValveEntityFeature.CLOSE] + SERVICE_CLOSE_VALVE, + None, + "async_handle_close_valve", + [ValveEntityFeature.CLOSE], ) component.async_register_entity_service( @@ -90,12 +93,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( - SERVICE_STOP_VALVE, {}, "async_stop_valve", [ValveEntityFeature.STOP] + SERVICE_STOP_VALVE, None, "async_stop_valve", [ValveEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE], ) diff --git a/homeassistant/components/verisure/camera.py b/homeassistant/components/verisure/camera.py index 72f5ab93c70..50606a49eab 100644 --- a/homeassistant/components/verisure/camera.py +++ b/homeassistant/components/verisure/camera.py @@ -33,7 +33,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_CAPTURE_SMARTCAM, - {}, + None, VerisureSmartcam.capture_smartcam.__name__, ) diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index da2bc2ced2b..5c56fc0df2c 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -41,12 +41,12 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_DISABLE_AUTOLOCK, - {}, + None, VerisureDoorlock.disable_autolock.__name__, ) platform.async_register_entity_service( SERVICE_ENABLE_AUTOLOCK, - {}, + None, VerisureDoorlock.enable_autolock.__name__, ) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 731a513fb66..0b54a4c1aa4 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -129,10 +129,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_TURN_ON, {}, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_ON, None, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( SERVICE_SET_AWAY_MODE, SET_AWAY_MODE_SCHEMA, async_service_away_mode diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index b7c9840bcdc..f9d3270aaa0 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -67,7 +67,7 @@ async def async_setup_entry( # This will call WemoHumidifier.reset_filter_life() platform.async_register_entity_service( - SERVICE_RESET_FILTER_LIFE, {}, 
WemoHumidifier.reset_filter_life.__name__ + SERVICE_RESET_FILTER_LIFE, None, WemoHumidifier.reset_filter_life.__name__ ) diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index 959bf0a7bee..72707109ad6 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -170,12 +170,12 @@ async def async_setup_platform( ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_ON, - {}, + None, async_service_led_on_handler, ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_OFF, - {}, + None, async_service_led_off_handler, ) diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index ef6f94c162f..ac833f7646c 100644 --- a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -104,13 +104,13 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_START_REMOTE_CONTROL, - {}, + None, MiroboVacuum.async_remote_control_start.__name__, ) platform.async_register_entity_service( SERVICE_STOP_REMOTE_CONTROL, - {}, + None, MiroboVacuum.async_remote_control_stop.__name__, ) From 81c4bb5f72f2c9b0770a2f7142408f8caf8f205f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 10:32:40 +0200 Subject: [PATCH 0756/1635] Fix flaky recorder migration tests (#123971) --- tests/components/recorder/conftest.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index d66cf4fe2ec..b13b3b270a9 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -68,7 +68,7 @@ class InstrumentedMigration: non_live_migration_done: threading.Event non_live_migration_done_stall: threading.Event apply_update_mock: Mock - stall_on_schema_version: int + stall_on_schema_version: int | None apply_update_stalled: threading.Event @@ -147,7 +147,8 @@ def instrument_migration( old_version: int, ): """Control migration progress.""" - if new_version == instrumented_migration.stall_on_schema_version: + stall_version = instrumented_migration.stall_on_schema_version + if stall_version is None or stall_version == new_version: instrumented_migration.apply_update_stalled.set() instrumented_migration.migration_stall.wait() real_apply_update( @@ -179,7 +180,7 @@ def instrument_migration( non_live_migration_done=threading.Event(), non_live_migration_done_stall=threading.Event(), apply_update_mock=apply_update_mock, - stall_on_schema_version=1, + stall_on_schema_version=None, apply_update_stalled=threading.Event(), ) From d6d016e0290736f5ab0fbc27a0edfac9585d3805 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Thu, 15 Aug 2024 10:52:55 +0200 Subject: [PATCH 0757/1635] Fix KNX UI Light color temperature DPT (#123778) --- homeassistant/components/knx/light.py | 4 +-- tests/components/knx/test_light.py | 44 +++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index a2ce8f8d2cb..0caa3f0a799 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -226,7 +226,7 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight group_address_color_temp_state = None color_temperature_type = ColorTemperatureType.UINT_2_BYTE if ga_color_temp := knx_config.get(CONF_GA_COLOR_TEMP): - 
if ga_color_temp[CONF_DPT] == ColorTempModes.RELATIVE: + if ga_color_temp[CONF_DPT] == ColorTempModes.RELATIVE.value: group_address_tunable_white = ga_color_temp[CONF_GA_WRITE] group_address_tunable_white_state = [ ga_color_temp[CONF_GA_STATE], @@ -239,7 +239,7 @@ def _create_ui_light(xknx: XKNX, knx_config: ConfigType, name: str) -> XknxLight ga_color_temp[CONF_GA_STATE], *ga_color_temp[CONF_GA_PASSIVE], ] - if ga_color_temp[CONF_DPT] == ColorTempModes.ABSOLUTE_FLOAT: + if ga_color_temp[CONF_DPT] == ColorTempModes.ABSOLUTE_FLOAT.value: color_temperature_type = ColorTemperatureType.FLOAT_2_BYTE _color_dpt = get_dpt(CONF_GA_COLOR) diff --git a/tests/components/knx/test_light.py b/tests/components/knx/test_light.py index 04f849bb555..e2e4a673a0d 100644 --- a/tests/components/knx/test_light.py +++ b/tests/components/knx/test_light.py @@ -5,6 +5,7 @@ from __future__ import annotations from datetime import timedelta from freezegun.api import FrozenDateTimeFactory +import pytest from xknx.core import XknxConnectionState from xknx.devices.light import Light as XknxLight @@ -1174,3 +1175,46 @@ async def test_light_ui_create( await knx.receive_response("2/2/2", True) state = hass.states.get("light.test") assert state.state is STATE_ON + + +@pytest.mark.parametrize( + ("color_temp_mode", "raw_ct"), + [ + ("7.600", (0x10, 0x68)), + ("9", (0x46, 0x69)), + ("5.001", (0x74,)), + ], +) +async def test_light_ui_color_temp( + hass: HomeAssistant, + knx: KNXTestKit, + create_ui_entity: KnxEntityGenerator, + color_temp_mode: str, + raw_ct: tuple[int, ...], +) -> None: + """Test creating a switch.""" + await knx.setup_integration({}) + await create_ui_entity( + platform=Platform.LIGHT, + entity_data={"name": "test"}, + knx_data={ + "ga_switch": {"write": "1/1/1", "state": "2/2/2"}, + "ga_color_temp": { + "write": "3/3/3", + "dpt": color_temp_mode, + }, + "_light_color_mode_schema": "default", + "sync_state": True, + }, + ) + await knx.assert_read("2/2/2", True) + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": "light.test", ATTR_COLOR_TEMP_KELVIN: 4200}, + blocking=True, + ) + await knx.assert_write("3/3/3", raw_ct) + state = hass.states.get("light.test") + assert state.state is STATE_ON + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == pytest.approx(4200, abs=1) From 629b919707e31b7b0f41a23aec524c0939100422 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 10:56:04 +0200 Subject: [PATCH 0758/1635] Remove unnecessary assignment of Template.hass from knx (#123977) --- homeassistant/components/knx/expose.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/knx/expose.py b/homeassistant/components/knx/expose.py index 29d0be998b6..921af6ba4a9 100644 --- a/homeassistant/components/knx/expose.py +++ b/homeassistant/components/knx/expose.py @@ -84,8 +84,6 @@ class KNXExposeSensor: self.expose_default = config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT) self.expose_type: int | str = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE] self.value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if self.value_template is not None: - self.value_template.hass = hass self._remove_listener: Callable[[], None] | None = None self.device: ExposeSensor = ExposeSensor( From 72e235ad9fa4028754ffc36aa71f41877192f950 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 10:56:56 +0200 Subject: [PATCH 0759/1635] Improve some comments in recorder migration code (#123969) --- homeassistant/components/recorder/migration.py | 10 +++++----- 1 file 
changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index c34d05b3ade..44b4a980238 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -607,7 +607,7 @@ def _update_states_table_with_foreign_key_options( "columns": foreign_key["constrained_columns"], } for foreign_key in inspector.get_foreign_keys(TABLE_STATES) - if foreign_key["name"] + if foreign_key["name"] # It's not possible to drop an unnamed constraint and ( # MySQL/MariaDB will have empty options not foreign_key.get("options") @@ -663,7 +663,7 @@ def _drop_foreign_key_constraints( if foreign_key["name"] and foreign_key["constrained_columns"] == [column] ] - ## Bind the ForeignKeyConstraints to the table + ## Find matching named constraints and bind the ForeignKeyConstraints to the table tmp_table = Table(table, MetaData()) drops = [ ForeignKeyConstraint((), (), name=foreign_key["name"], table=tmp_table) @@ -829,9 +829,9 @@ def _delete_foreign_key_violations( result = session.connection().execute( # We don't use an alias for the table we're deleting from, # support of the form `DELETE FROM table AS t1` was added in - # MariaDB 11.6 and is not supported by MySQL. Those engines - # instead support the from `DELETE t1 from table AS t1` which - # is not supported by PostgreSQL and undocumented for MariaDB. + # MariaDB 11.6 and is not supported by MySQL. MySQL and older + # MariaDB instead support the from `DELETE t1 from table AS t1` + # which is undocumented for MariaDB. text( f"DELETE FROM {table} " # noqa: S608 "WHERE (" From 9b78ae5908196a1ea8e2af904dd0403b40148dde Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Thu, 15 Aug 2024 19:00:07 +1000 Subject: [PATCH 0760/1635] Handle InvalidRegion in Tesla Fleet (#123958) --- .../components/tesla_fleet/__init__.py | 13 +++++++- tests/components/tesla_fleet/conftest.py | 9 ++++++ tests/components/tesla_fleet/test_init.py | 31 +++++++++++++++++++ 3 files changed, 52 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 45657b3d8fb..47a2a9173a5 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -7,7 +7,9 @@ import jwt from tesla_fleet_api import EnergySpecific, TeslaFleetApi, VehicleSpecific from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( + InvalidRegion, InvalidToken, + LibraryError, LoginRequired, OAuthExpired, TeslaFleetError, @@ -75,7 +77,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - region=region, charging_scope=False, partner_scope=False, - user_scope=False, energy_scope=Scope.ENERGY_DEVICE_DATA in scopes, vehicle_scope=Scope.VEHICLE_DEVICE_DATA in scopes, refresh_hook=_refresh_token, @@ -84,6 +85,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - products = (await tesla.products())["response"] except (InvalidToken, OAuthExpired, LoginRequired) as e: raise ConfigEntryAuthFailed from e + except InvalidRegion: + try: + LOGGER.info("Region is invalid, trying to find the correct region") + await tesla.find_server() + try: + products = (await tesla.products())["response"] + except TeslaFleetError as e: + raise ConfigEntryNotReady from e + except LibraryError as e: + raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise ConfigEntryNotReady from e diff 
--git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 071fd7b02f1..49f0be9cca7 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -122,3 +122,12 @@ def mock_site_info() -> Generator[AsyncMock]: side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status + + +@pytest.fixture(autouse=True) +def mock_find_server() -> Generator[AsyncMock]: + """Mock Tesla Fleet find server method.""" + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi.find_server", + ) as mock_find_server: + yield mock_find_server diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index a22d91ee531..b5eb21d1cdd 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -6,7 +6,9 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( + InvalidRegion, InvalidToken, + LibraryError, LoginRequired, OAuthExpired, RateLimited, @@ -326,3 +328,32 @@ async def test_energy_info_refresh_ratelimited( await hass.async_block_till_done() assert mock_site_info.call_count == 3 + + +async def test_init_region_issue( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + mock_find_server: AsyncMock, +) -> None: + """Test init with region issue.""" + + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + mock_find_server.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_init_region_issue_failed( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + mock_find_server: AsyncMock, +) -> None: + """Test init with unresolvable region issue.""" + + mock_products.side_effect = InvalidRegion + mock_find_server.side_effect = LibraryError + await setup_platform(hass, normal_config_entry) + mock_find_server.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR From f2d39feec0300dcde25c47b0e64a09c55e1a3222 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 15 Aug 2024 04:08:40 -0500 Subject: [PATCH 0761/1635] Adjust VAD seconds better for microVAD (#123942) --- .../components/assist_pipeline/vad.py | 29 +++-- tests/components/assist_pipeline/test_vad.py | 109 +++++++++++++++++- 2 files changed, 123 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 49496e66159..8372dbc54c7 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -6,13 +6,11 @@ from collections.abc import Callable, Iterable from dataclasses import dataclass from enum import StrEnum import logging -from typing import Final + +from .const import SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH _LOGGER = logging.getLogger(__name__) -_SAMPLE_RATE: Final = 16000 # Hz -_SAMPLE_WIDTH: Final = 2 # bytes - class VadSensitivity(StrEnum): """How quickly the end of a voice command is detected.""" @@ -26,12 +24,12 @@ class VadSensitivity(StrEnum): """Return seconds of silence for sensitivity level.""" sensitivity = VadSensitivity(sensitivity) if sensitivity == VadSensitivity.RELAXED: - return 2.0 + return 1.25 if sensitivity == VadSensitivity.AGGRESSIVE: - return 0.5 + return 0.25 - return 1.0 + return 0.7 class 
AudioBuffer: @@ -80,7 +78,7 @@ class VoiceCommandSegmenter: speech_seconds: float = 0.3 """Seconds of speech before voice command has started.""" - silence_seconds: float = 1.0 + silence_seconds: float = 0.7 """Seconds of silence after voice command has ended.""" timeout_seconds: float = 15.0 @@ -92,6 +90,9 @@ class VoiceCommandSegmenter: in_command: bool = False """True if inside voice command.""" + timed_out: bool = False + """True a timeout occurred during voice command.""" + _speech_seconds_left: float = 0.0 """Seconds left before considering voice command as started.""" @@ -121,6 +122,9 @@ class VoiceCommandSegmenter: Returns False when command is done. """ + if self.timed_out: + self.timed_out = False + self._timeout_seconds_left -= chunk_seconds if self._timeout_seconds_left <= 0: _LOGGER.warning( @@ -128,6 +132,7 @@ class VoiceCommandSegmenter: self.timeout_seconds, ) self.reset() + self.timed_out = True return False if not self.in_command: @@ -179,7 +184,9 @@ class VoiceCommandSegmenter: """ if vad_samples_per_chunk is None: # No chunking - chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE + chunk_seconds = ( + len(chunk) // (SAMPLE_WIDTH * SAMPLE_CHANNELS) + ) / SAMPLE_RATE is_speech = vad_is_speech(chunk) return self.process(chunk_seconds, is_speech) @@ -187,8 +194,8 @@ class VoiceCommandSegmenter: raise ValueError("leftover_chunk_buffer is required when vad uses chunking") # With chunking - seconds_per_chunk = vad_samples_per_chunk / _SAMPLE_RATE - bytes_per_chunk = vad_samples_per_chunk * _SAMPLE_WIDTH + seconds_per_chunk = vad_samples_per_chunk / SAMPLE_RATE + bytes_per_chunk = vad_samples_per_chunk * (SAMPLE_WIDTH * SAMPLE_CHANNELS) for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer): is_speech = vad_is_speech(vad_chunk) if not self.process(seconds_per_chunk, is_speech): diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index 17cb73a9139..db039ab3140 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -17,15 +17,12 @@ def test_silence() -> None: # True return value indicates voice command has not finished assert segmenter.process(_ONE_SECOND * 3, False) + assert not segmenter.in_command def test_speech() -> None: """Test that silence + speech + silence triggers a voice command.""" - def is_speech(chunk): - """Anything non-zero is speech.""" - return sum(chunk) > 0 - segmenter = VoiceCommandSegmenter() # silence @@ -33,10 +30,12 @@ def test_speech() -> None: # "speech" assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command # silence # False return value indicates voice command is finished assert not segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command def test_audio_buffer() -> None: @@ -105,3 +104,105 @@ def test_chunk_samples_leftover() -> None: assert len(chunks) == 1 assert leftover_chunk_buffer.bytes() == bytes([5, 6]) + + +def test_silence_seconds() -> None: + """Test end of voice command silence seconds.""" + + segmenter = VoiceCommandSegmenter(silence_seconds=1.0) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # "speech" + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # exactly enough silence now + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.in_command 
+ + +def test_silence_reset() -> None: + """Test that speech resets end of voice command detection.""" + + segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # "speech" + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # speech should reset silence detection + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # exactly enough silence now + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.in_command + + +def test_speech_reset() -> None: + """Test that silence resets start of voice command detection.""" + + segmenter = VoiceCommandSegmenter( + silence_seconds=1.0, reset_seconds=0.5, speech_seconds=1.0 + ) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # not enough speech to start voice command + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.in_command + + # silence should reset speech detection + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # not enough speech to start voice command + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.in_command + + # exactly enough speech now + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.in_command + + +def test_timeout() -> None: + """Test that voice command detection times out.""" + + segmenter = VoiceCommandSegmenter(timeout_seconds=1.0) + + # not enough to time out + assert not segmenter.timed_out + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.timed_out + + # enough to time out + assert not segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.timed_out + + # flag resets with more audio + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.timed_out + + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.timed_out From 26e80cec3d8b3dc7833e0521cff21ca33719696b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 11:09:24 +0200 Subject: [PATCH 0762/1635] Deduplicate some recorder migration tests (#123972) --- tests/components/recorder/test_migrate.py | 69 +++-------------------- 1 file changed, 7 insertions(+), 62 deletions(-) diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index eadcd2bd9ad..7b91a97e5a0 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -201,79 +201,24 @@ async def test_database_migration_failed( @pytest.mark.parametrize( ( + "patch_version", "func_to_patch", "expected_setup_result", "expected_pn_create", "expected_pn_dismiss", ), [ - ("DropConstraint", False, 1, 0), # This makes migration to step 11 fail + (11, "DropConstraint", False, 1, 0), + (44, "DropConstraint", False, 2, 1), ], ) @pytest.mark.skip_on_db_engine(["sqlite"]) @pytest.mark.usefixtures("skip_by_db_engine") -async def test_database_migration_failed_step_11( - hass: HomeAssistant, - async_setup_recorder_instance: RecorderInstanceGenerator, - func_to_patch: str, - expected_setup_result: bool, - expected_pn_create: int, - expected_pn_dismiss: int, -) -> None: - """Test we 
notify if the migration fails.""" - assert recorder.util.async_migration_in_progress(hass) is False - - with ( - patch( - "homeassistant.components.recorder.core.create_engine", - new=create_engine_test, - ), - patch( - f"homeassistant.components.recorder.migration.{func_to_patch}", - side_effect=OperationalError( - None, None, OSError("No space left on device") - ), - ), - patch( - "homeassistant.components.persistent_notification.create", - side_effect=pn.create, - ) as mock_create, - patch( - "homeassistant.components.persistent_notification.dismiss", - side_effect=pn.dismiss, - ) as mock_dismiss, - ): - await async_setup_recorder_instance( - hass, wait_recorder=False, expected_setup_result=expected_setup_result - ) - hass.states.async_set("my.entity", "on", {}) - hass.states.async_set("my.entity", "off", {}) - await hass.async_block_till_done() - await hass.async_add_executor_job(recorder.get_instance(hass).join) - await hass.async_block_till_done() - - assert recorder.util.async_migration_in_progress(hass) is False - assert len(mock_create.mock_calls) == expected_pn_create - assert len(mock_dismiss.mock_calls) == expected_pn_dismiss - - -@pytest.mark.parametrize( - ( - "func_to_patch", - "expected_setup_result", - "expected_pn_create", - "expected_pn_dismiss", - ), - [ - ("DropConstraint", False, 2, 1), # This makes migration to step 44 fail - ], -) -@pytest.mark.skip_on_db_engine(["sqlite"]) -@pytest.mark.usefixtures("skip_by_db_engine") -async def test_database_migration_failed_step_44( +async def test_database_migration_failed_non_sqlite( hass: HomeAssistant, async_setup_recorder_instance: RecorderInstanceGenerator, instrument_migration: InstrumentedMigration, + patch_version: int, func_to_patch: str, expected_setup_result: bool, expected_pn_create: int, @@ -281,7 +226,7 @@ async def test_database_migration_failed_step_44( ) -> None: """Test we notify if the migration fails.""" assert recorder.util.async_migration_in_progress(hass) is False - instrument_migration.stall_on_schema_version = 44 + instrument_migration.stall_on_schema_version = patch_version with ( patch( @@ -303,7 +248,7 @@ async def test_database_migration_failed_step_44( wait_recorder_setup=False, expected_setup_result=expected_setup_result, ) - # Wait for migration to reach schema version 44 + # Wait for migration to reach the schema version we want to break await hass.async_add_executor_job( instrument_migration.apply_update_stalled.wait ) From b3399082a88b9d6d9857795d37925e44bc7b027c Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Thu, 15 Aug 2024 19:33:31 +1000 Subject: [PATCH 0763/1635] Gold quality for Tesla Fleet (#122235) gold quality --- homeassistant/components/tesla_fleet/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 2acacab5065..29966b3b49c 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], + "quality_scale": "gold", "requirements": ["tesla-fleet-api==0.7.3"] } From ab163c356fc4aa5f421879f72b688771cf1042de Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 12:39:01 +0200 Subject: [PATCH 0764/1635] Bump pyhomeworks to 1.1.1 (#123981) --- homeassistant/components/homeworks/manifest.json | 2 +- requirements_all.txt | 2 +- 
requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homeworks/manifest.json b/homeassistant/components/homeworks/manifest.json index 1ba0672c9f1..a399e0a98e7 100644 --- a/homeassistant/components/homeworks/manifest.json +++ b/homeassistant/components/homeworks/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homeworks", "iot_class": "local_push", "loggers": ["pyhomeworks"], - "requirements": ["pyhomeworks==1.1.0"] + "requirements": ["pyhomeworks==1.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6908004c2d1..649395442ad 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1906,7 +1906,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.1.0 +pyhomeworks==1.1.1 # homeassistant.components.ialarm pyialarm==2.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 189577764c1..0b77e322e95 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1523,7 +1523,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.1.0 +pyhomeworks==1.1.1 # homeassistant.components.ialarm pyialarm==2.2.0 From f72d9a2c023a1e1c41065cc56914ab3caa3165a2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 14:46:23 +0200 Subject: [PATCH 0765/1635] Raise on database error in recorder.migration._modify_columns (#123642) * Raise on database error in recorder.migration._modify_columns * Improve test coverage --- homeassistant/components/recorder/migration.py | 1 + tests/components/recorder/conftest.py | 3 +++ tests/components/recorder/test_migrate.py | 11 ++++++++--- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 44b4a980238..9a19ff7084a 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -580,6 +580,7 @@ def _modify_columns( _LOGGER.exception( "Could not modify column %s in table %s", column_def, table_name ) + raise def _update_states_table_with_foreign_key_options( diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index b13b3b270a9..9cdf9dbb372 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -70,6 +70,7 @@ class InstrumentedMigration: apply_update_mock: Mock stall_on_schema_version: int | None apply_update_stalled: threading.Event + apply_update_version: int | None @pytest.fixture(name="instrument_migration") @@ -147,6 +148,7 @@ def instrument_migration( old_version: int, ): """Control migration progress.""" + instrumented_migration.apply_update_version = new_version stall_version = instrumented_migration.stall_on_schema_version if stall_version is None or stall_version == new_version: instrumented_migration.apply_update_stalled.set() @@ -182,6 +184,7 @@ def instrument_migration( apply_update_mock=apply_update_mock, stall_on_schema_version=None, apply_update_stalled=threading.Event(), + apply_update_version=None, ) instrumented_migration.live_migration_done_stall.set() diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 7b91a97e5a0..4bc317bdaa7 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -208,8 +208,12 @@ async def test_database_migration_failed( 
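
For context on the one-line migration.py change earlier in this patch: _modify_columns previously logged the database error and fell through, so a failed ALTER looked the same as a successful one to the caller. A minimal sketch of the log-and-re-raise pattern, with simplified names and SQL that are assumptions rather than the real helper:

    import logging

    from sqlalchemy import text
    from sqlalchemy.exc import SQLAlchemyError

    _LOGGER = logging.getLogger(__name__)

    def modify_column(connection, table_name: str, column_def: str) -> None:
        """Apply one column change; log and propagate any database error."""
        try:
            connection.execute(text(f"ALTER TABLE {table_name} {column_def}"))
        except SQLAlchemyError:
            _LOGGER.exception(
                "Could not modify column %s in table %s", column_def, table_name
            )
            # Re-raise so the caller sees a failed schema change instead of
            # carrying on as if the migration step had succeeded.
            raise
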
"expected_pn_dismiss", ), [ - (11, "DropConstraint", False, 1, 0), - (44, "DropConstraint", False, 2, 1), + # Test error handling in _update_states_table_with_foreign_key_options + (11, "homeassistant.components.recorder.migration.DropConstraint", False, 1, 0), + # Test error handling in _modify_columns + (12, "sqlalchemy.engine.base.Connection.execute", False, 1, 0), + # Test error handling in _drop_foreign_key_constraints + (44, "homeassistant.components.recorder.migration.DropConstraint", False, 2, 1), ], ) @pytest.mark.skip_on_db_engine(["sqlite"]) @@ -255,7 +259,7 @@ async def test_database_migration_failed_non_sqlite( # Make it fail with patch( - f"homeassistant.components.recorder.migration.{func_to_patch}", + func_to_patch, side_effect=OperationalError( None, None, OSError("No space left on device") ), @@ -267,6 +271,7 @@ async def test_database_migration_failed_non_sqlite( await hass.async_add_executor_job(recorder.get_instance(hass).join) await hass.async_block_till_done() + assert instrument_migration.apply_update_version == patch_version assert recorder.util.async_migration_in_progress(hass) is False assert len(mock_create.mock_calls) == expected_pn_create assert len(mock_dismiss.mock_calls) == expected_pn_dismiss From c674a25eba4a5975d37f5af813a6db8d4896a9e8 Mon Sep 17 00:00:00 2001 From: Lenn <78048721+LennP@users.noreply.github.com> Date: Thu, 15 Aug 2024 15:39:47 +0200 Subject: [PATCH 0766/1635] Add Motionblinds Bluetooth full test coverage (#121878) * Add tests * Fix entity test * Format * Fix sensor tests * Fix sensor tests * Fix sensor tests * Add init tests * Change service info * Rename test_sensor parameters * Removce ConfigEntryState.LOADED assertion * Remove platforms parameter from setup_platform * Rename setup_platform to setup_integration * Fixture for blind_type and mock_config_entry * Use mock for MotionDevice * Use mock for MotionDevice * Add type hint * Use Mock instead of patch * Use mock_config_entry fixture * Move constants to init * Fix entity_id name * Use fixture * Use fixtures instead of constants * Use display_name fixture * Rename mac to mac_code * Remove one patch * Use fixtures for mock_config_entry * Apply suggestion * Replace patch with mock * Replace patch with mock * Replace patch with mock * Fix * Use pytest.mark.usefixtures if parameter not used * Base mac code on address * Remove if statement from entity test --------- Co-authored-by: Joostlek --- tests/components/motionblinds_ble/__init__.py | 15 ++ tests/components/motionblinds_ble/conftest.py | 139 +++++++++++++-- .../motionblinds_ble/test_button.py | 47 ++++++ .../motionblinds_ble/test_config_flow.py | 158 +++++++++--------- .../components/motionblinds_ble/test_cover.py | 124 ++++++++++++++ .../motionblinds_ble/test_entity.py | 54 ++++++ .../components/motionblinds_ble/test_init.py | 48 ++++++ .../motionblinds_ble/test_select.py | 76 +++++++++ .../motionblinds_ble/test_sensor.py | 107 ++++++++++++ 9 files changed, 677 insertions(+), 91 deletions(-) create mode 100644 tests/components/motionblinds_ble/test_button.py create mode 100644 tests/components/motionblinds_ble/test_cover.py create mode 100644 tests/components/motionblinds_ble/test_entity.py create mode 100644 tests/components/motionblinds_ble/test_init.py create mode 100644 tests/components/motionblinds_ble/test_select.py create mode 100644 tests/components/motionblinds_ble/test_sensor.py diff --git a/tests/components/motionblinds_ble/__init__.py b/tests/components/motionblinds_ble/__init__.py index c2385555dbf..e1caef9f51f 100644 
--- a/tests/components/motionblinds_ble/__init__.py +++ b/tests/components/motionblinds_ble/__init__.py @@ -1 +1,16 @@ """Tests for the Motionblinds Bluetooth integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Mock a fully setup config entry.""" + + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index 00db23734dd..f89cf4f305d 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -3,21 +3,140 @@ from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch +from motionblindsble.const import MotionBlindType import pytest -TEST_MAC = "abcd" -TEST_NAME = f"MOTION_{TEST_MAC.upper()}" -TEST_ADDRESS = "test_adress" +from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak +from homeassistant.components.motionblinds_ble.const import ( + CONF_BLIND_TYPE, + CONF_LOCAL_NAME, + CONF_MAC_CODE, + DOMAIN, +) +from homeassistant.const import CONF_ADDRESS + +from tests.common import MockConfigEntry +from tests.components.bluetooth import generate_advertisement_data, generate_ble_device -@pytest.fixture(name="motionblinds_ble_connect", autouse=True) -def motion_blinds_connect_fixture( - enable_bluetooth: None, +@pytest.fixture +def address() -> str: + """Address fixture.""" + return "cc:cc:cc:cc:cc:cc" + + +@pytest.fixture +def mac_code(address: str) -> str: + """MAC code fixture.""" + return "".join(address.split(":")[-3:-1]).upper() + + +@pytest.fixture +def display_name(mac_code: str) -> str: + """Display name fixture.""" + return f"Motionblind {mac_code.upper()}" + + +@pytest.fixture +def name(display_name: str) -> str: + """Name fixture.""" + return display_name.lower().replace(" ", "_") + + +@pytest.fixture +def local_name(mac_code: str) -> str: + """Local name fixture.""" + return f"MOTION_{mac_code.upper()}" + + +@pytest.fixture +def blind_type() -> MotionBlindType: + """Blind type fixture.""" + return MotionBlindType.ROLLER + + +@pytest.fixture +def service_info(local_name: str, address: str) -> BluetoothServiceInfoBleak: + """Service info fixture.""" + return BluetoothServiceInfoBleak( + name=local_name, + address=address, + device=generate_ble_device( + address=address, + name=local_name, + ), + rssi=-61, + manufacturer_data={000: b"test"}, + service_data={ + "test": bytearray(b"0000"), + }, + service_uuids=[ + "test", + ], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={000: b"test"}, + service_uuids=["test"], + ), + connectable=True, + time=0, + tx_power=-127, + ) + + +@pytest.fixture +def mock_motion_device( + blind_type: MotionBlindType, display_name: str +) -> Generator[AsyncMock]: + """Mock a MotionDevice.""" + + with patch( + "homeassistant.components.motionblinds_ble.MotionDevice", + autospec=True, + ) as mock_device: + device = mock_device.return_value + device.ble_device = Mock() + device.display_name = display_name + device.blind_type = blind_type + yield device + + +@pytest.fixture +def mock_config_entry( + blind_type: MotionBlindType, address: str, display_name: str, mac_code: str +) -> MockConfigEntry: + """Config entry fixture.""" + return MockConfigEntry( + 
title="mock_title", + domain=DOMAIN, + unique_id=address, + data={ + CONF_ADDRESS: address, + CONF_LOCAL_NAME: display_name, + CONF_MAC_CODE: mac_code, + CONF_BLIND_TYPE: blind_type.name.lower(), + }, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.motionblinds_ble.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def motionblinds_ble_connect( + enable_bluetooth: None, local_name: str, address: str ) -> Generator[tuple[AsyncMock, Mock]]: """Mock motion blinds ble connection and entry setup.""" device = Mock() - device.name = TEST_NAME - device.address = TEST_ADDRESS + device.name = local_name + device.address = address bleak_scanner = AsyncMock() bleak_scanner.discover.return_value = [device] @@ -31,9 +150,5 @@ def motion_blinds_connect_fixture( "homeassistant.components.motionblinds_ble.config_flow.bluetooth.async_get_scanner", return_value=bleak_scanner, ), - patch( - "homeassistant.components.motionblinds_ble.async_setup_entry", - return_value=True, - ), ): yield bleak_scanner, device diff --git a/tests/components/motionblinds_ble/test_button.py b/tests/components/motionblinds_ble/test_button.py new file mode 100644 index 00000000000..f0f80762759 --- /dev/null +++ b/tests/components/motionblinds_ble/test_button.py @@ -0,0 +1,47 @@ +"""Tests for Motionblinds BLE buttons.""" + +from unittest.mock import Mock + +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.motionblinds_ble.const import ( + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("button"), + [ + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, + ], +) +async def test_button( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + button: str, +) -> None: + """Test states of the button.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.{name}_{button}"}, + blocking=True, + ) + getattr(mock_motion_device, button).assert_called_once() diff --git a/tests/components/motionblinds_ble/test_config_flow.py b/tests/components/motionblinds_ble/test_config_flow.py index 4cab12269dd..05d3077ceb1 100644 --- a/tests/components/motionblinds_ble/test_config_flow.py +++ b/tests/components/motionblinds_ble/test_config_flow.py @@ -12,41 +12,19 @@ from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import TEST_ADDRESS, TEST_MAC, TEST_NAME - from tests.common import MockConfigEntry -from tests.components.bluetooth import generate_advertisement_data, generate_ble_device - -TEST_BLIND_TYPE = MotionBlindType.ROLLER.name.lower() - -BLIND_SERVICE_INFO = BluetoothServiceInfoBleak( - name=TEST_NAME, - address=TEST_ADDRESS, - device=generate_ble_device( - address="cc:cc:cc:cc:cc:cc", - name=TEST_NAME, - ), - rssi=-61, - manufacturer_data={000: b"test"}, - service_data={ - "test": bytearray(b"0000"), - }, - service_uuids=[ - "test", - ], - source="local", - advertisement=generate_advertisement_data( - manufacturer_data={000: b"test"}, - service_uuids=["test"], - ), - connectable=True, - time=0, - tx_power=-127, -) @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_manual_success(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_config_flow_manual_success( + hass: HomeAssistant, + blind_type: MotionBlindType, + mac_code: str, + address: str, + local_name: str, + display_name: str, +) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -57,28 +35,36 @@ async def test_config_flow_manual_success(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None: 
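
A note on the fixture style used across these rewritten tests: a fixture appears as a test argument only when its return value is needed; when only its side effect matters it is attached with pytest.mark.usefixtures, leaving no unused parameter behind. A tiny standalone sketch with invented fixture names:

    import pytest

    @pytest.fixture
    def mac_code() -> str:
        """A value fixture: tests take it as an argument to use the string."""
        return "A4B2"

    @pytest.fixture
    def patched_scanner() -> None:
        """A side-effect-only fixture: there is no useful value to inject."""
        # set up mocks or monkeypatching here

    def test_needs_the_value(mac_code: str) -> None:
        assert len(mac_code) == 4

    @pytest.mark.usefixtures("patched_scanner")
    def test_only_needs_the_side_effect() -> None:
        assert True
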
+@pytest.mark.usefixtures("mock_setup_entry") +async def test_config_flow_manual_error_invalid_mac( + hass: HomeAssistant, + mac_code: str, + address: str, + local_name: str, + display_name: str, + blind_type: MotionBlindType, +) -> None: """Invalid MAC code error flow manually initialized by the user.""" # Initialize @@ -101,7 +87,7 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None # Recover result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -109,15 +95,15 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} @@ -125,6 +111,7 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None @pytest.mark.usefixtures("motionblinds_ble_connect") async def test_config_flow_manual_error_no_bluetooth_adapter( hass: HomeAssistant, + mac_code: str, ) -> None: """No Bluetooth adapter error flow manually initialized by the user.""" @@ -153,14 +140,21 @@ async def test_config_flow_manual_error_no_bluetooth_adapter( ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_BLUETOOTH_ADAPTER +@pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_manual_error_could_not_find_motor( - hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] + hass: HomeAssistant, + motionblinds_ble_connect: tuple[AsyncMock, Mock], + mac_code: str, + local_name: str, + display_name: str, + address: str, + blind_type: MotionBlindType, ) -> None: """Could not find motor error flow manually initialized by the user.""" @@ -176,17 +170,17 @@ async def test_config_flow_manual_error_could_not_find_motor( motionblinds_ble_connect[1].name = "WRONG_NAME" result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {"base": const.ERROR_COULD_NOT_FIND_MOTOR} # Recover - motionblinds_ble_connect[1].name = TEST_NAME + motionblinds_ble_connect[1].name = local_name result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -194,21 +188,23 @@ async def test_config_flow_manual_error_could_not_find_motor( # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - 
{const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} async def test_config_flow_manual_error_no_devices_found( - hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] + hass: HomeAssistant, + motionblinds_ble_connect: tuple[AsyncMock, Mock], + mac_code: str, ) -> None: """No devices found error flow manually initialized by the user.""" @@ -224,19 +220,27 @@ async def test_config_flow_manual_error_no_devices_found( motionblinds_ble_connect[0].discover.return_value = [] result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_DEVICES_FOUND @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: +async def test_config_flow_bluetooth_success( + hass: HomeAssistant, + mac_code: str, + service_info: BluetoothServiceInfoBleak, + address: str, + local_name: str, + display_name: str, + blind_type: MotionBlindType, +) -> None: """Successful bluetooth discovery flow.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_BLUETOOTH}, - data=BLIND_SERVICE_INFO, + data=service_info, ) assert result["type"] is FlowResultType.FORM @@ -244,36 +248,32 @@ async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} -async def test_options_flow(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: """Test the options flow.""" - entry = MockConfigEntry( - domain=const.DOMAIN, - unique_id="0123456789", - data={ - const.CONF_BLIND_TYPE: MotionBlindType.ROLLER, - }, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.options.async_init(entry.entry_id) + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert 
result["step_id"] == "init" diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py new file mode 100644 index 00000000000..2e6f1ad587a --- /dev/null +++ b/tests/components/motionblinds_ble/test_cover.py @@ -0,0 +1,124 @@ +"""Tests for Motionblinds BLE covers.""" + +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import MotionBlindType, MotionRunningType +import pytest + +from homeassistant.components.cover import ( + ATTR_POSITION, + ATTR_TILT_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, + SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, + SERVICE_SET_COVER_POSITION, + SERVICE_SET_COVER_TILT_POSITION, + SERVICE_STOP_COVER, + SERVICE_STOP_COVER_TILT, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize("blind_type", [MotionBlindType.VENETIAN]) +@pytest.mark.parametrize( + ("service", "method", "kwargs"), + [ + (SERVICE_OPEN_COVER, "open", {}), + (SERVICE_CLOSE_COVER, "close", {}), + (SERVICE_OPEN_COVER_TILT, "open_tilt", {}), + (SERVICE_CLOSE_COVER_TILT, "close_tilt", {}), + (SERVICE_SET_COVER_POSITION, "position", {ATTR_POSITION: 5}), + (SERVICE_SET_COVER_TILT_POSITION, "tilt", {ATTR_TILT_POSITION: 10}), + (SERVICE_STOP_COVER, "stop", {}), + (SERVICE_STOP_COVER_TILT, "stop", {}), + ], +) +async def test_cover_service( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + service: str, + method: str, + kwargs: dict[str, Any], +) -> None: + """Test cover service.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + service, + {ATTR_ENTITY_ID: f"cover.{name}", **kwargs}, + blocking=True, + ) + getattr(mock_motion_device, method).assert_called_once() + + +@pytest.mark.parametrize( + ("running_type", "state"), + [ + (None, "unknown"), + (MotionRunningType.STILL, "unknown"), + (MotionRunningType.OPENING, STATE_OPENING), + (MotionRunningType.CLOSING, STATE_CLOSING), + ], +) +async def test_cover_update_running( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + running_type: str | None, + state: str, +) -> None: + """Test updating running status.""" + + await setup_integration(hass, mock_config_entry) + + async_update_running = mock_motion_device.register_running_callback.call_args[0][0] + + async_update_running(running_type) + assert hass.states.get(f"cover.{name}").state == state + + +@pytest.mark.parametrize( + ("position", "tilt", "state"), + [ + (None, None, "unknown"), + (0, 0, STATE_OPEN), + (50, 90, STATE_OPEN), + (100, 180, STATE_CLOSED), + ], +) +async def test_cover_update_position( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + position: int, + tilt: int, + state: str, +) -> None: + """Test updating cover position and tilt.""" + + await setup_integration(hass, mock_config_entry) + + async_update_position = mock_motion_device.register_position_callback.call_args[0][ + 0 + ] + + async_update_position(position, tilt) + assert hass.states.get(f"cover.{name}").state == state diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py new file mode 100644 index 
00000000000..d5927e438a5 --- /dev/null +++ b/tests/components/motionblinds_ble/test_entity.py @@ -0,0 +1,54 @@ +"""Tests for Motionblinds BLE entities.""" + +from unittest.mock import Mock + +import pytest + +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, +) +from homeassistant.components.motionblinds_ble.const import ( + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, + ATTR_SPEED, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("platform", "entity"), + [ + (Platform.BUTTON, ATTR_CONNECT), + (Platform.BUTTON, ATTR_DISCONNECT), + (Platform.BUTTON, ATTR_FAVORITE), + (Platform.SELECT, ATTR_SPEED), + ], +) +async def test_entity_update( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + platform: Platform, + entity: str, +) -> None: + """Test updating entity using homeassistant.update_entity.""" + + await async_setup_component(hass, HA_DOMAIN, {}) + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: f"{platform.name.lower()}.{name}_{entity}"}, + blocking=True, + ) + getattr(mock_motion_device, "status_query").assert_called_once_with() diff --git a/tests/components/motionblinds_ble/test_init.py b/tests/components/motionblinds_ble/test_init.py new file mode 100644 index 00000000000..706dfdc2f01 --- /dev/null +++ b/tests/components/motionblinds_ble/test_init.py @@ -0,0 +1,48 @@ +"""Tests for Motionblinds BLE init.""" + +from unittest.mock import patch + +from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak +from homeassistant.components.motionblinds_ble import options_update_listener +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_options_update_listener( + mock_config_entry: MockConfigEntry, hass: HomeAssistant +) -> None: + """Test options_update_listener.""" + + await setup_integration(hass, mock_config_entry) + + with ( + patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_custom_disconnect_time" + ) as mock_set_custom_disconnect_time, + patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_permanent_connection" + ) as set_permanent_connection, + ): + await options_update_listener(hass, mock_config_entry) + mock_set_custom_disconnect_time.assert_called_once() + set_permanent_connection.assert_called_once() + + +async def test_update_ble_device( + mock_config_entry: MockConfigEntry, + hass: HomeAssistant, + service_info: BluetoothServiceInfoBleak, +) -> None: + """Test async_update_ble_device.""" + + await setup_integration(hass, mock_config_entry) + + with patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_ble_device" + ) as mock_set_ble_device: + inject_bluetooth_service_info(hass, service_info) + mock_set_ble_device.assert_called_once() diff --git a/tests/components/motionblinds_ble/test_select.py b/tests/components/motionblinds_ble/test_select.py new file mode 100644 index 00000000000..813df89e387 --- /dev/null +++ b/tests/components/motionblinds_ble/test_select.py @@ -0,0 +1,76 @@ +"""Tests for Motionblinds BLE selects.""" + +from collections.abc import Callable +from enum import Enum +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import MotionSpeedLevel +from motionblindsble.device import MotionDevice +import pytest + +from homeassistant.components.motionblinds_ble.const import ATTR_SPEED +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize(("select", "args"), [(ATTR_SPEED, MotionSpeedLevel.HIGH)]) +async def test_select( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + select: str, + args: Any, +) -> None: + """Test select.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: f"select.{name}_{select}", + ATTR_OPTION: MotionSpeedLevel.HIGH.value, + }, + blocking=True, + ) + getattr(mock_motion_device, select).assert_called_once_with(args) + + +@pytest.mark.parametrize( + ("select", "register_callback", "value"), + [ + ( + ATTR_SPEED, + lambda device: device.register_speed_callback, + MotionSpeedLevel.HIGH, + ) + ], +) +async def test_select_update( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + select: str, + register_callback: Callable[[MotionDevice], Callable[..., None]], + value: type[Enum], +) -> None: + """Test select state update.""" + + await setup_integration(hass, mock_config_entry) + + update_func = register_callback(mock_motion_device).call_args[0][0] + + update_func(value) + assert hass.states.get(f"select.{name}_{select}").state == str(value.value) diff --git a/tests/components/motionblinds_ble/test_sensor.py b/tests/components/motionblinds_ble/test_sensor.py new file mode 100644 index 00000000000..4859fc643c9 --- /dev/null +++ b/tests/components/motionblinds_ble/test_sensor.py @@ -0,0 +1,107 @@ +"""Tests for Motionblinds BLE sensors.""" + +from collections.abc import Callable +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import ( + MotionBlindType, + MotionCalibrationType, + MotionConnectionType, +) +from motionblindsble.device import MotionDevice +import pytest + +from homeassistant.components.motionblinds_ble.const import ( + ATTR_BATTERY, + ATTR_SIGNAL_STRENGTH, +) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize("blind_type", [MotionBlindType.CURTAIN]) +@pytest.mark.parametrize( + ("sensor", "register_callback", "initial_value", "args", "expected_value"), + [ + ( + "connection_status", + lambda device: device.register_connection_callback, + MotionConnectionType.DISCONNECTED.value, + [MotionConnectionType.CONNECTING], + MotionConnectionType.CONNECTING.value, + ), + ( + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [25, True, False], + "25", + ), + ( # Battery unknown + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [None, False, False], + "unknown", + ), + ( # Wired + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [255, False, True], + "255", + ), + ( # Almost full + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [99, False, False], + "99", + ), + ( # Almost empty + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [1, False, False], + "1", + ), + ( + "calibration_status", + lambda device: device.register_calibration_callback, + "unknown", + [MotionCalibrationType.CALIBRATING], + MotionCalibrationType.CALIBRATING.value, + ), + ( + ATTR_SIGNAL_STRENGTH, + lambda device: device.register_signal_strength_callback, + "unknown", + [-50], + "-50", + ), + ], +) +async def test_sensor( + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + hass: HomeAssistant, + sensor: str, + register_callback: Callable[[MotionDevice], Callable[..., None]], + initial_value: str, + args: list[Any], + expected_value: str, +) -> None: + """Test sensors.""" + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == initial_value + update_func = register_callback(mock_motion_device).call_args[0][0] + update_func(*args) + assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == expected_value From 21c9cd1caa29a4371b943f051db75f56a6b5c212 Mon Sep 17 00:00:00 2001 From: cnico Date: Thu, 15 Aug 2024 15:44:49 +0200 Subject: [PATCH 0767/1635] Add switch platform to chacon_dio integration (#122514) * Adding switch platform for dio devices * Remove useless logger * Review corrections * review corrections --- .../components/chacon_dio/__init__.py | 2 +- homeassistant/components/chacon_dio/switch.py | 74 ++++++++++ tests/components/chacon_dio/conftest.py | 2 + .../chacon_dio/snapshots/test_switch.ambr | 48 +++++++ tests/components/chacon_dio/test_switch.py | 132 ++++++++++++++++++ 5 files changed, 257 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/chacon_dio/switch.py create mode 100644 tests/components/chacon_dio/snapshots/test_switch.ambr create mode 100644 tests/components/chacon_dio/test_switch.py diff --git a/homeassistant/components/chacon_dio/__init__.py b/homeassistant/components/chacon_dio/__init__.py index 00558572fca..94617cb3929 100644 --- a/homeassistant/components/chacon_dio/__init__.py +++ b/homeassistant/components/chacon_dio/__init__.py @@ -17,7 +17,7 @@ from homeassistant.core import Event, HomeAssistant _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.COVER] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.SWITCH] @dataclass diff --git a/homeassistant/components/chacon_dio/switch.py b/homeassistant/components/chacon_dio/switch.py new file mode 100644 index 00000000000..be178c3c3b5 --- /dev/null +++ 
b/homeassistant/components/chacon_dio/switch.py @@ -0,0 +1,74 @@ +"""Switch Platform for Chacon Dio REV-LIGHT and switch plug devices.""" + +import logging +from typing import Any + +from dio_chacon_wifi_api.const import DeviceTypeEnum + +from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ChaconDioConfigEntry +from .entity import ChaconDioEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ChaconDioConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Chacon Dio switch devices.""" + data = config_entry.runtime_data + client = data.client + + async_add_entities( + ChaconDioSwitch(client, device) + for device in data.list_devices + if device["type"] + in (DeviceTypeEnum.SWITCH_LIGHT.value, DeviceTypeEnum.SWITCH_PLUG.value) + ) + + +class ChaconDioSwitch(ChaconDioEntity, SwitchEntity): + """Object for controlling a Chacon Dio switch.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + _attr_name = None + + def _update_attr(self, data: dict[str, Any]) -> None: + """Recomputes the attributes values either at init or when the device state changes.""" + self._attr_available = data["connected"] + self._attr_is_on = data["is_on"] + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch. + + Turned on status is effective after the server callback that triggers callback_device_state. + """ + + _LOGGER.debug( + "Turn on the switch %s , %s, %s", + self.target_id, + self.entity_id, + self._attr_is_on, + ) + + await self.client.switch_switch(self.target_id, True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch. + + Turned on status is effective after the server callback that triggers callback_device_state. 
+ """ + + _LOGGER.debug( + "Turn off the switch %s , %s, %s", + self.target_id, + self.entity_id, + self._attr_is_on, + ) + + await self.client.switch_switch(self.target_id, False) diff --git a/tests/components/chacon_dio/conftest.py b/tests/components/chacon_dio/conftest.py index 3c3b970cec0..186bc468bee 100644 --- a/tests/components/chacon_dio/conftest.py +++ b/tests/components/chacon_dio/conftest.py @@ -65,6 +65,8 @@ def mock_dio_chacon_client() -> Generator[AsyncMock]: client.get_user_id.return_value = "dummy-user-id" client.search_all_devices.return_value = MOCK_COVER_DEVICE + client.switch_switch.return_value = {} + client.move_shutter_direction.return_value = {} client.disconnect.return_value = {} diff --git a/tests/components/chacon_dio/snapshots/test_switch.ambr b/tests/components/chacon_dio/snapshots/test_switch.ambr new file mode 100644 index 00000000000..7a65dad5445 --- /dev/null +++ b/tests/components/chacon_dio/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_entities[switch.switch_mock_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_mock_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'chacon_dio', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'L4HActuator_idmock1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[switch.switch_mock_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Switch mock 1', + }), + 'context': , + 'entity_id': 'switch.switch_mock_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/chacon_dio/test_switch.py b/tests/components/chacon_dio/test_switch.py new file mode 100644 index 00000000000..a5ad0d0ea13 --- /dev/null +++ b/tests/components/chacon_dio/test_switch.py @@ -0,0 +1,132 @@ +"""Test the Chacon Dio switch.""" + +from collections.abc import Callable +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +SWITCH_ENTITY_ID = "switch.switch_mock_1" + +MOCK_SWITCH_DEVICE = { + "L4HActuator_idmock1": { + "id": "L4HActuator_idmock1", + "name": "Switch mock 1", + "type": "SWITCH_LIGHT", + "model": "CERNwd-3B_1.0.6", + "connected": True, + "is_on": True, + } +} + + +async def test_entities( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Chacon Dio switches.""" + + mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE + + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_switch_actions( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the actions on the Chacon Dio switch.""" + + mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, + blocking=True, + ) + state = hass.states.get(SWITCH_ENTITY_ID) + assert state.state == STATE_ON + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_ENTITY_ID}, + blocking=True, + ) + state = hass.states.get(SWITCH_ENTITY_ID) + # turn off does not change directly the state, it is made by a server side callback. + assert state.state == STATE_ON + + +async def test_switch_callbacks( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the callbacks on the Chacon Dio switches.""" + + mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE + + await setup_integration(hass, mock_config_entry) + + # Server side callback tests + # We find the callback method on the mock client + callback_device_state_function: Callable = ( + mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1] + ) + + # Define a method to simply call it + async def _callback_device_state_function(is_on: bool) -> None: + callback_device_state_function( + { + "id": "L4HActuator_idmock1", + "connected": True, + "is_on": is_on, + } + ) + await hass.async_block_till_done() + + # And call it to effectively launch the callback as the server would do + await _callback_device_state_function(False) + state = hass.states.get(SWITCH_ENTITY_ID) + assert state + assert state.state == STATE_OFF + + +async def test_no_switch_found( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test the switch absence.""" + + mock_dio_chacon_client.search_all_devices.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert not hass.states.async_entity_ids(SWITCH_DOMAIN) From 983806817ba524a14b94264a260adf309d2d94bd Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Thu, 15 Aug 2024 15:48:02 +0200 Subject: [PATCH 0768/1635] Add ArtSound as a virtual integration (#122636) * Add ArtSound as a virtual integration and brand * Remove ArtSound as brand * Add docstring for 
__init__ * Address hassfest --- homeassistant/components/artsound/__init__.py | 1 + homeassistant/components/artsound/manifest.json | 6 ++++++ homeassistant/generated/integrations.json | 5 +++++ 3 files changed, 12 insertions(+) create mode 100644 homeassistant/components/artsound/__init__.py create mode 100644 homeassistant/components/artsound/manifest.json diff --git a/homeassistant/components/artsound/__init__.py b/homeassistant/components/artsound/__init__.py new file mode 100644 index 00000000000..149f06bc7c7 --- /dev/null +++ b/homeassistant/components/artsound/__init__.py @@ -0,0 +1 @@ +"""Virtual integration: ArtSound.""" diff --git a/homeassistant/components/artsound/manifest.json b/homeassistant/components/artsound/manifest.json new file mode 100644 index 00000000000..589ba862102 --- /dev/null +++ b/homeassistant/components/artsound/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "artsound", + "name": "ArtSound", + "integration_type": "virtual", + "supported_by": "linkplay" +} diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 1415ab51a75..6d2c5ec9a30 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -455,6 +455,11 @@ "config_flow": false, "iot_class": "local_polling" }, + "artsound": { + "name": "ArtSound", + "integration_type": "virtual", + "supported_by": "linkplay" + }, "aruba": { "name": "Aruba", "integrations": { From a50aeb0a660427303e13f6d07616f20988302b93 Mon Sep 17 00:00:00 2001 From: Glenn Waters Date: Thu, 15 Aug 2024 10:14:01 -0400 Subject: [PATCH 0769/1635] Environment Canada weather format fix (#123960) * Add missing isoformat. * Move fixture loading to common conftest.py * Add deepcopy. --- .../components/environment_canada/weather.py | 8 ++++-- .../components/environment_canada/conftest.py | 27 +++++++++++++++++++ .../snapshots/test_weather.ambr | 22 +++++++-------- .../environment_canada/test_diagnostics.py | 2 ++ .../environment_canada/test_weather.py | 24 +++++------------ 5 files changed, 53 insertions(+), 30 deletions(-) create mode 100644 tests/components/environment_canada/conftest.py diff --git a/homeassistant/components/environment_canada/weather.py b/homeassistant/components/environment_canada/weather.py index 2d54a313dde..1871062c2e9 100644 --- a/homeassistant/components/environment_canada/weather.py +++ b/homeassistant/components/environment_canada/weather.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + from homeassistant.components.weather import ( ATTR_CONDITION_CLEAR_NIGHT, ATTR_CONDITION_CLOUDY, @@ -190,10 +192,12 @@ def get_forecast(ec_data, hourly) -> list[Forecast] | None: if not (half_days := ec_data.daily_forecasts): return None - def get_day_forecast(fcst: list[dict[str, str]]) -> Forecast: + def get_day_forecast( + fcst: list[dict[str, Any]], + ) -> Forecast: high_temp = int(fcst[0]["temperature"]) if len(fcst) == 2 else None return { - ATTR_FORECAST_TIME: fcst[0]["timestamp"], + ATTR_FORECAST_TIME: fcst[0]["timestamp"].isoformat(), ATTR_FORECAST_NATIVE_TEMP: high_temp, ATTR_FORECAST_NATIVE_TEMP_LOW: int(fcst[-1]["temperature"]), ATTR_FORECAST_PRECIPITATION_PROBABILITY: int( diff --git a/tests/components/environment_canada/conftest.py b/tests/components/environment_canada/conftest.py new file mode 100644 index 00000000000..69cec187d11 --- /dev/null +++ b/tests/components/environment_canada/conftest.py @@ -0,0 +1,27 @@ +"""Common fixture for Environment Canada tests.""" + +import contextlib +from datetime 
import datetime +import json + +import pytest + +from tests.common import load_fixture + + +@pytest.fixture +def ec_data(): + """Load Environment Canada data.""" + + def date_hook(weather): + """Convert timestamp string to datetime.""" + + if t := weather.get("timestamp"): + with contextlib.suppress(ValueError): + weather["timestamp"] = datetime.fromisoformat(t) + return weather + + return json.loads( + load_fixture("environment_canada/current_conditions_data.json"), + object_hook=date_hook, + ) diff --git a/tests/components/environment_canada/snapshots/test_weather.ambr b/tests/components/environment_canada/snapshots/test_weather.ambr index 7ba37110c2a..cfa0ad912a4 100644 --- a/tests/components/environment_canada/snapshots/test_weather.ambr +++ b/tests/components/environment_canada/snapshots/test_weather.ambr @@ -5,35 +5,35 @@ 'forecast': list([ dict({ 'condition': 'sunny', - 'datetime': '2022-10-04 15:00:00+00:00', + 'datetime': '2022-10-04T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 18.0, 'templow': 3.0, }), dict({ 'condition': 'sunny', - 'datetime': '2022-10-05 15:00:00+00:00', + 'datetime': '2022-10-05T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 20.0, 'templow': 9.0, }), dict({ 'condition': 'partlycloudy', - 'datetime': '2022-10-06 15:00:00+00:00', + 'datetime': '2022-10-06T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 20.0, 'templow': 7.0, }), dict({ 'condition': 'rainy', - 'datetime': '2022-10-07 15:00:00+00:00', + 'datetime': '2022-10-07T15:00:00+00:00', 'precipitation_probability': 40, 'temperature': 13.0, 'templow': 1.0, }), dict({ 'condition': 'partlycloudy', - 'datetime': '2022-10-08 15:00:00+00:00', + 'datetime': '2022-10-08T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 10.0, 'templow': 3.0, @@ -48,42 +48,42 @@ 'forecast': list([ dict({ 'condition': 'clear-night', - 'datetime': '2022-10-03 15:00:00+00:00', + 'datetime': '2022-10-03T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': None, 'templow': -1.0, }), dict({ 'condition': 'sunny', - 'datetime': '2022-10-04 15:00:00+00:00', + 'datetime': '2022-10-04T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 18.0, 'templow': 3.0, }), dict({ 'condition': 'sunny', - 'datetime': '2022-10-05 15:00:00+00:00', + 'datetime': '2022-10-05T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 20.0, 'templow': 9.0, }), dict({ 'condition': 'partlycloudy', - 'datetime': '2022-10-06 15:00:00+00:00', + 'datetime': '2022-10-06T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 20.0, 'templow': 7.0, }), dict({ 'condition': 'rainy', - 'datetime': '2022-10-07 15:00:00+00:00', + 'datetime': '2022-10-07T15:00:00+00:00', 'precipitation_probability': 40, 'temperature': 13.0, 'templow': 1.0, }), dict({ 'condition': 'partlycloudy', - 'datetime': '2022-10-08 15:00:00+00:00', + 'datetime': '2022-10-08T15:00:00+00:00', 'precipitation_probability': 0, 'temperature': 10.0, 'templow': 3.0, diff --git a/tests/components/environment_canada/test_diagnostics.py b/tests/components/environment_canada/test_diagnostics.py index 7e9c8691f90..79b72961124 100644 --- a/tests/components/environment_canada/test_diagnostics.py +++ b/tests/components/environment_canada/test_diagnostics.py @@ -1,6 +1,7 @@ """Test Environment Canada diagnostics.""" import json +from typing import Any from syrupy import SnapshotAssertion @@ -26,6 +27,7 @@ async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, snapshot: SnapshotAssertion, 
+ ec_data: dict[str, Any], ) -> None: """Test config entry diagnostics.""" diff --git a/tests/components/environment_canada/test_weather.py b/tests/components/environment_canada/test_weather.py index e8c21e2dc06..8e22f68462f 100644 --- a/tests/components/environment_canada/test_weather.py +++ b/tests/components/environment_canada/test_weather.py @@ -1,6 +1,7 @@ """Test weather.""" -import json +import copy +from typing import Any from syrupy.assertion import SnapshotAssertion @@ -12,23 +13,17 @@ from homeassistant.core import HomeAssistant from . import init_integration -from tests.common import load_fixture - async def test_forecast_daily( - hass: HomeAssistant, - snapshot: SnapshotAssertion, + hass: HomeAssistant, snapshot: SnapshotAssertion, ec_data: dict[str, Any] ) -> None: """Test basic forecast.""" - ec_data = json.loads( - load_fixture("environment_canada/current_conditions_data.json") - ) - # First entry in test data is a half day; we don't want that for this test - del ec_data["daily_forecasts"][0] + local_ec_data = copy.deepcopy(ec_data) + del local_ec_data["daily_forecasts"][0] - await init_integration(hass, ec_data) + await init_integration(hass, local_ec_data) response = await hass.services.async_call( WEATHER_DOMAIN, @@ -44,15 +39,10 @@ async def test_forecast_daily( async def test_forecast_daily_with_some_previous_days_data( - hass: HomeAssistant, - snapshot: SnapshotAssertion, + hass: HomeAssistant, snapshot: SnapshotAssertion, ec_data: dict[str, Any] ) -> None: """Test forecast with half day at start.""" - ec_data = json.loads( - load_fixture("environment_canada/current_conditions_data.json") - ) - await init_integration(hass, ec_data) response = await hass.services.async_call( From 874ae15d6a8915d2172077bc0ff830175f4abdb8 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 15 Aug 2024 18:16:49 +0200 Subject: [PATCH 0770/1635] Fix motionblinds ble test (#123990) * Fix motionblinds ble test * Fix motionblinds ble test --- tests/components/motionblinds_ble/test_button.py | 2 +- tests/components/motionblinds_ble/test_cover.py | 9 ++++++--- tests/components/motionblinds_ble/test_entity.py | 2 +- tests/components/motionblinds_ble/test_init.py | 5 +++-- tests/components/motionblinds_ble/test_select.py | 4 ++-- tests/components/motionblinds_ble/test_sensor.py | 2 +- 6 files changed, 14 insertions(+), 10 deletions(-) diff --git a/tests/components/motionblinds_ble/test_button.py b/tests/components/motionblinds_ble/test_button.py index f0f80762759..9c27056c929 100644 --- a/tests/components/motionblinds_ble/test_button.py +++ b/tests/components/motionblinds_ble/test_button.py @@ -28,10 +28,10 @@ from tests.common import MockConfigEntry ], ) async def test_button( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, button: str, ) -> None: """Test states of the button.""" diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py index 2e6f1ad587a..2f6b33b3017 100644 --- a/tests/components/motionblinds_ble/test_cover.py +++ b/tests/components/motionblinds_ble/test_cover.py @@ -31,6 +31,7 @@ from . 
import setup_integration from tests.common import MockConfigEntry +@pytest.mark.usefixtures("motionblinds_ble_connect") @pytest.mark.parametrize("blind_type", [MotionBlindType.VENETIAN]) @pytest.mark.parametrize( ("service", "method", "kwargs"), @@ -46,10 +47,10 @@ from tests.common import MockConfigEntry ], ) async def test_cover_service( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, service: str, method: str, kwargs: dict[str, Any], @@ -67,6 +68,7 @@ async def test_cover_service( getattr(mock_motion_device, method).assert_called_once() +@pytest.mark.usefixtures("motionblinds_ble_connect") @pytest.mark.parametrize( ("running_type", "state"), [ @@ -77,10 +79,10 @@ async def test_cover_service( ], ) async def test_cover_update_running( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, running_type: str | None, state: str, ) -> None: @@ -94,6 +96,7 @@ async def test_cover_update_running( assert hass.states.get(f"cover.{name}").state == state +@pytest.mark.usefixtures("motionblinds_ble_connect") @pytest.mark.parametrize( ("position", "tilt", "state"), [ @@ -104,10 +107,10 @@ async def test_cover_update_running( ], ) async def test_cover_update_position( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, position: int, tilt: int, state: str, diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py index d5927e438a5..1bfd3b185e5 100644 --- a/tests/components/motionblinds_ble/test_entity.py +++ b/tests/components/motionblinds_ble/test_entity.py @@ -33,10 +33,10 @@ from tests.common import MockConfigEntry ], ) async def test_entity_update( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, platform: Platform, entity: str, ) -> None: diff --git a/tests/components/motionblinds_ble/test_init.py b/tests/components/motionblinds_ble/test_init.py index 706dfdc2f01..09596bd8d5e 100644 --- a/tests/components/motionblinds_ble/test_init.py +++ b/tests/components/motionblinds_ble/test_init.py @@ -13,7 +13,8 @@ from tests.components.bluetooth import inject_bluetooth_service_info async def test_options_update_listener( - mock_config_entry: MockConfigEntry, hass: HomeAssistant + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, ) -> None: """Test options_update_listener.""" @@ -33,8 +34,8 @@ async def test_options_update_listener( async def test_update_ble_device( - mock_config_entry: MockConfigEntry, hass: HomeAssistant, + mock_config_entry: MockConfigEntry, service_info: BluetoothServiceInfoBleak, ) -> None: """Test async_update_ble_device.""" diff --git a/tests/components/motionblinds_ble/test_select.py b/tests/components/motionblinds_ble/test_select.py index 813df89e387..2bd1bb30ec2 100644 --- a/tests/components/motionblinds_ble/test_select.py +++ b/tests/components/motionblinds_ble/test_select.py @@ -24,10 +24,10 @@ from tests.common import MockConfigEntry @pytest.mark.parametrize(("select", "args"), [(ATTR_SPEED, MotionSpeedLevel.HIGH)]) async def test_select( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, select: str, args: Any, ) -> None: @@ -58,10 +58,10 @@ async def test_select( ], ) async def test_select_update( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: 
Mock, name: str, - hass: HomeAssistant, select: str, register_callback: Callable[[MotionDevice], Callable[..., None]], value: type[Enum], diff --git a/tests/components/motionblinds_ble/test_sensor.py b/tests/components/motionblinds_ble/test_sensor.py index 4859fc643c9..54d2fbcb064 100644 --- a/tests/components/motionblinds_ble/test_sensor.py +++ b/tests/components/motionblinds_ble/test_sensor.py @@ -87,10 +87,10 @@ from tests.common import MockConfigEntry ], ) async def test_sensor( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_motion_device: Mock, name: str, - hass: HomeAssistant, sensor: str, register_callback: Callable[[MotionDevice], Callable[..., None]], initial_value: str, From e39bfeac08af624b316a538d6ce39e981dc9edc0 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Thu, 15 Aug 2024 18:18:05 +0200 Subject: [PATCH 0771/1635] Allow shared Synology DSM Photo albums shown in media browser (#123613) --- .../components/synology_dsm/manifest.json | 2 +- .../components/synology_dsm/media_source.py | 58 +++++++++++++------ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../synology_dsm/test_media_source.py | 35 ++++++----- 5 files changed, 65 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/synology_dsm/manifest.json b/homeassistant/components/synology_dsm/manifest.json index 9d977609d14..5d42188357b 100644 --- a/homeassistant/components/synology_dsm/manifest.json +++ b/homeassistant/components/synology_dsm/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/synology_dsm", "iot_class": "local_polling", "loggers": ["synology_dsm"], - "requirements": ["py-synologydsm-api==2.4.5"], + "requirements": ["py-synologydsm-api==2.5.2"], "ssdp": [ { "manufacturer": "Synology", diff --git a/homeassistant/components/synology_dsm/media_source.py b/homeassistant/components/synology_dsm/media_source.py index ace5733c222..d35b262809c 100644 --- a/homeassistant/components/synology_dsm/media_source.py +++ b/homeassistant/components/synology_dsm/media_source.py @@ -46,18 +46,24 @@ class SynologyPhotosMediaSourceIdentifier: self.cache_key = None self.file_name = None self.is_shared = False + self.passphrase = "" - if parts: - self.unique_id = parts[0] - if len(parts) > 1: - self.album_id = parts[1] - if len(parts) > 2: - self.cache_key = parts[2] - if len(parts) > 3: - self.file_name = parts[3] - if self.file_name.endswith(SHARED_SUFFIX): - self.is_shared = True - self.file_name = self.file_name.removesuffix(SHARED_SUFFIX) + self.unique_id = parts[0] + + if len(parts) > 1: + album_parts = parts[1].split("_") + self.album_id = album_parts[0] + if len(album_parts) > 1: + self.passphrase = parts[1].replace(f"{self.album_id}_", "") + + if len(parts) > 2: + self.cache_key = parts[2] + + if len(parts) > 3: + self.file_name = parts[3] + if self.file_name.endswith(SHARED_SUFFIX): + self.is_shared = True + self.file_name = self.file_name.removesuffix(SHARED_SUFFIX) class SynologyPhotosMediaSource(MediaSource): @@ -135,7 +141,7 @@ class SynologyPhotosMediaSource(MediaSource): ret.extend( BrowseMediaSource( domain=DOMAIN, - identifier=f"{item.identifier}/{album.album_id}", + identifier=f"{item.identifier}/{album.album_id}_{album.passphrase}", media_class=MediaClass.DIRECTORY, media_content_type=MediaClass.IMAGE, title=album.name, @@ -149,7 +155,7 @@ class SynologyPhotosMediaSource(MediaSource): # Request items of album # Get Items - album = SynoPhotosAlbum(int(identifier.album_id), "", 0) + 
album = SynoPhotosAlbum(int(identifier.album_id), "", 0, identifier.passphrase) try: album_items = await diskstation.api.photos.get_items_from_album( album, 0, 1000 @@ -170,7 +176,12 @@ class SynologyPhotosMediaSource(MediaSource): ret.append( BrowseMediaSource( domain=DOMAIN, - identifier=f"{identifier.unique_id}/{identifier.album_id}/{album_item.thumbnail_cache_key}/{album_item.file_name}{suffix}", + identifier=( + f"{identifier.unique_id}/" + f"{identifier.album_id}_{identifier.passphrase}/" + f"{album_item.thumbnail_cache_key}/" + f"{album_item.file_name}{suffix}" + ), media_class=MediaClass.IMAGE, media_content_type=mime_type, title=album_item.file_name, @@ -197,7 +208,12 @@ class SynologyPhotosMediaSource(MediaSource): if identifier.is_shared: suffix = SHARED_SUFFIX return PlayMedia( - f"/synology_dsm/{identifier.unique_id}/{identifier.cache_key}/{identifier.file_name}{suffix}", + ( + f"/synology_dsm/{identifier.unique_id}/" + f"{identifier.cache_key}/" + f"{identifier.file_name}{suffix}/" + f"{identifier.passphrase}" + ), mime_type, ) @@ -231,18 +247,24 @@ class SynologyDsmMediaView(http.HomeAssistantView): if not self.hass.data.get(DOMAIN): raise web.HTTPNotFound # location: {cache_key}/{filename} - cache_key, file_name = location.split("/") + cache_key, file_name, passphrase = location.split("/") image_id = int(cache_key.split("_")[0]) + if shared := file_name.endswith(SHARED_SUFFIX): file_name = file_name.removesuffix(SHARED_SUFFIX) + mime_type, _ = mimetypes.guess_type(file_name) if not isinstance(mime_type, str): raise web.HTTPNotFound + diskstation: SynologyDSMData = self.hass.data[DOMAIN][source_dir_id] assert diskstation.api.photos is not None - item = SynoPhotosItem(image_id, "", "", "", cache_key, "", shared) + item = SynoPhotosItem(image_id, "", "", "", cache_key, "xl", shared, passphrase) try: - image = await diskstation.api.photos.download_item(item) + if passphrase: + image = await diskstation.api.photos.download_item_thumbnail(item) + else: + image = await diskstation.api.photos.download_item(item) except SynologyDSMException as exc: raise web.HTTPNotFound from exc return web.Response(body=image, content_type=mime_type) diff --git a/requirements_all.txt b/requirements_all.txt index 649395442ad..fc8351c9237 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1662,7 +1662,7 @@ py-schluter==0.1.7 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.5 +py-synologydsm-api==2.5.2 # homeassistant.components.zabbix py-zabbix==1.1.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0b77e322e95..5a89f6c644d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1354,7 +1354,7 @@ py-nightscout==1.2.2 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.5 +py-synologydsm-api==2.5.2 # homeassistant.components.hdmi_cec pyCEC==0.5.2 diff --git a/tests/components/synology_dsm/test_media_source.py b/tests/components/synology_dsm/test_media_source.py index f7ab26997ba..0c7ab6bc1cc 100644 --- a/tests/components/synology_dsm/test_media_source.py +++ b/tests/components/synology_dsm/test_media_source.py @@ -48,11 +48,15 @@ def dsm_with_photos() -> MagicMock: dsm.surveillance_station.update = AsyncMock(return_value=True) dsm.upgrade.update = AsyncMock(return_value=True) - dsm.photos.get_albums = AsyncMock(return_value=[SynoPhotosAlbum(1, "Album 1", 10)]) + dsm.photos.get_albums = AsyncMock( + return_value=[SynoPhotosAlbum(1, "Album 1", 10, "")] + ) 
dsm.photos.get_items_from_album = AsyncMock( return_value=[ - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", False), - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True), + SynoPhotosItem( + 10, "", "filename.jpg", 12345, "10_1298753", "sm", False, "" + ), + SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True, ""), ] ) dsm.photos.get_item_thumbnail_url = AsyncMock( @@ -96,17 +100,22 @@ async def test_resolve_media_bad_identifier( [ ( "ABC012345/10/27643_876876/filename.jpg", - "/synology_dsm/ABC012345/27643_876876/filename.jpg", + "/synology_dsm/ABC012345/27643_876876/filename.jpg/", "image/jpeg", ), ( "ABC012345/12/12631_47189/filename.png", - "/synology_dsm/ABC012345/12631_47189/filename.png", + "/synology_dsm/ABC012345/12631_47189/filename.png/", "image/png", ), ( "ABC012345/12/12631_47189/filename.png_shared", - "/synology_dsm/ABC012345/12631_47189/filename.png_shared", + "/synology_dsm/ABC012345/12631_47189/filename.png_shared/", + "image/png", + ), + ( + "ABC012345/12_dmypass/12631_47189/filename.png", + "/synology_dsm/ABC012345/12631_47189/filename.png/dmypass", "image/png", ), ], @@ -250,7 +259,7 @@ async def test_browse_media_get_albums( assert result.children[0].identifier == "mocked_syno_dsm_entry/0" assert result.children[0].title == "All images" assert isinstance(result.children[1], BrowseMedia) - assert result.children[1].identifier == "mocked_syno_dsm_entry/1" + assert result.children[1].identifier == "mocked_syno_dsm_entry/1_" assert result.children[1].title == "Album 1" @@ -382,7 +391,7 @@ async def test_browse_media_get_items( assert len(result.children) == 2 item = result.children[0] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg" + assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -391,7 +400,7 @@ async def test_browse_media_get_items( assert item.thumbnail == "http://my.thumbnail.url" item = result.children[1] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg_shared" + assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg_shared" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -435,24 +444,24 @@ async def test_media_view( assert await hass.config_entries.async_setup(entry.entry_id) with pytest.raises(web.HTTPNotFound): - await view.get(request, "", "10_1298753/filename") + await view.get(request, "", "10_1298753/filename/") # exception in download_item() dsm_with_photos.photos.download_item = AsyncMock( side_effect=SynologyDSMException("", None) ) with pytest.raises(web.HTTPNotFound): - await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg") + await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/") # success dsm_with_photos.photos.download_item = AsyncMock(return_value=b"xxxx") with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/" ) assert isinstance(result, web.Response) with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared" + request, 
"mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared/" ) assert isinstance(result, web.Response) From 24a20c75eb4793d3fafd75b9f330d4c07f83c260 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 15 Aug 2024 18:21:07 +0200 Subject: [PATCH 0772/1635] Add options flow to File (#120269) * Add options flow to File * Review comments --- homeassistant/components/file/__init__.py | 32 ++++++- homeassistant/components/file/config_flow.py | 70 +++++++++++++-- homeassistant/components/file/notify.py | 5 +- homeassistant/components/file/sensor.py | 7 +- homeassistant/components/file/strings.json | 16 ++++ tests/components/file/test_config_flow.py | 89 +++++++++++++++++--- tests/components/file/test_init.py | 65 ++++++++++++++ tests/components/file/test_notify.py | 35 ++++++-- tests/components/file/test_sensor.py | 26 +++++- 9 files changed, 307 insertions(+), 38 deletions(-) create mode 100644 tests/components/file/test_init.py diff --git a/homeassistant/components/file/__init__.py b/homeassistant/components/file/__init__.py index aa3e241cc81..0c9cfee5f4d 100644 --- a/homeassistant/components/file/__init__.py +++ b/homeassistant/components/file/__init__.py @@ -1,5 +1,8 @@ """The file component.""" +from copy import deepcopy +from typing import Any + from homeassistant.components.notify import migrate_notify_issue from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( @@ -84,7 +87,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a file component entry.""" - config = dict(entry.data) + config = {**entry.data, **entry.options} filepath: str = config[CONF_FILE_PATH] if filepath and not await hass.async_add_executor_job( hass.config.is_allowed_path, filepath @@ -98,6 +101,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups( entry, [Platform(entry.data[CONF_PLATFORM])] ) + entry.async_on_unload(entry.add_update_listener(update_listener)) if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data: # New notify entities are being setup through the config entry, # but during the deprecation period we want to keep the legacy notify platform, @@ -121,3 +125,29 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await hass.config_entries.async_unload_platforms( entry, [entry.data[CONF_PLATFORM]] ) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate config entry.""" + if config_entry.version > 2: + # Downgraded from future + return False + + if config_entry.version < 2: + # Move optional fields from data to options in config entry + data: dict[str, Any] = deepcopy(dict(config_entry.data)) + options = {} + for key, value in config_entry.data.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + + hass.config_entries.async_update_entry( + config_entry, version=2, data=data, options=options + ) + return True diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index 2d729473929..8cb58ec1f47 100644 --- a/homeassistant/components/file/config_flow.py +++ 
b/homeassistant/components/file/config_flow.py @@ -1,11 +1,18 @@ """Config flow for file integration.""" +from copy import deepcopy import os from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, + OptionsFlowWithConfigEntry, +) from homeassistant.const import ( CONF_FILE_PATH, CONF_FILENAME, @@ -15,6 +22,7 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, Platform, ) +from homeassistant.core import callback from homeassistant.helpers.selector import ( BooleanSelector, BooleanSelectorConfig, @@ -31,27 +39,44 @@ BOOLEAN_SELECTOR = BooleanSelector(BooleanSelectorConfig()) TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) TEXT_SELECTOR = TextSelector(TextSelectorConfig(type=TextSelectorType.TEXT)) -FILE_FLOW_SCHEMAS = { +FILE_OPTIONS_SCHEMAS = { Platform.SENSOR.value: vol.Schema( { - vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, vol.Optional(CONF_VALUE_TEMPLATE): TEMPLATE_SELECTOR, vol.Optional(CONF_UNIT_OF_MEASUREMENT): TEXT_SELECTOR, } ), Platform.NOTIFY.value: vol.Schema( { - vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, vol.Optional(CONF_TIMESTAMP, default=False): BOOLEAN_SELECTOR, } ), } +FILE_FLOW_SCHEMAS = { + Platform.SENSOR.value: vol.Schema( + { + vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, + } + ).extend(FILE_OPTIONS_SCHEMAS[Platform.SENSOR.value].schema), + Platform.NOTIFY.value: vol.Schema( + { + vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, + } + ).extend(FILE_OPTIONS_SCHEMAS[Platform.NOTIFY.value].schema), +} + class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a file config flow.""" - VERSION = 1 + VERSION = 2 + + @staticmethod + @callback + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + """Get the options flow for this handler.""" + return FileOptionsFlowHandler(config_entry) async def validate_file_path(self, file_path: str) -> bool: """Ensure the file path is valid.""" @@ -80,7 +105,13 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): errors[CONF_FILE_PATH] = "not_allowed" else: title = f"{DEFAULT_NAME} [{user_input[CONF_FILE_PATH]}]" - return self.async_create_entry(data=user_input, title=title) + data = deepcopy(user_input) + options = {} + for key, value in user_input.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + return self.async_create_entry(data=data, title=title, options=options) return self.async_show_form( step_id=platform, data_schema=FILE_FLOW_SCHEMAS[platform], errors=errors @@ -114,4 +145,29 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): else: file_path = import_data[CONF_FILE_PATH] title = f"{name} [{file_path}]" - return self.async_create_entry(title=title, data=import_data) + data = deepcopy(import_data) + options = {} + for key, value in import_data.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + return self.async_create_entry(title=title, data=data, options=options) + + +class FileOptionsFlowHandler(OptionsFlowWithConfigEntry): + """Handle File options.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage File options.""" + if user_input: + return self.async_create_entry(data=user_input) + + platform = self.config_entry.data[CONF_PLATFORM] + return self.async_show_form( + step_id="init", + 
data_schema=self.add_suggested_values_to_schema( + FILE_OPTIONS_SCHEMAS[platform], self.config_entry.options or {} + ), + ) diff --git a/homeassistant/components/file/notify.py b/homeassistant/components/file/notify.py index 1516efd6d96..9411b7cf1a8 100644 --- a/homeassistant/components/file/notify.py +++ b/homeassistant/components/file/notify.py @@ -5,7 +5,6 @@ from __future__ import annotations from functools import partial import logging import os -from types import MappingProxyType from typing import Any, TextIO import voluptuous as vol @@ -109,7 +108,7 @@ async def async_setup_entry( ) -> None: """Set up notify entity.""" unique_id = entry.entry_id - async_add_entities([FileNotifyEntity(unique_id, entry.data)]) + async_add_entities([FileNotifyEntity(unique_id, {**entry.data, **entry.options})]) class FileNotifyEntity(NotifyEntity): @@ -118,7 +117,7 @@ class FileNotifyEntity(NotifyEntity): _attr_icon = FILE_ICON _attr_supported_features = NotifyEntityFeature.TITLE - def __init__(self, unique_id: str, config: MappingProxyType[str, Any]) -> None: + def __init__(self, unique_id: str, config: dict[str, Any]) -> None: """Initialize the service.""" self._file_path: str = config[CONF_FILE_PATH] self._add_timestamp: bool = config.get(CONF_TIMESTAMP, False) diff --git a/homeassistant/components/file/sensor.py b/homeassistant/components/file/sensor.py index fda0d14a6aa..e37a3df86a6 100644 --- a/homeassistant/components/file/sensor.py +++ b/homeassistant/components/file/sensor.py @@ -60,14 +60,15 @@ async def async_setup_entry( ) -> None: """Set up the file sensor.""" config = dict(entry.data) + options = dict(entry.options) file_path: str = config[CONF_FILE_PATH] unique_id: str = entry.entry_id name: str = config.get(CONF_NAME, DEFAULT_NAME) - unit: str | None = config.get(CONF_UNIT_OF_MEASUREMENT) + unit: str | None = options.get(CONF_UNIT_OF_MEASUREMENT) value_template: Template | None = None - if CONF_VALUE_TEMPLATE in config: - value_template = Template(config[CONF_VALUE_TEMPLATE], hass) + if CONF_VALUE_TEMPLATE in options: + value_template = Template(options[CONF_VALUE_TEMPLATE], hass) async_add_entities( [FileSensor(unique_id, name, file_path, unit, value_template)], True diff --git a/homeassistant/components/file/strings.json b/homeassistant/components/file/strings.json index 9d49e6300e9..60ebf451f78 100644 --- a/homeassistant/components/file/strings.json +++ b/homeassistant/components/file/strings.json @@ -42,6 +42,22 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "options": { + "step": { + "init": { + "data": { + "value_template": "[%key:component::file::config::step::sensor::data::value_template%]", + "unit_of_measurement": "[%key:component::file::config::step::sensor::data::unit_of_measurement%]", + "timestamp": "[%key:component::file::config::step::notify::data::timestamp%]" + }, + "data_description": { + "value_template": "[%key:component::file::config::step::sensor::data_description::value_template%]", + "unit_of_measurement": "[%key:component::file::config::step::sensor::data_description::unit_of_measurement%]", + "timestamp": "[%key:component::file::config::step::notify::data_description::timestamp%]" + } + } + } + }, "exceptions": { "dir_not_allowed": { "message": "Access to {filename} is not allowed." 
diff --git a/tests/components/file/test_config_flow.py b/tests/components/file/test_config_flow.py index 86ada1fec61..30d00411c44 100644 --- a/tests/components/file/test_config_flow.py +++ b/tests/components/file/test_config_flow.py @@ -7,6 +7,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.file import DOMAIN +from homeassistant.const import CONF_UNIT_OF_MEASUREMENT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,20 +16,22 @@ from tests.common import MockConfigEntry MOCK_CONFIG_NOTIFY = { "platform": "notify", "file_path": "some_file", - "timestamp": True, } +MOCK_OPTIONS_NOTIFY = {"timestamp": True} MOCK_CONFIG_SENSOR = { "platform": "sensor", "file_path": "some/path", - "value_template": "{{ value | round(1) }}", } - -pytestmark = pytest.mark.usefixtures("mock_setup_entry") +MOCK_OPTIONS_SENSOR = {"value_template": "{{ value | round(1) }}"} +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_form( hass: HomeAssistant, @@ -36,6 +39,7 @@ async def test_form( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -50,7 +54,7 @@ async def test_form( ) await hass.async_block_till_done() - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input @@ -59,12 +63,17 @@ async def test_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == data + assert result2["options"] == options assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_already_configured( hass: HomeAssistant, @@ -72,9 +81,10 @@ async def test_already_configured( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test aborting if the entry is already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=data) + entry = MockConfigEntry(domain=DOMAIN, data=data, options=options) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( @@ -91,7 +101,7 @@ async def test_already_configured( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -103,10 +113,14 @@ async def test_already_configured( assert result2["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize("is_allowed", [False], ids=["not_allowed"]) @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", 
MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_not_allowed( hass: HomeAssistant, @@ -114,6 +128,7 @@ async def test_not_allowed( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test aborting if the file path is not allowed.""" result = await hass.config_entries.flow.async_init( @@ -130,7 +145,7 @@ async def test_not_allowed( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -140,3 +155,49 @@ async def test_not_allowed( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"file_path": "not_allowed"} + + +@pytest.mark.parametrize( + ("platform", "data", "options", "new_options"), + [ + ( + "sensor", + MOCK_CONFIG_SENSOR, + MOCK_OPTIONS_SENSOR, + {CONF_UNIT_OF_MEASUREMENT: "mm"}, + ), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY, {"timestamp": False}), + ], +) +async def test_options_flow( + hass: HomeAssistant, + mock_is_allowed_path: bool, + platform: str, + data: dict[str, Any], + options: dict[str, Any], + new_options: dict[str, Any], +) -> None: + """Test options config flow.""" + entry = MockConfigEntry(domain=DOMAIN, data=data, options=options, version=2) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input=new_options, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == new_options + + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry.state is config_entries.ConfigEntryState.LOADED + assert entry.options == new_options diff --git a/tests/components/file/test_init.py b/tests/components/file/test_init.py new file mode 100644 index 00000000000..faf1488ed07 --- /dev/null +++ b/tests/components/file/test_init.py @@ -0,0 +1,65 @@ +"""The tests for local file init.""" + +from unittest.mock import MagicMock, Mock, patch + +from homeassistant.components.file import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, get_fixture_path + + +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_migration_to_version_2( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor with JSON entries.""" + data = { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + "value_template": "{{ value_json.temperature }}", + } + + entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=data, + title=f"test [{data['file_path']}]", + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.LOADED + assert entry.version == 2 + assert entry.data == { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + } + assert entry.options == { + "value_template": "{{ value_json.temperature }}", + } + + 
+@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_migration_from_future_version( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor with JSON entries.""" + data = { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + "value_template": "{{ value_json.temperature }}", + } + + entry = MockConfigEntry( + domain=DOMAIN, version=3, data=data, title=f"test [{data['file_path']}]" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index faa9027aa21..33e4739a488 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -174,7 +174,7 @@ async def test_legacy_notify_file_exception( @pytest.mark.parametrize( - ("timestamp", "data"), + ("timestamp", "data", "options"), [ ( False, @@ -182,6 +182,8 @@ async def test_legacy_notify_file_exception( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, ), @@ -191,6 +193,8 @@ async def test_legacy_notify_file_exception( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": True, }, ), @@ -203,6 +207,7 @@ async def test_legacy_notify_file_entry_only_setup( timestamp: bool, mock_is_allowed_path: MagicMock, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test the legacy notify file output in entry only setup.""" filename = "mock_file" @@ -213,7 +218,11 @@ async def test_legacy_notify_file_entry_only_setup( message = params["message"] entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -252,7 +261,7 @@ async def test_legacy_notify_file_entry_only_setup( @pytest.mark.parametrize( - ("is_allowed", "config"), + ("is_allowed", "config", "options"), [ ( False, @@ -260,6 +269,8 @@ async def test_legacy_notify_file_entry_only_setup( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, ), @@ -271,10 +282,15 @@ async def test_legacy_notify_file_not_allowed( caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], + options: dict[str, Any], ) -> None: """Test legacy notify file output not allowed.""" entry = MockConfigEntry( - domain=DOMAIN, data=config, title=f"test [{config['file_path']}]" + domain=DOMAIN, + data=config, + version=2, + options=options, + title=f"test [{config['file_path']}]", ) entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(entry.entry_id) @@ -293,13 +309,15 @@ async def test_legacy_notify_file_not_allowed( ], ) @pytest.mark.parametrize( - ("data", "is_allowed"), + ("data", "options", "is_allowed"), [ ( { "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, True, @@ -314,12 +332,17 @@ async def test_notify_file_write_access_failed( service: str, params: dict[str, Any], data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test the notify file fails.""" domain = notify.DOMAIN entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + 
options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 60a81df2b1e..634ae9d626c 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -47,7 +47,11 @@ async def test_file_value_entry_setup( } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -66,11 +70,17 @@ async def test_file_value_template( "platform": "sensor", "name": "file2", "file_path": get_fixture_path("file_value_template.txt", "file"), + } + options = { "value_template": "{{ value_json.temperature }}", } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -90,7 +100,11 @@ async def test_file_empty(hass: HomeAssistant, mock_is_allowed_path: MagicMock) } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -113,7 +127,11 @@ async def test_file_path_invalid( } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) From 46357519e0dff3c43c400000c3d0469b76636533 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Thu, 15 Aug 2024 12:46:06 -0400 Subject: [PATCH 0773/1635] Add Nice G.O. integration (#122748) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Convert Linear Garage Door to Nice G.O. * Remove useless fixtures * Update manifest (now cloud push! 
🎉) * Fix entry unload * Extend config entry type * Fix circular import * Bump nice-go (hopefully fix dep conflict) * Bump nice-go (moves type stubs to dev deps) * Remove lingering mentions of Linear * Add nice-go as logger * Convert nice_go into a new integration and restore linear_garage_door * Add missing new lines to snapshots * Fixes suggested by @joostlek * More fixes * Fixes * Fixes * Fix coordinator tests * Move coordinator tests * Move test_no_connection_state from test_cover to test_init --------- Co-authored-by: Joostlek --- CODEOWNERS | 2 + homeassistant/components/nice_go/__init__.py | 43 +++ .../components/nice_go/config_flow.py | 68 +++++ homeassistant/components/nice_go/const.py | 13 + .../components/nice_go/coordinator.py | 220 +++++++++++++ homeassistant/components/nice_go/cover.py | 72 +++++ homeassistant/components/nice_go/entity.py | 41 +++ .../components/nice_go/manifest.json | 10 + homeassistant/components/nice_go/strings.json | 26 ++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/nice_go/__init__.py | 22 ++ tests/components/nice_go/conftest.py | 78 +++++ .../nice_go/fixtures/device_state_update.json | 21 ++ .../fixtures/device_state_update_1.json | 21 ++ .../nice_go/fixtures/get_all_barriers.json | 64 ++++ .../nice_go/snapshots/test_cover.ambr | 193 ++++++++++++ .../nice_go/snapshots/test_diagnostics.ambr | 43 +++ .../nice_go/snapshots/test_init.ambr | 16 + .../nice_go/snapshots/test_light.ambr | 223 ++++++++++++++ tests/components/nice_go/test_config_flow.py | 111 +++++++ tests/components/nice_go/test_cover.py | 115 +++++++ tests/components/nice_go/test_init.py | 288 ++++++++++++++++++ 25 files changed, 1703 insertions(+) create mode 100644 homeassistant/components/nice_go/__init__.py create mode 100644 homeassistant/components/nice_go/config_flow.py create mode 100644 homeassistant/components/nice_go/const.py create mode 100644 homeassistant/components/nice_go/coordinator.py create mode 100644 homeassistant/components/nice_go/cover.py create mode 100644 homeassistant/components/nice_go/entity.py create mode 100644 homeassistant/components/nice_go/manifest.json create mode 100644 homeassistant/components/nice_go/strings.json create mode 100644 tests/components/nice_go/__init__.py create mode 100644 tests/components/nice_go/conftest.py create mode 100644 tests/components/nice_go/fixtures/device_state_update.json create mode 100644 tests/components/nice_go/fixtures/device_state_update_1.json create mode 100644 tests/components/nice_go/fixtures/get_all_barriers.json create mode 100644 tests/components/nice_go/snapshots/test_cover.ambr create mode 100644 tests/components/nice_go/snapshots/test_diagnostics.ambr create mode 100644 tests/components/nice_go/snapshots/test_init.ambr create mode 100644 tests/components/nice_go/snapshots/test_light.ambr create mode 100644 tests/components/nice_go/test_config_flow.py create mode 100644 tests/components/nice_go/test_cover.py create mode 100644 tests/components/nice_go/test_init.py diff --git a/CODEOWNERS b/CODEOWNERS index a3b38498f68..6593c02c8a5 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -968,6 +968,8 @@ build.json @home-assistant/supervisor /tests/components/nfandroidtv/ @tkdrob /homeassistant/components/nibe_heatpump/ @elupus /tests/components/nibe_heatpump/ @elupus +/homeassistant/components/nice_go/ @IceBotYT +/tests/components/nice_go/ @IceBotYT /homeassistant/components/nightscout/ @marciogranzotto 
/tests/components/nightscout/ @marciogranzotto /homeassistant/components/nilu/ @hfurubotten diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py new file mode 100644 index 00000000000..33c81b70966 --- /dev/null +++ b/homeassistant/components/nice_go/__init__.py @@ -0,0 +1,43 @@ +"""The Nice G.O. integration.""" + +from __future__ import annotations + +import logging + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import NiceGOUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) +PLATFORMS: list[Platform] = [Platform.COVER] + +type NiceGOConfigEntry = ConfigEntry[NiceGOUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bool: + """Set up Nice G.O. from a config entry.""" + + coordinator = NiceGOUpdateCoordinator(hass) + + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + entry.async_create_background_task( + hass, + coordinator.client_listen(), + "nice_go_websocket_task", + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.api.close() + + return unload_ok diff --git a/homeassistant/components/nice_go/config_flow.py b/homeassistant/components/nice_go/config_flow.py new file mode 100644 index 00000000000..9d2c1c05518 --- /dev/null +++ b/homeassistant/components/nice_go/config_flow.py @@ -0,0 +1,68 @@ +"""Config flow for Nice G.O. 
integration.""" + +from __future__ import annotations + +from datetime import datetime +import logging +from typing import Any + +from nice_go import AuthFailedError, NiceGOApi +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class NiceGOConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Nice G.O.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + await self.async_set_unique_id(user_input[CONF_EMAIL]) + self._abort_if_unique_id_configured() + + hub = NiceGOApi() + + try: + refresh_token = await hub.authenticate( + user_input[CONF_EMAIL], + user_input[CONF_PASSWORD], + async_get_clientsession(self.hass), + ) + except AuthFailedError: + errors["base"] = "invalid_auth" + except Exception: # noqa: BLE001 + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_EMAIL: user_input[CONF_EMAIL], + CONF_PASSWORD: user_input[CONF_PASSWORD], + CONF_REFRESH_TOKEN: refresh_token, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + }, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py new file mode 100644 index 00000000000..c3caa92c8be --- /dev/null +++ b/homeassistant/components/nice_go/const.py @@ -0,0 +1,13 @@ +"""Constants for the Nice G.O. 
integration.""" + +from datetime import timedelta + +DOMAIN = "nice_go" + +# Configuration +CONF_SITE_ID = "site_id" +CONF_DEVICE_ID = "device_id" +CONF_REFRESH_TOKEN = "refresh_token" +CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" + +REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py new file mode 100644 index 00000000000..196ed0a211c --- /dev/null +++ b/homeassistant/components/nice_go/coordinator.py @@ -0,0 +1,220 @@ +"""DataUpdateCoordinator for Nice G.O.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass +from datetime import datetime +import json +import logging +from typing import Any + +from nice_go import ( + BARRIER_STATUS, + ApiError, + AuthFailedError, + BarrierState, + ConnectionState, + NiceGOApi, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, + REFRESH_TOKEN_EXPIRY_TIME, +) + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class NiceGODevice: + """Nice G.O. device dataclass.""" + + id: str + name: str + barrier_status: str + light_status: bool + fw_version: str + connected: bool + + +class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): + """DataUpdateCoordinator for Nice G.O.""" + + config_entry: ConfigEntry + organization_id: str + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize DataUpdateCoordinator for Nice G.O.""" + super().__init__( + hass, + _LOGGER, + name="Nice G.O.", + ) + + self.refresh_token = self.config_entry.data[CONF_REFRESH_TOKEN] + self.refresh_token_creation_time = self.config_entry.data[ + CONF_REFRESH_TOKEN_CREATION_TIME + ] + self.email = self.config_entry.data[CONF_EMAIL] + self.password = self.config_entry.data[CONF_PASSWORD] + self.api = NiceGOApi() + self.ws_connected = False + + async def _parse_barrier(self, barrier_state: BarrierState) -> NiceGODevice | None: + """Parse barrier data.""" + + device_id = barrier_state.deviceId + name = barrier_state.reported["displayName"] + if barrier_state.reported["migrationStatus"] == "NOT_STARTED": + ir.async_create_issue( + self.hass, + DOMAIN, + f"firmware_update_required_{device_id}", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="firmware_update_required", + translation_placeholders={"device_name": name}, + ) + return None + ir.async_delete_issue( + self.hass, DOMAIN, f"firmware_update_required_{device_id}" + ) + barrier_status_raw = [ + int(x) for x in barrier_state.reported["barrierStatus"].split(",") + ] + + if BARRIER_STATUS[int(barrier_status_raw[2])] == "STATIONARY": + barrier_status = "open" if barrier_status_raw[0] == 1 else "closed" + else: + barrier_status = BARRIER_STATUS[int(barrier_status_raw[2])].lower() + + light_status = barrier_state.reported["lightStatus"].split(",")[0] == "1" + fw_version = barrier_state.reported["deviceFwVersion"] + if barrier_state.connectionState: + connected = barrier_state.connectionState.connected + else: + connected = False + + return 
NiceGODevice( + id=device_id, + name=name, + barrier_status=barrier_status, + light_status=light_status, + fw_version=fw_version, + connected=connected, + ) + + async def _async_update_data(self) -> dict[str, NiceGODevice]: + return self.data + + async def _async_setup(self) -> None: + """Set up the coordinator.""" + async with asyncio.timeout(10): + expiry_time = ( + self.refresh_token_creation_time + + REFRESH_TOKEN_EXPIRY_TIME.total_seconds() + ) + try: + if datetime.now().timestamp() >= expiry_time: + await self._update_refresh_token() + else: + await self.api.authenticate_refresh( + self.refresh_token, async_get_clientsession(self.hass) + ) + _LOGGER.debug("Authenticated with Nice G.O. API") + + barriers = await self.api.get_all_barriers() + parsed_barriers = [ + await self._parse_barrier(barrier.state) for barrier in barriers + ] + + # Parse the barriers and save them in a dictionary + devices = { + barrier.id: barrier for barrier in parsed_barriers if barrier + } + self.organization_id = await barriers[0].get_attr("organization") + except AuthFailedError as e: + raise ConfigEntryAuthFailed from e + except ApiError as e: + raise UpdateFailed from e + else: + self.async_set_updated_data(devices) + + async def _update_refresh_token(self) -> None: + """Update the refresh token with Nice G.O. API.""" + _LOGGER.debug("Updating the refresh token with Nice G.O. API") + try: + refresh_token = await self.api.authenticate( + self.email, self.password, async_get_clientsession(self.hass) + ) + except AuthFailedError as e: + _LOGGER.exception("Authentication failed") + raise ConfigEntryAuthFailed from e + except ApiError as e: + _LOGGER.exception("API error") + raise UpdateFailed from e + + self.refresh_token = refresh_token + data = { + **self.config_entry.data, + CONF_REFRESH_TOKEN: refresh_token, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + } + self.hass.config_entries.async_update_entry(self.config_entry, data=data) + + async def client_listen(self) -> None: + """Listen to the websocket for updates.""" + self.api.event(self.on_connected) + self.api.event(self.on_data) + try: + await self.api.connect(reconnect=True) + except ApiError: + _LOGGER.exception("API error") + + if not self.hass.is_stopping: + await asyncio.sleep(5) + await self.client_listen() + + async def on_data(self, data: dict[str, Any]) -> None: + """Handle incoming data from the websocket.""" + _LOGGER.debug("Received data from the websocket") + _LOGGER.debug(data) + raw_data = data["data"]["devicesStatesUpdateFeed"]["item"] + parsed_data = await self._parse_barrier( + BarrierState( + deviceId=raw_data["deviceId"], + desired=json.loads(raw_data["desired"]), + reported=json.loads(raw_data["reported"]), + connectionState=ConnectionState( + connected=raw_data["connectionState"]["connected"], + updatedTimestamp=raw_data["connectionState"]["updatedTimestamp"], + ) + if raw_data["connectionState"] + else None, + version=raw_data["version"], + timestamp=raw_data["timestamp"], + ) + ) + if parsed_data is None: + return + + data_copy = self.data.copy() + data_copy[parsed_data.id] = parsed_data + + self.async_set_updated_data(data_copy) + + async def on_connected(self) -> None: + """Handle the websocket connection.""" + _LOGGER.debug("Connected to the websocket") + await self.api.subscribe(self.organization_id) diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py new file mode 100644 index 00000000000..70bd4b136a5 --- /dev/null +++ 
b/homeassistant/components/nice_go/cover.py @@ -0,0 +1,72 @@ +"""Cover entity for Nice G.O.""" + +from typing import Any + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import NiceGOConfigEntry +from .entity import NiceGOEntity + +PARALLEL_UPDATES = 1 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NiceGOConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nice G.O. cover.""" + coordinator = config_entry.runtime_data + + async_add_entities( + NiceGOCoverEntity(coordinator, device_id, device_data.name, "cover") + for device_id, device_data in coordinator.data.items() + ) + + +class NiceGOCoverEntity(NiceGOEntity, CoverEntity): + """Representation of a Nice G.O. cover.""" + + _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + _attr_name = None + _attr_device_class = CoverDeviceClass.GARAGE + + @property + def is_closed(self) -> bool: + """Return if cover is closed.""" + return self.data.barrier_status == "closed" + + @property + def is_opened(self) -> bool: + """Return if cover is open.""" + return self.data.barrier_status == "open" + + @property + def is_opening(self) -> bool: + """Return if cover is opening.""" + return self.data.barrier_status == "opening" + + @property + def is_closing(self) -> bool: + """Return if cover is closing.""" + return self.data.barrier_status == "closing" + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the garage door.""" + if self.is_closed: + return + + await self.coordinator.api.close_barrier(self._device_id) + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the garage door.""" + if self.is_opened: + return + + await self.coordinator.api.open_barrier(self._device_id) diff --git a/homeassistant/components/nice_go/entity.py b/homeassistant/components/nice_go/entity.py new file mode 100644 index 00000000000..5af4b9c8731 --- /dev/null +++ b/homeassistant/components/nice_go/entity.py @@ -0,0 +1,41 @@ +"""Base entity for Nice G.O.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import NiceGODevice, NiceGOUpdateCoordinator + + +class NiceGOEntity(CoordinatorEntity[NiceGOUpdateCoordinator]): + """Common base for Nice G.O. entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: NiceGOUpdateCoordinator, + device_id: str, + device_name: str, + sub_device_id: str, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self._attr_unique_id = f"{device_id}-{sub_device_id}" + self._device_id = device_id + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device_id)}, + name=device_name, + sw_version=coordinator.data[device_id].fw_version, + ) + + @property + def data(self) -> NiceGODevice: + """Return the Nice G.O. 
device.""" + return self.coordinator.data[self._device_id] + + @property + def available(self) -> bool: + """Return if entity is available.""" + return super().available and self.data.connected diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json new file mode 100644 index 00000000000..e86c68b491f --- /dev/null +++ b/homeassistant/components/nice_go/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "nice_go", + "name": "Nice G.O.", + "codeowners": ["@IceBotYT"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/nice_go", + "iot_class": "cloud_push", + "loggers": ["nice-go"], + "requirements": ["nice-go==0.1.6"] +} diff --git a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json new file mode 100644 index 00000000000..8d21f6d9740 --- /dev/null +++ b/homeassistant/components/nice_go/strings.json @@ -0,0 +1,26 @@ +{ + "config": { + "step": { + "user": { + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + } + }, + "issues": { + "firmware_update_required": { + "title": "Firmware update required", + "description": "Your device ({device_name}) requires a firmware update on the Nice G.O. app in order to work with this integration. Please update the firmware on the Nice G.O. app and reconfigure this integration." + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 24e151d2902..c3fe4af4a76 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -385,6 +385,7 @@ FLOWS = { "nextdns", "nfandroidtv", "nibe_heatpump", + "nice_go", "nightscout", "nina", "nmap_tracker", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 6d2c5ec9a30..7df27aa5e68 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -4029,6 +4029,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "nice_go": { + "name": "Nice G.O.", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" + }, "nightscout": { "name": "Nightscout", "integration_type": "hub", diff --git a/requirements_all.txt b/requirements_all.txt index fc8351c9237..a1ce1581826 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1419,6 +1419,9 @@ nextdns==3.1.0 # homeassistant.components.nibe_heatpump nibe==2.11.0 +# homeassistant.components.nice_go +nice-go==0.1.6 + # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5a89f6c644d..4b979e53de3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1179,6 +1179,9 @@ nextdns==3.1.0 # homeassistant.components.nibe_heatpump nibe==2.11.0 +# homeassistant.components.nice_go +nice-go==0.1.6 + # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 diff --git a/tests/components/nice_go/__init__.py b/tests/components/nice_go/__init__.py new file mode 100644 index 00000000000..0208795a12c --- /dev/null +++ 
b/tests/components/nice_go/__init__.py @@ -0,0 +1,22 @@ +"""Tests for the Nice G.O. integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.nice_go.PLATFORMS", + platforms, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py new file mode 100644 index 00000000000..31b21083c05 --- /dev/null +++ b/tests/components/nice_go/conftest.py @@ -0,0 +1,78 @@ +"""Common fixtures for the Nice G.O. tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +from nice_go import Barrier, BarrierState, ConnectionState +import pytest + +from homeassistant.components.nice_go.const import ( + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, +) +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_json_array_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nice_go.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_nice_go() -> Generator[AsyncMock]: + """Mock a Nice G.O. client.""" + with ( + patch( + "homeassistant.components.nice_go.coordinator.NiceGOApi", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.nice_go.config_flow.NiceGOApi", + new=mock_client, + ), + ): + client = mock_client.return_value + client.authenticate.return_value = "test-refresh-token" + client.authenticate_refresh.return_value = None + client.id_token = None + client.get_all_barriers.return_value = [ + Barrier( + id=barrier["id"], + type=barrier["type"], + controlLevel=barrier["controlLevel"], + attr=barrier["attr"], + state=BarrierState( + **barrier["state"], + connectionState=ConnectionState(**barrier["connectionState"]), + ), + api=client, + ) + for barrier in load_json_array_fixture("get_all_barriers.json", DOMAIN) + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + entry_id="acefdd4b3a4a0911067d1cf51414201e", + title="test-email", + data={ + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "test-refresh-token", + CONF_REFRESH_TOKEN_CREATION_TIME: 1722184160.738171, + }, + version=1, + unique_id="test-email", + ) diff --git a/tests/components/nice_go/fixtures/device_state_update.json b/tests/components/nice_go/fixtures/device_state_update.json new file mode 100644 index 00000000000..53d89c5411b --- /dev/null +++ b/tests/components/nice_go/fixtures/device_state_update.json @@ -0,0 +1,21 @@ +{ + "data": { + "devicesStatesUpdateFeed": { + "receiver": "ORG/0:2372", + "item": { + "deviceId": "1", + "desired": "{\"key\":\"value\"}", + "reported": "{\"displayName\":\"Test Garage 1\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"1\",\"lightStatus\":\"0,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"0,0,1,0,-1,0,3,0\"}", + 
"timestamp": 123, + "version": 123, + "connectionState": { + "connected": true, + "updatedTimestamp": "123", + "__typename": "DeviceConnectionState" + }, + "__typename": "DeviceState" + }, + "__typename": "DeviceStateUpdateNotice" + } + } +} diff --git a/tests/components/nice_go/fixtures/device_state_update_1.json b/tests/components/nice_go/fixtures/device_state_update_1.json new file mode 100644 index 00000000000..cc718e8b093 --- /dev/null +++ b/tests/components/nice_go/fixtures/device_state_update_1.json @@ -0,0 +1,21 @@ +{ + "data": { + "devicesStatesUpdateFeed": { + "receiver": "ORG/0:2372", + "item": { + "deviceId": "2", + "desired": "{\"key\":\"value\"}", + "reported": "{\"displayName\":\"Test Garage 2\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"2\",\"lightStatus\":\"1,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"1,100,2,0,-1,0,3,0\"}", + "timestamp": 123, + "version": 123, + "connectionState": { + "connected": true, + "updatedTimestamp": "123", + "__typename": "DeviceConnectionState" + }, + "__typename": "DeviceState" + }, + "__typename": "DeviceStateUpdateNotice" + } + } +} diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json new file mode 100644 index 00000000000..481c73d91a8 --- /dev/null +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -0,0 +1,64 @@ +[ + { + "id": "1", + "type": "WallStation", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "1", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 1", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "1", + "lightStatus": "1,100", + "vcnMode": false, + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "0,0,0,0,-1,0,3,0" + }, + "timestamp": null, + "version": null + }, + "connectionState": { + "connected": true, + "updatedTimestamp": "123" + } + }, + { + "id": "2", + "type": "WallStation", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "2", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 2", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "2", + "lightStatus": "0,100", + "vcnMode": false, + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "1,100,0,0,-1,0,3,0" + }, + "timestamp": null, + "version": null + }, + "connectionState": { + "connected": true, + "updatedTimestamp": "123" + } + } +] diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr new file mode 100644 index 00000000000..8f85fea2726 --- /dev/null +++ b/tests/components/nice_go/snapshots/test_cover.ambr @@ -0,0 +1,193 @@ +# serializer version: 1 +# name: test_covers[cover.test_garage_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 
'unique_id': '1-cover', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_covers[cover.test_garage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '2-cover', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 2', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[cover.test_garage_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test3-GDO', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 3', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'opening', + }) +# --- +# name: test_covers[cover.test_garage_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test4-GDO', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 4', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_4', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'closing', + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..b7d564b619b --- /dev/null +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -0,0 +1,43 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'coordinator_data': dict({ + '1': dict({ + 'barrier_status': 'closed', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '1', + 'light_status': True, + 'name': 'Test Garage 1', + }), + '2': dict({ + 'barrier_status': 'open', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '2', + 'light_status': False, + 'name': 'Test Garage 2', + }), + }), + 'entry': dict({ + 'data': dict({ + 'email': '**REDACTED**', + 'password': '**REDACTED**', + 'refresh_token': '**REDACTED**', + 'refresh_token_creation_time': 1722184160.738171, + }), + 'disabled_by': None, + 'domain': 'nice_go', + 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': '**REDACTED**', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_init.ambr b/tests/components/nice_go/snapshots/test_init.ambr new file mode 100644 index 00000000000..ff389568d1b --- /dev/null +++ b/tests/components/nice_go/snapshots/test_init.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_on_data_none_parsed + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr new file mode 100644 index 00000000000..294488e3d46 --- /dev/null +++ b/tests/components/nice_go/snapshots/test_light.ambr @@ -0,0 +1,223 @@ +# serializer version: 1 +# name: test_data[light.test_garage_1_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_1_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '1-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_1_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Test Garage 1 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_1_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_data[light.test_garage_2_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_2_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '2-light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_2_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Test Garage 2 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_2_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_data[light.test_garage_3_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_3_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'test3-Light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_3_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Test Garage 3 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_3_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_data[light.test_garage_4_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_4_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'test4-Light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_4_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Test Garage 4 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_4_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nice_go/test_config_flow.py b/tests/components/nice_go/test_config_flow.py new file mode 100644 index 00000000000..67930b9f752 --- /dev/null +++ 
b/tests/components/nice_go/test_config_flow.py
@@ -0,0 +1,111 @@
+"""Test the Nice G.O. config flow."""
+
+from unittest.mock import AsyncMock
+
+from freezegun.api import FrozenDateTimeFactory
+from nice_go import AuthFailedError
+import pytest
+
+from homeassistant.components.nice_go.const import (
+    CONF_REFRESH_TOKEN,
+    CONF_REFRESH_TOKEN_CREATION_TIME,
+    DOMAIN,
+)
+from homeassistant.config_entries import SOURCE_USER
+from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
+from homeassistant.core import HomeAssistant
+from homeassistant.data_entry_flow import FlowResultType
+
+from tests.common import MockConfigEntry
+
+
+async def test_form(
+    hass: HomeAssistant,
+    mock_nice_go: AsyncMock,
+    mock_setup_entry: AsyncMock,
+    freezer: FrozenDateTimeFactory,
+) -> None:
+    """Test we get the form."""
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": SOURCE_USER}
+    )
+    assert result["type"] is FlowResultType.FORM
+    assert not result["errors"]
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {
+            CONF_EMAIL: "test-email",
+            CONF_PASSWORD: "test-password",
+        },
+    )
+
+    assert result["type"] is FlowResultType.CREATE_ENTRY
+    assert result["title"] == "test-email"
+    assert result["data"][CONF_EMAIL] == "test-email"
+    assert result["data"][CONF_PASSWORD] == "test-password"
+    assert result["data"][CONF_REFRESH_TOKEN] == "test-refresh-token"
+    assert CONF_REFRESH_TOKEN_CREATION_TIME in result["data"]
+    assert result["result"].unique_id == "test-email"
+    assert len(mock_setup_entry.mock_calls) == 1
+
+
+@pytest.mark.parametrize(
+    ("side_effect", "expected_error"),
+    [(AuthFailedError, "invalid_auth"), (Exception, "unknown")],
+)
+async def test_form_exceptions(
+    hass: HomeAssistant,
+    mock_nice_go: AsyncMock,
+    mock_setup_entry: AsyncMock,
+    side_effect: Exception,
+    expected_error: str,
+) -> None:
+    """Test we handle invalid auth."""
+    mock_nice_go.authenticate.side_effect = side_effect
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": SOURCE_USER}
+    )
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {
+            CONF_EMAIL: "test-email",
+            CONF_PASSWORD: "test-password",
+        },
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["errors"] == {"base": expected_error}
+    mock_nice_go.authenticate.side_effect = None
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {
+            CONF_EMAIL: "test-email",
+            CONF_PASSWORD: "test-password",
+        },
+    )
+
+    assert result["type"] is FlowResultType.CREATE_ENTRY
+
+
+async def test_duplicate_device(
+    hass: HomeAssistant,
+    mock_config_entry: MockConfigEntry,
+    mock_nice_go: AsyncMock,
+) -> None:
+    """Test that duplicate devices are handled."""
+    mock_config_entry.add_to_hass(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN, context={"source": SOURCE_USER}
+    )
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {
+            CONF_EMAIL: "test-email",
+            CONF_PASSWORD: "test-password",
+        },
+    )
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "already_configured"
diff --git a/tests/components/nice_go/test_cover.py b/tests/components/nice_go/test_cover.py
new file mode 100644
index 00000000000..a6eb9bd27fb
--- /dev/null
+++ b/tests/components/nice_go/test_cover.py
@@ -0,0 +1,115 @@
+"""Test Nice G.O.
cover.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, +) +from homeassistant.components.nice_go.const import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform + + +async def test_covers( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that data gets parsed and returned appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_open_cover( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that opening the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_2"}, + blocking=True, + ) + + assert mock_nice_go.open_barrier.call_count == 0 + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_1"}, + blocking=True, + ) + + assert mock_nice_go.open_barrier.call_count == 1 + + +async def test_close_cover( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that closing the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_1"}, + blocking=True, + ) + + assert mock_nice_go.close_barrier.call_count == 0 + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_2"}, + blocking=True, + ) + + assert mock_nice_go.close_barrier.call_count == 1 + + +async def test_update_cover_state( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that closing the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert hass.states.get("cover.test_garage_1").state == STATE_CLOSED + assert hass.states.get("cover.test_garage_2").state == STATE_OPEN + + device_update = load_json_object_fixture("device_state_update.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update) + device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update_1) + + assert hass.states.get("cover.test_garage_1").state == STATE_OPENING + assert hass.states.get("cover.test_garage_2").state == STATE_CLOSING diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py new file mode 100644 index 00000000000..d6877d72724 --- /dev/null +++ b/tests/components/nice_go/test_init.py @@ -0,0 +1,288 @@ +"""Test Nice G.O. 
init.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from freezegun.api import FrozenDateTimeFactory +from nice_go import ApiError, AuthFailedError, Barrier, BarrierState +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.nice_go.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from . import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_unload_entry( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test the unload entry.""" + + await setup_integration(hass, mock_config_entry, []) + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("side_effect", "entry_state"), + [ + ( + AuthFailedError(), + ConfigEntryState.SETUP_ERROR, + ), + (ApiError(), ConfigEntryState.SETUP_RETRY), + ], +) +async def test_setup_failure( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + entry_state: ConfigEntryState, +) -> None: + """Test reauth trigger setup.""" + + mock_nice_go.authenticate_refresh.side_effect = side_effect + + await setup_integration(hass, mock_config_entry, []) + assert mock_config_entry.state is entry_state + + +async def test_firmware_update_required( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test firmware update required.""" + + mock_nice_go.get_all_barriers.return_value = [ + Barrier( + id="test-device-id", + type="test-type", + controlLevel="test-control-level", + attr=[{"key": "test-attr", "value": "test-value"}], + state=BarrierState( + deviceId="test-device-id", + reported={ + "displayName": "test-display-name", + "migrationStatus": "NOT_STARTED", + }, + desired=None, + connectionState=None, + version=None, + timestamp=None, + ), + api=mock_nice_go, + ) + ] + + await setup_integration(hass, mock_config_entry, []) + + issue = issue_registry.async_get_issue( + DOMAIN, + "firmware_update_required_test-device-id", + ) + assert issue + + +async def test_update_refresh_token( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test updating refresh token.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_nice_go.authenticate.call_count == 0 + + mock_nice_go.authenticate.return_value = "new-refresh-token" + freezer.tick(timedelta(days=30)) + async_fire_time_changed(hass) + assert await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.authenticate.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 2 + assert mock_config_entry.data["refresh_token"] == "new-refresh-token" + + +async def test_update_refresh_token_api_error( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + 
mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test updating refresh token with error.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_nice_go.authenticate.call_count == 0 + + mock_nice_go.authenticate.side_effect = ApiError + freezer.tick(timedelta(days=30)) + async_fire_time_changed(hass) + assert not await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.authenticate.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_config_entry.data["refresh_token"] == "test-refresh-token" + assert "API error" in caplog.text + + +async def test_update_refresh_token_auth_failed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test updating refresh token with error.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_nice_go.authenticate.call_count == 0 + + mock_nice_go.authenticate.side_effect = AuthFailedError + freezer.tick(timedelta(days=30)) + async_fire_time_changed(hass) + assert not await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.authenticate.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_config_entry.data["refresh_token"] == "test-refresh-token" + assert "Authentication failed" in caplog.text + + +async def test_client_listen_api_error( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test client listen with error.""" + + mock_nice_go.connect.side_effect = ApiError + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert "API error" in caplog.text + + mock_nice_go.connect.side_effect = None + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_nice_go.connect.call_count == 2 + + +async def test_on_data_none_parsed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test on data with None parsed.""" + + mock_nice_go.event = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await mock_nice_go.event.call_args[0][0]( + { + "data": { + "devicesStatesUpdateFeed": { + "item": { + "deviceId": "1", + "desired": '{"key": "value"}', + "reported": '{"displayName":"test-display-name", "migrationStatus":"NOT_STARTED"}', + "connectionState": { + "connected": None, + "updatedTimestamp": None, + }, + "version": None, + "timestamp": None, + } + } + } + } + ) + + assert hass.states.get("cover.test_garage_1") == snapshot + + +async def test_on_connected( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test on connected.""" + + mock_nice_go.event = MagicMock() + + await 
setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.event.call_count == 2 + + mock_nice_go.subscribe = AsyncMock() + await mock_nice_go.event.call_args_list[0][0][0]() + + assert mock_nice_go.subscribe.call_count == 1 + + +async def test_no_connection_state( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test parsing barrier with no connection state.""" + + mock_nice_go.event = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.event.call_count == 2 + + await mock_nice_go.event.call_args[0][0]( + { + "data": { + "devicesStatesUpdateFeed": { + "item": { + "deviceId": "1", + "desired": '{"key": "value"}', + "reported": '{"displayName":"Test Garage 1", "migrationStatus":"DONE", "barrierStatus": "1,100,0", "deviceFwVersion": "1.0.0", "lightStatus": "1,100"}', + "connectionState": None, + "version": None, + "timestamp": None, + } + } + } + } + ) + + assert hass.states.get("cover.test_garage_1").state == "unavailable" From 64a68b17f4c6425c2425f76ceaa7096fca028f26 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 15 Aug 2024 18:58:52 +0200 Subject: [PATCH 0774/1635] Simplify recorder.migration._drop_foreign_key_constraints (#123968) --- .../components/recorder/migration.py | 10 +-- tests/components/recorder/test_migrate.py | 61 +++++++++++-------- 2 files changed, 35 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 9a19ff7084a..185be02e9aa 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -15,7 +15,6 @@ from uuid import UUID import sqlalchemy from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update from sqlalchemy.engine import CursorResult, Engine -from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint from sqlalchemy.exc import ( DatabaseError, IntegrityError, @@ -645,7 +644,7 @@ def _update_states_table_with_foreign_key_options( def _drop_foreign_key_constraints( session_maker: Callable[[], Session], engine: Engine, table: str, column: str -) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: +) -> None: """Drop foreign key constraints for a table on specific columns. 
This is not supported for SQLite because it does not support @@ -658,11 +657,6 @@ def _drop_foreign_key_constraints( ) inspector = sqlalchemy.inspect(engine) - dropped_constraints = [ - (table, column, foreign_key) - for foreign_key in inspector.get_foreign_keys(table) - if foreign_key["name"] and foreign_key["constrained_columns"] == [column] - ] ## Find matching named constraints and bind the ForeignKeyConstraints to the table tmp_table = Table(table, MetaData()) @@ -685,8 +679,6 @@ def _drop_foreign_key_constraints( ) raise - return dropped_constraints - def _restore_foreign_key_constraints( session_maker: Callable[[], Session], diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 4bc317bdaa7..b56dfe3e189 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -7,7 +7,9 @@ import sys from unittest.mock import ANY, Mock, PropertyMock, call, patch import pytest -from sqlalchemy import create_engine, text +from sqlalchemy import create_engine, inspect, text +from sqlalchemy.engine import Engine +from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint from sqlalchemy.exc import ( DatabaseError, InternalError, @@ -973,31 +975,40 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: ], } + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] + ] + engine = create_engine(recorder_db_url) db_schema.Base.metadata.create_all(engine) + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_1 == expected_dropped_constraints[db_engine] + with Session(engine) as session: session_maker = Mock(return_value=session) - dropped_constraints_1 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in migration._drop_foreign_key_constraints( + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( session_maker, engine, table, column ) - ] - assert dropped_constraints_1 == expected_dropped_constraints[db_engine] # Check we don't find the constrained columns again (they are removed) - with Session(engine) as session: - session_maker = Mock(return_value=session) - dropped_constraints_2 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in migration._drop_foreign_key_constraints( - session_maker, engine, table, column - ) - ] - assert dropped_constraints_2 == [] + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_2 == [] # Restore the constraints with Session(engine) as session: @@ -1007,16 +1018,12 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: ) # Check we do find the constrained columns again (they are restored) - with Session(engine) as session: - session_maker = Mock(return_value=session) - dropped_constraints_3 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in 
migration._drop_foreign_key_constraints( - session_maker, engine, table, column - ) - ] - assert dropped_constraints_3 == expected_dropped_constraints[db_engine] + matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == expected_dropped_constraints[db_engine] engine.dispose() From bf9d621939ce33d843936484eafa860e7cfb1504 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Thu, 15 Aug 2024 19:09:21 +0200 Subject: [PATCH 0775/1635] Revert "Rename sensor to finished downloads in pyLoad integration" (#121483) Revert "Rename sensor to finished downloads in pyLoad integration (#120483)" This reverts commit 8e598ec3ff9b6c96dfe633d5510762c4053279c3. --- homeassistant/components/pyload/strings.json | 2 +- .../pyload/snapshots/test_sensor.ambr | 400 +++++++++--------- 2 files changed, 201 insertions(+), 201 deletions(-) diff --git a/homeassistant/components/pyload/strings.json b/homeassistant/components/pyload/strings.json index 38e17e5016f..bbe6989f5e7 100644 --- a/homeassistant/components/pyload/strings.json +++ b/homeassistant/components/pyload/strings.json @@ -74,7 +74,7 @@ "name": "Downloads in queue" }, "total": { - "name": "Finished downloads" + "name": "Total downloads" }, "free_space": { "name": "Free space" diff --git a/tests/components/pyload/snapshots/test_sensor.ambr b/tests/components/pyload/snapshots/test_sensor.ambr index c1e5a9d6c3a..69d0387fc8f 100644 --- a/tests/components/pyload/snapshots/test_sensor.ambr +++ b/tests/components/pyload/snapshots/test_sensor.ambr @@ -99,56 +99,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -257,6 +207,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -357,56 +357,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -515,6 +465,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , 
+ 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[ParserError][sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -615,56 +615,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[ParserError][sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[ParserError][sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[ParserError][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -773,6 +723,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_setup[sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -873,56 +873,6 @@ 'state': '6', }) # --- -# name: test_setup[sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': 
True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_setup[sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '37', - }) -# --- # name: test_setup[sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1031,3 +981,53 @@ 'state': '43.247704', }) # --- +# name: test_setup[sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_setup[sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37', + }) +# --- From 29c0a7f32484cca4d55ec73ad4bb6247171be0e2 Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 15 Aug 2024 13:23:35 -0400 Subject: [PATCH 0776/1635] Bump aiorussound to 2.3.2 (#123997) --- homeassistant/components/russound_rio/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 7180c3be84f..6c473d94874 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["aiorussound"], "quality_scale": "silver", - "requirements": ["aiorussound==2.3.1"] + "requirements": ["aiorussound==2.3.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index a1ce1581826..920241247b9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -350,7 +350,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # homeassistant.components.russound_rio -aiorussound==2.3.1 +aiorussound==2.3.2 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4b979e53de3..ef99d18d1fe 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -332,7 +332,7 @@ aioridwell==2024.01.0 aioruckus==0.34 # 
homeassistant.components.russound_rio -aiorussound==2.3.1 +aiorussound==2.3.2 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 From f58106674729692c66c109f11da58870d4fb2f10 Mon Sep 17 00:00:00 2001 From: dougiteixeira <31328123+dougiteixeira@users.noreply.github.com> Date: Thu, 15 Aug 2024 15:02:51 -0300 Subject: [PATCH 0777/1635] Add config flow for platform number in Template (#121849) * Add config flow to select platform in Template * Remove device id duplicate in schema * Add config flow for number platform in Template * Remove mode --- .../components/template/config_flow.py | 37 +++++++ homeassistant/components/template/number.py | 97 ++++++++++++++----- .../components/template/strings.json | 31 ++++++ .../template/snapshots/test_number.ambr | 18 ++++ tests/components/template/test_config_flow.py | 64 ++++++++++++ tests/components/template/test_init.py | 16 +++ tests/components/template/test_number.py | 78 ++++++++++++++- 7 files changed, 314 insertions(+), 27 deletions(-) create mode 100644 tests/components/template/snapshots/test_number.ambr diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index c52a890c1f7..2c12a0d03e9 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -41,6 +41,16 @@ from homeassistant.helpers.schema_config_entry_flow import ( from .binary_sensor import async_create_preview_binary_sensor from .const import CONF_PRESS, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN +from .number import ( + CONF_MAX, + CONF_MIN, + CONF_SET_VALUE, + CONF_STEP, + DEFAULT_MAX_VALUE, + DEFAULT_MIN_VALUE, + DEFAULT_STEP, + async_create_preview_number, +) from .select import CONF_OPTIONS, CONF_SELECT_OPTION from .sensor import async_create_preview_sensor from .switch import async_create_preview_switch @@ -94,6 +104,21 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: vol.Optional(CONF_VERIFY_SSL, default=True): selector.BooleanSelector(), } + if domain == Platform.NUMBER: + schema |= { + vol.Required(CONF_STATE): selector.TemplateSelector(), + vol.Required( + CONF_MIN, default=f"{{{{{DEFAULT_MIN_VALUE}}}}}" + ): selector.TemplateSelector(), + vol.Required( + CONF_MAX, default=f"{{{{{DEFAULT_MAX_VALUE}}}}}" + ): selector.TemplateSelector(), + vol.Required( + CONF_STEP, default=f"{{{{{DEFAULT_STEP}}}}}" + ): selector.TemplateSelector(), + vol.Optional(CONF_SET_VALUE): selector.ActionSelector(), + } + if domain == Platform.SELECT: schema |= _SCHEMA_STATE | { vol.Required(CONF_OPTIONS): selector.TemplateSelector(), @@ -238,6 +263,7 @@ TEMPLATE_TYPES = [ "binary_sensor", "button", "image", + "number", "select", "sensor", "switch", @@ -258,6 +284,11 @@ CONFIG_FLOW = { config_schema(Platform.IMAGE), validate_user_input=validate_user_input(Platform.IMAGE), ), + Platform.NUMBER: SchemaFlowFormStep( + config_schema(Platform.NUMBER), + preview="template", + validate_user_input=validate_user_input(Platform.NUMBER), + ), Platform.SELECT: SchemaFlowFormStep( config_schema(Platform.SELECT), validate_user_input=validate_user_input(Platform.SELECT), @@ -290,6 +321,11 @@ OPTIONS_FLOW = { options_schema(Platform.IMAGE), validate_user_input=validate_user_input(Platform.IMAGE), ), + Platform.NUMBER: SchemaFlowFormStep( + options_schema(Platform.NUMBER), + preview="template", + validate_user_input=validate_user_input(Platform.NUMBER), + ), Platform.SELECT: SchemaFlowFormStep( options_schema(Platform.SELECT), 
validate_user_input=validate_user_input(Platform.SELECT), @@ -311,6 +347,7 @@ CREATE_PREVIEW_ENTITY: dict[ Callable[[HomeAssistant, str, dict[str, Any]], TemplateEntity], ] = { "binary_sensor": async_create_preview_binary_sensor, + "number": async_create_preview_number, "sensor": async_create_preview_sensor, "switch": async_create_preview_switch, } diff --git a/homeassistant/components/template/number.py b/homeassistant/components/template/number.py index d4004ee9535..955600a9b9e 100644 --- a/homeassistant/components/template/number.py +++ b/homeassistant/components/template/number.py @@ -8,9 +8,6 @@ from typing import Any import voluptuous as vol from homeassistant.components.number import ( - ATTR_MAX, - ATTR_MIN, - ATTR_STEP, ATTR_VALUE, DEFAULT_MAX_VALUE, DEFAULT_MIN_VALUE, @@ -18,9 +15,17 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, NumberEntity, ) -from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_STATE, CONF_UNIQUE_ID +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_OPTIMISTIC, + CONF_STATE, + CONF_UNIQUE_ID, +) from homeassistant.core import HomeAssistant, callback -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -37,6 +42,9 @@ from .trigger_entity import TriggerEntity _LOGGER = logging.getLogger(__name__) CONF_SET_VALUE = "set_value" +CONF_MIN = "min" +CONF_MAX = "max" +CONF_STEP = "step" DEFAULT_NAME = "Template Number" DEFAULT_OPTIMISTIC = False @@ -47,9 +55,9 @@ NUMBER_SCHEMA = ( vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.template, vol.Required(CONF_STATE): cv.template, vol.Required(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, - vol.Required(ATTR_STEP): cv.template, - vol.Optional(ATTR_MIN, default=DEFAULT_MIN_VALUE): cv.template, - vol.Optional(ATTR_MAX, default=DEFAULT_MAX_VALUE): cv.template, + vol.Required(CONF_STEP): cv.template, + vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): cv.template, + vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): cv.template, vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -57,6 +65,17 @@ NUMBER_SCHEMA = ( .extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA.schema) .extend(TEMPLATE_ENTITY_ICON_SCHEMA.schema) ) +NUMBER_CONFIG_SCHEMA = vol.Schema( + { + vol.Required(CONF_NAME): cv.template, + vol.Required(CONF_STATE): cv.template, + vol.Required(CONF_STEP): cv.template, + vol.Optional(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_MIN): cv.template, + vol.Optional(CONF_MAX): cv.template, + vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), + } +) async def _async_create_entities( @@ -99,6 +118,27 @@ async def async_setup_platform( ) +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize config entry.""" + _options = dict(config_entry.options) + _options.pop("template_type") + validated_config = NUMBER_CONFIG_SCHEMA(_options) + async_add_entities([TemplateNumber(hass, validated_config, config_entry.entry_id)]) + + +@callback +def async_create_preview_number( + hass: HomeAssistant, name: str, config: dict[str, Any] 
+) -> TemplateNumber: + """Create a preview number.""" + validated_config = NUMBER_CONFIG_SCHEMA(config | {CONF_NAME: name}) + return TemplateNumber(hass, validated_config, None) + + class TemplateNumber(TemplateEntity, NumberEntity): """Representation of a template number.""" @@ -114,16 +154,22 @@ class TemplateNumber(TemplateEntity, NumberEntity): super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None self._value_template = config[CONF_STATE] - self._command_set_value = Script( - hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN + self._command_set_value = ( + Script(hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN) + if config.get(CONF_SET_VALUE, None) is not None + else None ) - self._step_template = config[ATTR_STEP] - self._min_value_template = config[ATTR_MIN] - self._max_value_template = config[ATTR_MAX] - self._attr_assumed_state = self._optimistic = config[CONF_OPTIMISTIC] + self._step_template = config[CONF_STEP] + self._min_value_template = config[CONF_MIN] + self._max_value_template = config[CONF_MAX] + self._attr_assumed_state = self._optimistic = config.get(CONF_OPTIMISTIC) self._attr_native_step = DEFAULT_STEP self._attr_native_min_value = DEFAULT_MIN_VALUE self._attr_native_max_value = DEFAULT_MAX_VALUE + self._attr_device_info = async_device_info_to_link_from_device_id( + hass, + config.get(CONF_DEVICE_ID), + ) @callback def _async_setup_templates(self) -> None: @@ -161,11 +207,12 @@ class TemplateNumber(TemplateEntity, NumberEntity): if self._optimistic: self._attr_native_value = value self.async_write_ha_state() - await self.async_run_script( - self._command_set_value, - run_variables={ATTR_VALUE: value}, - context=self._context, - ) + if self._command_set_value: + await self.async_run_script( + self._command_set_value, + run_variables={ATTR_VALUE: value}, + context=self._context, + ) class TriggerNumberEntity(TriggerEntity, NumberEntity): @@ -174,9 +221,9 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): domain = NUMBER_DOMAIN extra_template_keys = ( CONF_STATE, - ATTR_STEP, - ATTR_MIN, - ATTR_MAX, + CONF_STEP, + CONF_MIN, + CONF_MAX, ) def __init__( @@ -203,21 +250,21 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): def native_min_value(self) -> int: """Return the minimum value.""" return vol.Any(vol.Coerce(float), None)( - self._rendered.get(ATTR_MIN, super().native_min_value) + self._rendered.get(CONF_MIN, super().native_min_value) ) @property def native_max_value(self) -> int: """Return the maximum value.""" return vol.Any(vol.Coerce(float), None)( - self._rendered.get(ATTR_MAX, super().native_max_value) + self._rendered.get(CONF_MAX, super().native_max_value) ) @property def native_step(self) -> int: """Return the increment/decrement step.""" return vol.Any(vol.Coerce(float), None)( - self._rendered.get(ATTR_STEP, super().native_step) + self._rendered.get(CONF_STEP, super().native_step) ) async def async_set_native_value(self, value: float) -> None: diff --git a/homeassistant/components/template/strings.json b/homeassistant/components/template/strings.json index b1f14af2202..fa365bf3cfd 100644 --- a/homeassistant/components/template/strings.json +++ b/homeassistant/components/template/strings.json @@ -37,6 +37,21 @@ }, "title": "Template image" }, + "number": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "state": "[%key:component::template::config::step::sensor::data::state%]", + "step": "Step value", + "set_value": 
"Actions on set value", + "max": "Maximum value", + "min": "Minimum value" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "Template number" + }, "select": { "data": { "device_id": "[%key:common::config_flow::data::device%]", @@ -70,6 +85,7 @@ "binary_sensor": "Template a binary sensor", "button": "Template a button", "image": "Template a image", + "number": "Template a number", "select": "Template a select", "sensor": "Template a sensor", "switch": "Template a switch" @@ -125,6 +141,21 @@ }, "title": "[%key:component::template::config::step::image::title%]" }, + "number": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "state": "[%key:component::template::config::step::sensor::data::state%]", + "step": "[%key:component::template::config::step::number::data::step%]", + "set_value": "[%key:component::template::config::step::number::data::set_value%]", + "max": "[%key:component::template::config::step::number::data::max%]", + "min": "[%key:component::template::config::step::number::data::min%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "[%key:component::template::config::step::number::title%]" + }, "select": { "data": { "device_id": "[%key:common::config_flow::data::device%]", diff --git a/tests/components/template/snapshots/test_number.ambr b/tests/components/template/snapshots/test_number.ambr new file mode 100644 index 00000000000..d6f5b1e338d --- /dev/null +++ b/tests/components/template/snapshots/test_number.ambr @@ -0,0 +1,18 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- \ No newline at end of file diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index ff5db52d667..a62370f4261 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -91,6 +91,24 @@ from tests.typing import WebSocketGenerator {"verify_ssl": True}, {}, ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + "30.0", + {"one": "30.0", "two": "20.0"}, + {}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + {}, + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -226,6 +244,20 @@ async def test_config_flow( {"verify_ssl": True}, {"verify_ssl": True}, ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -402,6 +434,24 @@ def get_suggested(schema, key): }, "url", ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + {"state": "{{ states('number.two') }}"}, + ["30.0", "20.0"], + {"one": "30.0", "two": "20.0"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + "state", + ), ( "select", {"state": "{{ 
states('select.one') }}"}, @@ -1156,6 +1206,20 @@ async def test_option_flow_sensor_preview_config_entry_removed( {}, {}, ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), ( "select", {"state": "{{ states('select.one') }}"}, diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index fe08e1f4963..2e5870217a2 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -314,6 +314,22 @@ async def async_yaml_patch_helper(hass, filename): }, {}, ), + ( + { + "template_type": "number", + "name": "My template", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "state": "{{ 11 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), ( { "template_type": "select", diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index bf04151fd36..ca9fe2d7688 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -1,5 +1,7 @@ """The tests for the Template number platform.""" +from syrupy.assertion import SnapshotAssertion + from homeassistant import setup from homeassistant.components.input_number import ( ATTR_VALUE as INPUT_NUMBER_ATTR_VALUE, @@ -14,11 +16,12 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE as NUMBER_SERVICE_SET_VALUE, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component, async_capture_events +from tests.common import MockConfigEntry, assert_setup_component, async_capture_events _TEST_NUMBER = "number.template_number" # Represent for number's value @@ -42,6 +45,35 @@ _VALUE_INPUT_NUMBER_CONFIG = { } +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "number", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("number.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -460,3 +492,45 @@ async def test_icon_template_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_NUMBER) assert float(state.state) == 51 assert state.attributes[ATTR_ICON] == "mdi:greater" + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for number template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = 
device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "number", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("number.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id From 65fa4a34ed9eaef61b0edd62b4155b6dbea366fd Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Thu, 15 Aug 2024 14:05:28 -0400 Subject: [PATCH 0778/1635] Add configuration url to russound device (#124001) feat: add configuration url --- homeassistant/components/russound_rio/entity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py index 150c4e285d1..0e4d5cf7dde 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -53,6 +53,7 @@ class RussoundBaseEntity(Entity): or f"{self._primary_mac_address}-{self._controller.controller_id}" ) self._attr_device_info = DeviceInfo( + configuration_url=f"http://{self._instance.host}", # Use MAC address of Russound device as identifier identifiers={(DOMAIN, self._device_identifier)}, manufacturer="Russound", From 7d552b64f7ebcf743c2c479fc3cc7c32d50650e4 Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Thu, 15 Aug 2024 20:06:23 +0200 Subject: [PATCH 0779/1635] Use `clearCompletedTodos` API endpoint for deleting Habitica todos (#121877) Use clearCompletedTodos endpoint for deleting multiple completed todo items --- homeassistant/components/habitica/strings.json | 5 ++++- homeassistant/components/habitica/todo.py | 15 ++++++++++++--- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 5696e6f9911..21d2622245c 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -100,7 +100,10 @@ }, "exceptions": { "delete_todos_failed": { - "message": "Unable to delete {count} Habitica to-do(s), please try again" + "message": "Unable to delete item from Habitica to-do list, please try again" + }, + "delete_completed_todos_failed": { + "message": "Unable to delete completed to-do items from Habitica to-do list, please try again" }, "move_todos_item_failed": { "message": "Unable to move the Habitica to-do to position {pos}, please try again" diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py index ab458f9f59f..451db1e6806 100644 --- a/homeassistant/components/habitica/todo.py +++ b/homeassistant/components/habitica/todo.py @@ -75,14 +75,23 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): async def async_delete_todo_items(self, uids: list[str]) -> None: """Delete Habitica tasks.""" - for task_id in uids: + if len(uids) > 1 and self.entity_description.key is HabiticaTodoList.TODOS: try: - await self.coordinator.api.tasks[task_id].delete() + await self.coordinator.api.tasks.clearCompletedTodos.post() except ClientResponseError as e: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key=f"delete_{self.entity_description.key}_failed", + translation_key="delete_completed_todos_failed", ) from e + else: + for task_id in uids: + try: + await self.coordinator.api.tasks[task_id].delete() + except ClientResponseError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key=f"delete_{self.entity_description.key}_failed", + ) from e await self.coordinator.async_refresh() From 142469be9558500c5ef78e280b2742672b998570 Mon Sep 17 00:00:00 2001 From: Austin Mroczek Date: Thu, 15 Aug 2024 11:25:56 -0700 Subject: [PATCH 0780/1635] TotalConnect state attribute deprecation warning (#122320) * add warning comment * make comments smaller and put at top * Update homeassistant/components/totalconnect/alarm_control_panel.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/totalconnect/alarm_control_panel.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index 49d97e45e00..edbbbb06e70 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -103,6 +103,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): @property def state(self) -> str | None: """Return the state of the device.""" + # State attributes can be removed in 2025.3 attr = { "location_id": self._location.location_id, "partition": self._partition_id, From 37328c78c142d430abd6a5dd2c07fe93a1142743 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Thu, 15 Aug 2024 21:29:32 +0200 Subject: [PATCH 0781/1635] Use 
snapshot in UniFi switch tests (#122871) * Use snapshot in UniFi switch tests * Fix review comment --- .../unifi/snapshots/test_switch.ambr | 2473 +++++++++++++++++ tests/components/unifi/test_switch.py | 132 +- 2 files changed, 2518 insertions(+), 87 deletions(-) create mode 100644 tests/components/unifi/snapshots/test_switch.ambr diff --git a/tests/components/unifi/snapshots/test_switch.ambr b/tests/components/unifi/snapshots/test_switch.ambr new file mode 100644 index 00000000000..04b15f329fd --- /dev/null +++ b/tests/components/unifi/snapshots/test_switch.ambr @@ -0,0 +1,2473 @@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_1_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 1 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_1_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_2_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 2 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 2 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_2_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_4_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 4 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 4 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_4_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_restart-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Restart', + }), + 'context': , + 'entity_id': 'button.mock_name_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 
'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 
'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# 
name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'block-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Block Client 1', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.block_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 
'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'block-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Block Client 1', + 'icon': 'mdi:ethernet', + }), + 'context': , + 
'entity_id': 'switch.block_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': 
None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_test_traffic_rule', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:security-network', + 'original_name': 'Test Traffic Rule', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'traffic_rule-6452cd9b859d5b11aa002ea1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network Test Traffic Rule', + 'icon': 'mdi:security-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_test_traffic_rule', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py index 6d85437a244..ef93afa7e3e 100644 --- a/tests/components/unifi/test_switch.py +++ b/tests/components/unifi/test_switch.py @@ -3,15 +3,16 @@ from copy import deepcopy from datetime import timedelta from typing import Any +from unittest.mock import patch from aiounifi.models.message import MessageKey import pytest +from syrupy import SnapshotAssertion from homeassistant.components.switch import ( DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON, - SwitchDeviceClass, ) from homeassistant.components.unifi.const import ( CONF_BLOCK_CLIENT, @@ -23,13 +24,12 @@ from homeassistant.components.unifi.const import ( ) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( - ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -43,7 +43,7 @@ from .conftest import ( WebsocketStateManager, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_1 = { @@ -810,6 +810,34 @@ TRAFFIC_RULE = { } +@pytest.mark.parametrize( + "config_entry_options", [{CONF_BLOCK_CLIENT: [BLOCKED["mac"]]}] +) +@pytest.mark.parametrize("client_payload", [[BLOCKED]]) +@pytest.mark.parametrize("device_payload", [[DEVICE_1, OUTLET_UP1, PDU_DEVICE_1]]) +@pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) +@pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) +@pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + 
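The snapshot data above is the syrupy `.ambr` content recorded for the UniFi switch platform: one `EntityRegistryEntrySnapshot` plus one `StateSnapshot` per entity, repeated for each parametrization of the new `test_entity_and_device_data` test. The sketch below shows only the general shape of that snapshot pattern; it is not part of the patch, and it assumes Home Assistant's standard test fixtures (`hass`, `entity_registry`, `snapshot`) together with an integration-specific `config_entry_factory` fixture from the local conftest.

# Illustrative sketch (not patch content): snapshot every entity a single platform creates.
from unittest.mock import patch

from syrupy import SnapshotAssertion

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from tests.common import snapshot_platform


async def test_platform_snapshot(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    config_entry_factory,  # assumed integration fixture that sets up the config entry
    snapshot: SnapshotAssertion,
) -> None:
    """Set up one platform and compare all of its entities to the stored snapshot."""
    # Restrict setup to a single platform so only its entities land in the snapshot.
    with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SWITCH]):
        config_entry = await config_entry_factory()
    # Checks every registry entry and state against the .ambr data shown above.
    await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)

When the expected output changes, the `.ambr` file is regenerated rather than edited by hand, typically by rerunning the test with syrupy's update option (for example `pytest tests/components/unifi/test_switch.py --snapshot-update`).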
@pytest.mark.parametrize("client_payload", [[CONTROLLER_HOST]]) @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") @@ -819,18 +847,6 @@ async def test_hub_not_client(hass: HomeAssistant) -> None: assert hass.states.get("switch.cloud_key") is None -@pytest.mark.parametrize("client_payload", [[CLIENT_1]]) -@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_admin(hass: HomeAssistant) -> None: - """Test that switch platform only work on an admin account.""" - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 - - @pytest.mark.parametrize( "config_entry_options", [ @@ -841,40 +857,17 @@ async def test_not_admin(hass: HomeAssistant) -> None: } ], ) -@pytest.mark.parametrize("client_payload", [[CLIENT_4]]) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED, CLIENT_1]]) @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3 - switch_4 = hass.states.get("switch.poe_client_4") - assert switch_4 is None - - blocked = hass.states.get("switch.block_client_1") - assert blocked is not None - assert blocked.state == "off" - - unblocked = hass.states.get("switch.block_client_2") - assert unblocked is not None - assert unblocked.state == "on" - - dpi_switch = hass.states.get("switch.block_media_streaming") - assert dpi_switch is not None - assert dpi_switch.state == "on" - assert dpi_switch.attributes["icon"] == "mdi:network" - - for entry_id in ("switch.block_client_1", "switch.block_media_streaming"): - assert ( - entity_registry.async_get(entry_id).entity_category is EntityCategory.CONFIG - ) - # Block and unblock client aioclient_mock.clear_requests() aioclient_mock.post( @@ -1038,10 +1031,7 @@ async def test_dpi_switches( """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - dpi_switch = hass.states.get("switch.block_media_streaming") - assert dpi_switch is not None - assert dpi_switch.state == STATE_ON - assert dpi_switch.attributes["icon"] == "mdi:network" + assert hass.states.get("switch.block_media_streaming").state == STATE_ON mock_websocket_message(data=DPI_APP_DISABLED_EVENT) await hass.async_block_till_done() @@ -1118,20 +1108,18 @@ async def test_traffic_rules( traffic_rule_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi traffic rules.""" - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 # Validate state object - switch_1 = hass.states.get("switch.unifi_network_test_traffic_rule") - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.unifi_network_test_traffic_rule").state == STATE_ON traffic_rule = deepcopy(traffic_rule_payload[0]) # Disable traffic rule aioclient_mock.put( f"https://{config_entry_setup.data[CONF_HOST]}:1234" - f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}/trafficrules/{traffic_rule['_id']}", + f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}" + 
f"/trafficrules/{traffic_rule['_id']}", ) call_count = aioclient_mock.call_count @@ -1188,10 +1176,7 @@ async def test_outlet_switches( assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == expected_switches # Validate state object - switch_1 = hass.states.get(f"switch.{entity_id}") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET + assert hass.states.get(f"switch.{entity_id}").state == STATE_ON # Update state object device_1 = deepcopy(device_payload[0]) @@ -1250,15 +1235,6 @@ async def test_outlet_switches( await hass.async_block_till_done() assert hass.states.get(f"switch.{entity_id}").state == STATE_OFF - # Unload config entry - await hass.config_entries.async_unload(config_entry_setup.entry_id) - assert hass.states.get(f"switch.{entity_id}").state == STATE_UNAVAILABLE - - # Remove config entry - await hass.config_entries.async_remove(config_entry_setup.entry_id) - await hass.async_block_till_done() - assert hass.states.get(f"switch.{entity_id}") is None - @pytest.mark.parametrize( "config_entry_options", @@ -1359,8 +1335,8 @@ async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Test PoE port entities work.""" @@ -1368,7 +1344,6 @@ async def test_poe_port_switches( ent_reg_entry = entity_registry.async_get("switch.mock_name_port_1_poe") assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.CONFIG # Enable entity entity_registry.async_update_entity( @@ -1385,10 +1360,7 @@ async def test_poe_port_switches( await hass.async_block_till_done() # Validate state object - switch_1 = hass.states.get("switch.mock_name_port_1_poe") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET + assert hass.states.get("switch.mock_name_port_1_poe").state == STATE_ON # Update state object device_1 = deepcopy(device_payload[0]) @@ -1456,24 +1428,16 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, wlan_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi WLAN availability.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("switch.ssid_1") - assert ent_reg_entry.unique_id == "wlan-012345678910111213141516" - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - # Validate state object - switch_1 = hass.states.get("switch.ssid_1") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.ssid_1").state == STATE_ON # Update state object wlan = deepcopy(wlan_payload[0]) @@ -1512,24 +1476,16 @@ async def test_wlan_switches( @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) async def test_port_forwarding_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, 
aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, port_forward_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi port forwarding.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("switch.unifi_network_plex") - assert ent_reg_entry.unique_id == "port_forward-5a32aa4ee4b0412345678911" - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - # Validate state object - switch_1 = hass.states.get("switch.unifi_network_plex") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.unifi_network_plex").state == STATE_ON # Update state object data = port_forward_payload[0].copy() @@ -1648,6 +1604,7 @@ async def test_updating_unique_id( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -1661,6 +1618,7 @@ async def test_hub_state_change( "switch.plug_outlet_1", "switch.block_media_streaming", "switch.unifi_network_plex", + "switch.unifi_network_test_traffic_rule", "switch.ssid_1", ) for entity_id in entity_ids: From 2f8766a9ece1c425a46789831a0cb0498b16f3e8 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Fri, 16 Aug 2024 07:52:18 +1000 Subject: [PATCH 0782/1635] Fix rear trunk logic in Tessie (#124011) Allow open to be anything not zero --- homeassistant/components/tessie/cover.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/tessie/cover.py b/homeassistant/components/tessie/cover.py index 93ce25993d9..e739f8c074d 100644 --- a/homeassistant/components/tessie/cover.py +++ b/homeassistant/components/tessie/cover.py @@ -168,13 +168,13 @@ class TessieRearTrunkEntity(TessieEntity, CoverEntity): async def async_open_cover(self, **kwargs: Any) -> None: """Open rear trunk.""" - if self._value == TessieCoverStates.CLOSED: + if self.is_closed: await self.run(open_close_rear_trunk) self.set((self.key, TessieCoverStates.OPEN)) async def async_close_cover(self, **kwargs: Any) -> None: """Close rear trunk.""" - if self._value == TessieCoverStates.OPEN: + if not self.is_closed: await self.run(open_close_rear_trunk) self.set((self.key, TessieCoverStates.CLOSED)) From a944541c584188fc8eb6c19746be250afa226c3e Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 16 Aug 2024 04:36:06 +0200 Subject: [PATCH 0783/1635] Bump aiounifi to v80 (#124004) --- homeassistant/components/unifi/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/unifi/manifest.json b/homeassistant/components/unifi/manifest.json index aa9b553cb67..6f92dec5361 100644 --- a/homeassistant/components/unifi/manifest.json +++ b/homeassistant/components/unifi/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["aiounifi"], "quality_scale": "platinum", - "requirements": ["aiounifi==79"], + "requirements": ["aiounifi==80"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git 
a/requirements_all.txt b/requirements_all.txt index 920241247b9..87e1d39dcb9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -386,7 +386,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==79 +aiounifi==80 # homeassistant.components.vlc_telnet aiovlc==0.3.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ef99d18d1fe..12227469600 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -368,7 +368,7 @@ aiotankerkoenig==0.4.1 aiotractive==0.6.0 # homeassistant.components.unifi -aiounifi==79 +aiounifi==80 # homeassistant.components.vlc_telnet aiovlc==0.3.2 From 6d9764185ba05d03db3d5a97549fd0bb6848dbd2 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Fri, 16 Aug 2024 09:22:00 +0200 Subject: [PATCH 0784/1635] Bump pypck to 0.7.21 (#124023) --- homeassistant/components/lcn/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 44a4d683c81..e9717774e17 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.20"] + "requirements": ["pypck==0.7.21"] } diff --git a/requirements_all.txt b/requirements_all.txt index 87e1d39dcb9..2452ce37fac 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2100,7 +2100,7 @@ pyownet==0.10.0.post1 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.20 +pypck==0.7.21 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 12227469600..7302bbe5bab 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1681,7 +1681,7 @@ pyoverkiz==1.13.14 pyownet==0.10.0.post1 # homeassistant.components.lcn -pypck==0.7.20 +pypck==0.7.21 # homeassistant.components.pjlink pypjlink2==1.2.1 From f9ade788eb170f7912151b30d73ca5975b261ca1 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 10:01:12 +0200 Subject: [PATCH 0785/1635] Do sanity check EntityPlatform.async_register_entity_service schema (#123058) * Do a sanity check of schema passed to EntityPlatform.async_register_entity_service * Only attempt to check schema of Schema * Handle All/Any wrapped in schema * Clarify comment * Apply suggestions from code review Co-authored-by: Robert Resch --------- Co-authored-by: Robert Resch --- homeassistant/helpers/entity_platform.py | 16 ++++++++++++++++ tests/helpers/test_entity_platform.py | 23 +++++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index f3d5f5b076a..ec177fbf316 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -10,6 +10,8 @@ from functools import partial from logging import Logger, getLogger from typing import TYPE_CHECKING, Any, Protocol +import voluptuous as vol + from homeassistant import config_entries from homeassistant.const import ( ATTR_RESTORED, @@ -999,6 +1001,20 @@ class EntityPlatform: if schema is None or isinstance(schema, dict): schema = cv.make_entity_service_schema(schema) + # Do a sanity check to check this is a valid entity service schema, + # the check could be extended to require All/Any to have sub 
schema(s) + # with all entity service fields + elif ( + # Don't check All/Any + not isinstance(schema, (vol.All, vol.Any)) + # Don't check All/Any wrapped in schema + and not isinstance(schema.schema, (vol.All, vol.Any)) + and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) + ): + raise HomeAssistantError( + "The schema does not include all required keys: " + f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" + ) service_func: str | HassJob[..., Any] service_func = func if isinstance(func, str) else HassJob(func) diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index be8ba998481..50180ecd844 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -8,6 +8,7 @@ from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch import pytest +import voluptuous as vol from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE, EntityCategory from homeassistant.core import ( @@ -1788,6 +1789,28 @@ async def test_register_entity_service_none_schema( assert entity2 in entities +async def test_register_entity_service_non_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test attempting to register a service with an incomplete schema.""" + entity_platform = MockEntityPlatform( + hass, domain="mock_integration", platform_name="mock_platform", platform=None + ) + + with pytest.raises( + HomeAssistantError, + match=( + "The schema does not include all required keys: entity_id, device_id, area_id, " + "floor_id, label_id" + ), + ): + entity_platform.async_register_entity_service( + "hello", + vol.Schema({"some": str}), + Mock(), + ) + + @pytest.mark.parametrize("update_before_add", [True, False]) async def test_invalid_entity_id( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, update_before_add: bool From 66a873333326ee250084ced1e9400b639c549b8e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 10:26:12 +0200 Subject: [PATCH 0786/1635] Add missing return type in test __init__ method (part 4) (#123947) --- tests/components/broadlink/__init__.py | 31 +++++++++++++++++--------- tests/components/ffmpeg/test_init.py | 7 +++++- tests/components/recorder/test_init.py | 2 +- tests/components/siren/test_init.py | 2 +- 4 files changed, 28 insertions(+), 14 deletions(-) diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index 61ef27815fd..207014d0958 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -115,18 +115,27 @@ class BroadlinkDevice: """Representation of a Broadlink device.""" def __init__( - self, name, host, mac, model, manufacturer, type_, devtype, fwversion, timeout - ): + self, + name: str, + host: str, + mac: str, + model: str, + manufacturer: str, + type_: str, + devtype: int, + fwversion: int, + timeout: int, + ) -> None: """Initialize the device.""" - self.name: str = name - self.host: str = host - self.mac: str = mac - self.model: str = model - self.manufacturer: str = manufacturer - self.type: str = type_ - self.devtype: int = devtype - self.timeout: int = timeout - self.fwversion: int = fwversion + self.name = name + self.host = host + self.mac = mac + self.model = model + self.manufacturer = manufacturer + self.type = type_ + self.devtype = devtype + self.timeout = timeout + self.fwversion = fwversion async def setup_entry(self, hass, mock_api=None, mock_entry=None): """Set up the device.""" diff 
--git a/tests/components/ffmpeg/test_init.py b/tests/components/ffmpeg/test_init.py index 353b8fdfcc0..ceefed8d62b 100644 --- a/tests/components/ffmpeg/test_init.py +++ b/tests/components/ffmpeg/test_init.py @@ -54,7 +54,12 @@ def async_restart(hass, entity_id=None): class MockFFmpegDev(ffmpeg.FFmpegBase): """FFmpeg device mock.""" - def __init__(self, hass, initial_state=True, entity_id="test.ffmpeg_device"): + def __init__( + self, + hass: HomeAssistant, + initial_state: bool = True, + entity_id: str = "test.ffmpeg_device", + ) -> None: """Initialize mock.""" super().__init__(None, initial_state) diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 3cd4c3ab4b6..c8e58d58105 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -2322,7 +2322,7 @@ async def test_connect_args_priority(hass: HomeAssistant, config_url) -> None: __bases__ = [] _has_events = False - def __init__(*args, **kwargs): ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... @property def is_async(self): diff --git a/tests/components/siren/test_init.py b/tests/components/siren/test_init.py index 168300d0abe..475b32540b4 100644 --- a/tests/components/siren/test_init.py +++ b/tests/components/siren/test_init.py @@ -27,7 +27,7 @@ class MockSirenEntity(SirenEntity): supported_features=0, available_tones_as_attr=None, available_tones_in_desc=None, - ): + ) -> None: """Initialize mock siren entity.""" self._attr_supported_features = supported_features if available_tones_as_attr is not None: From 4b62dcfd19b992abb39f33eccae04dde8c5484aa Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Fri, 16 Aug 2024 11:41:04 +0200 Subject: [PATCH 0787/1635] Improve rate limit handling in Habitica integration (#121763) * Adjustments to requests and update interval due to rate limiting * Use debounced refresh for to-do lists * Use debounced refresh in switch and buttons * Request refresh only if a to-do was changed * Update task order provisionally in the coordinator --- homeassistant/components/habitica/button.py | 2 +- .../components/habitica/coordinator.py | 14 ++++- homeassistant/components/habitica/todo.py | 54 +++++++++++++------ 3 files changed, 50 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/habitica/button.py b/homeassistant/components/habitica/button.py index cdd166a4444..276aa4e7fc0 100644 --- a/homeassistant/components/habitica/button.py +++ b/homeassistant/components/habitica/button.py @@ -113,7 +113,7 @@ class HabiticaButton(HabiticaBase, ButtonEntity): translation_key="service_call_exception", ) from e else: - await self.coordinator.async_refresh() + await self.coordinator.async_request_refresh() @property def available(self) -> bool: diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index 9d0ebe651e3..4e949b703fb 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -15,6 +15,7 @@ from habitipy.aio import HabitipyAsync from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -41,7 +42,13 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): hass, _LOGGER, 
name=DOMAIN, - update_interval=timedelta(seconds=30), + update_interval=timedelta(seconds=60), + request_refresh_debouncer=Debouncer( + hass, + _LOGGER, + cooldown=5, + immediate=False, + ), ) self.api = habitipy @@ -51,6 +58,9 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): tasks_response = await self.api.tasks.user.get() tasks_response.extend(await self.api.tasks.user.get(type="completedTodos")) except ClientResponseError as error: + if error.status == HTTPStatus.TOO_MANY_REQUESTS: + _LOGGER.debug("Currently rate limited, skipping update") + return self.data raise UpdateFailed(f"Error communicating with API: {error}") from error return HabiticaData(user=user_response, tasks=tasks_response) @@ -73,4 +83,4 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): translation_key="service_call_exception", ) from e else: - await self.async_refresh() + await self.async_request_refresh() diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py index 451db1e6806..ae739d47262 100644 --- a/homeassistant/components/habitica/todo.py +++ b/homeassistant/components/habitica/todo.py @@ -93,7 +93,7 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): translation_key=f"delete_{self.entity_description.key}_failed", ) from e - await self.coordinator.async_refresh() + await self.coordinator.async_request_refresh() async def async_move_todo_item( self, uid: str, previous_uid: str | None = None @@ -121,9 +121,22 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): translation_key=f"move_{self.entity_description.key}_item_failed", translation_placeholders={"pos": str(pos)}, ) from e + else: + # move tasks in the coordinator until we have fresh data + tasks = self.coordinator.data.tasks + new_pos = ( + tasks.index(next(task for task in tasks if task["id"] == previous_uid)) + + 1 + if previous_uid + else 0 + ) + old_pos = tasks.index(next(task for task in tasks if task["id"] == uid)) + tasks.insert(new_pos, tasks.pop(old_pos)) + await self.coordinator.async_request_refresh() async def async_update_todo_item(self, item: TodoItem) -> None: """Update a Habitica todo.""" + refresh_required = False current_item = next( (task for task in (self.todo_items or []) if task.uid == item.uid), None, @@ -132,7 +145,6 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): if TYPE_CHECKING: assert item.uid assert current_item - assert item.due if ( self.entity_description.key is HabiticaTodoList.TODOS @@ -142,18 +154,24 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): else: date = None - try: - await self.coordinator.api.tasks[item.uid].put( - text=item.summary, - notes=item.description or "", - date=date, - ) - except ClientResponseError as e: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key=f"update_{self.entity_description.key}_item_failed", - translation_placeholders={"name": item.summary or ""}, - ) from e + if ( + item.summary != current_item.summary + or item.description != current_item.description + or item.due != current_item.due + ): + try: + await self.coordinator.api.tasks[item.uid].put( + text=item.summary, + notes=item.description or "", + date=date, + ) + refresh_required = True + except ClientResponseError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key=f"update_{self.entity_description.key}_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e try: # Score up or down if item status 
changed @@ -164,6 +182,7 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): score_result = ( await self.coordinator.api.tasks[item.uid].score["up"].post() ) + refresh_required = True elif ( current_item.status is TodoItemStatus.COMPLETED and item.status == TodoItemStatus.NEEDS_ACTION @@ -171,6 +190,7 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): score_result = ( await self.coordinator.api.tasks[item.uid].score["down"].post() ) + refresh_required = True else: score_result = None @@ -189,8 +209,8 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): persistent_notification.async_create( self.hass, message=msg, title="Habitica" ) - - await self.coordinator.async_refresh() + if refresh_required: + await self.coordinator.async_request_refresh() class HabiticaTodosListEntity(BaseHabiticaListEntity): @@ -254,7 +274,7 @@ class HabiticaTodosListEntity(BaseHabiticaListEntity): translation_placeholders={"name": item.summary or ""}, ) from e - await self.coordinator.async_refresh() + await self.coordinator.async_request_refresh() class HabiticaDailiesListEntity(BaseHabiticaListEntity): From 1f214bec9376c836c89763c89214c54c43e477d5 Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Fri, 16 Aug 2024 05:49:00 -0400 Subject: [PATCH 0788/1635] Add Sonos tests for media_player shuffle and repeat (#122816) * initial commit * initial commit * update comments --- tests/components/sonos/test_media_player.py | 146 +++++++++++++++++++- 1 file changed, 145 insertions(+), 1 deletion(-) diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index c765ed82ac6..599a04b806a 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -12,6 +12,7 @@ from homeassistant.components.media_player import ( SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, MediaPlayerEnqueue, + RepeatMode, ) from homeassistant.components.sonos.const import ( DOMAIN as SONOS_DOMAIN, @@ -25,6 +26,8 @@ from homeassistant.components.sonos.media_player import ( VOLUME_INCREMENT, ) from homeassistant.const import ( + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, @@ -39,7 +42,7 @@ from homeassistant.helpers.device_registry import ( ) from homeassistant.setup import async_setup_component -from .conftest import MockMusicServiceItem, MockSoCo, SoCoMockFactory +from .conftest import MockMusicServiceItem, MockSoCo, SoCoMockFactory, SonosMockEvent async def test_device_registry( @@ -683,6 +686,147 @@ async def test_select_source_error( assert "Could not find a Sonos favorite" in str(sve.value) +async def test_shuffle_set( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the set shuffle method.""" + assert soco.play_mode == "NORMAL" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SHUFFLE_SET, + { + "entity_id": "media_player.zone_a", + "shuffle": True, + }, + blocking=True, + ) + assert soco.play_mode == "SHUFFLE_NOREPEAT" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_SHUFFLE_SET, + { + "entity_id": "media_player.zone_a", + "shuffle": False, + }, + blocking=True, + ) + assert soco.play_mode == "NORMAL" + + +async def test_shuffle_get( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + no_media_event: SonosMockEvent, +) -> None: + """Test the get shuffle attribute by simulating a Sonos Event.""" + subscription = soco.avTransport.subscribe.return_value + 
sub_callback = subscription.callback + + state = hass.states.get("media_player.zone_a") + assert state.attributes["shuffle"] is False + + no_media_event.variables["current_play_mode"] = "SHUFFLE_NOREPEAT" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes["shuffle"] is True + + # The integration keeps a copy of the last event to check for + # changes, so we create a new event. + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "NORMAL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes["shuffle"] is False + + +async def test_repeat_set( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the set repeat method.""" + assert soco.play_mode == "NORMAL" + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + "entity_id": "media_player.zone_a", + "repeat": RepeatMode.ALL, + }, + blocking=True, + ) + assert soco.play_mode == "REPEAT_ALL" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + "entity_id": "media_player.zone_a", + "repeat": RepeatMode.ONE, + }, + blocking=True, + ) + assert soco.play_mode == "REPEAT_ONE" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + "entity_id": "media_player.zone_a", + "repeat": RepeatMode.OFF, + }, + blocking=True, + ) + assert soco.play_mode == "NORMAL" + + +async def test_repeat_get( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + no_media_event: SonosMockEvent, +) -> None: + """Test the get repeat attribute by simulating a Sonos Event.""" + subscription = soco.avTransport.subscribe.return_value + sub_callback = subscription.callback + + state = hass.states.get("media_player.zone_a") + assert state.attributes["repeat"] == RepeatMode.OFF + + no_media_event.variables["current_play_mode"] = "REPEAT_ALL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes["repeat"] == RepeatMode.ALL + + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "REPEAT_ONE" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes["repeat"] == RepeatMode.ONE + + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "NORMAL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes["repeat"] == RepeatMode.OFF + + async def test_play_media_favorite_item_id( hass: HomeAssistant, soco_factory: SoCoMockFactory, From 723ea6173e7bff7ab9b277a31a5ff4d5dad620e9 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 16 Aug 2024 05:04:57 -0500 Subject: [PATCH 0789/1635] Add Python-2.0.1 license to list of approved licenses (#124020) https://spdx.org/licenses/Python-2.0.1.html --- script/licenses.py | 1 + 1 file changed, 1 insertion(+) diff --git a/script/licenses.py b/script/licenses.py index 1e01fb6111b..9d7da912398 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -116,6 +116,7 @@ OSI_APPROVED_LICENSES = { "Unlicense", "Apache-2", "GPLv2", + "Python-2.0.1", } EXCEPTIONS = { From 461ef335536ab3bf2f075e2832721a9a467d8bed Mon Sep 17 00:00:00 2001 From: WebSpider Date: Fri, 16 Aug 2024 12:27:20 +0200 Subject: [PATCH 0790/1635] Bump meteoalert to 0.3.1 (#123848) Bump meteoalertapi to 0.3.1 --- homeassistant/components/meteoalarm/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/meteoalarm/manifest.json b/homeassistant/components/meteoalarm/manifest.json index 9a41e8a3062..4de91f6a431 100644 --- a/homeassistant/components/meteoalarm/manifest.json +++ b/homeassistant/components/meteoalarm/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/meteoalarm", "iot_class": "cloud_polling", "loggers": ["meteoalertapi"], - "requirements": ["meteoalertapi==0.3.0"] + "requirements": ["meteoalertapi==0.3.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2452ce37fac..f968b8addf5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1321,7 +1321,7 @@ melnor-bluetooth==0.0.25 messagebird==1.2.0 # homeassistant.components.meteoalarm -meteoalertapi==0.3.0 +meteoalertapi==0.3.1 # homeassistant.components.meteo_france meteofrance-api==1.3.0 From 0093276e93c25c58830098f4668563c553d80d5a Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 16 Aug 2024 12:46:51 +0200 Subject: [PATCH 0791/1635] Reolink add 100% coverage of binary_sensor platfrom (#123862) * Implement 100% coverage of binary_sensor * fix styling * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * use get().state instead of is_state * Remove unneeded "is True" * Remove unneeded "is True" * reset the mock and use assert_not_called * use freezer * fix styling * fix styling --------- Co-authored-by: Joost Lekkerkerker --- tests/components/reolink/conftest.py | 4 - .../components/reolink/test_binary_sensor.py | 52 ++++++++++++ tests/components/reolink/test_config_flow.py | 9 +- tests/components/reolink/test_host.py | 85 +++++++++++++++++++ tests/components/reolink/test_init.py | 23 +++-- tests/components/reolink/test_media_source.py | 6 +- tests/components/reolink/test_select.py | 31 ++++--- 7 files changed, 170 insertions(+), 40 deletions(-) create mode 100644 tests/components/reolink/test_binary_sensor.py create mode 100644 tests/components/reolink/test_host.py diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index 981dcc30e60..ddea36cb292 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -53,10 +53,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: def reolink_connect_class() -> Generator[MagicMock]: """Mock reolink connection and return both the host_mock and host_mock_class.""" with ( - patch( - "homeassistant.components.reolink.host.webhook.async_register", - return_value=True, - ), patch( "homeassistant.components.reolink.host.Host", autospec=True ) as host_mock_class, diff --git a/tests/components/reolink/test_binary_sensor.py 
b/tests/components/reolink/test_binary_sensor.py new file mode 100644 index 00000000000..e02742afe1d --- /dev/null +++ b/tests/components/reolink/test_binary_sensor.py @@ -0,0 +1,52 @@ +"""Test the Reolink binary sensor platform.""" + +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import TEST_NVR_NAME, TEST_UID + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import ClientSessionGenerator + + +async def test_motion_sensor( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor entity with motion sensor.""" + reolink_connect.model = "Reolink Duo PoE" + reolink_connect.motion_detected.return_value = True + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion_lens_0" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.motion_detected.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test webhook callback + reolink_connect.motion_detected.return_value = True + reolink_connect.ONVIF_event_callback.return_value = [0] + webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + + assert hass.states.get(entity_id).state == STATE_ON diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 6e57a7924e7..55dd0d4fea9 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Reolink config flow.""" -from datetime import timedelta import json from typing import Any from unittest.mock import AsyncMock, MagicMock, call +from freezegun.api import FrozenDateTimeFactory import pytest from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError @@ -25,7 +25,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.device_registry import format_mac -from homeassistant.util.dt import utcnow from .conftest import ( DHCP_FORMATTED_MAC, @@ -439,6 +438,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) async def test_dhcp_ip_update( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect_class: MagicMock, reolink_connect: MagicMock, last_update_success: bool, @@ -472,9 +472,8 @@ async def test_dhcp_ip_update( if not last_update_success: # ensure the last_update_succes is False for the device_coordinator. 
reolink_connect.get_states = AsyncMock(side_effect=ReolinkError("Test error")) - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(minutes=1) - ) + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() dhcp_data = dhcp.DhcpServiceInfo( diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py new file mode 100644 index 00000000000..690bfd035f8 --- /dev/null +++ b/tests/components/reolink/test_host.py @@ -0,0 +1,85 @@ +"""Test the Reolink host.""" + +from asyncio import CancelledError +from unittest.mock import AsyncMock, MagicMock + +from aiohttp import ClientResponseError +import pytest + +from homeassistant.components.reolink import const +from homeassistant.components.webhook import async_handle_webhook +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.util.aiohttp import MockRequest + +from .conftest import TEST_UID + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_webhook_callback( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test webhook callback with motion sensor.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + + signal_all = MagicMock() + signal_ch = MagicMock() + async_dispatcher_connect(hass, f"{webhook_id}_all", signal_all) + async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) + + client = await hass_client_no_auth() + + # test webhook callback success all channels + reolink_connect.ONVIF_event_callback.return_value = None + await client.post(f"/api/webhook/{webhook_id}") + signal_all.assert_called_once() + + # test webhook callback all channels with failure to read motion_state + signal_all.reset_mock() + reolink_connect.get_motion_state_all_ch.return_value = False + await client.post(f"/api/webhook/{webhook_id}") + signal_all.assert_not_called() + + # test webhook callback success single channel + reolink_connect.ONVIF_event_callback.return_value = [0] + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + signal_ch.assert_called_once() + + # test webhook callback single channel with error in event callback + signal_ch.reset_mock() + reolink_connect.ONVIF_event_callback = AsyncMock( + side_effect=Exception("Test error") + ) + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + signal_ch.assert_not_called() + + # test failure to read date from webhook post + request = MockRequest( + method="POST", + content=bytes("test", "utf-8"), + mock_source="test", + ) + request.read = AsyncMock(side_effect=ConnectionResetError("Test error")) + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() + + request.read = AsyncMock(side_effect=ClientResponseError("Test error", "Test")) + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() + + request.read = AsyncMock(side_effect=CancelledError("Test error")) + with pytest.raises(CancelledError): + await async_handle_webhook(hass, webhook_id, 
request) + signal_all.assert_not_called() diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 1c93114217c..f5cd56a05d2 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -1,10 +1,10 @@ """Test the Reolink init.""" import asyncio -from datetime import timedelta from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from reolink_aio.api import Chime from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError @@ -25,7 +25,6 @@ from homeassistant.helpers import ( issue_registry as ir, ) from homeassistant.setup import async_setup_component -from homeassistant.util.dt import utcnow from .conftest import ( TEST_CAM_MODEL, @@ -104,6 +103,7 @@ async def test_failures_parametrized( async def test_firmware_error_twice( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, ) -> None: @@ -112,31 +112,31 @@ async def test_firmware_error_twice( side_effect=ReolinkError("Test error") ) with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.UPDATE}.{TEST_NVR_NAME}_firmware" - assert hass.states.is_state(entity_id, STATE_OFF) + assert hass.states.get(entity_id).state == STATE_OFF - async_fire_time_changed( - hass, utcnow() + FIRMWARE_UPDATE_INTERVAL + timedelta(minutes=1) - ) + freezer.tick(FIRMWARE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.is_state(entity_id, STATE_UNAVAILABLE) + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE async def test_credential_error_three( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry, ) -> None: """Test when the update gives credential error 3 times.""" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED @@ -147,9 +147,8 @@ async def test_credential_error_three( issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) - ) + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() assert (HOMEASSISTANT_DOMAIN, issue_id) in issue_registry.issues diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 66ed32ca823..b09c267fcfd 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -275,7 +275,7 @@ async def test_browsing_rec_playback_unsupported( reolink_connect.api_version.return_value = 0 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await 
hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # browse root @@ -296,7 +296,7 @@ async def test_browsing_errors( reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # browse root @@ -315,7 +315,7 @@ async def test_browsing_not_loaded( reolink_connect.api_version.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() reolink_connect.get_host_data = AsyncMock(side_effect=ReolinkError("Test error")) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 5536797d7d3..5536e85afb9 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -1,8 +1,8 @@ """Test the Reolink select platform.""" -from datetime import timedelta from unittest.mock import AsyncMock, MagicMock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from reolink_aio.api import Chime from reolink_aio.exceptions import InvalidParameterError, ReolinkError @@ -19,7 +19,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er -from homeassistant.util.dt import utcnow from .conftest import TEST_NVR_NAME @@ -28,18 +27,19 @@ from tests.common import MockConfigEntry, async_fire_time_changed async def test_floodlight_mode_select( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, entity_registry: er.EntityRegistry, ) -> None: """Test select entity with floodlight_mode.""" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" - assert hass.states.is_state(entity_id, "auto") + assert hass.states.get(entity_id).state == "auto" reolink_connect.set_whiteled = AsyncMock() await hass.services.async_call( @@ -71,12 +71,11 @@ async def test_floodlight_mode_select( ) reolink_connect.whiteled_mode.return_value = -99 # invalid value - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) - ) + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.is_state(entity_id, STATE_UNKNOWN) + assert hass.states.get(entity_id).state == STATE_UNKNOWN async def test_play_quick_reply_message( @@ -88,12 +87,12 @@ async def test_play_quick_reply_message( """Test select play_quick_reply_message entity.""" reolink_connect.quick_reply_dict.return_value = {0: "off", 1: "test message"} with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) 
await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" - assert hass.states.is_state(entity_id, STATE_UNKNOWN) + assert hass.states.get(entity_id).state == STATE_UNKNOWN reolink_connect.play_quick_reply = AsyncMock() await hass.services.async_call( @@ -107,6 +106,7 @@ async def test_play_quick_reply_message( async def test_chime_select( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, test_chime: Chime, @@ -114,13 +114,13 @@ async def test_chime_select( ) -> None: """Test chime select entity.""" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" - assert hass.states.is_state(entity_id, "pianokey") + assert hass.states.get(entity_id).state == "pianokey" test_chime.set_tone = AsyncMock() await hass.services.async_call( @@ -150,9 +150,8 @@ async def test_chime_select( ) test_chime.event_info = {} - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) - ) + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.is_state(entity_id, STATE_UNKNOWN) + assert hass.states.get(entity_id).state == STATE_UNKNOWN From 99ab2566c25a3ba40b205302065740907675bd84 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 12:58:05 +0200 Subject: [PATCH 0792/1635] Correct water heater service schemas (#124038) * Correct water heater service schemas * Update tests --- .../components/water_heater/__init__.py | 42 ++++++------------- tests/components/water_heater/test_init.py | 5 ++- 2 files changed, 15 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 0b54a4c1aa4..2e749735b0c 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -35,7 +35,7 @@ from homeassistant.helpers.deprecation import ( from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.util.unit_conversion import TemperatureConverter from .const import DOMAIN @@ -94,29 +94,17 @@ CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE] _LOGGER = logging.getLogger(__name__) -ON_OFF_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids}) - -SET_AWAY_MODE_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_AWAY_MODE): cv.boolean, - } -) -SET_TEMPERATURE_SCHEMA = vol.Schema( - vol.All( - { - vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Optional(ATTR_OPERATION_MODE): cv.string, - } - ) -) -SET_OPERATION_MODE_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_OPERATION_MODE): cv.string, - } -) +SET_AWAY_MODE_SCHEMA: VolDictType = { 
+ vol.Required(ATTR_AWAY_MODE): cv.boolean, +} +SET_TEMPERATURE_SCHEMA: VolDictType = { + vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), + vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, + vol.Optional(ATTR_OPERATION_MODE): cv.string, +} +SET_OPERATION_MODE_SCHEMA: VolDictType = { + vol.Required(ATTR_OPERATION_MODE): cv.string, +} # mypy: disallow-any-generics @@ -145,12 +133,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: SET_OPERATION_MODE_SCHEMA, "async_handle_set_operation_mode", ) - component.async_register_entity_service( - SERVICE_TURN_OFF, ON_OFF_SERVICE_SCHEMA, "async_turn_off" - ) - component.async_register_entity_service( - SERVICE_TURN_ON, ON_OFF_SERVICE_SCHEMA, "async_turn_on" - ) return True diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index f883cf47b19..4e0f860366c 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -22,6 +22,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from tests.common import ( @@ -42,7 +43,7 @@ async def test_set_temp_schema_no_req( """Test the set temperature schema with missing required data.""" domain = "climate" service = "test_set_temperature" - schema = SET_TEMPERATURE_SCHEMA + schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) calls = async_mock_service(hass, domain, service, schema) data = {"hvac_mode": "off", "entity_id": ["climate.test_id"]} @@ -59,7 +60,7 @@ async def test_set_temp_schema( """Test the set temperature schema with ok required data.""" domain = "water_heater" service = "test_set_temperature" - schema = SET_TEMPERATURE_SCHEMA + schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) calls = async_mock_service(hass, domain, service, schema) data = { From f3e2d0692214f91b0cdc19b5c32e284b993c77e0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 13:21:39 +0200 Subject: [PATCH 0793/1635] Add missing hass type in tests/scripts (#124042) --- tests/scripts/test_auth.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/scripts/test_auth.py b/tests/scripts/test_auth.py index 002807f08a5..e52a2cc6567 100644 --- a/tests/scripts/test_auth.py +++ b/tests/scripts/test_auth.py @@ -1,5 +1,6 @@ """Test the auth script to manage local users.""" +import argparse from asyncio import AbstractEventLoop from collections.abc import Generator import logging @@ -148,7 +149,9 @@ def test_parsing_args(event_loop: AbstractEventLoop) -> None: """Test we parse args correctly.""" called = False - async def mock_func(hass, provider, args2): + async def mock_func( + hass: HomeAssistant, provider: hass_auth.AuthProvider, args2: argparse.Namespace + ) -> None: """Mock function to be called.""" nonlocal called called = True From 183c191d63d945ddf7b7d0a442696bf81081b413 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 16 Aug 2024 13:34:14 +0200 Subject: [PATCH 0794/1635] Allow raw mqtt payload to be in mqtt publish action (#123900) * Publish raw rendered mqtt payload as raw for mqtt publish action * Move check out of try block * Only try to eval `bytes` is payload starts with supported string Co-authored-by: Erik 
Montnemery * Improve docst * Add `evaluate_bytes` option to publish action * Rename to `evaluate_payload` * Update homeassistant/components/mqtt/strings.json Co-authored-by: Erik Montnemery * Extend test to assert literal_eval is called or not --------- Co-authored-by: Erik Montnemery --- homeassistant/components/mqtt/__init__.py | 7 +++ homeassistant/components/mqtt/models.py | 34 +++++------ homeassistant/components/mqtt/services.yaml | 5 ++ homeassistant/components/mqtt/strings.json | 4 ++ tests/components/mqtt/test_init.py | 68 +++++++++++++++++++++ 5 files changed, 101 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 013bd26e49c..b2adb7665fc 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -90,6 +90,7 @@ from .models import ( # noqa: F401 PublishPayloadType, ReceiveMessage, ReceivePayloadType, + convert_outgoing_mqtt_payload, ) from .subscription import ( # noqa: F401 EntitySubscription, @@ -115,6 +116,7 @@ SERVICE_DUMP = "dump" ATTR_TOPIC_TEMPLATE = "topic_template" ATTR_PAYLOAD_TEMPLATE = "payload_template" +ATTR_EVALUATE_PAYLOAD = "evaluate_payload" MAX_RECONNECT_WAIT = 300 # seconds @@ -166,6 +168,7 @@ MQTT_PUBLISH_SCHEMA = vol.All( vol.Exclusive(ATTR_TOPIC_TEMPLATE, CONF_TOPIC): cv.string, vol.Exclusive(ATTR_PAYLOAD, CONF_PAYLOAD): cv.string, vol.Exclusive(ATTR_PAYLOAD_TEMPLATE, CONF_PAYLOAD): cv.string, + vol.Optional(ATTR_EVALUATE_PAYLOAD): cv.boolean, vol.Optional(ATTR_QOS, default=DEFAULT_QOS): valid_qos_schema, vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean, }, @@ -295,6 +298,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: msg_topic: str | None = call.data.get(ATTR_TOPIC) msg_topic_template: str | None = call.data.get(ATTR_TOPIC_TEMPLATE) payload: PublishPayloadType = call.data.get(ATTR_PAYLOAD) + evaluate_payload: bool = call.data.get(ATTR_EVALUATE_PAYLOAD, False) payload_template: str | None = call.data.get(ATTR_PAYLOAD_TEMPLATE) qos: int = call.data[ATTR_QOS] retain: bool = call.data[ATTR_RETAIN] @@ -354,6 +358,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: payload = MqttCommandTemplate( template.Template(payload_template, hass) ).async_render() + elif evaluate_payload: + # Convert quoted binary literal to raw data + payload = convert_outgoing_mqtt_payload(payload) if TYPE_CHECKING: assert msg_topic is not None diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index f2b3165f66c..f7abbc29464 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -51,6 +51,22 @@ ATTR_THIS = "this" type PublishPayloadType = str | bytes | int | float | None +def convert_outgoing_mqtt_payload( + payload: PublishPayloadType, +) -> PublishPayloadType: + """Ensure correct raw MQTT payload is passed as bytes for publishing.""" + if isinstance(payload, str) and payload.startswith(("b'", 'b"')): + try: + native_object = literal_eval(payload) + except (ValueError, TypeError, SyntaxError, MemoryError): + pass + else: + if isinstance(native_object, bytes): + return native_object + + return payload + + @dataclass class PublishMessage: """MQTT Message for publishing.""" @@ -173,22 +189,6 @@ class MqttCommandTemplate: variables: TemplateVarsType = None, ) -> PublishPayloadType: """Render or convert the command template with given value or variables.""" - - def _convert_outgoing_payload( - 
payload: PublishPayloadType, - ) -> PublishPayloadType: - """Ensure correct raw MQTT payload is passed as bytes for publishing.""" - if isinstance(payload, str): - try: - native_object = literal_eval(payload) - if isinstance(native_object, bytes): - return native_object - - except (ValueError, TypeError, SyntaxError, MemoryError): - pass - - return payload - if self._command_template is None: return value @@ -210,7 +210,7 @@ class MqttCommandTemplate: self._command_template, ) try: - return _convert_outgoing_payload( + return convert_outgoing_mqtt_payload( self._command_template.async_render(values, parse_result=False) ) except TemplateError as exc: diff --git a/homeassistant/components/mqtt/services.yaml b/homeassistant/components/mqtt/services.yaml index ee5e4ff56e8..c5e4f372bd6 100644 --- a/homeassistant/components/mqtt/services.yaml +++ b/homeassistant/components/mqtt/services.yaml @@ -12,6 +12,11 @@ publish: example: "The temperature is {{ states('sensor.temperature') }}" selector: template: + evaluate_payload: + advanced: true + default: false + selector: + boolean: qos: advanced: true default: 0 diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 93131376154..c786d7e08a1 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -230,6 +230,10 @@ "name": "Publish", "description": "Publishes a message to an MQTT topic.", "fields": { + "evaluate_payload": { + "name": "Evaluate payload", + "description": "When `payload` is a Python bytes literal, evaluate the bytes literal and publish the raw data." + }, "topic": { "name": "Topic", "description": "Topic to publish to." diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index f495c5ca585..333960d8ad4 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -420,6 +420,74 @@ async def test_mqtt_publish_action_call_with_template_payload_renders_template( mqtt_mock.reset_mock() +@pytest.mark.parametrize( + ("attr_payload", "payload", "evaluate_payload", "literal_eval_calls"), + [ + ("b'\\xde\\xad\\xbe\\xef'", b"\xde\xad\xbe\xef", True, 1), + ("b'\\xde\\xad\\xbe\\xef'", "b'\\xde\\xad\\xbe\\xef'", False, 0), + ("DEADBEEF", "DEADBEEF", False, 0), + ( + "b'\\xde", + "b'\\xde", + True, + 1, + ), # Bytes literal is invalid, fall back to string + ], +) +async def test_mqtt_publish_action_call_with_raw_data( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + attr_payload: str, + payload: str | bytes, + evaluate_payload: bool, + literal_eval_calls: int, +) -> None: + """Test the mqtt publish action call raw data. + + When `payload` represents a `bytes` object, it should be published + as raw data if `evaluate_payload` is set. 
+ """ + mqtt_mock = await mqtt_mock_entry() + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, + }, + blocking=True, + ) + assert mqtt_mock.async_publish.called + assert mqtt_mock.async_publish.call_args[0][1] == payload + + with patch( + "homeassistant.components.mqtt.models.literal_eval" + ) as literal_eval_mock: + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + }, + blocking=True, + ) + literal_eval_mock.assert_not_called() + + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, + }, + blocking=True, + ) + assert len(literal_eval_mock.mock_calls) == literal_eval_calls + + # The use of a payload_template in an mqtt publish action call # has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_publish_action_call_with_bad_payload_template( From 799e95c1bdfa77ff80b23bfc6e3cdd39d14a7ec9 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 13:39:25 +0200 Subject: [PATCH 0795/1635] Do sanity check in EntityComponent.async_register_entity_service schema (#124029) * Do sanity check in EntityComponent.async_register_entity_service schema * Improve test --- homeassistant/helpers/entity_component.py | 15 +++++++++++++ tests/helpers/test_entity_component.py | 27 +++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index c8bcda0eef2..826693de133 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -11,6 +11,7 @@ from types import ModuleType from typing import Any, Generic from typing_extensions import TypeVar +import voluptuous as vol from homeassistant import config as conf_util from homeassistant.config_entries import ConfigEntry @@ -266,6 +267,20 @@ class EntityComponent(Generic[_EntityT]): """Register an entity service.""" if schema is None or isinstance(schema, dict): schema = cv.make_entity_service_schema(schema) + # Do a sanity check to check this is a valid entity service schema, + # the check could be extended to require All/Any to have sub schema(s) + # with all entity service fields + elif ( + # Don't check All/Any + not isinstance(schema, (vol.All, vol.Any)) + # Don't check All/Any wrapped in schema + and not isinstance(schema.schema, (vol.All, vol.Any)) + and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) + ): + raise HomeAssistantError( + "The schema does not include all required keys: " + f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" + ) service_func: str | HassJob[..., Any] service_func = func if isinstance(func, str) else HassJob(func) diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 0c09c9d75f7..230944e6a96 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -556,6 +556,33 @@ async def test_register_entity_service( assert len(calls) == 2 +async def test_register_entity_service_non_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test attempting to register a service with an incomplete schema.""" + component = EntityComponent(_LOGGER, DOMAIN, hass) + + with 
pytest.raises( + HomeAssistantError, + match=( + "The schema does not include all required keys: entity_id, device_id, area_id, " + "floor_id, label_id" + ), + ): + component.async_register_entity_service( + "hello", vol.Schema({"some": str}), Mock() + ) + + # The check currently does not recurse into vol.All or vol.Any allowing these + # non-compliatn schemas to pass + component.async_register_entity_service( + "hello", vol.All(vol.Schema({"some": str})), Mock() + ) + component.async_register_entity_service( + "hello", vol.Any(vol.Schema({"some": str})), Mock() + ) + + async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: """Test an entity service that does support response data.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") From ea52acd7bd3611e35a2dbe7c0d49fc1f6b4dc51e Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Fri, 16 Aug 2024 13:43:02 +0200 Subject: [PATCH 0796/1635] Fix loading KNX integration actions when not using YAML (#124027) * Fix loading KNX integration services when not using YAML * remove unnecessary comment * Remove unreachable test --- homeassistant/components/knx/__init__.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index fd46cad8489..a401ee2ccac 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -147,18 +147,10 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Start the KNX integration.""" hass.data[DATA_HASS_CONFIG] = config - conf: ConfigType | None = config.get(DOMAIN) - - if conf is None: - # If we have a config entry, setup is done by that config entry. - # If there is no config entry, this should fail. 
- return bool(hass.config_entries.async_entries(DOMAIN)) - - conf = dict(conf) - hass.data[DATA_KNX_CONFIG] = conf + if (conf := config.get(DOMAIN)) is not None: + hass.data[DATA_KNX_CONFIG] = dict(conf) register_knx_services(hass) - return True From 0a846cfca88cca1a8607a499733674c58172b0bd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 13:43:43 +0200 Subject: [PATCH 0797/1635] Add missing hass type in tests/test_util (#124043) --- tests/test_util/aiohttp.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index de1db0e4847..04d6db509e0 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -5,6 +5,7 @@ from collections.abc import Iterator from contextlib import contextmanager from http import HTTPStatus import re +from typing import Any from unittest import mock from urllib.parse import parse_qs @@ -19,6 +20,7 @@ from multidict import CIMultiDict from yarl import URL from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import HomeAssistant from homeassistant.helpers.json import json_dumps from homeassistant.util.json import json_loads @@ -301,7 +303,7 @@ def mock_aiohttp_client() -> Iterator[AiohttpClientMocker]: """Context manager to mock aiohttp client.""" mocker = AiohttpClientMocker() - def create_session(hass, *args, **kwargs): + def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: session = mocker.create_session(hass.loop) async def close_session(event): From 4cc4ec44b03fb7729d1ebf1576634b73cb51ff8e Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 16 Aug 2024 13:50:02 +0200 Subject: [PATCH 0798/1635] Exclude aiohappyeyeballs from license check (#124041) --- script/licenses.py | 1 + 1 file changed, 1 insertion(+) diff --git a/script/licenses.py b/script/licenses.py index 9d7da912398..0663821ed2c 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -125,6 +125,7 @@ EXCEPTIONS = { "PyXiaomiGateway", # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201 "aiocomelit", # https://github.com/chemelli74/aiocomelit/pull/138 "aioecowitt", # https://github.com/home-assistant-libs/aioecowitt/pull/180 + "aiohappyeyeballs", # Python-2.0.1 "aioopenexchangerates", # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94 "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 From 738cc5095d47c7559bb55879b6664a571c013037 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 16 Aug 2024 13:53:11 +0200 Subject: [PATCH 0799/1635] Bump openwebifpy to 4.2.7 (#123995) * Bump openwebifpy to 4.2.6 * Bump openwebifpy to 4.2.7 --------- Co-authored-by: J. 
Nick Koston --- homeassistant/components/enigma2/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enigma2/manifest.json b/homeassistant/components/enigma2/manifest.json index 538cfb56388..1a0875b04c0 100644 --- a/homeassistant/components/enigma2/manifest.json +++ b/homeassistant/components/enigma2/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["openwebif"], - "requirements": ["openwebifpy==4.2.5"] + "requirements": ["openwebifpy==4.2.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index f968b8addf5..dbd9aae5fda 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1505,7 +1505,7 @@ openhomedevice==2.2.0 opensensemap-api==0.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.5 +openwebifpy==4.2.7 # homeassistant.components.luci openwrt-luci-rpc==1.1.17 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7302bbe5bab..8cf132dd9a7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1241,7 +1241,7 @@ openerz-api==0.3.0 openhomedevice==2.2.0 # homeassistant.components.enigma2 -openwebifpy==4.2.5 +openwebifpy==4.2.7 # homeassistant.components.opower opower==0.6.0 From 69943af68ac842ba3e90371c4787c8a076fae545 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 14:06:35 +0200 Subject: [PATCH 0800/1635] Deduplicate async_register_entity_service (#124045) --- homeassistant/helpers/entity_component.py | 39 +++------------ homeassistant/helpers/entity_platform.py | 43 ++++------------- homeassistant/helpers/service.py | 58 ++++++++++++++++++++++- 3 files changed, 72 insertions(+), 68 deletions(-) diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 826693de133..76abb3020d1 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -5,13 +5,11 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Iterable from datetime import timedelta -from functools import partial import logging from types import ModuleType from typing import Any, Generic from typing_extensions import TypeVar -import voluptuous as vol from homeassistant import config as conf_util from homeassistant.config_entries import ConfigEntry @@ -265,39 +263,16 @@ class EntityComponent(Generic[_EntityT]): supports_response: SupportsResponse = SupportsResponse.NONE, ) -> None: """Register an entity service.""" - if schema is None or isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - # Do a sanity check to check this is a valid entity service schema, - # the check could be extended to require All/Any to have sub schema(s) - # with all entity service fields - elif ( - # Don't check All/Any - not isinstance(schema, (vol.All, vol.Any)) - # Don't check All/Any wrapped in schema - and not isinstance(schema.schema, (vol.All, vol.Any)) - and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) - ): - raise HomeAssistantError( - "The schema does not include all required keys: " - f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" - ) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - self.hass.services.async_register( + service.async_register_entity_service( + self.hass, self.domain, name, - partial( - service.entity_service_call, - self.hass, - 
self._entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, + entities=self._entities, + func=func, job_type=HassJobType.Coroutinefunction, + required_features=required_features, + schema=schema, + supports_response=supports_response, ) async def async_setup_platform( diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index ec177fbf316..ce107d63b73 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -6,12 +6,9 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from contextvars import ContextVar from datetime import timedelta -from functools import partial from logging import Logger, getLogger from typing import TYPE_CHECKING, Any, Protocol -import voluptuous as vol - from homeassistant import config_entries from homeassistant.const import ( ATTR_RESTORED, @@ -22,7 +19,6 @@ from homeassistant.core import ( CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, - HassJob, HomeAssistant, ServiceCall, SupportsResponse, @@ -43,7 +39,6 @@ from homeassistant.util.async_ import create_eager_task from homeassistant.util.hass_dict import HassKey from . import ( - config_validation as cv, device_registry as dev_reg, entity_registry as ent_reg, service, @@ -999,38 +994,16 @@ class EntityPlatform: if self.hass.services.has_service(self.platform_name, name): return - if schema is None or isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - # Do a sanity check to check this is a valid entity service schema, - # the check could be extended to require All/Any to have sub schema(s) - # with all entity service fields - elif ( - # Don't check All/Any - not isinstance(schema, (vol.All, vol.Any)) - # Don't check All/Any wrapped in schema - and not isinstance(schema.schema, (vol.All, vol.Any)) - and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) - ): - raise HomeAssistantError( - "The schema does not include all required keys: " - f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" - ) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - self.hass.services.async_register( + service.async_register_entity_service( + self.hass, self.platform_name, name, - partial( - service.entity_service_call, - self.hass, - self.domain_platform_entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, + entities=self.domain_platform_entities, + func=func, + job_type=None, + required_features=required_features, + schema=schema, + supports_response=supports_response, ) async def _async_update_entity_states(self) -> None: diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index be4974906bb..0551b5289c5 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -33,6 +33,7 @@ from homeassistant.core import ( Context, EntityServiceResponse, HassJob, + HassJobType, HomeAssistant, ServiceCall, ServiceResponse, @@ -63,7 +64,7 @@ from . 
import ( ) from .group import expand_entity_ids from .selector import TargetSelector -from .typing import ConfigType, TemplateVarsType, VolSchemaType +from .typing import ConfigType, TemplateVarsType, VolDictType, VolSchemaType if TYPE_CHECKING: from .entity import Entity @@ -1240,3 +1241,58 @@ class ReloadServiceHelper[_T]: self._service_running = False self._pending_reload_targets -= reload_targets self._service_condition.notify_all() + + +@callback +def async_register_entity_service( + hass: HomeAssistant, + domain: str, + name: str, + *, + entities: dict[str, Entity], + func: str | Callable[..., Any], + job_type: HassJobType | None, + required_features: Iterable[int] | None = None, + schema: VolDictType | VolSchemaType | None, + supports_response: SupportsResponse = SupportsResponse.NONE, +) -> None: + """Help registering an entity service. + + This is called by EntityComponent.async_register_entity_service and + EntityPlatform.async_register_entity_service and should not be called + directly by integrations. + """ + if schema is None or isinstance(schema, dict): + schema = cv.make_entity_service_schema(schema) + # Do a sanity check to check this is a valid entity service schema, + # the check could be extended to require All/Any to have sub schema(s) + # with all entity service fields + elif ( + # Don't check All/Any + not isinstance(schema, (vol.All, vol.Any)) + # Don't check All/Any wrapped in schema + and not isinstance(schema.schema, (vol.All, vol.Any)) + and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) + ): + raise HomeAssistantError( + "The schema does not include all required keys: " + f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" + ) + + service_func: str | HassJob[..., Any] + service_func = func if isinstance(func, str) else HassJob(func) + + hass.services.async_register( + domain, + name, + partial( + entity_service_call, + hass, + entities, + service_func, + required_features=required_features, + ), + schema, + supports_response, + job_type=job_type, + ) From f1b7847d1c1a6c72df564f2379a6332b93a75b06 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 14:09:09 +0200 Subject: [PATCH 0801/1635] Simplify cv._make_entity_service_schema (#124046) --- homeassistant/helpers/config_validation.py | 28 ++++++++++------------ 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index ed3eca6e316..6e9a6d5a69d 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1253,21 +1253,19 @@ TARGET_SERVICE_FIELDS = { _HAS_ENTITY_SERVICE_FIELD = has_at_least_one_key(*ENTITY_SERVICE_FIELDS) -def _make_entity_service_schema(schema: dict, extra: int) -> vol.Schema: +def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: """Create an entity service schema.""" - return vol.Schema( - vol.All( - vol.Schema( - { - # The frontend stores data here. Don't use in core. - vol.Remove("metadata"): dict, - **schema, - **ENTITY_SERVICE_FIELDS, - }, - extra=extra, - ), - _HAS_ENTITY_SERVICE_FIELD, - ) + return vol.All( + vol.Schema( + { + # The frontend stores data here. Don't use in core. 
+ vol.Remove("metadata"): dict, + **schema, + **ENTITY_SERVICE_FIELDS, + }, + extra=extra, + ), + _HAS_ENTITY_SERVICE_FIELD, ) @@ -1276,7 +1274,7 @@ BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) def make_entity_service_schema( schema: dict | None, *, extra: int = vol.PREVENT_EXTRA -) -> vol.Schema: +) -> VolSchemaType: """Create an entity service schema.""" if not schema and extra == vol.PREVENT_EXTRA: # If the schema is empty and we don't allow extra keys, we can return From c717e7a6f6ab11370eebe040a43ff2bdeb431964 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 16 Aug 2024 07:12:17 -0500 Subject: [PATCH 0802/1635] Bump bluetooth-adapters to 0.19.4 (#124018) Fixes a call to enumerate USB devices that did blocking I/O --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 95d2b171c9f..657209cdba0 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -16,7 +16,7 @@ "requirements": [ "bleak==0.22.2", "bleak-retry-connector==3.5.0", - "bluetooth-adapters==0.19.3", + "bluetooth-adapters==0.19.4", "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.19.4", "dbus-fast==2.22.1", diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 8cad4d2037a..900d4510c17 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -16,7 +16,7 @@ awesomeversion==24.6.0 bcrypt==4.1.3 bleak-retry-connector==3.5.0 bleak==0.22.2 -bluetooth-adapters==0.19.3 +bluetooth-adapters==0.19.4 bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.19.4 cached_ipaddress==0.3.0 diff --git a/requirements_all.txt b/requirements_all.txt index dbd9aae5fda..3e12a7c3c52 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -591,7 +591,7 @@ bluemaestro-ble==0.2.3 # bluepy==1.3.0 # homeassistant.components.bluetooth -bluetooth-adapters==0.19.3 +bluetooth-adapters==0.19.4 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8cf132dd9a7..a2fd97338f2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -515,7 +515,7 @@ bluecurrent-api==1.2.3 bluemaestro-ble==0.2.3 # homeassistant.components.bluetooth -bluetooth-adapters==0.19.3 +bluetooth-adapters==0.19.4 # homeassistant.components.bluetooth bluetooth-auto-recovery==1.4.2 From 14a3217d7e3b871b492184512d4e9b4b01826203 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 15:08:37 +0200 Subject: [PATCH 0803/1635] Improve entity platform tests (#124051) --- tests/helpers/test_entity_component.py | 2 +- tests/helpers/test_entity_platform.py | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 230944e6a96..5ce0292c2ec 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -574,7 +574,7 @@ async def test_register_entity_service_non_entity_service_schema( ) # The check currently does not recurse into vol.All or vol.Any allowing these - # non-compliatn schemas to pass + # non-compliant schemas to pass component.async_register_entity_service( "hello", 
vol.All(vol.Schema({"some": str})), Mock() ) diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 50180ecd844..fcbc825bbdc 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -1809,6 +1809,14 @@ async def test_register_entity_service_non_entity_service_schema( vol.Schema({"some": str}), Mock(), ) + # The check currently does not recurse into vol.All or vol.Any allowing these + # non-compliant schemas to pass + entity_platform.async_register_entity_service( + "hello", vol.All(vol.Schema({"some": str})), Mock() + ) + entity_platform.async_register_entity_service( + "hello", vol.Any(vol.Schema({"some": str})), Mock() + ) @pytest.mark.parametrize("update_before_add", [True, False]) From cb8a6af12d189eb97cd5f23c9c6f383759e831de Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 16 Aug 2024 09:03:24 -0500 Subject: [PATCH 0804/1635] Add additional blocking operations to loop protection (#124017) --- homeassistant/block_async_io.py | 55 +++++++++++++++++++++ tests/test_block_async_io.py | 88 +++++++++++++++++++++++++++++++++ 2 files changed, 143 insertions(+) diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 6ea0925574e..7a68b2515e9 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -8,6 +8,7 @@ import glob from http.client import HTTPConnection import importlib import os +from pathlib import Path from ssl import SSLContext import sys import threading @@ -162,6 +163,60 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = ( strict_core=False, skip_for_tests=True, ), + BlockingCall( + original_func=SSLContext.load_cert_chain, + object=SSLContext, + function="load_cert_chain", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.open, + object=Path, + function="open", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.read_text, + object=Path, + function="read_text", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.read_bytes, + object=Path, + function="read_bytes", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.write_text, + object=Path, + function="write_text", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.write_bytes, + object=Path, + function="write_bytes", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), ) diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index 78b8711310b..dc2b096f595 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -218,6 +218,17 @@ async def test_protect_loop_open(caplog: pytest.LogCaptureFixture) -> None: assert "Detected blocking call to open with args" not in caplog.text +async def test_protect_loop_path_open(caplog: pytest.LogCaptureFixture) -> None: + """Test opening a file in /proc is not reported.""" + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/proc/does_not_exist").open(encoding="utf8"), # noqa: ASYNC230 + ): + pass + assert "Detected blocking call to open with args" not in caplog.text + + async def test_protect_open(caplog: 
pytest.LogCaptureFixture) -> None: """Test opening a file in the event loop logs.""" with patch.object(block_async_io, "_IN_TESTS", False): @@ -231,6 +242,71 @@ async def test_protect_open(caplog: pytest.LogCaptureFixture) -> None: assert "Detected blocking call to open with args" in caplog.text +async def test_protect_path_open(caplog: pytest.LogCaptureFixture) -> None: + """Test opening a file in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").open(encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to open with args" in caplog.text + + +async def test_protect_path_read_bytes(caplog: pytest.LogCaptureFixture) -> None: + """Test reading file bytes in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").read_bytes(), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to read_bytes with args" in caplog.text + + +async def test_protect_path_read_text(caplog: pytest.LogCaptureFixture) -> None: + """Test reading a file text in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").read_text(encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to read_text with args" in caplog.text + + +async def test_protect_path_write_bytes(caplog: pytest.LogCaptureFixture) -> None: + """Test writing file bytes in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data/not/exist").write_bytes(b"xxx"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to write_bytes with args" in caplog.text + + +async def test_protect_path_write_text(caplog: pytest.LogCaptureFixture) -> None: + """Test writing file text in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data/not/exist").write_text("xxx", encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to write_text with args" in caplog.text + + async def test_enable_multiple_times(caplog: pytest.LogCaptureFixture) -> None: """Test trying to enable multiple times.""" with patch.object(block_async_io, "_IN_TESTS", False): @@ -354,6 +430,18 @@ async def test_protect_loop_load_verify_locations( assert "Detected blocking call to load_verify_locations" in caplog.text +async def test_protect_loop_load_cert_chain( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_cert_chain calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + with pytest.raises(OSError): + context.load_cert_chain("/dev/null") + assert "Detected blocking call to load_cert_chain" in caplog.text + + async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> None: """Test opening a file in tests is ignored.""" assert block_async_io._IN_TESTS From ea4443f79e0470651ea46207b473e0e63d521c99 Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Fri, 16 Aug 2024 16:12:15 +0200 Subject: [PATCH 0805/1635] Add statistics import to Ista EcoTrend integration (#118788) * Add statistics import to Ista EcoTrend integration * Use decorator for fixtures * define recorder as after_dependency * Increase test coverage * remember initial statistic_id * fix type checking --- .../components/ista_ecotrend/manifest.json | 1 + .../components/ista_ecotrend/sensor.py | 98 ++- tests/components/ista_ecotrend/conftest.py | 49 ++ .../snapshots/test_statistics.ambr | 609 ++++++++++++++++++ .../ista_ecotrend/test_statistics.py | 82 +++ 5 files changed, 837 insertions(+), 2 deletions(-) create mode 100644 tests/components/ista_ecotrend/snapshots/test_statistics.ambr create mode 100644 tests/components/ista_ecotrend/test_statistics.py diff --git a/homeassistant/components/ista_ecotrend/manifest.json b/homeassistant/components/ista_ecotrend/manifest.json index 23d60a0a5bb..baa5fbde9c0 100644 --- a/homeassistant/components/ista_ecotrend/manifest.json +++ b/homeassistant/components/ista_ecotrend/manifest.json @@ -1,6 +1,7 @@ { "domain": "ista_ecotrend", "name": "ista EcoTrend", + "after_dependencies": ["recorder"], "codeowners": ["@tr4nt0r"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ista_ecotrend", diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index 3ae2128e142..7aa1adfe4c9 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -2,10 +2,21 @@ from __future__ import annotations +import asyncio from dataclasses import dataclass +import datetime from enum import StrEnum import logging +from homeassistant.components.recorder.models.statistics import ( + StatisticData, + StatisticMetaData, +) +from homeassistant.components.recorder.statistics import ( + async_add_external_statistics, + get_instance, + get_last_statistics, +) from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -14,7 +25,11 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import UnitOfEnergy, UnitOfVolume from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.device_registry import ( + DeviceEntry, + DeviceEntryType, + DeviceInfo, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -22,7 +37,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import IstaConfigEntry from .const import DOMAIN from .coordinator import IstaCoordinator -from .util import IstaConsumptionType, IstaValueType, get_native_value +from .util import IstaConsumptionType, IstaValueType, get_native_value, get_statistics _LOGGER = logging.getLogger(__name__) @@ -155,6 +170,7 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): entity_description: IstaSensorEntityDescription _attr_has_entity_name = True + device_entry: DeviceEntry def __init__( self, @@ -186,3 +202,81 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): consumption_type=self.entity_description.consumption_type, value_type=self.entity_description.value_type, ) + + async def async_added_to_hass(self) -> None: + """When added to hass.""" + # perform initial statistics import when sensor is added, otherwise it would take + # 1 day when _handle_coordinator_update is triggered for the first time. + await self.update_statistics() + await super().async_added_to_hass() + + def _handle_coordinator_update(self) -> None: + """Handle coordinator update.""" + asyncio.run_coroutine_threadsafe(self.update_statistics(), self.hass.loop) + + async def update_statistics(self) -> None: + """Import ista EcoTrend historical statistics.""" + + # Remember the statistic_id that was initially created + # in case the entity gets renamed, because we cannot + # change the statistic_id + name = self.coordinator.config_entry.options.get( + f"lts_{self.entity_description.key}_{self.consumption_unit}" + ) + if not name: + name = self.entity_id.removeprefix("sensor.") + self.hass.config_entries.async_update_entry( + entry=self.coordinator.config_entry, + options={ + **self.coordinator.config_entry.options, + f"lts_{self.entity_description.key}_{self.consumption_unit}": name, + }, + ) + + statistic_id = f"{DOMAIN}:{name}" + statistics_sum = 0.0 + statistics_since = None + + last_stats = await get_instance(self.hass).async_add_executor_job( + get_last_statistics, + self.hass, + 1, + statistic_id, + False, + {"sum"}, + ) + + _LOGGER.debug("Last statistics: %s", last_stats) + + if last_stats: + statistics_sum = last_stats[statistic_id][0].get("sum") or 0.0 + statistics_since = datetime.datetime.fromtimestamp( + last_stats[statistic_id][0].get("end") or 0, tz=datetime.UTC + ) + datetime.timedelta(days=1) + + if monthly_consumptions := get_statistics( + self.coordinator.data[self.consumption_unit], + self.entity_description.consumption_type, + self.entity_description.value_type, + ): + statistics: list[StatisticData] = [ + { + "start": consumptions["date"], + "state": consumptions["value"], + "sum": (statistics_sum := statistics_sum + consumptions["value"]), + } + for consumptions in monthly_consumptions + if statistics_since is None or consumptions["date"] > statistics_since + ] + + metadata: StatisticMetaData = { + "has_mean": False, + "has_sum": True, + "name": f"{self.device_entry.name} {self.name}", + "source": DOMAIN, + "statistic_id": statistic_id, + "unit_of_measurement": self.entity_description.native_unit_of_measurement, + } + if statistics: + _LOGGER.debug("Insert statistics: %s %s", metadata, statistics) + async_add_external_statistics(self.hass, metadata, statistics) diff --git a/tests/components/ista_ecotrend/conftest.py b/tests/components/ista_ecotrend/conftest.py index cbbc166031d..7edf2e4717b 100644 --- a/tests/components/ista_ecotrend/conftest.py +++ b/tests/components/ista_ecotrend/conftest.py @@ -166,3 +166,52 @@ def get_consumption_data(obj_uuid: str | None = None) -> dict[str, 
Any]: }, ], } + + +def extend_statistics(obj_uuid: str | None = None) -> dict[str, Any]: + """Extend statistics data with new values.""" + stats = get_consumption_data(obj_uuid) + + stats["costs"].insert( + 0, + { + "date": {"month": 6, "year": 2024}, + "costsByEnergyType": [ + { + "type": "heating", + "value": 9000, + }, + { + "type": "warmwater", + "value": 9000, + }, + { + "type": "water", + "value": 9000, + }, + ], + }, + ) + stats["consumptions"].insert( + 0, + { + "date": {"month": 6, "year": 2024}, + "readings": [ + { + "type": "heating", + "value": "9000", + "additionalValue": "9000,0", + }, + { + "type": "warmwater", + "value": "9999,0", + "additionalValue": "90000,0", + }, + { + "type": "water", + "value": "9000,0", + }, + ], + }, + ) + return stats diff --git a/tests/components/ista_ecotrend/snapshots/test_statistics.ambr b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr new file mode 100644 index 00000000000..78ecd6a6b6b --- /dev/null +++ b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr @@ -0,0 +1,609 @@ +# serializer version: 1 +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9083.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_3months] + list([ + dict({ + 'end': 
1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9999.0, + 'sum': 10001.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9014.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 90000.0, + 'sum': 90118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9011.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9005.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 
'state': 35.0, + 'sum': 139.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9083.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9999.0, + 'sum': 10001.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9014.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 
1717225200.0, + 'state': 90000.0, + 'sum': 90118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9011.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9005.0, + }), + ]) +# --- diff --git a/tests/components/ista_ecotrend/test_statistics.py b/tests/components/ista_ecotrend/test_statistics.py new file mode 100644 index 00000000000..6b2f98affe9 --- /dev/null +++ b/tests/components/ista_ecotrend/test_statistics.py @@ -0,0 +1,82 @@ +"""Tests for the ista EcoTrend Statistics import.""" + +import datetime +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.recorder.statistics import statistics_during_period +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import extend_statistics + +from tests.common import MockConfigEntry +from tests.components.recorder.common import async_wait_recording_done + + +@pytest.mark.usefixtures("recorder_mock", "entity_registry_enabled_by_default") +async def test_statistics_import( + hass: HomeAssistant, + ista_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_ista: MagicMock, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test setup of ista EcoTrend sensor platform.""" + + ista_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(ista_config_entry.entry_id) + await hass.async_block_till_done() + + assert ista_config_entry.state is ConfigEntryState.LOADED + entities = er.async_entries_for_config_entry( + entity_registry, ista_config_entry.entry_id + ) + await async_wait_recording_done(hass) + + # Test that consumption statistics for 2 months have been added + for entity in entities: + statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" + stats = await hass.async_add_executor_job( + statistics_during_period, + hass, + datetime.datetime.fromtimestamp(0, tz=datetime.UTC), + None, + {statistic_id}, + "month", + None, + {"state", "sum"}, + ) + assert stats[statistic_id] == snapshot(name=f"{statistic_id}_2months") + assert len(stats[statistic_id]) == 2 + + # Add another monthly consumption 
and forward + # 1 day and test if the new values have been + # appended to the statistics + mock_ista.get_consumption_data = extend_statistics + + freezer.tick(datetime.timedelta(days=1)) + await async_wait_recording_done(hass) + freezer.tick(datetime.timedelta(days=1)) + await async_wait_recording_done(hass) + + for entity in entities: + statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" + stats = await hass.async_add_executor_job( + statistics_during_period, + hass, + datetime.datetime.fromtimestamp(0, tz=datetime.UTC), + None, + {statistic_id}, + "month", + None, + {"state", "sum"}, + ) + assert stats[statistic_id] == snapshot(name=f"{statistic_id}_3months") + + assert len(stats[statistic_id]) == 3 From 0cb0af496e9ca5284c6a296f26203cba0a255f91 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 16 Aug 2024 16:46:58 +0200 Subject: [PATCH 0806/1635] Re-enable concord232 (#124000) --- homeassistant/components/concord232/alarm_control_panel.py | 3 +-- homeassistant/components/concord232/binary_sensor.py | 3 +-- homeassistant/components/concord232/manifest.json | 3 +-- homeassistant/components/concord232/ruff.toml | 5 ----- requirements_all.txt | 3 +++ 5 files changed, 6 insertions(+), 11 deletions(-) delete mode 100644 homeassistant/components/concord232/ruff.toml diff --git a/homeassistant/components/concord232/alarm_control_panel.py b/homeassistant/components/concord232/alarm_control_panel.py index d3bafdeba4a..661a2beacc0 100644 --- a/homeassistant/components/concord232/alarm_control_panel.py +++ b/homeassistant/components/concord232/alarm_control_panel.py @@ -1,12 +1,11 @@ """Support for Concord232 alarm control panels.""" -# mypy: ignore-errors from __future__ import annotations import datetime import logging -# from concord232 import client as concord232_client +from concord232 import client as concord232_client import requests import voluptuous as vol diff --git a/homeassistant/components/concord232/binary_sensor.py b/homeassistant/components/concord232/binary_sensor.py index 588e7681746..a1dcbc222f7 100644 --- a/homeassistant/components/concord232/binary_sensor.py +++ b/homeassistant/components/concord232/binary_sensor.py @@ -1,12 +1,11 @@ """Support for exposing Concord232 elements as sensors.""" -# mypy: ignore-errors from __future__ import annotations import datetime import logging -# from concord232 import client as concord232_client +from concord232 import client as concord232_client import requests import voluptuous as vol diff --git a/homeassistant/components/concord232/manifest.json b/homeassistant/components/concord232/manifest.json index ef075ba5f96..e0aea5d64d9 100644 --- a/homeassistant/components/concord232/manifest.json +++ b/homeassistant/components/concord232/manifest.json @@ -2,9 +2,8 @@ "domain": "concord232", "name": "Concord232", "codeowners": [], - "disabled": "This integration is disabled because it uses non-open source code to operate.", "documentation": "https://www.home-assistant.io/integrations/concord232", "iot_class": "local_polling", "loggers": ["concord232", "stevedore"], - "requirements": ["concord232==0.15"] + "requirements": ["concord232==0.15.1"] } diff --git a/homeassistant/components/concord232/ruff.toml b/homeassistant/components/concord232/ruff.toml deleted file mode 100644 index 38f6f586aef..00000000000 --- a/homeassistant/components/concord232/ruff.toml +++ /dev/null @@ -1,5 +0,0 @@ -extend = "../../../pyproject.toml" - -lint.extend-ignore = [ - "F821" -] diff --git a/requirements_all.txt b/requirements_all.txt 
index 3e12a7c3c52..8a73d65eadb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -672,6 +672,9 @@ colorlog==6.8.2 # homeassistant.components.color_extractor colorthief==0.2.1 +# homeassistant.components.concord232 +concord232==0.15.1 + # homeassistant.components.upc_connect connect-box==0.3.1 From e8d57bf63668d3ef6a572cb68f290e3ee03f676c Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 16 Aug 2024 16:48:33 +0200 Subject: [PATCH 0807/1635] Bump aiomealie to 0.8.1 (#124047) --- homeassistant/components/mealie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index acfe30aecaa..75093577b0f 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.8.0"] + "requirements": ["aiomealie==0.8.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 8a73d65eadb..522ee846847 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -288,7 +288,7 @@ aiolookin==1.0.0 aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.8.0 +aiomealie==0.8.1 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a2fd97338f2..31e448dbbe4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -270,7 +270,7 @@ aiolookin==1.0.0 aiolyric==1.1.0 # homeassistant.components.mealie -aiomealie==0.8.0 +aiomealie==0.8.1 # homeassistant.components.modern_forms aiomodernforms==0.1.8 From c8b0c939e42ae4b10a9a739853b624f6649f6386 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 16 Aug 2024 09:48:59 -0500 Subject: [PATCH 0808/1635] Ensure event entities are allowed for linked homekit config via YAML (#123994) --- homeassistant/components/homekit/util.py | 7 +++- tests/components/homekit/test_util.py | 50 ++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/homekit/util.py b/homeassistant/components/homekit/util.py index a4566efaa35..4d4620477cb 100644 --- a/homeassistant/components/homekit/util.py +++ b/homeassistant/components/homekit/util.py @@ -22,6 +22,7 @@ from homeassistant.components import ( sensor, ) from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN +from homeassistant.components.event import DOMAIN as EVENT_DOMAIN from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.components.media_player import ( DOMAIN as MEDIA_PLAYER_DOMAIN, @@ -167,9 +168,11 @@ CAMERA_SCHEMA = BASIC_INFO_SCHEMA.extend( vol.Optional( CONF_VIDEO_PACKET_SIZE, default=DEFAULT_VIDEO_PACKET_SIZE ): cv.positive_int, - vol.Optional(CONF_LINKED_MOTION_SENSOR): cv.entity_domain(binary_sensor.DOMAIN), + vol.Optional(CONF_LINKED_MOTION_SENSOR): cv.entity_domain( + [binary_sensor.DOMAIN, EVENT_DOMAIN] + ), vol.Optional(CONF_LINKED_DOORBELL_SENSOR): cv.entity_domain( - binary_sensor.DOMAIN + [binary_sensor.DOMAIN, EVENT_DOMAIN] ), } ) diff --git a/tests/components/homekit/test_util.py b/tests/components/homekit/test_util.py index 4939511166f..7f7e3ee0ce0 100644 --- a/tests/components/homekit/test_util.py +++ b/tests/components/homekit/test_util.py @@ -7,13 +7,38 @@ import voluptuous as vol from homeassistant.components.homekit.const import ( BRIDGE_NAME, + CONF_AUDIO_CODEC, + CONF_AUDIO_MAP, + CONF_AUDIO_PACKET_SIZE, CONF_FEATURE, CONF_FEATURE_LIST, CONF_LINKED_BATTERY_SENSOR, + CONF_LINKED_DOORBELL_SENSOR, + CONF_LINKED_MOTION_SENSOR, CONF_LOW_BATTERY_THRESHOLD, + CONF_MAX_FPS, + CONF_MAX_HEIGHT, + CONF_MAX_WIDTH, + CONF_STREAM_COUNT, + CONF_SUPPORT_AUDIO, CONF_THRESHOLD_CO, CONF_THRESHOLD_CO2, + CONF_VIDEO_CODEC, + CONF_VIDEO_MAP, + CONF_VIDEO_PACKET_SIZE, + DEFAULT_AUDIO_CODEC, + DEFAULT_AUDIO_MAP, + DEFAULT_AUDIO_PACKET_SIZE, DEFAULT_CONFIG_FLOW_PORT, + DEFAULT_LOW_BATTERY_THRESHOLD, + DEFAULT_MAX_FPS, + DEFAULT_MAX_HEIGHT, + DEFAULT_MAX_WIDTH, + DEFAULT_STREAM_COUNT, + DEFAULT_SUPPORT_AUDIO, + DEFAULT_VIDEO_CODEC, + DEFAULT_VIDEO_MAP, + DEFAULT_VIDEO_PACKET_SIZE, DOMAIN, FEATURE_ON_OFF, FEATURE_PLAY_PAUSE, @@ -178,6 +203,31 @@ def test_validate_entity_config() -> None: assert vec({"sensor.co2": {CONF_THRESHOLD_CO2: 500}}) == { "sensor.co2": {CONF_THRESHOLD_CO2: 500, CONF_LOW_BATTERY_THRESHOLD: 20} } + assert vec( + { + "camera.demo": { + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + CONF_LINKED_MOTION_SENSOR: "event.motion", + } + } + ) == { + "camera.demo": { + CONF_LINKED_DOORBELL_SENSOR: "event.doorbell", + CONF_LINKED_MOTION_SENSOR: "event.motion", + CONF_AUDIO_CODEC: DEFAULT_AUDIO_CODEC, + CONF_SUPPORT_AUDIO: DEFAULT_SUPPORT_AUDIO, + CONF_MAX_WIDTH: DEFAULT_MAX_WIDTH, + CONF_MAX_HEIGHT: DEFAULT_MAX_HEIGHT, + CONF_MAX_FPS: DEFAULT_MAX_FPS, + CONF_AUDIO_MAP: DEFAULT_AUDIO_MAP, + CONF_VIDEO_MAP: DEFAULT_VIDEO_MAP, + CONF_STREAM_COUNT: DEFAULT_STREAM_COUNT, + CONF_VIDEO_CODEC: DEFAULT_VIDEO_CODEC, + CONF_AUDIO_PACKET_SIZE: DEFAULT_AUDIO_PACKET_SIZE, + CONF_VIDEO_PACKET_SIZE: DEFAULT_VIDEO_PACKET_SIZE, + CONF_LOW_BATTERY_THRESHOLD: DEFAULT_LOW_BATTERY_THRESHOLD, + } + } def test_validate_media_player_features() -> None: From 
06209dd94c262eb4b2e13800b5e69d7e53b256ff Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 16 Aug 2024 16:54:20 +0200 Subject: [PATCH 0809/1635] Bump ruff to 0.6.0 (#123985) --- .pre-commit-config.yaml | 2 +- homeassistant/components/environment_canada/config_flow.py | 4 ++-- homeassistant/components/environment_canada/coordinator.py | 4 ++-- requirements_test_pre_commit.txt | 2 +- tests/components/environment_canada/test_config_flow.py | 4 ++-- tests/components/lg_netcast/__init__.py | 4 ++-- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f057931e2a8..4c3ce7fe104 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.7 + rev: v0.6.0 hooks: - id: ruff args: diff --git a/homeassistant/components/environment_canada/config_flow.py b/homeassistant/components/environment_canada/config_flow.py index a351bb0ef06..79b37c64c1b 100644 --- a/homeassistant/components/environment_canada/config_flow.py +++ b/homeassistant/components/environment_canada/config_flow.py @@ -1,7 +1,7 @@ """Config flow for Environment Canada integration.""" import logging -import xml.etree.ElementTree as et +import xml.etree.ElementTree as ET import aiohttp from env_canada import ECWeather, ec_exc @@ -52,7 +52,7 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN): if user_input is not None: try: info = await validate_input(user_input) - except (et.ParseError, vol.MultipleInvalid, ec_exc.UnknownStationId): + except (ET.ParseError, vol.MultipleInvalid, ec_exc.UnknownStationId): errors["base"] = "bad_station_id" except aiohttp.ClientConnectionError: errors["base"] = "cannot_connect" diff --git a/homeassistant/components/environment_canada/coordinator.py b/homeassistant/components/environment_canada/coordinator.py index e17c360e3fb..8e77b309c78 100644 --- a/homeassistant/components/environment_canada/coordinator.py +++ b/homeassistant/components/environment_canada/coordinator.py @@ -1,7 +1,7 @@ """Coordinator for the Environment Canada (EC) component.""" import logging -import xml.etree.ElementTree as et +import xml.etree.ElementTree as ET from env_canada import ec_exc @@ -27,6 +27,6 @@ class ECDataUpdateCoordinator(DataUpdateCoordinator): """Fetch data from EC.""" try: await self.ec_data.update() - except (et.ParseError, ec_exc.UnknownStationId) as ex: + except (ET.ParseError, ec_exc.UnknownStationId) as ex: raise UpdateFailed(f"Error fetching {self.name} data: {ex}") from ex return self.ec_data diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index ba54a19da3e..091f872d511 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.5.7 +ruff==0.6.0 yamllint==1.35.1 diff --git a/tests/components/environment_canada/test_config_flow.py b/tests/components/environment_canada/test_config_flow.py index f2c35ab4295..d61966e8da1 100644 --- a/tests/components/environment_canada/test_config_flow.py +++ b/tests/components/environment_canada/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Environment Canada (EC) config flow.""" from unittest.mock import AsyncMock, MagicMock, Mock, patch -import xml.etree.ElementTree as et +import xml.etree.ElementTree as ET import aiohttp import pytest @@ -94,7 +94,7 @@ async def test_create_same_entry_twice(hass: 
HomeAssistant) -> None: (aiohttp.ClientResponseError(Mock(), (), status=404), "bad_station_id"), (aiohttp.ClientResponseError(Mock(), (), status=400), "error_response"), (aiohttp.ClientConnectionError, "cannot_connect"), - (et.ParseError, "bad_station_id"), + (ET.ParseError, "bad_station_id"), (ValueError, "unknown"), ], ) diff --git a/tests/components/lg_netcast/__init__.py b/tests/components/lg_netcast/__init__.py index ce3e09aeb65..6e608ae207b 100644 --- a/tests/components/lg_netcast/__init__.py +++ b/tests/components/lg_netcast/__init__.py @@ -1,7 +1,7 @@ """Tests for LG Netcast TV.""" from unittest.mock import patch -from xml.etree import ElementTree +import xml.etree.ElementTree as ET from pylgnetcast import AccessTokenError, LgNetCastClient, SessionIdError import requests @@ -56,7 +56,7 @@ def _patched_lgnetcast_client( if always_404: return None if invalid_details: - raise ElementTree.ParseError("Mocked Parsed Error") + raise ET.ParseError("Mocked Parsed Error") return { "uuid": UNIQUE_ID if not no_unique_id else None, "model_name": MODEL_NAME, From 115c5d1704a04ce90cc3c7c95bdfbceb99809511 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 16:59:33 +0200 Subject: [PATCH 0810/1635] Fix threading in get_test_home_assistant test helper (#124056) --- tests/common.py | 42 +++++++++++++++++++++++++++++------------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/tests/common.py b/tests/common.py index d36df509142..684b9eb0433 100644 --- a/tests/common.py +++ b/tests/common.py @@ -13,7 +13,12 @@ from collections.abc import ( Mapping, Sequence, ) -from contextlib import asynccontextmanager, contextmanager, suppress +from contextlib import ( + AbstractAsyncContextManager, + asynccontextmanager, + contextmanager, + suppress, +) from datetime import UTC, datetime, timedelta from enum import Enum import functools as ft @@ -177,24 +182,36 @@ def get_test_config_dir(*add_path): @contextmanager def get_test_home_assistant() -> Generator[HomeAssistant]: """Return a Home Assistant object pointing at test config directory.""" - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - context_manager = async_test_home_assistant(loop) - hass = loop.run_until_complete(context_manager.__aenter__()) - + hass_created_event = threading.Event() loop_stop_event = threading.Event() + context_manager: AbstractAsyncContextManager = None + hass: HomeAssistant = None + loop: asyncio.AbstractEventLoop = None + orig_stop: Callable = None + def run_loop() -> None: - """Run event loop.""" + """Create and run event loop.""" + nonlocal context_manager, hass, loop, orig_stop + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + context_manager = async_test_home_assistant(loop) + hass = loop.run_until_complete(context_manager.__aenter__()) + + orig_stop = hass.stop + hass._stopped = Mock(set=loop.stop) + hass.start = start_hass + hass.stop = stop_hass loop._thread_ident = threading.get_ident() + + hass_created_event.set() + hass.loop_thread_id = loop._thread_ident loop.run_forever() loop_stop_event.set() - orig_stop = hass.stop - hass._stopped = Mock(set=loop.stop) - def start_hass(*mocks: Any) -> None: """Start hass.""" asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result() @@ -204,11 +221,10 @@ def get_test_home_assistant() -> Generator[HomeAssistant]: orig_stop() loop_stop_event.wait() - hass.start = start_hass - hass.stop = stop_hass - threading.Thread(name="LoopThread", target=run_loop, daemon=False).start() + hass_created_event.wait() + try: 
yield hass finally: From 2cd44567627c5e7ba44baf28191c76512d856b08 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:07:57 +0200 Subject: [PATCH 0811/1635] Add missing hass type hint in component tests (a) (#124059) --- tests/components/advantage_air/__init__.py | 3 +- tests/components/airthings_ble/__init__.py | 3 +- .../components/alarm_control_panel/common.py | 29 ++++++++++++++----- tests/components/apache_kafka/test_init.py | 11 ++++--- tests/components/auth/test_init.py | 3 +- tests/components/auth/test_init_link_user.py | 5 +++- 6 files changed, 39 insertions(+), 15 deletions(-) diff --git a/tests/components/advantage_air/__init__.py b/tests/components/advantage_air/__init__.py index 05d98e957bb..5587c668c7e 100644 --- a/tests/components/advantage_air/__init__.py +++ b/tests/components/advantage_air/__init__.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, patch from homeassistant.components.advantage_air.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_object_fixture @@ -43,7 +44,7 @@ def patch_update(return_value=True, side_effect=None): ) -async def add_mock_config(hass): +async def add_mock_config(hass: HomeAssistant) -> MockConfigEntry: """Create a fake Advantage Air Config Entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/airthings_ble/__init__.py b/tests/components/airthings_ble/__init__.py index 45521903a08..a736fa979e9 100644 --- a/tests/components/airthings_ble/__init__.py +++ b/tests/components/airthings_ble/__init__.py @@ -13,6 +13,7 @@ from airthings_ble import ( from homeassistant.components.airthings_ble.const import DOMAIN from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceRegistry from tests.common import MockConfigEntry, MockEntity @@ -225,7 +226,7 @@ VOC_V3 = MockEntity( ) -def create_entry(hass): +def create_entry(hass: HomeAssistant) -> MockConfigEntry: """Create a config entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index 9ec419d8cf0..36e9918f54c 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -27,11 +27,14 @@ from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) +from homeassistant.core import HomeAssistant from tests.common import MockEntity -async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_disarm( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -42,7 +45,9 @@ async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_DISARM, data, blocking=True) -async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_home( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -53,7 +58,9 @@ async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): await 
hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_HOME, data, blocking=True) -async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_away( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -64,7 +71,9 @@ async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data, blocking=True) -async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_night( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -75,7 +84,9 @@ async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_NIGHT, data, blocking=True) -async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_vacation( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for vacation mode.""" data = {} if code: @@ -88,7 +99,9 @@ async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): ) -async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_trigger( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -99,7 +112,9 @@ async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_TRIGGER, data, blocking=True) -async def async_alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_custom_bypass( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: diff --git a/tests/components/apache_kafka/test_init.py b/tests/components/apache_kafka/test_init.py index 2b702046054..cffe08ffd4a 100644 --- a/tests/components/apache_kafka/test_init.py +++ b/tests/components/apache_kafka/test_init.py @@ -3,8 +3,9 @@ from __future__ import annotations from asyncio import AbstractEventLoop -from collections.abc import Callable +from collections.abc import Callable, Generator from dataclasses import dataclass +from typing import Any from unittest.mock import patch import pytest @@ -41,7 +42,7 @@ class MockKafkaClient: @pytest.fixture(name="mock_client") -def mock_client_fixture(): +def mock_client_fixture() -> Generator[MockKafkaClient]: """Mock the apache kafka client.""" with ( patch(f"{PRODUCER_PATH}.start") as start, @@ -89,7 +90,7 @@ async def test_full_config(hass: HomeAssistant, mock_client: MockKafkaClient) -> mock_client.start.assert_called_once() -async def _setup(hass, filter_config): +async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: """Shared set up for filtering tests.""" config = {apache_kafka.DOMAIN: {"filter": filter_config}} config[apache_kafka.DOMAIN].update(MIN_CONFIG) @@ -98,7 +99,9 @@ async def _setup(hass, filter_config): await hass.async_block_till_done() -async def _run_filter_tests(hass, tests, mock_client): +async def _run_filter_tests( + hass: HomeAssistant, tests: list[FilterTest], mock_client: MockKafkaClient +) -> None: """Run a series of 
filter tests on apache kafka.""" for test in tests: hass.states.async_set(test.id, STATE_ON) diff --git a/tests/components/auth/test_init.py b/tests/components/auth/test_init.py index 0f4908c2fc0..718bb369b53 100644 --- a/tests/components/auth/test_init.py +++ b/tests/components/auth/test_init.py @@ -13,6 +13,7 @@ from homeassistant.auth.models import ( TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN, TOKEN_TYPE_NORMAL, Credentials, + RefreshToken, ) from homeassistant.components import auth from homeassistant.core import HomeAssistant @@ -37,7 +38,7 @@ def mock_credential(): ) -async def async_setup_user_refresh_token(hass): +async def async_setup_user_refresh_token(hass: HomeAssistant) -> RefreshToken: """Create a testing user with a connected credential.""" user = await hass.auth.async_create_user("Test User") diff --git a/tests/components/auth/test_init_link_user.py b/tests/components/auth/test_init_link_user.py index d1a5fa51af2..a8f04c2720d 100644 --- a/tests/components/auth/test_init_link_user.py +++ b/tests/components/auth/test_init_link_user.py @@ -1,6 +1,7 @@ """Tests for the link user flow.""" from http import HTTPStatus +from typing import Any from unittest.mock import patch from homeassistant.core import HomeAssistant @@ -11,7 +12,9 @@ from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI from tests.typing import ClientSessionGenerator -async def async_get_code(hass, aiohttp_client): +async def async_get_code( + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator +) -> dict[str, Any]: """Return authorization code for link user tests.""" config = [ { From 56b4a7f2917a7f089de3558730acdf5861381a79 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:09:12 +0200 Subject: [PATCH 0812/1635] Add missing hass type in tests/helpers (#124039) --- .../helpers/test_config_entry_oauth2_flow.py | 4 +- tests/helpers/test_discovery.py | 15 ++- tests/helpers/test_entity.py | 23 +++-- tests/helpers/test_entity_platform.py | 93 +++++++++++++------ tests/helpers/test_integration_platform.py | 25 +++-- tests/helpers/test_significant_change.py | 13 ++- tests/helpers/test_singleton.py | 10 +- tests/helpers/test_start.py | 24 ++--- 8 files changed, 146 insertions(+), 61 deletions(-) diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 23919f3a6a3..52def52f3f0 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -873,7 +873,9 @@ async def test_implementation_provider(hass: HomeAssistant, local_impl) -> None: provider_source = [] - async def async_provide_implementation(hass, domain): + async def async_provide_implementation( + hass: HomeAssistant, domain: str + ) -> list[config_entry_oauth2_flow.AbstractOAuth2Implementation]: """Mock implementation provider.""" return provider_source diff --git a/tests/helpers/test_discovery.py b/tests/helpers/test_discovery.py index 100b50e2749..a66ac7474e3 100644 --- a/tests/helpers/test_discovery.py +++ b/tests/helpers/test_discovery.py @@ -9,6 +9,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockModule, MockPlatform, mock_integration, mock_platform @@ -115,7 +117,7 
@@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls = [] platform_calls = [] - def component_setup(hass, config): + def component_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" discovery.load_platform( hass, Platform.SWITCH, "test_circular", {"key": "value"}, config @@ -123,7 +125,12 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls.append(1) return True - def setup_platform(hass, config, add_entities_callback, discovery_info=None): + def setup_platform( + hass: HomeAssistant, + config: ConfigType, + add_entities_callback: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, + ) -> None: """Set up mock platform.""" platform_calls.append("disc" if discovery_info else "component") @@ -162,14 +169,14 @@ async def test_1st_discovers_2nd_component(hass: HomeAssistant) -> None: """ component_calls = [] - async def component1_setup(hass, config): + async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} ) return True - def component2_setup(hass, config): + def component2_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" component_calls.append(1) return True diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 283a5b4fb37..58554059fb4 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -16,6 +16,7 @@ import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_DEVICE_CLASS, @@ -34,6 +35,7 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, UndefinedType from tests.common import ( @@ -981,10 +983,13 @@ async def _test_friendly_name( ) -> None: """Test friendly name.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([ent]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1306,10 +1311,13 @@ async def test_entity_name_translation_placeholder_errors( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([ent]) - return True ent = MockEntity( unique_id="qwer", @@ -1531,7 +1539,11 @@ async def test_friendly_name_updated( ) -> None: """Test friendly name is updated when device or entity registry updates.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" 
async_add_entities( [ @@ -1547,7 +1559,6 @@ async def test_friendly_name_updated( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index fcbc825bbdc..2cc3348626c 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -10,6 +10,7 @@ from unittest.mock import ANY, AsyncMock, Mock, patch import pytest import voluptuous as vol +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE, EntityCategory from homeassistant.core import ( CoreState, @@ -33,6 +34,7 @@ from homeassistant.helpers.entity_component import ( DEFAULT_SCAN_INTERVAL, EntityComponent, ) +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util @@ -855,10 +857,13 @@ async def test_setup_entry( ) -> None: """Test we can setup an entry.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([MockEntity(name="test1", unique_id="unique")]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1138,7 +1143,11 @@ async def test_device_info_called( model="via", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1163,7 +1172,6 @@ async def test_device_info_called( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1207,7 +1215,11 @@ async def test_device_info_not_overrides( assert device.manufacturer == "test-manufacturer" assert device.model == "test-model" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1222,7 +1234,6 @@ async def test_device_info_not_overrides( ) ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1257,7 +1268,11 @@ async def test_device_info_homeassistant_url( model="via", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1271,7 +1286,6 @@ async def test_device_info_homeassistant_url( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1306,7 +1320,11 @@ async def test_device_info_change_to_no_url( configuration_url="homeassistant://config/mqtt", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup 
entry method.""" async_add_entities( [ @@ -1320,7 +1338,6 @@ async def test_device_info_change_to_no_url( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1375,10 +1392,13 @@ async def test_entity_disabled_by_device( unique_id="disabled", device_info=DeviceInfo(connections=connections) ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([entity_disabled]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id", domain=DOMAIN) @@ -1855,13 +1875,16 @@ async def test_setup_entry_with_entities_that_block_forever( ) -> None: """Test we cancel adding entities when we reach the timeout.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [MockBlockingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1900,13 +1923,16 @@ async def test_cancellation_is_not_blocked( ) -> None: """Test cancellation is not blocked while adding entities.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [MockCancellingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1994,7 +2020,11 @@ async def test_entity_name_influences_entity_id( ) -> None: """Test entity_id is influenced by entity name.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -2011,7 +2041,6 @@ async def test_entity_name_influences_entity_id( ], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2079,12 +2108,15 @@ async def test_translated_entity_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [TranslatedEntity(has_entity_name)], update_before_add=update_before_add ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2164,10 +2196,13 @@ async def test_translated_device_class_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, 
async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([TranslatedDeviceClassEntity(device_class, has_entity_name)]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2223,10 +2258,13 @@ async def test_device_name_defaulting_config_entry( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(title=config_entry_title, entry_id="super-mock-id") @@ -2276,10 +2314,13 @@ async def test_device_type_error_checking( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry( diff --git a/tests/helpers/test_integration_platform.py b/tests/helpers/test_integration_platform.py index 497bae5fb88..93bfeb2da5b 100644 --- a/tests/helpers/test_integration_platform.py +++ b/tests/helpers/test_integration_platform.py @@ -2,6 +2,7 @@ from collections.abc import Callable from types import ModuleType +from typing import Any from unittest.mock import Mock, patch import pytest @@ -29,7 +30,9 @@ async def test_process_integration_platforms_with_wait(hass: HomeAssistant) -> N processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -67,7 +70,9 @@ async def test_process_integration_platforms(hass: HomeAssistant) -> None: processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -107,7 +112,9 @@ async def test_process_integration_platforms_import_fails( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -150,7 +157,9 @@ async def test_process_integration_platforms_import_fails_after_registered( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -242,7 +251,9 @@ async def test_broken_integration( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -265,7 +276,9 @@ async def test_process_integration_platforms_no_integrations( processed = [] - async def 
_process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) diff --git a/tests/helpers/test_significant_change.py b/tests/helpers/test_significant_change.py index f9dca5b6034..577ea5907e5 100644 --- a/tests/helpers/test_significant_change.py +++ b/tests/helpers/test_significant_change.py @@ -1,5 +1,8 @@ """Test significant change helper.""" +from types import MappingProxyType +from typing import Any + import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -67,8 +70,14 @@ async def test_significant_change_extra( assert checker.async_is_significant_change(State(ent_id, "100", attrs), extra_arg=1) def extra_significant_check( - hass, old_state, old_attrs, old_extra_arg, new_state, new_attrs, new_extra_arg - ): + hass: HomeAssistant, + old_state: str, + old_attrs: dict | MappingProxyType, + old_extra_arg: Any, + new_state: str, + new_attrs: dict | MappingProxyType, + new_extra_arg: Any, + ) -> bool | None: return old_extra_arg != new_extra_arg checker.extra_significant_check = extra_significant_check diff --git a/tests/helpers/test_singleton.py b/tests/helpers/test_singleton.py index dcda1e2db3a..4722c58dc9f 100644 --- a/tests/helpers/test_singleton.py +++ b/tests/helpers/test_singleton.py @@ -1,9 +1,11 @@ """Test singleton helper.""" +from typing import Any from unittest.mock import Mock import pytest +from homeassistant.core import HomeAssistant from homeassistant.helpers import singleton @@ -14,11 +16,11 @@ def mock_hass(): @pytest.mark.parametrize("result", [object(), {}, []]) -async def test_singleton_async(mock_hass, result) -> None: +async def test_singleton_async(mock_hass: HomeAssistant, result: Any) -> None: """Test singleton with async function.""" @singleton.singleton("test_key") - async def something(hass): + async def something(hass: HomeAssistant) -> Any: return result result1 = await something(mock_hass) @@ -30,11 +32,11 @@ async def test_singleton_async(mock_hass, result) -> None: @pytest.mark.parametrize("result", [object(), {}, []]) -def test_singleton(mock_hass, result) -> None: +def test_singleton(mock_hass: HomeAssistant, result: Any) -> None: """Test singleton with function.""" @singleton.singleton("test_key") - def something(hass): + def something(hass: HomeAssistant) -> Any: return result result1 = something(mock_hass) diff --git a/tests/helpers/test_start.py b/tests/helpers/test_start.py index d9c6bbf441c..bd6b328a2c7 100644 --- a/tests/helpers/test_start.py +++ b/tests/helpers/test_start.py @@ -14,7 +14,7 @@ async def test_at_start_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -40,7 +40,7 @@ async def test_at_start_when_running_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -65,7 +65,7 @@ async def test_at_start_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -88,7 +88,7 @@ async def test_at_start_when_starting_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -116,7 +116,7 @@ async def 
test_cancelling_at_start_when_running( calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -137,7 +137,7 @@ async def test_cancelling_at_start_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -156,7 +156,7 @@ async def test_at_started_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -181,7 +181,7 @@ async def test_at_started_when_running_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -205,7 +205,7 @@ async def test_at_started_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -231,7 +231,7 @@ async def test_at_started_when_starting_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -263,7 +263,7 @@ async def test_cancelling_at_started_when_running( calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -284,7 +284,7 @@ async def test_cancelling_at_started_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) From e07768412a0343ba73bfc27190777927a27eb73a Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 17:16:56 +0200 Subject: [PATCH 0813/1635] Update ffmpeg tests to async (#124058) --- tests/components/ffmpeg/test_init.py | 28 ++++++++++++---------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/tests/components/ffmpeg/test_init.py b/tests/components/ffmpeg/test_init.py index ceefed8d62b..e9a781327e0 100644 --- a/tests/components/ffmpeg/test_init.py +++ b/tests/components/ffmpeg/test_init.py @@ -16,9 +16,9 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.setup import async_setup_component, setup_component +from homeassistant.setup import async_setup_component -from tests.common import assert_setup_component, get_test_home_assistant +from tests.common import assert_setup_component @callback @@ -82,26 +82,22 @@ class MockFFmpegDev(ffmpeg.FFmpegBase): self.called_entities = entity_ids -def test_setup_component() -> None: +async def test_setup_component(hass: HomeAssistant) -> None: """Set up ffmpeg component.""" - with get_test_home_assistant() as hass: - with assert_setup_component(1): - setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with assert_setup_component(1): + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" - hass.stop() + assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" -def test_setup_component_test_service() -> None: +async def test_setup_component_test_service(hass: HomeAssistant) -> None: """Set up ffmpeg component test services.""" - with get_test_home_assistant() as hass: - with 
assert_setup_component(1): - setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with assert_setup_component(1): + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.services.has_service(ffmpeg.DOMAIN, "start") - assert hass.services.has_service(ffmpeg.DOMAIN, "stop") - assert hass.services.has_service(ffmpeg.DOMAIN, "restart") - hass.stop() + assert hass.services.has_service(ffmpeg.DOMAIN, "start") + assert hass.services.has_service(ffmpeg.DOMAIN, "stop") + assert hass.services.has_service(ffmpeg.DOMAIN, "restart") async def test_setup_component_test_register(hass: HomeAssistant) -> None: From 8a110abc82e00e323ebac6d85e07548658fff167 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 16 Aug 2024 17:46:37 +0200 Subject: [PATCH 0814/1635] Bump fyta_cli to 0.6.0 (#123816) * Bump fyta_cli to 0.5.1 * Code adjustments to enable strit typing * Update homeassistant/components/fyta/__init__.py Co-authored-by: Joost Lekkerkerker * Update diagnostics * Update config_flow + init (ruff) * Update sensor * Update coordinator * Update homeassistant/components/fyta/diagnostics.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/fyta/diagnostics.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/fyta/sensor.py Co-authored-by: Joost Lekkerkerker * Set one ph sensor to null/none * Update sensor * Clean-up (ruff) --------- Co-authored-by: Joost Lekkerkerker --- .strict-typing | 1 + homeassistant/components/fyta/__init__.py | 7 +- homeassistant/components/fyta/config_flow.py | 24 +- homeassistant/components/fyta/coordinator.py | 12 +- homeassistant/components/fyta/diagnostics.py | 2 +- homeassistant/components/fyta/entity.py | 8 +- homeassistant/components/fyta/manifest.json | 2 +- homeassistant/components/fyta/sensor.py | 34 +- mypy.ini | 10 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/fyta/conftest.py | 25 +- .../components/fyta/fixtures/plant_list.json | 4 - .../fyta/fixtures/plant_status.json | 14 - .../fyta/fixtures/plant_status1.json | 23 + .../fyta/fixtures/plant_status2.json | 23 + .../fyta/snapshots/test_diagnostics.ambr | 38 +- .../fyta/snapshots/test_sensor.ambr | 1124 ++++++++++++++++- 18 files changed, 1279 insertions(+), 76 deletions(-) delete mode 100644 tests/components/fyta/fixtures/plant_list.json delete mode 100644 tests/components/fyta/fixtures/plant_status.json create mode 100644 tests/components/fyta/fixtures/plant_status1.json create mode 100644 tests/components/fyta/fixtures/plant_status2.json diff --git a/.strict-typing b/.strict-typing index 1eec42ad209..51ca93bb9fa 100644 --- a/.strict-typing +++ b/.strict-typing @@ -197,6 +197,7 @@ homeassistant.components.fritzbox_callmonitor.* homeassistant.components.fronius.* homeassistant.components.frontend.* homeassistant.components.fully_kiosk.* +homeassistant.components.fyta.* homeassistant.components.generic_hygrostat.* homeassistant.components.generic_thermostat.* homeassistant.components.geo_location.* diff --git a/homeassistant/components/fyta/__init__.py b/homeassistant/components/fyta/__init__.py index b666c5a1f52..efbb1453456 100644 --- a/homeassistant/components/fyta/__init__.py +++ b/homeassistant/components/fyta/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import datetime import logging -from typing import Any from fyta_cli.fyta_connector import FytaConnector @@ -73,11 +72,11 @@ async def async_migrate_entry(hass: HomeAssistant, 
config_entry: ConfigEntry) -> fyta = FytaConnector( config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD] ) - credentials: dict[str, Any] = await fyta.login() + credentials = await fyta.login() await fyta.client.close() - new[CONF_ACCESS_TOKEN] = credentials[CONF_ACCESS_TOKEN] - new[CONF_EXPIRATION] = credentials[CONF_EXPIRATION].isoformat() + new[CONF_ACCESS_TOKEN] = credentials.access_token + new[CONF_EXPIRATION] = credentials.expiration.isoformat() hass.config_entries.async_update_entry( config_entry, data=new, minor_version=2, version=1 diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index 4cb8bddbf10..f2b5163c9db 100644 --- a/homeassistant/components/fyta/config_flow.py +++ b/homeassistant/components/fyta/config_flow.py @@ -12,10 +12,11 @@ from fyta_cli.fyta_exceptions import ( FytaConnectionError, FytaPasswordError, ) +from fyta_cli.fyta_models import Credentials import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.selector import ( TextSelector, TextSelectorConfig, @@ -49,14 +50,11 @@ DATA_SCHEMA = vol.Schema( class FytaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fyta.""" + credentials: Credentials + _entry: FytaConfigEntry | None = None VERSION = 1 MINOR_VERSION = 2 - def __init__(self) -> None: - """Initialize FytaConfigFlow.""" - self.credentials: dict[str, Any] = {} - self._entry: FytaConfigEntry | None = None - async def async_auth(self, user_input: Mapping[str, Any]) -> dict[str, str]: """Reusable Auth Helper.""" fyta = FytaConnector(user_input[CONF_USERNAME], user_input[CONF_PASSWORD]) @@ -75,10 +73,6 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): finally: await fyta.client.close() - self.credentials[CONF_EXPIRATION] = self.credentials[ - CONF_EXPIRATION - ].isoformat() - return {} async def async_step_user( @@ -90,7 +84,10 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match({CONF_USERNAME: user_input[CONF_USERNAME]}) if not (errors := await self.async_auth(user_input)): - user_input |= self.credentials + user_input |= { + CONF_ACCESS_TOKEN: self.credentials.access_token, + CONF_EXPIRATION: self.credentials.expiration.isoformat(), + } return self.async_create_entry( title=user_input[CONF_USERNAME], data=user_input ) @@ -114,7 +111,10 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): assert self._entry is not None if user_input and not (errors := await self.async_auth(user_input)): - user_input |= self.credentials + user_input |= { + CONF_ACCESS_TOKEN: self.credentials.access_token, + CONF_EXPIRATION: self.credentials.expiration.isoformat(), + } return self.async_update_reload_and_abort( self._entry, data={**self._entry.data, **user_input} ) diff --git a/homeassistant/components/fyta/coordinator.py b/homeassistant/components/fyta/coordinator.py index b6fbf73ec25..c92a96eed63 100644 --- a/homeassistant/components/fyta/coordinator.py +++ b/homeassistant/components/fyta/coordinator.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import datetime, timedelta import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING from fyta_cli.fyta_connector import FytaConnector from fyta_cli.fyta_exceptions import ( @@ -13,6 +13,7 @@ from fyta_cli.fyta_exceptions import ( FytaPasswordError, FytaPlantError, ) 
+from fyta_cli.fyta_models import Plant from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -27,7 +28,7 @@ if TYPE_CHECKING: _LOGGER = logging.getLogger(__name__) -class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): +class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): """Fyta custom coordinator.""" config_entry: FytaConfigEntry @@ -44,7 +45,7 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): async def _async_update_data( self, - ) -> dict[int, dict[str, Any]]: + ) -> dict[int, Plant]: """Fetch data from API endpoint.""" if ( @@ -60,7 +61,6 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): async def renew_authentication(self) -> bool: """Renew access token for FYTA API.""" - credentials: dict[str, Any] = {} try: credentials = await self.fyta.login() @@ -70,8 +70,8 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): raise ConfigEntryAuthFailed from ex new_config_entry = {**self.config_entry.data} - new_config_entry[CONF_ACCESS_TOKEN] = credentials[CONF_ACCESS_TOKEN] - new_config_entry[CONF_EXPIRATION] = credentials[CONF_EXPIRATION].isoformat() + new_config_entry[CONF_ACCESS_TOKEN] = credentials.access_token + new_config_entry[CONF_EXPIRATION] = credentials.expiration.isoformat() self.hass.config_entries.async_update_entry( self.config_entry, data=new_config_entry diff --git a/homeassistant/components/fyta/diagnostics.py b/homeassistant/components/fyta/diagnostics.py index 55720b75ee6..d02f8cacfa3 100644 --- a/homeassistant/components/fyta/diagnostics.py +++ b/homeassistant/components/fyta/diagnostics.py @@ -25,5 +25,5 @@ async def async_get_config_entry_diagnostics( return { "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), - "plant_data": data, + "plant_data": {key: value.to_dict() for key, value in data.items()}, } diff --git a/homeassistant/components/fyta/entity.py b/homeassistant/components/fyta/entity.py index 681a50f4cf5..18c52d74e25 100644 --- a/homeassistant/components/fyta/entity.py +++ b/homeassistant/components/fyta/entity.py @@ -1,6 +1,6 @@ """Entities for FYTA integration.""" -from typing import Any +from fyta_cli.fyta_models import Plant from homeassistant.components.sensor import SensorEntityDescription from homeassistant.config_entries import ConfigEntry @@ -32,13 +32,13 @@ class FytaPlantEntity(CoordinatorEntity[FytaCoordinator]): manufacturer="Fyta", model="Plant", identifiers={(DOMAIN, f"{entry.entry_id}-{plant_id}")}, - name=self.plant.get("name"), - sw_version=self.plant.get("sw_version"), + name=self.plant.name, + sw_version=self.plant.sw_version, ) self.entity_description = description @property - def plant(self) -> dict[str, Any]: + def plant(self) -> Plant: """Get plant data.""" return self.coordinator.data[self.plant_id] diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index f0953dd2a33..07387f4ab05 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["fyta_cli==0.4.1"] + "requirements": ["fyta_cli==0.6.0"] } diff --git a/homeassistant/components/fyta/sensor.py b/homeassistant/components/fyta/sensor.py index 27576ae5065..262d0b6d1f4 100644 --- a/homeassistant/components/fyta/sensor.py +++ b/homeassistant/components/fyta/sensor.py @@ -7,7 +7,7 @@ from dataclasses import 
dataclass from datetime import datetime from typing import Final -from fyta_cli.fyta_connector import PLANT_MEASUREMENT_STATUS, PLANT_STATUS +from fyta_cli.fyta_models import Plant from homeassistant.components.sensor import ( SensorDeviceClass, @@ -23,19 +23,18 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from . import FytaConfigEntry from .coordinator import FytaCoordinator from .entity import FytaPlantEntity -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class FytaSensorEntityDescription(SensorEntityDescription): """Describes Fyta sensor entity.""" - value_fn: Callable[[str | int | float | datetime], str | int | float | datetime] = ( - lambda value: value - ) + value_fn: Callable[[Plant], StateType | datetime] PLANT_STATUS_LIST: list[str] = ["deleted", "doing_great", "need_attention", "no_sensor"] @@ -48,63 +47,68 @@ PLANT_MEASUREMENT_STATUS_LIST: list[str] = [ "too_high", ] + SENSORS: Final[list[FytaSensorEntityDescription]] = [ FytaSensorEntityDescription( key="scientific_name", translation_key="scientific_name", + value_fn=lambda plant: plant.scientific_name, ), FytaSensorEntityDescription( key="status", translation_key="plant_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=PLANT_STATUS.get, + value_fn=lambda plant: plant.status.name.lower(), ), FytaSensorEntityDescription( key="temperature_status", translation_key="temperature_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.temperature_status.name.lower(), ), FytaSensorEntityDescription( key="light_status", translation_key="light_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.light_status.name.lower(), ), FytaSensorEntityDescription( key="moisture_status", translation_key="moisture_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.moisture_status.name.lower(), ), FytaSensorEntityDescription( key="salinity_status", translation_key="salinity_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.salinity_status.name.lower(), ), FytaSensorEntityDescription( key="temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.temperature, ), FytaSensorEntityDescription( key="light", translation_key="light", native_unit_of_measurement="μmol/s⋅m²", state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.light, ), FytaSensorEntityDescription( key="moisture", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.MOISTURE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.moisture, ), FytaSensorEntityDescription( key="salinity", @@ -112,11 +116,13 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [ native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS, device_class=SensorDeviceClass.CONDUCTIVITY, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.salinity, ), FytaSensorEntityDescription( key="ph", 
device_class=SensorDeviceClass.PH, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.ph, ), FytaSensorEntityDescription( key="battery_level", @@ -124,6 +130,7 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [ device_class=SensorDeviceClass.BATTERY, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda plant: plant.battery_level, ), ] @@ -138,7 +145,7 @@ async def async_setup_entry( FytaPlantSensor(coordinator, entry, sensor, plant_id) for plant_id in coordinator.fyta.plant_list for sensor in SENSORS - if sensor.key in coordinator.data[plant_id] + if sensor.key in dir(coordinator.data[plant_id]) ] async_add_entities(plant_entities) @@ -150,8 +157,7 @@ class FytaPlantSensor(FytaPlantEntity, SensorEntity): entity_description: FytaSensorEntityDescription @property - def native_value(self) -> str | int | float | datetime: + def native_value(self) -> StateType | datetime: """Return the state for this sensor.""" - val = self.plant[self.entity_description.key] - return self.entity_description.value_fn(val) + return self.entity_description.value_fn(self.plant) diff --git a/mypy.ini b/mypy.ini index c5478689702..f0a941f20eb 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1726,6 +1726,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.fyta.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.generic_hygrostat.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 522ee846847..59b8b35730e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -921,7 +921,7 @@ freesms==0.2.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.4.1 +fyta_cli==0.6.0 # homeassistant.components.google_translate gTTS==2.2.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 31e448dbbe4..90ea1d2c806 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -774,7 +774,7 @@ freebox-api==1.1.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.4.1 +fyta_cli==0.6.0 # homeassistant.components.google_translate gTTS==2.2.4 diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index 6a67ae75ec2..2bcad9b3c80 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -4,6 +4,7 @@ from collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch +from fyta_cli.fyta_models import Credentials, Plant import pytest from homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN as FYTA_DOMAIN @@ -35,23 +36,27 @@ def mock_config_entry() -> MockConfigEntry: def mock_fyta_connector(): """Build a fixture for the Fyta API that connects successfully and returns one device.""" + plants: dict[int, Plant] = { + 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), + 1: Plant.from_dict(load_json_object_fixture("plant_status2.json", FYTA_DOMAIN)), + } + mock_fyta_connector = AsyncMock() mock_fyta_connector.expiration = datetime.fromisoformat(EXPIRATION).replace( tzinfo=UTC ) mock_fyta_connector.client = AsyncMock(autospec=True) - mock_fyta_connector.update_all_plants.return_value = 
load_json_object_fixture( - "plant_status.json", FYTA_DOMAIN - ) - mock_fyta_connector.plant_list = load_json_object_fixture( - "plant_list.json", FYTA_DOMAIN - ) + mock_fyta_connector.update_all_plants.return_value = plants + mock_fyta_connector.plant_list = { + 0: "Gummibaum", + 1: "Kakaobaum", + } mock_fyta_connector.login = AsyncMock( - return_value={ - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_EXPIRATION: datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), - } + return_value=Credentials( + access_token=ACCESS_TOKEN, + expiration=datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), + ) ) with ( patch( diff --git a/tests/components/fyta/fixtures/plant_list.json b/tests/components/fyta/fixtures/plant_list.json deleted file mode 100644 index 9527c7d9d96..00000000000 --- a/tests/components/fyta/fixtures/plant_list.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "0": "Gummibaum", - "1": "Kakaobaum" -} diff --git a/tests/components/fyta/fixtures/plant_status.json b/tests/components/fyta/fixtures/plant_status.json deleted file mode 100644 index 5d9cb2d31d9..00000000000 --- a/tests/components/fyta/fixtures/plant_status.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "0": { - "name": "Gummibaum", - "scientific_name": "Ficus elastica", - "status": 1, - "sw_version": "1.0" - }, - "1": { - "name": "Kakaobaum", - "scientific_name": "Theobroma cacao", - "status": 2, - "sw_version": "1.0" - } -} diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json new file mode 100644 index 00000000000..f2e8dc9c970 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-10 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Gummibaum", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 3, + "online": true, + "ph": null, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Ficus elastica", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json new file mode 100644 index 00000000000..a5c2735ca7c --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-02 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Kakaobaum", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 3, + "online": true, + "ph": 7, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Theobroma cacao", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 7491310129b..cf6bcdb77ad 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -23,16 +23,50 @@ }), 'plant_data': dict({ '0': dict({ + 'battery_level': 80.0, + 'battery_status': True, + 'last_updated': '2023-01-10T10:10:00', + 'light': 2.0, + 'light_status': 3, + 'moisture': 61.0, + 'moisture_status': 3, 'name': 'Gummibaum', + 'online': True, + 'ph': None, + 'plant_id': 0, + 'plant_origin_path': '', + 'plant_thumb_path': '', + 'salinity': 1.0, + 
'salinity_status': 4, 'scientific_name': 'Ficus elastica', - 'status': 1, + 'sensor_available': True, + 'status': 3, 'sw_version': '1.0', + 'temperature': 25.2, + 'temperature_status': 3, }), '1': dict({ + 'battery_level': 80.0, + 'battery_status': True, + 'last_updated': '2023-01-02T10:10:00', + 'light': 2.0, + 'light_status': 3, + 'moisture': 61.0, + 'moisture_status': 3, 'name': 'Kakaobaum', + 'online': True, + 'ph': 7.0, + 'plant_id': 0, + 'plant_origin_path': '', + 'plant_thumb_path': '', + 'salinity': 1.0, + 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', - 'status': 2, + 'sensor_available': True, + 'status': 3, 'sw_version': '1.0', + 'temperature': 25.2, + 'temperature_status': 3, }), }), }) diff --git a/tests/components/fyta/snapshots/test_sensor.ambr b/tests/components/fyta/snapshots/test_sensor.ambr index 1041fff501e..2e96de0a283 100644 --- a/tests/components/fyta/snapshots/test_sensor.ambr +++ b/tests/components/fyta/snapshots/test_sensor.ambr @@ -1,4 +1,334 @@ # serializer version: 1 +# name: test_all_entities[sensor.gummibaum_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gummibaum_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.gummibaum_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Gummibaum Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light', + 'unit_of_measurement': 'μmol/s⋅m²', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gummibaum Light', + 'state_class': , + 'unit_of_measurement': 'μmol/s⋅m²', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 
'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_light_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_light_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Light state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_light_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Gummibaum Moisture', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_moisture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_moisture_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'moisture_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Moisture state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + 
}), + 'context': , + 'entity_id': 'sensor.gummibaum_moisture_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.gummibaum_ph-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_ph', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'pH', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-ph', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_ph-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ph', + 'friendly_name': 'Gummibaum pH', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_ph', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_all_entities[sensor.gummibaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -56,7 +386,122 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'doing_great', + 'state': 'no_sensor', + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_salinity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'conductivity', + 'friendly_name': 'Gummibaum Salinity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_salinity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_salinity_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity_status', + 'unique_id': 
'ce5f5431554d101905d31797e1232da8-0-salinity_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Salinity state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_salinity_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', }) # --- # name: test_all_entities[sensor.gummibaum_scientific_name-entry] @@ -105,6 +550,451 @@ 'state': 'Ficus elastica', }) # --- +# name: test_all_entities[sensor.gummibaum_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gummibaum Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.2', + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_temperature_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Temperature state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_temperature_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'sensor', + 'entity_category': , + 'entity_id': 'sensor.kakaobaum_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Kakaobaum Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light', + 'unit_of_measurement': 'μmol/s⋅m²', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kakaobaum Light', + 'state_class': , + 'unit_of_measurement': 'μmol/s⋅m²', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_light_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Light state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_light_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Kakaobaum Moisture', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_moisture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_moisture_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'moisture_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Moisture state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_moisture_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_ph-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_ph', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'pH', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-ph', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_ph-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ph', + 'friendly_name': 'Kakaobaum pH', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_ph', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.0', + }) +# --- # name: 
test_all_entities[sensor.kakaobaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -162,7 +1052,122 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'need_attention', + 'state': 'no_sensor', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_salinity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'conductivity', + 'friendly_name': 'Kakaobaum Salinity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_salinity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_salinity_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Salinity state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_salinity_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', }) # --- # name: test_all_entities[sensor.kakaobaum_scientific_name-entry] @@ -211,3 +1216,118 @@ 'state': 'Theobroma cacao', }) # --- +# name: test_all_entities[sensor.kakaobaum_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fyta', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Kakaobaum Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.2', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_temperature_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Temperature state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_temperature_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- From 9b11aaf1eb8731638976c31edd1fd541e144d9b8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 19:00:44 +0200 Subject: [PATCH 0815/1635] Add missing hass type hint in alexa tests (#124064) * Add missing hass type hint in alexa tests * One more --- tests/components/alexa/test_auth.py | 16 +++--- tests/components/alexa/test_common.py | 53 +++++++++++++------ tests/components/alexa/test_smart_home.py | 26 +++++++-- .../components/alexa/test_smart_home_http.py | 5 +- 4 files changed, 69 insertions(+), 31 deletions(-) diff --git a/tests/components/alexa/test_auth.py b/tests/components/alexa/test_auth.py index 8d4308ba792..b3aa645bfcb 100644 --- a/tests/components/alexa/test_auth.py +++ b/tests/components/alexa/test_auth.py @@ -10,14 +10,14 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def run_auth_get_access_token( - hass, - aioclient_mock, - expires_in, - client_id, - client_secret, - accept_grant_code, - refresh_token, -): + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + expires_in: int, + client_id: str, + client_secret: str, + accept_grant_code: str, + refresh_token: str, +) -> None: """Do auth and request a new token for tests.""" aioclient_mock.post( TEST_TOKEN_URL, diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index 43e7d77ce71..e78f2cba40f 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -1,5 +1,8 @@ """Test helpers for the Alexa integration.""" +from __future__ import 
annotations + +from typing import Any from unittest.mock import Mock from uuid import uuid4 @@ -7,7 +10,7 @@ import pytest from homeassistant.components.alexa import config, smart_home from homeassistant.components.alexa.const import CONF_ENDPOINT, CONF_FILTER, CONF_LOCALE -from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.core import Context, HomeAssistant, ServiceCall, callback from homeassistant.helpers import entityfilter from tests.common import async_mock_service @@ -62,7 +65,7 @@ class MockConfig(smart_home.AlexaConfig): """Accept a grant.""" -def get_default_config(hass): +def get_default_config(hass: HomeAssistant) -> MockConfig: """Return a MockConfig instance.""" return MockConfig(hass) @@ -93,15 +96,15 @@ def get_new_request(namespace, name, endpoint=None): async def assert_request_calls_service( - namespace, - name, - endpoint, - service, - hass, + namespace: str, + name: str, + endpoint: str, + service: str, + hass: HomeAssistant, response_type="Response", - payload=None, - instance=None, -): + payload: dict[str, Any] | None = None, + instance: str | None = None, +) -> tuple[ServiceCall, dict[str, Any]]: """Assert an API request calls a hass service.""" context = Context() request = get_new_request(namespace, name, endpoint) @@ -129,8 +132,14 @@ async def assert_request_calls_service( async def assert_request_fails( - namespace, name, endpoint, service_not_called, hass, payload=None, instance=None -): + namespace: str, + name: str, + endpoint: str, + service_not_called: str, + hass: HomeAssistant, + payload: dict[str, Any] | None = None, + instance: str | None = None, +) -> None: """Assert an API request returns an ErrorResponse.""" request = get_new_request(namespace, name, endpoint) if payload: @@ -152,8 +161,12 @@ async def assert_request_fails( async def assert_power_controller_works( - endpoint, on_service, off_service, hass, timestamp -): + endpoint: str, + on_service: str, + off_service: str, + hass: HomeAssistant, + timestamp: str, +) -> None: """Assert PowerController API requests work.""" _, response = await assert_request_calls_service( "Alexa.PowerController", "TurnOn", endpoint, on_service, hass @@ -169,8 +182,12 @@ async def assert_power_controller_works( async def assert_scene_controller_works( - endpoint, activate_service, deactivate_service, hass, timestamp -): + endpoint: str, + activate_service: str, + deactivate_service: str, + hass: HomeAssistant, + timestamp: str, +) -> None: """Assert SceneController API requests work.""" _, response = await assert_request_calls_service( "Alexa.SceneController", @@ -196,7 +213,9 @@ async def assert_scene_controller_works( assert response["event"]["payload"]["timestamp"] == timestamp -async def reported_properties(hass, endpoint, return_full_response=False): +async def reported_properties( + hass: HomeAssistant, endpoint: str, return_full_response: bool = False +) -> ReportedProperties: """Use ReportState to get properties and return them. 
The result is a ReportedProperties instance, which has methods to make diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index fb27c91eea7..6ccf265dcdc 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -120,7 +120,9 @@ async def test_wrong_version(hass: HomeAssistant) -> None: await smart_home.async_handle_message(hass, get_default_config(hass), msg) -async def discovery_test(device, hass, expected_endpoints=1): +async def discovery_test( + device, hass: HomeAssistant, expected_endpoints: int = 1 +) -> dict[str, Any] | list[dict[str, Any]] | None: """Test alexa discovery request.""" request = get_new_request("Alexa.Discovery", "Discover") @@ -2601,8 +2603,15 @@ async def test_stop_valve( async def assert_percentage_changes( - hass, adjustments, namespace, name, endpoint, parameter, service, changed_parameter -): + hass: HomeAssistant, + adjustments, + namespace, + name, + endpoint, + parameter, + service, + changed_parameter, +) -> None: """Assert an API request making percentage changes works. AdjustPercentage, AdjustBrightness, etc. are examples of such requests. @@ -2616,8 +2625,15 @@ async def assert_percentage_changes( async def assert_range_changes( - hass, adjustments, namespace, name, endpoint, service, changed_parameter, instance -): + hass: HomeAssistant, + adjustments: list[tuple[int | str, int, bool]], + namespace: str, + name: str, + endpoint: str, + service: str, + changed_parameter: str | None, + instance: str, +) -> None: """Assert an API request making range changes works. AdjustRangeValue are examples of such requests. diff --git a/tests/components/alexa/test_smart_home_http.py b/tests/components/alexa/test_smart_home_http.py index 1c30c72e72c..20d9b30dda5 100644 --- a/tests/components/alexa/test_smart_home_http.py +++ b/tests/components/alexa/test_smart_home_http.py @@ -5,6 +5,7 @@ import json import logging from typing import Any +from aiohttp import ClientResponse import pytest from homeassistant.components.alexa import DOMAIN, smart_home @@ -17,7 +18,9 @@ from .test_common import get_new_request from tests.typing import ClientSessionGenerator -async def do_http_discovery(config, hass, hass_client): +async def do_http_discovery( + config: dict[str, Any], hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> ClientResponse: """Submit a request to the Smart Home HTTP API.""" await async_setup_component(hass, DOMAIN, config) http_client = await hass_client() From a8a7d01a845ef6fbf34c5a8bb99de7aa569a1a1b Mon Sep 17 00:00:00 2001 From: Kim de Vos Date: Fri, 16 Aug 2024 21:37:24 +0200 Subject: [PATCH 0816/1635] Add temperature sensors for unifi device (#122518) * Add temperature sensors for device * Move to single line * Use right reference * Always return a value * Update tests * Use slugify for id name * Return default value if not present * Make _device_temperature return value * Add default value if temperatures is None * Set value to go over all code paths * Add test for no matching temperatures * make first part deterministic --- homeassistant/components/unifi/sensor.py | 74 ++++++++++- tests/components/unifi/test_sensor.py | 162 +++++++++++++++++++++++ 2 files changed, 234 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 08bd0ddb869..39e487c0d57 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -21,7 +21,11 @@ from 
aiounifi.interfaces.ports import Ports from aiounifi.interfaces.wlans import Wlans from aiounifi.models.api import ApiItemT from aiounifi.models.client import Client -from aiounifi.models.device import Device, TypedDeviceUptimeStatsWanMonitor +from aiounifi.models.device import ( + Device, + TypedDeviceTemperature, + TypedDeviceUptimeStatsWanMonitor, +) from aiounifi.models.outlet import Outlet from aiounifi.models.port import Port from aiounifi.models.wlan import Wlan @@ -280,6 +284,72 @@ def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: ) +@callback +def async_device_temperatures_value_fn( + temperature_name: str, hub: UnifiHub, device: Device +) -> float: + """Retrieve the temperature of the device.""" + return_value: float = 0 + if device.temperatures: + temperature = _device_temperature(temperature_name, device.temperatures) + return_value = temperature if temperature is not None else 0 + return return_value + + +@callback +def async_device_temperatures_supported_fn( + temperature_name: str, hub: UnifiHub, obj_id: str +) -> bool: + """Determine if an device have a temperatures.""" + if (device := hub.api.devices[obj_id]) and device.temperatures: + return _device_temperature(temperature_name, device.temperatures) is not None + return False + + +@callback +def _device_temperature( + temperature_name: str, temperatures: list[TypedDeviceTemperature] +) -> float | None: + """Return the temperature of the device.""" + for temperature in temperatures: + if temperature_name in temperature["name"]: + return temperature["value"] + return None + + +def make_device_temperatur_sensors() -> tuple[UnifiSensorEntityDescription, ...]: + """Create device temperature sensors.""" + + def make_device_temperature_entity_description( + name: str, + ) -> UnifiSensorEntityDescription: + return UnifiSensorEntityDescription[Devices, Device]( + key=f"Device {name} temperature", + device_class=SensorDeviceClass.TEMPERATURE, + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + entity_registry_enabled_default=False, + api_handler_fn=lambda api: api.devices, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda device: f"{device.name} {name} Temperature", + object_fn=lambda api, obj_id: api.devices[obj_id], + supported_fn=partial(async_device_temperatures_supported_fn, name), + unique_id_fn=lambda hub, obj_id: f"temperature-{slugify(name)}-{obj_id}", + value_fn=partial(async_device_temperatures_value_fn, name), + ) + + return tuple( + make_device_temperature_entity_description(name) + for name in ( + "CPU", + "Local", + "PHY", + ) + ) + + @dataclass(frozen=True, kw_only=True) class UnifiSensorEntityDescription( SensorEntityDescription, UnifiEntityDescription[HandlerT, ApiItemT] @@ -544,7 +614,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] 
= ( ), ) -ENTITY_DESCRIPTIONS += make_wan_latency_sensors() +ENTITY_DESCRIPTIONS += make_wan_latency_sensors() + make_device_temperatur_sensors() async def async_setup_entry( diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index cc51c31fc8b..afa256c087e 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -1519,3 +1519,165 @@ async def test_wan_monitor_latency_with_no_uptime( latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency") assert latency_entry is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 3, + "device_id": "mock-id", + "has_fan": True, + "fan_level": 0, + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + "name": "Device", + "next_interval": 20, + "overheating": True, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + "temperatures": [ + {"name": "CPU", "type": "cpu", "value": 66.0}, + {"name": "Local", "type": "board", "value": 48.75}, + {"name": "PHY", "type": "board", "value": 50.25}, + ], + } + ] + ], +) +@pytest.mark.parametrize( + ("temperature_id", "state", "updated_state", "index_to_update"), + [ + ("device_cpu", "66.0", "20", 0), + ("device_local", "48.75", "90.64", 1), + ("device_phy", "50.25", "80", 2), + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_temperatures( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_websocket_message, + device_payload: list[dict[str, Any]], + temperature_id: str, + state: str, + updated_state: str, + index_to_update: int, +) -> None: + """Verify that device temperatures sensors are working as expected.""" + + entity_id = f"sensor.device_{temperature_id}_temperature" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + temperature_entity = entity_registry.async_get(entity_id) + assert temperature_entity.disabled_by == RegistryEntryDisabler.INTEGRATION + + # Enable entity + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) + + await hass.async_block_till_done() + + async_fire_time_changed( + hass, + dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1), + ) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 7 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 + + # Verify sensor state + assert hass.states.get(entity_id).state == state + + # # Verify state update + device = device_payload[0] + device["temperatures"][index_to_update]["value"] = updated_state + + mock_websocket_message(message=MessageKey.DEVICE, data=device) + + assert hass.states.get(entity_id).state == updated_state + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_with_no_temperature( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that device temperature sensors is not created if there is no data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + temperature_entity = entity_registry.async_get( + 
"sensor.device_device_cpu_temperature" + ) + + assert temperature_entity is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 2, + "device_id": "mock-id", + "ip": "10.0.1.1", + "mac": "10:00:00:00:01:01", + "last_seen": 1562600145, + "model": "US16P150", + "name": "mock-name", + "port_overrides": [], + "state": 1, + "type": "usw", + "version": "4.0.42.10433", + "temperatures": [ + {"name": "MEM", "type": "mem", "value": 66.0}, + ], + } + ] + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_with_no_matching_temperatures( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Verify that device temperature sensors is not created if there is no matching data.""" + + assert len(hass.states.async_all()) == 6 + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 + + temperature_entity = entity_registry.async_get( + "sensor.device_device_cpu_temperature" + ) + + assert temperature_entity is None From 24680b731faef233b3dc6e713d27795514ee139f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 16 Aug 2024 21:51:58 +0200 Subject: [PATCH 0817/1635] Add missing hass type hint in component tests (f) (#124076) --- tests/components/ffmpeg/test_init.py | 6 +++--- tests/components/flick_electric/test_config_flow.py | 3 ++- tests/components/fronius/__init__.py | 10 +++++++++- tests/components/fully_kiosk/test_number.py | 8 +++++--- tests/components/fully_kiosk/test_switch.py | 8 +++++--- 5 files changed, 24 insertions(+), 11 deletions(-) diff --git a/tests/components/ffmpeg/test_init.py b/tests/components/ffmpeg/test_init.py index e9a781327e0..aa407d5b695 100644 --- a/tests/components/ffmpeg/test_init.py +++ b/tests/components/ffmpeg/test_init.py @@ -22,7 +22,7 @@ from tests.common import assert_setup_component @callback -def async_start(hass, entity_id=None): +def async_start(hass: HomeAssistant, entity_id: str | None = None) -> None: """Start a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -32,7 +32,7 @@ def async_start(hass, entity_id=None): @callback -def async_stop(hass, entity_id=None): +def async_stop(hass: HomeAssistant, entity_id: str | None = None) -> None: """Stop a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -42,7 +42,7 @@ def async_stop(hass, entity_id=None): @callback -def async_restart(hass, entity_id=None): +def async_restart(hass: HomeAssistant, entity_id: str | None = None) -> None: """Restart a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. 
diff --git a/tests/components/flick_electric/test_config_flow.py b/tests/components/flick_electric/test_config_flow.py index 1b3ed1de34d..85a6495d3c5 100644 --- a/tests/components/flick_electric/test_config_flow.py +++ b/tests/components/flick_electric/test_config_flow.py @@ -6,6 +6,7 @@ from pyflick.authentication import AuthException from homeassistant import config_entries from homeassistant.components.flick_electric.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,7 +16,7 @@ from tests.common import MockConfigEntry CONF = {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"} -async def _flow_submit(hass): +async def _flow_submit(hass: HomeAssistant) -> ConfigFlowResult: return await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 2109d4a6692..57b22490ed0 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,9 +3,12 @@ from __future__ import annotations from collections.abc import Callable +from datetime import timedelta import json from typing import Any +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -114,7 +117,12 @@ def mock_responses( ) -async def enable_all_entities(hass, freezer, config_entry_id, time_till_next_update): +async def enable_all_entities( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry_id: str, + time_till_next_update: timedelta, +) -> None: """Enable all entities for a config entry and fast forward time to receive data.""" registry = er.async_get(hass) entities = er.async_entries_for_config_entry(registry, config_entry_id) diff --git a/tests/components/fully_kiosk/test_number.py b/tests/components/fully_kiosk/test_number.py index 2fbbf751725..5f74002f8cd 100644 --- a/tests/components/fully_kiosk/test_number.py +++ b/tests/components/fully_kiosk/test_number.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import number from homeassistant.components.fully_kiosk.const import DOMAIN, UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util @@ -81,9 +81,11 @@ async def test_numbers( assert device_entry.sw_version == "1.42.5" -def set_value(hass, entity_id, value): +async def set_value( + hass: HomeAssistant, entity_id: str, value: float +) -> ServiceResponse: """Set the value of a number entity.""" - return hass.services.async_call( + return await hass.services.async_call( number.DOMAIN, "set_value", {ATTR_ENTITY_ID: entity_id, number.ATTR_VALUE: value}, diff --git a/tests/components/fully_kiosk/test_switch.py b/tests/components/fully_kiosk/test_switch.py index 5b3b5e651b0..14a464e0dcd 100644 --- a/tests/components/fully_kiosk/test_switch.py +++ b/tests/components/fully_kiosk/test_switch.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import switch from 
homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, async_fire_mqtt_message @@ -149,8 +149,10 @@ def has_subscribed(mqtt_mock: MqttMockHAClient, topic: str) -> bool: return False -def call_service(hass, service, entity_id): +async def call_service( + hass: HomeAssistant, service: str, entity_id: str +) -> ServiceResponse: """Call any service on entity.""" - return hass.services.async_call( + return await hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) From 91951ed7347022f1ccaf87ece9320aa43c8d2182 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 16 Aug 2024 16:48:03 -0500 Subject: [PATCH 0818/1635] Speed up initializing config flows (#124015) --- homeassistant/config_entries.py | 43 +++++++++++++++++++++------------ 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index 75b0631339f..e48313cab33 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections import UserDict +from collections import UserDict, defaultdict from collections.abc import ( Callable, Coroutine, @@ -1224,8 +1224,12 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): super().__init__(hass) self.config_entries = config_entries self._hass_config = hass_config - self._pending_import_flows: dict[str, dict[str, asyncio.Future[None]]] = {} - self._initialize_futures: dict[str, list[asyncio.Future[None]]] = {} + self._pending_import_flows: defaultdict[ + str, dict[str, asyncio.Future[None]] + ] = defaultdict(dict) + self._initialize_futures: defaultdict[str, set[asyncio.Future[None]]] = ( + defaultdict(set) + ) self._discovery_debouncer = Debouncer[None]( hass, _LOGGER, @@ -1278,12 +1282,11 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): loop = self.hass.loop if context["source"] == SOURCE_IMPORT: - self._pending_import_flows.setdefault(handler, {})[flow_id] = ( - loop.create_future() - ) + self._pending_import_flows[handler][flow_id] = loop.create_future() cancel_init_future = loop.create_future() - self._initialize_futures.setdefault(handler, []).append(cancel_init_future) + handler_init_futures = self._initialize_futures[handler] + handler_init_futures.add(cancel_init_future) try: async with interrupt( cancel_init_future, @@ -1294,8 +1297,13 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): except FlowCancelledError as ex: raise asyncio.CancelledError from ex finally: - self._initialize_futures[handler].remove(cancel_init_future) - self._pending_import_flows.get(handler, {}).pop(flow_id, None) + handler_init_futures.remove(cancel_init_future) + if not handler_init_futures: + del self._initialize_futures[handler] + if handler in self._pending_import_flows: + self._pending_import_flows[handler].pop(flow_id, None) + if not self._pending_import_flows[handler]: + del self._pending_import_flows[handler] if result["type"] != data_entry_flow.FlowResultType.ABORT: await self.async_post_init(flow, result) @@ -1322,11 +1330,18 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): try: result = await 
self._async_handle_step(flow, flow.init_step, data) finally: - init_done = self._pending_import_flows.get(handler, {}).get(flow_id) - if init_done and not init_done.done(): - init_done.set_result(None) + self._set_pending_import_done(flow) return flow, result + def _set_pending_import_done(self, flow: ConfigFlow) -> None: + """Set pending import flow as done.""" + if ( + (handler_import_flows := self._pending_import_flows.get(flow.handler)) + and (init_done := handler_import_flows.get(flow.flow_id)) + and not init_done.done() + ): + init_done.set_result(None) + @callback def async_shutdown(self) -> None: """Cancel any initializing flows.""" @@ -1347,9 +1362,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): # We do this to avoid a circular dependency where async_finish_flow sets up a # new entry, which needs the integration to be set up, which is waiting for # init to be done. - init_done = self._pending_import_flows.get(flow.handler, {}).get(flow.flow_id) - if init_done and not init_done.done(): - init_done.set_result(None) + self._set_pending_import_done(flow) # Remove notification if no other discovery config entries in progress if not self._async_has_other_discovery_flows(flow.flow_id): From 69700f068f589ed028c8ec9791c67a2809da9589 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Fri, 16 Aug 2024 23:57:10 +0200 Subject: [PATCH 0819/1635] Migrate back from `pysnmp-lextudio` to `pysnmp` (#123579) --- homeassistant/components/brother/manifest.json | 2 +- homeassistant/components/snmp/manifest.json | 2 +- homeassistant/package_constraints.txt | 3 --- requirements_all.txt | 4 ++-- requirements_test_all.txt | 4 ++-- script/gen_requirements_all.py | 3 --- tests/components/snmp/test_integer_sensor.py | 2 +- tests/components/snmp/test_negative_sensor.py | 2 +- tests/components/snmp/test_switch.py | 2 +- 9 files changed, 9 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/brother/manifest.json b/homeassistant/components/brother/manifest.json index 5caaeb2f1a1..d9c8e36aa1d 100644 --- a/homeassistant/components/brother/manifest.json +++ b/homeassistant/components/brother/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_polling", "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], "quality_scale": "platinum", - "requirements": ["brother==4.2.0"], + "requirements": ["brother==4.3.0"], "zeroconf": [ { "type": "_printer._tcp.local.", diff --git a/homeassistant/components/snmp/manifest.json b/homeassistant/components/snmp/manifest.json index d79910c44cd..c3970e1e00a 100644 --- a/homeassistant/components/snmp/manifest.json +++ b/homeassistant/components/snmp/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/snmp", "iot_class": "local_polling", "loggers": ["pyasn1", "pysmi", "pysnmp"], - "requirements": ["pysnmp-lextudio==6.0.11"] + "requirements": ["pysnmp==6.2.5"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 900d4510c17..1e90333a198 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -156,9 +156,6 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 -# pysnmp is no longer maintained and does not work with newer -# python -pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. 
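The requirements and test updates below follow from the dependency swap above: the snmp tests now import Integer32 from pysnmp.proto.rfc1902, where it is defined, instead of from pysnmp.hlapi. A minimal sketch of the new import path (the value and variable name are illustrative, not taken from the tests):

from pysnmp.proto.rfc1902 import Integer32

# Illustrative value object of the kind the snmp tests construct for their mocked responses.
mock_reading = Integer32(13)
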
diff --git a/requirements_all.txt b/requirements_all.txt index 59b8b35730e..ad750c5ec4b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -622,7 +622,7 @@ bring-api==0.8.1 broadlink==0.19.0 # homeassistant.components.brother -brother==4.2.0 +brother==4.3.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -2208,7 +2208,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.snmp -pysnmp-lextudio==6.0.11 +pysnmp==6.2.5 # homeassistant.components.snooz pysnooz==0.8.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 90ea1d2c806..808f93b6ad5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -542,7 +542,7 @@ bring-api==0.8.1 broadlink==0.19.0 # homeassistant.components.brother -brother==4.2.0 +brother==4.3.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -1762,7 +1762,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.snmp -pysnmp-lextudio==6.0.11 +pysnmp==6.2.5 # homeassistant.components.snooz pysnooz==0.8.6 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 522a626754d..6ce97468699 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -178,9 +178,6 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 -# pysnmp is no longer maintained and does not work with newer -# python -pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. diff --git a/tests/components/snmp/test_integer_sensor.py b/tests/components/snmp/test_integer_sensor.py index dab2b080c97..8e7e0f166ef 100644 --- a/tests/components/snmp/test_integer_sensor.py +++ b/tests/components/snmp/test_integer_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_negative_sensor.py b/tests/components/snmp/test_negative_sensor.py index dba09ea75bd..66a111b68d0 100644 --- a/tests/components/snmp/test_negative_sensor.py +++ b/tests/components/snmp/test_negative_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_switch.py b/tests/components/snmp/test_switch.py index adb9d1c59d0..fe1c3922ff0 100644 --- a/tests/components/snmp/test_switch.py +++ b/tests/components/snmp/test_switch.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN From 1614e2c825f796c2e195ac384dcf3ca3ed1acb68 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 16 Aug 2024 23:58:42 +0200 Subject: [PATCH 0820/1635] Use constants in Sonos media player tests (#124037) --- tests/components/sonos/test_media_player.py | 145 ++++++++++---------- 1 file changed, 76 insertions(+), 69 deletions(-) diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index 599a04b806a..ddf84efd7da 100644 --- a/tests/components/sonos/test_media_player.py +++ 
b/tests/components/sonos/test_media_player.py @@ -7,7 +7,13 @@ from unittest.mock import patch import pytest from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, @@ -26,6 +32,7 @@ from homeassistant.components.sonos.media_player import ( VOLUME_INCREMENT, ) from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_REPEAT_SET, SERVICE_SHUFFLE_SET, SERVICE_VOLUME_DOWN, @@ -176,9 +183,9 @@ async def test_play_media_library( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": media_content_type, - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: media_content_type, + ATTR_MEDIA_CONTENT_ID: media_content_id, ATTR_MEDIA_ENQUEUE: enqueue, }, blocking=True, @@ -225,9 +232,9 @@ async def test_play_media_lib_track_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -254,9 +261,9 @@ async def test_play_media_lib_track_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -282,9 +289,9 @@ async def test_play_media_lib_track_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -305,9 +312,9 @@ async def test_play_media_lib_track_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -335,9 +342,9 @@ async def test_play_media_share_link_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -363,9 +370,9 @@ async def test_play_media_share_link_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -395,9 +402,9 @@ async def test_play_media_share_link_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + 
ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -429,9 +436,9 @@ async def test_play_media_share_link_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -494,9 +501,9 @@ async def test_play_media_music_library_playlist( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: media_content_id, }, blocking=True, ) @@ -524,9 +531,9 @@ async def test_play_media_music_library_playlist_dne( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: media_content_id, }, blocking=True, ) @@ -565,8 +572,8 @@ async def test_select_source_line_in_tv( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": source, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: source, }, blocking=True, ) @@ -608,8 +615,8 @@ async def test_select_source_play_uri( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": source, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: source, }, blocking=True, ) @@ -648,8 +655,8 @@ async def test_select_source_play_queue( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": source, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: source, }, blocking=True, ) @@ -677,8 +684,8 @@ async def test_select_source_error( MP_DOMAIN, SERVICE_SELECT_SOURCE, { - "entity_id": "media_player.zone_a", - "source": "invalid_source", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_INPUT_SOURCE: "invalid_source", }, blocking=True, ) @@ -698,8 +705,8 @@ async def test_shuffle_set( MP_DOMAIN, SERVICE_SHUFFLE_SET, { - "entity_id": "media_player.zone_a", - "shuffle": True, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_SHUFFLE: True, }, blocking=True, ) @@ -709,8 +716,8 @@ async def test_shuffle_set( MP_DOMAIN, SERVICE_SHUFFLE_SET, { - "entity_id": "media_player.zone_a", - "shuffle": False, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_SHUFFLE: False, }, blocking=True, ) @@ -728,13 +735,13 @@ async def test_shuffle_get( sub_callback = subscription.callback state = hass.states.get("media_player.zone_a") - assert state.attributes["shuffle"] is False + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False no_media_event.variables["current_play_mode"] = "SHUFFLE_NOREPEAT" sub_callback(no_media_event) await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.zone_a") - assert state.attributes["shuffle"] is True + assert state.attributes[ATTR_MEDIA_SHUFFLE] is True # The integration keeps a copy of the last event to check for # changes, so we create a new event. 
@@ -745,7 +752,7 @@ async def test_shuffle_get( sub_callback(no_media_event) await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.zone_a") - assert state.attributes["shuffle"] is False + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False async def test_repeat_set( @@ -759,8 +766,8 @@ async def test_repeat_set( MP_DOMAIN, SERVICE_REPEAT_SET, { - "entity_id": "media_player.zone_a", - "repeat": RepeatMode.ALL, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.ALL, }, blocking=True, ) @@ -770,8 +777,8 @@ async def test_repeat_set( MP_DOMAIN, SERVICE_REPEAT_SET, { - "entity_id": "media_player.zone_a", - "repeat": RepeatMode.ONE, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.ONE, }, blocking=True, ) @@ -781,8 +788,8 @@ async def test_repeat_set( MP_DOMAIN, SERVICE_REPEAT_SET, { - "entity_id": "media_player.zone_a", - "repeat": RepeatMode.OFF, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.OFF, }, blocking=True, ) @@ -800,13 +807,13 @@ async def test_repeat_get( sub_callback = subscription.callback state = hass.states.get("media_player.zone_a") - assert state.attributes["repeat"] == RepeatMode.OFF + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF no_media_event.variables["current_play_mode"] = "REPEAT_ALL" sub_callback(no_media_event) await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.zone_a") - assert state.attributes["repeat"] == RepeatMode.ALL + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL no_media_event = SonosMockEvent( soco, soco.avTransport, no_media_event.variables.copy() @@ -815,7 +822,7 @@ async def test_repeat_get( sub_callback(no_media_event) await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.zone_a") - assert state.attributes["repeat"] == RepeatMode.ONE + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ONE no_media_event = SonosMockEvent( soco, soco.avTransport, no_media_event.variables.copy() @@ -824,7 +831,7 @@ async def test_repeat_get( sub_callback(no_media_event) await hass.async_block_till_done(wait_background_tasks=True) state = hass.states.get("media_player.zone_a") - assert state.attributes["repeat"] == RepeatMode.OFF + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF async def test_play_media_favorite_item_id( @@ -838,9 +845,9 @@ async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "favorite_item_id", - "media_content_id": "FV:2/4", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", + ATTR_MEDIA_CONTENT_ID: "FV:2/4", }, blocking=True, ) @@ -860,9 +867,9 @@ async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "favorite_item_id", - "media_content_id": "UNKNOWN_ID", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", + ATTR_MEDIA_CONTENT_ID: "UNKNOWN_ID", }, blocking=True, ) @@ -900,7 +907,7 @@ async def test_service_snapshot_restore( SONOS_DOMAIN, SERVICE_SNAPSHOT, { - "entity_id": ["media_player.living_room", "media_player.bedroom"], + ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], }, blocking=True, ) @@ -913,7 +920,7 @@ async def test_service_snapshot_restore( SONOS_DOMAIN, SERVICE_RESTORE, { - "entity_id": ["media_player.living_room", 
"media_player.bedroom"], + ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], }, blocking=True, ) @@ -932,7 +939,7 @@ async def test_volume( MP_DOMAIN, SERVICE_VOLUME_UP, { - "entity_id": "media_player.zone_a", + ATTR_ENTITY_ID: "media_player.zone_a", }, blocking=True, ) @@ -942,7 +949,7 @@ async def test_volume( MP_DOMAIN, SERVICE_VOLUME_DOWN, { - "entity_id": "media_player.zone_a", + ATTR_ENTITY_ID: "media_player.zone_a", }, blocking=True, ) @@ -951,7 +958,7 @@ async def test_volume( await hass.services.async_call( MP_DOMAIN, SERVICE_VOLUME_SET, - {"entity_id": "media_player.zone_a", "volume_level": 0.30}, + {ATTR_ENTITY_ID: "media_player.zone_a", ATTR_MEDIA_VOLUME_LEVEL: 0.30}, blocking=True, ) # SoCo uses 0..100 for its range. From 7deb9bf30f2d2ddb2b0d06baefa22ae4baa2178b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 16 Aug 2024 23:59:21 +0200 Subject: [PATCH 0821/1635] Do not override hass.loop_thread_id in tests (#124053) --- tests/common.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/common.py b/tests/common.py index 684b9eb0433..2f0c032616a 100644 --- a/tests/common.py +++ b/tests/common.py @@ -208,7 +208,6 @@ def get_test_home_assistant() -> Generator[HomeAssistant]: hass_created_event.set() - hass.loop_thread_id = loop._thread_ident loop.run_forever() loop_stop_event.set() From 6c01e4b99cb1639dea94258119e4a1ff1a65046c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 17 Aug 2024 10:59:39 +0200 Subject: [PATCH 0822/1635] Use BaseEventLoop._thread_id instead of a custom attribute (#124054) Co-authored-by: J. Nick Koston --- homeassistant/core.py | 4 +--- homeassistant/runner.py | 1 - homeassistant/util/async_.py | 2 +- tests/common.py | 2 -- tests/util/test_async.py | 10 +++++----- 5 files changed, 7 insertions(+), 12 deletions(-) diff --git a/homeassistant/core.py b/homeassistant/core.py index 1050d25ee71..b797798134e 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -451,9 +451,7 @@ class HomeAssistant: self.import_executor = InterruptibleThreadPoolExecutor( max_workers=1, thread_name_prefix="ImportExecutor" ) - self.loop_thread_id = getattr( - self.loop, "_thread_ident", getattr(self.loop, "_thread_id") - ) + self.loop_thread_id = getattr(self.loop, "_thread_id") def verify_event_loop_thread(self, what: str) -> None: """Report and raise if we are not running in the event loop thread.""" diff --git a/homeassistant/runner.py b/homeassistant/runner.py index a1510336302..4bac12ec399 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -107,7 +107,6 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): def new_event_loop(self) -> asyncio.AbstractEventLoop: """Get the event loop.""" loop: asyncio.AbstractEventLoop = super().new_event_loop() - setattr(loop, "_thread_ident", threading.get_ident()) loop.set_exception_handler(_async_loop_exception_handler) if self.debug: loop.set_debug(True) diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index dcb788f0685..d010d8cb341 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -57,7 +57,7 @@ def run_callback_threadsafe[_T, *_Ts]( Return a concurrent.futures.Future to access the result. 
""" - if (ident := loop.__dict__.get("_thread_ident")) and ident == threading.get_ident(): + if (ident := loop.__dict__.get("_thread_id")) and ident == threading.get_ident(): raise RuntimeError("Cannot be called from within the event loop") future: concurrent.futures.Future[_T] = concurrent.futures.Future() diff --git a/tests/common.py b/tests/common.py index 2f0c032616a..3a4c3f2851c 100644 --- a/tests/common.py +++ b/tests/common.py @@ -204,8 +204,6 @@ def get_test_home_assistant() -> Generator[HomeAssistant]: hass.start = start_hass hass.stop = stop_hass - loop._thread_ident = threading.get_ident() - hass_created_event.set() loop.run_forever() diff --git a/tests/util/test_async.py b/tests/util/test_async.py index 17349cf6ff9..cda10b69c3f 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -22,18 +22,18 @@ def test_run_callback_threadsafe_from_inside_event_loop( loop = Mock(spec=["call_soon_threadsafe"]) - loop._thread_ident = None + loop._thread_id = None mock_ident.return_value = 5 hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 1 - loop._thread_ident = 5 + loop._thread_id = 5 mock_ident.return_value = 5 with pytest.raises(RuntimeError): hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 1 - loop._thread_ident = 1 + loop._thread_id = 1 mock_ident.return_value = 5 hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 2 @@ -78,7 +78,7 @@ async def test_run_callback_threadsafe(hass: HomeAssistant) -> None: nonlocal it_ran it_ran = True - with patch.dict(hass.loop.__dict__, {"_thread_ident": -1}): + with patch.dict(hass.loop.__dict__, {"_thread_id": -1}): assert hasync.run_callback_threadsafe(hass.loop, callback) assert it_ran is False @@ -98,7 +98,7 @@ async def test_callback_is_always_scheduled(hass: HomeAssistant) -> None: hasync.shutdown_run_callback_threadsafe(hass.loop) with ( - patch.dict(hass.loop.__dict__, {"_thread_ident": -1}), + patch.dict(hass.loop.__dict__, {"_thread_id": -1}), patch.object(hass.loop, "call_soon_threadsafe") as mock_call_soon_threadsafe, pytest.raises(RuntimeError), ): From a7bca9bceace548494a8fb0ad1a3d51d274cc56c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 17 Aug 2024 11:01:14 +0200 Subject: [PATCH 0823/1635] Use BIGINT SQL type for ID columns (#123973) Redo recorder ID migration from INT to BIGINT --- .../components/recorder/db_schema.py | 2 +- .../components/recorder/migration.py | 26 ++++++++++++++++--- tests/components/recorder/test_migrate.py | 2 +- .../components/recorder/test_v32_migration.py | 4 +-- 4 files changed, 26 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index dd293ed6bc2..f84459675ae 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 45 +SCHEMA_VERSION = 47 _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 185be02e9aa..a4a5fa87466 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -1112,7 +1112,7 @@ class _SchemaVersion16Migrator(_SchemaVersionMigrator, target_version=16): # Version 16 changes settings for the foreign key 
constraint on # states.old_state_id. Dropping the constraint is not really correct # we should have recreated it instead. Recreating the constraint now - # happens in the migration to schema version 45. + # happens in the migration to schema version 47. _drop_foreign_key_constraints( self.session_maker, self.engine, TABLE_STATES, "old_state_id" ) @@ -1637,6 +1637,24 @@ class _SchemaVersion43Migrator(_SchemaVersionMigrator, target_version=43): ) +class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): + def _apply_update(self) -> None: + """Version specific update method.""" + # The changes in this version are identical to the changes in version + # 46. We apply the same changes again because the migration code previously + # swallowed errors which caused some users' databases to end up in an + # undefined state after the migration. + + +class _SchemaVersion45Migrator(_SchemaVersionMigrator, target_version=45): + def _apply_update(self) -> None: + """Version specific update method.""" + # The changes in this version are identical to the changes in version + # 47. We apply the same changes again because the migration code previously + # swallowed errors which caused some users' databases to end up in an + # undefined state after the migration. + + FOREIGN_COLUMNS = ( ( "events", @@ -1669,7 +1687,7 @@ FOREIGN_COLUMNS = ( ) -class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): +class _SchemaVersion46Migrator(_SchemaVersionMigrator, target_version=46): def _apply_update(self) -> None: """Version specific update method.""" # We skip this step for SQLITE, it doesn't have differently sized integers @@ -1720,14 +1738,14 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): ) -class _SchemaVersion45Migrator(_SchemaVersionMigrator, target_version=45): +class _SchemaVersion47Migrator(_SchemaVersionMigrator, target_version=47): def _apply_update(self) -> None: """Version specific update method.""" # We skip this step for SQLITE, it doesn't have differently sized integers if self.engine.dialect.name == SupportedDialect.SQLITE: return - # Restore constraints dropped in migration to schema version 44 + # Restore constraints dropped in migration to schema version 46 _restore_foreign_key_constraints( self.session_maker, self.engine, diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index b56dfe3e189..1edbac3c566 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -215,7 +215,7 @@ async def test_database_migration_failed( # Test error handling in _modify_columns (12, "sqlalchemy.engine.base.Connection.execute", False, 1, 0), # Test error handling in _drop_foreign_key_constraints - (44, "homeassistant.components.recorder.migration.DropConstraint", False, 2, 1), + (46, "homeassistant.components.recorder.migration.DropConstraint", False, 2, 1), ], ) @pytest.mark.skip_on_db_engine(["sqlite"]) diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index c9ba330b758..1006a03f4ec 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -640,7 +640,7 @@ async def test_out_of_disk_space_while_removing_foreign_key( ix_states_event_id is removed from the states table. 
Note that the test is somewhat forced; the states.event_id foreign key constraint is - removed when migrating to schema version 44, inspecting the schema in + removed when migrating to schema version 46, inspecting the schema in cleanup_legacy_states_event_ids is not likely to fail. """ importlib.import_module(SCHEMA_MODULE) @@ -779,7 +779,7 @@ async def test_out_of_disk_space_while_removing_foreign_key( states_index_names = {index["name"] for index in states_indexes} assert instance.use_legacy_events_index is True # The states.event_id foreign key constraint was removed when - # migration to schema version 44 + # migration to schema version 46 assert ( await instance.async_add_executor_job( _get_event_id_foreign_keys From 533442f33e8c8419f40928eb242ea056c0bc1ebc Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sat, 17 Aug 2024 11:01:49 +0200 Subject: [PATCH 0824/1635] Add async friendly helper for validating config schemas (#123800) * Add async friendly helper for validating config schemas * Improve docstrings * Add tests --- homeassistant/config.py | 8 +- homeassistant/helpers/check_config.py | 6 +- homeassistant/helpers/config_validation.py | 85 +++++++++++++++++++++- tests/helpers/test_config_validation.py | 69 +++++++++++++++++- 4 files changed, 161 insertions(+), 7 deletions(-) diff --git a/homeassistant/config.py b/homeassistant/config.py index 948ab342e79..5066dffab47 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -1535,7 +1535,9 @@ async def async_process_component_config( # No custom config validator, proceed with schema validation if hasattr(component, "CONFIG_SCHEMA"): try: - return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), []) + return IntegrationConfigInfo( + await cv.async_validate(hass, component.CONFIG_SCHEMA, config), [] + ) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, @@ -1570,7 +1572,9 @@ async def async_process_component_config( # Validate component specific platform schema platform_path = f"{p_name}.{domain}" try: - p_validated = component_platform_schema(p_config) + p_validated = await cv.async_validate( + hass, component_platform_schema, p_config + ) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 06d836e8c20..43021fffac5 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -234,7 +234,7 @@ async def async_check_ha_config_file( # noqa: C901 config_schema = getattr(component, "CONFIG_SCHEMA", None) if config_schema is not None: try: - validated_config = config_schema(config) + validated_config = await cv.async_validate(hass, config_schema, config) # Don't fail if the validator removed the domain from the config if domain in validated_config: result[domain] = validated_config[domain] @@ -255,7 +255,9 @@ async def async_check_ha_config_file( # noqa: C901 for p_name, p_config in config_per_platform(config, domain): # Validate component specific platform schema try: - p_validated = component_platform_schema(p_config) + p_validated = await cv.async_validate( + hass, component_platform_schema, p_config + ) except vol.Invalid as ex: _comp_error(ex, domain, p_config, p_config) continue diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 6e9a6d5a69d..d7a5d5ae8a1 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -6,6 +6,7 @@ from collections.abc import 
Callable, Hashable import contextlib +from contextvars import ContextVar from datetime import ( date as date_sys, datetime as datetime_sys, @@ -13,6 +14,7 @@ from datetime import ( timedelta, ) from enum import Enum, StrEnum +import functools import logging from numbers import Number import os @@ -20,6 +22,7 @@ import re from socket import ( # type: ignore[attr-defined] # private, not in typeshed _GLOBAL_DEFAULT_TIMEOUT, ) +import threading from typing import Any, cast, overload from urllib.parse import urlparse from uuid import UUID @@ -94,6 +97,7 @@ from homeassistant.const import ( ) from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, + HomeAssistant, async_get_hass, async_get_hass_or_none, split_entity_id, @@ -114,6 +118,51 @@ from .typing import VolDictType, VolSchemaType TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM', 'HH:MM:SS' or 'HH:MM:SS.F'" +class MustValidateInExecutor(HomeAssistantError): + """Raised when validation must happen in an executor thread.""" + + +class _Hass(threading.local): + """Container which makes a HomeAssistant instance available to validators.""" + + hass: HomeAssistant | None = None + + +_hass = _Hass() +"""Set when doing async friendly schema validation.""" + + +def _async_get_hass_or_none() -> HomeAssistant | None: + """Return the HomeAssistant instance or None. + + First tries core.async_get_hass_or_none, then _hass which is + set when doing async friendly schema validation. + """ + return async_get_hass_or_none() or _hass.hass + + +_validating_async: ContextVar[bool] = ContextVar("_validating_async", default=False) +"""Set to True when doing async friendly schema validation.""" + + +def not_async_friendly[**_P, _R](validator: Callable[_P, _R]) -> Callable[_P, _R]: + """Mark a validator as not async friendly. + + This makes validation happen in an executor thread if validation is done by + async_validate, otherwise does nothing. 
+ """ + + @functools.wraps(validator) + def _not_async_friendly(*args: _P.args, **kwargs: _P.kwargs) -> _R: + if _validating_async.get() and async_get_hass_or_none(): + # Raise if doing async friendly validation and validation + # is happening in the event loop + raise MustValidateInExecutor + return validator(*args, **kwargs) + + return _not_async_friendly + + class UrlProtocolSchema(StrEnum): """Valid URL protocol schema values.""" @@ -217,6 +266,7 @@ def whitespace(value: Any) -> str: raise vol.Invalid(f"contains non-whitespace: {value}") +@not_async_friendly def isdevice(value: Any) -> str: """Validate that value is a real device.""" try: @@ -258,6 +308,7 @@ def is_regex(value: Any) -> re.Pattern[Any]: return r +@not_async_friendly def isfile(value: Any) -> str: """Validate that the value is an existing file.""" if value is None: @@ -271,6 +322,7 @@ def isfile(value: Any) -> str: return file_in +@not_async_friendly def isdir(value: Any) -> str: """Validate that the value is an existing dir.""" if value is None: @@ -664,7 +716,7 @@ def template(value: Any | None) -> template_helper.Template: if isinstance(value, (list, dict, template_helper.Template)): raise vol.Invalid("template value should be a string") - template_value = template_helper.Template(str(value), async_get_hass_or_none()) + template_value = template_helper.Template(str(value), _async_get_hass_or_none()) try: template_value.ensure_valid() @@ -682,7 +734,7 @@ def dynamic_template(value: Any | None) -> template_helper.Template: if not template_helper.is_template_string(str(value)): raise vol.Invalid("template value does not contain a dynamic template") - template_value = template_helper.Template(str(value), async_get_hass_or_none()) + template_value = template_helper.Template(str(value), _async_get_hass_or_none()) try: template_value.ensure_valid() @@ -1918,3 +1970,32 @@ historic_currency = vol.In( country = vol.In(COUNTRIES, msg="invalid ISO 3166 formatted country") language = vol.In(LANGUAGES, msg="invalid RFC 5646 formatted language") + + +async def async_validate( + hass: HomeAssistant, validator: Callable[[Any], Any], value: Any +) -> Any: + """Async friendly schema validation. + + If a validator decorated with @not_async_friendly is called, validation will be + deferred to an executor. If not, validation will happen in the event loop. 
+ """ + _validating_async.set(True) + try: + return validator(value) + except MustValidateInExecutor: + return await hass.async_add_executor_job( + _validate_in_executor, hass, validator, value + ) + finally: + _validating_async.set(False) + + +def _validate_in_executor( + hass: HomeAssistant, validator: Callable[[Any], Any], value: Any +) -> Any: + _hass.hass = hass + try: + return validator(value) + finally: + _hass.hass = None diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index ac3af13949b..973f504df08 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -3,13 +3,16 @@ from collections import OrderedDict from datetime import date, datetime, timedelta import enum +from functools import partial import logging import os from socket import _GLOBAL_DEFAULT_TIMEOUT +import threading from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import ANY, Mock, patch import uuid +import py import pytest import voluptuous as vol @@ -1738,3 +1741,67 @@ def test_determine_script_action_ambiguous() -> None: def test_determine_script_action_non_ambiguous() -> None: """Test determine script action with a non ambiguous action.""" assert cv.determine_script_action({"delay": "00:00:05"}) == "delay" + + +async def test_async_validate(hass: HomeAssistant, tmpdir: py.path.local) -> None: + """Test the async_validate helper.""" + validator_calls: dict[str, list[int]] = {} + + def _mock_validator_schema(real_func, *args): + calls = validator_calls.setdefault(real_func.__name__, []) + calls.append(threading.get_ident()) + return real_func(*args) + + CV_PREFIX = "homeassistant.helpers.config_validation" + with ( + patch(f"{CV_PREFIX}.isdir", wraps=partial(_mock_validator_schema, cv.isdir)), + patch(f"{CV_PREFIX}.string", wraps=partial(_mock_validator_schema, cv.string)), + ): + # Assert validation in event loop when not decorated with not_async_friendly + await cv.async_validate(hass, cv.string, "abcd") + assert validator_calls == {"string": [hass.loop_thread_id]} + validator_calls = {} + + # Assert validation in executor when decorated with not_async_friendly + await cv.async_validate(hass, cv.isdir, tmpdir) + assert validator_calls == {"isdir": [hass.loop_thread_id, ANY]} + assert validator_calls["isdir"][1] != hass.loop_thread_id + validator_calls = {} + + # Assert validation in executor when decorated with not_async_friendly + await cv.async_validate(hass, vol.All(cv.isdir, cv.string), tmpdir) + assert validator_calls == {"isdir": [hass.loop_thread_id, ANY], "string": [ANY]} + assert validator_calls["isdir"][1] != hass.loop_thread_id + assert validator_calls["string"][0] != hass.loop_thread_id + validator_calls = {} + + # Assert validation in executor when decorated with not_async_friendly + await cv.async_validate(hass, vol.All(cv.string, cv.isdir), tmpdir) + assert validator_calls == { + "isdir": [hass.loop_thread_id, ANY], + "string": [hass.loop_thread_id, ANY], + } + assert validator_calls["isdir"][1] != hass.loop_thread_id + assert validator_calls["string"][1] != hass.loop_thread_id + validator_calls = {} + + # Assert validation in event loop when not using cv.async_validate + cv.isdir(tmpdir) + assert validator_calls == {"isdir": [hass.loop_thread_id]} + validator_calls = {} + + # Assert validation in event loop when not using cv.async_validate + vol.All(cv.isdir, cv.string)(tmpdir) + assert validator_calls == { + "isdir": [hass.loop_thread_id], + "string": [hass.loop_thread_id], + 
} + validator_calls = {} + + # Assert validation in event loop when not using cv.async_validate + vol.All(cv.string, cv.isdir)(tmpdir) + assert validator_calls == { + "isdir": [hass.loop_thread_id], + "string": [hass.loop_thread_id], + } + validator_calls = {} From 7efd8089c88466a6c24c83f8eff2f3cbf431fabc Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Sat, 17 Aug 2024 07:47:27 -0400 Subject: [PATCH 0825/1635] Add Sonos test for media_player play, pause, stop, previous, next (#122978) * initial commit * update to use constant * retrigger checks --- tests/components/sonos/test_media_player.py | 36 +++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index ddf84efd7da..b7b879dc368 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -15,6 +15,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MP_DOMAIN, + SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, MediaPlayerEnqueue, @@ -33,6 +34,11 @@ from homeassistant.components.sonos.media_player import ( ) from homeassistant.const import ( ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, SERVICE_REPEAT_SET, SERVICE_SHUFFLE_SET, SERVICE_VOLUME_DOWN, @@ -963,3 +969,33 @@ async def test_volume( ) # SoCo uses 0..100 for its range. assert soco.volume == 30 + + +@pytest.mark.parametrize( + ("service", "client_call"), + [ + (SERVICE_MEDIA_PLAY, "play"), + (SERVICE_MEDIA_PAUSE, "pause"), + (SERVICE_MEDIA_STOP, "stop"), + (SERVICE_MEDIA_NEXT_TRACK, "next"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "previous"), + (SERVICE_CLEAR_PLAYLIST, "clear_queue"), + ], +) +async def test_media_transport( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + service: str, + client_call: str, +) -> None: + """Test the media player transport services.""" + await hass.services.async_call( + MP_DOMAIN, + service, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + ) + assert getattr(soco, client_call).call_count == 1 From 153eef16bb5d09d7550989c758a3a316133caf1a Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Sat, 17 Aug 2024 10:29:27 -0400 Subject: [PATCH 0826/1635] Add Sonos Test for plex media player enqueue options (#124120) initial commit --- tests/components/sonos/test_plex_playback.py | 50 +++++++++++++++++++- 1 file changed, 49 insertions(+), 1 deletion(-) diff --git a/tests/components/sonos/test_plex_playback.py b/tests/components/sonos/test_plex_playback.py index 428e970697e..01a66f640d5 100644 --- a/tests/components/sonos/test_plex_playback.py +++ b/tests/components/sonos/test_plex_playback.py @@ -8,17 +8,24 @@ import pytest from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_ENQUEUE, DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, + MediaPlayerEnqueue, MediaType, ) from homeassistant.components.plex import DOMAIN as PLEX_DOMAIN, PLEX_URI_SCHEME +from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from .conftest import MockSoCo -async def test_plex_play_media(hass: HomeAssistant, 
async_autosetup_sonos) -> None: + +async def test_plex_play_media( + hass: HomeAssistant, soco: MockSoCo, async_autosetup_sonos +) -> None: """Test playing media via the Plex integration.""" mock_plex_server = Mock() mock_lookup = mock_plex_server.lookup_media @@ -55,6 +62,9 @@ async def test_plex_play_media(hass: HomeAssistant, async_autosetup_sonos) -> No assert not mock_shuffle.called assert mock_lookup.mock_calls[0][1][0] == MediaType.MUSIC assert mock_lookup.mock_calls[0][2] == json.loads(media_content_id) + assert soco.clear_queue.call_count == 1 + assert soco.play_from_queue.call_count == 1 + soco.play_from_queue.assert_called_with(0) # Test handling shuffle in payload mock_lookup.reset_mock() @@ -130,3 +140,41 @@ async def test_plex_play_media(hass: HomeAssistant, async_autosetup_sonos) -> No assert mock_shuffle.called assert mock_lookup.mock_calls[0][1][0] == PLEX_DOMAIN assert mock_lookup.mock_calls[0][2] == {"plex_key": plex_item_key} + + mock_add_to_queue.reset_mock() + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: f"{PLEX_URI_SCHEME}{media_content_id}", + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + assert mock_add_to_queue.call_count == 1 + mock_add_to_queue.assert_called_with( + mock_lookup(), timeout=LONG_SERVICE_TIMEOUT + ) + + soco.play_from_queue.reset_mock() + mock_add_to_queue.reset_mock() + mock_add_to_queue.return_value = 9 + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: f"{PLEX_URI_SCHEME}{media_content_id}", + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, + }, + blocking=True, + ) + assert mock_add_to_queue.call_count == 1 + mock_add_to_queue.assert_called_with( + mock_lookup(), position=1, timeout=LONG_SERVICE_TIMEOUT + ) + assert soco.play_from_queue.call_count == 1 + soco.play_from_queue.assert_called_with(mock_add_to_queue.return_value - 1) From 85a47ffb68d7f5f3d5271df91105d47e6c9d899b Mon Sep 17 00:00:00 2001 From: Artem Draft Date: Sat, 17 Aug 2024 17:30:26 +0300 Subject: [PATCH 0827/1635] Bump pybravia to 0.3.4 (#124113) --- homeassistant/components/braviatv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/braviatv/manifest.json b/homeassistant/components/braviatv/manifest.json index 5a0a9def0ae..a445a34cfcd 100644 --- a/homeassistant/components/braviatv/manifest.json +++ b/homeassistant/components/braviatv/manifest.json @@ -7,7 +7,7 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["pybravia"], - "requirements": ["pybravia==0.3.3"], + "requirements": ["pybravia==0.3.4"], "ssdp": [ { "st": "urn:schemas-sony-com:service:ScalarWebAPI:1", diff --git a/requirements_all.txt b/requirements_all.txt index ad750c5ec4b..f4209f9d1dc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1759,7 +1759,7 @@ pyblu==0.4.0 pybotvac==0.0.25 # homeassistant.components.braviatv -pybravia==0.3.3 +pybravia==0.3.4 # homeassistant.components.nissan_leaf pycarwings2==2.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 808f93b6ad5..9d930f5986b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1424,7 +1424,7 @@ pyblu==0.4.0 pybotvac==0.0.25 # homeassistant.components.braviatv -pybravia==0.3.3 +pybravia==0.3.4 
 # homeassistant.components.cloudflare
 pycfdns==3.0.0

From 1bab3737075cbb419c3323472413d3b19c01ad09 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Sat, 17 Aug 2024 10:10:45 -0500
Subject: [PATCH 0828/1635] Revert "Exclude aiohappyeyeballs from license check" (#124116)

---
 script/licenses.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/script/licenses.py b/script/licenses.py
index 0663821ed2c..9d7da912398 100644
--- a/script/licenses.py
+++ b/script/licenses.py
@@ -125,7 +125,6 @@ EXCEPTIONS = {
     "PyXiaomiGateway",  # https://github.com/Danielhiversen/PyXiaomiGateway/pull/201
     "aiocomelit",  # https://github.com/chemelli74/aiocomelit/pull/138
     "aioecowitt",  # https://github.com/home-assistant-libs/aioecowitt/pull/180
-    "aiohappyeyeballs",  # Python-2.0.1
     "aioopenexchangerates",  # https://github.com/MartinHjelmare/aioopenexchangerates/pull/94
     "aiooui",  # https://github.com/Bluetooth-Devices/aiooui/pull/8
     "aioruuvigateway",  # https://github.com/akx/aioruuvigateway/pull/6

From 08234efedf60e58c7252678e373a3cd027ddda85 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Sat, 17 Aug 2024 10:32:58 -0500
Subject: [PATCH 0829/1635] Bump aiohomekit to 3.2.3 (#124115)

---
 homeassistant/components/homekit_controller/manifest.json | 2 +-
 requirements_all.txt                                      | 2 +-
 requirements_test_all.txt                                 | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/homekit_controller/manifest.json b/homeassistant/components/homekit_controller/manifest.json
index 007153aceaf..b2b215a98b9 100644
--- a/homeassistant/components/homekit_controller/manifest.json
+++ b/homeassistant/components/homekit_controller/manifest.json
@@ -14,6 +14,6 @@
   "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
   "iot_class": "local_push",
   "loggers": ["aiohomekit", "commentjson"],
-  "requirements": ["aiohomekit==3.2.2"],
+  "requirements": ["aiohomekit==3.2.3"],
   "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index f4209f9d1dc..533aa557fc3 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -255,7 +255,7 @@ aioguardian==2022.07.0
 aioharmony==0.2.10
 
 # homeassistant.components.homekit_controller
-aiohomekit==3.2.2
+aiohomekit==3.2.3
 
 # homeassistant.components.hue
 aiohue==4.7.2
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 9d930f5986b..8bbf2109d2a 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -240,7 +240,7 @@ aioguardian==2022.07.0
 aioharmony==0.2.10
 
 # homeassistant.components.homekit_controller
-aiohomekit==3.2.2
+aiohomekit==3.2.3
 
 # homeassistant.components.hue
 aiohue==4.7.2

From 08a76133c2d3ef555649da3ac9ec5b28c69e7452 Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Sat, 17 Aug 2024 19:24:35 +0200
Subject: [PATCH 0830/1635] Simplify water_heater.set_temperature service schema (#124101)

---
 homeassistant/components/water_heater/__init__.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py
index 2e749735b0c..e6e424329fb 100644
--- a/homeassistant/components/water_heater/__init__.py
+++ b/homeassistant/components/water_heater/__init__.py
@@ -13,7 +13,6 @@ import voluptuous as vol
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
-    ATTR_ENTITY_ID,
     ATTR_TEMPERATURE,
     PRECISION_TENTHS,
     PRECISION_WHOLE,
@@ -99,7 +98,6 @@ SET_AWAY_MODE_SCHEMA: VolDictType = {
 }
 SET_TEMPERATURE_SCHEMA: VolDictType = {
vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, vol.Optional(ATTR_OPERATION_MODE): cv.string, } SET_OPERATION_MODE_SCHEMA: VolDictType = { From 23fb4b50c91b308f83751aa27cef714d435480e2 Mon Sep 17 00:00:00 2001 From: Jordi Date: Sat, 17 Aug 2024 20:45:24 +0200 Subject: [PATCH 0831/1635] Add brand selection to support additional brands who use the same API for AquaCell integration (#121817) * Support harvey brand * Update tests * Moved the brand selection step to the same step as credentials * Update tests/components/aquacell/test_init.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/aquacell/__init__.py | 6 +++++- homeassistant/components/aquacell/config_flow.py | 16 +++++++++++++--- homeassistant/components/aquacell/const.py | 1 + homeassistant/components/aquacell/manifest.json | 2 +- homeassistant/components/aquacell/strings.json | 3 ++- homeassistant/generated/integrations.json | 2 +- tests/components/aquacell/__init__.py | 12 ++++++++++++ tests/components/aquacell/conftest.py | 16 +++++++++++++++- tests/components/aquacell/test_config_flow.py | 10 +++++++++- tests/components/aquacell/test_init.py | 11 +++++++++++ 10 files changed, 70 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/aquacell/__init__.py b/homeassistant/components/aquacell/__init__.py index 98cf5d7f0f0..e44c0f00fa8 100644 --- a/homeassistant/components/aquacell/__init__.py +++ b/homeassistant/components/aquacell/__init__.py @@ -3,12 +3,14 @@ from __future__ import annotations from aioaquacell import AquacellApi +from aioaquacell.const import Brand from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession +from .const import CONF_BRAND from .coordinator import AquacellCoordinator PLATFORMS = [Platform.SENSOR] @@ -20,7 +22,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AquacellConfigEntry) -> """Set up Aquacell from a config entry.""" session = async_get_clientsession(hass) - aquacell_api = AquacellApi(session) + brand = entry.data.get(CONF_BRAND, Brand.AQUACELL) + + aquacell_api = AquacellApi(session, brand) coordinator = AquacellCoordinator(hass, aquacell_api) diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index a9c749e9e2d..332cd16e749 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -7,18 +7,27 @@ import logging from typing import Any from aioaquacell import ApiException, AquacellApi, AuthenticationFailed +from aioaquacell.const import SUPPORTED_BRANDS, Brand import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN +from .const import ( + CONF_BRAND, + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, +) _LOGGER = logging.getLogger(__name__) DATA_SCHEMA = vol.Schema( { + vol.Required(CONF_BRAND, default=Brand.AQUACELL): vol.In( + {key: brand.name for key, brand in SUPPORTED_BRANDS.items()} + ), vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str, } @@ -33,7 +42,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): async def 
async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the initial step.""" + """Handle the cloud logon step.""" errors: dict[str, str] = {} if user_input is not None: await self.async_set_unique_id( @@ -42,7 +51,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() session = async_get_clientsession(self.hass) - api = AquacellApi(session) + api = AquacellApi(session, user_input[CONF_BRAND]) try: refresh_token = await api.authenticate( user_input[CONF_EMAIL], user_input[CONF_PASSWORD] @@ -59,6 +68,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): title=user_input[CONF_EMAIL], data={ **user_input, + CONF_BRAND: user_input[CONF_BRAND], CONF_REFRESH_TOKEN: refresh_token, CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, diff --git a/homeassistant/components/aquacell/const.py b/homeassistant/components/aquacell/const.py index 96568d2286b..818c96fc53a 100644 --- a/homeassistant/components/aquacell/const.py +++ b/homeassistant/components/aquacell/const.py @@ -5,6 +5,7 @@ from datetime import timedelta DOMAIN = "aquacell" DATA_AQUACELL = "DATA_AQUACELL" +CONF_BRAND = "brand" CONF_REFRESH_TOKEN = "refresh_token" CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" diff --git a/homeassistant/components/aquacell/manifest.json b/homeassistant/components/aquacell/manifest.json index de4a9986d6e..2d8b80f4488 100644 --- a/homeassistant/components/aquacell/manifest.json +++ b/homeassistant/components/aquacell/manifest.json @@ -1,6 +1,6 @@ { "domain": "aquacell", - "name": "Aquacell", + "name": "AquaCell", "codeowners": ["@Jordi1990"], "config_flow": true, "dependencies": ["http", "network"], diff --git a/homeassistant/components/aquacell/strings.json b/homeassistant/components/aquacell/strings.json index 32b6bba943a..53304d04804 100644 --- a/homeassistant/components/aquacell/strings.json +++ b/homeassistant/components/aquacell/strings.json @@ -2,8 +2,9 @@ "config": { "step": { "user": { - "description": "Fill in your Aquacell mobile app credentials", + "description": "Select the brand of the softener and fill in your softener mobile app credentials", "data": { + "brand": "Brand", "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 7df27aa5e68..a61e58d80d4 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -414,7 +414,7 @@ "iot_class": "local_polling" }, "aquacell": { - "name": "Aquacell", + "name": "AquaCell", "integration_type": "device", "config_flow": true, "iot_class": "cloud_polling" diff --git a/tests/components/aquacell/__init__.py b/tests/components/aquacell/__init__.py index c54bc539496..9190172145a 100644 --- a/tests/components/aquacell/__init__.py +++ b/tests/components/aquacell/__init__.py @@ -1,6 +1,9 @@ """Tests for the Aquacell integration.""" +from aioaquacell import Brand + from homeassistant.components.aquacell.const import ( + CONF_BRAND, CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, ) @@ -14,11 +17,20 @@ TEST_CONFIG_ENTRY = { CONF_PASSWORD: "test-password", CONF_REFRESH_TOKEN: "refresh-token", CONF_REFRESH_TOKEN_CREATION_TIME: 0, + CONF_BRAND: Brand.AQUACELL, +} + +TEST_CONFIG_ENTRY_WITHOUT_BRAND = { + CONF_EMAIL: "test@test.com", + CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "refresh-token", + CONF_REFRESH_TOKEN_CREATION_TIME: 0, } 
TEST_USER_INPUT = { CONF_EMAIL: "test@test.com", CONF_PASSWORD: "test-password", + CONF_BRAND: "aquacell", } DSN = "DSN" diff --git a/tests/components/aquacell/conftest.py b/tests/components/aquacell/conftest.py index f5a741ceed8..443f7da77ce 100644 --- a/tests/components/aquacell/conftest.py +++ b/tests/components/aquacell/conftest.py @@ -13,7 +13,7 @@ from homeassistant.components.aquacell.const import ( ) from homeassistant.const import CONF_EMAIL -from . import TEST_CONFIG_ENTRY +from . import TEST_CONFIG_ENTRY, TEST_CONFIG_ENTRY_WITHOUT_BRAND from tests.common import MockConfigEntry, load_json_array_fixture @@ -76,3 +76,17 @@ def mock_config_entry() -> MockConfigEntry: CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, ) + + +@pytest.fixture +def mock_config_entry_without_brand() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Aquacell", + unique_id=TEST_CONFIG_ENTRY[CONF_EMAIL], + data={ + **TEST_CONFIG_ENTRY_WITHOUT_BRAND, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + }, + ) diff --git a/tests/components/aquacell/test_config_flow.py b/tests/components/aquacell/test_config_flow.py index b6bcb82293c..b73852d513f 100644 --- a/tests/components/aquacell/test_config_flow.py +++ b/tests/components/aquacell/test_config_flow.py @@ -5,7 +5,11 @@ from unittest.mock import AsyncMock from aioaquacell import ApiException, AuthenticationFailed import pytest -from homeassistant.components.aquacell.const import CONF_REFRESH_TOKEN, DOMAIN +from homeassistant.components.aquacell.const import ( + CONF_BRAND, + CONF_REFRESH_TOKEN, + DOMAIN, +) from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant @@ -51,7 +55,9 @@ async def test_full_flow( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( @@ -65,6 +71,7 @@ async def test_full_flow( assert result2["data"][CONF_EMAIL] == TEST_CONFIG_ENTRY[CONF_EMAIL] assert result2["data"][CONF_PASSWORD] == TEST_CONFIG_ENTRY[CONF_PASSWORD] assert result2["data"][CONF_REFRESH_TOKEN] == TEST_CONFIG_ENTRY[CONF_REFRESH_TOKEN] + assert result2["data"][CONF_BRAND] == TEST_CONFIG_ENTRY[CONF_BRAND] assert len(mock_setup_entry.mock_calls) == 1 @@ -109,4 +116,5 @@ async def test_form_exceptions( assert result3["data"][CONF_EMAIL] == TEST_CONFIG_ENTRY[CONF_EMAIL] assert result3["data"][CONF_PASSWORD] == TEST_CONFIG_ENTRY[CONF_PASSWORD] assert result3["data"][CONF_REFRESH_TOKEN] == TEST_CONFIG_ENTRY[CONF_REFRESH_TOKEN] + assert result3["data"][CONF_BRAND] == TEST_CONFIG_ENTRY[CONF_BRAND] assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/aquacell/test_init.py b/tests/components/aquacell/test_init.py index a70d077e180..580d87f4d9a 100644 --- a/tests/components/aquacell/test_init.py +++ b/tests/components/aquacell/test_init.py @@ -38,6 +38,17 @@ async def test_load_unload_entry( assert entry.state is ConfigEntryState.NOT_LOADED +async def test_load_withoutbrand( + hass: HomeAssistant, + mock_aquacell_api: AsyncMock, + mock_config_entry_without_brand: MockConfigEntry, +) -> None: + """Test load entry without brand.""" + await setup_integration(hass, mock_config_entry_without_brand) + + assert mock_config_entry_without_brand.state is ConfigEntryState.LOADED + + async 
def test_coordinator_update_valid_refresh_token(
     hass: HomeAssistant,
     mock_aquacell_api: AsyncMock,

From d72d4286db1e4e1df25c02888a26d297cd09dad5 Mon Sep 17 00:00:00 2001
From: Allen Porter
Date: Sat, 17 Aug 2024 12:11:19 -0700
Subject: [PATCH 0832/1635] Bump nest to 4.0.7 to increase subscriber deadline (#124131)

Bump nest to 4.0.7
---
 homeassistant/components/nest/manifest.json | 2 +-
 requirements_all.txt                        | 2 +-
 requirements_test_all.txt                   | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json
index fbe5ddb6534..3472fa64e8f 100644
--- a/homeassistant/components/nest/manifest.json
+++ b/homeassistant/components/nest/manifest.json
@@ -20,5 +20,5 @@
   "iot_class": "cloud_push",
   "loggers": ["google_nest_sdm"],
   "quality_scale": "platinum",
-  "requirements": ["google-nest-sdm==4.0.6"]
+  "requirements": ["google-nest-sdm==4.0.7"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 533aa557fc3..af14e1b5c36 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -989,7 +989,7 @@ google-cloud-texttospeech==2.16.3
 google-generativeai==0.6.0
 
 # homeassistant.components.nest
-google-nest-sdm==4.0.6
+google-nest-sdm==4.0.7
 
 # homeassistant.components.google_travel_time
 googlemaps==2.5.1
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 8bbf2109d2a..70d51e3cf1c 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -836,7 +836,7 @@ google-cloud-pubsub==2.13.11
 google-generativeai==0.6.0
 
 # homeassistant.components.nest
-google-nest-sdm==4.0.6
+google-nest-sdm==4.0.7
 
 # homeassistant.components.google_travel_time
 googlemaps==2.5.1

From 63d1cc10e2960e81350e31cce36265f3602e0fa2 Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Sat, 17 Aug 2024 21:11:32 +0200
Subject: [PATCH 0833/1635] Update homeassistant integration tests to async (#124117)

---
 tests/components/homeassistant/test_init.py | 319 ++++++++------------
 1 file changed, 127 insertions(+), 192 deletions(-)

diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py
index d090da280a0..a0902fe62df 100644
--- a/tests/components/homeassistant/test_init.py
+++ b/tests/components/homeassistant/test_init.py
@@ -1,7 +1,5 @@
 """The tests for Core components."""
 
-import asyncio
-import unittest
 from unittest.mock import Mock, patch
 
 import pytest
@@ -44,206 +42,143 @@ from tests.common import (
     MockUser,
     async_capture_events,
     async_mock_service,
-    get_test_home_assistant,
-    mock_service,
     patch_yaml_files,
 )
 
 
-def turn_on(hass, entity_id=None, **service_data):
-    """Turn specified entity on if possible.
-
-    This is a legacy helper method. Do not use it for new tests.
-    """
-    if entity_id is not None:
-        service_data[ATTR_ENTITY_ID] = entity_id
-
-    hass.services.call(ha.DOMAIN, SERVICE_TURN_ON, service_data)
+async def test_is_on(hass: HomeAssistant) -> None:
+    """Test is_on method."""
+    with pytest.raises(
+        RuntimeError,
+        match="Detected code that uses homeassistant.components.is_on. This is deprecated and will stop working",
+    ):
+        assert comps.is_on(hass, "light.Bowl")
 
 
-def turn_off(hass, entity_id=None, **service_data):
-    """Turn specified entity off.
-
-    This is a legacy helper method. Do not use it for new tests.
- """ - if entity_id is not None: - service_data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(ha.DOMAIN, SERVICE_TURN_OFF, service_data) +async def test_turn_on_without_entities(hass: HomeAssistant) -> None: + """Test turn_on method without entities.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TURN_ON) + await hass.services.async_call(ha.DOMAIN, SERVICE_TURN_ON, blocking=True) + assert len(calls) == 0 -def toggle(hass, entity_id=None, **service_data): - """Toggle specified entity. - - This is a legacy helper method. Do not use it for new tests. - """ - if entity_id is not None: - service_data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(ha.DOMAIN, SERVICE_TOGGLE, service_data) - - -def stop(hass): - """Stop Home Assistant. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_STOP) - - -def restart(hass): - """Stop Home Assistant. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART) - - -def check_config(hass): - """Check the config files. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_CHECK_CONFIG) - - -def reload_core_config(hass): - """Reload the core config. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG) - - -class TestComponentsCore(unittest.TestCase): - """Test homeassistant.components module.""" - - def setUp(self): - """Set up things to be run when tests are started.""" - self._manager = get_test_home_assistant() - self.hass = self._manager.__enter__() - assert asyncio.run_coroutine_threadsafe( - async_setup_component(self.hass, "homeassistant", {}), self.hass.loop - ).result() - - self.hass.states.set("light.Bowl", STATE_ON) - self.hass.states.set("light.Ceiling", STATE_OFF) - - def tearDown(self) -> None: - """Tear down hass object.""" - self.hass.stop() - self._manager.__exit__(None, None, None) - - def test_is_on(self): - """Test is_on method.""" - with pytest.raises( - RuntimeError, - match="Detected code that uses homeassistant.components.is_on. 
This is deprecated and will stop working", - ): - assert comps.is_on(self.hass, "light.Bowl") - - def test_turn_on_without_entities(self): - """Test turn_on method without entities.""" - calls = mock_service(self.hass, "light", SERVICE_TURN_ON) - turn_on(self.hass) - self.hass.block_till_done() - assert len(calls) == 0 - - def test_turn_on(self): - """Test turn_on method.""" - calls = mock_service(self.hass, "light", SERVICE_TURN_ON) - turn_on(self.hass, "light.Ceiling") - self.hass.block_till_done() - assert len(calls) == 1 - - def test_turn_off(self): - """Test turn_off method.""" - calls = mock_service(self.hass, "light", SERVICE_TURN_OFF) - turn_off(self.hass, "light.Bowl") - self.hass.block_till_done() - assert len(calls) == 1 - - def test_toggle(self): - """Test toggle method.""" - calls = mock_service(self.hass, "light", SERVICE_TOGGLE) - toggle(self.hass, "light.Bowl") - self.hass.block_till_done() - assert len(calls) == 1 - - @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) - def test_reload_core_conf(self): - """Test reload core conf service.""" - ent = entity.Entity() - ent.entity_id = "test.entity" - ent.hass = self.hass - ent.schedule_update_ha_state() - self.hass.block_till_done() - - state = self.hass.states.get("test.entity") - assert state is not None - assert state.state == "unknown" - assert state.attributes == {} - - files = { - config.YAML_CONFIG_FILE: yaml.dump( - { - ha.DOMAIN: { - "country": "SE", # To avoid creating issue country_not_configured - "latitude": 10, - "longitude": 20, - "customize": {"test.Entity": {"hello": "world"}}, - } - } - ) - } - with patch_yaml_files(files, True): - reload_core_config(self.hass) - self.hass.block_till_done() - - assert self.hass.config.latitude == 10 - assert self.hass.config.longitude == 20 - - ent.schedule_update_ha_state() - self.hass.block_till_done() - - state = self.hass.states.get("test.entity") - assert state is not None - assert state.state == "unknown" - assert state.attributes.get("hello") == "world" - - @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) - @patch("homeassistant.components.homeassistant._LOGGER.error") - @patch("homeassistant.config.async_process_ha_core_config") - def test_reload_core_with_wrong_conf(self, mock_process, mock_error): - """Test reload core conf service.""" - files = {config.YAML_CONFIG_FILE: yaml.dump(["invalid", "config"])} - with patch_yaml_files(files, True): - reload_core_config(self.hass) - self.hass.block_till_done() - - assert mock_error.called - assert mock_process.called is False - - @patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) - @patch( - "homeassistant.config.async_check_ha_config_file", - side_effect=HomeAssistantError("Test error"), +async def test_turn_on(hass: HomeAssistant) -> None: + """Test turn_on method.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TURN_ON) + await hass.services.async_call( + ha.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "light.Ceiling"}, blocking=True ) - def test_restart_homeassistant_wrong_conf(self, mock_check, mock_restart): - """Test stop service.""" - restart(self.hass) - self.hass.block_till_done() - assert mock_check.called - assert not mock_restart.called + assert len(calls) == 1 - @patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) - @patch("homeassistant.config.async_check_ha_config_file", return_value=None) - def test_check_config(self, mock_check, mock_stop): - """Test stop service.""" - 
check_config(self.hass) - self.hass.block_till_done() - assert mock_check.called - assert not mock_stop.called + +async def test_turn_off(hass: HomeAssistant) -> None: + """Test turn_off method.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TURN_OFF) + await hass.services.async_call( + ha.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "light.Bowl"}, blocking=True + ) + assert len(calls) == 1 + + +async def test_toggle(hass: HomeAssistant) -> None: + """Test toggle method.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TOGGLE) + await hass.services.async_call( + ha.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "light.Bowl"}, blocking=True + ) + assert len(calls) == 1 + + +@patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) +async def test_reload_core_conf(hass: HomeAssistant) -> None: + """Test reload core conf service.""" + await async_setup_component(hass, ha.DOMAIN, {}) + ent = entity.Entity() + ent.entity_id = "test.entity" + ent.hass = hass + ent.async_write_ha_state() + + state = hass.states.get("test.entity") + assert state is not None + assert state.state == "unknown" + assert state.attributes == {} + + files = { + config.YAML_CONFIG_FILE: yaml.dump( + { + ha.DOMAIN: { + "country": "SE", # To avoid creating issue country_not_configured + "latitude": 10, + "longitude": 20, + "customize": {"test.Entity": {"hello": "world"}}, + } + } + ) + } + with patch_yaml_files(files, True): + await hass.services.async_call( + ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG, blocking=True + ) + + assert hass.config.latitude == 10 + assert hass.config.longitude == 20 + + ent.async_write_ha_state() + + state = hass.states.get("test.entity") + assert state is not None + assert state.state == "unknown" + assert state.attributes.get("hello") == "world" + + +@patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) +@patch("homeassistant.components.homeassistant._LOGGER.error") +@patch("homeassistant.config.async_process_ha_core_config") +async def test_reload_core_with_wrong_conf( + mock_process, mock_error, hass: HomeAssistant +) -> None: + """Test reload core conf service.""" + files = {config.YAML_CONFIG_FILE: yaml.dump(["invalid", "config"])} + await async_setup_component(hass, ha.DOMAIN, {}) + with patch_yaml_files(files, True): + await hass.services.async_call( + ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG, blocking=True + ) + + assert mock_error.called + assert mock_process.called is False + + +@patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) +@patch( + "homeassistant.config.async_check_ha_config_file", + side_effect=HomeAssistantError("Test error"), +) +async def test_restart_homeassistant_wrong_conf( + mock_check, mock_restart, hass: HomeAssistant +) -> None: + """Test restart service with error.""" + await async_setup_component(hass, ha.DOMAIN, {}) + with pytest.raises(HomeAssistantError, match="Test error"): + await hass.services.async_call( + ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART, blocking=True + ) + assert mock_check.called + assert not mock_restart.called + + +@patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) +@patch("homeassistant.config.async_check_ha_config_file", return_value=None) +async def test_check_config(mock_check, mock_stop, hass: HomeAssistant) -> None: + """Test stop service.""" + await async_setup_component(hass, ha.DOMAIN, {}) + await hass.services.async_call(ha.DOMAIN, SERVICE_CHECK_CONFIG, 
blocking=True)
+    assert mock_check.called
+    assert not mock_stop.called
 
 
 async def test_turn_on_skips_domains_without_service(

From 3b2893f2f4e16f9a05d9cc4a7ba9f31984c841be Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Sat, 17 Aug 2024 15:14:28 -0500
Subject: [PATCH 0834/1635] Fix blocking I/O while validating core config schema (#124125)

---
 homeassistant/config.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/homeassistant/config.py b/homeassistant/config.py
index 5066dffab47..9063429ca91 100644
--- a/homeassistant/config.py
+++ b/homeassistant/config.py
@@ -817,7 +817,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non
 
     This method is a coroutine.
     """
-    config = CORE_CONFIG_SCHEMA(config)
+    # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir
+    # so we need to run it in an executor job.
+    config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config)
 
     # Only load auth during startup.
     if not hasattr(hass, "auth"):

From 59d72bb4edf8678823025c4ad42547939b023b9b Mon Sep 17 00:00:00 2001
From: Michael <35783820+mib1185@users.noreply.github.com>
Date: Sun, 18 Aug 2024 00:49:39 +0200
Subject: [PATCH 0835/1635] Use better config validation for integrations with legacy setup method, but only config entry setup (#123203)

use config_entry_only_config_schema over cv.removed

Co-authored-by: J. Nick Koston
---
 homeassistant/components/doorbird/__init__.py    | 2 +-
 homeassistant/components/lametric/__init__.py    | 2 +-
 homeassistant/components/nfandroidtv/__init__.py | 2 +-
 homeassistant/components/tado/__init__.py        | 2 +-
 homeassistant/components/tibber/__init__.py      | 2 +-
 homeassistant/components/webostv/__init__.py     | 2 +-
 6 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/homeassistant/components/doorbird/__init__.py b/homeassistant/components/doorbird/__init__.py
index 113b8031d9b..c943fa68766 100644
--- a/homeassistant/components/doorbird/__init__.py
+++ b/homeassistant/components/doorbird/__init__.py
@@ -29,7 +29,7 @@ from .view import DoorBirdRequestView
 
 CONF_CUSTOM_URL = "hass_url_override"
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 
 _LOGGER = logging.getLogger(__name__)
 
diff --git a/homeassistant/components/lametric/__init__.py b/homeassistant/components/lametric/__init__.py
index 10fdee0ddc7..779cfa10445 100644
--- a/homeassistant/components/lametric/__init__.py
+++ b/homeassistant/components/lametric/__init__.py
@@ -12,7 +12,7 @@ from .const import DOMAIN, PLATFORMS
 from .coordinator import LaMetricDataUpdateCoordinator
 from .services import async_setup_services
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
diff --git a/homeassistant/components/nfandroidtv/__init__.py b/homeassistant/components/nfandroidtv/__init__.py
index 42d42e26d1f..ae7a4e615d4 100644
--- a/homeassistant/components/nfandroidtv/__init__.py
+++ b/homeassistant/components/nfandroidtv/__init__.py
@@ -14,7 +14,7 @@ from .const import DATA_HASS_CONFIG, DOMAIN
 
 PLATFORMS = [Platform.NOTIFY]
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py
index 2c853a0e6e3..084819d8e68 100644
--- a/homeassistant/components/tado/__init__.py
+++ b/homeassistant/components/tado/__init__.py
@@ -44,7 +44,7 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=4)
 SCAN_INTERVAL = timedelta(minutes=5)
 SCAN_MOBILE_DEVICE_INTERVAL = timedelta(seconds=30)
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 
 
 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py
index 51d6f0560f1..7c44e797780 100644
--- a/homeassistant/components/tibber/__init__.py
+++ b/homeassistant/components/tibber/__init__.py
@@ -25,7 +25,7 @@ from .services import async_setup_services
 
 PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 
 _LOGGER = logging.getLogger(__name__)
 
diff --git a/homeassistant/components/webostv/__init__.py b/homeassistant/components/webostv/__init__.py
index 36950b0e02a..499d0a85518 100644
--- a/homeassistant/components/webostv/__init__.py
+++ b/homeassistant/components/webostv/__init__.py
@@ -40,7 +40,7 @@ from .const import (
     WEBOSTV_EXCEPTIONS,
 )
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
+CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 
 CALL_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids})
 

From ec0012209d1a33379b33c8380de869beb95b86e9 Mon Sep 17 00:00:00 2001
From: Maciej Bieniek
Date: Sun, 18 Aug 2024 11:58:51 +0200
Subject: [PATCH 0836/1635] Add missing sensors for Shelly Plus RGBW PM (#123589)

* Add missing sensors for Shelly Plus RGBW PM

* Add tests
---
 homeassistant/components/shelly/sensor.py | 106 ++++++++++++++++++++++
 tests/components/shelly/test_sensor.py    |  90 ++++++++++++++++++
 2 files changed, 196 insertions(+)

diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py
index 8c1333a989c..6cabee6ccba 100644
--- a/homeassistant/components/shelly/sensor.py
+++ b/homeassistant/components/shelly/sensor.py
@@ -392,6 +392,22 @@ RPC_SENSORS: Final = {
         device_class=SensorDeviceClass.POWER,
         state_class=SensorStateClass.MEASUREMENT,
     ),
+    "power_rgb": RpcSensorDescription(
+        key="rgb",
+        sub_key="apower",
+        name="Power",
+        native_unit_of_measurement=UnitOfPower.WATT,
+        device_class=SensorDeviceClass.POWER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
+    "power_rgbw": RpcSensorDescription(
+        key="rgbw",
+        sub_key="apower",
+        name="Power",
+        native_unit_of_measurement=UnitOfPower.WATT,
+        device_class=SensorDeviceClass.POWER,
+        state_class=SensorStateClass.MEASUREMENT,
+    ),
     "a_act_power": RpcSensorDescription(
         key="em",
         sub_key="a_act_power",
@@ -536,6 +552,28 @@ RPC_SENSORS: Final = {
         state_class=SensorStateClass.MEASUREMENT,
         entity_registry_enabled_default=False,
     ),
+    "voltage_rgb": RpcSensorDescription(
+        key="rgb",
+        sub_key="voltage",
+        name="Voltage",
+        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
+        value=lambda status, _: None if status is None else float(status),
+        suggested_display_precision=1,
device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), "a_voltage": RpcSensorDescription( key="em", sub_key="a_voltage", @@ -603,6 +641,26 @@ RPC_SENSORS: Final = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + "current_rgb": RpcSensorDescription( + key="rgb", + sub_key="current", + name="Current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value=lambda status, _: None if status is None else float(status), + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + "current_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="current", + name="Current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value=lambda status, _: None if status is None else float(status), + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), "a_current": RpcSensorDescription( key="em", sub_key="a_current", @@ -683,6 +741,28 @@ RPC_SENSORS: Final = { device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), + "energy_rgb": RpcSensorDescription( + key="rgb", + sub_key="aenergy", + name="Energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value=lambda status, _: status["total"], + suggested_display_precision=2, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + "energy_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="aenergy", + name="Energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value=lambda status, _: status["total"], + suggested_display_precision=2, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), "total_act": RpcSensorDescription( key="emdata", sub_key="total_act", @@ -895,6 +975,32 @@ RPC_SENSORS: Final = { entity_category=EntityCategory.DIAGNOSTIC, use_polling_coordinator=True, ), + "temperature_rgb": RpcSensorDescription( + key="rgb", + sub_key="temperature", + name="Device temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value=lambda status, _: status["tC"], + suggested_display_precision=1, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + use_polling_coordinator=True, + ), + "temperature_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="temperature", + name="Device temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value=lambda status, _: status["tC"], + suggested_display_precision=1, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + use_polling_coordinator=True, + ), "temperature_0": RpcSensorDescription( key="temperature", sub_key="tC", diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index a39123a6722..843270107e0 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -25,8 +25,12 @@ from homeassistant.const import ( PERCENTAGE, STATE_UNAVAILABLE, STATE_UNKNOWN, + UnitOfElectricCurrent, + UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, + 
UnitOfPower, + UnitOfTemperature, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.device_registry import DeviceRegistry @@ -1189,3 +1193,89 @@ async def test_rpc_remove_enum_virtual_sensor_when_orphaned( entry = entity_registry.async_get(entity_id) assert not entry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("light_type", ["rgb", "rgbw"]) +async def test_rpc_rgbw_sensors( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + light_type: str, +) -> None: + """Test sensors for RGB/RGBW light.""" + config = deepcopy(mock_rpc_device.config) + config[f"{light_type}:0"] = {"id": 0} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status[f"{light_type}:0"] = { + "temperature": {"tC": 54.3, "tF": 129.7}, + "aenergy": {"total": 45.141}, + "apower": 12.2, + "current": 0.23, + "voltage": 12.4, + } + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 2) + + entity_id = "sensor.test_name_power" + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.2" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" + + entity_id = "sensor.test_name_energy" + + state = hass.states.get(entity_id) + assert state + assert state.state == "0.045141" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" + + entity_id = "sensor.test_name_current" + + state = hass.states.get(entity_id) + assert state + assert state.state == "0.23" + assert ( + state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricCurrent.AMPERE + ) + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" + + entity_id = "sensor.test_name_voltage" + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.4" + assert ( + state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricPotential.VOLT + ) + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-voltage_{light_type}" + + entity_id = "sensor.test_name_device_temperature" + + state = hass.states.get(entity_id) + assert state + assert state.state == "54.3" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" From c4fee124b3b5888bc4b3d2406ccfb96796320df4 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 13:53:59 +0200 Subject: [PATCH 0837/1635] Add missing hass type hint in flux tests (#124078) --- tests/components/flux/test_switch.py | 65 +++++++++++++++++++++------- 1 file changed, 49 insertions(+), 16 deletions(-) diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index f957083dd11..ab0e8a556c4 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -1,5 +1,6 @@ """The tests for the Flux switch platform.""" 
+from datetime import date, datetime from unittest.mock import patch from freezegun import freeze_time @@ -187,7 +188,9 @@ async def test_flux_when_switch_is_off( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -242,7 +245,9 @@ async def test_flux_before_sunrise( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=5) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -364,7 +369,9 @@ async def test_flux_after_sunrise_before_sunset( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -426,7 +433,9 @@ async def test_flux_after_sunset_before_stop( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -489,7 +498,9 @@ async def test_flux_after_stop_before_sunrise( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -551,7 +562,9 @@ async def test_flux_with_custom_start_stop_times( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -618,7 +631,9 @@ async def test_flux_before_sunrise_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -684,7 +699,9 @@ async def test_flux_after_sunrise_before_sunset_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -750,7 +767,9 @@ async def test_flux_after_sunset_before_midnight_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) 
sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -816,7 +835,9 @@ async def test_flux_after_sunset_after_midnight_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -882,7 +903,9 @@ async def test_flux_after_stop_before_sunrise_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -945,7 +968,9 @@ async def test_flux_with_custom_colortemps( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1010,7 +1035,9 @@ async def test_flux_with_custom_brightness( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1091,7 +1118,9 @@ async def test_flux_with_multiple_lights( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1158,7 +1187,9 @@ async def test_flux_with_mired( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1219,7 +1250,9 @@ async def test_flux_with_rgb( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, date: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time From e3287b93a598a370884fc14ba30ba37f5861e0cb Mon Sep 17 00:00:00 2001 From: MarkGodwin <10632972+MarkGodwin@users.noreply.github.com> Date: Sun, 18 Aug 2024 13:23:47 +0100 Subject: [PATCH 0838/1635] Bump tplink-omada-api to 1.4.2 (#124136) Fix for bad pre-registered clients --- homeassistant/components/tplink_omada/manifest.json | 2 +- requirements_all.txt | 2 +- 
requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink_omada/manifest.json b/homeassistant/components/tplink_omada/manifest.json index 9544470d7a9..6bde656dc30 100644 --- a/homeassistant/components/tplink_omada/manifest.json +++ b/homeassistant/components/tplink_omada/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tplink_omada", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["tplink-omada-client==1.3.12"] + "requirements": ["tplink-omada-client==1.4.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index af14e1b5c36..58b20b2f378 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2792,7 +2792,7 @@ total-connect-client==2024.5 tp-connected==0.0.4 # homeassistant.components.tplink_omada -tplink-omada-client==1.3.12 +tplink-omada-client==1.4.2 # homeassistant.components.transmission transmission-rpc==7.0.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 70d51e3cf1c..f32f707213c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2193,7 +2193,7 @@ toonapi==0.3.0 total-connect-client==2024.5 # homeassistant.components.tplink_omada -tplink-omada-client==1.3.12 +tplink-omada-client==1.4.2 # homeassistant.components.transmission transmission-rpc==7.0.3 From 04b0760e270c4989209eceb722672b9b5d1384d6 Mon Sep 17 00:00:00 2001 From: Daniel Rozycki Date: Sun, 18 Aug 2024 05:24:44 -0700 Subject: [PATCH 0839/1635] Skip NextBus update if integration is still loading (#123564) * Skip NextBus update if integration is still loading Fixes a race between the loading thread and update thread leading to an unrecoverable error * Use async_at_started * Use local copy of _route_stops to avoid NextBus race condition * Update homeassistant/components/nextbus/coordinator.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/nextbus/coordinator.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nextbus/coordinator.py b/homeassistant/components/nextbus/coordinator.py index 6c438f6f808..781742e4c08 100644 --- a/homeassistant/components/nextbus/coordinator.py +++ b/homeassistant/components/nextbus/coordinator.py @@ -50,13 +50,15 @@ class NextBusDataUpdateCoordinator(DataUpdateCoordinator): async def _async_update_data(self) -> dict[str, Any]: """Fetch data from NextBus.""" - self.logger.debug("Updating data from API. Routes: %s", str(self._route_stops)) + + _route_stops = set(self._route_stops) + self.logger.debug("Updating data from API. 
Routes: %s", str(_route_stops)) def _update_data() -> dict: """Fetch data from NextBus.""" self.logger.debug("Updating data from API (executor)") predictions: dict[RouteStop, dict[str, Any]] = {} - for route_stop in self._route_stops: + for route_stop in _route_stops: prediction_results: list[dict[str, Any]] = [] try: prediction_results = self.client.predictions_for_stop( From 0c5a3fab22c3831efcf5e3f37a54001478be4c2d Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 18 Aug 2024 14:53:35 +0200 Subject: [PATCH 0840/1635] Cleanup the Fritz!Smarthome light entity (#123488) * use self.data when ever possible * use short hand attributes for color mode and supported color modes * color mode can change during runtime * group executer jobs --- homeassistant/components/fritzbox/light.py | 31 ++++++++++------------ 1 file changed, 14 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index 1009b0fb368..c19d7a8600d 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -20,8 +20,6 @@ from . import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity from .const import COLOR_MODE, LOGGER from .coordinator import FritzboxConfigEntry -SUPPORTED_COLOR_MODES = {ColorMode.COLOR_TEMP, ColorMode.HS} - async def async_setup_entry( hass: HomeAssistant, @@ -61,6 +59,12 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): super().__init__(coordinator, ain, None) self._supported_hs: dict[int, list[int]] = {} + self._attr_supported_color_modes = {ColorMode.ONOFF} + if self.data.has_color: + self._attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS} + elif self.data.has_level: + self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + @property def is_on(self) -> bool: """If the light is currently on or off.""" @@ -95,15 +99,6 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): return ColorMode.BRIGHTNESS return ColorMode.ONOFF - @property - def supported_color_modes(self) -> set[ColorMode]: - """Flag supported color modes.""" - if self.data.has_color: - return SUPPORTED_COLOR_MODES - if self.data.has_level: - return {ColorMode.BRIGHTNESS} - return {ColorMode.ONOFF} - async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" if kwargs.get(ATTR_BRIGHTNESS) is not None: @@ -157,12 +152,14 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): async def async_added_to_hass(self) -> None: """Get light attributes from device after entity is added to hass.""" await super().async_added_to_hass() - supported_colors = await self.hass.async_add_executor_job( - self.coordinator.data.devices[self.ain].get_colors - ) - supported_color_temps = await self.hass.async_add_executor_job( - self.coordinator.data.devices[self.ain].get_color_temps - ) + + def _get_color_data() -> tuple[dict, list]: + return (self.data.get_colors(), self.data.get_color_temps()) + + ( + supported_colors, + supported_color_temps, + ) = await self.hass.async_add_executor_job(_get_color_data) if supported_color_temps: # only available for color bulbs From 565f271c5cb6f35d02a847014d62bdf8e08bff8f Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:21:04 +0200 Subject: [PATCH 0841/1635] Bumb python-homewizard-energy to 6.3.0 (#124150) --- homeassistant/components/homewizard/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 
insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homewizard/manifest.json b/homeassistant/components/homewizard/manifest.json index dbad91b1fb8..65672903eb8 100644 --- a/homeassistant/components/homewizard/manifest.json +++ b/homeassistant/components/homewizard/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_polling", "loggers": ["homewizard_energy"], "quality_scale": "platinum", - "requirements": ["python-homewizard-energy==v6.2.0"], + "requirements": ["python-homewizard-energy==v6.3.0"], "zeroconf": ["_hwenergy._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 58b20b2f378..bdaa2ecdd91 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2286,7 +2286,7 @@ python-gitlab==1.6.0 python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.2.0 +python-homewizard-energy==v6.3.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f32f707213c..8bfd4ec095f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1810,7 +1810,7 @@ python-fullykiosk==0.0.14 python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.2.0 +python-homewizard-energy==v6.3.0 # homeassistant.components.izone python-izone==1.2.9 From c8797298ea6975bf6f05bf732e09ebc737a79b8c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:34:59 +0200 Subject: [PATCH 0842/1635] Add missing hass type hint in component tests (e) (#124075) --- tests/components/eafm/test_sensor.py | 7 ++++++- tests/components/energy/test_sensor.py | 7 +++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/tests/components/eafm/test_sensor.py b/tests/components/eafm/test_sensor.py index 986e1153cac..add604167b9 100644 --- a/tests/components/eafm/test_sensor.py +++ b/tests/components/eafm/test_sensor.py @@ -1,6 +1,9 @@ """Tests for polling measures.""" +from collections.abc import Callable, Coroutine import datetime +from typing import Any +from unittest.mock import AsyncMock import aiohttp import pytest @@ -23,7 +26,9 @@ CONNECTION_EXCEPTIONS = [ ] -async def async_setup_test_fixture(hass, mock_get_station, initial_value): +async def async_setup_test_fixture( + hass: HomeAssistant, mock_get_station: AsyncMock, initial_value: dict[str, Any] +) -> tuple[MockConfigEntry, Callable[[Any], Coroutine[Any, Any, None]]]: """Create a dummy config entry for testing polling.""" mock_get_station.return_value = initial_value diff --git a/tests/components/energy/test_sensor.py b/tests/components/energy/test_sensor.py index 0439ac2c028..a27451b853d 100644 --- a/tests/components/energy/test_sensor.py +++ b/tests/components/energy/test_sensor.py @@ -1,5 +1,6 @@ """Test the Energy sensors.""" +from collections.abc import Callable, Coroutine import copy from datetime import timedelta from typing import Any @@ -37,10 +38,12 @@ TEST_TIME_ADVANCE_INTERVAL = timedelta(milliseconds=10) @pytest.fixture -async def setup_integration(recorder_mock: Recorder): +async def setup_integration( + recorder_mock: Recorder, +) -> Callable[[HomeAssistant], Coroutine[Any, Any, None]]: """Set up the integration.""" - async def setup_integration(hass): + async def setup_integration(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "energy", {}) await hass.async_block_till_done() From 975363b66037686179d35c1303a1b4c117bff5f6 Mon Sep 17 00:00:00 2001 From: epenet 
<6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:35:31 +0200 Subject: [PATCH 0843/1635] Add missing hass type hint in component tests (c) (#124067) --- tests/components/cast/test_media_player.py | 21 ++++- tests/components/clicksend_tts/test_notify.py | 2 +- tests/components/climate/common.py | 84 ++++++++++++------- tests/components/cloud/test_client.py | 4 +- tests/components/coinbase/common.py | 13 ++- .../components/config/test_device_registry.py | 13 ++- tests/components/counter/common.py | 8 +- tests/components/cover/test_init.py | 13 +-- 8 files changed, 111 insertions(+), 47 deletions(-) diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index 1d99adb4723..513f32b1ad6 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -3,7 +3,9 @@ from __future__ import annotations import asyncio +from collections.abc import Callable import json +from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from uuid import UUID @@ -112,7 +114,9 @@ def get_fake_zconf(host="192.168.178.42", port=8009): return zconf -async def async_setup_cast(hass, config=None): +async def async_setup_cast( + hass: HomeAssistant, config: dict[str, Any] | None = None +) -> MagicMock: """Set up the cast platform.""" if config is None: config = {} @@ -128,7 +132,20 @@ async def async_setup_cast(hass, config=None): return add_entities -async def async_setup_cast_internal_discovery(hass, config=None): +async def async_setup_cast_internal_discovery( + hass: HomeAssistant, config: dict[str, Any] | None = None +) -> tuple[ + Callable[ + [ + pychromecast.discovery.HostServiceInfo + | pychromecast.discovery.MDNSServiceInfo, + ChromecastInfo, + ], + None, + ], + Callable[[str, ChromecastInfo], None], + MagicMock, +]: """Set up the cast platform and the discovery.""" browser = MagicMock(devices={}, zc={}) diff --git a/tests/components/clicksend_tts/test_notify.py b/tests/components/clicksend_tts/test_notify.py index e73f0576d9e..892d7541354 100644 --- a/tests/components/clicksend_tts/test_notify.py +++ b/tests/components/clicksend_tts/test_notify.py @@ -46,7 +46,7 @@ def mock_clicksend_tts_notify(): yield ns -async def setup_notify(hass): +async def setup_notify(hass: HomeAssistant) -> None: """Test setup.""" with assert_setup_component(1, notify.DOMAIN) as config: assert await async_setup_component(hass, notify.DOMAIN, CONFIG) diff --git a/tests/components/climate/common.py b/tests/components/climate/common.py index c890d3a7bb5..d6aedd23671 100644 --- a/tests/components/climate/common.py +++ b/tests/components/climate/common.py @@ -23,6 +23,7 @@ from homeassistant.components.climate import ( SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ) +from homeassistant.components.climate.const import HVACMode from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -30,10 +31,13 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass -async def async_set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_preset_mode( + hass: HomeAssistant, preset_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new preset mode.""" data = {ATTR_PRESET_MODE: preset_mode} @@ -44,7 +48,9 @@ async def async_set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_preset_mode(hass, preset_mode, 
entity_id=ENTITY_MATCH_ALL): +def set_preset_mode( + hass: HomeAssistant, preset_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new preset mode.""" data = {ATTR_PRESET_MODE: preset_mode} @@ -54,7 +60,9 @@ def set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_PRESET_MODE, data) -async def async_set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): +async def async_set_aux_heat( + hass: HomeAssistant, aux_heat: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified climate devices auxiliary heater on.""" data = {ATTR_AUX_HEAT: aux_heat} @@ -65,7 +73,9 @@ async def async_set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): +def set_aux_heat( + hass: HomeAssistant, aux_heat: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified climate devices auxiliary heater on.""" data = {ATTR_AUX_HEAT: aux_heat} @@ -76,13 +86,13 @@ def set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): async def async_set_temperature( - hass, - temperature=None, - entity_id=ENTITY_MATCH_ALL, - target_temp_high=None, - target_temp_low=None, - hvac_mode=None, -): + hass: HomeAssistant, + temperature: float | None = None, + entity_id: str = ENTITY_MATCH_ALL, + target_temp_high: float | None = None, + target_temp_low: float | None = None, + hvac_mode: HVACMode | None = None, +) -> None: """Set new target temperature.""" kwargs = { key: value @@ -103,13 +113,13 @@ async def async_set_temperature( @bind_hass def set_temperature( - hass, - temperature=None, - entity_id=ENTITY_MATCH_ALL, - target_temp_high=None, - target_temp_low=None, - hvac_mode=None, -): + hass: HomeAssistant, + temperature: float | None = None, + entity_id: str = ENTITY_MATCH_ALL, + target_temp_high: float | None = None, + target_temp_low: float | None = None, + hvac_mode: HVACMode | None = None, +) -> None: """Set new target temperature.""" kwargs = { key: value @@ -126,7 +136,9 @@ def set_temperature( hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, kwargs) -async def async_set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): +async def async_set_humidity( + hass: HomeAssistant, humidity: int, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target humidity.""" data = {ATTR_HUMIDITY: humidity} @@ -137,7 +149,9 @@ async def async_set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): +def set_humidity( + hass: HomeAssistant, humidity: int, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target humidity.""" data = {ATTR_HUMIDITY: humidity} @@ -147,7 +161,9 @@ def set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_HUMIDITY, data) -async def async_set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): +async def async_set_fan_mode( + hass: HomeAssistant, fan: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set all or specified climate devices fan mode on.""" data = {ATTR_FAN_MODE: fan} @@ -158,7 +174,9 @@ async def async_set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): +def set_fan_mode( + hass: HomeAssistant, fan: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set all or specified climate devices fan mode on.""" data = {ATTR_FAN_MODE: fan} @@ -168,7 +186,9 @@ def set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): 
hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data) -async def async_set_hvac_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_hvac_mode( + hass: HomeAssistant, hvac_mode: HVACMode, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target operation mode.""" data = {ATTR_HVAC_MODE: hvac_mode} @@ -179,7 +199,9 @@ async def async_set_hvac_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_operation_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): +def set_operation_mode( + hass: HomeAssistant, hvac_mode: HVACMode, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target operation mode.""" data = {ATTR_HVAC_MODE: hvac_mode} @@ -189,7 +211,9 @@ def set_operation_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_HVAC_MODE, data) -async def async_set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_swing_mode( + hass: HomeAssistant, swing_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target swing mode.""" data = {ATTR_SWING_MODE: swing_mode} @@ -200,7 +224,9 @@ async def async_set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): +def set_swing_mode( + hass: HomeAssistant, swing_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target swing mode.""" data = {ATTR_SWING_MODE: swing_mode} @@ -210,7 +236,7 @@ def set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_SWING_MODE, data) -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn on device.""" data = {} @@ -220,7 +246,9 @@ async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn off device.""" data = {} diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 005efd990fb..7af163cc49d 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -208,7 +208,9 @@ async def test_webhook_msg( received = [] - async def handler(hass, webhook_id, request): + async def handler( + hass: HomeAssistant, webhook_id: str, request: web.Request + ) -> web.Response: """Handle a webhook.""" received.append(request) return web.json_response({"from": "handler"}) diff --git a/tests/components/coinbase/common.py b/tests/components/coinbase/common.py index 1a141c88bc3..0a2475ac218 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -6,6 +6,7 @@ from homeassistant.components.coinbase.const import ( DOMAIN, ) from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION +from homeassistant.core import HomeAssistant from .const import ( GOOD_CURRENCY_2, @@ -115,7 +116,11 @@ def mock_get_portfolios(): } -async def init_mock_coinbase(hass, currencies=None, rates=None): +async def init_mock_coinbase( + hass: HomeAssistant, + currencies: list[str] | None = None, + rates: list[str] | None = None, +) -> MockConfigEntry: """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -136,7 +141,11 @@ async def init_mock_coinbase(hass, currencies=None, rates=None): return 
config_entry -async def init_mock_coinbase_v3(hass, currencies=None, rates=None): +async def init_mock_coinbase_v3( + hass: HomeAssistant, + currencies: list[str] | None = None, + rates: list[str] | None = None, +) -> MockConfigEntry: """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/config/test_device_registry.py b/tests/components/config/test_device_registry.py index aab898f5fd6..c840ce2bed2 100644 --- a/tests/components/config/test_device_registry.py +++ b/tests/components/config/test_device_registry.py @@ -7,6 +7,7 @@ import pytest from pytest_unordered import unordered from homeassistant.components.config import device_registry +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -274,7 +275,9 @@ async def test_remove_config_entry_from_device( can_remove = False - async def async_remove_config_entry_device(hass, config_entry, device_entry): + async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + ) -> bool: return can_remove mock_integration( @@ -356,7 +359,9 @@ async def test_remove_config_entry_from_device_fails( assert await async_setup_component(hass, "config", {}) ws_client = await hass_ws_client(hass) - async def async_remove_config_entry_device(hass, config_entry, device_entry): + async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + ) -> bool: return True mock_integration( @@ -473,7 +478,9 @@ async def test_remove_config_entry_from_device_if_integration_remove( can_remove = False - async def async_remove_config_entry_device(hass, config_entry, device_entry): + async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + ) -> bool: if can_remove: device_registry.async_update_device( device_entry.id, remove_config_entry_id=config_entry.entry_id diff --git a/tests/components/counter/common.py b/tests/components/counter/common.py index b5156c1a432..e5d9316cd22 100644 --- a/tests/components/counter/common.py +++ b/tests/components/counter/common.py @@ -11,13 +11,13 @@ from homeassistant.components.counter import ( SERVICE_RESET, ) from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass @callback @bind_hass -def async_increment(hass, entity_id): +def async_increment(hass: HomeAssistant, entity_id: str) -> None: """Increment a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_INCREMENT, {ATTR_ENTITY_ID: entity_id}) @@ -26,7 +26,7 @@ def async_increment(hass, entity_id): @callback @bind_hass -def async_decrement(hass, entity_id): +def async_decrement(hass: HomeAssistant, entity_id: str) -> None: """Decrement a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_DECREMENT, {ATTR_ENTITY_ID: entity_id}) @@ -35,7 +35,7 @@ def async_decrement(hass, entity_id): @callback @bind_hass -def async_reset(hass, entity_id): +def async_reset(hass: HomeAssistant, entity_id: str) -> None: """Reset a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_RESET, {ATTR_ENTITY_ID: entity_id}) diff --git a/tests/components/cover/test_init.py 
b/tests/components/cover/test_init.py index 37740260c2f..d1d84ffad6c 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -14,7 +14,8 @@ from homeassistant.const import ( STATE_OPEN, STATE_OPENING, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse +from homeassistant.helpers.entity import Entity from homeassistant.setup import async_setup_component from .common import MockCover @@ -119,7 +120,7 @@ async def test_services( assert is_closing(hass, ent5) -def call_service(hass, service, ent): +def call_service(hass: HomeAssistant, service: str, ent: Entity) -> ServiceResponse: """Call any service on entity.""" return hass.services.async_call( cover.DOMAIN, service, {ATTR_ENTITY_ID: ent.entity_id}, blocking=True @@ -136,22 +137,22 @@ def set_state(ent, state) -> None: ent._values["state"] = state -def is_open(hass, ent): +def is_open(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPEN) -def is_opening(hass, ent): +def is_opening(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPENING) -def is_closed(hass, ent): +def is_closed(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSED) -def is_closing(hass, ent): +def is_closing(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSING) From 10c27c318954ea51e19d9d5343cf3f4a5844fe0f Mon Sep 17 00:00:00 2001 From: cdnninja Date: Sun, 18 Aug 2024 07:36:03 -0600 Subject: [PATCH 0844/1635] Add Alt Core300s model to vesync integration (#124091) --- homeassistant/components/vesync/const.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 483ab89b02e..54fc21d2659 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -40,6 +40,7 @@ SKU_TO_BASE_DEVICE = { "LAP-C202S-WUSR": "Core200S", # Alt ID Model Core200S "Core300S": "Core300S", "LAP-C301S-WJP": "Core300S", # Alt ID Model Core300S + "LAP-C301S-WAAA": "Core300S", # Alt ID Model Core300S "Core400S": "Core400S", "LAP-C401S-WJP": "Core400S", # Alt ID Model Core400S "LAP-C401S-WUSR": "Core400S", # Alt ID Model Core400S From 1afed8ae15934aa1998d9a05ecf6705059296da5 Mon Sep 17 00:00:00 2001 From: Antoine Reversat Date: Sun, 18 Aug 2024 09:37:33 -0400 Subject: [PATCH 0845/1635] Add Fujitsu FGLair integration (#109335) * Add support for Fujitsu HVAC devices * Add the entity code to .coveragerc * Only include code that can fail in the try/except block Co-authored-by: Josef Zweck <24647999+zweckj@users.noreply.github.com> * Remove empty keys from manifest * Remove VERSION as it's already the default * Remve the get_devices function and use asyncio.gather to parallelize dev updates * Move initial step to a function * Let KeyError bubble up. If we are passed an invalid mode it's probably worth raising an exception. 
* Await the gather * Use the async version of the refresh_auth call * Use the serial number as unique id * Use HA constant for precision * Use dev instead of self._dev * Move to property decorated methods * Remove bidict dependency * Setup one config entry for our api credentials instead of per device * Remove bidict from requirements * Signout and remove our api object on unload * Use app credentials from ayla_iot_unofficial * Use entry_id as a key to store our API object * Delete unused code * Create reverse mappings from forward mapping instead of hardcoding them * Clean up the property methods * Only import part of config_entries we are using * Implement suggested changes * Fix tests to use new API consts * Add support for reauth * Use a coordinator instead of doing per-entity refresh * Auto is equivalent to HEAT_COOL not AUTO * Add ON and OFF to list of supported features * Use the mock_setup_entry fixture for the reauth tests * Parametrize testing of config flow exceptions * Only wrap fallable code in try/except * Add tests for coordinator * Use self.coordinator_context instead of self._dev.device_serial_number * Move timeout to ayla_iot_unofficial * Add description for is_europe field * Bump version of ayla-iot-unofficial * Remove turn_on/turn_off warning * Move coordinator creating to __init__ * Add the type of coordinator to the CoordiatorEntity * Update docstring for FujitsuHVACDevice constructor * Fix missed self._dev to dev * Abort instead of showing the form again with an error when usernames are different * Remove useless argument * Fix tests * Implement some suggestions * Use a device property the maps to the coordinator data * Fix api sign out when unloading the entry * Address comments * Fix device lookup * Move API sign in to coordinator setup * Get rid of FujitsuHVACConfigData * Fix async_setup_entry signature * Fix mock_ayla_api * Cleanup common errors * Add test to check that re adding the same account fails * Also patch new_ayla_api in __init__.py * Create a fixture to generate test devices * Add a setup_integration function that does the setup for a mock config entry * Rework unit tests for the coordinator * Fix typos * Use hass session * Rework reauth config flow to only modify password * Update name to be more use-friendly * Fix wrong type for entry in async_unload_entry * Let TimeoutError bubble up as teh base class handles it * Make the mock ayla api return some devices by default * Move test to test_climate.py * Move tests to test_init.py * Remove reauth flow * Remove useless mock setup * Make our mock devices look real * Fix tests * Rename fujitsu_hvac to fujitsu_fglair and rename the integration to FGLair * Add the Fujitsu brand * Add a helper function to generate an entity_id from a device * Use entity_id to remove hardcoded entity ids * Add a test to increase code coverage --------- Co-authored-by: Josef Zweck <24647999+zweckj@users.noreply.github.com> Co-authored-by: Erik Montnemery Co-authored-by: Joostlek --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/brands/fujitsu.json | 5 + .../components/fujitsu_fglair/__init__.py | 49 +++++ .../components/fujitsu_fglair/climate.py | 141 +++++++++++++ .../components/fujitsu_fglair/config_flow.py | 73 +++++++ .../components/fujitsu_fglair/const.py | 54 +++++ .../components/fujitsu_fglair/coordinator.py | 63 ++++++ .../components/fujitsu_fglair/manifest.json | 9 + .../components/fujitsu_fglair/strings.json | 25 +++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 20 +- 
mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/fujitsu_fglair/__init__.py | 21 ++ tests/components/fujitsu_fglair/conftest.py | 113 +++++++++++ .../snapshots/test_climate.ambr | 189 ++++++++++++++++++ .../components/fujitsu_fglair/test_climate.py | 98 +++++++++ .../fujitsu_fglair/test_config_flow.py | 107 ++++++++++ tests/components/fujitsu_fglair/test_init.py | 128 ++++++++++++ 21 files changed, 1111 insertions(+), 4 deletions(-) create mode 100644 homeassistant/brands/fujitsu.json create mode 100644 homeassistant/components/fujitsu_fglair/__init__.py create mode 100644 homeassistant/components/fujitsu_fglair/climate.py create mode 100644 homeassistant/components/fujitsu_fglair/config_flow.py create mode 100644 homeassistant/components/fujitsu_fglair/const.py create mode 100644 homeassistant/components/fujitsu_fglair/coordinator.py create mode 100644 homeassistant/components/fujitsu_fglair/manifest.json create mode 100644 homeassistant/components/fujitsu_fglair/strings.json create mode 100644 tests/components/fujitsu_fglair/__init__.py create mode 100644 tests/components/fujitsu_fglair/conftest.py create mode 100644 tests/components/fujitsu_fglair/snapshots/test_climate.ambr create mode 100644 tests/components/fujitsu_fglair/test_climate.py create mode 100644 tests/components/fujitsu_fglair/test_config_flow.py create mode 100644 tests/components/fujitsu_fglair/test_init.py diff --git a/.strict-typing b/.strict-typing index 51ca93bb9fa..c8f28c84f8a 100644 --- a/.strict-typing +++ b/.strict-typing @@ -196,6 +196,7 @@ homeassistant.components.fritzbox.* homeassistant.components.fritzbox_callmonitor.* homeassistant.components.fronius.* homeassistant.components.frontend.* +homeassistant.components.fujitsu_fglair.* homeassistant.components.fully_kiosk.* homeassistant.components.fyta.* homeassistant.components.generic_hygrostat.* diff --git a/CODEOWNERS b/CODEOWNERS index 6593c02c8a5..367c6eee2bf 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -499,6 +499,8 @@ build.json @home-assistant/supervisor /tests/components/frontend/ @home-assistant/frontend /homeassistant/components/frontier_silicon/ @wlcrs /tests/components/frontier_silicon/ @wlcrs +/homeassistant/components/fujitsu_fglair/ @crevetor +/tests/components/fujitsu_fglair/ @crevetor /homeassistant/components/fully_kiosk/ @cgarwood /tests/components/fully_kiosk/ @cgarwood /homeassistant/components/fyta/ @dontinelli diff --git a/homeassistant/brands/fujitsu.json b/homeassistant/brands/fujitsu.json new file mode 100644 index 00000000000..75d12e33851 --- /dev/null +++ b/homeassistant/brands/fujitsu.json @@ -0,0 +1,5 @@ +{ + "domain": "fujitsu", + "name": "Fujitsu", + "integrations": ["fujitsu_anywair", "fujitsu_fglair"] +} diff --git a/homeassistant/components/fujitsu_fglair/__init__.py b/homeassistant/components/fujitsu_fglair/__init__.py new file mode 100644 index 00000000000..bd891f05b8d --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/__init__.py @@ -0,0 +1,49 @@ +"""The Fujitsu HVAC (based on Ayla IOT) integration.""" + +from __future__ import annotations + +from contextlib import suppress + +from ayla_iot_unofficial import new_ayla_api + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import aiohttp_client + +from .const import API_TIMEOUT, CONF_EUROPE, FGLAIR_APP_ID, FGLAIR_APP_SECRET +from .coordinator import FGLairCoordinator + 
+PLATFORMS: list[Platform] = [Platform.CLIMATE] + +type FGLairConfigEntry = ConfigEntry[FGLairCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> bool: + """Set up Fujitsu HVAC (based on Ayla IOT) from a config entry.""" + api = new_ayla_api( + entry.data[CONF_USERNAME], + entry.data[CONF_PASSWORD], + FGLAIR_APP_ID, + FGLAIR_APP_SECRET, + europe=entry.data[CONF_EUROPE], + websession=aiohttp_client.async_get_clientsession(hass), + timeout=API_TIMEOUT, + ) + + coordinator = FGLairCoordinator(hass, api) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> bool: + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + with suppress(TimeoutError): + await entry.runtime_data.api.async_sign_out() + + return unload_ok diff --git a/homeassistant/components/fujitsu_fglair/climate.py b/homeassistant/components/fujitsu_fglair/climate.py new file mode 100644 index 00000000000..558f4b73a18 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/climate.py @@ -0,0 +1,141 @@ +"""Support for Fujitsu HVAC devices that use the Ayla Iot platform.""" + +from typing import Any + +from ayla_iot_unofficial.fujitsu_hvac import Capability, FujitsuHVAC + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import FGLairConfigEntry +from .const import ( + DOMAIN, + FUJI_TO_HA_FAN, + FUJI_TO_HA_HVAC, + FUJI_TO_HA_SWING, + HA_TO_FUJI_FAN, + HA_TO_FUJI_HVAC, + HA_TO_FUJI_SWING, +) +from .coordinator import FGLairCoordinator + + +async def async_setup_entry( + hass: HomeAssistant, + entry: FGLairConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up one Fujitsu HVAC device.""" + async_add_entities( + FGLairDevice(entry.runtime_data, device) + for device in entry.runtime_data.data.values() + ) + + +class FGLairDevice(CoordinatorEntity[FGLairCoordinator], ClimateEntity): + """Represent a Fujitsu HVAC device.""" + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_precision = PRECISION_HALVES + _attr_target_temperature_step = 0.5 + _attr_has_entity_name = True + _attr_name = None + + _enable_turn_on_off_backwards_compatibility: bool = False + + def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None: + """Store the representation of the device and set the static attributes.""" + super().__init__(coordinator, context=device.device_serial_number) + + self._attr_unique_id = device.device_serial_number + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.device_serial_number)}, + name=device.device_name, + manufacturer="Fujitsu", + model=device.property_values["model_name"], + serial_number=device.device_serial_number, + sw_version=device.property_values["mcu_firmware_version"], + ) + + self._attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + ) + if device.has_capability(Capability.OP_FAN): + self._attr_supported_features |= ClimateEntityFeature.FAN_MODE + + if device.has_capability(Capability.SWING_HORIZONTAL) or device.has_capability( + Capability.SWING_VERTICAL + ): + self._attr_supported_features |= ClimateEntityFeature.SWING_MODE + self._set_attr() + + @property + def device(self) -> FujitsuHVAC: + """Return the device object from the coordinator data.""" + return self.coordinator.data[self.coordinator_context] + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.coordinator_context in self.coordinator.data + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set Fan mode.""" + await self.device.async_set_fan_speed(HA_TO_FUJI_FAN[fan_mode]) + await self.coordinator.async_request_refresh() + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set HVAC mode.""" + await self.device.async_set_op_mode(HA_TO_FUJI_HVAC[hvac_mode]) + await self.coordinator.async_request_refresh() + + async def async_set_swing_mode(self, swing_mode: str) -> None: + """Set swing mode.""" + await self.device.async_set_swing_mode(HA_TO_FUJI_SWING[swing_mode]) + await self.coordinator.async_request_refresh() + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set target temperature.""" + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: + return + await self.device.async_set_set_temp(temperature) + await self.coordinator.async_request_refresh() + + def _set_attr(self) -> None: + if self.coordinator_context in self.coordinator.data: + self._attr_fan_mode = FUJI_TO_HA_FAN.get(self.device.fan_speed) + self._attr_fan_modes = [ + FUJI_TO_HA_FAN[mode] + for mode in self.device.supported_fan_speeds + if mode in FUJI_TO_HA_FAN + ] + self._attr_hvac_mode = FUJI_TO_HA_HVAC.get(self.device.op_mode) + self._attr_hvac_modes = [ + 
FUJI_TO_HA_HVAC[mode] + for mode in self.device.supported_op_modes + if mode in FUJI_TO_HA_HVAC + ] + self._attr_swing_mode = FUJI_TO_HA_SWING.get(self.device.swing_mode) + self._attr_swing_modes = [ + FUJI_TO_HA_SWING[mode] + for mode in self.device.supported_swing_modes + if mode in FUJI_TO_HA_SWING + ] + self._attr_min_temp = self.device.temperature_range[0] + self._attr_max_temp = self.device.temperature_range[1] + self._attr_current_temperature = self.device.sensed_temp + self._attr_target_temperature = self.device.set_temp + + def _handle_coordinator_update(self) -> None: + self._set_attr() + super()._handle_coordinator_update() diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py new file mode 100644 index 00000000000..10f703df6d9 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -0,0 +1,73 @@ +"""Config flow for Fujitsu HVAC (based on Ayla IOT) integration.""" + +import logging +from typing import Any + +from ayla_iot_unofficial import AylaAuthError, new_ayla_api +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers import aiohttp_client + +from .const import API_TIMEOUT, CONF_EUROPE, DOMAIN, FGLAIR_APP_ID, FGLAIR_APP_SECRET + +_LOGGER = logging.getLogger(__name__) + + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + vol.Required(CONF_EUROPE): bool, + } +) + + +class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Fujitsu HVAC (based on Ayla IOT).""" + + async def _async_validate_credentials( + self, user_input: dict[str, Any] + ) -> dict[str, str]: + errors: dict[str, str] = {} + api = new_ayla_api( + user_input[CONF_USERNAME], + user_input[CONF_PASSWORD], + FGLAIR_APP_ID, + FGLAIR_APP_SECRET, + europe=user_input[CONF_EUROPE], + websession=aiohttp_client.async_get_clientsession(self.hass), + timeout=API_TIMEOUT, + ) + try: + await api.async_sign_in() + except TimeoutError: + errors["base"] = "cannot_connect" + except AylaAuthError: + errors["base"] = "invalid_auth" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + + return errors + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input: + await self.async_set_unique_id(user_input[CONF_USERNAME].lower()) + self._abort_if_unique_id_configured() + + errors = await self._async_validate_credentials(user_input) + if len(errors) == 0: + return self.async_create_entry( + title=f"FGLair ({user_input[CONF_USERNAME]})", + data=user_input, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/fujitsu_fglair/const.py b/homeassistant/components/fujitsu_fglair/const.py new file mode 100644 index 00000000000..0e93361f20b --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/const.py @@ -0,0 +1,54 @@ +"""Constants for the Fujitsu HVAC (based on Ayla IOT) integration.""" + +from datetime import timedelta + +from ayla_iot_unofficial.fujitsu_consts import ( # noqa: F401 + FGLAIR_APP_ID, + FGLAIR_APP_SECRET, +) +from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, OpMode, SwingMode + +from homeassistant.components.climate import ( + 
FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + SWING_BOTH, + SWING_HORIZONTAL, + SWING_OFF, + SWING_VERTICAL, + HVACMode, +) + +API_TIMEOUT = 10 +API_REFRESH = timedelta(minutes=5) + +DOMAIN = "fujitsu_fglair" + +CONF_EUROPE = "is_europe" + +HA_TO_FUJI_FAN = { + FAN_LOW: FanSpeed.LOW, + FAN_MEDIUM: FanSpeed.MEDIUM, + FAN_HIGH: FanSpeed.HIGH, + FAN_AUTO: FanSpeed.AUTO, +} +FUJI_TO_HA_FAN = {value: key for key, value in HA_TO_FUJI_FAN.items()} + +HA_TO_FUJI_HVAC = { + HVACMode.OFF: OpMode.OFF, + HVACMode.HEAT: OpMode.HEAT, + HVACMode.COOL: OpMode.COOL, + HVACMode.HEAT_COOL: OpMode.AUTO, + HVACMode.DRY: OpMode.DRY, + HVACMode.FAN_ONLY: OpMode.FAN, +} +FUJI_TO_HA_HVAC = {value: key for key, value in HA_TO_FUJI_HVAC.items()} + +HA_TO_FUJI_SWING = { + SWING_OFF: SwingMode.OFF, + SWING_VERTICAL: SwingMode.SWING_VERTICAL, + SWING_HORIZONTAL: SwingMode.SWING_HORIZONTAL, + SWING_BOTH: SwingMode.SWING_BOTH, +} +FUJI_TO_HA_SWING = {value: key for key, value in HA_TO_FUJI_SWING.items()} diff --git a/homeassistant/components/fujitsu_fglair/coordinator.py b/homeassistant/components/fujitsu_fglair/coordinator.py new file mode 100644 index 00000000000..267c0b2c3e5 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/coordinator.py @@ -0,0 +1,63 @@ +"""Coordinator for Fujitsu HVAC integration.""" + +import logging + +from ayla_iot_unofficial import AylaApi, AylaAuthError +from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import API_REFRESH + +_LOGGER = logging.getLogger(__name__) + + +class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): + """Coordinator for Fujitsu HVAC integration.""" + + def __init__(self, hass: HomeAssistant, api: AylaApi) -> None: + """Initialize coordinator for Fujitsu HVAC integration.""" + super().__init__( + hass, + _LOGGER, + name="Fujitsu HVAC data", + update_interval=API_REFRESH, + ) + self.api = api + + async def _async_setup(self) -> None: + try: + await self.api.async_sign_in() + except AylaAuthError as e: + raise ConfigEntryError("Credentials expired for Ayla IoT API") from e + + async def _async_update_data(self) -> dict[str, FujitsuHVAC]: + """Fetch data from api endpoint.""" + listening_entities = set(self.async_contexts()) + try: + if self.api.token_expired: + await self.api.async_sign_in() + + if self.api.token_expiring_soon: + await self.api.async_refresh_auth() + + devices = await self.api.async_get_devices() + except AylaAuthError as e: + raise ConfigEntryError("Credentials expired for Ayla IoT API") from e + + if len(listening_entities) == 0: + devices = list(filter(lambda x: isinstance(x, FujitsuHVAC), devices)) + else: + devices = list( + filter(lambda x: x.device_serial_number in listening_entities, devices) + ) + + try: + for dev in devices: + await dev.async_update() + except AylaAuthError as e: + raise ConfigEntryError("Credentials expired for Ayla IoT API") from e + + return {d.device_serial_number: d for d in devices} diff --git a/homeassistant/components/fujitsu_fglair/manifest.json b/homeassistant/components/fujitsu_fglair/manifest.json new file mode 100644 index 00000000000..9286f7c24d9 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "fujitsu_fglair", + "name": "FGLair", + "codeowners": ["@crevetor"], + "config_flow": true, + "documentation": 
"https://www.home-assistant.io/integrations/fujitsu_fglair", + "iot_class": "cloud_polling", + "requirements": ["ayla-iot-unofficial==1.3.1"] +} diff --git a/homeassistant/components/fujitsu_fglair/strings.json b/homeassistant/components/fujitsu_fglair/strings.json new file mode 100644 index 00000000000..71d8542cd3e --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/strings.json @@ -0,0 +1,25 @@ +{ + "config": { + "step": { + "user": { + "title": "Enter your FGLair credentials", + "data": { + "is_europe": "Use european servers", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "is_europe": "Allows the user to choose whether to use european servers or not since the API uses different endoint URLs for european vs non-european users" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index c3fe4af4a76..b474fbaf54f 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -200,6 +200,7 @@ FLOWS = { "fritzbox_callmonitor", "fronius", "frontier_silicon", + "fujitsu_fglair", "fully_kiosk", "fyta", "garages_amsterdam", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index a61e58d80d4..9e1250e3b60 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2054,10 +2054,22 @@ "config_flow": true, "iot_class": "local_polling" }, - "fujitsu_anywair": { - "name": "Fujitsu anywAIR", - "integration_type": "virtual", - "supported_by": "advantage_air" + "fujitsu": { + "name": "Fujitsu", + "integrations": { + "fujitsu_anywair": { + "integration_type": "virtual", + "config_flow": false, + "supported_by": "advantage_air", + "name": "Fujitsu anywAIR" + }, + "fujitsu_fglair": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "FGLair" + } + } }, "fully_kiosk": { "name": "Fully Kiosk Browser", diff --git a/mypy.ini b/mypy.ini index f0a941f20eb..ca10d05af86 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1716,6 +1716,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.fujitsu_fglair.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.fully_kiosk.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index bdaa2ecdd91..0988a50a4cc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -522,6 +522,9 @@ autarco==2.0.0 # homeassistant.components.axis axis==62 +# homeassistant.components.fujitsu_fglair +ayla-iot-unofficial==1.3.1 + # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8bfd4ec095f..7a27406deae 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -471,6 +471,9 @@ autarco==2.0.0 # 
homeassistant.components.axis axis==62 +# homeassistant.components.fujitsu_fglair +ayla-iot-unofficial==1.3.1 + # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 diff --git a/tests/components/fujitsu_fglair/__init__.py b/tests/components/fujitsu_fglair/__init__.py new file mode 100644 index 00000000000..2ec3fa0fce6 --- /dev/null +++ b/tests/components/fujitsu_fglair/__init__.py @@ -0,0 +1,21 @@ +"""Tests for the Fujitsu HVAC (based on Ayla IOT) integration.""" + +from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +def entity_id(device: FujitsuHVAC) -> str: + """Generate the entity id for the given serial.""" + return f"{Platform.CLIMATE}.{device.device_serial_number}" diff --git a/tests/components/fujitsu_fglair/conftest.py b/tests/components/fujitsu_fglair/conftest.py new file mode 100644 index 00000000000..b73007a566b --- /dev/null +++ b/tests/components/fujitsu_fglair/conftest.py @@ -0,0 +1,113 @@ +"""Common fixtures for the Fujitsu HVAC (based on Ayla IOT) tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, create_autospec, patch + +from ayla_iot_unofficial import AylaApi +from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, FujitsuHVAC, OpMode, SwingMode +import pytest + +from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + +TEST_DEVICE_NAME = "Test device" +TEST_DEVICE_SERIAL = "testserial" +TEST_USERNAME = "test-username" +TEST_PASSWORD = "test-password" + +TEST_USERNAME2 = "test-username2" +TEST_PASSWORD2 = "test-password2" + +TEST_SERIAL_NUMBER = "testserial123" +TEST_SERIAL_NUMBER2 = "testserial345" + +TEST_PROPERTY_VALUES = { + "model_name": "mock_fujitsu_device", + "mcu_firmware_version": "1", +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.fujitsu_fglair.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_ayla_api(mock_devices: list[AsyncMock]) -> Generator[AsyncMock]: + """Override AylaApi creation.""" + my_mock = create_autospec(AylaApi) + + with ( + patch( + "homeassistant.components.fujitsu_fglair.new_ayla_api", return_value=my_mock + ), + patch( + "homeassistant.components.fujitsu_fglair.config_flow.new_ayla_api", + return_value=my_mock, + ), + ): + my_mock.async_get_devices.return_value = mock_devices + yield my_mock + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return a regular config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USERNAME, + data={ + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + +def _create_device(serial_number: str) -> AsyncMock: + dev = AsyncMock(spec=FujitsuHVAC) + dev.device_serial_number = serial_number + dev.device_name = serial_number + dev.property_values = TEST_PROPERTY_VALUES + dev.has_capability.return_value = True + dev.fan_speed = FanSpeed.AUTO + 
dev.supported_fan_speeds = [ + FanSpeed.LOW, + FanSpeed.MEDIUM, + FanSpeed.HIGH, + FanSpeed.AUTO, + ] + dev.op_mode = OpMode.COOL + dev.supported_op_modes = [ + OpMode.OFF, + OpMode.ON, + OpMode.AUTO, + OpMode.COOL, + OpMode.DRY, + ] + dev.swing_mode = SwingMode.SWING_BOTH + dev.supported_swing_modes = [ + SwingMode.OFF, + SwingMode.SWING_HORIZONTAL, + SwingMode.SWING_VERTICAL, + SwingMode.SWING_BOTH, + ] + dev.temperature_range = [18.0, 26.0] + dev.sensed_temp = 22.0 + dev.set_temp = 21.0 + + return dev + + +@pytest.fixture +def mock_devices() -> list[AsyncMock]: + """Generate a list of mock devices that the API can return.""" + return [ + _create_device(serial) for serial in (TEST_SERIAL_NUMBER, TEST_SERIAL_NUMBER2) + ] diff --git a/tests/components/fujitsu_fglair/snapshots/test_climate.ambr b/tests/components/fujitsu_fglair/snapshots/test_climate.ambr new file mode 100644 index 00000000000..31b143c6f95 --- /dev/null +++ b/tests/components/fujitsu_fglair/snapshots/test_climate.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_entities[climate.testserial123-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.testserial123', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'fujitsu_fglair', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'testserial123', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[climate.testserial123-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.0, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'friendly_name': 'testserial123', + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'supported_features': , + 'swing_mode': 'both', + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.testserial123', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_entities[climate.testserial345-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.testserial345', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 
'platform': 'fujitsu_fglair', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'testserial345', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[climate.testserial345-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.0, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'friendly_name': 'testserial345', + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'supported_features': , + 'swing_mode': 'both', + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.testserial345', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/fujitsu_fglair/test_climate.py b/tests/components/fujitsu_fglair/test_climate.py new file mode 100644 index 00000000000..fd016e4e226 --- /dev/null +++ b/tests/components/fujitsu_fglair/test_climate.py @@ -0,0 +1,98 @@ +"""Test for the climate entities of Fujitsu HVAC.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_FAN_MODE, + ATTR_HVAC_MODE, + ATTR_SWING_MODE, + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + FAN_AUTO, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_SWING_MODE, + SERVICE_SET_TEMPERATURE, + SWING_BOTH, + HVACMode, +) +from homeassistant.components.fujitsu_fglair.const import ( + HA_TO_FUJI_FAN, + HA_TO_FUJI_HVAC, + HA_TO_FUJI_SWING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import entity_id, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that coordinator returns the data we expect after the first refresh.""" + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_set_attributes( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ayla_api: AsyncMock, + mock_devices: list[AsyncMock], + mock_config_entry: MockConfigEntry, +) -> None: + """Test that setting the attributes calls the correct functions on the device.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + service_data={ATTR_HVAC_MODE: HVACMode.COOL}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_op_mode.assert_called_once_with( + HA_TO_FUJI_HVAC[HVACMode.COOL] + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + service_data={ATTR_FAN_MODE: FAN_AUTO}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_fan_speed.assert_called_once_with( + HA_TO_FUJI_FAN[FAN_AUTO] + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_SWING_MODE, + service_data={ATTR_SWING_MODE: SWING_BOTH}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_swing_mode.assert_called_once_with( + HA_TO_FUJI_SWING[SWING_BOTH] + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + service_data={ATTR_TEMPERATURE: 23.0}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_set_temp.assert_called_once_with(23.0) diff --git a/tests/components/fujitsu_fglair/test_config_flow.py b/tests/components/fujitsu_fglair/test_config_flow.py new file mode 100644 index 00000000000..06e4b2e5bd3 --- /dev/null +++ b/tests/components/fujitsu_fglair/test_config_flow.py @@ -0,0 +1,107 @@ +"""Test the Fujitsu HVAC (based on Ayla IOT) config flow.""" + +from unittest.mock import AsyncMock + +from ayla_iot_unofficial import AylaAuthError +import pytest + +from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResult, FlowResultType + +from .conftest import TEST_PASSWORD, TEST_USERNAME + +from tests.common import MockConfigEntry + + +async def _initial_step(hass: HomeAssistant) -> FlowResult: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + return await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + +async def test_full_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_ayla_api: AsyncMock +) -> None: + """Test full config flow.""" + result = await _initial_step(hass) + mock_ayla_api.async_sign_in.assert_called_once() + + 
assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"FGLair ({TEST_USERNAME})" + assert result["data"] == { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + } + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that re-adding the same account fails.""" + mock_config_entry.add_to_hass(hass) + result = await _initial_step(hass) + mock_ayla_api.async_sign_in.assert_not_called() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "err_msg"), + [ + (AylaAuthError, "invalid_auth"), + (TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + exception: Exception, + err_msg: str, +) -> None: + """Test we handle exceptions.""" + + mock_ayla_api.async_sign_in.side_effect = exception + result = await _initial_step(hass) + mock_ayla_api.async_sign_in.assert_called_once() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": err_msg} + + mock_ayla_api.async_sign_in.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"FGLair ({TEST_USERNAME})" + assert result["data"] == { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + } diff --git a/tests/components/fujitsu_fglair/test_init.py b/tests/components/fujitsu_fglair/test_init.py new file mode 100644 index 00000000000..fa67ea08661 --- /dev/null +++ b/tests/components/fujitsu_fglair/test_init.py @@ -0,0 +1,128 @@ +"""Test the initialization of fujitsu_fglair entities.""" + +from unittest.mock import AsyncMock + +from ayla_iot_unofficial import AylaAuthError +from freezegun.api import FrozenDateTimeFactory +import pytest + +from homeassistant.components.fujitsu_fglair.const import API_REFRESH, DOMAIN +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import entity_id, setup_integration
+
+from tests.common import MockConfigEntry, async_fire_time_changed
+
+
+async def test_auth_failure(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    mock_devices: list[AsyncMock],
+) -> None:
+    """Test entities become unavailable after an auth failure."""
+    await setup_integration(hass, mock_config_entry)
+
+    mock_ayla_api.async_get_devices.side_effect = AylaAuthError
+    freezer.tick(API_REFRESH)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id(mock_devices[0])).state == STATE_UNAVAILABLE
+    assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE
+
+
+async def test_device_auth_failure(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    mock_devices: list[AsyncMock],
+) -> None:
+    """Test entities become unavailable after an auth failure while updating devices."""
+    await setup_integration(hass, mock_config_entry)
+
+    for d in mock_ayla_api.async_get_devices.return_value:
+        d.async_update.side_effect = AylaAuthError
+
+    freezer.tick(API_REFRESH)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id(mock_devices[0])).state == STATE_UNAVAILABLE
+    assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE
+
+
+async def test_token_expired(
+    hass: HomeAssistant,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Make sure sign_in is called if the token expired."""
+    mock_ayla_api.token_expired = True
+    await setup_integration(hass, mock_config_entry)
+
+    # Called once during setup and once during update
+    assert mock_ayla_api.async_sign_in.call_count == 2
+
+
+async def test_token_expiring_soon(
+    hass: HomeAssistant,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Make sure the auth token is refreshed if it is expiring soon."""
+    mock_ayla_api.token_expiring_soon = True
+    await setup_integration(hass, mock_config_entry)
+
+    mock_ayla_api.async_refresh_auth.assert_called_once()
+
+
+@pytest.mark.parametrize("exception", [AylaAuthError, TimeoutError])
+async def test_startup_exception(
+    hass: HomeAssistant,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    exception: Exception,
+) -> None:
+    """Make sure that no devices are added if there was an exception while logging in."""
+    mock_ayla_api.async_sign_in.side_effect = exception
+    await setup_integration(hass, mock_config_entry)
+
+    assert len(hass.states.async_all()) == 0
+
+
+async def test_one_device_disabled(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    freezer: FrozenDateTimeFactory,
+    mock_devices: list[AsyncMock],
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test that coordinator only updates devices that are currently listening."""
+    await setup_integration(hass, mock_config_entry)
+
+    for d in mock_devices:
+        d.async_update.assert_called_once()
+        d.reset_mock()
+
+    entity = entity_registry.async_get(
+        entity_registry.async_get_entity_id(
+            Platform.CLIMATE, DOMAIN, mock_devices[0].device_serial_number
+        )
+    )
+    entity_registry.async_update_entity(
+        entity.entity_id, disabled_by=er.RegistryEntryDisabler.USER
+    )
+    await hass.async_block_till_done()
+    freezer.tick(API_REFRESH)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert len(hass.states.async_all()) 
== len(mock_devices) - 1 + mock_devices[0].async_update.assert_not_called() + mock_devices[1].async_update.assert_called_once() From 489ceab4b5665eef553a9b907297f1117d682ff7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:39:26 +0200 Subject: [PATCH 0846/1635] Add missing hass type hint in component tests (b) (#124065) --- tests/components/blebox/conftest.py | 7 +++++-- tests/components/bluetooth/test_wrappers.py | 8 ++++++-- tests/components/broadlink/__init__.py | 8 +++++++- tests/components/buienradar/test_camera.py | 3 ++- 4 files changed, 20 insertions(+), 6 deletions(-) diff --git a/tests/components/blebox/conftest.py b/tests/components/blebox/conftest.py index 89229575a0b..fb35bae43a1 100644 --- a/tests/components/blebox/conftest.py +++ b/tests/components/blebox/conftest.py @@ -9,6 +9,7 @@ import pytest from homeassistant.components.blebox.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -77,7 +78,9 @@ def feature_fixture(request: pytest.FixtureRequest) -> Any: return request.getfixturevalue(request.param) -async def async_setup_entities(hass, entity_ids): +async def async_setup_entities( + hass: HomeAssistant, entity_ids: list[str] +) -> list[er.RegistryEntry]: """Return configured entries with the given entity ids.""" config_entry = mock_config() @@ -90,7 +93,7 @@ async def async_setup_entities(hass, entity_ids): return [entity_registry.async_get(entity_id) for entity_id in entity_ids] -async def async_setup_entity(hass, entity_id): +async def async_setup_entity(hass: HomeAssistant, entity_id: str) -> er.RegistryEntry: """Return a configured entry with the given entity_id.""" return (await async_setup_entities(hass, [entity_id]))[0] diff --git a/tests/components/bluetooth/test_wrappers.py b/tests/components/bluetooth/test_wrappers.py index 5fc3d70c97a..c5908776882 100644 --- a/tests/components/bluetooth/test_wrappers.py +++ b/tests/components/bluetooth/test_wrappers.py @@ -22,7 +22,7 @@ from homeassistant.components.bluetooth import ( HaBluetoothConnector, HomeAssistantBluetoothManager, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import CALLBACK_TYPE, HomeAssistant from . 
import _get_manager, generate_advertisement_data, generate_ble_device @@ -164,7 +164,11 @@ def mock_platform_client_that_raises_on_connect_fixture(): yield -def _generate_scanners_with_fake_devices(hass): +def _generate_scanners_with_fake_devices( + hass: HomeAssistant, +) -> tuple[ + dict[str, tuple[BLEDevice, AdvertisementData]], CALLBACK_TYPE, CALLBACK_TYPE +]: """Generate scanners with fake devices.""" manager = _get_manager() hci0_device_advs = {} diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index 207014d0958..6185e9bdefc 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -4,6 +4,7 @@ from dataclasses import dataclass from unittest.mock import MagicMock, patch from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -137,7 +138,12 @@ class BroadlinkDevice: self.timeout = timeout self.fwversion = fwversion - async def setup_entry(self, hass, mock_api=None, mock_entry=None): + async def setup_entry( + self, + hass: HomeAssistant, + mock_api: MagicMock | None = None, + mock_entry: MockConfigEntry | None = None, + ) -> MockSetup: """Set up the device.""" mock_api = mock_api or self.get_mock_api() mock_entry = mock_entry or self.get_mock_entry() diff --git a/tests/components/buienradar/test_camera.py b/tests/components/buienradar/test_camera.py index 9ef986b094c..f1518a1a0ea 100644 --- a/tests/components/buienradar/test_camera.py +++ b/tests/components/buienradar/test_camera.py @@ -8,6 +8,7 @@ from http import HTTPStatus from aiohttp.client_exceptions import ClientResponseError from homeassistant.components.buienradar.const import CONF_DELTA, DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_COUNTRY_CODE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -31,7 +32,7 @@ def radar_map_url(country_code: str = "NL") -> str: return f"https://api.buienradar.nl/image/1.0/RadarMap{country_code}?w=700&h=700" -async def _setup_config_entry(hass, entry): +async def _setup_config_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: entity_registry = er.async_get(hass) entity_registry.async_get_or_create( domain="camera", From a4fb4e76cb17b9594a38019a645fa3eaf22ea1ef Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 18 Aug 2024 08:39:56 -0500 Subject: [PATCH 0847/1635] Bump aiohttp to 3.10.4 (#124137) changelog: https://github.com/aio-libs/aiohttp/compare/v3.10.3...v3.10.4 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1e90333a198..27966ccf611 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.3 +aiohttp==3.10.4 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index 5f6324bbac5..ce521594399 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.3", + "aiohttp==3.10.4", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index 556f9013cee..f78ea9f6d54 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.3 +aiohttp==3.10.4 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From 69843e9ac44e604bb2790de17a54617be0a67e0e Mon Sep 17 00:00:00 2001 From: Eric Trudeau Date: Sun, 18 Aug 2024 06:42:05 -0700 Subject: [PATCH 0848/1635] Add support for Levoit EverestAir air purifiers (#123428) --- homeassistant/components/vesync/const.py | 6 ++++++ homeassistant/components/vesync/fan.py | 7 ++++++- homeassistant/components/vesync/sensor.py | 10 +++++++++- 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 54fc21d2659..50dce95e42a 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -23,6 +23,7 @@ DEV_TYPE_TO_HA = { "Core300S": "fan", "Core400S": "fan", "Core600S": "fan", + "EverestAir": "fan", "Vital200S": "fan", "Vital100S": "fan", "ESD16": "walldimmer", @@ -60,4 +61,9 @@ SKU_TO_BASE_DEVICE = { "LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-WEU": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-WUK": "Vital100S", # Alt ID Model Vital100S + "EverestAir": "EverestAir", + "LAP-EL551S-AUS": "EverestAir", # Alt ID Model EverestAir + "LAP-EL551S-AEUR": "EverestAir", # Alt ID Model EverestAir + "LAP-EL551S-WEU": "EverestAir", # Alt ID Model EverestAir + "LAP-EL551S-WUS": "EverestAir", # Alt ID Model EverestAir } diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 4dce2762eef..6ef9e41eb43 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -25,6 +25,7 @@ _LOGGER = logging.getLogger(__name__) FAN_MODE_AUTO = "auto" FAN_MODE_SLEEP = "sleep" FAN_MODE_PET = "pet" +FAN_MODE_TURBO = "turbo" PRESET_MODES = { "LV-PUR131S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], @@ -32,6 +33,7 @@ PRESET_MODES = { "Core300S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], "Core400S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], "Core600S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], + "EverestAir": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_TURBO], "Vital200S": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_PET], "Vital100S": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_PET], } @@ -41,6 +43,7 @@ SPEED_RANGE = { # off is not included "Core300S": (1, 3), "Core400S": (1, 4), 
"Core600S": (1, 4), + "EverestAir": (1, 3), "Vital200S": (1, 4), "Vital100S": (1, 4), } @@ -125,7 +128,7 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): @property def preset_mode(self) -> str | None: """Get the current preset mode.""" - if self.smartfan.mode in (FAN_MODE_AUTO, FAN_MODE_SLEEP): + if self.smartfan.mode in (FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_TURBO): return self.smartfan.mode return None @@ -192,6 +195,8 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): self.smartfan.sleep_mode() elif preset_mode == FAN_MODE_PET: self.smartfan.pet_mode() + elif preset_mode == FAN_MODE_TURBO: + self.smartfan.turbo_mode() self.schedule_update_ha_state() diff --git a/homeassistant/components/vesync/sensor.py b/homeassistant/components/vesync/sensor.py index 81f42f4c2ee..8939295a2db 100644 --- a/homeassistant/components/vesync/sensor.py +++ b/homeassistant/components/vesync/sensor.py @@ -72,6 +72,7 @@ FILTER_LIFE_SUPPORTED = [ "Core300S", "Core400S", "Core600S", + "EverestAir", "Vital100S", "Vital200S", ] @@ -83,7 +84,14 @@ AIR_QUALITY_SUPPORTED = [ "Vital100S", "Vital200S", ] -PM25_SUPPORTED = ["Core300S", "Core400S", "Core600S", "Vital100S", "Vital200S"] +PM25_SUPPORTED = [ + "Core300S", + "Core400S", + "Core600S", + "EverestAir", + "Vital100S", + "Vital200S", +] SENSORS: tuple[VeSyncSensorEntityDescription, ...] = ( VeSyncSensorEntityDescription( From ba3872ff872a69d4aa53cad80876325d83f1ea89 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:42:41 +0200 Subject: [PATCH 0849/1635] Add missing hass type in tests/*.py (#124048) --- tests/common.py | 17 ++++++++++++----- tests/test_bootstrap.py | 24 ++++++++++++------------ tests/test_config.py | 3 ++- tests/test_config_entries.py | 35 ++++++++++++++++++----------------- tests/test_loader.py | 24 ++++++++++++++++++------ tests/test_setup.py | 21 ++++++++++++--------- 6 files changed, 74 insertions(+), 50 deletions(-) diff --git a/tests/common.py b/tests/common.py index 3a4c3f2851c..ecf8c4f5418 100644 --- a/tests/common.py +++ b/tests/common.py @@ -52,7 +52,7 @@ from homeassistant.components import device_automation, persistent_notification from homeassistant.components.device_automation import ( # noqa: F401 _async_get_device_automation_capabilities as async_get_device_automation_capabilities, ) -from homeassistant.config import async_process_component_config +from homeassistant.config import IntegrationConfigInfo, async_process_component_config from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import ( DEVICE_DEFAULT_NAME, @@ -439,14 +439,16 @@ mock_service = threadsafe_callback_factory(async_mock_service) @callback -def async_mock_intent(hass, intent_typ): +def async_mock_intent(hass: HomeAssistant, intent_typ: str) -> list[intent.Intent]: """Set up a fake intent handler.""" - intents = [] + intents: list[intent.Intent] = [] class MockIntentHandler(intent.IntentHandler): intent_type = intent_typ - async def async_handle(self, intent_obj): + async def async_handle( + self, intent_obj: intent.Intent + ) -> intent.IntentResponse: """Handle the intent.""" intents.append(intent_obj) return intent_obj.create_response() @@ -1159,7 +1161,12 @@ def assert_setup_component(count, domain=None): """ config = {} - async def mock_psc(hass, config_input, integration, component=None): + async def mock_psc( + hass: HomeAssistant, + config_input: ConfigType, + integration: loader.Integration, + component: loader.ComponentProtocol | None = None, + ) -> 
IntegrationConfigInfo: """Mock the prepare_setup_component to capture config.""" domain_input = integration.domain integration_config_info = await async_process_component_config( diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 278bfc631fd..a32d7d1e50b 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -213,7 +213,7 @@ async def test_setup_after_deps_all_present(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -260,7 +260,7 @@ async def test_setup_after_deps_in_stage_1_ignored(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -315,7 +315,7 @@ async def test_setup_after_deps_manifests_are_loaded_even_if_not_setup( order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -392,7 +392,7 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -471,7 +471,7 @@ async def test_setup_after_deps_via_platform(hass: HomeAssistant) -> None: after_dep_event = asyncio.Event() def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if domain == "after_dep_of_platform_int": await after_dep_event.wait() @@ -520,7 +520,7 @@ async def test_setup_after_deps_not_trigger_load(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -559,7 +559,7 @@ async def test_setup_after_deps_not_present(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -969,7 +969,7 @@ async def test_empty_integrations_list_is_only_sent_at_the_end_of_bootstrap( order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) await asyncio.sleep(0.05) @@ -1029,7 +1029,7 @@ async def test_warning_logged_on_wrap_up_timeout( task: asyncio.Task | None = None def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: nonlocal task async def _not_marked_background_task(): @@ -1067,7 +1067,7 @@ async def test_tasks_logged_that_block_stage_1( """Test we log tasks that delay stage 1 startup.""" def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def _not_marked_background_task(): await asyncio.sleep(0.2) @@ -1110,7 +1110,7 @@ async def test_tasks_logged_that_block_stage_2( done_future = hass.loop.create_future() def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: 
ConfigType) -> bool: async def _not_marked_background_task(): await done_future @@ -1452,7 +1452,7 @@ async def test_setup_does_base_platforms_first(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True diff --git a/tests/test_config.py b/tests/test_config.py index c7039cabe8b..02f8e1fc078 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -36,6 +36,7 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, ConfigSource, HomeAssistant, + State, ) from homeassistant.exceptions import ConfigValidationError, HomeAssistantError from homeassistant.helpers import ( @@ -579,7 +580,7 @@ def test_customize_glob_is_ordered() -> None: assert isinstance(conf["customize_glob"], OrderedDict) -async def _compute_state(hass, config): +async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: await config_util.async_process_ha_core_config(hass, config) entity = Entity() diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 9983886ce44..dccebff13e5 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -18,6 +18,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant import config_entries, data_entry_flow, loader from homeassistant.components import dhcp from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_STARTED, @@ -104,12 +105,12 @@ async def test_setup_race_only_setup_once(hass: HomeAssistant) -> None: fast_config_entry_setup_future = hass.loop.create_future() slow_setup_future = hass.loop.create_future() - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" await slow_setup_future return True - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry.""" slow = entry.data["slow"] if slow: @@ -122,7 +123,7 @@ async def test_setup_race_only_setup_once(hass: HomeAssistant) -> None: await fast_config_entry_setup_future return True - async def async_unload_entry(hass, entry): + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock unload entry.""" return True @@ -582,7 +583,7 @@ async def test_remove_entry_raises( ) -> None: """Test if a component raises while removing entry.""" - async def mock_unload_entry(hass, entry): + async def mock_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock unload entry function.""" raise Exception("BROKEN") # noqa: TRY002 @@ -1326,7 +1327,7 @@ async def test_update_entry_options_and_trigger_listener( entry.add_to_manager(manager) update_listener_calls = [] - async def update_listener(hass, entry): + async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Test function.""" assert entry.options == {"second": True} update_listener_calls.append(None) @@ -1491,7 +1492,7 @@ async def test_reload_during_setup_retrying_waits(hass: HomeAssistant) -> None: load_attempts = [] sleep_duration = 0 - async def _mock_setup_entry(hass, entry): + async def _mock_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry.""" nonlocal sleep_duration await asyncio.sleep(sleep_duration) @@ -1536,7 +1537,7 @@ async def 
test_create_entry_options( ) -> None: """Test a config entry being created with options.""" - async def mock_async_setup(hass, config): + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3234,7 +3235,7 @@ async def test_async_setup_init_entry_completes_before_loaded_event_fires( """Test a config entry being initialized during integration setup before the loaded event fires.""" load_events = async_capture_events(hass, EVENT_COMPONENT_LOADED) - async def mock_async_setup(hass, config): + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3292,7 +3293,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain="comp", data={"value": "initial"}) entry.add_to_hass(hass) - async def mock_async_setup(hass, config): + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3303,7 +3304,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: ) return True - async def mock_async_setup_entry(hass, entry): + async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up an entry.""" assert entry.data["value"] == "updated" return True @@ -3791,7 +3792,7 @@ async def test_setup_raise_entry_error_from_first_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -3831,7 +3832,7 @@ async def test_setup_not_raise_entry_error_from_future_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -3910,7 +3911,7 @@ async def test_setup_raise_auth_failed_from_first_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -3962,7 +3963,7 @@ async def test_setup_raise_auth_failed_from_future_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -4409,12 +4410,12 @@ async def test_unique_id_update_while_setup_in_progress( ) -> None: """Test we handle the case where the config entry is updated while setup is in progress.""" - async def mock_setup_entry(hass, entry): + async def mock_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up entry.""" await asyncio.sleep(0.1) return True - async def mock_unload_entry(hass, entry): + async def mock_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock unloading an entry.""" return True @@ -5463,7 +5464,7 
@@ async def test_reload_during_setup(hass: HomeAssistant) -> None: in_setup = False setup_calls = 0 - async def mock_async_setup_entry(hass, entry): + async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up an entry.""" nonlocal in_setup nonlocal setup_calls diff --git a/tests/test_loader.py b/tests/test_loader.py index ae5280b2dcd..01305dde002 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -658,7 +658,9 @@ def _get_test_integration( ) -def _get_test_integration_with_application_credentials(hass, name): +def _get_test_integration_with_application_credentials( + hass: HomeAssistant, name: str +) -> loader.Integration: """Return a generated test integration with application_credentials support.""" return loader.Integration( hass, @@ -678,7 +680,9 @@ def _get_test_integration_with_application_credentials(hass, name): ) -def _get_test_integration_with_zeroconf_matcher(hass, name, config_flow): +def _get_test_integration_with_zeroconf_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a zeroconf matcher.""" return loader.Integration( hass, @@ -697,7 +701,9 @@ def _get_test_integration_with_zeroconf_matcher(hass, name, config_flow): ) -def _get_test_integration_with_legacy_zeroconf_matcher(hass, name, config_flow): +def _get_test_integration_with_legacy_zeroconf_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a legacy zeroconf matcher.""" return loader.Integration( hass, @@ -724,7 +730,9 @@ def _get_test_integration_with_legacy_zeroconf_matcher(hass, name, config_flow): ) -def _get_test_integration_with_dhcp_matcher(hass, name, config_flow): +def _get_test_integration_with_dhcp_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a dhcp matcher.""" return loader.Integration( hass, @@ -748,7 +756,9 @@ def _get_test_integration_with_dhcp_matcher(hass, name, config_flow): ) -def _get_test_integration_with_bluetooth_matcher(hass, name, config_flow): +def _get_test_integration_with_bluetooth_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a bluetooth matcher.""" return loader.Integration( hass, @@ -767,7 +777,9 @@ def _get_test_integration_with_bluetooth_matcher(hass, name, config_flow): ) -def _get_test_integration_with_usb_matcher(hass, name, config_flow): +def _get_test_integration_with_usb_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a usb matcher.""" return loader.Integration( hass, diff --git a/tests/test_setup.py b/tests/test_setup.py index 4b7df9563ba..c50f8392d66 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -9,6 +9,7 @@ import pytest import voluptuous as vol from homeassistant import config_entries, loader, setup +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, @@ -23,6 +24,7 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_send, ) from homeassistant.helpers.issue_registry import IssueRegistry +from homeassistant.helpers.typing import ConfigType from .common import ( MockConfigEntry, @@ -298,9 +300,10 @@ async def 
test_component_not_setup_twice_if_loaded_during_other_setup( """Test component setup while waiting for lock is not set up twice.""" result = [] - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Tracking Setup.""" result.append(1) + return True mock_integration(hass, MockModule("comp", async_setup=async_setup)) @@ -345,7 +348,7 @@ async def test_component_exception_setup(hass: HomeAssistant) -> None: """Test component that raises exception during setup.""" setup.async_set_domains_to_be_loaded(hass, {"comp"}) - def exception_setup(hass, config): + def exception_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Raise exception.""" raise Exception("fail!") # noqa: TRY002 @@ -359,7 +362,7 @@ async def test_component_base_exception_setup(hass: HomeAssistant) -> None: """Test component that raises exception during setup.""" setup.async_set_domains_to_be_loaded(hass, {"comp"}) - def exception_setup(hass, config): + def exception_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Raise exception.""" raise BaseException("fail!") # noqa: TRY002 @@ -377,7 +380,7 @@ async def test_component_setup_with_validation_and_dependency( ) -> None: """Test all config is passed to dependencies.""" - def config_check_setup(hass, config): + def config_check_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Test that config is passed in.""" if config.get("comp_a", {}).get("valid", False): return True @@ -499,7 +502,7 @@ async def test_all_work_done_before_start(hass: HomeAssistant) -> None: """Test all init work done till start.""" call_order = [] - async def component1_setup(hass, config): + async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} @@ -509,7 +512,7 @@ async def test_all_work_done_before_start(hass: HomeAssistant) -> None: ) return True - def component_track_setup(hass, config): + def component_track_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" call_order.append(1) return True @@ -585,7 +588,7 @@ async def test_when_setup_already_loaded(hass: HomeAssistant) -> None: """Test when setup.""" calls = [] - async def mock_callback(hass, component): + async def mock_callback(hass: HomeAssistant, component: str) -> None: """Mock callback.""" calls.append(component) @@ -613,7 +616,7 @@ async def test_async_when_setup_or_start_already_loaded(hass: HomeAssistant) -> """Test when setup or start.""" calls = [] - async def mock_callback(hass, component): + async def mock_callback(hass: HomeAssistant, component: str) -> None: """Mock callback.""" calls.append(component) @@ -659,7 +662,7 @@ async def test_parallel_entry_setup(hass: HomeAssistant, mock_handlers) -> None: calls = [] - async def mock_async_setup_entry(hass, entry): + async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up an entry.""" calls.append(entry.data["value"]) await asyncio.sleep(0) From 1a628588b476a1d3e0b675ab8c322821f772efe3 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:43:15 +0200 Subject: [PATCH 0850/1635] Improve type hints in scripts/auth (#124049) --- homeassistant/scripts/auth.py | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/homeassistant/scripts/auth.py b/homeassistant/scripts/auth.py index fff57c7adfe..b034021e6e7 
100644 --- a/homeassistant/scripts/auth.py +++ b/homeassistant/scripts/auth.py @@ -2,8 +2,10 @@ import argparse import asyncio +from collections.abc import Sequence import logging import os +from typing import TYPE_CHECKING from homeassistant import runner from homeassistant.auth import auth_manager_from_config @@ -15,7 +17,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er # mypy: allow-untyped-calls, allow-untyped-defs -def run(args): +def run(args: Sequence[str] | None) -> None: """Handle Home Assistant auth provider script.""" parser = argparse.ArgumentParser(description="Manage Home Assistant users") parser.add_argument("--script", choices=["auth"]) @@ -50,7 +52,7 @@ def run(args): asyncio.run(run_command(parser.parse_args(args))) -async def run_command(args): +async def run_command(args: argparse.Namespace) -> None: """Run the command.""" hass = HomeAssistant(os.path.join(os.getcwd(), args.config)) await asyncio.gather(dr.async_load(hass), er.async_load(hass)) @@ -65,9 +67,13 @@ async def run_command(args): await hass.async_stop() -async def list_users(hass, provider, args): +async def list_users( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """List the users.""" count = 0 + if TYPE_CHECKING: + assert provider.data for user in provider.data.users: count += 1 print(user["username"]) @@ -76,8 +82,12 @@ async def list_users(hass, provider, args): print("Total users:", count) -async def add_user(hass, provider, args): +async def add_user( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """Create a user.""" + if TYPE_CHECKING: + assert provider.data try: provider.data.add_auth(args.username, args.password) except hass_auth.InvalidUser: @@ -89,8 +99,12 @@ async def add_user(hass, provider, args): print("Auth created") -async def validate_login(hass, provider, args): +async def validate_login( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """Validate a login.""" + if TYPE_CHECKING: + assert provider.data try: provider.data.validate_login(args.username, args.password) print("Auth valid") @@ -98,8 +112,12 @@ async def validate_login(hass, provider, args): print("Auth invalid") -async def change_password(hass, provider, args): +async def change_password( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """Change password.""" + if TYPE_CHECKING: + assert provider.data try: provider.data.change_password(args.username, args.new_password) await provider.data.async_save() From 55cf3b60eb79f462865c19c576848bbbdde7b5a8 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Sun, 18 Aug 2024 09:57:05 -0400 Subject: [PATCH 0851/1635] Add light platform to Nice G.O. (#124019) * Add light platform to Nice G.O. 
* Update homeassistant/components/nice_go/light.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/nice_go/__init__.py | 2 +- homeassistant/components/nice_go/light.py | 48 ++++++++++ homeassistant/components/nice_go/strings.json | 7 ++ tests/components/nice_go/test_light.py | 88 +++++++++++++++++++ 4 files changed, 144 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nice_go/light.py create mode 100644 tests/components/nice_go/test_light.py diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py index 33c81b70966..f0bfea58b89 100644 --- a/homeassistant/components/nice_go/__init__.py +++ b/homeassistant/components/nice_go/__init__.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant from .coordinator import NiceGOUpdateCoordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.COVER] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT] type NiceGOConfigEntry = ConfigEntry[NiceGOUpdateCoordinator] diff --git a/homeassistant/components/nice_go/light.py b/homeassistant/components/nice_go/light.py new file mode 100644 index 00000000000..76730ea822f --- /dev/null +++ b/homeassistant/components/nice_go/light.py @@ -0,0 +1,48 @@ +"""Nice G.O. light.""" + +from typing import Any + +from homeassistant.components.light import ColorMode, LightEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import NiceGOConfigEntry +from .entity import NiceGOEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NiceGOConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nice G.O. light.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + NiceGOLightEntity(coordinator, device_id, device_data.name, "light") + for device_id, device_data in coordinator.data.items() + ) + + +class NiceGOLightEntity(NiceGOEntity, LightEntity): + """Light for Nice G.O. devices.""" + + _attr_color_mode = ColorMode.ONOFF + _attr_supported_color_modes = {ColorMode.ONOFF} + _attr_translation_key = "light" + + @property + def is_on(self) -> bool: + """Return if the light is on or not.""" + return self.data.light_status + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the light.""" + + await self.coordinator.api.light_on(self._device_id) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the light.""" + + await self.coordinator.api.light_off(self._device_id) diff --git a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json index 8d21f6d9740..261f71ebcbe 100644 --- a/homeassistant/components/nice_go/strings.json +++ b/homeassistant/components/nice_go/strings.json @@ -17,6 +17,13 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "light": { + "light": { + "name": "[%key:component::light::title%]" + } + } + }, "issues": { "firmware_update_required": { "title": "Firmware update required", diff --git a/tests/components/nice_go/test_light.py b/tests/components/nice_go/test_light.py new file mode 100644 index 00000000000..e1852581fe6 --- /dev/null +++ b/tests/components/nice_go/test_light.py @@ -0,0 +1,88 @@ +"""Test Nice G.O. 
light.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ( + DOMAIN as LIGHT_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.components.nice_go.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform + + +async def test_data( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that data gets parsed and returned appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_turn_on( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that turning on the light works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.test_garage_2_light"}, + blocking=True, + ) + + assert mock_nice_go.light_on.call_count == 1 + + +async def test_turn_off( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that turning off the light works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.test_garage_1_light"}, + blocking=True, + ) + + assert mock_nice_go.light_off.call_count == 1 + + +async def test_update_light_state( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that turning off the light works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + assert hass.states.get("light.test_garage_1_light").state == STATE_ON + assert hass.states.get("light.test_garage_2_light").state == STATE_OFF + + device_update = load_json_object_fixture("device_state_update.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update) + device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update_1) + + assert hass.states.get("light.test_garage_1_light").state == STATE_OFF + assert hass.states.get("light.test_garage_2_light").state == STATE_ON From 4e7e896601a58c4a2084ae85a07d2f58041d60ce Mon Sep 17 00:00:00 2001 From: Lenn <78048721+LennP@users.noreply.github.com> Date: Sun, 18 Aug 2024 17:14:05 +0200 Subject: [PATCH 0852/1635] Disable entities for Motiontionblinds Bluetooth (#124159) * Set entity_registry_enabled_default to False for RSSI sensor * Use entity description * Update homeassistant/components/motionblinds_ble/sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/motionblinds_ble/sensor.py Co-authored-by: Joost Lekkerkerker * Use entity_registry_enabled_by_default fixture for tests --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/motionblinds_ble/sensor.py | 1 + tests/components/motionblinds_ble/test_sensor.py | 1 + 2 files changed, 2 insertions(+) diff --git 
a/homeassistant/components/motionblinds_ble/sensor.py b/homeassistant/components/motionblinds_ble/sensor.py index fbab5d06251..aa0f5ef7c90 100644 --- a/homeassistant/components/motionblinds_ble/sensor.py +++ b/homeassistant/components/motionblinds_ble/sensor.py @@ -89,6 +89,7 @@ SENSORS: tuple[MotionblindsBLESensorEntityDescription, ...] = ( native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, register_callback_func=lambda device: device.register_signal_strength_callback, value_func=lambda value: value, + entity_registry_enabled_default=False, ), ) diff --git a/tests/components/motionblinds_ble/test_sensor.py b/tests/components/motionblinds_ble/test_sensor.py index 54d2fbcb064..c2468b876ae 100644 --- a/tests/components/motionblinds_ble/test_sensor.py +++ b/tests/components/motionblinds_ble/test_sensor.py @@ -24,6 +24,7 @@ from . import setup_integration from tests.common import MockConfigEntry +@pytest.mark.usefixtures("entity_registry_enabled_by_default") @pytest.mark.parametrize("blind_type", [MotionBlindType.CURTAIN]) @pytest.mark.parametrize( ("sensor", "register_callback", "initial_value", "args", "expected_value"), From e6c61f207d3f7d5c8c94f143628f98c0a77562bc Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Sun, 18 Aug 2024 17:14:33 +0200 Subject: [PATCH 0853/1635] Rework Onkyo discovery and interview (#120668) * Rework Onkyo discovery and interview * Move class outside setup function * Revert changing default name * Rename to volume resolution --- .../components/onkyo/media_player.py | 156 +++++++++++------- 1 file changed, 94 insertions(+), 62 deletions(-) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 63e76e28dbb..8107c62e4a1 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from dataclasses import dataclass import logging from typing import Any @@ -137,6 +138,16 @@ ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" +@dataclass +class ReceiverInfo: + """Onkyo Receiver information.""" + + host: str + port: int + model_name: str + identifier: str + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -169,43 +180,62 @@ async def async_setup_platform( ) host = config.get(CONF_HOST) - name = config[CONF_NAME] + name = config.get(CONF_NAME) max_volume = config[CONF_MAX_VOLUME] receiver_max_volume = config[CONF_RECEIVER_MAX_VOLUME] sources = config[CONF_SOURCES] - @callback - def async_onkyo_update_callback(message: tuple[str, str, Any], origin: str) -> None: - """Process new message from receiver.""" - receiver = receivers[origin] - _LOGGER.debug("Received update callback from %s: %s", receiver.name, message) - - zone, _, value = message - entity = entities[origin].get(zone) - if entity is not None: - if entity.enabled: - entity.process_update(message) - elif zone in ZONES and value != "N/A": - # When we receive the status for a zone, and the value is not "N/A", - # then zone is available on the receiver, so we create the entity for it. 
- _LOGGER.debug("Discovered %s on %s", ZONES[zone], receiver.name) - zone_entity = OnkyoMediaPlayer( - receiver, sources, zone, max_volume, receiver_max_volume + async def async_setup_receiver( + info: ReceiverInfo, discovered: bool, name: str | None + ) -> None: + @callback + def async_onkyo_update_callback( + message: tuple[str, str, Any], origin: str + ) -> None: + """Process new message from receiver.""" + receiver = receivers[origin] + _LOGGER.debug( + "Received update callback from %s: %s", receiver.name, message ) - entities[origin][zone] = zone_entity - async_add_entities([zone_entity]) - @callback - def async_onkyo_connect_callback(origin: str) -> None: - """Receiver (re)connected.""" - receiver = receivers[origin] - _LOGGER.debug("Receiver (re)connected: %s (%s)", receiver.name, receiver.host) + zone, _, value = message + entity = entities[origin].get(zone) + if entity is not None: + if entity.enabled: + entity.process_update(message) + elif zone in ZONES and value != "N/A": + # When we receive the status for a zone, and the value is not "N/A", + # then zone is available on the receiver, so we create the entity for it. + _LOGGER.debug("Discovered %s on %s", ZONES[zone], receiver.name) + zone_entity = OnkyoMediaPlayer( + receiver, sources, zone, max_volume, receiver_max_volume + ) + entities[origin][zone] = zone_entity + async_add_entities([zone_entity]) - for entity in entities[origin].values(): - entity.backfill_state() + @callback + def async_onkyo_connect_callback(origin: str) -> None: + """Receiver (re)connected.""" + receiver = receivers[origin] + _LOGGER.debug( + "Receiver (re)connected: %s (%s)", receiver.name, receiver.host + ) - def setup_receiver(receiver: pyeiscp.Connection) -> None: - KNOWN_HOSTS.append(receiver.host) + for entity in entities[origin].values(): + entity.backfill_state() + + _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) + receiver = await pyeiscp.Connection.create( + host=info.host, + port=info.port, + update_callback=async_onkyo_update_callback, + connect_callback=async_onkyo_connect_callback, + ) + + receiver.model_name = info.model_name + receiver.identifier = info.identifier + receiver.name = name or info.model_name + receiver.discovered = discovered # Store the receiver object and create a dictionary to store its entities. receivers[receiver.host] = receiver @@ -224,34 +254,38 @@ async def async_setup_platform( entities[receiver.host]["main"] = main_entity async_add_entities([main_entity]) - if host is not None and host not in KNOWN_HOSTS: + if host is not None: + if host in KNOWN_HOSTS: + return + _LOGGER.debug("Manually creating receiver: %s (%s)", name, host) - receiver = await pyeiscp.Connection.create( - host=host, - update_callback=async_onkyo_update_callback, - connect_callback=async_onkyo_connect_callback, - ) - - # The library automatically adds a name and identifier only on discovered hosts, - # so manually add them here instead. 
- receiver.name = name - receiver.identifier = None - - setup_receiver(receiver) - else: @callback - async def async_onkyo_discovery_callback(receiver: pyeiscp.Connection): - """Receiver discovered, connection not yet active.""" - _LOGGER.debug("Receiver discovered: %s (%s)", receiver.name, receiver.host) - if receiver.host not in KNOWN_HOSTS: - await receiver.connect() - setup_receiver(receiver) + async def async_onkyo_interview_callback(conn: pyeiscp.Connection): + """Receiver interviewed, connection not yet active.""" + info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) + if info.host not in KNOWN_HOSTS: + KNOWN_HOSTS.append(info.host) + await async_setup_receiver(info, False, name) + + await pyeiscp.Connection.discover( + host=host, + discovery_callback=async_onkyo_interview_callback, + ) + else: + _LOGGER.debug("Discovering receivers") + + @callback + async def async_onkyo_discovery_callback(conn: pyeiscp.Connection): + """Receiver discovered, connection not yet active.""" + info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) + if info.host not in KNOWN_HOSTS: + KNOWN_HOSTS.append(info.host) + await async_setup_receiver(info, True, None) - _LOGGER.debug("Discovering receivers") await pyeiscp.Connection.discover( - update_callback=async_onkyo_update_callback, - connect_callback=async_onkyo_connect_callback, discovery_callback=async_onkyo_discovery_callback, ) @@ -279,29 +313,27 @@ class OnkyoMediaPlayer(MediaPlayerEntity): sources: dict[str, str], zone: str, max_volume: int, - receiver_max_volume: int, + volume_resolution: int, ) -> None: """Initialize the Onkyo Receiver.""" self._receiver = receiver name = receiver.name - self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" identifier = receiver.identifier - if identifier is not None: - # discovered + self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" + if receiver.discovered: if zone == "main": # keep legacy unique_id self._attr_unique_id = f"{name}_{identifier}" else: self._attr_unique_id = f"{identifier}_{zone}" else: - # not discovered self._attr_unique_id = None self._zone = zone self._source_mapping = sources self._reverse_mapping = {value: key for key, value in sources.items()} self._max_volume = max_volume - self._receiver_max_volume = receiver_max_volume + self._volume_resolution = volume_resolution self._attr_source_list = list(sources.values()) self._attr_extra_state_attributes = {} @@ -350,9 +382,9 @@ class OnkyoMediaPlayer(MediaPlayerEntity): will give 80% volume on the receiver. Then we convert that to the correct scale for the receiver. 
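
        A hypothetical worked example (assuming max_volume = 80 and
        volume_resolution = 80): a Home Assistant volume_level of 0.5 becomes
        int(0.5 * (80 / 100) * 80) == 32 on the receiver, and a receiver
        value of 32 maps back to 32 / (80 * 80 / 100) == 0.5.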
""" - # HA_VOL * (MAX VOL / 100) * MAX_RECEIVER_VOL + # HA_VOL * (MAX VOL / 100) * VOL_RESOLUTION self._update_receiver( - "volume", int(volume * (self._max_volume / 100) * self._receiver_max_volume) + "volume", int(volume * (self._max_volume / 100) * self._volume_resolution) ) async def async_volume_up(self) -> None: @@ -430,9 +462,9 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self._attr_extra_state_attributes.pop(ATTR_VIDEO_OUT, None) elif command in ["volume", "master-volume"] and value != "N/A": self._supports_volume = True - # AMP_VOL / (MAX_RECEIVER_VOL * (MAX_VOL / 100)) + # AMP_VOL / (VOL_RESOLUTION * (MAX_VOL / 100)) self._attr_volume_level = value / ( - self._receiver_max_volume * self._max_volume / 100 + self._volume_resolution * self._max_volume / 100 ) elif command in ["muting", "audio-muting"]: self._attr_is_volume_muted = bool(value == "on") From 4ab3f1f41f6c66de32500980e83705f7f5bfd409 Mon Sep 17 00:00:00 2001 From: John Hollowell Date: Sun, 18 Aug 2024 11:22:44 -0400 Subject: [PATCH 0854/1635] Add Venstar air filter sensors (#115832) * Add sensors for HVAC filter usage The number of hours and the number of days are not linked. The number of hours is a sum of the hours the filter has been in use (fan running). The days is just the number of days since the filter was reset. * Update filter sensors' names * Split consumables entity descriptions and move names to translations * Scale filterHours to match real-world time It looks like the integer returned by the thermostat is actually 100 times the number of hours. * Address review comments/changes --- homeassistant/components/venstar/sensor.py | 40 ++++++++++++++++--- homeassistant/components/venstar/strings.json | 6 +++ 2 files changed, 40 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/venstar/sensor.py b/homeassistant/components/venstar/sensor.py index ee4ad43ade6..484aa711c1e 100644 --- a/homeassistant/components/venstar/sensor.py +++ b/homeassistant/components/venstar/sensor.py @@ -75,7 +75,7 @@ class VenstarSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[VenstarDataUpdateCoordinator, str], Any] name_fn: Callable[[str], str] | None - uom_fn: Callable[[Any], str | None] + uom_fn: Callable[[VenstarDataUpdateCoordinator], str | None] async def async_setup_entry( @@ -99,11 +99,18 @@ async def async_setup_entry( ) runtimes = coordinator.runtimes[-1] - entities.extend( - VenstarSensor(coordinator, config_entry, RUNTIME_ENTITY, sensor_name) - for sensor_name in runtimes - if sensor_name in RUNTIME_DEVICES - ) + for sensor_name in runtimes: + if sensor_name in RUNTIME_DEVICES: + entities.append( + VenstarSensor( + coordinator, config_entry, RUNTIME_ENTITY, sensor_name + ) + ) + entities.extend( + VenstarSensor(coordinator, config_entry, description, sensor_name) + for description in CONSUMABLE_ENTITIES + if description.key == sensor_name + ) for description in INFO_ENTITIES: try: @@ -224,6 +231,27 @@ RUNTIME_ENTITY = VenstarSensorEntityDescription( name_fn=lambda sensor_name: f"{RUNTIME_ATTRIBUTES[sensor_name]} Runtime", ) +CONSUMABLE_ENTITIES: tuple[VenstarSensorEntityDescription, ...] 
= ( + VenstarSensorEntityDescription( + key="filterHours", + state_class=SensorStateClass.MEASUREMENT, + uom_fn=lambda _: UnitOfTime.HOURS, + value_fn=lambda coordinator, sensor_name: ( + coordinator.runtimes[-1][sensor_name] / 100 + ), + name_fn=None, + translation_key="filter_install_time", + ), + VenstarSensorEntityDescription( + key="filterDays", + state_class=SensorStateClass.MEASUREMENT, + uom_fn=lambda _: UnitOfTime.DAYS, + value_fn=lambda coordinator, sensor_name: coordinator.runtimes[-1][sensor_name], + name_fn=None, + translation_key="filter_usage", + ), +) + INFO_ENTITIES: tuple[VenstarSensorEntityDescription, ...] = ( VenstarSensorEntityDescription( key="schedulepart", diff --git a/homeassistant/components/venstar/strings.json b/homeassistant/components/venstar/strings.json index 952353dcbfe..fdc75162651 100644 --- a/homeassistant/components/venstar/strings.json +++ b/homeassistant/components/venstar/strings.json @@ -25,6 +25,12 @@ }, "entity": { "sensor": { + "filter_install_time": { + "name": "Filter installation time" + }, + "filter_usage": { + "name": "Filter usage" + }, "schedule_part": { "name": "Schedule Part", "state": { From 49c59339d9394f83fa85951be9b57e3041bc76f6 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Sun, 18 Aug 2024 18:35:02 +0300 Subject: [PATCH 0855/1635] Shelly RPC - do not stop BLE scanner if a sleeping device (#124147) --- .../components/shelly/coordinator.py | 3 ++- tests/components/shelly/test_coordinator.py | 21 +++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 50140e1890d..2710565f960 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -711,7 +711,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): """Shutdown the coordinator.""" if self.device.connected: try: - await async_stop_scanner(self.device) + if not self.sleep_period: + await async_stop_scanner(self.device) await super().shutdown() except InvalidAuthError: self.entry.async_start_reauth(self.hass) diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index d3494c094f9..1140c93775b 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -854,6 +854,27 @@ async def test_rpc_runs_connected_events_when_initialized( assert call.script_list() in mock_rpc_device.mock_calls +async def test_rpc_sleeping_device_unload_ignore_ble_scanner( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC sleeping device does not stop ble scanner on unload.""" + monkeypatch.setattr(mock_rpc_device, "connected", True) + entry = await init_integration(hass, 2, sleep_period=1000) + + # Make device online + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + # Unload + await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + # BLE script list is called during stop ble scanner + assert call.script_list() not in mock_rpc_device.mock_calls + + async def test_block_sleeping_device_connection_error( hass: HomeAssistant, device_registry: dr.DeviceRegistry, From 7d326ff076dab194105850828694d3112cc3339e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 18:57:29 +0200 Subject: [PATCH 0856/1635] Add missing hass type hint in 
component tests (d) (#124074) --- .../components/denonavr/test_media_player.py | 2 +- tests/components/derivative/test_sensor.py | 15 ++++++++--- tests/components/duckdns/test_init.py | 4 +-- tests/components/dynalite/common.py | 9 ++++--- tests/components/dynalite/test_cover.py | 26 +++++++++++++------ 5 files changed, 38 insertions(+), 18 deletions(-) diff --git a/tests/components/denonavr/test_media_player.py b/tests/components/denonavr/test_media_player.py index c294c449518..6550b31b1f9 100644 --- a/tests/components/denonavr/test_media_player.py +++ b/tests/components/denonavr/test_media_player.py @@ -60,7 +60,7 @@ def client_fixture(): yield mock_client_class.return_value -async def setup_denonavr(hass): +async def setup_denonavr(hass: HomeAssistant) -> None: """Initialize media_player for tests.""" entry_data = { CONF_HOST: TEST_HOST, diff --git a/tests/components/derivative/test_sensor.py b/tests/components/derivative/test_sensor.py index df050c58f10..3646340cac3 100644 --- a/tests/components/derivative/test_sensor.py +++ b/tests/components/derivative/test_sensor.py @@ -3,12 +3,13 @@ from datetime import timedelta from math import sin import random +from typing import Any from freezegun import freeze_time from homeassistant.components.derivative.const import DOMAIN from homeassistant.const import UnitOfPower, UnitOfTime -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -49,7 +50,9 @@ async def test_state(hass: HomeAssistant) -> None: assert state.attributes.get("unit_of_measurement") == "kW" -async def _setup_sensor(hass, config): +async def _setup_sensor( + hass: HomeAssistant, config: dict[str, Any] +) -> tuple[dict[str, Any], str]: default_config = { "platform": "derivative", "name": "power", @@ -67,7 +70,13 @@ async def _setup_sensor(hass, config): return config, entity_id -async def setup_tests(hass, config, times, values, expected_state): +async def setup_tests( + hass: HomeAssistant, + config: dict[str, Any], + times: list[int], + values: list[float], + expected_state: float, +) -> State: """Test derivative sensor state.""" config, entity_id = await _setup_sensor(hass, config) diff --git a/tests/components/duckdns/test_init.py b/tests/components/duckdns/test_init.py index c06add7156a..313cc91aa18 100644 --- a/tests/components/duckdns/test_init.py +++ b/tests/components/duckdns/test_init.py @@ -8,7 +8,6 @@ import pytest from homeassistant.components import duckdns from homeassistant.components.duckdns import async_track_time_interval_backoff from homeassistant.core import HomeAssistant -from homeassistant.loader import bind_hass from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -21,8 +20,7 @@ _LOGGER = logging.getLogger(__name__) INTERVAL = duckdns.INTERVAL -@bind_hass -async def async_set_txt(hass, txt): +async def async_set_txt(hass: HomeAssistant, txt: str | None) -> None: """Set the txt record. Pass in None to remove it. This is a legacy helper method. Do not use it for new tests. 
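
# The annotation pattern applied throughout this patch, shown once for
# orientation (signature copied from the derivative test diff above):
#
#     async def _setup_sensor(
#         hass: HomeAssistant, config: dict[str, Any]
#     ) -> tuple[dict[str, Any], str]:
#         ...
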
diff --git a/tests/components/dynalite/common.py b/tests/components/dynalite/common.py index 91458b0aaff..640b6b3e24f 100644 --- a/tests/components/dynalite/common.py +++ b/tests/components/dynalite/common.py @@ -2,8 +2,11 @@ from unittest.mock import AsyncMock, Mock, call, patch +from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice + from homeassistant.components import dynalite from homeassistant.const import ATTR_SERVICE +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -21,14 +24,14 @@ def create_mock_device(platform, spec): return device -async def get_entry_id_from_hass(hass): +async def get_entry_id_from_hass(hass: HomeAssistant) -> str: """Get the config entry id from hass.""" conf_entries = hass.config_entries.async_entries(dynalite.DOMAIN) assert len(conf_entries) == 1 return conf_entries[0].entry_id -async def create_entity_from_device(hass, device): +async def create_entity_from_device(hass: HomeAssistant, device: DynaliteBaseDevice): """Set up the component and platform and create a light based on the device provided.""" host = "1.2.3.4" entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) @@ -45,7 +48,7 @@ async def create_entity_from_device(hass, device): return mock_dyn_dev.mock_calls[1][2]["update_device_func"] -async def run_service_tests(hass, device, platform, services): +async def run_service_tests(hass: HomeAssistant, device, platform, services): """Run a series of service calls and check that the entity and device behave correctly.""" for cur_item in services: service = cur_item[ATTR_SERVICE] diff --git a/tests/components/dynalite/test_cover.py b/tests/components/dynalite/test_cover.py index c43d349d184..930318978fc 100644 --- a/tests/components/dynalite/test_cover.py +++ b/tests/components/dynalite/test_cover.py @@ -1,8 +1,10 @@ """Test Dynalite cover.""" +from collections.abc import Callable from unittest.mock import Mock from dynalite_devices_lib.cover import DynaliteTimeCoverWithTiltDevice +from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice import pytest from homeassistant.components.cover import ( @@ -36,7 +38,7 @@ from tests.common import mock_restore_cache @pytest.fixture -def mock_device(): +def mock_device() -> Mock: """Mock a Dynalite device.""" mock_dev = create_mock_device("cover", DynaliteTimeCoverWithTiltDevice) mock_dev.device_class = CoverDeviceClass.BLIND.value @@ -54,7 +56,7 @@ def mock_device(): return mock_dev -async def test_cover_setup(hass: HomeAssistant, mock_device) -> None: +async def test_cover_setup(hass: HomeAssistant, mock_device: Mock) -> None: """Test a successful setup.""" await create_entity_from_device(hass, mock_device) entity_state = hass.states.get("cover.name") @@ -93,7 +95,7 @@ async def test_cover_setup(hass: HomeAssistant, mock_device) -> None: ) -async def test_cover_without_tilt(hass: HomeAssistant, mock_device) -> None: +async def test_cover_without_tilt(hass: HomeAssistant, mock_device: Mock) -> None: """Test a cover with no tilt.""" mock_device.has_tilt = False await create_entity_from_device(hass, mock_device) @@ -106,8 +108,14 @@ async def test_cover_without_tilt(hass: HomeAssistant, mock_device) -> None: async def check_cover_position( - hass, update_func, device, closing, opening, closed, expected -): + hass: HomeAssistant, + update_func: Callable[[DynaliteBaseDevice | None], None], + device: Mock, + closing: bool, + opening: bool, + closed: bool, + expected: str, +) -> None: """Check that a given position behaves 
correctly.""" device.is_closing = closing device.is_opening = opening @@ -118,7 +126,7 @@ async def check_cover_position( assert entity_state.state == expected -async def test_cover_positions(hass: HomeAssistant, mock_device) -> None: +async def test_cover_positions(hass: HomeAssistant, mock_device: Mock) -> None: """Test that the state updates in the various positions.""" update_func = await create_entity_from_device(hass, mock_device) await check_cover_position( @@ -135,7 +143,7 @@ async def test_cover_positions(hass: HomeAssistant, mock_device) -> None: ) -async def test_cover_restore_state(hass: HomeAssistant, mock_device) -> None: +async def test_cover_restore_state(hass: HomeAssistant, mock_device: Mock) -> None: """Test restore from cache.""" mock_restore_cache( hass, @@ -147,7 +155,9 @@ async def test_cover_restore_state(hass: HomeAssistant, mock_device) -> None: assert entity_state.state == STATE_OPEN -async def test_cover_restore_state_bad_cache(hass: HomeAssistant, mock_device) -> None: +async def test_cover_restore_state_bad_cache( + hass: HomeAssistant, mock_device: Mock +) -> None: """Test restore from a cache without the attribute.""" mock_restore_cache( hass, From df58068e84564f6d4cec27dc48b0b14153c9de9f Mon Sep 17 00:00:00 2001 From: Ryan Mattson Date: Sun, 18 Aug 2024 13:05:30 -0500 Subject: [PATCH 0857/1635] Update aiolyric to 2.0.1 (#123424) update aiolyric to 2.0.1 --- homeassistant/components/lyric/__init__.py | 22 ++++--- homeassistant/components/lyric/climate.py | 64 ++++++++++---------- homeassistant/components/lyric/manifest.json | 2 +- homeassistant/components/lyric/sensor.py | 43 ++++++------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 6 files changed, 69 insertions(+), 66 deletions(-) diff --git a/homeassistant/components/lyric/__init__.py b/homeassistant/components/lyric/__init__.py index e1eaed6602c..6c35e084424 100644 --- a/homeassistant/components/lyric/__init__.py +++ b/homeassistant/components/lyric/__init__.py @@ -12,7 +12,7 @@ from aiolyric import Lyric from aiolyric.exceptions import LyricAuthenticationException, LyricException from aiolyric.objects.device import LyricDevice from aiolyric.objects.location import LyricLocation -from aiolyric.objects.priority import LyricAccessories, LyricRoom +from aiolyric.objects.priority import LyricAccessory, LyricRoom from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform @@ -80,11 +80,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await lyric.get_locations() await asyncio.gather( *( - lyric.get_thermostat_rooms(location.locationID, device.deviceID) + lyric.get_thermostat_rooms( + location.location_id, device.device_id + ) for location in lyric.locations for device in location.devices - if device.deviceClass == "Thermostat" - and device.deviceID.startswith("LCC") + if device.device_class == "Thermostat" + and device.device_id.startswith("LCC") ) ) @@ -143,7 +145,7 @@ class LyricEntity(CoordinatorEntity[DataUpdateCoordinator[Lyric]]): super().__init__(coordinator) self._key = key self._location = location - self._mac_id = device.macID + self._mac_id = device.mac_id self._update_thermostat = coordinator.data.update_thermostat self._update_fan = coordinator.data.update_fan @@ -155,7 +157,7 @@ class LyricEntity(CoordinatorEntity[DataUpdateCoordinator[Lyric]]): @property def location(self) -> LyricLocation: """Get the Lyric Location.""" - return self.coordinator.data.locations_dict[self._location.locationID] + return 
self.coordinator.data.locations_dict[self._location.location_id] @property def device(self) -> LyricDevice: @@ -173,7 +175,7 @@ class LyricDeviceEntity(LyricEntity): identifiers={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, connections={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, manufacturer="Honeywell", - model=self.device.deviceModel, + model=self.device.device_model, name=f"{self.device.name} Thermostat", ) @@ -187,7 +189,7 @@ class LyricAccessoryEntity(LyricDeviceEntity): location: LyricLocation, device: LyricDevice, room: LyricRoom, - accessory: LyricAccessories, + accessory: LyricAccessory, key: str, ) -> None: """Initialize the Honeywell Lyric accessory entity.""" @@ -207,7 +209,7 @@ class LyricAccessoryEntity(LyricDeviceEntity): }, manufacturer="Honeywell", model="RCHTSENSOR", - name=f"{self.room.roomName} Sensor", + name=f"{self.room.room_name} Sensor", via_device=(dr.CONNECTION_NETWORK_MAC, self._mac_id), ) @@ -217,7 +219,7 @@ class LyricAccessoryEntity(LyricDeviceEntity): return self.coordinator.data.rooms_dict[self._mac_id][self._room_id] @property - def accessory(self) -> LyricAccessories: + def accessory(self) -> LyricAccessory: """Get the Lyric Device.""" return next( accessory diff --git a/homeassistant/components/lyric/climate.py b/homeassistant/components/lyric/climate.py index 50add155915..1c459c2c66a 100644 --- a/homeassistant/components/lyric/climate.py +++ b/homeassistant/components/lyric/climate.py @@ -132,7 +132,7 @@ async def async_setup_entry( LyricClimate( coordinator, ClimateEntityDescription( - key=f"{device.macID}_thermostat", + key=f"{device.mac_id}_thermostat", name=device.name, ), location, @@ -185,7 +185,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): ) -> None: """Initialize Honeywell Lyric climate entity.""" # Define thermostat type (TCC - e.g., Lyric round; LCC - e.g., T5,6) - if device.changeableValues.thermostatSetpointStatus: + if device.changeable_values.thermostat_setpoint_status: self._attr_thermostat_type = LyricThermostatType.LCC else: self._attr_thermostat_type = LyricThermostatType.TCC @@ -202,15 +202,15 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self._attr_hvac_modes = [HVACMode.OFF] # Add supported lyric thermostat features - if LYRIC_HVAC_MODE_HEAT in device.allowedModes: + if LYRIC_HVAC_MODE_HEAT in device.allowed_modes: self._attr_hvac_modes.append(HVACMode.HEAT) - if LYRIC_HVAC_MODE_COOL in device.allowedModes: + if LYRIC_HVAC_MODE_COOL in device.allowed_modes: self._attr_hvac_modes.append(HVACMode.COOL) if ( - LYRIC_HVAC_MODE_HEAT in device.allowedModes - and LYRIC_HVAC_MODE_COOL in device.allowedModes + LYRIC_HVAC_MODE_HEAT in device.allowed_modes + and LYRIC_HVAC_MODE_COOL in device.allowed_modes ): self._attr_hvac_modes.append(HVACMode.HEAT_COOL) @@ -242,19 +242,19 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): coordinator, location, device, - f"{device.macID}_thermostat", + f"{device.mac_id}_thermostat", ) self.entity_description = description @property def current_temperature(self) -> float | None: """Return the current temperature.""" - return self.device.indoorTemperature + return self.device.indoor_temperature @property def hvac_action(self) -> HVACAction | None: """Return the current hvac action.""" - action = HVAC_ACTIONS.get(self.device.operationStatus.mode, None) + action = HVAC_ACTIONS.get(self.device.operation_status.mode, None) if action == HVACAction.OFF and self.hvac_mode != HVACMode.OFF: action = HVACAction.IDLE return action @@ -262,63 +262,63 @@ class LyricClimate(LyricDeviceEntity, 
ClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return the hvac mode.""" - return HVAC_MODES[self.device.changeableValues.mode] + return HVAC_MODES[self.device.changeable_values.mode] @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" device = self.device if ( - device.changeableValues.autoChangeoverActive - or HVAC_MODES[device.changeableValues.mode] == HVACMode.OFF + device.changeable_values.auto_changeover_active + or HVAC_MODES[device.changeable_values.mode] == HVACMode.OFF ): return None if self.hvac_mode == HVACMode.COOL: - return device.changeableValues.coolSetpoint - return device.changeableValues.heatSetpoint + return device.changeable_values.cool_setpoint + return device.changeable_values.heat_setpoint @property def target_temperature_high(self) -> float | None: """Return the highbound target temperature we try to reach.""" device = self.device if ( - not device.changeableValues.autoChangeoverActive - or HVAC_MODES[device.changeableValues.mode] == HVACMode.OFF + not device.changeable_values.auto_changeover_active + or HVAC_MODES[device.changeable_values.mode] == HVACMode.OFF ): return None - return device.changeableValues.coolSetpoint + return device.changeable_values.cool_setpoint @property def target_temperature_low(self) -> float | None: """Return the lowbound target temperature we try to reach.""" device = self.device if ( - not device.changeableValues.autoChangeoverActive - or HVAC_MODES[device.changeableValues.mode] == HVACMode.OFF + not device.changeable_values.auto_changeover_active + or HVAC_MODES[device.changeable_values.mode] == HVACMode.OFF ): return None - return device.changeableValues.heatSetpoint + return device.changeable_values.heat_setpoint @property def preset_mode(self) -> str | None: """Return current preset mode.""" - return self.device.changeableValues.thermostatSetpointStatus + return self.device.changeable_values.thermostat_setpoint_status @property def min_temp(self) -> float: """Identify min_temp in Lyric API or defaults if not available.""" device = self.device - if LYRIC_HVAC_MODE_COOL in device.allowedModes: - return device.minCoolSetpoint - return device.minHeatSetpoint + if LYRIC_HVAC_MODE_COOL in device.allowed_modes: + return device.min_cool_setpoint + return device.min_heat_setpoint @property def max_temp(self) -> float: """Identify max_temp in Lyric API or defaults if not available.""" device = self.device - if LYRIC_HVAC_MODE_HEAT in device.allowedModes: - return device.maxHeatSetpoint - return device.maxCoolSetpoint + if LYRIC_HVAC_MODE_HEAT in device.allowed_modes: + return device.max_heat_setpoint + return device.max_cool_setpoint @property def fan_mode(self) -> str | None: @@ -339,7 +339,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW) target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) - if device.changeableValues.mode == LYRIC_HVAC_MODE_HEAT_COOL: + if device.changeable_values.mode == LYRIC_HVAC_MODE_HEAT_COOL: if target_temp_low is None or target_temp_high is None: raise HomeAssistantError( "Could not find target_temp_low and/or target_temp_high in" @@ -349,7 +349,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): # If TCC device pass the heatCoolMode value, otherwise # if LCC device can skip the mode altogether if self._attr_thermostat_type is LyricThermostatType.TCC: - mode = HVAC_MODES[device.changeableValues.heatCoolMode] + mode = HVAC_MODES[device.changeable_values.heat_cool_mode] else: mode = None 
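
# Every hunk in this aiolyric 2.x bump applies the same mechanical
# camelCase -> snake_case rename; a few pairs taken from the diff itself:
#
#     device.indoorTemperature      -> device.indoor_temperature
#     device.changeableValues.mode  -> device.changeable_values.mode
#     device.macID                  -> device.mac_id
#     room.roomName                 -> room.room_name
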
@@ -401,7 +401,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): # otherwise it turns to Auto briefly and then reverts to Off. # This is the behavior that happens with the native app as well, # so likely a bug in the api itself. - if HVAC_MODES[self.device.changeableValues.mode] == HVACMode.OFF: + if HVAC_MODES[self.device.changeable_values.mode] == HVACMode.OFF: _LOGGER.debug( "HVAC mode passed to lyric: %s", HVAC_MODES[LYRIC_HVAC_MODE_COOL], @@ -427,7 +427,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): else: _LOGGER.debug( "HVAC mode passed to lyric: %s", - HVAC_MODES[self.device.changeableValues.mode], + HVAC_MODES[self.device.changeable_values.mode], ) await self._update_thermostat( self.location, self.device, autoChangeoverActive=True @@ -448,7 +448,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): # otherwise leave unchanged. if ( LYRIC_HVAC_MODES[hvac_mode] == LYRIC_HVAC_MODE_HEAT_COOL - and not self.device.changeableValues.autoChangeoverActive + and not self.device.changeable_values.auto_changeover_active ): auto_changeover = True else: diff --git a/homeassistant/components/lyric/manifest.json b/homeassistant/components/lyric/manifest.json index a55f9c1d7cb..8bed909ace2 100644 --- a/homeassistant/components/lyric/manifest.json +++ b/homeassistant/components/lyric/manifest.json @@ -22,5 +22,5 @@ "iot_class": "cloud_polling", "loggers": ["aiolyric"], "quality_scale": "silver", - "requirements": ["aiolyric==1.1.0"] + "requirements": ["aiolyric==2.0.1"] } diff --git a/homeassistant/components/lyric/sensor.py b/homeassistant/components/lyric/sensor.py index 9f05354c399..7e006bc7bfe 100644 --- a/homeassistant/components/lyric/sensor.py +++ b/homeassistant/components/lyric/sensor.py @@ -9,7 +9,7 @@ from datetime import datetime, timedelta from aiolyric import Lyric from aiolyric.objects.device import LyricDevice from aiolyric.objects.location import LyricLocation -from aiolyric.objects.priority import LyricAccessories, LyricRoom +from aiolyric.objects.priority import LyricAccessory, LyricRoom from homeassistant.components.sensor import ( SensorDeviceClass, @@ -55,8 +55,8 @@ class LyricSensorEntityDescription(SensorEntityDescription): class LyricSensorAccessoryEntityDescription(SensorEntityDescription): """Class describing Honeywell Lyric room sensor entities.""" - value_fn: Callable[[LyricRoom, LyricAccessories], StateType | datetime] - suitable_fn: Callable[[LyricRoom, LyricAccessories], bool] + value_fn: Callable[[LyricRoom, LyricAccessory], StateType | datetime] + suitable_fn: Callable[[LyricRoom, LyricAccessory], bool] DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ @@ -65,8 +65,8 @@ DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ translation_key="indoor_temperature", device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.indoorTemperature, - suitable_fn=lambda device: device.indoorTemperature, + value_fn=lambda device: device.indoor_temperature, + suitable_fn=lambda device: device.indoor_temperature, ), LyricSensorEntityDescription( key="indoor_humidity", @@ -74,16 +74,16 @@ DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda device: device.indoorHumidity, - suitable_fn=lambda device: device.indoorHumidity, + value_fn=lambda device: device.indoor_humidity, + suitable_fn=lambda device: device.indoor_humidity, ), 
LyricSensorEntityDescription( key="outdoor_temperature", translation_key="outdoor_temperature", device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.outdoorTemperature, - suitable_fn=lambda device: device.outdoorTemperature, + value_fn=lambda device: device.outdoor_temperature, + suitable_fn=lambda device: device.outdoor_temperature, ), LyricSensorEntityDescription( key="outdoor_humidity", @@ -91,29 +91,30 @@ DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda device: device.displayedOutdoorHumidity, - suitable_fn=lambda device: device.displayedOutdoorHumidity, + value_fn=lambda device: device.displayed_outdoor_humidity, + suitable_fn=lambda device: device.displayed_outdoor_humidity, ), LyricSensorEntityDescription( key="next_period_time", translation_key="next_period_time", device_class=SensorDeviceClass.TIMESTAMP, value_fn=lambda device: get_datetime_from_future_time( - device.changeableValues.nextPeriodTime + device.changeable_values.next_period_time ), suitable_fn=lambda device: ( - device.changeableValues and device.changeableValues.nextPeriodTime + device.changeable_values and device.changeable_values.next_period_time ), ), LyricSensorEntityDescription( key="setpoint_status", translation_key="setpoint_status", value_fn=lambda device: get_setpoint_status( - device.changeableValues.thermostatSetpointStatus, - device.changeableValues.nextPeriodTime, + device.changeable_values.thermostat_setpoint_status, + device.changeable_values.next_period_time, ), suitable_fn=lambda device: ( - device.changeableValues and device.changeableValues.thermostatSetpointStatus + device.changeable_values + and device.changeable_values.thermostat_setpoint_status ), ), ] @@ -133,7 +134,7 @@ ACCESSORY_SENSORS: list[LyricSensorAccessoryEntityDescription] = [ device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda room, _: room.roomAvgHumidity, + value_fn=lambda room, _: room.room_avg_humidity, suitable_fn=lambda _, accessory: accessory.type == "IndoorAirSensor", ), ] @@ -182,7 +183,7 @@ async def async_setup_entry( ) for location in coordinator.data.locations for device in location.devices - for room in coordinator.data.rooms_dict.get(device.macID, {}).values() + for room in coordinator.data.rooms_dict.get(device.mac_id, {}).values() for accessory in room.accessories for accessory_sensor in ACCESSORY_SENSORS if accessory_sensor.suitable_fn(room, accessory) @@ -206,7 +207,7 @@ class LyricSensor(LyricDeviceEntity, SensorEntity): coordinator, location, device, - f"{device.macID}_{description.key}", + f"{device.mac_id}_{description.key}", ) self.entity_description = description if description.device_class == SensorDeviceClass.TEMPERATURE: @@ -233,7 +234,7 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): location: LyricLocation, parentDevice: LyricDevice, room: LyricRoom, - accessory: LyricAccessories, + accessory: LyricAccessory, ) -> None: """Initialize.""" super().__init__( @@ -242,7 +243,7 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): parentDevice, room, accessory, - f"{parentDevice.macID}_room{room.id}_acc{accessory.id}_{description.key}", + f"{parentDevice.mac_id}_room{room.id}_acc{accessory.id}_{description.key}", ) self.entity_description = description if description.device_class == 
SensorDeviceClass.TEMPERATURE: diff --git a/requirements_all.txt b/requirements_all.txt index 0988a50a4cc..f7b6fb4e245 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -285,7 +285,7 @@ aiolivisi==0.0.19 aiolookin==1.0.0 # homeassistant.components.lyric -aiolyric==1.1.0 +aiolyric==2.0.1 # homeassistant.components.mealie aiomealie==0.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7a27406deae..290fba5d673 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -267,7 +267,7 @@ aiolivisi==0.0.19 aiolookin==1.0.0 # homeassistant.components.lyric -aiolyric==1.1.0 +aiolyric==2.0.1 # homeassistant.components.mealie aiomealie==0.8.1 From 7d5ddbf51cf126b4e142480429f7c534532e2e35 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 18 Aug 2024 20:05:41 +0200 Subject: [PATCH 0858/1635] Bump python-holidays to 0.54 (#124170) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index ebe472d7f0e..0a714815ae3 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.53", "babel==2.15.0"] + "requirements": ["holidays==0.54", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 69df8080fa5..133c82454bc 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.53"] + "requirements": ["holidays==0.54"] } diff --git a/requirements_all.txt b/requirements_all.txt index f7b6fb4e245..29cd8401045 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1099,7 +1099,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.53 +holidays==0.54 # homeassistant.components.frontend home-assistant-frontend==20240809.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 290fba5d673..f351b43fdb8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -922,7 +922,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.53 +holidays==0.54 # homeassistant.components.frontend home-assistant-frontend==20240809.0 From ce2ffde22ec6bd2c2e6623bcfca0e369a4fbfd27 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 18 Aug 2024 20:20:58 +0200 Subject: [PATCH 0859/1635] Update sensor recorder tests to async (#124061) --- pylint/plugins/hass_enforce_type_hints.py | 1 + tests/common.py | 3 +- tests/components/recorder/test_init.py | 2 +- .../sensor/test_recorder_missing_stats.py | 73 ++++++++++--------- tests/conftest.py | 13 +++- 5 files changed, 54 insertions(+), 38 deletions(-) diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index 2c58e7aae15..e1812de44d3 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -107,6 +107,7 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { 
"device_registry": "DeviceRegistry", "enable_bluetooth": "None", "enable_custom_integrations": "None", + "enable_missing_statistics": "bool", "enable_nightly_purge": "bool", "enable_statistics": "bool", "enable_schema_validation": "bool", diff --git a/tests/common.py b/tests/common.py index ecf8c4f5418..093d2b5c5e1 100644 --- a/tests/common.py +++ b/tests/common.py @@ -251,6 +251,7 @@ async def async_test_home_assistant( event_loop: asyncio.AbstractEventLoop | None = None, load_registries: bool = True, config_dir: str | None = None, + initial_state: CoreState = CoreState.running, ) -> AsyncGenerator[HomeAssistant]: """Return a Home Assistant object pointing at test config dir.""" hass = HomeAssistant(config_dir or get_test_config_dir()) @@ -378,7 +379,7 @@ async def async_test_home_assistant( await rs.async_load(hass) hass.data[bootstrap.DATA_REGISTRIES_LOADED] = None - hass.set_state(CoreState.running) + hass.set_state(initial_state) @callback def clear_instance(event): diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index c8e58d58105..74b0adc1b8b 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -1366,7 +1366,7 @@ async def test_statistics_runs_initiated( @pytest.mark.freeze_time("2022-09-13 09:00:00+02:00") @pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_statistics", [True]) +@pytest.mark.parametrize("enable_missing_statistics", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory diff --git a/tests/components/sensor/test_recorder_missing_stats.py b/tests/components/sensor/test_recorder_missing_stats.py index d770c459426..43e18b89e72 100644 --- a/tests/components/sensor/test_recorder_missing_stats.py +++ b/tests/components/sensor/test_recorder_missing_stats.py @@ -1,7 +1,6 @@ """The tests for sensor recorder platform can catch up.""" from datetime import datetime, timedelta -from pathlib import Path import threading from unittest.mock import patch @@ -17,11 +16,15 @@ from homeassistant.components.recorder.statistics import ( from homeassistant.components.recorder.util import session_scope from homeassistant.core import CoreState from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import setup_component +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import get_test_home_assistant -from tests.components.recorder.common import do_adhoc_statistics, wait_recording_done +from tests.common import async_test_home_assistant +from tests.components.recorder.common import ( + async_wait_recording_done, + do_adhoc_statistics, +) +from tests.typing import RecorderInstanceGenerator POWER_SENSOR_ATTRIBUTES = { "device_class": "energy", @@ -40,37 +43,34 @@ def disable_db_issue_creation(): @pytest.mark.timeout(25) -def test_compile_missing_statistics( - freezer: FrozenDateTimeFactory, recorder_db_url: str, tmp_path: Path +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_missing_statistics", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_compile_missing_statistics( + async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory ) -> None: """Test compile missing statistics.""" - if 
recorder_db_url == "sqlite://": - # On-disk database because we need to stop and start hass - # and have it persist. - recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") - config = { - "db_url": recorder_db_url, - } three_days_ago = datetime(2021, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) start_time = three_days_ago + timedelta(days=3) freezer.move_to(three_days_ago) - with get_test_home_assistant() as hass: - hass.set_state(CoreState.not_running) + async with ( + async_test_home_assistant(initial_state=CoreState.not_running) as hass, + async_test_recorder(hass, wait_recorder=False), + ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "sensor", {}) - setup_component(hass, "recorder", {"recorder": config}) + await async_setup_component(hass, "sensor", {}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) - wait_recording_done(hass) + hass.states.async_set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) + await async_wait_recording_done(hass) two_days_ago = three_days_ago + timedelta(days=1) freezer.move_to(two_days_ago) do_adhoc_statistics(hass, start=two_days_ago) - wait_recording_done(hass) + await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum"} @@ -82,29 +82,32 @@ def test_compile_missing_statistics( past_time = two_days_ago while past_time <= start_time: freezer.move_to(past_time) - hass.states.set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) + hass.states.async_set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) past_time += timedelta(minutes=5) count += 1 - wait_recording_done(hass) + await async_wait_recording_done(hass) states = get_significant_states( hass, three_days_ago, past_time, ["sensor.test1"] ) assert len(states["sensor.test1"]) == 577 - hass.stop() + await hass.async_stop() + await hass.async_block_till_done() + freezer.move_to(start_time) - with get_test_home_assistant() as hass: - hass.set_state(CoreState.not_running) + async with ( + async_test_home_assistant(initial_state=CoreState.not_running) as hass, + async_test_recorder(hass, wait_recorder=False), + ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "sensor", {}) - hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) - setup_component(hass, "recorder", {"recorder": config}) + await async_setup_component(hass, "sensor", {}) + hass.states.async_set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum", "max", "mean", "min"} @@ -128,4 +131,4 @@ def test_compile_missing_statistics( assert len(stats["sensor.test1"]) == 48 # Make sure the last mean is 570.5 assert stats["sensor.test1"][-1]["mean"] == 570.5 - hass.stop() + await hass.async_stop() diff --git a/tests/conftest.py b/tests/conftest.py index ea0453e7450..df183f955cb 100644 --- 
a/tests/conftest.py +++ b/tests/conftest.py @@ -1262,6 +1262,16 @@ def enable_statistics() -> bool: return False +@pytest.fixture +def enable_missing_statistics() -> bool: + """Fixture to control enabling of recorder's statistics compilation. + + To enable statistics, tests can be marked with: + @pytest.mark.parametrize("enable_missing_statistics", [True]) + """ + return False + + @pytest.fixture def enable_schema_validation() -> bool: """Fixture to control enabling of recorder's statistics table validation. @@ -1453,6 +1463,7 @@ async def async_test_recorder( recorder_db_url: str, enable_nightly_purge: bool, enable_statistics: bool, + enable_missing_statistics: bool, enable_schema_validation: bool, enable_migrate_context_ids: bool, enable_migrate_event_type_ids: bool, @@ -1511,7 +1522,7 @@ async def async_test_recorder( ) compile_missing = ( recorder.Recorder._schedule_compile_missing_statistics - if enable_statistics + if enable_missing_statistics else None ) migrate_states_context_ids = ( From 135ebaafa0a588689c0d3ab59ad0406d0b2b9583 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 18 Aug 2024 20:21:38 +0200 Subject: [PATCH 0860/1635] Add missing hass type hint in august tests (#124062) --- tests/components/august/mocks.py | 48 ++++++++++++++----------- tests/components/august/test_gateway.py | 8 ++--- 2 files changed, 31 insertions(+), 25 deletions(-) diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index a0f5b55a607..c2ab8ce743c 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -18,6 +18,7 @@ from yalexs.activity import ( ACTIVITY_ACTIONS_LOCK_OPERATION, SOURCE_LOCK_OPERATE, SOURCE_LOG, + Activity, BridgeOperationActivity, DoorbellDingActivity, DoorbellMotionActivity, @@ -65,8 +66,8 @@ def _timetoken(): @patch("yalexs.manager.gateway.ApiAsync") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_authenticate") async def _mock_setup_august( - hass, api_instance, pubnub_mock, authenticate_mock, api_mock, brand -): + hass: HomeAssistant, api_instance, pubnub_mock, authenticate_mock, api_mock, brand +) -> MockConfigEntry: """Set up august integration.""" authenticate_mock.side_effect = MagicMock( return_value=_mock_august_authentication( @@ -107,13 +108,13 @@ async def _create_august_with_devices( async def _create_august_api_with_devices( - hass, - devices, - api_call_side_effects=None, - activities=None, - pubnub=None, - brand=Brand.AUGUST, -): + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail], + api_call_side_effects: dict[str, Any] | None = None, + activities: list[Any] | None = None, + pubnub: AugustPubNub | None = None, + brand: Brand = Brand.AUGUST, +) -> tuple[MockConfigEntry, MagicMock]: if api_call_side_effects is None: api_call_side_effects = {} if pubnub is None: @@ -215,7 +216,10 @@ async def _create_august_api_with_devices( async def _mock_setup_august_with_api_side_effects( - hass, api_call_side_effects, pubnub, brand=Brand.AUGUST + hass: HomeAssistant, + api_call_side_effects: dict[str, Any], + pubnub: AugustPubNub, + brand: Brand = Brand.AUGUST, ): api_instance = MagicMock(name="Api", brand=brand) @@ -335,19 +339,21 @@ def _mock_august_lock_data(lockid="mocklockid1", houseid="mockhouseid1"): } -async def _mock_operative_august_lock_detail(hass): +async def _mock_operative_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online.json") -async def _mock_lock_with_offline_key(hass): 
+async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") -async def _mock_inoperative_august_lock_detail(hass): +async def _mock_inoperative_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.offline.json") -async def _mock_activities_from_fixture(hass, path): +async def _mock_activities_from_fixture( + hass: HomeAssistant, path: str +) -> list[Activity]: json_dict = await _load_json_fixture(hass, path) activities = [] for activity_json in json_dict: @@ -358,32 +364,32 @@ async def _mock_activities_from_fixture(hass, path): return activities -async def _mock_lock_from_fixture(hass, path): +async def _mock_lock_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: json_dict = await _load_json_fixture(hass, path) return LockDetail(json_dict) -async def _mock_doorbell_from_fixture(hass, path): +async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> DoorbellDetail: json_dict = await _load_json_fixture(hass, path) return DoorbellDetail(json_dict) -async def _load_json_fixture(hass, path): +async def _load_json_fixture(hass: HomeAssistant, path: str) -> Any: fixture = await hass.async_add_executor_job( load_fixture, os.path.join("august", path) ) return json.loads(fixture) -async def _mock_doorsense_enabled_august_lock_detail(hass): +async def _mock_doorsense_enabled_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") -async def _mock_doorsense_missing_august_lock_detail(hass): +async def _mock_doorsense_missing_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") -async def _mock_lock_with_unlatch(hass): +async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") @@ -411,7 +417,7 @@ def _mock_door_operation_activity(lock, action, offset): ) -def _activity_from_dict(activity_dict): +def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: action = activity_dict.get("action") activity_dict["dateTime"] = time.time() * 1000 diff --git a/tests/components/august/test_gateway.py b/tests/components/august/test_gateway.py index 74266397ed5..1603aeb3ecb 100644 --- a/tests/components/august/test_gateway.py +++ b/tests/components/august/test_gateway.py @@ -22,14 +22,14 @@ async def test_refresh_access_token(hass: HomeAssistant) -> None: @patch("yalexs.manager.gateway.AuthenticatorAsync.should_refresh") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_refresh_access_token") async def _patched_refresh_access_token( - hass, - new_token, - new_token_expire_time, + hass: HomeAssistant, + new_token: str, + new_token_expire_time: int, refresh_access_token_mock, should_refresh_mock, authenticate_mock, async_get_operable_locks_mock, -): +) -> None: authenticate_mock.side_effect = MagicMock( return_value=_mock_august_authentication( "original_token", 1234, AuthenticationState.AUTHENTICATED From 06d1bbc20f57d3aa7d8fa5f720a8703f0d74e64b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 18 Aug 2024 21:14:41 +0200 Subject: [PATCH 0861/1635] Update recorder tests to async (#124161) --- .../recorder/test_statistics_v23_migration.py | 272 +++++++++--------- 1 file changed, 140 insertions(+), 132 deletions(-) diff --git 
a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index dfa87fc9391..53c59635e8c 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -17,18 +17,17 @@ import pytest from homeassistant.components import recorder from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.util import session_scope -from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import setup_component import homeassistant.util.dt as dt_util from .common import ( CREATE_ENGINE_TARGET, + async_wait_recording_done, create_engine_test_for_schema_version_postfix, get_schema_module_path, - wait_recording_done, ) -from tests.common import get_test_home_assistant +from tests.common import async_test_home_assistant +from tests.typing import RecorderInstanceGenerator SCHEMA_VERSION_POSTFIX = "23_with_newer_columns" SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) @@ -37,8 +36,8 @@ SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) -def test_delete_duplicates( - recorder_db_url: str, caplog: pytest.LogCaptureFixture +async def test_delete_duplicates( + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test removal of duplicated statistics. @@ -176,42 +175,42 @@ def test_delete_duplicates( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - for stat in external_co2_statistics: - session.add(recorder.db_schema.Statistics.from_stats(3, stat)) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_2 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + for stat in external_co2_statistics: + 
session.add(recorder.db_schema.Statistics.from_stats(3, stat)) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with async_test_home_assistant() as hass, async_test_recorder(hass): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Found non identical" not in caplog.text @@ -221,8 +220,8 @@ def test_delete_duplicates( @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) -def test_delete_duplicates_many( - recorder_db_url: str, caplog: pytest.LogCaptureFixture +async def test_delete_duplicates_many( + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test removal of duplicated statistics. @@ -360,48 +359,48 @@ def test_delete_duplicates_many( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for _ in range(3000): + with session_scope(hass=hass) as session: session.add( - recorder.db_schema.Statistics.from_stats( - 1, external_energy_statistics_1[-1] + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 ) ) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - for stat in external_co2_statistics: - session.add(recorder.db_schema.Statistics.from_stats(3, stat)) + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_2 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for _ in range(3000): + session.add( + recorder.db_schema.Statistics.from_stats( + 1, external_energy_statistics_1[-1] + ) + ) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + for stat in external_co2_statistics: + session.add(recorder.db_schema.Statistics.from_stats(3, stat)) - hass.stop() + await hass.async_stop() # Test that the duplicates are 
removed during migration from schema 23 - with get_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with async_test_home_assistant() as hass, async_test_recorder(hass): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "Deleted 3002 duplicated statistics rows" in caplog.text assert "Found non identical" not in caplog.text @@ -412,8 +411,10 @@ def test_delete_duplicates_many( @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) -def test_delete_duplicates_non_identical( - recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path +async def test_delete_duplicates_non_identical( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test removal of duplicated statistics. @@ -521,38 +522,40 @@ def test_delete_duplicates_non_identical( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_2 + ) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - hass.config.config_dir = tmp_path - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with ( + async_test_home_assistant(config_dir=tmp_path) as hass, + async_test_recorder(hass), + ): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Deleted 1 non identical" in 
caplog.text @@ -561,8 +564,11 @@ def test_delete_duplicates_non_identical( isotime = dt_util.utcnow().isoformat() backup_file_name = f".storage/deleted_statistics.{isotime}.json" - with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file: - backup = json.load(backup_file) + def read_backup(): + with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file: + return json.load(backup_file) + + backup = await hass.async_add_executor_job(read_backup) assert backup == [ { @@ -597,8 +603,10 @@ def test_delete_duplicates_non_identical( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") -def test_delete_duplicates_short_term( - recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path +async def test_delete_duplicates_short_term( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test removal of duplicated statistics. @@ -637,37 +645,37 @@ def test_delete_duplicates_short_term( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) - ) - session.add( - recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) - ) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 + ) + ) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) + ) + session.add( + recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) + ) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - hass.config.config_dir = tmp_path - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with ( + async_test_home_assistant(config_dir=tmp_path) as hass, + async_test_recorder(hass), + ): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "duplicated statistics rows" not in caplog.text assert "Found non identical" not in caplog.text From 9e8260736b9a131dc2060b9774ddce6af774a092 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Sun, 18 Aug 2024 21:14:57 +0200 Subject: [PATCH 0862/1635] Fix schema for input_datetime.set_datetime action (#124108) --- homeassistant/components/input_datetime/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git 
a/homeassistant/components/input_datetime/__init__.py b/homeassistant/components/input_datetime/__init__.py index 5d2c1e7ff8d..dcc2865acad 100644 --- a/homeassistant/components/input_datetime/__init__.py +++ b/homeassistant/components/input_datetime/__init__.py @@ -176,14 +176,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( "set_datetime", vol.All( - vol.Schema( + cv.make_entity_service_schema( { vol.Optional(ATTR_DATE): cv.date, vol.Optional(ATTR_TIME): cv.time, vol.Optional(ATTR_DATETIME): cv.datetime, vol.Optional(ATTR_TIMESTAMP): vol.Coerce(float), }, - extra=vol.ALLOW_EXTRA, ), cv.has_at_least_one_key( ATTR_DATE, ATTR_TIME, ATTR_DATETIME, ATTR_TIMESTAMP From 1010edf4bd6a7f99322920754644e4552945028a Mon Sep 17 00:00:00 2001 From: ilan <31193909+iloveicedgreentea@users.noreply.github.com> Date: Sun, 18 Aug 2024 15:17:10 -0400 Subject: [PATCH 0863/1635] Add reconfigure flow to Madvr (#122477) * feat: add reconfigure * feat: add reconfigure step * fix: don't abort unique on reconfigure * fix: add success string * fix: improve reconfigure * fix: entry will never be none * fix: update ip in abort * fix: check unique id on reconfigure * feat: add test in case of new device * fix: fail reconfigure if mac changes * fix: abort instead of form * feat: use is, dont mock config flow * fix: implement comments --- homeassistant/components/madvr/config_flow.py | 145 +++++++++++------- homeassistant/components/madvr/strings.json | 20 ++- tests/components/madvr/const.py | 1 + tests/components/madvr/test_config_flow.py | 121 ++++++++++++++- 4 files changed, 227 insertions(+), 60 deletions(-) diff --git a/homeassistant/components/madvr/config_flow.py b/homeassistant/components/madvr/config_flow.py index cf43e03a68b..1ca1dd296d8 100644 --- a/homeassistant/components/madvr/config_flow.py +++ b/homeassistant/components/madvr/config_flow.py @@ -8,8 +8,9 @@ import aiohttp from madvr.madvr import HeartBeatError, Madvr import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN from .errors import CannotConnect @@ -18,13 +19,8 @@ _LOGGER = logging.getLogger(__name__) STEP_USER_DATA_SCHEMA = vol.Schema( { - vol.Required( - CONF_HOST, - ): str, - vol.Required( - CONF_PORT, - default=DEFAULT_PORT, - ): int, + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=DEFAULT_PORT): int, } ) @@ -36,81 +32,120 @@ class MadVRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + entry: ConfigEntry | None = None + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" + return await self._handle_config_step(user_input) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the device.""" + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reconfigure_confirm(user_input) + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + return await self._handle_config_step(user_input, step_id="reconfigure") + + async def _handle_config_step( + 
self, user_input: dict[str, Any] | None = None, step_id: str = "user" + ) -> ConfigFlowResult: + """Handle the configuration step for both initial setup and reconfiguration.""" errors: dict[str, str] = {} if user_input is not None: + _LOGGER.debug("User input: %s", user_input) host = user_input[CONF_HOST] port = user_input[CONF_PORT] try: - # ensure we can connect and get the mac address from device - mac = await self._test_connection(host, port) + mac = await test_connection(self.hass, host, port) except CannotConnect: _LOGGER.error("CannotConnect error caught") errors["base"] = "cannot_connect" else: if not mac: errors["base"] = "no_mac" - if not errors: - _LOGGER.debug("MAC address found: %s", mac) - # this will prevent the user from adding the same device twice and persist the mac address - await self.async_set_unique_id(mac) - self._abort_if_unique_id_configured() + else: + _LOGGER.debug("MAC address found: %s", mac) + # abort if the detected mac differs from the one in the entry + if self.entry: + existing_mac = self.entry.unique_id + if existing_mac != mac: + _LOGGER.debug( + "MAC address changed from %s to %s", existing_mac, mac + ) + # abort + return self.async_abort(reason="set_up_new_device") - # create the entry - return self.async_create_entry( - title=DEFAULT_NAME, - data=user_input, - ) + _LOGGER.debug("Reconfiguration done") + return self.async_update_reload_and_abort( + entry=self.entry, + data={**user_input, CONF_HOST: host, CONF_PORT: port}, + reason="reconfigure_successful", + ) + # abort if already configured with same mac + await self.async_set_unique_id(mac) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - # this will show the form or allow the user to retry if there was an error + _LOGGER.debug("Configuration successful") + return self.async_create_entry( + title=DEFAULT_NAME, + data=user_input, + ) + _LOGGER.debug("Showing form with errors: %s", errors) return self.async_show_form( - step_id="user", + step_id=step_id, data_schema=self.add_suggested_values_to_schema( STEP_USER_DATA_SCHEMA, user_input ), errors=errors, ) - async def _test_connection(self, host: str, port: int) -> str: - """Test if we can connect to the device and grab the mac.""" - madvr_client = Madvr(host=host, port=port, loop=self.hass.loop) - _LOGGER.debug("Testing connection to madVR at %s:%s", host, port) - # try to connect - try: - await asyncio.wait_for(madvr_client.open_connection(), timeout=15) - # connection can raise HeartBeatError if the device is not available or connection does not work - except (TimeoutError, aiohttp.ClientError, OSError, HeartBeatError) as err: - _LOGGER.error("Error connecting to madVR: %s", err) - raise CannotConnect from err - # check if we are connected - if not madvr_client.connected: - raise CannotConnect("Connection failed") +async def test_connection(hass: HomeAssistant, host: str, port: int) -> str: + """Test if we can connect to the device and grab the mac.""" + madvr_client = Madvr(host=host, port=port, loop=hass.loop) + _LOGGER.debug("Testing connection to madVR at %s:%s", host, port) + # try to connect + try: + await asyncio.wait_for(madvr_client.open_connection(), timeout=15) + # connection can raise HeartBeatError if the device is not available or connection does not work + except (TimeoutError, aiohttp.ClientError, OSError, HeartBeatError) as err: + _LOGGER.error("Error connecting to madVR: %s", err) + raise CannotConnect from err - # background tasks needed to capture realtime info - await madvr_client.async_add_tasks() + # check if 
we are connected + if not madvr_client.connected: + raise CannotConnect("Connection failed") - # background tasks needed to capture realtime info - await madvr_client.async_add_tasks() + # background tasks needed to capture realtime info + await madvr_client.async_add_tasks() - # wait for client to capture device info - retry_time = 15 - while not madvr_client.mac_address and retry_time > 0: - await asyncio.sleep(RETRY_INTERVAL) - retry_time -= 1 + # wait for client to capture device info + retry_time = 15 + while not madvr_client.mac_address and retry_time > 0: + await asyncio.sleep(RETRY_INTERVAL) + retry_time -= 1 - mac_address = madvr_client.mac_address - if mac_address: - _LOGGER.debug("Connected to madVR with MAC: %s", mac_address) - # close this connection because this client object will not be reused - await self._close_test_connection(madvr_client) - _LOGGER.debug("Connection test successful") - return mac_address + mac_address = madvr_client.mac_address + if mac_address: + _LOGGER.debug("Connected to madVR with MAC: %s", mac_address) + # close this connection because this client object will not be reused + await close_test_connection(madvr_client) + _LOGGER.debug("Connection test successful") + return mac_address + + +async def close_test_connection(madvr_client: Madvr) -> None: + """Close the test connection.""" + _LOGGER.debug("Closing test connection") + madvr_client.stop() + await madvr_client.async_cancel_tasks() + await madvr_client.close_connection() diff --git a/homeassistant/components/madvr/strings.json b/homeassistant/components/madvr/strings.json index 3e8e786f775..b8d30be23aa 100644 --- a/homeassistant/components/madvr/strings.json +++ b/homeassistant/components/madvr/strings.json @@ -3,7 +3,19 @@ "step": { "user": { "title": "Setup madVR Envy", - "description": "Your device needs to be turned in order to add the integation. ", + "description": "Your device needs to be on in order to add the integration. ", "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" }, "data_description": { "host": "The hostname or IP address of your madVR Envy device.", "port": "The port your madVR Envy is listening on. In 99% of cases, leave this as the default." } }, + "reconfigure": { + "title": "Reconfigure madVR Envy", + "description": "Your device needs to be on in order to reconfigure the integration. ", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your madVR Envy device.", + "port": "The port your madVR Envy is listening on. In 99% of cases, leave this as the default." + } + }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "no_mac": "A MAC address was not found. It required to identify the device. Please ensure your device is connectable." + "no_mac": "A MAC address was not found. It is required to identify the device. Please ensure your device is connectable.", + "set_up_new_device": "A new device was detected. Please set it up as a new entity instead of reconfiguring."
} }, "entity": { diff --git a/tests/components/madvr/const.py b/tests/components/madvr/const.py index 8c5e122377b..e1c5435fcbb 100644 --- a/tests/components/madvr/const.py +++ b/tests/components/madvr/const.py @@ -8,6 +8,7 @@ MOCK_CONFIG = { } MOCK_MAC = "00:11:22:33:44:55" +MOCK_MAC_NEW = "00:00:00:00:00:01" TEST_CON_ERROR = ConnectionError("Connection failed") TEST_IMP_ERROR = NotImplementedError("Not implemented") diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py index 6dc84fd6b00..65eba05c802 100644 --- a/tests/components/madvr/test_config_flow.py +++ b/tests/components/madvr/test_config_flow.py @@ -6,12 +6,12 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .const import MOCK_CONFIG, MOCK_MAC +from .const import MOCK_CONFIG, MOCK_MAC, MOCK_MAC_NEW from tests.common import MockConfigEntry @@ -126,3 +126,120 @@ async def test_duplicate( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + assert result["errors"] == {} + + # define new host + new_host = "192.168.1.100" + # make sure setting port works + new_port = 44078 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: new_host, CONF_PORT: new_port}, + ) + + # should get the abort with success result + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + # Verify that the config entry was updated + assert mock_config_entry.data[CONF_HOST] == new_host + assert mock_config_entry.data[CONF_PORT] == new_port + + # Verify that the connection was tested + mock_madvr_client.open_connection.assert_called() + mock_madvr_client.async_add_tasks.assert_called() + mock_madvr_client.async_cancel_tasks.assert_called() + + +async def test_reconfigure_new_device( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + # test reconfigure with a new device (should fail) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) + + # define new host + new_host = "192.168.1.100" + # make sure setting port works + new_port = 44078 + + # modify test_connection so it returns new_mac + mock_madvr_client.mac_address = MOCK_MAC_NEW + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: new_host, CONF_PORT: new_port}, + ) + + # unique id should remain unchanged with new device, should fail + assert mock_config_entry.unique_id == MOCK_MAC + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == 
"set_up_new_device" + + +async def test_reconfigure_flow_errors( + hass: HomeAssistant, + mock_madvr_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test error handling in reconfigure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure" + + # Test CannotConnect error + mock_madvr_client.open_connection.side_effect = TimeoutError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.1.100", CONF_PORT: 44077}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + + # Test no_mac error + mock_madvr_client.open_connection.side_effect = None + mock_madvr_client.connected = True + mock_madvr_client.mac_address = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.1.100", CONF_PORT: 44077}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "no_mac"} + + # Ensure errors are recoverable + mock_madvr_client.mac_address = MOCK_MAC + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "192.168.1.100", CONF_PORT: 44077}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" From 14c2ca85ec3a8ce8779c26a7af2bb3a6aa3d3735 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 18 Aug 2024 14:17:17 -0500 Subject: [PATCH 0864/1635] Refactor websocket handler to reduce complexity (#124174) --- .../components/websocket_api/http.py | 348 ++++++++++-------- 1 file changed, 188 insertions(+), 160 deletions(-) diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index 8ed3469d7ed..e33da9a8b4a 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -11,6 +11,7 @@ import logging from typing import TYPE_CHECKING, Any, Final from aiohttp import WSMsgType, web +from aiohttp.http_websocket import WebSocketWriter from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -124,7 +125,9 @@ class WebSocketHandler: return "finished connection" async def _writer( - self, send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]] + self, + connection: ActiveConnection, + send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], ) -> None: """Write outgoing messages.""" # Variables are set locally to avoid lookups in the loop @@ -134,7 +137,7 @@ class WebSocketHandler: loop = self._loop is_debug_log_enabled = partial(logger.isEnabledFor, logging.DEBUG) debug = logger.debug - can_coalesce = self._connection and self._connection.can_coalesce + can_coalesce = connection.can_coalesce ready_message_count = len(message_queue) # Exceptions if Socket disconnected or cancelled by connection handler try: @@ -148,7 +151,7 @@ class WebSocketHandler: if not can_coalesce: # coalesce may be enabled later in the connection - can_coalesce = self._connection and self._connection.can_coalesce + can_coalesce = connection.can_coalesce if not can_coalesce or ready_message_count == 1: message = message_queue.popleft() @@ -298,19 +301,16 @@ class WebSocketHandler: request = self._request wsock = self._wsock 
logger = self._logger - debug = logger.debug hass = self._hass - is_enabled_for = logger.isEnabledFor - logging_debug = logging.DEBUG try: async with asyncio.timeout(10): await wsock.prepare(request) except TimeoutError: - self._logger.warning("Timeout preparing request from %s", request.remote) + logger.warning("Timeout preparing request from %s", request.remote) return wsock - debug("%s: Connected from %s", self.description, request.remote) + logger.debug("%s: Connected from %s", self.description, request.remote) self._handle_task = asyncio.current_task() unsub_stop = hass.bus.async_listen( @@ -325,134 +325,25 @@ class WebSocketHandler: auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) - connection = None - disconnect_warn = None + connection: ActiveConnection | None = None + disconnect_warn: str | None = None try: - await send_bytes_text(AUTH_REQUIRED_MESSAGE) - - # Auth Phase - try: - msg = await wsock.receive(10) - except TimeoutError as err: - disconnect_warn = "Did not receive auth message within 10 seconds" - raise Disconnect from err - - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - raise Disconnect # noqa: TRY301 - - if msg.type != WSMsgType.TEXT: - disconnect_warn = "Received non-Text message." - raise Disconnect # noqa: TRY301 - - try: - auth_msg_data = json_loads(msg.data) - except ValueError as err: - disconnect_warn = "Received invalid JSON." - raise Disconnect from err - - if is_enabled_for(logging_debug): - debug("%s: Received %s", self.description, auth_msg_data) - connection = await auth.async_handle(auth_msg_data) - # As the webserver is now started before the start - # event we do not want to block for websocket responses - # - # We only start the writer queue after the auth phase is completed - # since there is no need to queue messages before the auth phase - self._connection = connection - self._writer_task = create_eager_task(self._writer(send_bytes_text)) - hass.data[DATA_CONNECTIONS] = hass.data.get(DATA_CONNECTIONS, 0) + 1 - async_dispatcher_send(hass, SIGNAL_WEBSOCKET_CONNECTED) - - self._authenticated = True - # - # - # Our websocket implementation is backed by a deque - # - # As back-pressure builds, the queue will back up and use more memory - # until we disconnect the client when the queue size reaches - # MAX_PENDING_MSG. When we are generating a high volume of websocket messages, - # we hit a bottleneck in aiohttp where it will wait for - # the buffer to drain before sending the next message and messages - # start backing up in the queue. - # - # https://github.com/aio-libs/aiohttp/issues/1367 added drains - # to the websocket writer to handle malicious clients and network issues. - # The drain causes multiple problems for us since the buffer cannot be - # drained fast enough when we deliver a high volume or large messages: - # - # - We end up disconnecting the client. The client will then reconnect, - # and the cycle repeats itself, which results in a significant amount of - # CPU usage. - # - # - Messages latency increases because messages cannot be moved into - # the TCP buffer because it is blocked waiting for the drain to happen because - # of the low default limit of 16KiB. By increasing the limit, we instead - # rely on the underlying TCP buffer and stack to deliver the messages which - # can typically happen much faster. - # - # After the auth phase is completed, and we are not concerned about - # the user being a malicious client, we set the limit to force a drain - # to 1MiB. 
1MiB is the maximum expected size of the serialized entity - # registry, which is the largest message we usually send. - # - # https://github.com/aio-libs/aiohttp/commit/b3c80ee3f7d5d8f0b8bc27afe52e4d46621eaf99 - # added a way to set the limit, but there is no way to actually - # reach the code to set the limit, so we have to set it directly. - # - writer._limit = 2**20 # noqa: SLF001 - async_handle_str = connection.async_handle - async_handle_binary = connection.async_handle_binary - - # Command phase - while not wsock.closed: - msg = await wsock.receive() - - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - break - - if msg.type is WSMsgType.BINARY: - if len(msg.data) < 1: - disconnect_warn = "Received invalid binary message." - break - handler = msg.data[0] - payload = msg.data[1:] - async_handle_binary(handler, payload) - continue - - if msg.type is not WSMsgType.TEXT: - disconnect_warn = "Received non-Text message." - break - - try: - command_msg_data = json_loads(msg.data) - except ValueError: - disconnect_warn = "Received invalid JSON." - break - - if is_enabled_for(logging_debug): - debug("%s: Received %s", self.description, command_msg_data) - - # command_msg_data is always deserialized from JSON as a list - if type(command_msg_data) is not list: # noqa: E721 - async_handle_str(command_msg_data) - continue - - for split_msg in command_msg_data: - async_handle_str(split_msg) - + connection = await self._async_handle_auth_phase(auth, send_bytes_text) + self._async_increase_writer_limit(writer) + await self._async_websocket_command_phase(connection, send_bytes_text) except asyncio.CancelledError: - debug("%s: Connection cancelled", self.description) + logger.debug("%s: Connection cancelled", self.description) raise - except Disconnect as ex: - debug("%s: Connection closed by client: %s", self.description, ex) + if disconnect_msg := str(ex): + disconnect_warn = disconnect_msg + logger.debug("%s: Connection closed by client: %s", self.description, ex) except Exception: - self._logger.exception( + logger.exception( "%s: Unexpected error inside websocket API", self.description ) - finally: unsub_stop() @@ -465,38 +356,175 @@ class WebSocketHandler: if self._ready_future and not self._ready_future.done(): self._ready_future.set_result(len(self._message_queue)) - # If the writer gets canceled we still need to close the websocket - # so we have another finally block to make sure we close the websocket - # if the writer gets canceled. 
- try: - if self._writer_task: - await self._writer_task - finally: - try: - # Make sure all error messages are written before closing - await wsock.close() - finally: - if disconnect_warn is None: - debug("%s: Disconnected", self.description) - else: - self._logger.warning( - "%s: Disconnected: %s", self.description, disconnect_warn - ) - - if connection is not None: - hass.data[DATA_CONNECTIONS] -= 1 - self._connection = None - - async_dispatcher_send(hass, SIGNAL_WEBSOCKET_DISCONNECTED) - - # Break reference cycles to make sure GC can happen sooner - self._wsock = None # type: ignore[assignment] - self._request = None # type: ignore[assignment] - self._hass = None # type: ignore[assignment] - self._logger = None # type: ignore[assignment] - self._message_queue = None # type: ignore[assignment] - self._handle_task = None - self._writer_task = None - self._ready_future = None + await self._async_cleanup_writer_and_close(disconnect_warn, connection) return wsock + + async def _async_handle_auth_phase( + self, + auth: AuthPhase, + send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + ) -> ActiveConnection: + """Handle the auth phase of the websocket connection.""" + await send_bytes_text(AUTH_REQUIRED_MESSAGE) + + # Auth Phase + try: + msg = await self._wsock.receive(10) + except TimeoutError as err: + raise Disconnect("Did not receive auth message within 10 seconds") from err + + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + raise Disconnect("Received close message during auth phase") + + if msg.type is not WSMsgType.TEXT: + raise Disconnect("Received non-Text message during auth phase") + + try: + auth_msg_data = json_loads(msg.data) + except ValueError as err: + raise Disconnect("Received invalid JSON during auth phase") from err + + if self._logger.isEnabledFor(logging.DEBUG): + self._logger.debug("%s: Received %s", self.description, auth_msg_data) + connection = await auth.async_handle(auth_msg_data) + # As the webserver is now started before the start + # event we do not want to block for websocket responses + # + # We only start the writer queue after the auth phase is completed + # since there is no need to queue messages before the auth phase + self._connection = connection + self._writer_task = create_eager_task(self._writer(connection, send_bytes_text)) + self._hass.data[DATA_CONNECTIONS] = self._hass.data.get(DATA_CONNECTIONS, 0) + 1 + async_dispatcher_send(self._hass, SIGNAL_WEBSOCKET_CONNECTED) + + self._authenticated = True + return connection + + @callback + def _async_increase_writer_limit(self, writer: WebSocketWriter) -> None: + # + # + # Our websocket implementation is backed by a deque + # + # As back-pressure builds, the queue will back up and use more memory + # until we disconnect the client when the queue size reaches + # MAX_PENDING_MSG. When we are generating a high volume of websocket messages, + # we hit a bottleneck in aiohttp where it will wait for + # the buffer to drain before sending the next message and messages + # start backing up in the queue. + # + # https://github.com/aio-libs/aiohttp/issues/1367 added drains + # to the websocket writer to handle malicious clients and network issues. + # The drain causes multiple problems for us since the buffer cannot be + # drained fast enough when we deliver a high volume or large messages: + # + # - We end up disconnecting the client. The client will then reconnect, + # and the cycle repeats itself, which results in a significant amount of + # CPU usage. 
+ # + # - Messages latency increases because messages cannot be moved into + # the TCP buffer because it is blocked waiting for the drain to happen because + # of the low default limit of 16KiB. By increasing the limit, we instead + # rely on the underlying TCP buffer and stack to deliver the messages which + # can typically happen much faster. + # + # After the auth phase is completed, and we are not concerned about + # the user being a malicious client, we set the limit to force a drain + # to 1MiB. 1MiB is the maximum expected size of the serialized entity + # registry, which is the largest message we usually send. + # + # https://github.com/aio-libs/aiohttp/commit/b3c80ee3f7d5d8f0b8bc27afe52e4d46621eaf99 + # added a way to set the limit, but there is no way to actually + # reach the code to set the limit, so we have to set it directly. + # + writer._limit = 2**20 # noqa: SLF001 + + async def _async_websocket_command_phase( + self, + connection: ActiveConnection, + send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + ) -> None: + """Handle the command phase of the websocket connection.""" + wsock = self._wsock + async_handle_str = connection.async_handle + async_handle_binary = connection.async_handle_binary + _debug_enabled = partial(self._logger.isEnabledFor, logging.DEBUG) + + # Command phase + while not wsock.closed: + msg = await wsock.receive() + + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + break + + if msg.type is WSMsgType.BINARY: + if len(msg.data) < 1: + raise Disconnect("Received invalid binary message.") + + handler = msg.data[0] + payload = msg.data[1:] + async_handle_binary(handler, payload) + continue + + if msg.type is not WSMsgType.TEXT: + raise Disconnect("Received non-Text message.") + + try: + command_msg_data = json_loads(msg.data) + except ValueError as ex: + raise Disconnect("Received invalid JSON.") from ex + + if _debug_enabled(): + self._logger.debug( + "%s: Received %s", self.description, command_msg_data + ) + + # command_msg_data is always deserialized from JSON as a list + if type(command_msg_data) is not list: # noqa: E721 + async_handle_str(command_msg_data) + continue + + for split_msg in command_msg_data: + async_handle_str(split_msg) + + async def _async_cleanup_writer_and_close( + self, disconnect_warn: str | None, connection: ActiveConnection | None + ) -> None: + """Cleanup the writer and close the websocket.""" + # If the writer gets canceled we still need to close the websocket + # so we have another finally block to make sure we close the websocket + # if the writer gets canceled. 
+ wsock = self._wsock + hass = self._hass + logger = self._logger + try: + if self._writer_task: + await self._writer_task + finally: + try: + # Make sure all error messages are written before closing + await wsock.close() + finally: + if disconnect_warn is None: + logger.debug("%s: Disconnected", self.description) + else: + logger.warning( + "%s: Disconnected: %s", self.description, disconnect_warn + ) + + if connection is not None: + hass.data[DATA_CONNECTIONS] -= 1 + self._connection = None + + async_dispatcher_send(hass, SIGNAL_WEBSOCKET_DISCONNECTED) + + # Break reference cycles to make sure GC can happen sooner + self._wsock = None # type: ignore[assignment] + self._request = None # type: ignore[assignment] + self._hass = None # type: ignore[assignment] + self._logger = None # type: ignore[assignment] + self._message_queue = None # type: ignore[assignment] + self._handle_task = None + self._writer_task = None + self._ready_future = None From 11d2258afc3fbbced7b1e70c33325f85c3e63954 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 18 Aug 2024 21:31:44 +0200 Subject: [PATCH 0865/1635] Improve code quality in Manual alarm (#123142) * Improve code quality in Manual alarm * Review * Remove helper * Remove unique id * Reset demo and fix unique id * next_state variable * Fixes * Is helper * Fix unique id * exception message * Fix mypy --- .strict-typing | 1 + .../components/demo/alarm_control_panel.py | 2 +- .../components/manual/alarm_control_panel.py | 192 +++++++++--------- homeassistant/components/manual/strings.json | 7 + mypy.ini | 10 + .../manual/test_alarm_control_panel.py | 16 +- 6 files changed, 128 insertions(+), 100 deletions(-) create mode 100644 homeassistant/components/manual/strings.json diff --git a/.strict-typing b/.strict-typing index c8f28c84f8a..07aed7b4ca1 100644 --- a/.strict-typing +++ b/.strict-typing @@ -295,6 +295,7 @@ homeassistant.components.lookin.* homeassistant.components.luftdaten.* homeassistant.components.madvr.* homeassistant.components.mailbox.* +homeassistant.components.manual.* homeassistant.components.map.* homeassistant.components.mastodon.* homeassistant.components.matrix.* diff --git a/homeassistant/components/demo/alarm_control_panel.py b/homeassistant/components/demo/alarm_control_panel.py index d1b558842b6..f9b791668e8 100644 --- a/homeassistant/components/demo/alarm_control_panel.py +++ b/homeassistant/components/demo/alarm_control_panel.py @@ -30,7 +30,7 @@ async def async_setup_entry( """Set up the Demo config entry.""" async_add_entities( [ - ManualAlarm( # type:ignore[no-untyped-call] + ManualAlarm( hass, "Security", "demo_alarm_control_panel", diff --git a/homeassistant/components/manual/alarm_control_panel.py b/homeassistant/components/manual/alarm_control_panel.py index 055e79867ab..c1910d0dfa1 100644 --- a/homeassistant/components/manual/alarm_control_panel.py +++ b/homeassistant/components/manual/alarm_control_panel.py @@ -3,12 +3,12 @@ from __future__ import annotations import datetime -import logging from typing import Any import voluptuous as vol from homeassistant.components.alarm_control_panel import ( + PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, CodeFormat, @@ -19,7 +19,6 @@ from homeassistant.const import ( CONF_DELAY_TIME, CONF_DISARM_AFTER_TRIGGER, CONF_NAME, - CONF_PLATFORM, CONF_TRIGGER_TIME, CONF_UNIQUE_ID, STATE_ALARM_ARMED_AWAY, @@ -33,15 +32,16 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, ) from homeassistant.core import 
HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_time from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -_LOGGER = logging.getLogger(__name__) +DOMAIN = "manual" CONF_ARMING_STATES = "arming_states" CONF_CODE_TEMPLATE = "code_template" @@ -85,7 +85,7 @@ ATTR_PREVIOUS_STATE = "previous_state" ATTR_NEXT_STATE = "next_state" -def _state_validator(config): +def _state_validator(config: dict[str, Any]) -> dict[str, Any]: """Validate the state.""" for state in SUPPORTED_PRETRIGGER_STATES: if CONF_DELAY_TIME not in config[state]: @@ -101,7 +101,7 @@ def _state_validator(config): return config -def _state_schema(state): +def _state_schema(state: str) -> vol.Schema: """Validate the state.""" schema = {} if state in SUPPORTED_PRETRIGGER_STATES: @@ -120,63 +120,64 @@ def _state_schema(state): PLATFORM_SCHEMA = vol.Schema( vol.All( - { - vol.Required(CONF_PLATFORM): "manual", - vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string, - vol.Optional(CONF_UNIQUE_ID): cv.string, - vol.Exclusive(CONF_CODE, "code validation"): cv.string, - vol.Exclusive(CONF_CODE_TEMPLATE, "code validation"): cv.template, - vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean, - vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME): vol.All( - cv.time_period, cv.positive_timedelta - ), - vol.Optional(CONF_ARMING_TIME, default=DEFAULT_ARMING_TIME): vol.All( - cv.time_period, cv.positive_timedelta - ), - vol.Optional(CONF_TRIGGER_TIME, default=DEFAULT_TRIGGER_TIME): vol.All( - cv.time_period, cv.positive_timedelta - ), - vol.Optional( - CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER - ): cv.boolean, - vol.Optional(CONF_ARMING_STATES, default=SUPPORTED_ARMING_STATES): vol.All( - cv.ensure_list, [vol.In(SUPPORTED_ARMING_STATES)] - ), - vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( - STATE_ALARM_ARMED_AWAY - ), - vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( - STATE_ALARM_ARMED_HOME - ), - vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( - STATE_ALARM_ARMED_NIGHT - ), - vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( - STATE_ALARM_ARMED_VACATION - ), - vol.Optional(STATE_ALARM_ARMED_CUSTOM_BYPASS, default={}): _state_schema( - STATE_ALARM_ARMED_CUSTOM_BYPASS - ), - vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( - STATE_ALARM_DISARMED - ), - vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( - STATE_ALARM_TRIGGERED - ), - }, + ALARM_CONTROL_PANEL_PLATFORM_SCHEMA.extend( + { + vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string, + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Exclusive(CONF_CODE, "code validation"): cv.string, + vol.Exclusive(CONF_CODE_TEMPLATE, "code validation"): cv.template, + vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean, + vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME): vol.All( + cv.time_period, cv.positive_timedelta + ), + vol.Optional(CONF_ARMING_TIME, default=DEFAULT_ARMING_TIME): vol.All( + cv.time_period, cv.positive_timedelta + ), + vol.Optional(CONF_TRIGGER_TIME, 
default=DEFAULT_TRIGGER_TIME): vol.All( + cv.time_period, cv.positive_timedelta + ), + vol.Optional( + CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER + ): cv.boolean, + vol.Optional( + CONF_ARMING_STATES, default=SUPPORTED_ARMING_STATES + ): vol.All(cv.ensure_list, [vol.In(SUPPORTED_ARMING_STATES)]), + vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( + STATE_ALARM_ARMED_AWAY + ), + vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( + STATE_ALARM_ARMED_HOME + ), + vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( + STATE_ALARM_ARMED_NIGHT + ), + vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( + STATE_ALARM_ARMED_VACATION + ), + vol.Optional( + STATE_ALARM_ARMED_CUSTOM_BYPASS, default={} + ): _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS), + vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( + STATE_ALARM_DISARMED + ), + vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( + STATE_ALARM_TRIGGERED + ), + }, + ), _state_validator, ) ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the manual alarm platform.""" - add_entities( + async_add_entities( [ ManualAlarm( hass, @@ -184,8 +185,8 @@ def setup_platform( config.get(CONF_UNIQUE_ID), config.get(CONF_CODE), config.get(CONF_CODE_TEMPLATE), - config.get(CONF_CODE_ARM_REQUIRED), - config.get(CONF_DISARM_AFTER_TRIGGER, DEFAULT_DISARM_AFTER_TRIGGER), + config[CONF_CODE_ARM_REQUIRED], + config[CONF_DISARM_AFTER_TRIGGER], config, ) ] @@ -206,28 +207,25 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): def __init__( self, - hass, - name, - unique_id, - code, - code_template, - code_arm_required, - disarm_after_trigger, - config, - ): + hass: HomeAssistant, + name: str, + unique_id: str | None, + code: str | None, + code_template: Template | None, + code_arm_required: bool, + disarm_after_trigger: bool, + config: dict[str, Any], + ) -> None: """Init the manual alarm panel.""" self._state = STATE_ALARM_DISARMED self._hass = hass self._attr_name = name self._attr_unique_id = unique_id - if code_template: - self._code = code_template - else: - self._code = code or None + self._code = code_template or code or None self._attr_code_arm_required = code_arm_required self._disarm_after_trigger = disarm_after_trigger self._previous_state = self._state - self._state_ts = None + self._state_ts: datetime.datetime = dt_util.utcnow() self._delay_time_by_state = { state: config[state][CONF_DELAY_TIME] @@ -253,7 +251,9 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): if self._state == STATE_ALARM_TRIGGERED: if self._within_pending_time(self._state): return STATE_ALARM_PENDING - trigger_time = self._trigger_time_by_state[self._previous_state] + trigger_time: datetime.timedelta = self._trigger_time_by_state[ + self._previous_state + ] if ( self._state_ts + self._pending_time(self._state) + trigger_time ) < dt_util.utcnow(): @@ -270,25 +270,27 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): return self._state @property - def _active_state(self): + def _active_state(self) -> str: """Get the current state.""" if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): return self._previous_state return self._state - def _arming_time(self, state): + def _arming_time(self, state: str) -> datetime.timedelta: """Get the arming time.""" - 
return self._arming_time_by_state[state] + arming_time: datetime.timedelta = self._arming_time_by_state[state] + return arming_time - def _pending_time(self, state): + def _pending_time(self, state: str) -> datetime.timedelta: """Get the pending time.""" - return self._delay_time_by_state[self._previous_state] + delay_time: datetime.timedelta = self._delay_time_by_state[self._previous_state] + return delay_time - def _within_arming_time(self, state): + def _within_arming_time(self, state: str) -> bool: """Get if the action is in the arming time window.""" return self._state_ts + self._arming_time(state) > dt_util.utcnow() - def _within_pending_time(self, state): + def _within_pending_time(self, state: str) -> bool: """Get if the action is in the pending time window.""" return self._state_ts + self._pending_time(state) > dt_util.utcnow() @@ -377,7 +379,7 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): self._state_ts + arming_time, ) - def _async_validate_code(self, code, state): + def _async_validate_code(self, code: str | None, state: str) -> None: """Validate given code.""" if ( state != STATE_ALARM_DISARMED and not self.code_arm_required @@ -394,24 +396,28 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): if not alarm_code or code == alarm_code: return - raise HomeAssistantError("Invalid alarm code provided") + raise ServiceValidationError( + "Invalid alarm code provided", + translation_domain=DOMAIN, + translation_key="invalid_code", + ) @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): - return { - ATTR_PREVIOUS_STATE: self._previous_state, - ATTR_NEXT_STATE: self._state, - } - if self.state == STATE_ALARM_TRIGGERED: - return { - ATTR_PREVIOUS_STATE: self._previous_state, - } - return {} + prev_state: str | None = self._previous_state + state: str | None = self._state + elif self.state == STATE_ALARM_TRIGGERED: + prev_state = self._previous_state + state = None + else: + prev_state = None + state = None + return {ATTR_PREVIOUS_STATE: prev_state, ATTR_NEXT_STATE: state} @callback - def async_scheduled_update(self, now): + def async_scheduled_update(self, now: datetime.datetime) -> None: """Update state at a scheduled point in time.""" self.async_write_ha_state() @@ -420,13 +426,13 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): await super().async_added_to_hass() if state := await self.async_get_last_state(): self._state_ts = state.last_updated - if hasattr(state, "attributes") and ATTR_NEXT_STATE in state.attributes: + if next_state := state.attributes.get(ATTR_NEXT_STATE): # If in arming or pending state we record the transition, # not the current state - self._state = state.attributes[ATTR_NEXT_STATE] + self._state = next_state else: self._state = state.state - if hasattr(state, "attributes") and ATTR_PREVIOUS_STATE in state.attributes: - self._previous_state = state.attributes[ATTR_PREVIOUS_STATE] + if prev_state := state.attributes.get(ATTR_PREVIOUS_STATE): + self._previous_state = prev_state self._async_set_state_update_events() diff --git a/homeassistant/components/manual/strings.json b/homeassistant/components/manual/strings.json new file mode 100644 index 00000000000..f26a1570d05 --- /dev/null +++ b/homeassistant/components/manual/strings.json @@ -0,0 +1,7 @@ +{ + "exceptions": { + "invalid_code": { + "message": "Invalid alarm code provided" + } + } +} diff --git a/mypy.ini b/mypy.ini index ca10d05af86..2a361f56397 100644 --- 
a/mypy.ini +++ b/mypy.ini @@ -2706,6 +2706,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.manual.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.map.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/tests/components/manual/test_alarm_control_panel.py b/tests/components/manual/test_alarm_control_panel.py index 6c9ba9ee9a0..7900dfd1c91 100644 --- a/tests/components/manual/test_alarm_control_panel.py +++ b/tests/components/manual/test_alarm_control_panel.py @@ -9,6 +9,10 @@ import pytest from homeassistant.components import alarm_control_panel from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature from homeassistant.components.demo import alarm_control_panel as demo +from homeassistant.components.manual.alarm_control_panel import ( + ATTR_NEXT_STATE, + ATTR_PREVIOUS_STATE, +) from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -28,7 +32,7 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, ) from homeassistant.core import CoreState, HomeAssistant, State -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -227,7 +231,7 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): + with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( alarm_control_panel.DOMAIN, service, @@ -1089,7 +1093,7 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N assert hass.states.get(entity_id).state == STATE_ALARM_PENDING - with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): + with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) assert hass.states.get(entity_id).state == STATE_ALARM_PENDING @@ -1133,7 +1137,7 @@ async def test_disarm_with_template_code(hass: HomeAssistant) -> None: state = hass.states.get(entity_id) assert state.state == STATE_ALARM_ARMED_HOME - with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): + with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) @@ -1411,8 +1415,8 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N state = hass.states.get(entity_id) assert state - assert state.attributes["previous_state"] == previous_state - assert "next_state" not in state.attributes + assert state.attributes[ATTR_PREVIOUS_STATE] == previous_state + assert state.attributes[ATTR_NEXT_STATE] is None assert state.state == STATE_ALARM_TRIGGERED future = time + timedelta(seconds=121) From 02b26ac4e69b93249a1dffaf51d0fb5148b9d37a Mon Sep 17 00:00:00 2001 From: Dylan Corrales Date: Sun, 18 Aug 2024 16:11:37 -0400 Subject: [PATCH 0866/1635] Add state for alarm_control_panel in Prometheus (#123753) * Prometheus: Add state for alarm_control_panel * 
Prometheus: Add pytest for alarm_control_panel --- .../components/prometheus/__init__.py | 39 +++++++++++ tests/components/prometheus/test_init.py | 70 +++++++++++++++++++ 2 files changed, 109 insertions(+) diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index f7037a685a6..8cc0a8f4b6a 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -51,6 +51,16 @@ from homeassistant.const import ( CONTENT_TYPE_TEXT_PLAIN, EVENT_STATE_CHANGED, PERCENTAGE, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_DISARMING, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_CLOSING, STATE_ON, @@ -807,6 +817,35 @@ class PrometheusMetrics: value = self.state_as_number(state) metric.labels(**self._labels(state)).set(value) + def _handle_alarm_control_panel(self, state: State) -> None: + current_state = state.state + + if current_state: + metric = self._metric( + "alarm_control_panel_state", + prometheus_client.Gauge, + "State of the alarm control panel (0/1)", + ["state"], + ) + + alarm_states = [ + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, + STATE_ALARM_PENDING, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMING, + ] + + for alarm_state in alarm_states: + metric.labels(**dict(self._labels(state), state=alarm_state)).set( + float(alarm_state == current_state) + ) + class PrometheusView(HomeAssistantView): """Handle Prometheus requests.""" diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 12643c39dfa..0dfa3210671 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -11,6 +11,7 @@ import prometheus_client import pytest from homeassistant.components import ( + alarm_control_panel, binary_sensor, climate, counter, @@ -61,6 +62,8 @@ from homeassistant.const import ( CONTENT_TYPE_TEXT_PLAIN, DEGREE, PERCENTAGE, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, STATE_CLOSED, STATE_CLOSING, STATE_HOME, @@ -632,6 +635,43 @@ async def test_fan( ) +@pytest.mark.parametrize("namespace", [""]) +async def test_alarm_control_panel( + client: ClientSessionGenerator, + alarm_control_panel_entities: dict[str, er.RegistryEntry], +) -> None: + """Test prometheus metrics for alarm control panel.""" + body = await generate_latest_metrics(client) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_1",' + 'friendly_name="Alarm Control Panel 1",' + 'state="armed_away"} 1.0' in body + ) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_1",' + 'friendly_name="Alarm Control Panel 1",' + 'state="disarmed"} 0.0' in body + ) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_2",' + 'friendly_name="Alarm Control Panel 2",' + 'state="armed_home"} 1.0' in body + ) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_2",' + 'friendly_name="Alarm Control Panel 2",' + 'state="armed_away"} 0.0' in body + ) + + 
@pytest.mark.parametrize("namespace", [""]) async def test_cover( client: ClientSessionGenerator, cover_entities: dict[str, er.RegistryEntry] @@ -1903,6 +1943,36 @@ async def fan_fixture( return data +@pytest.fixture(name="alarm_control_panel_entities") +async def alarm_control_panel_fixture( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> dict[str, er.RegistryEntry]: + """Simulate alarm control panel entities.""" + data = {} + alarm_control_panel_1 = entity_registry.async_get_or_create( + domain=alarm_control_panel.DOMAIN, + platform="test", + unique_id="alarm_control_panel_1", + suggested_object_id="alarm_control_panel_1", + original_name="Alarm Control Panel 1", + ) + set_state_with_entry(hass, alarm_control_panel_1, STATE_ALARM_ARMED_AWAY) + data["alarm_control_panel_1"] = alarm_control_panel_1 + + alarm_control_panel_2 = entity_registry.async_get_or_create( + domain=alarm_control_panel.DOMAIN, + platform="test", + unique_id="alarm_control_panel_2", + suggested_object_id="alarm_control_panel_2", + original_name="Alarm Control Panel 2", + ) + set_state_with_entry(hass, alarm_control_panel_2, STATE_ALARM_ARMED_HOME) + data["alarm_control_panel_2"] = alarm_control_panel_2 + + await hass.async_block_till_done() + return data + + @pytest.fixture(name="person_entities") async def person_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry From 50f9c1e5a4433717cc84beb836d12def4039e90a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 18 Aug 2024 22:41:31 +0200 Subject: [PATCH 0867/1635] Fix Pure AQI value sensor in Sensibo (#124151) * fix Pure AQI value sensor in Sensibo * Fix tests * Make enum --- homeassistant/components/sensibo/manifest.json | 2 +- homeassistant/components/sensibo/sensor.py | 10 +++++----- homeassistant/components/sensibo/strings.json | 8 ++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../sensibo/snapshots/test_diagnostics.ambr | 7 +++++-- tests/components/sensibo/snapshots/test_sensor.ambr | 11 +++++++---- tests/components/sensibo/test_sensor.py | 12 ++++++------ 8 files changed, 34 insertions(+), 20 deletions(-) diff --git a/homeassistant/components/sensibo/manifest.json b/homeassistant/components/sensibo/manifest.json index 5a195a8a4cc..610695aaf7b 100644 --- a/homeassistant/components/sensibo/manifest.json +++ b/homeassistant/components/sensibo/manifest.json @@ -15,5 +15,5 @@ "iot_class": "cloud_polling", "loggers": ["pysensibo"], "quality_scale": "platinum", - "requirements": ["pysensibo==1.0.36"] + "requirements": ["pysensibo==1.1.0"] } diff --git a/homeassistant/components/sensibo/sensor.py b/homeassistant/components/sensibo/sensor.py index 16adfd5afe3..a6a70ea6c49 100644 --- a/homeassistant/components/sensibo/sensor.py +++ b/homeassistant/components/sensibo/sensor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from datetime import datetime from typing import TYPE_CHECKING, Any -from pysensibo.model import MotionSensor, SensiboDevice +from pysensibo.model import MotionSensor, PureAQI, SensiboDevice from homeassistant.components.sensor import ( SensorDeviceClass, @@ -97,11 +97,11 @@ MOTION_SENSOR_TYPES: tuple[SensiboMotionSensorEntityDescription, ...] = ( PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] 
= ( SensiboDeviceSensorEntityDescription( key="pm25", - device_class=SensorDeviceClass.PM25, - native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.pm25, + translation_key="pm25_pure", + device_class=SensorDeviceClass.ENUM, + value_fn=lambda data: data.pm25_pure.name.lower() if data.pm25_pure else None, extra_fn=None, + options=[aqi.name.lower() for aqi in PureAQI], ), SensiboDeviceSensorEntityDescription( key="pure_sensitivity", diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index d93c2a54adb..60a32028017 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -110,6 +110,14 @@ "s": "Sensitive" } }, + "pm25_pure": { + "name": "Pure AQI", + "state": { + "good": "Good", + "moderate": "Moderate", + "bad": "Bad" + } + }, "timer_time": { "name": "Timer end time" }, diff --git a/requirements_all.txt b/requirements_all.txt index 29cd8401045..e0b7d069b18 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2169,7 +2169,7 @@ pysaj==0.0.16 pyschlage==2024.8.0 # homeassistant.components.sensibo -pysensibo==1.0.36 +pysensibo==1.1.0 # homeassistant.components.serial pyserial-asyncio-fast==0.13 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f351b43fdb8..137cdf14b86 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1732,7 +1732,7 @@ pysabnzbd==1.1.1 pyschlage==2024.8.0 # homeassistant.components.sensibo -pysensibo==1.0.36 +pysensibo==1.1.0 # homeassistant.components.acer_projector # homeassistant.components.crownstone diff --git a/tests/components/sensibo/snapshots/test_diagnostics.ambr b/tests/components/sensibo/snapshots/test_diagnostics.ambr index a33209f7c88..cc77318239e 100644 --- a/tests/components/sensibo/snapshots/test_diagnostics.ambr +++ b/tests/components/sensibo/snapshots/test_diagnostics.ambr @@ -91,7 +91,8 @@ 'motion_sensors': dict({ }), 'name': 'Kitchen', - 'pm25': 1, + 'pm25': None, + 'pm25_pure': 1, 'pure_ac_integration': False, 'pure_boost_enabled': False, 'pure_conf': dict({ @@ -424,6 +425,7 @@ }), 'name': 'Hallway', 'pm25': None, + 'pm25_pure': None, 'pure_ac_integration': None, 'pure_boost_enabled': None, 'pure_conf': dict({ @@ -550,7 +552,8 @@ 'motion_sensors': dict({ }), 'name': 'Bedroom', - 'pm25': 1, + 'pm25': None, + 'pm25_pure': 1, 'pure_ac_integration': False, 'pure_boost_enabled': False, 'pure_conf': dict({ diff --git a/tests/components/sensibo/snapshots/test_sensor.ambr b/tests/components/sensibo/snapshots/test_sensor.ambr index d645bdbd383..cd8d510b6cc 100644 --- a/tests/components/sensibo/snapshots/test_sensor.ambr +++ b/tests/components/sensibo/snapshots/test_sensor.ambr @@ -1,10 +1,13 @@ # serializer version: 1 # name: test_sensor ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'Kitchen PM2.5', - 'state_class': , - 'unit_of_measurement': 'µg/m³', + 'device_class': 'enum', + 'friendly_name': 'Kitchen Pure AQI', + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), }) # --- # name: test_sensor.1 diff --git a/tests/components/sensibo/test_sensor.py b/tests/components/sensibo/test_sensor.py index 3c6fb584a6e..5fc761f178a 100644 --- a/tests/components/sensibo/test_sensor.py +++ b/tests/components/sensibo/test_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import patch -from pysensibo.model import SensiboData +from pysensibo.model 
import PureAQI, SensiboData import pytest from syrupy.assertion import SnapshotAssertion @@ -27,17 +27,17 @@ async def test_sensor( """Test the Sensibo sensor.""" state1 = hass.states.get("sensor.hallway_motion_sensor_battery_voltage") - state2 = hass.states.get("sensor.kitchen_pm2_5") + state2 = hass.states.get("sensor.kitchen_pure_aqi") state3 = hass.states.get("sensor.kitchen_pure_sensitivity") state4 = hass.states.get("sensor.hallway_climate_react_low_temperature_threshold") assert state1.state == "3000" - assert state2.state == "1" + assert state2.state == "good" assert state3.state == "n" assert state4.state == "0.0" assert state2.attributes == snapshot assert state4.attributes == snapshot - monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25", 2) + monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25_pure", PureAQI(2)) with patch( "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", @@ -49,5 +49,5 @@ async def test_sensor( ) await hass.async_block_till_done() - state1 = hass.states.get("sensor.kitchen_pm2_5") - assert state1.state == "2" + state1 = hass.states.get("sensor.kitchen_pure_aqi") + assert state1.state == "moderate" From 05aeb3fbd1ed4c0788e4b4bab48332c3eb2cfeec Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 18 Aug 2024 22:41:45 +0200 Subject: [PATCH 0868/1635] Bump yalesmartalarmclient to 0.4.0 (#124165) * Bump yalesmartalarmclient to 0.4.0 * Fix various --- .../yale_smart_alarm/coordinator.py | 15 ++---- .../yale_smart_alarm/diagnostics.py | 3 +- .../components/yale_smart_alarm/lock.py | 11 ++-- .../components/yale_smart_alarm/manifest.json | 2 +- .../components/yale_smart_alarm/strings.json | 3 ++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/yale_smart_alarm/conftest.py | 53 +++++++++++++++++-- .../snapshots/test_diagnostics.ambr | 16 +++--- .../yale_smart_alarm/test_coordinator.py | 39 +++++++------- .../components/yale_smart_alarm/test_lock.py | 32 ++++++----- .../yale_smart_alarm/test_sensor.py | 5 +- 12 files changed, 121 insertions(+), 62 deletions(-) diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 328558d0aba..1067b9279a4 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -146,12 +146,7 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """Fetch data from Yale.""" try: arm_status = self.yale.get_armed_status() - data = self.yale.get_all() - cycle = data["CYCLE"] - status = data["STATUS"] - online = data["ONLINE"] - panel_info = data["PANEL INFO"] - + data = self.yale.get_information() except AuthenticationError as error: raise ConfigEntryAuthFailed from error except YALE_BASE_ERRORS as error: @@ -159,8 +154,8 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): return { "arm_status": arm_status, - "cycle": cycle, - "status": status, - "online": online, - "panel_info": panel_info, + "cycle": data.cycle, + "status": data.status, + "online": data.online, + "panel_info": data.panel_info, } diff --git a/homeassistant/components/yale_smart_alarm/diagnostics.py b/homeassistant/components/yale_smart_alarm/diagnostics.py index 82d2ca9a915..eb7b2be9fb4 100644 --- a/homeassistant/components/yale_smart_alarm/diagnostics.py +++ b/homeassistant/components/yale_smart_alarm/diagnostics.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import asdict from typing import 
Any from homeassistant.components.diagnostics import async_redact_data @@ -29,4 +30,4 @@ async def async_get_config_entry_diagnostics( assert coordinator.yale get_all_data = await hass.async_add_executor_job(coordinator.yale.get_all) - return async_redact_data(get_all_data, TO_REDACT) + return async_redact_data(asdict(get_all_data), TO_REDACT) diff --git a/homeassistant/components/yale_smart_alarm/lock.py b/homeassistant/components/yale_smart_alarm/lock.py index 3b4d0a19039..386e546afbf 100644 --- a/homeassistant/components/yale_smart_alarm/lock.py +++ b/homeassistant/components/yale_smart_alarm/lock.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any from homeassistant.components.lock import LockEntity from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import YaleConfigEntry @@ -61,17 +61,22 @@ class YaleDoorlock(YaleEntity, LockEntity): """Set lock.""" if TYPE_CHECKING: assert self.coordinator.yale, "Connection to API is missing" + if command == "unlocked" and not code: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_code", + ) try: get_lock = await self.hass.async_add_executor_job( self.coordinator.yale.lock_api.get, self.lock_name ) - if command == "locked": + if get_lock and command == "locked": lock_state = await self.hass.async_add_executor_job( self.coordinator.yale.lock_api.close_lock, get_lock, ) - if command == "unlocked": + if code and get_lock and command == "unlocked": lock_state = await self.hass.async_add_executor_job( self.coordinator.yale.lock_api.open_lock, get_lock, code ) diff --git a/homeassistant/components/yale_smart_alarm/manifest.json b/homeassistant/components/yale_smart_alarm/manifest.json index ed494505bae..92dd774d1d9 100644 --- a/homeassistant/components/yale_smart_alarm/manifest.json +++ b/homeassistant/components/yale_smart_alarm/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale_smart_alarm", "iot_class": "cloud_polling", "loggers": ["yalesmartalarmclient"], - "requirements": ["yalesmartalarmclient==0.3.9"] + "requirements": ["yalesmartalarmclient==0.4.0"] } diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index ce89c9e69ea..63260c03e7f 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -67,6 +67,9 @@ "set_lock": { "message": "Could not set lock for {name}: {error}" }, + "no_code": { + "message": "Can not unlock without code" + }, "could_not_change_lock": { "message": "Could not set lock, check system ready for lock" }, diff --git a/requirements_all.txt b/requirements_all.txt index e0b7d069b18..9d3825d4f4e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2955,7 +2955,7 @@ xmltodict==0.13.0 xs1-api-client==3.0.0 # homeassistant.components.yale_smart_alarm -yalesmartalarmclient==0.3.9 +yalesmartalarmclient==0.4.0 # homeassistant.components.august # homeassistant.components.yalexs_ble diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 137cdf14b86..828cafcf37b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2335,7 +2335,7 @@ xknxproject==3.7.1 xmltodict==0.13.0 # homeassistant.components.yale_smart_alarm 
-yalesmartalarmclient==0.3.9 +yalesmartalarmclient==0.4.0 # homeassistant.components.august # homeassistant.components.yalexs_ble diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 9583df5faa6..6ac6dfc6871 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -7,6 +7,7 @@ from typing import Any from unittest.mock import Mock, patch import pytest +from yalesmartalarmclient import YaleSmartAlarmData from yalesmartalarmclient.const import YALE_STATE_ARM_FULL from homeassistant.components.yale_smart_alarm.const import DOMAIN, PLATFORMS @@ -33,7 +34,10 @@ async def patch_platform_constant() -> list[Platform]: @pytest.fixture async def load_config_entry( - hass: HomeAssistant, load_json: dict[str, Any], load_platforms: list[Platform] + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + get_all_data: YaleSmartAlarmData, + load_platforms: list[Platform], ) -> tuple[MockConfigEntry, Mock]: """Set up the Yale Smart Living integration in Home Assistant.""" with patch("homeassistant.components.yale_smart_alarm.PLATFORMS", load_platforms): @@ -56,7 +60,8 @@ async def load_config_entry( client = mock_client_class.return_value client.auth = Mock() client.lock_api = Mock() - client.get_all.return_value = load_json + client.get_all.return_value = get_all_data + client.get_information.return_value = get_data client.get_armed_status.return_value = YALE_STATE_ARM_FULL await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -64,10 +69,50 @@ async def load_config_entry( return (config_entry, client) -@pytest.fixture(name="load_json", scope="package") -def load_json_from_fixture() -> dict[str, Any]: +@pytest.fixture(name="loaded_fixture", scope="package") +def get_fixture_data() -> dict[str, Any]: """Load fixture with json data and return.""" data_fixture = load_fixture("get_all.json", "yale_smart_alarm") json_data: dict[str, Any] = json.loads(data_fixture) return json_data + + +@pytest.fixture(name="get_data", scope="package") +def get_update_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: + """Load update data and return.""" + + status = loaded_fixture["STATUS"] + cycle = loaded_fixture["CYCLE"] + online = loaded_fixture["ONLINE"] + panel_info = loaded_fixture["PANEL INFO"] + return YaleSmartAlarmData( + status=status, + cycle=cycle, + online=online, + panel_info=panel_info, + ) + + +@pytest.fixture(name="get_all_data", scope="package") +def get_diag_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: + """Load all data and return.""" + + devices = loaded_fixture["DEVICES"] + mode = loaded_fixture["MODE"] + status = loaded_fixture["STATUS"] + cycle = loaded_fixture["CYCLE"] + online = loaded_fixture["ONLINE"] + history = loaded_fixture["HISTORY"] + panel_info = loaded_fixture["PANEL INFO"] + auth_check = loaded_fixture["AUTH CHECK"] + return YaleSmartAlarmData( + devices=devices, + mode=mode, + status=status, + cycle=cycle, + online=online, + history=history, + panel_info=panel_info, + auth_check=auth_check, + ) diff --git a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr index a5dfe4b50dd..d4bbd42aaeb 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'AUTH CHECK': 
dict({ + 'auth_check': dict({ 'agent': False, 'dealer_group': 'yale', 'dealer_id': '605', @@ -16,7 +16,7 @@ 'user_id': '**REDACTED**', 'xml_version': '2', }), - 'CYCLE': dict({ + 'cycle': dict({ 'alarm_event_latest': None, 'capture_latest': None, 'device_status': list([ @@ -650,7 +650,7 @@ 'utc_event_time': None, }), }), - 'DEVICES': list([ + 'devices': list([ dict({ 'address': '**REDACTED**', 'area': '1', @@ -1249,7 +1249,7 @@ 'type_no': '40', }), ]), - 'HISTORY': list([ + 'history': list([ dict({ 'area': 1, 'cid': '18180701000', @@ -1391,14 +1391,14 @@ 'zone': 1, }), ]), - 'MODE': list([ + 'mode': list([ dict({ 'area': '1', 'mode': 'disarm', }), ]), - 'ONLINE': 'online', - 'PANEL INFO': dict({ + 'online': 'online', + 'panel_info': dict({ 'SMS_Balance': '50', 'contact': '', 'dealer_name': 'Poland', @@ -1416,7 +1416,7 @@ 'zb_version': '4.1.2.6.2', 'zw_version': '', }), - 'STATUS': dict({ + 'status': dict({ 'acfail': 'main.normal', 'battery': 'main.normal', 'gsm_rssi': '0', diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 6f1125fcf65..41362f2318a 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -3,12 +3,15 @@ from __future__ import annotations from datetime import timedelta -from typing import Any from unittest.mock import Mock, patch import pytest -from yalesmartalarmclient.const import YALE_STATE_ARM_FULL -from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError +from yalesmartalarmclient import ( + YALE_STATE_ARM_FULL, + AuthenticationError, + UnknownError, + YaleSmartAlarmData, +) from homeassistant.components.yale_smart_alarm.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -32,7 +35,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed ) async def test_coordinator_setup_errors( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, p_error: Exception, ) -> None: """Test the Yale Smart Living coordinator with errors.""" @@ -64,7 +67,7 @@ async def test_coordinator_setup_errors( async def test_coordinator_setup_and_update_errors( hass: HomeAssistant, load_config_entry: tuple[MockConfigEntry, Mock], - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, ) -> None: """Test the Yale Smart Living coordinator with errors.""" @@ -74,51 +77,51 @@ async def test_coordinator_setup_and_update_errors( assert state.state == STATE_ALARM_ARMED_AWAY client.reset_mock() - client.get_all.side_effect = ConnectionError("Could not connect") + client.get_information.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = ConnectionError("Could not connect") + client.get_information.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect 
= TimeoutError("Could not connect") + client.get_information.side_effect = TimeoutError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=3)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = UnknownError("info") + client.get_information.side_effect = UnknownError("info") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=4)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = None - client.get_all.return_value = load_json + client.get_information.side_effect = None + client.get_information.return_value = get_data client.get_armed_status.return_value = YALE_STATE_ARM_FULL async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_ALARM_ARMED_AWAY client.reset_mock() - client.get_all.side_effect = AuthenticationError("Can not authenticate") + client.get_information.side_effect = AuthenticationError("Can not authenticate") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=6)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/yale_smart_alarm/test_lock.py b/tests/components/yale_smart_alarm/test_lock.py index 09ce8529084..7c67703924b 100644 --- a/tests/components/yale_smart_alarm/test_lock.py +++ b/tests/components/yale_smart_alarm/test_lock.py @@ -3,13 +3,11 @@ from __future__ import annotations from copy import deepcopy -from typing import Any from unittest.mock import Mock import pytest from syrupy.assertion import SnapshotAssertion -from yalesmartalarmclient.exceptions import UnknownError -from yalesmartalarmclient.lock import YaleDoorManAPI +from yalesmartalarmclient import UnknownError, YaleDoorManAPI, YaleSmartAlarmData from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.const import ( @@ -20,7 +18,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -47,7 +45,7 @@ async def test_lock( ) async def test_lock_service_calls( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, @@ -56,8 +54,8 @@ async def test_lock_service_calls( client = load_config_entry[1] - data = deepcopy(load_json) - data["data"] = data.pop("DEVICES") + data = deepcopy(get_data.cycle) + data["data"] = 
data.pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "000"}) @@ -66,6 +64,14 @@ async def test_lock_service_calls( state = hass.states.get("lock.device1") assert state.state == "locked" + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_UNLOCK, + {ATTR_ENTITY_ID: "lock.device1"}, + blocking=True, + ) + await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, @@ -93,7 +99,7 @@ async def test_lock_service_calls( ) async def test_lock_service_call_fails( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, @@ -102,8 +108,8 @@ async def test_lock_service_call_fails( client = load_config_entry[1] - data = deepcopy(load_json) - data["data"] = data.pop("DEVICES") + data = deepcopy(get_data.cycle) + data["data"] = data.pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(side_effect=UnknownError("test_side_effect")) @@ -145,7 +151,7 @@ async def test_lock_service_call_fails( ) async def test_lock_service_call_fails_with_incorrect_status( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, @@ -154,8 +160,8 @@ async def test_lock_service_call_fails_with_incorrect_status( client = load_config_entry[1] - data = deepcopy(load_json) - data["data"] = data.pop("DEVICES") + data = deepcopy(get_data.cycle) + data["data"] = data.pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "FFF"}) diff --git a/tests/components/yale_smart_alarm/test_sensor.py b/tests/components/yale_smart_alarm/test_sensor.py index d91ddc0e6ce..848d31cedc3 100644 --- a/tests/components/yale_smart_alarm/test_sensor.py +++ b/tests/components/yale_smart_alarm/test_sensor.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import Any from unittest.mock import Mock +from yalesmartalarmclient import YaleSmartAlarmData + from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -13,7 +14,7 @@ from tests.common import MockConfigEntry async def test_coordinator_setup_and_update_errors( hass: HomeAssistant, load_config_entry: tuple[MockConfigEntry, Mock], - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, ) -> None: """Test the Yale Smart Living coordinator with errors.""" From 1fdcbc3f1550b049dc47540a5f581180edf18b11 Mon Sep 17 00:00:00 2001 From: tdfountain <174762217+tdfountain@users.noreply.github.com> Date: Sun, 18 Aug 2024 14:14:54 -0700 Subject: [PATCH 0869/1635] Fix comment typo for _serial_from_status() in NUT (#124175) Fix comment typo for _serial_from_status() --- homeassistant/components/nut/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 3825db92983..2ce67c76649 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -172,7 +172,7 @@ def _firmware_from_status(status: dict[str, str]) -> str | None: def _serial_from_status(status: dict[str, str]) -> str | None: - """Find the best serialvalue from the status.""" + """Find the best serial value 
from the status.""" serial = status.get("device.serial") or status.get("ups.serial") if serial and ( serial.lower() in NUT_FAKE_SERIAL or serial.count("0") == len(serial.strip()) From 25d33e96cc8e52cb5f33b0e61bb02f8f9b107b04 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 09:29:26 +0200 Subject: [PATCH 0870/1635] Add missing hass type hint in component tests (g) (#124203) --- .../generic_hygrostat/test_humidifier.py | 14 +++++--- .../generic_thermostat/test_climate.py | 14 +++++--- tests/components/google/test_config_flow.py | 2 +- .../test_conversation.py | 13 +++++-- tests/components/google_pubsub/test_init.py | 3 +- tests/components/google_wifi/test_sensor.py | 11 ++++-- tests/components/group/common.py | 36 +++++++++---------- 7 files changed, 57 insertions(+), 36 deletions(-) diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index fc46db48664..2beaf423201 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -32,10 +32,12 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, + ServiceCall, State, callback, ) from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.typing import StateType from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -206,7 +208,7 @@ async def test_unique_id( assert entry.unique_id == unique_id -def _setup_sensor(hass, humidity): +def _setup_sensor(hass: HomeAssistant, humidity: StateType) -> None: """Set up the test sensor.""" hass.states.async_set(ENT_SENSOR, humidity) @@ -669,13 +671,13 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: assert call.data["entity_id"] == ENT_SWITCH -async def _setup_switch(hass, is_on): +async def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: """Set up the test switch.""" hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) calls = [] @callback - def log_call(call): + def log_call(call: ServiceCall) -> None: """Log service calls.""" calls.append(call) @@ -1615,7 +1617,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: assert state.state == STATE_OFF -async def _setup_humidifier(hass): +async def _setup_humidifier(hass: HomeAssistant) -> None: assert await async_setup_component( hass, DOMAIN, @@ -1635,7 +1637,9 @@ async def _setup_humidifier(hass): await hass.async_block_till_done() -def _mock_restore_cache(hass, humidity=40, state=STATE_OFF): +def _mock_restore_cache( + hass: HomeAssistant, humidity: int = 40, state: str = STATE_OFF +) -> None: mock_restore_cache( hass, ( diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 0f438056fbd..f1c41270a2f 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -40,11 +40,13 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, + ServiceCall, State, callback, ) from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.typing import StateType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import 
METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @@ -208,7 +210,7 @@ async def test_unique_id( assert entry.unique_id == unique_id -def _setup_sensor(hass, temp): +def _setup_sensor(hass: HomeAssistant, temp: StateType) -> None: """Set up the test sensor.""" hass.states.async_set(ENT_SENSOR, temp) @@ -594,13 +596,13 @@ async def test_hvac_mode_heat(hass: HomeAssistant) -> None: assert call.data["entity_id"] == ENT_SWITCH -def _setup_switch(hass, is_on): +def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: """Set up the test switch.""" hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) calls = [] @callback - def log_call(call): + def log_call(call: ServiceCall) -> None: """Log service calls.""" calls.append(call) @@ -1374,7 +1376,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: assert state.state == HVACMode.OFF -async def _setup_climate(hass): +async def _setup_climate(hass: HomeAssistant) -> None: assert await async_setup_component( hass, DOMAIN, @@ -1393,7 +1395,9 @@ async def _setup_climate(hass): ) -def _mock_restore_cache(hass, temperature=20, hvac_mode=HVACMode.OFF): +def _mock_restore_cache( + hass: HomeAssistant, temperature: int = 20, hvac_mode: HVACMode = HVACMode.OFF +) -> None: mock_restore_cache( hass, ( diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index 47156299b57..f4a6c97f50d 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -130,7 +130,7 @@ async def primary_calendar( ) -async def fire_alarm(hass, point_in_time): +async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): async_fire_time_changed(hass, point_in_time) diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index a8eae34e08b..1ea5c2ad9b8 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -1,5 +1,6 @@ """Tests for the Google Generative AI Conversation integration conversation platform.""" +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from freezegun import freeze_time @@ -215,7 +216,9 @@ async def test_function_call( }, ) - def tool_call(hass, tool_input, tool_context): + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: mock_part.function_call = None mock_part.text = "Hi there!" return {"result": "Test response"} @@ -314,7 +317,9 @@ async def test_function_call_without_parameters( mock_part = MagicMock() mock_part.function_call = FunctionCall(name="test_tool", args={}) - def tool_call(hass, tool_input, tool_context): + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: mock_part.function_call = None mock_part.text = "Hi there!" return {"result": "Test response"} @@ -400,7 +405,9 @@ async def test_function_exception( mock_part = MagicMock() mock_part.function_call = FunctionCall(name="test_tool", args={"param1": 1}) - def tool_call(hass, tool_input, tool_context): + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: mock_part.function_call = None mock_part.text = "Hi there!" 
raise HomeAssistantError("Test tool exception") diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index fba561f6df1..97e499d5d6d 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -4,6 +4,7 @@ from collections.abc import Generator from dataclasses import dataclass from datetime import datetime import os +from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -111,7 +112,7 @@ async def test_full_config(hass: HomeAssistant, mock_client) -> None: ) -async def _setup(hass, filter_config): +async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: """Shared set up for filtering tests.""" config = { google_pubsub.DOMAIN: { diff --git a/tests/components/google_wifi/test_sensor.py b/tests/components/google_wifi/test_sensor.py index c7df2b4e822..af870a2136d 100644 --- a/tests/components/google_wifi/test_sensor.py +++ b/tests/components/google_wifi/test_sensor.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta from http import HTTPStatus +from typing import Any from unittest.mock import Mock, patch import requests_mock @@ -78,7 +79,9 @@ async def test_setup_get( assert_setup_component(6, "sensor") -def setup_api(hass, data, requests_mock): +def setup_api( + hass: HomeAssistant | None, data: str | None, requests_mock: requests_mock.Mocker +) -> tuple[google_wifi.GoogleWifiAPI, dict[str, Any]]: """Set up API with fake data.""" resource = f"http://localhost{google_wifi.ENDPOINT}" now = datetime(1970, month=1, day=1) @@ -101,7 +104,7 @@ def setup_api(hass, data, requests_mock): return api, sensor_dict -def fake_delay(hass, ha_delay): +def fake_delay(hass: HomeAssistant, ha_delay: int) -> None: """Fake delay to prevent update throttle.""" hass_now = dt_util.utcnow() shifted_time = hass_now + timedelta(seconds=ha_delay) @@ -220,7 +223,9 @@ def test_update_when_unavailable( assert sensor.state is None -def update_side_effect(hass, requests_mock): +def update_side_effect( + hass: HomeAssistant, requests_mock: requests_mock.Mocker +) -> None: """Mock representation of update function.""" api, sensor_dict = setup_api(hass, MOCK_DATA, requests_mock) api.data = None diff --git a/tests/components/group/common.py b/tests/components/group/common.py index 86fe537a776..a9b6356418c 100644 --- a/tests/components/group/common.py +++ b/tests/components/group/common.py @@ -13,32 +13,32 @@ from homeassistant.components.group import ( SERVICE_SET, ) from homeassistant.const import ATTR_ICON, ATTR_NAME, SERVICE_RELOAD -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass @bind_hass -def reload(hass): +def reload(hass: HomeAssistant) -> None: """Reload the automation from config.""" hass.add_job(async_reload, hass) @callback @bind_hass -def async_reload(hass): +def async_reload(hass: HomeAssistant) -> None: """Reload the automation from config.""" hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_RELOAD)) @bind_hass def set_group( - hass, - object_id, - name=None, - entity_ids=None, - icon=None, - add=None, -): + hass: HomeAssistant, + object_id: str, + name: str | None = None, + entity_ids: list[str] | None = None, + icon: str | None = None, + add: list[str] | None = None, +) -> None: """Create/Update a group.""" hass.add_job( async_set_group, @@ -54,13 +54,13 @@ def set_group( @callback @bind_hass def async_set_group( - hass, - object_id, - name=None, - 
entity_ids=None, - icon=None, - add=None, -): + hass: HomeAssistant, + object_id: str, + name: str | None = None, + entity_ids: list[str] | None = None, + icon: str | None = None, + add: list[str] | None = None, +) -> None: """Create/Update a group.""" data = { key: value @@ -79,7 +79,7 @@ def async_set_group( @callback @bind_hass -def async_remove(hass, object_id): +def async_remove(hass: HomeAssistant, object_id: str) -> None: """Remove a user group.""" data = {ATTR_OBJECT_ID: object_id} hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_REMOVE, data)) From 88d79d35ebe44d2050cf72570301f3f14ca29327 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 10:18:09 +0200 Subject: [PATCH 0871/1635] Simplify bring todo service schema (#124206) --- homeassistant/components/bring/todo.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index 001466bc1fe..4fb90860899 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -22,7 +22,6 @@ from homeassistant.components.todo import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform -from homeassistant.helpers.config_validation import make_entity_service_schema from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -62,14 +61,12 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_PUSH_NOTIFICATION, - make_entity_service_schema( - { - vol.Required(ATTR_NOTIFICATION_TYPE): vol.All( - vol.Upper, cv.enum(BringNotificationType) - ), - vol.Optional(ATTR_ITEM_NAME): cv.string, - } - ), + { + vol.Required(ATTR_NOTIFICATION_TYPE): vol.All( + vol.Upper, cv.enum(BringNotificationType) + ), + vol.Optional(ATTR_ITEM_NAME): cv.string, + }, "async_send_message", ) From d9c98316fda76c8a061ebad6412f51c92f44694d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 10:20:58 +0200 Subject: [PATCH 0872/1635] Bump pyhomeworks to 1.1.2 (#124199) --- homeassistant/components/homeworks/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/homeworks/manifest.json b/homeassistant/components/homeworks/manifest.json index a399e0a98e7..011c301d00d 100644 --- a/homeassistant/components/homeworks/manifest.json +++ b/homeassistant/components/homeworks/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/homeworks", "iot_class": "local_push", "loggers": ["pyhomeworks"], - "requirements": ["pyhomeworks==1.1.1"] + "requirements": ["pyhomeworks==1.1.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9d3825d4f4e..e39815c271a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1915,7 +1915,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.1.1 +pyhomeworks==1.1.2 # homeassistant.components.ialarm pyialarm==2.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 828cafcf37b..7b05fd6eecf 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1529,7 +1529,7 @@ pyhiveapi==0.5.16 pyhomematic==0.1.77 # homeassistant.components.homeworks -pyhomeworks==1.1.1 
+pyhomeworks==1.1.2 # homeassistant.components.ialarm pyialarm==2.2.0 From 013b91394e62e9910ba85d7f1b333692d0f3fd00 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Mon, 19 Aug 2024 04:22:36 -0400 Subject: [PATCH 0873/1635] Add diagnostics to Nice G.O. (#124194) --- .../components/nice_go/diagnostics.py | 30 +++++++++++++++++++ .../nice_go/snapshots/test_diagnostics.ambr | 2 +- tests/components/nice_go/test_diagnostics.py | 29 ++++++++++++++++++ 3 files changed, 60 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nice_go/diagnostics.py create mode 100644 tests/components/nice_go/test_diagnostics.py diff --git a/homeassistant/components/nice_go/diagnostics.py b/homeassistant/components/nice_go/diagnostics.py new file mode 100644 index 00000000000..2c9a695d4b5 --- /dev/null +++ b/homeassistant/components/nice_go/diagnostics.py @@ -0,0 +1,30 @@ +"""Diagnostics support for Nice G.O..""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from . import NiceGOConfigEntry +from .const import CONF_REFRESH_TOKEN + +TO_REDACT = {CONF_PASSWORD, CONF_EMAIL, CONF_REFRESH_TOKEN, "title", "unique_id"} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: NiceGOConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator = entry.runtime_data + + return { + "entry": async_redact_data(entry.as_dict(), TO_REDACT), + "coordinator_data": { + device_id: asdict(device_data) + for device_id, device_data in coordinator.data.items() + }, + } diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index b7d564b619b..f79b0607ce2 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -36,7 +36,7 @@ 'pref_disable_polling': False, 'source': 'user', 'title': '**REDACTED**', - 'unique_id': None, + 'unique_id': '**REDACTED**', 'version': 1, }), }) diff --git a/tests/components/nice_go/test_diagnostics.py b/tests/components/nice_go/test_diagnostics.py new file mode 100644 index 00000000000..1c88c6a8dc6 --- /dev/null +++ b/tests/components/nice_go/test_diagnostics.py @@ -0,0 +1,29 @@ +"""Test diagnostics of Nice G.O..""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry, []) + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + assert result == snapshot(exclude=props("created_at", "modified_at")) From 511ec4ba8ac36215d082e53de11104b64185b355 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 10:23:25 +0200 Subject: [PATCH 0874/1635] Remove test helper get_test_home_assistant (#124177) --- tests/common.py | 58 +------------------------------------------------ 1 file changed, 1 insertion(+), 57 deletions(-) diff --git a/tests/common.py b/tests/common.py index 093d2b5c5e1..893c9ffcd67 100644 --- a/tests/common.py +++ b/tests/common.py @@ -13,12 +13,7 @@ from collections.abc import ( Mapping, Sequence, ) -from contextlib import ( - AbstractAsyncContextManager, - asynccontextmanager, - contextmanager, - suppress, -) +from contextlib import asynccontextmanager, contextmanager, suppress from datetime import UTC, datetime, timedelta from enum import Enum import functools as ft @@ -28,7 +23,6 @@ import json import logging import os import pathlib -import threading import time from types import FrameType, ModuleType from typing import Any, Literal, NoReturn @@ -179,56 +173,6 @@ def get_test_config_dir(*add_path): return os.path.join(os.path.dirname(__file__), "testing_config", *add_path) -@contextmanager -def get_test_home_assistant() -> Generator[HomeAssistant]: - """Return a Home Assistant object pointing at test config directory.""" - hass_created_event = threading.Event() - loop_stop_event = threading.Event() - - context_manager: AbstractAsyncContextManager = None - hass: HomeAssistant = None - loop: asyncio.AbstractEventLoop = None - orig_stop: Callable = None - - def run_loop() -> None: - """Create and run event loop.""" - nonlocal context_manager, hass, loop, orig_stop - - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - context_manager = async_test_home_assistant(loop) - hass = loop.run_until_complete(context_manager.__aenter__()) - - orig_stop = hass.stop - hass._stopped = Mock(set=loop.stop) - hass.start = start_hass - hass.stop = stop_hass - - hass_created_event.set() - - loop.run_forever() - loop_stop_event.set() - - def start_hass(*mocks: Any) -> None: - """Start hass.""" - asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result() - - def stop_hass() -> None: - """Stop hass.""" - orig_stop() - loop_stop_event.wait() - - threading.Thread(name="LoopThread", target=run_loop, daemon=False).start() - - hass_created_event.wait() - - try: - yield hass - finally: - loop.run_until_complete(context_manager.__aexit__(None, None, None)) - loop.close() - - class StoreWithoutWriteLoad[_T: (Mapping[str, Any] | Sequence[Any])](storage.Store[_T]): """Fake store that does not write or load. 
Used for testing.""" From bab930a4561d16a08fc9c61a46b8cda4e7028e62 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 10:37:17 +0200 Subject: [PATCH 0875/1635] Fix flapping ista_ecotrend tests (#124205) --- tests/components/ista_ecotrend/test_statistics.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/components/ista_ecotrend/test_statistics.py b/tests/components/ista_ecotrend/test_statistics.py index 6b2f98affe9..21877f686df 100644 --- a/tests/components/ista_ecotrend/test_statistics.py +++ b/tests/components/ista_ecotrend/test_statistics.py @@ -14,7 +14,7 @@ from homeassistant.helpers import entity_registry as er from .conftest import extend_statistics -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed from tests.components.recorder.common import async_wait_recording_done @@ -61,8 +61,12 @@ async def test_statistics_import( mock_ista.get_consumption_data = extend_statistics freezer.tick(datetime.timedelta(days=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() await async_wait_recording_done(hass) freezer.tick(datetime.timedelta(days=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() await async_wait_recording_done(hass) for entity in entities: From 2577fb804bc96a87334048e0ec979aa3e38f58ae Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 19 Aug 2024 11:00:33 +0200 Subject: [PATCH 0876/1635] Add sonos snapshot test (#124036) --- .../sonos/snapshots/test_media_player.ambr | 58 +++++++++++++++++++ tests/components/sonos/test_media_player.py | 20 ++++--- 2 files changed, 70 insertions(+), 8 deletions(-) create mode 100644 tests/components/sonos/snapshots/test_media_player.ambr diff --git a/tests/components/sonos/snapshots/test_media_player.ambr b/tests/components/sonos/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..9c43bceb43b --- /dev/null +++ b/tests/components/sonos/snapshots/test_media_player.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_entity_basic[media_player.zone_a-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.zone_a', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'sonos', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'RINCON_test', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_basic[media_player.zone_a-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Zone A', + 'group_members': list([ + 'media_player.zone_a', + ]), + 'is_volume_muted': False, + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'supported_features': , + 'volume_level': 0.19, + }), + 'context': , + 'entity_id': 'media_player.zone_a', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index b7b879dc368..d96ceb900e7 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ 
-5,6 +5,7 @@ from typing import Any from unittest.mock import patch import pytest +from syrupy import SnapshotAssertion from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, @@ -44,10 +45,10 @@ from homeassistant.const import ( SERVICE_VOLUME_DOWN, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, - STATE_IDLE, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, CONNECTION_UPNP, @@ -93,15 +94,18 @@ async def test_device_registry_not_portable( async def test_entity_basic( - hass: HomeAssistant, async_autosetup_sonos, discover + hass: HomeAssistant, + async_autosetup_sonos, + discover, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test basic state and attributes.""" - state = hass.states.get("media_player.zone_a") - assert state.state == STATE_IDLE - attributes = state.attributes - assert attributes["friendly_name"] == "Zone A" - assert attributes["is_volume_muted"] is False - assert attributes["volume_level"] == 0.19 + entity_id = "media_player.zone_a" + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + state = hass.states.get(entity_entry.entity_id) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") @pytest.mark.parametrize( From d9aa931fac64c0cf689c3922c711088ebb913bdf Mon Sep 17 00:00:00 2001 From: Antoine Reversat Date: Mon, 19 Aug 2024 05:04:34 -0400 Subject: [PATCH 0877/1635] Add reauth to fujitsu_fglair (#124166) * Add reauth to fujitsu_fglair * Add test for reauth when an exception occurs. * Address comments * Always assert * Address comments --- .../components/fujitsu_fglair/config_flow.py | 48 +++++++- .../components/fujitsu_fglair/coordinator.py | 8 +- .../components/fujitsu_fglair/strings.json | 10 +- .../fujitsu_fglair/test_config_flow.py | 105 +++++++++++++++++- 4 files changed, 163 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index 10f703df6d9..db1975298a8 100644 --- a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -1,12 +1,13 @@ """Config flow for Fujitsu HVAC (based on Ayla IOT) integration.""" +from collections.abc import Mapping import logging from typing import Any from ayla_iot_unofficial import AylaAuthError, new_ayla_api import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers import aiohttp_client @@ -22,11 +23,18 @@ STEP_USER_DATA_SCHEMA = vol.Schema( vol.Required(CONF_EUROPE): bool, } ) +STEP_REAUTH_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fujitsu HVAC (based on Ayla IOT).""" + _reauth_entry: ConfigEntry | None = None + async def _async_validate_credentials( self, user_input: dict[str, Any] ) -> dict[str, str]: @@ -71,3 +79,41 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> 
ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + assert self._reauth_entry + + if user_input: + reauth_data = { + **self._reauth_entry.data, + CONF_PASSWORD: user_input[CONF_PASSWORD], + } + errors = await self._async_validate_credentials(reauth_data) + + if len(errors) == 0: + return self.async_update_reload_and_abort( + self._reauth_entry, data=reauth_data + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=STEP_REAUTH_DATA_SCHEMA, + description_placeholders={ + CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + **self.context["title_placeholders"], + }, + errors=errors, + ) diff --git a/homeassistant/components/fujitsu_fglair/coordinator.py b/homeassistant/components/fujitsu_fglair/coordinator.py index 267c0b2c3e5..902464bdd80 100644 --- a/homeassistant/components/fujitsu_fglair/coordinator.py +++ b/homeassistant/components/fujitsu_fglair/coordinator.py @@ -6,7 +6,7 @@ from ayla_iot_unofficial import AylaApi, AylaAuthError from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import API_REFRESH @@ -31,7 +31,7 @@ class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): try: await self.api.async_sign_in() except AylaAuthError as e: - raise ConfigEntryError("Credentials expired for Ayla IoT API") from e + raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e async def _async_update_data(self) -> dict[str, FujitsuHVAC]: """Fetch data from api endpoint.""" @@ -45,7 +45,7 @@ class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): devices = await self.api.async_get_devices() except AylaAuthError as e: - raise ConfigEntryError("Credentials expired for Ayla IoT API") from e + raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e if len(listening_entities) == 0: devices = list(filter(lambda x: isinstance(x, FujitsuHVAC), devices)) @@ -58,6 +58,6 @@ class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): for dev in devices: await dev.async_update() except AylaAuthError as e: - raise ConfigEntryError("Credentials expired for Ayla IoT API") from e + raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e return {d.device_serial_number: d for d in devices} diff --git a/homeassistant/components/fujitsu_fglair/strings.json b/homeassistant/components/fujitsu_fglair/strings.json index 71d8542cd3e..8f7d775d7e4 100644 --- a/homeassistant/components/fujitsu_fglair/strings.json +++ b/homeassistant/components/fujitsu_fglair/strings.json @@ -11,6 +11,13 @@ "data_description": { "is_europe": "Allows the user to choose whether to use european servers or not since the API uses different endoint URLs for european vs non-european users" } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "Please re-enter the password for {username}:", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } 
} }, "error": { @@ -19,7 +26,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } } } diff --git a/tests/components/fujitsu_fglair/test_config_flow.py b/tests/components/fujitsu_fglair/test_config_flow.py index 06e4b2e5bd3..fc6afd9b8f0 100644 --- a/tests/components/fujitsu_fglair/test_config_flow.py +++ b/tests/components/fujitsu_fglair/test_config_flow.py @@ -6,12 +6,12 @@ from ayla_iot_unofficial import AylaAuthError import pytest from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN -from homeassistant.config_entries import SOURCE_USER +from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType -from .conftest import TEST_PASSWORD, TEST_USERNAME +from .conftest import TEST_PASSWORD, TEST_PASSWORD2, TEST_USERNAME from tests.common import MockConfigEntry @@ -105,3 +105,104 @@ async def test_form_exceptions( CONF_PASSWORD: TEST_PASSWORD, CONF_EUROPE: False, } + + +async def test_reauth_success( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "title_placeholders": {"name": "test"}, + }, + data={ + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 + + +@pytest.mark.parametrize( + ("exception", "err_msg"), + [ + (AylaAuthError, "invalid_auth"), + (TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_reauth_exceptions( + hass: HomeAssistant, + exception: Exception, + err_msg: str, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow when an exception occurs.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_REAUTH, + "entry_id": mock_config_entry.entry_id, + "title_placeholders": {"name": "test"}, + }, + data={ + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_ayla_api.async_sign_in.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {"base": err_msg} + + mock_ayla_api.async_sign_in.side_effect = None + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 From 197e65c3a9d288b48f54b63cc6b1170cf620509f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 11:06:50 +0200 Subject: [PATCH 0878/1635] Improve type hints in homematicip_cloud tests (#124207) Add missing hass type hint in homematicip_cloud tests --- .../components/homematicip_cloud/conftest.py | 9 ++- tests/components/homematicip_cloud/helper.py | 24 ++++++-- .../test_alarm_control_panel.py | 14 +++-- .../homematicip_cloud/test_binary_sensor.py | 42 +++++++------- .../homematicip_cloud/test_button.py | 6 +- .../homematicip_cloud/test_climate.py | 23 +++++--- .../homematicip_cloud/test_cover.py | 22 +++++--- .../homematicip_cloud/test_device.py | 24 ++++---- .../components/homematicip_cloud/test_hap.py | 8 +-- .../components/homematicip_cloud/test_init.py | 4 +- .../homematicip_cloud/test_light.py | 22 +++++--- .../components/homematicip_cloud/test_lock.py | 6 +- .../homematicip_cloud/test_sensor.py | 56 ++++++++++--------- .../homematicip_cloud/test_switch.py | 22 +++++--- .../homematicip_cloud/test_weather.py | 10 ++-- 15 files changed, 172 insertions(+), 120 deletions(-) diff --git a/tests/components/homematicip_cloud/conftest.py b/tests/components/homematicip_cloud/conftest.py index a43a342478b..ad3957fea69 100644 --- a/tests/components/homematicip_cloud/conftest.py +++ b/tests/components/homematicip_cloud/conftest.py @@ -8,7 +8,6 @@ from homematicip.aio.home import AsyncHome from homematicip.base.enums import WeatherCondition, WeatherDayTime import pytest -from homeassistant import config_entries from homeassistant.components.homematicip_cloud import ( DOMAIN as HMIPC_DOMAIN, async_setup as hmip_async_setup, @@ -46,7 +45,7 @@ def mock_connection_fixture() -> AsyncConnection: @pytest.fixture(name="hmip_config_entry") -def hmip_config_entry_fixture() -> config_entries.ConfigEntry: +def hmip_config_entry_fixture() -> MockConfigEntry: """Create a mock config entry for homematic ip cloud.""" entry_data = { HMIPC_HAPID: HAPID, @@ -66,8 +65,8 @@ def hmip_config_entry_fixture() -> config_entries.ConfigEntry: @pytest.fixture(name="default_mock_hap_factory") async def default_mock_hap_factory_fixture( - hass: HomeAssistant, mock_connection, hmip_config_entry -) -> HomematicipHAP: + hass: HomeAssistant, mock_connection, hmip_config_entry: MockConfigEntry +) -> HomeFactory: """Create a mocked homematic access point.""" return HomeFactory(hass, mock_connection, hmip_config_entry) @@ -94,7 +93,7 @@ def dummy_config_fixture() -> ConfigType: @pytest.fixture(name="mock_hap_with_service") async def mock_hap_with_service_fixture( - hass: HomeAssistant, default_mock_hap_factory, dummy_config + hass: HomeAssistant, default_mock_hap_factory: HomeFactory, dummy_config ) -> HomematicipHAP: """Create a fake homematic access point with hass services.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap() diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index bf20d37f2a3..229b3c20251 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -1,6 +1,7 @@ """Helper for HomematicIP Cloud Tests.""" import json +from typing import Any from unittest.mock import Mock, patch from 
homematicip.aio.class_maps import ( @@ -11,19 +12,19 @@ from homematicip.aio.class_maps import ( from homematicip.aio.device import AsyncDevice from homematicip.aio.group import AsyncGroup from homematicip.aio.home import AsyncHome +from homematicip.base.homematicip_object import HomeMaticIPObject from homematicip.home import Home -from homeassistant import config_entries from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_IS_GROUP, ATTR_MODEL_TYPE, ) from homeassistant.components.homematicip_cloud.hap import HomematicipHAP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture HAPID = "3014F7110000000000000001" HAPPIN = "5678" @@ -31,7 +32,13 @@ AUTH_TOKEN = "1234" FIXTURE_DATA = load_fixture("homematicip_cloud.json", "homematicip_cloud") -def get_and_check_entity_basics(hass, mock_hap, entity_id, entity_name, device_model): +def get_and_check_entity_basics( + hass: HomeAssistant, + mock_hap: HomematicipHAP, + entity_id: str, + entity_name: str, + device_model: str | None, +) -> tuple[State, HomeMaticIPObject | None]: """Get and test basic device.""" ha_state = hass.states.get(entity_id) assert ha_state is not None @@ -50,7 +57,12 @@ def get_and_check_entity_basics(hass, mock_hap, entity_id, entity_name, device_m async def async_manipulate_test_data( - hass, hmip_device, attribute, new_value, channel=1, fire_device=None + hass: HomeAssistant, + hmip_device: HomeMaticIPObject, + attribute: str, + new_value: Any, + channel: int = 1, + fire_device: HomeMaticIPObject | None = None, ): """Set new value on hmip device.""" if channel == 1: @@ -76,7 +88,7 @@ class HomeFactory: self, hass: HomeAssistant, mock_connection, - hmip_config_entry: config_entries.ConfigEntry, + hmip_config_entry: MockConfigEntry, ) -> None: """Initialize the Factory.""" self.hass = hass diff --git a/tests/components/homematicip_cloud/test_alarm_control_panel.py b/tests/components/homematicip_cloud/test_alarm_control_panel.py index 05d7963cea8..cf27aed7a84 100644 --- a/tests/components/homematicip_cloud/test_alarm_control_panel.py +++ b/tests/components/homematicip_cloud/test_alarm_control_panel.py @@ -1,5 +1,7 @@ """Tests for HomematicIP Cloud alarm control panel.""" +from homematicip.aio.home import AsyncHome + from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, ) @@ -13,12 +15,16 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import get_and_check_entity_basics +from .helper import HomeFactory, get_and_check_entity_basics async def _async_manipulate_security_zones( - hass, home, internal_active=False, external_active=False, alarm_triggered=False -): + hass: HomeAssistant, + home: AsyncHome, + internal_active: bool = False, + external_active: bool = False, + alarm_triggered: bool = False, +) -> None: """Set new values on hmip security zones.""" json = home._rawJSONData json["functionalHomes"]["SECURITY_AND_ALARM"]["alarmActive"] = alarm_triggered @@ -50,7 +56,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_alarm_control_panel( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) 
-> None: """Test HomematicipAlarmControlPanel.""" entity_id = "alarm_control_panel.hmip_alarm_control_panel" diff --git a/tests/components/homematicip_cloud/test_binary_sensor.py b/tests/components/homematicip_cloud/test_binary_sensor.py index 54f8e2141d2..d6ea33ed5fb 100644 --- a/tests/components/homematicip_cloud/test_binary_sensor.py +++ b/tests/components/homematicip_cloud/test_binary_sensor.py @@ -27,7 +27,7 @@ from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -41,7 +41,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_home_cloud_connection_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCloudConnectionSensor.""" entity_id = "binary_sensor.cloud_connection" @@ -64,7 +64,7 @@ async def test_hmip_home_cloud_connection_sensor( async def test_hmip_acceleration_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipAccelerationSensor.""" entity_id = "binary_sensor.garagentor" @@ -103,7 +103,7 @@ async def test_hmip_acceleration_sensor( async def test_hmip_tilt_vibration_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTiltVibrationSensor.""" entity_id = "binary_sensor.garage_neigungs_und_erschutterungssensor" @@ -141,7 +141,7 @@ async def test_hmip_tilt_vibration_sensor( async def test_hmip_contact_interface( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipContactInterface.""" entity_id = "binary_sensor.kontakt_schnittstelle_unterputz_1_fach" @@ -166,7 +166,7 @@ async def test_hmip_contact_interface( async def test_hmip_shutter_contact( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipShutterContact.""" entity_id = "binary_sensor.fenstergriffsensor" @@ -208,7 +208,7 @@ async def test_hmip_shutter_contact( async def test_hmip_shutter_contact_optical( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipShutterContact.""" entity_id = "binary_sensor.sitzplatzture" @@ -240,7 +240,7 @@ async def test_hmip_shutter_contact_optical( async def test_hmip_motion_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMotionDetector.""" entity_id = "binary_sensor.bewegungsmelder_fur_55er_rahmen_innen" @@ -261,7 +261,7 @@ async def test_hmip_motion_detector( async def test_hmip_presence_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipPresenceDetector.""" entity_id = "binary_sensor.spi_1" @@ -287,7 +287,7 @@ async def test_hmip_presence_detector( async def test_hmip_pluggable_mains_failure_surveillance_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: 
HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipPresenceDetector.""" entity_id = "binary_sensor.netzausfalluberwachung" @@ -308,7 +308,7 @@ async def test_hmip_pluggable_mains_failure_surveillance_sensor( async def test_hmip_smoke_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSmokeDetector.""" entity_id = "binary_sensor.rauchwarnmelder" @@ -342,7 +342,7 @@ async def test_hmip_smoke_detector( async def test_hmip_water_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWaterDetector.""" entity_id = "binary_sensor.wassersensor" @@ -378,7 +378,9 @@ async def test_hmip_water_detector( assert ha_state.state == STATE_OFF -async def test_hmip_storm_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_storm_sensor( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipStormSensor.""" entity_id = "binary_sensor.weather_sensor_plus_storm" entity_name = "Weather Sensor – plus Storm" @@ -397,7 +399,9 @@ async def test_hmip_storm_sensor(hass: HomeAssistant, default_mock_hap_factory) assert ha_state.state == STATE_ON -async def test_hmip_rain_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_rain_sensor( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipRainSensor.""" entity_id = "binary_sensor.wettersensor_pro_raining" entity_name = "Wettersensor - pro Raining" @@ -417,7 +421,7 @@ async def test_hmip_rain_sensor(hass: HomeAssistant, default_mock_hap_factory) - async def test_hmip_sunshine_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSunshineSensor.""" entity_id = "binary_sensor.wettersensor_pro_sunshine" @@ -439,7 +443,7 @@ async def test_hmip_sunshine_sensor( async def test_hmip_battery_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSunshineSensor.""" entity_id = "binary_sensor.wohnungsture_battery" @@ -460,7 +464,7 @@ async def test_hmip_battery_sensor( async def test_hmip_security_zone_sensor_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSecurityZoneSensorGroup.""" entity_id = "binary_sensor.internal_securityzone" @@ -497,7 +501,7 @@ async def test_hmip_security_zone_sensor_group( async def test_hmip_security_sensor_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSecuritySensorGroup.""" entity_id = "binary_sensor.buro_sensors" @@ -571,7 +575,7 @@ async def test_hmip_security_sensor_group( async def test_hmip_multi_contact_interface( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMultiContactInterface.""" entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5" diff --git a/tests/components/homematicip_cloud/test_button.py b/tests/components/homematicip_cloud/test_button.py index 0b5e81dd703..7da86607096 100644 --- a/tests/components/homematicip_cloud/test_button.py +++ b/tests/components/homematicip_cloud/test_button.py @@ -7,11 +7,13 
@@ from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from .helper import get_and_check_entity_basics +from .helper import HomeFactory, get_and_check_entity_basics async def test_hmip_garage_door_controller_button( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, default_mock_hap_factory + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test HomematicipGarageDoorControllerButton.""" entity_id = "button.garagentor" diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index 2b4d023baf8..c059ed4b744 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ b/tests/components/homematicip_cloud/test_climate.py @@ -28,7 +28,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.setup import async_setup_component -from .helper import HAPID, async_manipulate_test_data, get_and_check_entity_basics +from .helper import ( + HAPID, + HomeFactory, + async_manipulate_test_data, + get_and_check_entity_basics, +) async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -40,7 +45,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_heating_group_heat( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.badezimmer" @@ -257,7 +262,7 @@ async def test_hmip_heating_group_heat( async def test_hmip_heating_group_cool( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.badezimmer" @@ -380,7 +385,7 @@ async def test_hmip_heating_group_cool( async def test_hmip_heating_group_heat_with_switch( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.schlafzimmer" @@ -411,7 +416,7 @@ async def test_hmip_heating_group_heat_with_switch( async def test_hmip_heating_group_heat_with_radiator( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.vorzimmer" @@ -440,7 +445,7 @@ async def test_hmip_heating_group_heat_with_radiator( async def test_hmip_heating_profile_default_name( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test visible profile 1 without a name should be displayed as 'Default'.""" entity_id = "climate.vorzimmer3" @@ -465,7 +470,7 @@ async def test_hmip_heating_profile_default_name( async def test_hmip_heating_profile_naming( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test Heating Profile Naming.""" entity_id = "climate.vorzimmer2" @@ -490,7 +495,7 @@ async def test_hmip_heating_profile_naming( async def test_hmip_heating_profile_name_not_in_list( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test set profile when profile is not in available profiles.""" expected_profile = "Testprofile" @@ -684,7 +689,7 @@ async def 
test_hmip_set_home_cooling_mode( async def test_hmip_heating_group_services( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup services.""" entity_id = "climate.badezimmer" diff --git a/tests/components/homematicip_cloud/test_cover.py b/tests/components/homematicip_cloud/test_cover.py index ee126dff936..4d32ae547ef 100644 --- a/tests/components/homematicip_cloud/test_cover.py +++ b/tests/components/homematicip_cloud/test_cover.py @@ -12,7 +12,7 @@ from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -24,7 +24,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_cover_shutter( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.broll_1" @@ -90,7 +90,9 @@ async def test_hmip_cover_shutter( assert ha_state.state == STATE_UNKNOWN -async def test_hmip_cover_slats(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_cover_slats( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipCoverSlats.""" entity_id = "cover.sofa_links" entity_name = "Sofa links" @@ -165,7 +167,7 @@ async def test_hmip_cover_slats(hass: HomeAssistant, default_mock_hap_factory) - async def test_hmip_multi_cover_slats( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverSlats.""" entity_id = "cover.wohnzimmer_fenster" @@ -244,7 +246,9 @@ async def test_hmip_multi_cover_slats( assert ha_state.state == STATE_UNKNOWN -async def test_hmip_blind_module(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_blind_module( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipBlindModule.""" entity_id = "cover.sonnenschutz_balkontur" entity_name = "Sonnenschutz Balkontür" @@ -355,7 +359,7 @@ async def test_hmip_blind_module(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_garage_door_tormatic( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.garage_door_module" @@ -404,7 +408,7 @@ async def test_hmip_garage_door_tormatic( async def test_hmip_garage_door_hoermann( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.garage_door" @@ -453,7 +457,7 @@ async def test_hmip_garage_door_hoermann( async def test_hmip_cover_shutter_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" @@ -518,7 +522,7 @@ async def test_hmip_cover_shutter_group( async def test_hmip_cover_slats_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: 
"""Test slats with HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 074a30e94b2..25fb31c3c62 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -17,9 +17,11 @@ from .helper import ( get_and_check_entity_basics, ) +from tests.common import MockConfigEntry + async def test_hmip_load_all_supported_devices( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Ensure that all supported devices could be loaded.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap( @@ -33,7 +35,7 @@ async def test_hmip_remove_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test Remove of hmip device.""" entity_id = "light.treppe_ch" @@ -67,8 +69,8 @@ async def test_hmip_add_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, - hmip_config_entry, + default_mock_hap_factory: HomeFactory, + hmip_config_entry: MockConfigEntry, ) -> None: """Test Remove of hmip device.""" entity_id = "light.treppe_ch" @@ -121,7 +123,7 @@ async def test_hmip_remove_group( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test Remove of hmip group.""" entity_id = "switch.strom_group" @@ -149,7 +151,7 @@ async def test_hmip_remove_group( async def test_all_devices_unavailable_when_hap_not_connected( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test make all devices unavaulable when hap is not connected.""" entity_id = "light.treppe_ch" @@ -174,7 +176,9 @@ async def test_all_devices_unavailable_when_hap_not_connected( assert ha_state.state == STATE_UNAVAILABLE -async def test_hap_reconnected(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hap_reconnected( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test reconnect hap.""" entity_id = "light.treppe_ch" entity_name = "Treppe CH" @@ -205,7 +209,7 @@ async def test_hap_reconnected(hass: HomeAssistant, default_mock_hap_factory) -> async def test_hap_with_name( - hass: HomeAssistant, mock_connection, hmip_config_entry + hass: HomeAssistant, mock_connection, hmip_config_entry: MockConfigEntry ) -> None: """Test hap with name.""" home_name = "TestName" @@ -232,7 +236,7 @@ async def test_hap_with_name( async def test_hmip_reset_energy_counter_services( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test reset_energy_counter service.""" entity_id = "switch.pc" @@ -267,7 +271,7 @@ async def test_hmip_multi_area_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test multi area device. 
Check if devices are created and referenced.""" entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5" diff --git a/tests/components/homematicip_cloud/test_hap.py b/tests/components/homematicip_cloud/test_hap.py index 2da32b2844d..ded1bf88292 100644 --- a/tests/components/homematicip_cloud/test_hap.py +++ b/tests/components/homematicip_cloud/test_hap.py @@ -22,7 +22,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .helper import HAPID, HAPPIN +from .helper import HAPID, HAPPIN, HomeFactory from tests.common import MockConfigEntry @@ -114,7 +114,7 @@ async def test_hap_setup_connection_error() -> None: async def test_hap_reset_unloads_entry_if_setup( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test calling reset while the entry has been setup.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap() @@ -129,7 +129,7 @@ async def test_hap_reset_unloads_entry_if_setup( async def test_hap_create( - hass: HomeAssistant, hmip_config_entry, simple_mock_home + hass: HomeAssistant, hmip_config_entry: MockConfigEntry, simple_mock_home ) -> None: """Mock AsyncHome to execute get_hap.""" hass.config.components.add(HMIPC_DOMAIN) @@ -141,7 +141,7 @@ async def test_hap_create( async def test_hap_create_exception( - hass: HomeAssistant, hmip_config_entry, mock_connection_init + hass: HomeAssistant, hmip_config_entry: MockConfigEntry, mock_connection_init ) -> None: """Mock AsyncHome to execute get_hap.""" hass.config.components.add(HMIPC_DOMAIN) diff --git a/tests/components/homematicip_cloud/test_init.py b/tests/components/homematicip_cloud/test_init.py index ad1c8140aea..07c53248d92 100644 --- a/tests/components/homematicip_cloud/test_init.py +++ b/tests/components/homematicip_cloud/test_init.py @@ -100,7 +100,7 @@ async def test_config_already_registered_not_passed_to_config_entry( async def test_load_entry_fails_due_to_connection_error( - hass: HomeAssistant, hmip_config_entry, mock_connection_init + hass: HomeAssistant, hmip_config_entry: MockConfigEntry, mock_connection_init ) -> None: """Test load entry fails due to connection error.""" hmip_config_entry.add_to_hass(hass) @@ -116,7 +116,7 @@ async def test_load_entry_fails_due_to_connection_error( async def test_load_entry_fails_due_to_generic_exception( - hass: HomeAssistant, hmip_config_entry + hass: HomeAssistant, hmip_config_entry: MockConfigEntry ) -> None: """Test load entry fails due to generic exception.""" hmip_config_entry.add_to_hass(hass) diff --git a/tests/components/homematicip_cloud/test_light.py b/tests/components/homematicip_cloud/test_light.py index 18f002a5dbc..18d490c3786 100644 --- a/tests/components/homematicip_cloud/test_light.py +++ b/tests/components/homematicip_cloud/test_light.py @@ -16,7 +16,7 @@ from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -27,7 +27,9 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: assert not hass.data.get(HMIPC_DOMAIN) -async def test_hmip_light(hass: HomeAssistant, 
default_mock_hap_factory) -> None: +async def test_hmip_light( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipLight.""" entity_id = "light.treppe_ch" entity_name = "Treppe CH" @@ -73,7 +75,7 @@ async def test_hmip_light(hass: HomeAssistant, default_mock_hap_factory) -> None async def test_hmip_notification_light( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipNotificationLight.""" entity_id = "light.alarm_status" @@ -171,7 +173,9 @@ async def test_hmip_notification_light( assert not ha_state.attributes.get(ATTR_BRIGHTNESS) -async def test_hmip_dimmer(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_dimmer( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipDimmer.""" entity_id = "light.schlafzimmerlicht" entity_name = "Schlafzimmerlicht" @@ -230,7 +234,7 @@ async def test_hmip_dimmer(hass: HomeAssistant, default_mock_hap_factory) -> Non async def test_hmip_light_measuring( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipLightMeasuring.""" entity_id = "light.flur_oben" @@ -276,7 +280,7 @@ async def test_hmip_light_measuring( async def test_hmip_wired_multi_dimmer( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMultiDimmer.""" entity_id = "light.raumlich_kuche" @@ -336,7 +340,7 @@ async def test_hmip_wired_multi_dimmer( async def test_hmip_din_rail_dimmer_3_channel1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 1.""" entity_id = "light.3_dimmer_channel1" @@ -395,7 +399,7 @@ async def test_hmip_din_rail_dimmer_3_channel1( async def test_hmip_din_rail_dimmer_3_channel2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 2.""" entity_id = "light.3_dimmer_channel2" @@ -454,7 +458,7 @@ async def test_hmip_din_rail_dimmer_3_channel2( async def test_hmip_din_rail_dimmer_3_channel3( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 3.""" entity_id = "light.esstisch" diff --git a/tests/components/homematicip_cloud/test_lock.py b/tests/components/homematicip_cloud/test_lock.py index f49ad42b013..7035cf979c4 100644 --- a/tests/components/homematicip_cloud/test_lock.py +++ b/tests/components/homematicip_cloud/test_lock.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -29,7 +29,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_doorlockdrive( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipDoorLockDrive.""" entity_id = "lock.haustuer" @@ -87,7 +87,7 @@ async def test_hmip_doorlockdrive( 
async def test_hmip_doorlockdrive_handle_errors( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipDoorLockDrive.""" entity_id = "lock.haustuer" diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 2b62c46fd72..4028f6d189e 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -36,7 +36,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -48,7 +48,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_accesspoint_status( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSwitch.""" entity_id = "sensor.home_control_access_point_duty_cycle" @@ -67,7 +67,7 @@ async def test_hmip_accesspoint_status( async def test_hmip_heating_thermostat( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingThermostat.""" entity_id = "sensor.heizkorperthermostat_heating" @@ -103,7 +103,7 @@ async def test_hmip_heating_thermostat( async def test_hmip_humidity_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHumiditySensor.""" entity_id = "sensor.bwth_1_humidity" @@ -128,7 +128,7 @@ async def test_hmip_humidity_sensor( async def test_hmip_temperature_sensor1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.bwth_1_temperature" @@ -155,7 +155,7 @@ async def test_hmip_temperature_sensor1( async def test_hmip_temperature_sensor2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.heizkorperthermostat_temperature" @@ -182,7 +182,7 @@ async def test_hmip_temperature_sensor2( async def test_hmip_temperature_sensor3( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.raumbediengerat_analog_temperature" @@ -209,7 +209,7 @@ async def test_hmip_temperature_sensor3( async def test_hmip_thermostat_evo_heating( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingThermostat for HmIP-eTRV-E.""" entity_id = "sensor.thermostat_evo_heating" @@ -231,7 +231,7 @@ async def test_hmip_thermostat_evo_heating( async def test_hmip_thermostat_evo_temperature( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.thermostat_evo_temperature" @@ -256,7 +256,9 @@ async def test_hmip_thermostat_evo_temperature( assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 0.7 -async def 
test_hmip_power_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_power_sensor( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipPowerSensor.""" entity_id = "sensor.flur_oben_power" entity_name = "Flur oben Power" @@ -294,7 +296,7 @@ async def test_hmip_power_sensor(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_illuminance_sensor1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipIlluminanceSensor.""" entity_id = "sensor.wettersensor_illuminance" @@ -316,7 +318,7 @@ async def test_hmip_illuminance_sensor1( async def test_hmip_illuminance_sensor2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipIlluminanceSensor.""" entity_id = "sensor.lichtsensor_nord_illuminance" @@ -341,7 +343,7 @@ async def test_hmip_illuminance_sensor2( async def test_hmip_windspeed_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWindspeedSensor.""" entity_id = "sensor.wettersensor_pro_windspeed" @@ -392,7 +394,7 @@ async def test_hmip_windspeed_sensor( async def test_hmip_today_rain_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTodayRainSensor.""" entity_id = "sensor.weather_sensor_plus_today_rain" @@ -414,7 +416,7 @@ async def test_hmip_today_rain_sensor( async def test_hmip_temperature_external_sensor_channel_1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureDifferenceSensor Channel 1 HmIP-STE2-PCB.""" entity_id = "sensor.ste2_channel_1_temperature" @@ -439,7 +441,7 @@ async def test_hmip_temperature_external_sensor_channel_1( async def test_hmip_temperature_external_sensor_channel_2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureDifferenceSensor Channel 2 HmIP-STE2-PCB.""" entity_id = "sensor.ste2_channel_2_temperature" @@ -464,7 +466,7 @@ async def test_hmip_temperature_external_sensor_channel_2( async def test_hmip_temperature_external_sensor_delta( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureDifferenceSensor Delta HmIP-STE2-PCB.""" entity_id = "sensor.ste2_delta_temperature" @@ -491,7 +493,7 @@ async def test_hmip_temperature_external_sensor_delta( async def test_hmip_passage_detector_delta_counter( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipPassageDetectorDeltaCounter.""" entity_id = "sensor.spdr_1" @@ -514,7 +516,7 @@ async def test_hmip_passage_detector_delta_counter( async def test_hmip_esi_iec_current_power_consumption( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC currentPowerConsumption Sensor.""" entity_id = "sensor.esi_iec_currentPowerConsumption" @@ -532,7 +534,7 @@ async def test_hmip_esi_iec_current_power_consumption( async def test_hmip_esi_iec_energy_counter_usage_high_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: 
HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_usage_high_tariff" @@ -550,7 +552,7 @@ async def test_hmip_esi_iec_energy_counter_usage_high_tariff( async def test_hmip_esi_iec_energy_counter_usage_low_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_LOW_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_usage_low_tariff" @@ -568,7 +570,7 @@ async def test_hmip_esi_iec_energy_counter_usage_low_tariff( async def test_hmip_esi_iec_energy_counter_input_single_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_INPUT_SINGLE_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_input_single_tariff" @@ -586,7 +588,7 @@ async def test_hmip_esi_iec_energy_counter_input_single_tariff( async def test_hmip_esi_iec_unknown_channel( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test devices are loaded partially.""" not_existing_entity_id = "sensor.esi_iec2_energy_counter_input_single_tariff" @@ -601,7 +603,7 @@ async def test_hmip_esi_iec_unknown_channel( async def test_hmip_esi_gas_current_gas_flow( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC CurrentGasFlow.""" entity_id = "sensor.esi_gas_currentgasflow" @@ -619,7 +621,7 @@ async def test_hmip_esi_gas_current_gas_flow( async def test_hmip_esi_gas_gas_volume( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC GasVolume.""" entity_id = "sensor.esi_gas_gasvolume" @@ -637,7 +639,7 @@ async def test_hmip_esi_gas_gas_volume( async def test_hmip_esi_led_current_power_consumption( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC currentPowerConsumption Sensor.""" entity_id = "sensor.esi_led_currentPowerConsumption" @@ -655,7 +657,7 @@ async def test_hmip_esi_led_current_power_consumption( async def test_hmip_esi_led_energy_counter_usage_high_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" entity_id = "sensor.esi_led_energy_counter_usage_high_tariff" diff --git a/tests/components/homematicip_cloud/test_switch.py b/tests/components/homematicip_cloud/test_switch.py index a249c52393d..e4b51688ba7 100644 --- a/tests/components/homematicip_cloud/test_switch.py +++ b/tests/components/homematicip_cloud/test_switch.py @@ -9,7 +9,7 @@ from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -20,7 +20,9 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: assert not hass.data.get(HMIPC_DOMAIN) -async def test_hmip_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_switch( + hass: 
HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipSwitch.""" entity_id = "switch.schrank" entity_name = "Schrank" @@ -57,7 +59,9 @@ async def test_hmip_switch(hass: HomeAssistant, default_mock_hap_factory) -> Non assert ha_state.state == STATE_ON -async def test_hmip_switch_input(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_switch_input( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipSwitch.""" entity_id = "switch.wohnzimmer_beleuchtung" entity_name = "Wohnzimmer Beleuchtung" @@ -95,7 +99,7 @@ async def test_hmip_switch_input(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_switch_measuring( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSwitchMeasuring.""" entity_id = "switch.pc" @@ -134,7 +138,9 @@ async def test_hmip_switch_measuring( assert ha_state.state == STATE_ON -async def test_hmip_group_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_group_switch( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipGroupSwitch.""" entity_id = "switch.strom_group" entity_name = "Strom Group" @@ -174,7 +180,9 @@ async def test_hmip_group_switch(hass: HomeAssistant, default_mock_hap_factory) assert ha_state.attributes[ATTR_GROUP_MEMBER_UNREACHABLE] -async def test_hmip_multi_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_multi_switch( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipMultiSwitch.""" entity_id = "switch.jalousien_1_kizi_2_schlazi_channel1" entity_name = "Jalousien - 1 KiZi, 2 SchlaZi Channel1" @@ -228,7 +236,7 @@ async def test_hmip_multi_switch(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_wired_multi_switch( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMultiSwitch.""" entity_id = "switch.fernseher_wohnzimmer" diff --git a/tests/components/homematicip_cloud/test_weather.py b/tests/components/homematicip_cloud/test_weather.py index 44005afd511..44df907fcc5 100644 --- a/tests/components/homematicip_cloud/test_weather.py +++ b/tests/components/homematicip_cloud/test_weather.py @@ -12,7 +12,7 @@ from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -24,7 +24,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_weather_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWeatherSensor.""" entity_id = "weather.weather_sensor_plus" @@ -50,7 +50,7 @@ async def test_hmip_weather_sensor( async def test_hmip_weather_sensor_pro( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWeatherSensorPro.""" entity_id = "weather.wettersensor_pro" @@ -76,7 +76,9 @@ async def test_hmip_weather_sensor_pro( assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 12.1 
-async def test_hmip_home_weather(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_home_weather( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipHomeWeather.""" entity_id = "weather.weather_1010_wien_osterreich" entity_name = "Weather 1010 Wien, Österreich" From 057f31132bda75d5f793f14e5bc14f94cd752a09 Mon Sep 17 00:00:00 2001 From: Shai Ungar Date: Mon, 19 Aug 2024 13:02:34 +0300 Subject: [PATCH 0879/1635] Bump pyseventeentrack to 1.0.1 (#124211) --- homeassistant/components/seventeentrack/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/seventeentrack/manifest.json b/homeassistant/components/seventeentrack/manifest.json index c4be80ca506..a130fbe9aee 100644 --- a/homeassistant/components/seventeentrack/manifest.json +++ b/homeassistant/components/seventeentrack/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyseventeentrack"], - "requirements": ["pyseventeentrack==1.0.0"] + "requirements": ["pyseventeentrack==1.0.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index e39815c271a..f20838efc35 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2184,7 +2184,7 @@ pyserial==3.5 pysesame2==1.0.1 # homeassistant.components.seventeentrack -pyseventeentrack==1.0.0 +pyseventeentrack==1.0.1 # homeassistant.components.sia pysiaalarm==3.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7b05fd6eecf..6c003b8f0e1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1741,7 +1741,7 @@ pysensibo==1.1.0 pyserial==3.5 # homeassistant.components.seventeentrack -pyseventeentrack==1.0.0 +pyseventeentrack==1.0.1 # homeassistant.components.sia pysiaalarm==3.1.1 From f0af33bd2b8086cb6c95e74db8e7402985014628 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:40:17 +0200 Subject: [PATCH 0880/1635] Add missing hass type hint in component tests (i) (#124216) --- tests/components/iaqualink/test_init.py | 2 +- tests/components/image_processing/common.py | 6 +++--- tests/components/image_processing/test_init.py | 14 ++++++++------ tests/components/influxdb/test_init.py | 4 +++- tests/components/influxdb/test_sensor.py | 6 ++++-- tests/components/input_datetime/test_init.py | 12 +++++++++--- tests/components/input_number/test_init.py | 6 +++--- tests/components/input_text/test_init.py | 2 +- tests/components/insteon/test_api_aldb.py | 7 +++++-- tests/components/insteon/test_api_properties.py | 10 ++++++++-- tests/components/insteon/test_config_flow.py | 15 +++++++++++---- tests/components/izone/test_config_flow.py | 12 +++++++----- 12 files changed, 63 insertions(+), 33 deletions(-) diff --git a/tests/components/iaqualink/test_init.py b/tests/components/iaqualink/test_init.py index 8e157b8d1e3..1df199f706a 100644 --- a/tests/components/iaqualink/test_init.py +++ b/tests/components/iaqualink/test_init.py @@ -30,7 +30,7 @@ from .conftest import get_aqualink_device, get_aqualink_system from tests.common import async_fire_time_changed -async def _ffwd_next_update_interval(hass): +async def _ffwd_next_update_interval(hass: HomeAssistant) -> None: now = dt_util.utcnow() async_fire_time_changed(hass, now + UPDATE_INTERVAL) await hass.async_block_till_done() diff --git a/tests/components/image_processing/common.py 
b/tests/components/image_processing/common.py index 4b3a008c6cd..35b94f2c91c 100644 --- a/tests/components/image_processing/common.py +++ b/tests/components/image_processing/common.py @@ -6,19 +6,19 @@ components. Instead call the service directly. from homeassistant.components.image_processing import DOMAIN, SERVICE_SCAN from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass @bind_hass -def scan(hass, entity_id=ENTITY_MATCH_ALL): +def scan(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Force process of all cameras or given entity.""" hass.add_job(async_scan, hass, entity_id) @callback @bind_hass -def async_scan(hass, entity_id=ENTITY_MATCH_ALL): +def async_scan(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Force process of all cameras or given entity.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_SCAN, data)) diff --git a/tests/components/image_processing/test_init.py b/tests/components/image_processing/test_init.py index 577d3fc47db..3e7c8f2fb91 100644 --- a/tests/components/image_processing/test_init.py +++ b/tests/components/image_processing/test_init.py @@ -35,13 +35,15 @@ def aiohttp_unused_port_factory( return unused_tcp_port_factory -def get_url(hass): +def get_url(hass: HomeAssistant) -> str: """Return camera url.""" state = hass.states.get("camera.demo_camera") return f"{hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}" -async def setup_image_processing(hass, aiohttp_unused_port_factory): +async def setup_image_processing( + hass: HomeAssistant, aiohttp_unused_port_factory: Callable[[], int] +) -> None: """Set up things to be run when tests are started.""" await async_setup_component( hass, @@ -55,7 +57,7 @@ async def setup_image_processing(hass, aiohttp_unused_port_factory): await hass.async_block_till_done() -async def setup_image_processing_face(hass): +async def setup_image_processing_face(hass: HomeAssistant) -> None: """Set up things to be run when tests are started.""" config = {ip.DOMAIN: {"platform": "demo"}, "camera": {"platform": "demo"}} @@ -93,7 +95,7 @@ async def test_setup_component_with_service(hass: HomeAssistant) -> None: async def test_get_image_from_camera( mock_camera_read, hass: HomeAssistant, - aiohttp_unused_port_factory, + aiohttp_unused_port_factory: Callable[[], int], ) -> None: """Grab an image from camera entity.""" await setup_image_processing(hass, aiohttp_unused_port_factory) @@ -116,7 +118,7 @@ async def test_get_image_from_camera( async def test_get_image_without_exists_camera( mock_image, hass: HomeAssistant, - aiohttp_unused_port_factory, + aiohttp_unused_port_factory: Callable[[], int], ) -> None: """Try to get image without exists camera.""" await setup_image_processing(hass, aiohttp_unused_port_factory) @@ -191,7 +193,7 @@ async def test_face_event_call_no_confidence( @pytest.mark.usefixtures("enable_custom_integrations") async def test_update_missing_camera( hass: HomeAssistant, - aiohttp_unused_port_factory, + aiohttp_unused_port_factory: Callable[[], int], caplog: pytest.LogCaptureFixture, ) -> None: """Test when entity does not set camera.""" diff --git a/tests/components/influxdb/test_init.py b/tests/components/influxdb/test_init.py index e9592a06fe2..f900be7b700 100644 --- a/tests/components/influxdb/test_init.py +++ 
b/tests/components/influxdb/test_init.py @@ -334,7 +334,9 @@ async def test_invalid_config( assert not await async_setup_component(hass, influxdb.DOMAIN, config) -async def _setup(hass, mock_influx_client, config_ext, get_write_api): +async def _setup( + hass: HomeAssistant, mock_influx_client, config_ext, get_write_api +) -> None: """Prepare client for next test and return event handler method.""" config = { "influxdb": { diff --git a/tests/components/influxdb/test_sensor.py b/tests/components/influxdb/test_sensor.py index 73dd8375a00..7f5954728a6 100644 --- a/tests/components/influxdb/test_sensor.py +++ b/tests/components/influxdb/test_sensor.py @@ -25,7 +25,7 @@ from homeassistant.components.influxdb.const import ( ) from homeassistant.components.influxdb.sensor import PLATFORM_SCHEMA from homeassistant.const import STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import PLATFORM_NOT_READY_BASE_WAIT_TIME from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -190,7 +190,9 @@ def _set_query_mock_v2( return query_api -async def _setup(hass, config_ext, queries, expected_sensors): +async def _setup( + hass: HomeAssistant, config_ext, queries, expected_sensors +) -> list[State]: """Create client and test expected sensors.""" config = { DOMAIN: config_ext, diff --git a/tests/components/input_datetime/test_init.py b/tests/components/input_datetime/test_init.py index fdbb9a7803f..411f084d39a 100644 --- a/tests/components/input_datetime/test_init.py +++ b/tests/components/input_datetime/test_init.py @@ -79,7 +79,9 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def async_set_date_and_time(hass, entity_id, dt_value): +async def async_set_date_and_time( + hass: HomeAssistant, entity_id: str, dt_value: datetime.datetime +) -> None: """Set date and / or time of input_datetime.""" await hass.services.async_call( DOMAIN, @@ -93,7 +95,9 @@ async def async_set_date_and_time(hass, entity_id, dt_value): ) -async def async_set_datetime(hass, entity_id, dt_value): +async def async_set_datetime( + hass: HomeAssistant, entity_id: str, dt_value: datetime.datetime +) -> None: """Set date and / or time of input_datetime.""" await hass.services.async_call( DOMAIN, @@ -103,7 +107,9 @@ async def async_set_datetime(hass, entity_id, dt_value): ) -async def async_set_timestamp(hass, entity_id, timestamp): +async def async_set_timestamp( + hass: HomeAssistant, entity_id: str, timestamp: float +) -> None: """Set date and / or time of input_datetime.""" await hass.services.async_call( DOMAIN, diff --git a/tests/components/input_number/test_init.py b/tests/components/input_number/test_init.py index 73e41f347ce..8ea1c2e25b6 100644 --- a/tests/components/input_number/test_init.py +++ b/tests/components/input_number/test_init.py @@ -65,7 +65,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def set_value(hass, entity_id, value): +async def set_value(hass: HomeAssistant, entity_id: str, value: str) -> None: """Set input_number to value. This is a legacy helper method. Do not use it for new tests. @@ -78,7 +78,7 @@ async def set_value(hass, entity_id, value): ) -async def increment(hass, entity_id): +async def increment(hass: HomeAssistant, entity_id: str) -> None: """Increment value of entity. This is a legacy helper method. Do not use it for new tests. 
@@ -88,7 +88,7 @@ async def increment(hass, entity_id): ) -async def decrement(hass, entity_id): +async def decrement(hass: HomeAssistant, entity_id: str) -> None: """Decrement value of entity. This is a legacy helper method. Do not use it for new tests. diff --git a/tests/components/input_text/test_init.py b/tests/components/input_text/test_init.py index 3cae98b6dfe..2ca1d39a983 100644 --- a/tests/components/input_text/test_init.py +++ b/tests/components/input_text/test_init.py @@ -71,7 +71,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def async_set_value(hass, entity_id, value): +async def async_set_value(hass: HomeAssistant, entity_id: str, value: str) -> None: """Set input_text to value.""" await hass.services.async_call( DOMAIN, diff --git a/tests/components/insteon/test_api_aldb.py b/tests/components/insteon/test_api_aldb.py index 4376628d9a4..9f3c78b4b39 100644 --- a/tests/components/insteon/test_api_aldb.py +++ b/tests/components/insteon/test_api_aldb.py @@ -1,6 +1,7 @@ """Test the Insteon All-Link Database APIs.""" import json +from typing import Any from unittest.mock import patch from pyinsteon import pub @@ -23,7 +24,7 @@ from homeassistant.core import HomeAssistant from .mock_devices import MockDevices from tests.common import load_fixture -from tests.typing import WebSocketGenerator +from tests.typing import MockHAClientWebSocket, WebSocketGenerator @pytest.fixture(name="aldb_data", scope="module") @@ -32,7 +33,9 @@ def aldb_data_fixture(): return json.loads(load_fixture("insteon/aldb_data.json")) -async def _setup(hass, hass_ws_client, aldb_data): +async def _setup( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data: dict[str, Any] +) -> tuple[MockHAClientWebSocket, MockDevices]: """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() diff --git a/tests/components/insteon/test_api_properties.py b/tests/components/insteon/test_api_properties.py index aee35cb8994..35ff95a5cc8 100644 --- a/tests/components/insteon/test_api_properties.py +++ b/tests/components/insteon/test_api_properties.py @@ -1,6 +1,7 @@ """Test the Insteon properties APIs.""" import json +from typing import Any from unittest.mock import AsyncMock, patch from pyinsteon.config import MOMENTARY_DELAY, RELAY_MODE, TOGGLE_BUTTON @@ -26,7 +27,7 @@ from homeassistant.core import HomeAssistant from .mock_devices import MockDevices from tests.common import load_fixture -from tests.typing import WebSocketGenerator +from tests.typing import MockHAClientWebSocket, WebSocketGenerator @pytest.fixture(name="kpl_properties_data", scope="module") @@ -41,7 +42,12 @@ def iolinc_properties_data_fixture(): return json.loads(load_fixture("insteon/iolinc_properties.json")) -async def _setup(hass, hass_ws_client, address, properties_data): +async def _setup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + address: str, + properties_data: dict[str, Any], +) -> tuple[MockHAClientWebSocket, MockDevices]: """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() diff --git a/tests/components/insteon/test_config_flow.py b/tests/components/insteon/test_config_flow.py index 51fdd7a550d..31d38a603f1 100644 --- a/tests/components/insteon/test_config_flow.py +++ b/tests/components/insteon/test_config_flow.py @@ -1,6 +1,8 @@ """Test the config flow for the Insteon integration.""" -from unittest.mock import patch +from collections.abc import Callable +from typing import Any +from unittest.mock import 
AsyncMock, patch import pytest from voluptuous_serialize import convert @@ -14,7 +16,7 @@ from homeassistant.components.insteon.config_flow import ( STEP_PLM_MANUALLY, ) from homeassistant.components.insteon.const import CONF_HUB_VERSION, DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult from homeassistant.const import CONF_DEVICE, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -60,7 +62,7 @@ async def mock_failed_connection(*args, **kwargs): raise ConnectionError("Connection failed") -async def _init_form(hass, modem_type): +async def _init_form(hass: HomeAssistant, modem_type: str) -> ConfigFlowResult: """Run the user form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -73,7 +75,12 @@ async def _init_form(hass, modem_type): ) -async def _device_form(hass, flow_id, connection, user_input): +async def _device_form( + hass: HomeAssistant, + flow_id: str, + connection: Callable[..., Any], + user_input: dict[str, Any] | None, +) -> tuple[ConfigFlowResult, AsyncMock]: """Test the PLM, Hub v1 or Hub v2 form.""" with ( patch( diff --git a/tests/components/izone/test_config_flow.py b/tests/components/izone/test_config_flow.py index 6591e402ec2..3c9707b34c6 100644 --- a/tests/components/izone/test_config_flow.py +++ b/tests/components/izone/test_config_flow.py @@ -1,5 +1,7 @@ """Tests for iZone.""" +from collections.abc import Callable +from typing import Any from unittest.mock import Mock, patch import pytest @@ -12,7 +14,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send @pytest.fixture -def mock_disco(): +def mock_disco() -> Mock: """Mock discovery service.""" disco = Mock() disco.pi_disco = Mock() @@ -20,15 +22,15 @@ def mock_disco(): return disco -def _mock_start_discovery(hass, mock_disco): - def do_disovered(*args): +def _mock_start_discovery(hass: HomeAssistant, mock_disco: Mock) -> Callable[..., Mock]: + def do_disovered(*args: Any) -> Mock: async_dispatcher_send(hass, DISPATCH_CONTROLLER_DISCOVERED, True) return mock_disco return do_disovered -async def test_not_found(hass: HomeAssistant, mock_disco) -> None: +async def test_not_found(hass: HomeAssistant, mock_disco: Mock) -> None: """Test not finding iZone controller.""" with ( @@ -56,7 +58,7 @@ async def test_not_found(hass: HomeAssistant, mock_disco) -> None: stop_disco.assert_called_once() -async def test_found(hass: HomeAssistant, mock_disco) -> None: +async def test_found(hass: HomeAssistant, mock_disco: Mock) -> None: """Test not finding iZone controller.""" mock_disco.pi_disco.controllers["blah"] = object() From a24fdd1c2b7512a82584a447e3a22ec6df7bff98 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:40:56 +0200 Subject: [PATCH 0881/1635] Add missing hass type hint in component tests (h) (#124209) * Add missing hass type hint in component tests (h) * Fix import --- tests/components/harmony/test_remote.py | 5 ++++- tests/components/harmony/test_select.py | 4 +++- tests/components/heos/test_media_player.py | 7 ++++++- tests/components/heos/test_services.py | 4 +++- tests/components/homeassistant_hardware/conftest.py | 4 +++- tests/components/homeassistant_sky_connect/conftest.py | 4 +++- tests/components/homeassistant_yellow/conftest.py | 4 +++- tests/components/html5/test_notify.py | 8 +++++++- tests/components/http/test_auth.py | 
7 ++++++- 9 files changed, 38 insertions(+), 9 deletions(-) diff --git a/tests/components/harmony/test_remote.py b/tests/components/harmony/test_remote.py index c0ec2235b84..8f488f9bf0d 100644 --- a/tests/components/harmony/test_remote.py +++ b/tests/components/harmony/test_remote.py @@ -1,6 +1,7 @@ """Test the Logitech Harmony Hub remote.""" from datetime import timedelta +from typing import Any from aioharmony.const import SendCommandDevice @@ -387,7 +388,9 @@ async def test_sync( mock_write_config.assert_called() -async def _send_commands_and_wait(hass, service_data): +async def _send_commands_and_wait( + hass: HomeAssistant, service_data: dict[str, Any] +) -> None: await hass.services.async_call( REMOTE_DOMAIN, SERVICE_SEND_COMMAND, diff --git a/tests/components/harmony/test_select.py b/tests/components/harmony/test_select.py index 2568feb1412..1451f146b98 100644 --- a/tests/components/harmony/test_select.py +++ b/tests/components/harmony/test_select.py @@ -91,7 +91,9 @@ async def test_select_option( assert hass.states.is_state(ENTITY_SELECT, "power_off") -async def _select_option_and_wait(hass, entity, option): +async def _select_option_and_wait( + hass: HomeAssistant, entity: str, option: str +) -> None: await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 19f7ec74daf..089fa1cceea 100644 --- a/tests/components/heos/test_media_player.py +++ b/tests/components/heos/test_media_player.py @@ -1,6 +1,7 @@ """Tests for the Heos Media Player platform.""" import asyncio +from typing import Any from pyheos import CommandFailedError, const from pyheos.error import HeosError @@ -58,8 +59,12 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry -async def setup_platform(hass, config_entry, config): + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, config: dict[str, Any] +) -> None: """Set up the media player platform for testing.""" config_entry.add_to_hass(hass) assert await async_setup_component(hass, DOMAIN, config) diff --git a/tests/components/heos/test_services.py b/tests/components/heos/test_services.py index 2d812eb83ab..d8b8b5038b0 100644 --- a/tests/components/heos/test_services.py +++ b/tests/components/heos/test_services.py @@ -13,8 +13,10 @@ from homeassistant.components.heos.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry -async def setup_component(hass, config_entry): + +async def setup_component(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Set up the component for testing.""" config_entry.add_to_hass(hass) assert await async_setup_component(hass, DOMAIN, {}) diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index b62ccaf855b..c495b5132e0 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -6,6 +6,8 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: @@ -122,7 +124,7 @@ def set_addon_options_fixture(): def 
install_addon_side_effect_fixture(addon_store_info, addon_info): """Return the install add-on side effect.""" - async def install_addon(hass, slug): + async def install_addon(hass: HomeAssistant, slug: str) -> None: """Mock install add-on.""" addon_store_info.return_value = { "installed": "1.0.0", diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index 69b0901aadf..e93b90c4c7c 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -5,6 +5,8 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(name="mock_usb_serial_by_id", autouse=True) def mock_usb_serial_by_id_fixture() -> Generator[MagicMock]: @@ -122,7 +124,7 @@ def set_addon_options_fixture(): def install_addon_side_effect_fixture(addon_store_info, addon_info): """Return the install add-on side effect.""" - async def install_addon(hass, slug): + async def install_addon(hass: HomeAssistant, slug: str) -> None: """Mock install add-on.""" addon_store_info.return_value = { "installed": "1.0.0", diff --git a/tests/components/homeassistant_yellow/conftest.py b/tests/components/homeassistant_yellow/conftest.py index 0077fb27058..9882d7613d5 100644 --- a/tests/components/homeassistant_yellow/conftest.py +++ b/tests/components/homeassistant_yellow/conftest.py @@ -6,6 +6,8 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: @@ -122,7 +124,7 @@ def set_addon_options_fixture(): def install_addon_side_effect_fixture(addon_store_info, addon_info): """Return the install add-on side effect.""" - async def install_addon(hass, slug): + async def install_addon(hass: HomeAssistant, slug: str) -> None: """Mock install add-on.""" addon_store_info.return_value = { "installed": "1.0.0", diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index f54ec9fa8f7..42ca6067418 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -2,9 +2,11 @@ from http import HTTPStatus import json +from typing import Any from unittest.mock import mock_open, patch from aiohttp.hdrs import AUTHORIZATION +from aiohttp.test_utils import TestClient import homeassistant.components.html5.notify as html5 from homeassistant.core import HomeAssistant @@ -69,7 +71,11 @@ REGISTER_URL = "/api/notify.html5" PUBLISH_URL = "/api/notify.html5/callback" -async def mock_client(hass, hass_client, registrations=None): +async def mock_client( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + registrations: dict[str, Any] | None = None, +) -> TestClient: """Create a test client for HTML5 views.""" if registrations is None: registrations = {} diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 7f29f8a4b9f..76c512c9686 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -4,6 +4,7 @@ from datetime import timedelta from http import HTTPStatus from ipaddress import ip_network import logging +from typing import Any from unittest.mock import Mock, patch from aiohttp import BasicAuth, web @@ -476,7 +477,11 @@ async def test_auth_access_signed_path_via_websocket( @websocket_api.websocket_command({"type": "diagnostics/list"}) @callback - def get_signed_path(hass, 
connection, msg): + def get_signed_path( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: connection.send_result( msg["id"], {"path": async_sign_path(hass, "/", timedelta(seconds=5))} ) From e88007af2dab978b940f559ae5a53bf8ec5cb86f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 12:41:47 +0200 Subject: [PATCH 0882/1635] Improve suggested values in homeworks config flow (#124200) --- homeassistant/components/homeworks/config_flow.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index 9247670b40b..8e9c8e3b29a 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -588,12 +588,16 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): suggested_values = { CONF_HOST: entry.options[CONF_HOST], CONF_PORT: entry.options[CONF_PORT], + CONF_USERNAME: entry.data.get(CONF_USERNAME), + CONF_PASSWORD: entry.data.get(CONF_PASSWORD), } if user_input: suggested_values = { CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], + CONF_USERNAME: user_input.get(CONF_USERNAME), + CONF_PASSWORD: user_input.get(CONF_PASSWORD), } try: await self._validate_edit_controller(user_input) @@ -652,7 +656,9 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA_ADD_CONTROLLER, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA_ADD_CONTROLLER, user_input + ), errors=errors, ) From fd0f093e10b005a049604d2a9fda4fd19856ae97 Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:45:11 +0200 Subject: [PATCH 0883/1635] Bump python-linkplay to 0.0.8 (#123875) * Bump python-linkplay to 0.0.7 * Bump python-linkplay to 0.0.8 --- homeassistant/components/linkplay/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 9ac2a9e66e6..5212f3f99b8 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/linkplay", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["python-linkplay==0.0.6"], + "requirements": ["python-linkplay==0.0.8"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index f20838efc35..d1be3b1b423 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2307,7 +2307,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay -python-linkplay==0.0.6 +python-linkplay==0.0.8 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6c003b8f0e1..b49b6537944 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1825,7 +1825,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.1 # homeassistant.components.linkplay -python-linkplay==0.0.6 +python-linkplay==0.0.8 # homeassistant.components.matter python-matter-server==6.3.0 From 8907b7e911a859a106868a7e684770a77e318f4e Mon Sep 17 00:00:00 2001 From: cnico Date: Mon, 19 Aug 2024 12:58:00 +0200 Subject: [PATCH 
0884/1635] Bump dio-chacon-wifi-api to 1.2.1 (#124215) * chacon_dio api version to 1.2.1 * corrected CI for PR --- homeassistant/components/chacon_dio/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 1 - 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/chacon_dio/manifest.json b/homeassistant/components/chacon_dio/manifest.json index c0f4059e798..edee24444f7 100644 --- a/homeassistant/components/chacon_dio/manifest.json +++ b/homeassistant/components/chacon_dio/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/chacon_dio", "iot_class": "cloud_push", "loggers": ["dio_chacon_api"], - "requirements": ["dio-chacon-wifi-api==1.2.0"] + "requirements": ["dio-chacon-wifi-api==1.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index d1be3b1b423..bc4c0146a38 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -741,7 +741,7 @@ devolo-home-control-api==0.18.3 devolo-plc-api==1.4.1 # homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.2.0 +dio-chacon-wifi-api==1.2.1 # homeassistant.components.directv directv==0.4.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b49b6537944..8f9217e2d73 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -631,7 +631,7 @@ devolo-home-control-api==0.18.3 devolo-plc-api==1.4.1 # homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.2.0 +dio-chacon-wifi-api==1.2.1 # homeassistant.components.directv directv==0.4.0 diff --git a/script/licenses.py b/script/licenses.py index 9d7da912398..bf14adca474 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -139,7 +139,6 @@ EXCEPTIONS = { "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 "crownstone-sse", # https://github.com/crownstone/crownstone-lib-python-sse/pull/2 "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 - "dio-chacon-wifi-api", "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 "enocean", # https://github.com/kipe/enocean/pull/142 "gardena-bluetooth", # https://github.com/elupus/gardena-bluetooth/pull/11 From 96edaebdd30c43be4d64939e4a4577b5c0ba7e6b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 13:15:50 +0200 Subject: [PATCH 0885/1635] Remove unused function otbr.async_get_active_dataset_tlvs (#124210) --- homeassistant/components/otbr/__init__.py | 13 ----- tests/components/otbr/test_init.py | 71 ----------------------- 2 files changed, 84 deletions(-) diff --git a/homeassistant/components/otbr/__init__.py b/homeassistant/components/otbr/__init__.py index 97c2f40eb99..1d79b1c52e9 100644 --- a/homeassistant/components/otbr/__init__.py +++ b/homeassistant/components/otbr/__init__.py @@ -81,16 +81,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -async def async_get_active_dataset_tlvs(hass: HomeAssistant) -> bytes | None: - """Get current active operational dataset in TLVS format, or None. - - Returns None if there is no active operational dataset. - Raises if the http status is 400 or higher or if the response is invalid. 
- """ - if DOMAIN not in hass.data: - raise HomeAssistantError("OTBR API not available") - - data: OTBRData = hass.data[DOMAIN] - return await data.get_active_dataset_tlvs() diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index 0c56e9ac8da..5cf29662be3 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -1,7 +1,6 @@ """Test the Open Thread Border Router integration.""" import asyncio -from http import HTTPStatus from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, patch @@ -13,7 +12,6 @@ from zeroconf.asyncio import AsyncServiceInfo from homeassistant.components import otbr, thread from homeassistant.components.thread import discovery from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -346,78 +344,9 @@ async def test_remove_entry( aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="0E") - assert await otbr.async_get_active_dataset_tlvs(hass) == bytes.fromhex("0E") - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] await hass.config_entries.async_remove(config_entry.entry_id) - with pytest.raises(HomeAssistantError): - assert await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs.""" - - mock_response = ( - "0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A" - "0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102" - "25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8" - ) - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text=mock_response) - - assert await otbr.async_get_active_dataset_tlvs(hass) == bytes.fromhex( - mock_response - ) - - -async def test_get_active_dataset_tlvs_empty( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NO_CONTENT) - assert await otbr.async_get_active_dataset_tlvs(hass) is None - - -async def test_get_active_dataset_tlvs_addon_not_installed(hass: HomeAssistant) -> None: - """Test async_get_active_dataset_tlvs when the multi-PAN addon is not installed.""" - - with pytest.raises(HomeAssistantError): - await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs_404( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs with error.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NOT_FOUND) - with pytest.raises(HomeAssistantError): - await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs_201( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs with error.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED) - with pytest.raises(HomeAssistantError): - assert await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs_invalid( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test 
async_get_active_dataset_tlvs with error.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="unexpected") - with pytest.raises(HomeAssistantError): - assert await otbr.async_get_active_dataset_tlvs(hass) - async def test_remove_extra_entries( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker From 16e52f04271703a23024501d809dfd573fea8015 Mon Sep 17 00:00:00 2001 From: cnico Date: Mon, 19 Aug 2024 14:05:12 +0200 Subject: [PATCH 0886/1635] Allow manually updating entity state in chacon dio (#124187) * Addition of a reload service to manually retrieve the status of the devices. * Removal of reload_state service replaced by the homeassistant.update_entity supported service * remove api update to v1.2.1 for another PR * Review corrections * Review corrections --- homeassistant/components/chacon_dio/entity.py | 8 ++++ tests/components/chacon_dio/test_cover.py | 38 ++++++++++++++++++- 2 files changed, 44 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/chacon_dio/entity.py b/homeassistant/components/chacon_dio/entity.py index 38f3d7f5831..7cec6810897 100644 --- a/homeassistant/components/chacon_dio/entity.py +++ b/homeassistant/components/chacon_dio/entity.py @@ -51,3 +51,11 @@ class ChaconDioEntity(Entity): _LOGGER.debug("Data received from server %s", data) self._update_attr(data) self.async_write_ha_state() + + async def async_update(self) -> None: + """Update the state when the entity is requested to.""" + + _LOGGER.debug("Update called for %s, %s", self, self.target_id) + data = await self.client.get_status_details([self.target_id]) + _LOGGER.debug("Received data from server %s", data) + self._update_attr(data[self.target_id]) diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py index be606e67e1e..24e6e8581d8 100644 --- a/tests/components/chacon_dio/test_cover.py +++ b/tests/components/chacon_dio/test_cover.py @@ -17,9 +17,11 @@ from homeassistant.components.cover import ( STATE_OPEN, STATE_OPENING, ) +from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from . 
import setup_integration @@ -42,6 +44,38 @@ async def test_entities( await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) +async def test_update( + hass: HomeAssistant, + mock_dio_chacon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the creation and values of the Chacon Dio covers.""" + + await setup_integration(hass, mock_config_entry) + + mock_dio_chacon_client.get_status_details.return_value = { + "L4HActuator_idmock1": { + "id": "L4HActuator_idmock1", + "connected": True, + "openlevel": 51, + "movement": "stop", + } + } + + await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {}) + await hass.services.async_call( + HOMEASSISTANT_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: COVER_ENTITY_ID}, + blocking=True, + ) + + state = hass.states.get(COVER_ENTITY_ID) + assert state + assert state.attributes.get(ATTR_CURRENT_POSITION) == 51 + assert state.state == STATE_OPEN + + async def test_cover_actions( hass: HomeAssistant, mock_dio_chacon_client: AsyncMock, @@ -100,7 +134,7 @@ async def test_cover_callbacks( mock_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, ) -> None: - """Test the creation and values of the Chacon Dio covers.""" + """Test the callbacks on the Chacon Dio covers.""" await setup_integration(hass, mock_config_entry) From 02139fcca64792f6eaabd654b29c52eee7b5de91 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 15:36:23 +0200 Subject: [PATCH 0887/1635] Add missing hass type hint in component tests (l) (#124220) --- .../lg_soundbar/test_config_flow.py | 10 +- tests/components/light/common.py | 147 ++++++++++-------- tests/components/light/conftest.py | 3 +- tests/components/litejet/test_trigger.py | 37 +++-- tests/components/logbook/test_init.py | 9 +- tests/components/logger/test_init.py | 2 +- .../lutron_caseta/test_device_trigger.py | 2 +- 7 files changed, 119 insertions(+), 91 deletions(-) diff --git a/tests/components/lg_soundbar/test_config_flow.py b/tests/components/lg_soundbar/test_config_flow.py index 806c993e792..01e16ecb8d0 100644 --- a/tests/components/lg_soundbar/test_config_flow.py +++ b/tests/components/lg_soundbar/test_config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable import socket from typing import Any -from unittest.mock import DEFAULT, patch +from unittest.mock import DEFAULT, MagicMock, patch from homeassistant import config_entries from homeassistant.components.lg_soundbar.const import DEFAULT_PORT, DOMAIN @@ -17,8 +17,12 @@ from tests.common import MockConfigEntry def setup_mock_temescal( - hass, mock_temescal, mac_info_dev=None, product_info=None, info=None -): + hass: HomeAssistant, + mock_temescal: MagicMock, + mac_info_dev: dict[str, Any] | None = None, + product_info: dict[str, Any] | None = None, + info: dict[str, Any] | None = None, +) -> None: """Set up a mock of the temescal object to craft our expected responses.""" tmock = mock_temescal.temescal instance = tmock.return_value diff --git a/tests/components/light/common.py b/tests/components/light/common.py index 4c3e95b5ef9..0ad492a31e9 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -33,6 +33,7 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass from tests.common import MockToggleEntity @@ -40,24 +41,24 @@ from tests.common import MockToggleEntity 
@bind_hass def turn_on( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - rgbw_color=None, - rgbww_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, - white=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + rgbw_color: tuple[int, int, int, int] | None = None, + rgbww_color: tuple[int, int, int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, + white: bool | None = None, +) -> None: """Turn all or specified light on.""" hass.add_job( async_turn_on, @@ -82,24 +83,24 @@ def turn_on( async def async_turn_on( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - rgbw_color=None, - rgbww_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, - white=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + rgbw_color: tuple[int, int, int, int] | None = None, + rgbww_color: tuple[int, int, int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, + white: bool | None = None, +) -> None: """Turn all or specified light on.""" data = { key: value @@ -128,12 +129,22 @@ async def async_turn_on( @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flash=None): +def turn_off( + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + flash: str | None = None, +) -> None: """Turn all or specified light off.""" hass.add_job(async_turn_off, hass, entity_id, transition, flash) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flash=None): +async def async_turn_off( + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + flash: str | None = None, +) -> None: """Turn all or specified light off.""" data = { key: value @@ -150,21 +161,21 @@ async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flas @bind_hass def toggle( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int 
| None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, +) -> None: """Toggle all or specified light.""" hass.add_job( async_toggle, @@ -186,21 +197,21 @@ def toggle( async def async_toggle( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, +) -> None: """Turn all or specified light on.""" data = { key: value diff --git a/tests/components/light/conftest.py b/tests/components/light/conftest.py index 12bd62edcb7..58f2d23db95 100644 --- a/tests/components/light/conftest.py +++ b/tests/components/light/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.light import Profiles +from homeassistant.core import HomeAssistant @pytest.fixture(autouse=True) @@ -12,7 +13,7 @@ def mock_light_profiles(): """Mock loading of profiles.""" data = {} - def mock_profiles_class(hass): + def mock_profiles_class(hass: HomeAssistant) -> Profiles: profiles = Profiles(hass) profiles.data = data profiles.async_initialize = AsyncMock() diff --git a/tests/components/litejet/test_trigger.py b/tests/components/litejet/test_trigger.py index b4374652955..c13fda9068c 100644 --- a/tests/components/litejet/test_trigger.py +++ b/tests/components/litejet/test_trigger.py @@ -2,8 +2,9 @@ from datetime import timedelta import logging +from typing import Any from unittest import mock -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest @@ -30,7 +31,9 @@ ENTITY_OTHER_SWITCH = "switch.mock_switch_2" ENTITY_OTHER_SWITCH_NUMBER = 2 -async def simulate_press(hass, mock_litejet, number): +async def simulate_press( + hass: HomeAssistant, mock_litejet: MagicMock, number: int +) -> None: """Test to simulate a press.""" _LOGGER.info("*** simulate press of %d", number) callback = mock_litejet.switch_pressed_callbacks.get(number) @@ -43,7 +46,9 @@ async def simulate_press(hass, mock_litejet, number): await hass.async_block_till_done() -async def simulate_release(hass, mock_litejet, number): +async def simulate_release( + hass: HomeAssistant, mock_litejet: MagicMock, number: int +) -> None: """Test to simulate releasing.""" _LOGGER.info("*** simulate release of %d", number) callback = mock_litejet.switch_released_callbacks.get(number) @@ -56,7 +61,9 @@ async def simulate_release(hass, mock_litejet, number): await hass.async_block_till_done() -async def simulate_time(hass, mock_litejet, delta): +async def simulate_time( + hass: HomeAssistant, mock_litejet: MagicMock, delta: timedelta +) -> None: """Test to simulate time.""" _LOGGER.info( "*** simulate time change by %s: %s", delta, mock_litejet.start_time + delta @@ -72,7 +79,7 @@ async def simulate_time(hass, mock_litejet, delta): _LOGGER.info("*** done with now=%s", dt_util.utcnow()) -async def setup_automation(hass, trigger): +async 
def setup_automation(hass: HomeAssistant, trigger: dict[str, Any]) -> None: """Test setting up the automation.""" await async_init_integration(hass, use_switch=True) assert await setup.async_setup_component( @@ -95,7 +102,7 @@ async def setup_automation(hass, trigger): async def test_simple( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -110,7 +117,7 @@ async def test_simple( async def test_only_release( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -123,7 +130,7 @@ async def test_only_release( async def test_held_more_than_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a too short hold.""" await setup_automation( @@ -142,7 +149,7 @@ async def test_held_more_than_short( async def test_held_more_than_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a hold that is long enough.""" await setup_automation( @@ -164,7 +171,7 @@ async def test_held_more_than_long( async def test_held_less_than_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a hold that is short enough.""" await setup_automation( @@ -185,7 +192,7 @@ async def test_held_less_than_short( async def test_held_less_than_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a hold that is too long.""" await setup_automation( @@ -206,7 +213,7 @@ async def test_held_less_than_long( async def test_held_in_range_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test an in-range trigger with a too short hold.""" await setup_automation( @@ -226,7 +233,7 @@ async def test_held_in_range_short( async def test_held_in_range_just_right( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test an in-range trigger with a just right hold.""" await setup_automation( @@ -249,7 +256,7 @@ async def test_held_in_range_just_right( async def test_held_in_range_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test an in-range trigger with a too long hold.""" await setup_automation( @@ -271,7 +278,7 @@ async def test_held_in_range_long( async def test_reload( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test reloading automation.""" await setup_automation( diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 9dc96410166..606c398c31f 100644 --- 
a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -524,7 +524,7 @@ async def test_exclude_described_event( entity_id2 = "automation.included_rule" entity_id3 = "sensor.excluded_domain" - def _describe(event): + def _describe(event: Event) -> dict[str, str]: """Describe an event.""" return { "name": "Test Name", @@ -532,7 +532,12 @@ async def test_exclude_described_event( "entity_id": event.data[ATTR_ENTITY_ID], } - def async_describe_events(hass, async_describe_event): + def async_describe_events( + hass: HomeAssistant, + async_describe_event: Callable[ + [str, str, Callable[[Event], dict[str, str]]], None + ], + ) -> None: """Mock to describe events.""" async_describe_event("automation", "some_automation_event", _describe) async_describe_event("sensor", "some_event", _describe) diff --git a/tests/components/logger/test_init.py b/tests/components/logger/test_init.py index d6df1f92a72..24e58a77226 100644 --- a/tests/components/logger/test_init.py +++ b/tests/components/logger/test_init.py @@ -226,7 +226,7 @@ async def test_can_set_level_from_store( _reset_logging() -async def _assert_log_levels(hass): +async def _assert_log_levels(hass: HomeAssistant) -> None: assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.CRITICAL) is True assert ( diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py index 9353b897602..1ab45bf7582 100644 --- a/tests/components/lutron_caseta/test_device_trigger.py +++ b/tests/components/lutron_caseta/test_device_trigger.py @@ -98,7 +98,7 @@ MOCK_BUTTON_DEVICES = [ ] -async def _async_setup_lutron_with_picos(hass): +async def _async_setup_lutron_with_picos(hass: HomeAssistant) -> str: """Setups a lutron bridge with picos.""" config_entry = MockConfigEntry( domain=DOMAIN, From 5470d14a11357d6cedde98fa7673e765ae15480f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 15:36:37 +0200 Subject: [PATCH 0888/1635] Add missing hass type hint in component tests (m) (#124222) --- tests/components/media_player/common.py | 120 +++++++++++++----- tests/components/microsoft_face/test_init.py | 14 +- .../components/monoprice/test_media_player.py | 25 ++-- .../components/mqtt_eventstream/test_init.py | 7 +- .../mqtt_json/test_device_tracker.py | 10 +- tests/components/mqtt_room/test_sensor.py | 9 +- 6 files changed, 128 insertions(+), 57 deletions(-) diff --git a/tests/components/media_player/common.py b/tests/components/media_player/common.py index 77076d903a6..c0cdfbf26d7 100644 --- a/tests/components/media_player/common.py +++ b/tests/components/media_player/common.py @@ -16,6 +16,7 @@ from homeassistant.components.media_player import ( SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, + MediaPlayerEnqueue, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -35,70 +36,79 @@ from homeassistant.const import ( SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn on specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_on(hass, entity_id=ENTITY_MATCH_ALL): +def turn_on(hass: 
HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn on specified media player or all.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn off specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL): +def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn off specified media player or all.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL): +async def async_toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True) @bind_hass -def toggle(hass, entity_id=ENTITY_MATCH_ALL): +def toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle specified media player or all.""" hass.add_job(async_toggle, hass, entity_id) -async def async_volume_up(hass, entity_id=ENTITY_MATCH_ALL): +async def async_volume_up( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for volume up.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_VOLUME_UP, data, blocking=True) @bind_hass -def volume_up(hass, entity_id=ENTITY_MATCH_ALL): +def volume_up(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for volume up.""" hass.add_job(async_volume_up, hass, entity_id) -async def async_volume_down(hass, entity_id=ENTITY_MATCH_ALL): +async def async_volume_down( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for volume down.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_VOLUME_DOWN, data, blocking=True) @bind_hass -def volume_down(hass, entity_id=ENTITY_MATCH_ALL): +def volume_down(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for volume down.""" hass.add_job(async_volume_down, hass, entity_id) -async def async_mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): +async def async_mute_volume( + hass: HomeAssistant, mute: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for muting the volume.""" data = {ATTR_MEDIA_VOLUME_MUTED: mute} @@ -109,12 +119,16 @@ async def async_mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): @bind_hass -def mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): +def mute_volume( + hass: HomeAssistant, mute: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for muting the volume.""" hass.add_job(async_mute_volume, hass, mute, entity_id) -async def async_set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): +async def async_set_volume_level( + hass: HomeAssistant, volume: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for setting the volume.""" data = {ATTR_MEDIA_VOLUME_LEVEL: volume} @@ -125,12 +139,16 @@ async def async_set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): @bind_hass -def 
set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): +def set_volume_level( + hass: HomeAssistant, volume: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for setting the volume.""" hass.add_job(async_set_volume_level, hass, volume, entity_id) -async def async_media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_play_pause( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for play/pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -139,48 +157,56 @@ async def async_media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): +def media_play_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for play/pause.""" hass.add_job(async_media_play_pause, hass, entity_id) -async def async_media_play(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_play( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for play/pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PLAY, data, blocking=True) @bind_hass -def media_play(hass, entity_id=ENTITY_MATCH_ALL): +def media_play(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for play/pause.""" hass.add_job(async_media_play, hass, entity_id) -async def async_media_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_pause( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PAUSE, data, blocking=True) @bind_hass -def media_pause(hass, entity_id=ENTITY_MATCH_ALL): +def media_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for pause.""" hass.add_job(async_media_pause, hass, entity_id) -async def async_media_stop(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_stop( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for stop.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_STOP, data, blocking=True) @bind_hass -def media_stop(hass, entity_id=ENTITY_MATCH_ALL): +def media_stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for stop.""" hass.add_job(async_media_stop, hass, entity_id) -async def async_media_next_track(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_next_track( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for next track.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -189,12 +215,14 @@ async def async_media_next_track(hass, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_next_track(hass, entity_id=ENTITY_MATCH_ALL): +def media_next_track(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for next track.""" hass.add_job(async_media_next_track, hass, entity_id) -async def async_media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_previous_track( + hass: 
HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for prev track.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -203,12 +231,16 @@ async def async_media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): +def media_previous_track( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for prev track.""" hass.add_job(async_media_previous_track, hass, entity_id) -async def async_media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): +async def async_media_seek( + hass: HomeAssistant, position: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to seek in current playing media.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_MEDIA_SEEK_POSITION] = position @@ -216,14 +248,20 @@ async def async_media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): +def media_seek( + hass: HomeAssistant, position: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to seek in current playing media.""" hass.add_job(async_media_seek, hass, position, entity_id) async def async_play_media( - hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None -): + hass: HomeAssistant, + media_type: str, + media_id: str, + entity_id: str = ENTITY_MATCH_ALL, + enqueue: MediaPlayerEnqueue | bool | None = None, +) -> None: """Send the media player the command for playing media.""" data = {ATTR_MEDIA_CONTENT_TYPE: media_type, ATTR_MEDIA_CONTENT_ID: media_id} @@ -237,12 +275,20 @@ async def async_play_media( @bind_hass -def play_media(hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None): +def play_media( + hass: HomeAssistant, + media_type: str, + media_id: str, + entity_id: str = ENTITY_MATCH_ALL, + enqueue: MediaPlayerEnqueue | bool | None = None, +) -> None: """Send the media player the command for playing media.""" hass.add_job(async_play_media, hass, media_type, media_id, entity_id, enqueue) -async def async_select_source(hass, source, entity_id=ENTITY_MATCH_ALL): +async def async_select_source( + hass: HomeAssistant, source: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to select input source.""" data = {ATTR_INPUT_SOURCE: source} @@ -253,18 +299,22 @@ async def async_select_source(hass, source, entity_id=ENTITY_MATCH_ALL): @bind_hass -def select_source(hass, source, entity_id=ENTITY_MATCH_ALL): +def select_source( + hass: HomeAssistant, source: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to select input source.""" hass.add_job(async_select_source, hass, source, entity_id) -async def async_clear_playlist(hass, entity_id=ENTITY_MATCH_ALL): +async def async_clear_playlist( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for clear playlist.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_CLEAR_PLAYLIST, data, blocking=True) @bind_hass -def clear_playlist(hass, entity_id=ENTITY_MATCH_ALL): +def clear_playlist(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for clear playlist.""" hass.add_job(async_clear_playlist, hass, entity_id) diff --git 
a/tests/components/microsoft_face/test_init.py b/tests/components/microsoft_face/test_init.py index 63014a095c0..0819dd82f21 100644 --- a/tests/components/microsoft_face/test_init.py +++ b/tests/components/microsoft_face/test_init.py @@ -31,7 +31,7 @@ async def setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) -def create_group(hass, name): +def create_group(hass: HomeAssistant, name: str) -> None: """Create a new person group. This is a legacy helper method. Do not use it for new tests. @@ -40,7 +40,7 @@ def create_group(hass, name): hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_CREATE_GROUP, data)) -def delete_group(hass, name): +def delete_group(hass: HomeAssistant, name: str) -> None: """Delete a person group. This is a legacy helper method. Do not use it for new tests. @@ -49,7 +49,7 @@ def delete_group(hass, name): hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_DELETE_GROUP, data)) -def train_group(hass, group): +def train_group(hass: HomeAssistant, group: str) -> None: """Train a person group. This is a legacy helper method. Do not use it for new tests. @@ -58,7 +58,7 @@ def train_group(hass, group): hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_TRAIN_GROUP, data)) -def create_person(hass, group, name): +def create_person(hass: HomeAssistant, group: str, name: str) -> None: """Create a person in a group. This is a legacy helper method. Do not use it for new tests. @@ -69,7 +69,7 @@ def create_person(hass, group, name): ) -def delete_person(hass, group, name): +def delete_person(hass: HomeAssistant, group: str, name: str) -> None: """Delete a person in a group. This is a legacy helper method. Do not use it for new tests. @@ -80,7 +80,9 @@ def delete_person(hass, group, name): ) -def face_person(hass, group, person, camera_entity): +def face_person( + hass: HomeAssistant, group: str, person: str, camera_entity: str +) -> None: """Add a new face picture to a person. This is a legacy helper method. Do not use it for new tests. 
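A minimal sketch of the annotation convention the test-typing patches above follow; the helper below is hypothetical and not taken from any of these diffs. The recurring pattern is: hass: HomeAssistant first, concrete types for the remaining parameters, and an explicit return annotation (usually -> None for fire-and-forget helpers).

from homeassistant.core import HomeAssistant


def example_helper(hass: HomeAssistant, group: str, name: str) -> None:
    """Schedule a service call without awaiting it (legacy-style test helper)."""
    data = {"group": group, "name": name}
    # Mirrors the microsoft_face helpers above: create a task rather than await,
    # so the helper itself stays synchronous while the service call runs later.
    hass.async_create_task(
        hass.services.async_call("example_domain", "example_service", data)
    )
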
diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index c4ba998261b..7d05003153d 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -1,6 +1,7 @@ """The tests for Monoprice Media player platform.""" from collections import defaultdict +from typing import Any from unittest.mock import patch from serial import SerialException @@ -105,7 +106,7 @@ async def test_cannot_connect(hass: HomeAssistant) -> None: assert hass.states.get(ZONE_1_ID) is None -async def _setup_monoprice(hass, monoprice): +async def _setup_monoprice(hass: HomeAssistant, monoprice: MockMonoprice) -> None: with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -116,7 +117,9 @@ async def _setup_monoprice(hass, monoprice): await hass.async_block_till_done() -async def _setup_monoprice_with_options(hass, monoprice): +async def _setup_monoprice_with_options( + hass: HomeAssistant, monoprice: MockMonoprice +) -> None: with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -129,7 +132,9 @@ async def _setup_monoprice_with_options(hass, monoprice): await hass.async_block_till_done() -async def _setup_monoprice_not_first_run(hass, monoprice): +async def _setup_monoprice_not_first_run( + hass: HomeAssistant, monoprice: MockMonoprice +) -> None: with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -141,19 +146,17 @@ async def _setup_monoprice_not_first_run(hass, monoprice): await hass.async_block_till_done() -async def _call_media_player_service(hass, name, data): +async def _call_media_player_service( + hass: HomeAssistant, name: str, data: dict[str, Any] +) -> None: await hass.services.async_call( MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True ) -async def _call_homeassistant_service(hass, name, data): - await hass.services.async_call( - "homeassistant", name, service_data=data, blocking=True - ) - - -async def _call_monoprice_service(hass, name, data): +async def _call_monoprice_service( + hass: HomeAssistant, name: str, data: dict[str, Any] +) -> None: await hass.services.async_call(DOMAIN, name, service_data=data, blocking=True) diff --git a/tests/components/mqtt_eventstream/test_init.py b/tests/components/mqtt_eventstream/test_init.py index 82def7ef145..b6c1940b149 100644 --- a/tests/components/mqtt_eventstream/test_init.py +++ b/tests/components/mqtt_eventstream/test_init.py @@ -20,7 +20,12 @@ from tests.common import ( from tests.typing import MqttMockHAClient -async def add_eventstream(hass, sub_topic=None, pub_topic=None, ignore_event=None): +async def add_eventstream( + hass: HomeAssistant, + sub_topic: str | None = None, + pub_topic: str | None = None, + ignore_event: list[str] | None = None, +) -> bool: """Add a mqtt_eventstream component.""" config = {} if sub_topic: diff --git a/tests/components/mqtt_json/test_device_tracker.py b/tests/components/mqtt_json/test_device_tracker.py index 36073c11a5d..c372a448d98 100644 --- a/tests/components/mqtt_json/test_device_tracker.py +++ b/tests/components/mqtt_json/test_device_tracker.py @@ -11,11 +11,13 @@ import pytest from homeassistant.components.device_tracker.legacy import ( DOMAIN as DT_DOMAIN, YAML_DEVICES, + AsyncSeeCallback, ) from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.config_entries import ConfigEntryDisabler from homeassistant.const import CONF_PLATFORM from homeassistant.core 
import HomeAssistant +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component from tests.common import async_fire_mqtt_message @@ -71,9 +73,15 @@ async def test_setup_fails_without_mqtt_being_setup( async def test_ensure_device_tracker_platform_validation(hass: HomeAssistant) -> None: """Test if platform validation was done.""" - async def mock_setup_scanner(hass, config, see, discovery_info=None): + async def mock_setup_scanner( + hass: HomeAssistant, + config: ConfigType, + see: AsyncSeeCallback, + discovery_info: DiscoveryInfoType | None = None, + ) -> bool: """Check that Qos was added by validation.""" assert "qos" in config + return True with patch( "homeassistant.components.mqtt_json.device_tracker.async_setup_scanner", diff --git a/tests/components/mqtt_room/test_sensor.py b/tests/components/mqtt_room/test_sensor.py index e6fe7db3b8e..658dda4b6f8 100644 --- a/tests/components/mqtt_room/test_sensor.py +++ b/tests/components/mqtt_room/test_sensor.py @@ -2,6 +2,7 @@ import datetime import json +from typing import Any from unittest.mock import patch import pytest @@ -40,20 +41,22 @@ FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 10} REALLY_FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 20} -async def send_message(hass, topic, message): +async def send_message( + hass: HomeAssistant, topic: str, message: dict[str, Any] +) -> None: """Test the sending of a message.""" async_fire_mqtt_message(hass, topic, json.dumps(message)) await hass.async_block_till_done() await hass.async_block_till_done() -async def assert_state(hass, room): +async def assert_state(hass: HomeAssistant, room: str) -> None: """Test the assertion of a room state.""" state = hass.states.get(SENSOR_STATE) assert state.state == room -async def assert_distance(hass, distance): +async def assert_distance(hass: HomeAssistant, distance: int) -> None: """Test the assertion of a distance state.""" state = hass.states.get(SENSOR_STATE) assert state.attributes.get("distance") == distance From c76d68503a584cc9baab2fe0bbae4939f51a43cd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 15:41:09 +0200 Subject: [PATCH 0889/1635] Add missing hass type hint in history and recorder tests (#124204) --- tests/components/history/test_init.py | 14 +++++--------- tests/components/history/test_init_db_schema_30.py | 14 +++++--------- tests/components/recorder/common.py | 8 ++++++-- tests/components/recorder/test_init.py | 11 ++++++----- tests/components/recorder/test_migrate.py | 4 ++-- tests/components/recorder/test_purge.py | 4 +++- 6 files changed, 27 insertions(+), 28 deletions(-) diff --git a/tests/components/history/test_init.py b/tests/components/history/test_init.py index 7806b7c9ef4..3b4b02a877e 100644 --- a/tests/components/history/test_init.py +++ b/tests/components/history/test_init.py @@ -1,6 +1,6 @@ """The tests the History component.""" -from datetime import timedelta +from datetime import datetime, timedelta from http import HTTPStatus import json from unittest.mock import sentinel @@ -13,7 +13,7 @@ from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.models import process_timestamp from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from 
homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -291,13 +291,9 @@ async def test_get_significant_states_only(hass: HomeAssistant, hass_history) -> ) -async def check_significant_states(hass, zero, four, states, config): - """Check if significant states are retrieved.""" - hist = get_significant_states(hass, zero, four) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def async_record_states(hass): +async def async_record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states. We inject a bunch of state updates from media player, zone and diff --git a/tests/components/history/test_init_db_schema_30.py b/tests/components/history/test_init_db_schema_30.py index bec074362ca..1520d5363d5 100644 --- a/tests/components/history/test_init_db_schema_30.py +++ b/tests/components/history/test_init_db_schema_30.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta from http import HTTPStatus import json from unittest.mock import patch, sentinel @@ -14,7 +14,7 @@ from homeassistant.components import recorder from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.models import process_timestamp -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -309,13 +309,9 @@ async def test_get_significant_states_only( ) -def check_significant_states(hass, zero, four, states, config): - """Check if significant states are retrieved.""" - hist = get_significant_states(hass, zero, four) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def async_record_states(hass): +async def async_record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states. We inject a bunch of state updates from media player, zone and diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index aee35fceb80..18e58d9e572 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -265,12 +265,16 @@ def assert_dict_of_states_equal_without_context_and_last_changed( ) -async def async_record_states(hass: HomeAssistant): +async def async_record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states.""" return await hass.async_add_executor_job(record_states, hass) -def record_states(hass): +def record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states. We inject a bunch of state updates temperature sensors. 
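A hedged usage sketch for the newly annotated recorder helper; the test function below is illustrative only, and only the return type of async_record_states comes from the patch above.

from datetime import datetime

from homeassistant.core import HomeAssistant

from tests.components.recorder.common import async_record_states


async def example_usage(hass: HomeAssistant) -> None:
    """Unpack the now fully typed return value of async_record_states."""
    zero, four, states = await async_record_states(hass)
    # zero and four are the datetimes bounding the recorded window (the same
    # values the history tests pass to get_significant_states); states maps
    # entity_id to the list of recorded State objects (None entries allowed).
    assert isinstance(zero, datetime) and isinstance(four, datetime)
    assert all(isinstance(entity_id, str) for entity_id in states)
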
diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 74b0adc1b8b..72d47515edd 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -72,12 +72,13 @@ from homeassistant.const import ( STATE_LOCKED, STATE_UNLOCKED, ) -from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback +from homeassistant.core import Context, CoreState, Event, HomeAssistant, State, callback from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, recorder as recorder_helper, ) +from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.json import json_loads @@ -123,7 +124,7 @@ def small_cache_size() -> Generator[None]: yield -def _default_recorder(hass): +def _default_recorder(hass: HomeAssistant) -> Recorder: """Return a recorder with reasonable defaults.""" return Recorder( hass, @@ -581,7 +582,7 @@ async def test_saving_state_with_commit_interval_zero( assert db_states[0].event_id is None -async def _add_entities(hass, entity_ids): +async def _add_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[State]: """Add entities.""" attributes = {"test_attr": 5, "test_attr_10": "nice"} for idx, entity_id in enumerate(entity_ids): @@ -605,7 +606,7 @@ async def _add_entities(hass, entity_ids): return states -def _state_with_context(hass, entity_id): +def _state_with_context(hass: HomeAssistant, entity_id: str) -> State | None: # We don't restore context unless we need it by joining the # events table on the event_id for state_changed events return hass.states.get(entity_id) @@ -1004,7 +1005,7 @@ async def test_defaults_set(hass: HomeAssistant) -> None: """Test the config defaults are set.""" recorder_config = None - async def mock_setup(hass, config): + async def mock_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" nonlocal recorder_config recorder_config = config["recorder"] diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 1edbac3c566..05d59facf09 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -29,7 +29,7 @@ from homeassistant.components.recorder.db_schema import ( States, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import recorder as recorder_helper import homeassistant.util.dt as dt_util @@ -47,7 +47,7 @@ async def mock_recorder_before_hass( """Set up recorder.""" -def _get_native_states(hass, entity_id): +def _get_native_states(hass: HomeAssistant, entity_id: str) -> list[State]: with session_scope(hass=hass, read_only=True) as session: instance = recorder.get_instance(hass) metadata_id = instance.states_meta_manager.get(entity_id, session, True) diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 60ee913cb66..245acf4603d 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -1330,7 +1330,9 @@ async def test_purge_filtered_events_state_changed( async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test purging of specific entities.""" - async def _purge_entities(hass, entity_ids, domains, entity_globs): + async def _purge_entities( + hass: 
HomeAssistant, entity_ids: str, domains: str, entity_globs: str + ) -> None: service_data = { "entity_id": entity_ids, "domains": domains, From 110ee9ff35481b429107adfb2c36051743d97b47 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 16:06:20 +0200 Subject: [PATCH 0890/1635] Strip trailing / from OTBR url (#124223) --- homeassistant/components/otbr/config_flow.py | 4 ++-- tests/components/otbr/test_config_flow.py | 16 ++++++++++++---- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/otbr/config_flow.py b/homeassistant/components/otbr/config_flow.py index 8342a965bd3..c13a3f36f99 100644 --- a/homeassistant/components/otbr/config_flow.py +++ b/homeassistant/components/otbr/config_flow.py @@ -111,7 +111,7 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} if user_input is not None: - url = user_input[CONF_URL] + url = user_input[CONF_URL].rstrip("/") try: await self._connect_and_set_dataset(url) except ( @@ -124,7 +124,7 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(DOMAIN) return self.async_create_entry( title="Open Thread Border Router", - data=user_input, + data={CONF_URL: url}, ) data_schema = vol.Schema({CONF_URL: str}) diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index 224f77931e5..b3f829eb4f0 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -49,17 +49,25 @@ def addon_info_fixture(): yield addon_info +@pytest.mark.parametrize( + "url", + [ + "http://custom_url:1234", + "http://custom_url:1234/", + "http://custom_url:1234//", + ], +) async def test_user_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, url: str ) -> None: """Test the user flow.""" - url = "http://custom_url:1234" - aioclient_mock.get(f"{url}/node/dataset/active", text="aa") + stripped_url = "http://custom_url:1234" + aioclient_mock.get(f"{stripped_url}/node/dataset/active", text="aa") result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} ) - expected_data = {"url": url} + expected_data = {"url": stripped_url} assert result["type"] is FlowResultType.FORM assert result["errors"] == {} From f4997e46fbb024ba951edcb9a9f82f68cd638b18 Mon Sep 17 00:00:00 2001 From: Marlon Date: Mon, 19 Aug 2024 16:15:27 +0200 Subject: [PATCH 0891/1635] Bump apsystems-ez1 to 2.1.0 (#123225) Library update for apsystems fixing breaking changes --- homeassistant/components/apsystems/binary_sensor.py | 8 ++++---- homeassistant/components/apsystems/manifest.json | 2 +- homeassistant/components/apsystems/switch.py | 7 +++---- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/apsystems/conftest.py | 12 ++++++------ 6 files changed, 16 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/apsystems/binary_sensor.py b/homeassistant/components/apsystems/binary_sensor.py index 528203dc2d9..9e361ca883e 100644 --- a/homeassistant/components/apsystems/binary_sensor.py +++ b/homeassistant/components/apsystems/binary_sensor.py @@ -35,28 +35,28 @@ BINARY_SENSORS: tuple[ApsystemsLocalApiBinarySensorDescription, ...] 
= ( translation_key="off_grid_status", device_class=BinarySensorDeviceClass.PROBLEM, entity_category=EntityCategory.DIAGNOSTIC, - is_on=lambda c: bool(c.og), + is_on=lambda c: c.offgrid, ), ApsystemsLocalApiBinarySensorDescription( key="dc_1_short_circuit_error_status", translation_key="dc_1_short_circuit_error_status", device_class=BinarySensorDeviceClass.PROBLEM, entity_category=EntityCategory.DIAGNOSTIC, - is_on=lambda c: bool(c.isce1), + is_on=lambda c: c.shortcircuit_1, ), ApsystemsLocalApiBinarySensorDescription( key="dc_2_short_circuit_error_status", translation_key="dc_2_short_circuit_error_status", device_class=BinarySensorDeviceClass.PROBLEM, entity_category=EntityCategory.DIAGNOSTIC, - is_on=lambda c: bool(c.isce2), + is_on=lambda c: c.shortcircuit_2, ), ApsystemsLocalApiBinarySensorDescription( key="output_fault_status", translation_key="output_fault_status", device_class=BinarySensorDeviceClass.PROBLEM, entity_category=EntityCategory.DIAGNOSTIC, - is_on=lambda c: bool(c.oe), + is_on=lambda c: not c.operating, ), ) diff --git a/homeassistant/components/apsystems/manifest.json b/homeassistant/components/apsystems/manifest.json index cba3e59dba0..d312901249e 100644 --- a/homeassistant/components/apsystems/manifest.json +++ b/homeassistant/components/apsystems/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apsystems", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["apsystems-ez1==1.3.3"] + "requirements": ["apsystems-ez1==2.1.0"] } diff --git a/homeassistant/components/apsystems/switch.py b/homeassistant/components/apsystems/switch.py index 405adc94b27..93a21ec9f05 100644 --- a/homeassistant/components/apsystems/switch.py +++ b/homeassistant/components/apsystems/switch.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from aiohttp.client_exceptions import ClientConnectionError -from APsystemsEZ1 import Status from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.core import HomeAssistant @@ -45,12 +44,12 @@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity): self._attr_available = False else: self._attr_available = True - self._attr_is_on = status == Status.normal + self._attr_is_on = status async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" - await self._api.set_device_power_status(0) + await self._api.set_device_power_status(True) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" - await self._api.set_device_power_status(1) + await self._api.set_device_power_status(False) diff --git a/requirements_all.txt b/requirements_all.txt index bc4c0146a38..e29dbe3a285 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -464,7 +464,7 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==1.3.3 +apsystems-ez1==2.1.0 # homeassistant.components.aqualogic aqualogic==2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8f9217e2d73..91068c52598 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -437,7 +437,7 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==1.3.3 +apsystems-ez1==2.1.0 # homeassistant.components.aranet aranet4==2.3.4 diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index 7e6140e8279..0feccf21578 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -3,7 
+3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from APsystemsEZ1 import ReturnAlarmInfo, ReturnDeviceInfo, ReturnOutputData, Status +from APsystemsEZ1 import ReturnAlarmInfo, ReturnDeviceInfo, ReturnOutputData import pytest from homeassistant.components.apsystems.const import DOMAIN @@ -53,12 +53,12 @@ def mock_apsystems() -> Generator[MagicMock]: te2=7.0, ) mock_api.get_alarm_info.return_value = ReturnAlarmInfo( - og=Status.normal, - isce1=Status.alarm, - isce2=Status.normal, - oe=Status.alarm, + offgrid=False, + shortcircuit_1=True, + shortcircuit_2=False, + operating=False, ) - mock_api.get_device_power_status.return_value = Status.normal + mock_api.get_device_power_status.return_value = True yield mock_api From b53ae884a6247b0971a41c20c345331cb4e1fe61 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 16:16:34 +0200 Subject: [PATCH 0892/1635] Add missing hass type hint in component tests (o) (#124226) --- tests/components/onvif/__init__.py | 4 ++- tests/components/otbr/test_config_flow.py | 2 +- .../components/owntracks/test_config_flow.py | 2 +- .../owntracks/test_device_tracker.py | 31 +++++++++++++------ 4 files changed, 26 insertions(+), 13 deletions(-) diff --git a/tests/components/onvif/__init__.py b/tests/components/onvif/__init__.py index 0857dfef798..8a86538b977 100644 --- a/tests/components/onvif/__init__.py +++ b/tests/components/onvif/__init__.py @@ -151,7 +151,9 @@ def setup_mock_device(mock_device, capabilities=None): pullpoint_manager=MagicMock(state=PullPointManagerState.PAUSED), ) - def mock_constructor(hass, config): + def mock_constructor( + hass: HomeAssistant, config: config_entries.ConfigEntry + ) -> MagicMock: """Fake the controller constructor.""" return mock_device diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index b3f829eb4f0..c4972bb5f83 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -355,7 +355,7 @@ async def test_hassio_discovery_flow_2x_addons( aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") - async def _addon_info(hass, slug): + async def _addon_info(hass: HomeAssistant, slug: str) -> dict[str, Any]: await asyncio.sleep(0) if slug == "otbr": return { diff --git a/tests/components/owntracks/test_config_flow.py b/tests/components/owntracks/test_config_flow.py index 818524c1c50..b1172eb4a31 100644 --- a/tests/components/owntracks/test_config_flow.py +++ b/tests/components/owntracks/test_config_flow.py @@ -51,7 +51,7 @@ def mock_not_supports_encryption(): yield -async def init_config_flow(hass): +async def init_config_flow(hass: HomeAssistant) -> config_flow.OwnTracksFlow: """Init a configuration flow.""" await async_process_ha_core_config( hass, diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index bc2ae7ce4d8..2f35139c021 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -4,6 +4,7 @@ import base64 from collections.abc import Callable, Generator import json import pickle +from typing import Any from unittest.mock import patch from nacl.encoding import Base64Encoder @@ -305,7 +306,9 @@ def setup_comp( hass.states.async_set("zone.outer", "zoning", OUTER_ZONE) -async def setup_owntracks(hass, config, ctx_cls=owntracks.OwnTracksContext): 
+async def setup_owntracks( + hass: HomeAssistant, config: dict[str, Any], ctx_cls=owntracks.OwnTracksContext +) -> None: """Set up OwnTracks.""" MockConfigEntry( domain="owntracks", data={"webhook_id": "owntracks_test", "secret": "abcd"} @@ -347,7 +350,9 @@ def context(hass: HomeAssistant, setup_comp: None) -> OwnTracksContextFactory: return get_context -async def send_message(hass, topic, message, corrupt=False): +async def send_message( + hass: HomeAssistant, topic: str, message: dict[str, Any], corrupt: bool = False +) -> None: """Test the sending of a message.""" str_message = json.dumps(message) if corrupt: @@ -359,51 +364,57 @@ async def send_message(hass, topic, message, corrupt=False): await hass.async_block_till_done() -def assert_location_state(hass, location): +def assert_location_state(hass: HomeAssistant, location: str) -> None: """Test the assertion of a location state.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.state == location -def assert_location_latitude(hass, latitude): +def assert_location_latitude(hass: HomeAssistant, latitude: float) -> None: """Test the assertion of a location latitude.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("latitude") == latitude -def assert_location_longitude(hass, longitude): +def assert_location_longitude(hass: HomeAssistant, longitude: float) -> None: """Test the assertion of a location longitude.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("longitude") == longitude -def assert_location_accuracy(hass, accuracy): +def assert_location_accuracy(hass: HomeAssistant, accuracy: int) -> None: """Test the assertion of a location accuracy.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("gps_accuracy") == accuracy -def assert_location_source_type(hass, source_type): +def assert_location_source_type(hass: HomeAssistant, source_type: str) -> None: """Test the assertion of source_type.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("source_type") == source_type -def assert_mobile_tracker_state(hass, location, beacon=IBEACON_DEVICE): +def assert_mobile_tracker_state( + hass: HomeAssistant, location: str, beacon: str = IBEACON_DEVICE +) -> None: """Test the assertion of a mobile beacon tracker state.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.state == location -def assert_mobile_tracker_latitude(hass, latitude, beacon=IBEACON_DEVICE): +def assert_mobile_tracker_latitude( + hass: HomeAssistant, latitude: float, beacon: str = IBEACON_DEVICE +) -> None: """Test the assertion of a mobile beacon tracker latitude.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.attributes.get("latitude") == latitude -def assert_mobile_tracker_accuracy(hass, accuracy, beacon=IBEACON_DEVICE): +def assert_mobile_tracker_accuracy( + hass: HomeAssistant, accuracy: int, beacon: str = IBEACON_DEVICE +) -> None: """Test the assertion of a mobile beacon tracker accuracy.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) From f2d41bd99cf1819a180d0d81681eb38d0c2b188f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 16:29:34 +0200 Subject: [PATCH 0893/1635] Minor improvements of otbr tests (#124224) --- tests/components/otbr/test_silabs_multiprotocol.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/otbr/test_silabs_multiprotocol.py 
b/tests/components/otbr/test_silabs_multiprotocol.py index 8d7bed13df6..dcb0c10b9e4 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -35,7 +35,7 @@ DATASET_CH16_PENDING = ( async def test_async_change_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test test_async_change_channel.""" + """Test async_change_channel.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -63,7 +63,7 @@ async def test_async_change_channel( async def test_async_change_channel_no_pending( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test test_async_change_channel when the pending dataset already expired.""" + """Test async_change_channel when the pending dataset already expired.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -95,7 +95,7 @@ async def test_async_change_channel_no_pending( async def test_async_change_channel_no_update( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test test_async_change_channel when we didn't get a dataset from the OTBR.""" + """Test async_change_channel when we didn't get a dataset from the OTBR.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -169,7 +169,7 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: """Test test_async_get_channel when otbr is not configured.""" with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: - await otbr_silabs_multiprotocol.async_get_channel(hass) + assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None mock_get_active_dataset.assert_not_awaited() From e3ab30a2a53aa9033c1d5121a6367e58130ef1b2 Mon Sep 17 00:00:00 2001 From: Marlon Date: Mon, 19 Aug 2024 16:57:43 +0200 Subject: [PATCH 0894/1635] Add support for whole apsystems ez1 series (#123356) * Add support for whole apsystems ez1 series by configuring the max_output at setup to match the value by the inverter * Check Max output for apsystems on startup, not setup * Move max output check into coordinator * Raise UpdateFailed on error in apsystems --- homeassistant/components/apsystems/coordinator.py | 9 ++++++++- homeassistant/components/apsystems/number.py | 2 +- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index 96956bafc3e..6ba4f01dbc8 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -8,7 +8,7 @@ from datetime import timedelta from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import LOGGER @@ -34,6 +34,13 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): ) self.api = api + async def _async_setup(self) -> None: + try: + max_power = (await self.api.get_device_info()).maxPower + except (ConnectionError, TimeoutError): + raise UpdateFailed from None + self.api.max_power = max_power + async def _async_update_data(self) -> ApSystemsSensorData: output_data = await self.api.get_output_data() alarm_info = await self.api.get_alarm_info() diff --git a/homeassistant/components/apsystems/number.py 
b/homeassistant/components/apsystems/number.py index f9b535d7d6a..51e7130587f 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -26,7 +26,6 @@ async def async_setup_entry( class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): """Base sensor to be used with description.""" - _attr_native_max_value = 800 _attr_native_min_value = 30 _attr_native_step = 1 _attr_device_class = NumberDeviceClass.POWER @@ -42,6 +41,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): super().__init__(data) self._api = data.coordinator.api self._attr_unique_id = f"{data.device_id}_output_limit" + self._attr_native_max_value = data.coordinator.api.max_power async def async_update(self) -> None: """Set the state with the value fetched from the inverter.""" From 50f3c891fa8ccd0a5306cd3ceb8885d0531a768a Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 17:30:48 +0200 Subject: [PATCH 0895/1635] Prepare `otbr.silabs_multiprotocol` for multiple config entries (#124219) * Prepare otbr.silabs_multiprotocol for multiple config entries * Simplify --- .../components/otbr/silabs_multiprotocol.py | 68 ++++++++++++++----- .../otbr/test_silabs_multiprotocol.py | 31 +++++++++ 2 files changed, 82 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/otbr/silabs_multiprotocol.py b/homeassistant/components/otbr/silabs_multiprotocol.py index bd7eb997558..411da0a9361 100644 --- a/homeassistant/components/otbr/silabs_multiprotocol.py +++ b/homeassistant/components/otbr/silabs_multiprotocol.py @@ -2,7 +2,10 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine +from functools import wraps import logging +from typing import Any, Concatenate import aiohttp from python_otbr_api import tlv_parser @@ -21,15 +24,53 @@ from .util import OTBRData _LOGGER = logging.getLogger(__name__) -async def async_change_channel(hass: HomeAssistant, channel: int, delay: float) -> None: +def async_get_otbr_data[**_P, _R, _R_Def]( + retval: _R_Def, +) -> Callable[ + [Callable[Concatenate[HomeAssistant, OTBRData, _P], Coroutine[Any, Any, _R]]], + Callable[Concatenate[HomeAssistant, _P], Coroutine[Any, Any, _R | _R_Def]], +]: + """Decorate function to get OTBR data.""" + + def _async_get_otbr_data( + orig_func: Callable[ + Concatenate[HomeAssistant, OTBRData, _P], + Coroutine[Any, Any, _R], + ], + ) -> Callable[Concatenate[HomeAssistant, _P], Coroutine[Any, Any, _R | _R_Def]]: + """Decorate function to get OTBR data.""" + + @wraps(orig_func) + async def async_get_otbr_data_wrapper( + hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs + ) -> _R | _R_Def: + """Fetch OTBR data and pass to orig_func.""" + if DOMAIN not in hass.data: + return retval + + data: OTBRData = hass.data[DOMAIN] + + if not is_multiprotocol_url(data.url): + return retval + + return await orig_func(hass, data, *args, **kwargs) + + return async_get_otbr_data_wrapper + + return _async_get_otbr_data + + +@async_get_otbr_data(None) +async def async_change_channel( + hass: HomeAssistant, + data: OTBRData, + channel: int, + delay: float, +) -> None: """Set the channel to be used. Does nothing if not configured. 
""" - if DOMAIN not in hass.data: - return - - data: OTBRData = hass.data[DOMAIN] await data.set_channel(channel, delay) # Import the new dataset @@ -48,16 +89,12 @@ async def async_change_channel(hass: HomeAssistant, channel: int, delay: float) await async_add_dataset(hass, DOMAIN, dataset_tlvs_str) -async def async_get_channel(hass: HomeAssistant) -> int | None: +@async_get_otbr_data(None) +async def async_get_channel(hass: HomeAssistant, data: OTBRData) -> int | None: """Return the channel. Returns None if not configured. """ - if DOMAIN not in hass.data: - return None - - data: OTBRData = hass.data[DOMAIN] - try: dataset = await data.get_active_dataset() except ( @@ -74,13 +111,10 @@ async def async_get_channel(hass: HomeAssistant) -> int | None: return dataset.channel -async def async_using_multipan(hass: HomeAssistant) -> bool: +@async_get_otbr_data(False) +async def async_using_multipan(hass: HomeAssistant, data: OTBRData) -> bool: """Return if the multiprotocol device is used. Returns False if not configured. """ - if DOMAIN not in hass.data: - return False - - data: OTBRData = hass.data[DOMAIN] - return is_multiprotocol_url(data.url) + return True diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index dcb0c10b9e4..ad3d6540b47 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -126,6 +126,17 @@ async def test_async_change_channel_no_otbr(hass: HomeAssistant) -> None: mock_set_channel.assert_not_awaited() +async def test_async_change_channel_non_matching_url( + hass: HomeAssistant, otbr_config_entry_multipan +) -> None: + """Test async_change_channel when otbr is not configured.""" + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + data.url = OTBR_NON_MULTIPAN_URL + with patch("python_otbr_api.OTBR.set_channel") as mock_set_channel: + await otbr_silabs_multiprotocol.async_change_channel(hass, 16, delay=0) + mock_set_channel.assert_not_awaited() + + async def test_async_get_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: @@ -173,6 +184,17 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: mock_get_active_dataset.assert_not_awaited() +async def test_async_get_channel_non_matching_url( + hass: HomeAssistant, otbr_config_entry_multipan +) -> None: + """Test async_change_channel when otbr is not configured.""" + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + data.url = OTBR_NON_MULTIPAN_URL + with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: + assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None + mock_get_active_dataset.assert_not_awaited() + + @pytest.mark.parametrize( ("url", "expected"), [(OTBR_MULTIPAN_URL, True), (OTBR_NON_MULTIPAN_URL, False)], @@ -191,3 +213,12 @@ async def test_async_using_multipan_no_otbr(hass: HomeAssistant) -> None: """Test async_change_channel when otbr is not configured.""" assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False + + +async def test_async_using_multipan_non_matching_url( + hass: HomeAssistant, otbr_config_entry_multipan +) -> None: + """Test async_change_channel when otbr is not configured.""" + data: otbr.OTBRData = hass.data[otbr.DOMAIN] + data.url = OTBR_NON_MULTIPAN_URL + assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False From 984a534300bd9bdba6cdc5d66a9dc9b6e54e854f Mon Sep 17 00:00:00 2001 From: Marlon Date: Mon, 19 Aug 2024 19:44:28 +0200 Subject: [PATCH 0896/1635] 
Bump apsystems-ez1 to 2.2.1 (#124243) * Update apsystems library * Bump apsystems library to 2.2.1 --- homeassistant/components/apsystems/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/apsystems/manifest.json b/homeassistant/components/apsystems/manifest.json index d312901249e..9376d21ba28 100644 --- a/homeassistant/components/apsystems/manifest.json +++ b/homeassistant/components/apsystems/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apsystems", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["apsystems-ez1==2.1.0"] + "requirements": ["apsystems-ez1==2.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index e29dbe3a285..9012aa91fe8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -464,7 +464,7 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.1.0 +apsystems-ez1==2.2.1 # homeassistant.components.aqualogic aqualogic==2.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 91068c52598..15bec27206c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -437,7 +437,7 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==2.1.0 +apsystems-ez1==2.2.1 # homeassistant.components.aranet aranet4==2.3.4 From de82f214469136ed1f6d16de84c4c0352509d8b6 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Mon, 19 Aug 2024 20:14:33 +0200 Subject: [PATCH 0897/1635] Use library fork for ViCare integration (#124107) * switch dependency to fork * Update manifest.json * Update requirements_all.txt * Update requirements_test_all.txt --- homeassistant/components/vicare/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 97c4b91022d..186e9ef6289 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.32.0"] + "requirements": ["PyViCare-neo==0.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9012aa91fe8..b582bdc1903 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.1 # homeassistant.components.vicare -PyViCare==2.32.0 +PyViCare-neo==0.2.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 15bec27206c..88ec3b0776d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.1 # homeassistant.components.vicare -PyViCare==2.32.0 +PyViCare-neo==0.2.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From bcd92b43cea4ba0dc8d04ace20035d972d3b04de Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Mon, 19 Aug 2024 20:40:23 +0200 Subject: [PATCH 0898/1635] Bump aiowithings to 3.0.3 (#124154) --- homeassistant/components/withings/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/withings/snapshots/test_diagnostics.ambr | 6 
+++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 090f8c4588e..a7f632325a0 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==3.0.2"] + "requirements": ["aiowithings==3.0.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index b582bdc1903..d14a1111726 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -404,7 +404,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.2 +aiowithings==3.0.3 # homeassistant.components.yandex_transport aioymaps==1.2.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 88ec3b0776d..861a84cc876 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -386,7 +386,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.2 +aiowithings==3.0.3 # homeassistant.components.yandex_transport aioymaps==1.2.5 diff --git a/tests/components/withings/snapshots/test_diagnostics.ambr b/tests/components/withings/snapshots/test_diagnostics.ambr index df2a3b95388..f7c704a2c49 100644 --- a/tests/components/withings/snapshots/test_diagnostics.ambr +++ b/tests/components/withings/snapshots/test_diagnostics.ambr @@ -15,7 +15,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'unknown', + 'basal_metabolic_rate', 'fat_ratio', 'height', 'temperature', @@ -78,7 +78,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'unknown', + 'basal_metabolic_rate', 'fat_ratio', 'height', 'temperature', @@ -141,7 +141,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'unknown', + 'basal_metabolic_rate', 'fat_ratio', 'height', 'temperature', From 9283d766d3d063494645950ec1222005edd7f1c3 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Mon, 19 Aug 2024 20:43:50 +0200 Subject: [PATCH 0899/1635] Bump ruff to 0.6.1 (#124250) * Bump ruff to 0.6.0 * Bump ruff to 0.6.1 --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4c3ce7fe104..f29fd92a880 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.0 + rev: v0.6.1 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 091f872d511..6bd2fbbc145 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.6.0 +ruff==0.6.1 yamllint==1.35.1 From 89728f41e1da665f9b77291dcafcdf26fe804a16 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Mon, 19 Aug 2024 14:45:25 -0400 Subject: [PATCH 0900/1635] Bump nice-go to 0.2.1 (#124238) * Bump nice-go to 0.2.0 * Bump nice-go to 0.2.1 --- homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nice_go/manifest.json 
b/homeassistant/components/nice_go/manifest.json index e86c68b491f..bb005f9cc27 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nice_go", "iot_class": "cloud_push", "loggers": ["nice-go"], - "requirements": ["nice-go==0.1.6"] + "requirements": ["nice-go==0.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index d14a1111726..3871ad80990 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1426,7 +1426,7 @@ nextdns==3.1.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.1.6 +nice-go==0.2.1 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 861a84cc876..de2d9d63ca7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1183,7 +1183,7 @@ nextdns==3.1.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.1.6 +nice-go==0.2.1 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 From 69652ca2ca25d010f6afb1021843305b84b862c8 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 19 Aug 2024 21:13:48 +0200 Subject: [PATCH 0901/1635] Use PEP 695 for decorator typing with type aliases in esphome (#124234) --- homeassistant/components/esphome/entity.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index 6e02f8de869..764390c861b 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine import functools import math -from typing import TYPE_CHECKING, Any, Concatenate, Generic, ParamSpec, TypeVar, cast +from typing import TYPE_CHECKING, Any, Concatenate, Generic, TypeVar, cast from aioesphomeapi import ( APIConnectionError, @@ -30,8 +30,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entry_data import ESPHomeConfigEntry, RuntimeEntryData from .enum_mapper import EsphomeEnumMapper -_R = TypeVar("_R") -_P = ParamSpec("_P") _InfoT = TypeVar("_InfoT", bound=EntityInfo) _EntityT = TypeVar("_EntityT", bound="EsphomeEntity[Any,Any]") _StateT = TypeVar("_StateT", bound=EntityState) @@ -116,7 +114,7 @@ async def platform_async_setup_entry( ) -def esphome_state_property( +def esphome_state_property[_R, _EntityT: EsphomeEntity[Any, Any]]( func: Callable[[_EntityT], _R], ) -> Callable[[_EntityT], _R | None]: """Wrap a state property of an esphome entity. @@ -139,7 +137,7 @@ def esphome_state_property( return _wrapper -def convert_api_error_ha_error( +def convert_api_error_ha_error[**_P, _R, _EntityT: EsphomeEntity[Any, Any]]( func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], ) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: """Decorate ESPHome command calls that send commands/make changes to the device. From 20f7af25e937d471a60b06585fde8c6644c9ddb1 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Mon, 19 Aug 2024 15:25:11 -0400 Subject: [PATCH 0902/1635] Add switch platform to Nice G.O. (#124237) * Add switch platform to Nice G.O. 
* Replace cover with switch in switch.py * Use icon translations * Fix tests * Use constants in test_switch.py * Use ATTR_ENTITY_ID --- homeassistant/components/nice_go/__init__.py | 2 +- .../components/nice_go/coordinator.py | 3 ++ homeassistant/components/nice_go/icons.json | 9 ++++ homeassistant/components/nice_go/strings.json | 5 ++ homeassistant/components/nice_go/switch.py | 49 +++++++++++++++++++ .../nice_go/fixtures/get_all_barriers.json | 2 +- .../nice_go/snapshots/test_diagnostics.ambr | 2 + tests/components/nice_go/test_init.py | 2 +- tests/components/nice_go/test_switch.py | 43 ++++++++++++++++ 9 files changed, 114 insertions(+), 3 deletions(-) create mode 100644 homeassistant/components/nice_go/icons.json create mode 100644 homeassistant/components/nice_go/switch.py create mode 100644 tests/components/nice_go/test_switch.py diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py index f0bfea58b89..bf8dbfcaecd 100644 --- a/homeassistant/components/nice_go/__init__.py +++ b/homeassistant/components/nice_go/__init__.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant from .coordinator import NiceGOUpdateCoordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SWITCH] type NiceGOConfigEntry = ConfigEntry[NiceGOUpdateCoordinator] diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index 196ed0a211c..323e0a08fe8 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -46,6 +46,7 @@ class NiceGODevice: light_status: bool fw_version: str connected: bool + vacation_mode: bool class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): @@ -105,6 +106,7 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): connected = barrier_state.connectionState.connected else: connected = False + vacation_mode = barrier_state.reported["vcnMode"] return NiceGODevice( id=device_id, @@ -113,6 +115,7 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): light_status=light_status, fw_version=fw_version, connected=connected, + vacation_mode=vacation_mode, ) async def _async_update_data(self) -> dict[str, NiceGODevice]: diff --git a/homeassistant/components/nice_go/icons.json b/homeassistant/components/nice_go/icons.json new file mode 100644 index 00000000000..fdce7c43e24 --- /dev/null +++ b/homeassistant/components/nice_go/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "switch": { + "vacation_mode": { + "default": "mdi:beach" + } + } + } +} diff --git a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json index 261f71ebcbe..7f2ea1512dd 100644 --- a/homeassistant/components/nice_go/strings.json +++ b/homeassistant/components/nice_go/strings.json @@ -22,6 +22,11 @@ "light": { "name": "[%key:component::light::title%]" } + }, + "switch": { + "vacation_mode": { + "name": "Vacation mode" + } } }, "issues": { diff --git a/homeassistant/components/nice_go/switch.py b/homeassistant/components/nice_go/switch.py new file mode 100644 index 00000000000..e7290aabdd6 --- /dev/null +++ b/homeassistant/components/nice_go/switch.py @@ -0,0 +1,49 @@ +"""Nice G.O. 
switch platform.""" + +from __future__ import annotations + +import logging +from typing import Any + +from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import NiceGOConfigEntry +from .entity import NiceGOEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NiceGOConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nice G.O. switch.""" + coordinator = config_entry.runtime_data + + async_add_entities( + NiceGOSwitchEntity(coordinator, device_id, device_data.name, "switch") + for device_id, device_data in coordinator.data.items() + ) + + +class NiceGOSwitchEntity(NiceGOEntity, SwitchEntity): + """Representation of a Nice G.O. switch.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + _attr_translation_key = "vacation_mode" + + @property + def is_on(self) -> bool: + """Return if switch is on.""" + return self.data.vacation_mode + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.coordinator.api.vacation_mode_on(self.data.id) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.coordinator.api.vacation_mode_off(self.data.id) diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json index 481c73d91a8..adb0fb4bacd 100644 --- a/tests/components/nice_go/fixtures/get_all_barriers.json +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -49,7 +49,7 @@ "migrationStatus": "DONE", "deviceId": "2", "lightStatus": "0,100", - "vcnMode": false, + "vcnMode": true, "deviceFwVersion": "1.2.3.4.5.6", "barrierStatus": "1,100,0,0,-1,0,3,0" }, diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index f79b0607ce2..abd3b3103d1 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -9,6 +9,7 @@ 'id': '1', 'light_status': True, 'name': 'Test Garage 1', + 'vacation_mode': False, }), '2': dict({ 'barrier_status': 'open', @@ -17,6 +18,7 @@ 'id': '2', 'light_status': False, 'name': 'Test Garage 2', + 'vacation_mode': True, }), }), 'entry': dict({ diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index d6877d72724..249622d23b0 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -275,7 +275,7 @@ async def test_no_connection_state( "item": { "deviceId": "1", "desired": '{"key": "value"}', - "reported": '{"displayName":"Test Garage 1", "migrationStatus":"DONE", "barrierStatus": "1,100,0", "deviceFwVersion": "1.0.0", "lightStatus": "1,100"}', + "reported": '{"displayName":"Test Garage 1", "migrationStatus":"DONE", "barrierStatus": "1,100,0", "deviceFwVersion": "1.0.0", "lightStatus": "1,100", "vcnMode": false}', "connectionState": None, "version": None, "timestamp": None, diff --git a/tests/components/nice_go/test_switch.py b/tests/components/nice_go/test_switch.py new file mode 100644 index 00000000000..f34cba495c9 --- /dev/null +++ b/tests/components/nice_go/test_switch.py @@ -0,0 +1,43 @@ +"""Nice G.O. 
switch tests.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_turn_on( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test turn on switch.""" + await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "switch.test_garage_1_vacation_mode"}, + blocking=True, + ) + mock_nice_go.vacation_mode_on.assert_called_once_with("1") + + +async def test_turn_off( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test turn off switch.""" + await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "switch.test_garage_2_vacation_mode"}, + blocking=True, + ) + mock_nice_go.vacation_mode_off.assert_called_once_with("2") From b4afca3e7ede34d948659c7a0f23c66fae34f755 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 19 Aug 2024 21:38:41 +0200 Subject: [PATCH 0903/1635] Add missing hass type hint in component tests (r) (#124231) --- .../components/repairs/test_websocket_api.py | 18 +++++- tests/components/rfxtrx/test_config_flow.py | 4 +- tests/components/rfxtrx/test_device_action.py | 9 ++- .../components/rfxtrx/test_device_trigger.py | 2 +- tests/components/ring/common.py | 4 +- .../risco/test_alarm_control_panel.py | 63 +++++++++++++++---- tests/components/risco/test_binary_sensor.py | 30 +++++++-- tests/components/risco/test_sensor.py | 2 +- tests/components/risco/test_switch.py | 19 +++++- .../rpi_power/test_binary_sensor.py | 2 +- 10 files changed, 122 insertions(+), 31 deletions(-) diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index dcc6932cf4a..bb3d50f9eb5 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -18,7 +18,11 @@ from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockUser, mock_platform -from tests.typing import ClientSessionGenerator, WebSocketGenerator +from tests.typing import ( + ClientSessionGenerator, + MockHAClientWebSocket, + WebSocketGenerator, +) DEFAULT_ISSUES = [ { @@ -34,7 +38,11 @@ DEFAULT_ISSUES = [ ] -async def create_issues(hass, ws_client, issues=None): +async def create_issues( + hass: HomeAssistant, + ws_client: MockHAClientWebSocket, + issues: list[dict[str, Any]] | None = None, +) -> list[dict[str, Any]]: """Create issues.""" def api_issue(issue): @@ -119,7 +127,11 @@ async def mock_repairs_integration(hass: HomeAssistant) -> None: """Mock a repairs integration.""" hass.config.components.add("fake_integration") - def async_create_fix_flow(hass, issue_id, data): + def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, + ) -> RepairsFlow: assert issue_id in EXPECTED_DATA assert data == EXPECTED_DATA[issue_id] diff --git a/tests/components/rfxtrx/test_config_flow.py b/tests/components/rfxtrx/test_config_flow.py index b61440c31b6..1e23bdaf982 100644 --- 
a/tests/components/rfxtrx/test_config_flow.py +++ b/tests/components/rfxtrx/test_config_flow.py @@ -29,7 +29,9 @@ def com_port(): return port -async def start_options_flow(hass, entry): +async def start_options_flow( + hass: HomeAssistant, entry: MockConfigEntry +) -> config_entries.ConfigFlowResult: """Start the options flow with the entry under test.""" entry.add_to_hass(hass) diff --git a/tests/components/rfxtrx/test_device_action.py b/tests/components/rfxtrx/test_device_action.py index c678f2dfc62..a3522934c57 100644 --- a/tests/components/rfxtrx/test_device_action.py +++ b/tests/components/rfxtrx/test_device_action.py @@ -47,7 +47,7 @@ async def test_device_test_data(rfxtrx, device: DeviceTestData) -> None: } -async def setup_entry(hass, devices): +async def setup_entry(hass: HomeAssistant, devices: dict[str, Any]) -> None: """Construct a config setup.""" entry_data = create_rfx_test_cfg(devices=devices) mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data) @@ -79,7 +79,10 @@ def _get_expected_actions(data): ], ) async def test_get_actions( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, device, expected + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + device: DeviceTestData, + expected, ) -> None: """Test we get the expected actions from a rfxtrx.""" await setup_entry(hass, {device.code: {}}) @@ -136,7 +139,7 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, rfxtrx: RFXtrx.Connect, - device, + device: DeviceTestData, config, expected, ) -> None: diff --git a/tests/components/rfxtrx/test_device_trigger.py b/tests/components/rfxtrx/test_device_trigger.py index 38f7cccc072..9c56951761b 100644 --- a/tests/components/rfxtrx/test_device_trigger.py +++ b/tests/components/rfxtrx/test_device_trigger.py @@ -46,7 +46,7 @@ EVENT_FIREALARM_1 = EventTestData( ) -async def setup_entry(hass, devices): +async def setup_entry(hass: HomeAssistant, devices: dict[str, Any]) -> None: """Construct a config setup.""" entry_data = create_rfx_test_cfg(devices=devices) mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data) diff --git a/tests/components/ring/common.py b/tests/components/ring/common.py index b129623aa95..3b78adf0e09 100644 --- a/tests/components/ring/common.py +++ b/tests/components/ring/common.py @@ -3,12 +3,14 @@ from unittest.mock import patch from homeassistant.components.ring import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def setup_platform(hass, platform): +async def setup_platform(hass: HomeAssistant, platform: Platform) -> None: """Set up the ring platform and prerequisites.""" MockConfigEntry(domain=DOMAIN, data={"username": "foo", "token": {}}).add_to_hass( hass diff --git a/tests/components/risco/test_alarm_control_panel.py b/tests/components/risco/test_alarm_control_panel.py index 53d5b9573b6..9b554ddbf28 100644 --- a/tests/components/risco/test_alarm_control_panel.py +++ b/tests/components/risco/test_alarm_control_panel.py @@ -1,5 +1,7 @@ """Tests for the Risco alarm control panel device.""" +from collections.abc import Callable +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest @@ -180,8 +182,13 @@ async def test_cloud_setup( async def _check_cloud_state( - hass, partitions, property, state, entity_id, partition_id -): + hass: HomeAssistant, + partitions: dict[int, 
Any], + property: str, + state: str, + entity_id: str, + partition_id: int, +) -> None: with patch.object(partitions[partition_id], property, return_value=True): await async_update_entity(hass, entity_id) await hass.async_block_till_done() @@ -256,7 +263,9 @@ async def test_cloud_states( ) -async def _call_alarm_service(hass, service, entity_id, **kwargs): +async def _call_alarm_service( + hass: HomeAssistant, service: str, entity_id: str, **kwargs: Any +) -> None: data = {"entity_id": entity_id, **kwargs} await hass.services.async_call( @@ -265,16 +274,27 @@ async def _call_alarm_service(hass, service, entity_id, **kwargs): async def _test_cloud_service_call( - hass, service, method, entity_id, partition_id, *args, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition_id: int, + *args: Any, + **kwargs: Any, +) -> None: with patch(f"homeassistant.components.risco.RiscoCloud.{method}") as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_awaited_once_with(partition_id, *args) async def _test_cloud_no_service_call( - hass, service, method, entity_id, partition_id, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition_id: int, + **kwargs: Any, +) -> None: with patch(f"homeassistant.components.risco.RiscoCloud.{method}") as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_not_awaited() @@ -531,8 +551,14 @@ async def test_local_setup( async def _check_local_state( - hass, partitions, property, state, entity_id, partition_id, callback -): + hass: HomeAssistant, + partitions: dict[int, Any], + property: str, + state: str, + entity_id: str, + partition_id: int, + callback: Callable, +) -> None: with patch.object(partitions[partition_id], property, return_value=True): await callback(partition_id, partitions[partition_id]) @@ -629,16 +655,27 @@ async def test_local_states( async def _test_local_service_call( - hass, service, method, entity_id, partition, *args, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition: int, + *args: Any, + **kwargs: Any, +) -> None: with patch.object(partition, method, AsyncMock()) as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_awaited_once_with(*args) async def _test_local_no_service_call( - hass, service, method, entity_id, partition, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition: int, + **kwargs: Any, +) -> None: with patch.object(partition, method, AsyncMock()) as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_not_awaited() diff --git a/tests/components/risco/test_binary_sensor.py b/tests/components/risco/test_binary_sensor.py index b6ff29a0bce..600cfa02c0e 100644 --- a/tests/components/risco/test_binary_sensor.py +++ b/tests/components/risco/test_binary_sensor.py @@ -1,6 +1,8 @@ """Tests for the Risco binary sensors.""" -from unittest.mock import PropertyMock, patch +from collections.abc import Callable +from typing import Any +from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -59,7 +61,13 @@ async def test_cloud_setup( assert device.manufacturer == "Risco" -async def _check_cloud_state(hass, zones, triggered, entity_id, zone_id): +async def _check_cloud_state( + hass: HomeAssistant, + zones: dict[int, Any], + triggered: bool, + entity_id: str, + zone_id: int, +) -> None: with patch.object( zones[zone_id], 
"triggered", @@ -130,8 +138,14 @@ async def test_local_setup( async def _check_local_state( - hass, zones, entity_property, value, entity_id, zone_id, callback -): + hass: HomeAssistant, + zones: dict[int, Any], + entity_property: str, + value: bool, + entity_id: str, + zone_id: int, + callback: Callable, +) -> None: with patch.object( zones[zone_id], entity_property, @@ -218,7 +232,13 @@ async def test_armed_local_states( ) -async def _check_system_state(hass, system, entity_property, value, callback): +async def _check_system_state( + hass: HomeAssistant, + system: MagicMock, + entity_property: str, + value: bool, + callback: Callable, +) -> None: with patch.object( system, entity_property, diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 2b1094554ae..6a3ac6f42e3 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -136,7 +136,7 @@ async def test_error_on_login( assert not entity_registry.async_is_registered(entity_id) -def _check_state(hass, category, entity_id): +def _check_state(hass: HomeAssistant, category: str, entity_id: str) -> None: event_index = CATEGORIES_TO_EVENTS[category] event = TEST_EVENTS[event_index] state = hass.states.get(entity_id) diff --git a/tests/components/risco/test_switch.py b/tests/components/risco/test_switch.py index acf80462d54..54e7bc3ca0c 100644 --- a/tests/components/risco/test_switch.py +++ b/tests/components/risco/test_switch.py @@ -1,5 +1,7 @@ """Tests for the Risco binary sensors.""" +from collections.abc import Callable +from typing import Any from unittest.mock import PropertyMock, patch import pytest @@ -40,7 +42,13 @@ async def test_cloud_setup( assert entity_registry.async_is_registered(SECOND_ENTITY_ID) -async def _check_cloud_state(hass, zones, bypassed, entity_id, zone_id): +async def _check_cloud_state( + hass: HomeAssistant, + zones: dict[int, Any], + bypassed: bool, + entity_id: str, + zone_id: int, +) -> None: with patch.object( zones[zone_id], "bypassed", @@ -117,7 +125,14 @@ async def test_local_setup( assert entity_registry.async_is_registered(SECOND_ENTITY_ID) -async def _check_local_state(hass, zones, bypassed, entity_id, zone_id, callback): +async def _check_local_state( + hass: HomeAssistant, + zones: dict[int, Any], + bypassed: bool, + entity_id: str, + zone_id: int, + callback: Callable, +) -> None: with patch.object( zones[zone_id], "bypassed", diff --git a/tests/components/rpi_power/test_binary_sensor.py b/tests/components/rpi_power/test_binary_sensor.py index 1643df6c993..865d7c035b8 100644 --- a/tests/components/rpi_power/test_binary_sensor.py +++ b/tests/components/rpi_power/test_binary_sensor.py @@ -24,7 +24,7 @@ ENTITY_ID = "binary_sensor.rpi_power_status" MODULE = "homeassistant.components.rpi_power.binary_sensor.new_under_voltage" -async def _async_setup_component(hass, detected): +async def _async_setup_component(hass: HomeAssistant, detected: bool) -> MagicMock: mocked_under_voltage = MagicMock() type(mocked_under_voltage).get = MagicMock(return_value=detected) entry = MockConfigEntry(domain=DOMAIN) From 254aa8c9ead8476a214700edbe9b5d37554a51f1 Mon Sep 17 00:00:00 2001 From: Andrii Mitnovych <10116550+formatBCE@users.noreply.github.com> Date: Mon, 19 Aug 2024 13:00:23 -0700 Subject: [PATCH 0904/1635] Add entity deduplication by assist device ID in conversation agent (#123957) * Add entities deduplication by assist device ID in default conversation agent * Updated test. 
--- .../components/conversation/default_agent.py | 25 +++++-- .../conversation/test_default_agent.py | 66 +++++++++++++++++++ 2 files changed, 84 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 1661d2ad30d..05b4d194d33 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -349,6 +349,9 @@ class DefaultAgent(ConversationEntity): } for entity in result.entities_list } + device_area = self._get_device_area(user_input.device_id) + if device_area: + slots["preferred_area_id"] = {"value": device_area.id} async_conversation_trace_append( ConversationTraceEventType.TOOL_CALL, { @@ -917,18 +920,26 @@ class DefaultAgent(ConversationEntity): if not user_input.device_id: return None - devices = dr.async_get(self.hass) - device = devices.async_get(user_input.device_id) - if (device is None) or (device.area_id is None): - return None - - areas = ar.async_get(self.hass) - device_area = areas.async_get_area(device.area_id) + device_area = self._get_device_area(user_input.device_id) if device_area is None: return None return {"area": {"value": device_area.name, "text": device_area.name}} + def _get_device_area(self, device_id: str | None) -> ar.AreaEntry | None: + """Return area object for given device identifier.""" + if device_id is None: + return None + + devices = dr.async_get(self.hass) + device = devices.async_get(device_id) + if (device is None) or (device.area_id is None): + return None + + areas = ar.async_get(self.hass) + + return areas.async_get_area(device.area_id) + def _get_error_text( self, error_key: ErrorKey | str, diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 315b73bacfd..935ef205d4f 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -308,6 +308,72 @@ async def test_unexposed_entities_skipped( assert result.response.matched_states[0].entity_id == exposed_light.entity_id +@pytest.mark.usefixtures("init_components") +async def test_duplicated_names_resolved_with_device_area( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test entities deduplication with device ID context.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_bedroom = area_registry.async_get_or_create("bedroom_id") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") + + # Same name and alias + for light in (kitchen_light, bedroom_light): + light = entity_registry.async_update_entity( + light.entity_id, + name="top light", + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + # Different areas + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + area_id=area_kitchen.id, + ) + bedroom_light = entity_registry.async_update_entity( + bedroom_light.entity_id, + area_id=area_bedroom.id, + ) + + # Pipeline device in bedroom area + entry = MockConfigEntry() + entry.add_to_hass(hass) + assist_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "id-1234")}, + ) + assist_device = 
device_registry.async_update_device( + assist_device.id, + area_id=area_bedroom.id, + ) + + # Check name and alias + for name in ("top light", "overhead light"): + # Only one light should be turned on + calls = async_mock_service(hass, "light", "turn_on") + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), device_id=assist_device.id + ) + + assert len(calls) == 1 + assert calls[0].data["entity_id"][0] == bedroom_light.entity_id + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.intent is not None + assert result.response.intent.slots.get("name", {}).get("value") == name + assert result.response.intent.slots.get("name", {}).get("text") == name + + @pytest.mark.usefixtures("init_components") async def test_trigger_sentences(hass: HomeAssistant) -> None: """Test registering/unregistering/matching a few trigger sentences.""" From 407e4f6ca2ac4d90e5df82c39e7d93806b6f0e74 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:01:35 +0200 Subject: [PATCH 0905/1635] Add entity matching to intent_script (#120973) --- .../components/intent_script/__init__.py | 55 +++++++++++- tests/components/intent_script/test_init.py | 85 ++++++++++++++++++- 2 files changed, 138 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/intent_script/__init__.py b/homeassistant/components/intent_script/__init__.py index 371163ce00a..6f47cadb04f 100644 --- a/homeassistant/components/intent_script/__init__.py +++ b/homeassistant/components/intent_script/__init__.py @@ -144,6 +144,12 @@ class _IntentCardData(TypedDict): class ScriptIntentHandler(intent.IntentHandler): """Respond to an intent with a script.""" + slot_schema = { + vol.Any("name", "area", "floor"): cv.string, + vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), + } + def __init__(self, intent_type: str, config: ConfigType) -> None: """Initialize the script intent handler.""" self.intent_type = intent_type @@ -158,8 +164,10 @@ class ScriptIntentHandler(intent.IntentHandler): card: _IntentCardData | None = self.config.get(CONF_CARD) action: script.Script | None = self.config.get(CONF_ACTION) is_async_action: bool = self.config[CONF_ASYNC_ACTION] + hass: HomeAssistant = intent_obj.hass + intent_slots = self.async_validate_slots(intent_obj.slots) slots: dict[str, Any] = { - key: value["value"] for key, value in intent_obj.slots.items() + key: value["value"] for key, value in intent_slots.items() } _LOGGER.debug( @@ -172,6 +180,51 @@ class ScriptIntentHandler(intent.IntentHandler): }, ) + entity_name = slots.get("name") + area_name = slots.get("area") + floor_name = slots.get("floor") + + # Optional domain/device class filters. + # Convert to sets for speed. 
+ domains: set[str] | None = None + device_classes: set[str] | None = None + + if "domain" in slots: + domains = set(slots["domain"]) + + if "device_class" in slots: + device_classes = set(slots["device_class"]) + + match_constraints = intent.MatchTargetsConstraints( + name=entity_name, + area_name=area_name, + floor_name=floor_name, + domains=domains, + device_classes=device_classes, + assistant=intent_obj.assistant, + ) + + if match_constraints.has_constraints: + match_result = intent.async_match_targets(hass, match_constraints) + if match_result.is_match: + targets = {} + + if match_result.states: + targets["entities"] = [ + state.entity_id for state in match_result.states + ] + + if match_result.areas: + targets["areas"] = [area.id for area in match_result.areas] + + if match_result.floors: + targets["floors"] = [ + floor.floor_id for floor in match_result.floors + ] + + if targets: + slots["targets"] = targets + if action is not None: if is_async_action: intent_obj.hass.async_create_task( diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 86f3a7aba46..26c575f0407 100644 --- a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -6,7 +6,12 @@ from homeassistant import config as hass_config from homeassistant.components.intent_script import DOMAIN from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant -from homeassistant.helpers import intent +from homeassistant.helpers import ( + area_registry as ar, + entity_registry as er, + floor_registry as fr, + intent, +) from homeassistant.setup import async_setup_component from tests.common import async_mock_service, get_fixture_path @@ -197,6 +202,84 @@ async def test_intent_script_falsy_reprompt(hass: HomeAssistant) -> None: assert response.card["simple"]["content"] == "Content for Paulus" +async def test_intent_script_targets( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test intent scripts work.""" + calls = async_mock_service(hass, "test", "service") + + await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "Targets": { + "description": "Intent to control a test service.", + "action": { + "service": "test.service", + "data_template": { + "targets": "{{ targets if targets is defined }}", + }, + }, + "speech": { + "text": "{{ targets.entities[0] if targets is defined }}" + }, + } + } + }, + ) + + floor_1 = floor_registry.async_create("first floor") + kitchen = area_registry.async_get_or_create("kitchen") + area_registry.async_update(kitchen.id, floor_id=floor_1.floor_id) + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", area_id=kitchen.id) + hass.states.async_set("light.kitchen", "off") + + response = await intent.async_handle( + hass, + "test", + "Targets", + {"name": {"value": "kitchen"}, "domain": {"value": "light"}}, + ) + assert len(calls) == 1 + assert calls[0].data["targets"] == {"entities": ["light.kitchen"]} + assert response.speech["plain"]["speech"] == "light.kitchen" + calls.clear() + + response = await intent.async_handle( + hass, + "test", + "Targets", + { + "area": {"value": "kitchen"}, + "floor": {"value": "first floor"}, + }, + ) + assert len(calls) == 1 + assert calls[0].data["targets"] == { + "entities": ["light.kitchen"], + "areas": ["kitchen"], + 
"floors": ["first_floor"], + } + calls.clear() + + response = await intent.async_handle( + hass, + "test", + "Targets", + {"device_class": {"value": "door"}}, + ) + assert len(calls) == 1 + assert calls[0].data["targets"] == "" + calls.clear() + + async def test_reload(hass: HomeAssistant) -> None: """Verify we can reload intent config.""" From 566c00ef12cca16a4c51177fbba6c3a3b8e83579 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Mon, 19 Aug 2024 22:05:43 +0200 Subject: [PATCH 0906/1635] Skip interfaces without mac in enigma2 device_info (#124249) --- homeassistant/components/enigma2/coordinator.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/enigma2/coordinator.py b/homeassistant/components/enigma2/coordinator.py index f1da56309e8..a35e74f582f 100644 --- a/homeassistant/components/enigma2/coordinator.py +++ b/homeassistant/components/enigma2/coordinator.py @@ -72,11 +72,14 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): self.device_info["model"] = about["info"]["model"] self.device_info["manufacturer"] = about["info"]["brand"] self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(iface["mac"])) for iface in about["info"]["ifaces"] + (DOMAIN, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None } self.device_info[ATTR_CONNECTIONS] = { (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None } async def _async_update_data(self) -> OpenWebIfStatus: From fc767ee562d1f6ab565087fa41f2858831e4bd39 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Mon, 19 Aug 2024 16:09:22 -0400 Subject: [PATCH 0907/1635] Add event platform to Nice G.O. (#124253) * Add event platform to Nice G.O. * Add icon for barrier obstructed event * Better assertions * More test improvements --- homeassistant/components/nice_go/__init__.py | 7 ++- homeassistant/components/nice_go/event.py | 51 +++++++++++++++++++ homeassistant/components/nice_go/icons.json | 5 ++ homeassistant/components/nice_go/strings.json | 12 +++++ tests/components/nice_go/test_event.py | 31 +++++++++++ 5 files changed, 105 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nice_go/event.py create mode 100644 tests/components/nice_go/test_event.py diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py index bf8dbfcaecd..ab3dc06e3c1 100644 --- a/homeassistant/components/nice_go/__init__.py +++ b/homeassistant/components/nice_go/__init__.py @@ -11,7 +11,12 @@ from homeassistant.core import HomeAssistant from .coordinator import NiceGOUpdateCoordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SWITCH] +PLATFORMS: list[Platform] = [ + Platform.COVER, + Platform.EVENT, + Platform.LIGHT, + Platform.SWITCH, +] type NiceGOConfigEntry = ConfigEntry[NiceGOUpdateCoordinator] diff --git a/homeassistant/components/nice_go/event.py b/homeassistant/components/nice_go/event.py new file mode 100644 index 00000000000..caf984f824f --- /dev/null +++ b/homeassistant/components/nice_go/event.py @@ -0,0 +1,51 @@ +"""Nice G.O. 
event platform.""" + +import logging +from typing import Any + +from homeassistant.components.event import EventEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import NiceGOConfigEntry +from .entity import NiceGOEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NiceGOConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nice G.O. event.""" + + coordinator = config_entry.runtime_data + + async_add_entities( + NiceGOEventEntity(coordinator, device_id, device_data.name, "event") + for device_id, device_data in coordinator.data.items() + ) + + +EVENT_BARRIER_OBSTRUCTED = "barrier_obstructed" + + +class NiceGOEventEntity(NiceGOEntity, EventEntity): + """Event for Nice G.O. devices.""" + + _attr_translation_key = "barrier_obstructed" + _attr_event_types = [EVENT_BARRIER_OBSTRUCTED] + + async def async_added_to_hass(self) -> None: + """Listen for events.""" + await super().async_added_to_hass() + self.coordinator.api.event(self.on_barrier_obstructed) + + async def on_barrier_obstructed(self, data: dict[str, Any]) -> None: + """Handle barrier obstructed event.""" + _LOGGER.debug("Barrier obstructed event: %s", data) + if data["deviceId"] == self.data.id: + _LOGGER.debug("Barrier obstructed event for %s, triggering", self.data.name) + self._trigger_event(EVENT_BARRIER_OBSTRUCTED) + self.schedule_update_ha_state() diff --git a/homeassistant/components/nice_go/icons.json b/homeassistant/components/nice_go/icons.json index fdce7c43e24..61e36c9eb3a 100644 --- a/homeassistant/components/nice_go/icons.json +++ b/homeassistant/components/nice_go/icons.json @@ -4,6 +4,11 @@ "vacation_mode": { "default": "mdi:beach" } + }, + "event": { + "barrier_obstructed": { + "default": "mdi:garage-alert" + } } } } diff --git a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json index 7f2ea1512dd..30a2bbf58b6 100644 --- a/homeassistant/components/nice_go/strings.json +++ b/homeassistant/components/nice_go/strings.json @@ -27,6 +27,18 @@ "vacation_mode": { "name": "Vacation mode" } + }, + "event": { + "barrier_obstructed": { + "name": "Barrier obstructed", + "state_attributes": { + "event_type": { + "state": { + "barrier_obstructed": "Barrier obstructed" + } + } + } + } } }, "issues": { diff --git a/tests/components/nice_go/test_event.py b/tests/components/nice_go/test_event.py new file mode 100644 index 00000000000..0038b2882ad --- /dev/null +++ b/tests/components/nice_go/test_event.py @@ -0,0 +1,31 @@ +"""Nice G.O. event tests.""" + +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.freeze_time("2024-08-19") +async def test_barrier_obstructed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test barrier obstructed.""" + mock_nice_go.event = MagicMock() + await setup_integration(hass, mock_config_entry, [Platform.EVENT]) + + await mock_nice_go.event.call_args_list[2][0][0]({"deviceId": "1"}) + await hass.async_block_till_done() + + event_state = hass.states.get("event.test_garage_1_barrier_obstructed") + + assert event_state.state == "2024-08-19T00:00:00.000+00:00" + assert event_state.attributes["event_type"] == "barrier_obstructed" From 108a54a4a873e749bbb9e45f7c19c8097a79a7c9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 19 Aug 2024 15:28:05 -0500 Subject: [PATCH 0908/1635] Handle WebSocket client disconnect during prepare (#124173) --- .../components/websocket_api/http.py | 7 ++++++ tests/components/websocket_api/test_http.py | 22 +++++++++++++++++-- 2 files changed, 27 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index e33da9a8b4a..1ad8d909ce8 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -306,6 +306,13 @@ class WebSocketHandler: try: async with asyncio.timeout(10): await wsock.prepare(request) + except ConnectionResetError: + # Likely the client disconnected before we prepared the websocket + logger.debug( + "%s: Connection reset by peer while preparing WebSocket", + self.description, + ) + return wsock except TimeoutError: logger.warning("Timeout preparing request from %s", request.remote) return wsock diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 11665da11b4..2530d885942 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -363,12 +363,12 @@ async def test_non_json_message( assert "bad= None: - """Test failing to prepare.""" + """Test failing to prepare due to timeout.""" with ( patch( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", @@ -381,6 +381,24 @@ async def test_prepare_fail( assert "Timeout preparing request" in caplog.text +async def test_prepare_fail_connection_reset( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test failing to prepare due to connection reset.""" + with ( + patch( + "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", + side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), + ), + pytest.raises(WSServerHandshakeError), + ): + await hass_ws_client(hass) + + assert "Connection reset by peer while preparing WebSocket" in caplog.text + + async def test_enable_coalesce( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, From 0f90a3cf606e2665d9576b97f8709cb7e9d803c1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 19 Aug 2024 15:40:32 -0500 Subject: [PATCH 0909/1635] Bump aiohttp to 3.10.5 (#124254) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 27966ccf611..170d453badd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -4,7 +4,7 @@ aiodhcpwatcher==1.0.2 aiodiscover==2.1.0 aiodns==3.2.0 aiohttp-fast-zlib==0.1.1 -aiohttp==3.10.4 +aiohttp==3.10.5 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 diff --git a/pyproject.toml b/pyproject.toml index ce521594399..0f3dfd92067 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ requires-python = ">=3.12.0" dependencies = [ "aiodns==3.2.0", - "aiohttp==3.10.4", + "aiohttp==3.10.5", "aiohttp_cors==0.7.0", "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", diff --git a/requirements.txt b/requirements.txt index f78ea9f6d54..102358cc75e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # Home Assistant Core aiodns==3.2.0 -aiohttp==3.10.4 +aiohttp==3.10.5 aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 From d8cbb3540fe77785ca4ed47f1ce5503e56e5e69e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 19 Aug 2024 19:13:35 -0500 Subject: [PATCH 0910/1635] Bump aioshelly to 11.2.4 (#124080) --- homeassistant/components/shelly/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index c742b45632c..da3bbc4bb6e 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.2.0"], + "requirements": ["aioshelly==11.2.4"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 3871ad80990..be69fabecc4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -359,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.0 +aioshelly==11.2.4 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index de2d9d63ca7..746b62f075a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -341,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.0 +aioshelly==11.2.4 # homeassistant.components.skybell aioskybell==22.7.0 From 097162ecebc47cd84b5d97c91e9d6a8f5c65a3b0 Mon Sep 17 00:00:00 2001 From: Yuxin Wang Date: Tue, 20 Aug 2024 00:09:01 -0400 Subject: [PATCH 0911/1635] Add handling for `RoborockTooFrequentCodeRequests` for roborock integration (#123759) * Add handling for RoborockTooFrequentCodeRequests * Add tests for coverage --- homeassistant/components/roborock/config_flow.py | 3 +++ homeassistant/components/roborock/strings.json | 1 + tests/components/roborock/test_config_flow.py | 2 ++ 3 files changed, 6 insertions(+) diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 2b409bdf8c4..c6dee7ce4ed 100644 --- a/homeassistant/components/roborock/config_flow.py +++ 
b/homeassistant/components/roborock/config_flow.py @@ -12,6 +12,7 @@ from roborock.exceptions import ( RoborockException, RoborockInvalidCode, RoborockInvalidEmail, + RoborockTooFrequentCodeRequests, RoborockUrlException, ) from roborock.web_api import RoborockApiClient @@ -83,6 +84,8 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown_url" except RoborockInvalidEmail: errors["base"] = "invalid_email_format" + except RoborockTooFrequentCodeRequests: + errors["base"] = "too_frequent_code_requests" except RoborockException: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown_roborock" diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index 081e4c68a75..d1fc50f27e8 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -22,6 +22,7 @@ "invalid_code": "The code you entered was incorrect, please check it and try again.", "invalid_email": "There is no account associated with the email you entered, please try again.", "invalid_email_format": "There is an issue with the formatting of your email - please try again.", + "too_frequent_code_requests": "You have attempted to request too many codes. Try again later.", "unknown_roborock": "There was an unknown roborock exception - please check your logs.", "unknown_url": "There was an issue determining the correct url for your roborock account - please check your logs.", "unknown": "[%key:common::config_flow::error::unknown%]" diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index a5a86e44372..39d8117847c 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -4,6 +4,7 @@ from copy import deepcopy from unittest.mock import patch import pytest +from roborock import RoborockTooFrequentCodeRequests from roborock.exceptions import ( RoborockAccountDoesNotExist, RoborockException, @@ -71,6 +72,7 @@ async def test_config_flow_success( (RoborockException(), {"base": "unknown_roborock"}), (RoborockAccountDoesNotExist(), {"base": "invalid_email"}), (RoborockInvalidEmail(), {"base": "invalid_email_format"}), + (RoborockTooFrequentCodeRequests(), {"base": "too_frequent_code_requests"}), (RoborockUrlException(), {"base": "unknown_url"}), (Exception(), {"base": "unknown"}), ], From 82602644168e27279b99416e711554006d0c877c Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Tue, 20 Aug 2024 01:38:10 -0400 Subject: [PATCH 0912/1635] Bump nice-go to 0.3.0 (#124262) --- homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index bb005f9cc27..c2ff8370e2a 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nice_go", "iot_class": "cloud_push", "loggers": ["nice-go"], - "requirements": ["nice-go==0.2.1"] + "requirements": ["nice-go==0.3.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index be69fabecc4..04d89fb9ba1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1426,7 +1426,7 @@ nextdns==3.1.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.2.1 +nice-go==0.3.0 # 
homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 746b62f075a..5a0ca106973 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1183,7 +1183,7 @@ nextdns==3.1.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.2.1 +nice-go==0.3.0 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 From d99f1631cafffd20ae09bac26d3147a0a7afb3c1 Mon Sep 17 00:00:00 2001 From: Ian Date: Mon, 19 Aug 2024 22:57:03 -0700 Subject: [PATCH 0913/1635] Add Uplink info to UniFi Device tracker attributes (#123032) Add device uplink mac sensor --- homeassistant/components/unifi/sensor.py | 19 +++++++ tests/components/unifi/test_sensor.py | 71 +++++++++++++++++++++++- 2 files changed, 89 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 39e487c0d57..697df00fe55 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -174,6 +174,12 @@ def async_device_outlet_supported_fn(hub: UnifiHub, obj_id: str) -> bool: return hub.api.devices[obj_id].outlet_ac_power_budget is not None +@callback +def async_device_uplink_mac_supported_fn(hub: UnifiHub, obj_id: str) -> bool: + """Determine if a device supports reading uplink MAC address.""" + return "uplink_mac" in hub.api.devices[obj_id].raw.get("uplink", {}) + + def device_system_stats_supported_fn( stat_index: int, hub: UnifiHub, obj_id: str ) -> bool: @@ -571,6 +577,19 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( unique_id_fn=lambda hub, obj_id: f"device_temperature-{obj_id}", value_fn=lambda hub, device: device.general_temperature, ), + UnifiSensorEntityDescription[Devices, Device]( + key="Device Uplink MAC", + entity_category=EntityCategory.DIAGNOSTIC, + api_handler_fn=lambda api: api.devices, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda device: "Uplink MAC", + object_fn=lambda api, obj_id: api.devices[obj_id], + unique_id_fn=lambda hub, obj_id: f"device_uplink_mac-{obj_id}", + supported_fn=async_device_uplink_mac_supported_fn, + value_fn=lambda hub, device: device.raw.get("uplink", {}).get("uplink_mac"), + is_connected_fn=lambda hub, obj_id: hub.api.devices[obj_id].state == 1, + ), UnifiSensorEntityDescription[Devices, Device]( key="Device State", device_class=SensorDeviceClass.ENUM, diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py index afa256c087e..3c94d12018d 100644 --- a/tests/components/unifi/test_sensor.py +++ b/tests/components/unifi/test_sensor.py @@ -27,7 +27,12 @@ from homeassistant.components.unifi.const import ( DEVICE_STATES, ) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY -from homeassistant.const import ATTR_DEVICE_CLASS, STATE_UNAVAILABLE, Platform +from homeassistant.const import ( + ATTR_DEVICE_CLASS, + STATE_UNAVAILABLE, + EntityCategory, + Platform, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_registry import RegistryEntryDisabler @@ -1681,3 +1686,67 @@ async def test_device_with_no_matching_temperatures( ) assert temperature_entity is None + + +@pytest.mark.parametrize( + "device_payload", + [ + [ + { + "board_rev": 3, + "device_id": "device-with-uplink", + "ip": "10.0.1.1", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:01", + "model": "US16P150", + 
"name": "Device", + "next_interval": 20, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + "uplink": { + "uplink_mac": "00:00:00:00:00:02", + "port_idx": 1, + }, + }, + { + "board_rev": 3, + "device_id": "device-without-uplink", + "ip": "10.0.1.2", + "last_seen": 1562600145, + "mac": "00:00:00:00:01:02", + "model": "US16P150", + "name": "Other Device", + "next_interval": 20, + "state": 1, + "type": "usw", + "upgradable": True, + "uptime": 60, + "version": "4.0.42.10433", + "uplink": {}, + }, + ], + ], +) +@pytest.mark.usefixtures("config_entry_setup") +async def test_device_uplink( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_websocket_message, + device_payload: list[dict[str, Any]], +) -> None: + """Verify that uplink sensors are working as expected.""" + assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 5 + assert hass.states.get("sensor.device_uplink_mac").state == "00:00:00:00:00:02" + assert ( + entity_registry.async_get("sensor.device_uplink_mac").entity_category + is EntityCategory.DIAGNOSTIC + ) + + # Verify new event change temperature + device = device_payload[0] + device["uplink"]["uplink_mac"] = "00:00:00:00:00:03" + mock_websocket_message(message=MessageKey.DEVICE, data=device) + assert hass.states.get("sensor.device_uplink_mac").state == "00:00:00:00:00:03" From b31c6012aec3bc53185212867feb8bea9f8eeae6 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 20 Aug 2024 08:16:59 +0200 Subject: [PATCH 0914/1635] Use HassKey in otbr (#124240) --- homeassistant/components/otbr/__init__.py | 6 +++--- homeassistant/components/otbr/const.py | 10 ++++++++++ .../components/otbr/silabs_multiprotocol.py | 6 +++--- homeassistant/components/otbr/websocket_api.py | 10 +++++----- .../components/otbr/test_silabs_multiprotocol.py | 12 ++++-------- tests/components/otbr/test_util.py | 16 ++++------------ 6 files changed, 29 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/otbr/__init__.py b/homeassistant/components/otbr/__init__.py index 1d79b1c52e9..3e53358a162 100644 --- a/homeassistant/components/otbr/__init__.py +++ b/homeassistant/components/otbr/__init__.py @@ -14,7 +14,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from . 
import websocket_api -from .const import DOMAIN +from .const import DATA_OTBR, DOMAIN from .util import OTBRData, update_issues CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @@ -67,14 +67,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - hass.data[DOMAIN] = otbrdata + hass.data[DATA_OTBR] = otbrdata return True async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" - hass.data.pop(DOMAIN) + hass.data.pop(DATA_OTBR) return True diff --git a/homeassistant/components/otbr/const.py b/homeassistant/components/otbr/const.py index cc3e4a9e6c3..cf1678466a4 100644 --- a/homeassistant/components/otbr/const.py +++ b/homeassistant/components/otbr/const.py @@ -1,5 +1,15 @@ """Constants for the Open Thread Border Router integration.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from .util import OTBRData + DOMAIN = "otbr" +DATA_OTBR: HassKey[OTBRData] = HassKey(DOMAIN) DEFAULT_CHANNEL = 15 diff --git a/homeassistant/components/otbr/silabs_multiprotocol.py b/homeassistant/components/otbr/silabs_multiprotocol.py index 411da0a9361..b3a711968fd 100644 --- a/homeassistant/components/otbr/silabs_multiprotocol.py +++ b/homeassistant/components/otbr/silabs_multiprotocol.py @@ -18,7 +18,7 @@ from homeassistant.components.thread import async_add_dataset from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from . import DOMAIN +from .const import DATA_OTBR, DOMAIN from .util import OTBRData _LOGGER = logging.getLogger(__name__) @@ -45,10 +45,10 @@ def async_get_otbr_data[**_P, _R, _R_Def]( hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs ) -> _R | _R_Def: """Fetch OTBR data and pass to orig_func.""" - if DOMAIN not in hass.data: + if DATA_OTBR not in hass.data: return retval - data: OTBRData = hass.data[DOMAIN] + data = hass.data[DATA_OTBR] if not is_multiprotocol_url(data.url): return retval diff --git a/homeassistant/components/otbr/websocket_api.py b/homeassistant/components/otbr/websocket_api.py index 9b7e46bc362..577f9cc381d 100644 --- a/homeassistant/components/otbr/websocket_api.py +++ b/homeassistant/components/otbr/websocket_api.py @@ -17,7 +17,7 @@ from homeassistant.components.thread import async_add_dataset, async_get_dataset from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from .const import DEFAULT_CHANNEL, DOMAIN +from .const import DATA_OTBR, DEFAULT_CHANNEL, DOMAIN from .util import ( OTBRData, compose_default_network_name, @@ -47,11 +47,11 @@ async def websocket_info( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get OTBR info.""" - if DOMAIN not in hass.data: + if DATA_OTBR not in hass.data: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data: OTBRData = hass.data[DOMAIN] + data = hass.data[DATA_OTBR] try: border_agent_id = await data.get_border_agent_id() @@ -99,11 +99,11 @@ def async_get_otbr_data( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Fetch OTBR data and pass to orig_func.""" - if DOMAIN not in hass.data: + if DATA_OTBR not in hass.data: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data: OTBRData = hass.data[DOMAIN] + data = hass.data[DATA_OTBR] try: 
extended_address = await data.get_extended_address() diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index ad3d6540b47..e842f40ad4c 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -130,8 +130,7 @@ async def test_async_change_channel_non_matching_url( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test async_change_channel when otbr is not configured.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - data.url = OTBR_NON_MULTIPAN_URL + hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL with patch("python_otbr_api.OTBR.set_channel") as mock_set_channel: await otbr_silabs_multiprotocol.async_change_channel(hass, 16, delay=0) mock_set_channel.assert_not_awaited() @@ -188,8 +187,7 @@ async def test_async_get_channel_non_matching_url( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test async_change_channel when otbr is not configured.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - data.url = OTBR_NON_MULTIPAN_URL + hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None mock_get_active_dataset.assert_not_awaited() @@ -203,8 +201,7 @@ async def test_async_using_multipan( hass: HomeAssistant, otbr_config_entry_multipan, url: str, expected: bool ) -> None: """Test async_change_channel when otbr is not configured.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - data.url = url + hass.data[otbr.DATA_OTBR].url = url assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is expected @@ -219,6 +216,5 @@ async def test_async_using_multipan_non_matching_url( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test async_change_channel when otbr is not configured.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - data.url = OTBR_NON_MULTIPAN_URL + hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index 3b1edcfeb5b..ec325b8819e 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -33,15 +33,13 @@ async def test_get_allowed_channel( async def test_factory_reset(hass: HomeAssistant, otbr_config_entry_multipan) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - with ( patch("python_otbr_api.OTBR.factory_reset") as factory_reset_mock, patch( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await data.factory_reset() + await hass.data[otbr.DATA_OTBR].factory_reset() delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() @@ -51,8 +49,6 @@ async def test_factory_reset_not_supported( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -62,7 +58,7 @@ async def test_factory_reset_not_supported( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await data.factory_reset() + await hass.data[otbr.DATA_OTBR].factory_reset() delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() @@ -72,8 +68,6 @@ async def test_factory_reset_error_1( hass: HomeAssistant, 
otbr_config_entry_multipan ) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -86,7 +80,7 @@ async def test_factory_reset_error_1( HomeAssistantError, ), ): - await data.factory_reset() + await hass.data[otbr.DATA_OTBR].factory_reset() delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() @@ -96,8 +90,6 @@ async def test_factory_reset_error_2( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -111,7 +103,7 @@ async def test_factory_reset_error_2( HomeAssistantError, ), ): - await data.factory_reset() + await hass.data[otbr.DATA_OTBR].factory_reset() delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() From 24f0c881238fec3c4c80132aee1386212e20328c Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Tue, 20 Aug 2024 00:51:44 -0700 Subject: [PATCH 0915/1635] Bump python-roborock to 2.6.0 (#124268) --- homeassistant/components/roborock/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/roborock/manifest.json b/homeassistant/components/roborock/manifest.json index 7a80a9083e9..3bb3b9b2046 100644 --- a/homeassistant/components/roborock/manifest.json +++ b/homeassistant/components/roborock/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_polling", "loggers": ["roborock"], "requirements": [ - "python-roborock==2.5.0", + "python-roborock==2.6.0", "vacuum-map-parser-roborock==0.1.2" ] } diff --git a/requirements_all.txt b/requirements_all.txt index 04d89fb9ba1..f640bb15e5b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2344,7 +2344,7 @@ python-rabbitair==0.0.8 python-ripple-api==0.0.3 # homeassistant.components.roborock -python-roborock==2.5.0 +python-roborock==2.6.0 # homeassistant.components.smarttub python-smarttub==0.0.36 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5a0ca106973..f4db2963fe2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1856,7 +1856,7 @@ python-picnic-api==1.1.0 python-rabbitair==0.0.8 # homeassistant.components.roborock -python-roborock==2.5.0 +python-roborock==2.6.0 # homeassistant.components.smarttub python-smarttub==0.0.36 From e81aa1cdb28d5f81856eb61c7baa2f8bde005f8a Mon Sep 17 00:00:00 2001 From: mvn23 Date: Tue, 20 Aug 2024 10:20:27 +0200 Subject: [PATCH 0916/1635] Update opentherm_gw.binary_sensor to use entity_description (#121969) * Update opentherm_gw.binary_sensor to use entity_description * Move binary_sensor related code to binary_sensor.py Move common entity code to entity.py * Remove unused logger from binary_sensor.py * Add type hints Address feedback --- .../components/opentherm_gw/binary_sensor.py | 350 +++++++++++++++--- .../components/opentherm_gw/const.py | 163 -------- .../components/opentherm_gw/entity.py | 68 ++++ 3 files changed, 359 insertions(+), 222 deletions(-) create mode 100644 homeassistant/components/opentherm_gw/entity.py diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index 7c3760653e8..2d55cd9ddfb 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -1,25 +1,287 @@ """Support for OpenTherm 
Gateway binary sensors.""" -import logging +from dataclasses import dataclass -from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity +from pyotgw import vars as gw_vars + +from homeassistant.components.binary_sensor import ( + ENTITY_ID_FORMAT, + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN -from .const import ( - BINARY_SENSOR_INFO, - DATA_GATEWAYS, - DATA_OPENTHERM_GW, - TRANSLATE_SOURCE, -) +from . import OpenThermGatewayDevice +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW +from .entity import OpenThermEntity, OpenThermEntityDescription -_LOGGER = logging.getLogger(__name__) + +@dataclass(frozen=True, kw_only=True) +class OpenThermBinarySensorEntityDescription( + BinarySensorEntityDescription, OpenThermEntityDescription +): + """Describes opentherm_gw binary sensor entity.""" + + +BINARY_SENSOR_INFO: tuple[ + tuple[list[str], OpenThermBinarySensorEntityDescription], ... +] = ( + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH_ENABLED, + friendly_name_format="Thermostat Central Heating {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_DHW_ENABLED, + friendly_name_format="Thermostat Hot Water {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_COOLING_ENABLED, + friendly_name_format="Thermostat Cooling {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_OTC_ENABLED, + friendly_name_format="Thermostat Outside Temperature Correction {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH2_ENABLED, + friendly_name_format="Thermostat Central Heating 2 {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FAULT_IND, + friendly_name_format="Boiler Fault {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_ACTIVE, + friendly_name_format="Boiler Central Heating {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_ACTIVE, + friendly_name_format="Boiler Hot Water {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FLAME_ON, + friendly_name_format="Boiler Flame {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, + friendly_name_format="Boiler Cooling {}", + device_class=BinarySensorDeviceClass.COLD, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + 
OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_ACTIVE, + friendly_name_format="Boiler Central Heating 2 {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DIAG_IND, + friendly_name_format="Boiler Diagnostics {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_PRESENT, + friendly_name_format="Boiler Hot Water Present {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CONTROL_TYPE, + friendly_name_format="Boiler Control Type {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, + friendly_name_format="Boiler Cooling Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_CONFIG, + friendly_name_format="Boiler Hot Water Configuration {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, + friendly_name_format="Boiler Pump Commands Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_PRESENT, + friendly_name_format="Boiler Central Heating 2 Present {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_SERVICE_REQ, + friendly_name_format="Boiler Service Required {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_REMOTE_RESET, + friendly_name_format="Boiler Remote Reset Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, + friendly_name_format="Boiler Low Water Pressure {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_GAS_FAULT, + friendly_name_format="Boiler Gas Fault {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, + friendly_name_format="Boiler Air Pressure Fault {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, + friendly_name_format="Boiler Water Overtemperature {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_DHW, + friendly_name_format="Remote Hot Water Setpoint Transfer Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, + friendly_name_format="Remote Maximum Central Heating Setpoint Write Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_DHW, + friendly_name_format="Remote Hot Water Setpoint Write 
Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_MAX_CH, + friendly_name_format="Remote Central Heating Setpoint Write Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_MAN_PRIO, + friendly_name_format="Remote Override Manual Change Priority {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_AUTO_PRIO, + friendly_name_format="Remote Override Program Change Priority {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_A_STATE, + friendly_name_format="Gateway GPIO A {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_B_STATE, + friendly_name_format="Gateway GPIO B {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_IGNORE_TRANSITIONS, + friendly_name_format="Gateway Ignore Transitions {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_OVRD_HB, + friendly_name_format="Gateway Override High Byte {}", + ), + ), +) async def async_setup_entry( @@ -31,65 +293,35 @@ async def async_setup_entry( gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( - OpenThermBinarySensor( - gw_dev, - var, - source, - info[0], - info[1], - ) - for var, info in BINARY_SENSOR_INFO.items() - for source in info[2] + OpenThermBinarySensor(gw_dev, source, description) + for sources, description in BINARY_SENSOR_INFO + for source in sources ) -class OpenThermBinarySensor(BinarySensorEntity): +class OpenThermBinarySensor(OpenThermEntity, BinarySensorEntity): """Represent an OpenTherm Gateway binary sensor.""" - _attr_should_poll = False - _attr_entity_registry_enabled_default = False - _attr_available = False + entity_description: OpenThermBinarySensorEntityDescription - def __init__(self, gw_dev, var, source, device_class, friendly_name_format): + def __init__( + self, + gw_dev: OpenThermGatewayDevice, + source: str, + description: OpenThermBinarySensorEntityDescription, + ) -> None: """Initialize the binary sensor.""" self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, f"{var}_{source}_{gw_dev.gw_id}", hass=gw_dev.hass + ENTITY_ID_FORMAT, + f"{description.key}_{source}_{gw_dev.gw_id}", + hass=gw_dev.hass, ) - self._gateway = gw_dev - self._var = var - self._source = source - self._attr_device_class = device_class - if TRANSLATE_SOURCE[source] is not None: - friendly_name_format = ( - f"{friendly_name_format} ({TRANSLATE_SOURCE[source]})" - ) - self._attr_name = friendly_name_format.format(gw_dev.name) - self._unsub_updates = None - self._attr_unique_id = f"{gw_dev.gw_id}-{source}-{var}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_dev.gw_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, - ) - - async def async_added_to_hass(self) -> None: - """Subscribe to updates from the component.""" - _LOGGER.debug("Added OpenTherm Gateway binary sensor %s", self._attr_name) - self._unsub_updates = async_dispatcher_connect( - self.hass, self._gateway.update_signal, self.receive_report - ) - - async def async_will_remove_from_hass(self) -> None: - """Unsubscribe from updates from the component.""" - _LOGGER.debug("Removing OpenTherm Gateway binary sensor 
%s", self._attr_name) - self._unsub_updates() + super().__init__(gw_dev, source, description) @callback - def receive_report(self, status): + def receive_report(self, status: dict[str, dict]) -> None: """Handle status updates from the component.""" self._attr_available = self._gateway.connected - state = status[self._source].get(self._var) + state = status[self._source].get(self.entity_description.key) self._attr_is_on = None if state is None else bool(state) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/const.py b/homeassistant/components/opentherm_gw/const.py index 6b0a27aec92..1d007d86181 100644 --- a/homeassistant/components/opentherm_gw/const.py +++ b/homeassistant/components/opentherm_gw/const.py @@ -4,7 +4,6 @@ from __future__ import annotations import pyotgw.vars as gw_vars -from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( PERCENTAGE, @@ -57,168 +56,6 @@ TRANSLATE_SOURCE = { SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 -BINARY_SENSOR_INFO: dict[str, list] = { - # [device_class, friendly_name format, [status source, ...]] - gw_vars.DATA_MASTER_CH_ENABLED: [ - None, - "Thermostat Central Heating {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_DHW_ENABLED: [ - None, - "Thermostat Hot Water {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_COOLING_ENABLED: [ - None, - "Thermostat Cooling {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_OTC_ENABLED: [ - None, - "Thermostat Outside Temperature Correction {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_CH2_ENABLED: [ - None, - "Thermostat Central Heating 2 {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_FAULT_IND: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Fault {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH_ACTIVE: [ - BinarySensorDeviceClass.HEAT, - "Boiler Central Heating {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_ACTIVE: [ - BinarySensorDeviceClass.HEAT, - "Boiler Hot Water {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_FLAME_ON: [ - BinarySensorDeviceClass.HEAT, - "Boiler Flame {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_COOLING_ACTIVE: [ - BinarySensorDeviceClass.COLD, - "Boiler Cooling {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH2_ACTIVE: [ - BinarySensorDeviceClass.HEAT, - "Boiler Central Heating 2 {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DIAG_IND: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Diagnostics {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_PRESENT: [ - None, - "Boiler Hot Water Present {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CONTROL_TYPE: [ - None, - "Boiler Control Type {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_COOLING_SUPPORTED: [ - None, - "Boiler Cooling Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_CONFIG: [ - None, - "Boiler Hot Water Configuration {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP: [ - None, - "Boiler Pump Commands Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH2_PRESENT: [ - None, - "Boiler Central Heating 2 Present {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - 
], - gw_vars.DATA_SLAVE_SERVICE_REQ: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Service Required {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_REMOTE_RESET: [ - None, - "Boiler Remote Reset Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_LOW_WATER_PRESS: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Low Water Pressure {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_GAS_FAULT: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Gas Fault {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_AIR_PRESS_FAULT: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Air Pressure Fault {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_WATER_OVERTEMP: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Water Overtemperature {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_TRANSFER_DHW: [ - None, - "Remote Hot Water Setpoint Transfer Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_TRANSFER_MAX_CH: [ - None, - "Remote Maximum Central Heating Setpoint Write Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_RW_DHW: [ - None, - "Remote Hot Water Setpoint Write Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_RW_MAX_CH: [ - None, - "Remote Central Heating Setpoint Write Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROVRD_MAN_PRIO: [ - None, - "Remote Override Manual Change Priority {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROVRD_AUTO_PRIO: [ - None, - "Remote Override Program Change Priority {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.OTGW_GPIO_A_STATE: [None, "Gateway GPIO A {}", [gw_vars.OTGW]], - gw_vars.OTGW_GPIO_B_STATE: [None, "Gateway GPIO B {}", [gw_vars.OTGW]], - gw_vars.OTGW_IGNORE_TRANSITIONS: [ - None, - "Gateway Ignore Transitions {}", - [gw_vars.OTGW], - ], - gw_vars.OTGW_OVRD_HB: [None, "Gateway Override High Byte {}", [gw_vars.OTGW]], -} - SENSOR_INFO: dict[str, list] = { # [device_class, unit, friendly_name, suggested_display_precision, [status source, ...]] gw_vars.DATA_CONTROL_SETPOINT: [ diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py new file mode 100644 index 00000000000..dd023896f70 --- /dev/null +++ b/homeassistant/components/opentherm_gw/entity.py @@ -0,0 +1,68 @@ +"""Common opentherm_gw entity properties.""" + +import logging + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity, EntityDescription + +from . 
import OpenThermGatewayDevice +from .const import DOMAIN, TRANSLATE_SOURCE + +_LOGGER = logging.getLogger(__name__) + + +class OpenThermEntityDescription(EntityDescription): + """Describe common opentherm_gw entity properties.""" + + friendly_name_format: str + + +class OpenThermEntity(Entity): + """Represent an OpenTherm Gateway entity.""" + + _attr_should_poll = False + _attr_entity_registry_enabled_default = False + _attr_available = False + entity_description: OpenThermEntityDescription + + def __init__( + self, + gw_dev: OpenThermGatewayDevice, + source: str, + description: OpenThermEntityDescription, + ) -> None: + """Initialize the entity.""" + self.entity_description = description + self._gateway = gw_dev + self._source = source + friendly_name_format = ( + f"{description.friendly_name_format} ({TRANSLATE_SOURCE[source]})" + if TRANSLATE_SOURCE[source] is not None + else description.friendly_name_format + ) + self._attr_name = friendly_name_format.format(gw_dev.name) + self._attr_unique_id = f"{gw_dev.gw_id}-{source}-{description.key}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, gw_dev.gw_id)}, + manufacturer="Schelte Bron", + model="OpenTherm Gateway", + name=gw_dev.name, + sw_version=gw_dev.gw_version, + ) + + async def async_added_to_hass(self) -> None: + """Subscribe to updates from the component.""" + _LOGGER.debug("Added OpenTherm Gateway entity %s", self._attr_name) + self.async_on_remove( + async_dispatcher_connect( + self.hass, self._gateway.update_signal, self.receive_report + ) + ) + + @callback + def receive_report(self, status: dict[str, dict]) -> None: + """Handle status updates from the component.""" + # Must be implemented at the platform level. + raise NotImplementedError From d3deaa6a82ce96618a25cc0b868d7f957f116a72 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 20 Aug 2024 10:33:02 +0200 Subject: [PATCH 0917/1635] Improve otbr error handling (#124277) --- homeassistant/components/otbr/config_flow.py | 6 +++++- homeassistant/components/otbr/util.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/otbr/config_flow.py b/homeassistant/components/otbr/config_flow.py index c13a3f36f99..8cffc0a99e6 100644 --- a/homeassistant/components/otbr/config_flow.py +++ b/homeassistant/components/otbr/config_flow.py @@ -168,7 +168,11 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): try: await self._connect_and_set_dataset(url) - except python_otbr_api.OTBRError as exc: + except ( + python_otbr_api.OTBRError, + aiohttp.ClientError, + TimeoutError, + ) as exc: _LOGGER.warning("Failed to communicate with OTBR@%s: %s", url, exc) return self.async_abort(reason="unknown") diff --git a/homeassistant/components/otbr/util.py b/homeassistant/components/otbr/util.py index 16cf3b60e37..d426ca9ba17 100644 --- a/homeassistant/components/otbr/util.py +++ b/homeassistant/components/otbr/util.py @@ -9,6 +9,7 @@ import logging import random from typing import Any, Concatenate, cast +import aiohttp import python_otbr_api from python_otbr_api import PENDING_DATASET_DELAY_TIMER, tlv_parser from python_otbr_api.pskc import compute_pskc @@ -67,7 +68,7 @@ def _handle_otbr_error[**_P, _R]( async def _func(self: OTBRData, *args: _P.args, **kwargs: _P.kwargs) -> _R: try: return await func(self, *args, **kwargs) - except python_otbr_api.OTBRError as exc: + except (python_otbr_api.OTBRError, aiohttp.ClientError, TimeoutError) as exc: raise HomeAssistantError("Failed to call OTBR API") from exc return _func From 
b4648136c5428a2161ec795fb6fa59fc08c97d49 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 20 Aug 2024 10:33:16 +0200 Subject: [PATCH 0918/1635] Deduplicate otbr tests (#124270) --- tests/components/otbr/conftest.py | 38 ++++++++++++++- tests/components/otbr/test_init.py | 76 ++++++++---------------------- 2 files changed, 56 insertions(+), 58 deletions(-) diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index 56f29bdc79b..8344c734572 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -1,6 +1,8 @@ """Test fixtures for the Open Thread Border Router integration.""" -from unittest.mock import MagicMock, Mock, patch +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -18,6 +20,40 @@ from . import ( from tests.common import MockConfigEntry +@pytest.fixture(name="dataset") +def dataset_fixture() -> Any: + """Return the discovery info from the supervisor.""" + return DATASET_CH16 + + +@pytest.fixture(name="get_active_dataset_tlvs") +def get_active_dataset_tlvs_fixture(dataset: Any) -> Generator[AsyncMock]: + """Mock get_active_dataset_tlvs.""" + with patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset + ) as get_active_dataset_tlvs: + yield get_active_dataset_tlvs + + +@pytest.fixture(name="get_border_agent_id") +def get_border_agent_id_fixture() -> Generator[AsyncMock]: + """Mock get_border_agent_id.""" + with patch( + "python_otbr_api.OTBR.get_border_agent_id", return_value=TEST_BORDER_AGENT_ID + ) as get_border_agent_id: + yield get_border_agent_id + + +@pytest.fixture(name="get_extended_address") +def get_extended_address_fixture() -> Generator[AsyncMock]: + """Mock get_extended_address.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ) as get_extended_address: + yield get_extended_address + + @pytest.fixture(name="otbr_config_entry_multipan") async def otbr_config_entry_multipan_fixture(hass: HomeAssistant) -> None: """Mock Open Thread Border Router config entry.""" diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index 5cf29662be3..7d54dd16f56 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -38,6 +38,15 @@ DATASET_NO_CHANNEL = bytes.fromhex( ) +@pytest.fixture(name="enable_mocks", autouse=True) +def enable_mocks_fixture( + get_active_dataset_tlvs: AsyncMock, + get_border_agent_id: AsyncMock, + get_extended_address: AsyncMock, +) -> None: + """Enable API mocks.""" + + async def test_import_dataset( hass: HomeAssistant, mock_async_zeroconf: MagicMock, @@ -66,17 +75,6 @@ async def test_import_dataset( config_entry.add_to_hass(hass) with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.BORDER_AGENT_DISCOVERY_TIMEOUT", 0.1, @@ -143,17 +141,6 @@ async def test_import_share_radio_channel_collision( ) config_entry.add_to_hass(hass) with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - 
"python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -193,15 +180,6 @@ async def test_import_share_radio_no_channel_collision( ) config_entry.add_to_hass(hass) with ( - patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -238,15 +216,6 @@ async def test_import_insecure_dataset( ) config_entry.add_to_hass(hass) with ( - patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -272,7 +241,9 @@ async def test_import_insecure_dataset( aiohttp.ClientError, ], ) -async def test_config_entry_not_ready(hass: HomeAssistant, error) -> None: +async def test_config_entry_not_ready( + hass: HomeAssistant, get_active_dataset_tlvs: AsyncMock, error +) -> None: """Test raising ConfigEntryNotReady .""" config_entry = MockConfigEntry( @@ -282,11 +253,13 @@ async def test_config_entry_not_ready(hass: HomeAssistant, error) -> None: title="My OTBR", ) config_entry.add_to_hass(hass) - with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): - assert not await hass.config_entries.async_setup(config_entry.entry_id) + get_active_dataset_tlvs.side_effect = error + assert not await hass.config_entries.async_setup(config_entry.entry_id) -async def test_border_agent_id_not_supported(hass: HomeAssistant) -> None: +async def test_border_agent_id_not_supported( + hass: HomeAssistant, get_border_agent_id: AsyncMock +) -> None: """Test border router does not support border agent ID.""" config_entry = MockConfigEntry( @@ -296,16 +269,8 @@ async def test_border_agent_id_not_supported(hass: HomeAssistant) -> None: title="My OTBR", ) config_entry.add_to_hass(hass) - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - side_effect=python_otbr_api.GetBorderAgentIdNotSupportedError, - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) + get_border_agent_id.side_effect = python_otbr_api.GetBorderAgentIdNotSupportedError + assert not await hass.config_entries.async_setup(config_entry.entry_id) async def test_config_entry_update(hass: HomeAssistant) -> None: @@ -369,9 +334,6 @@ async def test_remove_extra_entries( config_entry2.add_to_hass(hass) assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), patch("homeassistant.components.otbr.util.compute_pskc"), ): # Patch to speed up tests assert await async_setup_component(hass, otbr.DOMAIN, {}) From 98a007cb2f04eaba6bc28651d964816f45e57724 Mon Sep 17 00:00:00 2001 From: TimL Date: Tue, 20 Aug 2024 18:44:06 +1000 Subject: [PATCH 0919/1635] New Integration: SMLIGHT SLZB-06 Adapters Integration (#118675) * Initial SMLIGHT integration Signed-off-by: 
Tim Lunn
* Generated content
Signed-off-by: Tim Lunn
* Cleanup LOGGING
* Use runtime data
* Call super first
* coordinator instance attributes
* Move coordinatorEntity and attr to base class
* cleanup sensors
* update strings to use sentence case
* Improve reauth flow on incorrect credentials
* Use fixture for config_flow tests and test to completion
* Split uptime handling into a new uptime sensor entity
* Drop server side events and internet callback, will bring this back with binary sensor Platform
* consolidate coordinator setup
* entity always include connections
* get_hostname tweak
* Add tests for init, coordinator and sensor
* Use custom type SmConfigEntry
* update sensor snapshot
* Drop reauth flow for later PR
* Use _async_setup for initial setup
* drop internet to be set later
* sensor fixes
* config flow re
* typing fixes
* Bump pysmlight dependency to 0.0.12
* don't trigger invalid auth message when first loading auth step
* Merge uptime sensors back into main sensor class
* clarify uptime handling
* Apply suggestions from code review
Co-authored-by: Joost Lekkerkerker
* address review comments
* pass host as parameter to the dataCoordinator
* drop uptime sensors for a later PR
* update sensor test snapshot
* move coordinator unique_id to _async_setup
* fix CI
* Apply suggestions from code review
Co-authored-by: Joost Lekkerkerker
* drop invalid_auth test tag
* use snapshot_platform, update fixtures
* Finish all tests with abort or create entry
* drop coordinator tests and remove hostname support
* add test for update failure on connection error
* use freezer for update_failed test
* fix pysmlight imports
---------
Signed-off-by: Tim Lunn
Co-authored-by: Tim Lunn
Co-authored-by: Joost Lekkerkerker
---
 CODEOWNERS | 2 +
 homeassistant/components/smlight/__init__.py | 30 +
 .../components/smlight/config_flow.py | 151 ++++
 homeassistant/components/smlight/const.py | 11 +
 .../components/smlight/coordinator.py | 71 ++
 homeassistant/components/smlight/entity.py | 31 +
 .../components/smlight/manifest.json | 15 +
 homeassistant/components/smlight/sensor.py | 103 +++
 homeassistant/components/smlight/strings.json | 49 ++
 homeassistant/generated/config_flows.py | 1 +
 homeassistant/generated/integrations.json | 6 +
 homeassistant/generated/zeroconf.py | 3 +
 requirements_all.txt | 3 +
 requirements_test_all.txt | 3 +
 tests/components/smlight/__init__.py | 1 +
 tests/components/smlight/conftest.py | 74 ++
 tests/components/smlight/fixtures/info.json | 16 +
 .../components/smlight/fixtures/sensors.json | 14 +
 .../smlight/snapshots/test_init.ambr | 33 +
 .../smlight/snapshots/test_sensor.ambr | 741 ++++++++++++++++++
 tests/components/smlight/test_config_flow.py | 365 +++++++++
 tests/components/smlight/test_init.py | 94 +++
 tests/components/smlight/test_sensor.py | 54 ++
 23 files changed, 1871 insertions(+)
 create mode 100644 homeassistant/components/smlight/__init__.py
 create mode 100644 homeassistant/components/smlight/config_flow.py
 create mode 100644 homeassistant/components/smlight/const.py
 create mode 100644 homeassistant/components/smlight/coordinator.py
 create mode 100644 homeassistant/components/smlight/entity.py
 create mode 100644 homeassistant/components/smlight/manifest.json
 create mode 100644 homeassistant/components/smlight/sensor.py
 create mode 100644 homeassistant/components/smlight/strings.json
 create mode 100644 tests/components/smlight/__init__.py
 create mode 100644 tests/components/smlight/conftest.py
 create mode 100644 tests/components/smlight/fixtures/info.json
 create mode 100644
tests/components/smlight/fixtures/sensors.json create mode 100644 tests/components/smlight/snapshots/test_init.ambr create mode 100644 tests/components/smlight/snapshots/test_sensor.ambr create mode 100644 tests/components/smlight/test_config_flow.py create mode 100644 tests/components/smlight/test_init.py create mode 100644 tests/components/smlight/test_sensor.py diff --git a/CODEOWNERS b/CODEOWNERS index 367c6eee2bf..1618b18a8be 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1329,6 +1329,8 @@ build.json @home-assistant/supervisor /homeassistant/components/smarty/ @z0mbieprocess /homeassistant/components/smhi/ @gjohansson-ST /tests/components/smhi/ @gjohansson-ST +/homeassistant/components/smlight/ @tl-sl +/tests/components/smlight/ @tl-sl /homeassistant/components/sms/ @ocalvo /tests/components/sms/ @ocalvo /homeassistant/components/snapcast/ @luar123 diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py new file mode 100644 index 00000000000..16eb60b9c87 --- /dev/null +++ b/homeassistant/components/smlight/__init__.py @@ -0,0 +1,30 @@ +"""SMLIGHT SLZB Zigbee device integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SmDataUpdateCoordinator + +PLATFORMS: list[Platform] = [ + Platform.SENSOR, +] +type SmConfigEntry = ConfigEntry[SmDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: + """Set up SMLIGHT Zigbee from a config entry.""" + coordinator = SmDataUpdateCoordinator(hass, entry.data[CONF_HOST]) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py new file mode 100644 index 00000000000..1b8cc4efeb1 --- /dev/null +++ b/homeassistant/components/smlight/config_flow.py @@ -0,0 +1,151 @@ +"""Config flow for SMLIGHT Zigbee integration.""" + +from __future__ import annotations + +from typing import Any + +from pysmlight import Api2 +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import format_mac + +from .const import DOMAIN + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + +STEP_AUTH_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for SMLIGHT Zigbee.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + self.client: Api2 + self.host: str | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + 
if user_input is not None: + host = user_input[CONF_HOST] + self.client = Api2(host, session=async_get_clientsession(self.hass)) + self.host = host + + try: + if not await self._async_check_auth_required(user_input): + return await self._async_complete_entry(user_input) + except SmlightConnectionError: + errors["base"] = "cannot_connect" + except SmlightAuthError: + return await self.async_step_auth() + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle authentication to SLZB-06 device.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + if not await self._async_check_auth_required(user_input): + return await self._async_complete_entry(user_input) + except SmlightConnectionError: + return self.async_abort(reason="cannot_connect") + except SmlightAuthError: + errors["base"] = "invalid_auth" + + return self.async_show_form( + step_id="auth", data_schema=STEP_AUTH_DATA_SCHEMA, errors=errors + ) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle a discovered Lan coordinator.""" + local_name = discovery_info.hostname[:-1] + node_name = local_name.removesuffix(".local") + + self.host = local_name + self.context["title_placeholders"] = {CONF_NAME: node_name} + self.client = Api2(self.host, session=async_get_clientsession(self.hass)) + + mac = discovery_info.properties.get("mac") + # fallback for legacy firmware + if mac is None: + info = await self.client.get_info() + mac = info.MAC + await self.async_set_unique_id(format_mac(mac)) + self._abort_if_unique_id_configured() + + return await self.async_step_confirm_discovery() + + async def async_step_confirm_discovery( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle discovery confirm.""" + errors: dict[str, str] = {} + + if user_input is not None: + user_input[CONF_HOST] = self.host + try: + if not await self._async_check_auth_required(user_input): + return await self._async_complete_entry(user_input) + + except SmlightConnectionError: + return self.async_abort(reason="cannot_connect") + + except SmlightAuthError: + return await self.async_step_auth() + + self._set_confirm_only() + + return self.async_show_form( + step_id="confirm_discovery", + description_placeholders={"host": self.host}, + errors=errors, + ) + + async def _async_check_auth_required(self, user_input: dict[str, Any]) -> bool: + """Check if auth required and attempt to authenticate.""" + if await self.client.check_auth_needed(): + if user_input.get(CONF_USERNAME) and user_input.get(CONF_PASSWORD): + return not await self.client.authenticate( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD] + ) + raise SmlightAuthError + return False + + async def _async_complete_entry( + self, user_input: dict[str, Any] + ) -> ConfigFlowResult: + info = await self.client.get_info() + await self.async_set_unique_id(format_mac(info.MAC)) + self._abort_if_unique_id_configured() + + if user_input.get(CONF_HOST) is None: + user_input[CONF_HOST] = self.host + + assert info.model is not None + return self.async_create_entry(title=info.model, data=user_input) diff --git a/homeassistant/components/smlight/const.py b/homeassistant/components/smlight/const.py new file mode 100644 index 00000000000..de3270fe3be --- /dev/null +++ b/homeassistant/components/smlight/const.py @@ -0,0 +1,11 @@ +"""Constants for the 
SMLIGHT Zigbee integration.""" + +from datetime import timedelta +import logging + +DOMAIN = "smlight" + +ATTR_MANUFACTURER = "SMLIGHT" + +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(seconds=300) diff --git a/homeassistant/components/smlight/coordinator.py b/homeassistant/components/smlight/coordinator.py new file mode 100644 index 00000000000..6a29f14fafd --- /dev/null +++ b/homeassistant/components/smlight/coordinator.py @@ -0,0 +1,71 @@ +"""DataUpdateCoordinator for Smlight.""" + +from dataclasses import dataclass + +from pysmlight import Api2, Info, Sensors +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +@dataclass +class SmData: + """SMLIGHT data stored in the DataUpdateCoordinator.""" + + sensors: Sensors + info: Info + + +class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): + """Class to manage fetching SMLIGHT data.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, host: str) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + LOGGER, + name=f"{DOMAIN}_{host}", + update_interval=SCAN_INTERVAL, + ) + + self.unique_id: str | None = None + self.client = Api2(host=host, session=async_get_clientsession(hass)) + + async def _async_setup(self) -> None: + """Authenticate if needed during initial setup.""" + if await self.client.check_auth_needed(): + if ( + CONF_USERNAME in self.config_entry.data + and CONF_PASSWORD in self.config_entry.data + ): + try: + await self.client.authenticate( + self.config_entry.data[CONF_USERNAME], + self.config_entry.data[CONF_PASSWORD], + ) + except SmlightAuthError as err: + LOGGER.error("Failed to authenticate: %s", err) + raise ConfigEntryError from err + + info = await self.client.get_info() + self.unique_id = format_mac(info.MAC) + + async def _async_update_data(self) -> SmData: + """Fetch data from the SMLIGHT device.""" + try: + return SmData( + sensors=await self.client.get_sensors(), + info=await self.client.get_info(), + ) + except SmlightConnectionError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/smlight/entity.py b/homeassistant/components/smlight/entity.py new file mode 100644 index 00000000000..50767d3bf74 --- /dev/null +++ b/homeassistant/components/smlight/entity.py @@ -0,0 +1,31 @@ +"""Base class for all SMLIGHT entities.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTR_MANUFACTURER +from .coordinator import SmDataUpdateCoordinator + + +class SmEntity(CoordinatorEntity[SmDataUpdateCoordinator]): + """Base class for all SMLight entities.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SmDataUpdateCoordinator) -> None: + """Initialize entity with device.""" + super().__init__(coordinator) + mac = format_mac(coordinator.data.info.MAC) + self._attr_device_info = DeviceInfo( + 
configuration_url=f"http://{coordinator.client.host}", + connections={(CONNECTION_NETWORK_MAC, mac)}, + manufacturer=ATTR_MANUFACTURER, + model=coordinator.data.info.model, + sw_version=f"core: {coordinator.data.info.sw_version} / zigbee: {coordinator.data.info.zb_version}", + ) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json new file mode 100644 index 00000000000..0dbd25e90bd --- /dev/null +++ b/homeassistant/components/smlight/manifest.json @@ -0,0 +1,15 @@ +{ + "domain": "smlight", + "name": "SMLIGHT SLZB", + "codeowners": ["@tl-sl"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/smlight", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["pysmlight==0.0.12"], + "zeroconf": [ + { + "type": "_slzb-06._tcp.local." + } + ] +} diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py new file mode 100644 index 00000000000..d9c03760fb8 --- /dev/null +++ b/homeassistant/components/smlight/sensor.py @@ -0,0 +1,103 @@ +"""Support for SLZB-06 sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from pysmlight import Sensors + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import EntityCategory, UnitOfInformation, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SmConfigEntry +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + + +@dataclass(frozen=True, kw_only=True) +class SmSensorEntityDescription(SensorEntityDescription): + """Class describing SMLIGHT sensor entities.""" + + entity_category = EntityCategory.DIAGNOSTIC + value_fn: Callable[[Sensors], float | None] + + +SENSORS = [ + SmSensorEntityDescription( + key="core_temperature", + translation_key="core_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.esp32_temp, + ), + SmSensorEntityDescription( + key="zigbee_temperature", + translation_key="zigbee_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.zb_temp, + ), + SmSensorEntityDescription( + key="ram_usage", + translation_key="ram_usage", + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.KILOBYTES, + entity_registry_enabled_default=False, + value_fn=lambda x: x.ram_usage, + ), + SmSensorEntityDescription( + key="fs_usage", + translation_key="fs_usage", + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.KILOBYTES, + entity_registry_enabled_default=False, + value_fn=lambda x: x.fs_used, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SMLIGHT sensor based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + SmSensorEntity(coordinator, description) for description in SENSORS + ) + + +class SmSensorEntity(SmEntity, SensorEntity): + """Representation of a slzb 
sensor.""" + + entity_description: SmSensorEntityDescription + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmSensorEntityDescription, + ) -> None: + """Initiate slzb sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def native_value(self) -> float | None: + """Return the sensor value.""" + return self.entity_description.value_fn(self.coordinator.data.sensors) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json new file mode 100644 index 00000000000..02b9ebcc4e8 --- /dev/null +++ b/homeassistant/components/smlight/strings.json @@ -0,0 +1,49 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up SMLIGHT Zigbee Integration", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the SMLIGHT SLZB-06x device" + } + }, + "auth": { + "description": "Please enter the username and password", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "confirm_discovery": { + "description": "Do you want to set up SMLIGHT at {host}?" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "sensor": { + "zigbee_temperature": { + "name": "Zigbee chip temp" + }, + "core_temperature": { + "name": "Core chip temp" + }, + "fs_usage": { + "name": "Filesystem usage" + }, + "ram_usage": { + "name": "RAM usage" + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index b474fbaf54f..eecb3a76aac 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -525,6 +525,7 @@ FLOWS = { "smartthings", "smarttub", "smhi", + "smlight", "sms", "snapcast", "snooz", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 9e1250e3b60..b14531e1731 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5588,6 +5588,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "smlight": { + "name": "SMLIGHT SLZB", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "sms": { "name": "SMS notifications via GSM-modem", "integration_type": "hub", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 7cd60da2d0e..3e5e34090d1 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -747,6 +747,9 @@ ZEROCONF = { }, ], "_slzb-06._tcp.local.": [ + { + "domain": "smlight", + }, { "domain": "zha", "name": "slzb-06*", diff --git a/requirements_all.txt b/requirements_all.txt index f640bb15e5b..ce1db53c844 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2210,6 +2210,9 @@ pysmartthings==0.7.8 # homeassistant.components.edl21 pysml==0.0.12 +# homeassistant.components.smlight +pysmlight==0.0.12 + # homeassistant.components.snmp pysnmp==6.2.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f4db2963fe2..ea248a662bd 
100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1764,6 +1764,9 @@ pysmartthings==0.7.8 # homeassistant.components.edl21 pysml==0.0.12 +# homeassistant.components.smlight +pysmlight==0.0.12 + # homeassistant.components.snmp pysnmp==6.2.5 diff --git a/tests/components/smlight/__init__.py b/tests/components/smlight/__init__.py new file mode 100644 index 00000000000..37184226507 --- /dev/null +++ b/tests/components/smlight/__init__.py @@ -0,0 +1 @@ +"""Tests for the SMLIGHT Zigbee adapter integration.""" diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py new file mode 100644 index 00000000000..0338bf4b672 --- /dev/null +++ b/tests/components/smlight/conftest.py @@ -0,0 +1,74 @@ +"""Common fixtures for the SMLIGHT Zigbee tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from pysmlight.web import Info, Sensors +import pytest + +from homeassistant.components.smlight.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_json_object_fixture + +MOCK_HOST = "slzb-06.local" +MOCK_USERNAME = "test-user" +MOCK_PASSWORD = "test-pass" + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: MOCK_HOST, + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.smlight.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: + """Mock the SMLIGHT API client.""" + with ( + patch( + "homeassistant.components.smlight.coordinator.Api2", autospec=True + ) as smlight_mock, + patch("homeassistant.components.smlight.config_flow.Api2", new=smlight_mock), + ): + api = smlight_mock.return_value + api.host = MOCK_HOST + api.get_info.return_value = Info.from_dict( + load_json_object_fixture("info.json", DOMAIN) + ) + api.get_sensors.return_value = Sensors.from_dict( + load_json_object_fixture("sensors.json", DOMAIN) + ) + + api.check_auth_needed.return_value = False + api.authenticate.return_value = True + + yield api + + +async def setup_integration(hass: HomeAssistant, mock_config_entry: MockConfigEntry): + """Set up the integration.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/smlight/fixtures/info.json b/tests/components/smlight/fixtures/info.json new file mode 100644 index 00000000000..72bb7c1ed9b --- /dev/null +++ b/tests/components/smlight/fixtures/info.json @@ -0,0 +1,16 @@ +{ + "coord_mode": 0, + "device_ip": "192.168.1.161", + "fs_total": 3456, + "fw_channel": "dev", + "MAC": "AA:BB:CC:DD:EE:FF", + "model": "SLZB-06p7", + "ram_total": 296, + "sw_version": "v2.3.1.dev", + "wifi_mode": 0, + "zb_flash_size": 704, + "zb_hw": "CC2652P7", + "zb_ram_size": 152, + "zb_version": -1, + "zb_type": -1 +} diff --git a/tests/components/smlight/fixtures/sensors.json b/tests/components/smlight/fixtures/sensors.json new file mode 100644 index 
00000000000..0b2f9055e01 --- /dev/null +++ b/tests/components/smlight/fixtures/sensors.json @@ -0,0 +1,14 @@ +{ + "esp32_temp": 35.0, + "zb_temp": 32.7, + "uptime": 508125, + "socket_uptime": 127, + "ram_usage": 99, + "fs_used": 188, + "ethernet": true, + "wifi_connected": false, + "wifi_status": 255, + "disable_leds": false, + "night_mode": false, + "auto_zigbee": false +} diff --git a/tests/components/smlight/snapshots/test_init.ambr b/tests/components/smlight/snapshots/test_init.ambr new file mode 100644 index 00000000000..528a7b7b340 --- /dev/null +++ b/tests/components/smlight/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'Mock Title', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smlight/snapshots/test_sensor.ambr b/tests/components/smlight/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..0ff3d37b735 --- /dev/null +++ b/tests/components/smlight/snapshots/test_sensor.ambr @@ -0,0 +1,741 @@ +# serializer version: 1 +# name: test_sensors[sensor.mock_title_core_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_core_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_core_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Core chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_core_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.0', + }) +# --- +# name: test_sensors[sensor.mock_title_filesystem_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_filesystem_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filesystem usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'fs_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_filesystem_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title Filesystem usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_filesystem_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '188', + }) +# --- +# name: test_sensors[sensor.mock_title_ram_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_ram_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RAM usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ram_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_ram_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title RAM usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_ram_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99', + }) +# --- +# name: test_sensors[sensor.mock_title_zigbee_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_zigbee_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'zigbee_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_zigbee_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Zigbee chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_zigbee_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.7', + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Core chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Core chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.0', + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Core chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.0', + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filesystem usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fs_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 Filesystem usage', + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '188', + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 Filesystem usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '188', + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filesystem usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fs_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RAM usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ram_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 RAM usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99', + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 RAM usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99', + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage].1 + EntityRegistryEntrySnapshot({ 
+ 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RAM usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ram_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'zigbee_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Zigbee chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.7', + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Zigbee chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.7', + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'zigbee_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py new file mode 100644 index 00000000000..9a23a8de753 --- /dev/null +++ b/tests/components/smlight/test_config_flow.py @@ -0,0 +1,365 @@ +"""Test the SMLIGHT SLZB config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock, MagicMock + +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +import pytest + +from homeassistant.components import zeroconf +from homeassistant.components.smlight.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import MOCK_HOST, MOCK_PASSWORD, MOCK_USERNAME + +from tests.common import MockConfigEntry + +DISCOVERY_INFO = zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + hostname="slzb-06.local.", + name="mock_name", + port=6638, + properties={"mac": "AA:BB:CC:DD:EE:FF"}, + type="mock_type", +) + +DISCOVERY_INFO_LEGACY = zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + hostname="slzb-06.local.", + name="mock_name", + port=6638, + properties={}, + type="mock_type", +) + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test the full manual user flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "SLZB-06p7" + assert result2["data"] == { + CONF_HOST: MOCK_HOST, + } + assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_zeroconf_flow( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the zeroconf flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO + ) + + assert 
result["description_placeholders"] == {"host": MOCK_HOST} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0]["flow_id"] == result["flow_id"] + assert progress[0]["context"]["confirm_only"] is True + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["context"]["source"] == "zeroconf" + assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert result2["title"] == "SLZB-06p7" + assert result2["data"] == { + CONF_HOST: MOCK_HOST, + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +async def test_zeroconf_flow_auth( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full zeroconf flow including authentication.""" + mock_smlight_client.check_auth_needed.return_value = True + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO + ) + + assert result["description_placeholders"] == {"host": MOCK_HOST} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0]["flow_id"] == result["flow_id"] + assert progress[0]["context"]["confirm_only"] is True + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "auth" + + progress2 = hass.config_entries.flow.async_progress() + assert len(progress2) == 1 + assert progress2[0]["flow_id"] == result["flow_id"] + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["context"]["source"] == "zeroconf" + assert result3["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert result3["title"] == "SLZB-06p7" + assert result3["data"] == { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + CONF_HOST: MOCK_HOST, + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_user_device_exists_abort( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test we abort user flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_zeroconf_device_exists_abort( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test we abort zeroconf flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=DISCOVERY_INFO, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def 
test_user_invalid_auth( + hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle invalid auth.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test", + CONF_PASSWORD: "bad", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + assert result2["step_id"] == "auth" + + mock_smlight_client.authenticate.side_effect = None + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test", + CONF_PASSWORD: "good", + }, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "SLZB-06p7" + assert result3["data"] == { + CONF_HOST: MOCK_HOST, + CONF_USERNAME: "test", + CONF_PASSWORD: "good", + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +async def test_user_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle user cannot connect error.""" + mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "unknown.local", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + mock_smlight_client.check_auth_needed.side_effect = None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "SLZB-06p7" + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +async def test_auth_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock +) -> None: + """Test we abort auth step on cannot connect error.""" + mock_smlight_client.check_auth_needed.return_value = True + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "cannot_connect" + + +async def test_zeroconf_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock +) -> None: + """Test we abort flow on zeroconf cannot connect error.""" + mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": 
SOURCE_ZEROCONF}, + data=DISCOVERY_INFO, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "cannot_connect" + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_zeroconf_legacy_mac( + hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we can get unique id MAC address for older firmwares.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=DISCOVERY_INFO_LEGACY, + ) + + assert result["description_placeholders"] == {"host": MOCK_HOST} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["context"]["source"] == "zeroconf" + assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert result2["title"] == "SLZB-06p7" + assert result2["data"] == { + CONF_HOST: MOCK_HOST, + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 2 diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py new file mode 100644 index 00000000000..682993cb943 --- /dev/null +++ b/tests/components/smlight/test_init.py @@ -0,0 +1,94 @@ +"Test SMLIGHT SLZB device integration initialization." + +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.smlight.const import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +async def test_async_setup_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test async_setup_entry.""" + entry = await setup_integration(hass, mock_config_entry) + + assert entry.state is ConfigEntryState.LOADED + assert entry.unique_id == "aa:bb:cc:dd:ee:ff" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_async_setup_auth_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test async_setup_entry when authentication fails.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + entry = await setup_integration(hass, mock_config_entry) + + assert entry.state is ConfigEntryState.SETUP_ERROR + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_update_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update failed due to connection error.""" + + await 
setup_integration(hass, mock_config_entry) + entity = hass.states.get("sensor.mock_title_core_chip_temp") + assert entity.state is not STATE_UNAVAILABLE + + mock_smlight_client.get_info.side_effect = SmlightConnectionError + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + entity = hass.states.get("sensor.mock_title_core_chip_temp") + assert entity is not None + assert entity.state == STATE_UNAVAILABLE + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry information.""" + entry = await setup_integration(hass, mock_config_entry) + + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py new file mode 100644 index 00000000000..4d16a73a0a7 --- /dev/null +++ b/tests/components/smlight/test_sensor.py @@ -0,0 +1,54 @@ +"""Tests for the SMLIGHT sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +@pytest.fixture +def platforms() -> Platform | list[Platform]: + """Platforms, which should be loaded during the test.""" + return Platform.SENSOR + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the SMLIGHT sensors.""" + entry = await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_disabled_by_default_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the disabled by default SMLIGHT sensors.""" + await setup_integration(hass, mock_config_entry) + + for sensor in ("ram_usage", "filesystem_usage"): + assert not hass.states.get(f"sensor.mock_title_{sensor}") + + assert (entry := entity_registry.async_get(f"sensor.mock_title_{sensor}")) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION From be255613de0a7fb7220834060e41836729005bab Mon Sep 17 00:00:00 2001 From: Stefan Agner Date: Tue, 20 Aug 2024 12:10:25 +0200 Subject: [PATCH 0920/1635] Add additional tests for Matter update entity (#122575) * Add additional tests for Matter update entity Extend test coverage for Matter update entity. This includes tests for error handling and state store/restore. * Improve test descriptions * Add restore test only (using mock_restore_cache_with_extra_data) * Fix test_update_state_save_and_restore test * Use homeassistant constants * Use update component constants * Use freezer to skip time for device update check We check device updates every 12h currently. Use the freezer to skip time. Still add a test which uses the service call to make sure this works too. 
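
For reference, a minimal sketch of the freezer-driven polling pattern this commit relies on, assuming the standard Home Assistant test fixtures (`hass`, `freezer`) and only names already imported in the diff below (`SCAN_INTERVAL`, `async_fire_time_changed`); the helper name is illustrative and the sketch is not part of the patch itself:

    from freezegun.api import FrozenDateTimeFactory

    from homeassistant.components.matter.update import SCAN_INTERVAL
    from homeassistant.core import HomeAssistant

    from tests.common import async_fire_time_changed


    async def advance_past_update_check(
        hass: HomeAssistant, freezer: FrozenDateTimeFactory
    ) -> None:
        """Skip one polling interval so the update entity re-checks for firmware."""
        # Move frozen time forward by the update entity's poll interval
        # (12 hours, per the note above), without actually waiting.
        freezer.tick(SCAN_INTERVAL)
        # Fire the time-changed event that Home Assistant's scheduler listens for,
        # which triggers the scheduled update check.
        async_fire_time_changed(hass)
        # Let the resulting background poll (check_node_update) finish before asserting.
        await hass.async_block_till_done(wait_background_tasks=True)

The tests below inline this same three-step sequence; keeping it next to a plain `homeassistant.update_entity` service-call test ensures both the scheduled and the manual refresh paths are covered.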
--- tests/components/matter/test_update.py | 268 ++++++++++++++++++++++++- 1 file changed, 258 insertions(+), 10 deletions(-) diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py index 73c69407bbc..19c57b0f3c7 100644 --- a/tests/components/matter/test_update.py +++ b/tests/components/matter/test_update.py @@ -5,12 +5,26 @@ from unittest.mock import AsyncMock, MagicMock from chip.clusters import Objects as clusters from chip.clusters.ClusterObjects import ClusterAttributeDescriptor +from freezegun.api import FrozenDateTimeFactory from matter_server.client.models.node import MatterNode +from matter_server.common.errors import UpdateCheckError, UpdateError from matter_server.common.models import MatterSoftwareVersion, UpdateSource import pytest +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, +) +from homeassistant.components.matter.update import SCAN_INTERVAL +from homeassistant.components.update import ( + ATTR_VERSION, + DOMAIN as UPDATE_DOMAIN, + SERVICE_INSTALL, +) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.restore_state import STORAGE_KEY as RESTORE_STATE_KEY from homeassistant.setup import async_setup_component from .common import ( @@ -19,6 +33,24 @@ from .common import ( trigger_subscription_callback, ) +from tests.common import ( + async_fire_time_changed, + async_mock_restore_state_shutdown_restart, + mock_restore_cache_with_extra_data, +) + +TEST_SOFTWARE_VERSION = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, +) + def set_node_attribute_typed( node: MatterNode, @@ -34,11 +66,18 @@ def set_node_attribute_typed( @pytest.fixture(name="check_node_update") async def check_node_update_fixture(matter_client: MagicMock) -> AsyncMock: - """Fixture for a flow sensor node.""" + """Fixture to check for node updates.""" matter_client.check_node_update = AsyncMock(return_value=None) return matter_client.check_node_update +@pytest.fixture(name="update_node") +async def update_node_fixture(matter_client: MagicMock) -> AsyncMock: + """Fixture to install update.""" + matter_client.update_node = AsyncMock(return_value=None) + return matter_client.update_node + + @pytest.fixture(name="updateable_node") async def updateable_node_fixture( hass: HomeAssistant, matter_client: MagicMock @@ -63,19 +102,19 @@ async def test_update_entity( assert matter_client.check_node_update.call_count == 1 -async def test_update_install( +async def test_update_check_service( hass: HomeAssistant, matter_client: MagicMock, check_node_update: AsyncMock, updateable_node: MatterNode, ) -> None: - """Test update entity exists and update check got made.""" + """Test check device update through service call.""" state = hass.states.get("update.mock_dimmable_light") assert state assert state.state == STATE_OFF assert state.attributes.get("installed_version") == "v1.0" - await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, HA_DOMAIN, {}) check_node_update.return_value = MatterSoftwareVersion( vid=65521, @@ -90,8 +129,8 @@ async def 
test_update_install( ) await hass.services.async_call( - "homeassistant", - "update_entity", + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, { ATTR_ENTITY_ID: "update.mock_dimmable_light", }, @@ -109,11 +148,50 @@ async def test_update_install( == "http://home-assistant.io/non-existing-product" ) - await async_setup_component(hass, "update", {}) + +async def test_update_install( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, + freezer: FrozenDateTimeFactory, +) -> None: + """Test device update with Matter attribute changes influence progress.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + check_node_update.return_value = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, + ) + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + assert ( + state.attributes.get("release_url") + == "http://home-assistant.io/non-existing-product" + ) await hass.services.async_call( - "update", - "install", + UPDATE_DOMAIN, + SERVICE_INSTALL, { ATTR_ENTITY_ID: "update.mock_dimmable_light", }, @@ -169,3 +247,173 @@ async def test_update_install( state = hass.states.get("update.mock_dimmable_light") assert state.state == STATE_OFF assert state.attributes.get("installed_version") == "v2.0" + + +async def test_update_install_failure( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + update_node: AsyncMock, + updateable_node: MatterNode, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update entity service call errors.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + check_node_update.return_value = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, + ) + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + assert ( + state.attributes.get("release_url") + == "http://home-assistant.io/non-existing-product" + ) + + update_node.side_effect = UpdateCheckError("Error finding applicable update") + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + ATTR_VERSION: "v3.0", + }, + blocking=True, + ) + + update_node.side_effect = UpdateError("Error updating node") + + with 
pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + ATTR_VERSION: "v3.0", + }, + blocking=True, + ) + + +async def test_update_state_save_and_restore( + hass: HomeAssistant, + hass_storage: dict[str, Any], + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, + freezer: FrozenDateTimeFactory, +) -> None: + """Test latest update information is retained across reload/restart.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + check_node_update.return_value = TEST_SOFTWARE_VERSION + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + await hass.async_block_till_done() + await async_mock_restore_state_shutdown_restart(hass) + + assert len(hass_storage[RESTORE_STATE_KEY]["data"]) == 1 + state = hass_storage[RESTORE_STATE_KEY]["data"][0]["state"] + assert state["entity_id"] == "update.mock_dimmable_light" + extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"] + + # Check that the extra data has the format we expect. + assert extra_data == { + "software_update": { + "vid": 65521, + "pid": 32768, + "software_version": 2, + "software_version_string": "v2.0", + "firmware_information": "", + "min_applicable_software_version": 0, + "max_applicable_software_version": 1, + "release_notes_url": "http://home-assistant.io/non-existing-product", + "update_source": "local", + } + } + + +async def test_update_state_restore( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + update_node: AsyncMock, +) -> None: + """Test latest update information extra data is restored.""" + mock_restore_cache_with_extra_data( + hass, + ( + ( + State( + "update.mock_dimmable_light", + STATE_ON, + { + "auto_update": False, + "installed_version": "v1.0", + "in_progress": False, + "latest_version": "v2.0", + }, + ), + {"software_update": TEST_SOFTWARE_VERSION.as_dict()}, + ), + ), + ) + await setup_integration_with_node_fixture(hass, "dimmable-light", matter_client) + + assert check_node_update.call_count == 0 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + }, + blocking=True, + ) + + # Validate that the integer software version from the extra data is passed + # to the update_node call. 
+ assert update_node.call_count == 1 + assert ( + update_node.call_args[1]["software_version"] + == TEST_SOFTWARE_VERSION.software_version + ) From db92f29c0041ec9b0f3bb4df4b8aa197a577d957 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:47:41 +0200 Subject: [PATCH 0921/1635] Improve type hints in homekit and homekit_controller tests (#124213) Add missing hass type hint in homekit and homekit_controller tests --- tests/components/homekit/test_homekit.py | 11 ++++++-- tests/components/homekit/test_type_cameras.py | 20 +++++++++----- tests/components/homekit/test_type_lights.py | 2 +- tests/components/homekit_controller/common.py | 26 ++++++++++++++----- .../test_alarm_control_panel.py | 3 ++- .../homekit_controller/test_binary_sensor.py | 13 +++++----- .../homekit_controller/test_button.py | 7 ++--- .../homekit_controller/test_camera.py | 3 ++- .../homekit_controller/test_climate.py | 15 ++++++----- .../homekit_controller/test_config_flow.py | 5 +++- .../homekit_controller/test_connection.py | 5 ++-- .../homekit_controller/test_cover.py | 17 ++++++------ .../homekit_controller/test_device_trigger.py | 7 ++--- .../homekit_controller/test_event.py | 7 ++--- .../components/homekit_controller/test_fan.py | 11 ++++---- .../homekit_controller/test_humidifier.py | 7 ++--- .../homekit_controller/test_init.py | 6 ++--- .../homekit_controller/test_light.py | 9 ++++--- .../homekit_controller/test_lock.py | 5 ++-- .../homekit_controller/test_media_player.py | 7 ++--- .../homekit_controller/test_number.py | 5 ++-- .../homekit_controller/test_sensor.py | 16 ++++++------ .../homekit_controller/test_storage.py | 3 ++- .../homekit_controller/test_switch.py | 7 ++--- 24 files changed, 132 insertions(+), 85 deletions(-) diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index 93458724c5e..ba8c1919e73 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -73,6 +73,7 @@ from homeassistant.helpers.entityfilter import ( CONF_INCLUDE_DOMAINS, CONF_INCLUDE_ENTITIES, CONF_INCLUDE_ENTITY_GLOBS, + EntityFilter, convert_filter, ) from homeassistant.setup import async_setup_component @@ -119,7 +120,13 @@ def patch_source_ip(): yield -def _mock_homekit(hass, entry, homekit_mode, entity_filter=None, devices=None): +def _mock_homekit( + hass: HomeAssistant, + entry: MockConfigEntry, + homekit_mode: str, + entity_filter: EntityFilter | None = None, + devices: list[str] | None = None, +) -> HomeKit: return HomeKit( hass=hass, name=BRIDGE_NAME, @@ -136,7 +143,7 @@ def _mock_homekit(hass, entry, homekit_mode, entity_filter=None, devices=None): ) -def _mock_homekit_bridge(hass, entry): +def _mock_homekit_bridge(hass: HomeAssistant, entry: MockConfigEntry) -> HomeKit: homekit = _mock_homekit(hass, entry, HOMEKIT_MODE_BRIDGE) homekit.driver = MagicMock() homekit.iid_storage = MagicMock() diff --git a/tests/components/homekit/test_type_cameras.py b/tests/components/homekit/test_type_cameras.py index a32656e9f2b..a42980ec2af 100644 --- a/tests/components/homekit/test_type_cameras.py +++ b/tests/components/homekit/test_type_cameras.py @@ -1,6 +1,7 @@ """Test different accessory types: Camera.""" import asyncio +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from uuid import UUID @@ -53,12 +54,12 @@ PID_THAT_WILL_NEVER_BE_ALIVE = 2147483647 @pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant): +async def 
setup_homeassistant(hass: HomeAssistant) -> None: """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) -async def _async_start_streaming(hass, acc): +async def _async_start_streaming(hass: HomeAssistant, acc: Camera) -> None: """Start streaming a camera.""" acc.set_selected_stream_configuration(MOCK_START_STREAM_TLV) await hass.async_block_till_done() @@ -66,28 +67,35 @@ async def _async_start_streaming(hass, acc): await hass.async_block_till_done() -async def _async_setup_endpoints(hass, acc): +async def _async_setup_endpoints(hass: HomeAssistant, acc: Camera) -> None: """Set camera endpoints.""" acc.set_endpoints(MOCK_END_POINTS_TLV) acc.run() await hass.async_block_till_done() -async def _async_reconfigure_stream(hass, acc, session_info, stream_config): +async def _async_reconfigure_stream( + hass: HomeAssistant, + acc: Camera, + session_info: dict[str, Any], + stream_config: dict[str, Any], +) -> None: """Reconfigure the stream.""" await acc.reconfigure_stream(session_info, stream_config) acc.run() await hass.async_block_till_done() -async def _async_stop_all_streams(hass, acc): +async def _async_stop_all_streams(hass: HomeAssistant, acc: Camera) -> None: """Stop all camera streams.""" await acc.stop() acc.run() await hass.async_block_till_done() -async def _async_stop_stream(hass, acc, session_info): +async def _async_stop_stream( + hass: HomeAssistant, acc: Camera, session_info: dict[str, Any] +) -> None: """Stop a camera stream.""" await acc.stop_stream(session_info) acc.run() diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index 02532a91e6d..0f85e07c0bb 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -46,7 +46,7 @@ import homeassistant.util.dt as dt_util from tests.common import async_fire_time_changed, async_mock_service -async def _wait_for_light_coalesce(hass): +async def _wait_for_light_coalesce(hass: HomeAssistant) -> None: async_fire_time_changed( hass, dt_util.utcnow() + timedelta(seconds=CHANGE_COALESCE_TIME_WINDOW) ) diff --git a/tests/components/homekit_controller/common.py b/tests/components/homekit_controller/common.py index 9aba3ef3225..b94a267104b 100644 --- a/tests/components/homekit_controller/common.py +++ b/tests/components/homekit_controller/common.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta import logging @@ -12,6 +13,7 @@ from unittest import mock from aiohomekit.controller.abstract import AbstractDescription, AbstractPairing from aiohomekit.hkjson import loads as hkloads from aiohomekit.model import Accessories, AccessoriesState, Accessory +from aiohomekit.model.services import Service from aiohomekit.testing import FakeController, FakePairing from homeassistant.components.device_automation import DeviceAutomationType @@ -177,7 +179,7 @@ class Helper: return state -async def time_changed(hass, seconds): +async def time_changed(hass: HomeAssistant, seconds: int) -> None: """Trigger time changed.""" next_update = dt_util.utcnow() + timedelta(seconds) async_fire_time_changed(hass, next_update) @@ -193,7 +195,7 @@ async def setup_accessories_from_file(hass: HomeAssistant, path: str) -> Accesso return Accessories.from_list(accessories_json) -async def setup_platform(hass): +async def setup_platform(hass: HomeAssistant) -> FakeController: """Load the platform but with a fake Controller 
API.""" config = {"discovery": {}} @@ -205,7 +207,9 @@ async def setup_platform(hass): return await async_get_controller(hass) -async def setup_test_accessories(hass, accessories, connection=None): +async def setup_test_accessories( + hass: HomeAssistant, accessories: list[Accessory], connection: str | None = None +) -> tuple[MockConfigEntry, AbstractPairing]: """Load a fake homekit device based on captured JSON profile.""" fake_controller = await setup_platform(hass) return await setup_test_accessories_with_controller( @@ -214,8 +218,11 @@ async def setup_test_accessories(hass, accessories, connection=None): async def setup_test_accessories_with_controller( - hass, accessories, fake_controller, connection=None -): + hass: HomeAssistant, + accessories: list[Accessory], + fake_controller: FakeController, + connection: str | None = None, +) -> tuple[MockConfigEntry, AbstractPairing]: """Load a fake homekit device based on captured JSON profile.""" pairing_id = "00:00:00:00:00:00" @@ -277,8 +284,13 @@ async def device_config_changed(hass: HomeAssistant, accessories: Accessories): async def setup_test_component( - hass, aid, setup_accessory, capitalize=False, suffix=None, connection=None -): + hass: HomeAssistant, + aid: int, + setup_accessory: Callable[[Accessory], Service | None], + capitalize: bool = False, + suffix: str | None = None, + connection: str | None = None, +) -> Helper: """Load a fake homekit accessory based on a homekit accessory model. If capitalize is True, property names will be in upper case. diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index d08478641b3..1e9f023fc46 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_security_system_service(accessory): +def create_security_system_service(accessory: Accessory) -> None: """Define a security-system characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.SECURITY_SYSTEM) diff --git a/tests/components/homekit_controller/test_binary_sensor.py b/tests/components/homekit_controller/test_binary_sensor.py index 63b35fbe1b8..a46d5eca2f5 100644 --- a/tests/components/homekit_controller/test_binary_sensor.py +++ b/tests/components/homekit_controller/test_binary_sensor.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_motion_sensor_service(accessory): +def create_motion_sensor_service(accessory: Accessory) -> None: """Define motion characteristics as per page 225 of HAP spec.""" service = accessory.add_service(ServicesTypes.MOTION_SENSOR) @@ -43,7 +44,7 @@ async def test_motion_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.MOTION -def create_contact_sensor_service(accessory): +def create_contact_sensor_service(accessory: Accessory) -> None: """Define 
contact characteristics.""" service = accessory.add_service(ServicesTypes.CONTACT_SENSOR) @@ -74,7 +75,7 @@ async def test_contact_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.OPENING -def create_smoke_sensor_service(accessory): +def create_smoke_sensor_service(accessory: Accessory) -> None: """Define smoke sensor characteristics.""" service = accessory.add_service(ServicesTypes.SMOKE_SENSOR) @@ -105,7 +106,7 @@ async def test_smoke_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.SMOKE -def create_carbon_monoxide_sensor_service(accessory): +def create_carbon_monoxide_sensor_service(accessory: Accessory) -> None: """Define carbon monoxide sensor characteristics.""" service = accessory.add_service(ServicesTypes.CARBON_MONOXIDE_SENSOR) @@ -138,7 +139,7 @@ async def test_carbon_monoxide_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.CO -def create_occupancy_sensor_service(accessory): +def create_occupancy_sensor_service(accessory: Accessory) -> None: """Define occupancy characteristics.""" service = accessory.add_service(ServicesTypes.OCCUPANCY_SENSOR) @@ -169,7 +170,7 @@ async def test_occupancy_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.OCCUPANCY -def create_leak_sensor_service(accessory): +def create_leak_sensor_service(accessory: Accessory) -> None: """Define leak characteristics.""" service = accessory.add_service(ServicesTypes.LEAK_SENSOR) diff --git a/tests/components/homekit_controller/test_button.py b/tests/components/homekit_controller/test_button.py index 058194a7ebd..18391e00df3 100644 --- a/tests/components/homekit_controller/test_button.py +++ b/tests/components/homekit_controller/test_button.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import Helper, setup_test_component -def create_switch_with_setup_button(accessory): +def create_switch_with_setup_button(accessory: Accessory) -> Service: """Define setup button characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -26,7 +27,7 @@ def create_switch_with_setup_button(accessory): return service -def create_switch_with_ecobee_clear_hold_button(accessory): +def create_switch_with_ecobee_clear_hold_button(accessory: Accessory) -> Service: """Define setup button characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homekit_controller/test_camera.py b/tests/components/homekit_controller/test_camera.py index 6e20c1feb3c..1c57d579dc8 100644 --- a/tests/components/homekit_controller/test_camera.py +++ b/tests/components/homekit_controller/test_camera.py @@ -3,6 +3,7 @@ import base64 from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FAKE_CAMERA_IMAGE @@ -13,7 +14,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_camera(accessory): +def create_camera(accessory: Accessory) -> None: """Define camera characteristics.""" 
accessory.add_service(ServicesTypes.CAMERA_RTP_STREAM_MANAGEMENT) diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 183e020eb25..29033887953 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, @@ -28,7 +29,7 @@ from .common import setup_test_component # Test thermostat devices -def create_thermostat_service(accessory): +def create_thermostat_service(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) @@ -66,7 +67,7 @@ def create_thermostat_service(accessory): char.value = 0 -def create_thermostat_service_min_max(accessory): +def create_thermostat_service_min_max(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) char = service.add_char(CharacteristicsTypes.HEATING_COOLING_TARGET) @@ -86,7 +87,7 @@ async def test_climate_respect_supported_op_modes_1( assert state.attributes["hvac_modes"] == ["off", "heat"] -def create_thermostat_service_valid_vals(accessory): +def create_thermostat_service_valid_vals(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) char = service.add_char(CharacteristicsTypes.HEATING_COOLING_TARGET) @@ -364,7 +365,7 @@ async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( ) -def create_thermostat_single_set_point_auto(accessory): +def create_thermostat_single_set_point_auto(accessory: Accessory) -> None: """Define thermostat characteristics with a single set point in auto.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) @@ -685,7 +686,7 @@ async def test_hvac_mode_vs_hvac_action_current_mode_wrong( assert state.attributes["hvac_action"] == "idle" -def create_heater_cooler_service(accessory): +def create_heater_cooler_service(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) @@ -719,7 +720,7 @@ def create_heater_cooler_service(accessory): # Test heater-cooler devices -def create_heater_cooler_service_min_max(accessory): +def create_heater_cooler_service_min_max(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) char = service.add_char(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE) @@ -739,7 +740,7 @@ async def test_heater_cooler_respect_supported_op_modes_1( assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] -def create_theater_cooler_service_valid_vals(accessory): +def create_theater_cooler_service_valid_vals(accessory: Accessory) -> None: """Define heater-cooler characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) char = service.add_char(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE) diff --git a/tests/components/homekit_controller/test_config_flow.py b/tests/components/homekit_controller/test_config_flow.py index 420c9d45803..8c83d8e4b1b 100644 --- a/tests/components/homekit_controller/test_config_flow.py +++ b/tests/components/homekit_controller/test_config_flow.py @@ -2,6 +2,7 @@ import asyncio from ipaddress import ip_address +from typing import Any import unittest.mock from 
unittest.mock import AsyncMock, patch @@ -160,7 +161,9 @@ def test_valid_pairing_codes(pairing_code) -> None: assert len(valid_pin[2]) == 3 -def get_flow_context(hass, result): +def get_flow_context( + hass: HomeAssistant, result: config_flow.ConfigFlowResult +) -> dict[str, Any]: """Get the flow context from the result of async_init or async_configure.""" flow = next( flow diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 8d3cc02fab9..503ff171533 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -5,8 +5,9 @@ import dataclasses from unittest import mock from aiohomekit.controller import TransportType +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.testing import FakeController import pytest @@ -349,7 +350,7 @@ async def test_poll_firmware_version_only_all_watchable_accessory_mode( ) -> None: """Test that we only poll firmware if available and all chars are watchable accessory mode.""" - def _create_accessory(accessory): + def _create_accessory(accessory: Accessory) -> Service: service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") on_char = service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_cover.py b/tests/components/homekit_controller/test_cover.py index c819eac1f5a..7415e97a4b1 100644 --- a/tests/components/homekit_controller/test_cover.py +++ b/tests/components/homekit_controller/test_cover.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_window_covering_service(accessory): +def create_window_covering_service(accessory: Accessory) -> Service: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.WINDOW_COVERING) @@ -37,7 +38,7 @@ def create_window_covering_service(accessory): return service -def create_window_covering_service_with_h_tilt(accessory): +def create_window_covering_service_with_h_tilt(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -52,7 +53,7 @@ def create_window_covering_service_with_h_tilt(accessory): tilt_target.maxValue = 90 -def create_window_covering_service_with_h_tilt_2(accessory): +def create_window_covering_service_with_h_tilt_2(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -67,7 +68,7 @@ def create_window_covering_service_with_h_tilt_2(accessory): tilt_target.maxValue = 0 -def create_window_covering_service_with_v_tilt(accessory): +def create_window_covering_service_with_v_tilt(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = 
create_window_covering_service(accessory) @@ -82,7 +83,7 @@ def create_window_covering_service_with_v_tilt(accessory): tilt_target.maxValue = 90 -def create_window_covering_service_with_v_tilt_2(accessory): +def create_window_covering_service_with_v_tilt_2(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -97,7 +98,7 @@ def create_window_covering_service_with_v_tilt_2(accessory): tilt_target.maxValue = 0 -def create_window_covering_service_with_none_tilt(accessory): +def create_window_covering_service_with_none_tilt(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec. This accessory uses None for the tilt value unexpectedly. @@ -377,7 +378,7 @@ async def test_window_cover_stop( ) -def create_garage_door_opener_service(accessory): +def create_garage_door_opener_service(accessory: Accessory) -> None: """Define a garage-door-opener chars as per page 217 of HAP spec.""" service = accessory.add_service(ServicesTypes.GARAGE_DOOR_OPENER) diff --git a/tests/components/homekit_controller/test_device_trigger.py b/tests/components/homekit_controller/test_device_trigger.py index ecf34868b6c..ba952ac5913 100644 --- a/tests/components/homekit_controller/test_device_trigger.py +++ b/tests/components/homekit_controller/test_device_trigger.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes import pytest @@ -25,7 +26,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -def create_remote(accessory): +def create_remote(accessory: Accessory) -> None: """Define characteristics for a button (that is inn a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -50,7 +51,7 @@ def create_remote(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_button(accessory): +def create_button(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH) @@ -65,7 +66,7 @@ def create_button(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_doorbell(accessory): +def create_doorbell(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.DOORBELL) diff --git a/tests/components/homekit_controller/test_event.py b/tests/components/homekit_controller/test_event.py index 99dcf38fafc..2254845964a 100644 --- a/tests/components/homekit_controller/test_event.py +++ b/tests/components/homekit_controller/test_event.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_remote(accessory): +def create_remote(accessory: Accessory) -> None: """Define characteristics for a button (that is inn a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -37,7 +38,7 @@ def create_remote(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_button(accessory): +def 
create_button(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH) @@ -52,7 +53,7 @@ def create_button(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_doorbell(accessory): +def create_doorbell(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.DOORBELL) diff --git a/tests/components/homekit_controller/test_fan.py b/tests/components/homekit_controller/test_fan.py index 8de447144af..2c498e1a9c1 100644 --- a/tests/components/homekit_controller/test_fan.py +++ b/tests/components/homekit_controller/test_fan.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_fan_service(accessory): +def create_fan_service(accessory: Accessory) -> None: """Define fan v1 characteristics as per HAP spec. This service is no longer documented in R2 of the public HAP spec but existing @@ -29,7 +30,7 @@ def create_fan_service(accessory): speed.value = 0 -def create_fanv2_service(accessory): +def create_fanv2_service(accessory: Accessory) -> None: """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -46,7 +47,7 @@ def create_fanv2_service(accessory): swing_mode.value = 0 -def create_fanv2_service_non_standard_rotation_range(accessory): +def create_fanv2_service_non_standard_rotation_range(accessory: Accessory) -> None: """Define fan v2 with a non-standard rotation range.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -60,7 +61,7 @@ def create_fanv2_service_non_standard_rotation_range(accessory): speed.minStep = 1 -def create_fanv2_service_with_min_step(accessory): +def create_fanv2_service_with_min_step(accessory: Accessory) -> None: """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -78,7 +79,7 @@ def create_fanv2_service_with_min_step(accessory): swing_mode.value = 0 -def create_fanv2_service_without_rotation_speed(accessory): +def create_fanv2_service_without_rotation_speed(accessory: Accessory) -> None: """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py index a031086e93d..4b429959c67 100644 --- a/tests/components/homekit_controller/test_humidifier.py +++ b/tests/components/homekit_controller/test_humidifier.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL from homeassistant.core import HomeAssistant @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_humidifier_service(accessory): +def create_humidifier_service(accessory: Accessory) -> Service: """Define a humidifier characteristics as per page 219 of HAP spec.""" service = 
accessory.add_service(ServicesTypes.HUMIDIFIER_DEHUMIDIFIER) @@ -39,7 +40,7 @@ def create_humidifier_service(accessory): return service -def create_dehumidifier_service(accessory): +def create_dehumidifier_service(accessory: Accessory) -> Service: """Define a dehumidifier characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.HUMIDIFIER_DEHUMIDIFIER) diff --git a/tests/components/homekit_controller/test_init.py b/tests/components/homekit_controller/test_init.py index c443e56b3a4..2a017b8d592 100644 --- a/tests/components/homekit_controller/test_init.py +++ b/tests/components/homekit_controller/test_init.py @@ -8,7 +8,7 @@ from unittest.mock import patch from aiohomekit import AccessoryNotFoundError from aiohomekit.model import Accessory, Transport from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.testing import FakePairing from attr import asdict import pytest @@ -40,7 +40,7 @@ ALIVE_DEVICE_NAME = "testdevice" ALIVE_DEVICE_ENTITY_ID = "light.testdevice" -def create_motion_sensor_service(accessory): +def create_motion_sensor_service(accessory: Accessory) -> None: """Define motion characteristics as per page 225 of HAP spec.""" service = accessory.add_service(ServicesTypes.MOTION_SENSOR) cur_state = service.add_char(CharacteristicsTypes.MOTION_DETECTED) @@ -83,7 +83,7 @@ async def test_async_remove_entry( assert hkid not in hass.data[ENTITY_MAP].storage_data -def create_alive_service(accessory): +def create_alive_service(accessory: Accessory) -> Service: """Create a service to validate we can only remove dead devices.""" service = accessory.add_service(ServicesTypes.LIGHTBULB, name=ALIVE_DEVICE_NAME) service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_light.py b/tests/components/homekit_controller/test_light.py index 04f4d3f5e29..a4a5b59d5cb 100644 --- a/tests/components/homekit_controller/test_light.py +++ b/tests/components/homekit_controller/test_light.py @@ -3,8 +3,9 @@ from collections.abc import Callable from unittest import mock +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.testing import FakeController from homeassistant.components.homekit_controller.const import KNOWN_DEVICES @@ -23,7 +24,7 @@ LIGHT_BULB_NAME = "TestDevice" LIGHT_BULB_ENTITY_ID = "light.testdevice" -def create_lightbulb_service(accessory): +def create_lightbulb_service(accessory: Accessory) -> Service: """Define lightbulb characteristics.""" service = accessory.add_service(ServicesTypes.LIGHTBULB, name=LIGHT_BULB_NAME) @@ -36,7 +37,7 @@ def create_lightbulb_service(accessory): return service -def create_lightbulb_service_with_hs(accessory): +def create_lightbulb_service_with_hs(accessory: Accessory) -> Service: """Define a lightbulb service with hue + saturation.""" service = create_lightbulb_service(accessory) @@ -49,7 +50,7 @@ def create_lightbulb_service_with_hs(accessory): return service -def create_lightbulb_service_with_color_temp(accessory): +def create_lightbulb_service_with_color_temp(accessory: Accessory) -> Service: """Define a lightbulb service with color temp.""" service = create_lightbulb_service(accessory) diff --git a/tests/components/homekit_controller/test_lock.py 
b/tests/components/homekit_controller/test_lock.py index e56ca5fcffe..0963537c7d0 100644 --- a/tests/components/homekit_controller/test_lock.py +++ b/tests/components/homekit_controller/test_lock.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_lock_service(accessory): +def create_lock_service(accessory: Accessory) -> Service: """Define a lock characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.LOCK_MECHANISM) diff --git a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index a7f900217d7..d1d280ef265 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -2,11 +2,12 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( CharacteristicPermissions, CharacteristicsTypes, ) -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes import pytest from homeassistant.core import HomeAssistant @@ -15,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_tv_service(accessory): +def create_tv_service(accessory: Accessory) -> Service: """Define tv characteristics. The TV is not currently documented publicly - this is based on observing really TV's that have HomeKit support. 
@@ -53,7 +54,7 @@ def create_tv_service(accessory): return tv_service -def create_tv_service_with_target_media_state(accessory): +def create_tv_service_with_target_media_state(accessory: Accessory) -> Service: """Define a TV service that can play/pause/stop without generate remote events.""" service = create_tv_service(accessory) diff --git a/tests/components/homekit_controller/test_number.py b/tests/components/homekit_controller/test_number.py index fcbcc3ca7a8..243b34cfc75 100644 --- a/tests/components/homekit_controller/test_number.py +++ b/tests/components/homekit_controller/test_number.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import Helper, setup_test_component -def create_switch_with_spray_level(accessory): +def create_switch_with_spray_level(accessory: Accessory) -> Service: """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homekit_controller/test_sensor.py b/tests/components/homekit_controller/test_sensor.py index ad896395e75..c40864c9629 100644 --- a/tests/components/homekit_controller/test_sensor.py +++ b/tests/components/homekit_controller/test_sensor.py @@ -3,10 +3,10 @@ from collections.abc import Callable from unittest.mock import patch -from aiohomekit.model import Transport +from aiohomekit.model import Accessory, Transport from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.characteristics.const import ThreadNodeCapabilities, ThreadStatus -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.protocol.statuscodes import HapStatusCode from aiohomekit.testing import FakePairing import pytest @@ -24,7 +24,7 @@ from .common import TEST_DEVICE_SERVICE_INFO, Helper, setup_test_component from tests.components.bluetooth import inject_bluetooth_service_info -def create_temperature_sensor_service(accessory): +def create_temperature_sensor_service(accessory: Accessory) -> None: """Define temperature characteristics.""" service = accessory.add_service(ServicesTypes.TEMPERATURE_SENSOR) @@ -32,7 +32,7 @@ def create_temperature_sensor_service(accessory): cur_state.value = 0 -def create_humidity_sensor_service(accessory): +def create_humidity_sensor_service(accessory: Accessory) -> None: """Define humidity characteristics.""" service = accessory.add_service(ServicesTypes.HUMIDITY_SENSOR) @@ -40,7 +40,7 @@ def create_humidity_sensor_service(accessory): cur_state.value = 0 -def create_light_level_sensor_service(accessory): +def create_light_level_sensor_service(accessory: Accessory) -> None: """Define light level characteristics.""" service = accessory.add_service(ServicesTypes.LIGHT_SENSOR) @@ -48,7 +48,7 @@ def create_light_level_sensor_service(accessory): cur_state.value = 0 -def create_carbon_dioxide_level_sensor_service(accessory): +def create_carbon_dioxide_level_sensor_service(accessory: Accessory) -> None: """Define carbon dioxide level characteristics.""" service = accessory.add_service(ServicesTypes.CARBON_DIOXIDE_SENSOR) @@ -56,7 +56,7 @@ def 
create_carbon_dioxide_level_sensor_service(accessory): cur_state.value = 0 -def create_battery_level_sensor(accessory): +def create_battery_level_sensor(accessory: Accessory) -> Service: """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.BATTERY_SERVICE) @@ -280,7 +280,7 @@ async def test_battery_low( assert state.attributes["icon"] == "mdi:battery-alert" -def create_switch_with_sensor(accessory): +def create_switch_with_sensor(accessory: Accessory) -> Service: """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homekit_controller/test_storage.py b/tests/components/homekit_controller/test_storage.py index ab7d7afd6fe..97856c2c784 100644 --- a/tests/components/homekit_controller/test_storage.py +++ b/tests/components/homekit_controller/test_storage.py @@ -3,6 +3,7 @@ from collections.abc import Callable from typing import Any +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -65,7 +66,7 @@ async def test_storage_is_removed_idempotent(hass: HomeAssistant) -> None: assert hkid not in entity_map.storage_data -def create_lightbulb_service(accessory): +def create_lightbulb_service(accessory: Accessory) -> None: """Define lightbulb characteristics.""" service = accessory.add_service(ServicesTypes.LIGHTBULB) on_char = service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_switch.py b/tests/components/homekit_controller/test_switch.py index 1fc49c5c636..a2586f7355e 100644 --- a/tests/components/homekit_controller/test_switch.py +++ b/tests/components/homekit_controller/test_switch.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( CharacteristicsTypes, InUseValues, @@ -15,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_switch_service(accessory): +def create_switch_service(accessory: Accessory) -> None: """Define outlet characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -26,7 +27,7 @@ def create_switch_service(accessory): outlet_in_use.value = False -def create_valve_service(accessory): +def create_valve_service(accessory: Accessory) -> None: """Define valve characteristics.""" service = accessory.add_service(ServicesTypes.VALVE) @@ -43,7 +44,7 @@ def create_valve_service(accessory): remaining.value = 99 -def create_char_switch_service(accessory): +def create_char_switch_service(accessory: Accessory) -> None: """Define swtch characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) From 93f791e5d0e76d71bf3830e38d33ab2b8267d5f7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:48:06 +0200 Subject: [PATCH 0922/1635] Add missing hass type hint in component tests (n) (#124225) --- tests/components/nest/test_camera.py | 6 +- tests/components/nest/test_device_trigger.py | 4 +- tests/components/notify/common.py | 7 +- tests/components/notify/test_legacy.py | 72 +++++++++++++++----- tests/components/nx584/test_binary_sensor.py | 9 ++- 5 files changed, 75 insertions(+), 23 deletions(-) diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index fd2b5ef0388..6aa25134563 100644 --- a/tests/components/nest/test_camera.py +++ 
b/tests/components/nest/test_camera.py @@ -165,7 +165,9 @@ async def mock_create_stream(hass: HomeAssistant) -> Generator[AsyncMock]: yield mock_stream -async def async_get_image(hass, width=None, height=None): +async def async_get_image( + hass: HomeAssistant, width: int | None = None, height: int | None = None +) -> bytes: """Get the camera image.""" image = await camera.async_get_image( hass, "camera.my_camera", width=width, height=height @@ -174,7 +176,7 @@ async def async_get_image(hass, width=None, height=None): return image.content -async def fire_alarm(hass, point_in_time): +async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): async_fire_time_changed(hass, point_in_time) diff --git a/tests/components/nest/test_device_trigger.py b/tests/components/nest/test_device_trigger.py index f818713d382..cf0e1c5ecce 100644 --- a/tests/components/nest/test_device_trigger.py +++ b/tests/components/nest/test_device_trigger.py @@ -59,7 +59,9 @@ def make_camera( } -async def setup_automation(hass, device_id, trigger_type): +async def setup_automation( + hass: HomeAssistant, device_id: str, trigger_type: str +) -> bool: """Set up an automation trigger for testing triggering.""" return await async_setup_component( hass, diff --git a/tests/components/notify/common.py b/tests/components/notify/common.py index 418de96d1aa..1b5c0d6d6ba 100644 --- a/tests/components/notify/common.py +++ b/tests/components/notify/common.py @@ -4,6 +4,8 @@ All containing methods are legacy helpers that should not be used by new components. Instead call the service directly. """ +from typing import Any + from homeassistant.components.notify import ( ATTR_DATA, ATTR_MESSAGE, @@ -11,11 +13,14 @@ from homeassistant.components.notify import ( DOMAIN, SERVICE_NOTIFY, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def send_message(hass, message, title=None, data=None): +def send_message( + hass: HomeAssistant, message: str, title: str | None = None, data: Any = None +) -> None: """Send a notification message.""" info = {ATTR_MESSAGE: message} diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index 8be80650053..79a1b75dcae 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -1,7 +1,7 @@ """The tests for legacy notify services.""" import asyncio -from collections.abc import Mapping +from collections.abc import Callable, Coroutine, Mapping from pathlib import Path from typing import Any from unittest.mock import MagicMock, Mock, patch @@ -63,8 +63,16 @@ def mock_notify_platform( hass: HomeAssistant, tmp_path: Path, integration: str = "notify", - async_get_service: Any = None, - get_service: Any = None, + async_get_service: Callable[ + [HomeAssistant, ConfigType, DiscoveryInfoType | None], + Coroutine[Any, Any, notify.BaseNotificationService], + ] + | None = None, + get_service: Callable[ + [HomeAssistant, ConfigType, DiscoveryInfoType | None], + notify.BaseNotificationService, + ] + | None = None, ): """Specialize the mock platform for legacy notify service.""" loaded_platform = MockNotifyPlatform(async_get_service, get_service) @@ -263,7 +271,11 @@ async def test_platform_setup_with_error( ) -> None: """Test service setup with an invalid setup.""" - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: 
ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> notify.BaseNotificationService | None: """Return None for an invalid notify service.""" raise Exception("Setup error") # noqa: TRY002 @@ -283,11 +295,15 @@ async def test_platform_setup_with_error( async def test_reload_with_notify_builtin_platform_reload( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path + hass: HomeAssistant, tmp_path: Path ) -> None: """Test reload using the legacy notify platform reload method.""" - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") @@ -314,19 +330,25 @@ async def test_reload_with_notify_builtin_platform_reload( assert hass.services.has_service(notify.DOMAIN, "testnotify_b") -async def test_setup_platform_and_reload( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path -) -> None: +async def test_setup_platform_and_reload(hass: HomeAssistant, tmp_path: Path) -> None: """Test service setup and reload.""" get_service_called = Mock() - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2(hass, config, discovery_info=None): + async def async_get_service2( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get legacy notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -405,18 +427,26 @@ async def test_setup_platform_and_reload( async def test_setup_platform_before_notify_setup( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path + hass: HomeAssistant, tmp_path: Path ) -> None: """Test trying to setup a platform before legacy notify service is setup.""" get_service_called = Mock() - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2(hass, config, discovery_info=None): + async def async_get_service2( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -455,18 +485,26 @@ async def test_setup_platform_before_notify_setup( async def test_setup_platform_after_notify_setup( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path + hass: HomeAssistant, tmp_path: Path ) -> None: """Test trying to setup a platform after legacy notify service is set up.""" get_service_called = Mock() - async def async_get_service(hass, config, discovery_info=None): + async def 
async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2(hass, config, discovery_info=None): + async def async_get_service2( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} diff --git a/tests/components/nx584/test_binary_sensor.py b/tests/components/nx584/test_binary_sensor.py index 9261521f850..d59cbdcf69d 100644 --- a/tests/components/nx584/test_binary_sensor.py +++ b/tests/components/nx584/test_binary_sensor.py @@ -1,5 +1,6 @@ """The tests for the nx584 sensor platform.""" +from typing import Any from unittest import mock from nx584 import client as nx584_client @@ -99,7 +100,9 @@ def test_nx584_sensor_setup_full_config( assert mock_watcher.called -async def _test_assert_graceful_fail(hass, config): +async def _test_assert_graceful_fail( + hass: HomeAssistant, config: dict[str, Any] +) -> None: """Test the failing.""" assert not await async_setup_component(hass, "nx584", config) @@ -114,7 +117,9 @@ async def _test_assert_graceful_fail(hass, config): ({"zone_types": {"notazone": "motion"}}), ], ) -async def test_nx584_sensor_setup_bad_config(hass: HomeAssistant, config) -> None: +async def test_nx584_sensor_setup_bad_config( + hass: HomeAssistant, config: dict[str, Any] +) -> None: """Test the setup with bad configuration.""" await _test_assert_graceful_fail(hass, config) From d961e20b15c7c97879aec5267e3e08013f451f38 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:52:33 +0200 Subject: [PATCH 0923/1635] Add missing hass type hint in component tests (p) (#124227) --- tests/components/panasonic_viera/test_remote.py | 2 +- tests/components/pilight/test_sensor.py | 5 ++++- tests/components/plex/helpers.py | 3 ++- tests/components/point/test_config_flow.py | 6 ++++-- tests/components/powerwall/mocks.py | 5 +++-- tests/components/ps4/test_init.py | 2 +- tests/components/ps4/test_media_player.py | 9 +++++++-- 7 files changed, 22 insertions(+), 10 deletions(-) diff --git a/tests/components/panasonic_viera/test_remote.py b/tests/components/panasonic_viera/test_remote.py index 3ae241fc5e9..43f11c7d766 100644 --- a/tests/components/panasonic_viera/test_remote.py +++ b/tests/components/panasonic_viera/test_remote.py @@ -18,7 +18,7 @@ from .conftest import MOCK_CONFIG_DATA, MOCK_DEVICE_INFO, MOCK_ENCRYPTION_DATA from tests.common import MockConfigEntry -async def setup_panasonic_viera(hass): +async def setup_panasonic_viera(hass: HomeAssistant) -> None: """Initialize integration for tests.""" mock_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/pilight/test_sensor.py b/tests/components/pilight/test_sensor.py index 9f529117642..e960e46b50a 100644 --- a/tests/components/pilight/test_sensor.py +++ b/tests/components/pilight/test_sensor.py @@ -1,6 +1,7 @@ """The tests for the Pilight sensor platform.""" import logging +from typing import Any import pytest @@ -17,7 +18,9 @@ def setup_comp(hass: HomeAssistant) -> None: mock_component(hass, "pilight") -def fire_pilight_message(hass, protocol, data): +def fire_pilight_message( + hass: HomeAssistant, 
protocol: str, data: dict[str, Any] +) -> None: """Fire the fake Pilight message.""" message = {pilight.CONF_PROTOCOL: protocol} message.update(data) diff --git a/tests/components/plex/helpers.py b/tests/components/plex/helpers.py index 4828b972d9d..434c31996e4 100644 --- a/tests/components/plex/helpers.py +++ b/tests/components/plex/helpers.py @@ -5,6 +5,7 @@ from typing import Any from plexwebsocket import SIGNAL_CONNECTION_STATE, STATE_CONNECTED +from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import UNDEFINED, UndefinedType import homeassistant.util.dt as dt_util @@ -39,7 +40,7 @@ def trigger_plex_update( callback(msgtype, UPDATE_PAYLOAD if payload is UNDEFINED else payload, None) -async def wait_for_debouncer(hass): +async def wait_for_debouncer(hass: HomeAssistant) -> None: """Move time forward to wait for sensor debouncer.""" next_update = dt_util.utcnow() + timedelta(seconds=3) async_fire_time_changed(hass, next_update) diff --git a/tests/components/point/test_config_flow.py b/tests/components/point/test_config_flow.py index ec71b04b84b..71f3f31ce8d 100644 --- a/tests/components/point/test_config_flow.py +++ b/tests/components/point/test_config_flow.py @@ -10,7 +10,9 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -def init_config_flow(hass, side_effect=None): +def init_config_flow( + hass: HomeAssistant, side_effect: type[Exception] | None = None +) -> config_flow.PointFlowHandler: """Init a configuration flow.""" config_flow.register_flow_implementation(hass, DOMAIN, "id", "secret") flow = config_flow.PointFlowHandler() @@ -22,7 +24,7 @@ def init_config_flow(hass, side_effect=None): @pytest.fixture -def is_authorized(): +def is_authorized() -> bool: """Set PointSession authorized.""" return True diff --git a/tests/components/powerwall/mocks.py b/tests/components/powerwall/mocks.py index e43ccee16f1..3081776483c 100644 --- a/tests/components/powerwall/mocks.py +++ b/tests/components/powerwall/mocks.py @@ -17,6 +17,7 @@ from tesla_powerwall import ( ) from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonValueType from tests.common import load_fixture @@ -87,7 +88,7 @@ async def _mock_powerwall_return_value( return powerwall_mock -async def _mock_powerwall_site_name(hass, site_name): +async def _mock_powerwall_site_name(hass: HomeAssistant, site_name: str) -> MagicMock: powerwall_mock = MagicMock(Powerwall) powerwall_mock.__aenter__.return_value = powerwall_mock @@ -110,7 +111,7 @@ async def _mock_powerwall_side_effect(site_info=None): return powerwall_mock -async def _async_load_json_fixture(hass, path): +async def _async_load_json_fixture(hass: HomeAssistant, path: str) -> JsonValueType: fixture = await hass.async_add_executor_job( load_fixture, os.path.join("powerwall", path) ) diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index 180f51295ac..3a9aac38646 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -199,7 +199,7 @@ async def test_media_player_is_setup(hass: HomeAssistant) -> None: assert len(hass.data[PS4_DATA].devices) == 1 -async def setup_mock_component(hass): +async def setup_mock_component(hass: HomeAssistant) -> None: """Set up Mock Media Player.""" entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA, version=VERSION) entry.add_to_manager(hass.config_entries) diff --git a/tests/components/ps4/test_media_player.py b/tests/components/ps4/test_media_player.py index 
e0be9d508fc..5268306c87a 100644 --- a/tests/components/ps4/test_media_player.py +++ b/tests/components/ps4/test_media_player.py @@ -1,5 +1,6 @@ """Tests for the PS4 media player platform.""" +from typing import Any from unittest.mock import MagicMock, patch from pyps4_2ndscreen.credential import get_ddp_message @@ -130,7 +131,9 @@ MOCK_CONFIG = MockConfigEntry(domain=DOMAIN, data=MOCK_DATA, entry_id=MOCK_ENTRY MOCK_LOAD = "homeassistant.components.ps4.media_player.load_games" -async def setup_mock_component(hass, entry=None): +async def setup_mock_component( + hass: HomeAssistant, entry: MockConfigEntry | None = None +) -> str: """Set up Mock Media Player.""" if entry is None: mock_entry = MockConfigEntry( @@ -150,7 +153,9 @@ async def setup_mock_component(hass, entry=None): return mock_entities[0] -async def mock_ddp_response(hass, mock_status_data): +async def mock_ddp_response( + hass: HomeAssistant, mock_status_data: dict[str, Any] +) -> None: """Mock raw UDP response from device.""" mock_protocol = hass.data[PS4_DATA].protocol assert mock_protocol.local_port == DEFAULT_UDP_PORT From f66b539027c6f56f394bf5ac70d16689b5cddf1e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:53:29 +0200 Subject: [PATCH 0924/1635] Add missing hass type hint in component tests (s) (#124272) --- tests/components/scene/common.py | 3 ++- tests/components/scene/test_init.py | 8 +++++--- tests/components/sensor/test_recorder.py | 4 +++- tests/components/smart_meter_texas/conftest.py | 15 +++++++++++++-- tests/components/smartthings/test_init.py | 9 ++++++++- tests/components/solarlog/test_config_flow.py | 2 +- tests/components/songpal/test_config_flow.py | 11 ++++++++--- tests/components/songpal/test_media_player.py | 5 +++-- tests/components/sonos/conftest.py | 8 +++++++- tests/components/stream/test_worker.py | 14 ++++++++++++-- tests/components/switch/common.py | 11 +++++++---- tests/components/system_health/test_init.py | 11 +++++++---- tests/components/system_log/test_init.py | 4 +++- 13 files changed, 79 insertions(+), 26 deletions(-) diff --git a/tests/components/scene/common.py b/tests/components/scene/common.py index e20da63c402..39f86818744 100644 --- a/tests/components/scene/common.py +++ b/tests/components/scene/common.py @@ -6,11 +6,12 @@ components. Instead call the service directly. 
from homeassistant.components.scene import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def activate(hass, entity_id=ENTITY_MATCH_ALL): +def activate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Activate a scene.""" data = {} diff --git a/tests/components/scene/test_init.py b/tests/components/scene/test_init.py index 5afdebda9da..3747610298d 100644 --- a/tests/components/scene/test_init.py +++ b/tests/components/scene/test_init.py @@ -222,7 +222,7 @@ async def test_restore_state_does_not_restore_unavailable( assert hass.states.get("scene.test").state == STATE_UNKNOWN -async def activate(hass, entity_id=ENTITY_MATCH_ALL): +async def activate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Activate a scene.""" data = {} @@ -241,7 +241,9 @@ async def test_services_registered(hass: HomeAssistant) -> None: assert hass.services.has_service("scene", "apply") -async def setup_lights(hass, entities): +async def setup_lights( + hass: HomeAssistant, entities: list[MockLight] +) -> tuple[MockLight, MockLight]: """Set up the light component.""" assert await async_setup_component( hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}} @@ -261,7 +263,7 @@ async def setup_lights(hass, entities): return light_1, light_2 -async def turn_off_lights(hass, entity_ids): +async def turn_off_lights(hass: HomeAssistant, entity_ids: list[str]) -> None: """Turn lights off.""" await hass.services.async_call( "light", diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 27fab9c0b3b..4d271785114 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -5104,7 +5104,9 @@ async def async_record_meter_state( return states -async def async_record_states_partially_unavailable(hass, zero, entity_id, attributes): +async def async_record_states_partially_unavailable( + hass: HomeAssistant, zero: datetime, entity_id: str, attributes: dict[str, Any] +) -> tuple[datetime, dict[str, list[State]]]: """Record some test states. We inject a bunch of state updates temperature sensors. 
diff --git a/tests/components/smart_meter_texas/conftest.py b/tests/components/smart_meter_texas/conftest.py index 9c0301037a9..14ba6199c3d 100644 --- a/tests/components/smart_meter_texas/conftest.py +++ b/tests/components/smart_meter_texas/conftest.py @@ -2,6 +2,7 @@ from http import HTTPStatus import json +from typing import Any import pytest from smart_meter_texas.const import ( @@ -23,6 +24,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker TEST_ENTITY_ID = "sensor.electric_meter_123456789" @@ -33,14 +35,23 @@ def load_smt_fixture(name): return json.loads(json_fixture) -async def setup_integration(hass, config_entry, aioclient_mock, **kwargs): +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + **kwargs: Any, +) -> None: """Initialize the Smart Meter Texas integration for testing.""" mock_connection(aioclient_mock, **kwargs) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() -async def refresh_data(hass, config_entry, aioclient_mock): +async def refresh_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, +) -> None: """Request a DataUpdateCoordinator refresh.""" mock_connection(aioclient_mock) await async_setup_component(hass, HA_DOMAIN, {}) diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index ae8a288e3a5..fa30fa258cf 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -1,6 +1,9 @@ """Tests for the SmartThings component init module.""" +from collections.abc import Callable, Coroutine +from datetime import datetime, timedelta from http import HTTPStatus +from typing import Any from unittest.mock import Mock, patch from uuid import uuid4 @@ -419,7 +422,11 @@ async def test_broker_regenerates_token(hass: HomeAssistant, config_entry) -> No stored_action = None config_entry.add_to_hass(hass) - def async_track_time_interval(hass, action, interval): + def async_track_time_interval( + hass: HomeAssistant, + action: Callable[[datetime], Coroutine[Any, Any, None] | None], + interval: timedelta, + ) -> None: nonlocal stored_action stored_action = action diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index 34da13cdf8f..f71282a7c9b 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -45,7 +45,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert len(mock_setup_entry.mock_calls) == 1 -def init_config_flow(hass): +def init_config_flow(hass: HomeAssistant) -> config_flow.SolarLogConfigFlow: """Init a configuration flow.""" flow = config_flow.SolarLogConfigFlow() flow.hass = hass diff --git a/tests/components/songpal/test_config_flow.py b/tests/components/songpal/test_config_flow.py index 8f503360702..5215e9b3c0e 100644 --- a/tests/components/songpal/test_config_flow.py +++ b/tests/components/songpal/test_config_flow.py @@ -6,7 +6,12 @@ from unittest.mock import patch from homeassistant.components import ssdp from homeassistant.components.songpal.const import CONF_ENDPOINT, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + 
SOURCE_IMPORT, + SOURCE_SSDP, + SOURCE_USER, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -42,7 +47,7 @@ SSDP_DATA = ssdp.SsdpServiceInfo( ) -def _flow_next(hass, flow_id): +def _flow_next(hass: HomeAssistant, flow_id: str) -> ConfigFlowResult: return next( flow for flow in hass.config_entries.flow.async_progress() @@ -143,7 +148,7 @@ async def test_flow_import_without_name(hass: HomeAssistant) -> None: mocked_device.get_interface_information.assert_called_once() -def _create_mock_config_entry(hass): +def _create_mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: MockConfigEntry( domain=DOMAIN, unique_id="uuid:0000", diff --git a/tests/components/songpal/test_media_player.py b/tests/components/songpal/test_media_player.py index 8f56170b839..2baea6cb5c9 100644 --- a/tests/components/songpal/test_media_player.py +++ b/tests/components/songpal/test_media_player.py @@ -2,6 +2,7 @@ from datetime import timedelta import logging +from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch import pytest @@ -54,12 +55,12 @@ SUPPORT_SONGPAL = ( ) -def _get_attributes(hass): +def _get_attributes(hass: HomeAssistant) -> dict[str, Any]: state = hass.states.get(ENTITY_ID) return state.as_dict()["attributes"] -async def _call(hass, service, **argv): +async def _call(hass: HomeAssistant, service: str, **argv: Any) -> None: await hass.services.async_call( media_player.DOMAIN, service, diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 840fcb4dcdb..cf75f23c322 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -295,7 +295,13 @@ def silent_ssdp_scanner() -> Generator[None]: def discover_fixture(soco): """Create a mock soco discover fixture.""" - def do_callback(hass, callback, *args, **kwargs): + def do_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: callback( ssdp.SsdpServiceInfo( ssdp_location=f"http://{soco.ip_address}/", diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index d61530f9076..73c51087ef1 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -283,7 +283,12 @@ class MockPyAv: return self.container -def run_worker(hass, stream, stream_source, stream_settings=None): +def run_worker( + hass: HomeAssistant, + stream: Stream, + stream_source: str, + stream_settings: StreamSettings | None = None, +) -> None: """Run the stream worker under test.""" stream_state = StreamState(hass, stream.outputs, stream._diagnostics) stream_worker( @@ -296,7 +301,12 @@ def run_worker(hass, stream, stream_source, stream_settings=None): ) -async def async_decode_stream(hass, packets, py_av=None, stream_settings=None): +async def async_decode_stream( + hass: HomeAssistant, + packets: PacketSequence, + py_av: MockPyAv | None = None, + stream_settings: StreamSettings | None = None, +) -> FakePyAvBuffer: """Start a stream worker that decodes incoming stream packets into output segments.""" stream = Stream( hass, diff --git a/tests/components/switch/common.py b/tests/components/switch/common.py index e9764d59d7c..96c79fb7d55 100644 --- a/tests/components/switch/common.py +++ b/tests/components/switch/common.py @@ -15,28 +15,31 @@ from 
homeassistant.const import ( STATE_OFF, STATE_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def turn_on(hass, entity_id=ENTITY_MATCH_ALL): +def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified switch on.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified switch on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL): +def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified switch off.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified switch off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) diff --git a/tests/components/system_health/test_init.py b/tests/components/system_health/test_init.py index b93dccffb92..2237edc9647 100644 --- a/tests/components/system_health/test_init.py +++ b/tests/components/system_health/test_init.py @@ -1,5 +1,6 @@ """Tests for the system health component init.""" +from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiohttp.client_exceptions import ClientError @@ -14,7 +15,9 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -async def gather_system_health_info(hass, hass_ws_client): +async def gather_system_health_info( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> dict[str, Any]: """Gather all info.""" client = await hass_ws_client(hass) @@ -72,7 +75,7 @@ async def test_info_endpoint_register_callback( ) -> None: """Test that the info endpoint allows registering callbacks.""" - async def mock_info(hass): + async def mock_info(hass: HomeAssistant) -> dict[str, Any]: return {"storage": "YAML"} async_register_info(hass, "lovelace", mock_info) @@ -92,7 +95,7 @@ async def test_info_endpoint_register_callback_timeout( ) -> None: """Test that the info endpoint timing out.""" - async def mock_info(hass): + async def mock_info(hass: HomeAssistant) -> dict[str, Any]: raise TimeoutError async_register_info(hass, "lovelace", mock_info) @@ -109,7 +112,7 @@ async def test_info_endpoint_register_callback_exc( ) -> None: """Test that the info endpoint requires auth.""" - async def mock_info(hass): + async def mock_info(hass: HomeAssistant) -> dict[str, Any]: raise Exception("TEST ERROR") # noqa: TRY002 async_register_info(hass, "lovelace", mock_info) diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index 83adab8200b..a81a92681f2 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -371,7 +371,9 @@ def get_frame(path: str, previous_frame: MagicMock | None) -> MagicMock: ) -async def async_log_error_from_test_path(hass, path, watcher): +async def async_log_error_from_test_path( + hass: HomeAssistant, path: str, watcher: WatchLogErrorHandler +) -> None: """Log error while mocking the path.""" call_path = "internal_path.py" main_frame = 
get_frame("main_path/main.py", None) From d901cb04b85e749087fb1e1b4091f7c148806d10 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:53:48 +0200 Subject: [PATCH 0925/1635] Add missing hass type hint in component tests (u) (#124275) --- tests/components/upb/test_config_flow.py | 5 ++++- tests/components/upnp/conftest.py | 27 ++++++++++++++++++++---- tests/components/upnp/test_init.py | 10 ++++++++- 3 files changed, 36 insertions(+), 6 deletions(-) diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index efa6d60c344..5f28f1d9b17 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -26,7 +27,9 @@ def mocked_upb(sync_complete=True, config_ok=True): ) -async def valid_tcp_flow(hass, sync_complete=True, config_ok=True): +async def valid_tcp_flow( + hass: HomeAssistant, sync_complete: bool = True, config_ok: bool = True +) -> ConfigFlowResult: """Get result dict that are standard for most tests.""" with ( diff --git a/tests/components/upnp/conftest.py b/tests/components/upnp/conftest.py index 1431ce2c9ef..4bee5c0e589 100644 --- a/tests/components/upnp/conftest.py +++ b/tests/components/upnp/conftest.py @@ -2,10 +2,11 @@ from __future__ import annotations -from collections.abc import Generator +from collections.abc import Callable, Coroutine, Generator import copy from datetime import datetime import socket +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch from urllib.parse import urlparse @@ -175,7 +176,13 @@ async def ssdp_instant_discovery(): """Instant discovery.""" # Set up device discovery callback. - async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Immediately do callback.""" await callback(TEST_DISCOVERY, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -202,7 +209,13 @@ async def ssdp_instant_discovery_multi_location(): test_discovery.ssdp_all_locations = {TEST_LOCATION6, TEST_LOCATION} # Set up device discovery callback. - async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Immediately do callback.""" await callback(test_discovery, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -225,7 +238,13 @@ async def ssdp_no_discovery(): """No discovery.""" # Set up device discovery callback. 
- async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Don't do callback.""" return MagicMock() diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index f87696b0bd1..0e8551dd8a1 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -2,7 +2,9 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine import copy +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from async_upnp_client.profiles.igd import IgdDevice @@ -140,7 +142,13 @@ async def test_async_setup_udn_mismatch( ) # Set up device discovery callback. - async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Immediately do callback.""" await callback(test_discovery, ssdp.SsdpChange.ALIVE) return MagicMock() From e6eedc071742b8135ff5e5741eacfc180707fb69 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:54:15 +0200 Subject: [PATCH 0926/1635] Add missing hass type hint in component tests (v) (#124281) --- tests/components/vacuum/common.py | 73 +++++++++++++++++++---------- tests/components/valve/test_init.py | 12 +++-- tests/components/venstar/util.py | 4 +- 3 files changed, 58 insertions(+), 31 deletions(-) diff --git a/tests/components/vacuum/common.py b/tests/components/vacuum/common.py index 0e46ebf5e44..6228c1d2f74 100644 --- a/tests/components/vacuum/common.py +++ b/tests/components/vacuum/common.py @@ -4,6 +4,8 @@ All containing methods are legacy helpers that should not be used by new components. Instead call the service directly. 
""" +from typing import Any + from homeassistant.components.vacuum import ( ATTR_FAN_SPEED, ATTR_PARAMS, @@ -26,136 +28,149 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def turn_on(hass, entity_id=ENTITY_MATCH_ALL): +def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified vacuum on.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified vacuum on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL): +def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified vacuum off.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified vacuum off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) @bind_hass -def toggle(hass, entity_id=ENTITY_MATCH_ALL): +def toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle all or specified vacuum.""" hass.add_job(async_toggle, hass, entity_id) -async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL): +async def async_toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True) @bind_hass -def locate(hass, entity_id=ENTITY_MATCH_ALL): +def locate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Locate all or specified vacuum.""" hass.add_job(async_locate, hass, entity_id) -async def async_locate(hass, entity_id=ENTITY_MATCH_ALL): +async def async_locate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Locate all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_LOCATE, data, blocking=True) @bind_hass -def clean_spot(hass, entity_id=ENTITY_MATCH_ALL): +def clean_spot(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to perform a spot clean-up.""" hass.add_job(async_clean_spot, hass, entity_id) -async def async_clean_spot(hass, entity_id=ENTITY_MATCH_ALL): +async def async_clean_spot( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Tell all or specified vacuum to perform a spot clean-up.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_CLEAN_SPOT, data, blocking=True) @bind_hass -def return_to_base(hass, entity_id=ENTITY_MATCH_ALL): +def return_to_base(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to return to base.""" hass.add_job(async_return_to_base, hass, entity_id) -async def async_return_to_base(hass, entity_id=ENTITY_MATCH_ALL): +async def async_return_to_base( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Tell all or specified vacuum to return to 
base.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_RETURN_TO_BASE, data, blocking=True) @bind_hass -def start_pause(hass, entity_id=ENTITY_MATCH_ALL): +def start_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to start or pause the current task.""" hass.add_job(async_start_pause, hass, entity_id) -async def async_start_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_start_pause( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Tell all or specified vacuum to start or pause the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_START_PAUSE, data, blocking=True) @bind_hass -def start(hass, entity_id=ENTITY_MATCH_ALL): +def start(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to start or resume the current task.""" hass.add_job(async_start, hass, entity_id) -async def async_start(hass, entity_id=ENTITY_MATCH_ALL): +async def async_start(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to start or resume the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_START, data, blocking=True) @bind_hass -def pause(hass, entity_id=ENTITY_MATCH_ALL): +def pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or the specified vacuum to pause the current task.""" hass.add_job(async_pause, hass, entity_id) -async def async_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or the specified vacuum to pause the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_PAUSE, data, blocking=True) @bind_hass -def stop(hass, entity_id=ENTITY_MATCH_ALL): +def stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Stop all or specified vacuum.""" hass.add_job(async_stop, hass, entity_id) -async def async_stop(hass, entity_id=ENTITY_MATCH_ALL): +async def async_stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Stop all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) @bind_hass -def set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): +def set_fan_speed( + hass: HomeAssistant, fan_speed: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set fan speed for all or specified vacuum.""" hass.add_job(async_set_fan_speed, hass, fan_speed, entity_id) -async def async_set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): +async def async_set_fan_speed( + hass: HomeAssistant, fan_speed: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set fan speed for all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_FAN_SPEED] = fan_speed @@ -163,12 +178,22 @@ async def async_set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): @bind_hass -def send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL): +def send_command( + hass: HomeAssistant, + command: str, + params: dict[str, Any] | list[Any] | None = None, + entity_id: str = ENTITY_MATCH_ALL, +) -> None: """Send command to all or specified vacuum.""" hass.add_job(async_send_command, hass, 
command, params, entity_id) -async def async_send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL): +async def async_send_command( + hass: HomeAssistant, + command: str, + params: dict[str, Any] | list[Any] | None = None, + entity_id: str = ENTITY_MATCH_ALL, +) -> None: """Send command to all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_COMMAND] = command diff --git a/tests/components/valve/test_init.py b/tests/components/valve/test_init.py index e4519bcef08..378ddb2a94b 100644 --- a/tests/components/valve/test_init.py +++ b/tests/components/valve/test_init.py @@ -332,7 +332,9 @@ async def test_supported_features(hass: HomeAssistant) -> None: assert valve.supported_features is None -def call_service(hass, service, ent, position=None): +def call_service( + hass: HomeAssistant, service: str, ent: ValveEntity, position: int | None = None +): """Call any service on entity.""" params = {ATTR_ENTITY_ID: ent.entity_id} if position is not None: @@ -345,21 +347,21 @@ def set_valve_position(ent, position) -> None: ent._values["current_valve_position"] = position -def is_open(hass, ent): +def is_open(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPEN) -def is_opening(hass, ent): +def is_opening(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPENING) -def is_closed(hass, ent): +def is_closed(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSED) -def is_closing(hass, ent): +def is_closing(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSING) diff --git a/tests/components/venstar/util.py b/tests/components/venstar/util.py index 369d3332135..f1e85e9019e 100644 --- a/tests/components/venstar/util.py +++ b/tests/components/venstar/util.py @@ -15,7 +15,7 @@ TEST_MODELS = ["t2k", "colortouch"] def mock_venstar_devices(f): """Decorate function to mock a Venstar Colortouch and T2000 thermostat API.""" - async def wrapper(hass): + async def wrapper(hass: HomeAssistant) -> None: # Mock thermostats are: # Venstar T2000, FW 4.38 # Venstar "colortouch" T7850, FW 5.1 @@ -37,7 +37,7 @@ def mock_venstar_devices(f): f"http://venstar-{model}.localdomain/query/alerts", text=load_fixture(f"venstar/{model}_alerts.json"), ) - return await f(hass) + await f(hass) return wrapper From c2dc4ef2153f2d24516e8069d384a1b7a2748bb2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:54:42 +0200 Subject: [PATCH 0927/1635] Add missing hass type hint in component tests (w) (#124284) --- tests/components/water_heater/common.py | 15 +++++-- tests/components/webhook/test_init.py | 4 +- tests/components/webostv/__init__.py | 5 ++- .../websocket_api/test_decorators.py | 32 ++++++++++++--- tests/components/wemo/entity_test_helpers.py | 39 ++++++++++++++----- tests/components/wemo/test_binary_sensor.py | 35 +++++++++++++---- tests/components/wemo/test_coordinator.py | 21 +++++----- tests/components/wemo/test_device_trigger.py | 9 ++++- tests/components/wemo/test_sensor.py | 5 ++- tests/components/wiz/__init__.py | 35 ++++++++++++----- tests/components/ws66i/test_media_player.py | 5 
++- 11 files changed, 152 insertions(+), 53 deletions(-) diff --git a/tests/components/water_heater/common.py b/tests/components/water_heater/common.py index e0a8075f4cc..e2fca153fe6 100644 --- a/tests/components/water_heater/common.py +++ b/tests/components/water_heater/common.py @@ -19,7 +19,9 @@ from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, ENTITY_MATCH_A from homeassistant.core import HomeAssistant -async def async_set_away_mode(hass, away_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_away_mode( + hass: HomeAssistant, away_mode: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified water_heater devices away mode on.""" data = {ATTR_AWAY_MODE: away_mode} @@ -30,8 +32,11 @@ async def async_set_away_mode(hass, away_mode, entity_id=ENTITY_MATCH_ALL): async def async_set_temperature( - hass, temperature=None, entity_id=ENTITY_MATCH_ALL, operation_mode=None -): + hass: HomeAssistant, + temperature: float, + entity_id: str = ENTITY_MATCH_ALL, + operation_mode: str | None = None, +) -> None: """Set new target temperature.""" kwargs = { key: value @@ -48,7 +53,9 @@ async def async_set_temperature( ) -async def async_set_operation_mode(hass, operation_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_operation_mode( + hass: HomeAssistant, operation_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target operation mode.""" data = {ATTR_OPERATION_MODE: operation_mode} diff --git a/tests/components/webhook/test_init.py b/tests/components/webhook/test_init.py index 6f4ae1ebefc..af07616024a 100644 --- a/tests/components/webhook/test_init.py +++ b/tests/components/webhook/test_init.py @@ -319,7 +319,9 @@ async def test_ws_webhook( received = [] - async def handler(hass, webhook_id, request): + async def handler( + hass: HomeAssistant, webhook_id: str, request: web.Request + ) -> web.Response: """Handle a webhook.""" received.append(request) return web.json_response({"from": "handler"}) diff --git a/tests/components/webostv/__init__.py b/tests/components/webostv/__init__.py index 5ef210da56d..d6c096f9d3a 100644 --- a/tests/components/webostv/__init__.py +++ b/tests/components/webostv/__init__.py @@ -2,6 +2,7 @@ from homeassistant.components.webostv.const import DOMAIN from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .const import CLIENT_KEY, FAKE_UUID, HOST, TV_NAME @@ -9,7 +10,9 @@ from .const import CLIENT_KEY, FAKE_UUID, HOST, TV_NAME from tests.common import MockConfigEntry -async def setup_webostv(hass, unique_id=FAKE_UUID): +async def setup_webostv( + hass: HomeAssistant, unique_id: str | None = FAKE_UUID +) -> MockConfigEntry: """Initialize webostv and media_player for tests.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/websocket_api/test_decorators.py b/tests/components/websocket_api/test_decorators.py index 0ade5329190..81ac4b96409 100644 --- a/tests/components/websocket_api/test_decorators.py +++ b/tests/components/websocket_api/test_decorators.py @@ -1,5 +1,7 @@ """Test decorators.""" +from typing import Any + import voluptuous as vol from homeassistant.components import http, websocket_api @@ -19,24 +21,40 @@ async def test_async_response_request_context( @websocket_api.websocket_command({"type": "test-get-request-executor"}) @websocket_api.async_response - async def executor_get_request(hass, connection, msg): + async def executor_get_request( + hass: HomeAssistant, + 
connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request( await hass.async_add_executor_job(http.current_request.get), connection, msg ) @websocket_api.websocket_command({"type": "test-get-request-async"}) @websocket_api.async_response - async def async_get_request(hass, connection, msg): + async def async_get_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request(http.current_request.get(), connection, msg) @websocket_api.websocket_command({"type": "test-get-request"}) - def get_request(hass, connection, msg): + def get_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request(http.current_request.get(), connection, msg) @websocket_api.websocket_command( {"type": "test-get-request-with-arg", vol.Required("arg"): str} ) - def get_with_arg_request(hass, connection, msg): + def get_with_arg_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request(http.current_request.get(), connection, msg) websocket_api.async_register_command(hass, executor_get_request) @@ -145,7 +163,11 @@ async def test_supervisor_only(hass: HomeAssistant, websocket_client) -> None: @websocket_api.ws_require_user(only_supervisor=True) @websocket_api.websocket_command({"type": "test-require-supervisor-user"}) - def require_supervisor_request(hass, connection, msg): + def require_supervisor_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: connection.send_result(msg["id"]) websocket_api.async_register_command(hass, require_supervisor_request) diff --git a/tests/components/wemo/entity_test_helpers.py b/tests/components/wemo/entity_test_helpers.py index 6700b00ec38..f57dffad6f9 100644 --- a/tests/components/wemo/entity_test_helpers.py +++ b/tests/components/wemo/entity_test_helpers.py @@ -4,7 +4,11 @@ This is not a test module. These test methods are used by the platform test modu """ import asyncio +from collections.abc import Callable, Coroutine import threading +from typing import Any + +import pywemo from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN from homeassistant.components.wemo.coordinator import async_get_coordinator @@ -17,6 +21,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -40,7 +45,12 @@ def _perform_async_update(coordinator): return async_callback -async def _async_multiple_call_helper(hass, pywemo_device, call1, call2): +async def _async_multiple_call_helper( + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + call1: Callable[[], Coroutine[Any, Any, None]], + call2: Callable[[], Coroutine[Any, Any, None]], +) -> None: """Create two calls (call1 & call2) in parallel; verify only one polls the device. There should only be one poll on the device at a time. Any parallel updates @@ -87,7 +97,7 @@ async def _async_multiple_call_helper(hass, pywemo_device, call1, call2): async def test_async_update_locked_callback_and_update( - hass: HomeAssistant, pywemo_device, wemo_entity + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry ) -> None: """Test that a callback and a state update request can't both happen at the same time. 
@@ -102,7 +112,7 @@ async def test_async_update_locked_callback_and_update( async def test_async_update_locked_multiple_updates( - hass: HomeAssistant, pywemo_device, wemo_entity + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry ) -> None: """Test that two hass async_update state updates do not proceed at the same time.""" coordinator = async_get_coordinator(hass, wemo_entity.device_id) @@ -112,7 +122,7 @@ async def test_async_update_locked_multiple_updates( async def test_async_update_locked_multiple_callbacks( - hass: HomeAssistant, pywemo_device, wemo_entity + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry ) -> None: """Test that two device callback state updates do not proceed at the same time.""" coordinator = async_get_coordinator(hass, wemo_entity.device_id) @@ -158,24 +168,33 @@ class EntityTestHelpers: """Common state update helpers.""" async def test_async_update_locked_multiple_updates( - self, hass, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Test that two hass async_update state updates do not proceed at the same time.""" await test_async_update_locked_multiple_updates( hass, pywemo_device, wemo_entity ) async def test_async_update_locked_multiple_callbacks( - self, hass, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Test that two device callback state updates do not proceed at the same time.""" await test_async_update_locked_multiple_callbacks( hass, pywemo_device, wemo_entity ) async def test_async_update_locked_callback_and_update( - self, hass, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Test that a callback and a state update request can't both happen at the same time. When a state update is received via a callback from the device at the same time diff --git a/tests/components/wemo/test_binary_sensor.py b/tests/components/wemo/test_binary_sensor.py index 99a5df47e25..576283577c2 100644 --- a/tests/components/wemo/test_binary_sensor.py +++ b/tests/components/wemo/test_binary_sensor.py @@ -1,6 +1,7 @@ """Tests for the Wemo binary_sensor entity.""" import pytest +import pywemo from pywemo import StandbyState from homeassistant.components.homeassistant import ( @@ -12,6 +13,8 @@ from homeassistant.components.wemo.binary_sensor import ( MakerBinarySensor, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .entity_test_helpers import EntityTestHelpers @@ -26,8 +29,12 @@ class TestMotion(EntityTestHelpers): return "Motion" async def test_binary_sensor_registry_state_callback( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor receives state updates from the registry.""" # On state. 
pywemo_device.get_state.return_value = 1 @@ -42,8 +49,12 @@ class TestMotion(EntityTestHelpers): assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF async def test_binary_sensor_update_entity( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor performs state updates.""" await async_setup_component(hass, HA_DOMAIN, {}) @@ -82,8 +93,12 @@ class TestMaker(EntityTestHelpers): return MakerBinarySensor._name_suffix.lower() async def test_registry_state_callback( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor receives state updates from the registry.""" # On state. pywemo_device.sensor_state = 0 @@ -112,8 +127,12 @@ class TestInsight(EntityTestHelpers): return InsightBinarySensor._name_suffix.lower() async def test_registry_state_callback( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor receives state updates from the registry.""" # On state. pywemo_device.get_state.return_value = 1 diff --git a/tests/components/wemo/test_coordinator.py b/tests/components/wemo/test_coordinator.py index 198b132bbd0..f524633e701 100644 --- a/tests/components/wemo/test_coordinator.py +++ b/tests/components/wemo/test_coordinator.py @@ -3,9 +3,10 @@ import asyncio from dataclasses import asdict from datetime import timedelta -from unittest.mock import call, patch +from unittest.mock import _Call, call, patch import pytest +import pywemo from pywemo.exceptions import ActionException, PyWeMoException from pywemo.subscribe import EVENT_TYPE_LONG_PRESS @@ -14,7 +15,7 @@ from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN, WEMO_SUBSCRIPTION_EVENT from homeassistant.components.wemo.coordinator import Options, async_get_coordinator from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -248,14 +249,14 @@ class TestInsight: ) async def test_should_poll( self, - hass, - subscribed, - state, - expected_calls, - wemo_entity, - pywemo_device, - pywemo_registry, - ): + hass: HomeAssistant, + subscribed: bool, + state: int, + expected_calls: list[_Call], + wemo_entity: er.RegistryEntry, + pywemo_device: pywemo.WeMoDevice, + pywemo_registry: pywemo.SubscriptionRegistry, + ) -> None: """Validate the should_poll returns the correct value.""" pywemo_registry.is_subscribed.return_value = subscribed pywemo_device.get_state.reset_mock() diff --git a/tests/components/wemo/test_device_trigger.py b/tests/components/wemo/test_device_trigger.py index 47b704dae5d..477f5ee3960 100644 --- a/tests/components/wemo/test_device_trigger.py +++ b/tests/components/wemo/test_device_trigger.py @@ -16,6 +16,7 @@ from homeassistant.const import ( Platform, ) from 
homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from tests.common import async_get_device_automations, async_mock_service @@ -29,7 +30,9 @@ def pywemo_model(): return "LightSwitchLongPress" -async def setup_automation(hass, device_id, trigger_type): +async def setup_automation( + hass: HomeAssistant, device_id: str, trigger_type: str +) -> None: """Set up an automation trigger for testing triggering.""" return await async_setup_component( hass, @@ -96,7 +99,9 @@ async def test_get_triggers(hass: HomeAssistant, wemo_entity) -> None: assert triggers == unordered(expected_triggers) -async def test_fires_on_long_press(hass: HomeAssistant, wemo_entity) -> None: +async def test_fires_on_long_press( + hass: HomeAssistant, wemo_entity: er.RegistryEntry +) -> None: """Test wemo long press trigger firing.""" assert await setup_automation(hass, wemo_entity.device_id, EVENT_TYPE_LONG_PRESS) calls = async_mock_service(hass, "test", "automation") diff --git a/tests/components/wemo/test_sensor.py b/tests/components/wemo/test_sensor.py index 7e0c8fa72f0..2259bfbbf18 100644 --- a/tests/components/wemo/test_sensor.py +++ b/tests/components/wemo/test_sensor.py @@ -2,6 +2,9 @@ import pytest +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + from .conftest import MOCK_INSIGHT_CURRENT_WATTS, MOCK_INSIGHT_TODAY_KWH from .entity_test_helpers import EntityTestHelpers @@ -24,7 +27,7 @@ class InsightTestTemplate(EntityTestHelpers): """Select the appropriate entity for the test.""" return cls.ENTITY_ID_SUFFIX - def test_state(self, hass, wemo_entity): + def test_state(self, hass: HomeAssistant, wemo_entity: er.RegistryEntry) -> None: """Test the sensor state.""" assert hass.states.get(wemo_entity.entity_id).state == self.EXPECTED_STATE_VALUE diff --git a/tests/components/wiz/__init__.py b/tests/components/wiz/__init__.py index e80a1ed8249..d84074e37d3 100644 --- a/tests/components/wiz/__init__.py +++ b/tests/components/wiz/__init__.py @@ -1,9 +1,10 @@ """Tests for the WiZ Platform integration.""" -from collections.abc import Callable -from contextlib import contextmanager +from collections.abc import Callable, Generator +from contextlib import _GeneratorContextManager, contextmanager from copy import deepcopy import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from pywizlight import SCENES, BulbType, PilotParser, wizlight @@ -194,7 +195,11 @@ async def setup_integration(hass: HomeAssistant) -> MockConfigEntry: return entry -def _mocked_wizlight(device, extended_white_range, bulb_type) -> wizlight: +def _mocked_wizlight( + device: dict[str, Any] | None, + extended_white_range: list[int] | None, + bulb_type: BulbType | None, +) -> wizlight: bulb = MagicMock(auto_spec=wizlight, name="Mocked wizlight") async def _save_setup_callback(callback: Callable) -> None: @@ -228,9 +233,13 @@ def _mocked_wizlight(device, extended_white_range, bulb_type) -> wizlight: return bulb -def _patch_wizlight(device=None, extended_white_range=None, bulb_type=None): +def _patch_wizlight( + device: dict[str, Any] | None = None, + extended_white_range: list[int] | None = None, + bulb_type: BulbType | None = None, +) -> _GeneratorContextManager: @contextmanager - def _patcher(): + def _patcher() -> Generator[None]: bulb = device or _mocked_wizlight(device, extended_white_range, bulb_type) with ( patch("homeassistant.components.wiz.wizlight", 
return_value=bulb), @@ -244,9 +253,9 @@ def _patch_wizlight(device=None, extended_white_range=None, bulb_type=None): return _patcher() -def _patch_discovery(): +def _patch_discovery() -> _GeneratorContextManager[None]: @contextmanager - def _patcher(): + def _patcher() -> Generator[None]: with patch( "homeassistant.components.wiz.discovery.find_wizlights", return_value=[DiscoveredBulb(FAKE_IP, FAKE_MAC)], @@ -257,8 +266,12 @@ def _patch_discovery(): async def async_setup_integration( - hass, wizlight=None, device=None, extended_white_range=None, bulb_type=None -): + hass: HomeAssistant, + wizlight: wizlight | None = None, + device: dict[str, Any] | None = None, + extended_white_range: list[int] | None = None, + bulb_type: BulbType | None = None, +) -> tuple[wizlight, MockConfigEntry]: """Set up the integration with a mock device.""" entry = MockConfigEntry( domain=DOMAIN, @@ -273,7 +286,9 @@ async def async_setup_integration( return bulb, entry -async def async_push_update(hass, device, params): +async def async_push_update( + hass: HomeAssistant, device: wizlight, params: dict[str, Any] +) -> None: """Push an update to the device.""" device.state = PilotParser(params) device.status = params.get("state") diff --git a/tests/components/ws66i/test_media_player.py b/tests/components/ws66i/test_media_player.py index aa67ea24b63..23f64d7d514 100644 --- a/tests/components/ws66i/test_media_player.py +++ b/tests/components/ws66i/test_media_player.py @@ -1,6 +1,7 @@ """The tests for WS66i Media player platform.""" from collections import defaultdict +from typing import Any from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -170,7 +171,9 @@ async def _setup_ws66i_with_options(hass: HomeAssistant, ws66i) -> MockConfigEnt return config_entry -async def _call_media_player_service(hass, name, data): +async def _call_media_player_service( + hass: HomeAssistant, name: str, data: dict[str, Any] +) -> None: await hass.services.async_call( MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True ) From 69a560555140c6d0867aa6f650903dda8e51bfea Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:54:57 +0200 Subject: [PATCH 0928/1635] Add missing hass type hint in component tests (x) (#124285) --- tests/components/xiaomi_miio/test_config_flow.py | 8 +++++--- tests/components/xiaomi_miio/test_select.py | 2 +- tests/components/xiaomi_miio/test_vacuum.py | 2 +- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/tests/components/xiaomi_miio/test_config_flow.py b/tests/components/xiaomi_miio/test_config_flow.py index 481be189ddd..707da4bff12 100644 --- a/tests/components/xiaomi_miio/test_config_flow.py +++ b/tests/components/xiaomi_miio/test_config_flow.py @@ -704,7 +704,7 @@ async def test_config_flow_step_device_manual_model_succes(hass: HomeAssistant) } -async def config_flow_device_success(hass, model_to_test): +async def config_flow_device_success(hass: HomeAssistant, model_to_test: str) -> None: """Test a successful config flow for a device (base class).""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -748,7 +748,7 @@ async def config_flow_device_success(hass, model_to_test): } -async def config_flow_generic_roborock(hass): +async def config_flow_generic_roborock(hass: HomeAssistant) -> None: """Test a successful config flow for a generic roborock vacuum.""" dummy_model = "roborock.vacuum.dummy" @@ -794,7 +794,9 @@ async def 
config_flow_generic_roborock(hass): } -async def zeroconf_device_success(hass, zeroconf_name_to_test, model_to_test): +async def zeroconf_device_success( + hass: HomeAssistant, zeroconf_name_to_test: str, model_to_test: str +) -> None: """Test a successful zeroconf discovery of a device (base class).""" result = await hass.config_entries.flow.async_init( const.DOMAIN, diff --git a/tests/components/xiaomi_miio/test_select.py b/tests/components/xiaomi_miio/test_select.py index f2f04127d75..584ef910c98 100644 --- a/tests/components/xiaomi_miio/test_select.py +++ b/tests/components/xiaomi_miio/test_select.py @@ -141,7 +141,7 @@ async def test_select_coordinator_update(hass: HomeAssistant, setup_test) -> Non assert state.state == "left" -async def setup_component(hass, entity_name): +async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up component.""" entity_id = f"{DOMAIN}.{entity_name}" diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 54646d30513..64612f6f464 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -533,7 +533,7 @@ async def test_xiaomi_vacuum_fanspeeds( assert "Fan speed step not recognized" in caplog.text -async def setup_component(hass, entity_name): +async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" entity_id = f"{DOMAIN}.{entity_name}" From 14775c822fa3043d116d6e1ecfce1de4823c7290 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:55:14 +0200 Subject: [PATCH 0929/1635] Add missing hass type hint in component tests (y) (#124286) --- tests/components/yandex_transport/test_sensor.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/components/yandex_transport/test_sensor.py b/tests/components/yandex_transport/test_sensor.py index 5ad9fa92c39..13432850b2b 100644 --- a/tests/components/yandex_transport/test_sensor.py +++ b/tests/components/yandex_transport/test_sensor.py @@ -1,6 +1,7 @@ """Tests for the yandex transport platform.""" import json +from typing import Any from unittest.mock import AsyncMock, patch import pytest @@ -76,7 +77,9 @@ SUBURBAN_RESULT_STATE = dt_util.utc_from_timestamp(1634984640).isoformat( ) -async def assert_setup_sensor(hass, config, count=1): +async def assert_setup_sensor( + hass: HomeAssistant, config: dict[str, Any], count: int = 1 +) -> None: """Set up the sensor and assert it's been created.""" with assert_setup_component(count): assert await async_setup_component(hass, sensor.DOMAIN, config) From 3dc83ef19dd6884e4483227859df6bd41f46e215 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:55:39 +0200 Subject: [PATCH 0930/1635] Add missing hass type hint in component tests (t) (#124274) --- tests/components/tasmota/conftest.py | 2 +- tests/components/tasmota/test_common.py | 174 ++++++++++-------- tests/components/tasmota/test_cover.py | 5 +- tests/components/tasmota/test_init.py | 5 +- tests/components/tasmota/test_light.py | 16 +- .../tellduslive/test_config_flow.py | 4 +- tests/components/template/test_button.py | 11 +- tests/components/template/test_fan.py | 23 ++- tests/components/template/test_init.py | 2 +- tests/components/template/test_number.py | 12 +- tests/components/template/test_select.py | 7 +- tests/components/template/test_sensor.py | 11 +- tests/components/template/test_vacuum.py | 8 +- 
tests/components/toon/test_config_flow.py | 2 +- tests/components/totalconnect/common.py | 7 +- tests/components/tplink_omada/test_switch.py | 4 +- tests/components/trace/test_websocket_api.py | 16 +- tests/components/tractive/conftest.py | 3 +- 18 files changed, 186 insertions(+), 126 deletions(-) diff --git a/tests/components/tasmota/conftest.py b/tests/components/tasmota/conftest.py index 0de0788d7d9..e6bb8c61994 100644 --- a/tests/components/tasmota/conftest.py +++ b/tests/components/tasmota/conftest.py @@ -37,7 +37,7 @@ def disable_status_sensor(status_sensor_disabled): yield -async def setup_tasmota_helper(hass): +async def setup_tasmota_helper(hass: HomeAssistant) -> None: """Set up Tasmota.""" hass.config.components.add("tasmota") diff --git a/tests/components/tasmota/test_common.py b/tests/components/tasmota/test_common.py index f3d85f019f3..4d2c821fff4 100644 --- a/tests/components/tasmota/test_common.py +++ b/tests/components/tasmota/test_common.py @@ -2,7 +2,8 @@ import copy import json -from unittest.mock import ANY +from typing import Any +from unittest.mock import ANY, AsyncMock from hatasmota.const import ( CONF_DEEP_SLEEP, @@ -19,6 +20,7 @@ from hatasmota.utils import ( get_topic_tele_state, get_topic_tele_will, ) +import pytest from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN from homeassistant.const import STATE_UNAVAILABLE @@ -26,7 +28,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import async_fire_mqtt_message -from tests.typing import WebSocketGenerator +from tests.typing import MqttMockHAClient, MqttMockPahoClient, WebSocketGenerator DEFAULT_CONFIG = { "ip": "192.168.15.10", @@ -125,14 +127,14 @@ async def remove_device( async def help_test_availability_when_connection_lost( - hass, - mqtt_client_mock, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability after MQTT disconnection. This is a test helper for the TasmotaAvailability mixin. @@ -191,14 +193,14 @@ async def help_test_availability_when_connection_lost( async def help_test_deep_sleep_availability_when_connection_lost( - hass, - mqtt_client_mock, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability after MQTT disconnection when deep sleep is enabled. This is a test helper for the TasmotaAvailability mixin. @@ -261,13 +263,13 @@ async def help_test_deep_sleep_availability_when_connection_lost( async def help_test_availability( - hass, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability. This is a test helper for the TasmotaAvailability mixin. 
@@ -309,13 +311,13 @@ async def help_test_availability( async def help_test_deep_sleep_availability( - hass, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability when deep sleep is enabled. This is a test helper for the TasmotaAvailability mixin. @@ -358,13 +360,13 @@ async def help_test_deep_sleep_availability( async def help_test_availability_discovery_update( - hass, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test update of discovered TasmotaAvailability. This is a test helper for the TasmotaAvailability mixin. @@ -434,15 +436,15 @@ async def help_test_availability_discovery_update( async def help_test_availability_poll_state( - hass, - mqtt_client_mock, - mqtt_mock, - domain, - config, - poll_topic, - poll_payload, - sensor_config=None, -): + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + poll_topic: str, + poll_payload: str, + sensor_config: dict[str, Any] | None = None, +) -> None: """Test polling of state when device is available. This is a test helper for the TasmotaAvailability mixin. @@ -503,17 +505,17 @@ async def help_test_availability_poll_state( async def help_test_discovery_removal( - hass, - mqtt_mock, - caplog, - domain, - config1, - config2, - sensor_config1=None, - sensor_config2=None, - object_id="tasmota_test", - name="Tasmota Test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + caplog: pytest.LogCaptureFixture, + domain: str, + config1: dict[str, Any], + config2: dict[str, Any], + sensor_config1: dict[str, Any] | None = None, + sensor_config2: dict[str, Any] | None = None, + object_id: str = "tasmota_test", + name: str = "Tasmota Test", +) -> None: """Test removal of discovered entity.""" device_reg = dr.async_get(hass) entity_reg = er.async_get(hass) @@ -569,16 +571,16 @@ async def help_test_discovery_removal( async def help_test_discovery_update_unchanged( - hass, - mqtt_mock, - caplog, - domain, - config, - discovery_update, - sensor_config=None, - object_id="tasmota_test", - name="Tasmota Test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + caplog: pytest.LogCaptureFixture, + domain: str, + config: dict[str, Any], + discovery_update: AsyncMock, + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", + name: str = "Tasmota Test", +) -> None: """Test update of discovered component with and without changes. This is a test helper for the MqttDiscoveryUpdate mixin. 
@@ -623,8 +625,13 @@ async def help_test_discovery_update_unchanged( async def help_test_discovery_device_remove( - hass, mqtt_mock, domain, unique_id, config, sensor_config=None -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + unique_id: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, +) -> None: """Test domain entity is removed when device is removed.""" device_reg = dr.async_get(hass) entity_reg = er.async_get(hass) @@ -659,14 +666,14 @@ async def help_test_discovery_device_remove( async def help_test_entity_id_update_subscriptions( - hass, - mqtt_mock, - domain, - config, - topics=None, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + topics: list[str] | None = None, + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test MQTT subscriptions are managed when entity_id is updated.""" entity_reg = er.async_get(hass) @@ -711,8 +718,13 @@ async def help_test_entity_id_update_subscriptions( async def help_test_entity_id_update_discovery_update( - hass, mqtt_mock, domain, config, sensor_config=None, object_id="tasmota_test" -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test MQTT discovery update after entity_id is updated.""" entity_reg = er.async_get(hass) diff --git a/tests/components/tasmota/test_cover.py b/tests/components/tasmota/test_cover.py index 7da3cdbd1ec..70bf33d0105 100644 --- a/tests/components/tasmota/test_cover.py +++ b/tests/components/tasmota/test_cover.py @@ -2,6 +2,7 @@ import copy import json +from typing import Any from unittest.mock import patch from hatasmota.utils import ( @@ -464,7 +465,9 @@ async def test_controlling_state_via_mqtt_inverted( assert state.attributes["current_position"] == 0 -async def call_service(hass, entity_id, service, **kwargs): +async def call_service( + hass: HomeAssistant, entity_id: str, service: str, **kwargs: Any +) -> None: """Call a fan service.""" await hass.services.async_call( cover.DOMAIN, diff --git a/tests/components/tasmota/test_init.py b/tests/components/tasmota/test_init.py index 125dba811e6..2765ed724ea 100644 --- a/tests/components/tasmota/test_init.py +++ b/tests/components/tasmota/test_init.py @@ -5,6 +5,7 @@ import json from unittest.mock import call from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -74,7 +75,9 @@ async def test_device_remove_non_tasmota_device( """Test removing a non Tasmota device through device registry.""" assert await async_setup_component(hass, "config", {}) - async def async_remove_config_entry_device(hass, config_entry, device_entry): + async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + ) -> bool: return True mock_integration( diff --git a/tests/components/tasmota/test_light.py b/tests/components/tasmota/test_light.py index c4c3f0ec8dc..f5802c509bf 100644 --- a/tests/components/tasmota/test_light.py +++ b/tests/components/tasmota/test_light.py @@ -2,6 +2,7 @@ import copy import json +from typing import Any from unittest.mock import patch from 
hatasmota.const import CONF_MAC @@ -1478,7 +1479,13 @@ async def test_relay_as_light( assert state is not None -async def _test_split_light(hass, mqtt_mock, config, num_lights, num_switches): +async def _test_split_light( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + config: dict[str, Any], + num_lights: int, + num_switches: int, +) -> None: """Test multi-channel light split to single-channel dimmers.""" mac = config["mac"] @@ -1553,7 +1560,12 @@ async def test_split_light2( await _test_split_light(hass, mqtt_mock, config, 5, 2) -async def _test_unlinked_light(hass, mqtt_mock, config, num_switches): +async def _test_unlinked_light( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + config: dict[str, Any], + num_switches: int, +) -> None: """Test rgbww light split to rgb+ww.""" mac = config["mac"] num_lights = 2 diff --git a/tests/components/tellduslive/test_config_flow.py b/tests/components/tellduslive/test_config_flow.py index c575e7fb5c1..abce2858bf3 100644 --- a/tests/components/tellduslive/test_config_flow.py +++ b/tests/components/tellduslive/test_config_flow.py @@ -20,7 +20,9 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -def init_config_flow(hass, side_effect=None): +def init_config_flow( + hass: HomeAssistant, side_effect: type[Exception] | None = None +) -> config_flow.FlowHandler: """Init a configuration flow.""" flow = config_flow.FlowHandler() flow.hass = hass diff --git a/tests/components/template/test_button.py b/tests/components/template/test_button.py index 72c3d2351f5..b201385240c 100644 --- a/tests/components/template/test_button.py +++ b/tests/components/template/test_button.py @@ -1,6 +1,7 @@ """The tests for the Template button platform.""" import datetime as dt +from typing import Any from freezegun.api import FrozenDateTimeFactory import pytest @@ -232,11 +233,11 @@ async def test_unique_id(hass: HomeAssistant) -> None: def _verify( - hass, - expected_value, - attributes=None, - entity_id=_TEST_BUTTON, -): + hass: HomeAssistant, + expected_value: str, + attributes: dict[str, Any] | None = None, + entity_id: str = _TEST_BUTTON, +) -> None: """Verify button's state.""" attributes = attributes or {} if CONF_FRIENDLY_NAME not in attributes: diff --git a/tests/components/template/test_fan.py b/tests/components/template/test_fan.py index 82ad4ede91c..40966d5557c 100644 --- a/tests/components/template/test_fan.py +++ b/tests/components/template/test_fan.py @@ -699,13 +699,13 @@ async def test_set_invalid_osc(hass: HomeAssistant, calls: list[ServiceCall]) -> def _verify( - hass, - expected_state, - expected_percentage, - expected_oscillating, - expected_direction, - expected_preset_mode, -): + hass: HomeAssistant, + expected_state: str, + expected_percentage: int | None, + expected_oscillating: bool | None, + expected_direction: str | None, + expected_preset_mode: str | None, +) -> None: """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes @@ -716,7 +716,7 @@ def _verify( assert attributes.get(ATTR_PRESET_MODE) == expected_preset_mode -async def _register_fan_sources(hass): +async def _register_fan_sources(hass: HomeAssistant) -> None: with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} @@ -760,8 +760,11 @@ async def _register_fan_sources(hass): async def _register_components( - hass, speed_list=None, preset_modes=None, speed_count=None -): + hass: 
HomeAssistant, + speed_list: list[str] | None = None, + preset_modes: list[str] | None = None, + speed_count: int | None = None, +) -> None: """Register basic components for testing.""" await _register_fan_sources(hass) diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index 2e5870217a2..06d59d4d176 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -258,7 +258,7 @@ async def test_reload_sensors_that_reference_other_template_sensors( assert hass.states.get("sensor.test3").state == "2" -async def async_yaml_patch_helper(hass, filename): +async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: """Help update configuration.yaml.""" yaml_path = get_fixture_path(filename, "template") with patch.object(config, "YAML_CONFIG_FILE", yaml_path): diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index ca9fe2d7688..c8befc2b8f8 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -364,12 +364,12 @@ async def test_trigger_number(hass: HomeAssistant) -> None: def _verify( - hass, - expected_value, - expected_step, - expected_minimum, - expected_maximum, -): + hass: HomeAssistant, + expected_value: int, + expected_step: int, + expected_minimum: int, + expected_maximum: int, +) -> None: """Verify number's state.""" state = hass.states.get(_TEST_NUMBER) attributes = state.attributes diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 2268c0840aa..5b4723a3034 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -318,7 +318,12 @@ async def test_trigger_select(hass: HomeAssistant) -> None: assert events[0].event_type == "test_number_event" -def _verify(hass, expected_current_option, expected_options, entity_name=_TEST_SELECT): +def _verify( + hass: HomeAssistant, + expected_current_option: str, + expected_options: list[str], + entity_name: str = _TEST_SELECT, +) -> None: """Verify select's state.""" state = hass.states.get(entity_name) attributes = state.attributes diff --git a/tests/components/template/test_sensor.py b/tests/components/template/test_sensor.py index 37d6d120491..fb352ebcb8c 100644 --- a/tests/components/template/test_sensor.py +++ b/tests/components/template/test_sensor.py @@ -23,7 +23,9 @@ from homeassistant.const import ( from homeassistant.core import Context, CoreState, HomeAssistant, State, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import ATTR_COMPONENT, async_setup_component import homeassistant.util.dt as dt_util @@ -374,7 +376,7 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: order = [] after_dep_event = Event() - async def async_setup_group(hass, config): + async def async_setup_group(hass: HomeAssistant, config: ConfigType) -> bool: # Make sure group takes longer to load, so that it won't # be loaded first by chance await after_dep_event.wait() @@ -383,8 +385,11 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: return True async def async_setup_template( - hass, config, async_add_entities, 
discovery_info=None - ): + hass: HomeAssistant, + config: ConfigType, + async_add_entities: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, + ) -> bool: order.append("sensor.template") return True diff --git a/tests/components/template/test_vacuum.py b/tests/components/template/test_vacuum.py index 8b1d082a62b..fd3e3e872ad 100644 --- a/tests/components/template/test_vacuum.py +++ b/tests/components/template/test_vacuum.py @@ -484,7 +484,9 @@ async def test_set_invalid_fan_speed( assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "high" -def _verify(hass, expected_state, expected_battery_level): +def _verify( + hass: HomeAssistant, expected_state: str, expected_battery_level: int +) -> None: """Verify vacuum's state and speed.""" state = hass.states.get(_TEST_VACUUM) attributes = state.attributes @@ -492,7 +494,7 @@ def _verify(hass, expected_state, expected_battery_level): assert attributes.get(ATTR_BATTERY_LEVEL) == expected_battery_level -async def _register_basic_vacuum(hass): +async def _register_basic_vacuum(hass: HomeAssistant) -> None: """Register basic vacuum with only required options for testing.""" with assert_setup_component(1, "input_select"): assert await setup.async_setup_component( @@ -528,7 +530,7 @@ async def _register_basic_vacuum(hass): await hass.async_block_till_done() -async def _register_components(hass): +async def _register_components(hass: HomeAssistant) -> None: """Register basic components for testing.""" with assert_setup_component(2, "input_boolean"): assert await setup.async_setup_component( diff --git a/tests/components/toon/test_config_flow.py b/tests/components/toon/test_config_flow.py index 588924b416f..492e2a220ad 100644 --- a/tests/components/toon/test_config_flow.py +++ b/tests/components/toon/test_config_flow.py @@ -20,7 +20,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up Toon component.""" await async_process_ha_core_config( hass, diff --git a/tests/components/totalconnect/common.py b/tests/components/totalconnect/common.py index 1ceb893112c..6e9bb28a9b6 100644 --- a/tests/components/totalconnect/common.py +++ b/tests/components/totalconnect/common.py @@ -5,7 +5,8 @@ from unittest.mock import patch from total_connect_client import ArmingState, ResultCode, ZoneStatus, ZoneType from homeassistant.components.totalconnect.const import CONF_USERCODES, DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -394,7 +395,7 @@ TOTALCONNECT_REQUEST = ( ) -async def setup_platform(hass, platform): +async def setup_platform(hass: HomeAssistant, platform: Platform) -> MockConfigEntry: """Set up the TotalConnect platform.""" # first set up a config entry and add it to hass mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) @@ -422,7 +423,7 @@ async def setup_platform(hass, platform): return mock_entry -async def init_integration(hass): +async def init_integration(hass: HomeAssistant) -> MockConfigEntry: """Set up the TotalConnect integration.""" # first set up a config entry and add it to hass mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) diff --git a/tests/components/tplink_omada/test_switch.py 
b/tests/components/tplink_omada/test_switch.py index 7d83140cc95..abce87714a9 100644 --- a/tests/components/tplink_omada/test_switch.py +++ b/tests/components/tplink_omada/test_switch.py @@ -19,7 +19,7 @@ from tplink_omada_client.exceptions import InvalidDevice from homeassistant.components import switch from homeassistant.components.tplink_omada.coordinator import POLL_GATEWAY from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow @@ -336,7 +336,7 @@ def _get_updated_gateway_port_status( return OmadaGatewayPortStatus(gateway_data["portStats"][port]) -def call_service(hass: HomeAssistant, service: str, entity_id: str): +def call_service(hass: HomeAssistant, service: str, entity_id: str) -> ServiceResponse: """Call any service on entity.""" return hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index b0b982d4825..7b292ed39e3 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -39,8 +39,12 @@ def _find_traces(traces, trace_type, item_id): async def _setup_automation_or_script( - hass, domain, configs, script_config=None, stored_traces=None -): + hass: HomeAssistant, + domain: str, + configs: list[dict[str, Any]], + script_config: dict[str, Any] | None = None, + stored_traces: int | None = None, +) -> None: """Set up automations or scripts from automation config.""" if domain == "script": configs = {config["id"]: {"sequence": config["action"]} for config in configs} @@ -66,7 +70,13 @@ async def _setup_automation_or_script( assert await async_setup_component(hass, domain, {domain: configs}) -async def _run_automation_or_script(hass, domain, config, event, context=None): +async def _run_automation_or_script( + hass: HomeAssistant, + domain: str, + config: dict[str, Any], + event: str, + context: dict[str, Any] | None = None, +) -> None: if domain == "automation": hass.bus.async_fire(event, context=context) else: diff --git a/tests/components/tractive/conftest.py b/tests/components/tractive/conftest.py index 7f319a87b5b..88c68a4b62f 100644 --- a/tests/components/tractive/conftest.py +++ b/tests/components/tractive/conftest.py @@ -10,6 +10,7 @@ import pytest from homeassistant.components.tractive.const import DOMAIN, SERVER_UNAVAILABLE from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_send from tests.common import MockConfigEntry, load_json_object_fixture @@ -76,7 +77,7 @@ def mock_tractive_client() -> Generator[AsyncMock]: } entry.runtime_data.client._send_switch_update(event) - def send_server_unavailable_event(hass): + def send_server_unavailable_event(hass: HomeAssistant) -> None: """Send server unavailable event.""" async_dispatcher_send(hass, f"{SERVER_UNAVAILABLE}-12345") From b1d9e5580c7442be4123e2a80b4f83412e7f5b8c Mon Sep 17 00:00:00 2001 From: mvn23 Date: Tue, 20 Aug 2024 14:51:08 +0200 Subject: [PATCH 0931/1635] Update opentherm_gw.sensor to use entity_description (#124283) --- .../components/opentherm_gw/const.py | 474 ------------ .../components/opentherm_gw/entity.py | 10 +- .../components/opentherm_gw/sensor.py | 687 ++++++++++++++++-- 3 files changed, 633 
insertions(+), 538 deletions(-) diff --git a/homeassistant/components/opentherm_gw/const.py b/homeassistant/components/opentherm_gw/const.py index 1d007d86181..c1932c7b2bd 100644 --- a/homeassistant/components/opentherm_gw/const.py +++ b/homeassistant/components/opentherm_gw/const.py @@ -1,19 +1,5 @@ """Constants for the opentherm_gw integration.""" -from __future__ import annotations - -import pyotgw.vars as gw_vars - -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import ( - PERCENTAGE, - UnitOfPower, - UnitOfPressure, - UnitOfTemperature, - UnitOfTime, - UnitOfVolumeFlowRate, -) - ATTR_GW_ID = "gateway_id" ATTR_LEVEL = "level" ATTR_DHW_OVRD = "dhw_override" @@ -47,463 +33,3 @@ SERVICE_SET_MAX_MOD = "set_max_modulation" SERVICE_SET_OAT = "set_outside_temperature" SERVICE_SET_SB_TEMP = "set_setback_temperature" SERVICE_SEND_TRANSP_CMD = "send_transparent_command" - -TRANSLATE_SOURCE = { - gw_vars.BOILER: "Boiler", - gw_vars.OTGW: None, - gw_vars.THERMOSTAT: "Thermostat", -} - -SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 - -SENSOR_INFO: dict[str, list] = { - # [device_class, unit, friendly_name, suggested_display_precision, [status source, ...]] - gw_vars.DATA_CONTROL_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Control Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_MEMBERID: [ - None, - None, - "Thermostat Member ID {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MEMBERID: [ - None, - None, - "Boiler Member ID {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_OEM_FAULT: [ - None, - None, - "Boiler OEM Fault Code {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_COOLING_CONTROL: [ - None, - PERCENTAGE, - "Cooling Control Signal {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CONTROL_SETPOINT_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Control Setpoint 2 {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_SETPOINT_OVRD: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Setpoint Override {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD: [ - None, - PERCENTAGE, - "Boiler Maximum Relative Modulation {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MAX_CAPACITY: [ - SensorDeviceClass.POWER, - UnitOfPower.KILO_WATT, - "Boiler Maximum Capacity {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MIN_MOD_LEVEL: [ - None, - PERCENTAGE, - "Boiler Minimum Modulation Level {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REL_MOD_LEVEL: [ - None, - PERCENTAGE, - "Relative Modulation Level {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_WATER_PRESS: [ - SensorDeviceClass.PRESSURE, - UnitOfPressure.BAR, - "Central Heating Water Pressure {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - 
gw_vars.DATA_DHW_FLOW_RATE: [ - SensorDeviceClass.VOLUME_FLOW_RATE, - UnitOfVolumeFlowRate.LITERS_PER_MINUTE, - "Hot Water Flow Rate {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_SETPOINT_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Setpoint 2 {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_WATER_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Central Heating Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_OUTSIDE_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Outside Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_RETURN_WATER_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Return Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SOLAR_STORAGE_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Solar Storage Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SOLAR_COLL_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Solar Collector Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_WATER_TEMP_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Central Heating 2 Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_TEMP_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water 2 Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_EXHAUST_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Exhaust Temperature {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_MAX_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Maximum Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_MIN_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Minimum Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH_MAX_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Boiler Maximum Central Heating Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH_MIN_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Boiler Minimum Central Heating Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - 
gw_vars.DATA_MAX_CH_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Maximum Central Heating Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_OEM_DIAG: [ - None, - None, - "OEM Diagnostic Code {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_TOTAL_BURNER_STARTS: [ - None, - "starts", - "Total Burner Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_PUMP_STARTS: [ - None, - "starts", - "Central Heating Pump Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_PUMP_STARTS: [ - None, - "starts", - "Hot Water Pump Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_BURNER_STARTS: [ - None, - "starts", - "Hot Water Burner Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_TOTAL_BURNER_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Total Burner Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_PUMP_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Central Heating Pump Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_PUMP_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Hot Water Pump Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_BURNER_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Hot Water Burner Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_OT_VERSION: [ - None, - None, - "Thermostat OpenTherm Version {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_OT_VERSION: [ - None, - None, - "Boiler OpenTherm Version {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_PRODUCT_TYPE: [ - None, - None, - "Thermostat Product Type {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_PRODUCT_VERSION: [ - None, - None, - "Thermostat Product Version {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_PRODUCT_TYPE: [ - None, - None, - "Boiler Product Type {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_PRODUCT_VERSION: [ - None, - None, - "Boiler Product Version {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.OTGW_MODE: [ - None, - None, - "Gateway/Monitor Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_DHW_OVRD: [ - None, - None, - "Gateway Hot Water Override Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_ABOUT: [ - None, - None, - "Gateway Firmware Version {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_BUILD: [ - None, - None, - "Gateway Firmware Build {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_CLOCKMHZ: [ - None, - None, - "Gateway Clock Speed {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_A: [ - None, - None, - "Gateway LED A Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_B: [ - None, - None, - "Gateway LED B Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_C: [ - None, - None, - "Gateway LED C Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_D: [ - None, - None, - "Gateway LED D Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_E: [ - None, - None, - "Gateway LED E Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_F: [ - None, - None, - "Gateway LED F 
Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_GPIO_A: [ - None, - None, - "Gateway GPIO A Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_GPIO_B: [ - None, - None, - "Gateway GPIO B Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_SB_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Gateway Setback Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.OTGW], - ], - gw_vars.OTGW_SETP_OVRD_MODE: [ - None, - None, - "Gateway Room Setpoint Override Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_SMART_PWR: [ - None, - None, - "Gateway Smart Power Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_THRM_DETECT: [ - None, - None, - "Gateway Thermostat Detection {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_VREF: [ - None, - None, - "Gateway Reference Voltage Setting {}", - None, - [gw_vars.OTGW], - ], -} diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py index dd023896f70..1de4aa0a2d9 100644 --- a/homeassistant/components/opentherm_gw/entity.py +++ b/homeassistant/components/opentherm_gw/entity.py @@ -2,16 +2,24 @@ import logging +import pyotgw.vars as gw_vars + from homeassistant.core import callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity, EntityDescription from . import OpenThermGatewayDevice -from .const import DOMAIN, TRANSLATE_SOURCE +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) +TRANSLATE_SOURCE = { + gw_vars.BOILER: "Boiler", + gw_vars.OTGW: None, + gw_vars.THERMOSTAT: "Thermostat", +} + class OpenThermEntityDescription(EntityDescription): """Describe common opentherm_gw entity properties.""" diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index 8c17aca4516..20a69def433 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -1,20 +1,621 @@ """Support for OpenTherm Gateway sensors.""" -import logging +from dataclasses import dataclass -from homeassistant.components.sensor import ENTITY_ID_FORMAT, SensorEntity +import pyotgw.vars as gw_vars + +from homeassistant.components.sensor import ( + ENTITY_ID_FORMAT, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ID +from homeassistant.const import ( + CONF_ID, + PERCENTAGE, + UnitOfPower, + UnitOfPressure, + UnitOfTemperature, + UnitOfTime, + UnitOfVolumeFlowRate, +) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, SENSOR_INFO, TRANSLATE_SOURCE +from . 
import OpenThermGatewayDevice +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW +from .entity import OpenThermEntity, OpenThermEntityDescription -_LOGGER = logging.getLogger(__name__) +SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 + + +@dataclass(frozen=True, kw_only=True) +class OpenThermSensorEntityDescription( + SensorEntityDescription, OpenThermEntityDescription +): + """Describes opentherm_gw sensor entity.""" + + +SENSOR_INFO: tuple[tuple[list[str], OpenThermSensorEntityDescription], ...] = ( + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT, + friendly_name_format="Control Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_MEMBERID, + friendly_name_format="Thermostat Member ID {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MEMBERID, + friendly_name_format="Boiler Member ID {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OEM_FAULT, + friendly_name_format="Boiler OEM Fault Code {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_COOLING_CONTROL, + friendly_name_format="Cooling Control Signal {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT_2, + friendly_name_format="Control Setpoint 2 {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_OVRD, + friendly_name_format="Room Setpoint Override {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, + friendly_name_format="Boiler Maximum Relative Modulation {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_CAPACITY, + friendly_name_format="Boiler Maximum Capacity {}", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, + friendly_name_format="Boiler Minimum Modulation Level {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + 
OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT, + friendly_name_format="Room Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_REL_MOD_LEVEL, + friendly_name_format="Relative Modulation Level {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_PRESS, + friendly_name_format="Central Heating Water Pressure {}", + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_FLOW_RATE, + friendly_name_format="Hot Water Flow Rate {}", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_2, + friendly_name_format="Room Setpoint 2 {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_TEMP, + friendly_name_format="Room Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP, + friendly_name_format="Central Heating Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP, + friendly_name_format="Hot Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OUTSIDE_TEMP, + friendly_name_format="Outside Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_RETURN_WATER_TEMP, + 
friendly_name_format="Return Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_STORAGE_TEMP, + friendly_name_format="Solar Storage Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_COLL_TEMP, + friendly_name_format="Solar Collector Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP_2, + friendly_name_format="Central Heating 2 Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP_2, + friendly_name_format="Hot Water 2 Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_EXHAUST_TEMP, + friendly_name_format="Exhaust Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, + friendly_name_format="Hot Water Maximum Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, + friendly_name_format="Hot Water Minimum Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MAX_SETP, + friendly_name_format="Boiler Maximum Central Heating Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MIN_SETP, + friendly_name_format="Boiler Minimum Central Heating Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( 
+ [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_SETPOINT, + friendly_name_format="Hot Water Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MAX_CH_SETPOINT, + friendly_name_format="Maximum Central Heating Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OEM_DIAG, + friendly_name_format="OEM Diagnostic Code {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_STARTS, + friendly_name_format="Total Burner Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_STARTS, + friendly_name_format="Central Heating Pump Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_STARTS, + friendly_name_format="Hot Water Pump Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_STARTS, + friendly_name_format="Hot Water Burner Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_HOURS, + friendly_name_format="Total Burner Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_HOURS, + friendly_name_format="Central Heating Pump Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_HOURS, + friendly_name_format="Hot Water Pump Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_HOURS, + friendly_name_format="Hot Water Burner Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_OT_VERSION, + friendly_name_format="Thermostat OpenTherm Version {}", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + 
key=gw_vars.DATA_SLAVE_OT_VERSION, + friendly_name_format="Boiler OpenTherm Version {}", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_TYPE, + friendly_name_format="Thermostat Product Type {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_VERSION, + friendly_name_format="Thermostat Product Version {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, + friendly_name_format="Boiler Product Type {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, + friendly_name_format="Boiler Product Version {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_MODE, + friendly_name_format="Gateway/Monitor Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_DHW_OVRD, + friendly_name_format="Gateway Hot Water Override Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_ABOUT, + friendly_name_format="Gateway Firmware Version {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_BUILD, + friendly_name_format="Gateway Firmware Build {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_CLOCKMHZ, + friendly_name_format="Gateway Clock Speed {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_A, + friendly_name_format="Gateway LED A Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_B, + friendly_name_format="Gateway LED B Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_C, + friendly_name_format="Gateway LED C Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_D, + friendly_name_format="Gateway LED D Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_E, + friendly_name_format="Gateway LED E Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_F, + friendly_name_format="Gateway LED F Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_A, + friendly_name_format="Gateway GPIO A Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_B, + friendly_name_format="Gateway GPIO B Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SB_TEMP, + friendly_name_format="Gateway Setback Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SETP_OVRD_MODE, + friendly_name_format="Gateway Room Setpoint Override Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SMART_PWR, + friendly_name_format="Gateway Smart Power Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + 
key=gw_vars.OTGW_THRM_DETECT, + friendly_name_format="Gateway Thermostat Detection {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_VREF, + friendly_name_format="Gateway Reference Voltage Setting {}", + ), + ), +) async def async_setup_entry( @@ -28,77 +629,37 @@ async def async_setup_entry( async_add_entities( OpenThermSensor( gw_dev, - var, source, - info[0], - info[1], - info[2], - info[3], + description, ) - for var, info in SENSOR_INFO.items() - for source in info[4] + for sources, description in SENSOR_INFO + for source in sources ) -class OpenThermSensor(SensorEntity): +class OpenThermSensor(OpenThermEntity, SensorEntity): """Representation of an OpenTherm Gateway sensor.""" - _attr_should_poll = False - _attr_entity_registry_enabled_default = False - _attr_available = False + entity_description: OpenThermSensorEntityDescription def __init__( self, - gw_dev, - var, - source, - device_class, - unit, - friendly_name_format, - suggested_display_precision, - ): + gw_dev: OpenThermGatewayDevice, + source: str, + description: OpenThermSensorEntityDescription, + ) -> None: """Initialize the OpenTherm Gateway sensor.""" self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, f"{var}_{source}_{gw_dev.gw_id}", hass=gw_dev.hass + ENTITY_ID_FORMAT, + f"{description.key}_{source}_{gw_dev.gw_id}", + hass=gw_dev.hass, ) - self._gateway = gw_dev - self._var = var - self._source = source - self._attr_device_class = device_class - self._attr_native_unit_of_measurement = unit - if TRANSLATE_SOURCE[source] is not None: - friendly_name_format = ( - f"{friendly_name_format} ({TRANSLATE_SOURCE[source]})" - ) - self._attr_name = friendly_name_format.format(gw_dev.name) - self._unsub_updates = None - self._attr_unique_id = f"{gw_dev.gw_id}-{source}-{var}" - if suggested_display_precision: - self._attr_suggested_display_precision = suggested_display_precision - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_dev.gw_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, - ) - - async def async_added_to_hass(self) -> None: - """Subscribe to updates from the component.""" - _LOGGER.debug("Added OpenTherm Gateway sensor %s", self._attr_name) - self._unsub_updates = async_dispatcher_connect( - self.hass, self._gateway.update_signal, self.receive_report - ) - - async def async_will_remove_from_hass(self) -> None: - """Unsubscribe from updates from the component.""" - _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._attr_name) - self._unsub_updates() + super().__init__(gw_dev, source, description) @callback - def receive_report(self, status): + def receive_report(self, status: dict[str, dict]) -> None: """Handle status updates from the component.""" self._attr_available = self._gateway.connected - value = status[self._source].get(self._var) + value = status[self._source].get(self.entity_description.key) self._attr_native_value = value self.async_write_ha_state() From 15976b82077f5a9153fbd3c2c94fad6f399b8c0b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 20 Aug 2024 14:53:06 +0200 Subject: [PATCH 0932/1635] Disable expensive pskc computation in all otbr tests (#124292) * Disable expensive pskc computation in all otbr tests * Update tests and fixtures which patched compute_pskc --- tests/components/otbr/conftest.py | 23 +++++++++++++++++++---- tests/components/otbr/test_init.py | 6 ++---- 2 files changed, 21 insertions(+), 8 deletions(-) diff --git 
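The conftest change that follows leans on a pytest idiom worth spelling out: an autouse fixture stubs the expensive call for every test, and an individual test opts back in by parametrizing the fixture that controls it. A generic, self-contained sketch of that idiom (names here are illustrative, not the otbr ones):

import sys
from unittest.mock import patch

import pytest


def expensive_computation() -> int:
    """Stand-in for the expensive call being stubbed by default."""
    return sum(range(10**6))


@pytest.fixture(name="enable_expensive")
def enable_expensive_fixture() -> bool:
    """Disabled by default; a test re-enables it via parametrize."""
    return False


@pytest.fixture(autouse=True)
def stub_expensive(enable_expensive: bool):
    """Replace the expensive function, passing it through only when enabled."""
    side_effect = expensive_computation if enable_expensive else None
    with patch.object(
        sys.modules[__name__], "expensive_computation", side_effect=side_effect
    ) as mock:
        yield mock


@pytest.mark.parametrize("enable_expensive", [True])
def test_with_real_computation() -> None:
    """Parametrizing the fixture name overrides its default return value."""
    assert expensive_computation() == sum(range(10**6))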
a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index 8344c734572..fdd5b93d6cd 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -20,6 +20,23 @@ from . import ( from tests.common import MockConfigEntry +@pytest.fixture(name="enable_compute_pskc") +def enable_compute_pskc_fixture() -> Any: + """Allow controlling if compute_pskc should be enabled.""" + return False + + +@pytest.fixture(name="compute_pskc", autouse=True) +def compute_pskc_fixture(enable_compute_pskc: bool) -> Any: + """Patch homeassistant.components.otbr.util.compute_pskc.""" + compute_pskc = otbr.util.compute_pskc if enable_compute_pskc else None + + with patch( + "homeassistant.components.otbr.util.compute_pskc", side_effect=compute_pskc + ) as compute_pskc_mock: + yield compute_pskc_mock + + @pytest.fixture(name="dataset") def dataset_fixture() -> Any: """Return the discovery info from the supervisor.""" @@ -76,8 +93,7 @@ async def otbr_config_entry_multipan_fixture(hass: HomeAssistant) -> None: "python_otbr_api.OTBR.get_extended_address", return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, ), - patch("homeassistant.components.otbr.util.compute_pskc"), - ): # Patch to speed up tests + ): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -103,8 +119,7 @@ async def otbr_config_entry_thread_fixture(hass: HomeAssistant) -> None: "python_otbr_api.OTBR.get_extended_address", return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, ), - patch("homeassistant.components.otbr.util.compute_pskc"), - ): # Patch to speed up tests + ): assert await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index 7d54dd16f56..86bab71cbda 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -198,6 +198,7 @@ async def test_import_share_radio_no_channel_collision( ) +@pytest.mark.parametrize("enable_compute_pskc", [True]) @pytest.mark.parametrize( "dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE] ) @@ -333,8 +334,5 @@ async def test_remove_extra_entries( config_entry1.add_to_hass(hass) config_entry2.add_to_hass(hass) assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 - with ( - patch("homeassistant.components.otbr.util.compute_pskc"), - ): # Patch to speed up tests - assert await async_setup_component(hass, otbr.DOMAIN, {}) + assert await async_setup_component(hass, otbr.DOMAIN, {}) assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 From a1e3e7f24f694df0ba870223770ffacf13ade52d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 20 Aug 2024 14:58:34 +0200 Subject: [PATCH 0933/1635] Deduplicate OTBR test fixtures (#124293) --- tests/components/otbr/conftest.py | 44 ++++++++++--------------------- 1 file changed, 14 insertions(+), 30 deletions(-) diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index fdd5b93d6cd..3811ff66ebb 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -72,7 +72,12 @@ def get_extended_address_fixture() -> Generator[AsyncMock]: @pytest.fixture(name="otbr_config_entry_multipan") -async def otbr_config_entry_multipan_fixture(hass: HomeAssistant) -> None: +async def otbr_config_entry_multipan_fixture( + hass: HomeAssistant, + get_active_dataset_tlvs: AsyncMock, + get_border_agent_id: AsyncMock, + get_extended_address: AsyncMock, +) -> None: """Mock Open Thread Border Router config entry.""" config_entry = 
MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, @@ -81,24 +86,16 @@ async def otbr_config_entry_multipan_fixture(hass: HomeAssistant) -> None: title="Open Thread Border Router", ) config_entry.add_to_hass(hass) - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) @pytest.fixture(name="otbr_config_entry_thread") -async def otbr_config_entry_thread_fixture(hass: HomeAssistant) -> None: +async def otbr_config_entry_thread_fixture( + hass: HomeAssistant, + get_active_dataset_tlvs: AsyncMock, + get_border_agent_id: AsyncMock, + get_extended_address: AsyncMock, +) -> None: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_THREAD, @@ -107,20 +104,7 @@ async def otbr_config_entry_thread_fixture(hass: HomeAssistant) -> None: title="Open Thread Border Router", ) config_entry.add_to_hass(hass) - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) @pytest.fixture(autouse=True) From b74aced6f3a0bed8dc4020af9792d96e44441636 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 20 Aug 2024 17:02:48 +0200 Subject: [PATCH 0934/1635] Subscribe per component for MQTT discovery (#119974) * Subscribe per component for MQTT discovery * Use single assignment * Handle wildcard subscriptions first * Split subsRecription handling, update helper * Fix help_all_subscribe_calls * Fix import * Fix test * Update import order * Undo move self._last_subscribe * Recover removed test * Revert not needed changes to binary_sensor platform tests * Revert line removal * Rework interation of discovery topics * Reduce * Add comment * Move comment * Chain subscriptions --- homeassistant/components/mqtt/client.py | 20 +++++++++++++++++--- homeassistant/components/mqtt/discovery.py | 17 ++++++++++------- tests/components/mqtt/test_client.py | 6 ++++-- tests/components/mqtt/test_common.py | 19 ++++++++++++++----- tests/components/mqtt/test_discovery.py | 14 +++++--------- 5 files changed, 50 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 6762f440c5a..4fa8b7db02a 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -111,6 +111,7 @@ UNSUBSCRIBE_COOLDOWN = 0.1 TIMEOUT_ACK = 10 RECONNECT_INTERVAL_SECONDS = 10 +MAX_WILDCARD_SUBSCRIBES_PER_CALL = 1 MAX_SUBSCRIBES_PER_CALL = 500 MAX_UNSUBSCRIBES_PER_CALL = 500 @@ -893,14 +894,27 @@ class MQTT: if not self._pending_subscriptions: return - subscriptions: dict[str, int] = self._pending_subscriptions + # Split out the wildcard subscriptions, we subscribe to them one by one + pending_subscriptions: dict[str, int] = self._pending_subscriptions + pending_wildcard_subscriptions = { + subscription.topic: 
pending_subscriptions.pop(subscription.topic) + for subscription in self._wildcard_subscriptions + if subscription.topic in pending_subscriptions + } + self._pending_subscriptions = {} - subscription_list = list(subscriptions.items()) debug_enabled = _LOGGER.isEnabledFor(logging.DEBUG) - for chunk in chunked_or_all(subscription_list, MAX_SUBSCRIBES_PER_CALL): + for chunk in chain( + chunked_or_all( + pending_wildcard_subscriptions.items(), MAX_WILDCARD_SUBSCRIBES_PER_CALL + ), + chunked_or_all(pending_subscriptions.items(), MAX_SUBSCRIBES_PER_CALL), + ): chunk_list = list(chunk) + if not chunk_list: + continue result, mid = self._mqttc.subscribe(chunk_list) diff --git a/homeassistant/components/mqtt/discovery.py b/homeassistant/components/mqtt/discovery.py index cf2941a3665..8e379633674 100644 --- a/homeassistant/components/mqtt/discovery.py +++ b/homeassistant/components/mqtt/discovery.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections import deque import functools +from itertools import chain import logging import re import time @@ -238,10 +239,6 @@ async def async_start( # noqa: C901 component, node_id, object_id = match.groups() - if component not in SUPPORTED_COMPONENTS: - _LOGGER.warning("Integration %s is not supported", component) - return - if payload: try: discovery_payload = MQTTDiscoveryPayload(json_loads_object(payload)) @@ -351,9 +348,15 @@ async def async_start( # noqa: C901 0, job_type=HassJobType.Callback, ) - for topic in ( - f"{discovery_topic}/+/+/config", - f"{discovery_topic}/+/+/+/config", + for topic in chain( + ( + f"{discovery_topic}/{component}/+/config" + for component in SUPPORTED_COMPONENTS + ), + ( + f"{discovery_topic}/{component}/+/+/config" + for component in SUPPORTED_COMPONENTS + ), ) ] diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index c5887016f2e..dcded7d187a 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -13,6 +13,7 @@ import pytest from homeassistant.components import mqtt from homeassistant.components.mqtt.client import RECONNECT_INTERVAL_SECONDS +from homeassistant.components.mqtt.const import SUPPORTED_COMPONENTS from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import ( @@ -1614,8 +1615,9 @@ async def test_subscription_done_when_birth_message_is_sent( """Test sending birth message until initial subscription has been completed.""" mqtt_client_mock = setup_with_birth_msg_client_mock subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - assert ("homeassistant/+/+/config", 0) in subscribe_calls - assert ("homeassistant/+/+/+/config", 0) in subscribe_calls + for component in SUPPORTED_COMPONENTS: + assert (f"homeassistant/{component}/+/config", 0) in subscribe_calls + assert (f"homeassistant/{component}/+/+/config", 0) in subscribe_calls mqtt_client_mock.publish.assert_called_with( "homeassistant/status", "online", 0, False ) diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index f7ebd039d1a..c135c29ebc5 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -16,7 +16,10 @@ import yaml from homeassistant import config as module_hass_config from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info -from homeassistant.components.mqtt.const import 
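A condensed, runnable sketch of the batching behaviour introduced above (illustrative only: chunked_or_all is approximated by a plain chunking helper, and wildcard detection is simplified to scanning for MQTT wildcard characters rather than consulting the tracked wildcard subscriptions). Wildcard filters are sent one per SUBSCRIBE call, while exact topics are grouped up to 500 per call.

from itertools import chain, islice

MAX_WILDCARD_SUBSCRIBES_PER_CALL = 1
MAX_SUBSCRIBES_PER_CALL = 500


def chunked(items, size):
    """Yield lists of at most `size` items (rough stand-in for chunked_or_all)."""
    iterator = iter(items)
    while chunk := list(islice(iterator, size)):
        yield chunk


pending = {
    "homeassistant/sensor/+/config": 0,  # wildcard discovery filter
    "zigbee2mqtt/kitchen/state": 0,      # exact topic
    "zigbee2mqtt/hallway/state": 0,      # exact topic
}
wildcards = {t: q for t, q in pending.items() if "+" in t or "#" in t}
exact = {t: q for t, q in pending.items() if t not in wildcards}

subscribe_calls = [
    chunk
    for chunk in chain(
        chunked(wildcards.items(), MAX_WILDCARD_SUBSCRIBES_PER_CALL),
        chunked(exact.items(), MAX_SUBSCRIBES_PER_CALL),
    )
    if chunk
]
# subscribe_calls -> [[("homeassistant/sensor/+/config", 0)],
#                     [("zigbee2mqtt/kitchen/state", 0), ("zigbee2mqtt/hallway/state", 0)]]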
MQTT_CONNECTION_STATE +from homeassistant.components.mqtt.const import ( + MQTT_CONNECTION_STATE, + SUPPORTED_COMPONENTS, +) from homeassistant.components.mqtt.mixins import MQTT_ATTRIBUTES_BLOCKED from homeassistant.components.mqtt.models import PublishPayloadType from homeassistant.config_entries import ConfigEntryState @@ -75,9 +78,12 @@ type _StateDataType = list[tuple[_MqttMessageType, str | None, _AttributesType | def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: """Test of a call.""" all_calls = [] - for calls in mqtt_client_mock.subscribe.mock_calls: - for call in calls[1]: - all_calls.extend(call) + for call_l1 in mqtt_client_mock.subscribe.mock_calls: + if isinstance(call_l1[1][0], list): + for call_l2 in call_l1[1]: + all_calls.extend(call_l2) + else: + all_calls.append(call_l1[1]) return all_calls @@ -1178,7 +1184,10 @@ async def help_test_entity_id_update_subscriptions( state = hass.states.get(f"{domain}.test") assert state is not None - assert mqtt_mock.async_subscribe.call_count == len(topics) + 2 + DISCOVERY_COUNT + assert ( + mqtt_mock.async_subscribe.call_count + == len(topics) + 2 * len(SUPPORTED_COMPONENTS) + DISCOVERY_COUNT + ) for topic in topics: mqtt_mock.async_subscribe.assert_any_call( topic, ANY, ANY, ANY, HassJobType.Callback diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 58de3c53c52..7f58fc75dae 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -15,6 +15,7 @@ from homeassistant.components.mqtt.abbreviations import ( ABBREVIATIONS, DEVICE_ABBREVIATIONS, ) +from homeassistant.components.mqtt.const import SUPPORTED_COMPONENTS from homeassistant.components.mqtt.discovery import ( MQTT_DISCOVERY_DONE, MQTT_DISCOVERY_NEW, @@ -73,13 +74,10 @@ async def test_subscribing_config_topic( discovery_topic = "homeassistant" await async_start(hass, discovery_topic, entry) - call_args1 = mqtt_mock.async_subscribe.mock_calls[0][1] - assert call_args1[2] == 0 - call_args2 = mqtt_mock.async_subscribe.mock_calls[1][1] - assert call_args2[2] == 0 - topics = [call_args1[0], call_args2[0]] - assert discovery_topic + "/+/+/config" in topics - assert discovery_topic + "/+/+/+/config" in topics + topics = [call[1][0] for call in mqtt_mock.async_subscribe.mock_calls] + for component in SUPPORTED_COMPONENTS: + assert f"{discovery_topic}/{component}/+/config" in topics + assert f"{discovery_topic}/{component}/+/+/config" in topics @pytest.mark.parametrize( @@ -198,8 +196,6 @@ async def test_only_valid_components( await hass.async_block_till_done() - assert f"Integration {invalid_component} is not supported" in caplog.text - assert not mock_dispatcher_send.called From d327ec904c9e6ac65b39713439c1e4ad13f62cd0 Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Tue, 20 Aug 2024 12:38:04 -0400 Subject: [PATCH 0935/1635] Add Sonos tests for media_player play Sonos Playlist and improve error handling (#124126) * initial commit * initial commit * initial commit * updates * add json fixture * use match on pytest.raises --- .../components/sonos/media_player.py | 15 +++-- homeassistant/components/sonos/strings.json | 3 + tests/components/sonos/conftest.py | 27 ++++++++- .../sonos/fixtures/sonos_playlists.json | 13 +++++ tests/components/sonos/test_media_player.py | 57 +++++++++++++++++-- 5 files changed, 102 insertions(+), 13 deletions(-) create mode 100644 tests/components/sonos/fixtures/sonos_playlists.json diff --git 
a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 590761752c5..ff6981cfc9b 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -643,11 +643,16 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): playlists = soco.get_sonos_playlists(complete_result=True) playlist = next((p for p in playlists if p.title == media_id), None) if not playlist: - _LOGGER.error('Could not find a Sonos playlist named "%s"', media_id) - else: - soco.clear_queue() - soco.add_to_queue(playlist, timeout=LONG_SERVICE_TIMEOUT) - soco.play_from_queue(0) + raise ServiceValidationError( + translation_domain=SONOS_DOMAIN, + translation_key="invalid_sonos_playlist", + translation_placeholders={ + "name": media_id, + }, + ) + soco.clear_queue() + soco.add_to_queue(playlist, timeout=LONG_SERVICE_TIMEOUT) + soco.play_from_queue(0) elif media_type in PLAYABLE_MEDIA_TYPES: item = media_browser.get_media(self.media.library, media_id, media_type) diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 6521302b007..5833e5a27f2 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -177,6 +177,9 @@ "exceptions": { "invalid_favorite": { "message": "Could not find a Sonos favorite: {name}" + }, + "invalid_sonos_playlist": { + "message": "Could not find Sonos playlist: {name}" } } } diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index cf75f23c322..996fd4c6663 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -10,7 +10,7 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from soco import SoCo from soco.alarms import Alarms -from soco.data_structures import DidlFavorite, SearchResult +from soco.data_structures import DidlFavorite, DidlPlaylistContainer, SearchResult from soco.events_base import Event as SonosEvent from homeassistant.components import ssdp, zeroconf @@ -184,6 +184,7 @@ class SoCoMockFactory: current_track_info_empty, battery_info, alarm_clock, + sonos_playlists: SearchResult, ) -> None: """Initialize the mock factory.""" self.mock_list: dict[str, MockSoCo] = {} @@ -192,6 +193,7 @@ class SoCoMockFactory: self.current_track_info = current_track_info_empty self.battery_info = battery_info self.alarm_clock = alarm_clock + self.sonos_playlists = sonos_playlists def cache_mock( self, mock_soco: MockSoCo, ip_address: str, name: str = "Zone A" @@ -204,6 +206,7 @@ class SoCoMockFactory: mock_soco.music_library = self.music_library mock_soco.get_current_track_info.return_value = self.current_track_info mock_soco.music_source_from_uri = SoCo.music_source_from_uri + mock_soco.get_sonos_playlists.return_value = self.sonos_playlists my_speaker_info = self.speaker_info.copy() my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid @@ -254,11 +257,21 @@ def soco_sharelink(): @pytest.fixture(name="soco_factory") def soco_factory( - music_library, speaker_info, current_track_info_empty, battery_info, alarm_clock + music_library, + speaker_info, + current_track_info_empty, + battery_info, + alarm_clock, + sonos_playlists: SearchResult, ): """Create factory for instantiating SoCo mocks.""" factory = SoCoMockFactory( - music_library, speaker_info, current_track_info_empty, battery_info, alarm_clock + music_library, + speaker_info, + current_track_info_empty, + battery_info, + 
alarm_clock, + sonos_playlists, ) with ( patch("homeassistant.components.sonos.SoCo", new=factory.get_mock), @@ -335,6 +348,14 @@ def sonos_favorites_fixture() -> SearchResult: return SearchResult(favorite_list, "favorites", 3, 3, 1) +@pytest.fixture(name="sonos_playlists") +def sonos_playlists_fixture() -> SearchResult: + """Create sonos playlist fixture.""" + playlists = load_json_value_fixture("sonos_playlists.json", "sonos") + playlists_list = [DidlPlaylistContainer.from_dict(pl) for pl in playlists] + return SearchResult(playlists_list, "sonos_playlists", 1, 1, 0) + + class MockMusicServiceItem: """Mocks a Soco MusicServiceItem.""" diff --git a/tests/components/sonos/fixtures/sonos_playlists.json b/tests/components/sonos/fixtures/sonos_playlists.json new file mode 100644 index 00000000000..f0731467697 --- /dev/null +++ b/tests/components/sonos/fixtures/sonos_playlists.json @@ -0,0 +1,13 @@ +[ + { + "title": "sample playlist", + "parent_id": "SQ:", + "item_id": "SQ:0", + "resources": [ + { + "uri": "file:///jffs/settings/savedqueues.rsq#0", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + } +] diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index d96ceb900e7..b29ae1b6cfb 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -1,10 +1,10 @@ """Tests for the Sonos Media Player platform.""" -import logging from typing import Any from unittest.mock import patch import pytest +from soco.data_structures import SearchResult from syrupy import SnapshotAssertion from homeassistant.components.media_player import ( @@ -535,8 +535,10 @@ async def test_play_media_music_library_playlist_dne( soco_mock = soco_factory.mock_list.get("192.168.42.2") soco_mock.music_library.get_playlists.return_value = _mock_playlists - with caplog.at_level(logging.ERROR): - caplog.clear() + with pytest.raises( + ServiceValidationError, + match=f"Could not find Sonos playlist: {media_content_id}", + ): await hass.services.async_call( MP_DOMAIN, SERVICE_PLAY_MEDIA, @@ -548,8 +550,53 @@ async def test_play_media_music_library_playlist_dne( blocking=True, ) assert soco_mock.play_uri.call_count == 0 - assert media_content_id in caplog.text - assert "playlist" in caplog.text + + +async def test_play_sonos_playlist( + hass: HomeAssistant, + async_autosetup_sonos, + soco: MockSoCo, + sonos_playlists: SearchResult, +) -> None: + """Test that sonos playlists can be played.""" + + # Test a successful call + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: "sample playlist", + }, + blocking=True, + ) + assert soco.clear_queue.call_count == 1 + assert soco.add_to_queue.call_count == 1 + soco.add_to_queue.asset_called_with( + sonos_playlists[0], timeout=LONG_SERVICE_TIMEOUT + ) + + # Test playing a non-existent playlist + soco.clear_queue.reset_mock() + soco.add_to_queue.reset_mock() + media_content_id: str = "bad playlist" + with pytest.raises( + ServiceValidationError, + match=f"Could not find Sonos playlist: {media_content_id}", + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: media_content_id, + }, + blocking=True, + ) + assert soco.clear_queue.call_count == 0 + assert soco.add_to_queue.call_count == 0 @pytest.mark.parametrize( From 
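One assertion in the new Sonos test above looks like a typo rather than a check: asset_called_with is not a Mock assertion method, so the call silently creates an attribute and verifies nothing. The intended assertion is presumably:

soco.add_to_queue.assert_called_with(
    sonos_playlists[0], timeout=LONG_SERVICE_TIMEOUT
)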
ce82c79ff982eda3ba4982f442ab0563043ecaec Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 20 Aug 2024 11:38:29 -0500 Subject: [PATCH 0936/1635] Bump yalexs to 8.1.2 (#124303) --- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 13035d68dfe..5d7b253e952 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.0.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.1.2", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index ce1db53c844..e96a7d355ab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2965,7 +2965,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.0.2 +yalexs==8.1.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ea248a662bd..87007d2ba05 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2345,7 +2345,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.0.2 +yalexs==8.1.2 # homeassistant.components.yeelight yeelight==0.7.14 From d248bf596ac7e3ad24ef39a46da3cf0f1669eba3 Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Tue, 20 Aug 2024 09:45:16 -0700 Subject: [PATCH 0937/1635] Add missing strings for riemann options flow (#124317) --- homeassistant/components/integration/strings.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/integration/strings.json b/homeassistant/components/integration/strings.json index 55d4df1b45e..6186521aa1b 100644 --- a/homeassistant/components/integration/strings.json +++ b/homeassistant/components/integration/strings.json @@ -31,12 +31,14 @@ "round": "[%key:component::integration::config::step::user::data::round%]", "source": "[%key:component::integration::config::step::user::data::source%]", "unit_prefix": "[%key:component::integration::config::step::user::data::unit_prefix%]", - "unit_time": "[%key:component::integration::config::step::user::data::unit_time%]" + "unit_time": "[%key:component::integration::config::step::user::data::unit_time%]", + "max_sub_interval": "[%key:component::integration::config::step::user::data::max_sub_interval%]" }, "data_description": { "round": "[%key:component::integration::config::step::user::data_description::round%]", "unit_prefix": "[%key:component::integration::config::step::user::data_description::unit_prefix%]", - "unit_time": "[%key:component::integration::config::step::user::data_description::unit_time%]" + "unit_time": "[%key:component::integration::config::step::user::data_description::unit_time%]", + "max_sub_interval": "[%key:component::integration::config::step::user::data_description::max_sub_interval%]" } } } From 1d1a6ee52fb1937c6e38f197171f975b7a78b783 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 20 Aug 2024 12:50:37 -0500 Subject: [PATCH 0938/1635] Bump habluetooth to 3.3.2 (#124321) --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 657209cdba0..0dbe1475b8d 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -20,6 +20,6 @@ "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.19.4", "dbus-fast==2.22.1", - "habluetooth==3.1.3" + "habluetooth==3.3.2" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 170d453badd..58bf203c2de 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -27,7 +27,7 @@ dbus-fast==2.22.1 fnv-hash-fast==0.5.0 ha-av==10.1.1 ha-ffmpeg==3.2.0 -habluetooth==3.1.3 +habluetooth==3.3.2 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 diff --git a/requirements_all.txt b/requirements_all.txt index e96a7d355ab..6cb3438675c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1059,7 +1059,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.1.3 +habluetooth==3.3.2 # homeassistant.components.cloud hass-nabucasa==0.81.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 87007d2ba05..38b6f91a6f7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -894,7 +894,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.1.3 +habluetooth==3.3.2 # homeassistant.components.cloud hass-nabucasa==0.81.1 From f33328308c445747932e854e39647551e0537c88 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Wed, 21 Aug 2024 11:31:19 +0200 Subject: [PATCH 0939/1635] Improve Bang & Olufsen notification type comparison (#123067) * Change notification comparison * Use try_parse_enum to determine notification type --- .../components/bang_olufsen/websocket.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index 7415d0f362b..dc505ddf3c4 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -20,6 +20,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.util.enum import try_parse_enum from .const import ( BANG_OLUFSEN_WEBSOCKET_EVENT, @@ -92,12 +93,14 @@ class BangOlufsenWebsocket(BangOlufsenBase): self, notification: WebsocketNotificationTag ) -> None: """Send notification dispatch.""" - if notification.value: - if WebsocketNotification.REMOTE_MENU_CHANGED in notification.value: - async_dispatcher_send( - self.hass, - f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", - ) + # Try to match the notification type with available WebsocketNotification members + notification_type = try_parse_enum(WebsocketNotification, notification.value) + + if notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: + async_dispatcher_send( + self.hass, + 
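For readers unfamiliar with the helper imported above: try_parse_enum (from homeassistant.util.enum) returns the matching enum member or None instead of raising on unknown values, which is what makes the identity comparison in this hunk safe. It is roughly equivalent to this sketch:

def try_parse_enum(cls, value):
    """Return cls(value) when value maps to a member, else None (sketch)."""
    try:
        return cls(value)
    except ValueError:
        return None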
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", + ) def on_playback_error_notification(self, notification: PlaybackError) -> None: """Send playback_error dispatch.""" From c276cfc37192f5f1e3ce1cc06b512d8b6f65c6ae Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Wed, 21 Aug 2024 11:33:47 +0200 Subject: [PATCH 0940/1635] Add custom panel for LCN configuration (#108664) * Add LCN panel using lcn-frontend module * Move panel from sidebar to integration configuration * Change OptionFlow to reconfigure step * Change OptionFlow to reconfigure step * Remove deprecation warning * Fix docstring * Add tests for lcn websockets * Remove deepcopy * Bump lcn-frontend to 0.1.3 * Add tests for lcn websockets * Remove websocket command lcn/hosts * Websocket scan tests cover modules not stored in config_entry * Add comment to mock of hass.http * Add a decorater to ensure the config_entry exists and return it * Use entry_id instead of host_id * Bump lcn-frontend to 0.1.5 * Use auto_id for websocket client send_json * Create issues on yaml import errors * Remove low level key deprecation warnings * Method renaming * Change issue id in issue creation * Update tests for issue creation --- homeassistant/components/lcn/__init__.py | 5 + homeassistant/components/lcn/binary_sensor.py | 11 +- homeassistant/components/lcn/climate.py | 5 + homeassistant/components/lcn/config_flow.py | 176 +++++-- homeassistant/components/lcn/const.py | 1 + homeassistant/components/lcn/cover.py | 11 +- homeassistant/components/lcn/helpers.py | 10 + homeassistant/components/lcn/light.py | 5 + homeassistant/components/lcn/manifest.json | 4 +- homeassistant/components/lcn/scene.py | 5 + homeassistant/components/lcn/schemas.py | 47 +- homeassistant/components/lcn/sensor.py | 5 + homeassistant/components/lcn/strings.json | 52 ++ homeassistant/components/lcn/switch.py | 11 +- homeassistant/components/lcn/websocket.py | 450 ++++++++++++++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 2 +- requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/lcn/conftest.py | 11 +- .../lcn/fixtures/config_entry_pchk.json | 4 +- tests/components/lcn/test_config_flow.py | 181 ++++++- tests/components/lcn/test_init.py | 3 +- tests/components/lcn/test_websocket.py | 303 ++++++++++++ 24 files changed, 1234 insertions(+), 75 deletions(-) create mode 100644 homeassistant/components/lcn/websocket.py create mode 100644 tests/components/lcn/test_websocket.py diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 6866a10d55e..75f417cb3a5 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -27,6 +27,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DIM_MODE, CONF_DOMAIN_DATA, CONF_SK_NUM_TRIES, @@ -47,6 +48,7 @@ from .helpers import ( ) from .schemas import CONFIG_SCHEMA # noqa: F401 from .services import SERVICES +from .websocket import register_panel_and_ws_api _LOGGER = logging.getLogger(__name__) @@ -115,6 +117,7 @@ async def async_setup_entry( _LOGGER.debug('LCN connected to "%s"', config_entry.title) hass.data[DOMAIN][config_entry.entry_id] = { CONNECTION: lcn_connection, + ADD_ENTITIES_CALLBACKS: {}, } # Update config_entry with LCN device serials await async_update_config_entry(hass, config_entry) @@ -140,6 +143,8 @@ async def async_setup_entry( DOMAIN, service_name, 
service(hass).async_call_service, service.schema ) + await register_panel_and_ws_api(hass) + return True diff --git a/homeassistant/components/lcn/binary_sensor.py b/homeassistant/components/lcn/binary_sensor.py index 35836e4653e..d7e5798bb91 100644 --- a/homeassistant/components/lcn/binary_sensor.py +++ b/homeassistant/components/lcn/binary_sensor.py @@ -15,7 +15,13 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import LcnEntity -from .const import BINSENSOR_PORTS, CONF_DOMAIN_DATA, SETPOINTS +from .const import ( + ADD_ENTITIES_CALLBACKS, + BINSENSOR_PORTS, + CONF_DOMAIN_DATA, + DOMAIN, + SETPOINTS, +) from .helpers import DeviceConnectionType, InputType, get_device_connection @@ -43,6 +49,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_BINARY_SENSOR: (async_add_entities, create_lcn_binary_sensor_entity)} + ) async_add_entities( create_lcn_binary_sensor_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/climate.py b/homeassistant/components/lcn/climate.py index c03061618f7..d34a872d867 100644 --- a/homeassistant/components/lcn/climate.py +++ b/homeassistant/components/lcn/climate.py @@ -28,11 +28,13 @@ from homeassistant.helpers.typing import ConfigType from . import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, CONF_LOCKABLE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_SETPOINT, + DOMAIN, ) from .helpers import DeviceConnectionType, InputType, get_device_connection @@ -56,6 +58,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_CLIMATE: (async_add_entities, create_lcn_climate_entity)} + ) async_add_entities( create_lcn_climate_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index d05eb896f27..664f32e5585 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -3,32 +3,50 @@ from __future__ import annotations import logging +from typing import Any import pypck +import voluptuous as vol -from homeassistant.config_entries import ( - SOURCE_IMPORT, - ConfigEntry, - ConfigFlow, - ConfigFlowResult, -) +from homeassistant import config_entries from homeassistant.const import ( + CONF_BASE, + CONF_DEVICES, + CONF_ENTITIES, CONF_HOST, CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType -from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN +from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN from .helpers import purge_device_registry, purge_entity_registry _LOGGER = logging.getLogger(__name__) +CONFIG_DATA = { + vol.Required(CONF_IP_ADDRESS, default=""): str, + vol.Required(CONF_PORT, default=4114): cv.positive_int, + vol.Required(CONF_USERNAME, default=""): str, + vol.Required(CONF_PASSWORD, default=""): str, + 
vol.Required(CONF_SK_NUM_TRIES, default=0): cv.positive_int, + vol.Required(CONF_DIM_MODE, default="STEPS200"): vol.In(DIM_MODES), +} -def get_config_entry(hass: HomeAssistant, data: ConfigType) -> ConfigEntry | None: +USER_DATA = {vol.Required(CONF_HOST, default="pchk"): str, **CONFIG_DATA} + +CONFIG_SCHEMA = vol.Schema(CONFIG_DATA) +USER_SCHEMA = vol.Schema(USER_DATA) + + +def get_config_entry( + hass: HomeAssistant, data: ConfigType +) -> config_entries.ConfigEntry | None: """Check config entries for already configured entries based on the ip address/port.""" return next( ( @@ -41,8 +59,10 @@ def get_config_entry(hass: HomeAssistant, data: ConfigType) -> ConfigEntry | Non ) -async def validate_connection(host_name: str, data: ConfigType) -> ConfigType: +async def validate_connection(data: ConfigType) -> str | None: """Validate if a connection to LCN can be established.""" + error = None + host_name = data[CONF_HOST] host = data[CONF_IP_ADDRESS] port = data[CONF_PORT] username = data[CONF_USERNAME] @@ -61,43 +81,71 @@ async def validate_connection(host_name: str, data: ConfigType) -> ConfigType: host, port, username, password, settings=settings ) - await connection.async_connect(timeout=5) + try: + await connection.async_connect(timeout=5) + _LOGGER.debug("LCN connection validated") + except pypck.connection.PchkAuthenticationError: + _LOGGER.warning('Authentication on PCHK "%s" failed', host_name) + error = "authentication_error" + except pypck.connection.PchkLicenseError: + _LOGGER.warning( + 'Maximum number of connections on PCHK "%s" was ' + "reached. An additional license key is required", + host_name, + ) + error = "license_error" + except (TimeoutError, ConnectionRefusedError): + _LOGGER.warning('Connection to PCHK "%s" failed', host_name) + error = "connection_refused" - _LOGGER.debug("LCN connection validated") await connection.async_close() - return data + return error -class LcnFlowHandler(ConfigFlow, domain=DOMAIN): +class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a LCN config flow.""" VERSION = 1 - async def async_step_import(self, data: ConfigType) -> ConfigFlowResult: + async def async_step_import( + self, data: ConfigType + ) -> config_entries.ConfigFlowResult: """Import existing configuration from LCN.""" - host_name = data[CONF_HOST] # validate the imported connection parameters - try: - await validate_connection(host_name, data) - except pypck.connection.PchkAuthenticationError: - _LOGGER.warning('Authentication on PCHK "%s" failed', host_name) - return self.async_abort(reason="authentication_error") - except pypck.connection.PchkLicenseError: - _LOGGER.warning( - ( - 'Maximum number of connections on PCHK "%s" was ' - "reached. 
An additional license key is required" - ), - host_name, + if error := await validate_connection(data): + async_create_issue( + self.hass, + DOMAIN, + error, + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.ERROR, + translation_key=error, + translation_placeholders={ + "url": "/config/integrations/dashboard/add?domain=lcn" + }, ) - return self.async_abort(reason="license_error") - except TimeoutError: - _LOGGER.warning('Connection to PCHK "%s" failed', host_name) - return self.async_abort(reason="connection_timeout") + return self.async_abort(reason=error) + + async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.12.0", + is_fixable=False, + is_persistent=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "LCN", + }, + ) # check if we already have a host with the same address configured if entry := get_config_entry(self.hass, data): - entry.source = SOURCE_IMPORT + entry.source = config_entries.SOURCE_IMPORT # Cleanup entity and device registry, if we imported from configuration.yaml to # remove orphans when entities were removed from configuration purge_entity_registry(self.hass, entry.entry_id, data) @@ -106,4 +154,64 @@ class LcnFlowHandler(ConfigFlow, domain=DOMAIN): self.hass.config_entries.async_update_entry(entry, data=data) return self.async_abort(reason="existing_configuration_updated") - return self.async_create_entry(title=f"{host_name}", data=data) + return self.async_create_entry(title=f"{data[CONF_HOST]}", data=data) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> config_entries.ConfigFlowResult: + """Handle a flow initiated by the user.""" + if user_input is None: + return self.async_show_form(step_id="user", data_schema=USER_SCHEMA) + + errors = None + if get_config_entry(self.hass, user_input): + errors = {CONF_BASE: "already_configured"} + elif (error := await validate_connection(user_input)) is not None: + errors = {CONF_BASE: error} + + if errors is not None: + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + USER_SCHEMA, user_input + ), + errors=errors, + ) + + data: dict = { + **user_input, + CONF_DEVICES: [], + CONF_ENTITIES: [], + } + + return self.async_create_entry(title=data[CONF_HOST], data=data) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> config_entries.ConfigFlowResult: + """Reconfigure LCN configuration.""" + errors = None + entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert entry + + if user_input is not None: + user_input[CONF_HOST] = entry.data[CONF_HOST] + + await self.hass.config_entries.async_unload(entry.entry_id) + if (error := await validate_connection(user_input)) is not None: + errors = {CONF_BASE: error} + + if errors is None: + data = entry.data.copy() + data.update(user_input) + self.hass.config_entries.async_update_entry(entry, data=data) + await self.hass.config_entries.async_setup(entry.entry_id) + return self.async_abort(reason="reconfigure_successful") + + await self.hass.config_entries.async_setup(entry.entry_id) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema(CONFIG_SCHEMA, entry.data), + errors=errors or {}, + ) diff --git a/homeassistant/components/lcn/const.py b/homeassistant/components/lcn/const.py index 
bcf9ecdf295..24d2e68495c 100644 --- a/homeassistant/components/lcn/const.py +++ b/homeassistant/components/lcn/const.py @@ -18,6 +18,7 @@ DOMAIN = "lcn" DATA_LCN = "lcn" DEFAULT_NAME = "pchk" +ADD_ENTITIES_CALLBACKS = "add_entities_callbacks" CONNECTION = "connection" CONF_HARDWARE_SERIAL = "hardware_serial" CONF_SOFTWARE_SERIAL = "software_serial" diff --git a/homeassistant/components/lcn/cover.py b/homeassistant/components/lcn/cover.py index edc60a202a1..a2f508cee97 100644 --- a/homeassistant/components/lcn/cover.py +++ b/homeassistant/components/lcn/cover.py @@ -14,7 +14,13 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import LcnEntity -from .const import CONF_DOMAIN_DATA, CONF_MOTOR, CONF_REVERSE_TIME +from .const import ( + ADD_ENTITIES_CALLBACKS, + CONF_DOMAIN_DATA, + CONF_MOTOR, + CONF_REVERSE_TIME, + DOMAIN, +) from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 @@ -40,6 +46,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN cover entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_COVER: (async_add_entities, create_lcn_cover_entity)} + ) async_add_entities( create_lcn_cover_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py index d46628fc6da..fd8c59ad46f 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -423,6 +423,16 @@ async def async_update_config_entry( hass.config_entries.async_update_entry(config_entry, data=new_data) +def get_device_config( + address: AddressType, config_entry: ConfigEntry +) -> ConfigType | None: + """Return the device configuration for given address and ConfigEntry.""" + for device_config in config_entry.data[CONF_DEVICES]: + if tuple(device_config[CONF_ADDRESS]) == address: + return cast(ConfigType, device_config) + return None + + def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]: """Validate that all connection names are unique. diff --git a/homeassistant/components/lcn/light.py b/homeassistant/components/lcn/light.py index 584161a0829..b896462c8a1 100644 --- a/homeassistant/components/lcn/light.py +++ b/homeassistant/components/lcn/light.py @@ -22,10 +22,12 @@ from homeassistant.helpers.typing import ConfigType from . 
import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DIMMABLE, CONF_DOMAIN_DATA, CONF_OUTPUT, CONF_TRANSITION, + DOMAIN, OUTPUT_PORTS, ) from .helpers import DeviceConnectionType, InputType, get_device_connection @@ -53,6 +55,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN light entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_LIGHT: (async_add_entities, create_lcn_light_entity)} + ) async_add_entities( create_lcn_light_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index e9717774e17..44aae34e9e6 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -2,9 +2,9 @@ "domain": "lcn", "name": "LCN", "codeowners": ["@alengwenus"], - "config_flow": false, + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.21"] + "requirements": ["pypck==0.7.21", "lcn-frontend==0.1.5"] } diff --git a/homeassistant/components/lcn/scene.py b/homeassistant/components/lcn/scene.py index 7e476987c53..f9220f676d6 100644 --- a/homeassistant/components/lcn/scene.py +++ b/homeassistant/components/lcn/scene.py @@ -15,10 +15,12 @@ from homeassistant.helpers.typing import ConfigType from . import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, CONF_OUTPUTS, CONF_REGISTER, CONF_TRANSITION, + DOMAIN, OUTPUT_PORTS, ) from .helpers import DeviceConnectionType, get_device_connection @@ -43,6 +45,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SCENE: (async_add_entities, create_lcn_scene_entity)} + ) async_add_entities( create_lcn_scene_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/schemas.py b/homeassistant/components/lcn/schemas.py index 9927ea5908d..0539e83dea8 100644 --- a/homeassistant/components/lcn/schemas.py +++ b/homeassistant/components/lcn/schemas.py @@ -58,6 +58,8 @@ from .const import ( ) from .helpers import has_unique_host_names, is_address +ADDRESS_SCHEMA = vol.Coerce(tuple) + # # Domain data # @@ -169,23 +171,32 @@ CONNECTION_SCHEMA = vol.Schema( ) CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CONNECTIONS): vol.All( - cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] - ), - vol.Optional(CONF_BINARY_SENSORS): vol.All( - cv.ensure_list, [BINARY_SENSORS_SCHEMA] - ), - vol.Optional(CONF_CLIMATES): vol.All(cv.ensure_list, [CLIMATES_SCHEMA]), - vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), - vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), - vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), - vol.Optional(CONF_SENSORS): vol.All(cv.ensure_list, [SENSORS_SCHEMA]), - vol.Optional(CONF_SWITCHES): vol.All(cv.ensure_list, [SWITCHES_SCHEMA]), - } - ) - }, + vol.All( + cv.deprecated(DOMAIN), + { + DOMAIN: vol.Schema( + { + vol.Required(CONF_CONNECTIONS): vol.All( + cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] + ), + vol.Optional(CONF_BINARY_SENSORS): vol.All( + cv.ensure_list, [BINARY_SENSORS_SCHEMA] + ), + vol.Optional(CONF_CLIMATES): vol.All( + cv.ensure_list, [CLIMATES_SCHEMA] 
+ ), + vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), + vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), + vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), + vol.Optional(CONF_SENSORS): vol.All( + cv.ensure_list, [SENSORS_SCHEMA] + ), + vol.Optional(CONF_SWITCHES): vol.All( + cv.ensure_list, [SWITCHES_SCHEMA] + ), + }, + ) + }, + ), extra=vol.ALLOW_EXTRA, ) diff --git a/homeassistant/components/lcn/sensor.py b/homeassistant/components/lcn/sensor.py index 32b97ab8317..b63ddbef8ad 100644 --- a/homeassistant/components/lcn/sensor.py +++ b/homeassistant/components/lcn/sensor.py @@ -22,7 +22,9 @@ from homeassistant.helpers.typing import ConfigType from . import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, + DOMAIN, LED_PORTS, S0_INPUTS, SETPOINTS, @@ -56,6 +58,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SENSOR: (async_add_entities, create_lcn_sensor_entity)} + ) async_add_entities( create_lcn_sensor_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index 3bab17cbbcd..a5f303c6392 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -14,6 +14,58 @@ "level": "Level" } }, + "config": { + "step": { + "user": { + "title": "Setup LCN host", + "description": "Set up new connection to LCN host.", + "data": { + "host": "[%key:common::config_flow::data::name%]", + "ip_address": "[%key:common::config_flow::data::ip%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]", + "sk_num_tries": "Segment coupler scan attempts", + "dim_mode": "Dimming mode" + } + }, + "reconfigure": { + "title": "Reconfigure LCN host", + "description": "Reconfigure connection to LCN host.", + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]", + "sk_num_tries": "Segment coupler scan attempts", + "dim_mode": "Dimming mode" + } + } + }, + "error": { + "authentication_error": "Authentication failed. Wrong username or password.", + "license_error": "Maximum number of connections was reached. An additional licence key is required.", + "connection_refused": "Unable to connect to PCHK. Check IP and port.", + "already_configured": "PCHK connection using the same ip address/port is already configured." + }, + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "issues": { + "authentication_error": { + "title": "Authentication failed.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure username and password are correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." 
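(Aside, not part of the patch: the issue and error keys added in this strings.json hunk — "authentication_error", "license_error", "connection_refused" — are the exact strings returned by validate_connection() in the config flow diff above, so each connection failure resolves to one of these translations. A minimal sketch of that mapping, assuming only the pypck exception classes already imported by the config flow; ERROR_KEYS and error_key_for are illustrative names, not code from the patch.)

from pypck.connection import PchkAuthenticationError, PchkLicenseError

# Exception raised while connecting -> translation key defined in strings.json.
ERROR_KEYS: list[tuple[type[BaseException], str]] = [
    (PchkAuthenticationError, "authentication_error"),
    (PchkLicenseError, "license_error"),
    (TimeoutError, "connection_refused"),
    (ConnectionRefusedError, "connection_refused"),
]

def error_key_for(exc: BaseException) -> str | None:
    """Return the translation key for a failed PCHK connection attempt, if any."""
    return next(
        (key for exc_type, key in ERROR_KEYS if isinstance(exc, exc_type)), None
    )

# error_key_for(TimeoutError())  -> "connection_refused"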
+ }, + "license_error": { + "title": "Maximum number of connections was reached.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure sufficient PCHK licenses are registered and restart Home Assistant.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, + "connection_refused": { + "title": "Unable to connect to PCHK.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the connection (IP and port) to the LCN bus coupler is correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + } + }, "services": { "output_abs": { "name": "Output absolute brightness", diff --git a/homeassistant/components/lcn/switch.py b/homeassistant/components/lcn/switch.py index b82394ced0d..1136e4c27a1 100644 --- a/homeassistant/components/lcn/switch.py +++ b/homeassistant/components/lcn/switch.py @@ -14,7 +14,13 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import LcnEntity -from .const import CONF_DOMAIN_DATA, CONF_OUTPUT, OUTPUT_PORTS +from .const import ( + ADD_ENTITIES_CALLBACKS, + CONF_DOMAIN_DATA, + CONF_OUTPUT, + DOMAIN, + OUTPUT_PORTS, +) from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 @@ -40,6 +46,9 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SWITCH: (async_add_entities, create_lcn_switch_entity)} + ) async_add_entities( create_lcn_switch_entity(hass, entity_config, config_entry) diff --git a/homeassistant/components/lcn/websocket.py b/homeassistant/components/lcn/websocket.py new file mode 100644 index 00000000000..5317bc86763 --- /dev/null +++ b/homeassistant/components/lcn/websocket.py @@ -0,0 +1,450 @@ +"""LCN Websocket API.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import TYPE_CHECKING, Any, Final + +import lcn_frontend as lcn_panel +import voluptuous as vol + +from homeassistant.components import panel_custom, websocket_api +from homeassistant.components.websocket_api import AsyncWebSocketCommandHandler +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_ADDRESS, + CONF_DEVICES, + CONF_DOMAIN, + CONF_ENTITIES, + CONF_ENTITY_ID, + CONF_NAME, + CONF_RESOURCE, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er +import homeassistant.helpers.config_validation as cv + +from .const import ( + ADD_ENTITIES_CALLBACKS, + CONF_DOMAIN_DATA, + CONF_HARDWARE_SERIAL, + CONF_HARDWARE_TYPE, + CONF_SOFTWARE_SERIAL, + CONNECTION, + DOMAIN, +) +from .helpers import ( + DeviceConnectionType, + async_update_device_config, + generate_unique_id, + get_device_config, + get_device_connection, + get_resource, + purge_device_registry, + purge_entity_registry, + register_lcn_address_devices, +) +from .schemas import ( + ADDRESS_SCHEMA, + DOMAIN_DATA_BINARY_SENSOR, + DOMAIN_DATA_CLIMATE, + DOMAIN_DATA_COVER, + DOMAIN_DATA_LIGHT, + DOMAIN_DATA_SCENE, + 
DOMAIN_DATA_SENSOR, + DOMAIN_DATA_SWITCH, +) + +if TYPE_CHECKING: + from homeassistant.components.websocket_api import ActiveConnection + +type AsyncLcnWebSocketCommandHandler = Callable[ + [HomeAssistant, ActiveConnection, dict[str, Any], ConfigEntry], Awaitable[None] +] + +URL_BASE: Final = "/lcn_static" + + +async def register_panel_and_ws_api(hass: HomeAssistant) -> None: + """Register the LCN Panel and Websocket API.""" + websocket_api.async_register_command(hass, websocket_get_device_configs) + websocket_api.async_register_command(hass, websocket_get_entity_configs) + websocket_api.async_register_command(hass, websocket_scan_devices) + websocket_api.async_register_command(hass, websocket_add_device) + websocket_api.async_register_command(hass, websocket_delete_device) + websocket_api.async_register_command(hass, websocket_add_entity) + websocket_api.async_register_command(hass, websocket_delete_entity) + + if DOMAIN not in hass.data.get("frontend_panels", {}): + hass.http.register_static_path( + URL_BASE, + path=lcn_panel.locate_dir(), + cache_headers=lcn_panel.is_prod_build, + ) + await panel_custom.async_register_panel( + hass=hass, + frontend_url_path=DOMAIN, + webcomponent_name=lcn_panel.webcomponent_name, + config_panel_domain=DOMAIN, + module_url=f"{URL_BASE}/{lcn_panel.entrypoint_js}", + embed_iframe=True, + require_admin=True, + ) + + +def get_config_entry( + func: AsyncLcnWebSocketCommandHandler, +) -> AsyncWebSocketCommandHandler: + """Websocket decorator to ensure the config_entry exists and return it.""" + + @callback + @wraps(func) + async def get_entry( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict + ) -> None: + """Get config_entry.""" + if not (config_entry := hass.config_entries.async_get_entry(msg["entry_id"])): + connection.send_result(msg["id"], False) + else: + await func(hass, connection, msg, config_entry) + + return get_entry + + +@websocket_api.require_admin +@websocket_api.websocket_command( + {vol.Required("type"): "lcn/devices", vol.Required("entry_id"): cv.string} +) +@websocket_api.async_response +@get_config_entry +async def websocket_get_device_configs( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Get device configs.""" + connection.send_result(msg["id"], config_entry.data[CONF_DEVICES]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/entities", + vol.Required("entry_id"): cv.string, + vol.Optional(CONF_ADDRESS): ADDRESS_SCHEMA, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_get_entity_configs( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Get entities configs.""" + if CONF_ADDRESS in msg: + entity_configs = [ + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if tuple(entity_config[CONF_ADDRESS]) == msg[CONF_ADDRESS] + ] + else: + entity_configs = config_entry.data[CONF_ENTITIES] + + entity_registry = er.async_get(hass) + for entity_config in entity_configs: + entity_unique_id = generate_unique_id( + config_entry.entry_id, + entity_config[CONF_ADDRESS], + entity_config[CONF_RESOURCE], + ) + entity_id = entity_registry.async_get_entity_id( + entity_config[CONF_DOMAIN], DOMAIN, entity_unique_id + ) + + entity_config[CONF_ENTITY_ID] = entity_id + + connection.send_result(msg["id"], entity_configs) + + +@websocket_api.require_admin 
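(Aside, not part of the patch: the get_config_entry decorator defined above is the shared guard for every LCN websocket command that follows — it resolves msg["entry_id"] to a config entry and answers False when no entry exists. Below is a stripped-down, self-contained sketch of that pattern for illustration only; ENTRIES, ensure_config_entry and handle_devices are hypothetical stand-ins, whereas the real handlers use hass.config_entries.async_get_entry and connection.send_result.)

import asyncio
from collections.abc import Awaitable, Callable
from functools import wraps
from typing import Any

# Hypothetical in-memory store standing in for hass.config_entries.
ENTRIES: dict[str, dict[str, Any]] = {"abc123": {"host": "pchk", "devices": []}}

Handler = Callable[[dict[str, Any], dict[str, Any]], Awaitable[None]]

def ensure_config_entry(func: Handler) -> Callable[[dict[str, Any]], Awaitable[None]]:
    """Resolve msg['entry_id'] before calling the handler; answer False if unknown."""

    @wraps(func)
    async def wrapper(msg: dict[str, Any]) -> None:
        if (entry := ENTRIES.get(msg["entry_id"])) is None:
            # Stands in for connection.send_result(msg["id"], False).
            print(f'{msg["id"]}: result=False')
            return
        await func(msg, entry)

    return wrapper

@ensure_config_entry
async def handle_devices(msg: dict[str, Any], entry: dict[str, Any]) -> None:
    """Loose analogue of websocket_get_device_configs: echo the stored device list."""
    print(f'{msg["id"]}: result={entry["devices"]}')

if __name__ == "__main__":
    asyncio.run(handle_devices({"id": 1, "entry_id": "abc123"}))  # prints: 1: result=[]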
+@websocket_api.websocket_command( + {vol.Required("type"): "lcn/devices/scan", vol.Required("entry_id"): cv.string} +) +@websocket_api.async_response +@get_config_entry +async def websocket_scan_devices( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Scan for new devices.""" + host_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION] + await host_connection.scan_modules() + + for device_connection in host_connection.address_conns.values(): + if not device_connection.is_group: + await async_create_or_update_device_in_config_entry( + hass, device_connection, config_entry + ) + + # create/update devices in device registry + register_lcn_address_devices(hass, config_entry) + + connection.send_result(msg["id"], config_entry.data[CONF_DEVICES]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/devices/add", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_add_device( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Add a device.""" + if get_device_config(msg[CONF_ADDRESS], config_entry): + connection.send_result( + msg["id"], False + ) # device_config already in config_entry + return + + device_config = { + CONF_ADDRESS: msg[CONF_ADDRESS], + CONF_NAME: "", + CONF_HARDWARE_SERIAL: -1, + CONF_SOFTWARE_SERIAL: -1, + CONF_HARDWARE_TYPE: -1, + } + + # update device info from LCN + device_connection = get_device_connection(hass, msg[CONF_ADDRESS], config_entry) + await async_update_device_config(device_connection, device_config) + + # add device_config to config_entry + device_configs = [*config_entry.data[CONF_DEVICES], device_config] + data = {**config_entry.data, CONF_DEVICES: device_configs} + hass.config_entries.async_update_entry(config_entry, data=data) + + # create/update devices in device registry + register_lcn_address_devices(hass, config_entry) + + connection.send_result(msg["id"], True) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/devices/delete", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_delete_device( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Delete a device.""" + device_config = get_device_config(msg[CONF_ADDRESS], config_entry) + + device_registry = dr.async_get(hass) + identifiers = { + (DOMAIN, generate_unique_id(config_entry.entry_id, msg[CONF_ADDRESS])) + } + device = device_registry.async_get_device(identifiers, set()) + + if not (device and device_config): + connection.send_result(msg["id"], False) + return + + # remove module/group device from config_entry data + device_configs = [ + dc for dc in config_entry.data[CONF_DEVICES] if dc != device_config + ] + data = {**config_entry.data, CONF_DEVICES: device_configs} + hass.config_entries.async_update_entry(config_entry, data=data) + + # remove all child devices (and entities) from config_entry data + for entity_config in data[CONF_ENTITIES][:]: + if tuple(entity_config[CONF_ADDRESS]) == msg[CONF_ADDRESS]: + data[CONF_ENTITIES].remove(entity_config) + + hass.config_entries.async_update_entry(config_entry, data=data) + + # cleanup 
registries + purge_entity_registry(hass, config_entry.entry_id, data) + purge_device_registry(hass, config_entry.entry_id, data) + + # return the device config, not all devices !!! + connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/entities/add", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_DOMAIN): cv.string, + vol.Required(CONF_DOMAIN_DATA): vol.Any( + DOMAIN_DATA_BINARY_SENSOR, + DOMAIN_DATA_SENSOR, + DOMAIN_DATA_SWITCH, + DOMAIN_DATA_LIGHT, + DOMAIN_DATA_CLIMATE, + DOMAIN_DATA_COVER, + DOMAIN_DATA_SCENE, + ), + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_add_entity( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Add an entity.""" + if not (device_config := get_device_config(msg[CONF_ADDRESS], config_entry)): + connection.send_result(msg["id"], False) + return + + domain_name = msg[CONF_DOMAIN] + domain_data = msg[CONF_DOMAIN_DATA] + resource = get_resource(domain_name, domain_data).lower() + unique_id = generate_unique_id( + config_entry.entry_id, + device_config[CONF_ADDRESS], + resource, + ) + + entity_registry = er.async_get(hass) + if entity_registry.async_get_entity_id(msg[CONF_DOMAIN], DOMAIN, unique_id): + connection.send_result(msg["id"], False) + return + + entity_config = { + CONF_ADDRESS: msg[CONF_ADDRESS], + CONF_NAME: msg[CONF_NAME], + CONF_RESOURCE: resource, + CONF_DOMAIN: domain_name, + CONF_DOMAIN_DATA: domain_data, + } + + # Create new entity and add to corresponding component + callbacks = hass.data[DOMAIN][msg["entry_id"]][ADD_ENTITIES_CALLBACKS] + async_add_entities, create_lcn_entity = callbacks[msg[CONF_DOMAIN]] + + entity = create_lcn_entity(hass, entity_config, config_entry) + async_add_entities([entity]) + + # Add entity config to config_entry + entity_configs = [*config_entry.data[CONF_ENTITIES], entity_config] + data = {**config_entry.data, CONF_ENTITIES: entity_configs} + + # schedule config_entry for save + hass.config_entries.async_update_entry(config_entry, data=data) + + connection.send_result(msg["id"], True) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/entities/delete", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + vol.Required(CONF_DOMAIN): cv.string, + vol.Required(CONF_RESOURCE): cv.string, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_delete_entity( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Delete an entity.""" + entity_config = next( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if ( + tuple(entity_config[CONF_ADDRESS]) == msg[CONF_ADDRESS] + and entity_config[CONF_DOMAIN] == msg[CONF_DOMAIN] + and entity_config[CONF_RESOURCE] == msg[CONF_RESOURCE] + ) + ), + None, + ) + + if entity_config is None: + connection.send_result(msg["id"], False) + return + + entity_configs = [ + ec for ec in config_entry.data[CONF_ENTITIES] if ec != entity_config + ] + data = {**config_entry.data, CONF_ENTITIES: entity_configs} + + hass.config_entries.async_update_entry(config_entry, data=data) + + # cleanup registries + purge_entity_registry(hass, config_entry.entry_id, data) + purge_device_registry(hass, config_entry.entry_id, 
data) + + connection.send_result(msg["id"]) + + +async def async_create_or_update_device_in_config_entry( + hass: HomeAssistant, + device_connection: DeviceConnectionType, + config_entry: ConfigEntry, +) -> None: + """Create or update device in config_entry according to given device_connection.""" + address = ( + device_connection.seg_id, + device_connection.addr_id, + device_connection.is_group, + ) + + device_configs = [*config_entry.data[CONF_DEVICES]] + data = {**config_entry.data, CONF_DEVICES: device_configs} + for device_config in data[CONF_DEVICES]: + if tuple(device_config[CONF_ADDRESS]) == address: + break # device already in config_entry + else: + # create new device_entry + device_config = { + CONF_ADDRESS: address, + CONF_NAME: "", + CONF_HARDWARE_SERIAL: -1, + CONF_SOFTWARE_SERIAL: -1, + CONF_HARDWARE_TYPE: -1, + } + data[CONF_DEVICES].append(device_config) + + # update device_entry + await async_update_device_config(device_connection, device_config) + + hass.config_entries.async_update_entry(config_entry, data=data) diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index eecb3a76aac..5e6d29f29f9 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -310,6 +310,7 @@ FLOWS = { "lastfm", "launch_library", "laundrify", + "lcn", "ld2410_ble", "leaone", "led_ble", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index b14531e1731..52215d232ad 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3178,7 +3178,7 @@ "lcn": { "name": "LCN", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push" }, "ld2410_ble": { diff --git a/requirements_all.txt b/requirements_all.txt index 6cb3438675c..a7bee65a8b1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1242,6 +1242,9 @@ lakeside==0.13 # homeassistant.components.laundrify laundrify-aio==1.2.2 +# homeassistant.components.lcn +lcn-frontend==0.1.5 + # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 38b6f91a6f7..7585c0cfa4e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1035,6 +1035,9 @@ lacrosse-view==1.0.2 # homeassistant.components.laundrify laundrify-aio==1.2.2 +# homeassistant.components.lcn +lcn-frontend==0.1.5 + # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index e29a7076430..b1f28b28465 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -3,7 +3,7 @@ from collections.abc import AsyncGenerator import json from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, Mock, patch import pypck from pypck.connection import PchkConnectionManager @@ -13,7 +13,7 @@ import pytest from homeassistant.components.lcn.const import DOMAIN from homeassistant.components.lcn.helpers import AddressType, generate_unique_id -from homeassistant.const import CONF_HOST +from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -60,6 +60,7 @@ class MockPchkConnectionManager(PchkConnectionManager): """Get LCN address connection.""" return 
super().get_address_conn(addr, request_serials) + scan_modules = AsyncMock() send_command = AsyncMock() @@ -67,6 +68,11 @@ def create_config_entry(name: str) -> MockConfigEntry: """Set up config entries with configuration data.""" fixture_filename = f"lcn/config_entry_{name}.json" entry_data = json.loads(load_fixture(fixture_filename)) + for device in entry_data[CONF_DEVICES]: + device[CONF_ADDRESS] = tuple(device[CONF_ADDRESS]) + for entity in entry_data[CONF_ENTITIES]: + entity[CONF_ADDRESS] = tuple(entity[CONF_ADDRESS]) + options = {} title = entry_data[CONF_HOST] @@ -97,6 +103,7 @@ async def init_integration( hass: HomeAssistant, entry: MockConfigEntry ) -> AsyncGenerator[MockPchkConnectionManager]: """Set up the LCN integration in Home Assistant.""" + hass.http = Mock() # needs to be mocked as hass.http.register_static_path is called when registering the frontend lcn_connection = None def lcn_connection_factory(*args, **kwargs): diff --git a/tests/components/lcn/fixtures/config_entry_pchk.json b/tests/components/lcn/fixtures/config_entry_pchk.json index 31b51adfce7..08ccd194578 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk.json +++ b/tests/components/lcn/fixtures/config_entry_pchk.json @@ -9,14 +9,14 @@ "devices": [ { "address": [0, 7, false], - "name": "", + "name": "TestModule", "hardware_serial": -1, "software_serial": -1, "hardware_type": -1 }, { "address": [0, 5, true], - "name": "", + "name": "TestGroup", "hardware_serial": -1, "software_serial": -1, "hardware_type": -1 diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index e1705e4b349..e4cb4c588e9 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -5,9 +5,11 @@ from unittest.mock import patch from pypck.connection import PchkAuthenticationError, PchkLicenseError import pytest -from homeassistant import config_entries +from homeassistant import config_entries, data_entry_flow +from homeassistant.components.lcn.config_flow import LcnFlowHandler, validate_connection from homeassistant.components.lcn.const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN from homeassistant.const import ( + CONF_BASE, CONF_DEVICES, CONF_ENTITIES, CONF_HOST, @@ -16,25 +18,33 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import issue_registry as ir from tests.common import MockConfigEntry -IMPORT_DATA = { - CONF_HOST: "pchk", +CONFIG_DATA = { CONF_IP_ADDRESS: "127.0.0.1", - CONF_PORT: 4114, + CONF_PORT: 1234, CONF_USERNAME: "lcn", CONF_PASSWORD: "lcn", CONF_SK_NUM_TRIES: 0, CONF_DIM_MODE: "STEPS200", +} + +CONNECTION_DATA = {CONF_HOST: "pchk", **CONFIG_DATA} + +IMPORT_DATA = { + **CONNECTION_DATA, CONF_DEVICES: [], CONF_ENTITIES: [], } -async def test_step_import(hass: HomeAssistant) -> None: +async def test_step_import( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: """Test for import step.""" with ( @@ -46,14 +56,18 @@ async def test_step_import(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "pchk" assert result["data"] == IMPORT_DATA + assert issue_registry.async_get_issue( 
+ HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) -async def test_step_import_existing_host(hass: HomeAssistant) -> None: +async def test_step_import_existing_host( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: """Test for update of config_entry if imported host already exists.""" # Create config entry and add it to hass @@ -67,13 +81,15 @@ async def test_step_import_existing_host(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data ) - await hass.async_block_till_done() # Check if config entry was updated assert result["type"] is FlowResultType.ABORT assert result["reason"] == "existing_configuration_updated" assert mock_entry.source == config_entries.SOURCE_IMPORT assert mock_entry.data == IMPORT_DATA + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) @pytest.mark.parametrize( @@ -81,10 +97,12 @@ async def test_step_import_existing_host(hass: HomeAssistant) -> None: [ (PchkAuthenticationError, "authentication_error"), (PchkLicenseError, "license_error"), - (TimeoutError, "connection_timeout"), + (TimeoutError, "connection_refused"), ], ) -async def test_step_import_error(hass: HomeAssistant, error, reason) -> None: +async def test_step_import_error( + hass: HomeAssistant, issue_registry: ir.IssueRegistry, error, reason +) -> None: """Test for error in import is handled correctly.""" with patch( "pypck.connection.PchkConnectionManager.async_connect", side_effect=error @@ -94,7 +112,146 @@ async def test_step_import_error(hass: HomeAssistant, error, reason) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == reason + assert issue_registry.async_get_issue(DOMAIN, reason) + + +async def test_show_form(hass: HomeAssistant) -> None: + """Test that the form is served with no input.""" + flow = LcnFlowHandler() + flow.hass = hass + + result = await flow.async_step_user(user_input=None) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "user" + + +async def test_step_user(hass): + """Test for user step.""" + with ( + patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), + patch("homeassistant.components.lcn.async_setup_entry", return_value=True), + ): + data = CONNECTION_DATA.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=data + ) + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["title"] == CONNECTION_DATA[CONF_HOST] + assert result["data"] == { + **CONNECTION_DATA, + CONF_DEVICES: [], + CONF_ENTITIES: [], + } + + +async def test_step_user_existing_host(hass, entry): + """Test for user defined host already exists.""" + entry.add_to_hass(hass) + + with patch("pypck.connection.PchkConnectionManager.async_connect"): + config_data = entry.data.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=config_data + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == {CONF_BASE: "already_configured"} + + +@pytest.mark.parametrize( + ("error", "errors"), + [ + (PchkAuthenticationError, 
{CONF_BASE: "authentication_error"}), + (PchkLicenseError, {CONF_BASE: "license_error"}), + (TimeoutError, {CONF_BASE: "connection_refused"}), + ], +) +async def test_step_user_error(hass, error, errors): + """Test for error in user step is handled correctly.""" + with patch( + "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + ): + data = CONNECTION_DATA.copy() + data.update({CONF_HOST: "pchk"}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=data + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == errors + + +async def test_step_reconfigure(hass, entry): + """Test for reconfigure step.""" + entry.add_to_hass(hass) + old_entry_data = entry.data.copy() + + with ( + patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), + patch("homeassistant.components.lcn.async_setup_entry", return_value=True), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=CONFIG_DATA.copy(), + ) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry.title == CONNECTION_DATA[CONF_HOST] + assert entry.data == {**old_entry_data, **CONFIG_DATA} + + +@pytest.mark.parametrize( + ("error", "errors"), + [ + (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), + (PchkLicenseError, {CONF_BASE: "license_error"}), + (TimeoutError, {CONF_BASE: "connection_refused"}), + ], +) +async def test_step_reconfigure_error(hass, entry, error, errors): + """Test for error in reconfigure step is handled correctly.""" + entry.add_to_hass(hass) + with patch( + "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + ): + data = {**CONNECTION_DATA, CONF_HOST: "pchk"} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=data, + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == errors + + +async def test_validate_connection(): + """Test the connection validation.""" + data = CONNECTION_DATA.copy() + + with ( + patch("pypck.connection.PchkConnectionManager.async_connect") as async_connect, + patch("pypck.connection.PchkConnectionManager.async_close") as async_close, + ): + result = await validate_connection(data=data) + + assert async_connect.is_called + assert async_close.is_called + assert result is None diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 670735439ce..c118b98ecef 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -1,6 +1,6 @@ """Test init of LCN integration.""" -from unittest.mock import patch +from unittest.mock import Mock, patch from pypck.connection import ( PchkAuthenticationError, @@ -31,6 +31,7 @@ async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) -> None: """Test a successful setup and unload of multiple entries.""" + hass.http = Mock() with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): for config_entry in (entry, entry2): 
config_entry.add_to_hass(hass) diff --git a/tests/components/lcn/test_websocket.py b/tests/components/lcn/test_websocket.py new file mode 100644 index 00000000000..f1f0a19b572 --- /dev/null +++ b/tests/components/lcn/test_websocket.py @@ -0,0 +1,303 @@ +"""LCN Websocket Tests.""" + +from pypck.lcn_addr import LcnAddr +import pytest + +from homeassistant.components.lcn.const import CONF_DOMAIN_DATA +from homeassistant.components.lcn.helpers import get_device_config, get_resource +from homeassistant.const import ( + CONF_ADDRESS, + CONF_DEVICES, + CONF_DOMAIN, + CONF_ENTITIES, + CONF_NAME, + CONF_RESOURCE, + CONF_TYPE, +) +from homeassistant.core import HomeAssistant + +from tests.typing import WebSocketGenerator + +DEVICES_PAYLOAD = {CONF_TYPE: "lcn/devices", "entry_id": ""} +ENTITIES_PAYLOAD = { + CONF_TYPE: "lcn/entities", + "entry_id": "", +} +SCAN_PAYLOAD = {CONF_TYPE: "lcn/devices/scan", "entry_id": ""} +DEVICES_ADD_PAYLOAD = { + CONF_TYPE: "lcn/devices/add", + "entry_id": "", + CONF_ADDRESS: (0, 10, False), +} +DEVICES_DELETE_PAYLOAD = { + CONF_TYPE: "lcn/devices/delete", + "entry_id": "", + CONF_ADDRESS: (0, 7, False), +} +ENTITIES_ADD_PAYLOAD = { + CONF_TYPE: "lcn/entities/add", + "entry_id": "", + CONF_ADDRESS: (0, 7, False), + CONF_NAME: "test_switch", + CONF_DOMAIN: "switch", + CONF_DOMAIN_DATA: {"output": "RELAY5"}, +} +ENTITIES_DELETE_PAYLOAD = { + CONF_TYPE: "lcn/entities/delete", + "entry_id": "", + CONF_ADDRESS: (0, 7, False), + CONF_DOMAIN: "switch", + CONF_RESOURCE: "relay1", +} + + +async def test_lcn_devices_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices command.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({**DEVICES_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + assert len(res["result"]) == len(entry.data[CONF_DEVICES]) + assert all( + {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} + in entry.data[CONF_DEVICES] + for result in res["result"] + ) + + +@pytest.mark.parametrize( + "payload", + [ + ENTITIES_PAYLOAD, + {**ENTITIES_PAYLOAD, CONF_ADDRESS: (0, 7, False)}, + ], +) +async def test_lcn_entities_command( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entry, + lcn_connection, + payload, +) -> None: + """Test lcn/entities command.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + **payload, + "entry_id": entry.entry_id, + } + ) + + res = await client.receive_json() + assert res["success"], res + entities = [ + entity + for entity in entry.data[CONF_ENTITIES] + if CONF_ADDRESS not in payload or entity[CONF_ADDRESS] == payload[CONF_ADDRESS] + ] + assert len(res["result"]) == len(entities) + assert all( + {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} in entities + for result in res["result"] + ) + + +async def test_lcn_devices_scan_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices/scan command.""" + # add new module which is not stored in config_entry + lcn_connection.get_address_conn(LcnAddr(0, 10, False)) + + client = await hass_ws_client(hass) + await client.send_json_auto_id({**SCAN_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + + lcn_connection.scan_modules.assert_awaited() + assert len(res["result"]) == len(entry.data[CONF_DEVICES]) + assert all( + {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} + in 
entry.data[CONF_DEVICES] + for result in res["result"] + ) + + +async def test_lcn_devices_add_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices/add command.""" + client = await hass_ws_client(hass) + assert get_device_config((0, 10, False), entry) is None + + await client.send_json_auto_id({**DEVICES_ADD_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + + assert get_device_config((0, 10, False), entry) + + +async def test_lcn_devices_delete_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices/delete command.""" + client = await hass_ws_client(hass) + assert get_device_config((0, 7, False), entry) + + await client.send_json_auto_id( + {**DEVICES_DELETE_PAYLOAD, "entry_id": entry.entry_id} + ) + + res = await client.receive_json() + assert res["success"], res + assert get_device_config((0, 7, False), entry) is None + + +async def test_lcn_entities_add_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/entities/add command.""" + client = await hass_ws_client(hass) + + entity_config = { + key: ENTITIES_ADD_PAYLOAD[key] + for key in (CONF_ADDRESS, CONF_NAME, CONF_DOMAIN, CONF_DOMAIN_DATA) + } + + resource = get_resource( + ENTITIES_ADD_PAYLOAD[CONF_DOMAIN], ENTITIES_ADD_PAYLOAD[CONF_DOMAIN_DATA] + ).lower() + + assert {**entity_config, CONF_RESOURCE: resource} not in entry.data[CONF_ENTITIES] + + await client.send_json_auto_id({**ENTITIES_ADD_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + + assert {**entity_config, CONF_RESOURCE: resource} in entry.data[CONF_ENTITIES] + + +async def test_lcn_entities_delete_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/entities/delete command.""" + client = await hass_ws_client(hass) + + assert ( + len( + [ + entity + for entity in entry.data[CONF_ENTITIES] + if entity[CONF_ADDRESS] == ENTITIES_DELETE_PAYLOAD[CONF_ADDRESS] + and entity[CONF_DOMAIN] == ENTITIES_DELETE_PAYLOAD[CONF_DOMAIN] + and entity[CONF_RESOURCE] == ENTITIES_DELETE_PAYLOAD[CONF_RESOURCE] + ] + ) + == 1 + ) + + await client.send_json_auto_id( + {**ENTITIES_DELETE_PAYLOAD, "entry_id": entry.entry_id} + ) + + res = await client.receive_json() + assert res["success"], res + + assert ( + len( + [ + entity + for entity in entry.data[CONF_ENTITIES] + if entity[CONF_ADDRESS] == ENTITIES_DELETE_PAYLOAD[CONF_ADDRESS] + and entity[CONF_DOMAIN] == ENTITIES_DELETE_PAYLOAD[CONF_DOMAIN] + and entity[CONF_RESOURCE] == ENTITIES_DELETE_PAYLOAD[CONF_RESOURCE] + ] + ) + == 0 + ) + + +@pytest.mark.parametrize( + ("payload", "entity_id", "result"), + [ + (DEVICES_PAYLOAD, "12345", False), + (ENTITIES_PAYLOAD, "12345", False), + (SCAN_PAYLOAD, "12345", False), + (DEVICES_ADD_PAYLOAD, "12345", False), + (DEVICES_DELETE_PAYLOAD, "12345", False), + (ENTITIES_ADD_PAYLOAD, "12345", False), + (ENTITIES_DELETE_PAYLOAD, "12345", False), + ], +) +async def test_lcn_command_host_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + lcn_connection, + payload, + entity_id, + result, +) -> None: + """Test lcn commands for unknown host.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({**payload, "entry_id": entity_id}) + + res = await client.receive_json() + assert res["success"], res + assert 
res["result"] == result + + +@pytest.mark.parametrize( + ("payload", "address", "result"), + [ + (DEVICES_ADD_PAYLOAD, (0, 7, False), False), # device already existing + (DEVICES_DELETE_PAYLOAD, (0, 42, False), False), + (ENTITIES_ADD_PAYLOAD, (0, 42, False), False), + (ENTITIES_DELETE_PAYLOAD, (0, 42, 0), False), + ], +) +async def test_lcn_command_address_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entry, + lcn_connection, + payload, + address, + result, +) -> None: + """Test lcn commands for address error.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {**payload, "entry_id": entry.entry_id, CONF_ADDRESS: address} + ) + + res = await client.receive_json() + assert res["success"], res + assert res["result"] == result + + +async def test_lcn_entities_add_existing_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entry, + lcn_connection, +) -> None: + """Test lcn commands for address error.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + **ENTITIES_ADD_PAYLOAD, + "entry_id": entry.entry_id, + CONF_DOMAIN_DATA: {"output": "RELAY1"}, + } + ) + + res = await client.receive_json() + assert res["success"], res + assert res["result"] is False From a287c8259d683071f05b45b1c7681b528b4189a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Jaworski?= Date: Wed, 21 Aug 2024 12:53:09 +0200 Subject: [PATCH 0941/1635] blebox: use blebox_uniapi.cover.BleboxCoverState enum members instead of plain integers (#124302) * blebox: use blebox_uniapi.cover.BleboxCoverState enum members instead of plain integers * Apply suggestion from code review --------- Co-authored-by: Erik Montnemery --- homeassistant/components/blebox/cover.py | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/blebox/cover.py b/homeassistant/components/blebox/cover.py index f9e974991f5..71e47d87894 100644 --- a/homeassistant/components/blebox/cover.py +++ b/homeassistant/components/blebox/cover.py @@ -6,6 +6,7 @@ from typing import Any from blebox_uniapi.box import Box import blebox_uniapi.cover +from blebox_uniapi.cover import BleboxCoverState from homeassistant.components.cover import ( ATTR_POSITION, @@ -31,16 +32,16 @@ BLEBOX_TO_COVER_DEVICE_CLASSES = { BLEBOX_TO_HASS_COVER_STATES = { None: None, - 0: STATE_CLOSING, # moving down - 1: STATE_OPENING, # moving up - 2: STATE_OPEN, # manually stopped - 3: STATE_CLOSED, # lower limit - 4: STATE_OPEN, # upper limit / open - # gateController - 5: STATE_OPEN, # overload - 6: STATE_OPEN, # motor failure - # 7 is not used - 8: STATE_OPEN, # safety stop + # all blebox covers + BleboxCoverState.MOVING_DOWN: STATE_CLOSING, + BleboxCoverState.MOVING_UP: STATE_OPENING, + BleboxCoverState.MANUALLY_STOPPED: STATE_OPEN, + BleboxCoverState.LOWER_LIMIT_REACHED: STATE_CLOSED, + BleboxCoverState.UPPER_LIMIT_REACHED: STATE_OPEN, + # extra states of gateController product + BleboxCoverState.OVERLOAD: STATE_OPEN, + BleboxCoverState.MOTOR_FAILURE: STATE_OPEN, + BleboxCoverState.SAFETY_STOP: STATE_OPEN, } From 316a57864a5fca9f99b9d29cc4af6eaf423c4670 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Jaworski?= Date: Wed, 21 Aug 2024 12:53:40 +0200 Subject: [PATCH 0942/1635] Extend blebox shutterbox tilt support (#110547) * blebox: extend shutterbox tilt support * feat: add test for new open/close tilt code in blebox covers * blebox: resign from using future compat branch for cover open/close tilt * Update 
homeassistant/components/blebox/cover.py Co-authored-by: Erik Montnemery * blebox: revert changes to BLEBOX_TO_HASS_COVER_STATES --------- Co-authored-by: Erik Montnemery --- homeassistant/components/blebox/cover.py | 32 +++++++++----- tests/components/blebox/test_cover.py | 56 ++++++++++++++++++++++++ 2 files changed, 78 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/blebox/cover.py b/homeassistant/components/blebox/cover.py index 71e47d87894..bb75c88ca2a 100644 --- a/homeassistant/components/blebox/cover.py +++ b/homeassistant/components/blebox/cover.py @@ -29,7 +29,6 @@ BLEBOX_TO_COVER_DEVICE_CLASSES = { "shutter": CoverDeviceClass.SHUTTER, } - BLEBOX_TO_HASS_COVER_STATES = { None: None, # all blebox covers @@ -65,14 +64,20 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): """Initialize a BleBox cover feature.""" super().__init__(feature) self._attr_device_class = BLEBOX_TO_COVER_DEVICE_CLASSES[feature.device_class] - position = CoverEntityFeature.SET_POSITION if feature.is_slider else 0 - stop = CoverEntityFeature.STOP if feature.has_stop else 0 self._attr_supported_features = ( - position | stop | CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) + if feature.is_slider: + self._attr_supported_features |= CoverEntityFeature.SET_POSITION + + if feature.has_stop: + self._attr_supported_features |= CoverEntityFeature.STOP + if feature.has_tilt: - self._attr_supported_features = ( - self._attr_supported_features | CoverEntityFeature.SET_TILT_POSITION + self._attr_supported_features |= ( + CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT ) @property @@ -106,16 +111,24 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): return self._is_state(STATE_CLOSED) async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover position.""" + """Fully open the cover position.""" await self._feature.async_open() async def async_close_cover(self, **kwargs: Any) -> None: - """Close the cover position.""" + """Fully close the cover position.""" await self._feature.async_close() + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Fully open the cover tilt.""" + await self._feature.async_set_tilt_position(0) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Fully close the cover tilt.""" + # note: values are reversed + await self._feature.async_set_tilt_position(100) + async def async_set_cover_position(self, **kwargs: Any) -> None: """Set the cover position.""" - position = kwargs[ATTR_POSITION] await self._feature.async_set_position(100 - position) @@ -125,7 +138,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Set the tilt position.""" - position = kwargs[ATTR_TILT_POSITION] await self._feature.async_set_tilt_position(100 - position) diff --git a/tests/components/blebox/test_cover.py b/tests/components/blebox/test_cover.py index 1596de134c0..1900a6d6834 100644 --- a/tests/components/blebox/test_cover.py +++ b/tests/components/blebox/test_cover.py @@ -22,7 +22,9 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, @@ -473,3 +475,57 @@ async def 
test_set_tilt_position(shutterbox, hass: HomeAssistant) -> None: blocking=True, ) assert hass.states.get(entity_id).state == STATE_OPENING + + +async def test_open_tilt(shutterbox, hass: HomeAssistant) -> None: + """Test opening tilt.""" + feature_mock, entity_id = shutterbox + + def initial_update(): + feature_mock.tilt_current = 100 + + def set_tilt_position(tilt_position): + assert tilt_position == 0 + feature_mock.tilt_current = tilt_position + + feature_mock.async_update = AsyncMock(side_effect=initial_update) + feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt_position) + + await async_setup_entity(hass, entity_id) + feature_mock.async_update = AsyncMock() + + await hass.services.async_call( + "cover", + SERVICE_OPEN_COVER_TILT, + {"entity_id": entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # inverted + + +async def test_close_tilt(shutterbox, hass: HomeAssistant) -> None: + """Test closing tilt.""" + feature_mock, entity_id = shutterbox + + def initial_update(): + feature_mock.tilt_current = 0 + + def set_tilt_position(tilt_position): + assert tilt_position == 100 + feature_mock.tilt_current = tilt_position + + feature_mock.async_update = AsyncMock(side_effect=initial_update) + feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt_position) + + await async_setup_entity(hass, entity_id) + feature_mock.async_update = AsyncMock() + + await hass.services.async_call( + "cover", + SERVICE_CLOSE_COVER_TILT, + {"entity_id": entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # inverted From fa5f47127c86773ff49567fc116b9b8385c5c789 Mon Sep 17 00:00:00 2001 From: Joram <10565565+JoramQ@users.noreply.github.com> Date: Wed, 21 Aug 2024 13:37:43 +0200 Subject: [PATCH 0943/1635] Fix state name for binary_sensor Power from clear/detected to on/off (#116994) Fixed state name for binary_sensor Power from clear/detected to on/off --- homeassistant/components/binary_sensor/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/binary_sensor/strings.json b/homeassistant/components/binary_sensor/strings.json index 162cf139a1d..b86a6374f28 100644 --- a/homeassistant/components/binary_sensor/strings.json +++ b/homeassistant/components/binary_sensor/strings.json @@ -243,8 +243,8 @@ "power": { "name": "Power", "state": { - "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", - "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" } }, "presence": { From 8d97fafb2dd917ba15ba072267ca0a00052b4d76 Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Wed, 21 Aug 2024 14:03:40 +0200 Subject: [PATCH 0944/1635] Add tests for IronOS integration (#123078) Add tests --- tests/components/iron_os/test_init.py | 34 +++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/components/iron_os/test_init.py b/tests/components/iron_os/test_init.py index fb0a782ea36..f7db2a813ec 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -11,6 +11,40 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_pynecil", "ble_device") +async def test_setup_and_unload( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test integration setup and unload.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("ble_device") +async def test_update_data_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test config entry not ready.""" + mock_pynecil.get_live_data.side_effect = CommunicationError + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + @pytest.mark.usefixtures("ble_device") async def test_setup_config_entry_not_ready( hass: HomeAssistant, From 52c0a091079b84cf3d966f099074fc8e4d875262 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Wed, 21 Aug 2024 14:42:58 +0200 Subject: [PATCH 0945/1635] Add test cases for ViCare entities (#122983) * add test case * add test case * add test case * add test case * add test case * add test case * remove commented code * test only fan platform * add test case * remove fan mock --- tests/components/vicare/__init__.py | 11 + tests/components/vicare/conftest.py | 9 +- .../components/vicare/fixtures/ViAir300F.json | 882 ++++++++++++++ .../vicare/snapshots/test_binary_sensor.ambr | 374 ++++++ .../vicare/snapshots/test_button.ambr | 47 + .../vicare/snapshots/test_climate.ambr | 167 +++ .../components/vicare/snapshots/test_fan.ambr | 64 + .../vicare/snapshots/test_number.ambr | 567 +++++++++ .../vicare/snapshots/test_sensor.ambr | 1052 +++++++++++++++++ .../vicare/snapshots/test_water_heater.ambr | 113 ++ tests/components/vicare/test_binary_sensor.py | 27 +- tests/components/vicare/test_button.py | 33 + tests/components/vicare/test_climate.py | 33 + tests/components/vicare/test_fan.py | 33 + tests/components/vicare/test_number.py | 33 + tests/components/vicare/test_sensor.py | 33 + tests/components/vicare/test_water_heater.py | 33 + 17 files changed, 3504 insertions(+), 7 deletions(-) create mode 100644 tests/components/vicare/fixtures/ViAir300F.json create mode 100644 tests/components/vicare/snapshots/test_button.ambr create mode 100644 tests/components/vicare/snapshots/test_climate.ambr create mode 100644 tests/components/vicare/snapshots/test_fan.ambr create mode 100644 tests/components/vicare/snapshots/test_number.ambr create mode 100644 tests/components/vicare/snapshots/test_sensor.ambr create mode 100644 tests/components/vicare/snapshots/test_water_heater.ambr create 
mode 100644 tests/components/vicare/test_button.py create mode 100644 tests/components/vicare/test_climate.py create mode 100644 tests/components/vicare/test_fan.py create mode 100644 tests/components/vicare/test_number.py create mode 100644 tests/components/vicare/test_sensor.py create mode 100644 tests/components/vicare/test_water_heater.py diff --git a/tests/components/vicare/__init__.py b/tests/components/vicare/__init__.py index 329a3b04d58..c2a1ab49e5c 100644 --- a/tests/components/vicare/__init__.py +++ b/tests/components/vicare/__init__.py @@ -6,6 +6,9 @@ from typing import Final from homeassistant.components.vicare.const import CONF_HEATING_TYPE from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry MODULE = "homeassistant.components.vicare" @@ -17,3 +20,11 @@ ENTRY_CONFIG: Final[dict[str, str]] = { } MOCK_MAC = "B874241B7B9" + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index 372314d9fe2..c78669d1c3e 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -13,7 +13,7 @@ from PyViCare.PyViCareService import ViCareDeviceAccessor, readFeature from homeassistant.components.vicare.const import DOMAIN from homeassistant.core import HomeAssistant -from . import ENTRY_CONFIG, MODULE +from . import ENTRY_CONFIG, MODULE, setup_integration from tests.common import MockConfigEntry, load_json_object_fixture @@ -40,7 +40,7 @@ class MockPyViCare: ), f"deviceId{idx}", f"model{idx}", - f"online{idx}", + "online", ) ) @@ -87,10 +87,7 @@ async def mock_vicare_gas_boiler( f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures), ): - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) yield mock_config_entry diff --git a/tests/components/vicare/fixtures/ViAir300F.json b/tests/components/vicare/fixtures/ViAir300F.json new file mode 100644 index 00000000000..b1ec747e127 --- /dev/null +++ b/tests/components/vicare/fixtures/ViAir300F.json @@ -0,0 +1,882 @@ +{ + "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.productIdentification", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "product": { + "type": "object", + "value": { + "busAddress": 1, + "busType": "CanExternal", + "productFamily": "B_00028_VA330", + "viessmannIdentificationNumber": "################" + } + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.productIdentification" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.messages.errors.raw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "entries": { + "type": "array", + "value": [] + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.messages.errors.raw" + }, + { + 
"apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "################" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 234 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelFour" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelOne", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 54 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelOne" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelThree", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 180 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelThree" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelTwo", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 125 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelTwo" + }, + { + "apiVersion": 1, + "commands": { + "setMode": { + "isExecutable": true, + "name": "setMode", + "params": { + "mode": { + "constraints": { + "enum": [ + "permanent", + "ventilation", + "sensorOverride", + "sensorDriven" + ] + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active/commands/setMode" + }, + "setModeContinuousSensorOverride": { + "isExecutable": "true", + "name": "setModeContinuousSensorOverride", + "params": {}, + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active/commands/setModeContinuousSensorOverride" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.modes.active", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "permanent" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.filterChange", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.filterChange" + }, + { + "apiVersion": 1, + "commands": { + "setLevel": { + "isExecutable": true, + "name": "setLevel", + "params": { + "level": { + "constraints": { + "enum": ["levelOne", "levelTwo", "levelThree", "levelFour"] + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.permanent/commands/setLevel" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.modes.permanent", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.permanent" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.sensorDriven", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.sensorDriven" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.sensorOverride", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.sensorOverride" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.ventilation", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.ventilation" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.active", + 
"gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "levelOne" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.active" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.programs.forcedLevelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 234 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelFour" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelOne", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 54 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelOne" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelThree", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 180 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelThree" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelTwo", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 125 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelTwo" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.programs.silent", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.standby", + "gatewayId": "################", + "isEnabled": true, + 
"isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 0 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.state", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "demand": { + "type": "string", + "value": "ventilation" + }, + "level": { + "type": "string", + "value": "levelOne" + }, + "reason": { + "type": "string", + "value": "permanent" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.state" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.quickmodes.forcedLevelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + 
"params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.quickmodes.silent", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.boiler.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "################" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.boiler.serial" + }, + { + "apiVersion": 1, + "commands": { + "setSchedule": { + "isExecutable": true, + "name": "setSchedule", + "params": { + "newSchedule": { + "constraints": { + "defaultMode": "levelOne", + "maxEntries": 4, + "modes": ["levelTwo", "levelThree"], + "overlapAllowed": false, + "resolution": 10 + }, + "required": true, + "type": "Schedule" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.schedule/commands/setSchedule" + } + }, + "deviceId": "0", + "feature": "ventilation.schedule", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "entries": { + "type": "Schedule", + "value": { + "fri": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "mon": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "sat": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "sun": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "thu": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "tue": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "wed": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ] + } + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.schedule" + } + ] +} diff --git a/tests/components/vicare/snapshots/test_binary_sensor.ambr b/tests/components/vicare/snapshots/test_binary_sensor.ambr index 7454f914435..a03a6150c45 100644 --- a/tests/components/vicare/snapshots/test_binary_sensor.ambr +++ b/tests/components/vicare/snapshots/test_binary_sensor.ambr @@ -1,4 +1,378 @@ # serializer version: 1 +# name: test_all_entities[binary_sensor.model0_burner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_burner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Burner', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner', + 'unique_id': 'gateway0-burner_active-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_burner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 Burner', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_burner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_circulation_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Circulation pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'circulation_pump', + 'unique_id': 'gateway0-circulationpump_active-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 Circulation pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_circulation_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_circulation_pump_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Circulation pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'circulation_pump', + 'unique_id': 
'gateway0-circulationpump_active-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 Circulation pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_circulation_pump_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_dhw_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW charging', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'domestic_hot_water_charging', + 'unique_id': 'gateway0-charging_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 DHW charging', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_dhw_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_circulation_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_dhw_circulation_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW circulation pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'domestic_hot_water_circulation_pump', + 'unique_id': 'gateway0-dhw_circulationpump_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_circulation_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 DHW circulation pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_dhw_circulation_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_dhw_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'domestic_hot_water_pump', + 'unique_id': 'gateway0-dhw_pump_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 DHW pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_dhw_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_frost_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Frost protection', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frost_protection', + 'unique_id': 'gateway0-frost_protection_active-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Frost protection', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_frost_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_frost_protection_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Frost protection', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frost_protection', + 'unique_id': 'gateway0-frost_protection_active-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Frost protection', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_frost_protection_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_binary_sensors[burner] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/vicare/snapshots/test_button.ambr b/tests/components/vicare/snapshots/test_button.ambr new file mode 100644 index 00000000000..01120b8b0d6 --- /dev/null +++ b/tests/components/vicare/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.model0_activate_one_time_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.model0_activate_one_time_charge', + 'has_entity_name': 
True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Activate one-time charge', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'activate_onetimecharge', + 'unique_id': 'gateway0-activate_onetimecharge', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.model0_activate_one_time_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Activate one-time charge', + }), + 'context': , + 'entity_id': 'button.model0_activate_one_time_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_climate.ambr b/tests/components/vicare/snapshots/test_climate.ambr new file mode 100644 index 00000000000..a01d1c43bea --- /dev/null +++ b/tests/components/vicare/snapshots/test_climate.ambr @@ -0,0 +1,167 @@ +# serializer version: 1 +# name: test_all_entities[climate.model0_heating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.model0_heating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'heating', + 'unique_id': 'gateway0-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.model0_heating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Heating', + 'hvac_action': , + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_mode': None, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'supported_features': , + 'target_temp_step': 1, + 'temperature': None, + 'vicare_programs': list([ + 'comfort', + 'eco', + 'external', + 'holiday', + 'normal', + 'reduced', + 'standby', + ]), + }), + 'context': , + 'entity_id': 'climate.model0_heating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[climate.model0_heating_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.model0_heating_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'heating', + 
'unique_id': 'gateway0-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.model0_heating_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Heating', + 'hvac_action': , + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_mode': None, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'supported_features': , + 'target_temp_step': 1, + 'temperature': None, + 'vicare_programs': list([ + 'comfort', + 'eco', + 'external', + 'holiday', + 'normal', + 'reduced', + 'standby', + ]), + }), + 'context': , + 'entity_id': 'climate.model0_heating_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_fan.ambr b/tests/components/vicare/snapshots/test_fan.ambr new file mode 100644 index 00000000000..48c8d728569 --- /dev/null +++ b/tests/components/vicare/snapshots/test_fan.ambr @@ -0,0 +1,64 @@ +# serializer version: 1 +# name: test_all_entities[fan.model0_ventilation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.model0_ventilation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ventilation', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'ventilation', + 'unique_id': 'gateway0-ventilation', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[fan.model0_ventilation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Ventilation', + 'percentage': 0, + 'percentage_step': 25.0, + 'preset_mode': None, + 'preset_modes': list([ + , + , + , + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.model0_ventilation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_number.ambr b/tests/components/vicare/snapshots/test_number.ambr new file mode 100644 index 00000000000..a55c29ab8c1 --- /dev/null +++ b/tests/components/vicare/snapshots/test_number.ambr @@ -0,0 +1,567 @@ +# serializer version: 1 +# name: test_all_entities[number.model0_comfort_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_comfort_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Comfort temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'comfort_temperature', + 'unique_id': 'gateway0-comfort_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_comfort_temperature-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Comfort temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_comfort_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_comfort_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_comfort_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Comfort temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'comfort_temperature', + 'unique_id': 'gateway0-comfort_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_comfort_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Comfort temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_comfort_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_shift', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating curve shift', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_shift', + 'unique_id': 'gateway0-heating curve shift-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Heating curve shift', + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_shift', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_shift_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ 
+ }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating curve shift', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_shift', + 'unique_id': 'gateway0-heating curve shift-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Heating curve shift', + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_shift_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_slope', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating curve slope', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_slope', + 'unique_id': 'gateway0-heating curve slope-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating curve slope', + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_slope', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_slope_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating curve slope', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_slope', + 'unique_id': 'gateway0-heating curve slope-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating curve slope', + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_slope_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_normal_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_normal_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Normal temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'normal_temperature', + 'unique_id': 'gateway0-normal_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_normal_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Normal temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_normal_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_normal_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_normal_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Normal temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'normal_temperature', + 'unique_id': 'gateway0-normal_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_normal_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Normal temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_normal_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_reduced_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reduced temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reduced_temperature', + 'unique_id': 'gateway0-reduced_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Reduced temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'number.model0_reduced_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_reduced_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reduced temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reduced_temperature', + 'unique_id': 'gateway0-reduced_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Reduced temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_reduced_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..7bbac75bedc --- /dev/null +++ b/tests/components/vicare/snapshots/test_sensor.ambr @@ -0,0 +1,1052 @@ +# serializer version: 1 +# name: test_all_entities[sensor.model0_boiler_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_boiler_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Boiler temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boiler_temperature', + 'unique_id': 'gateway0-boiler_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_boiler_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Boiler temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_boiler_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '63', + }) +# --- +# name: test_all_entities[sensor.model0_burner_hours-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_burner_hours', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Burner hours', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner_hours', + 'unique_id': 'gateway0-burner_hours-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_burner_hours-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Burner hours', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_burner_hours', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18726.3', + }) +# --- +# name: test_all_entities[sensor.model0_burner_modulation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_burner_modulation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Burner modulation', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner_modulation', + 'unique_id': 'gateway0-burner_modulation-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.model0_burner_modulation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Burner modulation', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.model0_burner_modulation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_burner_starts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_burner_starts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Burner starts', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner_starts', + 'unique_id': 'gateway0-burner_starts-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_burner_starts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Burner starts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_burner_starts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14315', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, 
+ 'original_name': 'DHW gas consumption this month', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_heating_this_month', + 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_month', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption this month', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '805', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_week-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_week', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption this week', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_heating_this_week', + 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_week', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_week-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption this week', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_week', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '84', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_heating_this_year', + 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_year', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption this year', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8203', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 
'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_today', + 'unique_id': 'gateway0-hotwater_gas_consumption_today', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption today', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_max_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_max_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW max temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_max_temperature', + 'unique_id': 'gateway0-hotwater_max_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_dhw_max_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW max temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_max_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_min_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_min_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW min temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_min_temperature', + 'unique_id': 'gateway0-hotwater_min_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_dhw_min_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW min temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_min_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_all_entities[sensor.model0_energy-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power consumption this month', + 'unique_id': 'gateway0-power consumption this month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.843', + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_this_year', + 'unique_id': 'gateway0-power consumption this year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Energy consumption this year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '207.106', + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_today', + 'unique_id': 'gateway0-power consumption today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Energy consumption today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 
'entity_id': 'sensor.model0_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.219', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_this_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption this month', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_this_month', + 'unique_id': 'gateway0-gas_consumption_heating_this_month', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption this month', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_this_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_week-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_this_week', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption this week', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_this_week', + 'unique_id': 'gateway0-gas_consumption_heating_this_week', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_week-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption this week', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_this_week', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_this_year', + 
'unique_id': 'gateway0-gas_consumption_heating_this_year', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption this year', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30946', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_today', + 'unique_id': 'gateway0-gas_consumption_heating_today', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption today', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outside_temperature', + 'unique_id': 'gateway0-outside_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.8', + }) +# --- +# name: test_all_entities[sensor.model0_power_consumption_this_week-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_power_consumption_this_week', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': , + 'original_icon': None, + 'original_name': 'Power consumption this week', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_this_week', + 'unique_id': 'gateway0-power consumption this week', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_power_consumption_this_week-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Power consumption this week', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_power_consumption_this_week', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.829', + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_supply_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_temperature', + 'unique_id': 'gateway0-supply_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_supply_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '63', + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_supply_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_temperature', + 'unique_id': 'gateway0-supply_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_supply_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.5', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_water_heater.ambr b/tests/components/vicare/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000..5ab4fcc78bd --- /dev/null +++ b/tests/components/vicare/snapshots/test_water_heater.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: 
test_all_entities[water_heater.model0_domestic_hot_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 60, + 'min_temp': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.model0_domestic_hot_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domestic hot water', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'domestic_hot_water', + 'unique_id': 'gateway0-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[water_heater.model0_domestic_hot_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Domestic hot water', + 'max_temp': 60, + 'min_temp': 10, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.model0_domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[water_heater.model0_domestic_hot_water_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 60, + 'min_temp': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.model0_domestic_hot_water_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domestic hot water', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'domestic_hot_water', + 'unique_id': 'gateway0-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[water_heater.model0_domestic_hot_water_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Domestic hot water', + 'max_temp': 60, + 'min_temp': 10, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.model0_domestic_hot_water_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/vicare/test_binary_sensor.py b/tests/components/vicare/test_binary_sensor.py index 79ce91642af..b9b8a57a59b 100644 --- a/tests/components/vicare/test_binary_sensor.py +++ b/tests/components/vicare/test_binary_sensor.py @@ -1,11 +1,18 @@ """Test ViCare binary sensors.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( @@ -24,3 +31,21 @@ async def test_binary_sensors( ) -> None: """Test the ViCare binary sensor.""" assert hass.states.get(f"binary_sensor.model0_{entity_id}") == snapshot + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.BINARY_SENSOR]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_button.py b/tests/components/vicare/test_button.py new file mode 100644 index 00000000000..c024af41d78 --- /dev/null +++ b/tests/components/vicare/test_button.py @@ -0,0 +1,33 @@ +"""Test ViCare button entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.BUTTON]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_climate.py b/tests/components/vicare/test_climate.py new file mode 100644 index 00000000000..44df87276e7 --- /dev/null +++ b/tests/components/vicare/test_climate.py @@ -0,0 +1,33 @@ +"""Test ViCare climate entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_fan.py b/tests/components/vicare/test_fan.py new file mode 100644 index 00000000000..ba5db6e42c7 --- /dev/null +++ b/tests/components/vicare/test_fan.py @@ -0,0 +1,33 @@ +"""Test ViCare fan.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:ventilation"}, "vicare/ViAir300F.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.FAN]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_number.py b/tests/components/vicare/test_number.py new file mode 100644 index 00000000000..c3aa66a86f6 --- /dev/null +++ b/tests/components/vicare/test_number.py @@ -0,0 +1,33 @@ +"""Test ViCare number entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.NUMBER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py new file mode 100644 index 00000000000..624fdf2cd5d --- /dev/null +++ b/tests/components/vicare/test_sensor.py @@ -0,0 +1,33 @@ +"""Test ViCare sensor entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_water_heater.py b/tests/components/vicare/test_water_heater.py new file mode 100644 index 00000000000..fbb5863cf7a --- /dev/null +++ b/tests/components/vicare/test_water_heater.py @@ -0,0 +1,33 @@ +"""Test ViCare water heater entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.WATER_HEATER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From b21b3006258fbc8705fa35cfbc7473126aea6fcd Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Wed, 21 Aug 2024 14:59:08 +0200 Subject: [PATCH 0946/1635] Disable Habitica deprecated entities by default (#123522) Deprecated sensor task entites disable by default --- homeassistant/components/habitica/sensor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 6d1a11ed9c3..8762345b597 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -172,6 +172,7 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = ( translation_key=HabitipySensorEntity.DAILIES, native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "daily"], + entity_registry_enabled_default=False, ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.TODOS, @@ -180,6 +181,7 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = ( value_fn=lambda tasks: [ r for r in tasks if r.get("type") == "todo" and not r.get("completed") ], + entity_registry_enabled_default=False, ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.REWARDS, From 3e53cc175f78d8224f539e08dc6978c6cee3b41f Mon Sep 17 00:00:00 2001 From: Christian Neumeier <47736781+NECH2004@users.noreply.github.com> Date: Wed, 21 Aug 2024 15:36:45 +0200 Subject: [PATCH 0947/1635] Change POWER_VOLT_AMPERE_REACTIVE to UnitOfReactivePower (#117153) * Refactoring: exchange POWER_VOLT_AMPERE_REACTIVE with UnitOfReactivePower * updated iotawatt and mysensors from VOLT_AMPERE_REACTIVE to UnitOfReactivePower.VOLT_AMPERE_REACTIVE * deprecation period for POWER_VOLT_AMPERE_REACTIVE changed to one year. * POWER_VOLT_AMPERE_REACTIVE changed to UnitOfReactivePower in blebox integration * Update homeassistant/const.py --------- Co-authored-by: Erik Montnemery --- homeassistant/components/blebox/sensor.py | 4 +- homeassistant/components/fronius/sensor.py | 10 ++--- homeassistant/components/goodwe/sensor.py | 4 +- homeassistant/components/homewizard/sensor.py | 10 ++--- homeassistant/components/iotawatt/const.py | 1 - homeassistant/components/iotawatt/sensor.py | 5 ++- homeassistant/components/isy994/const.py | 4 +- homeassistant/components/mysensors/sensor.py | 3 +- homeassistant/components/number/const.py | 4 +- homeassistant/components/sensor/const.py | 4 +- homeassistant/components/sma/sensor.py | 10 ++--- homeassistant/components/tasmota/sensor.py | 4 +- homeassistant/const.py | 13 +++++- .../homewizard/snapshots/test_sensor.ambr | 44 +++++++++---------- tests/components/sensor/common.py | 4 +- 15 files changed, 68 insertions(+), 56 deletions(-) diff --git a/homeassistant/components/blebox/sensor.py b/homeassistant/components/blebox/sensor.py index 2642bfd0139..fa11f6d6680 100644 --- a/homeassistant/components/blebox/sensor.py +++ b/homeassistant/components/blebox/sensor.py @@ -14,13 +14,13 @@ from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfSpeed, UnitOfTemperature, ) @@ -85,7 +85,7 @@ SENSOR_TYPES = ( SensorEntityDescription( key="reactivePower", device_class=SensorDeviceClass.POWER, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, ), SensorEntityDescription( key="activePower", diff --git 
a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index 31f080c1f51..c8a840b1c2c 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -14,7 +14,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, @@ -22,6 +21,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfTemperature, ) from homeassistant.core import HomeAssistant, callback @@ -381,28 +381,28 @@ METER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ ), FroniusSensorEntityDescription( key="power_reactive_phase_1", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), FroniusSensorEntityDescription( key="power_reactive_phase_2", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), FroniusSensorEntityDescription( key="power_reactive_phase_3", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), FroniusSensorEntityDescription( key="power_reactive", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, diff --git a/homeassistant/components/goodwe/sensor.py b/homeassistant/components/goodwe/sensor.py index 795b26a0c9f..03912c9a1ec 100644 --- a/homeassistant/components/goodwe/sensor.py +++ b/homeassistant/components/goodwe/sensor.py @@ -20,7 +20,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, @@ -28,6 +27,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfTemperature, UnitOfTime, ) @@ -126,7 +126,7 @@ _DESCRIPTIONS: dict[str, GoodweSensorEntityDescription] = { key="var", device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, entity_registry_enabled_default=False, ), "C": GoodweSensorEntityDescription( diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index 1f77af110b0..c5cf0bc64c7 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -18,7 +18,6 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( ATTR_VIA_DEVICE, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, Platform, UnitOfApparentPower, @@ -27,6 +26,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, 
UnitOfPower, + UnitOfReactivePower, UnitOfVolume, ) from homeassistant.core import HomeAssistant @@ -404,7 +404,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( ), HomeWizardSensorEntityDescription( key="active_reactive_power_var", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -415,7 +415,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( key="active_reactive_power_l1_var", translation_key="active_reactive_power_phase_var", translation_placeholders={"phase": "1"}, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -426,7 +426,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( key="active_reactive_power_l2_var", translation_key="active_reactive_power_phase_var", translation_placeholders={"phase": "2"}, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -437,7 +437,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( key="active_reactive_power_l3_var", translation_key="active_reactive_power_phase_var", translation_placeholders={"phase": "3"}, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, diff --git a/homeassistant/components/iotawatt/const.py b/homeassistant/components/iotawatt/const.py index de008388f62..278030d4f10 100644 --- a/homeassistant/components/iotawatt/const.py +++ b/homeassistant/components/iotawatt/const.py @@ -7,7 +7,6 @@ import json import httpx DOMAIN = "iotawatt" -VOLT_AMPERE_REACTIVE = "VAR" VOLT_AMPERE_REACTIVE_HOURS = "VARh" CONNECTION_ERRORS = (KeyError, json.JSONDecodeError, httpx.HTTPError) diff --git a/homeassistant/components/iotawatt/sensor.py b/homeassistant/components/iotawatt/sensor.py index c9af588c160..e0328ea9d58 100644 --- a/homeassistant/components/iotawatt/sensor.py +++ b/homeassistant/components/iotawatt/sensor.py @@ -23,6 +23,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -31,7 +32,7 @@ from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .const import DOMAIN, VOLT_AMPERE_REACTIVE, VOLT_AMPERE_REACTIVE_HOURS +from .const import DOMAIN, VOLT_AMPERE_REACTIVE_HOURS from .coordinator import IotawattUpdater _LOGGER = logging.getLogger(__name__) @@ -89,7 +90,7 @@ ENTITY_DESCRIPTION_KEY_MAP: dict[str, IotaWattSensorEntityDescription] = { ), "VAR": IotaWattSensorEntityDescription( key="VAR", - native_unit_of_measurement=VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, 
state_class=SensorStateClass.MEASUREMENT, icon="mdi:flash", entity_registry_enabled_default=False, diff --git a/homeassistant/components/isy994/const.py b/homeassistant/components/isy994/const.py index 85ecafd6490..57b30c88075 100644 --- a/homeassistant/components/isy994/const.py +++ b/homeassistant/components/isy994/const.py @@ -23,7 +23,6 @@ from homeassistant.const import ( DEGREE, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, REVOLUTIONS_PER_MINUTE, SERVICE_LOCK, SERVICE_UNLOCK, @@ -50,6 +49,7 @@ from homeassistant.const import ( UnitOfMass, UnitOfPower, UnitOfPressure, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfSpeed, UnitOfTemperature, @@ -438,7 +438,7 @@ UOM_FRIENDLY_NAME = { "133": UnitOfFrequency.KILOHERTZ, "134": f"{UnitOfLength.METERS}/{UnitOfTime.SECONDS}²", "135": UnitOfApparentPower.VOLT_AMPERE, # Volt-Amp - "136": POWER_VOLT_AMPERE_REACTIVE, # VAR = Volt-Amp Reactive + "136": UnitOfReactivePower.VOLT_AMPERE_REACTIVE, # VAR = Volt-Amp Reactive "137": "", # NTP DateTime - Number of seconds since 1900 "138": UnitOfPressure.PSI, "139": DEGREE, # Degree 0-360 diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index 9c6c0de89e6..82e6833f664 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -28,6 +28,7 @@ from homeassistant.const import ( UnitOfLength, UnitOfMass, UnitOfPower, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfTemperature, UnitOfVolume, @@ -195,7 +196,7 @@ SENSORS: dict[str, SensorEntityDescription] = { ), "V_VAR": SensorEntityDescription( key="V_VAR", - native_unit_of_measurement="var", + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, ), "V_VA": SensorEntityDescription( key="V_VA", diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 6343c3a599f..ad95c9b5358 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -14,7 +14,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, @@ -31,6 +30,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfPrecipitationDepth, UnitOfPressure, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfSpeed, UnitOfTemperature, @@ -468,7 +468,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), NumberDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), NumberDeviceClass.PRESSURE: set(UnitOfPressure), - NumberDeviceClass.REACTIVE_POWER: {POWER_VOLT_AMPERE_REACTIVE}, + NumberDeviceClass.REACTIVE_POWER: {UnitOfReactivePower.VOLT_AMPERE_REACTIVE}, NumberDeviceClass.SIGNAL_STRENGTH: { SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 5acf2ecef23..8f63e346caf 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -14,7 +14,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, @@ -31,6 +30,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfPrecipitationDepth, UnitOfPressure, + UnitOfReactivePower, UnitOfSoundPressure, 
UnitOfSpeed, UnitOfTemperature, @@ -563,7 +563,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), SensorDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), SensorDeviceClass.PRESSURE: set(UnitOfPressure), - SensorDeviceClass.REACTIVE_POWER: {POWER_VOLT_AMPERE_REACTIVE}, + SensorDeviceClass.REACTIVE_POWER: {UnitOfReactivePower.VOLT_AMPERE_REACTIVE}, SensorDeviceClass.SIGNAL_STRENGTH: { SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, diff --git a/homeassistant/components/sma/sensor.py b/homeassistant/components/sma/sensor.py index 9d580a76d9e..302c4f6b197 100644 --- a/homeassistant/components/sma/sensor.py +++ b/homeassistant/components/sma/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, @@ -23,6 +22,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -204,7 +204,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power": SensorEntityDescription( key="grid_reactive_power", name="Grid Reactive Power", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, @@ -212,7 +212,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power_l1": SensorEntityDescription( key="grid_reactive_power_l1", name="Grid Reactive Power L1", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, @@ -220,7 +220,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power_l2": SensorEntityDescription( key="grid_reactive_power_l2", name="Grid Reactive Power L2", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, @@ -228,7 +228,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power_l3": SensorEntityDescription( key="grid_reactive_power_l3", name="Grid Reactive Power L3", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, diff --git a/homeassistant/components/tasmota/sensor.py b/homeassistant/components/tasmota/sensor.py index e87ff88092e..30649fa38bd 100644 --- a/homeassistant/components/tasmota/sensor.py +++ b/homeassistant/components/tasmota/sensor.py @@ -22,7 +22,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -35,6 +34,7 @@ from homeassistant.const import ( UnitOfMass, UnitOfPower, UnitOfPressure, + 
UnitOfReactivePower, UnitOfSpeed, UnitOfTemperature, ) @@ -227,7 +227,7 @@ SENSOR_UNIT_MAP = { hc.PERCENTAGE: PERCENTAGE, hc.POWER_WATT: UnitOfPower.WATT, hc.PRESSURE_HPA: UnitOfPressure.HPA, - hc.REACTIVE_POWER: POWER_VOLT_AMPERE_REACTIVE, + hc.REACTIVE_POWER: UnitOfReactivePower.VOLT_AMPERE_REACTIVE, hc.SIGNAL_STRENGTH_DECIBELS: SIGNAL_STRENGTH_DECIBELS, hc.SIGNAL_STRENGTH_DECIBELS_MILLIWATT: SIGNAL_STRENGTH_DECIBELS_MILLIWATT, hc.SPEED_KILOMETERS_PER_HOUR: UnitOfSpeed.KILOMETERS_PER_HOUR, diff --git a/homeassistant/const.py b/homeassistant/const.py index 891cc0cc023..953f65efce2 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -673,8 +673,19 @@ _DEPRECATED_POWER_BTU_PER_HOUR: Final = DeprecatedConstantEnum( ) """Deprecated: please use UnitOfPower.BTU_PER_HOUR.""" + # Reactive power units -POWER_VOLT_AMPERE_REACTIVE: Final = "var" +class UnitOfReactivePower(StrEnum): + """Reactive power units.""" + + VOLT_AMPERE_REACTIVE = "var" + + +_DEPRECATED_POWER_VOLT_AMPERE_REACTIVE: Final = DeprecatedConstantEnum( + UnitOfReactivePower.VOLT_AMPERE_REACTIVE, + "2025.9", +) +"""Deprecated: please use UnitOfReactivePower.VOLT_AMPERE_REACTIVE.""" # Energy units diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 63ee9312a13..dd50b098d40 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -712,7 +712,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH1-entity_ids7][sensor.device_reactive_power:state] @@ -721,7 +721,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -2632,7 +2632,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power:state] @@ -2641,7 +2641,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -2719,7 +2719,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_1:state] @@ -2728,7 +2728,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 1', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_1', @@ -2806,7 +2806,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_2:state] @@ -2815,7 +2815,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 2', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 
'context': , 'entity_id': 'sensor.device_reactive_power_phase_2', @@ -2893,7 +2893,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_3:state] @@ -2902,7 +2902,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 3', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_3', @@ -14905,7 +14905,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-SKT-21-entity_ids3][sensor.device_reactive_power:state] @@ -14914,7 +14914,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -16200,7 +16200,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM230-entity_ids5][sensor.device_reactive_power:state] @@ -16209,7 +16209,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -18120,7 +18120,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power:state] @@ -18129,7 +18129,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -18207,7 +18207,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_1:state] @@ -18216,7 +18216,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 1', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_1', @@ -18294,7 +18294,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_2:state] @@ -18303,7 +18303,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 2', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_2', @@ -18381,7 +18381,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: 
test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_3:state] @@ -18390,7 +18390,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 3', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_3', diff --git a/tests/components/sensor/common.py b/tests/components/sensor/common.py index 53a93b73da3..458009b2690 100644 --- a/tests/components/sensor/common.py +++ b/tests/components/sensor/common.py @@ -10,11 +10,11 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, UnitOfApparentPower, UnitOfFrequency, UnitOfPressure, + UnitOfReactivePower, UnitOfVolume, ) @@ -44,7 +44,7 @@ UNITS_OF_MEASUREMENT = { SensorDeviceClass.ENERGY: "kWh", # energy (Wh/kWh/MWh) SensorDeviceClass.FREQUENCY: UnitOfFrequency.GIGAHERTZ, # energy (Hz/kHz/MHz/GHz) SensorDeviceClass.POWER_FACTOR: PERCENTAGE, # power factor (no unit, min: -1.0, max: 1.0) - SensorDeviceClass.REACTIVE_POWER: POWER_VOLT_AMPERE_REACTIVE, # reactive power (var) + SensorDeviceClass.REACTIVE_POWER: UnitOfReactivePower.VOLT_AMPERE_REACTIVE, # reactive power (var) SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, # µg/m³ of vocs SensorDeviceClass.VOLTAGE: "V", # voltage (V) SensorDeviceClass.GAS: UnitOfVolume.CUBIC_METERS, # gas (m³) From 9de90ca7d522c8cdbf86c7abfa0027cba4938fe7 Mon Sep 17 00:00:00 2001 From: Jeef Date: Wed, 21 Aug 2024 08:16:16 -0600 Subject: [PATCH 0948/1635] Fix Weatherflow Cloud lightning (#124082) * Adding a error condition and a fix for missing lightning epoch values * adding parentheses around the lambda * Updating PR * removed the dumb test * Updated tests to support good stuff * Updated snapshots --- .../components/weatherflow_cloud/sensor.py | 8 +- .../components/weatherflow_cloud/conftest.py | 36 +++++++ .../fixtures/station_observation_error.json | 99 +++++++++++++++++++ .../weatherflow_cloud/test_sensor.py | 52 +++++++++- 4 files changed, 191 insertions(+), 4 deletions(-) create mode 100644 tests/components/weatherflow_cloud/fixtures/station_observation_error.json diff --git a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py index 9314c77a65c..1c7fa5fb377 100644 --- a/homeassistant/components/weatherflow_cloud/sensor.py +++ b/homeassistant/components/weatherflow_cloud/sensor.py @@ -158,8 +158,12 @@ WF_SENSORS: tuple[WeatherFlowCloudSensorEntityDescription, ...] 
= ( key="lightning_strike_last_epoch", translation_key="lightning_strike_last_epoch", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: datetime.fromtimestamp( - data.lightning_strike_last_epoch, tz=UTC + value_fn=( + lambda data: datetime.fromtimestamp( + data.lightning_strike_last_epoch, tz=UTC + ) + if data.lightning_strike_last_epoch is not None + else None ), ), ) diff --git a/tests/components/weatherflow_cloud/conftest.py b/tests/components/weatherflow_cloud/conftest.py index 36b42bf24a8..d83ee082b26 100644 --- a/tests/components/weatherflow_cloud/conftest.py +++ b/tests/components/weatherflow_cloud/conftest.py @@ -113,3 +113,39 @@ def mock_api(): mock_api_class.return_value = mock_api yield mock_api + + +# +# @pytest.fixture +# def mock_api_with_lightning_error(): +# """Fixture for Mock WeatherFlowRestAPI.""" +# get_stations_response_data = StationsResponseREST.from_json( +# load_fixture("stations.json", DOMAIN) +# ) +# get_forecast_response_data = WeatherDataForecastREST.from_json( +# load_fixture("forecast.json", DOMAIN) +# ) +# get_observation_response_data = ObservationStationREST.from_json( +# load_fixture("station_observation_error.json", DOMAIN) +# ) +# +# data = { +# 24432: WeatherFlowDataREST( +# weather=get_forecast_response_data, +# observation=get_observation_response_data, +# station=get_stations_response_data.stations[0], +# device_observations=None, +# ) +# } +# +# with patch( +# "homeassistant.components.weatherflow_cloud.coordinator.WeatherFlowRestAPI", +# autospec=True, +# ) as mock_api_class: +# # Create an instance of AsyncMock for the API +# mock_api = AsyncMock() +# mock_api.get_all_data.return_value = data +# # Patch the class to return our mock_api instance +# mock_api_class.return_value = mock_api +# +# yield mock_api diff --git a/tests/components/weatherflow_cloud/fixtures/station_observation_error.json b/tests/components/weatherflow_cloud/fixtures/station_observation_error.json new file mode 100644 index 00000000000..41bb452c911 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/station_observation_error.json @@ -0,0 +1,99 @@ +{ + "elevation": 2063.150146484375, + "is_public": true, + "latitude": 43.94962, + "longitude": -102.86831, + "obs": [ + { + "air_density": 0.96139, + "air_temperature": 10.5, + "barometric_pressure": 782.8, + "brightness": 757, + "delta_t": 8.4, + "dew_point": -10.4, + "feels_like": 10.5, + "heat_index": 10.5, + "lightning_strike_count": 0, + "lightning_strike_count_last_1hr": 0, + "lightning_strike_count_last_3hr": 0, + "lightning_strike_last_distance": 26, + "precip": 0.0, + "precip_accum_last_1hr": 0.0, + "precip_accum_local_day": 0.0, + "precip_accum_local_day_final": 0.0, + "precip_accum_local_yesterday": 0.0, + "precip_accum_local_yesterday_final": 0.0, + "precip_analysis_type_yesterday": 0, + "precip_minutes_local_day": 0, + "precip_minutes_local_yesterday": 0, + "precip_minutes_local_yesterday_final": 0, + "pressure_trend": "steady", + "relative_humidity": 22, + "sea_level_pressure": 1006.2, + "solar_radiation": 6, + "station_pressure": 782.8, + "timestamp": 1708994629, + "uv": 0.03, + "wet_bulb_globe_temperature": 4.6, + "wet_bulb_temperature": 2.1, + "wind_avg": 1.4, + "wind_chill": 10.5, + "wind_direction": 203, + "wind_gust": 3.2, + "wind_lull": 0.3 + } + ], + "outdoor_keys": [ + "timestamp", + "air_temperature", + "barometric_pressure", + "station_pressure", + "pressure_trend", + "sea_level_pressure", + "relative_humidity", + "precip", + "precip_accum_last_1hr", + "precip_accum_local_day", + 
"precip_accum_local_day_final", + "precip_accum_local_yesterday_final", + "precip_minutes_local_day", + "precip_minutes_local_yesterday_final", + "wind_avg", + "wind_direction", + "wind_gust", + "wind_lull", + "solar_radiation", + "uv", + "brightness", + "lightning_strike_last_epoch", + "lightning_strike_last_distance", + "lightning_strike_count", + "lightning_strike_count_last_1hr", + "lightning_strike_count_last_3hr", + "feels_like", + "heat_index", + "wind_chill", + "dew_point", + "wet_bulb_temperature", + "wet_bulb_globe_temperature", + "delta_t", + "air_density" + ], + "public_name": "My Home Station", + "station_id": 24432, + "station_name": "My Home Station", + "station_units": { + "units_direction": "degrees", + "units_distance": "mi", + "units_other": "metric", + "units_precip": "in", + "units_pressure": "hpa", + "units_temp": "f", + "units_wind": "bft" + }, + "status": { + "status_code": 0, + "status_message": "SUCCESS" + }, + "timezone": "America/Denver" +} diff --git a/tests/components/weatherflow_cloud/test_sensor.py b/tests/components/weatherflow_cloud/test_sensor.py index 35ce098f5a7..4d6ff0c8c9f 100644 --- a/tests/components/weatherflow_cloud/test_sensor.py +++ b/tests/components/weatherflow_cloud/test_sensor.py @@ -1,16 +1,25 @@ """Tests for the WeatherFlow Cloud sensor platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion +from weatherflow4py.models.rest.observation import ObservationStationREST -from homeassistant.const import Platform +from homeassistant.components.weatherflow_cloud import DOMAIN +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . 
import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) async def test_all_entities( @@ -27,3 +36,42 @@ async def test_all_entities( await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_all_entities_with_lightning_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_api: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test all entities.""" + + get_observation_response_data = ObservationStationREST.from_json( + load_fixture("station_observation_error.json", DOMAIN) + ) + + with patch( + "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("sensor.my_home_station_lightning_last_strike").state + == "2024-02-07T23:01:15+00:00" + ) + + # Update the data in our API + all_data = await mock_api.get_all_data() + all_data[24432].observation = get_observation_response_data + mock_api.get_all_data.return_value = all_data + + # Move time forward + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get("sensor.my_home_station_lightning_last_strike").state + == STATE_UNKNOWN + ) From ec256166cc8fd421bcdc7c2c5fb8999b2dbc5db1 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Wed, 21 Aug 2024 16:56:57 +0200 Subject: [PATCH 0949/1635] Rename OpenThermGatewayDevice to OpenThermGatewayHub (#124361) * Rename OpenThermGatewayDevice to OpenThermGatewayHub Update references accordingly * Update tests --- .../components/opentherm_gw/__init__.py | 62 +++++++++---------- .../components/opentherm_gw/binary_sensor.py | 14 ++--- .../components/opentherm_gw/climate.py | 16 ++--- .../components/opentherm_gw/entity.py | 16 ++--- .../components/opentherm_gw/sensor.py | 14 ++--- .../opentherm_gw/test_config_flow.py | 2 +- tests/components/opentherm_gw/test_init.py | 4 +- 7 files changed, 64 insertions(+), 64 deletions(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index a0d791fddd4..30410f73c2d 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -99,7 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if DATA_OPENTHERM_GW not in hass.data: hass.data[DATA_OPENTHERM_GW] = {DATA_GATEWAYS: {}} - gateway = OpenThermGatewayDevice(hass, config_entry) + gateway = OpenThermGatewayHub(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway if config_entry.options.get(CONF_PRECISION): @@ -273,9 +273,9 @@ def register_services(hass: HomeAssistant) -> None: async def reset_gateway(call: ServiceCall) -> None: """Reset the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] mode_rst = gw_vars.OTGW_MODE_RESET - await gw_dev.gateway.set_mode(mode_rst) + await gw_hub.gateway.set_mode(mode_rst) hass.services.async_register( DOMAIN, SERVICE_RESET_GATEWAY, reset_gateway, service_reset_schema @@ -283,8 +283,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_ch_ovrd(call: ServiceCall) -> 
None: """Set the central heating override on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0) hass.services.async_register( DOMAIN, @@ -295,8 +295,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_control_setpoint(call: ServiceCall) -> None: """Set the control setpoint on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, @@ -307,8 +307,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_dhw_ovrd(call: ServiceCall) -> None: """Set the domestic hot water override on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) hass.services.async_register( DOMAIN, @@ -319,8 +319,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_dhw_setpoint(call: ServiceCall) -> None: """Set the domestic hot water setpoint on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, @@ -331,10 +331,10 @@ def register_services(hass: HomeAssistant) -> None: async def set_device_clock(call: ServiceCall) -> None: """Set the clock on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] attr_date = call.data[ATTR_DATE] attr_time = call.data[ATTR_TIME] - await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time)) + await gw_hub.gateway.set_clock(datetime.combine(attr_date, attr_time)) hass.services.async_register( DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema @@ -342,10 +342,10 @@ def register_services(hass: HomeAssistant) -> None: async def set_gpio_mode(call: ServiceCall) -> None: """Set the OpenTherm Gateway GPIO modes.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gpio_id = call.data[ATTR_ID] gpio_mode = call.data[ATTR_MODE] - await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode) + await gw_hub.gateway.set_gpio_mode(gpio_id, gpio_mode) hass.services.async_register( DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema @@ -353,10 +353,10 @@ def register_services(hass: HomeAssistant) -> None: async def set_led_mode(call: ServiceCall) -> None: """Set the OpenTherm Gateway LED modes.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = 
hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] led_id = call.data[ATTR_ID] led_mode = call.data[ATTR_MODE] - await gw_dev.gateway.set_led_mode(led_id, led_mode) + await gw_hub.gateway.set_led_mode(led_id, led_mode) hass.services.async_register( DOMAIN, SERVICE_SET_LED_MODE, set_led_mode, service_set_led_mode_schema @@ -364,12 +364,12 @@ def register_services(hass: HomeAssistant) -> None: async def set_max_mod(call: ServiceCall) -> None: """Set the max modulation level.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] level = call.data[ATTR_LEVEL] if level == -1: # Backend only clears setting on non-numeric values. level = "-" - await gw_dev.gateway.set_max_relative_mod(level) + await gw_hub.gateway.set_max_relative_mod(level) hass.services.async_register( DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema @@ -377,8 +377,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_outside_temp(call: ServiceCall) -> None: """Provide the outside temperature to the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema @@ -386,8 +386,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_setback_temp(call: ServiceCall) -> None: """Set the OpenTherm Gateway SetBack temperature.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema @@ -395,10 +395,10 @@ def register_services(hass: HomeAssistant) -> None: async def send_transparent_cmd(call: ServiceCall) -> None: """Send a transparent OpenTherm Gateway command.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] transp_cmd = call.data[ATTR_TRANSP_CMD] transp_arg = call.data[ATTR_TRANSP_ARG] - await gw_dev.gateway.send_transparent_command(transp_cmd, transp_arg) + await gw_hub.gateway.send_transparent_command(transp_cmd, transp_arg) hass.services.async_register( DOMAIN, @@ -416,20 +416,20 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -class OpenThermGatewayDevice: - """OpenTherm Gateway device class.""" +class OpenThermGatewayHub: + """OpenTherm Gateway hub class.""" def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the OpenTherm Gateway.""" self.hass = hass self.device_path = config_entry.data[CONF_DEVICE] - self.gw_id = config_entry.data[CONF_ID] + self.hub_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] self.climate_config = config_entry.options self.config_entry_id = config_entry.entry_id self.status = gw_vars.DEFAULT_STATUS - self.update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_update" - self.options_update_signal = 
f"{DATA_OPENTHERM_GW}_{self.gw_id}_options_update" + self.update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_update" + self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_options_update" self.gateway = pyotgw.OpenThermGateway() self.gw_version = None @@ -453,7 +453,7 @@ class OpenThermGatewayDevice: dev_reg = dr.async_get(self.hass) gw_dev = dev_reg.async_get_or_create( config_entry_id=self.config_entry_id, - identifiers={(DOMAIN, self.gw_id)}, + identifiers={(DOMAIN, self.hub_id)}, name=self.name, manufacturer="Schelte Bron", model="OpenTherm Gateway", diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index 2d55cd9ddfb..f978a2695d7 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import OpenThermGatewayDevice +from . import OpenThermGatewayHub from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW from .entity import OpenThermEntity, OpenThermEntityDescription @@ -290,10 +290,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the OpenTherm Gateway binary sensors.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( - OpenThermBinarySensor(gw_dev, source, description) + OpenThermBinarySensor(gw_hub, source, description) for sources, description in BINARY_SENSOR_INFO for source in sources ) @@ -306,17 +306,17 @@ class OpenThermBinarySensor(OpenThermEntity, BinarySensorEntity): def __init__( self, - gw_dev: OpenThermGatewayDevice, + gw_hub: OpenThermGatewayHub, source: str, description: OpenThermBinarySensorEntityDescription, ) -> None: """Initialize the binary sensor.""" self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, - f"{description.key}_{source}_{gw_dev.gw_id}", - hass=gw_dev.hass, + f"{description.key}_{source}_{gw_hub.hub_id}", + hass=gw_hub.hass, ) - super().__init__(gw_dev, source, description) + super().__init__(gw_hub, source, description) @callback def receive_report(self, status: dict[str, dict]) -> None: diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 5eb1246e55f..bf295fb1fb7 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -87,13 +87,13 @@ class OpenThermClimate(ClimateEntity): _current_operation: HVACAction | None = None _enable_turn_on_off_backwards_compatibility = False - def __init__(self, gw_dev, options): + def __init__(self, gw_hub, options): """Initialize the device.""" - self._gateway = gw_dev + self._gateway = gw_hub self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, gw_dev.gw_id, hass=gw_dev.hass + ENTITY_ID_FORMAT, gw_hub.hub_id, hass=gw_hub.hass ) - self.friendly_name = gw_dev.name + self.friendly_name = gw_hub.name self._attr_name = self.friendly_name self.floor_temp = options.get(CONF_FLOOR_TEMP, DEFAULT_FLOOR_TEMP) self.temp_read_precision = options.get(CONF_READ_PRECISION) @@ -102,13 +102,13 @@ class OpenThermClimate(ClimateEntity): self._unsub_options = None self._unsub_updates = None self._attr_device_info = DeviceInfo( - 
identifiers={(DOMAIN, gw_dev.gw_id)}, + identifiers={(DOMAIN, gw_hub.hub_id)}, manufacturer="Schelte Bron", model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, + name=gw_hub.name, + sw_version=gw_hub.gw_version, ) - self._attr_unique_id = gw_dev.gw_id + self._attr_unique_id = gw_hub.hub_id @callback def update_options(self, entry): diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py index 1de4aa0a2d9..a1035b946c2 100644 --- a/homeassistant/components/opentherm_gw/entity.py +++ b/homeassistant/components/opentherm_gw/entity.py @@ -9,7 +9,7 @@ from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity, EntityDescription -from . import OpenThermGatewayDevice +from . import OpenThermGatewayHub from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -37,27 +37,27 @@ class OpenThermEntity(Entity): def __init__( self, - gw_dev: OpenThermGatewayDevice, + gw_hub: OpenThermGatewayHub, source: str, description: OpenThermEntityDescription, ) -> None: """Initialize the entity.""" self.entity_description = description - self._gateway = gw_dev + self._gateway = gw_hub self._source = source friendly_name_format = ( f"{description.friendly_name_format} ({TRANSLATE_SOURCE[source]})" if TRANSLATE_SOURCE[source] is not None else description.friendly_name_format ) - self._attr_name = friendly_name_format.format(gw_dev.name) - self._attr_unique_id = f"{gw_dev.gw_id}-{source}-{description.key}" + self._attr_name = friendly_name_format.format(gw_hub.name) + self._attr_unique_id = f"{gw_hub.hub_id}-{source}-{description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_dev.gw_id)}, + identifiers={(DOMAIN, gw_hub.hub_id)}, manufacturer="Schelte Bron", model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, + name=gw_hub.name, + sw_version=gw_hub.gw_version, ) async def async_added_to_hass(self) -> None: diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index 20a69def433..fb30b2ce35c 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -25,7 +25,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import OpenThermGatewayDevice +from . 
import OpenThermGatewayHub from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW from .entity import OpenThermEntity, OpenThermEntityDescription @@ -624,11 +624,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the OpenTherm Gateway sensors.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( OpenThermSensor( - gw_dev, + gw_hub, source, description, ) @@ -644,17 +644,17 @@ class OpenThermSensor(OpenThermEntity, SensorEntity): def __init__( self, - gw_dev: OpenThermGatewayDevice, + gw_hub: OpenThermGatewayHub, source: str, description: OpenThermSensorEntityDescription, ) -> None: """Initialize the OpenTherm Gateway sensor.""" self.entity_id = async_generate_entity_id( ENTITY_ID_FORMAT, - f"{description.key}_{source}_{gw_dev.gw_id}", - hass=gw_dev.hass, + f"{description.key}_{source}_{gw_hub.hub_id}", + hass=gw_hub.hass, ) - super().__init__(gw_dev, source, description) + super().__init__(gw_hub, source, description) @callback def receive_report(self, status: dict[str, dict]) -> None: diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index 24b41df8124..e61a87bb55e 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -223,7 +223,7 @@ async def test_options_migration(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.connect_and_subscribe", + "homeassistant.components.opentherm_gw.OpenThermGatewayHub.connect_and_subscribe", return_value=True, ), patch( diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index a1ff5b75f47..a466f788f1a 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -40,7 +40,7 @@ async def test_device_registry_insert( with ( patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup", + "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", return_value=None, ), patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS), @@ -72,7 +72,7 @@ async def test_device_registry_update( with ( patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup", + "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", return_value=None, ), patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS_UPD), From e56c235424f2ca0d2c1b9cf6baec1eb70ff2c737 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 21 Aug 2024 09:59:41 -0500 Subject: [PATCH 0950/1635] Bump async-interrupt to 1.2.0 (#124360) changelog: https://github.com/bdraco/async_interrupt/compare/v1.1.2...v1.2.0 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 58bf203c2de..31100828978 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -8,7 +8,7 @@ aiohttp==3.10.5 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.1.2 +async-interrupt==1.2.0 async-upnp-client==0.40.0 atomicwrites-homeassistant==1.4.1 attrs==23.2.0 diff --git a/pyproject.toml b/pyproject.toml index 0f3dfd92067..c9b0ed43e51 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,7 @@ dependencies = [ "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", "astral==2.2", - "async-interrupt==1.1.2", + "async-interrupt==1.2.0", "attrs==23.2.0", "atomicwrites-homeassistant==1.4.1", "awesomeversion==24.6.0", diff --git a/requirements.txt b/requirements.txt index 102358cc75e..35290a5ff17 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,7 @@ aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.1.2 +async-interrupt==1.2.0 attrs==23.2.0 atomicwrites-homeassistant==1.4.1 awesomeversion==24.6.0 From 88b95c1236ca91e5f448ab842be2e98e31c1139e Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Wed, 21 Aug 2024 17:03:21 +0200 Subject: [PATCH 0951/1635] Remove unneeded check for Bang & Olufsen events and device update (#124363) Remove unneeded check for device and HomeAssistant availability --- homeassistant/components/bang_olufsen/util.py | 5 +---- homeassistant/components/bang_olufsen/websocket.py | 9 --------- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/homeassistant/components/bang_olufsen/util.py b/homeassistant/components/bang_olufsen/util.py index 617eb4b1df6..c54b3059ee4 100644 --- a/homeassistant/components/bang_olufsen/util.py +++ b/homeassistant/components/bang_olufsen/util.py @@ -9,11 +9,8 @@ from homeassistant.helpers.device_registry import DeviceEntry from .const import DOMAIN -def get_device(hass: HomeAssistant | None, unique_id: str) -> DeviceEntry | None: +def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry: """Get the device.""" - if not isinstance(hass, HomeAssistant): - return None - device_registry = dr.async_get(hass) device = device_registry.async_get_device({(DOMAIN, unique_id)}) assert device diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index dc505ddf3c4..0c0a5096d91 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -157,11 +157,6 @@ class BangOlufsenWebsocket(BangOlufsenBase): software_status = await self._client.get_softwareupdate_status() # Update the HA device if the sw version does not match - if not self._device: - self._device = get_device(self.hass, self._unique_id) - - assert self._device - if software_status.software_version != self._device.sw_version: device_registry = dr.async_get(self.hass) @@ -172,10 +167,6 @@ class BangOlufsenWebsocket(BangOlufsenBase): def on_all_notifications_raw(self, notification: dict) -> None: """Receive all notifications.""" - if not self._device: - self._device = get_device(self.hass, self._unique_id) - - 
assert self._device # Add the device_id and serial_number to the notification notification["device_id"] = self._device.id From d86b81649138698090214f1250cdec532d80fab0 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Wed, 21 Aug 2024 17:11:01 +0200 Subject: [PATCH 0952/1635] Convert Bang & Olufsen testing logging patches to caplog (#124366) * Convert logging patches to caplog * Remove unnecessary caplog log level handling --- .../bang_olufsen/test_media_player.py | 83 ++++++++++--------- 1 file changed, 42 insertions(+), 41 deletions(-) diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 74867a8eedf..37f375bdb39 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -1,7 +1,8 @@ """Test the Bang & Olufsen media_player entity.""" from contextlib import nullcontext as does_not_raise -from unittest.mock import ANY, patch +import logging +from unittest.mock import patch from mozart_api.models import PlaybackContentMetadata import pytest @@ -71,21 +72,21 @@ from tests.typing import WebSocketGenerator async def test_initialization( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_mozart_client + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_config_entry: MockConfigEntry, + mock_mozart_client, ) -> None: """Test the integration is initialized properly in _initialize, async_added_to_hass and __init__.""" + caplog.set_level(logging.DEBUG) + # Setup entity - with patch( - "homeassistant.components.bang_olufsen.media_player._LOGGER.debug" - ) as mock_logger: - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) # Ensure that the logger has been called with the debug message - mock_logger.assert_called_once_with( - "Connected to: %s %s running SW %s", "Beosound Balance", "11111111", "1.0.0" - ) + assert "Connected to: Beosound Balance 11111111 running SW 1.0.0" in caplog.text # Check state (The initial state in this test does not contain all that much. # States are tested using simulated WebSocket events.) 
@@ -167,7 +168,10 @@ async def test_async_update_playback_metadata( async def test_async_update_playback_error( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_mozart_client, + mock_config_entry, ) -> None: """Test _async_update_playback_error.""" @@ -175,18 +179,15 @@ async def test_async_update_playback_error( await hass.config_entries.async_setup(mock_config_entry.entry_id) # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable - with patch("homeassistant.helpers.dispatcher._LOGGER.error") as mock_logger: - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_ERROR}", - TEST_PLAYBACK_ERROR, - ) + async_dispatcher_send( + hass, + f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_ERROR}", + TEST_PLAYBACK_ERROR, + ) - # The traceback can't be tested, so it is replaced with "ANY" - mock_logger.assert_called_once_with( - "%s\n%s", - "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)", - ANY, + assert ( + "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)" + in caplog.text ) @@ -731,32 +732,32 @@ async def test_async_play_media_overlay_absolute_volume_uri( async def test_async_play_media_overlay_invalid_offset_volume_tts( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_mozart_client, + mock_config_entry, ) -> None: """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - with patch( - "homeassistant.components.bang_olufsen.media_player._LOGGER.warning" - ) as mock_logger: - await hass.services.async_call( - "media_player", - "play_media", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "Dette er en test", - ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", - ATTR_MEDIA_ANNOUNCE: True, - ATTR_MEDIA_EXTRA: { - "overlay_offset_volume": 20, - "overlay_tts_language": "da-dk", - }, + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "Dette er en test", + ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: { + "overlay_offset_volume": 20, + "overlay_tts_language": "da-dk", }, - blocking=True, - ) - mock_logger.assert_called_once_with("Error setting volume") + }, + blocking=True, + ) + assert "Error setting volume" in caplog.text mock_mozart_client.post_overlay_play.assert_called_once_with( TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS From 9399a54c7a1372eaea0973905ff745b84c68d4bc Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Wed, 21 Aug 2024 15:05:09 -0400 Subject: [PATCH 0953/1635] Fix Spotify Media Browsing fails for new config entries (#124368) * initial commit * tests * tests * update tests * update tests * update tests --- .../components/spotify/browse_media.py | 11 +- tests/components/spotify/conftest.py | 128 ++++++++++ .../spotify/snapshots/test_media_browser.ambr | 236 ++++++++++++++++++ .../components/spotify/test_media_browser.py | 61 +++++ 4 files changed, 434 insertions(+), 2 deletions(-) create mode 100644 tests/components/spotify/conftest.py create mode 100644 
tests/components/spotify/snapshots/test_media_browser.ambr create mode 100644 tests/components/spotify/test_media_browser.py diff --git a/homeassistant/components/spotify/browse_media.py b/homeassistant/components/spotify/browse_media.py index cff7cae5ebd..abcb6df6205 100644 --- a/homeassistant/components/spotify/browse_media.py +++ b/homeassistant/components/spotify/browse_media.py @@ -172,10 +172,17 @@ async def async_browse_media( # Check for config entry specifier, and extract Spotify URI parsed_url = yarl.URL(media_content_id) + host = parsed_url.host if ( - parsed_url.host is None - or (entry := hass.config_entries.async_get_entry(parsed_url.host)) is None + host is None + # config entry ids can be upper or lower case. Yarl always returns host + # names in lower case, so we need to look for the config entry in both + or ( + entry := hass.config_entries.async_get_entry(host) + or hass.config_entries.async_get_entry(host.upper()) + ) + is None or not isinstance(entry.runtime_data, HomeAssistantSpotifyData) ): raise BrowseError("Invalid Spotify account specified") diff --git a/tests/components/spotify/conftest.py b/tests/components/spotify/conftest.py new file mode 100644 index 00000000000..3f248b54529 --- /dev/null +++ b/tests/components/spotify/conftest.py @@ -0,0 +1,128 @@ +"""Common test fixtures.""" + +from collections.abc import Generator +from typing import Any +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.spotify import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry_1() -> MockConfigEntry: + """Mock a config entry with an upper case entry id.""" + return MockConfigEntry( + domain=DOMAIN, + title="spotify_1", + data={ + "auth_implementation": "spotify_c95e4090d4d3438b922331e7428f8171", + "token": { + "access_token": "AccessToken", + "token_type": "Bearer", + "expires_in": 3600, + "refresh_token": "RefreshToken", + "scope": "playlist-read-private ...", + "expires_at": 1724198975.8829377, + }, + "id": "32oesphrnacjcf7vw5bf6odx3oiu", + "name": "spotify_account_1", + }, + unique_id="84fce612f5b8", + entry_id="01J5TX5A0FF6G5V0QJX6HBC94T", + ) + + +@pytest.fixture +def mock_config_entry_2() -> MockConfigEntry: + """Mock a config entry with a lower case entry id.""" + return MockConfigEntry( + domain=DOMAIN, + title="spotify_2", + data={ + "auth_implementation": "spotify_c95e4090d4d3438b922331e7428f8171", + "token": { + "access_token": "AccessToken", + "token_type": "Bearer", + "expires_in": 3600, + "refresh_token": "RefreshToken", + "scope": "playlist-read-private ...", + "expires_at": 1724198975.8829377, + }, + "id": "55oesphrnacjcf7vw5bf6odx3oiu", + "name": "spotify_account_2", + }, + unique_id="99fce612f5b8", + entry_id="32oesphrnacjcf7vw5bf6odx3", + ) + + +@pytest.fixture +def spotify_playlists() -> dict[str, Any]: + """Mock the return from getting a list of playlists.""" + return { + "href": "https://api.spotify.com/v1/users/31oesphrnacjcf7vw5bf6odx3oiu/playlists?offset=0&limit=48", + "limit": 48, + "next": None, + "offset": 0, + "previous": None, + "total": 1, + "items": [ + { + "collaborative": False, + "description": "", + "id": "unique_identifier_00", + "name": "Playlist1", + "type": "playlist", + "uri": "spotify:playlist:unique_identifier_00", + } + ], + } + + 
+@pytest.fixture +def spotify_mock(spotify_playlists: dict[str, Any]) -> Generator[MagicMock]: + """Mock the Spotify API.""" + with patch("homeassistant.components.spotify.Spotify") as spotify_mock: + mock = MagicMock() + mock.current_user_playlists.return_value = spotify_playlists + spotify_mock.return_value = mock + yield spotify_mock + + +@pytest.fixture +async def spotify_setup( + hass: HomeAssistant, + spotify_mock: MagicMock, + mock_config_entry_1: MockConfigEntry, + mock_config_entry_2: MockConfigEntry, +): + """Set up the spotify integration.""" + with patch( + "homeassistant.components.spotify.OAuth2Session.async_ensure_token_valid" + ): + await async_setup_component(hass, "application_credentials", {}) + await hass.async_block_till_done() + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("CLIENT_ID", "CLIENT_SECRET"), + "spotify_c95e4090d4d3438b922331e7428f8171", + ) + await hass.async_block_till_done() + mock_config_entry_1.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_1.entry_id) + mock_config_entry_2.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_2.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done(wait_background_tasks=True) + yield diff --git a/tests/components/spotify/snapshots/test_media_browser.ambr b/tests/components/spotify/snapshots/test_media_browser.ambr new file mode 100644 index 00000000000..4236fcb2e79 --- /dev/null +++ b/tests/components/spotify/snapshots/test_media_browser.ambr @@ -0,0 +1,236 @@ +# serializer version: 1 +# name: test_browse_media_categories + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', + 'media_content_type': 'spotify://current_user_playlists', + 'thumbnail': None, + 'title': 'Playlists', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_followed_artists', + 'media_content_type': 'spotify://current_user_followed_artists', + 'thumbnail': None, + 'title': 'Artists', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_albums', + 'media_content_type': 'spotify://current_user_saved_albums', + 'thumbnail': None, + 'title': 'Albums', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_tracks', + 'media_content_type': 'spotify://current_user_saved_tracks', + 'thumbnail': None, + 'title': 'Tracks', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_saved_shows', + 'media_content_type': 'spotify://current_user_saved_shows', + 'thumbnail': None, + 'title': 'Podcasts', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_recently_played', + 'media_content_type': 'spotify://current_user_recently_played', + 'thumbnail': None, + 
'title': 'Recently played', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_artists', + 'media_content_type': 'spotify://current_user_top_artists', + 'thumbnail': None, + 'title': 'Top Artists', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_top_tracks', + 'media_content_type': 'spotify://current_user_top_tracks', + 'thumbnail': None, + 'title': 'Top Tracks', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/categories', + 'media_content_type': 'spotify://categories', + 'thumbnail': None, + 'title': 'Categories', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/featured_playlists', + 'media_content_type': 'spotify://featured_playlists', + 'thumbnail': None, + 'title': 'Featured Playlists', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/new_releases', + 'media_content_type': 'spotify://new_releases', + 'thumbnail': None, + 'title': 'New Releases', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/library', + 'media_content_type': 'spotify://library', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Media Library', + }) +# --- +# name: test_browse_media_playlists + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:unique_identifier_00', + 'media_content_type': 'spotify://playlist', + 'thumbnail': None, + 'title': 'Playlist1', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', + 'media_content_type': 'spotify://current_user_playlists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Playlists', + }) +# --- +# name: test_browse_media_playlists[01J5TX5A0FF6G5V0QJX6HBC94T] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/spotify:playlist:unique_identifier_00', + 'media_content_type': 'spotify://playlist', + 'thumbnail': None, + 'title': 'Playlist1', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://01j5tx5a0ff6g5v0qjx6hbc94t/current_user_playlists', + 'media_content_type': 'spotify://current_user_playlists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Playlists', + }) +# --- +# name: test_browse_media_playlists[32oesphrnacjcf7vw5bf6odx3] + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': True, + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/spotify:playlist:unique_identifier_00', + 'media_content_type': 'spotify://playlist', + 'thumbnail': None, + 'title': 'Playlist1', + }), + ]), + 
'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3/current_user_playlists', + 'media_content_type': 'spotify://current_user_playlists', + 'not_shown': 0, + 'thumbnail': None, + 'title': 'Playlists', + }) +# --- +# name: test_browse_media_root + dict({ + 'can_expand': True, + 'can_play': False, + 'children': list([ + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://01J5TX5A0FF6G5V0QJX6HBC94T', + 'media_content_type': 'spotify://library', + 'thumbnail': 'https://brands.home-assistant.io/_/spotify/logo.png', + 'title': 'spotify_1', + }), + dict({ + 'can_expand': True, + 'can_play': False, + 'children_media_class': None, + 'media_class': , + 'media_content_id': 'spotify://32oesphrnacjcf7vw5bf6odx3', + 'media_content_type': 'spotify://library', + 'thumbnail': 'https://brands.home-assistant.io/_/spotify/logo.png', + 'title': 'spotify_2', + }), + ]), + 'children_media_class': , + 'media_class': , + 'media_content_id': 'spotify://', + 'media_content_type': 'spotify', + 'not_shown': 0, + 'thumbnail': 'https://brands.home-assistant.io/_/spotify/logo.png', + 'title': 'Spotify', + }) +# --- diff --git a/tests/components/spotify/test_media_browser.py b/tests/components/spotify/test_media_browser.py new file mode 100644 index 00000000000..2b47aed9ee3 --- /dev/null +++ b/tests/components/spotify/test_media_browser.py @@ -0,0 +1,61 @@ +"""Test the media browser interface.""" + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.spotify import DOMAIN +from homeassistant.components.spotify.browse_media import async_browse_media +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + await async_setup_component(hass, DOMAIN, {}) + await hass.async_block_till_done(wait_background_tasks=True) + + +async def test_browse_media_root( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + spotify_setup, +) -> None: + """Test browsing the root.""" + response = await async_browse_media(hass, None, None) + assert response.as_dict() == snapshot + + +async def test_browse_media_categories( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + spotify_setup, +) -> None: + """Test browsing categories.""" + response = await async_browse_media( + hass, "spotify://library", "spotify://01J5TX5A0FF6G5V0QJX6HBC94T" + ) + assert response.as_dict() == snapshot + + +@pytest.mark.parametrize( + ("config_entry_id"), [("01J5TX5A0FF6G5V0QJX6HBC94T"), ("32oesphrnacjcf7vw5bf6odx3")] +) +async def test_browse_media_playlists( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + config_entry_id: str, + spotify_setup, +) -> None: + """Test browsing playlists for the two config entries.""" + response = await async_browse_media( + hass, + "spotify://current_user_playlists", + f"spotify://{config_entry_id}/current_user_playlists", + ) + assert response.as_dict() == snapshot From 4ef55e50880a03c56a3ef6075c95ed58c012fd7f Mon Sep 17 00:00:00 2001 From: Anrijs Date: Wed, 21 Aug 2024 22:12:20 +0300 Subject: [PATCH 0954/1635] Add Aranet Radon Plus support (#124197) added aranet radon plus 
support --- homeassistant/components/aranet/icons.json | 3 + homeassistant/components/aranet/manifest.json | 2 +- homeassistant/components/aranet/sensor.py | 7 ++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/aranet/__init__.py | 8 +++ tests/components/aranet/test_sensor.py | 66 +++++++++++++++++++ 7 files changed, 87 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aranet/icons.json b/homeassistant/components/aranet/icons.json index 6d6e9a83b03..8e2b66c0150 100644 --- a/homeassistant/components/aranet/icons.json +++ b/homeassistant/components/aranet/icons.json @@ -6,6 +6,9 @@ }, "radiation_rate": { "default": "mdi:radioactive" + }, + "radon_concentration": { + "default": "mdi:radioactive" } } } diff --git a/homeassistant/components/aranet/manifest.json b/homeassistant/components/aranet/manifest.json index 3f74d480c17..6cce7554dd1 100644 --- a/homeassistant/components/aranet/manifest.json +++ b/homeassistant/components/aranet/manifest.json @@ -19,5 +19,5 @@ "documentation": "https://www.home-assistant.io/integrations/aranet", "integration_type": "device", "iot_class": "local_push", - "requirements": ["aranet4==2.3.4"] + "requirements": ["aranet4==2.4.0"] } diff --git a/homeassistant/components/aranet/sensor.py b/homeassistant/components/aranet/sensor.py index c0fe194e87b..1dc4b9f956e 100644 --- a/homeassistant/components/aranet/sensor.py +++ b/homeassistant/components/aranet/sensor.py @@ -99,6 +99,13 @@ SENSOR_DESCRIPTIONS = { suggested_display_precision=4, scale=0.000001, ), + "radon_concentration": AranetSensorEntityDescription( + key="radon_concentration", + translation_key="radon_concentration", + name="Radon Concentration", + native_unit_of_measurement="Bq/m³", + state_class=SensorStateClass.MEASUREMENT, + ), "battery": AranetSensorEntityDescription( key="battery", name="Battery", diff --git a/requirements_all.txt b/requirements_all.txt index a7bee65a8b1..a6c7a9a8918 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -470,7 +470,7 @@ apsystems-ez1==2.2.1 aqualogic==2.6 # homeassistant.components.aranet -aranet4==2.3.4 +aranet4==2.4.0 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7585c0cfa4e..bb4a4137ebd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -440,7 +440,7 @@ aprslib==0.7.2 apsystems-ez1==2.2.1 # homeassistant.components.aranet -aranet4==2.3.4 +aranet4==2.4.0 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 diff --git a/tests/components/aranet/__init__.py b/tests/components/aranet/__init__.py index 18bebfb44a4..711c605fd28 100644 --- a/tests/components/aranet/__init__.py +++ b/tests/components/aranet/__init__.py @@ -82,3 +82,11 @@ VALID_ARANET_RADIATION_DATA_SERVICE_INFO = fake_service_info( 1794: b"\x02!&\x04\x01\x00`-\x00\x00\x08\x98\x05\x00n\x00\x00d\x00,\x01\xfd\x00\xc7" }, ) + +VALID_ARANET_RADON_DATA_SERVICE_INFO = fake_service_info( + "AranetRn+ 12345", + "0000fce0-0000-1000-8000-00805f9b34fb", + { + 1794: b"\x03!\x04\x06\x01\x00\x00\x00\x07\x00\xfe\x01\xc9'\xce\x01\x00d\x01X\x02\xf6\x01\x08" + }, +) diff --git a/tests/components/aranet/test_sensor.py b/tests/components/aranet/test_sensor.py index c932a92c1e8..7bd00af4837 100644 --- a/tests/components/aranet/test_sensor.py +++ b/tests/components/aranet/test_sensor.py @@ -11,6 +11,7 @@ from . 
import ( DISABLED_INTEGRATIONS_SERVICE_INFO, VALID_ARANET2_DATA_SERVICE_INFO, VALID_ARANET_RADIATION_DATA_SERVICE_INFO, + VALID_ARANET_RADON_DATA_SERVICE_INFO, VALID_DATA_SERVICE_INFO, ) @@ -188,6 +189,71 @@ async def test_sensors_aranet4(hass: HomeAssistant) -> None: await hass.async_block_till_done() +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors_aranetrn(hass: HomeAssistant) -> None: + """Test setting up creates the sensors for Aranet Radon device.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:ee:ff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 0 + inject_bluetooth_service_info(hass, VALID_ARANET_RADON_DATA_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all("sensor")) == 6 + + batt_sensor = hass.states.get("sensor.aranetrn_12345_battery") + batt_sensor_attrs = batt_sensor.attributes + assert batt_sensor.state == "100" + assert batt_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Battery" + assert batt_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert batt_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + co2_sensor = hass.states.get("sensor.aranetrn_12345_radon_concentration") + co2_sensor_attrs = co2_sensor.attributes + assert co2_sensor.state == "7" + assert co2_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Radon Concentration" + assert co2_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "Bq/m³" + assert co2_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humid_sensor = hass.states.get("sensor.aranetrn_12345_humidity") + humid_sensor_attrs = humid_sensor.attributes + assert humid_sensor.state == "46.2" + assert humid_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Humidity" + assert humid_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert humid_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + temp_sensor = hass.states.get("sensor.aranetrn_12345_temperature") + temp_sensor_attrs = temp_sensor.attributes + assert temp_sensor.state == "25.5" + assert temp_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Temperature" + assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C" + assert temp_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + press_sensor = hass.states.get("sensor.aranetrn_12345_pressure") + press_sensor_attrs = press_sensor.attributes + assert press_sensor.state == "1018.5" + assert press_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Pressure" + assert press_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "hPa" + assert press_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + interval_sensor = hass.states.get("sensor.aranetrn_12345_update_interval") + interval_sensor_attrs = interval_sensor.attributes + assert interval_sensor.state == "600" + assert ( + interval_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Update Interval" + ) + assert interval_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "s" + assert interval_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_smart_home_integration_disabled(hass: HomeAssistant) -> None: """Test disabling smart home integration marks entities as unavailable.""" From e9798cd1b47e1dd895f089eb50c1c0c1be761087 Mon Sep 17 00:00:00 2001 From: Angel Nunez Mencias Date: Wed, 21 Aug 2024 
21:14:03 +0200 Subject: [PATCH 0955/1635] update ttn_client - fix crash with SenseCAP devices (#124370) update ttn_client --- homeassistant/components/thethingsnetwork/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/thethingsnetwork/manifest.json b/homeassistant/components/thethingsnetwork/manifest.json index c39b2b7c421..8d826750e39 100644 --- a/homeassistant/components/thethingsnetwork/manifest.json +++ b/homeassistant/components/thethingsnetwork/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/thethingsnetwork", "integration_type": "hub", "iot_class": "cloud_polling", - "requirements": ["ttn_client==1.1.0"] + "requirements": ["ttn_client==1.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index a6c7a9a8918..4820889dfab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2810,7 +2810,7 @@ transmission-rpc==7.0.3 ttls==1.8.3 # homeassistant.components.thethingsnetwork -ttn_client==1.1.0 +ttn_client==1.2.0 # homeassistant.components.tuya tuya-device-sharing-sdk==0.1.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bb4a4137ebd..6f1e23f9d10 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2211,7 +2211,7 @@ transmission-rpc==7.0.3 ttls==1.8.3 # homeassistant.components.thethingsnetwork -ttn_client==1.1.0 +ttn_client==1.2.0 # homeassistant.components.tuya tuya-device-sharing-sdk==0.1.9 From 5f53d3f917d985eb6ad4ca01300c43ccfa512ed2 Mon Sep 17 00:00:00 2001 From: Patrick Frazer Date: Wed, 21 Aug 2024 15:56:59 -0400 Subject: [PATCH 0956/1635] Add DROP Alert product support (#117867) * Add DROP Alert product support * Add DROP Alert to sensor selftest * Fix Alert sensor naming ambiguity * Reorder a constant * Alphabetize strings * Remove unnecessary translation key --- .../components/drop_connect/binary_sensor.py | 16 ++++ .../components/drop_connect/const.py | 1 + .../components/drop_connect/sensor.py | 2 + .../components/drop_connect/strings.json | 27 +++--- tests/components/drop_connect/common.py | 23 +++++ .../snapshots/test_binary_sensor.ambr | 94 +++++++++++++++++++ .../drop_connect/snapshots/test_sensor.ambr | 64 +++++++++++++ .../drop_connect/test_binary_sensor.py | 11 +++ tests/components/drop_connect/test_sensor.py | 11 +++ 9 files changed, 236 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/drop_connect/binary_sensor.py b/homeassistant/components/drop_connect/binary_sensor.py index 73e0e254607..093c5bcbb8e 100644 --- a/homeassistant/components/drop_connect/binary_sensor.py +++ b/homeassistant/components/drop_connect/binary_sensor.py @@ -17,6 +17,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( CONF_DEVICE_TYPE, + DEV_ALERT, DEV_HUB, DEV_LEAK_DETECTOR, DEV_PROTECTION_VALVE, @@ -33,8 +34,10 @@ _LOGGER = logging.getLogger(__name__) # Binary sensor type constants +ALERT_SENSOR = "alert_sensor" LEAK_DETECTED = "leak" PENDING_NOTIFICATION = "pending_notification" +POWER = "power" PUMP_STATUS = "pump" RESERVE_IN_USE = "reserve_in_use" SALT_LOW = "salt" @@ -74,10 +77,23 @@ BINARY_SENSORS: list[DROPBinarySensorEntityDescription] = [ translation_key=PUMP_STATUS, value_fn=lambda device: device.drop_api.pump_status(), ), + DROPBinarySensorEntityDescription( + key=ALERT_SENSOR, + translation_key=ALERT_SENSOR, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda device: 
device.drop_api.sensor_high(), + ), + DROPBinarySensorEntityDescription( + key=POWER, + translation_key=None, # Use name provided by binary sensor device class + device_class=BinarySensorDeviceClass.POWER, + value_fn=lambda device: device.drop_api.power(), + ), ] # Defines which binary sensors are used by each device type DEVICE_BINARY_SENSORS: dict[str, list[str]] = { + DEV_ALERT: [ALERT_SENSOR, POWER], DEV_HUB: [LEAK_DETECTED, PENDING_NOTIFICATION], DEV_LEAK_DETECTOR: [LEAK_DETECTED], DEV_PROTECTION_VALVE: [LEAK_DETECTED], diff --git a/homeassistant/components/drop_connect/const.py b/homeassistant/components/drop_connect/const.py index 38a8a57ea72..f1012f9652c 100644 --- a/homeassistant/components/drop_connect/const.py +++ b/homeassistant/components/drop_connect/const.py @@ -11,6 +11,7 @@ CONF_DEVICE_NAME = "name" CONF_DEVICE_OWNER_ID = "drop_device_owner_id" # Values for DROP device types +DEV_ALERT = "alrt" DEV_FILTER = "filt" DEV_HUB = "hub" DEV_LEAK_DETECTOR = "leak" diff --git a/homeassistant/components/drop_connect/sensor.py b/homeassistant/components/drop_connect/sensor.py index 0806737254e..ad123ee13c7 100644 --- a/homeassistant/components/drop_connect/sensor.py +++ b/homeassistant/components/drop_connect/sensor.py @@ -27,6 +27,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( CONF_DEVICE_TYPE, + DEV_ALERT, DEV_FILTER, DEV_HUB, DEV_LEAK_DETECTOR, @@ -222,6 +223,7 @@ DEVICE_SENSORS: dict[str, list[str]] = { ], DEV_FILTER: [BATTERY, CURRENT_FLOW_RATE, CURRENT_SYSTEM_PRESSURE], DEV_LEAK_DETECTOR: [BATTERY, TEMPERATURE], + DEV_ALERT: [BATTERY, TEMPERATURE], DEV_PROTECTION_VALVE: [ BATTERY, CURRENT_FLOW_RATE, diff --git a/homeassistant/components/drop_connect/strings.json b/homeassistant/components/drop_connect/strings.json index 761d134bd18..93df4dc3310 100644 --- a/homeassistant/components/drop_connect/strings.json +++ b/homeassistant/components/drop_connect/strings.json @@ -12,26 +12,27 @@ }, "entity": { "sensor": { - "current_flow_rate": { "name": "Water flow rate" }, - "peak_flow_rate": { "name": "Peak water flow rate today" }, - "water_used_today": { "name": "Total water used today" }, "average_water_used": { "name": "Average daily water usage" }, "capacity_remaining": { "name": "Capacity remaining" }, - "current_system_pressure": { "name": "Current water pressure" }, - "high_system_pressure": { "name": "High water pressure today" }, - "low_system_pressure": { "name": "Low water pressure today" }, - "inlet_tds": { "name": "Inlet TDS" }, - "outlet_tds": { "name": "Outlet TDS" }, "cart1": { "name": "Cartridge 1 life remaining" }, "cart2": { "name": "Cartridge 2 life remaining" }, - "cart3": { "name": "Cartridge 3 life remaining" } + "cart3": { "name": "Cartridge 3 life remaining" }, + "current_flow_rate": { "name": "Water flow rate" }, + "current_system_pressure": { "name": "Current water pressure" }, + "high_system_pressure": { "name": "High water pressure today" }, + "inlet_tds": { "name": "Inlet TDS" }, + "low_system_pressure": { "name": "Low water pressure today" }, + "outlet_tds": { "name": "Outlet TDS" }, + "peak_flow_rate": { "name": "Peak water flow rate today" }, + "water_used_today": { "name": "Total water used today" } }, "binary_sensor": { + "alert_sensor": { "name": "Sensor" }, "leak": { "name": "Leak detected" }, "pending_notification": { "name": "Notification unread" }, + "pump": { "name": "Pump status" }, "reserve_in_use": { "name": "Reserve capacity in use" }, - "salt": { "name": "Salt low" }, - "pump": { "name": "Pump 
status" } + "salt": { "name": "Salt low" } }, "select": { "protect_mode": { @@ -44,8 +45,8 @@ } }, "switch": { - "water": { "name": "Water supply" }, - "bypass": { "name": "Treatment bypass" } + "bypass": { "name": "Treatment bypass" }, + "water": { "name": "Water supply" } } } } diff --git a/tests/components/drop_connect/common.py b/tests/components/drop_connect/common.py index bdba79bbd95..9eb76f57dad 100644 --- a/tests/components/drop_connect/common.py +++ b/tests/components/drop_connect/common.py @@ -34,6 +34,10 @@ TEST_DATA_SALT_TOPIC = "drop_connect/DROP-1_C0FFEE/8" TEST_DATA_SALT = '{"salt":1}' TEST_DATA_SALT_RESET = '{"salt":0}' +TEST_DATA_ALERT_TOPIC = "drop_connect/DROP-1_C0FFEE/81" +TEST_DATA_ALERT = '{"battery":100,"sens":1,"pwrOff":0,"temp":68.2}' +TEST_DATA_ALERT_RESET = '{"battery":0,"sens":0,"pwrOff":1,"temp":0}' + TEST_DATA_LEAK_TOPIC = "drop_connect/DROP-1_C0FFEE/20" TEST_DATA_LEAK = '{"battery":100,"leak":1,"temp":68.2}' TEST_DATA_LEAK_RESET = '{"battery":0,"leak":0,"temp":0}' @@ -109,6 +113,25 @@ def config_entry_salt() -> ConfigEntry: ) +def config_entry_alert() -> ConfigEntry: + """Config entry version 1 fixture.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="DROP-1_C0FFEE_81", + data={ + CONF_COMMAND_TOPIC: "drop_connect/DROP-1_C0FFEE/81/cmd", + CONF_DATA_TOPIC: "drop_connect/DROP-1_C0FFEE/81/#", + CONF_DEVICE_DESC: "Alert", + CONF_DEVICE_ID: 81, + CONF_DEVICE_NAME: "Alert", + CONF_DEVICE_TYPE: "alrt", + CONF_HUB_ID: "DROP-1_C0FFEE", + CONF_DEVICE_OWNER_ID: "DROP-1_C0FFEE_255", + }, + version=1, + ) + + def config_entry_leak() -> ConfigEntry: """Config entry version 1 fixture.""" return MockConfigEntry( diff --git a/tests/components/drop_connect/snapshots/test_binary_sensor.ambr b/tests/components/drop_connect/snapshots/test_binary_sensor.ambr index c42cdb8cde1..9b0cc201573 100644 --- a/tests/components/drop_connect/snapshots/test_binary_sensor.ambr +++ b/tests/components/drop_connect/snapshots/test_binary_sensor.ambr @@ -1,4 +1,98 @@ # serializer version: 1 +# name: test_sensors[alert][binary_sensor.alert_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alert_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'drop_connect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'DROP-1_C0FFEE_81_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[alert][binary_sensor.alert_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Alert Power', + }), + 'context': , + 'entity_id': 'binary_sensor.alert_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensors[alert][binary_sensor.alert_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alert_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 
'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sensor', + 'platform': 'drop_connect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alert_sensor', + 'unique_id': 'DROP-1_C0FFEE_81_alert_sensor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[alert][binary_sensor.alert_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Alert Sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.alert_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_sensors[hub][binary_sensor.hub_drop_1_c0ffee_leak_detected-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/drop_connect/snapshots/test_sensor.ambr b/tests/components/drop_connect/snapshots/test_sensor.ambr index 54e3259e455..a5c91dbe3e4 100644 --- a/tests/components/drop_connect/snapshots/test_sensor.ambr +++ b/tests/components/drop_connect/snapshots/test_sensor.ambr @@ -1,4 +1,68 @@ # serializer version: 1 +# name: test_sensors[alert][sensor.alert_battery-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Alert Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.alert_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[alert][sensor.alert_battery-reset] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Alert Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.alert_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[alert][sensor.alert_temperature-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Alert Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.alert_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.1111111111111', + }) +# --- +# name: test_sensors[alert][sensor.alert_temperature-reset] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Alert Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.alert_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-17.7777777777778', + }) +# --- # name: test_sensors[filter][sensor.filter_battery-data] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/drop_connect/test_binary_sensor.py b/tests/components/drop_connect/test_binary_sensor.py index 895921291ef..ab89e05d809 100644 --- a/tests/components/drop_connect/test_binary_sensor.py +++ b/tests/components/drop_connect/test_binary_sensor.py @@ -10,6 +10,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( + TEST_DATA_ALERT, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT_TOPIC, TEST_DATA_HUB, TEST_DATA_HUB_RESET, TEST_DATA_HUB_TOPIC, @@ -28,6 +31,7 @@ from .common import ( TEST_DATA_SOFTENER, TEST_DATA_SOFTENER_RESET, TEST_DATA_SOFTENER_TOPIC, + config_entry_alert, config_entry_hub, config_entry_leak, config_entry_protection_valve, @@ -44,6 +48,12 @@ from 
tests.typing import MqttMockHAClient ("config_entry", "topic", "reset", "data"), [ (config_entry_hub(), TEST_DATA_HUB_TOPIC, TEST_DATA_HUB_RESET, TEST_DATA_HUB), + ( + config_entry_alert(), + TEST_DATA_ALERT_TOPIC, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT, + ), ( config_entry_leak(), TEST_DATA_LEAK_TOPIC, @@ -77,6 +87,7 @@ from tests.typing import MqttMockHAClient ], ids=[ "hub", + "alert", "leak", "softener", "protection_valve", diff --git a/tests/components/drop_connect/test_sensor.py b/tests/components/drop_connect/test_sensor.py index cb56522a09d..c33f0aefe37 100644 --- a/tests/components/drop_connect/test_sensor.py +++ b/tests/components/drop_connect/test_sensor.py @@ -11,6 +11,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( + TEST_DATA_ALERT, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT_TOPIC, TEST_DATA_FILTER, TEST_DATA_FILTER_RESET, TEST_DATA_FILTER_TOPIC, @@ -32,6 +35,7 @@ from .common import ( TEST_DATA_SOFTENER, TEST_DATA_SOFTENER_RESET, TEST_DATA_SOFTENER_TOPIC, + config_entry_alert, config_entry_filter, config_entry_hub, config_entry_leak, @@ -57,6 +61,12 @@ def only_sensor_platform() -> Generator[None]: ("config_entry", "topic", "reset", "data"), [ (config_entry_hub(), TEST_DATA_HUB_TOPIC, TEST_DATA_HUB_RESET, TEST_DATA_HUB), + ( + config_entry_alert(), + TEST_DATA_ALERT_TOPIC, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT, + ), ( config_entry_leak(), TEST_DATA_LEAK_TOPIC, @@ -96,6 +106,7 @@ def only_sensor_platform() -> Generator[None]: ], ids=[ "hub", + "alert", "leak", "softener", "filter", From 67bc568db67d64cfbc6d2b8c7a44312189731de6 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Wed, 21 Aug 2024 22:18:33 +0200 Subject: [PATCH 0957/1635] Add tests for Bring integration (#123087) * Add tests to bring integration * use more parametrization * json fixture loading, move notification tests --- tests/components/bring/conftest.py | 18 +- tests/components/bring/fixtures/items.json | 26 ++ tests/components/bring/fixtures/lists.json | 14 + .../components/bring/snapshots/test_todo.ambr | 95 ++++++ tests/components/bring/test_init.py | 45 ++- tests/components/bring/test_notification.py | 106 ++++++ tests/components/bring/test_todo.py | 302 ++++++++++++++++++ 7 files changed, 601 insertions(+), 5 deletions(-) create mode 100644 tests/components/bring/fixtures/items.json create mode 100644 tests/components/bring/fixtures/lists.json create mode 100644 tests/components/bring/snapshots/test_todo.ambr create mode 100644 tests/components/bring/test_notification.py create mode 100644 tests/components/bring/test_todo.py diff --git a/tests/components/bring/conftest.py b/tests/components/bring/conftest.py index 6c39c5020f9..60c13a1c208 100644 --- a/tests/components/bring/conftest.py +++ b/tests/components/bring/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from typing import cast from unittest.mock import AsyncMock, patch +import uuid from bring_api.types import BringAuthResponse import pytest @@ -10,7 +11,7 @@ import pytest from homeassistant.components.bring import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture EMAIL = "test-email" PASSWORD = "test-password" @@ -43,10 +44,23 @@ def mock_bring_client() -> Generator[AsyncMock]: client = mock_client.return_value client.uuid = UUID client.login.return_value = cast(BringAuthResponse, {"name": "Bring"}) - 
client.load_lists.return_value = {"lists": []} + client.load_lists.return_value = load_json_object_fixture("lists.json", DOMAIN) + client.get_list.return_value = load_json_object_fixture("items.json", DOMAIN) yield client +@pytest.fixture +def mock_uuid() -> Generator[AsyncMock]: + """Mock uuid.""" + + with patch( + "homeassistant.components.bring.todo.uuid.uuid4", + autospec=True, + ) as mock_client: + mock_client.return_value = uuid.UUID("b669ad23-606a-4652-b302-995d34b1cb1c") + yield mock_client + + @pytest.fixture(name="bring_config_entry") def mock_bring_config_entry() -> MockConfigEntry: """Mock bring configuration entry.""" diff --git a/tests/components/bring/fixtures/items.json b/tests/components/bring/fixtures/items.json new file mode 100644 index 00000000000..43e05a39fbb --- /dev/null +++ b/tests/components/bring/fixtures/items.json @@ -0,0 +1,26 @@ +{ + "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", + "status": "REGISTERED", + "purchase": [ + { + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "itemId": "Paprika", + "specification": "Rot", + "attributes": [] + }, + { + "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", + "itemId": "Pouletbrüstli", + "specification": "Bio", + "attributes": [] + } + ], + "recently": [ + { + "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", + "itemId": "Ananas", + "specification": "", + "attributes": [] + } + ] +} diff --git a/tests/components/bring/fixtures/lists.json b/tests/components/bring/fixtures/lists.json new file mode 100644 index 00000000000..5891d94f7de --- /dev/null +++ b/tests/components/bring/fixtures/lists.json @@ -0,0 +1,14 @@ +{ + "lists": [ + { + "listUuid": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + "name": "Einkauf", + "theme": "ch.publisheria.bring.theme.home" + }, + { + "listUuid": "b4776778-7f6c-496e-951b-92a35d3db0dd", + "name": "Baumarkt", + "theme": "ch.publisheria.bring.theme.home" + } + ] +} diff --git a/tests/components/bring/snapshots/test_todo.ambr b/tests/components/bring/snapshots/test_todo.ambr new file mode 100644 index 00000000000..6a24b4148b7 --- /dev/null +++ b/tests/components/bring/snapshots/test_todo.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_todo[todo.baumarkt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.baumarkt', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Baumarkt', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.baumarkt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Baumarkt', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.baumarkt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_todo[todo.einkauf-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.einkauf', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Einkauf', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.einkauf-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Einkauf', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.einkauf', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git a/tests/components/bring/test_init.py b/tests/components/bring/test_init.py index f1b1f78e775..613b65e38b6 100644 --- a/tests/components/bring/test_init.py +++ b/tests/components/bring/test_init.py @@ -28,9 +28,9 @@ async def setup_integration( await hass.async_block_till_done() +@pytest.mark.usefixtures("mock_bring_client") async def test_load_unload( hass: HomeAssistant, - mock_bring_client: AsyncMock, bring_config_entry: MockConfigEntry, ) -> None: """Test loading and unloading of the config entry.""" @@ -58,7 +58,7 @@ async def test_init_failure( mock_bring_client: AsyncMock, status: ConfigEntryState, exception: Exception, - bring_config_entry: MockConfigEntry | None, + bring_config_entry: MockConfigEntry, ) -> None: """Test an initialization error on integration load.""" mock_bring_client.login.side_effect = exception @@ -79,7 +79,7 @@ async def test_init_exceptions( mock_bring_client: AsyncMock, exception: Exception, expected: Exception, - bring_config_entry: MockConfigEntry | None, + bring_config_entry: MockConfigEntry, ) -> None: """Test an initialization error on integration load.""" bring_config_entry.add_to_hass(hass) @@ -87,3 +87,42 @@ async def test_init_exceptions( with pytest.raises(expected): await async_setup_entry(hass, bring_config_entry) + + +@pytest.mark.parametrize("exception", [BringRequestException, BringParseException]) +@pytest.mark.parametrize("bring_method", ["load_lists", "get_list"]) +async def test_config_entry_not_ready( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, + exception: Exception, + bring_method: str, +) -> None: + """Test config entry not ready.""" + getattr(mock_bring_client, bring_method).side_effect = exception + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + "exception", [None, BringAuthException, BringRequestException, BringParseException] +) +async def test_config_entry_not_ready_auth_error( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, + exception: Exception | None, +) -> None: + """Test config entry not ready from authentication error.""" + + mock_bring_client.load_lists.side_effect = BringAuthException + mock_bring_client.retrieve_new_access_token.side_effect = exception + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/bring/test_notification.py b/tests/components/bring/test_notification.py new file 
mode 100644 index 00000000000..b1fa28335ad --- /dev/null +++ b/tests/components/bring/test_notification.py @@ -0,0 +1,106 @@ +"""Test todo entity notification action of the Bring! integration.""" + +import re +from unittest.mock import AsyncMock + +from bring_api import BringNotificationType, BringRequestException +import pytest + +from homeassistant.components.bring.const import ( + ATTR_ITEM_NAME, + ATTR_NOTIFICATION_TYPE, + DOMAIN, + SERVICE_PUSH_NOTIFICATION, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from tests.common import MockConfigEntry + + +async def test_send_notification( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test send bring push notification.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + DOMAIN, + SERVICE_PUSH_NOTIFICATION, + service_data={ + ATTR_NOTIFICATION_TYPE: "GOING_SHOPPING", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.notify.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + BringNotificationType.GOING_SHOPPING, + None, + ) + + +async def test_send_notification_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test send bring push notification with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + mock_bring_client.notify.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, + match="Failed to send push notification for bring due to a connection error, try again later", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PUSH_NOTIFICATION, + service_data={ + ATTR_NOTIFICATION_TYPE: "GOING_SHOPPING", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +async def test_send_notification_service_validation_error( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test send bring push notification.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + mock_bring_client.notify.side_effect = ValueError + with pytest.raises( + HomeAssistantError, + match=re.escape( + "Failed to perform action bring.send_message. 'URGENT_MESSAGE' requires a value @ data['item']. Got None" + ), + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PUSH_NOTIFICATION, + service_data={ATTR_NOTIFICATION_TYPE: "URGENT_MESSAGE", ATTR_ITEM_NAME: ""}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) diff --git a/tests/components/bring/test_todo.py b/tests/components/bring/test_todo.py new file mode 100644 index 00000000000..d67429e8f49 --- /dev/null +++ b/tests/components/bring/test_todo.py @@ -0,0 +1,302 @@ +"""Test for todo platform of the Bring! 
integration.""" + +import re +from unittest.mock import AsyncMock + +from bring_api import BringItemOperation, BringRequestException +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_ITEM, + ATTR_RENAME, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_bring_client") +async def test_todo( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Snapshot test states of todo platform.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform( + hass, entity_registry, snapshot, bring_config_entry.entry_id + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_add_item( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test add item to list.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel", ATTR_DESCRIPTION: "rot"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.save_item.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + "Äpfel", + "rot", + "b669ad23-606a-4652-b302-995d34b1cb1c", + ) + + +async def test_add_item_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test add item to list with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + mock_bring_client.save_item.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, match="Failed to save item Äpfel to Bring! 
list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel", ATTR_DESCRIPTION: "rot"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_update_item( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test update item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Paprika", + ATTR_DESCRIPTION: "Rot", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.batch_update_list.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + { + "itemId": "Paprika", + "spec": "Rot", + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + }, + BringItemOperation.ADD, + ) + + +async def test_update_item_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test update item with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + mock_bring_client.batch_update_list.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, match="Failed to update item Paprika to Bring! list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Paprika", + ATTR_DESCRIPTION: "Rot", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_rename_item( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test rename item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Gurke", + ATTR_DESCRIPTION: "", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.batch_update_list.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + [ + { + "itemId": "Paprika", + "spec": "", + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "operation": BringItemOperation.REMOVE, + }, + { + "itemId": "Gurke", + "spec": "", + "uuid": "b669ad23-606a-4652-b302-995d34b1cb1c", + "operation": BringItemOperation.ADD, + }, + ], + ) + + +async def test_rename_item_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test rename item with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + mock_bring_client.batch_update_list.side_effect = BringRequestException + with pytest.raises( + 
HomeAssistantError, match="Failed to rename item Gurke to Bring! list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Gurke", + ATTR_DESCRIPTION: "", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_delete_items( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test delete item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.batch_update_list.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + [ + { + "itemId": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "spec": "", + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + }, + ], + BringItemOperation.REMOVE, + ) + + +async def test_delete_items_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test delete item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + mock_bring_client.batch_update_list.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, + match=re.escape("Failed to delete 1 item(s) from Bring! 
list"), + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) From 913e5404da064d0b4a0c57be82b1af467ec75ed6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 21 Aug 2024 22:42:58 +0200 Subject: [PATCH 0958/1635] Improve config flow type hints (part 1) (#124343) * Improve config flow type hints * Revert sms --- homeassistant/components/airtouch4/config_flow.py | 8 ++++++-- .../components/ambient_station/config_flow.py | 6 +++++- homeassistant/components/control4/config_flow.py | 12 ++++++++++-- homeassistant/components/dexcom/config_flow.py | 13 +++++++++++-- homeassistant/components/econet/config_flow.py | 8 ++++++-- .../components/emulated_roku/config_flow.py | 10 +++++++--- homeassistant/components/enocean/config_flow.py | 8 ++++++-- .../components/environment_canada/config_flow.py | 7 +++++-- homeassistant/components/epson/config_flow.py | 7 +++++-- .../components/flick_electric/config_flow.py | 7 +++++-- homeassistant/components/flo/config_flow.py | 8 ++++++-- .../components/forked_daapd/config_flow.py | 5 ++++- homeassistant/components/foscam/config_flow.py | 8 ++++++-- homeassistant/components/freedompro/config_flow.py | 8 ++++++-- .../components/geonetnz_quakes/config_flow.py | 7 +++++-- .../components/geonetnz_volcano/config_flow.py | 8 ++++++-- homeassistant/components/goodwe/config_flow.py | 5 ++++- homeassistant/components/habitica/config_flow.py | 7 +++++-- homeassistant/components/heos/config_flow.py | 6 ++++-- homeassistant/components/hlk_sw16/config_flow.py | 7 +++++-- homeassistant/components/huisbaasje/config_flow.py | 7 +++++-- homeassistant/components/ialarm/config_flow.py | 7 +++++-- homeassistant/components/insteon/config_flow.py | 5 ++++- homeassistant/components/juicenet/config_flow.py | 7 +++++-- .../components/keymitt_ble/config_flow.py | 4 ++-- homeassistant/components/kmtronic/config_flow.py | 12 ++++++++++-- homeassistant/components/kodi/config_flow.py | 5 ++++- homeassistant/components/konnected/config_flow.py | 4 +++- .../components/meteoclimatic/config_flow.py | 9 ++++++--- homeassistant/components/mill/config_flow.py | 8 ++++++-- homeassistant/components/mobile_app/config_flow.py | 7 +++++-- homeassistant/components/monoprice/config_flow.py | 12 ++++++++++-- homeassistant/components/netgear/config_flow.py | 6 ++++-- homeassistant/components/nexia/config_flow.py | 7 +++++-- homeassistant/components/nextbus/config_flow.py | 2 +- homeassistant/components/nuheat/config_flow.py | 7 +++++-- homeassistant/components/octoprint/config_flow.py | 4 +++- homeassistant/components/omnilogic/config_flow.py | 14 +++++++++++--- .../components/opentherm_gw/config_flow.py | 12 ++++++++++-- homeassistant/components/owntracks/config_flow.py | 7 +++++-- homeassistant/components/picnic/config_flow.py | 4 +++- homeassistant/components/point/config_flow.py | 7 +++++-- homeassistant/components/profiler/config_flow.py | 8 ++++++-- homeassistant/components/prosegur/config_flow.py | 4 +++- homeassistant/components/rachio/config_flow.py | 5 ++++- homeassistant/components/smappee/config_flow.py | 5 ++++- .../components/smart_meter_texas/config_flow.py | 7 +++++-- .../components/smartthings/config_flow.py | 7 +++++-- homeassistant/components/soma/config_flow.py | 7 +++++-- homeassistant/components/songpal/config_flow.py | 5 ++++- homeassistant/components/spider/config_flow.py | 7 +++++-- 
homeassistant/components/squeezebox/config_flow.py | 4 +++- homeassistant/components/starline/config_flow.py | 8 ++++++-- homeassistant/components/syncthing/config_flow.py | 8 ++++++-- homeassistant/components/syncthru/config_flow.py | 5 ++++- .../components/tellduslive/config_flow.py | 7 +++++-- homeassistant/components/twinkly/config_flow.py | 4 +++- homeassistant/components/upb/config_flow.py | 7 +++++-- homeassistant/components/vilfo/config_flow.py | 7 +++++-- homeassistant/components/volumio/config_flow.py | 5 ++++- homeassistant/components/vulcan/config_flow.py | 4 +++- homeassistant/components/wiffi/config_flow.py | 12 ++++++++++-- homeassistant/components/wolflink/config_flow.py | 7 +++++-- homeassistant/components/ws66i/config_flow.py | 11 +++++++++-- homeassistant/components/xbox/config_flow.py | 6 +++++- 65 files changed, 348 insertions(+), 114 deletions(-) diff --git a/homeassistant/components/airtouch4/config_flow.py b/homeassistant/components/airtouch4/config_flow.py index 12e01ffde29..02bb5cc3ad0 100644 --- a/homeassistant/components/airtouch4/config_flow.py +++ b/homeassistant/components/airtouch4/config_flow.py @@ -1,9 +1,11 @@ """Config flow for AirTouch4.""" +from typing import Any + from airtouch4pyapi import AirTouch, AirTouchStatus import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from .const import DOMAIN @@ -16,7 +18,9 @@ class AirtouchConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if user_input is None: return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA) diff --git a/homeassistant/components/ambient_station/config_flow.py b/homeassistant/components/ambient_station/config_flow.py index 66e603ba2ff..072ca68b865 100644 --- a/homeassistant/components/ambient_station/config_flow.py +++ b/homeassistant/components/ambient_station/config_flow.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + from aioambient import API from aioambient.errors import AmbientError import voluptuous as vol @@ -32,7 +34,9 @@ class AmbientStationFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors if errors else {}, ) - async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return await self._show_form() diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index f6eb410cbf2..aa7839b4383 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from aiohttp.client_exceptions import ClientError from pyControl4.account import C4Account @@ -10,7 +11,12 @@ from pyControl4.director import C4Director from pyControl4.error_handling import NotFound, Unauthorized import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import ( CONF_HOST, 
CONF_PASSWORD, @@ -93,7 +99,9 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 19b35c2b03d..17bd1b3f7a8 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -2,10 +2,17 @@ from __future__ import annotations +from typing import Any + from pydexcom import AccountError, Dexcom, SessionError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME from homeassistant.core import callback @@ -25,7 +32,9 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/econet/config_flow.py b/homeassistant/components/econet/config_flow.py index 81a5fdf75f0..145b9cf9f7d 100644 --- a/homeassistant/components/econet/config_flow.py +++ b/homeassistant/components/econet/config_flow.py @@ -1,10 +1,12 @@ """Config flow to configure the EcoNet component.""" +from typing import Any + from pyeconet import EcoNetApiInterface from pyeconet.errors import InvalidCredentialsError, PyeconetError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from .const import DOMAIN @@ -24,7 +26,9 @@ class EcoNetFlowHandler(ConfigFlow, domain=DOMAIN): } ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return self.async_show_form( diff --git a/homeassistant/components/emulated_roku/config_flow.py b/homeassistant/components/emulated_roku/config_flow.py index 1a3b2c0e2af..0e5cc1ba55a 100644 --- a/homeassistant/components/emulated_roku/config_flow.py +++ b/homeassistant/components/emulated_roku/config_flow.py @@ -1,8 +1,10 @@ """Config flow to configure emulated_roku component.""" +from typing import Any + import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME from homeassistant.core import callback @@ -22,9 +24,11 @@ class EmulatedRokuFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: self._async_abort_entries_match({CONF_NAME: user_input[CONF_NAME]}) diff --git a/homeassistant/components/enocean/config_flow.py b/homeassistant/components/enocean/config_flow.py index 157d58bbf23..b68026a34ba 
100644 --- a/homeassistant/components/enocean/config_flow.py +++ b/homeassistant/components/enocean/config_flow.py @@ -1,8 +1,10 @@ """Config flows for the ENOcean integration.""" +from typing import Any + import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_DEVICE from . import dongle @@ -32,7 +34,9 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): return self.create_enocean_entry(data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle an EnOcean config flow start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/environment_canada/config_flow.py b/homeassistant/components/environment_canada/config_flow.py index 79b37c64c1b..c4fd16f9522 100644 --- a/homeassistant/components/environment_canada/config_flow.py +++ b/homeassistant/components/environment_canada/config_flow.py @@ -1,13 +1,14 @@ """Config flow for Environment Canada integration.""" import logging +from typing import Any import xml.etree.ElementTree as ET import aiohttp from env_canada import ECWeather, ec_exc import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.helpers import config_validation as cv @@ -46,7 +47,9 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/epson/config_flow.py b/homeassistant/components/epson/config_flow.py index 4f038de9318..1e3b006a984 100644 --- a/homeassistant/components/epson/config_flow.py +++ b/homeassistant/components/epson/config_flow.py @@ -1,10 +1,11 @@ """Config flow for epson integration.""" import logging +from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from . 
import validate_projector @@ -26,7 +27,9 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/flick_electric/config_flow.py b/homeassistant/components/flick_electric/config_flow.py index 7fe5fda3f4e..8a2455b9d14 100644 --- a/homeassistant/components/flick_electric/config_flow.py +++ b/homeassistant/components/flick_electric/config_flow.py @@ -2,12 +2,13 @@ import asyncio import logging +from typing import Any from pyflick.authentication import AuthException, SimpleFlickAuth from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, @@ -55,7 +56,9 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN): return token is not None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle gathering login info.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/flo/config_flow.py b/homeassistant/components/flo/config_flow.py index ec92b60c740..bd524c590fa 100644 --- a/homeassistant/components/flo/config_flow.py +++ b/homeassistant/components/flo/config_flow.py @@ -1,10 +1,12 @@ """Config flow for flo integration.""" +from typing import Any + from aioflo import async_get_api from aioflo.errors import RequestError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -36,7 +38,9 @@ class FloConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 7edf25a2595..1f76fe21bad 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -2,6 +2,7 @@ from contextlib import suppress import logging +from typing import Any from pyforked_daapd import ForkedDaapdAPI import voluptuous as vol @@ -135,7 +136,9 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN): ) return validate_result - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a forked-daapd config flow start. Manage device specific parameters. 
diff --git a/homeassistant/components/foscam/config_flow.py b/homeassistant/components/foscam/config_flow.py index 8a005f19f09..19c19a1a5f5 100644 --- a/homeassistant/components/foscam/config_flow.py +++ b/homeassistant/components/foscam/config_flow.py @@ -1,5 +1,7 @@ """Config flow for foscam integration.""" +from typing import Any + from libpyfoscam import FoscamCamera from libpyfoscam.foscam import ( ERROR_FOSCAM_AUTH, @@ -8,7 +10,7 @@ from libpyfoscam.foscam import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -90,7 +92,9 @@ class FoscamConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=name, data=data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/freedompro/config_flow.py b/homeassistant/components/freedompro/config_flow.py index f1dd9dbbf14..f986cd05904 100644 --- a/homeassistant/components/freedompro/config_flow.py +++ b/homeassistant/components/freedompro/config_flow.py @@ -1,9 +1,11 @@ """Config flow to configure Freedompro.""" +from typing import Any + from pyfreedompro import get_list import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -45,7 +47,9 @@ class FreedomProConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Show the setup form to the user.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/geonetnz_quakes/config_flow.py b/homeassistant/components/geonetnz_quakes/config_flow.py index 4367f820bd3..ac5a2e8c48e 100644 --- a/homeassistant/components/geonetnz_quakes/config_flow.py +++ b/homeassistant/components/geonetnz_quakes/config_flow.py @@ -1,10 +1,11 @@ """Config flow to configure the GeoNet NZ Quakes integration.""" import logging +from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -48,7 +49,9 @@ class GeonetnzQuakesFlowHandler(ConfigFlow, domain=DOMAIN): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" _LOGGER.debug("User input: %s", user_input) if not user_input: diff --git a/homeassistant/components/geonetnz_volcano/config_flow.py b/homeassistant/components/geonetnz_volcano/config_flow.py index 461da61ae1a..12c7157b7e4 100644 --- a/homeassistant/components/geonetnz_volcano/config_flow.py +++ b/homeassistant/components/geonetnz_volcano/config_flow.py @@ -1,8 +1,10 @@ """Config flow to configure the GeoNet NZ Volcano integration.""" +from typing import Any + import voluptuous as vol -from homeassistant.config_entries import 
ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -49,7 +51,9 @@ class GeonetnzVolcanoFlowHandler(ConfigFlow, domain=DOMAIN): """Import a config entry from configuration.yaml.""" return await self.async_step_user(import_config) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return await self._show_form() diff --git a/homeassistant/components/goodwe/config_flow.py b/homeassistant/components/goodwe/config_flow.py index d6a3be7e56a..354877e782f 100644 --- a/homeassistant/components/goodwe/config_flow.py +++ b/homeassistant/components/goodwe/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from goodwe import InverterError, connect import voluptuous as vol @@ -26,7 +27,9 @@ class GoodweFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index 5dd9fb2aa22..742523751a2 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -3,12 +3,13 @@ from __future__ import annotations import logging +from typing import Any from aiohttp import ClientResponseError from habitipy.aio import HabitipyAsync import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_URL from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -56,7 +57,9 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index b68d7d16717..968f677df23 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -1,6 +1,6 @@ """Config flow to configure Heos.""" -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from pyheos import Heos, HeosError @@ -51,7 +51,9 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(DOMAIN, raise_on_progress=False) return self.async_create_entry(title=format_title(host), data={CONF_HOST: host}) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Obtain host and validate connection.""" self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) # Only a single entry is needed for all devices diff --git a/homeassistant/components/hlk_sw16/config_flow.py b/homeassistant/components/hlk_sw16/config_flow.py index b315d0daa78..df2f32088cf 100644 --- 
a/homeassistant/components/hlk_sw16/config_flow.py +++ b/homeassistant/components/hlk_sw16/config_flow.py @@ -1,11 +1,12 @@ """Config flow for HLK-SW16.""" import asyncio +from typing import Any from hlk_sw16 import create_hlk_sw16_connection import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant @@ -73,7 +74,9 @@ class SW16FlowHandler(ConfigFlow, domain=DOMAIN): """Handle import.""" return await self.async_step_user(user_input) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/huisbaasje/config_flow.py b/homeassistant/components/huisbaasje/config_flow.py index ecf8cdbe431..43fbe839fa6 100644 --- a/homeassistant/components/huisbaasje/config_flow.py +++ b/homeassistant/components/huisbaasje/config_flow.py @@ -1,11 +1,12 @@ """Config flow for EnergyFlip integration.""" import logging +from typing import Any from energyflip import EnergyFlip, EnergyFlipConnectionException, EnergyFlipException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.data_entry_flow import AbortFlow @@ -23,7 +24,9 @@ class EnergyFlipConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if user_input is None: return await self._show_setup_form(user_input) diff --git a/homeassistant/components/ialarm/config_flow.py b/homeassistant/components/ialarm/config_flow.py index 08cb9868357..6df1b0f8290 100644 --- a/homeassistant/components/ialarm/config_flow.py +++ b/homeassistant/components/ialarm/config_flow.py @@ -1,11 +1,12 @@ """Config flow for Antifurto365 iAlarm integration.""" import logging +from typing import Any from pyialarm import IAlarm import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant @@ -31,7 +32,9 @@ class IAlarmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} mac = None diff --git a/homeassistant/components/insteon/config_flow.py b/homeassistant/components/insteon/config_flow.py index baf06b13860..7a701db1b82 100644 --- a/homeassistant/components/insteon/config_flow.py +++ b/homeassistant/components/insteon/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from pyinsteon import async_connect @@ -54,7 +55,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): _device_name: str | None = None discovered_conf: dict[str, str] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Init the 
config flow.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/juicenet/config_flow.py b/homeassistant/components/juicenet/config_flow.py index 607ffb6ffe2..393e6842274 100644 --- a/homeassistant/components/juicenet/config_flow.py +++ b/homeassistant/components/juicenet/config_flow.py @@ -1,13 +1,14 @@ """Config flow for JuiceNet integration.""" import logging +from typing import Any import aiohttp from pyjuicenet import Api, TokenError import voluptuous as vol from homeassistant import core, exceptions -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -44,7 +45,9 @@ class JuiceNetConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/keymitt_ble/config_flow.py b/homeassistant/components/keymitt_ble/config_flow.py index 589798a281a..217ce3cc923 100644 --- a/homeassistant/components/keymitt_ble/config_flow.py +++ b/homeassistant/components/keymitt_ble/config_flow.py @@ -42,9 +42,9 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self._errors = {} + self._errors: dict[str, str] = {} self._discovered_adv: MicroBotAdvertisement | None = None self._discovered_advs: dict[str, MicroBotAdvertisement] = {} self._client: MicroBotApiClient | None = None diff --git a/homeassistant/components/kmtronic/config_flow.py b/homeassistant/components/kmtronic/config_flow.py index 746b075789f..f83d102ac05 100644 --- a/homeassistant/components/kmtronic/config_flow.py +++ b/homeassistant/components/kmtronic/config_flow.py @@ -3,13 +3,19 @@ from __future__ import annotations import logging +from typing import Any import aiohttp from pykmtronic.auth import Auth from pykmtronic.hub import KMTronicHubAPI import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -62,7 +68,9 @@ class KmtronicConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return KMTronicOptionsFlow(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/kodi/config_flow.py b/homeassistant/components/kodi/config_flow.py index e431c72d21e..c740aeb6057 100644 --- a/homeassistant/components/kodi/config_flow.py +++ b/homeassistant/components/kodi/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from pykodi import CannotConnectError, InvalidAuthError, Kodi, get_kodi_connection import voluptuous as vol @@ -149,7 +150,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return 
self._create_entry() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 29f4fbe2a49..6c9a542c53b 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -303,7 +303,9 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="unknown") - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Connect to panel and get config.""" errors = {} if user_input: diff --git a/homeassistant/components/meteoclimatic/config_flow.py b/homeassistant/components/meteoclimatic/config_flow.py index d772a6c9d62..59877941fee 100644 --- a/homeassistant/components/meteoclimatic/config_flow.py +++ b/homeassistant/components/meteoclimatic/config_flow.py @@ -1,12 +1,13 @@ """Config flow to configure the Meteoclimatic integration.""" import logging +from typing import Any from meteoclimatic import MeteoclimaticClient from meteoclimatic.exceptions import MeteoclimaticError, StationNotFound import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import CONF_STATION_CODE, DOMAIN @@ -35,9 +36,11 @@ class MeteoclimaticFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors or {}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is None: return self._show_setup_form(user_input, errors) diff --git a/homeassistant/components/mill/config_flow.py b/homeassistant/components/mill/config_flow.py index 58660d6358e..db1b2711575 100644 --- a/homeassistant/components/mill/config_flow.py +++ b/homeassistant/components/mill/config_flow.py @@ -1,10 +1,12 @@ """Adds config flow for Mill integration.""" +from typing import Any + from mill import Mill from mill_local import Mill as MillLocal import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -16,7 +18,9 @@ class MillConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" data_schema = vol.Schema( { diff --git a/homeassistant/components/mobile_app/config_flow.py b/homeassistant/components/mobile_app/config_flow.py index 66035733c33..bd72b2d7f42 100644 --- a/homeassistant/components/mobile_app/config_flow.py +++ b/homeassistant/components/mobile_app/config_flow.py @@ -1,9 +1,10 @@ """Config flow for Mobile App.""" +from typing import Any import uuid from homeassistant.components import person -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ATTR_DEVICE_ID from homeassistant.helpers import 
entity_registry as er @@ -15,7 +16,9 @@ class MobileAppFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" placeholders = { "apps_url": "https://www.home-assistant.io/integrations/mobile_app/#apps" diff --git a/homeassistant/components/monoprice/config_flow.py b/homeassistant/components/monoprice/config_flow.py index 2c7163123b6..5f0b1bf27b5 100644 --- a/homeassistant/components/monoprice/config_flow.py +++ b/homeassistant/components/monoprice/config_flow.py @@ -3,12 +3,18 @@ from __future__ import annotations import logging +from typing import Any from pymonoprice import get_monoprice from serial import SerialException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -76,7 +82,9 @@ class MonoPriceConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index a872e9fb4ac..55112c6662c 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import cast +from typing import Any, cast from urllib.parse import urlparse from pynetgear import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_USER @@ -175,7 +175,9 @@ class NetgearFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} diff --git a/homeassistant/components/nexia/config_flow.py b/homeassistant/components/nexia/config_flow.py index 6d1f4af043b..592ebde61c3 100644 --- a/homeassistant/components/nexia/config_flow.py +++ b/homeassistant/components/nexia/config_flow.py @@ -1,13 +1,14 @@ """Config flow for Nexia integration.""" import logging +from typing import Any import aiohttp from nexia.const import BRAND_ASAIR, BRAND_NEXIA, BRAND_TRANE from nexia.home import NexiaHome import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -81,7 +82,9 @@ class NexiaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/nextbus/config_flow.py b/homeassistant/components/nextbus/config_flow.py index 05290733bd9..90a6a4fc912 100644 --- 
a/homeassistant/components/nextbus/config_flow.py +++ b/homeassistant/components/nextbus/config_flow.py @@ -79,7 +79,7 @@ class NextBusFlowHandler(ConfigFlow, domain=DOMAIN): _route_tags: dict[str, str] _stop_tags: dict[str, str] - def __init__(self): + def __init__(self) -> None: """Initialize NextBus config flow.""" self.data: dict[str, str] = {} self._client = NextBusClient() diff --git a/homeassistant/components/nuheat/config_flow.py b/homeassistant/components/nuheat/config_flow.py index a5d34f7ae6c..0e090eeab3e 100644 --- a/homeassistant/components/nuheat/config_flow.py +++ b/homeassistant/components/nuheat/config_flow.py @@ -2,12 +2,13 @@ from http import HTTPStatus import logging +from typing import Any import nuheat import requests.exceptions import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -64,7 +65,9 @@ class NuHeatConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 32f5fa88fff..22943b85f4e 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -63,7 +63,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for OctoPrint.""" self._sessions: list[aiohttp.ClientSession] = [] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" # When coming back from the progress steps, the user_input is stored in the # instance variable instead of being passed in diff --git a/homeassistant/components/omnilogic/config_flow.py b/homeassistant/components/omnilogic/config_flow.py index 229f458ceb4..166e4414767 100644 --- a/homeassistant/components/omnilogic/config_flow.py +++ b/homeassistant/components/omnilogic/config_flow.py @@ -3,11 +3,17 @@ from __future__ import annotations import logging +from typing import Any from omnilogic import LoginException, OmniLogic, OmniLogicException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from homeassistant.helpers import aiohttp_client @@ -30,9 +36,11 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} config_entry = self._async_current_entries() if config_entry: diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 19906689b57..1ebf462a5c7 100644 --- 
a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -3,13 +3,19 @@ from __future__ import annotations import asyncio +from typing import Any import pyotgw from pyotgw import vars as gw_vars from serial import SerialException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import ( CONF_DEVICE, CONF_ID, @@ -80,7 +86,9 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_form() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle manual initiation of the config flow.""" return await self.async_step_init(user_input) diff --git a/homeassistant/components/owntracks/config_flow.py b/homeassistant/components/owntracks/config_flow.py index 29fe4f0cf65..390cc880c1e 100644 --- a/homeassistant/components/owntracks/config_flow.py +++ b/homeassistant/components/owntracks/config_flow.py @@ -1,9 +1,10 @@ """Config flow for OwnTracks.""" import secrets +from typing import Any from homeassistant.components import cloud, webhook -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_WEBHOOK_ID from .const import DOMAIN @@ -18,7 +19,9 @@ class OwnTracksFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a user initiated set up flow to create OwnTracks webhook.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/picnic/config_flow.py b/homeassistant/components/picnic/config_flow.py index 3023b5309de..9548029209b 100644 --- a/homeassistant/components/picnic/config_flow.py +++ b/homeassistant/components/picnic/config_flow.py @@ -87,7 +87,9 @@ class PicnicConfigFlow(ConfigFlow, domain=DOMAIN): """Perform the re-auth step upon an API authentication error.""" return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the authentication step, this is the generic step for both `step_user` and `step_reauth`.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/point/config_flow.py b/homeassistant/components/point/config_flow.py index 279561b4e2b..b2455438208 100644 --- a/homeassistant/components/point/config_flow.py +++ b/homeassistant/components/point/config_flow.py @@ -3,12 +3,13 @@ import asyncio from collections import OrderedDict import logging +from typing import Any from pypoint import PointSession import voluptuous as vol from homeassistant.components.http import KEY_HASS, HomeAssistantView -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -59,7 +60,9 @@ class PointFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_auth() - async def 
async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" flows = self.hass.data.get(DATA_FLOW_IMPL, {}) diff --git a/homeassistant/components/profiler/config_flow.py b/homeassistant/components/profiler/config_flow.py index 4acce51e25f..19995cf79aa 100644 --- a/homeassistant/components/profiler/config_flow.py +++ b/homeassistant/components/profiler/config_flow.py @@ -1,6 +1,8 @@ """Config flow for Profiler integration.""" -from homeassistant.config_entries import ConfigFlow +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import DEFAULT_NAME, DOMAIN @@ -10,7 +12,9 @@ class ProfilerConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/prosegur/config_flow.py b/homeassistant/components/prosegur/config_flow.py index 82cf1d424c7..7a8f67cef7d 100644 --- a/homeassistant/components/prosegur/config_flow.py +++ b/homeassistant/components/prosegur/config_flow.py @@ -51,7 +51,9 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): user_input: dict contracts: list[dict[str, str]] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/rachio/config_flow.py b/homeassistant/components/rachio/config_flow.py index 77fe20946b4..bdd2f81536d 100644 --- a/homeassistant/components/rachio/config_flow.py +++ b/homeassistant/components/rachio/config_flow.py @@ -4,6 +4,7 @@ from __future__ import annotations from http import HTTPStatus import logging +from typing import Any from rachiopy import Rachio from requests.exceptions import ConnectTimeout @@ -67,7 +68,9 @@ class RachioConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/smappee/config_flow.py b/homeassistant/components/smappee/config_flow.py index 6ed18905233..d5073bd9c34 100644 --- a/homeassistant/components/smappee/config_flow.py +++ b/homeassistant/components/smappee/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Smappee.""" import logging +from typing import Any from pysmappee import helper, mqtt import voluptuous as vol @@ -106,7 +107,9 @@ class SmappeeFlowHandler( data={CONF_IP_ADDRESS: ip_address, CONF_SERIALNUMBER: serial_number}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" # If there is a CLOUD entry already, abort a new LOCAL entry diff --git a/homeassistant/components/smart_meter_texas/config_flow.py b/homeassistant/components/smart_meter_texas/config_flow.py index bbe1361b795..b60855b62c8 100644 --- a/homeassistant/components/smart_meter_texas/config_flow.py +++ b/homeassistant/components/smart_meter_texas/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Smart Meter Texas 
integration.""" import logging +from typing import Any from aiohttp import ClientError from smart_meter_texas import Account, Client, ClientSSLContext @@ -10,7 +11,7 @@ from smart_meter_texas.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -52,7 +53,9 @@ class SMTConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index 2ecc3375026..9072683328d 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -2,13 +2,14 @@ from http import HTTPStatus import logging +from typing import Any from aiohttp import ClientResponseError from pysmartthings import APIResponseError, AppOAuth, SmartThings from pysmartthings.installedapp import format_install_url import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -57,7 +58,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): """Occurs when a previously entry setup fails and is re-initiated.""" return await self.async_step_user(user_input) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Validate and confirm webhook setup.""" if not self.endpoints_initialized: self.endpoints_initialized = True diff --git a/homeassistant/components/soma/config_flow.py b/homeassistant/components/soma/config_flow.py index 773a24d5b44..23aabf5a5e0 100644 --- a/homeassistant/components/soma/config_flow.py +++ b/homeassistant/components/soma/config_flow.py @@ -1,12 +1,13 @@ """Config flow for Soma.""" import logging +from typing import Any from api.soma_api import SomaApi from requests import RequestException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from .const import DOMAIN @@ -24,7 +25,9 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Instantiate config flow.""" - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" if user_input is None: data = { diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index f8a0db3815d..0724646a594 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from urllib.parse import urlparse from songpal import Device, SongpalException @@ -36,7 +37,9 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): 
"""Initialize the flow.""" self.conf: SongpalConfig | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/spider/config_flow.py b/homeassistant/components/spider/config_flow.py index a678ea73051..f3076c0c28d 100644 --- a/homeassistant/components/spider/config_flow.py +++ b/homeassistant/components/spider/config_flow.py @@ -1,11 +1,12 @@ """Config flow for Spider.""" import logging +from typing import Any from spiderpy.spiderapi import SpiderApi, SpiderApiException, UnauthorizedException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME from .const import DEFAULT_SCAN_INTERVAL, DOMAIN @@ -49,7 +50,9 @@ class SpiderConfigFlow(ConfigFlow, domain=DOMAIN): return RESULT_SUCCESS - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index 95af3e8032a..fe57b12516a 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -131,7 +131,9 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): return None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors = {} if user_input and CONF_HOST in user_input: diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index fbb7fa9acdc..e27885e6c60 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -2,10 +2,12 @@ from __future__ import annotations +from typing import Any + from starline import StarlineAuth import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback @@ -52,7 +54,9 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._auth = StarlineAuth() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" return await self.async_step_auth_app(user_input) diff --git a/homeassistant/components/syncthing/config_flow.py b/homeassistant/components/syncthing/config_flow.py index 2d7d2ddcc92..86ea52c43a3 100644 --- a/homeassistant/components/syncthing/config_flow.py +++ b/homeassistant/components/syncthing/config_flow.py @@ -1,9 +1,11 @@ """Config flow for syncthing integration.""" +from typing import Any + import aiosyncthing import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_TOKEN, 
CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -42,7 +44,9 @@ class SyncThingConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/syncthru/config_flow.py b/homeassistant/components/syncthru/config_flow.py index 8cd1c2c7b3b..180ba0d9e34 100644 --- a/homeassistant/components/syncthru/config_flow.py +++ b/homeassistant/components/syncthru/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Samsung SyncThru.""" import re +from typing import Any from urllib.parse import urlparse from pysyncthru import ConnectionMode, SyncThru, SyncThruAPINotSupported @@ -23,7 +24,9 @@ class SyncThruConfigFlow(ConfigFlow, domain=DOMAIN): url: str name: str - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user initiated flow.""" if user_input is None: return await self._async_show_form(step_id="user") diff --git a/homeassistant/components/tellduslive/config_flow.py b/homeassistant/components/tellduslive/config_flow.py index 6f1318ca61e..6d68c37d821 100644 --- a/homeassistant/components/tellduslive/config_flow.py +++ b/homeassistant/components/tellduslive/config_flow.py @@ -3,11 +3,12 @@ import asyncio import logging import os +from typing import Any from tellduslive import Session, supports_local_api import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.util.json import load_json_object @@ -50,7 +51,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ) return self._session.authorize_url - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Let user select host or cloud.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 98802c8bd33..68c455dc619 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -28,7 +28,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self._discovered_device: tuple[dict[str, Any], str] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle config steps.""" host = user_input[CONF_HOST] if user_input else None diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index fec93a51202..6efd3a685ed 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -3,12 +3,13 @@ import asyncio from contextlib import suppress import logging +from typing import Any from urllib.parse import urlparse import upb_lib import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ADDRESS, CONF_FILE_PATH, CONF_HOST, CONF_PROTOCOL from 
homeassistant.exceptions import HomeAssistantError @@ -82,7 +83,9 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the UPB config flow.""" self.importing = False - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index b21c63bfb97..a6cff506f79 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Vilfo Router integration.""" import logging +from typing import Any from vilfo import Client as VilfoClient from vilfo.exceptions import ( @@ -9,7 +10,7 @@ from vilfo.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_ID, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -99,7 +100,9 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/volumio/config_flow.py b/homeassistant/components/volumio/config_flow.py index 8edda1d20b0..4c7a48f36c7 100644 --- a/homeassistant/components/volumio/config_flow.py +++ b/homeassistant/components/volumio/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from pyvolumio import CannotConnectError, Volumio import voluptuous as vol @@ -68,7 +69,9 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): } ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/vulcan/config_flow.py b/homeassistant/components/vulcan/config_flow.py index 560d777b517..5938e4ce690 100644 --- a/homeassistant/components/vulcan/config_flow.py +++ b/homeassistant/components/vulcan/config_flow.py @@ -44,7 +44,9 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): self.keystore = None self.students = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle config flow.""" if self._async_current_entries(): return await self.async_step_add_next_config_entry() diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 17262dd0276..6e4872ea400 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -6,11 +6,17 @@ Used by UI to setup a wiffi integration. 
from __future__ import annotations import errno +from typing import Any import voluptuous as vol from wiffi import WiffiTcpServer -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PORT, CONF_TIMEOUT from homeassistant.core import callback @@ -30,7 +36,9 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): """Create Wiffi server setup option flow.""" return OptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow. Called after wiffi integration has been selected in the 'add integration diff --git a/homeassistant/components/wolflink/config_flow.py b/homeassistant/components/wolflink/config_flow.py index 6e218bfd1ce..a2678580a23 100644 --- a/homeassistant/components/wolflink/config_flow.py +++ b/homeassistant/components/wolflink/config_flow.py @@ -1,13 +1,14 @@ """Config flow for Wolf SmartSet Service integration.""" import logging +from typing import Any from httpcore import ConnectError import voluptuous as vol from wolf_comm.token_auth import InvalidAuth from wolf_comm.wolf_client import WolfClient -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DEVICE_GATEWAY, DEVICE_ID, DEVICE_NAME, DOMAIN @@ -30,7 +31,9 @@ class WolfLinkConfigFlow(ConfigFlow, domain=DOMAIN): self.password = None self.fetched_systems = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step to get connection parameters.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index b0cf6717e4d..330e9963f95 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -8,7 +8,12 @@ from typing import Any from pyws66i import WS66i, get_ws66i import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -94,7 +99,9 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/xbox/config_flow.py b/homeassistant/components/xbox/config_flow.py index e1434aac67c..86157be5d7f 100644 --- a/homeassistant/components/xbox/config_flow.py +++ b/homeassistant/components/xbox/config_flow.py @@ -1,7 +1,9 @@ """Config flow for xbox.""" import logging +from typing import Any +from homeassistant.config_entries import ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -25,7 +27,9 @@ class OAuth2FlowHandler( scopes = ["Xboxlive.signin", 
"Xboxlive.offline_access"] return {"scope": " ".join(scopes)} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" await self.async_set_unique_id(DOMAIN) From 47beddc6c6d3dc01de9544781c97fa5734bbc28b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 21 Aug 2024 22:43:42 +0200 Subject: [PATCH 0959/1635] Improve config flow type hints (part 4) (#124348) --- .../components/plaato/config_flow.py | 21 ++++++++++++++----- homeassistant/components/plex/config_flow.py | 14 ++++++++----- .../components/progettihwsw/config_flow.py | 10 ++++++--- homeassistant/components/ps4/config_flow.py | 11 ++++++---- homeassistant/components/roon/config_flow.py | 19 ++++++++++------- .../components/smarttub/config_flow.py | 8 +++++-- .../components/somfy_mylink/config_flow.py | 13 +++++++----- .../components/soundtouch/config_flow.py | 9 +++++--- .../components/subaru/config_flow.py | 16 ++++++++++---- 9 files changed, 82 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/plaato/config_flow.py b/homeassistant/components/plaato/config_flow.py index 1240abc5e81..3ada4fdc312 100644 --- a/homeassistant/components/plaato/config_flow.py +++ b/homeassistant/components/plaato/config_flow.py @@ -2,11 +2,18 @@ from __future__ import annotations +from typing import Any + from pyplaato.plaato import PlaatoDeviceType import voluptuous as vol from homeassistant.components import cloud, webhook -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.core import callback import homeassistant.helpers.config_validation as cv @@ -31,11 +38,13 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self._init_info = {} + self._init_info: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user step.""" if user_input is not None: @@ -185,7 +194,9 @@ class PlaatoOptionsFlowHandler(OptionsFlow): return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index 374067c94cd..7162e517e23 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping import copy import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp import web_response import plexapi.exceptions @@ -105,15 +105,15 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return PlexOptionsFlowHandler(config_entry) - def __init__(self): + def __init__(self) -> None: """Initialize the Plex flow.""" - self.current_login = {} + self.current_login: dict[str, Any] = {} self.available_servers = None 
self.plexauth = None self.token = None self.client_id = None self._manual = False - self._reauth_config = None + self._reauth_config: dict[str, Any] | None = None async def async_step_user(self, user_input=None, errors=None): """Handle a flow initialized by the user.""" @@ -184,7 +184,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): step_id="manual_setup", data_schema=data_schema, errors=errors ) - async def async_step_server_validate(self, server_config): + async def async_step_server_validate( + self, server_config: dict[str, Any] + ) -> ConfigFlowResult: """Validate a provided configuration.""" if self._reauth_config: server_config = {**self._reauth_config, **server_config} @@ -249,6 +251,8 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): entry = await self.async_set_unique_id(server_id) if self.context[CONF_SOURCE] == SOURCE_REAUTH: + if TYPE_CHECKING: + assert entry self.hass.config_entries.async_update_entry(entry, data=data) _LOGGER.debug("Updated config entry for %s", plex_server.friendly_name) await self.hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index dbe12184a10..95596b940a4 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,9 +1,11 @@ """Config flow for ProgettiHWSW Automation integration.""" +from typing import Any + from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -38,7 +40,7 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize class variables.""" - self.s1_in = None + self.s1_in: dict[str, Any] | None = None async def async_step_relay_modes(self, user_input=None): """Manage relay modes step.""" @@ -66,7 +68,9 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/ps4/config_flow.py b/homeassistant/components/ps4/config_flow.py index b842c2f7cfb..cdbf02dcc90 100644 --- a/homeassistant/components/ps4/config_flow.py +++ b/homeassistant/components/ps4/config_flow.py @@ -1,13 +1,14 @@ """Config Flow for PlayStation 4.""" from collections import OrderedDict +from typing import Any from pyps4_2ndscreen.errors import CredentialTimeout from pyps4_2ndscreen.helpers import Helper from pyps4_2ndscreen.media_art import COUNTRIES import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_CODE, CONF_HOST, @@ -44,7 +45,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = CONFIG_ENTRY_VERSION - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" self.helper = Helper() self.creds = None @@ -54,9 +55,11 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): self.pin = None self.m_device = None self.location = None - self.device_list = [] + self.device_list: list[str] = [] - async def async_step_user(self, 
user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a user config flow.""" # Check if able to bind to ports: UDP 987, TCP 997. ports = PORT_MSG.keys() diff --git a/homeassistant/components/roon/config_flow.py b/homeassistant/components/roon/config_flow.py index f555cc52dd1..de220454852 100644 --- a/homeassistant/components/roon/config_flow.py +++ b/homeassistant/components/roon/config_flow.py @@ -2,11 +2,12 @@ import asyncio import logging +from typing import Any from roonapi import RoonApi, RoonDiscovery import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -36,14 +37,14 @@ TIMEOUT = 120 class RoonHub: """Interact with roon during config flow.""" - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Initialise the RoonHub.""" self._hass = hass - async def discover(self): + async def discover(self) -> list[tuple[str, int]]: """Try and discover roon servers.""" - def get_discovered_servers(discovery): + def get_discovered_servers(discovery: RoonDiscovery) -> list[tuple[str, int]]: servers = discovery.all() discovery.stop() return servers @@ -93,7 +94,7 @@ class RoonHub: return (token, core_id, core_name) -async def discover(hass): +async def discover(hass: HomeAssistant) -> list[tuple[str, int]]: """Connect and authenticate home assistant.""" hub = RoonHub(hass) @@ -122,13 +123,15 @@ class RoonConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the Roon flow.""" self._host = None self._port = None - self._servers = [] + self._servers: list[tuple[str, int]] = [] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Get roon core details via discovery.""" self._servers = await discover(self.hass) diff --git a/homeassistant/components/smarttub/config_flow.py b/homeassistant/components/smarttub/config_flow.py index 60f14b03e45..827375c907c 100644 --- a/homeassistant/components/smarttub/config_flow.py +++ b/homeassistant/components/smarttub/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from smarttub import LoginFailed import voluptuous as vol @@ -30,7 +30,9 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): self._reauth_input: Mapping[str, Any] | None = None self._reauth_entry: ConfigEntry | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} @@ -53,6 +55,8 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): ) # this is a reauth attempt + if TYPE_CHECKING: + assert self._reauth_entry if self._reauth_entry.unique_id != self.unique_id: # there is a config entry matching this account, # but it is not the one we were trying to reauth diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index a13f036210d..9a8b5d76d3f 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ 
b/homeassistant/components/somfy_mylink/config_flow.py @@ -4,6 +4,7 @@ from __future__ import annotations from copy import deepcopy import logging +from typing import Any from somfy_mylink_synergy import SomfyMyLinkSynergy import voluptuous as vol @@ -61,11 +62,11 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the somfy_mylink flow.""" - self.host = None - self.mac = None - self.ip_address = None + self.host: str | None = None + self.mac: str | None = None + self.ip_address: str | None = None async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -82,7 +83,9 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {"ip": self.ip_address, "mac": self.mac} return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/soundtouch/config_flow.py b/homeassistant/components/soundtouch/config_flow.py index c8e8ce945db..fea63366db9 100644 --- a/homeassistant/components/soundtouch/config_flow.py +++ b/homeassistant/components/soundtouch/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Bose SoundTouch integration.""" import logging +from typing import Any from libsoundtouch import soundtouch_device from requests import RequestException @@ -21,12 +22,14 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize a new SoundTouch config flow.""" - self.host = None + self.host: str | None = None self.name = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} diff --git a/homeassistant/components/subaru/config_flow.py b/homeassistant/components/subaru/config_flow.py index 5ecaf9670d7..3d96a89a14f 100644 --- a/homeassistant/components/subaru/config_flow.py +++ b/homeassistant/components/subaru/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import datetime import logging -from typing import Any +from typing import TYPE_CHECKING, Any from subarulink import ( Controller as SubaruAPI, @@ -44,10 +44,10 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize config flow.""" - self.config_data = {CONF_PIN: None} - self.controller = None + self.config_data: dict[str, Any] = {CONF_PIN: None} + self.controller: SubaruAPI | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -66,6 +66,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.error("Unable to communicate with Subaru API: %s", ex.message) return self.async_abort(reason="cannot_connect") else: + if TYPE_CHECKING: + assert self.controller if not self.controller.device_registered: _LOGGER.debug("2FA validation is required") return await self.async_step_two_factor() @@ -137,6 +139,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Select contact method and request 2FA code from Subaru.""" error = None + if TYPE_CHECKING: + assert self.controller if user_input: # self.controller.contact_methods is a dict: # {"phone":"555-555-5555", "userName":"my@email.com"} @@ -165,6 +169,8 @@ class 
SubaruConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Validate received 2FA code with Subaru.""" error = None + if TYPE_CHECKING: + assert self.controller if user_input: try: vol.Match(r"^[0-9]{6}$")(user_input[CONF_VALIDATION_CODE]) @@ -190,6 +196,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle second part of config flow, if required.""" error = None + if TYPE_CHECKING: + assert self.controller if user_input and self.controller.update_saved_pin(user_input[CONF_PIN]): try: vol.Match(r"[0-9]{4}")(user_input[CONF_PIN]) From ad362b1d4cd8c25be3af348f75926b1d670e3d19 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 22 Aug 2024 00:48:25 -0500 Subject: [PATCH 0960/1635] Bump dbus-fast to 2.23.0 (#124384) --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 0dbe1475b8d..07ea970de3d 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -19,7 +19,7 @@ "bluetooth-adapters==0.19.4", "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.19.4", - "dbus-fast==2.22.1", + "dbus-fast==2.23.0", "habluetooth==3.3.2" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 31100828978..b6bcd1dac6b 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -23,7 +23,7 @@ cached_ipaddress==0.3.0 certifi>=2021.5.30 ciso8601==2.3.1 cryptography==43.0.0 -dbus-fast==2.22.1 +dbus-fast==2.23.0 fnv-hash-fast==0.5.0 ha-av==10.1.1 ha-ffmpeg==3.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 4820889dfab..17ab928a92b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -703,7 +703,7 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.22.1 +dbus-fast==2.23.0 # homeassistant.components.debugpy debugpy==1.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6f1e23f9d10..765352c89ff 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -599,7 +599,7 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.22.1 +dbus-fast==2.23.0 # homeassistant.components.debugpy debugpy==1.8.1 From 266cd623a5e822ab29118419621472681976ccd5 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 22 Aug 2024 07:59:21 +0200 Subject: [PATCH 0961/1635] Bump python-holidays to 0.55 (#124314) --- homeassistant/components/holiday/manifest.json | 2 +- homeassistant/components/workday/binary_sensor.py | 4 ++-- homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 0a714815ae3..0a3064450d4 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.54", "babel==2.15.0"] + "requirements": ["holidays==0.55", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/binary_sensor.py 
b/homeassistant/components/workday/binary_sensor.py index 4635b2209a6..33c2e249024 100644 --- a/homeassistant/components/workday/binary_sensor.py +++ b/homeassistant/components/workday/binary_sensor.py @@ -92,7 +92,7 @@ def _get_obj_holidays( subdiv=province, years=year, language=language, - categories=set_categories, # type: ignore[arg-type] + categories=set_categories, ) if (supported_languages := obj_holidays.supported_languages) and language == "en": for lang in supported_languages: @@ -102,7 +102,7 @@ def _get_obj_holidays( subdiv=province, years=year, language=lang, - categories=set_categories, # type: ignore[arg-type] + categories=set_categories, ) LOGGER.debug("Changing language from %s to %s", language, lang) return obj_holidays diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index 133c82454bc..fafa870d00a 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.54"] + "requirements": ["holidays==0.55"] } diff --git a/requirements_all.txt b/requirements_all.txt index 17ab928a92b..f0df486d1c6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1099,7 +1099,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.54 +holidays==0.55 # homeassistant.components.frontend home-assistant-frontend==20240809.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 765352c89ff..67f19ae081e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -922,7 +922,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.54 +holidays==0.55 # homeassistant.components.frontend home-assistant-frontend==20240809.0 From 2ff326b563f34068dcf79247cf4bee282ff747b0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 08:55:16 +0200 Subject: [PATCH 0962/1635] Bump github/codeql-action from 3.26.2 to 3.26.4 (#124390) --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 45c2b31d772..e9824f42b91 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.26.2 + uses: github/codeql-action/init@v3.26.4 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.26.2 + uses: github/codeql-action/analyze@v3.26.4 with: category: "/language:python" From 60f9fbf800cd346a18782cf6e6b413acc07fc49d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 22 Aug 2024 08:55:43 +0200 Subject: [PATCH 0963/1635] Bump home-assistant/builder from 2024.08.1 to 2024.08.2 (#124341) --- .github/workflows/builder.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 7f3c0b0e66e..e1f95c5c0a9 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -197,7 +197,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2024.08.1 + uses: home-assistant/builder@2024.08.2 with: args: | $BUILD_ARGS \ @@ 
-263,7 +263,7 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Build base image - uses: home-assistant/builder@2024.08.1 + uses: home-assistant/builder@2024.08.2 with: args: | $BUILD_ARGS \ From 9b9e98a26e25ad6aa9f1ceee9215043476cfd548 Mon Sep 17 00:00:00 2001 From: spongebobberts <133538584+spongebobberts@users.noreply.github.com> Date: Thu, 22 Aug 2024 03:05:13 -0400 Subject: [PATCH 0964/1635] Fix typo in Alexa resources docstring (#124388) Fixed typo in Alexa resources docstring --- homeassistant/components/alexa/resources.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/alexa/resources.py b/homeassistant/components/alexa/resources.py index 7782716798a..4541801d31f 100644 --- a/homeassistant/components/alexa/resources.py +++ b/homeassistant/components/alexa/resources.py @@ -283,7 +283,7 @@ class AlexaPresetResource(AlexaCapabilityResource): """Implements Alexa PresetResources. Use presetResources with RangeController to provide a set of - friendlyNamesfor each RangeController preset. + friendlyNames for each RangeController preset. https://developer.amazon.com/docs/device-apis/resources-and-assets.html#presetresources """ From 61114d83285e815742c8ae2e47e5765f9d83d744 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 22 Aug 2024 09:07:45 +0200 Subject: [PATCH 0965/1635] Install and start Mosquitto MQTT broker add on from MQTT config flow (#124106) * Opt in to install Mosquitto broker add-on in MQTT config flow * rephrase * Tests with supervisor and running add-on * Complete tests for success flows * Also set up entry in success flow * Use realistic names for addon and broker * Finetuning and fail test cases * Spelling * Improve translation strings * Update addon docstr Co-authored-by: Erik Montnemery * Raise AddonError if add-on does not start * Only show the option to use the add-on * Simplify flow, rework and cleanup * Revert unrelated cleanup, process suggestion * Move ADDON_SLUG const to addon module * Move fixture to component level * Move back supervisor fixture * Move addon_setup_time_fixture and superfixe to config flow model tests * Refactor hassio fixture * Rename helpers as they are no fixtures, remove fixture from their names --------- Co-authored-by: Erik Montnemery --- homeassistant/components/mqtt/addon.py | 22 + homeassistant/components/mqtt/config_flow.py | 185 +++++++- homeassistant/components/mqtt/const.py | 3 + homeassistant/components/mqtt/strings.json | 17 + tests/components/conftest.py | 91 +++- tests/components/hassio/common.py | 125 +++++ tests/components/mqtt/test_config_flow.py | 451 ++++++++++++++++++- 7 files changed, 876 insertions(+), 18 deletions(-) create mode 100644 homeassistant/components/mqtt/addon.py create mode 100644 tests/components/hassio/common.py diff --git a/homeassistant/components/mqtt/addon.py b/homeassistant/components/mqtt/addon.py new file mode 100644 index 00000000000..3ac6748033f --- /dev/null +++ b/homeassistant/components/mqtt/addon.py @@ -0,0 +1,22 @@ +"""Provide MQTT add-on management. + +Currently only supports the official mosquitto add-on. 
+""" + +from __future__ import annotations + +from homeassistant.components.hassio import AddonManager +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.singleton import singleton + +from .const import DOMAIN, LOGGER + +ADDON_SLUG = "core_mosquitto" +DATA_ADDON_MANAGER = f"{DOMAIN}_addon_manager" + + +@singleton(DATA_ADDON_MANAGER) +@callback +def get_addon_manager(hass: HomeAssistant) -> AddonManager: + """Get the add-on manager.""" + return AddonManager(hass, LOGGER, "Mosquitto Mqtt Broker", ADDON_SLUG) diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 17dfc6512b3..a27e34c364c 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -2,8 +2,10 @@ from __future__ import annotations +import asyncio from collections import OrderedDict from collections.abc import Callable, Mapping +import logging import queue from ssl import PROTOCOL_TLS_CLIENT, SSLContext, SSLError from types import MappingProxyType @@ -14,7 +16,12 @@ from cryptography.x509 import load_pem_x509_certificate import voluptuous as vol from homeassistant.components.file_upload import process_uploaded_file -from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.hassio import HassioServiceInfo, is_hassio +from homeassistant.components.hassio.addon_manager import ( + AddonError, + AddonManager, + AddonState, +) from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -32,6 +39,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import callback +from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_dumps from homeassistant.helpers.selector import ( @@ -51,6 +59,7 @@ from homeassistant.helpers.selector import ( ) from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads +from .addon import get_addon_manager from .client import MqttClientSetup from .const import ( ATTR_PAYLOAD, @@ -91,6 +100,11 @@ from .util import ( valid_publish_topic, ) +_LOGGER = logging.getLogger(__name__) + +ADDON_SETUP_TIMEOUT = 5 +ADDON_SETUP_TIMEOUT_ROUNDS = 5 + MQTT_TIMEOUT = 5 ADVANCED_OPTIONS = "advanced_options" @@ -197,6 +211,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): entry: ConfigEntry | None _hassio_discovery: dict[str, Any] | None = None + _addon_manager: AddonManager + + def __init__(self) -> None: + """Set up flow instance.""" + self.install_task: asyncio.Task | None = None + self.start_task: asyncio.Task | None = None @staticmethod @callback @@ -206,6 +226,118 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return MQTTOptionsFlowHandler(config_entry) + async def _async_install_addon(self) -> None: + """Install the Mosquitto Mqtt broker add-on.""" + addon_manager: AddonManager = get_addon_manager(self.hass) + await addon_manager.async_schedule_install_addon() + + async def async_step_install_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Add-on installation failed.""" + return self.async_abort( + reason="addon_install_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + async def async_step_install_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Install Mosquitto Broker add-on.""" + if self.install_task is None: + self.install_task = 
self.hass.async_create_task(self._async_install_addon()) + + if not self.install_task.done(): + return self.async_show_progress( + step_id="install_addon", + progress_action="install_addon", + progress_task=self.install_task, + ) + + try: + await self.install_task + except AddonError as err: + _LOGGER.error(err) + return self.async_show_progress_done(next_step_id="install_failed") + finally: + self.install_task = None + + return self.async_show_progress_done(next_step_id="start_addon") + + async def async_step_start_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Add-on start failed.""" + return self.async_abort( + reason="addon_start_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + async def async_step_start_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Start Mosquitto Broker add-on.""" + if not self.start_task: + self.start_task = self.hass.async_create_task(self._async_start_addon()) + if not self.start_task.done(): + return self.async_show_progress( + step_id="start_addon", + progress_action="start_addon", + progress_task=self.start_task, + ) + try: + await self.start_task + except AddonError as err: + _LOGGER.error(err) + return self.async_show_progress_done(next_step_id="start_failed") + finally: + self.start_task = None + + return self.async_show_progress_done(next_step_id="setup_entry_from_discovery") + + async def _async_get_config_and_try(self) -> dict[str, Any] | None: + """Get the MQTT add-on discovery info and try the connection.""" + if self._hassio_discovery is not None: + return self._hassio_discovery + addon_manager: AddonManager = get_addon_manager(self.hass) + try: + addon_discovery_config = ( + await addon_manager.async_get_addon_discovery_info() + ) + config: dict[str, Any] = { + CONF_BROKER: addon_discovery_config[CONF_HOST], + CONF_PORT: addon_discovery_config[CONF_PORT], + CONF_USERNAME: addon_discovery_config.get(CONF_USERNAME), + CONF_PASSWORD: addon_discovery_config.get(CONF_PASSWORD), + CONF_DISCOVERY: DEFAULT_DISCOVERY, + } + except AddonError: + # We do not have discovery information yet + return None + if await self.hass.async_add_executor_job( + try_connection, + config, + ): + self._hassio_discovery = config + return config + return None + + async def _async_start_addon(self) -> None: + """Start the Mosquitto Broker add-on.""" + addon_manager: AddonManager = get_addon_manager(self.hass) + await addon_manager.async_schedule_start_addon() + + # Sleep some seconds to let the add-on start properly before connecting. 
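# With the module-level defaults above (ADDON_SETUP_TIMEOUT = 5,
# ADDON_SETUP_TIMEOUT_ROUNDS = 5), the loop below sleeps five seconds per
# round, so the flow waits roughly 25 seconds at most for the add-on to
# publish its discovery info before raising AddonError; the config-flow tests
# patch ADDON_SETUP_TIMEOUT to 0 to skip this wait.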
+ for _ in range(ADDON_SETUP_TIMEOUT_ROUNDS): + await asyncio.sleep(ADDON_SETUP_TIMEOUT) + # Finish setup using discovery info to test the connection + if await self._async_get_config_and_try(): + break + else: + raise AddonError( + f"Failed to correctly start {addon_manager.addon_name} add-on" + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -213,8 +345,57 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") + if is_hassio(self.hass): + # Offer to set up broker add-on if supervisor is available + self._addon_manager = get_addon_manager(self.hass) + return self.async_show_menu( + step_id="user", + menu_options=["addon", "broker"], + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + # Start up a flow for manual setup return await self.async_step_broker() + async def async_step_setup_entry_from_discovery( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Set up mqtt entry from discovery info.""" + if (config := await self._async_get_config_and_try()) is not None: + return self.async_create_entry( + title=self._addon_manager.addon_name, + data=config, + ) + + raise AbortFlow( + "addon_connection_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + async def async_step_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Install and start MQTT broker add-on.""" + addon_manager = self._addon_manager + + try: + addon_info = await addon_manager.async_get_addon_info() + except AddonError as err: + raise AbortFlow( + "addon_info_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) from err + + if addon_info.state == AddonState.RUNNING: + # Finish setup using discovery info + return await self.async_step_setup_entry_from_discovery() + + if addon_info.state == AddonState.NOT_RUNNING: + return await self.async_step_start_addon() + + # Install the add-on and start it + return await self.async_step_install_addon() + async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: @@ -293,7 +474,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_hassio( self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: - """Receive a Hass.io discovery.""" + """Receive a Hass.io discovery or process setup after addon install.""" await self._async_handle_discovery_without_unique_id() self._hassio_discovery = discovery_info.config diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 9a8e6ae22df..3bf9cf218f2 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -1,5 +1,7 @@ """Constants used by multiple MQTT modules.""" +import logging + import jinja2 from homeassistant.const import CONF_PAYLOAD, Platform @@ -148,6 +150,7 @@ DEFAULT_WILL = { } DOMAIN = "mqtt" +LOGGER = logging.getLogger(__package__) MQTT_CONNECTION_STATE = "mqtt_connection_state" diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index c786d7e08a1..1550d6c8b62 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -23,6 +23,13 @@ }, "config": { "step": { + "user": { + "description": "Please choose how you want to connect to the MQTT broker:", + "menu_options": { + "addon": "Use the official {addon} add-on.", + "broker": 
"Manually enter the MQTT broker connection details" + } + }, "broker": { "description": "Please enter the connection information of your MQTT broker.", "data": { @@ -63,6 +70,12 @@ "ws_path": "The WebSocket path to be used for the connection to your MQTT broker." } }, + "install_addon": { + "title": "Installing add-on" + }, + "start_addon": { + "title": "Starting add-on" + }, "hassio_confirm": { "title": "MQTT Broker via Home Assistant add-on", "description": "Do you want to configure Home Assistant to connect to the MQTT broker provided by the add-on {addon}?", @@ -87,6 +100,10 @@ } }, "abort": { + "addon_info_failed": "Failed get info for the {addon} add-on.", + "addon_install_failed": "Failed to install the {addon} add-on.", + "addon_start_failed": "Failed to start the {addon} add-on.", + "addon_connection_failed": "Failed to connect to the {addon} add-on. Check the add-on status and try again later.", "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 7d15bde88c0..26da794e03b 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -6,7 +6,7 @@ from collections.abc import Callable, Generator from importlib.util import find_spec from pathlib import Path from typing import TYPE_CHECKING, Any -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -180,3 +180,92 @@ def mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner], from .device_tracker.common import mock_legacy_device_tracker_setup return mock_legacy_device_tracker_setup + + +@pytest.fixture(name="discovery_info") +def discovery_info_fixture() -> Any: + """Return the discovery info from the supervisor.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_discovery_info + + return mock_discovery_info() + + +@pytest.fixture(name="get_addon_discovery_info") +def get_addon_discovery_info_fixture(discovery_info: Any) -> Generator[AsyncMock]: + """Mock get add-on discovery info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_get_addon_discovery_info + + yield from mock_get_addon_discovery_info(discovery_info) + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture() -> Generator[AsyncMock]: + """Mock Supervisor add-on store info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_store_info + + yield from mock_addon_store_info() + + +@pytest.fixture(name="addon_info") +def addon_info_fixture() -> Generator[AsyncMock]: + """Mock Supervisor add-on info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_info + + yield from mock_addon_info() + + +@pytest.fixture(name="addon_not_installed") +def addon_not_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on not installed.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_not_installed + + return mock_addon_not_installed(addon_store_info, addon_info) + + +@pytest.fixture(name="addon_installed") +def addon_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already installed but not running.""" 
+ # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_installed + + return mock_addon_installed(addon_store_info, addon_info) + + +@pytest.fixture(name="addon_running") +def addon_running_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already running.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_running + + return mock_addon_running(addon_store_info, addon_info) + + +@pytest.fixture(name="install_addon") +def install_addon_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Generator[AsyncMock]: + """Mock install add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_install_addon + + yield from mock_install_addon(addon_store_info, addon_info) + + +@pytest.fixture(name="start_addon") +def start_addon_fixture() -> Generator[AsyncMock]: + """Mock start add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_start_addon + + yield from mock_start_addon() diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py new file mode 100644 index 00000000000..f9a77628bbe --- /dev/null +++ b/tests/components/hassio/common.py @@ -0,0 +1,125 @@ +"""Provide common test tools for hassio.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import DEFAULT, AsyncMock, patch + +from homeassistant.core import HomeAssistant + + +def mock_discovery_info() -> Any: + """Return the discovery info from the supervisor.""" + return DEFAULT + + +def mock_get_addon_discovery_info(discovery_info: Any) -> Generator[AsyncMock]: + """Mock get add-on discovery info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", + return_value=discovery_info, + ) as get_addon_discovery_info: + yield get_addon_discovery_info + + +def mock_addon_store_info() -> Generator[AsyncMock]: + """Mock Supervisor add-on store info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + ) as addon_store_info: + addon_store_info.return_value = { + "available": False, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +def mock_addon_info() -> Generator[AsyncMock]: + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + ) as addon_info: + addon_info.return_value = { + "available": False, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +def mock_addon_not_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on not installed.""" + addon_store_info.return_value["available"] = True + return addon_info + + +def mock_addon_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-matter-server" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: + """Mock add-on already 
running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-mosquitto" + addon_info.return_value["state"] = "started" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +def mock_install_addon( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Generator[AsyncMock]: + """Mock install add-on.""" + + async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: + """Mock install add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon" + ) as install_addon: + install_addon.side_effect = install_addon_side_effect + yield install_addon + + +def mock_start_addon() -> Generator[AsyncMock]: + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon" + ) as start_addon: + yield start_addon diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 2b4cb20ccf9..8e29658e017 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -14,6 +14,8 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.hassio.addon_manager import AddonError +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.const import ( CONF_CLIENT_ID, @@ -28,6 +30,15 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient +ADD_ON_DISCOVERY_INFO = { + "addon": "Mosquitto Mqtt Broker", + "host": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + "ssl": False, +} MOCK_CLIENT_CERT = b"## mock client certificate file ##" MOCK_CLIENT_KEY = b"## mock key file ##" @@ -186,6 +197,29 @@ def mock_process_uploaded_file( yield mock_upload +@pytest.fixture(name="supervisor") +def supervisor_fixture() -> Generator[MagicMock]: + """Mock Supervisor.""" + with patch( + "homeassistant.components.mqtt.config_flow.is_hassio", return_value=True + ) as is_hassio: + yield is_hassio + + +@pytest.fixture(name="addon_setup_time", autouse=True) +def addon_setup_time_fixture() -> Generator[int]: + """Mock add-on setup sleep time.""" + with patch( + "homeassistant.components.mqtt.config_flow.ADDON_SETUP_TIMEOUT", new=0 + ) as addon_setup_time: + yield addon_setup_time + + +@pytest.fixture(autouse=True) +def mock_get_addon_discovery_info(get_addon_discovery_info: AsyncMock) -> None: + """Mock get add-on discovery info.""" + + @pytest.mark.usefixtures("mqtt_client_mock") async def test_user_connection_works( hass: HomeAssistant, @@ -216,6 +250,47 @@ async def test_user_connection_works( assert len(mock_finish_setup.mock_calls) == 1 +@pytest.mark.usefixtures("mqtt_client_mock", "supervisor") +async def test_user_connection_works_with_supervisor( + hass: HomeAssistant, + 
mock_try_connection: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we can finish a config flow with a supervised install.""" + mock_try_connection.return_value = True + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "broker"}, + ) + + # Assert a manual setup flow + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"broker": "127.0.0.1"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "127.0.0.1", + "port": 1883, + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection.mock_calls) == 1 + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + await hass.async_block_till_done(wait_background_tasks=True) + + @pytest.mark.usefixtures("mqtt_client_mock") async def test_user_v5_connection_works( hass: HomeAssistant, @@ -382,16 +457,8 @@ async def test_hassio_confirm( result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( - config={ - "addon": "Mock Addon", - "host": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", # Set by the addon's discovery, ignored by HA - "ssl": False, # Set by the addon's discovery, ignored by HA - }, - name="Mock Addon", + config=ADD_ON_DISCOVERY_INFO.copy(), + name="Mosquitto Mqtt Broker", slug="mosquitto", uuid="1234", ), @@ -399,7 +466,7 @@ async def test_hassio_confirm( ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "hassio_confirm" - assert result["description_placeholders"] == {"addon": "Mock Addon"} + assert result["description_placeholders"] == {"addon": "Mosquitto Mqtt Broker"} mock_try_connection_success.reset_mock() result = await hass.config_entries.flow.async_configure( @@ -408,7 +475,7 @@ async def test_hassio_confirm( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].data == { - "broker": "mock-broker", + "broker": "core-mosquitto", "port": 1883, "username": "mock-user", "password": "mock-pass", @@ -426,14 +493,12 @@ async def test_hassio_cannot_connect( mock_finish_setup: MagicMock, ) -> None: """Test a config flow is aborted when a connection was not successful.""" - mock_try_connection.return_value = True - result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( config={ "addon": "Mock Addon", - "host": "mock-broker", + "host": "core-mosquitto", "port": 1883, "username": "mock-user", "password": "mock-pass", @@ -463,6 +528,362 @@ async def test_hassio_cannot_connect( assert len(mock_finish_setup.mock_calls) == 0 +@pytest.mark.usefixtures( + "mqtt_client_mock", "supervisor", "addon_info", "addon_running" +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_running( + hass: HomeAssistant, + mock_try_connection_success: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is already installed, and running. 
+ """ + # show menu + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + # select install via add-on + mock_try_connection_success.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection_success.mock_calls) + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "mqtt_client_mock", "supervisor", "addon_info", "addon_installed", "start_addon" +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_installed( + hass: HomeAssistant, + mock_try_connection_success: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is installed, but not running. + """ + # show menu + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + # select install via add-on + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + + # add-on installed but not started, so we wait for start-up + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + mock_try_connection_success.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "start_addon"}, + ) + + # add-on is running, so entry can be installed + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection_success.mock_calls) + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "mqtt_client_mock", "supervisor", "addon_info", "addon_running" +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_running_connection_fails( + hass: HomeAssistant, + mock_try_connection: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is already installed, and running. 
+ """ + # show menu + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + # select install via add-on but the connection fails and the flow will be aborted. + mock_try_connection.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.ABORT + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "addon_info", + "addon_installed", +) +async def test_addon_not_running_api_error( + hass: HomeAssistant, + start_addon: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on start fails on a API error. + """ + start_addon.side_effect = HassioAPIError() + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # add-on not installed, so we wait for install + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "install_addon"}, + ) + + # add-on start-up failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "start_addon", + "addon_installed", +) +async def test_addon_discovery_info_error( + hass: HomeAssistant, + addon_info: AsyncMock, + get_addon_discovery_info: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on start on a discovery error. + """ + get_addon_discovery_info.side_effect = AddonError + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # Addon will retry + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "start_addon"}, + ) + + # add-on start-up failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "start_addon", + "addon_installed", +) +async def test_addon_info_error( + hass: HomeAssistant, + addon_info: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. 
+ + Case: The Mosquitto add-on info could not be retrieved. + """ + addon_info.side_effect = AddonError() + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + + # add-on info failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_info_failed" + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "addon_info", + "addon_not_installed", + "install_addon", + "start_addon", +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_not_installed( + hass: HomeAssistant, + mock_try_connection_success: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is not yet installed nor running. + """ + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # add-on not installed, so we wait for install + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "install_addon"}, + ) + + # add-on installed but not started, so we wait for start-up + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + mock_try_connection_success.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "start_addon"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection_success.mock_calls) + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "addon_info", + "addon_not_installed", + "start_addon", +) +async def test_addon_not_installed_failures( + hass: HomeAssistant, + install_addon: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on install fails. 
+ """ + install_addon.side_effect = HassioAPIError() + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # add-on not installed, so we wait for install + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "install_addon"}, + ) + + # add-on install failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_install_failed" + + async def test_option_flow( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, From 0c8ed18ca5ded9746bb7d820974494d07b9406a1 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 22 Aug 2024 09:38:27 +0200 Subject: [PATCH 0966/1635] Use setup_test_component_platform in Climate tests (#123941) * Use setup_test_component_platform in Climate tests * Rename mock integration helper * Change name to register_test_integration * Change name in tests * Fix ruff format --- tests/components/climate/conftest.py | 50 +++- tests/components/climate/test_init.py | 362 +++++--------------------- 2 files changed, 110 insertions(+), 302 deletions(-) diff --git a/tests/components/climate/conftest.py b/tests/components/climate/conftest.py index fd4368c4219..4ade8606e77 100644 --- a/tests/components/climate/conftest.py +++ b/tests/components/climate/conftest.py @@ -4,10 +4,18 @@ from collections.abc import Generator import pytest -from homeassistant.config_entries import ConfigFlow +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from tests.common import mock_config_flow, mock_platform +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, +) class MockFlow(ConfigFlow): @@ -21,3 +29,41 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: with mock_config_flow("test", MockFlow): yield + + +@pytest.fixture +def register_test_integration( + hass: HomeAssistant, config_flow_fixture: None +) -> Generator: + """Provide a mocked integration for tests.""" + + config_entry = MockConfigEntry(domain="test") + config_entry.add_to_hass(hass) + + async def help_async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups( + config_entry, [CLIMATE_DOMAIN] + ) + return True + + async def help_async_unload_entry( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config emntry.""" + return await hass.config_entries.async_unload_platforms( + config_entry, [Platform.CLIMATE] + ) + + mock_integration( + hass, + MockModule( + "test", + async_setup_entry=help_async_setup_entry_init, + async_unload_entry=help_async_unload_entry, + ), + ) + + return config_entry diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 
f306551e540..256ecf92b1d 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -56,6 +56,7 @@ from tests.common import ( import_and_test_deprecated_constant_enum, mock_integration, mock_platform, + setup_test_component_platform, ) @@ -237,42 +238,15 @@ def test_deprecated_current_constants( async def test_preset_mode_validation( - hass: HomeAssistant, config_flow_fixture: None + hass: HomeAssistant, register_test_integration: MockConfigEntry ) -> None: """Test mode validation for fan, swing and preset.""" + climate_entity = MockClimateEntity(name="test", entity_id="climate.test") - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities([MockClimateEntity(name="test", entity_id="climate.test")]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -402,7 +376,9 @@ def test_deprecated_supported_features_ints( async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test adding feature flag and warn if missing when methods are set.""" @@ -419,43 +395,15 @@ async def test_warning_not_implemented_turn_on_off_feature( """Turn off.""" called.append("turn_off") - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await 
hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -499,7 +447,9 @@ async def test_warning_not_implemented_turn_on_off_feature( async def test_implicit_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test adding feature flag and warn if missing when methods are not set. @@ -527,43 +477,15 @@ async def test_implicit_warning_not_implemented_turn_on_off_feature( """ return [HVACMode.OFF, HVACMode.HEAT] - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -579,7 +501,9 @@ async def test_implicit_warning_not_implemented_turn_on_off_feature( async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test no warning when feature flags are set.""" @@ -594,43 +518,15 @@ async def test_no_warning_implemented_turn_on_off_feature( | ClimateEntityFeature.TURN_ON ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = 
MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -651,7 +547,9 @@ async def test_no_warning_implemented_turn_on_off_feature( async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" @@ -665,43 +563,15 @@ async def test_no_warning_integration_has_migrated( | ClimateEntityFeature.SWING_MODE ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -722,7 +592,9 @@ async def test_no_warning_integration_has_migrated( async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test no warning when integration uses the correct feature flags.""" @@ -737,43 +609,15 @@ async def test_no_warning_integration_implement_feature_flags( | ClimateEntityFeature.TURN_ON ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - 
MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -1022,7 +866,7 @@ async def test_issue_aux_property_deprecated( async def test_no_issue_aux_property_deprecated_for_core( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - config_flow_fixture: None, + register_test_integration: MockConfigEntry, manifest_extra: dict[str, str], translation_key: str, translation_placeholders_extra: dict[str, str], @@ -1061,39 +905,10 @@ async def test_no_issue_aux_property_deprecated_for_core( entity_id="climate.testing", ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test weather platform via config entry.""" - async_add_entities([climate_entity]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - partial_manifest=manifest_extra, - ), - built_in=False, + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() assert climate_entity.state == HVACMode.HEAT @@ -1111,7 +926,7 @@ async def test_no_issue_aux_property_deprecated_for_core( async def test_no_issue_no_aux_property( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - config_flow_fixture: None, + register_test_integration: MockConfigEntry, issue_registry: ir.IssueRegistry, ) -> None: """Test the issue is raised on deprecated auxiliary heater attributes.""" @@ -1121,38 +936,10 @@ async def test_no_issue_no_aux_property( entity_id="climate.testing", ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test weather platform via config entry.""" - async_add_entities([climate_entity]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True ) - mock_platform( - hass, - "test.climate", - 
MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() assert climate_entity.state == HVACMode.HEAT @@ -1167,7 +954,7 @@ async def test_no_issue_no_aux_property( async def test_temperature_validation( - hass: HomeAssistant, config_flow_fixture: None + hass: HomeAssistant, register_test_integration: MockConfigEntry ) -> None: """Test validation for temperatures.""" @@ -1194,40 +981,15 @@ async def test_temperature_validation( self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTemp(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + test_climate = MockClimateEntityTemp( + name="Test", + unique_id="unique_climate_test", ) - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[test_climate], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") From b541d214ed8859a95cfce115610289f5675d0a4e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 22 Aug 2024 11:03:35 +0200 Subject: [PATCH 0967/1635] Fix lingering mqtt unload entry test (#124392) --- tests/components/mqtt/test_device_trigger.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 10322dd9046..1acfe8dd9f5 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -18,7 +18,7 @@ from homeassistant.setup import async_setup_component from .test_common import help_test_unload_config_entry from tests.common import async_fire_mqtt_message, async_get_device_automations -from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator +from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -1672,11 +1672,11 @@ async def test_trigger_debug_info( assert debug_info_data["triggers"][0]["discovery_data"]["payload"] == config2 +@pytest.mark.usefixtures("mqtt_mock") async def test_unload_entry( hass: HomeAssistant, service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, ) -> None: """Test unloading the MQTT entry.""" @@ -1738,3 +1738,4 @@ async def test_unload_entry( async_fire_mqtt_message(hass, 
"foobar/triggers/button1", "short_press") await hass.async_block_till_done() assert len(service_calls) == 2 + await hass.async_block_till_done(wait_background_tasks=True) From 480e748b04942bf582ab19a3a7dde2423d83bbc2 Mon Sep 17 00:00:00 2001 From: TimL Date: Thu, 22 Aug 2024 20:04:03 +1000 Subject: [PATCH 0968/1635] Bump pysmlight to 0.0.13 (#124398) Better handle socket uptime disconnected Raise auth error on get/post requests that fail --- homeassistant/components/smlight/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 0dbd25e90bd..72d915666e5 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pysmlight==0.0.12"], + "requirements": ["pysmlight==0.0.13"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/requirements_all.txt b/requirements_all.txt index f0df486d1c6..78215689479 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2214,7 +2214,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.12 +pysmlight==0.0.13 # homeassistant.components.snmp pysnmp==6.2.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 67f19ae081e..afbbfb396ac 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1768,7 +1768,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.12 +pysmlight==0.0.13 # homeassistant.components.snmp pysnmp==6.2.5 From ab6d0e32776ea402dc8cce9a7c8580097c1d4f2f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 22 Aug 2024 12:08:24 +0200 Subject: [PATCH 0969/1635] Handle !include without arguments in configuration.yaml (#124399) * Prevent !include without arguments in configuration.yaml from crashing core * Add test --- homeassistant/util/yaml/loader.py | 18 ++++++++++++++++++ tests/util/yaml/test_init.py | 20 ++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index ff9b7cb3601..4b7236d7022 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -348,6 +348,20 @@ def _add_reference_to_node_class( return obj +def _raise_if_no_value[NodeT: yaml.nodes.Node, _R]( + func: Callable[[LoaderType, NodeT], _R], +) -> Callable[[LoaderType, NodeT], _R]: + def wrapper(loader: LoaderType, node: NodeT) -> _R: + if not node.value: + raise HomeAssistantError( + f"{node.start_mark}: {node.tag} needs an argument." + ) + return func(loader, node) + + return wrapper + + +@_raise_if_no_value def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: """Load another YAML file and embed it using the !include tag. 
@@ -382,6 +396,7 @@ def _find_files(directory: str, pattern: str) -> Iterator[str]: yield filename +@_raise_if_no_value def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDictClass: """Load multiple files from directory as a dictionary.""" mapping = NodeDictClass() @@ -399,6 +414,7 @@ def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDi return _add_reference_to_node_class(mapping, loader, node) +@_raise_if_no_value def _include_dir_merge_named_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> NodeDictClass: @@ -414,6 +430,7 @@ def _include_dir_merge_named_yaml( return _add_reference_to_node_class(mapping, loader, node) +@_raise_if_no_value def _include_dir_list_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> list[JSON_TYPE]: @@ -427,6 +444,7 @@ def _include_dir_list_yaml( ] +@_raise_if_no_value def _include_dir_merge_list_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> JSON_TYPE: diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index ece65504ed6..793ba3183ad 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -726,3 +726,23 @@ def test_load_yaml_dict_fail() -> None: """Test item without a key.""" with pytest.raises(yaml_loader.YamlTypeError): yaml_loader.load_yaml_dict(YAML_CONFIG_FILE) + + +@pytest.mark.parametrize( + "tag", + [ + "!include", + "!include_dir_named", + "!include_dir_merge_named", + "!include_dir_list", + "!include_dir_merge_list", + ], +) +@pytest.mark.usefixtures("try_both_loaders") +def test_include_without_parameter(tag: str) -> None: + """Test include extensions without parameters.""" + with ( + io.StringIO(f"key: {tag}") as file, + pytest.raises(HomeAssistantError, match=f"{tag} needs an argument"), + ): + yaml_loader.parse_yaml(file) From 4e3bd55c737807e77f6ac853026a67136458d7e0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 22 Aug 2024 12:11:07 +0200 Subject: [PATCH 0970/1635] Revert UnitOfReactivePower change from iotawatt (#124393) Revert "Change POWER_VOLT_AMPERE_REACTIVE to UnitOfReactivePower (#117153)" This reverts commit 3e53cc175f78d8224f539e08dc6978c6cee3b41f. 
--- homeassistant/components/iotawatt/const.py | 1 + homeassistant/components/iotawatt/sensor.py | 5 ++--- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/iotawatt/const.py b/homeassistant/components/iotawatt/const.py index 278030d4f10..de008388f62 100644 --- a/homeassistant/components/iotawatt/const.py +++ b/homeassistant/components/iotawatt/const.py @@ -7,6 +7,7 @@ import json import httpx DOMAIN = "iotawatt" +VOLT_AMPERE_REACTIVE = "VAR" VOLT_AMPERE_REACTIVE_HOURS = "VARh" CONNECTION_ERRORS = (KeyError, json.JSONDecodeError, httpx.HTTPError) diff --git a/homeassistant/components/iotawatt/sensor.py b/homeassistant/components/iotawatt/sensor.py index e0328ea9d58..c9af588c160 100644 --- a/homeassistant/components/iotawatt/sensor.py +++ b/homeassistant/components/iotawatt/sensor.py @@ -23,7 +23,6 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, - UnitOfReactivePower, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -32,7 +31,7 @@ from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import dt as dt_util -from .const import DOMAIN, VOLT_AMPERE_REACTIVE_HOURS +from .const import DOMAIN, VOLT_AMPERE_REACTIVE, VOLT_AMPERE_REACTIVE_HOURS from .coordinator import IotawattUpdater _LOGGER = logging.getLogger(__name__) @@ -90,7 +89,7 @@ ENTITY_DESCRIPTION_KEY_MAP: dict[str, IotaWattSensorEntityDescription] = { ), "VAR": IotaWattSensorEntityDescription( key="VAR", - native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, icon="mdi:flash", entity_registry_enabled_default=False, From 403db6277fce9332e5978e9de74af040bbc94fb2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 22 Aug 2024 14:50:45 +0200 Subject: [PATCH 0971/1635] Improve type hints in lcn tests (#124415) --- tests/components/lcn/test_config_flow.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index e4cb4c588e9..d002c5fe625 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -129,7 +129,7 @@ async def test_show_form(hass: HomeAssistant) -> None: assert result["step_id"] == "user" -async def test_step_user(hass): +async def test_step_user(hass: HomeAssistant) -> None: """Test for user step.""" with ( patch("pypck.connection.PchkConnectionManager.async_connect"), @@ -150,7 +150,9 @@ async def test_step_user(hass): } -async def test_step_user_existing_host(hass, entry): +async def test_step_user_existing_host( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test for user defined host already exists.""" entry.add_to_hass(hass) @@ -172,7 +174,9 @@ async def test_step_user_existing_host(hass, entry): (TimeoutError, {CONF_BASE: "connection_refused"}), ], ) -async def test_step_user_error(hass, error, errors): +async def test_step_user_error( + hass: HomeAssistant, error: type[Exception], errors: dict[str, str] +) -> None: """Test for error in user step is handled correctly.""" with patch( "pypck.connection.PchkConnectionManager.async_connect", side_effect=error @@ -187,7 +191,7 @@ async def test_step_user_error(hass, error, errors): assert result["errors"] == errors 
-async def test_step_reconfigure(hass, entry): +async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test for reconfigure step.""" entry.add_to_hass(hass) old_entry_data = entry.data.copy() @@ -222,7 +226,12 @@ async def test_step_reconfigure(hass, entry): (TimeoutError, {CONF_BASE: "connection_refused"}), ], ) -async def test_step_reconfigure_error(hass, entry, error, errors): +async def test_step_reconfigure_error( + hass: HomeAssistant, + entry: MockConfigEntry, + error: type[Exception], + errors: dict[str, str], +) -> None: """Test for error in reconfigure step is handled correctly.""" entry.add_to_hass(hass) with patch( @@ -242,7 +251,7 @@ async def test_step_reconfigure_error(hass, entry, error, errors): assert result["errors"] == errors -async def test_validate_connection(): +async def test_validate_connection() -> None: """Test the connection validation.""" data = CONNECTION_DATA.copy() From 1bc0ec201ad311af0726431f7e105b4e91ec9f93 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 22 Aug 2024 15:13:00 +0200 Subject: [PATCH 0972/1635] Drop ignore-missing-annotations from pylint tests (#120302) --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0f0850ade1a..f6ffa439d9b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -716,14 +716,14 @@ jobs: run: | . venv/bin/activate python --version - pylint --ignore-missing-annotations=y tests + pylint tests - name: Run pylint (partially) if: needs.info.outputs.test_full_suite == 'false' shell: bash run: | . venv/bin/activate python --version - pylint --ignore-missing-annotations=y tests/components/${{ needs.info.outputs.tests_glob }} + pylint tests/components/${{ needs.info.outputs.tests_glob }} mypy: name: Check mypy From df8256735672abbe09d79b72787d55b96d5e14ba Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 22 Aug 2024 08:14:45 -0500 Subject: [PATCH 0973/1635] Fix shelly available check when device is not initialized (#124182) * Fix shelly available check when device is not initialized available needs to check for device.initialized or if the device is sleepy as calls to status will raise NotInitialized which results in many unretrieved exceptions while writing state fixes ``` 2024-08-18 09:33:03.757 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved (None) Traceback (most recent call last): File "/usr/src/homeassistant/homeassistant/helpers/update_coordinator.py", line 258, in _handle_refresh_interval await self._async_refresh(log_failures=True, scheduled=True) File "/usr/src/homeassistant/homeassistant/helpers/update_coordinator.py", line 453, in _async_refresh self.async_update_listeners() File "/usr/src/homeassistant/homeassistant/helpers/update_coordinator.py", line 168, in async_update_listeners update_callback() File "/config/custom_components/shelly/entity.py", line 374, in _update_callback self.async_write_ha_state() File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1005, in async_write_ha_state self._async_write_ha_state() File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1130, in _async_write_ha_state self.__async_calculate_state() File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1067, in __async_calculate_state state = self._stringify_state(available) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/helpers/entity.py", line 1011, in _stringify_state if (state := self.state) is None: ^^^^^^^^^^ File "/usr/src/homeassistant/homeassistant/components/binary_sensor/__init__.py", line 293, in state if (is_on := self.is_on) is None: ^^^^^^^^^^ File "/config/custom_components/shelly/binary_sensor.py", line 331, in is_on return bool(self.attribute_value) ^^^^^^^^^^^^^^^^^^^^ File "/config/custom_components/shelly/entity.py", line 545, in attribute_value self._last_value = self.sub_status ^^^^^^^^^^^^^^^ File "/config/custom_components/shelly/entity.py", line 534, in sub_status return self.status[self.entity_description.sub_key] ^^^^^^^^^^^ File "/config/custom_components/shelly/entity.py", line 364, in status return cast(dict, self.coordinator.device.status[self.key]) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/local/lib/python3.12/site-packages/aioshelly/rpc_device/device.py", line 390, in status raise NotInitialized aioshelly.exceptions.NotInitialized ``` * tweak * cover * fix * cover * fixes --- .../components/shelly/coordinator.py | 3 ++ homeassistant/components/shelly/entity.py | 8 +++++ tests/components/shelly/test_sensor.py | 34 ++++++++++++++++++- 3 files changed, 44 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 2710565f960..6286e515727 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -682,6 +682,7 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): self.entry.async_create_background_task( self.hass, self._async_connected(), "rpc device init", eager_start=True ) + # Make sure entities are marked available self.async_set_updated_data(None) elif update_type is RpcUpdateType.DISCONNECTED: self.entry.async_create_background_task( @@ -690,6 +691,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): "rpc device disconnected", eager_start=True, ) + # Make sure 
entities are marked as unavailable + self.async_set_updated_data(None) elif update_type is RpcUpdateType.STATUS: self.async_set_updated_data(None) if self.sleep_period: diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index 5bf8a411377..980a39feaba 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -358,6 +358,14 @@ class ShellyRpcEntity(CoordinatorEntity[ShellyRpcCoordinator]): self._attr_unique_id = f"{coordinator.mac}-{key}" self._attr_name = get_rpc_entity_name(coordinator.device, key) + @property + def available(self) -> bool: + """Check if device is available and initialized or sleepy.""" + coordinator = self.coordinator + return super().available and ( + coordinator.device.initialized or bool(coordinator.sleep_period) + ) + @property def status(self) -> dict: """Device status by entity key.""" diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 843270107e0..7aa4218341f 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -47,7 +47,7 @@ from . import ( register_entity, ) -from tests.common import mock_restore_cache_with_extra_data +from tests.common import async_fire_time_changed, mock_restore_cache_with_extra_data RELAY_BLOCK_ID = 0 SENSOR_BLOCK_ID = 3 @@ -1279,3 +1279,35 @@ async def test_rpc_rgbw_sensors( entry = entity_registry.async_get(entity_id) assert entry assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" + + +async def test_rpc_device_sensor_goes_unavailable_on_disconnect( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test RPC device with sensor goes unavailable on disconnect.""" + await init_integration(hass, 2) + temp_sensor_state = hass.states.get("sensor.test_name_temperature") + assert temp_sensor_state is not None + assert temp_sensor_state.state != STATE_UNAVAILABLE + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", False) + mock_rpc_device.mock_disconnected() + await hass.async_block_till_done() + temp_sensor_state = hass.states.get("sensor.test_name_temperature") + assert temp_sensor_state.state == STATE_UNAVAILABLE + + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert "NotInitialized" not in caplog.text + + monkeypatch.setattr(mock_rpc_device, "connected", True) + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_initialized() + await hass.async_block_till_done() + temp_sensor_state = hass.states.get("sensor.test_name_temperature") + assert temp_sensor_state.state != STATE_UNAVAILABLE From c541cb5cbaf1ddb59e977436ffdc9e8a3a492364 Mon Sep 17 00:00:00 2001 From: Jakob Schlyter Date: Thu, 22 Aug 2024 15:36:11 +0200 Subject: [PATCH 0974/1635] Automatically generate Amazon Polly list of voices and regions (#119198) * Automatically generate list of voices and regions. Requires AWS credentials. 
* add missing commit * replace pydantic with dataclass * dictionary values are strings or list of strings * also generated set of supported engines * use sets for amazon polly parameters * move default for readability --- .../components/amazon_polly/const.py | 121 ++-------------- homeassistant/components/amazon_polly/tts.py | 8 +- homeassistant/generated/amazon_polly.py | 137 ++++++++++++++++++ script/amazon_polly.py | 70 +++++++++ 4 files changed, 221 insertions(+), 115 deletions(-) create mode 100644 homeassistant/generated/amazon_polly.py create mode 100644 script/amazon_polly.py diff --git a/homeassistant/components/amazon_polly/const.py b/homeassistant/components/amazon_polly/const.py index bb196544fc3..40b1bba3ddd 100644 --- a/homeassistant/components/amazon_polly/const.py +++ b/homeassistant/components/amazon_polly/const.py @@ -8,128 +8,23 @@ CONF_REGION: Final = "region_name" CONF_ACCESS_KEY_ID: Final = "aws_access_key_id" CONF_SECRET_ACCESS_KEY: Final = "aws_secret_access_key" -DEFAULT_REGION: Final = "us-east-1" -SUPPORTED_REGIONS: Final[list[str]] = [ - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", - "ca-central-1", - "eu-west-1", - "eu-central-1", - "eu-west-2", - "eu-west-3", - "ap-southeast-1", - "ap-southeast-2", - "ap-northeast-2", - "ap-northeast-1", - "ap-south-1", - "sa-east-1", -] - CONF_ENGINE: Final = "engine" CONF_VOICE: Final = "voice" CONF_OUTPUT_FORMAT: Final = "output_format" CONF_SAMPLE_RATE: Final = "sample_rate" CONF_TEXT_TYPE: Final = "text_type" -SUPPORTED_VOICES: Final[list[str]] = [ - "Aditi", # Hindi - "Amy", # English (British) - "Aria", # English (New Zealand), Neural - "Arlet", # Catalan, Neural - "Arthur", # English, Neural - "Astrid", # Swedish - "Ayanda", # English (South African), Neural - "Bianca", # Italian - "Brian", # English (British) - "Camila", # Portuguese, Brazilian - "Carla", # Italian - "Carmen", # Romanian - "Celine", # French - "Chantal", # French Canadian - "Conchita", # Spanish (European) - "Cristiano", # Portuguese (European) - "Daniel", # German, Neural - "Dora", # Icelandic - "Elin", # Swedish, Neural - "Emma", # English - "Enrique", # Spanish (European) - "Ewa", # Polish - "Filiz", # Turkish - "Gabrielle", # French (Canadian) - "Geraint", # English Welsh - "Giorgio", # Italian - "Gwyneth", # Welsh - "Hala", # Arabic (Gulf), Neural - "Hannah", # German (Austrian), Neural - "Hans", # German - "Hiujin", # Chinese (Cantonese), Neural - "Ida", # Norwegian, Neural - "Ines", # Portuguese, European # codespell:ignore ines - "Ivy", # English - "Jacek", # Polish - "Jan", # Polish - "Joanna", # English - "Joey", # English - "Justin", # English - "Kajal", # English (Indian)/Hindi (Bilingual ), Neural - "Karl", # Icelandic - "Kendra", # English - "Kevin", # English, Neural - "Kimberly", # English - "Laura", # Dutch, Neural - "Lea", # French - "Liam", # Canadian French, Neural - "Liv", # Norwegian - "Lotte", # Dutch - "Lucia", # Spanish European - "Lupe", # Spanish US - "Mads", # Danish - "Maja", # Polish - "Marlene", # German - "Mathieu", # French - "Matthew", # English - "Maxim", # Russian - "Mia", # Spanish Mexican - "Miguel", # Spanish US - "Mizuki", # Japanese - "Naja", # Danish - "Nicole", # English Australian - "Ola", # Polish, Neural - "Olivia", # Female, Australian, Neural - "Penelope", # Spanish US - "Pedro", # Spanish US, Neural - "Raveena", # English, Indian - "Ricardo", # Portuguese (Brazilian) - "Ruben", # Dutch - "Russell", # English (Australian) - "Ruth", # English, Neural - "Salli", # English - "Seoyeon", # Korean 
- "Stephen", # English, Neural - "Suvi", # Finnish - "Takumi", # Japanese - "Tatyana", # Russian - "Vicki", # German - "Vitoria", # Portuguese, Brazilian - "Zeina", # Arabic - "Zhiyu", # Chinese -] +SUPPORTED_OUTPUT_FORMATS: Final[set[str]] = {"mp3", "ogg_vorbis", "pcm"} -SUPPORTED_OUTPUT_FORMATS: Final[list[str]] = ["mp3", "ogg_vorbis", "pcm"] +SUPPORTED_SAMPLE_RATES: Final[set[str]] = {"8000", "16000", "22050", "24000"} -SUPPORTED_ENGINES: Final[list[str]] = ["neural", "standard"] - -SUPPORTED_SAMPLE_RATES: Final[list[str]] = ["8000", "16000", "22050", "24000"] - -SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, list[str]]] = { - "mp3": ["8000", "16000", "22050", "24000"], - "ogg_vorbis": ["8000", "16000", "22050"], - "pcm": ["8000", "16000"], +SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, set[str]]] = { + "mp3": {"8000", "16000", "22050", "24000"}, + "ogg_vorbis": {"8000", "16000", "22050"}, + "pcm": {"8000", "16000"}, } -SUPPORTED_TEXT_TYPES: Final[list[str]] = ["text", "ssml"] +SUPPORTED_TEXT_TYPES: Final[set[str]] = {"text", "ssml"} CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = { "audio/mpeg": "mp3", @@ -137,6 +32,8 @@ CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = { "audio/pcm": "pcm", } +DEFAULT_REGION: Final = "us-east-1" + DEFAULT_ENGINE: Final = "standard" DEFAULT_VOICE: Final = "Joanna" DEFAULT_OUTPUT_FORMAT: Final = "mp3" diff --git a/homeassistant/components/amazon_polly/tts.py b/homeassistant/components/amazon_polly/tts.py index d5cb7092fe3..1fc972fa3a1 100644 --- a/homeassistant/components/amazon_polly/tts.py +++ b/homeassistant/components/amazon_polly/tts.py @@ -16,6 +16,11 @@ from homeassistant.components.tts import ( ) from homeassistant.const import ATTR_CREDENTIALS, CONF_PROFILE_NAME from homeassistant.core import HomeAssistant +from homeassistant.generated.amazon_polly import ( + SUPPORTED_ENGINES, + SUPPORTED_REGIONS, + SUPPORTED_VOICES, +) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -38,13 +43,10 @@ from .const import ( DEFAULT_SAMPLE_RATES, DEFAULT_TEXT_TYPE, DEFAULT_VOICE, - SUPPORTED_ENGINES, SUPPORTED_OUTPUT_FORMATS, - SUPPORTED_REGIONS, SUPPORTED_SAMPLE_RATES, SUPPORTED_SAMPLE_RATES_MAP, SUPPORTED_TEXT_TYPES, - SUPPORTED_VOICES, ) _LOGGER: Final = logging.getLogger(__name__) diff --git a/homeassistant/generated/amazon_polly.py b/homeassistant/generated/amazon_polly.py new file mode 100644 index 00000000000..1d870bf6c92 --- /dev/null +++ b/homeassistant/generated/amazon_polly.py @@ -0,0 +1,137 @@ +"""Automatically generated file. 
+ +To update, run python3 -m script.amazon_polly +""" + +from __future__ import annotations + +from typing import Final + +SUPPORTED_ENGINES: Final[set[str]] = { + "generative", + "long-form", + "neural", + "standard", +} + +SUPPORTED_REGIONS: Final[set[str]] = { + "af-south-1", + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +} + +SUPPORTED_VOICES: Final[set[str]] = { + "Aditi", + "Adriano", + "Amy", + "Andres", + "Aria", + "Arlet", + "Arthur", + "Astrid", + "Ayanda", + "Bianca", + "Brian", + "Burcu", + "Camila", + "Carla", + "Carmen", + "Celine", + "Chantal", + "Conchita", + "Cristiano", + "Daniel", + "Danielle", + "Dora", + "Elin", + "Emma", + "Enrique", + "Ewa", + "Filiz", + "Gabrielle", + "Geraint", + "Giorgio", + "Gregory", + "Gwyneth", + "Hala", + "Hannah", + "Hans", + "Hiujin", + "Ida", + "Ines", + "Isabelle", + "Ivy", + "Jacek", + "Jan", + "Joanna", + "Joey", + "Justin", + "Kajal", + "Karl", + "Kazuha", + "Kendra", + "Kevin", + "Kimberly", + "Laura", + "Lea", + "Liam", + "Lisa", + "Liv", + "Lotte", + "Lucia", + "Lupe", + "Mads", + "Maja", + "Marlene", + "Mathieu", + "Matthew", + "Maxim", + "Mia", + "Miguel", + "Mizuki", + "Naja", + "Niamh", + "Nicole", + "Ola", + "Olivia", + "Pedro", + "Penelope", + "Raveena", + "Remi", + "Ricardo", + "Ruben", + "Russell", + "Ruth", + "Salli", + "Seoyeon", + "Sergio", + "Sofie", + "Stephen", + "Suvi", + "Takumi", + "Tatyana", + "Thiago", + "Tomoko", + "Vicki", + "Vitoria", + "Zayd", + "Zeina", + "Zhiyu", +} diff --git a/script/amazon_polly.py b/script/amazon_polly.py new file mode 100644 index 00000000000..fcb0a4b7987 --- /dev/null +++ b/script/amazon_polly.py @@ -0,0 +1,70 @@ +"""Helper script to update supported languages for Amazone Polly text-to-speech (TTS). + +N.B. This script requires AWS credentials. 
+""" + +from dataclasses import dataclass +from pathlib import Path +from typing import Self + +import boto3 + +from .hassfest.serializer import format_python_namespace + + +@dataclass(frozen=True) +class AmazonPollyVoice: + """Amazon Polly Voice.""" + + id: str + name: str + gender: str + language_name: str + language_code: str + supported_engines: set[str] + additional_language_codes: set[str] + + @classmethod + def validate(cls, model: dict[str, str | list[str]]) -> Self: + """Validate data model.""" + return cls( + id=model["Id"], + name=model["Name"], + gender=model["Gender"], + language_name=model["LanguageName"], + language_code=model["LanguageCode"], + supported_engines=set(model["SupportedEngines"]), + additional_language_codes=set(model.get("AdditionalLanguageCodes", [])), + ) + + +def get_all_voices(client: boto3.client) -> list[AmazonPollyVoice]: + """Get list of all supported voices from Amazon Polly.""" + response = client.describe_voices() + return [AmazonPollyVoice.validate(voice) for voice in response["Voices"]] + + +supported_regions = set( + boto3.session.Session().get_available_regions(service_name="polly") +) + +polly_client = boto3.client(service_name="polly", region_name="us-east-1") +voices = get_all_voices(polly_client) +supported_voices = set({v.id for v in voices}) +supported_engines = set().union(*[v.supported_engines for v in voices]) + +Path("homeassistant/generated/amazon_polly.py").write_text( + format_python_namespace( + { + "SUPPORTED_VOICES": supported_voices, + "SUPPORTED_REGIONS": supported_regions, + "SUPPORTED_ENGINES": supported_engines, + }, + annotations={ + "SUPPORTED_VOICES": "Final[set[str]]", + "SUPPORTED_REGIONS": "Final[set[str]]", + "SUPPORTED_ENGINES": "Final[set[str]]", + }, + generator="script.amazon_polly", + ) +) From 4a6f833fca20c7687279aa18c971bc6ae0f9c6e8 Mon Sep 17 00:00:00 2001 From: Penny Wood Date: Thu, 22 Aug 2024 21:39:09 +0800 Subject: [PATCH 0975/1635] Add supported features for iZone (#124416) * Fix for #123462 * Set outside of constructor --- homeassistant/components/izone/climate.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/izone/climate.py b/homeassistant/components/izone/climate.py index 3a1279a9bd4..617cdc730cc 100644 --- a/homeassistant/components/izone/climate.py +++ b/homeassistant/components/izone/climate.py @@ -441,6 +441,9 @@ class ZoneDevice(ClimateEntity): _attr_name = None _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_target_temperature_step = 0.5 + _attr_supported_features = ( + ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON + ) def __init__(self, controller: ControllerDevice, zone: Zone) -> None: """Initialise ZoneDevice.""" From 2337c3ff69ad9b1e5d0a79b193c659b20c8f6948 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Thu, 22 Aug 2024 15:49:42 +0200 Subject: [PATCH 0976/1635] Improve Bang olufsen test typing and parameter order (#124419) * Add proper media_player typing to tests * Add proper typing to init tests * Add proper typing to config_flow tests * Fix order of parameters of tests * Add more typing Reorder parameters --- tests/components/bang_olufsen/conftest.py | 10 +- .../bang_olufsen/test_config_flow.py | 16 +- tests/components/bang_olufsen/test_init.py | 16 +- .../bang_olufsen/test_media_player.py | 155 +++++++++++------- 4 files changed, 128 insertions(+), 69 deletions(-) diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index 4764798f34d..66b40f90899 100644 --- 
a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -36,7 +36,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(): +def mock_config_entry() -> MockConfigEntry: """Mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -47,7 +47,11 @@ def mock_config_entry(): @pytest.fixture -async def mock_media_player(hass: HomeAssistant, mock_config_entry, mock_mozart_client): +async def mock_media_player( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, +) -> None: """Mock media_player entity.""" mock_config_entry.add_to_hass(hass) @@ -248,7 +252,7 @@ def mock_mozart_client() -> Generator[AsyncMock]: @pytest.fixture -def mock_setup_entry(): +def mock_setup_entry() -> Generator[AsyncMock]: """Mock successful setup entry.""" with patch( "homeassistant.components.bang_olufsen.async_setup_entry", return_value=True diff --git a/tests/components/bang_olufsen/test_config_flow.py b/tests/components/bang_olufsen/test_config_flow.py index e637120a6ae..5d5f34a79e6 100644 --- a/tests/components/bang_olufsen/test_config_flow.py +++ b/tests/components/bang_olufsen/test_config_flow.py @@ -1,6 +1,6 @@ """Test the bang_olufsen config_flow.""" -from unittest.mock import Mock +from unittest.mock import AsyncMock, Mock from aiohttp.client_exceptions import ClientConnectorError from mozart_api.exceptions import ApiException @@ -25,7 +25,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_timeout_error( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test we handle timeout_error.""" mock_mozart_client.get_beolink_self.side_effect = TimeoutError() @@ -42,7 +42,7 @@ async def test_config_flow_timeout_error( async def test_config_flow_client_connector_error( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test we handle client_connector_error.""" mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( @@ -73,7 +73,7 @@ async def test_config_flow_invalid_ip(hass: HomeAssistant) -> None: async def test_config_flow_api_exception( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test we handle api_exception.""" mock_mozart_client.get_beolink_self.side_effect = ApiException() @@ -89,7 +89,7 @@ async def test_config_flow_api_exception( assert mock_mozart_client.get_beolink_self.call_count == 1 -async def test_config_flow(hass: HomeAssistant, mock_mozart_client) -> None: +async def test_config_flow(hass: HomeAssistant, mock_mozart_client: AsyncMock) -> None: """Test config flow.""" result_init = await hass.config_entries.flow.async_init( @@ -112,7 +112,9 @@ async def test_config_flow(hass: HomeAssistant, mock_mozart_client) -> None: assert mock_mozart_client.get_beolink_self.call_count == 1 -async def test_config_flow_zeroconf(hass: HomeAssistant, mock_mozart_client) -> None: +async def test_config_flow_zeroconf( + hass: HomeAssistant, mock_mozart_client: AsyncMock +) -> None: """Test zeroconf discovery.""" result_zeroconf = await hass.config_entries.flow.async_init( @@ -162,7 +164,7 @@ async def test_config_flow_zeroconf_ipv6(hass: HomeAssistant) -> None: async def test_config_flow_zeroconf_invalid_ip( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test zeroconf discovery with invalid IP address.""" 
mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index 11742b846ae..3eb98e956be 100644 --- a/tests/components/bang_olufsen/test_init.py +++ b/tests/components/bang_olufsen/test_init.py @@ -1,5 +1,7 @@ """Test the bang_olufsen __init__.""" +from unittest.mock import AsyncMock + from aiohttp.client_exceptions import ServerTimeoutError from homeassistant.components.bang_olufsen import DOMAIN @@ -9,12 +11,14 @@ from homeassistant.helpers.device_registry import DeviceRegistry from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER +from tests.common import MockConfigEntry + async def test_setup_entry( hass: HomeAssistant, - mock_config_entry, - mock_mozart_client, device_registry: DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test async_setup_entry.""" @@ -41,7 +45,9 @@ async def test_setup_entry( async def test_setup_entry_failed( - hass: HomeAssistant, mock_config_entry, mock_mozart_client + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test failed async_setup_entry.""" @@ -66,7 +72,9 @@ async def test_setup_entry_failed( async def test_unload_entry( - hass: HomeAssistant, mock_config_entry, mock_mozart_client + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test unload_entry.""" diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 37f375bdb39..e169c023d1d 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -1,10 +1,11 @@ """Test the Bang & Olufsen media_player entity.""" +from collections.abc import Callable from contextlib import nullcontext as does_not_raise import logging -from unittest.mock import patch +from unittest.mock import AsyncMock, patch -from mozart_api.models import PlaybackContentMetadata +from mozart_api.models import PlaybackContentMetadata, RenderingState, Source import pytest from homeassistant.components.bang_olufsen.const import ( @@ -75,7 +76,7 @@ async def test_initialization( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_config_entry: MockConfigEntry, - mock_mozart_client, + mock_mozart_client: AsyncMock, ) -> None: """Test the integration is initialized properly in _initialize, async_added_to_hass and __init__.""" @@ -131,7 +132,9 @@ async def test_async_update_sources_outdated_api( async def test_async_update_playback_metadata( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_metadata.""" @@ -170,8 +173,8 @@ async def test_async_update_playback_metadata( async def test_async_update_playback_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_error.""" @@ -192,7 +195,9 @@ async def test_async_update_playback_error( async def test_async_update_playback_progress( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_progress.""" @@ -218,7 +223,9 @@ async def 
test_async_update_playback_progress( async def test_async_update_playback_state( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_state.""" @@ -293,14 +300,14 @@ async def test_async_update_playback_state( ], ) async def test_async_update_source_change( - reported_source, - real_source, - content_type, - progress, - metadata, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + reported_source: Source, + real_source: Source, + content_type: MediaType, + progress: int, + metadata: PlaybackContentMetadata, ) -> None: """Test _async_update_source_change.""" @@ -338,7 +345,9 @@ async def test_async_update_source_change( async def test_async_turn_off( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_turn_off.""" @@ -366,7 +375,9 @@ async def test_async_turn_off( async def test_async_set_volume_level( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_set_volume_level and _async_update_volume by proxy.""" @@ -404,7 +415,9 @@ async def test_async_set_volume_level( async def test_async_mute_volume( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_mute_volume.""" @@ -452,11 +465,11 @@ async def test_async_mute_volume( ], ) async def test_async_media_play_pause( - initial_state, - command, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + initial_state: RenderingState, + command: str, ) -> None: """Test async_media_play_pause.""" @@ -484,7 +497,9 @@ async def test_async_media_play_pause( async def test_async_media_stop( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_media_stop.""" @@ -513,7 +528,9 @@ async def test_async_media_stop( async def test_async_media_next_track( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_media_next_track.""" @@ -540,12 +557,12 @@ async def test_async_media_next_track( ], ) async def test_async_media_seek( - source, - expected_result, - seek_called_times, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + source: Source, + expected_result: Callable, + seek_called_times: int, ) -> None: """Test async_media_seek.""" @@ -575,7 +592,9 @@ async def test_async_media_seek( async def test_async_media_previous_track( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_media_previous_track.""" @@ -593,7 +612,9 @@ async def test_async_media_previous_track( async def test_async_clear_playlist( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + 
mock_config_entry: MockConfigEntry, ) -> None: """Test async_clear_playlist.""" @@ -622,13 +643,13 @@ async def test_async_clear_playlist( ], ) async def test_async_select_source( - source, - expected_result, - audio_source_call, - video_source_call, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + source: str, + expected_result: Callable, + audio_source_call: int, + video_source_call: int, ) -> None: """Test async_select_source with an invalid source.""" @@ -651,7 +672,9 @@ async def test_async_select_source( async def test_async_play_media_invalid_type( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media only accepts valid media types.""" @@ -676,7 +699,9 @@ async def test_async_play_media_invalid_type( async def test_async_play_media_url( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media URL.""" @@ -701,7 +726,9 @@ async def test_async_play_media_url( async def test_async_play_media_overlay_absolute_volume_uri( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media overlay with Home Assistant local URI and absolute volume.""" @@ -734,8 +761,8 @@ async def test_async_play_media_overlay_absolute_volume_uri( async def test_async_play_media_overlay_invalid_offset_volume_tts( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" @@ -765,7 +792,9 @@ async def test_async_play_media_overlay_invalid_offset_volume_tts( async def test_async_play_media_overlay_offset_volume_tts( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" @@ -798,7 +827,9 @@ async def test_async_play_media_overlay_offset_volume_tts( async def test_async_play_media_tts( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Home Assistant tts.""" @@ -822,7 +853,9 @@ async def test_async_play_media_tts( async def test_async_play_media_radio( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with B&O radio.""" @@ -846,7 +879,9 @@ async def test_async_play_media_radio( async def test_async_play_media_favourite( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with B&O favourite.""" @@ -868,7 +903,9 @@ async def test_async_play_media_favourite( async def test_async_play_media_deezer_flow( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + 
mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Deezer flow.""" @@ -894,7 +931,9 @@ async def test_async_play_media_deezer_flow( async def test_async_play_media_deezer_playlist( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Deezer playlist.""" @@ -919,7 +958,9 @@ async def test_async_play_media_deezer_playlist( async def test_async_play_media_deezer_track( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Deezer track.""" @@ -943,7 +984,9 @@ async def test_async_play_media_deezer_track( async def test_async_play_media_invalid_deezer( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with an invalid/no Deezer login.""" @@ -972,7 +1015,9 @@ async def test_async_play_media_invalid_deezer( async def test_async_play_media_url_m3u( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media URL with the m3u extension.""" @@ -1041,12 +1086,12 @@ async def test_async_play_media_url_m3u( ], ) async def test_async_browse_media( - child, - present, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, hass_ws_client: WebSocketGenerator, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + child: dict[str, str | bool | None], + present: bool, ) -> None: """Test async_browse_media with audio and video source.""" From 404a7bab185e553735ed78198f7081cc6919a477 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 22 Aug 2024 16:06:41 +0200 Subject: [PATCH 0977/1635] Wrap OSError in loader.load_yaml (#124406) --- homeassistant/util/yaml/loader.py | 10 +++++++++- tests/util/yaml/test_init.py | 21 +++++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index 4b7236d7022..a56cf126f79 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -221,13 +221,21 @@ type LoaderType = FastSafeLoader | PythonSafeLoader def load_yaml( fname: str | os.PathLike[str], secrets: Secrets | None = None ) -> JSON_TYPE | None: - """Load a YAML file.""" + """Load a YAML file. + + If opening the file raises an OSError it will be wrapped in a HomeAssistantError, + except for FileNotFoundError which will be re-raised. 
+ """ try: with open(fname, encoding="utf-8") as conf_file: return parse_yaml(conf_file, secrets) except UnicodeDecodeError as exc: _LOGGER.error("Unable to read file %s: %s", fname, exc) raise HomeAssistantError(exc) from exc + except FileNotFoundError: + raise + except OSError as exc: + raise HomeAssistantError(exc) from exc def load_yaml_dict( diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index 793ba3183ad..dbd7f1d2e99 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -746,3 +746,24 @@ def test_include_without_parameter(tag: str) -> None: pytest.raises(HomeAssistantError, match=f"{tag} needs an argument"), ): yaml_loader.parse_yaml(file) + + +@pytest.mark.parametrize( + ("open_exception", "load_yaml_exception"), + [ + (FileNotFoundError, OSError), + (NotADirectoryError, HomeAssistantError), + (PermissionError, HomeAssistantError), + ], +) +@pytest.mark.usefixtures("try_both_loaders") +def test_load_yaml_wrap_oserror( + open_exception: Exception, + load_yaml_exception: Exception, +) -> None: + """Test load_yaml wraps OSError in HomeAssistantError.""" + with ( + patch("homeassistant.util.yaml.loader.open", side_effect=open_exception), + pytest.raises(load_yaml_exception), + ): + yaml_loader.load_yaml("bla") From 890c6e97fd837f318d8fa9359bce37c28cd554c9 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Thu, 22 Aug 2024 16:31:15 +0200 Subject: [PATCH 0978/1635] Add Bang & Olufsen websocket testing (#123075) * Add websocket.py testing Convert media_player testing dispatch events to the proper websocket events * Fix WebSocket testing by using callbacks * Add typing * Add caplog checking Check dispatch events directly Check event bus directly Avoid using internals * Fix event and / dispatch callbacks not necessarily being checked * Remove unnecessary caplog log level handling --- tests/components/bang_olufsen/conftest.py | 3 + .../bang_olufsen/test_media_player.py | 166 ++++++++++-------- .../components/bang_olufsen/test_websocket.py | 163 +++++++++++++++++ 3 files changed, 257 insertions(+), 75 deletions(-) create mode 100644 tests/components/bang_olufsen/test_websocket.py diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index 66b40f90899..dd6c4a73469 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -245,8 +245,11 @@ def mock_mozart_client() -> Generator[AsyncMock]: # Non-REST API client methods client.check_device_connection = AsyncMock() client.close_api_client = AsyncMock() + + # WebSocket listener client.connect_notifications = AsyncMock() client.disconnect_notifications = Mock() + client.websocket_connected = False yield client diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index e169c023d1d..3dd3c43f5a5 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -5,14 +5,18 @@ from contextlib import nullcontext as does_not_raise import logging from unittest.mock import AsyncMock, patch -from mozart_api.models import PlaybackContentMetadata, RenderingState, Source +from mozart_api.models import ( + PlaybackContentMetadata, + RenderingState, + Source, + WebsocketNotificationTag, +) import pytest from homeassistant.components.bang_olufsen.const import ( BANG_OLUFSEN_STATES, DOMAIN, BangOlufsenSource, - WebsocketNotification, ) from homeassistant.components.media_player import ( 
ATTR_INPUT_SOURCE, @@ -38,7 +42,6 @@ from homeassistant.components.media_player import ( from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.setup import async_setup_component from .const import ( @@ -59,7 +62,6 @@ from .const import ( TEST_PLAYBACK_STATE_TURN_OFF, TEST_RADIO_STATION, TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, - TEST_SERIAL_NUMBER, TEST_SOURCES, TEST_VIDEO_SOURCES, TEST_VOLUME, @@ -131,6 +133,29 @@ async def test_async_update_sources_outdated_api( ) +async def test_async_update_sources_remote( + hass: HomeAssistant, mock_mozart_client, mock_config_entry: MockConfigEntry +) -> None: + """Test _async_update_sources is called when there are new video sources.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + notification_callback = mock_mozart_client.get_notification_notifications.call_args[ + 0 + ][0] + + # This is not an ideal check, but I couldn't get anything else to work + assert mock_mozart_client.get_available_sources.call_count == 1 + assert mock_mozart_client.get_remote_menu.call_count == 1 + + # Send the remote menu Websocket event + notification_callback(WebsocketNotificationTag(value="remoteMenuChanged")) + + assert mock_mozart_client.get_available_sources.call_count == 2 + assert mock_mozart_client.get_remote_menu.call_count == 2 + + async def test_async_update_playback_metadata( hass: HomeAssistant, mock_mozart_client: AsyncMock, @@ -141,6 +166,10 @@ async def test_async_update_playback_metadata( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_metadata_callback = ( + mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_DURATION not in states.attributes assert ATTR_MEDIA_TITLE not in states.attributes @@ -150,11 +179,7 @@ async def test_async_update_playback_metadata( assert ATTR_MEDIA_CHANNEL not in states.attributes # Send the WebSocket event dispatch - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", - TEST_PLAYBACK_METADATA, - ) + playback_metadata_callback(TEST_PLAYBACK_METADATA) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ( @@ -181,13 +206,13 @@ async def test_async_update_playback_error( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_ERROR}", - TEST_PLAYBACK_ERROR, + playback_error_callback = ( + mock_mozart_client.get_playback_error_notifications.call_args[0][0] ) + # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable + playback_error_callback(TEST_PLAYBACK_ERROR) + assert ( "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)" in caplog.text @@ -204,16 +229,16 @@ async def test_async_update_playback_progress( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_progress_callback = ( + 
mock_mozart_client.get_playback_progress_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_POSITION not in states.attributes old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] assert old_updated_at - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", - TEST_PLAYBACK_PROGRESS, - ) + playback_progress_callback(TEST_PLAYBACK_PROGRESS) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress @@ -232,14 +257,14 @@ async def test_async_update_playback_state( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == MediaPlayerState.PLAYING - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - TEST_PLAYBACK_STATE_PAUSED, - ) + playback_state_callback(TEST_PLAYBACK_STATE_PAUSED) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == TEST_PLAYBACK_STATE_PAUSED.value @@ -314,29 +339,26 @@ async def test_async_update_source_change( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_progress_callback = ( + mock_mozart_client.get_playback_progress_notifications.call_args[0][0] + ) + playback_metadata_callback = ( + mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] + ) + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_INPUT_SOURCE not in states.attributes assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC # Simulate progress attribute being available - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", - TEST_PLAYBACK_PROGRESS, - ) + playback_progress_callback(TEST_PLAYBACK_PROGRESS) # Simulate metadata - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", - metadata, - ) - - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", - reported_source, - ) + playback_metadata_callback(metadata) + source_change_callback(reported_source) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name @@ -354,6 +376,10 @@ async def test_async_turn_off( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] + ) + await hass.services.async_call( "media_player", "turn_off", @@ -361,11 +387,7 @@ async def test_async_turn_off( blocking=True, ) - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - TEST_PLAYBACK_STATE_TURN_OFF, - ) + playback_state_callback(TEST_PLAYBACK_STATE_TURN_OFF) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] @@ -384,6 +406,8 @@ async def test_async_set_volume_level( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + volume_callback = 
mock_mozart_client.get_volume_notifications.call_args[0][0] + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes @@ -398,11 +422,7 @@ async def test_async_set_volume_level( ) # The service call will trigger a WebSocket notification - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", - TEST_VOLUME, - ) + volume_callback(TEST_VOLUME) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ( @@ -424,6 +444,8 @@ async def test_async_mute_volume( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes @@ -438,11 +460,7 @@ async def test_async_mute_volume( ) # The service call will trigger a WebSocket notification - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", - TEST_VOLUME_MUTED, - ) + volume_callback(TEST_VOLUME_MUTED) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ( @@ -476,13 +494,13 @@ async def test_async_media_play_pause( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # Set the initial state - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - initial_state, + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] ) + # Set the initial state + playback_state_callback(initial_state) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == BANG_OLUFSEN_STATES[initial_state.value] @@ -506,13 +524,13 @@ async def test_async_media_stop( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # Set the state to playing - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - TEST_PLAYBACK_STATE_PLAYING, + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] ) + # Set the state to playing + playback_state_callback(TEST_PLAYBACK_STATE_PLAYING) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] @@ -569,13 +587,13 @@ async def test_async_media_seek( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # Set the source - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", - source, + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] ) + # Set the source + source_change_callback(source) + # Check results with expected_result: await hass.services.async_call( @@ -801,12 +819,10 @@ async def test_async_play_media_overlay_offset_volume_tts( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] + # Set the volume to enable offset - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", - TEST_VOLUME, - ) + volume_callback(TEST_VOLUME) await hass.services.async_call( "media_player", diff --git a/tests/components/bang_olufsen/test_websocket.py b/tests/components/bang_olufsen/test_websocket.py new file mode 100644 index 
00000000000..209550faee5
--- /dev/null
+++ b/tests/components/bang_olufsen/test_websocket.py
@@ -0,0 +1,163 @@
+"""Test the Bang & Olufsen WebSocket listener."""
+
+import logging
+from unittest.mock import AsyncMock, Mock
+
+from mozart_api.models import SoftwareUpdateState
+import pytest
+
+from homeassistant.components.bang_olufsen.const import (
+    BANG_OLUFSEN_WEBSOCKET_EVENT,
+    CONNECTION_STATUS,
+    DOMAIN,
+)
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.device_registry import DeviceRegistry
+from homeassistant.helpers.dispatcher import async_dispatcher_connect
+
+from .const import TEST_NAME
+
+from tests.common import MockConfigEntry
+
+
+async def test_connection(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    mock_config_entry: MockConfigEntry,
+    mock_mozart_client: AsyncMock,
+) -> None:
+    """Test on_connection and on_connection_lost logs and calls correctly."""
+
+    mock_mozart_client.websocket_connected = True
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+    connection_callback = mock_mozart_client.get_on_connection.call_args[0][0]
+
+    caplog.set_level(logging.DEBUG)
+
+    mock_connection_callback = Mock()
+
+    async_dispatcher_connect(
+        hass,
+        f"{mock_config_entry.unique_id}_{CONNECTION_STATUS}",
+        mock_connection_callback,
+    )
+
+    # Call the WebSocket connection status method
+    connection_callback()
+    await hass.async_block_till_done()
+
+    mock_connection_callback.assert_called_once_with(True)
+    assert f"Connected to the {TEST_NAME} notification channel" in caplog.text
+
+
+async def test_connection_lost(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    mock_config_entry: MockConfigEntry,
+    mock_mozart_client: AsyncMock,
+) -> None:
+    """Test on_connection_lost logs and calls correctly."""
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+    connection_lost_callback = mock_mozart_client.get_on_connection_lost.call_args[0][0]
+
+    mock_connection_lost_callback = Mock()
+
+    async_dispatcher_connect(
+        hass,
+        f"{mock_config_entry.unique_id}_{CONNECTION_STATUS}",
+        mock_connection_lost_callback,
+    )
+
+    connection_lost_callback()
+    await hass.async_block_till_done()
+
+    mock_connection_lost_callback.assert_called_once_with(False)
+    assert f"Lost connection to the {TEST_NAME}" in caplog.text
+
+
+async def test_on_software_update_state(
+    hass: HomeAssistant,
+    device_registry: DeviceRegistry,
+    mock_config_entry: MockConfigEntry,
+    mock_mozart_client: AsyncMock,
+) -> None:
+    """Test software version is updated through on_software_update_state."""
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+    software_update_state_callback = (
+        mock_mozart_client.get_software_update_state_notifications.call_args[0][0]
+    )
+
+    # Trigger the notification
+    await software_update_state_callback(SoftwareUpdateState())
+
+    await hass.async_block_till_done()
+
+    device = device_registry.async_get_device(
+        identifiers={(DOMAIN, mock_config_entry.unique_id)}
+    )
+    assert device.sw_version == "1.0.0"
+
+
+async def test_on_all_notifications_raw(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    device_registry: DeviceRegistry,
+    mock_config_entry: MockConfigEntry,
+    mock_mozart_client: AsyncMock,
+) -> None:
+    """Test on_all_notifications_raw logs and fires as expected."""
+
+    mock_config_entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(mock_config_entry.entry_id)
+
+    all_notifications_raw_callback = (
+        mock_mozart_client.get_all_notifications_raw.call_args[0][0]
+    )
+
+    raw_notification = {
+        "eventData": {
+            "default": {"level": 40},
+            "level": {"level": 40},
+            "maximum": {"level": 100},
+            "muted": {"muted": False},
+        },
+        "eventType": "WebSocketEventVolume",
+    }
+    raw_notification_full = raw_notification
+
+    # Get device ID for the modified notification that is sent as an event and in the log
+    device = device_registry.async_get_device(
+        identifiers={(DOMAIN, mock_config_entry.unique_id)}
+    )
+    raw_notification_full.update(
+        {
+            "device_id": device.id,
+            "serial_number": mock_config_entry.unique_id,
+        }
+    )
+
+    caplog.set_level(logging.DEBUG)
+
+    mock_event_callback = Mock()
+
+    # Listen to BANG_OLUFSEN_WEBSOCKET_EVENT events
+    hass.bus.async_listen(BANG_OLUFSEN_WEBSOCKET_EVENT, mock_event_callback)
+
+    # Trigger the notification
+    all_notifications_raw_callback(raw_notification)
+    await hass.async_block_till_done()
+
+    assert str(raw_notification_full) in caplog.text
+
+    mocked_call = mock_event_callback.call_args[0][0].as_dict()
+    assert mocked_call["event_type"] == BANG_OLUFSEN_WEBSOCKET_EVENT
+    assert mocked_call["data"] == raw_notification_full
From 6f66f37fc7a35436af40ac7598294620e57a73be Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Thu, 22 Aug 2024 09:47:22 -0500
Subject: [PATCH 0979/1635] Shutdown database engine before waiting for executor shutdown (#117339)

* Close database connection before stopping the executor related issue #117004
* Close database connection before waiting for database executor to finish related issue #117004
* fix test
* fix
* drop unused arg
* Revert "drop unused arg"
This reverts commit 8a9fe6a24a614d7fda2058248f653fa400a5a252.
* docstring
* comment
---
 homeassistant/components/recorder/core.py | 17 +++++++++--------
 homeassistant/util/executor.py            | 14 +++++++++++---
 tests/components/recorder/test_init.py    | 19 +++++++++----------
 3 files changed, 29 insertions(+), 21 deletions(-)

diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py
index 31c36be9c88..c57274317e3 100644
--- a/homeassistant/components/recorder/core.py
+++ b/homeassistant/components/recorder/core.py
@@ -367,13 +367,6 @@ class Recorder(threading.Thread):
         """Add an executor job from within the event loop."""
         return self.hass.loop.run_in_executor(self._db_executor, target, *args)
 
-    def _stop_executor(self) -> None:
-        """Stop the executor."""
-        if self._db_executor is None:
-            return
-        self._db_executor.shutdown()
-        self._db_executor = None
-
     @callback
     def _async_check_queue(self, *_: Any) -> None:
         """Periodic check of the queue size to ensure we do not exhaust memory.
@@ -1501,5 +1494,13 @@ class Recorder(threading.Thread):
         try:
             self._end_session()
         finally:
-            self._stop_executor()
+            if self._db_executor:
+                # We shutdown the executor without forcefully
+                # joining the threads until after we have tried
+                # to cleanly close the connection.
+                self._db_executor.shutdown(join_threads_or_timeout=False)
             self._close_connection()
+            if self._db_executor:
+                # After the connection is closed, we can join the threads
+                # or forcefully shutdown the threads if they take too long.
+ self._db_executor.join_threads_or_timeout() diff --git a/homeassistant/util/executor.py b/homeassistant/util/executor.py index 47b6d08a197..5f0fdd5c273 100644 --- a/homeassistant/util/executor.py +++ b/homeassistant/util/executor.py @@ -63,10 +63,18 @@ def join_or_interrupt_threads( class InterruptibleThreadPoolExecutor(ThreadPoolExecutor): """A ThreadPoolExecutor instance that will not deadlock on shutdown.""" - def shutdown(self, *args: Any, **kwargs: Any) -> None: - """Shutdown with interrupt support added.""" + def shutdown( + self, *args: Any, join_threads_or_timeout: bool = True, **kwargs: Any + ) -> None: + """Shutdown with interrupt support added. + + By default shutdown will wait for threads to finish up + to the timeout before forcefully stopping them. This can + be disabled by setting `join_threads_or_timeout` to False. + """ super().shutdown(wait=False, cancel_futures=True) - self.join_threads_or_timeout() + if join_threads_or_timeout: + self.join_threads_or_timeout() def join_threads_or_timeout(self) -> None: """Join threads or timeout.""" diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 72d47515edd..3bbc78e21ce 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -166,11 +166,10 @@ async def test_shutdown_before_startup_finishes( await hass.async_block_till_done() await hass.async_stop() - def _run_information_with_session(): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - return run_information_with_session(session) - - run_info = await instance.async_add_executor_job(_run_information_with_session) + # The database executor is shutdown so we must run the + # query in the main thread for testing + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + run_info = run_information_with_session(session) assert run_info.run_id == 1 assert run_info.start is not None @@ -216,8 +215,7 @@ async def test_shutdown_closes_connections( instance = recorder.get_instance(hass) await instance.async_db_ready await hass.async_block_till_done() - pool = instance.engine.pool - pool.shutdown = Mock() + pool = instance.engine def _ensure_connected(): with session_scope(hass=hass, read_only=True) as session: @@ -225,10 +223,11 @@ async def test_shutdown_closes_connections( await instance.async_add_executor_job(_ensure_connected) - hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) - await hass.async_block_till_done() + with patch.object(pool, "dispose", wraps=pool.dispose) as dispose: + hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) + await hass.async_block_till_done() - assert len(pool.shutdown.mock_calls) == 1 + assert len(dispose.mock_calls) == 1 with pytest.raises(RuntimeError): assert instance.get_session() From 5d64155bb670d890e38cecc735fd05cc22c84eba Mon Sep 17 00:00:00 2001 From: Petro31 <35082313+Petro31@users.noreply.github.com> Date: Thu, 22 Aug 2024 13:11:08 -0400 Subject: [PATCH 0980/1635] Add zip to template engine (#122460) * add zip to template engine * fix doc strings --- homeassistant/helpers/template.py | 1 + tests/helpers/test_template.py | 45 +++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 7742418c5a7..e090e0de2d1 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -2844,6 +2844,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["iif"] = iif self.globals["bool"] = 
forgiving_boolean self.globals["version"] = version + self.globals["zip"] = zip self.tests["is_number"] = is_number self.tests["list"] = _is_list self.tests["set"] = _is_set diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 3123c01f500..0676ae21ab7 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -6236,3 +6236,48 @@ async def test_template_thread_safety_checks(hass: HomeAssistant) -> None: await hass.async_add_executor_job(template_obj.async_render_to_info) assert template_obj.async_render_to_info().result() == 23 + + +@pytest.mark.parametrize( + ("cola", "colb", "expected"), + [ + ([1, 2], [3, 4], [(1, 3), (2, 4)]), + ([1, 2], [3, 4, 5], [(1, 3), (2, 4)]), + ([1, 2, 3, 4], [3, 4], [(1, 3), (2, 4)]), + ], +) +def test_zip(hass: HomeAssistant, cola, colb, expected) -> None: + """Test zip.""" + assert ( + template.Template("{{ zip(cola, colb) | list }}", hass).async_render( + {"cola": cola, "colb": colb} + ) + == expected + ) + assert ( + template.Template( + "[{% for a, b in zip(cola, colb) %}({{a}}, {{b}}), {% endfor %}]", hass + ).async_render({"cola": cola, "colb": colb}) + == expected + ) + + +@pytest.mark.parametrize( + ("col", "expected"), + [ + ([(1, 3), (2, 4)], [(1, 2), (3, 4)]), + (["ax", "by", "cz"], [("a", "b", "c"), ("x", "y", "z")]), + ], +) +def test_unzip(hass: HomeAssistant, col, expected) -> None: + """Test unzipping using zip.""" + assert ( + template.Template("{{ zip(*col) | list }}", hass).async_render({"col": col}) + == expected + ) + assert ( + template.Template( + "{% set a, b = zip(*col) %}[{{a}}, {{b}}]", hass + ).async_render({"col": col}) + == expected + ) From 3a928990818086442cdc27e8b752f08b4f81dc2a Mon Sep 17 00:00:00 2001 From: Michael Arthur Date: Fri, 23 Aug 2024 05:13:54 +1200 Subject: [PATCH 0981/1635] Add LawnMowerActivity.RETURNING to Lawn Mower (#124261) * Add returning activity state * update tests * update tests * set can return to returning state * update MQTT tests * update mqtt back to docked * fix up other tests that I broke again * re-add mower_can_dock --- .../components/kitchen_sink/lawn_mower.py | 14 +++++++++++--- homeassistant/components/lawn_mower/const.py | 3 +++ .../components/lawn_mower/strings.json | 3 ++- .../snapshots/test_lawn_mower.ambr | 12 ++++++++++++ .../components/kitchen_sink/test_lawn_mower.py | 6 ++++++ tests/components/mqtt/test_lawn_mower.py | 18 +++++++++++++++++- 6 files changed, 51 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/kitchen_sink/lawn_mower.py b/homeassistant/components/kitchen_sink/lawn_mower.py index 50ec70f6759..51814fb262d 100644 --- a/homeassistant/components/kitchen_sink/lawn_mower.py +++ b/homeassistant/components/kitchen_sink/lawn_mower.py @@ -30,18 +30,26 @@ async def async_setup_platform( ), DemoLawnMower( "kitchen_sink_mower_002", + "Mower can return", + LawnMowerActivity.RETURNING, + LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.PAUSE + | LawnMowerEntityFeature.START_MOWING, + ), + DemoLawnMower( + "kitchen_sink_mower_003", "Mower can dock", LawnMowerActivity.MOWING, LawnMowerEntityFeature.DOCK | LawnMowerEntityFeature.START_MOWING, ), DemoLawnMower( - "kitchen_sink_mower_003", + "kitchen_sink_mower_004", "Mower can pause", LawnMowerActivity.DOCKED, LawnMowerEntityFeature.PAUSE | LawnMowerEntityFeature.START_MOWING, ), DemoLawnMower( - "kitchen_sink_mower_004", + "kitchen_sink_mower_005", "Mower can do all", LawnMowerActivity.DOCKED, LawnMowerEntityFeature.DOCK @@ -49,7 +57,7 @@ async def 
async_setup_platform( | LawnMowerEntityFeature.START_MOWING, ), DemoLawnMower( - "kitchen_sink_mower_005", + "kitchen_sink_mower_006", "Mower is paused", LawnMowerActivity.PAUSED, LawnMowerEntityFeature.DOCK diff --git a/homeassistant/components/lawn_mower/const.py b/homeassistant/components/lawn_mower/const.py index e060abe6423..231be83ed88 100644 --- a/homeassistant/components/lawn_mower/const.py +++ b/homeassistant/components/lawn_mower/const.py @@ -18,6 +18,9 @@ class LawnMowerActivity(StrEnum): DOCKED = "docked" """Device is docked.""" + RETURNING = "returning" + """Device is returning.""" + class LawnMowerEntityFeature(IntFlag): """Supported features of the lawn mower entity.""" diff --git a/homeassistant/components/lawn_mower/strings.json b/homeassistant/components/lawn_mower/strings.json index 15ed50ca6c5..ebaea4ffd6a 100644 --- a/homeassistant/components/lawn_mower/strings.json +++ b/homeassistant/components/lawn_mower/strings.json @@ -7,7 +7,8 @@ "error": "Error", "paused": "[%key:common::state::paused%]", "mowing": "Mowing", - "docked": "Docked" + "docked": "Docked", + "returning": "Returning" } } }, diff --git a/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr b/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr index 4189de18ce4..e3e413c5a44 100644 --- a/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr +++ b/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr @@ -49,6 +49,18 @@ 'last_updated': , 'state': 'docked', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mower can return', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lawn_mower.mower_can_return', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'returning', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mower is paused', diff --git a/tests/components/kitchen_sink/test_lawn_mower.py b/tests/components/kitchen_sink/test_lawn_mower.py index 48914ab5a46..e1ba201a722 100644 --- a/tests/components/kitchen_sink/test_lawn_mower.py +++ b/tests/components/kitchen_sink/test_lawn_mower.py @@ -72,6 +72,12 @@ async def test_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: LawnMowerActivity.MOWING, LawnMowerActivity.DOCKED, ), + ( + "lawn_mower.mower_can_return", + SERVICE_DOCK, + LawnMowerActivity.RETURNING, + LawnMowerActivity.DOCKED, + ), ], ) async def test_mower( diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 4906f6cfda3..101a45787ef 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -103,6 +103,13 @@ async def test_run_lawn_mower_setup_and_state_updates( state = hass.states.get("lawn_mower.test_lawn_mower") assert state.state == "mowing" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", "returning") + + await hass.async_block_till_done() + + state = hass.states.get("lawn_mower.test_lawn_mower") + assert state.state == "returning" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", "docked") await hass.async_block_till_done() @@ -198,6 +205,13 @@ async def test_value_template( state = hass.states.get("lawn_mower.test_lawn_mower") assert state.state == "paused" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", '{"val":"returning"}') + + await hass.async_block_till_done() + + state = hass.states.get("lawn_mower.test_lawn_mower") + assert state.state == "returning" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", '{"val": null}') await 
hass.async_block_till_done() @@ -702,7 +716,8 @@ async def test_mqtt_payload_not_a_valid_activity_warning( assert ( "Invalid activity for lawn_mower.test_lawn_mower: 'painting' " - "(valid activities: ['error', 'paused', 'mowing', 'docked'])" in caplog.text + "(valid activities: ['error', 'paused', 'mowing', 'docked', 'returning'])" + in caplog.text ) @@ -774,6 +789,7 @@ async def test_reloadable( [ ("activity_state_topic", "paused", None, "paused"), ("activity_state_topic", "docked", None, "docked"), + ("activity_state_topic", "returning", None, "returning"), ("activity_state_topic", "mowing", None, "mowing"), ], ) From 7887bcba89d5843f7b93be4b647d7a1654e59951 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 22 Aug 2024 19:16:08 +0200 Subject: [PATCH 0982/1635] Allow to set options for an MQTT enum sensor (#123248) * Add options attribute support for MQTT sensor * Add comment --- homeassistant/components/mqtt/const.py | 1 + homeassistant/components/mqtt/select.py | 9 +- homeassistant/components/mqtt/sensor.py | 39 +++++++-- tests/components/mqtt/test_sensor.py | 105 ++++++++++++++++++++++++ 4 files changed, 146 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 3bf9cf218f2..1e1011cc381 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -39,6 +39,7 @@ CONF_ENCODING = "encoding" CONF_JSON_ATTRS_TOPIC = "json_attributes_topic" CONF_JSON_ATTRS_TEMPLATE = "json_attributes_template" CONF_KEEPALIVE = "keepalive" +CONF_OPTIONS = "options" CONF_ORIGIN = "origin" CONF_QOS = ATTR_QOS CONF_RETAIN = ATTR_RETAIN diff --git a/homeassistant/components/mqtt/select.py b/homeassistant/components/mqtt/select.py index 5cc7a586c71..e301892dee2 100644 --- a/homeassistant/components/mqtt/select.py +++ b/homeassistant/components/mqtt/select.py @@ -19,7 +19,12 @@ from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription from .config import MQTT_RW_SCHEMA -from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_STATE_TOPIC +from .const import ( + CONF_COMMAND_TEMPLATE, + CONF_COMMAND_TOPIC, + CONF_OPTIONS, + CONF_STATE_TOPIC, +) from .mixins import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, @@ -32,8 +37,6 @@ from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) -CONF_OPTIONS = "options" - DEFAULT_NAME = "MQTT Select" MQTT_SELECT_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index e983f1b66f3..d689b6bb767 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -38,7 +38,7 @@ from homeassistant.util import dt as dt_util from . 
import subscription from .config import MQTT_RO_SCHEMA -from .const import CONF_STATE_TOPIC, PAYLOAD_NONE +from .const import CONF_OPTIONS, CONF_STATE_TOPIC, PAYLOAD_NONE from .mixins import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper from .models import ( MqttValueTemplate, @@ -72,6 +72,7 @@ _PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend( vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean, vol.Optional(CONF_LAST_RESET_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_NAME): vol.Any(cv.string, None), + vol.Optional(CONF_OPTIONS): cv.ensure_list, vol.Optional(CONF_SUGGESTED_DISPLAY_PRECISION): cv.positive_int, vol.Optional(CONF_STATE_CLASS): vol.Any(STATE_CLASSES_SCHEMA, None), vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.Any(cv.string, None), @@ -79,8 +80,8 @@ _PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend( ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) -def validate_sensor_state_class_config(config: ConfigType) -> ConfigType: - """Validate the sensor state class config.""" +def validate_sensor_state_and_device_class_config(config: ConfigType) -> ConfigType: + """Validate the sensor options, state and device class config.""" if ( CONF_LAST_RESET_VALUE_TEMPLATE in config and (state_class := config.get(CONF_STATE_CLASS)) != SensorStateClass.TOTAL @@ -90,17 +91,35 @@ def validate_sensor_state_class_config(config: ConfigType) -> ConfigType: f"together with state class `{state_class}`" ) + # Only allow `options` to be set for `enum` sensors + # to limit the possible sensor values + if (options := config.get(CONF_OPTIONS)) is not None: + if not options: + raise vol.Invalid("An empty options list is not allowed") + if config.get(CONF_STATE_CLASS) or config.get(CONF_UNIT_OF_MEASUREMENT): + raise vol.Invalid( + f"Specifying `{CONF_OPTIONS}` is not allowed together with " + f"the `{CONF_STATE_CLASS}` or `{CONF_UNIT_OF_MEASUREMENT}` option" + ) + + if (device_class := config.get(CONF_DEVICE_CLASS)) != SensorDeviceClass.ENUM: + raise vol.Invalid( + f"The option `{CONF_OPTIONS}` can only be used " + f"together with device class `{SensorDeviceClass.ENUM}`, " + f"got `{CONF_DEVICE_CLASS}` '{device_class}'" + ) + return config PLATFORM_SCHEMA_MODERN = vol.All( _PLATFORM_SCHEMA_BASE, - validate_sensor_state_class_config, + validate_sensor_state_and_device_class_config, ) DISCOVERY_SCHEMA = vol.All( _PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA), - validate_sensor_state_class_config, + validate_sensor_state_and_device_class_config, ) @@ -197,6 +216,7 @@ class MqttSensor(MqttEntity, RestoreSensor): CONF_SUGGESTED_DISPLAY_PRECISION ) self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) + self._attr_options = config.get(CONF_OPTIONS) self._attr_state_class = config.get(CONF_STATE_CLASS) self._expire_after = config.get(CONF_EXPIRE_AFTER) @@ -252,6 +272,15 @@ class MqttSensor(MqttEntity, RestoreSensor): else: self._attr_native_value = payload return + if self.options and payload not in self.options: + _LOGGER.warning( + "Ignoring invalid option received on topic '%s', got '%s', allowed: %s", + msg.topic, + payload, + ", ".join(self.options), + ) + return + if self.device_class in { None, SensorDeviceClass.ENUM, diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 4b117aaa4d5..a62c36404ca 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -3,6 +3,7 @@ import copy from datetime import datetime, timedelta import json +import logging from pathlib import Path 
from typing import Any from unittest.mock import MagicMock, patch @@ -110,6 +111,48 @@ async def test_setting_sensor_value_via_mqtt_message( assert state.attributes.get("unit_of_measurement") == "fav unit" +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "device_class": "enum", + "options": ["red", "green", "blue"], + } + } + }, + ], +) +async def test_setting_enum_sensor_value_via_mqtt_message( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the setting of the value via MQTT of an enum type sensor.""" + await mqtt_mock_entry() + + async_fire_mqtt_message(hass, "test-topic", "red") + state = hass.states.get("sensor.test") + assert state.state == "red" + + async_fire_mqtt_message(hass, "test-topic", "green") + state = hass.states.get("sensor.test") + assert state.state == "green" + + with caplog.at_level(logging.WARNING): + async_fire_mqtt_message(hass, "test-topic", "yellow") + assert ( + "Ignoring invalid option received on topic 'test-topic', " + "got 'yellow', allowed: red, green, blue" in caplog.text + ) + # Assert the state update was filtered out and ignored + state = hass.states.get("sensor.test") + assert state.state == "green" + + @pytest.mark.parametrize( "hass_config", [ @@ -874,6 +917,61 @@ async def test_invalid_state_class( assert "expected SensorStateClass or one of" in caplog.text +@pytest.mark.parametrize( + ("hass_config", "error_logged"), + [ + ( + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "state_class": "measurement", + "options": ["red", "green", "blue"], + } + } + }, + "Specifying `options` is not allowed together with the `state_class` " + "or `unit_of_measurement` option", + ), + ( + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "device_class": "gas", + "options": ["red", "green", "blue"], + } + } + }, + "The option `options` can only be used together with " + "device class `enum`, got `device_class` 'gas'", + ), + ( + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "options": [], + } + } + }, + "An empty options list is not allowed", + ), + ], +) +async def test_invalid_options_config( + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + error_logged: str, +) -> None: + """Test state_class, deviceclass with sensor options.""" + assert await mqtt_mock_entry() + assert error_logged in caplog.text + + @pytest.mark.parametrize( "hass_config", [ @@ -891,6 +989,13 @@ async def test_invalid_state_class( "state_topic": "test-topic", "state_class": None, }, + { + "name": "Test 4", + "state_topic": "test-topic", + "state_class": None, + "device_class": "enum", + "options": ["red", "green", "blue"], + }, ] } } From e13f8996f25be750c5910388766731843ee3ea20 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 22 Aug 2024 20:13:30 +0200 Subject: [PATCH 0983/1635] Fix Import ReceivePayloadType (#124431) * Fix Import ReceivePayloadType * Do not assert export `ReceivePayloadType` on mqtt integration level as helper should be used. 
--- homeassistant/components/mqtt/__init__.py | 1 - homeassistant/components/mqtt/climate.py | 2 +- homeassistant/components/mqtt/device_tracker.py | 3 ++- homeassistant/components/mqtt/event.py | 2 +- homeassistant/components/mqtt/fan.py | 2 +- homeassistant/components/mqtt/humidifier.py | 2 +- homeassistant/components/mqtt/lawn_mower.py | 2 +- homeassistant/components/mqtt/light/schema_basic.py | 2 +- homeassistant/components/mqtt/light/schema_template.py | 2 +- homeassistant/components/mqtt/lock.py | 2 +- homeassistant/components/mqtt/number.py | 2 +- homeassistant/components/mqtt/select.py | 2 +- homeassistant/components/mqtt/sensor.py | 8 ++------ homeassistant/components/mqtt/siren.py | 2 +- homeassistant/components/mqtt/tag.py | 2 +- homeassistant/components/mqtt/text.py | 2 +- homeassistant/components/mqtt/trigger.py | 2 +- homeassistant/components/mysensors/gateway.py | 2 +- tests/components/mqtt/test_init.py | 1 - 19 files changed, 19 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index b2adb7665fc..86eeca2017c 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -89,7 +89,6 @@ from .models import ( # noqa: F401 PayloadSentinel, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, convert_outgoing_mqtt_payload, ) from .subscription import ( # noqa: F401 diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index 7873b056889..426bac8e9ca 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -46,6 +46,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util.unit_conversion import TemperatureConverter @@ -84,7 +85,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/device_tracker.py b/homeassistant/components/mqtt/device_tracker.py index b2aeb4c0fc1..57614106d4e 100644 --- a/homeassistant/components/mqtt/device_tracker.py +++ b/homeassistant/components/mqtt/device_tracker.py @@ -27,13 +27,14 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription from .config import MQTT_BASE_SCHEMA from .const import CONF_PAYLOAD_RESET, CONF_STATE_TOPIC from .mixins import CONF_JSON_ATTRS_TOPIC, MqttEntity, async_setup_entity_entry_helper -from .models import MqttValueTemplate, ReceiveMessage, ReceivePayloadType +from .models import MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_subscribe_topic diff --git a/homeassistant/components/mqtt/event.py b/homeassistant/components/mqtt/event.py index 5e801fda54b..0dc267f80f9 100644 --- a/homeassistant/components/mqtt/event.py +++ b/homeassistant/components/mqtt/event.py @@ -19,6 +19,7 @@ from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME, CONF_VALUE_TEMPLAT from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads_object @@ -32,7 +33,6 @@ from .models import ( MqttValueTemplateException, PayloadSentinel, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/fan.py b/homeassistant/components/mqtt/fan.py index 1838ce20e4d..a22dba4ae93 100644 --- a/homeassistant/components/mqtt/fan.py +++ b/homeassistant/components/mqtt/fan.py @@ -29,6 +29,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util.percentage import ( @@ -52,7 +53,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/humidifier.py b/homeassistant/components/mqtt/humidifier.py index a4510ee5951..d55c1d3cebf 100644 --- a/homeassistant/components/mqtt/humidifier.py +++ b/homeassistant/components/mqtt/humidifier.py @@ -32,6 +32,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolSchemaType @@ -54,7 +55,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/lawn_mower.py b/homeassistant/components/mqtt/lawn_mower.py index a74d278401c..f4aa248929e 100644 --- a/homeassistant/components/mqtt/lawn_mower.py +++ b/homeassistant/components/mqtt/lawn_mower.py @@ -20,6 +20,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription @@ -31,7 +32,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index b0ffae4e328..1a64b1eecb4 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -39,6 +39,7 @@ from homeassistant.const import ( from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType import homeassistant.util.color as color_util @@ -57,7 +58,6 @@ from ..models import ( PayloadSentinel, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, TemplateVarsType, ) from ..schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index c35b0e6ced9..a1f4ea2e81a 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -31,6 +31,7 @@ from homeassistant.const import ( from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, TemplateVarsType, VolSchemaType import homeassistant.util.color as color_util @@ -43,7 +44,6 @@ from ..models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from ..schemas import MQTT_ENTITY_COMMON_SCHEMA from .schema import MQTT_LIGHT_SCHEMA_SCHEMA diff --git a/homeassistant/components/mqtt/lock.py b/homeassistant/components/mqtt/lock.py index 22b0e24b3c6..c72dcd8dc21 100644 --- a/homeassistant/components/mqtt/lock.py +++ b/homeassistant/components/mqtt/lock.py @@ -21,6 +21,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, TemplateVarsType from . 
import subscription @@ -39,7 +40,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/number.py b/homeassistant/components/mqtt/number.py index e8f2cf0cfe4..ce441a2de6e 100644 --- a/homeassistant/components/mqtt/number.py +++ b/homeassistant/components/mqtt/number.py @@ -28,6 +28,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription @@ -44,7 +45,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/select.py b/homeassistant/components/mqtt/select.py index e301892dee2..5f9c4a11c23 100644 --- a/homeassistant/components/mqtt/select.py +++ b/homeassistant/components/mqtt/select.py @@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription @@ -31,7 +32,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index d689b6bb767..fc95807b8a5 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -33,6 +33,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util import dt as dt_util @@ -40,12 +41,7 @@ from . 
import subscription from .config import MQTT_RO_SCHEMA from .const import CONF_OPTIONS, CONF_STATE_TOPIC, PAYLOAD_NONE from .mixins import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper -from .models import ( - MqttValueTemplate, - PayloadSentinel, - ReceiveMessage, - ReceivePayloadType, -) +from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import check_state_too_long diff --git a/homeassistant/components/mqtt/siren.py b/homeassistant/components/mqtt/siren.py index 9f1466dd95d..e7cf9e270bd 100644 --- a/homeassistant/components/mqtt/siren.py +++ b/homeassistant/components/mqtt/siren.py @@ -31,6 +31,7 @@ from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import json_dumps +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, TemplateVarsType, VolSchemaType from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads_object @@ -51,7 +52,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/tag.py b/homeassistant/components/mqtt/tag.py index d5f5371c357..031c620af4a 100644 --- a/homeassistant/components/mqtt/tag.py +++ b/homeassistant/components/mqtt/tag.py @@ -13,6 +13,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE, CONF_VALUE_TEMPLATE from homeassistant.core import HassJobType, HomeAssistant, callback import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import subscription @@ -31,7 +32,6 @@ from .models import ( MqttValueTemplate, MqttValueTemplateException, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_DEVICE_INFO_SCHEMA from .subscription import EntitySubscription diff --git a/homeassistant/components/mqtt/text.py b/homeassistant/components/mqtt/text.py index 0b122dec7b5..0db711cc456 100644 --- a/homeassistant/components/mqtt/text.py +++ b/homeassistant/components/mqtt/text.py @@ -22,6 +22,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription @@ -33,7 +34,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import check_state_too_long diff --git a/homeassistant/components/mqtt/trigger.py b/homeassistant/components/mqtt/trigger.py index 3f7f03d7f19..b901176cf88 100644 --- a/homeassistant/components/mqtt/trigger.py +++ b/homeassistant/components/mqtt/trigger.py @@ -18,6 +18,7 @@ from homeassistant.core import ( callback, ) from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.trigger import TriggerActionType, TriggerData, TriggerInfo from homeassistant.helpers.typing import ConfigType, TemplateVarsType @@ -31,7 +32,6 @@ from .models import ( PayloadSentinel, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend( diff --git a/homeassistant/components/mysensors/gateway.py b/homeassistant/components/mysensors/gateway.py index 11f27f8a108..00c8d5eecfb 100644 --- a/homeassistant/components/mysensors/gateway.py +++ b/homeassistant/components/mysensors/gateway.py @@ -16,7 +16,6 @@ import voluptuous as vol from homeassistant.components.mqtt import ( DOMAIN as MQTT_DOMAIN, ReceiveMessage as MQTTReceiveMessage, - ReceivePayloadType, async_publish, async_subscribe, ) @@ -24,6 +23,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant, callback import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.setup import SetupPhases, async_pause_setup from homeassistant.util.unit_system import METRIC_SYSTEM diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 333960d8ad4..5dab5689518 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -2458,7 +2458,6 @@ async def test_multi_platform_discovery( "PayloadSentinel", "PublishPayloadType", "ReceiveMessage", - "ReceivePayloadType", "async_prepare_subscribe_topics", "async_publish", "async_subscribe", From 6e5e96b0476329ddbc497cc75f69325d7674b2fe Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Thu, 22 Aug 2024 20:51:57 +0200 Subject: [PATCH 0984/1635] Bump ruff to 0.6.2 (#124433) --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f29fd92a880..ab5e59139cf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.1 + rev: v0.6.2 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 6bd2fbbc145..0c8d2b3796b 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.6.1 +ruff==0.6.2 yamllint==1.35.1 From 51dba1eec359a2a6a801c90f095305e0b1f53e7a Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 22 Aug 2024 20:58:05 +0200 Subject: [PATCH 0985/1635] Cleanup redundant fixtures and move all other hassio 
addon fixtures to be shared (#124437) --- tests/components/conftest.py | 65 ++++++++ tests/components/hassio/common.py | 59 ++++++- tests/components/hassio/test_addon_manager.py | 152 +----------------- 3 files changed, 124 insertions(+), 152 deletions(-) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 26da794e03b..9f13d52e0e8 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -14,6 +14,8 @@ from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant if TYPE_CHECKING: + from homeassistant.components.hassio.addon_manager import AddonManager + from .conversation import MockAgent from .device_tracker.common import MockScanner from .light.common import MockLight @@ -182,6 +184,15 @@ def mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner], return mock_legacy_device_tracker_setup +@pytest.fixture(name="addon_manager") +def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: + """Return an AddonManager instance.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_manager + + return mock_addon_manager(hass) + + @pytest.fixture(name="discovery_info") def discovery_info_fixture() -> Any: """Return the discovery info from the supervisor.""" @@ -269,3 +280,57 @@ def start_addon_fixture() -> Generator[AsyncMock]: from .hassio.common import mock_start_addon yield from mock_start_addon() + + +@pytest.fixture(name="restart_addon") +def restart_addon_fixture() -> Generator[AsyncMock]: + """Mock restart add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_restart_addon + + yield from mock_restart_addon() + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture() -> Generator[AsyncMock]: + """Mock stop add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_stop_addon + + yield from mock_stop_addon() + + +@pytest.fixture(name="set_addon_options") +def set_addon_options_fixture() -> Generator[AsyncMock]: + """Mock set add-on options.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_set_addon_options + + yield from mock_set_addon_options() + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture() -> Generator[AsyncMock]: + """Mock uninstall add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_uninstall_addon + + yield from mock_uninstall_addon() + + +@pytest.fixture(name="create_backup") +def create_backup_fixture() -> Generator[AsyncMock]: + """Mock create backup.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_create_backup + + yield from mock_create_backup() + + +@pytest.fixture(name="update_addon") +def update_addon_fixture() -> Generator[AsyncMock]: + """Mock update add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_update_addon + + yield from mock_update_addon() diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py index f9a77628bbe..f39e65dcb1e 100644 --- a/tests/components/hassio/common.py +++ b/tests/components/hassio/common.py @@ -3,11 +3,20 @@ from __future__ import annotations from collections.abc import Generator +import logging from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch +from homeassistant.components.hassio.addon_manager import AddonManager from homeassistant.core import HomeAssistant +LOGGER = 
logging.getLogger(__name__) + + +def mock_addon_manager(hass: HomeAssistant) -> AddonManager: + """Return an AddonManager instance.""" + return AddonManager(hass, LOGGER, "Test", "test_addon") + def mock_discovery_info() -> Any: """Return the discovery info from the supervisor.""" @@ -72,7 +81,7 @@ def mock_addon_installed( "version": "1.0.0", } addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-matter-server" + addon_info.return_value["hostname"] = "core-test-addon" addon_info.return_value["state"] = "stopped" addon_info.return_value["version"] = "1.0.0" return addon_info @@ -123,3 +132,51 @@ def mock_start_addon() -> Generator[AsyncMock]: "homeassistant.components.hassio.addon_manager.async_start_addon" ) as start_addon: yield start_addon + + +def mock_stop_addon() -> Generator[AsyncMock]: + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +def mock_restart_addon() -> Generator[AsyncMock]: + """Mock restart add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_restart_addon" + ) as restart_addon: + yield restart_addon + + +def mock_uninstall_addon() -> Generator[AsyncMock]: + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield uninstall_addon + + +def mock_set_addon_options() -> Generator[AsyncMock]: + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options" + ) as set_options: + yield set_options + + +def mock_create_backup() -> Generator[AsyncMock]: + """Mock create backup.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_create_backup" + ) as create_backup: + yield create_backup + + +def mock_update_addon() -> Generator[AsyncMock]: + """Mock update add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_update_addon" + ) as update_addon: + yield update_addon diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 6a20c6eec88..af7a3551684 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -3,10 +3,8 @@ from __future__ import annotations import asyncio -from collections.abc import Generator -import logging from typing import Any -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, call import pytest @@ -19,154 +17,6 @@ from homeassistant.components.hassio.addon_manager import ( from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.core import HomeAssistant -LOGGER = logging.getLogger(__name__) - - -@pytest.fixture(name="addon_manager") -def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: - """Return an AddonManager instance.""" - return AddonManager(hass, LOGGER, "Test", "test_addon") - - -@pytest.fixture(name="addon_not_installed") -def addon_not_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": 
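The fixture consolidation above reduces to one reusable shape: a plain generator helper that patches an async function, plus a thin pytest fixture that re-exports it with `yield from`. A minimal standalone sketch of that shape follows; pytest is assumed to be installed, and asyncio.sleep is only a stand-in for the Supervisor add-on helper that the real code patches.

from __future__ import annotations

from collections.abc import Generator
from unittest.mock import AsyncMock, patch

import pytest


def mock_start_helper() -> Generator[AsyncMock]:
    """Plain helper that patches an async callable and yields the mock."""
    # patch() returns an AsyncMock automatically because asyncio.sleep is a
    # coroutine function, matching the AsyncMock type hints used above.
    with patch("asyncio.sleep") as start_helper:
        yield start_helper


@pytest.fixture(name="start_helper")
def start_helper_fixture() -> Generator[AsyncMock]:
    """Re-export the shared helper as a fixture any test package can reuse."""
    yield from mock_start_helper()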
"stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-test-addon" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="get_addon_discovery_info") -def get_addon_discovery_info_fixture() -> Generator[AsyncMock]: - """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info" - ) as get_addon_discovery_info: - yield get_addon_discovery_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": False, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture() -> Generator[AsyncMock]: - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon") -def install_addon_fixture() -> Generator[AsyncMock]: - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon" - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture() -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - -@pytest.fixture(name="restart_addon") -def restart_addon_fixture() -> Generator[AsyncMock]: - """Mock restart add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_restart_addon" - ) as restart_addon: - yield restart_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock create backup.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - -@pytest.fixture(name="update_addon") -def mock_update_addon() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon - async def test_not_installed_raises_exception( addon_manager: AddonManager, From 1d35c745bb2390ae52dc683991906ba57d4a5bba Mon Sep 17 00:00:00 
2001 From: "J. Nick Koston" Date: Thu, 22 Aug 2024 14:00:30 -0500 Subject: [PATCH 0986/1635] Reduce ESPHome entity state write overhead (#124329) --- homeassistant/components/esphome/entity.py | 8 +++----- homeassistant/components/esphome/entry_data.py | 5 ++++- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index 764390c861b..a484a988ea9 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -94,7 +94,6 @@ async def platform_async_setup_entry( """ entry_data = entry.runtime_data entry_data.info[info_type] = {} - entry_data.state.setdefault(state_type, {}) platform = entity_platform.async_get_current_platform() on_static_info_update = functools.partial( async_static_info_updated, @@ -188,6 +187,7 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): ) -> None: """Initialize.""" self._entry_data = entry_data + self._states = cast(dict[int, _StateT], entry_data.state[state_type]) assert entry_data.device_info is not None device_info = entry_data.device_info self._device_info = device_info @@ -265,11 +265,9 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): @callback def _update_state_from_entry_data(self) -> None: """Update state from entry data.""" - state = self._entry_data.state key = self._key - state_type = self._state_type - if has_state := key in state[state_type]: - self._state = cast(_StateT, state[state_type][key]) + if has_state := key in self._states: + self._state = self._states[key] self._has_state = has_state @callback diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index ff6f048eba1..6fc40612c48 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections import defaultdict from collections.abc import Callable, Iterable from dataclasses import dataclass, field from functools import partial @@ -111,7 +112,9 @@ class RuntimeEntryData: title: str client: APIClient store: ESPHomeStorage - state: dict[type[EntityState], dict[int, EntityState]] = field(default_factory=dict) + state: defaultdict[type[EntityState], dict[int, EntityState]] = field( + default_factory=lambda: defaultdict(dict) + ) # When the disconnect callback is called, we mark all states # as stale so we will always dispatch a state update when the # device reconnects. This is the same format as state_subscriptions. 
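The entry_data change above hinges on defaultdict(dict) plus entities caching a direct reference to their per-type state dict. A minimal standalone sketch of that pattern, with SensorState standing in for an ESPHome EntityState subclass (not the real type):

from collections import defaultdict


class SensorState:
    """Illustrative stand-in for an ESPHome EntityState subclass."""

    def __init__(self, key: int, value: float) -> None:
        self.key = key
        self.value = value


# Outer key: state class; inner dict: entity key -> latest state object.
# defaultdict(dict) removes the state.setdefault(state_type, {}) call that
# every platform previously did at setup time.
state: defaultdict[type, dict[int, SensorState]] = defaultdict(dict)

update = SensorState(key=7, value=21.5)
state[SensorState][update.key] = update

# An entity can hold a direct reference to its inner dict, so each state
# read or write is a single lookup instead of two.
sensor_states = state[SensorState]
assert sensor_states[7] is update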
From 281a9f042be23a31d72509f1acd697c837e02db8 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 22 Aug 2024 21:01:48 +0200 Subject: [PATCH 0987/1635] Check minimum amount of integrations for a brand (#124310) --- script/hassfest/brand.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/script/hassfest/brand.py b/script/hassfest/brand.py index fe47d31067a..6139e12393e 100644 --- a/script/hassfest/brand.py +++ b/script/hassfest/brand.py @@ -18,6 +18,8 @@ BRAND_SCHEMA = vol.Schema( } ) +BRAND_EXCEPTIONS = ["u_tec"] + def _validate_brand( brand: Brand, integrations: dict[str, Integration], config: Config @@ -38,10 +40,14 @@ def _validate_brand( f"Domain '{brand.domain}' does not match file name {brand.path.name}", ) - if not brand.integrations and not brand.iot_standards: + if ( + len(brand.integrations) < 2 + and not brand.iot_standards + and brand.domain not in BRAND_EXCEPTIONS + ): config.add_error( "brand", - f"{brand.path.name}: At least one of integrations or " + f"{brand.path.name}: At least two integrations or " "iot_standards must be non-empty", ) From fc1ed7d7cb39964f39577cf61fa71a91edf50317 Mon Sep 17 00:00:00 2001 From: Raman Gupta <7243222+raman325@users.noreply.github.com> Date: Thu, 22 Aug 2024 15:07:21 -0400 Subject: [PATCH 0988/1635] Refactor targets for zwave_js services (#115734) * Let labels be used as targets for zwave_js services * add coverage * Fix test bug and switch from targets to fields * Remove label addition * Remove labels from service descriptions * Remove labels from strings * More changes --- homeassistant/components/zwave_js/helpers.py | 26 ++- homeassistant/components/zwave_js/services.py | 64 ++---- .../components/zwave_js/services.yaml | 193 ++++++++++++++++-- .../components/zwave_js/strings.json | 98 +++++++++ 4 files changed, 295 insertions(+), 86 deletions(-) diff --git a/homeassistant/components/zwave_js/helpers.py b/homeassistant/components/zwave_js/helpers.py index 737b8deff34..5885527e01c 100644 --- a/homeassistant/components/zwave_js/helpers.py +++ b/homeassistant/components/zwave_js/helpers.py @@ -343,20 +343,18 @@ def async_get_nodes_from_area_id( } ) # Add devices in an area that are Z-Wave JS devices - for device in dr.async_entries_for_area(dev_reg, area_id): - if next( - ( - config_entry_id - for config_entry_id in device.config_entries - if cast( - ConfigEntry, - hass.config_entries.async_get_entry(config_entry_id), - ).domain - == DOMAIN - ), - None, - ): - nodes.add(async_get_node_from_device_id(hass, device.id, dev_reg)) + nodes.update( + async_get_node_from_device_id(hass, device.id, dev_reg) + for device in dr.async_entries_for_area(dev_reg, area_id) + if any( + cast( + ConfigEntry, + hass.config_entries.async_get_entry(config_entry_id), + ).domain + == DOMAIN + for config_entry_id in device.config_entries + ) + ) return nodes diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index e5c0bd64781..969a235bb41 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -48,6 +48,12 @@ _LOGGER = logging.getLogger(__name__) type _NodeOrEndpointType = ZwaveNode | Endpoint +TARGET_VALIDATORS = { + vol.Optional(ATTR_AREA_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, +} + def parameter_name_does_not_need_bitmask( val: dict[str, int | str | list[str]], @@ -261,13 +267,7 @@ class 
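The helpers.py rewrite above swaps an explicit loop with a next(...) sentinel for set.update() over a generator guarded by any(). A standalone sketch of that idiom, using simplified dicts in place of the real device registry entries and config entries:

devices = [
    {"id": "dev1", "domains": ["zwave_js", "mqtt"]},
    {"id": "dev2", "domains": ["hue"]},
    {"id": "dev3", "domains": ["zwave_js"]},
]

nodes: set[str] = set()
# One pass: add every device that has at least one zwave_js config entry.
nodes.update(
    device["id"]
    for device in devices
    if any(domain == "zwave_js" for domain in device["domains"])
)
assert nodes == {"dev1", "dev3"}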
ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Optional(const.ATTR_ENDPOINT, default=0): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Any( vol.Coerce(int), cv.string @@ -305,13 +305,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Optional(const.ATTR_ENDPOINT, default=0): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any( @@ -356,13 +350,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int), vol.Required(const.ATTR_PROPERTY): vol.Any( vol.Coerce(int), str @@ -391,13 +379,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Optional(const.ATTR_BROADCAST, default=False): cv.boolean, vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int), vol.Required(const.ATTR_PROPERTY): vol.Any( @@ -428,15 +410,7 @@ class ZWaveServices: self.async_ping, schema=vol.Schema( vol.All( - { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, - }, + TARGET_VALIDATORS, cv.has_at_least_one_key( ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID ), @@ -453,13 +427,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Required(const.ATTR_COMMAND_CLASS): vol.All( vol.Coerce(int), vol.Coerce(CommandClass) ), @@ -483,13 +451,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Required(const.ATTR_NOTIFICATION_TYPE): vol.All( vol.Coerce(int), vol.Coerce(NotificationType) ), diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index 81809e3fbeb..1e521ddbe2d 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -89,10 +89,28 @@ set_lock_configuration: boolean: set_config_parameter: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + 
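The **TARGET_VALIDATORS spread above lets every service schema share one block of target fields instead of repeating it. A minimal sketch of that voluptuous pattern, assuming voluptuous is installed; the keys are real, but the validators are simplified stand-ins for Home Assistant's cv.* helpers:

import voluptuous as vol

# Shared target validators defined once and spread into each service schema.
TARGET_VALIDATORS = {
    vol.Optional("area_id"): [str],
    vol.Optional("device_id"): [str],
    vol.Optional("entity_id"): [str],
}

SET_VALUE_SCHEMA = vol.Schema(
    {
        **TARGET_VALIDATORS,
        vol.Required("command_class"): vol.Coerce(int),
        vol.Required("value"): vol.Any(int, str),
    }
)

# A service that only needs targets can reuse the dict directly.
PING_SCHEMA = vol.Schema(TARGET_VALIDATORS)

validated = SET_VALUE_SCHEMA(
    {"entity_id": ["sensor.kitchen"], "command_class": "38", "value": 99}
)
assert validated["command_class"] == 38
assert PING_SCHEMA({"device_id": ["abc123"]}) == {"device_id": ["abc123"]}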
example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true endpoint: example: 1 default: 0 @@ -127,10 +145,28 @@ set_config_parameter: max: 3 bulk_set_partial_config_parameters: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true endpoint: example: 1 default: 0 @@ -169,10 +205,28 @@ refresh_value: boolean: set_value: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true command_class: example: 117 required: true @@ -208,10 +262,28 @@ set_value: boolean: multicast_set_value: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true broadcast: example: true required: false @@ -248,9 +320,28 @@ multicast_set_value: object: ping: - target: - entity: - integration: zwave_js + fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true reset_meter: target: @@ -258,6 +349,30 @@ reset_meter: domain: sensor integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + entity: + - integration: zwave_js + domain: sensor + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + entity: + - integration: zwave_js + domain: sensor + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + domain: sensor + multiple: true meter_type: example: 1 required: false @@ -270,10 +385,28 @@ reset_meter: text: invoke_cc_api: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true command_class: example: 132 required: true @@ -296,10 +429,28 @@ invoke_cc_api: object: refresh_notifications: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: 
"8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true notification_type: example: 1 required: true diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 4bba3e0538c..ca7d5153e6e 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -265,10 +265,22 @@ "bulk_set_partial_config_parameters": { "description": "Allows for bulk setting partial parameters. Useful when multiple partial parameters have to be set at the same time.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, "endpoint": { "description": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "parameter": { "description": "[%key:component::zwave_js::services::set_config_parameter::fields::parameter::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::parameter::name%]" @@ -293,14 +305,26 @@ "invoke_cc_api": { "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { + "area_id": { + "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "name": "Area ID(s)" + }, "command_class": { "description": "The ID of the command class that you want to issue a command to.", "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, + "device_id": { + "description": "The device(s) to target for this service.", + "name": "Device ID(s)" + }, "endpoint": { "description": "The endpoint to call the API on. If an endpoint is specified, that endpoint will be targeted for all nodes associated with the target areas, devices, and/or entities. If an endpoint is not specified, the root endpoint (0) will be targeted for nodes associated with target areas and devices, and the endpoint for the primary value of each entity will be targeted.", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "The entity ID(s) to target for this service.", + "name": "Entity ID(s)" + }, "method_name": { "description": "The name of the API method to call. 
Refer to the Z-Wave JS Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for available methods.", "name": "Method name" @@ -315,6 +339,10 @@ "multicast_set_value": { "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. This action has minimal validation so only use this action if you know what you are doing.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, "broadcast": { "description": "Whether command should be broadcast to all devices on the network.", "name": "Broadcast?" @@ -323,10 +351,18 @@ "description": "[%key:component::zwave_js::services::set_value::fields::command_class::description%]", "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, "endpoint": { "description": "[%key:component::zwave_js::services::set_value::fields::endpoint::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "options": { "description": "[%key:component::zwave_js::services::set_value::fields::options::description%]", "name": "[%key:component::zwave_js::services::set_value::fields::options::name%]" @@ -348,11 +384,37 @@ }, "ping": { "description": "Forces Z-Wave JS to try to reach a node. This can be used to update the status of the node in Z-Wave JS when you think it doesn't accurately reflect reality, e.g. 
reviving a failed/dead node or marking the node as asleep.", + "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + } + }, "name": "Ping a node" }, "refresh_notifications": { "description": "Refreshes notifications on a node based on notification type and optionally notification event.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "notification_event": { "description": "The Notification Event number as defined in the Z-Wave specs.", "name": "Notification Event" @@ -381,6 +443,18 @@ "reset_meter": { "description": "Resets the meters on a node.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "meter_type": { "description": "The type of meter to reset. Not all meters support the ability to pick a meter type to reset.", "name": "Meter type" @@ -395,14 +469,26 @@ "set_config_parameter": { "description": "Changes the configuration parameters of your Z-Wave devices.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, "bitmask": { "description": "Target a specific bitmask (see the documentation for more information). 
Cannot be combined with value_size or value_format.", "name": "Bitmask" }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, "endpoint": { "description": "The configuration parameter's endpoint.", "name": "Endpoint" }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "parameter": { "description": "The name (or ID) of the configuration parameter you want to configure.", "name": "Parameter" @@ -477,14 +563,26 @@ "set_value": { "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { + "area_id": { + "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "name": "Area ID(s)" + }, "command_class": { "description": "The ID of the command class for the value.", "name": "Command class" }, + "device_id": { + "description": "The device(s) to target for this service.", + "name": "Device ID(s)" + }, "endpoint": { "description": "The endpoint for the value.", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "The entity ID(s) to target for this service.", + "name": "Entity ID(s)" + }, "options": { "description": "Set value options map. Refer to the Z-Wave JS documentation for more information on what options can be set.", "name": "Options" From 2533bde27a87b9eee13378e3dc2bba2435f95323 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 22 Aug 2024 14:46:54 -0500 Subject: [PATCH 0989/1635] Bump yalexs to 8.1.4 (#124425) changelog: https://github.com/bdraco/yalexs/compare/v8.1.2...v8.1.4 --- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 5d7b253e952..7e73e55e6ba 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.1.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.1.4", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 78215689479..4caea33f8e8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2968,7 +2968,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.1.2 +yalexs==8.1.4 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index afbbfb396ac..4deb4a075b5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2348,7 +2348,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.1.2 +yalexs==8.1.4 # homeassistant.components.yeelight yeelight==0.7.14 From 61ac4c7af7a3495e985e7ac75cde16346476d97d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 22 Aug 2024 14:50:15 -0500 Subject: [PATCH 0990/1635] Use identity checks for ESPHome Enums (#124334) Enums are singletons and should use is to compare. The valve platform was updated but cover and lock were missed. --- homeassistant/components/esphome/cover.py | 4 ++-- homeassistant/components/esphome/lock.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/esphome/cover.py b/homeassistant/components/esphome/cover.py index 19ce4cbf55a..83c749f89ca 100644 --- a/homeassistant/components/esphome/cover.py +++ b/homeassistant/components/esphome/cover.py @@ -61,13 +61,13 @@ class EsphomeCover(EsphomeEntity[CoverInfo, CoverState], CoverEntity): @esphome_state_property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state.current_operation == CoverOperation.IS_OPENING + return self._state.current_operation is CoverOperation.IS_OPENING @property @esphome_state_property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state.current_operation == CoverOperation.IS_CLOSING + return self._state.current_operation is CoverOperation.IS_CLOSING @property @esphome_state_property diff --git a/homeassistant/components/esphome/lock.py b/homeassistant/components/esphome/lock.py index 4caa1f68612..15a402ccb91 100644 --- a/homeassistant/components/esphome/lock.py +++ b/homeassistant/components/esphome/lock.py @@ -40,25 +40,25 @@ class EsphomeLock(EsphomeEntity[LockInfo, LockEntityState], LockEntity): @esphome_state_property def is_locked(self) -> bool | None: """Return true if the lock is locked.""" - return self._state.state == LockState.LOCKED + return self._state.state is LockState.LOCKED @property @esphome_state_property def is_locking(self) -> bool | None: """Return true if the lock is locking.""" - return self._state.state == LockState.LOCKING + return self._state.state is LockState.LOCKING @property @esphome_state_property def is_unlocking(self) -> bool | None: """Return true if the lock is unlocking.""" - return self._state.state == LockState.UNLOCKING + return self._state.state is LockState.UNLOCKING @property @esphome_state_property def is_jammed(self) -> bool | None: """Return true if the lock is jammed (incomplete locking).""" - return self._state.state == LockState.JAMMED + return self._state.state is LockState.JAMMED @convert_api_error_ha_error async def async_lock(self, **kwargs: Any) -> None: From 7579ebc02a6769330121b7bf9d10dccb976a1db1 Mon Sep 17 00:00:00 2001 From: "J. 
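The commit above relies on Enum members being singletons, which makes `is` both correct and cheaper than `==`. A quick standalone illustration with a stand-in enum (not the real ESPHome API type):

from enum import Enum


class CoverOperation(Enum):
    """Stand-in for the ESPHome API's CoverOperation enum."""

    IDLE = 0
    IS_OPENING = 1
    IS_CLOSING = 2


current = CoverOperation.IS_OPENING

# Members are singletons, so identity comparison is safe and skips __eq__.
assert current is CoverOperation.IS_OPENING
assert current is not CoverOperation.IS_CLOSING

# Equality also works, but dispatches through __eq__ and can be fooled by a
# non-member operand with a custom __eq__; identity cannot.
assert current == CoverOperation.IS_OPENING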
Nick Koston" Date: Thu, 22 Aug 2024 21:14:31 -0500 Subject: [PATCH 0991/1635] Replace statistics _get_unit_class function with a dict lookup (#124452) --- homeassistant/components/recorder/statistics.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index aeeb30816d7..6532935ae0e 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -148,6 +148,12 @@ STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { **{unit: VolumeFlowRateConverter for unit in VolumeFlowRateConverter.VALID_UNITS}, } + +UNIT_CLASSES = { + unit: converter.UNIT_CLASS + for unit, converter in STATISTIC_UNIT_TO_UNIT_CONVERTER.items() +} + DATA_SHORT_TERM_STATISTICS_RUN_CACHE = "recorder_short_term_statistics_run_cache" @@ -211,13 +217,6 @@ class StatisticsRow(BaseStatisticsRow, total=False): change: float | None -def _get_unit_class(unit: str | None) -> str | None: - """Get corresponding unit class from from the statistics unit.""" - if converter := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(unit): - return converter.UNIT_CLASS - return None - - def get_display_unit( hass: HomeAssistant, statistic_id: str, @@ -807,7 +806,7 @@ def _statistic_by_id_from_metadata( "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], - "unit_class": _get_unit_class(meta["unit_of_measurement"]), + "unit_class": UNIT_CLASSES.get(meta["unit_of_measurement"]), "unit_of_measurement": meta["unit_of_measurement"], } for _, meta in metadata.values() @@ -881,7 +880,7 @@ def list_statistic_ids( "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], - "unit_class": _get_unit_class(meta["unit_of_measurement"]), + "unit_class": UNIT_CLASSES.get(meta["unit_of_measurement"]), "unit_of_measurement": meta["unit_of_measurement"], } From 7c6e3fe9c48d88cb9697cfe3a52cd7e34a31a211 Mon Sep 17 00:00:00 2001 From: Raman Gupta <7243222+raman325@users.noreply.github.com> Date: Fri, 23 Aug 2024 02:38:08 -0400 Subject: [PATCH 0992/1635] Fix `zwave_js` `services.yaml` schema (#124455) Fix zwave_Js services.yaml schema --- homeassistant/components/zwave_js/services.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index 1e521ddbe2d..f5063fdfd93 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -344,10 +344,6 @@ ping: multiple: true reset_meter: - target: - entity: - domain: sensor - integration: zwave_js fields: area_id: example: living_room From 4e94ce0cc7f76cb3c0eec2c68819b199878127fb Mon Sep 17 00:00:00 2001 From: Willem-Jan van Rootselaar Date: Fri, 23 Aug 2024 08:42:36 +0200 Subject: [PATCH 0993/1635] Refactor bsblan coordinator (#124308) * chore: Refactor BSBLanUpdateCoordinator to improve code readability and maintainability * feat: Add BSBLan integration models This commit adds the models for the BSB-Lan integration. It includes a dataclass for the BSBLanCoordinatorData, which stores the state and sensor information. 
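The statistics change above trades a per-call helper function for a module-level dict built once with a comprehension. A standalone sketch with simplified converter classes in place of the recorder's real unit converters:

class _EnergyConverter:
    UNIT_CLASS = "energy"
    VALID_UNITS = {"Wh", "kWh", "MWh"}


class _PowerConverter:
    UNIT_CLASS = "power"
    VALID_UNITS = {"W", "kW"}


# Unit -> converter map, mirroring STATISTIC_UNIT_TO_UNIT_CONVERTER above.
STATISTIC_UNIT_TO_UNIT_CONVERTER = {
    unit: converter
    for converter in (_EnergyConverter, _PowerConverter)
    for unit in converter.VALID_UNITS
}

# Derived lookup table: a single dict.get() at call sites replaces the old
# _get_unit_class() helper.
UNIT_CLASSES = {
    unit: converter.UNIT_CLASS
    for unit, converter in STATISTIC_UNIT_TO_UNIT_CONVERTER.items()
}

assert UNIT_CLASSES.get("kWh") == "energy"
assert UNIT_CLASSES.get("unknown-unit") is None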
* refactor: Update BSBLANClimate class to use DataUpdateCoordinator without specifying the State type * chore: Remove unused Sensor import in BSBLan models * feat: Refactor BSBLanEntity to use CoordinatorEntity The BSBLanEntity class has been refactored to inherit from the CoordinatorEntity class, which provides better integration with the update coordinator. This change improves code readability and maintainability. * refactor: Remove unused config_entry variable in BSBLanUpdateCoordinator * refactor: Update BSBLANClimate class to use DataUpdateCoordinator Refactor the BSBLANClimate class to use the Coordinator of the entity * refactor: Update tests to use the new structure * fix coverage it should be the same as before * refactor: moved dataclass BSBLanCoordinatorData * use the data class inside init * refactor: Remove unused config_entry variable in BSBLanUpdateCoordinator * refactor: use BSBLanData from init * remove entry data from diagnostics * fix: add random interval back * refactor: Simplify coordinator_data assignment in async_get_config_entry_diagnostics * revert back to original except dataclass import * revert: Add MAC address back to device info in BSBLanEntity --- homeassistant/components/bsblan/__init__.py | 4 +- homeassistant/components/bsblan/climate.py | 59 +++++++------------ .../components/bsblan/coordinator.py | 49 ++++++++------- .../components/bsblan/diagnostics.py | 7 ++- homeassistant/components/bsblan/entity.py | 42 ++++++------- tests/components/bsblan/conftest.py | 8 ++- tests/components/bsblan/fixtures/static.json | 20 +++++++ tests/components/bsblan/test_diagnostics.py | 7 +-- 8 files changed, 101 insertions(+), 95 deletions(-) create mode 100644 tests/components/bsblan/fixtures/static.json diff --git a/homeassistant/components/bsblan/__init__.py b/homeassistant/components/bsblan/__init__.py index 113a582f403..5ce90db5043 100644 --- a/homeassistant/components/bsblan/__init__.py +++ b/homeassistant/components/bsblan/__init__.py @@ -22,7 +22,7 @@ PLATFORMS = [Platform.CLIMATE] @dataclasses.dataclass -class HomeAssistantBSBLANData: +class BSBLanData: """BSBLan data stored in the Home Assistant data object.""" coordinator: BSBLanUpdateCoordinator @@ -57,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: info = await bsblan.info() static = await bsblan.static_values() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantBSBLANData( + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BSBLanData( client=bsblan, coordinator=coordinator, device=device, diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index 4d6514251cb..ae7116143df 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from bsblan import BSBLAN, BSBLANError, Device, Info, State, StaticState +from bsblan import BSBLANError from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -21,15 +21,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) from homeassistant.util.enum import try_parse_enum -from . import HomeAssistantBSBLANData +from . 
import BSBLanData from .const import ATTR_TARGET_TEMPERATURE, DOMAIN -from .entity import BSBLANEntity +from .entity import BSBLanEntity PARALLEL_UPDATES = 1 @@ -51,24 +47,17 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up BSBLAN device based on a config entry.""" - data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id] + data: BSBLanData = hass.data[DOMAIN][entry.entry_id] async_add_entities( [ BSBLANClimate( - data.coordinator, - data.client, - data.device, - data.info, - data.static, - entry, + data, ) ] ) -class BSBLANClimate( - BSBLANEntity, CoordinatorEntity[DataUpdateCoordinator[State]], ClimateEntity -): +class BSBLANClimate(BSBLanEntity, ClimateEntity): """Defines a BSBLAN climate device.""" _attr_has_entity_name = True @@ -80,30 +69,22 @@ class BSBLANClimate( | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _attr_preset_modes = PRESET_MODES - # Determine hvac modes + _attr_preset_modes = PRESET_MODES _attr_hvac_modes = HVAC_MODES _enable_turn_on_off_backwards_compatibility = False def __init__( self, - coordinator: DataUpdateCoordinator[State], - client: BSBLAN, - device: Device, - info: Info, - static: StaticState, - entry: ConfigEntry, + data: BSBLanData, ) -> None: """Initialize BSBLAN climate device.""" - super().__init__(client, device, info, static, entry) - CoordinatorEntity.__init__(self, coordinator) - self._attr_unique_id = f"{format_mac(device.MAC)}-climate" + super().__init__(data.coordinator, data) + self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate" - self._attr_min_temp = float(static.min_temp.value) - self._attr_max_temp = float(static.max_temp.value) - # check if self.coordinator.data.current_temperature.unit is "°C" or "°C" - if static.min_temp.unit in ("°C", "°C"): + self._attr_min_temp = float(data.static.min_temp.value) + self._attr_max_temp = float(data.static.max_temp.value) + if data.static.min_temp.unit in ("°C", "°C"): self._attr_temperature_unit = UnitOfTemperature.CELSIUS else: self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT @@ -111,30 +92,30 @@ class BSBLANClimate( @property def current_temperature(self) -> float | None: """Return the current temperature.""" - if self.coordinator.data.current_temperature.value == "---": + if self.coordinator.data.state.current_temperature.value == "---": # device returns no current temperature return None - return float(self.coordinator.data.current_temperature.value) + return float(self.coordinator.data.state.current_temperature.value) @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - return float(self.coordinator.data.target_temperature.value) + return float(self.coordinator.data.state.target_temperature.value) @property def hvac_mode(self) -> HVACMode | None: """Return hvac operation ie. 
heat, cool mode.""" - if self.coordinator.data.hvac_mode.value == PRESET_ECO: + if self.coordinator.data.state.hvac_mode.value == PRESET_ECO: return HVACMode.AUTO - return try_parse_enum(HVACMode, self.coordinator.data.hvac_mode.value) + return try_parse_enum(HVACMode, self.coordinator.data.state.hvac_mode.value) @property def preset_mode(self) -> str | None: """Return the current preset mode.""" if ( self.hvac_mode == HVACMode.AUTO - and self.coordinator.data.hvac_mode.value == PRESET_ECO + and self.coordinator.data.state.hvac_mode.value == PRESET_ECO ): return PRESET_ECO return PRESET_NONE @@ -173,7 +154,7 @@ class BSBLANClimate( else: data[ATTR_HVAC_MODE] = kwargs[ATTR_PRESET_MODE] try: - await self.client.thermostat(**data) + await self.coordinator.client.thermostat(**data) except BSBLANError as err: raise HomeAssistantError( "An error occurred while updating the BSBLAN device", diff --git a/homeassistant/components/bsblan/coordinator.py b/homeassistant/components/bsblan/coordinator.py index 864daacc562..3320c0f7500 100644 --- a/homeassistant/components/bsblan/coordinator.py +++ b/homeassistant/components/bsblan/coordinator.py @@ -1,12 +1,10 @@ """DataUpdateCoordinator for the BSB-Lan integration.""" -from __future__ import annotations - +from dataclasses import dataclass from datetime import timedelta from random import randint -from bsblan import BSBLAN, BSBLANConnectionError -from bsblan.models import State +from bsblan import BSBLAN, BSBLANConnectionError, State from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -16,7 +14,14 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER, SCAN_INTERVAL -class BSBLanUpdateCoordinator(DataUpdateCoordinator[State]): +@dataclass +class BSBLanCoordinatorData: + """BSBLan data stored in the Home Assistant data object.""" + + state: State + + +class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): """The BSB-Lan update coordinator.""" config_entry: ConfigEntry @@ -28,30 +33,32 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[State]): client: BSBLAN, ) -> None: """Initialize the BSB-Lan coordinator.""" - - self.client = client - super().__init__( hass, - LOGGER, + logger=LOGGER, name=f"{DOMAIN}_{config_entry.data[CONF_HOST]}", - # use the default scan interval and add a random number of seconds to avoid timeouts when - # the BSB-Lan device is already/still busy retrieving data, - # e.g. for MQTT or internal logging. - update_interval=SCAN_INTERVAL + timedelta(seconds=randint(1, 8)), + update_interval=self._get_update_interval(), ) + self.client = client - async def _async_update_data(self) -> State: - """Get state from BSB-Lan device.""" + def _get_update_interval(self) -> timedelta: + """Get the update interval with a random offset. - # use the default scan interval and add a random number of seconds to avoid timeouts when - # the BSB-Lan device is already/still busy retrieving data, e.g. for MQTT or internal logging. - self.update_interval = SCAN_INTERVAL + timedelta(seconds=randint(1, 8)) + Use the default scan interval and add a random number of seconds to avoid timeouts when + the BSB-Lan device is already/still busy retrieving data, + e.g. for MQTT or internal logging. 
+ """ + return SCAN_INTERVAL + timedelta(seconds=randint(1, 8)) + async def _async_update_data(self) -> BSBLanCoordinatorData: + """Get state and sensor data from BSB-Lan device.""" try: - return await self.client.state() + state = await self.client.state() except BSBLANConnectionError as err: + host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown" raise UpdateFailed( - f"Error while establishing connection with " - f"BSB-Lan device at {self.config_entry.data[CONF_HOST]}" + f"Error while establishing connection with BSB-Lan device at {host}" ) from err + + self.update_interval = self._get_update_interval() + return BSBLanCoordinatorData(state=state) diff --git a/homeassistant/components/bsblan/diagnostics.py b/homeassistant/components/bsblan/diagnostics.py index a24082fd698..3b42d47e1d3 100644 --- a/homeassistant/components/bsblan/diagnostics.py +++ b/homeassistant/components/bsblan/diagnostics.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import HomeAssistantBSBLANData +from . import BSBLanData from .const import DOMAIN @@ -15,9 +15,10 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id] + data: BSBLanData = hass.data[DOMAIN][entry.entry_id] + return { "info": data.info.to_dict(), "device": data.device.to_dict(), - "state": data.coordinator.data.to_dict(), + "state": data.coordinator.data.state.to_dict(), } diff --git a/homeassistant/components/bsblan/entity.py b/homeassistant/components/bsblan/entity.py index a69c4d2217e..0c507938794 100644 --- a/homeassistant/components/bsblan/entity.py +++ b/homeassistant/components/bsblan/entity.py @@ -1,41 +1,35 @@ -"""Base entity for the BSBLAN integration.""" +"""BSBLan base entity.""" from __future__ import annotations -from bsblan import BSBLAN, Device, Info, StaticState - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, DeviceInfo, format_mac, ) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . 
import BSBLanData from .const import DOMAIN +from .coordinator import BSBLanUpdateCoordinator -class BSBLANEntity(Entity): - """Defines a BSBLAN entity.""" +class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]): + """Defines a base BSBLan entity.""" - def __init__( - self, - client: BSBLAN, - device: Device, - info: Info, - static: StaticState, - entry: ConfigEntry, - ) -> None: - """Initialize an BSBLAN entity.""" - self.client = client + _attr_has_entity_name = True + def __init__(self, coordinator: BSBLanUpdateCoordinator, data: BSBLanData) -> None: + """Initialize BSBLan entity.""" + super().__init__(coordinator, data) + host = self.coordinator.config_entry.data["host"] + mac = self.coordinator.config_entry.data["mac"] self._attr_device_info = DeviceInfo( - connections={(CONNECTION_NETWORK_MAC, format_mac(device.MAC))}, - identifiers={(DOMAIN, format_mac(device.MAC))}, + identifiers={(DOMAIN, data.device.MAC)}, + connections={(CONNECTION_NETWORK_MAC, format_mac(mac))}, + name=data.device.name, manufacturer="BSBLAN Inc.", - model=info.device_identification.value, - name=device.name, - sw_version=f"{device.version})", - configuration_url=f"http://{entry.data[CONF_HOST]}", + model=data.info.device_identification.value, + sw_version=data.device.version, + configuration_url=f"http://{host}", ) diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 07ca8b648f3..13d4017d7c8 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from bsblan import Device, Info, State +from bsblan import Device, Info, State, StaticState import pytest from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN @@ -42,7 +42,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture def mock_bsblan() -> Generator[MagicMock]: """Return a mocked BSBLAN client.""" - with ( patch("homeassistant.components.bsblan.BSBLAN", autospec=True) as bsblan_mock, patch("homeassistant.components.bsblan.config_flow.BSBLAN", new=bsblan_mock), @@ -53,6 +52,11 @@ def mock_bsblan() -> Generator[MagicMock]: load_fixture("device.json", DOMAIN) ) bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) + + bsblan.static_values.return_value = StaticState.from_json( + load_fixture("static.json", DOMAIN) + ) + yield bsblan diff --git a/tests/components/bsblan/fixtures/static.json b/tests/components/bsblan/fixtures/static.json new file mode 100644 index 00000000000..8c7abc3397b --- /dev/null +++ b/tests/components/bsblan/fixtures/static.json @@ -0,0 +1,20 @@ +{ + "min_temp": { + "name": "Room temp frost protection setpoint", + "error": 0, + "value": "8.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "max_temp": { + "name": "Summer/winter changeover temp heat circuit 1", + "error": 0, + "value": "20.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + } +} diff --git a/tests/components/bsblan/test_diagnostics.py b/tests/components/bsblan/test_diagnostics.py index 316296df78a..8939456c2ac 100644 --- a/tests/components/bsblan/test_diagnostics.py +++ b/tests/components/bsblan/test_diagnostics.py @@ -16,8 +16,7 @@ async def test_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - - assert ( - await get_diagnostics_for_config_entry(hass, hass_client, init_integration) - == snapshot + diagnostics_data = await get_diagnostics_for_config_entry( + hass, 
hass_client, init_integration ) + assert diagnostics_data == snapshot From ab064a7f3623fffb19b1347b7b5a38b756d31f58 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 23 Aug 2024 12:34:58 +0200 Subject: [PATCH 0994/1635] Reolink add 100% coverage of number platform (#124465) * Add 100% number test coverage * review comments * fix styling * re-add AsyncMock for chime --- tests/components/reolink/test_config_flow.py | 2 +- tests/components/reolink/test_host.py | 4 +- tests/components/reolink/test_init.py | 10 +- tests/components/reolink/test_media_source.py | 2 +- tests/components/reolink/test_number.py | 111 ++++++++++++++++++ tests/components/reolink/test_select.py | 12 +- 6 files changed, 121 insertions(+), 20 deletions(-) create mode 100644 tests/components/reolink/test_number.py diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 55dd0d4fea9..716e66b8d6c 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -471,7 +471,7 @@ async def test_dhcp_ip_update( if not last_update_success: # ensure the last_update_succes is False for the device_coordinator. - reolink_connect.get_states = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.get_states.side_effect = ReolinkError("Test error") freezer.tick(DEVICE_UPDATE_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done() diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py index 690bfd035f8..c4096a4582f 100644 --- a/tests/components/reolink/test_host.py +++ b/tests/components/reolink/test_host.py @@ -59,9 +59,7 @@ async def test_webhook_callback( # test webhook callback single channel with error in event callback signal_ch.reset_mock() - reolink_connect.ONVIF_event_callback = AsyncMock( - side_effect=Exception("Test error") - ) + reolink_connect.ONVIF_event_callback.side_effect = Exception("Test error") await client.post(f"/api/webhook/{webhook_id}", data="test_data") signal_ch.assert_not_called() diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index f5cd56a05d2..fd54f298966 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -108,9 +108,7 @@ async def test_firmware_error_twice( config_entry: MockConfigEntry, ) -> None: """Test when the firmware update fails 2 times.""" - reolink_connect.check_new_firmware = AsyncMock( - side_effect=ReolinkError("Test error") - ) + reolink_connect.check_new_firmware.side_effect = ReolinkError("Test error") with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -140,9 +138,7 @@ async def test_credential_error_three( await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED - reolink_connect.get_states = AsyncMock( - side_effect=CredentialsInvalidError("Test error") - ) + reolink_connect.get_states.side_effect = CredentialsInvalidError("Test error") issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): @@ -549,7 +545,7 @@ async def test_port_repair_issue( issue_registry: ir.IssueRegistry, ) -> None: """Test repairs issue is raised when auto enable of ports fails.""" - reolink_connect.set_net_port = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.set_net_port.side_effect = ReolinkError("Test error") 
reolink_connect.onvif_enabled = False reolink_connect.rtsp_enabled = False reolink_connect.rtmp_enabled = False diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index b09c267fcfd..cbc9bf51705 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -318,7 +318,7 @@ async def test_browsing_not_loaded( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - reolink_connect.get_host_data = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.get_host_data.side_effect = ReolinkError("Test error") config_entry2 = MockConfigEntry( domain=const.DOMAIN, unique_id=format_mac(TEST_MAC2), diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py new file mode 100644 index 00000000000..e9abcec946c --- /dev/null +++ b/tests/components/reolink/test_number.py @@ -0,0 +1,111 @@ +"""Test the Reolink number platform.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test number entity with volume.""" + reolink_connect.volume.return_value = 80 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" + + assert hass.states.get(entity_id).state == "80" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + reolink_connect.set_volume.assert_called_with(0, volume=50) + + reolink_connect.set_volume.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.set_volume.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + +async def test_chime_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, +) -> None: + """Test number entity of a chime with chime volume.""" + test_chime.volume = 3 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.test_chime_volume" + + assert hass.states.get(entity_id).state == "3" + + test_chime.set_option = AsyncMock() + await 
hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 2}, + blocking=True, + ) + test_chime.set_option.assert_called_with(volume=2) + + test_chime.set_option.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, + blocking=True, + ) + + test_chime.set_option.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, + blocking=True, + ) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 5536e85afb9..082a543e392 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -41,7 +41,6 @@ async def test_floodlight_mode_select( entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" assert hass.states.get(entity_id).state == "auto" - reolink_connect.set_whiteled = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, @@ -50,7 +49,7 @@ async def test_floodlight_mode_select( ) reolink_connect.set_whiteled.assert_called_once() - reolink_connect.set_whiteled = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") with pytest.raises(HomeAssistantError): await hass.services.async_call( SELECT_DOMAIN, @@ -59,9 +58,7 @@ async def test_floodlight_mode_select( blocking=True, ) - reolink_connect.set_whiteled = AsyncMock( - side_effect=InvalidParameterError("Test error") - ) + reolink_connect.set_whiteled.side_effect = InvalidParameterError("Test error") with pytest.raises(ServiceValidationError): await hass.services.async_call( SELECT_DOMAIN, @@ -94,7 +91,6 @@ async def test_play_quick_reply_message( entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" assert hass.states.get(entity_id).state == STATE_UNKNOWN - reolink_connect.play_quick_reply = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, @@ -131,7 +127,7 @@ async def test_chime_select( ) test_chime.set_tone.assert_called_once() - test_chime.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) + test_chime.set_tone.side_effect = ReolinkError("Test error") with pytest.raises(HomeAssistantError): await hass.services.async_call( SELECT_DOMAIN, @@ -140,7 +136,7 @@ async def test_chime_select( blocking=True, ) - test_chime.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) + test_chime.set_tone.side_effect = InvalidParameterError("Test error") with pytest.raises(ServiceValidationError): await hass.services.async_call( SELECT_DOMAIN, From f8e65c1161ce383f5bf82e1c6f50c0a230dcb0dd Mon Sep 17 00:00:00 2001 From: tronikos Date: Fri, 23 Aug 2024 03:43:16 -0700 Subject: [PATCH 0995/1635] Bump opower to 0.7.0 (#124475) --- homeassistant/components/opower/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index b869356cdf9..02b98cfaf00 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", 
"loggers": ["opower"], - "requirements": ["opower==0.6.0"] + "requirements": ["opower==0.7.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4caea33f8e8..79027747563 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1523,7 +1523,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.6.0 +opower==0.7.0 # homeassistant.components.oralb oralb-ble==0.17.6 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4deb4a075b5..f4fabe51b14 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1250,7 +1250,7 @@ openhomedevice==2.2.0 openwebifpy==4.2.7 # homeassistant.components.opower -opower==0.6.0 +opower==0.7.0 # homeassistant.components.oralb oralb-ble==0.17.6 From 611723e44bd22bbf43226897fc4632af42f69bf0 Mon Sep 17 00:00:00 2001 From: Ino Dekker Date: Fri, 23 Aug 2024 13:43:17 +0200 Subject: [PATCH 0996/1635] Bump aiohue to version 4.7.3 (#124436) --- homeassistant/components/hue/manifest.json | 2 +- homeassistant/components/hue/v2/hue_event.py | 6 +++--- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/hue/fixtures/v2_resources.json | 12 +++++++++--- 5 files changed, 15 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/hue/manifest.json b/homeassistant/components/hue/manifest.json index 71aabd4c204..dbd9b511977 100644 --- a/homeassistant/components/hue/manifest.json +++ b/homeassistant/components/hue/manifest.json @@ -11,6 +11,6 @@ "iot_class": "local_push", "loggers": ["aiohue"], "quality_scale": "platinum", - "requirements": ["aiohue==4.7.2"], + "requirements": ["aiohue==4.7.3"], "zeroconf": ["_hue._tcp.local."] } diff --git a/homeassistant/components/hue/v2/hue_event.py b/homeassistant/components/hue/v2/hue_event.py index b286a11aade..2eace5139af 100644 --- a/homeassistant/components/hue/v2/hue_event.py +++ b/homeassistant/components/hue/v2/hue_event.py @@ -80,9 +80,9 @@ async def async_setup_hue_events(bridge: HueBridge): CONF_DEVICE_ID: device.id, # type: ignore[union-attr] CONF_UNIQUE_ID: hue_resource.id, CONF_TYPE: hue_resource.relative_rotary.rotary_report.action.value, - CONF_SUBTYPE: hue_resource.relative_rotary.last_event.rotation.direction.value, - CONF_DURATION: hue_resource.relative_rotary.last_event.rotation.duration, - CONF_STEPS: hue_resource.relative_rotary.last_event.rotation.steps, + CONF_SUBTYPE: hue_resource.relative_rotary.rotary_report.rotation.direction.value, + CONF_DURATION: hue_resource.relative_rotary.rotary_report.rotation.duration, + CONF_STEPS: hue_resource.relative_rotary.rotary_report.rotation.steps, } hass.bus.async_fire(ATTR_HUE_EVENT, data) diff --git a/requirements_all.txt b/requirements_all.txt index 79027747563..0a9cb638cae 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -258,7 +258,7 @@ aioharmony==0.2.10 aiohomekit==3.2.3 # homeassistant.components.hue -aiohue==4.7.2 +aiohue==4.7.3 # homeassistant.components.imap aioimaplib==1.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f4fabe51b14..361de6746c1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -243,7 +243,7 @@ aioharmony==0.2.10 aiohomekit==3.2.3 # homeassistant.components.hue -aiohue==4.7.2 +aiohue==4.7.3 # homeassistant.components.imap aioimaplib==1.1.0 diff --git a/tests/components/hue/fixtures/v2_resources.json b/tests/components/hue/fixtures/v2_resources.json index 980086d0988..3d718f24c50 100644 --- a/tests/components/hue/fixtures/v2_resources.json +++ 
b/tests/components/hue/fixtures/v2_resources.json @@ -1288,7 +1288,9 @@ }, { "button": { - "last_event": "short_release" + "button_report": { + "event": "short_release" + } }, "id": "c658d3d8-a013-4b81-8ac6-78b248537e70", "id_v1": "/sensors/50", @@ -1327,7 +1329,9 @@ }, { "button": { - "last_event": "short_release" + "button_report": { + "event": "short_release" + } }, "id": "7f1ab9f6-cc2b-4b40-9011-65e2af153f75", "id_v1": "/sensors/10", @@ -1366,7 +1370,9 @@ }, { "button": { - "last_event": "short_release" + "button_report": { + "event": "short_release" + } }, "id": "31cffcda-efc2-401f-a152-e10db3eed232", "id_v1": "/sensors/5", From 0bd9386df285be769a58e7224f0f47830f63bd78 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 23 Aug 2024 15:15:04 +0200 Subject: [PATCH 0997/1635] Reolink add 100% coverage of siren platform (#124474) --- tests/components/reolink/test_siren.py | 134 +++++++++++++++++++++++++ 1 file changed, 134 insertions(+) create mode 100644 tests/components/reolink/test_siren.py diff --git a/tests/components/reolink/test_siren.py b/tests/components/reolink/test_siren.py new file mode 100644 index 00000000000..0d9d3e0b800 --- /dev/null +++ b/tests/components/reolink/test_siren.py @@ -0,0 +1,134 @@ +"""Test the Reolink siren platform.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.siren import ( + ATTR_DURATION, + ATTR_VOLUME_LEVEL, + DOMAIN as SIREN_DOMAIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_siren( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test siren entity.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + # test siren turn on + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_volume.assert_not_called() + reolink_connect.set_siren.assert_called_with(0, True, None) + + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_VOLUME_LEVEL: 0.85, ATTR_DURATION: 2}, + blocking=True, + ) + reolink_connect.set_volume.assert_called_with(0, volume=85) + reolink_connect.set_siren.assert_called_with(0, True, 2) + + # test siren turn off + reolink_connect.set_siren.side_effect = None + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_siren.assert_called_with(0, False, None) + + +@pytest.mark.parametrize("attr", ["set_volume", "set_siren"]) +@pytest.mark.parametrize( + ("value", "expected"), + [ + ( + AsyncMock(side_effect=ReolinkError("Test error")), + HomeAssistantError, + ), + ( + AsyncMock(side_effect=InvalidParameterError("Test error")), + 
ServiceValidationError, + ), + ], +) +async def test_siren_turn_on_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + attr: str, + value: Any, + expected: Any, +) -> None: + """Test errors when calling siren turn on service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + + setattr(reolink_connect, attr, value) + with pytest.raises(expected): + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_VOLUME_LEVEL: 0.85, ATTR_DURATION: 2}, + blocking=True, + ) + + +async def test_siren_turn_off_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test errors when calling siren turn off service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + + reolink_connect.set_siren.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) From 44b6bca89a3026833261b3205c8223251c8f0220 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 23 Aug 2024 15:42:57 +0200 Subject: [PATCH 0998/1635] Refactor and cleanup zwave_js fixtures to share them (#124485) Refactor and cleanup zwave_js fixtures to use --- tests/components/conftest.py | 103 ++++++-- tests/components/hassio/common.py | 86 +++++-- tests/components/hassio/test_addon_manager.py | 10 +- tests/components/zwave_js/conftest.py | 238 ------------------ tests/components/zwave_js/test_config_flow.py | 27 +- 5 files changed, 164 insertions(+), 300 deletions(-) diff --git a/tests/components/conftest.py b/tests/components/conftest.py index 9f13d52e0e8..39ff7071dc4 100644 --- a/tests/components/conftest.py +++ b/tests/components/conftest.py @@ -202,31 +202,53 @@ def discovery_info_fixture() -> Any: return mock_discovery_info() +@pytest.fixture(name="discovery_info_side_effect") +def discovery_info_side_effect_fixture() -> Any | None: + """Return the discovery info from the supervisor.""" + return None + + @pytest.fixture(name="get_addon_discovery_info") -def get_addon_discovery_info_fixture(discovery_info: Any) -> Generator[AsyncMock]: +def get_addon_discovery_info_fixture( + discovery_info: dict[str, Any], discovery_info_side_effect: Any | None +) -> Generator[AsyncMock]: """Mock get add-on discovery info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_get_addon_discovery_info - yield from mock_get_addon_discovery_info(discovery_info) + yield from mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect) + + +@pytest.fixture(name="addon_store_info_side_effect") +def addon_store_info_side_effect_fixture() -> Any | None: + """Return the add-on store info side effect.""" + return None @pytest.fixture(name="addon_store_info") -def addon_store_info_fixture() -> Generator[AsyncMock]: +def addon_store_info_fixture( + addon_store_info_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock Supervisor add-on store 
info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_store_info - yield from mock_addon_store_info() + yield from mock_addon_store_info(addon_store_info_side_effect) + + +@pytest.fixture(name="addon_info_side_effect") +def addon_info_side_effect_fixture() -> Any | None: + """Return the add-on info side effect.""" + return None @pytest.fixture(name="addon_info") -def addon_info_fixture() -> Generator[AsyncMock]: +def addon_info_fixture(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: """Mock Supervisor add-on info.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_addon_info - yield from mock_addon_info() + yield from mock_addon_info(addon_info_side_effect) @pytest.fixture(name="addon_not_installed") @@ -262,33 +284,64 @@ def addon_running_fixture( return mock_addon_running(addon_store_info, addon_info) +@pytest.fixture(name="install_addon_side_effect") +def install_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return the install add-on side effect.""" + + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_install_addon_side_effect + + return mock_install_addon_side_effect(addon_store_info, addon_info) + + @pytest.fixture(name="install_addon") def install_addon_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock + install_addon_side_effect: Any | None, ) -> Generator[AsyncMock]: """Mock install add-on.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_install_addon - yield from mock_install_addon(addon_store_info, addon_info) + yield from mock_install_addon(install_addon_side_effect) + + +@pytest.fixture(name="start_addon_side_effect") +def start_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return the start add-on options side effect.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_start_addon_side_effect + + return mock_start_addon_side_effect(addon_store_info, addon_info) @pytest.fixture(name="start_addon") -def start_addon_fixture() -> Generator[AsyncMock]: +def start_addon_fixture(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock start add-on.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_start_addon - yield from mock_start_addon() + yield from mock_start_addon(start_addon_side_effect) + + +@pytest.fixture(name="restart_addon_side_effect") +def restart_addon_side_effect_fixture() -> Any | None: + """Return the restart add-on options side effect.""" + return None @pytest.fixture(name="restart_addon") -def restart_addon_fixture() -> Generator[AsyncMock]: +def restart_addon_fixture( + restart_addon_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock restart add-on.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_restart_addon - yield from mock_restart_addon() + yield from mock_restart_addon(restart_addon_side_effect) @pytest.fixture(name="stop_addon") @@ -300,13 +353,35 @@ def stop_addon_fixture() -> Generator[AsyncMock]: yield from mock_stop_addon() +@pytest.fixture(name="addon_options") +def addon_options_fixture(addon_info: AsyncMock) -> dict[str, Any]: + """Mock add-on options.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_options + + return mock_addon_options(addon_info) + + +@pytest.fixture(name="set_addon_options_side_effect") +def 
set_addon_options_side_effect_fixture( + addon_options: dict[str, Any], +) -> Any | None: + """Return the set add-on options side effect.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_set_addon_options_side_effect + + return mock_set_addon_options_side_effect(addon_options) + + @pytest.fixture(name="set_addon_options") -def set_addon_options_fixture() -> Generator[AsyncMock]: +def set_addon_options_fixture( + set_addon_options_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock set add-on options.""" # pylint: disable-next=import-outside-toplevel from .hassio.common import mock_set_addon_options - yield from mock_set_addon_options() + yield from mock_set_addon_options(set_addon_options_side_effect) @pytest.fixture(name="uninstall_addon") diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py index f39e65dcb1e..5ef4a55d26e 100644 --- a/tests/components/hassio/common.py +++ b/tests/components/hassio/common.py @@ -23,19 +23,25 @@ def mock_discovery_info() -> Any: return DEFAULT -def mock_get_addon_discovery_info(discovery_info: Any) -> Generator[AsyncMock]: +def mock_get_addon_discovery_info( + discovery_info: dict[str, Any], discovery_info_side_effect: Any | None +) -> Generator[AsyncMock]: """Mock get add-on discovery info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", + side_effect=discovery_info_side_effect, return_value=discovery_info, ) as get_addon_discovery_info: yield get_addon_discovery_info -def mock_addon_store_info() -> Generator[AsyncMock]: +def mock_addon_store_info( + addon_store_info_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock Supervisor add-on store info.""" with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", + side_effect=addon_store_info_side_effect, ) as addon_store_info: addon_store_info.return_value = { "available": False, @@ -46,10 +52,11 @@ def mock_addon_store_info() -> Generator[AsyncMock]: yield addon_store_info -def mock_addon_info() -> Generator[AsyncMock]: +def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: """Mock Supervisor add-on info.""" with patch( "homeassistant.components.hassio.addon_manager.async_get_addon_info", + side_effect=addon_info_side_effect, ) as addon_info: addon_info.return_value = { "available": False, @@ -96,18 +103,18 @@ def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> As "version": "1.0.0", } addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-mosquitto" + addon_info.return_value["hostname"] = "core-test-addon" addon_info.return_value["state"] = "started" addon_info.return_value["version"] = "1.0.0" return addon_info -def mock_install_addon( +def mock_install_addon_side_effect( addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Generator[AsyncMock]: - """Mock install add-on.""" +) -> Any | None: + """Return the install add-on side effect.""" - async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: + async def install_addon(hass: HomeAssistant, slug): """Mock install add-on.""" addon_store_info.return_value = { "available": True, @@ -119,17 +126,43 @@ def mock_install_addon( addon_info.return_value["state"] = "stopped" addon_info.return_value["version"] = "1.0.0" + return install_addon + + +def mock_install_addon(install_addon_side_effect: Any | None) -> 
Generator[AsyncMock]: + """Mock install add-on.""" + with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon" + "homeassistant.components.hassio.addon_manager.async_install_addon", + side_effect=install_addon_side_effect, ) as install_addon: - install_addon.side_effect = install_addon_side_effect yield install_addon -def mock_start_addon() -> Generator[AsyncMock]: +def mock_start_addon_side_effect( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return the start add-on options side effect.""" + + async def start_addon(hass: HomeAssistant, slug): + """Mock start add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "started" + + return start_addon + + +def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock start add-on.""" with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" + "homeassistant.components.hassio.addon_manager.async_start_addon", + side_effect=start_addon_side_effect, ) as start_addon: yield start_addon @@ -142,10 +175,11 @@ def mock_stop_addon() -> Generator[AsyncMock]: yield stop_addon -def mock_restart_addon() -> Generator[AsyncMock]: +def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: """Mock restart add-on.""" with patch( - "homeassistant.components.hassio.addon_manager.async_restart_addon" + "homeassistant.components.hassio.addon_manager.async_restart_addon", + side_effect=restart_addon_side_effect, ) as restart_addon: yield restart_addon @@ -158,10 +192,28 @@ def mock_uninstall_addon() -> Generator[AsyncMock]: yield uninstall_addon -def mock_set_addon_options() -> Generator[AsyncMock]: +def mock_addon_options(addon_info: AsyncMock) -> dict[str, Any]: + """Mock add-on options.""" + return addon_info.return_value["options"] + + +def mock_set_addon_options_side_effect(addon_options: dict[str, Any]) -> Any | None: + """Return the set add-on options side effect.""" + + async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: + """Mock set add-on options.""" + addon_options.update(options["options"]) + + return set_addon_options + + +def mock_set_addon_options( + set_addon_options_side_effect: Any | None, +) -> Generator[AsyncMock]: """Mock set add-on options.""" with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" + "homeassistant.components.hassio.addon_manager.async_set_addon_options", + side_effect=set_addon_options_side_effect, ) as set_options: yield set_options diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index af7a3551684..4cb57e5b8d8 100644 --- a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -738,9 +738,10 @@ async def test_create_backup_error( ) +@pytest.mark.usefixtures("addon_installed") +@pytest.mark.parametrize("set_addon_options_side_effect", [None]) async def test_schedule_install_setup_addon( addon_manager: AddonManager, - addon_installed: AsyncMock, install_addon: AsyncMock, set_addon_options: AsyncMock, start_addon: AsyncMock, @@ -915,11 +916,10 @@ async def test_schedule_install_setup_addon_logs_error( assert start_addon.call_count == start_addon_calls +@pytest.mark.usefixtures("addon_installed") +@pytest.mark.parametrize("set_addon_options_side_effect", [None]) async 
def test_schedule_setup_addon( - addon_manager: AddonManager, - addon_installed: AsyncMock, - set_addon_options: AsyncMock, - start_addon: AsyncMock, + addon_manager: AddonManager, set_addon_options: AsyncMock, start_addon: AsyncMock ) -> None: """Test schedule setup addon.""" start_task = addon_manager.async_schedule_setup_addon({"test_key": "test"}) diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index 60deb7dbce8..a6bbe554f9a 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -1,11 +1,9 @@ """Provide common Z-Wave JS fixtures.""" import asyncio -from collections.abc import Generator import copy import io import json -from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch import pytest @@ -18,242 +16,6 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture -# Add-on fixtures - - -@pytest.fixture(name="addon_info_side_effect") -def addon_info_side_effect_fixture() -> Any | None: - """Return the add-on info side effect.""" - return None - - -@pytest.fixture(name="addon_info") -def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - side_effect=addon_info_side_effect, - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="addon_store_info_side_effect") -def addon_store_info_side_effect_fixture() -> Any | None: - """Return the add-on store info side effect.""" - return None - - -@pytest.fixture(name="addon_store_info") -def mock_addon_store_info( - addon_store_info_side_effect: Any | None, -) -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", - side_effect=addon_store_info_side_effect, - ) as addon_store_info: - addon_store_info.return_value = { - "available": False, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: - """Mock add-on already running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_not_installed") -def mock_addon_not_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True - return addon_info - - -@pytest.fixture(name="addon_options") -def mock_addon_options(addon_info: AsyncMock): - 
"""Mock add-on options.""" - return addon_info.return_value["options"] - - -@pytest.fixture(name="set_addon_options_side_effect") -def set_addon_options_side_effect_fixture( - addon_options: dict[str, Any], -) -> Any | None: - """Return the set add-on options side effect.""" - - async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: - """Mock set add-on options.""" - addon_options.update(options["options"]) - - return set_addon_options - - -@pytest.fixture(name="set_addon_options") -def mock_set_addon_options( - set_addon_options_side_effect: Any | None, -) -> Generator[AsyncMock]: - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options", - side_effect=set_addon_options_side_effect, - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the install add-on side effect.""" - - async def install_addon(hass: HomeAssistant, slug): - """Mock install add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="update_addon") -def mock_update_addon() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon - - -@pytest.fixture(name="start_addon_side_effect") -def start_addon_side_effect_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the start add-on options side effect.""" - - async def start_addon(hass: HomeAssistant, slug): - """Mock start add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "started" - - return start_addon - - -@pytest.fixture(name="start_addon") -def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon", - side_effect=start_addon_side_effect, - ) as start_addon: - yield start_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - -@pytest.fixture(name="restart_addon_side_effect") -def restart_addon_side_effect_fixture() -> Any | None: - """Return the restart add-on options side effect.""" - return None - - -@pytest.fixture(name="restart_addon") -def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock restart add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_restart_addon", - side_effect=restart_addon_side_effect, - ) 
as restart_addon: - yield restart_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock create backup.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - # State fixtures diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index 46172f72b2f..fe16f38257a 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -5,7 +5,7 @@ from collections.abc import Generator from copy import copy from ipaddress import ip_address from typing import Any -from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch +from unittest.mock import AsyncMock, MagicMock, call, patch import aiohttp import pytest @@ -77,31 +77,6 @@ def mock_supervisor_fixture() -> Generator[None]: yield -@pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> dict[str, Any]: - """Return the discovery info from the supervisor.""" - return DEFAULT - - -@pytest.fixture(name="discovery_info_side_effect") -def discovery_info_side_effect_fixture() -> Any | None: - """Return the discovery info from the supervisor.""" - return None - - -@pytest.fixture(name="get_addon_discovery_info") -def mock_get_addon_discovery_info( - discovery_info: dict[str, Any], discovery_info_side_effect: Any | None -) -> Generator[AsyncMock]: - """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", - side_effect=discovery_info_side_effect, - return_value=discovery_info, - ) as get_addon_discovery_info: - yield get_addon_discovery_info - - @pytest.fixture(name="server_version_side_effect") def server_version_side_effect_fixture() -> Any | None: """Return the server version side effect.""" From fd57931cc911c4dd5a396a9d27080a40b66a4624 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 23 Aug 2024 16:27:14 +0200 Subject: [PATCH 0999/1635] Cleanup redundant fixtures on matter integration for addon tests (#124445) --- tests/components/matter/conftest.py | 149 +------------------- tests/components/matter/test_config_flow.py | 18 +-- 2 files changed, 4 insertions(+), 163 deletions(-) diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index f3d8740a73b..b4af00a0b47 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator, Generator +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, MagicMock, patch from matter_server.client.models.node import MatterNode @@ -70,153 +70,6 @@ async def integration_fixture( return entry -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock Supervisor create backup of add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on store info.""" - with patch( - 
"homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": False, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="addon_not_installed") -def addon_not_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True - return addon_info - - -@pytest.fixture(name="addon_installed") -def addon_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-matter-server" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_running") -def addon_running_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-matter-server" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="install_addon") -def install_addon_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Generator[AsyncMock]: - """Mock install add-on.""" - - async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: - """Mock install add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon" - ) as install_addon: - install_addon.side_effect = install_addon_side_effect - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture() -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - -@pytest.fixture(name="update_addon") -def update_addon_fixture() 
-> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon - - @pytest.fixture(name="door_lock") async def door_lock_fixture( hass: HomeAssistant, matter_client: MagicMock diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index 642bfe0f804..a4ddc18802f 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -4,8 +4,7 @@ from __future__ import annotations from collections.abc import Generator from ipaddress import ip_address -from typing import Any -from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch +from unittest.mock import AsyncMock, MagicMock, call, patch from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest @@ -93,20 +92,9 @@ def supervisor_fixture() -> Generator[MagicMock]: yield is_hassio -@pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> Any: - """Return the discovery info from the supervisor.""" - return DEFAULT - - -@pytest.fixture(name="get_addon_discovery_info", autouse=True) -def get_addon_discovery_info_fixture(discovery_info: Any) -> Generator[AsyncMock]: +@pytest.fixture(autouse=True) +def mock_get_addon_discovery_info(get_addon_discovery_info: AsyncMock) -> None: """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", - return_value=discovery_info, - ) as get_addon_discovery_info: - yield get_addon_discovery_info @pytest.fixture(name="addon_setup_time", autouse=True) From 61cee043e68254526635757af1a07c05db629c03 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Fri, 23 Aug 2024 16:17:48 +0100 Subject: [PATCH 1000/1635] Do not report tplink discovery failures as legacy connection failures (#124432) * Do not report discovery failures as legacy connection failures * Fix catching BaseException --- .../components/tplink/config_flow.py | 15 ++++++++--- tests/components/tplink/test_config_flow.py | 27 +++++++++++++++++++ 2 files changed, 39 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index a0f0ca6eb76..1c02466aef1 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -410,9 +410,18 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self._discovered_device = await Discover.discover_single( host, credentials=credentials ) - except TimeoutError: - # Try connect() to legacy devices if discovery fails - self._discovered_device = await Device.connect(config=DeviceConfig(host)) + except TimeoutError as ex: + # Try connect() to legacy devices if discovery fails. This is a + # fallback mechanism for legacy that can handle connections without + # discovery info but if it fails raise the original error which is + # applicable for newer devices. 
+ try: + self._discovered_device = await Device.connect( + config=DeviceConfig(host) + ) + except Exception: # noqa: BLE001 + # Raise the original error instead of the fallback error + raise ex from ex else: if self._discovered_device.config.uses_http: self._discovered_device.config.http_client = ( diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index ddd67f249e6..f90eb985d38 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1289,6 +1289,33 @@ async def test_discovery_timeout_connect( assert mock_connect["connect"].call_count == 1 +async def test_discovery_timeout_connect_legacy_error( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + mock_init, +) -> None: + """Test discovery tries legacy connect on timeout.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + mock_discovery["discover_single"].side_effect = TimeoutError + mock_connect["connect"].side_effect = KasaException + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + assert mock_connect["connect"].call_count == 0 + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS} + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + assert mock_connect["connect"].call_count == 1 + + async def test_reauth_update_other_flows( hass: HomeAssistant, mock_discovery: AsyncMock, From af2fec89d86725db290e4e5bf76b109e2c7ae0e0 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 23 Aug 2024 12:23:05 -0500 Subject: [PATCH 1001/1635] Bump yalexs to 8.3.3 (#124492) * Bump yalexs to 8.2.0 changelog: https://github.com/bdraco/yalexs/compare/v8.1.4...v8.2.0 * bump to 8.3.1 * bump * one more bump to ensure we do not hit the ratelimit/shutdown cleanly * empty commit to restart ci since close/open did not work in flight --- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/august/conftest.py | 8 ++++++++ 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 7e73e55e6ba..49c23dac660 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.1.4", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.3.3", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0a9cb638cae..c204a156845 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2968,7 +2968,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.1.4 +yalexs==8.3.3 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 361de6746c1..1d6923448da 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2348,7 +2348,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.1.4 +yalexs==8.3.3 # homeassistant.components.yeelight 
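For reference, the TP-Link config-flow change in PATCH 1000 above boils down to a small reusable pattern: attempt modern discovery, fall back to a legacy connection, and if the fallback also fails re-raise the original discovery error rather than the fallback one. A minimal sketch of that shape, where `discover_device` and `connect_legacy` are placeholder coroutines standing in for the kasa calls:

async def probe(host: str):
    """Probe a device, preferring discovery but tolerating legacy-only devices."""
    try:
        return await discover_device(host)      # placeholder for Discover.discover_single()
    except TimeoutError as ex:
        try:
            return await connect_legacy(host)   # placeholder for Device.connect()
        except Exception:  # noqa: BLE001
            # Surface the original timeout; "from ex" keeps the fallback
            # failure out of the displayed traceback chain.
            raise ex from ex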
yeelight==0.7.14 diff --git a/tests/components/august/conftest.py b/tests/components/august/conftest.py index 052cde7d2a2..78cb2cdad89 100644 --- a/tests/components/august/conftest.py +++ b/tests/components/august/conftest.py @@ -3,6 +3,7 @@ from unittest.mock import patch import pytest +from yalexs.manager.ratelimit import _RateLimitChecker @pytest.fixture(name="mock_discovery", autouse=True) @@ -12,3 +13,10 @@ def mock_discovery_fixture(): "homeassistant.components.august.data.discovery_flow.async_create_flow" ) as mock_discovery: yield mock_discovery + + +@pytest.fixture(name="disable_ratelimit_checks", autouse=True) +def disable_ratelimit_checks_fixture(): + """Disable rate limit checks.""" + with patch.object(_RateLimitChecker, "register_wakeup"): + yield From 26e87509be6d866942183de775b86e214efceb3e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 23 Aug 2024 19:40:35 +0200 Subject: [PATCH 1002/1635] Cleanup hassio fixtures in home assistant hardware integration tests (#124500) --- tests/components/hassio/common.py | 2 +- .../homeassistant_hardware/conftest.py | 117 ------------------ .../homeassistant_sky_connect/conftest.py | 117 ------------------ .../homeassistant_yellow/conftest.py | 108 ---------------- 4 files changed, 1 insertion(+), 343 deletions(-) diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py index 5ef4a55d26e..630368a0a7a 100644 --- a/tests/components/hassio/common.py +++ b/tests/components/hassio/common.py @@ -44,7 +44,7 @@ def mock_addon_store_info( side_effect=addon_store_info_side_effect, ) as addon_store_info: addon_store_info.return_value = { - "available": False, + "available": True, "installed": None, "state": None, "version": "1.0.0", diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index c495b5132e0..c63dca74391 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -6,8 +6,6 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest -from homeassistant.core import HomeAssistant - @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: @@ -51,112 +49,6 @@ def mock_zha_get_last_network_settings() -> Generator[None]: yield -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): - """Mock add-on already running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture(): - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield 
addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture(): - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): - """Return the install add-on side effect.""" - - async def install_addon(hass: HomeAssistant, slug: str) -> None: - """Mock install add-on.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture(): - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - @pytest.fixture(name="stop_addon") def stop_addon_fixture(): """Mock stop add-on.""" @@ -164,12 +56,3 @@ def stop_addon_fixture(): "homeassistant.components.hassio.addon_manager.async_stop_addon" ) as stop_addon: yield stop_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(): - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index e93b90c4c7c..d71bf4305b3 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -5,8 +5,6 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest -from homeassistant.core import HomeAssistant - @pytest.fixture(name="mock_usb_serial_by_id", autouse=True) def mock_usb_serial_by_id_fixture() -> Generator[MagicMock]: @@ -51,112 +49,6 @@ def mock_zha_get_last_network_settings() -> Generator[None]: yield -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): - """Mock add-on already running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = 
"core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture(): - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture(): - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): - """Return the install add-on side effect.""" - - async def install_addon(hass: HomeAssistant, slug: str) -> None: - """Mock install add-on.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture(): - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - @pytest.fixture(name="stop_addon") def stop_addon_fixture(): """Mock stop add-on.""" @@ -164,12 +56,3 @@ def stop_addon_fixture(): "homeassistant.components.hassio.addon_manager.async_stop_addon" ) as stop_addon: yield stop_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(): - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon diff --git a/tests/components/homeassistant_yellow/conftest.py b/tests/components/homeassistant_yellow/conftest.py index 9882d7613d5..7247c7da4e2 100644 --- a/tests/components/homeassistant_yellow/conftest.py +++ b/tests/components/homeassistant_yellow/conftest.py @@ -6,8 +6,6 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest -from homeassistant.core import HomeAssistant - @pytest.fixture(autouse=True) def mock_zha_config_flow_setup() -> Generator[None]: @@ -49,109 +47,3 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield - - -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): - """Mock add-on already running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - 
addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture(): - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture(): - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): - """Return the install add-on side effect.""" - - async def install_addon(hass: HomeAssistant, slug: str) -> None: - """Mock install add-on.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture(): - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon From 79ba31500874b643bff7f72f87b04ce1a6a50436 Mon Sep 17 00:00:00 2001 From: Christophe Gagnier Date: Fri, 23 Aug 2024 16:45:26 -0400 Subject: [PATCH 1003/1635] Add `charging enabled` switch to TechnoVE (#121484) * Add session_active switch to TechnoVE * Replace multi-line lambda with function def * Make lambda one line --- .../components/technove/binary_sensor.py | 44 +++++++++++++++ homeassistant/components/technove/icons.json | 5 ++ .../components/technove/strings.json | 12 ++++ homeassistant/components/technove/switch.py | 55 +++++++++++++++---- .../technove/fixtures/station_charging.json | 2 +- .../technove/snapshots/test_diagnostics.ambr | 2 +- .../technove/snapshots/test_switch.ambr | 46 ++++++++++++++++ .../components/technove/test_binary_sensor.py | 11 ++-- tests/components/technove/test_switch.py | 44 ++++++++++++++- 
9 files changed, 204 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/technove/binary_sensor.py b/homeassistant/components/technove/binary_sensor.py index a1ff9d16baf..1ecefe6f85c 100644 --- a/homeassistant/components/technove/binary_sensor.py +++ b/homeassistant/components/technove/binary_sensor.py @@ -4,19 +4,28 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from technove import Station as TechnoVEStation from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . import TechnoVEConfigEntry +from .const import DOMAIN from .coordinator import TechnoVEDataUpdateCoordinator from .entity import TechnoVEEntity @@ -25,6 +34,7 @@ from .entity import TechnoVEEntity class TechnoVEBinarySensorDescription(BinarySensorEntityDescription): """Describes TechnoVE binary sensor entity.""" + deprecated_version: str | None = None value_fn: Callable[[TechnoVEStation], bool | None] @@ -52,6 +62,9 @@ BINARY_SENSORS = [ entity_category=EntityCategory.DIAGNOSTIC, device_class=BinarySensorDeviceClass.BATTERY_CHARGING, value_fn=lambda station: station.info.is_session_active, + deprecated_version="2025.2.0", + # Disabled by default, as this entity is deprecated + entity_registry_enabled_default=False, ), TechnoVEBinarySensorDescription( key="is_static_ip", @@ -100,3 +113,34 @@ class TechnoVEBinarySensorEntity(TechnoVEEntity, BinarySensorEntity): """Return the state of the sensor.""" return self.entity_description.value_fn(self.coordinator.data) + + async def async_added_to_hass(self) -> None: + """Raise issue when entity is registered and was not disabled.""" + if TYPE_CHECKING: + assert self.unique_id + if entity_id := er.async_get(self.hass).async_get_entity_id( + BINARY_SENSOR_DOMAIN, DOMAIN, self.unique_id + ): + if self.enabled and self.entity_description.deprecated_version: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + breaks_in_ha_version=self.entity_description.deprecated_version, + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_entity_{self.entity_description.key}", + translation_placeholders={ + "sensor_name": self.name + if isinstance(self.name, str) + else entity_id, + "entity": entity_id, + }, + ) + else: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + ) + await super().async_added_to_hass() diff --git a/homeassistant/components/technove/icons.json b/homeassistant/components/technove/icons.json index ff47d3c32bc..52307405ff7 100644 --- a/homeassistant/components/technove/icons.json +++ b/homeassistant/components/technove/icons.json @@ -4,6 +4,11 @@ "ssid": { "default": "mdi:wifi" } + }, + "switch": { + "session_active": { + "default": "mdi:ev-station" + } } } } diff --git a/homeassistant/components/technove/strings.json b/homeassistant/components/technove/strings.json index 8799909d95c..06c93939db8 100644 --- a/homeassistant/components/technove/strings.json +++ 
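The binary-sensor change above also shows the usual deprecation flow: while the soon-to-be-removed entity is still enabled, register a repair issue, otherwise clear it. Stripped of the entity-registry lookup, the core of that pattern is roughly the following sketch (imports and DOMAIN as added in the diff above; the issue strings are assumed to exist under `issues` in strings.json):

    async def async_added_to_hass(self) -> None:
        """Create or clear the deprecation repair issue when the entity is added."""
        key = self.entity_description.key
        if self.enabled and self.entity_description.deprecated_version:
            async_create_issue(
                self.hass,
                DOMAIN,
                f"deprecated_entity_{key}",
                breaks_in_ha_version=self.entity_description.deprecated_version,
                is_fixable=False,
                severity=IssueSeverity.WARNING,
                translation_key=f"deprecated_entity_{key}",
            )
        else:
            async_delete_issue(self.hass, DOMAIN, f"deprecated_entity_{key}")
        await super().async_added_to_hass()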
b/homeassistant/components/technove/strings.json @@ -77,12 +77,24 @@ "switch": { "auto_charge": { "name": "Auto charge" + }, + "session_active": { + "name": "Charging Enabled" } } }, "exceptions": { "max_current_in_sharing_mode": { "message": "Cannot set the max current when power sharing mode is enabled." + }, + "set_charging_enabled_on_auto_charge": { + "message": "Cannot enable or disable charging when auto-charge is enabled. Try disabling auto-charge first." + } + }, + "issues": { + "deprecated_entity_is_session_active": { + "title": "The TechnoVE `{sensor_name}` binary sensor is deprecated", + "description": "`{entity}` is deprecated.\nPlease update your automations and scripts to replace the binary sensor entity with the newly added switch entity.\nWhen you are done migrating you can disable `{entity}`." } } } diff --git a/homeassistant/components/technove/switch.py b/homeassistant/components/technove/switch.py index bb9250215be..a8ad7581da5 100644 --- a/homeassistant/components/technove/switch.py +++ b/homeassistant/components/technove/switch.py @@ -2,30 +2,59 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from technove import Station as TechnoVEStation, TechnoVE +from technove import Station as TechnoVEStation from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TechnoVEConfigEntry +from .const import DOMAIN from .coordinator import TechnoVEDataUpdateCoordinator from .entity import TechnoVEEntity from .helpers import technove_exception_handler +async def _set_charging_enabled( + coordinator: TechnoVEDataUpdateCoordinator, enabled: bool +) -> None: + if coordinator.data.info.auto_charge: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="set_charging_enabled_on_auto_charge", + ) + await coordinator.technove.set_charging_enabled(enabled=enabled) + coordinator.data.info.is_session_active = enabled + coordinator.async_set_updated_data(coordinator.data) + + +async def _enable_charging(coordinator: TechnoVEDataUpdateCoordinator) -> None: + await _set_charging_enabled(coordinator, True) + + +async def _disable_charging(coordinator: TechnoVEDataUpdateCoordinator) -> None: + await _set_charging_enabled(coordinator, False) + + +async def _set_auto_charge( + coordinator: TechnoVEDataUpdateCoordinator, enabled: bool +) -> None: + await coordinator.technove.set_auto_charge(enabled=enabled) + + @dataclass(frozen=True, kw_only=True) class TechnoVESwitchDescription(SwitchEntityDescription): """Describes TechnoVE binary sensor entity.""" is_on_fn: Callable[[TechnoVEStation], bool] - turn_on_fn: Callable[[TechnoVE], Awaitable[dict[str, Any]]] - turn_off_fn: Callable[[TechnoVE], Awaitable[dict[str, Any]]] + turn_on_fn: Callable[[TechnoVEDataUpdateCoordinator], Coroutine[Any, Any, None]] + turn_off_fn: Callable[[TechnoVEDataUpdateCoordinator], Coroutine[Any, Any, None]] SWITCHES = [ @@ -34,8 +63,16 @@ SWITCHES = [ translation_key="auto_charge", entity_category=EntityCategory.CONFIG, is_on_fn=lambda station: station.info.auto_charge, - turn_on_fn=lambda technoVE: technoVE.set_auto_charge(enabled=True), - turn_off_fn=lambda technoVE: 
technoVE.set_auto_charge(enabled=False), + turn_on_fn=lambda coordinator: _set_auto_charge(coordinator, True), + turn_off_fn=lambda coordinator: _set_auto_charge(coordinator, False), + ), + TechnoVESwitchDescription( + key="session_active", + translation_key="session_active", + entity_category=EntityCategory.CONFIG, + is_on_fn=lambda station: station.info.is_session_active, + turn_on_fn=_enable_charging, + turn_off_fn=_disable_charging, ), ] @@ -76,11 +113,9 @@ class TechnoVESwitchEntity(TechnoVEEntity, SwitchEntity): @technove_exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the TechnoVE switch.""" - await self.entity_description.turn_on_fn(self.coordinator.technove) - await self.coordinator.async_request_refresh() + await self.entity_description.turn_on_fn(self.coordinator) @technove_exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the TechnoVE switch.""" - await self.entity_description.turn_off_fn(self.coordinator.technove) - await self.coordinator.async_request_refresh() + await self.entity_description.turn_off_fn(self.coordinator) diff --git a/tests/components/technove/fixtures/station_charging.json b/tests/components/technove/fixtures/station_charging.json index 63e68d0db0e..4f50bf1a645 100644 --- a/tests/components/technove/fixtures/station_charging.json +++ b/tests/components/technove/fixtures/station_charging.json @@ -6,7 +6,7 @@ "current": 23.75, "network_ssid": "Connecting...", "id": "AA:AA:AA:AA:AA:BB", - "auto_charge": true, + "auto_charge": false, "highChargePeriodActive": false, "normalPeriodActive": false, "maxChargePourcentage": 0.9, diff --git a/tests/components/technove/snapshots/test_diagnostics.ambr b/tests/components/technove/snapshots/test_diagnostics.ambr index 2e81f124ba5..175e8f2022a 100644 --- a/tests/components/technove/snapshots/test_diagnostics.ambr +++ b/tests/components/technove/snapshots/test_diagnostics.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'auto_charge': True, + 'auto_charge': False, 'conflict_in_sharing_config': False, 'current': 23.75, 'energy_session': 12.34, diff --git a/tests/components/technove/snapshots/test_switch.ambr b/tests/components/technove/snapshots/test_switch.ambr index 1a707971fc8..6febc8c768c 100644 --- a/tests/components/technove/snapshots/test_switch.ambr +++ b/tests/components/technove/snapshots/test_switch.ambr @@ -42,6 +42,52 @@ 'last_changed': , 'last_reported': , 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[switch.technove_station_charging_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.technove_station_charging_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charging Enabled', + 'platform': 'technove', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'session_active', + 'unique_id': 'AA:AA:AA:AA:AA:BB_session_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.technove_station_charging_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'TechnoVE Station Charging Enabled', + }), + 'context': , + 'entity_id': 
'switch.technove_station_charging_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , 'state': 'on', }) # --- diff --git a/tests/components/technove/test_binary_sensor.py b/tests/components/technove/test_binary_sensor.py index 0ee4f3f3db7..0a90093779e 100644 --- a/tests/components/technove/test_binary_sensor.py +++ b/tests/components/technove/test_binary_sensor.py @@ -8,7 +8,7 @@ import pytest from syrupy import SnapshotAssertion from technove import TechnoVEError -from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -43,7 +43,10 @@ async def test_sensors( @pytest.mark.parametrize( "entity_id", - ["binary_sensor.technove_station_static_ip"], + [ + "binary_sensor.technove_station_static_ip", + "binary_sensor.technove_station_charging", + ], ) @pytest.mark.usefixtures("init_integration") async def test_disabled_by_default_binary_sensors( @@ -64,9 +67,9 @@ async def test_binary_sensor_update_failure( freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator update failure.""" - entity_id = "binary_sensor.technove_station_charging" + entity_id = "binary_sensor.technove_station_power_sharing_mode" - assert hass.states.get(entity_id).state == STATE_ON + assert hass.states.get(entity_id).state == STATE_OFF mock_technove.update.side_effect = TechnoVEError("Test error") freezer.tick(timedelta(minutes=5, seconds=1)) diff --git a/tests/components/technove/test_switch.py b/tests/components/technove/test_switch.py index b1a66607f66..dc0293b6443 100644 --- a/tests/components/technove/test_switch.py +++ b/tests/components/technove/test_switch.py @@ -15,7 +15,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from . 
import setup_with_selected_platforms @@ -53,6 +53,12 @@ async def test_switches( {"enabled": True}, {"enabled": False}, ), + ( + "switch.technove_station_charging_enabled", + "set_charging_enabled", + {"enabled": True}, + {"enabled": False}, + ), ], ) @pytest.mark.usefixtures("init_integration") @@ -96,6 +102,10 @@ async def test_switch_on_off( "switch.technove_station_auto_charge", "set_auto_charge", ), + ( + "switch.technove_station_charging_enabled", + "set_charging_enabled", + ), ], ) @pytest.mark.usefixtures("init_integration") @@ -130,6 +140,10 @@ async def test_invalid_response( "switch.technove_station_auto_charge", "set_auto_charge", ), + ( + "switch.technove_station_charging_enabled", + "set_charging_enabled", + ), ], ) @pytest.mark.usefixtures("init_integration") @@ -157,3 +171,31 @@ async def test_connection_error( assert method_mock.call_count == 1 assert (state := hass.states.get(state.entity_id)) assert state.state == STATE_UNAVAILABLE + + +@pytest.mark.usefixtures("init_integration") +async def test_disable_charging_auto_charge( + hass: HomeAssistant, + mock_technove: MagicMock, +) -> None: + """Test failure to disable charging when the station is in auto charge mode.""" + entity_id = "switch.technove_station_charging_enabled" + state = hass.states.get(entity_id) + + # Enable auto-charge mode + device = mock_technove.update.return_value + device.info.auto_charge = True + + with pytest.raises( + ServiceValidationError, + match="auto-charge is enabled", + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE From a7c6abc54ead541ff6246448b58cd25bc18d5f9c Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Sat, 24 Aug 2024 06:04:44 +0200 Subject: [PATCH 1004/1635] Always assign unique_id in Onkyo (#120543) --- homeassistant/components/onkyo/media_player.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 8107c62e4a1..acc0459e258 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -320,14 +320,11 @@ class OnkyoMediaPlayer(MediaPlayerEntity): name = receiver.name identifier = receiver.identifier self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" - if receiver.discovered: - if zone == "main": - # keep legacy unique_id - self._attr_unique_id = f"{name}_{identifier}" - else: - self._attr_unique_id = f"{identifier}_{zone}" + if receiver.discovered and zone == "main": + # keep legacy unique_id + self._attr_unique_id = f"{name}_{identifier}" else: - self._attr_unique_id = None + self._attr_unique_id = f"{identifier}_{zone}" self._zone = zone self._source_mapping = sources From c2ce71a38ce5e6a1a9640e7b6fa9e9cc6043de43 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Sat, 24 Aug 2024 06:48:02 +0200 Subject: [PATCH 1005/1635] Update xknx to 3.1.1 (#124257) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index 9ecf687d6b9..b7efd14fa2a 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -11,7 
+11,7 @@ "loggers": ["xknx", "xknxproject"], "quality_scale": "platinum", "requirements": [ - "xknx==3.1.0", + "xknx==3.1.1", "xknxproject==3.7.1", "knx-frontend==2024.8.9.225351" ], diff --git a/requirements_all.txt b/requirements_all.txt index c204a156845..eb3abd3abf0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2945,7 +2945,7 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==3.1.0 +xknx==3.1.1 # homeassistant.components.knx xknxproject==3.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1d6923448da..ce1daaec451 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2328,7 +2328,7 @@ xbox-webapi==2.0.11 xiaomi-ble==0.30.2 # homeassistant.components.knx -xknx==3.1.0 +xknx==3.1.1 # homeassistant.components.knx xknxproject==3.7.1 From d47a296d7a4286083a714b0b0fe2e6a4f6b81fad Mon Sep 17 00:00:00 2001 From: "David F. Mulcahey" Date: Sat, 24 Aug 2024 00:52:58 -0400 Subject: [PATCH 1006/1635] Resolve versions of ZHA dependencies earlier to prevent blocking call in event loop (#124496) --- homeassistant/components/zha/diagnostics.py | 25 ++++++++++++++------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index f276630dfee..ad73978d24d 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -50,6 +50,15 @@ ATTRIBUTES = "attributes" CLUSTER_DETAILS = "cluster_details" UNSUPPORTED_ATTRIBUTES = "unsupported_attributes" +BELLOWS_VERSION = version("bellows") +ZIGPY_VERSION = version("zigpy") +ZIGPY_DECONZ_VERSION = version("zigpy-deconz") +ZIGPY_XBEE_VERSION = version("zigpy-xbee") +ZIGPY_ZNP_VERSION = version("zigpy-znp") +ZIGPY_ZIGATE_VERSION = version("zigpy-zigate") +ZHA_QUIRKS_VERSION = version("zha-quirks") +ZHA_VERSION = version("zha") + def shallow_asdict(obj: Any) -> dict: """Return a shallow copy of a dataclass as a dict.""" @@ -86,14 +95,14 @@ async def async_get_config_entry_diagnostics( channel: 100 * energy / 255 for channel, energy in energy_scan.items() }, "versions": { - "bellows": version("bellows"), - "zigpy": version("zigpy"), - "zigpy_deconz": version("zigpy-deconz"), - "zigpy_xbee": version("zigpy-xbee"), - "zigpy_znp": version("zigpy_znp"), - "zigpy_zigate": version("zigpy-zigate"), - "zhaquirks": version("zha-quirks"), - "zha": version("zha"), + "bellows": BELLOWS_VERSION, + "zigpy": ZIGPY_VERSION, + "zigpy_deconz": ZIGPY_DECONZ_VERSION, + "zigpy_xbee": ZIGPY_XBEE_VERSION, + "zigpy_znp": ZIGPY_ZNP_VERSION, + "zigpy_zigate": ZIGPY_ZIGATE_VERSION, + "zhaquirks": ZHA_QUIRKS_VERSION, + "zha": ZHA_VERSION, }, "devices": [ { From 098a006f32df60cca904fcc2753aeb5a94958857 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sat, 24 Aug 2024 06:56:48 +0200 Subject: [PATCH 1007/1635] Bump lcn-frontend to 0.1.6 (#124490) --- homeassistant/components/lcn/manifest.json | 4 +++- homeassistant/components/lcn/websocket.py | 27 ++++++++-------------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 14 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 44aae34e9e6..f8b7d02b103 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -1,10 +1,12 @@ { "domain": "lcn", "name": "LCN", + "after_dependencies": ["panel_custom"], "codeowners": ["@alengwenus"], "config_flow": true, + 
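The ZHA diagnostics change above works because `importlib.metadata.version()` reads distribution metadata from disk; hoisting the lookups into module-level constants means the blocking I/O happens once at import time and the diagnostics coroutine only reads pre-computed strings. A minimal self-contained sketch of the same idea:

from importlib.metadata import version

# Resolved once, when the module is imported, not inside the event loop.
ZIGPY_VERSION = version("zigpy")


async def async_get_versions() -> dict[str, str]:
    """Return library versions without doing any blocking work."""
    return {"zigpy": ZIGPY_VERSION}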
"dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.21", "lcn-frontend==0.1.5"] + "requirements": ["pypck==0.7.21", "lcn-frontend==0.1.6"] } diff --git a/homeassistant/components/lcn/websocket.py b/homeassistant/components/lcn/websocket.py index 5317bc86763..6fda20aba93 100644 --- a/homeassistant/components/lcn/websocket.py +++ b/homeassistant/components/lcn/websocket.py @@ -10,6 +10,7 @@ import lcn_frontend as lcn_panel import voluptuous as vol from homeassistant.components import panel_custom, websocket_api +from homeassistant.components.http import StaticPathConfig from homeassistant.components.websocket_api import AsyncWebSocketCommandHandler from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -17,7 +18,6 @@ from homeassistant.const import ( CONF_DEVICES, CONF_DOMAIN, CONF_ENTITIES, - CONF_ENTITY_ID, CONF_NAME, CONF_RESOURCE, ) @@ -77,10 +77,14 @@ async def register_panel_and_ws_api(hass: HomeAssistant) -> None: websocket_api.async_register_command(hass, websocket_delete_entity) if DOMAIN not in hass.data.get("frontend_panels", {}): - hass.http.register_static_path( - URL_BASE, - path=lcn_panel.locate_dir(), - cache_headers=lcn_panel.is_prod_build, + await hass.http.async_register_static_paths( + [ + StaticPathConfig( + URL_BASE, + path=lcn_panel.locate_dir(), + cache_headers=lcn_panel.is_prod_build, + ) + ] ) await panel_custom.async_register_panel( hass=hass, @@ -154,19 +158,6 @@ async def websocket_get_entity_configs( else: entity_configs = config_entry.data[CONF_ENTITIES] - entity_registry = er.async_get(hass) - for entity_config in entity_configs: - entity_unique_id = generate_unique_id( - config_entry.entry_id, - entity_config[CONF_ADDRESS], - entity_config[CONF_RESOURCE], - ) - entity_id = entity_registry.async_get_entity_id( - entity_config[CONF_DOMAIN], DOMAIN, entity_unique_id - ) - - entity_config[CONF_ENTITY_ID] = entity_id - connection.send_result(msg["id"], entity_configs) diff --git a/requirements_all.txt b/requirements_all.txt index eb3abd3abf0..1d177fadeb0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1243,7 +1243,7 @@ lakeside==0.13 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.1.5 +lcn-frontend==0.1.6 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ce1daaec451..1fee30e8e47 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1036,7 +1036,7 @@ lacrosse-view==1.0.2 laundrify-aio==1.2.2 # homeassistant.components.lcn -lcn-frontend==0.1.5 +lcn-frontend==0.1.6 # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 From 2bb4a8747c8ba6839a02db4ec6fb52090e138175 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 23 Aug 2024 23:58:52 -0500 Subject: [PATCH 1008/1635] Bump fnv-hash-fast to 1.0.2 (#124489) --- homeassistant/components/homekit/manifest.json | 2 +- homeassistant/components/recorder/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index 17d1237e579..eebdc0026fd 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -10,7 +10,7 @@ "loggers": ["pyhap"], "requirements": [ "HAP-python==4.9.1", - "fnv-hash-fast==0.5.0", + "fnv-hash-fast==1.0.2", "PyQRCode==1.2.1", "base36==0.1.1" ], diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 7d5576e4672..2be4b6862ba 100644 --- a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -8,7 +8,7 @@ "quality_scale": "internal", "requirements": [ "SQLAlchemy==2.0.31", - "fnv-hash-fast==0.5.0", + "fnv-hash-fast==1.0.2", "psutil-home-assistant==0.0.1" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b6bcd1dac6b..8090c17f334 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -24,7 +24,7 @@ certifi>=2021.5.30 ciso8601==2.3.1 cryptography==43.0.0 dbus-fast==2.23.0 -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 ha-av==10.1.1 ha-ffmpeg==3.2.0 habluetooth==3.3.2 diff --git a/pyproject.toml b/pyproject.toml index c9b0ed43e51..393a459c70d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,7 +36,7 @@ dependencies = [ "bcrypt==4.1.3", "certifi>=2021.5.30", "ciso8601==2.3.1", - "fnv-hash-fast==0.5.0", + "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration "hass-nabucasa==0.81.1", diff --git a/requirements.txt b/requirements.txt index 35290a5ff17..8aeb1f1d5ec 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,7 @@ awesomeversion==24.6.0 bcrypt==4.1.3 certifi>=2021.5.30 ciso8601==2.3.1 -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 hass-nabucasa==0.81.1 httpx==0.27.0 home-assistant-bluetooth==1.12.2 diff --git a/requirements_all.txt b/requirements_all.txt index 1d177fadeb0..bfe88ff38f6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -902,7 +902,7 @@ flux-led==1.0.4 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 # homeassistant.components.foobot foobot_async==1.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1fee30e8e47..abc52554066 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -761,7 +761,7 @@ flux-led==1.0.4 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 # homeassistant.components.foobot foobot_async==1.0.0 From 22c322fc374a78db4c96471bdcc9ac8e68094128 Mon Sep 17 00:00:00 2001 From: jjlawren Date: Sat, 24 Aug 2024 00:01:25 -0500 Subject: [PATCH 1009/1635] Bump plexapi to 4.15.16 (#124512) --- homeassistant/components/plex/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plex/manifest.json 
b/homeassistant/components/plex/manifest.json index 323bca0477a..6270a6d3496 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["plexapi", "plexwebsocket"], "requirements": [ - "PlexAPI==4.15.14", + "PlexAPI==4.15.16", "plexauth==0.0.6", "plexwebsocket==0.0.14" ], diff --git a/requirements_all.txt b/requirements_all.txt index bfe88ff38f6..51a08f13ec7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -36,7 +36,7 @@ Mastodon.py==1.8.1 Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.14 +PlexAPI==4.15.16 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index abc52554066..612e8c6ba6c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -36,7 +36,7 @@ Mastodon.py==1.8.1 Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.14 +PlexAPI==4.15.16 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 From 8023cbcc38751f02a68211b3fbe378f50693ac07 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 24 Aug 2024 07:04:50 +0200 Subject: [PATCH 1010/1635] Don't raise WLED user flow unique_id check (#124481) --- homeassistant/components/wled/config_flow.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/wled/config_flow.py b/homeassistant/components/wled/config_flow.py index 7853ad2101e..2798e0d46d1 100644 --- a/homeassistant/components/wled/config_flow.py +++ b/homeassistant/components/wled/config_flow.py @@ -46,7 +46,9 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): except WLEDConnectionError: errors["base"] = "cannot_connect" else: - await self.async_set_unique_id(device.info.mac_address) + await self.async_set_unique_id( + device.info.mac_address, raise_on_progress=False + ) self._abort_if_unique_id_configured( updates={CONF_HOST: user_input[CONF_HOST]} ) @@ -56,8 +58,6 @@ class WLEDFlowHandler(ConfigFlow, domain=DOMAIN): CONF_HOST: user_input[CONF_HOST], }, ) - else: - user_input = {} return self.async_show_form( step_id="user", From 664e0258bf902167ab0bc1b75b75dc0d7191b996 Mon Sep 17 00:00:00 2001 From: Lenn <78048721+LennP@users.noreply.github.com> Date: Sat, 24 Aug 2024 07:19:11 +0200 Subject: [PATCH 1011/1635] Bump motionblindsble to 0.1.1 (#124322) --- homeassistant/components/motionblinds_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/motionblinds_ble/manifest.json b/homeassistant/components/motionblinds_ble/manifest.json index 454c873dfa2..d9968cfde4c 100644 --- a/homeassistant/components/motionblinds_ble/manifest.json +++ b/homeassistant/components/motionblinds_ble/manifest.json @@ -14,5 +14,5 @@ "integration_type": "device", "iot_class": "assumed_state", "loggers": ["motionblindsble"], - "requirements": ["motionblindsble==0.1.0"] + "requirements": ["motionblindsble==0.1.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 51a08f13ec7..3a2054b4b6d 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1369,7 +1369,7 @@ mopeka-iot-ble==0.8.0 motionblinds==0.6.24 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.0 +motionblindsble==0.1.1 # homeassistant.components.motioneye motioneye-client==0.3.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 612e8c6ba6c..ec8059c4e6f 100644 --- 
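The WLED change above (and the matching AirGradient change in a later patch of this series) follow the same recipe for user-initiated config flows: pass `raise_on_progress=False` to `async_set_unique_id()` so the step does not abort with `already_in_progress` when a discovery flow has already claimed the unique ID; creating the entry then supersedes that discovery flow. A condensed sketch, with `probe` and `USER_SCHEMA` as placeholder names:

async def async_step_user(self, user_input=None):
    """Handle a flow started by the user."""
    if user_input is not None:
        device = await probe(user_input[CONF_HOST])  # placeholder for the library call
        # Do not raise "already_in_progress" if discovery already claimed this ID.
        await self.async_set_unique_id(device.mac_address, raise_on_progress=False)
        self._abort_if_unique_id_configured(updates={CONF_HOST: user_input[CONF_HOST]})
        return self.async_create_entry(
            title=device.name, data={CONF_HOST: user_input[CONF_HOST]}
        )
    return self.async_show_form(step_id="user", data_schema=USER_SCHEMA)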
a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1135,7 +1135,7 @@ mopeka-iot-ble==0.8.0 motionblinds==0.6.24 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.0 +motionblindsble==0.1.1 # homeassistant.components.motioneye motioneye-client==0.3.14 From 7ae8f4c9d0c261e86404883188b39e5b960ae1e0 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 24 Aug 2024 07:20:00 +0200 Subject: [PATCH 1012/1635] Don't abort airgradient user flow if flow in progress (#124300) --- .../components/airgradient/config_flow.py | 4 ++- .../airgradient/test_config_flow.py | 29 +++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/airgradient/config_flow.py b/homeassistant/components/airgradient/config_flow.py index 93cd0be61c4..70fa8a1755b 100644 --- a/homeassistant/components/airgradient/config_flow.py +++ b/homeassistant/components/airgradient/config_flow.py @@ -92,7 +92,9 @@ class AirGradientConfigFlow(ConfigFlow, domain=DOMAIN): except AirGradientError: errors["base"] = "cannot_connect" else: - await self.async_set_unique_id(current_measures.serial_number) + await self.async_set_unique_id( + current_measures.serial_number, raise_on_progress=False + ) self._abort_if_unique_id_configured() await self.set_configuration_source() return self.async_create_entry( diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 222ac5d04af..8730b18676f 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -253,3 +253,32 @@ async def test_zeroconf_flow_abort_old_firmware(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "invalid_version" + + +async def test_user_flow_works_discovery( + hass: HomeAssistant, + mock_new_airgradient_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow can continue after discovery happened.""" + await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 2 + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: "10.0.0.131"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # Verify the discovery flow was aborted + assert not hass.config_entries.flow.async_progress(DOMAIN) From e26d363b5ef1c32358c33212c6cf85b627e6e66e Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Sat, 24 Aug 2024 07:23:31 +0100 Subject: [PATCH 1013/1635] Convert ring integration to the async ring-doorbell api (#124365) * Bump ring-doorbell to 0.9.0 * Convert ring integration to async ring-doorbell api * Use mock auth fixture class to get token_updater * Fix typo in fixture name --- homeassistant/components/ring/__init__.py | 18 ++++---- homeassistant/components/ring/button.py | 4 +- homeassistant/components/ring/camera.py | 20 ++++----- homeassistant/components/ring/config_flow.py | 3 +- homeassistant/components/ring/coordinator.py | 20 ++++++--- homeassistant/components/ring/entity.py | 18 ++++---- homeassistant/components/ring/light.py | 14 +++--- homeassistant/components/ring/manifest.json | 2 +- homeassistant/components/ring/siren.py | 4 +- homeassistant/components/ring/switch.py | 14 +++--- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/ring/conftest.py | 12 +++++- tests/components/ring/device_mocks.py | 8 ++-- tests/components/ring/test_button.py | 4 +- tests/components/ring/test_camera.py | 35 ++++++++------- tests/components/ring/test_config_flow.py | 32 +++++++------- tests/components/ring/test_init.py | 45 ++++++++++++++++---- tests/components/ring/test_light.py | 7 +-- tests/components/ring/test_siren.py | 13 +++--- tests/components/ring/test_switch.py | 7 +-- 21 files changed, 159 insertions(+), 125 deletions(-) diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index 36c66550ddc..14ab435fda6 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations from dataclasses import dataclass -from functools import partial import logging from typing import Any, cast @@ -13,6 +12,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import APPLICATION_NAME, CONF_TOKEN, __version__ from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, PLATFORMS @@ -35,17 +35,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" def token_updater(token: dict[str, Any]) -> None: - """Handle from sync context when token is updated.""" - hass.loop.call_soon_threadsafe( - partial( - hass.config_entries.async_update_entry, - entry, - data={**entry.data, CONF_TOKEN: token}, - ) + """Handle from async context when token is updated.""" + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_TOKEN: token}, ) auth = Auth( - f"{APPLICATION_NAME}/{__version__}", entry.data[CONF_TOKEN], token_updater + f"{APPLICATION_NAME}/{__version__}", + entry.data[CONF_TOKEN], + token_updater, + http_client_session=async_get_clientsession(hass), ) ring = Ring(auth) diff --git a/homeassistant/components/ring/button.py b/homeassistant/components/ring/button.py index 15d56a8b7cf..c8d7d902d18 100644 --- a/homeassistant/components/ring/button.py +++ b/homeassistant/components/ring/button.py @@ -53,6 +53,6 @@ class RingDoorButton(RingEntity[RingOther], ButtonEntity): self._attr_unique_id = f"{device.id}-{description.key}" @exception_wrap - def press(self) -> None: + async def async_press(self) -> None: """Open the door.""" - 
self._device.open_door() + await self._device.async_open_door() diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index ba75b68434d..b45803f3618 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -159,36 +159,36 @@ class RingCam(RingEntity[RingDoorBell], Camera): if self._last_video_id != self._last_event["id"]: self._image = None - self._video_url = await self.hass.async_add_executor_job(self._get_video) + self._video_url = await self._async_get_video() self._last_video_id = self._last_event["id"] self._expires_at = FORCE_REFRESH_INTERVAL + utcnow @exception_wrap - def _get_video(self) -> str | None: + async def _async_get_video(self) -> str | None: if TYPE_CHECKING: # _last_event is set before calling update so will never be None assert self._last_event event_id = self._last_event.get("id") assert event_id and isinstance(event_id, int) - return self._device.recording_url(event_id) + return await self._device.async_recording_url(event_id) @exception_wrap - def _set_motion_detection_enabled(self, new_state: bool) -> None: + async def _async_set_motion_detection_enabled(self, new_state: bool) -> None: if not self._device.has_capability(MOTION_DETECTION_CAPABILITY): _LOGGER.error( "Entity %s does not have motion detection capability", self.entity_id ) return - self._device.motion_detection = new_state + await self._device.async_set_motion_detection(new_state) self._attr_motion_detection_enabled = new_state - self.schedule_update_ha_state(False) + self.async_schedule_update_ha_state(False) - def enable_motion_detection(self) -> None: + async def async_enable_motion_detection(self) -> None: """Enable motion detection in the camera.""" - self._set_motion_detection_enabled(True) + await self._async_set_motion_detection_enabled(True) - def disable_motion_detection(self) -> None: + async def async_disable_motion_detection(self) -> None: """Disable motion detection in camera.""" - self._set_motion_detection_enabled(False) + await self._async_set_motion_detection_enabled(False) diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index 6239105580d..ee78541dec7 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -34,8 +34,7 @@ async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, auth = Auth(f"{APPLICATION_NAME}/{ha_version}") try: - token = await hass.async_add_executor_job( - auth.fetch_token, + token = await auth.async_fetch_token( data[CONF_USERNAME], data[CONF_PASSWORD], data.get(CONF_2FA), diff --git a/homeassistant/components/ring/coordinator.py b/homeassistant/components/ring/coordinator.py index 1a52fc78988..600743005eb 100644 --- a/homeassistant/components/ring/coordinator.py +++ b/homeassistant/components/ring/coordinator.py @@ -1,8 +1,9 @@ """Data coordinators for the ring integration.""" from asyncio import TaskGroup -from collections.abc import Callable +from collections.abc import Callable, Coroutine import logging +from typing import Any from ring_doorbell import AuthenticationError, Ring, RingDevices, RingError, RingTimeout @@ -16,10 +17,13 @@ _LOGGER = logging.getLogger(__name__) async def _call_api[*_Ts, _R]( - hass: HomeAssistant, target: Callable[[*_Ts], _R], *args: *_Ts, msg_suffix: str = "" + hass: HomeAssistant, + target: Callable[[*_Ts], Coroutine[Any, Any, _R]], + *args: *_Ts, + msg_suffix: str = "", ) -> _R: try: - return await 
hass.async_add_executor_job(target, *args) + return await target(*args) except AuthenticationError as err: # Raising ConfigEntryAuthFailed will cancel future updates # and start a config flow with SOURCE_REAUTH (async_step_reauth) @@ -52,7 +56,9 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): async def _async_update_data(self) -> RingDevices: """Fetch data from API endpoint.""" - update_method: str = "update_data" if self.first_call else "update_devices" + update_method: str = ( + "async_update_data" if self.first_call else "async_update_devices" + ) await _call_api(self.hass, getattr(self.ring_api, update_method)) self.first_call = False devices: RingDevices = self.ring_api.devices() @@ -67,7 +73,7 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): tg.create_task( _call_api( self.hass, - lambda device: device.history(limit=10), + lambda device: device.async_history(limit=10), device, msg_suffix=f" for device {device.name}", # device_id is the mac ) @@ -75,7 +81,7 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): tg.create_task( _call_api( self.hass, - device.update_health_data, + device.async_update_health_data, msg_suffix=f" for device {device.name}", ) ) @@ -100,4 +106,4 @@ class RingNotificationsCoordinator(DataUpdateCoordinator[None]): async def _async_update_data(self) -> None: """Fetch data from API endpoint.""" - await _call_api(self.hass, self.ring_api.update_dings) + await _call_api(self.hass, self.ring_api.async_update_dings) diff --git a/homeassistant/components/ring/entity.py b/homeassistant/components/ring/entity.py index a4275815450..72deb09b76f 100644 --- a/homeassistant/components/ring/entity.py +++ b/homeassistant/components/ring/entity.py @@ -1,6 +1,6 @@ """Base class for Ring entity.""" -from collections.abc import Callable +from collections.abc import Callable, Coroutine from typing import Any, Concatenate, Generic, cast from ring_doorbell import ( @@ -29,25 +29,23 @@ _RingCoordinatorT = TypeVar( def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R]( - func: Callable[Concatenate[_RingBaseEntityT, _P], _R], -) -> Callable[Concatenate[_RingBaseEntityT, _P], _R]: + async_func: Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]]: """Define a wrapper to catch exceptions and raise HomeAssistant errors.""" - def _wrap(self: _RingBaseEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: + async def _wrap(self: _RingBaseEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: try: - return func(self, *args, **kwargs) + return await async_func(self, *args, **kwargs) except AuthenticationError as err: - self.hass.loop.call_soon_threadsafe( - self.coordinator.config_entry.async_start_reauth, self.hass - ) + self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError(err) from err except RingTimeout as err: raise HomeAssistantError( - f"Timeout communicating with API {func}: {err}" + f"Timeout communicating with API {async_func}: {err}" ) from err except RingError as err: raise HomeAssistantError( - f"Error communicating with API{func}: {err}" + f"Error communicating with API{async_func}: {err}" ) from err return _wrap diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index 5747c9e77f7..f7f7f9b44ae 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -80,18 +80,18 @@ class RingLight(RingEntity[RingStickUpCam], 
LightEntity): super()._handle_coordinator_update() @exception_wrap - def _set_light(self, new_state: OnOffState) -> None: + async def _async_set_light(self, new_state: OnOffState) -> None: """Update light state, and causes Home Assistant to correctly update.""" - self._device.lights = new_state + await self._device.async_set_lights(new_state) self._attr_is_on = new_state == OnOffState.ON self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.schedule_update_ha_state() + self.async_schedule_update_ha_state() - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on for 30 seconds.""" - self._set_light(OnOffState.ON) + await self._async_set_light(OnOffState.ON) - def turn_off(self, **kwargs: Any) -> None: + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - self._set_light(OnOffState.OFF) + await self._async_set_light(OnOffState.OFF) diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index a3d15bd711d..23e7b882efe 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -14,5 +14,5 @@ "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], "quality_scale": "silver", - "requirements": ["ring-doorbell[listen]==0.8.12"] + "requirements": ["ring-doorbell[listen]==0.9.0"] } diff --git a/homeassistant/components/ring/siren.py b/homeassistant/components/ring/siren.py index f63f9d33182..665de07a5bb 100644 --- a/homeassistant/components/ring/siren.py +++ b/homeassistant/components/ring/siren.py @@ -47,8 +47,8 @@ class RingChimeSiren(RingEntity[RingChime], SirenEntity): self._attr_unique_id = f"{self._device.id}-siren" @exception_wrap - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Play the test sound on a Ring Chime device.""" tone = kwargs.get(ATTR_TONE) or RingEventKind.DING.value - self._device.test_sound(kind=tone) + await self._device.async_test_sound(kind=tone) diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index 0e032907bae..810011d68c8 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -81,18 +81,18 @@ class SirenSwitch(BaseRingSwitch): super()._handle_coordinator_update() @exception_wrap - def _set_switch(self, new_state: int) -> None: + async def _async_set_switch(self, new_state: int) -> None: """Update switch state, and causes Home Assistant to correctly update.""" - self._device.siren = new_state + await self._device.async_set_siren(new_state) self._attr_is_on = new_state > 0 self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.schedule_update_ha_state() + self.async_schedule_update_ha_state() - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the siren on for 30 seconds.""" - self._set_switch(1) + await self._async_set_switch(1) - def turn_off(self, **kwargs: Any) -> None: + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the siren off.""" - self._set_switch(0) + await self._async_set_switch(0) diff --git a/requirements_all.txt b/requirements_all.txt index 3a2054b4b6d..d2f63ec9ee7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2507,7 +2507,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.12 +ring-doorbell[listen]==0.9.0 # homeassistant.components.fleetgo 
ritassist==0.9.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ec8059c4e6f..b246cb714f2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1986,7 +1986,7 @@ reolink-aio==0.9.7 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.12 +ring-doorbell[listen]==0.9.0 # homeassistant.components.roku rokuecp==0.19.3 diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index cd4447c1a9a..4456a9daa26 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -26,13 +26,23 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry +@pytest.fixture +def mock_ring_init_auth_class(): + """Mock ring_doorbell.Auth in init and return the mock class.""" + with patch("homeassistant.components.ring.Auth", autospec=True) as mock_ring_auth: + mock_ring_auth.return_value.async_fetch_token.return_value = { + "access_token": "mock-token" + } + yield mock_ring_auth + + @pytest.fixture def mock_ring_auth(): """Mock ring_doorbell.Auth.""" with patch( "homeassistant.components.ring.config_flow.Auth", autospec=True ) as mock_ring_auth: - mock_ring_auth.return_value.fetch_token.return_value = { + mock_ring_auth.return_value.async_fetch_token.return_value = { "access_token": "mock-token" } yield mock_ring_auth.return_value diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index 88ad37bdd36..d2671c3896d 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -10,7 +10,7 @@ Mocks the api calls on the devices such as history() and health(). from copy import deepcopy from datetime import datetime from time import time -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock from ring_doorbell import ( RingCapability, @@ -132,18 +132,18 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): # Configure common methods mock_device.has_capability.side_effect = has_capability - mock_device.update_health_data.side_effect = lambda: update_health_data( + mock_device.async_update_health_data.side_effect = lambda: update_health_data( DOORBOT_HEALTH if device_family != "chimes" else CHIME_HEALTH ) # Configure methods based on capability if has_capability(RingCapability.HISTORY): mock_device.configure_mock(last_history=[]) - mock_device.history.side_effect = lambda *_, **__: update_history_data( + mock_device.async_history.side_effect = lambda *_, **__: update_history_data( DOORBOT_HISTORY if device_family != "other" else INTERCOM_HISTORY ) if has_capability(RingCapability.VIDEO): - mock_device.recording_url = MagicMock(return_value="http://dummy.url") + mock_device.async_recording_url = AsyncMock(return_value="http://dummy.url") if has_capability(RingCapability.MOTION_DETECTION): mock_device.configure_mock( diff --git a/tests/components/ring/test_button.py b/tests/components/ring/test_button.py index 6fef3295159..946a893c8ad 100644 --- a/tests/components/ring/test_button.py +++ b/tests/components/ring/test_button.py @@ -28,11 +28,11 @@ async def test_button_opens_door( await setup_platform(hass, Platform.BUTTON) mock_intercom = mock_ring_devices.get_device(185036587) - mock_intercom.open_door.assert_not_called() + mock_intercom.async_open_door.assert_not_called() await hass.services.async_call( "button", "press", {"entity_id": "button.ingress_open_door"}, blocking=True ) await hass.async_block_till_done(wait_background_tasks=True) - 
mock_intercom.open_door.assert_called_once() + mock_intercom.async_open_door.assert_called_once() diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 49b7dc10f05..619fb52846c 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,6 +1,6 @@ """The tests for the Ring switch platform.""" -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, patch from aiohttp.test_utils import make_mocked_request from freezegun.api import FrozenDateTimeFactory @@ -180,8 +180,7 @@ async def test_motion_detection_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_camera_mock = mock_ring_devices.get_device(765432) - p = PropertyMock(side_effect=exception_type) - type(front_camera_mock).motion_detection = p + front_camera_mock.async_set_motion_detection.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -191,7 +190,7 @@ async def test_motion_detection_errors_when_turned_on( blocking=True, ) await hass.async_block_till_done() - p.assert_called_once() + front_camera_mock.async_set_motion_detection.assert_called_once() assert ( any( flow @@ -212,7 +211,7 @@ async def test_camera_handle_mjpeg_stream( await setup_platform(hass, Platform.CAMERA) front_camera_mock = mock_ring_devices.get_device(765432) - front_camera_mock.recording_url.return_value = None + front_camera_mock.async_recording_url.return_value = None state = hass.states.get("camera.front") assert state is not None @@ -220,8 +219,8 @@ async def test_camera_handle_mjpeg_stream( mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) # history not updated yet - front_camera_mock.history.assert_not_called() - front_camera_mock.recording_url.assert_not_called() + front_camera_mock.async_history.assert_not_called() + front_camera_mock.async_recording_url.assert_not_called() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None @@ -229,30 +228,30 @@ async def test_camera_handle_mjpeg_stream( freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.history.assert_called_once() - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_history.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None # Stop the history updating so we can update the values manually - front_camera_mock.history = MagicMock() + front_camera_mock.async_history = AsyncMock() front_camera_mock.last_history[0]["recording"]["status"] = "not ready" freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None # If the history id hasn't changed the camera will not check again for the video url # until the FORCE_REFRESH_INTERVAL has passed front_camera_mock.last_history[0]["recording"]["status"] = "ready" - front_camera_mock.recording_url = MagicMock(return_value="http://dummy.url") + front_camera_mock.async_recording_url = AsyncMock(return_value="http://dummy.url") 
freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.recording_url.assert_not_called() + front_camera_mock.async_recording_url.assert_not_called() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None @@ -260,7 +259,7 @@ async def test_camera_handle_mjpeg_stream( freezer.tick(FORCE_REFRESH_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() # Now the stream should be returned stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) @@ -290,8 +289,8 @@ async def test_camera_image( assert state is not None # history not updated yet - front_camera_mock.history.assert_not_called() - front_camera_mock.recording_url.assert_not_called() + front_camera_mock.async_history.assert_not_called() + front_camera_mock.async_recording_url.assert_not_called() with ( patch( "homeassistant.components.ring.camera.ffmpeg.async_get_image", @@ -305,8 +304,8 @@ async def test_camera_image( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) # history updated so image available - front_camera_mock.history.assert_called_once() - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_history.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() with patch( "homeassistant.components.ring.camera.ffmpeg.async_get_image", diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py index 2420bb9cc50..bbaec2e37c4 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -57,7 +57,7 @@ async def test_form_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_ring_auth.fetch_token.side_effect = error_type + mock_ring_auth.async_fetch_token.side_effect = error_type result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": "hello@home-assistant.io", "password": "test-password"}, @@ -79,7 +79,7 @@ async def test_form_2fa( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -88,20 +88,20 @@ async def test_form_2fa( }, ) await hass.async_block_till_done() - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", None ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "2fa" - mock_ring_auth.fetch_token.reset_mock(side_effect=True) - mock_ring_auth.fetch_token.return_value = "new-foobar" + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={"2fa": "123456"}, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", "123456" ) assert result3["type"] is FlowResultType.CREATE_ENTRY @@ -128,7 +128,7 @@ async def test_reauth( [result] = flows assert 
result["step_id"] == "reauth_confirm" - mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -136,19 +136,19 @@ async def test_reauth( }, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", None ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "2fa" - mock_ring_auth.fetch_token.reset_mock(side_effect=True) - mock_ring_auth.fetch_token.return_value = "new-foobar" + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={"2fa": "123456"}, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", "123456" ) assert result3["type"] is FlowResultType.ABORT @@ -185,7 +185,7 @@ async def test_reauth_error( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_ring_auth.fetch_token.side_effect = error_type + mock_ring_auth.async_fetch_token.side_effect = error_type result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -194,15 +194,15 @@ async def test_reauth_error( ) await hass.async_block_till_done() - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "error_fake_password", None ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": errors_msg} # Now test reauth can go on to succeed - mock_ring_auth.fetch_token.reset_mock(side_effect=True) - mock_ring_auth.fetch_token.return_value = "new-foobar" + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={ @@ -210,7 +210,7 @@ async def test_reauth_error( }, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", None ) assert result3["type"] is FlowResultType.ABORT diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index d8529e874b9..4ab3e1bd366 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.ring import DOMAIN from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_USERNAME +from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.setup import async_setup_component @@ -42,11 +42,11 @@ async def test_setup_entry_device_update( """Test devices are updating after setup entry.""" front_door_doorbell = mock_ring_devices.get_device(987654) - front_door_doorbell.history.assert_not_called() + front_door_doorbell.async_history.assert_not_called() freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await 
hass.async_block_till_done(wait_background_tasks=True) - front_door_doorbell.history.assert_called_once() + front_door_doorbell.async_history.assert_called_once() async def test_auth_failed_on_setup( @@ -56,7 +56,7 @@ async def test_auth_failed_on_setup( ) -> None: """Test auth failure on setup entry.""" mock_config_entry.add_to_hass(hass) - mock_ring_client.update_data.side_effect = AuthenticationError + mock_ring_client.async_update_data.side_effect = AuthenticationError assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -90,7 +90,7 @@ async def test_error_on_setup( """Test non-auth errors on setup entry.""" mock_config_entry.add_to_hass(hass) - mock_ring_client.update_data.side_effect = error_type + mock_ring_client.async_update_data.side_effect = error_type await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -113,7 +113,7 @@ async def test_auth_failure_on_global_update( await hass.async_block_till_done() assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) - mock_ring_client.update_devices.side_effect = AuthenticationError + mock_ring_client.async_update_devices.side_effect = AuthenticationError freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -139,7 +139,7 @@ async def test_auth_failure_on_device_update( assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_door_doorbell = mock_ring_devices.get_device(987654) - front_door_doorbell.history.side_effect = AuthenticationError + front_door_doorbell.async_history.side_effect = AuthenticationError freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -178,7 +178,7 @@ async def test_error_on_global_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_ring_client.update_devices.side_effect = error_type + mock_ring_client.async_update_devices.side_effect = error_type freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -219,7 +219,7 @@ async def test_error_on_device_update( await hass.async_block_till_done() front_door_doorbell = mock_ring_devices.get_device(765432) - front_door_doorbell.history.side_effect = error_type + front_door_doorbell.async_history.side_effect = error_type freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -386,3 +386,30 @@ async def test_update_unique_id_no_update( assert entity_migrated assert entity_migrated.unique_id == correct_unique_id assert "Fixing non string unique id" not in caplog.text + + +async def test_token_updated( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_ring_client, + mock_ring_init_auth_class, +) -> None: + """Test that the token value is updated in the config entry. + + This simulates the api calling the callback. 
+ """ + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert mock_ring_init_auth_class.call_count == 1 + token_updater = mock_ring_init_auth_class.call_args.args[2] + assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "mock-token"} + + mock_ring_client.async_update_devices.side_effect = lambda: token_updater( + {"access_token": "new-mock-token"} + ) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "new-mock-token"} diff --git a/tests/components/ring/test_light.py b/tests/components/ring/test_light.py index c2d21a22951..22ed4a31cf8 100644 --- a/tests/components/ring/test_light.py +++ b/tests/components/ring/test_light.py @@ -1,7 +1,5 @@ """The tests for the Ring light platform.""" -from unittest.mock import PropertyMock - import pytest import ring_doorbell @@ -109,15 +107,14 @@ async def test_light_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_light_mock = mock_ring_devices.get_device(765432) - p = PropertyMock(side_effect=exception_type) - type(front_light_mock).lights = p + front_light_mock.async_set_lights.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( "light", "turn_on", {"entity_id": "light.front_light"}, blocking=True ) await hass.async_block_till_done() - p.assert_called_once() + front_light_mock.async_set_lights.assert_called_once() assert ( any( diff --git a/tests/components/ring/test_siren.py b/tests/components/ring/test_siren.py index 695b54c3971..e71dd1e6e77 100644 --- a/tests/components/ring/test_siren.py +++ b/tests/components/ring/test_siren.py @@ -49,7 +49,7 @@ async def test_default_ding_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -71,7 +71,7 @@ async def test_turn_on_plays_default_chime( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -95,7 +95,7 @@ async def test_explicit_ding_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -117,7 +117,7 @@ async def test_motion_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="motion") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="motion") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -146,7 +146,7 @@ async def test_siren_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) downstairs_chime_mock = 
mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.side_effect = exception_type + downstairs_chime_mock.async_test_sound.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -155,7 +155,8 @@ async def test_siren_errors_when_turned_on( {"entity_id": "siren.downstairs_siren", "tone": "motion"}, blocking=True, ) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="motion") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="motion") + await hass.async_block_till_done() assert ( any( flow diff --git a/tests/components/ring/test_switch.py b/tests/components/ring/test_switch.py index 405f20420b7..f7aa885342a 100644 --- a/tests/components/ring/test_switch.py +++ b/tests/components/ring/test_switch.py @@ -1,7 +1,5 @@ """The tests for the Ring switch platform.""" -from unittest.mock import PropertyMock - import pytest import ring_doorbell @@ -116,15 +114,14 @@ async def test_switch_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_siren_mock = mock_ring_devices.get_device(765432) - p = PropertyMock(side_effect=exception_type) - type(front_siren_mock).siren = p + front_siren_mock.async_set_siren.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.front_siren"}, blocking=True ) await hass.async_block_till_done() - p.assert_called_once() + front_siren_mock.async_set_siren.assert_called_once() assert ( any( flow From b7170c78a57b71c245bc2bc4e7d298b73e04ff50 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sat, 24 Aug 2024 01:44:12 -0500 Subject: [PATCH 1014/1635] Bump yalexs to 8.4.0 (#124520) --- homeassistant/components/august/config_flow.py | 6 +++--- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/august/config_flow.py b/homeassistant/components/august/config_flow.py index 3523a4f7c39..2a1a20a9dc4 100644 --- a/homeassistant/components/august/config_flow.py +++ b/homeassistant/components/august/config_flow.py @@ -9,7 +9,7 @@ from typing import Any import aiohttp import voluptuous as vol from yalexs.authenticator_common import ValidationResult -from yalexs.const import BRANDS, DEFAULT_BRAND +from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -118,7 +118,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): vol.Required( CONF_BRAND, default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND), - ): vol.In(BRANDS), + ): vol.In(BRANDS_WITHOUT_OAUTH), vol.Required( CONF_LOGIN_METHOD, default=self._user_auth_details.get( @@ -208,7 +208,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): vol.Required( CONF_BRAND, default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND), - ): vol.In(BRANDS), + ): vol.In(BRANDS_WITHOUT_OAUTH), vol.Required(CONF_PASSWORD): str, } ), diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 49c23dac660..abe9dc707c3 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", 
"loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.3.3", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.4.0", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index d2f63ec9ee7..16d5e05bdbd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2968,7 +2968,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.3.3 +yalexs==8.4.0 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b246cb714f2..859cb375b37 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2348,7 +2348,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.3.3 +yalexs==8.4.0 # homeassistant.components.yeelight yeelight==0.7.14 From 9e1318425631b2e3629bd915434d6bbadc0fd2d9 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Sat, 24 Aug 2024 09:51:09 +0200 Subject: [PATCH 1015/1635] Add deCONZ Air Purifier Fan Mode select entity support (#124513) * Add deCONZ Air Purifier Fan Mode select entity support * Remove unused constants --- homeassistant/components/deconz/select.py | 45 +++++++++++++ .../deconz/snapshots/test_select.ambr | 65 +++++++++++++++++++ tests/components/deconz/test_select.py | 37 +++++++++++ 3 files changed, 147 insertions(+) diff --git a/homeassistant/components/deconz/select.py b/homeassistant/components/deconz/select.py index dad3ba9d78d..7f3f8cca060 100644 --- a/homeassistant/components/deconz/select.py +++ b/homeassistant/components/deconz/select.py @@ -3,6 +3,7 @@ from __future__ import annotations from pydeconz.models.event import EventType +from pydeconz.models.sensor.air_purifier import AirPurifier, AirPurifierFanMode from pydeconz.models.sensor.presence import ( Presence, PresenceConfigDeviceMode, @@ -36,6 +37,17 @@ async def async_setup_entry( hub = DeconzHub.get_hub(hass, config_entry) hub.entities[DOMAIN] = set() + @callback + def async_add_air_purifier_sensor(_: EventType, sensor_id: str) -> None: + """Add air purifier select entity from deCONZ.""" + sensor = hub.api.sensors.air_purifier[sensor_id] + async_add_entities([DeconzAirPurifierFanMode(sensor, hub)]) + + hub.register_platform_add_device_callback( + async_add_air_purifier_sensor, + hub.api.sensors.air_purifier, + ) + @callback def async_add_presence_sensor(_: EventType, sensor_id: str) -> None: """Add presence select entity from deCONZ.""" @@ -55,6 +67,39 @@ async def async_setup_entry( ) +class DeconzAirPurifierFanMode(DeconzDevice[AirPurifier], SelectEntity): + """Representation of a deCONZ air purifier fan mode entity.""" + + _name_suffix = "Fan Mode" + unique_id_suffix = "fan_mode" + _update_key = "mode" + + _attr_entity_category = EntityCategory.CONFIG + _attr_options = [ + AirPurifierFanMode.OFF.value, + AirPurifierFanMode.AUTO.value, + AirPurifierFanMode.SPEED_1.value, + AirPurifierFanMode.SPEED_2.value, + AirPurifierFanMode.SPEED_3.value, + AirPurifierFanMode.SPEED_4.value, + AirPurifierFanMode.SPEED_5.value, + ] + + TYPE = DOMAIN + + @property + def current_option(self) -> str: + """Return the selected entity option to represent the entity state.""" + return self._device.fan_mode.value + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.hub.api.sensors.air_purifier.set_config( + id=self._device.resource_id, + fan_mode=AirPurifierFanMode(option), + ) + + class DeconzPresenceDeviceModeSelect(DeconzDevice[Presence], SelectEntity): 
"""Representation of a deCONZ presence device mode entity.""" diff --git a/tests/components/deconz/snapshots/test_select.ambr b/tests/components/deconz/snapshots/test_select.ambr index 12966709947..997eab0901f 100644 --- a/tests/components/deconz/snapshots/test_select.ambr +++ b/tests/components/deconz/snapshots/test_select.ambr @@ -506,3 +506,68 @@ 'state': 'medium', }) # --- +# name: test_select[sensor_payload3-expected3][select.ikea_starkvind_fan_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'auto', + 'speed_1', + 'speed_2', + 'speed_3', + 'speed_4', + 'speed_5', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.ikea_starkvind_fan_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IKEA Starkvind Fan Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '0c:43:14:ff:fe:6c:20:12-01-fc7d-fan_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload3-expected3][select.ikea_starkvind_fan_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'IKEA Starkvind Fan Mode', + 'options': list([ + 'off', + 'auto', + 'speed_1', + 'speed_2', + 'speed_3', + 'speed_4', + 'speed_5', + ]), + }), + 'context': , + 'entity_id': 'select.ikea_starkvind_fan_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'speed_1', + }) +# --- diff --git a/tests/components/deconz/test_select.py b/tests/components/deconz/test_select.py index 900283d88bb..c677853841c 100644 --- a/tests/components/deconz/test_select.py +++ b/tests/components/deconz/test_select.py @@ -4,6 +4,7 @@ from collections.abc import Callable from typing import Any from unittest.mock import patch +from pydeconz.models.sensor.air_purifier import AirPurifierFanMode from pydeconz.models.sensor.presence import ( PresenceConfigDeviceMode, PresenceConfigTriggerDistance, @@ -119,6 +120,42 @@ TEST_DATA = [ "request_data": {"triggerdistance": "far"}, }, ), + ( # Air Purifier Fan Mode + { + "config": { + "filterlifetime": 259200, + "ledindication": True, + "locked": False, + "mode": "speed_1", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "de26d19d9e91b2db3ded6ee7ab6b6a4b", + "lastannounced": None, + "lastseen": "2024-08-07T18:27Z", + "manufacturername": "IKEA of Sweden", + "modelid": "STARKVIND Air purifier", + "name": "IKEA Starkvind", + "productid": "E2007", + "state": { + "deviceruntime": 73405, + "filterruntime": 73405, + "lastupdated": "2024-08-07T18:27:52.543", + "replacefilter": False, + "speed": 20, + }, + "swversion": "1.1.001", + "type": "ZHAAirPurifier", + "uniqueid": "0c:43:14:ff:fe:6c:20:12-01-fc7d", + }, + { + "entity_id": "select.ikea_starkvind_fan_mode", + "option": AirPurifierFanMode.AUTO.value, + "request": "/sensors/0/config", + "request_data": {"mode": "auto"}, + }, + ), ] From b26446bd888dd34c3916937c0a2dbfff804b9af6 Mon Sep 17 00:00:00 2001 From: Michael Arthur Date: Sat, 24 Aug 2024 22:53:49 +1200 Subject: [PATCH 1016/1635] Add returning activity to ecovacs lawn mower (#124519) --- homeassistant/components/ecovacs/lawn_mower.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/homeassistant/components/ecovacs/lawn_mower.py b/homeassistant/components/ecovacs/lawn_mower.py index a1dc8acf3a2..bf773207dc5 100644 --- a/homeassistant/components/ecovacs/lawn_mower.py +++ b/homeassistant/components/ecovacs/lawn_mower.py @@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__name__) _STATE_TO_MOWER_STATE = { State.IDLE: LawnMowerActivity.PAUSED, State.CLEANING: LawnMowerActivity.MOWING, - State.RETURNING: LawnMowerActivity.MOWING, + State.RETURNING: LawnMowerActivity.RETURNING, State.DOCKED: LawnMowerActivity.DOCKED, State.ERROR: LawnMowerActivity.ERROR, State.PAUSED: LawnMowerActivity.PAUSED, From 5dc03752ca44335c0a3e9b84698d712f7cb8feb2 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Sat, 24 Aug 2024 12:59:08 +0200 Subject: [PATCH 1017/1635] Add type hinting to Bang & Olufsen test (#124423) --- tests/components/bang_olufsen/test_media_player.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 3dd3c43f5a5..9928a626a4f 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -105,7 +105,9 @@ async def test_initialization( async def test_async_update_sources_audio_only( - hass: HomeAssistant, mock_config_entry, mock_mozart_client + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test sources are correctly handled in _async_update_sources.""" mock_mozart_client.get_remote_menu.return_value = {} @@ -118,7 +120,9 @@ async def test_async_update_sources_audio_only( async def test_async_update_sources_outdated_api( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test fallback sources are correctly handled in _async_update_sources.""" mock_mozart_client.get_available_sources.side_effect = ValueError() From d7d35f74f2db78073fdebf00c2563575b92cffa4 Mon Sep 17 00:00:00 2001 From: cnico Date: Sat, 24 Aug 2024 17:09:52 +0200 Subject: [PATCH 1018/1635] Bump flipr-api to 1.6.0 (#124522) bump flipr-api to 1.6.0 --- homeassistant/components/flipr/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/flipr/manifest.json b/homeassistant/components/flipr/manifest.json index 898cd640349..1f9b04e3d57 100644 --- a/homeassistant/components/flipr/manifest.json +++ b/homeassistant/components/flipr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/flipr", "iot_class": "cloud_polling", "loggers": ["flipr_api"], - "requirements": ["flipr-api==1.5.1"] + "requirements": ["flipr-api==1.6.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 16d5e05bdbd..31d5b68e7e7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -895,7 +895,7 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.5.1 +flipr-api==1.6.0 # homeassistant.components.flux_led flux-led==1.0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 859cb375b37..a5fd9ee2239 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -754,7 +754,7 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.5.1 +flipr-api==1.6.0 # homeassistant.components.flux_led flux-led==1.0.4 From 
32f75597a9e3a48348c4c26b564643c5bd1d7295 Mon Sep 17 00:00:00 2001 From: red-island <114957153+red-island@users.noreply.github.com> Date: Sat, 24 Aug 2024 17:12:32 +0200 Subject: [PATCH 1019/1635] Fix tilt calculation for HomeKit cover devices (#123532) --- .../components/homekit_controller/cover.py | 96 +- .../fixtures/somfy_venetian_blinds.json | 146 ++ .../fixtures/velux_active_netatmo_co2.json | 162 ++ .../fixtures/velux_window.json | 122 + .../fixtures/velux_window_cover.json | 122 + .../snapshots/test_init.ambr | 2211 +++++++++++++++++ .../homekit_controller/test_cover.py | 134 + 7 files changed, 2943 insertions(+), 50 deletions(-) create mode 100644 tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json create mode 100644 tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json create mode 100644 tests/components/homekit_controller/fixtures/velux_window.json create mode 100644 tests/components/homekit_controller/fixtures/velux_window_cover.json diff --git a/homeassistant/components/homekit_controller/cover.py b/homeassistant/components/homekit_controller/cover.py index d0944db38f8..0eebb72c988 100644 --- a/homeassistant/components/homekit_controller/cover.py +++ b/homeassistant/components/homekit_controller/cover.py @@ -214,34 +214,32 @@ class HomeKitWindowCover(HomeKitEntity, CoverEntity): @property def current_cover_tilt_position(self) -> int | None: """Return current position of cover tilt.""" - tilt_position = self.service.value(CharacteristicsTypes.VERTICAL_TILT_CURRENT) - if not tilt_position: - tilt_position = self.service.value( - CharacteristicsTypes.HORIZONTAL_TILT_CURRENT - ) - if tilt_position is None: - return None - # Recalculate to convert from arcdegree scale to percentage scale. if self.is_vertical_tilt: - scale = 0.9 - if ( - self.service[CharacteristicsTypes.VERTICAL_TILT_CURRENT].minValue == -90 - and self.service[CharacteristicsTypes.VERTICAL_TILT_CURRENT].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position / scale) + char = self.service[CharacteristicsTypes.VERTICAL_TILT_CURRENT] elif self.is_horizontal_tilt: - scale = 0.9 - if ( - self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].minValue - == -90 - and self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position / scale) - return tilt_position + char = self.service[CharacteristicsTypes.HORIZONTAL_TILT_CURRENT] + else: + return None + + # Recalculate tilt_position. Convert arc to percent scale based on min/max values. + tilt_position = char.value + min_value = char.minValue + max_value = char.maxValue + total_range = int(max_value or 0) - int(min_value or 0) + + if ( + tilt_position is None + or min_value is None + or max_value is None + or total_range <= 0 + ): + return None + + # inverted scale + if min_value == -90 and max_value == 0: + return abs(int(100 / total_range * (tilt_position - max_value))) + # normal scale + return abs(int(100 / total_range * (tilt_position - min_value))) async def async_stop_cover(self, **kwargs: Any) -> None: """Send hold command.""" @@ -265,34 +263,32 @@ class HomeKitWindowCover(HomeKitEntity, CoverEntity): async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover tilt to a specific position.""" tilt_position = kwargs[ATTR_TILT_POSITION] + if self.is_vertical_tilt: - # Recalculate to convert from percentage scale to arcdegree scale. 
- scale = 0.9 - if ( - self.service[CharacteristicsTypes.VERTICAL_TILT_TARGET].minValue == -90 - and self.service[CharacteristicsTypes.VERTICAL_TILT_TARGET].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position * scale) - await self.async_put_characteristics( - {CharacteristicsTypes.VERTICAL_TILT_TARGET: tilt_position} - ) + char = self.service[CharacteristicsTypes.VERTICAL_TILT_TARGET] elif self.is_horizontal_tilt: - # Recalculate to convert from percentage scale to arcdegree scale. - scale = 0.9 - if ( - self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].minValue - == -90 - and self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position * scale) - await self.async_put_characteristics( - {CharacteristicsTypes.HORIZONTAL_TILT_TARGET: tilt_position} + char = self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET] + + # Calculate tilt_position. Convert from 1-100 scale to arc degree scale respecting possible min/max Values. + min_value = char.minValue + max_value = char.maxValue + if min_value is None or max_value is None: + raise ValueError( + "Entity does not provide minValue and maxValue for the tilt" ) + # inverted scale + if min_value == -90 and max_value == 0: + tilt_position = int( + tilt_position / 100 * (min_value - max_value) + max_value + ) + else: + tilt_position = int( + tilt_position / 100 * (max_value - min_value) + min_value + ) + + await self.async_put_characteristics({char.type: tilt_position}) + @property def extra_state_attributes(self) -> dict[str, Any]: """Return the optional state attributes.""" diff --git a/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json b/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json new file mode 100644 index 00000000000..65d3126cc4b --- /dev/null +++ b/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json @@ -0,0 +1,146 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX Internal Cover", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX Internal Cover", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "0.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 15, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008C-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Venetian Blinds", + "description": "Name", + 
"maxLen": 64 + }, + { + "type": "0000007C-0000-1000-8000-0026BB765291", + "iid": 11, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "0000006D-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "00000072-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Position State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + }, + { + "type": "0000006C-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr", "ev"], + "format": "int", + "value": 90, + "description": "Current Horizontal Tilt Angle", + "unit": "arcdegrees", + "minValue": -90, + "maxValue": 90, + "minStep": 1 + }, + { + "type": "0000007B-0000-1000-8000-0026BB765291", + "iid": 14, + "perms": ["pr", "pw", "ev"], + "format": "int", + "value": 90, + "description": "Target Horizontal Tilt Angle", + "unit": "arcdegrees", + "minValue": -90, + "maxValue": 90, + "minStep": 1 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json b/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json new file mode 100644 index 00000000000..80b2b34648e --- /dev/null +++ b/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json @@ -0,0 +1,162 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX Sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX Sensor", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "16.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 18, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008A-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Temperature sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000011-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "float", + "value": 23.9, + "description": "Current Temperature", + "unit": "celsius", + "minValue": 0.0, + "maxValue": 50.0, + "minStep": 0.1 + } + ] + }, + { + "iid": 11, + "type": "00000082-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": 
"00000023-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr"], + "format": "string", + "value": "Humidity sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000010-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr", "ev"], + "format": "float", + "value": 69.0, + "description": "Current Relative Humidity", + "unit": "percentage", + "minValue": 0.0, + "maxValue": 100.0, + "minStep": 1.0 + } + ] + }, + { + "iid": 14, + "type": "00000097-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 15, + "perms": ["pr"], + "format": "string", + "value": "Carbon Dioxide sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000092-0000-1000-8000-0026BB765291", + "iid": 16, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Carbon Dioxide Detected", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "00000093-0000-1000-8000-0026BB765291", + "iid": 17, + "perms": ["pr", "ev"], + "format": "float", + "value": 1124.0, + "description": "Carbon Dioxide Level", + "minValue": 0.0, + "maxValue": 5000.0 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/fixtures/velux_window.json b/tests/components/homekit_controller/fixtures/velux_window.json new file mode 100644 index 00000000000..4d9a09344bb --- /dev/null +++ b/tests/components/homekit_controller/fixtures/velux_window.json @@ -0,0 +1,122 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX Window", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX Window", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "0.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008B-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Roof Window", + "description": "Name", + "maxLen": 64 + }, + { + "type": "0000007C-0000-1000-8000-0026BB765291", + "iid": 11, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "0000006D-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, 
+ { + "type": "00000072-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Position State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/fixtures/velux_window_cover.json b/tests/components/homekit_controller/fixtures/velux_window_cover.json new file mode 100644 index 00000000000..d95fbbd42bf --- /dev/null +++ b/tests/components/homekit_controller/fixtures/velux_window_cover.json @@ -0,0 +1,122 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX External Cover", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX External Cover", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "15.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008C-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Awning Blinds", + "description": "Name", + "maxLen": 64 + }, + { + "type": "0000007C-0000-1000-8000-0026BB765291", + "iid": 11, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "0000006D-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "00000072-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Position State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 078ef792a55..6a0fead65d3 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -17636,6 +17636,342 @@ }), ]) # --- +# name: test_snapshots[somfy_venetian_blinds] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 
'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'current_tilt_position': 100, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- +# name: test_snapshots[velux_active_netatmo_co2] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Sensor', + 'model_id': None, + 'name': 'VELUX Sensor', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '16.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_sensor_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Sensor Identify', + }), + 'entity_id': 'button.velux_sensor_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_14', + 'unit_of_measurement': 'ppm', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'state': '1124.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Humidity sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_11', + 'unit_of_measurement': '%', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'VELUX Sensor Humidity sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'state': '69.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Temperature sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'VELUX Sensor Temperature sensor', + 'state_class': , + 
'unit_of_measurement': , + }), + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'state': '23.9', + }), + }), + ]), + }), + ]) +# --- # name: test_snapshots[velux_gateway] list([ dict({ @@ -18043,6 +18379,1881 @@ }), ]) # --- +# name: test_snapshots[velux_somfy_venetian_blinds] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:5', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_5_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_5_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:8', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 
'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_8_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_8_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 45, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds_2', + 'state': 'open', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:11', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_11_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External 
Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify_3', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_11_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds_3', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:12', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_12_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify_4', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_12_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 
'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds_4', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Gateway', + 'model_id': None, + 'name': 'VELUX Gateway', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '132.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_gateway_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Gateway Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_6', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Gateway Identify', + }), + 'entity_id': 'button.velux_gateway_identify', + 'state': 'unknown', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:9', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_9_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': 
None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_9_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'current_tilt_position': 100, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:13', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_13_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_13_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 100, + 'current_tilt_position': 0, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 
'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', + 'state': 'open', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:14', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_14_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify_3', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_14_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'current_tilt_position': 100, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:15', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': 
None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_15_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify_4', + 'state': 'unknown', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:2', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Sensor', + 'model_id': None, + 'name': 'VELUX Sensor', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '16.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_sensor_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Sensor Identify', + }), + 'entity_id': 'button.velux_sensor_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_14', + 'unit_of_measurement': 'ppm', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'state': '1124.0', + }), + }), + dict({ + 'entry': 
dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Humidity sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_11', + 'unit_of_measurement': '%', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'VELUX Sensor Humidity sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'state': '69.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Temperature sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_8', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'VELUX Sensor Temperature sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'state': '23.9', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:3', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Sensor', + 'model_id': None, + 'name': 'VELUX Sensor', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '16.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_sensor_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Sensor Identify', 
+ }), + 'entity_id': 'button.velux_sensor_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_14', + 'unit_of_measurement': 'ppm', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', + 'state': '1074.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Humidity sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_11', + 'unit_of_measurement': '%', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'VELUX Sensor Humidity sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', + 'state': '64.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Temperature sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_8', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'VELUX Sensor Temperature sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', + 'state': '24.5', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + 
'00:00:00:00:00:00:aid:4', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Window', + 'model_id': None, + 'name': 'VELUX Window', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_window_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_4_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Window Identify', + }), + 'entity_id': 'button.velux_window_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_window_roof_window', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Roof Window', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_4_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'device_class': 'window', + 'friendly_name': 'VELUX Window Roof Window', + 'supported_features': , + }), + 'entity_id': 'cover.velux_window_roof_window', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:7', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Window', + 'model_id': None, + 'name': 'VELUX Window', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_window_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 
0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_7_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Window Identify', + }), + 'entity_id': 'button.velux_window_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_window_roof_window_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Roof Window', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_7_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'device_class': 'window', + 'friendly_name': 'VELUX Window Roof Window', + 'supported_features': , + }), + 'entity_id': 'cover.velux_window_roof_window_2', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- +# name: test_snapshots[velux_window] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Window', + 'model_id': None, + 'name': 'VELUX Window', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_window_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Window Identify', + }), + 'entity_id': 'button.velux_window_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_window_roof_window', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Roof Window', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 
'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'device_class': 'window', + 'friendly_name': 'VELUX Window Roof Window', + 'supported_features': , + }), + 'entity_id': 'cover.velux_window_roof_window', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- +# name: test_snapshots[velux_window_cover] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- # name: test_snapshots[vocolinc_flowerbud] list([ dict({ diff --git a/tests/components/homekit_controller/test_cover.py b/tests/components/homekit_controller/test_cover.py index 7415e97a4b1..11870c801e1 100644 --- a/tests/components/homekit_controller/test_cover.py +++ b/tests/components/homekit_controller/test_cover.py @@ -116,6 +116,32 @@ def create_window_covering_service_with_none_tilt(accessory: Accessory) -> None: 
tilt_target.maxValue = 0 +def create_window_covering_service_with_no_minmax_tilt(accessory): + """Apply use values (-90 to 90) if min/max not provided.""" + service = create_window_covering_service(accessory) + + tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) + tilt_current.value = 0 + + tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET) + tilt_target.value = 0 + + +def create_window_covering_service_with_full_range_tilt(accessory): + """Somfi Velux Integration.""" + service = create_window_covering_service(accessory) + + tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) + tilt_current.value = 0 + tilt_current.minValue = -90 + tilt_current.maxValue = 90 + + tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET) + tilt_target.value = 0 + tilt_target.minValue = -90 + tilt_target.maxValue = 90 + + async def test_change_window_cover_state( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: @@ -267,6 +293,40 @@ async def test_read_window_cover_tilt_missing_tilt( assert state.state != STATE_UNAVAILABLE +async def test_read_window_cover_tilt_full_range( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that horizontal tilt is handled correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_full_range_tilt + ) + + await helper.async_update( + ServicesTypes.WINDOW_COVERING, + {CharacteristicsTypes.HORIZONTAL_TILT_CURRENT: 0}, + ) + state = await helper.poll_and_get_state() + # Expect converted value from arcdegree scale to percentage scale. + assert state.attributes["current_tilt_position"] == 50 + + +async def test_read_window_cover_tilt_no_minmax( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that horizontal tilt is handled correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_no_minmax_tilt + ) + + await helper.async_update( + ServicesTypes.WINDOW_COVERING, + {CharacteristicsTypes.HORIZONTAL_TILT_CURRENT: 90}, + ) + state = await helper.poll_and_get_state() + # Expect converted value from arcdegree scale to percentage scale. + assert state.attributes["current_tilt_position"] == 100 + + async def test_write_window_cover_tilt_horizontal( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: @@ -359,6 +419,29 @@ async def test_write_window_cover_tilt_vertical_2( ) +async def test_write_window_cover_tilt_no_minmax( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that horizontal tilt is written correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_no_minmax_tilt + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 90}, + blocking=True, + ) + # Expect converted value from percentage scale to arcdegree scale. 
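# A minimal illustrative sketch of the tilt conversion these cover tests exercise:
# Home Assistant's 0-100 tilt percentage is assumed to map linearly onto the
# HomeKit arcdegree range, with -90..90 used when the characteristic supplies no
# minValue/maxValue. The helpers below are only an aid for reading the expected
# values in the assertions; the real conversion lives in the homekit_controller
# cover platform and may differ in rounding details.


def percent_to_arcdegrees(percent: int, min_deg: int = -90, max_deg: int = 90) -> int:
    """Map a 0-100 tilt percentage onto an arcdegree range (illustrative sketch)."""
    return round(min_deg + (percent / 100) * (max_deg - min_deg))


def arcdegrees_to_percent(degrees: float, min_deg: int = -90, max_deg: int = 90) -> int:
    """Map an arcdegree value onto the 0-100 tilt percentage scale (illustrative sketch)."""
    return round((degrees - min_deg) / (max_deg - min_deg) * 100)


# Worked examples matching the assertions in the surrounding tests:
# percent_to_arcdegrees(90) == 72, percent_to_arcdegrees(10) == -72,
# percent_to_arcdegrees(50) == 0, arcdegrees_to_percent(0) == 50,
# arcdegrees_to_percent(90) == 100.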
+ helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 72, + }, + ) + + async def test_window_cover_stop( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: @@ -378,6 +461,57 @@ async def test_window_cover_stop( ) +async def test_write_window_cover_tilt_full_range( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that full-range tilt is working correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_full_range_tilt + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 10}, + blocking=True, + ) + # Expect converted value from percentage scale to arc on -90 to +90 scale. + helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: -72, + }, + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 50}, + blocking=True, + ) + # Expect converted value from percentage scale to arc on -90 to +90 scale. + helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 0, + }, + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 90}, + blocking=True, + ) + # Expect converted value from percentage scale to arc on -90 to +90 scale. + helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 72, + }, + ) + + def create_garage_door_opener_service(accessory: Accessory) -> None: """Define a garage-door-opener chars as per page 217 of HAP spec.""" service = accessory.add_service(ServicesTypes.GARAGE_DOOR_OPENER) From 3e3d27f48de058afba43fea4e435406baca96b74 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 24 Aug 2024 08:44:44 -0700 Subject: [PATCH 1020/1635] Add nest event platform (#123042) * Add nest event platform * Translate entities * Put motion events into a single entity type * Remove none types * Set event entity descriptions as kw only * Update translations for event entities * Add single event entity per trait type * Update translation keys --- homeassistant/components/nest/__init__.py | 2 +- homeassistant/components/nest/event.py | 124 +++++++++++++ homeassistant/components/nest/strings.json | 26 +++ tests/components/nest/test_event.py | 193 +++++++++++++++++++++ 4 files changed, 344 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/nest/event.py create mode 100644 tests/components/nest/test_event.py diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index bdec44a3c85..5d3e9652039 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -97,7 +97,7 @@ CONFIG_SCHEMA = vol.Schema( ) # Platforms for SDM API -PLATFORMS = [Platform.CAMERA, Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [Platform.CAMERA, Platform.CLIMATE, Platform.EVENT, Platform.SENSOR] # Fetch media events with a disk backed cache, with a limit for each camera # device. 
The largest media items are mp4 clips at ~120kb each, and we target diff --git a/homeassistant/components/nest/event.py b/homeassistant/components/nest/event.py new file mode 100644 index 00000000000..d2897793270 --- /dev/null +++ b/homeassistant/components/nest/event.py @@ -0,0 +1,124 @@ +"""Event platform for Google Nest.""" + +from dataclasses import dataclass +import logging + +from google_nest_sdm.device import Device +from google_nest_sdm.device_manager import DeviceManager +from google_nest_sdm.event import EventMessage, EventType +from google_nest_sdm.traits import TraitType + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DATA_DEVICE_MANAGER, DOMAIN +from .device_info import NestDeviceInfo +from .events import ( + EVENT_CAMERA_MOTION, + EVENT_CAMERA_PERSON, + EVENT_CAMERA_SOUND, + EVENT_DOORBELL_CHIME, + EVENT_NAME_MAP, +) + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(kw_only=True, frozen=True) +class NestEventEntityDescription(EventEntityDescription): + """Entity description for nest event entities.""" + + trait_types: list[TraitType] + api_event_types: list[EventType] + event_types: list[str] + + +ENTITY_DESCRIPTIONS = [ + NestEventEntityDescription( + key=EVENT_DOORBELL_CHIME, + translation_key="chime", + device_class=EventDeviceClass.DOORBELL, + event_types=[EVENT_DOORBELL_CHIME], + trait_types=[TraitType.DOORBELL_CHIME], + api_event_types=[EventType.DOORBELL_CHIME], + ), + NestEventEntityDescription( + key=EVENT_CAMERA_MOTION, + translation_key="motion", + device_class=EventDeviceClass.MOTION, + event_types=[EVENT_CAMERA_MOTION, EVENT_CAMERA_PERSON, EVENT_CAMERA_SOUND], + trait_types=[ + TraitType.CAMERA_MOTION, + TraitType.CAMERA_PERSON, + TraitType.CAMERA_SOUND, + ], + api_event_types=[ + EventType.CAMERA_MOTION, + EventType.CAMERA_PERSON, + EventType.CAMERA_SOUND, + ], + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the sensors.""" + + device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ + DATA_DEVICE_MANAGER + ] + async_add_entities( + NestTraitEventEntity(desc, device) + for device in device_manager.devices.values() + for desc in ENTITY_DESCRIPTIONS + if any(trait in device.traits for trait in desc.trait_types) + ) + + +class NestTraitEventEntity(EventEntity): + """Nest doorbell event entity.""" + + entity_description: NestEventEntityDescription + _attr_has_entity_name = True + + def __init__( + self, entity_description: NestEventEntityDescription, device: Device + ) -> None: + """Initialize the event entity.""" + self.entity_description = entity_description + self._device = device + self._attr_unique_id = f"{device.name}-{entity_description.key}" + self._attr_device_info = NestDeviceInfo(device).device_info + + async def _async_handle_event(self, event_message: EventMessage) -> None: + """Handle a device event.""" + if ( + event_message.relation_update + or not event_message.resource_update_name + or not (events := event_message.resource_update_events) + ): + return + for api_event_type, nest_event in events.items(): + if api_event_type not in self.entity_description.api_event_types: + continue + + event_type = EVENT_NAME_MAP[api_event_type] + + self._trigger_event( + event_type, + 
{"nest_event_id": nest_event.event_token}, + ) + self.async_write_ha_state() + return + + async def async_added_to_hass(self) -> None: + """Run when entity is added to attach an event listener.""" + self.async_on_remove(self._device.add_event_callback(self._async_handle_event)) diff --git a/homeassistant/components/nest/strings.json b/homeassistant/components/nest/strings.json index 35e1cc68165..cd915acfbe5 100644 --- a/homeassistant/components/nest/strings.json +++ b/homeassistant/components/nest/strings.json @@ -72,5 +72,31 @@ "title": "Legacy Works With Nest has been removed", "description": "Legacy Works With Nest has been removed from Home Assistant, and the API shuts down as of September 2023.\n\nYou must take action to use the SDM API. Remove all `nest` configuration from `configuration.yaml` and restart Home Assistant, then see the Nest [integration instructions]({documentation_url}) for set up instructions and supported devices." } + }, + "entity": { + "event": { + "chime": { + "name": "Chime", + "state_attributes": { + "event_type": { + "state": { + "doorbell_chime": "[%key:component::nest::entity::event::chime::name%]" + } + } + } + }, + "motion": { + "name": "[%key:component::event::entity_component::motion::name%]", + "state_attributes": { + "event_type": { + "state": { + "camera_motion": "[%key:component::event::entity_component::motion::name%]", + "camera_person": "Person", + "camera_sound": "Sound" + } + } + } + } + } } } diff --git a/tests/components/nest/test_event.py b/tests/components/nest/test_event.py new file mode 100644 index 00000000000..531252285e0 --- /dev/null +++ b/tests/components/nest/test_event.py @@ -0,0 +1,193 @@ +"""Test for Nest event platform.""" + +from typing import Any + +from google_nest_sdm.event import EventMessage, EventType +from google_nest_sdm.traits import TraitType +import pytest + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.util.dt import utcnow + +from .common import DEVICE_ID, CreateDevice +from .conftest import FakeSubscriber, PlatformSetup + +EVENT_SESSION_ID = "CjY5Y3VKaTZwR3o4Y19YbTVfMF..." +EVENT_ID = "FWWVQVUdGNUlTU2V4MGV2aTNXV..." 
+ENCODED_EVENT_ID = "WyJDalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJGV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" + + +@pytest.fixture +def platforms() -> list[Platform]: + """Fixture for platforms to setup.""" + return [Platform.EVENT] + + +@pytest.fixture +def device_type() -> str: + """Fixture for the type of device under test.""" + return "sdm.devices.types.DOORBELL" + + +@pytest.fixture +async def device_traits() -> dict[str, Any]: + """Fixture to set default device traits used when creating devices.""" + return { + "sdm.devices.traits.Info": { + "customName": "Front", + }, + "sdm.devices.traits.CameraLiveStream": { + "maxVideoResolution": { + "width": 640, + "height": 480, + }, + "videoCodecs": ["H264"], + "audioCodecs": ["AAC"], + }, + } + + +def create_events(events: str) -> EventMessage: + """Create an EventMessage for events.""" + return EventMessage.create_event( + { + "eventId": "some-event-id", + "timestamp": utcnow().isoformat(timespec="seconds"), + "resourceUpdate": { + "name": DEVICE_ID, + "events": { + event: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + } + for event in events + }, + }, + }, + auth=None, + ) + + +@pytest.mark.parametrize( + ( + "trait_types", + "entity_id", + "expected_attributes", + "api_event_type", + "expected_event_type", + ), + [ + ( + [TraitType.DOORBELL_CHIME, TraitType.CAMERA_MOTION], + "event.front_chime", + { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + }, + EventType.DOORBELL_CHIME, + "doorbell_chime", + ), + ( + [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], + "event.front_motion", + { + "device_class": "motion", + "event_types": ["camera_motion", "camera_person", "camera_sound"], + "friendly_name": "Front Motion", + }, + EventType.CAMERA_MOTION, + "camera_motion", + ), + ( + [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], + "event.front_motion", + { + "device_class": "motion", + "event_types": ["camera_motion", "camera_person", "camera_sound"], + "friendly_name": "Front Motion", + }, + EventType.CAMERA_PERSON, + "camera_person", + ), + ( + [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], + "event.front_motion", + { + "device_class": "motion", + "event_types": ["camera_motion", "camera_person", "camera_sound"], + "friendly_name": "Front Motion", + }, + EventType.CAMERA_SOUND, + "camera_sound", + ), + ], +) +async def test_receive_events( + hass: HomeAssistant, + subscriber: FakeSubscriber, + setup_platform: PlatformSetup, + create_device: CreateDevice, + trait_types: list[TraitType], + entity_id: str, + expected_attributes: dict[str, str], + api_event_type: EventType, + expected_event_type: str, +) -> None: + """Test a pubsub message for a camera person event.""" + create_device.create( + raw_traits={ + **{trait_type: {} for trait_type in trait_types}, + api_event_type: {}, + } + ) + await setup_platform() + + state = hass.states.get(entity_id) + assert state.state == "unknown" + assert state.attributes == { + **expected_attributes, + "event_type": None, + } + + await subscriber.async_receive_event(create_events([api_event_type])) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state != "unknown" + assert state.attributes == { + **expected_attributes, + "event_type": expected_event_type, + "nest_event_id": ENCODED_EVENT_ID, + } + + +@pytest.mark.parametrize(("trait_type"), [(TraitType.DOORBELL_CHIME)]) +async def test_ignore_unrelated_event( + 
hass: HomeAssistant, + subscriber: FakeSubscriber, + setup_platform: PlatformSetup, + create_device: CreateDevice, + trait_type: TraitType, +) -> None: + """Test a pubsub message for a camera person event.""" + create_device.create( + raw_traits={ + trait_type: {}, + } + ) + await setup_platform() + + # Device does not have traits matching this event type + await subscriber.async_receive_event(create_events([EventType.CAMERA_MOTION])) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert state.state == "unknown" + assert state.attributes == { + "device_class": "doorbell", + "event_type": None, + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + } From e85755fbda15973c6c63eac826597fd2db65b9e2 Mon Sep 17 00:00:00 2001 From: "David F. Mulcahey" Date: Sat, 24 Aug 2024 15:52:27 -0400 Subject: [PATCH 1021/1635] Fix ZHA device lookup when ZHA devices are shared with additional integrations (#124429) * Fix ZHA device lookup when ZHA devices are shared with additional integrations * clean up except error types * remove impossible to create exceptions --- homeassistant/components/zha/helpers.py | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index 35a794e8631..a5446af7e76 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -1012,16 +1012,12 @@ def async_get_zha_device_proxy(hass: HomeAssistant, device_id: str) -> ZHADevice _LOGGER.error("Device id `%s` not found in registry", device_id) raise KeyError(f"Device id `{device_id}` not found in registry.") zha_gateway_proxy = get_zha_gateway_proxy(hass) - try: - ieee_address = list(registry_device.identifiers)[0][1] - ieee = EUI64.convert(ieee_address) - except (IndexError, ValueError) as ex: - _LOGGER.error( - "Unable to determine device IEEE for device with device id `%s`", device_id - ) - raise KeyError( - f"Unable to determine device IEEE for device with device id `{device_id}`." 
- ) from ex + ieee_address = next( + identifier + for domain, identifier in registry_device.identifiers + if domain == DOMAIN + ) + ieee = EUI64.convert(ieee_address) return zha_gateway_proxy.device_proxies[ieee] From b64c21cce62565a26ebed21fe78ce6b9dd627e37 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 24 Aug 2024 13:16:55 -0700 Subject: [PATCH 1022/1635] Bump to python-nest-sdm to 5.0.0 (#124536) --- homeassistant/components/nest/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json index 3472fa64e8f..1b0697f7602 100644 --- a/homeassistant/components/nest/manifest.json +++ b/homeassistant/components/nest/manifest.json @@ -20,5 +20,5 @@ "iot_class": "cloud_push", "loggers": ["google_nest_sdm"], "quality_scale": "platinum", - "requirements": ["google-nest-sdm==4.0.7"] + "requirements": ["google-nest-sdm==5.0.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 31d5b68e7e7..415d5e383c8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -992,7 +992,7 @@ google-cloud-texttospeech==2.16.3 google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==4.0.7 +google-nest-sdm==5.0.0 # homeassistant.components.google_travel_time googlemaps==2.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a5fd9ee2239..d0e7128f535 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -839,7 +839,7 @@ google-cloud-pubsub==2.13.11 google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==4.0.7 +google-nest-sdm==5.0.0 # homeassistant.components.google_travel_time googlemaps==2.5.1 From c506188c1304f1ecd2e62c31a60d105fda400555 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 24 Aug 2024 13:17:59 -0700 Subject: [PATCH 1023/1635] Fix nest event entities to only register a single event per session (#124535) --- homeassistant/components/nest/event.py | 7 +- tests/components/nest/test_event.py | 152 +++++++++++++++++++++++-- 2 files changed, 148 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/nest/event.py b/homeassistant/components/nest/event.py index d2897793270..a6d70fe86d5 100644 --- a/homeassistant/components/nest/event.py +++ b/homeassistant/components/nest/event.py @@ -106,15 +106,20 @@ class NestTraitEventEntity(EventEntity): or not (events := event_message.resource_update_events) ): return + last_nest_event_id = self.state_attributes.get("nest_event_id") for api_event_type, nest_event in events.items(): if api_event_type not in self.entity_description.api_event_types: continue event_type = EVENT_NAME_MAP[api_event_type] + nest_event_id = nest_event.event_token + if last_nest_event_id is not None and last_nest_event_id == nest_event_id: + # This event is a duplicate message in the same thread + return self._trigger_event( event_type, - {"nest_event_id": nest_event.event_token}, + {"nest_event_id": nest_event_id}, ) self.async_write_ha_state() return diff --git a/tests/components/nest/test_event.py b/tests/components/nest/test_event.py index 531252285e0..f45e6c1c6e6 100644 --- a/tests/components/nest/test_event.py +++ b/tests/components/nest/test_event.py @@ -1,7 +1,10 @@ """Test for Nest event platform.""" +import datetime from typing import Any +from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from google_nest_sdm.event import EventMessage, EventType from 
google_nest_sdm.traits import TraitType import pytest @@ -10,13 +13,17 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.util.dt import utcnow -from .common import DEVICE_ID, CreateDevice -from .conftest import FakeSubscriber, PlatformSetup +from .common import DEVICE_ID, CreateDevice, FakeSubscriber +from .conftest import PlatformSetup EVENT_SESSION_ID = "CjY5Y3VKaTZwR3o4Y19YbTVfMF..." EVENT_ID = "FWWVQVUdGNUlTU2V4MGV2aTNXV..." ENCODED_EVENT_ID = "WyJDalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJGV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" +EVENT_SESSION_ID2 = "DjY5Y3VKaTZwR3o4Y19YbTVfMF..." +EVENT_ID2 = "GWWVQVUdGNUlTU2V4MGV2aTNXV..." +ENCODED_EVENT_ID2 = "WyJEalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJHV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" + @pytest.fixture def platforms() -> list[Platform]: @@ -24,6 +31,14 @@ def platforms() -> list[Platform]: return [Platform.EVENT] +@pytest.fixture(autouse=True) +def enable_prefetch(subscriber: FakeSubscriber) -> None: + """Fixture to enable media fetching for tests to exercise.""" + subscriber.cache_policy.fetch = True + with patch("homeassistant.components.nest.EVENT_MEDIA_CACHE_SIZE", new=5): + yield + + @pytest.fixture def device_type() -> str: """Fixture for the type of device under test.""" @@ -49,6 +64,21 @@ async def device_traits() -> dict[str, Any]: def create_events(events: str) -> EventMessage: + """Create an EventMessage for events.""" + return create_event_messages( + { + event: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + } + for event in events + } + ) + + +def create_event_messages( + events: dict[str, Any], parameters: dict[str, Any] | None = None +) -> EventMessage: """Create an EventMessage for events.""" return EventMessage.create_event( { @@ -56,19 +86,15 @@ def create_events(events: str) -> EventMessage: "timestamp": utcnow().isoformat(timespec="seconds"), "resourceUpdate": { "name": DEVICE_ID, - "events": { - event: { - "eventSessionId": EVENT_SESSION_ID, - "eventId": EVENT_ID, - } - for event in events - }, + "events": events, }, + **(parameters if parameters else {}), }, auth=None, ) +@pytest.mark.freeze_time("2024-08-24T12:00:00Z") @pytest.mark.parametrize( ( "trait_types", @@ -155,7 +181,7 @@ async def test_receive_events( await hass.async_block_till_done() state = hass.states.get(entity_id) - assert state.state != "unknown" + assert state.state == "2024-08-24T12:00:00.000+00:00" assert state.attributes == { **expected_attributes, "event_type": expected_event_type, @@ -191,3 +217,109 @@ async def test_ignore_unrelated_event( "event_types": ["doorbell_chime"], "friendly_name": "Front Chime", } + + +@pytest.mark.freeze_time("2024-08-24T12:00:00Z") +async def test_event_threads( + hass: HomeAssistant, + subscriber: FakeSubscriber, + setup_platform: PlatformSetup, + create_device: CreateDevice, + freezer: FrozenDateTimeFactory, +) -> None: + """Test multiple events delivered as part of a thread are a single home assistant event.""" + create_device.create( + raw_traits={ + TraitType.DOORBELL_CHIME: {}, + TraitType.CAMERA_CLIP_PREVIEW: {}, + } + ) + await setup_platform() + + state = hass.states.get("event.front_chime") + assert state.state == "unknown" + + # Doorbell event is received + freezer.tick(datetime.timedelta(seconds=2)) + await subscriber.async_receive_event( + create_event_messages( + { + EventType.DOORBELL_CHIME: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + } + }, + parameters={"eventThreadState": "STARTED"}, + ) + ) + 
await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert state.state == "2024-08-24T12:00:02.000+00:00" + assert state.attributes == { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + "event_type": "doorbell_chime", + "nest_event_id": ENCODED_EVENT_ID, + } + + # Media arrives in a second message that ends the thread + freezer.tick(datetime.timedelta(seconds=2)) + await subscriber.async_receive_event( + create_event_messages( + { + EventType.DOORBELL_CHIME: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + }, + EventType.CAMERA_CLIP_PREVIEW: { + "eventSessionId": EVENT_SESSION_ID, + "previewUrl": "http://example", + }, + }, + parameters={"eventThreadState": "ENDED"}, + ) + ) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert ( + state.state == "2024-08-24T12:00:02.000+00:00" + ) # A second event is not received + assert state.attributes == { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + "event_type": "doorbell_chime", + "nest_event_id": ENCODED_EVENT_ID, + } + + # An additional doorbell press event happens (with an updated session id) + freezer.tick(datetime.timedelta(seconds=2)) + await subscriber.async_receive_event( + create_event_messages( + { + EventType.DOORBELL_CHIME: { + "eventSessionId": EVENT_SESSION_ID2, + "eventId": EVENT_ID2, + }, + EventType.CAMERA_CLIP_PREVIEW: { + "eventSessionId": EVENT_SESSION_ID2, + "previewUrl": "http://example", + }, + }, + parameters={"eventThreadState": "ENDED"}, + ) + ) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert state.state == "2024-08-24T12:00:06.000+00:00" # Third event is received + assert state.attributes == { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + "event_type": "doorbell_chime", + "nest_event_id": ENCODED_EVENT_ID2, + } From 156e39ebb23e2ed84a8b05d45e642280e3f7ac14 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Sat, 24 Aug 2024 15:21:03 -0500 Subject: [PATCH 1024/1635] Add minimum command seconds to VAD (#124447) --- .../components/assist_pipeline/vad.py | 14 ++++++++++++- tests/components/assist_pipeline/test_vad.py | 20 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 8372dbc54c7..4782d14dee4 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -78,6 +78,9 @@ class VoiceCommandSegmenter: speech_seconds: float = 0.3 """Seconds of speech before voice command has started.""" + command_seconds: float = 1.0 + """Minimum number of seconds for a voice command.""" + silence_seconds: float = 0.7 """Seconds of silence after voice command has ended.""" @@ -96,6 +99,9 @@ class VoiceCommandSegmenter: _speech_seconds_left: float = 0.0 """Seconds left before considering voice command as started.""" + _command_seconds_left: float = 0.0 + """Seconds left before voice command could stop.""" + _silence_seconds_left: float = 0.0 """Seconds left before considering voice command as stopped.""" @@ -112,6 +118,7 @@ class VoiceCommandSegmenter: def reset(self) -> None: """Reset all counters and state.""" self._speech_seconds_left = self.speech_seconds + self._command_seconds_left = self.command_seconds - self.speech_seconds 
self._silence_seconds_left = self.silence_seconds self._timeout_seconds_left = self.timeout_seconds self._reset_seconds_left = self.reset_seconds @@ -142,6 +149,9 @@ class VoiceCommandSegmenter: if self._speech_seconds_left <= 0: # Inside voice command self.in_command = True + self._command_seconds_left = ( + self.command_seconds - self.speech_seconds + ) self._silence_seconds_left = self.silence_seconds _LOGGER.debug("Voice command started") else: @@ -154,7 +164,8 @@ class VoiceCommandSegmenter: # Silence in command self._reset_seconds_left = self.reset_seconds self._silence_seconds_left -= chunk_seconds - if self._silence_seconds_left <= 0: + self._command_seconds_left -= chunk_seconds + if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0): # Command finished successfully self.reset() _LOGGER.debug("Voice command finished") @@ -163,6 +174,7 @@ class VoiceCommandSegmenter: # Speech in command. # Reset silence counter if enough speech. self._reset_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: self._silence_seconds_left = self.silence_seconds self._reset_seconds_left = self.reset_seconds diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index db039ab3140..fda26d2fb94 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -206,3 +206,23 @@ def test_timeout() -> None: assert not segmenter.process(_ONE_SECOND * 0.5, False) assert segmenter.timed_out + + +def test_command_seconds() -> None: + """Test minimum number of seconds for voice command.""" + + segmenter = VoiceCommandSegmenter( + command_seconds=3, speech_seconds=1, silence_seconds=1, reset_seconds=1 + ) + + assert segmenter.process(_ONE_SECOND, True) + + # Silence counts towards total command length + assert segmenter.process(_ONE_SECOND * 0.5, False) + + # Enough to finish command now + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND * 0.5, False) + + # Silence to finish + assert not segmenter.process(_ONE_SECOND * 0.5, False) From 9b3718edfb1e7baac89e869c3b8b13cc9aaabeba Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Sat, 24 Aug 2024 21:27:08 -0400 Subject: [PATCH 1025/1635] Remove sync API use in Nice G.O. (#124546) --- homeassistant/components/nice_go/event.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/nice_go/event.py b/homeassistant/components/nice_go/event.py index caf984f824f..fbbeaffcea9 100644 --- a/homeassistant/components/nice_go/event.py +++ b/homeassistant/components/nice_go/event.py @@ -48,4 +48,4 @@ class NiceGOEventEntity(NiceGOEntity, EventEntity): if data["deviceId"] == self.data.id: _LOGGER.debug("Barrier obstructed event for %s, triggering", self.data.name) self._trigger_event(EVENT_BARRIER_OBSTRUCTED) - self.schedule_update_ha_state() + self.async_write_ha_state() From b63fb9f17f963221cb979316c40b46e896eabb0c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 24 Aug 2024 18:18:54 -1000 Subject: [PATCH 1026/1635] Bump bluetooth-data-tools to 1.20.0 (#124542) changelog: https://github.com/Bluetooth-Devices/bluetooth-data-tools/compare/v1.19.4...v1.20.0 --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/components/ld2410_ble/manifest.json | 2 +- homeassistant/components/led_ble/manifest.json | 2 +- homeassistant/components/private_ble_device/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 07ea970de3d..93b07cb120c 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -18,7 +18,7 @@ "bleak-retry-connector==3.5.0", "bluetooth-adapters==0.19.4", "bluetooth-auto-recovery==1.4.2", - "bluetooth-data-tools==1.19.4", + "bluetooth-data-tools==1.20.0", "dbus-fast==2.23.0", "habluetooth==3.3.2" ] diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json index a1b0e9a1398..d3e21eeae90 100644 --- a/homeassistant/components/ld2410_ble/manifest.json +++ b/homeassistant/components/ld2410_ble/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/ld2410_ble", "integration_type": "device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.19.4", "ld2410-ble==0.1.1"] + "requirements": ["bluetooth-data-tools==1.20.0", "ld2410-ble==0.1.1"] } diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index e22d23fb971..1d12e355a0d 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.19.4", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.0.2"] } diff --git a/homeassistant/components/private_ble_device/manifest.json b/homeassistant/components/private_ble_device/manifest.json index 8b072361d34..6759cdda0f0 100644 --- a/homeassistant/components/private_ble_device/manifest.json +++ b/homeassistant/components/private_ble_device/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/private_ble_device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.19.4"] + "requirements": ["bluetooth-data-tools==1.20.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 8090c17f334..249654d1066 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -18,7 +18,7 @@ bleak-retry-connector==3.5.0 bleak==0.22.2 bluetooth-adapters==0.19.4 bluetooth-auto-recovery==1.4.2 -bluetooth-data-tools==1.19.4 +bluetooth-data-tools==1.20.0 cached_ipaddress==0.3.0 certifi>=2021.5.30 ciso8601==2.3.1 diff --git a/requirements_all.txt b/requirements_all.txt index 415d5e383c8..c5b36ce396f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -603,7 +603,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device 
-bluetooth-data-tools==1.19.4 +bluetooth-data-tools==1.20.0 # homeassistant.components.bond bond-async==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d0e7128f535..873c908819d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -527,7 +527,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.4 +bluetooth-data-tools==1.20.0 # homeassistant.components.bond bond-async==0.2.1 From 31f55393118216ad8b09650c4c78e7a603e90218 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Sun, 25 Aug 2024 08:53:30 +0100 Subject: [PATCH 1027/1635] Change logger level on config migration for Mastodon (#124558) --- homeassistant/components/mastodon/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index 77e66b6e45c..0d680170f3d 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -104,7 +104,7 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: unique_id=slugify(construct_mastodon_username(instance, account)), ) - LOGGER.info( + LOGGER.debug( "Entry %s successfully migrated to version %s.%s", entry.entry_id, entry.version, From ce72157bf7a151ebb57b906c630a204e5dbac0db Mon Sep 17 00:00:00 2001 From: Jonas Bergler Date: Sun, 25 Aug 2024 21:43:21 +1200 Subject: [PATCH 1028/1635] Bump pyEmby to 1.10 (#124549) --- homeassistant/components/emby/manifest.json | 2 +- requirements_all.txt | 2 +- script/licenses.py | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/emby/manifest.json b/homeassistant/components/emby/manifest.json index f90dda79352..3f57f62eb0b 100644 --- a/homeassistant/components/emby/manifest.json +++ b/homeassistant/components/emby/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/emby", "iot_class": "local_push", "loggers": ["pyemby"], - "requirements": ["pyEmby==1.9"] + "requirements": ["pyEmby==1.10"] } diff --git a/requirements_all.txt b/requirements_all.txt index c5b36ce396f..ac04f70b225 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1695,7 +1695,7 @@ pyDuotecno==2024.5.1 pyElectra==1.2.4 # homeassistant.components.emby -pyEmby==1.9 +pyEmby==1.10 # homeassistant.components.hikvision pyHik==0.3.2 diff --git a/script/licenses.py b/script/licenses.py index bf14adca474..8a10b9a7530 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -154,7 +154,6 @@ EXCEPTIONS = { "neurio", # https://github.com/jordanh/neurio-python/pull/13 "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 "pigpio", # https://github.com/joan2937/pigpio/pull/608 - "pyEmby", # https://github.com/mezz64/pyEmby/pull/12 "pymitv", # MIT "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 From 7c8e00e5e062ced0820ab9f777b23e531f668e47 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 01:16:20 -1000 Subject: [PATCH 1029/1635] Bump yalexs to 8.4.1 (#124553) changelog: https://github.com/bdraco/yalexs/compare/v8.4.0...v8.4.1 --- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index abe9dc707c3..e0739aadff0 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.4.0", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.4.1", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index ac04f70b225..e0b15c4b566 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2968,7 +2968,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.0 +yalexs==8.4.1 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 873c908819d..a8dbff4d232 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2348,7 +2348,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.0 +yalexs==8.4.1 # homeassistant.components.yeelight yeelight==0.7.14 From fcf0bef2cd09428cf957ff344bfd96a29875dd4f Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Sun, 25 Aug 2024 08:13:50 -0400 Subject: [PATCH 1030/1635] Remove platform from unique ID in Nice G.O. (#124548) * Remove domain from unique ID in Nice G.O. 
* Snapshots --- homeassistant/components/nice_go/cover.py | 2 +- homeassistant/components/nice_go/entity.py | 3 +-- homeassistant/components/nice_go/event.py | 2 +- homeassistant/components/nice_go/light.py | 2 +- homeassistant/components/nice_go/switch.py | 2 +- tests/components/nice_go/snapshots/test_cover.ambr | 4 ++-- tests/components/nice_go/snapshots/test_light.ambr | 4 ++-- 7 files changed, 9 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py index 70bd4b136a5..4098d9ef426 100644 --- a/homeassistant/components/nice_go/cover.py +++ b/homeassistant/components/nice_go/cover.py @@ -25,7 +25,7 @@ async def async_setup_entry( coordinator = config_entry.runtime_data async_add_entities( - NiceGOCoverEntity(coordinator, device_id, device_data.name, "cover") + NiceGOCoverEntity(coordinator, device_id, device_data.name) for device_id, device_data in coordinator.data.items() ) diff --git a/homeassistant/components/nice_go/entity.py b/homeassistant/components/nice_go/entity.py index 5af4b9c8731..266ad72add3 100644 --- a/homeassistant/components/nice_go/entity.py +++ b/homeassistant/components/nice_go/entity.py @@ -17,12 +17,11 @@ class NiceGOEntity(CoordinatorEntity[NiceGOUpdateCoordinator]): coordinator: NiceGOUpdateCoordinator, device_id: str, device_name: str, - sub_device_id: str, ) -> None: """Initialize the entity.""" super().__init__(coordinator) - self._attr_unique_id = f"{device_id}-{sub_device_id}" + self._attr_unique_id = device_id self._device_id = device_id self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device_id)}, diff --git a/homeassistant/components/nice_go/event.py b/homeassistant/components/nice_go/event.py index fbbeaffcea9..a19511b0b11 100644 --- a/homeassistant/components/nice_go/event.py +++ b/homeassistant/components/nice_go/event.py @@ -23,7 +23,7 @@ async def async_setup_entry( coordinator = config_entry.runtime_data async_add_entities( - NiceGOEventEntity(coordinator, device_id, device_data.name, "event") + NiceGOEventEntity(coordinator, device_id, device_data.name) for device_id, device_data in coordinator.data.items() ) diff --git a/homeassistant/components/nice_go/light.py b/homeassistant/components/nice_go/light.py index 76730ea822f..4a08364688e 100644 --- a/homeassistant/components/nice_go/light.py +++ b/homeassistant/components/nice_go/light.py @@ -20,7 +20,7 @@ async def async_setup_entry( coordinator = config_entry.runtime_data async_add_entities( - NiceGOLightEntity(coordinator, device_id, device_data.name, "light") + NiceGOLightEntity(coordinator, device_id, device_data.name) for device_id, device_data in coordinator.data.items() ) diff --git a/homeassistant/components/nice_go/switch.py b/homeassistant/components/nice_go/switch.py index e7290aabdd6..26d42dab124 100644 --- a/homeassistant/components/nice_go/switch.py +++ b/homeassistant/components/nice_go/switch.py @@ -24,7 +24,7 @@ async def async_setup_entry( coordinator = config_entry.runtime_data async_add_entities( - NiceGOSwitchEntity(coordinator, device_id, device_data.name, "switch") + NiceGOSwitchEntity(coordinator, device_id, device_data.name) for device_id, device_data in coordinator.data.items() ) diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr index 8f85fea2726..391d91584bf 100644 --- a/tests/components/nice_go/snapshots/test_cover.ambr +++ b/tests/components/nice_go/snapshots/test_cover.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 
'supported_features': , 'translation_key': None, - 'unique_id': '1-cover', + 'unique_id': '1', 'unit_of_measurement': None, }) # --- @@ -76,7 +76,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': None, - 'unique_id': '2-cover', + 'unique_id': '2', 'unit_of_measurement': None, }) # ---
 diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr index 294488e3d46..2e29d9589dd 100644 --- a/tests/components/nice_go/snapshots/test_light.ambr +++ b/tests/components/nice_go/snapshots/test_light.ambr @@ -32,7 +32,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'light', - 'unique_id': '1-light', + 'unique_id': '1', 'unit_of_measurement': None, }) # --- @@ -87,7 +87,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'light', - 'unique_id': '2-light', + 'unique_id': '2', 'unit_of_measurement': None, }) # ---
 From f06c21c8cc06ad07d1f36777ef40b7383721ab87 Mon Sep 17 00:00:00 2001 From: Christopher Maio Date: Sun, 25 Aug 2024 09:05:13 -0400 Subject: [PATCH 1031/1635] Update Matter light transition blocklist to include GE Cync Undercabinet Lights (#124138) --- homeassistant/components/matter/light.py | 2 ++ 1 file changed, 2 insertions(+)
 diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 6e9019c46fa..58ef8081fa9 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -60,6 +60,8 @@ TRANSITION_BLOCKLIST = ( (4456, 1011, "1.0.0", "2.00.00"), (4488, 260, "1.0", "1.0.0"), (4488, 514, "1.0", "1.0.0"), + (4921, 42, "1.0", "1.01.060"), + (4921, 43, "1.0", "1.01.060"), (4999, 24875, "1.0", "27.0"), (4999, 25057, "1.0", "27.0"), (5009, 514, "1.0", "1.0.0"),
 From 51b520db0cbd06133a408871a74296cd4ddb254f Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Sun, 25 Aug 2024 15:06:16 +0200 Subject: [PATCH 1032/1635] Prevent KeyError when Matter device sends invalid value for StartUpOnOff (#124280) --- homeassistant/components/matter/select.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-)
 diff --git a/homeassistant/components/matter/select.py b/homeassistant/components/matter/select.py index 4a9ef3780d1..b46cad53123 100644 --- a/homeassistant/components/matter/select.py +++ b/homeassistant/components/matter/select.py @@ -229,12 +229,12 @@ DISCOVERY_SCHEMAS = [ entity_category=EntityCategory.CONFIG, translation_key="startup_on_off", options=["On", "Off", "Toggle", "Previous"], - measurement_to_ha=lambda x: { + measurement_to_ha=lambda x: { # pylint: disable=unnecessary-lambda 0: "Off", 1: "On", 2: "Toggle", None: "Previous", - }[x], + }.get(x), ha_to_native_value=lambda x: { "Off": 0, "On": 1,
 From 5550b1a74e62539666d5e40d6592da71a70f1ad2 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Sun, 25 Aug 2024 15:09:08 +0200 Subject: [PATCH 1033/1635] Only support remote activity on Alexa if feature is set and at least one feature is in the activity_list (#124567) Only support remote activity on Alexa if feature is set and at least one feature is in the activity_list --- homeassistant/components/alexa/entities.py | 9 +++++--- tests/components/alexa/test_capabilities.py | 24 +++++++++++++++++---- 2 files changed, 26 insertions(+), 7 deletions(-)
 diff --git a/homeassistant/components/alexa/entities.py b/homeassistant/components/alexa/entities.py index 8bba4ed2468..ca7b389a0f1 100644 --- a/homeassistant/components/alexa/entities.py +++
b/homeassistant/components/alexa/entities.py @@ -661,9 +661,12 @@ class RemoteCapabilities(AlexaEntity): def interfaces(self) -> Generator[AlexaCapability]: """Yield the supported interfaces.""" yield AlexaPowerController(self.entity) - yield AlexaModeController( - self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}" - ) + supported = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0) + activities = self.entity.attributes.get(remote.ATTR_ACTIVITY_LIST) or [] + if activities and supported & remote.RemoteEntityFeature.ACTIVITY: + yield AlexaModeController( + self.entity, instance=f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}" + ) yield AlexaEndpointHealth(self.hass, self.entity) yield Alexa(self.entity) diff --git a/tests/components/alexa/test_capabilities.py b/tests/components/alexa/test_capabilities.py index 162149f095b..b56d8054d7b 100644 --- a/tests/components/alexa/test_capabilities.py +++ b/tests/components/alexa/test_capabilities.py @@ -70,6 +70,7 @@ async def test_discovery_remote( { "current_activity": current_activity, "activity_list": activity_list, + "supported_features": 4, }, ) msg = await smart_home.async_handle_message(hass, get_default_config(hass), request) @@ -790,22 +791,37 @@ async def test_report_remote_activity(hass: HomeAssistant) -> None: hass.states.async_set( "remote.unknown", "on", - {"current_activity": "UNKNOWN"}, + { + "current_activity": "UNKNOWN", + "supported_features": 4, + }, ) hass.states.async_set( "remote.tv", "on", - {"current_activity": "TV", "activity_list": ["TV", "MUSIC", "DVD"]}, + { + "current_activity": "TV", + "activity_list": ["TV", "MUSIC", "DVD"], + "supported_features": 4, + }, ) hass.states.async_set( "remote.music", "on", - {"current_activity": "MUSIC", "activity_list": ["TV", "MUSIC", "DVD"]}, + { + "current_activity": "MUSIC", + "activity_list": ["TV", "MUSIC", "DVD"], + "supported_features": 4, + }, ) hass.states.async_set( "remote.dvd", "on", - {"current_activity": "DVD", "activity_list": ["TV", "MUSIC", "DVD"]}, + { + "current_activity": "DVD", + "activity_list": ["TV", "MUSIC", "DVD"], + "supported_features": 4, + }, ) properties = await reported_properties(hass, "remote#unknown") From c9e7c76ee55c628e59c659bd331ab6bf0352bed6 Mon Sep 17 00:00:00 2001 From: "Mr. 
Bubbles" Date: Sun, 25 Aug 2024 15:15:47 +0200 Subject: [PATCH 1034/1635] Fix missing id in Habitica completed todos API response (#124565) * Fix missing id in completed todos API response * Copy id only if none * Update homeassistant/components/habitica/coordinator.py Co-authored-by: Paulus Schoutsen --------- Co-authored-by: Paulus Schoutsen --- .../components/habitica/coordinator.py | 9 +++++- tests/components/habitica/test_init.py | 28 +++++++++---------- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index 4e949b703fb..357643593e4 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -56,7 +56,14 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): try: user_response = await self.api.user.get() tasks_response = await self.api.tasks.user.get() - tasks_response.extend(await self.api.tasks.user.get(type="completedTodos")) + tasks_response.extend( + [ + {"id": task["_id"], **task} + for task in await self.api.tasks.user.get(type="completedTodos") + if task.get("_id") + ] + ) + except ClientResponseError as error: if error.status == HTTPStatus.TOO_MANY_REQUESTS: _LOGGER.debug("Currently rate limited, skipping update") diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index 31c3a1fae39..4c2b1e2aae6 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -73,7 +73,20 @@ def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: } }, ) - + aioclient_mock.get( + "https://habitica.com/api/v3/tasks/user?type=completedTodos", + json={ + "data": [ + { + "text": "this is a mock todo #5", + "id": 5, + "_id": 5, + "type": "todo", + "completed": True, + } + ] + }, + ) aioclient_mock.get( "https://habitica.com/api/v3/tasks/user", json={ @@ -88,19 +101,6 @@ def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: ] }, ) - aioclient_mock.get( - "https://habitica.com/api/v3/tasks/user?type=completedTodos", - json={ - "data": [ - { - "text": "this is a mock todo #5", - "id": 5, - "type": "todo", - "completed": True, - } - ] - }, - ) aioclient_mock.post( "https://habitica.com/api/v3/tasks/user", From f84a04e11348ccbe25eed09b17cb549bfa99daca Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 04:13:40 -1000 Subject: [PATCH 1035/1635] Bump ulid-transform to 1.0.2 (#124544) changelog: https://github.com/bdraco/ulid-transform/compare/v0.13.1...v1.0.2 python 3.13 support added python 3.10 support dropped --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 249654d1066..23d759f270d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -56,7 +56,7 @@ PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.13.1 +ulid-transform==1.0.2 urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 diff --git a/pyproject.toml b/pyproject.toml index 393a459c70d..10bc26f1a0a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ dependencies = [ "requests==2.32.3", "SQLAlchemy==2.0.31", "typing-extensions>=4.12.2,<5.0", - "ulid-transform==0.13.1", + "ulid-transform==1.0.2", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 diff --git a/requirements.txt b/requirements.txt index 8aeb1f1d5ec..ad6a39ddb54 100644 --- a/requirements.txt +++ b/requirements.txt @@ -36,7 +36,7 @@ PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.13.1 +ulid-transform==1.0.2 urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 From 36bfd7b9cea3ce4f646d44863db8e51686840d03 Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Mon, 26 Aug 2024 00:15:51 +1000 Subject: [PATCH 1036/1635] Bump aiolifx to 1.0.8 to support LIFX B10 and T10 Candles (#124263) Signed-off-by: Avi Miller --- homeassistant/components/lifx/manifest.json | 4 +++- homeassistant/generated/zeroconf.py | 8 ++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 13 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 54cff7d6e1f..08540702736 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -17,6 +17,7 @@ "models": [ "LIFX A19", "LIFX A21", + "LIFX B10", "LIFX Beam", "LIFX BR30", "LIFX Candle", @@ -40,6 +41,7 @@ "LIFX Round", "LIFX Square", "LIFX String", + "LIFX T10", "LIFX Tile", "LIFX White", "LIFX Z" @@ -48,7 +50,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.6", + "aiolifx==1.0.8", "aiolifx-effects==0.3.2", "aiolifx-themes==0.5.0" ] diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 3e5e34090d1..389a4435910 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -68,6 +68,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX B10": { + "always_discover": True, + "domain": "lifx", + }, "LIFX BR30": { "always_discover": True, "domain": "lifx", @@ -160,6 +164,10 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, + "LIFX T10": { + "always_discover": True, + "domain": "lifx", + }, "LIFX Tile": { "always_discover": True, "domain": "lifx", diff --git a/requirements_all.txt b/requirements_all.txt index e0b15c4b566..5c8584b0df9 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -276,7 +276,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.6 +aiolifx==1.0.8 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a8dbff4d232..353182b1714 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -258,7 +258,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.6 +aiolifx==1.0.8 # homeassistant.components.livisi aiolivisi==0.0.19 From 1eeb3bdcdfb7f8fc741e77ae1e5cff8dd05a1683 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 25 Aug 2024 17:23:21 +0200 Subject: [PATCH 1037/1635] Bump fyta_cli to 0.6.3 (#124574) --- homeassistant/components/fyta/manifest.json | 2 +- homeassistant/components/fyta/sensor.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index 07387f4ab05..c07a19a3db0 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["fyta_cli==0.6.0"] + "requirements": ["fyta_cli==0.6.3"] } diff --git a/homeassistant/components/fyta/sensor.py b/homeassistant/components/fyta/sensor.py index 262d0b6d1f4..a351d79dd8b 100644 --- a/homeassistant/components/fyta/sensor.py +++ b/homeassistant/components/fyta/sensor.py @@ -145,7 +145,7 @@ async def async_setup_entry( FytaPlantSensor(coordinator, entry, sensor, plant_id) for plant_id in coordinator.fyta.plant_list for sensor in SENSORS - if sensor.key in dir(coordinator.data[plant_id]) + if sensor.key in dir(coordinator.data.get(plant_id)) ] async_add_entities(plant_entities) diff --git a/requirements_all.txt b/requirements_all.txt index 5c8584b0df9..699193d350e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -924,7 +924,7 @@ freesms==0.2.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.6.0 +fyta_cli==0.6.3 # homeassistant.components.google_translate gTTS==2.2.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 353182b1714..31420e7f7af 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -777,7 +777,7 @@ freebox-api==1.1.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.6.0 +fyta_cli==0.6.3 # homeassistant.components.google_translate gTTS==2.2.4 From d3293336b125200a378351f52e2bfc76dcdc28b1 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sun, 25 Aug 2024 17:24:06 +0200 Subject: [PATCH 1038/1635] Ensure write access to hassrelease data folder (#124573) Co-authored-by: Robert Resch --- Dockerfile.dev | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile.dev b/Dockerfile.dev index d7a2f2b7bf9..d05c6df425c 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -42,7 +42,8 @@ WORKDIR /usr/src # Setup hass-release RUN git clone --depth 1 https://github.com/home-assistant/hass-release \ - && uv pip install --system -e hass-release/ + && uv pip install --system -e hass-release/ \ + && chown -R vscode /usr/src/hass-release/data USER vscode ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv" From d18e81f93223cdd71eface609c172e7d7b22855b Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 25 Aug 2024 08:56:17 -0700 
Subject: [PATCH 1039/1635] Update a roborock blocking call to be fully async (#124266) Remove a blocking call in roborock --- homeassistant/components/roborock/__init__.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index d107a0bee8b..88a603eca2b 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -168,7 +168,9 @@ async def setup_device_v1( home_data_rooms: list[HomeDataRoom], ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" - mqtt_client = RoborockMqttClientV1(user_data, DeviceData(device, product_info.name)) + mqtt_client = await hass.async_add_executor_job( + RoborockMqttClientV1, user_data, DeviceData(device, product_info.name) + ) try: networking = await mqtt_client.get_networking() if networking is None: From d94b1e6e8a643aee572b97ac2fd0bda9c5b509c0 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 25 Aug 2024 18:14:38 +0200 Subject: [PATCH 1040/1635] Add icons for conductivity (#124576) --- homeassistant/components/number/icons.json | 3 +++ homeassistant/components/number/strings.json | 3 +++ homeassistant/components/sensor/icons.json | 3 +++ 3 files changed, 9 insertions(+) diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index 2ce22fcaa4a..d74aa1bf408 100644 --- a/homeassistant/components/number/icons.json +++ b/homeassistant/components/number/icons.json @@ -21,6 +21,9 @@ "carbon_monoxide": { "default": "mdi:molecule-co" }, + "conductivity": { + "default": "mdi:sprout-outline" + }, "current": { "default": "mdi:current-ac" }, diff --git a/homeassistant/components/number/strings.json b/homeassistant/components/number/strings.json index d6932286469..580385172e3 100644 --- a/homeassistant/components/number/strings.json +++ b/homeassistant/components/number/strings.json @@ -49,6 +49,9 @@ "carbon_monoxide": { "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]" }, + "conductivity": { + "name": "[%key:component::sensor::entity_component::conductivity::name%]" + }, "current": { "name": "[%key:component::sensor::entity_component::current::name%]" }, diff --git a/homeassistant/components/sensor/icons.json b/homeassistant/components/sensor/icons.json index f23826cfe95..6132fcbc1e9 100644 --- a/homeassistant/components/sensor/icons.json +++ b/homeassistant/components/sensor/icons.json @@ -18,6 +18,9 @@ "carbon_monoxide": { "default": "mdi:molecule-co" }, + "conductivity": { + "default": "mdi:sprout-outline" + }, "current": { "default": "mdi:current-ac" }, From 242aae514ec29469befd6ba9e22ef03881bc821f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 06:32:32 -1000 Subject: [PATCH 1041/1635] Small cleanups to list_statistic_ids (#124451) --- homeassistant/components/sensor/recorder.py | 25 ++++++++++----------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index c02c3ce7b7a..be4dce28546 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -640,32 +640,31 @@ def list_statistic_ids( result: dict[str, StatisticMetaData] = {} for state in entities: - state_class = state.attributes[ATTR_STATE_CLASS] - state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + entity_id = state.entity_id + if statistic_ids is not None and entity_id not in statistic_ids: + continue + attributes = state.attributes + state_class = attributes[ATTR_STATE_CLASS] provided_statistics = DEFAULT_STATISTICS[state_class] if statistic_type is not None and statistic_type not in provided_statistics: continue - if statistic_ids is not None and state.entity_id not in statistic_ids: - continue - if ( - "sum" in provided_statistics - and ATTR_LAST_RESET not in state.attributes - and state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + (has_sum := "sum" in provided_statistics) + and ATTR_LAST_RESET not in attributes + and state_class == SensorStateClass.MEASUREMENT ): continue - result[state.entity_id] = { + result[entity_id] = { "has_mean": "mean" in provided_statistics, - "has_sum": "sum" in provided_statistics, + "has_sum": has_sum, "name": None, "source": RECORDER_DOMAIN, - "statistic_id": state.entity_id, - "unit_of_measurement": state_unit, + "statistic_id": entity_id, + "unit_of_measurement": attributes.get(ATTR_UNIT_OF_MEASUREMENT), } - continue return result From 70fc8fa2ebadf7df101101136fe96a5499b0a99c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 06:33:21 -1000 Subject: [PATCH 1042/1635] Improve performance of fetching stats metadata (#124428) --- .../table_managers/statistics_meta.py | 46 ++++++++++--------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/recorder/table_managers/statistics_meta.py b/homeassistant/components/recorder/table_managers/statistics_meta.py index 9b33eff0c9b..77fc34518db 100644 --- a/homeassistant/components/recorder/table_managers/statistics_meta.py +++ b/homeassistant/components/recorder/table_managers/statistics_meta.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import threading -from typing import TYPE_CHECKING, Literal, cast +from typing import TYPE_CHECKING, Final, Literal from lru import LRU from sqlalchemy import lambda_stmt, select @@ -33,6 +33,14 @@ QUERY_STATISTIC_META = ( StatisticsMeta.name, ) +INDEX_ID: Final = 0 +INDEX_STATISTIC_ID: Final = 1 +INDEX_SOURCE: Final = 2 +INDEX_UNIT_OF_MEASUREMENT: Final = 3 +INDEX_HAS_MEAN: Final = 4 +INDEX_HAS_SUM: Final = 5 +INDEX_NAME: Final = 6 + def _generate_get_metadata_stmt( statistic_ids: set[str] | None = None, @@ -52,23 +60,6 @@ def _generate_get_metadata_stmt( return stmt -def _statistics_meta_to_id_statistics_metadata( - meta: StatisticsMeta, -) -> tuple[int, StatisticMetaData]: - """Convert StatisticsMeta tuple of metadata_id and StatisticMetaData.""" - return ( - meta.id, - { - "has_mean": meta.has_mean, # type: ignore[typeddict-item] - "has_sum": meta.has_sum, # type: ignore[typeddict-item] - "name": meta.name, - "source": meta.source, # type: ignore[typeddict-item] - "statistic_id": meta.statistic_id, # type: ignore[typeddict-item] - "unit_of_measurement": meta.unit_of_measurement, - }, - ) - - class StatisticsMetaManager: """Manage the StatisticsMeta table.""" @@ -100,6 +91,10 @@ class StatisticsMetaManager: and self.recorder.thread_id == threading.get_ident() ) results: dict[str, tuple[int, StatisticMetaData]] = {} + id_meta: tuple[int, StatisticMetaData] + meta: StatisticMetaData + statistic_id: str + row_id: int with session.no_autoflush: stat_id_to_id_meta = self._stat_id_to_id_meta for row in execute_stmt_lambda_element( @@ -109,10 +104,17 @@ class StatisticsMetaManager: ), orm_rows=False, ): - statistics_meta = cast(StatisticsMeta, row) - id_meta = _statistics_meta_to_id_statistics_metadata(statistics_meta) - - statistic_id = cast(str, statistics_meta.statistic_id) + statistic_id = row[INDEX_STATISTIC_ID] + row_id = row[INDEX_ID] + meta = { + "has_mean": row[INDEX_HAS_MEAN], + "has_sum": row[INDEX_HAS_SUM], + "name": row[INDEX_NAME], + "source": row[INDEX_SOURCE], + "statistic_id": statistic_id, + "unit_of_measurement": row[INDEX_UNIT_OF_MEASUREMENT], + } + id_meta = (row_id, meta) results[statistic_id] = id_meta if update_cache: stat_id_to_id_meta[statistic_id] = id_meta From 909dfcc436aba8f51cd506d2e3f252045171e025 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 25 Aug 2024 18:34:39 +0200 Subject: [PATCH 1043/1635] Improve config flow type hints (part 5) (#124349) --- .../components/totalconnect/config_flow.py | 8 ++++--- .../components/ukraine_alarm/config_flow.py | 17 ++++++++++----- .../components/wilight/config_flow.py | 4 ++-- .../components/xiaomi_aqara/config_flow.py | 21 ++++++++++++------- .../components/yeelight/config_flow.py | 13 ++++++++---- 5 files changed, 42 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/totalconnect/config_flow.py 
b/homeassistant/components/totalconnect/config_flow.py index 19d8f09933e..63973fd44e9 100644 --- a/homeassistant/components/totalconnect/config_flow.py +++ b/homeassistant/components/totalconnect/config_flow.py @@ -28,14 +28,16 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" self.username = None self.password = None - self.usercodes = {} + self.usercodes: dict[str, Any] = {} self.client = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} diff --git a/homeassistant/components/ukraine_alarm/config_flow.py b/homeassistant/components/ukraine_alarm/config_flow.py index bafe6d1fe11..faaa9240df3 100644 --- a/homeassistant/components/ukraine_alarm/config_flow.py +++ b/homeassistant/components/ukraine_alarm/config_flow.py @@ -3,12 +3,13 @@ from __future__ import annotations import logging +from typing import Any import aiohttp from uasiren.client import Client import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_REGION from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -22,12 +23,14 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize a new UkraineAlarmConfigFlow.""" self.states = None - self.selected_region = None + self.selected_region: dict[str, Any] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if len(self._async_current_entries()) == 5: @@ -91,7 +94,11 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): ): self.selected_region = _find(source, user_input[CONF_REGION]) - if next_step and self.selected_region["regionChildIds"]: + if ( + next_step + and self.selected_region + and self.selected_region["regionChildIds"] + ): return await getattr(self, f"async_step_{next_step}")() return await self._async_finish_flow() diff --git a/homeassistant/components/wilight/config_flow.py b/homeassistant/components/wilight/config_flow.py index 52b3b426c20..babc011fc35 100644 --- a/homeassistant/components/wilight/config_flow.py +++ b/homeassistant/components/wilight/config_flow.py @@ -24,13 +24,13 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the WiLight flow.""" self._host = None self._serial_number = None self._title = None self._model_name = None - self._wilight_components = [] + self._wilight_components: list[str] = [] self._components_text = "" def _wilight_update(self, host, serial_number, model_name): diff --git a/homeassistant/components/xiaomi_aqara/config_flow.py b/homeassistant/components/xiaomi_aqara/config_flow.py index 8f391c8ddf3..a89bb8447a3 100644 --- a/homeassistant/components/xiaomi_aqara/config_flow.py +++ b/homeassistant/components/xiaomi_aqara/config_flow.py @@ -2,6 +2,7 @@ import logging from socket import gaierror +from typing import TYPE_CHECKING, Any import voluptuous as vol from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery @@ -49,13 +50,13 @@ class 
XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self.host = None + self.host: str | None = None self.interface = DEFAULT_INTERFACE - self.sid = None - self.gateways = None - self.selected_gateway = None + self.sid: str | None = None + self.gateways: dict[str, XiaomiGateway] | None = None + self.selected_gateway: XiaomiGateway | None = None @callback def async_show_form_step_user(self, errors): @@ -66,9 +67,11 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is None: return self.async_show_form_step_user(errors) @@ -96,6 +99,8 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): None, ) + if TYPE_CHECKING: + assert self.selected_gateway if self.selected_gateway.connection_error: errors[CONF_HOST] = "invalid_host" if self.selected_gateway.mac_error: @@ -115,6 +120,8 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): self.gateways = xiaomi.gateways + if TYPE_CHECKING: + assert self.gateways is not None if len(self.gateways) == 1: self.selected_gateway = list(self.gateways.values())[0] self.sid = self.selected_gateway.sid diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index d7bf4e25996..b4bb7da9a22 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse import voluptuous as vol @@ -57,11 +58,11 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): """Return the options flow.""" return OptionsFlowHandler(config_entry) - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" - self._discovered_devices = {} + self._discovered_devices: dict[str, Any] = {} self._discovered_model = None - self._discovered_ip = None + self._discovered_ip: str | None = None async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -162,7 +163,9 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): step_id="discovery_confirm", description_placeholders=placeholders ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: @@ -176,6 +179,8 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: self._abort_if_unique_id_configured() + if TYPE_CHECKING: + assert self.unique_id return self.async_create_entry( title=async_format_model_id(model, self.unique_id), data={ From 58b7711bdd50d00264e4ab66686d9ac29d8d845b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 25 Aug 2024 18:36:03 +0200 Subject: [PATCH 1044/1635] Improve config flow type hints (part 3) (#124346) --- .../components/growatt_server/config_flow.py | 14 +++++++++----- .../components/hvv_departures/config_flow.py | 19 +++++++++++++------ .../components/icloud/config_flow.py | 14 ++++++++------ .../components/iotawatt/config_flow.py | 11 +++++++---- 
.../kostal_plenticore/config_flow.py | 11 ++++++----- .../components/lutron_caseta/config_flow.py | 11 +++++++---- .../components/nobo_hub/config_flow.py | 11 +++++++---- homeassistant/components/nuki/config_flow.py | 10 ++++++---- homeassistant/components/onvif/config_flow.py | 10 ++++++---- 9 files changed, 69 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/growatt_server/config_flow.py b/homeassistant/components/growatt_server/config_flow.py index 95002a70a95..8123d7ff067 100644 --- a/homeassistant/components/growatt_server/config_flow.py +++ b/homeassistant/components/growatt_server/config_flow.py @@ -1,9 +1,11 @@ """Config flow for growatt server integration.""" +from typing import Any + import growattServer import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import callback @@ -21,11 +23,11 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialise growatt server flow.""" - self.api = None + self.api: growattServer.GrowattApi | None = None self.user_id = None - self.data = {} + self.data: dict[str, Any] = {} @callback def _async_show_user_form(self, errors=None): @@ -42,7 +44,9 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return self._async_show_user_form() diff --git a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py index 0c909e2d8c1..a02796dbffb 100644 --- a/homeassistant/components/hvv_departures/config_flow.py +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -9,7 +9,12 @@ from pygti.auth import GTI_DEFAULT_HOST from pygti.exceptions import CannotConnect, InvalidAuth import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_OFFSET, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from homeassistant.helpers import aiohttp_client @@ -44,13 +49,15 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize component.""" - self.hub = None - self.data = None - self.stations = {} + self.hub: GTIHub | None = None + self.data: dict[str, Any] | None = None + self.stations: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/icloud/config_flow.py b/homeassistant/components/icloud/config_flow.py index 30942ce6727..544f751dc0b 100644 --- a/homeassistant/components/icloud/config_flow.py +++ b/homeassistant/components/icloud/config_flow.py @@ -43,7 +43,7 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize iCloud config flow.""" self.api = None 
self._username = None @@ -55,8 +55,8 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): self._trusted_device = None self._verification_code = None - self._existing_entry_data = None - self._description_placeholders = None + self._existing_entry_data: dict[str, Any] | None = None + self._description_placeholders: dict[str, str] | None = None def _show_setup_form(self, user_input=None, errors=None, step_id="user"): """Show the setup form to the user.""" @@ -164,11 +164,13 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): await self.hass.config_entries.async_reload(entry.entry_id) return self.async_abort(reason="reauth_successful") - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - errors = {} + errors: dict[str, str] = {} - icloud_dir = Store(self.hass, STORAGE_VERSION, STORAGE_KEY) + icloud_dir = Store[Any](self.hass, STORAGE_VERSION, STORAGE_KEY) if not os.path.exists(icloud_dir.path): await self.hass.async_add_executor_job(os.makedirs, icloud_dir.path) diff --git a/homeassistant/components/iotawatt/config_flow.py b/homeassistant/components/iotawatt/config_flow.py index f8821784a1d..187423c7d8b 100644 --- a/homeassistant/components/iotawatt/config_flow.py +++ b/homeassistant/components/iotawatt/config_flow.py @@ -3,11 +3,12 @@ from __future__ import annotations import logging +from typing import Any from iotawattpy.iotawatt import Iotawatt import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -46,11 +47,13 @@ class IOTaWattConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self._data = {} + self._data: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is None: user_input = {} diff --git a/homeassistant/components/kostal_plenticore/config_flow.py b/homeassistant/components/kostal_plenticore/config_flow.py index 547afa9d71b..59c737a0874 100644 --- a/homeassistant/components/kostal_plenticore/config_flow.py +++ b/homeassistant/components/kostal_plenticore/config_flow.py @@ -1,12 +1,13 @@ """Config flow for Kostal Plenticore Solar Inverter integration.""" import logging +from typing import Any from aiohttp.client_exceptions import ClientError from pykoplenti import ApiClient, AuthenticationException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_BASE, CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -44,10 +45,11 @@ class KostalPlenticoreConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} - hostname = None if user_input is not None: self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) @@ -62,8 
+64,7 @@ class KostalPlenticoreConfigFlow(ConfigFlow, domain=DOMAIN): except Exception: _LOGGER.exception("Unexpected exception") errors[CONF_BASE] = "unknown" - - if not errors: + else: return self.async_create_entry(title=hostname, data=user_input) return self.async_show_form( diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index d7b47aebc7e..0458b8ee185 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -6,6 +6,7 @@ import asyncio import logging import os import ssl +from typing import Any from pylutron_caseta.pairing import PAIR_CA, PAIR_CERT, PAIR_KEY, async_pair from pylutron_caseta.smartbridge import Smartbridge @@ -50,14 +51,16 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize a Lutron Caseta flow.""" - self.data = {} - self.lutron_id = None + self.data: dict[str, Any] = {} + self.lutron_id: str | None = None self.tls_assets_validated = False self.attempted_tls_validation = False - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if user_input is not None: self.data[CONF_HOST] = user_input[CONF_HOST] diff --git a/homeassistant/components/nobo_hub/config_flow.py b/homeassistant/components/nobo_hub/config_flow.py index 6fc5bba2c1b..8aed520f21e 100644 --- a/homeassistant/components/nobo_hub/config_flow.py +++ b/homeassistant/components/nobo_hub/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import socket -from typing import Any +from typing import TYPE_CHECKING, Any from pynobo import nobo import voluptuous as vol @@ -36,10 +36,10 @@ class NoboHubConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" - self._discovered_hubs = None - self._hub = None + self._discovered_hubs: dict[str, Any] | None = None + self._hub: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -75,6 +75,9 @@ class NoboHubConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle configuration of a selected discovered device.""" errors = {} + if TYPE_CHECKING: + assert self._discovered_hubs + assert self._hub if user_input is not None: serial_prefix = self._discovered_hubs[self._hub] serial_suffix = user_input["serial_suffix"] diff --git a/homeassistant/components/nuki/config_flow.py b/homeassistant/components/nuki/config_flow.py index 286395e1ff3..3b8015827f1 100644 --- a/homeassistant/components/nuki/config_flow.py +++ b/homeassistant/components/nuki/config_flow.py @@ -62,12 +62,14 @@ async def validate_input(hass, data): class NukiConfigFlow(ConfigFlow, domain=DOMAIN): """Nuki config flow.""" - def __init__(self): + def __init__(self) -> None: """Initialize the Nuki config flow.""" - self.discovery_schema = {} - self._data = {} + self.discovery_schema: vol.Schema | None = None + self._data: Mapping[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" return await self.async_step_validate(user_input) diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index 
36ae0e1bf18..30184d1abc3 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -112,13 +112,15 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OnvifOptionsFlowHandler(config_entry) - def __init__(self): + def __init__(self) -> None: """Initialize the ONVIF config flow.""" self.device_id = None - self.devices = [] - self.onvif_config = {} + self.devices: list[dict[str, Any]] = [] + self.onvif_config: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user flow.""" if user_input: if user_input["auto"]: From ebc49d938a68cf7fc06e32629a41c070adec5593 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 25 Aug 2024 18:36:44 +0200 Subject: [PATCH 1045/1635] Improve config flow type hints (part 2) (#124344) --- .../aurora_abb_powerone/config_flow.py | 5 +-- .../aussie_broadband/config_flow.py | 4 +- .../components/blebox/config_flow.py | 40 +++++++++++-------- .../components/broadlink/config_flow.py | 14 +++++-- homeassistant/components/cast/config_flow.py | 12 +++--- .../components/directv/config_flow.py | 4 +- homeassistant/components/eafm/config_flow.py | 20 ++++++---- .../components/ecobee/config_flow.py | 10 +++-- .../components/emonitor/config_flow.py | 11 +++-- .../components/fireservicerota/config_flow.py | 12 +++--- 10 files changed, 79 insertions(+), 53 deletions(-) diff --git a/homeassistant/components/aurora_abb_powerone/config_flow.py b/homeassistant/components/aurora_abb_powerone/config_flow.py index f0093c62631..47c349ab48a 100644 --- a/homeassistant/components/aurora_abb_powerone/config_flow.py +++ b/homeassistant/components/aurora_abb_powerone/config_flow.py @@ -75,11 +75,10 @@ class AuroraABBConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialise the config flow.""" - self.config = None self._com_ports_list: list[str] | None = None - self._default_com_port = None + self._default_com_port: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/aussie_broadband/config_flow.py b/homeassistant/components/aussie_broadband/config_flow.py index 587c7df2b36..b434ab69ae5 100644 --- a/homeassistant/components/aussie_broadband/config_flow.py +++ b/homeassistant/components/aussie_broadband/config_flow.py @@ -22,11 +22,11 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" self.data: dict = {} self.options: dict = {CONF_SERVICES: []} - self.services: list[dict[str]] = [] + self.services: list[dict[str, Any]] = [] self.client: AussieBB | None = None self._reauth_username: str | None = None diff --git a/homeassistant/components/blebox/config_flow.py b/homeassistant/components/blebox/config_flow.py index 1f04f06a05a..2221e35a81f 100644 --- a/homeassistant/components/blebox/config_flow.py +++ b/homeassistant/components/blebox/config_flow.py @@ -35,15 +35,11 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -def host_port(data): - """Return a list with host and port.""" - return (data[CONF_HOST], data[CONF_PORT]) - - def create_schema(previous_input=None): """Create a schema with given values as default.""" if previous_input is not None: - host, port = 
host_port(previous_input) + host = previous_input[CONF_HOST] + port = previous_input[CONF_PORT] else: host = DEFAULT_HOST port = DEFAULT_PORT @@ -70,9 +66,9 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the BleBox config flow.""" - self.device_config = {} + self.device_config: dict[str, Any] = {} def handle_step_exception( self, step, exception, schema, host, port, message_id, log_fn @@ -146,7 +142,9 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle initial user-triggered config step.""" hass = self.hass schema = create_schema(user_input) @@ -159,14 +157,14 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={}, ) - addr = host_port(user_input) + host = user_input[CONF_HOST] + port = user_input[CONF_PORT] username = user_input.get(CONF_USERNAME) password = user_input.get(CONF_PASSWORD) for entry in self._async_current_entries(): - if addr == host_port(entry.data): - host, port = addr + if host == entry.data[CONF_HOST] and port == entry.data[CONF_PORT]: return self.async_abort( reason=ADDRESS_ALREADY_CONFIGURED, description_placeholders={"address": f"{host}:{port}"}, @@ -174,27 +172,35 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): websession = get_maybe_authenticated_session(hass, password, username) - api_host = ApiHost(*addr, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER) + api_host = ApiHost( + host, port, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER + ) try: product = await Box.async_from_host(api_host) except UnsupportedBoxVersion as ex: return self.handle_step_exception( - "user", ex, schema, *addr, UNSUPPORTED_VERSION, _LOGGER.debug + "user", + ex, + schema, + host, + port, + UNSUPPORTED_VERSION, + _LOGGER.debug, ) except UnauthorizedRequest as ex: return self.handle_step_exception( - "user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.error + "user", ex, schema, host, port, CANNOT_CONNECT, _LOGGER.error ) except Error as ex: return self.handle_step_exception( - "user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.warning + "user", ex, schema, host, port, CANNOT_CONNECT, _LOGGER.warning ) except RuntimeError as ex: return self.handle_step_exception( - "user", ex, schema, *addr, UNKNOWN, _LOGGER.error + "user", ex, schema, host, port, UNKNOWN, _LOGGER.error ) # Check if configured but IP changed since diff --git a/homeassistant/components/broadlink/config_flow.py b/homeassistant/components/broadlink/config_flow.py index 89d540a27fc..2d79ba4bea1 100644 --- a/homeassistant/components/broadlink/config_flow.py +++ b/homeassistant/components/broadlink/config_flow.py @@ -5,7 +5,7 @@ import errno from functools import partial import logging import socket -from typing import Any +from typing import TYPE_CHECKING, Any import broadlink as blk from broadlink.exceptions import ( @@ -39,9 +39,11 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the Broadlink flow.""" - self.device = None + self.device: blk.Device | None = None - async def async_set_device(self, device, raise_on_progress=True): + async def async_set_device( + self, device: blk.Device, raise_on_progress: bool = True + ) -> None: """Define a device for the config flow.""" if device.type not in DEVICE_TYPES: _LOGGER.error( @@ -90,7 +92,9 @@ class BroadlinkFlowHandler(ConfigFlow, 
domain=DOMAIN): await self.async_set_device(device) return await self.async_step_auth() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} @@ -127,6 +131,8 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): ) return await self.async_step_auth() + if TYPE_CHECKING: + assert self.device if device.mac == self.device.mac: await self.async_set_device(device, raise_on_progress=False) return await self.async_step_auth() diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 6ccd7be19c3..22351f5d2f7 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -29,11 +29,11 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize flow.""" - self._ignore_cec = set() - self._known_hosts = set() - self._wanted_uuid = set() + self._ignore_cec = set[str]() + self._known_hosts = set[str]() + self._wanted_uuid = set[str]() @staticmethod @callback @@ -43,7 +43,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return CastOptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/directv/config_flow.py b/homeassistant/components/directv/config_flow.py index 7cdfd5c07c9..56d8f262d1c 100644 --- a/homeassistant/components/directv/config_flow.py +++ b/homeassistant/components/directv/config_flow.py @@ -40,9 +40,9 @@ class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Set up the instance.""" - self.discovery_info = {} + self.discovery_info: dict[str, Any] = {} async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/eafm/config_flow.py b/homeassistant/components/eafm/config_flow.py index 0345d2acf94..6be1066575f 100644 --- a/homeassistant/components/eafm/config_flow.py +++ b/homeassistant/components/eafm/config_flow.py @@ -1,9 +1,11 @@ """Config flow to configure flood monitoring gauges.""" +from typing import Any + from aioeafm import get_stations import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN @@ -14,21 +16,23 @@ class UKFloodsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Handle a UK Floods config flow.""" - self.stations = {} + self.stations: dict[str, str] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: - station = self.stations[user_input["station"]] - await self.async_set_unique_id(station, raise_on_progress=False) + selected_station = self.stations[user_input["station"]] + await self.async_set_unique_id(selected_station, raise_on_progress=False) 
self._abort_if_unique_id_configured() return self.async_create_entry( title=user_input["station"], - data={"station": station}, + data={"station": selected_station}, ) session = async_get_clientsession(hass=self.hass) diff --git a/homeassistant/components/ecobee/config_flow.py b/homeassistant/components/ecobee/config_flow.py index dd5c2c62c85..e9a89e0fba5 100644 --- a/homeassistant/components/ecobee/config_flow.py +++ b/homeassistant/components/ecobee/config_flow.py @@ -1,5 +1,7 @@ """Config flow to configure ecobee.""" +from typing import Any + from pyecobee import ( ECOBEE_API_KEY, ECOBEE_CONFIG_FILENAME, @@ -8,7 +10,7 @@ from pyecobee import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.exceptions import HomeAssistantError from homeassistant.util.json import load_json_object @@ -23,9 +25,11 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the ecobee flow.""" - self._ecobee = None + self._ecobee: Ecobee | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if self._async_current_entries(): # Config entry already exists, only one allowed. diff --git a/homeassistant/components/emonitor/config_flow.py b/homeassistant/components/emonitor/config_flow.py index 9909ddff19c..b90b1477f87 100644 --- a/homeassistant/components/emonitor/config_flow.py +++ b/homeassistant/components/emonitor/config_flow.py @@ -1,6 +1,7 @@ """Config flow for SiteSage Emonitor integration.""" import logging +from typing import Any from aioemonitor import Emonitor import aiohttp @@ -33,12 +34,14 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize Emonitor ConfigFlow.""" - self.discovered_ip = None - self.discovered_info = None + self.discovered_ip: str | None = None + self.discovered_info: dict[str, str] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/fireservicerota/config_flow.py b/homeassistant/components/fireservicerota/config_flow.py index afaef17c5a6..7b7248d44a1 100644 --- a/homeassistant/components/fireservicerota/config_flow.py +++ b/homeassistant/components/fireservicerota/config_flow.py @@ -27,18 +27,20 @@ class FireServiceRotaFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize config flow.""" self.api = None self._base_url = None self._username = None self._password = None - self._existing_entry = None - self._description_placeholders = None + self._existing_entry: dict[str, Any] | None = None + self._description_placeholders: dict[str, str] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is None: return self._show_setup_form(user_input, errors) From 41b129e990d9c2993dab3e34e9df37faa1059856 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 06:48:17 -1000 Subject: [PATCH 1046/1635] Split esphome state property decorators (#124332) --- homeassistant/components/esphome/climate.py | 9 +++--- homeassistant/components/esphome/entity.py | 29 ++++++++++++----- .../components/esphome/media_player.py | 3 +- homeassistant/components/esphome/number.py | 9 ++---- homeassistant/components/esphome/sensor.py | 31 ++++++++++--------- homeassistant/components/esphome/text.py | 4 +-- 6 files changed, 49 insertions(+), 36 deletions(-) diff --git a/homeassistant/components/esphome/climate.py b/homeassistant/components/esphome/climate.py index da1cdfb0eab..1b9b53f24cd 100644 --- a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -58,6 +58,7 @@ from homeassistant.core import callback from .entity import ( EsphomeEntity, convert_api_error_ha_error, + esphome_float_state_property, esphome_state_property, platform_async_setup_entry, ) @@ -227,7 +228,7 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti return _SWING_MODES.from_esphome(self._state.swing_mode) @property - @esphome_state_property + @esphome_float_state_property def current_temperature(self) -> float | None: """Return the current temperature.""" return self._state.current_temperature @@ -241,19 +242,19 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti return round(self._state.current_humidity) @property - @esphome_state_property + @esphome_float_state_property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" return self._state.target_temperature @property - @esphome_state_property + @esphome_float_state_property def target_temperature_low(self) -> float | None: """Return the lowbound target temperature we try to reach.""" return self._state.target_temperature_low @property - @esphome_state_property + @esphome_float_state_property def target_temperature_high(self) -> float | None: """Return the highbound target temperature we try to reach.""" return self._state.target_temperature_high diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index a484a988ea9..455a3f8d105 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -118,20 +118,35 @@ def esphome_state_property[_R, _EntityT: EsphomeEntity[Any, Any]]( ) -> Callable[[_EntityT], _R | None]: """Wrap a state property of an esphome entity. - This checks if the state object in the entity is set, and - prevents writing NAN values to the Home Assistant state machine. + This checks if the state object in the entity is set + and returns None if it is not set. """ @functools.wraps(func) def _wrapper(self: _EntityT) -> _R | None: + return func(self) if self._has_state else None + + return _wrapper + + +def esphome_float_state_property[_EntityT: EsphomeEntity[Any, Any]]( + func: Callable[[_EntityT], float | None], +) -> Callable[[_EntityT], float | None]: + """Wrap a state property of an esphome entity that returns a float. + + This checks if the state object in the entity is set, and returns + None if its not set. If also prevents writing NAN values to the + Home Assistant state machine. 
+ """ + + @functools.wraps(func) + def _wrapper(self: _EntityT) -> float | None: if not self._has_state: return None val = func(self) - if isinstance(val, float) and not math.isfinite(val): - # Home Assistant doesn't use NaN or inf values in state machine - # (not JSON serializable) - return None - return val + # Home Assistant doesn't use NaN or inf values in state machine + # (not JSON serializable) + return None if val is None or not math.isfinite(val) else val return _wrapper diff --git a/homeassistant/components/esphome/media_player.py b/homeassistant/components/esphome/media_player.py index ec9d61fb9e7..f7c5d7011f8 100644 --- a/homeassistant/components/esphome/media_player.py +++ b/homeassistant/components/esphome/media_player.py @@ -29,6 +29,7 @@ from homeassistant.core import callback from .entity import ( EsphomeEntity, convert_api_error_ha_error, + esphome_float_state_property, esphome_state_property, platform_async_setup_entry, ) @@ -79,7 +80,7 @@ class EsphomeMediaPlayer( return self._state.muted @property - @esphome_state_property + @esphome_float_state_property def volume_level(self) -> float | None: """Volume level of the media player (0..1).""" return self._state.volume diff --git a/homeassistant/components/esphome/number.py b/homeassistant/components/esphome/number.py index 1e588c8d35e..2d74dad1bcf 100644 --- a/homeassistant/components/esphome/number.py +++ b/homeassistant/components/esphome/number.py @@ -3,7 +3,6 @@ from __future__ import annotations from functools import partial -import math from aioesphomeapi import ( EntityInfo, @@ -19,7 +18,7 @@ from homeassistant.util.enum import try_parse_enum from .entity import ( EsphomeEntity, convert_api_error_ha_error, - esphome_state_property, + esphome_float_state_property, platform_async_setup_entry, ) from .enum_mapper import EsphomeEnumMapper @@ -57,13 +56,11 @@ class EsphomeNumber(EsphomeEntity[NumberInfo, NumberState], NumberEntity): self._attr_mode = NumberMode.AUTO @property - @esphome_state_property + @esphome_float_state_property def native_value(self) -> float | None: """Return the state of the entity.""" state = self._state - if state.missing_state or not math.isfinite(state.state): - return None - return state.state + return None if state.missing_state else state.state @convert_api_error_ha_error async def async_set_native_value(self, value: float) -> None: diff --git a/homeassistant/components/esphome/sensor.py b/homeassistant/components/esphome/sensor.py index 0742bebed28..670c92d291e 100644 --- a/homeassistant/components/esphome/sensor.py +++ b/homeassistant/components/esphome/sensor.py @@ -26,7 +26,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum -from .entity import EsphomeEntity, esphome_state_property, platform_async_setup_entry +from .entity import EsphomeEntity, platform_async_setup_entry from .enum_mapper import EsphomeEnumMapper @@ -93,15 +93,16 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity): self._attr_state_class = _STATE_CLASSES.from_esphome(state_class) @property - @esphome_state_property def native_value(self) -> datetime | str | None: """Return the state of the entity.""" - state = self._state - if state.missing_state or not math.isfinite(state.state): + if not self._has_state or (state := self._state).missing_state: return None - if self._attr_device_class is SensorDeviceClass.TIMESTAMP: - return dt_util.utc_from_timestamp(state.state) - return 
f"{state.state:.{self._static_info.accuracy_decimals}f}" + state_float = state.state + if not math.isfinite(state_float): + return None + if self.device_class is SensorDeviceClass.TIMESTAMP: + return dt_util.utc_from_timestamp(state_float) + return f"{state_float:.{self._static_info.accuracy_decimals}f}" class EsphomeTextSensor(EsphomeEntity[TextSensorInfo, TextSensorState], SensorEntity): @@ -117,17 +118,17 @@ class EsphomeTextSensor(EsphomeEntity[TextSensorInfo, TextSensorState], SensorEn ) @property - @esphome_state_property def native_value(self) -> str | datetime | date | None: """Return the state of the entity.""" - state = self._state - if state.missing_state: + if not self._has_state or (state := self._state).missing_state: return None - if self._attr_device_class is SensorDeviceClass.TIMESTAMP: - return dt_util.parse_datetime(state.state) + state_str = state.state + device_class = self.device_class + if device_class is SensorDeviceClass.TIMESTAMP: + return dt_util.parse_datetime(state_str) if ( - self._attr_device_class is SensorDeviceClass.DATE - and (value := dt_util.parse_datetime(state.state)) is not None + device_class is SensorDeviceClass.DATE + and (value := dt_util.parse_datetime(state_str)) is not None ): return value.date() - return state.state + return state_str diff --git a/homeassistant/components/esphome/text.py b/homeassistant/components/esphome/text.py index f9dbbbcd853..36d77aac4a0 100644 --- a/homeassistant/components/esphome/text.py +++ b/homeassistant/components/esphome/text.py @@ -43,9 +43,7 @@ class EsphomeText(EsphomeEntity[TextInfo, TextState], TextEntity): def native_value(self) -> str | None: """Return the state of the entity.""" state = self._state - if state.missing_state: - return None - return state.state + return None if state.missing_state else state.state @convert_api_error_ha_error async def async_set_value(self, value: str) -> None: From 0628f967130a2aaef8ca4fff7b6a81aa5550864c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 09:21:15 -1000 Subject: [PATCH 1047/1635] Ensure all chars are polling when requesting manual update in homekit_controller (#124582) related issue #123963 --- .../homekit_controller/connection.py | 7 +++- .../homekit_controller/test_connection.py | 40 +++++++++++++++++++ 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/homekit_controller/connection.py b/homeassistant/components/homekit_controller/connection.py index 4da907daf3e..934e7e883ae 100644 --- a/homeassistant/components/homekit_controller/connection.py +++ b/homeassistant/components/homekit_controller/connection.py @@ -154,6 +154,7 @@ class HKDevice: self._pending_subscribes: set[tuple[int, int]] = set() self._subscribe_timer: CALLBACK_TYPE | None = None self._load_platforms_lock = asyncio.Lock() + self._full_update_requested: bool = False @property def entity_map(self) -> Accessories: @@ -841,6 +842,7 @@ class HKDevice: async def async_request_update(self, now: datetime | None = None) -> None: """Request an debounced update from the accessory.""" + self._full_update_requested = True await self._debounced_update.async_call() async def async_update(self, now: datetime | None = None) -> None: @@ -849,7 +851,8 @@ class HKDevice: accessories = self.entity_map.accessories if ( - len(accessories) == 1 + not self._full_update_requested + and len(accessories) == 1 and self.available and not (to_poll - self.watchable_characteristics) and self.pairing.is_available @@ -879,6 +882,8 @@ class HKDevice: firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid to_poll = {(first_accessory.aid, firmware_iid)} + self._full_update_requested = False + if not to_poll: self.async_update_available_state() _LOGGER.debug( diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 503ff171533..7ea791f9a1e 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -12,6 +12,7 @@ from aiohomekit.testing import FakeController import pytest from homeassistant.components.homekit_controller.const import ( + DEBOUNCE_COOLDOWN, DOMAIN, IDENTIFIER_ACCESSORY_ID, IDENTIFIER_LEGACY_ACCESSORY_ID, @@ -22,12 +23,14 @@ from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity from .common import ( setup_accessories_from_file, setup_platform, setup_test_accessories, setup_test_component, + time_changed, ) from tests.common import MockConfigEntry @@ -399,3 +402,40 @@ async def test_poll_firmware_version_only_all_watchable_accessory_mode( state = await helper.poll_and_get_state() assert state.state == STATE_OFF assert mock_get_characteristics.call_count == 8 + + +async def test_manual_poll_all_chars( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that a manual poll will check all chars.""" + + def _create_accessory(accessory: Accessory) -> Service: + service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") + + on_char = service.add_char(CharacteristicsTypes.ON) + on_char.value = 0 + + brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS) + brightness.value = 0 + + return service + + helper = await setup_test_component(hass, 
get_next_aid(), _create_accessory) + + with mock.patch.object( + helper.pairing, + "get_characteristics", + wraps=helper.pairing.get_characteristics, + ) as mock_get_characteristics: + # Initial state is that the light is off + await helper.poll_and_get_state() + # Verify only firmware version is polled + assert mock_get_characteristics.call_args_list[0][0][0] == {(1, 7)} + + # Now do a manual poll to ensure all chars are polled + mock_get_characteristics.reset_mock() + await async_update_entity(hass, helper.entity_id) + await time_changed(hass, 60) + await time_changed(hass, DEBOUNCE_COOLDOWN) + await hass.async_block_till_done() + assert len(mock_get_characteristics.call_args_list[0][0][0]) > 1 From be206156b0c6c8dea1bc61d3817e94bf9e52dfe6 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 25 Aug 2024 09:21:23 -1000 Subject: [PATCH 1048/1635] Fix shelly sleepy entities never being created (#124547) --- homeassistant/components/shelly/__init__.py | 19 +++++++- tests/components/shelly/test_binary_sensor.py | 36 +++++++++++---- tests/components/shelly/test_climate.py | 15 ++++--- tests/components/shelly/test_coordinator.py | 44 +++++++++++++++++-- tests/components/shelly/test_number.py | 6 ++- tests/components/shelly/test_sensor.py | 33 ++++++++++---- tests/components/shelly/test_switch.py | 6 ++- tests/components/shelly/test_update.py | 6 ++- 8 files changed, 132 insertions(+), 33 deletions(-) diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 994b1ed0430..1d3f67220fa 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -21,6 +21,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import ( config_validation as cv, device_registry as dr, + entity_registry as er, issue_registry as ir, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -192,8 +193,15 @@ async def _async_setup_block_entry( await hass.config_entries.async_forward_entry_setups( entry, runtime_data.platforms ) - elif sleep_period is None or device_entry is None: + elif ( + sleep_period is None + or device_entry is None + or not er.async_entries_for_device(er.async_get(hass), device_entry.id) + ): # Need to get sleep info or first time sleeping device setup, wait for device + # If there are no entities for the device, it means we added the device, but + # Home Assistant was restarted before the device was online. In this case we + # cannot restore the entities, so we need to wait for the device to be online. LOGGER.debug( "Setup for device %s will resume when device is online", entry.title ) @@ -268,8 +276,15 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) await hass.config_entries.async_forward_entry_setups( entry, runtime_data.platforms ) - elif sleep_period is None or device_entry is None: + elif ( + sleep_period is None + or device_entry is None + or not er.async_entries_for_device(er.async_get(hass), device_entry.id) + ): # Need to get sleep info or first time sleeping device setup, wait for device + # If there are no entities for the device, it means we added the device, but + # Home Assistant was restarted before the device was online. In this case we + # cannot restore the entities, so we need to wait for the device to be online. 
LOGGER.debug( "Setup for device %s will resume when device is online", entry.title ) diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 18f65deb907..fadfe28db3e 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -169,9 +169,14 @@ async def test_block_restored_sleeping_binary_sensor( ) -> None: """Test block restored sleeping binary sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_motion", + "sensor_0-motion", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_block_device, "initialized", False) @@ -196,9 +201,14 @@ async def test_block_restored_sleeping_binary_sensor_no_last_state( ) -> None: """Test block restored sleeping binary sensor missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_motion", + "sensor_0-motion", + entry, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -305,9 +315,14 @@ async def test_rpc_restored_sleeping_binary_sensor( ) -> None: """Test RPC restored binary sensor.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_cloud", + "cloud-cloud", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) @@ -334,9 +349,14 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( ) -> None: """Test RPC restored sleeping binary sensor missing last state.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_cloud", + "cloud-cloud", + entry, + device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index fea46b1d2d1..1156d7e0ed5 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -254,13 +254,14 @@ async def test_block_restored_climate( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) attrs = {"current_temperature": 20.5, "temperature": 
4.0} extra_data = {"last_target_temp": 22.0} @@ -321,13 +322,14 @@ async def test_block_restored_climate_us_customery( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) attrs = {"current_temperature": 67, "temperature": 39} extra_data = {"last_target_temp": 10.0} @@ -390,13 +392,14 @@ async def test_block_restored_climate_unavailable( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_UNAVAILABLE)]) @@ -417,13 +420,14 @@ async def test_block_restored_climate_set_preset_before_online( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, HVACMode.HEAT)]) @@ -518,13 +522,14 @@ async def test_block_restored_climate_auth_error( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, HVACMode.HEAT)]) diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 1140c93775b..bb9694cf9b4 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -16,6 +16,7 @@ from homeassistant.components.shelly.const import ( ATTR_DEVICE, ATTR_GENERATION, CONF_BLE_SCANNER_MODE, + CONF_SLEEP_PERIOD, DOMAIN, ENTRY_RELOAD_COOLDOWN, MAX_PUSH_UPDATE_FAILURES, @@ -886,9 +887,14 @@ async def test_block_sleeping_device_connection_error( """Test block sleeping device connection error during initialize.""" sleep_period = 1000 entry = await init_integration(hass, 1, sleep_period=sleep_period, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_motion", + "sensor_0-motion", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_block_device, "initialized", False) @@ -931,9 +937,14 @@ async def test_rpc_sleeping_device_connection_error( 
"""Test RPC sleeping device connection error during initialize.""" sleep_period = 1000 entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_cloud", + "cloud-cloud", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -966,6 +977,31 @@ async def test_rpc_sleeping_device_connection_error( assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE +async def test_rpc_sleeping_device_late_setup( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC sleeping device creates entities if they do not exist yet.""" + entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + assert entry.data[CONF_SLEEP_PERIOD] == 1000 + register_device(device_registry, entry) + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", False) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + monkeypatch.setattr(mock_rpc_device, "connected", True) + mock_rpc_device.mock_initialized() + await hass.async_block_till_done(wait_background_tasks=True) + assert hass.states.get("sensor.test_name_temperature") is not None + + async def test_rpc_already_connected( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py index 73f432094b9..6c1cc394b64 100644 --- a/tests/components/shelly/test_number.py +++ b/tests/components/shelly/test_number.py @@ -72,7 +72,7 @@ async def test_block_restored_number( ) -> None: """Test block restored number.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) capabilities = { "min": 0, "max": 100, @@ -86,6 +86,7 @@ async def test_block_restored_number( "device_0-valvePos", entry, capabilities, + device_id=device.id, ) extra_data = { "native_max_value": 100, @@ -118,7 +119,7 @@ async def test_block_restored_number_no_last_state( ) -> None: """Test block restored number missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) capabilities = { "min": 0, "max": 100, @@ -132,6 +133,7 @@ async def test_block_restored_number_no_last_state( "device_0-valvePos", entry, capabilities, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 7aa4218341f..6b665b8fea2 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -193,9 +193,14 @@ async def test_block_restored_sleeping_sensor( ) -> None: """Test block restored sleeping sensor.""" entry = await 
init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry + hass, + SENSOR_DOMAIN, + "test_name_temperature", + "sensor_0-temp", + entry, + device_id=device.id, ) extra_data = {"native_value": "20.4", "native_unit_of_measurement": "°C"} @@ -226,9 +231,14 @@ async def test_block_restored_sleeping_sensor_no_last_state( ) -> None: """Test block restored sleeping sensor missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry + hass, + SENSOR_DOMAIN, + "test_name_temperature", + "sensor_0-temp", + entry, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -293,9 +303,14 @@ async def test_block_not_matched_restored_sleeping_sensor( ) -> None: """Test block not matched to restored sleeping sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry + hass, + SENSOR_DOMAIN, + "test_name_temperature", + "sensor_0-temp", + entry, + device_id=device.id, ) extra_data = {"native_value": "20.4", "native_unit_of_measurement": "°C"} @@ -489,13 +504,14 @@ async def test_rpc_restored_sleeping_sensor( ) -> None: """Test RPC restored sensor.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SENSOR_DOMAIN, "test_name_temperature", "temperature:0-temperature_0", entry, + device_id=device.id, ) extra_data = {"native_value": "21.0", "native_unit_of_measurement": "°C"} @@ -527,13 +543,14 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( ) -> None: """Test RPC restored sensor missing last state.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SENSOR_DOMAIN, "test_name_temperature", "temperature:0-temperature_0", entry, + device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 124562be8d5..c891d1d7b2d 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -118,13 +118,14 @@ async def test_block_restored_motion_switch( entry = await init_integration( hass, 1, sleep_period=1000, model=model, skip_setup=True ) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SWITCH_DOMAIN, "test_name_motion_detection", "sensor_0-motionActive", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_OFF)]) @@ -154,13 +155,14 @@ async def test_block_restored_motion_switch_no_last_state( entry = await init_integration( hass, 1, sleep_period=1000, model=model, skip_setup=True ) - register_device(device_registry, entry) 
+ device = register_device(device_registry, entry) entity_id = register_entity( hass, SWITCH_DOMAIN, "test_name_motion_detection", "sensor_0-motionActive", entry, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 721e86559a3..c6434c0b988 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -385,13 +385,14 @@ async def test_rpc_restored_sleeping_update( ) -> None: """Test RPC restored update entity.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, UPDATE_DOMAIN, "test_name_firmware_update", "sys-fwupdate", entry, + device_id=device.id, ) attr = {ATTR_INSTALLED_VERSION: "1", ATTR_LATEST_VERSION: "2"} @@ -443,13 +444,14 @@ async def test_rpc_restored_sleeping_update_no_last_state( }, ) entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, UPDATE_DOMAIN, "test_name_firmware_update", "sys-fwupdate", entry, + device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) From 3304e27fa3d21a8a11f6032839becf65dc7a5fb9 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 25 Aug 2024 12:22:57 -0700 Subject: [PATCH 1049/1635] Add ollama context window size configuration (#124555) * Add ollama context window size configuration * Set higher max context size --- homeassistant/components/ollama/__init__.py | 2 ++ .../components/ollama/config_flow.py | 12 +++++++ homeassistant/components/ollama/const.py | 5 +++ .../components/ollama/conversation.py | 3 ++ homeassistant/components/ollama/strings.json | 4 ++- tests/components/ollama/conftest.py | 13 ++++++-- tests/components/ollama/test_config_flow.py | 7 ++++- tests/components/ollama/test_conversation.py | 31 +++++++++++++++++++ 8 files changed, 73 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/ollama/__init__.py b/homeassistant/components/ollama/__init__.py index 2286a2c7b75..2ad389c55c3 100644 --- a/homeassistant/components/ollama/__init__.py +++ b/homeassistant/components/ollama/__init__.py @@ -18,6 +18,7 @@ from .const import ( CONF_KEEP_ALIVE, CONF_MAX_HISTORY, CONF_MODEL, + CONF_NUM_CTX, CONF_PROMPT, DEFAULT_TIMEOUT, DOMAIN, @@ -30,6 +31,7 @@ __all__ = [ "CONF_PROMPT", "CONF_MODEL", "CONF_MAX_HISTORY", + "CONF_NUM_CTX", "CONF_KEEP_ALIVE", "DOMAIN", ] diff --git a/homeassistant/components/ollama/config_flow.py b/homeassistant/components/ollama/config_flow.py index bcdd6e06f48..6b516d67138 100644 --- a/homeassistant/components/ollama/config_flow.py +++ b/homeassistant/components/ollama/config_flow.py @@ -38,12 +38,16 @@ from .const import ( CONF_KEEP_ALIVE, CONF_MAX_HISTORY, CONF_MODEL, + CONF_NUM_CTX, CONF_PROMPT, DEFAULT_KEEP_ALIVE, DEFAULT_MAX_HISTORY, DEFAULT_MODEL, + DEFAULT_NUM_CTX, DEFAULT_TIMEOUT, DOMAIN, + MAX_NUM_CTX, + MIN_NUM_CTX, MODEL_NAMES, ) @@ -255,6 +259,14 @@ def ollama_config_option_schema( description={"suggested_value": options.get(CONF_LLM_HASS_API)}, default="none", ): SelectSelector(SelectSelectorConfig(options=hass_apis)), + vol.Optional( + CONF_NUM_CTX, + description={"suggested_value": options.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)}, + ): NumberSelector( 
+ NumberSelectorConfig( + min=MIN_NUM_CTX, max=MAX_NUM_CTX, step=1, mode=NumberSelectorMode.BOX + ) + ), vol.Optional( CONF_MAX_HISTORY, description={ diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 97c4f1186fc..6152b223d6d 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -11,6 +11,11 @@ DEFAULT_KEEP_ALIVE = -1 # seconds. -1 = indefinite, 0 = never KEEP_ALIVE_FOREVER = -1 DEFAULT_TIMEOUT = 5.0 # seconds +CONF_NUM_CTX = "num_ctx" +DEFAULT_NUM_CTX = 8192 +MIN_NUM_CTX = 2048 +MAX_NUM_CTX = 131072 + CONF_MAX_HISTORY = "max_history" DEFAULT_MAX_HISTORY = 20 diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index c0423b258f0..1a91c790d27 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -26,9 +26,11 @@ from .const import ( CONF_KEEP_ALIVE, CONF_MAX_HISTORY, CONF_MODEL, + CONF_NUM_CTX, CONF_PROMPT, DEFAULT_KEEP_ALIVE, DEFAULT_MAX_HISTORY, + DEFAULT_NUM_CTX, DOMAIN, MAX_HISTORY_SECONDS, ) @@ -263,6 +265,7 @@ class OllamaConversationEntity( stream=False, # keep_alive requires specifying unit. In this case, seconds keep_alive=f"{settings.get(CONF_KEEP_ALIVE, DEFAULT_KEEP_ALIVE)}s", + options={CONF_NUM_CTX: settings.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)}, ) except (ollama.RequestError, ollama.ResponseError) as err: _LOGGER.error("Unexpected error talking to Ollama server: %s", err) diff --git a/homeassistant/components/ollama/strings.json b/homeassistant/components/ollama/strings.json index 2366ecd0848..c307f160228 100644 --- a/homeassistant/components/ollama/strings.json +++ b/homeassistant/components/ollama/strings.json @@ -27,11 +27,13 @@ "prompt": "Instructions", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", "max_history": "Max history messages", + "num_ctx": "Context window size", "keep_alive": "Keep alive" }, "data_description": { "prompt": "Instruct how the LLM should respond. This can be a template.", - "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never." + "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never.", + "num_ctx": "Maximum number of text tokens the model can process. Lower to reduce Ollama RAM, or increase for a large number of exposed entities." 
} } } diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index b28b8850cd5..7658d1cbfab 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -1,5 +1,6 @@ """Tests Ollama integration.""" +from typing import Any from unittest.mock import patch import pytest @@ -16,12 +17,20 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry_options() -> dict[str, Any]: + """Fixture for configuration entry options.""" + return TEST_OPTIONS + + +@pytest.fixture +def mock_config_entry( + hass: HomeAssistant, mock_config_entry_options: dict[str, Any] +) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( domain=ollama.DOMAIN, data=TEST_USER_DATA, - options=TEST_OPTIONS, + options=mock_config_entry_options, ) entry.add_to_hass(hass) return entry diff --git a/tests/components/ollama/test_config_flow.py b/tests/components/ollama/test_config_flow.py index b1b74197139..7755f2208b4 100644 --- a/tests/components/ollama/test_config_flow.py +++ b/tests/components/ollama/test_config_flow.py @@ -164,13 +164,18 @@ async def test_options( ) options = await hass.config_entries.options.async_configure( options_flow["flow_id"], - {ollama.CONF_PROMPT: "test prompt", ollama.CONF_MAX_HISTORY: 100}, + { + ollama.CONF_PROMPT: "test prompt", + ollama.CONF_MAX_HISTORY: 100, + ollama.CONF_NUM_CTX: 32768, + }, ) await hass.async_block_till_done() assert options["type"] is FlowResultType.CREATE_ENTRY assert options["data"] == { ollama.CONF_PROMPT: "test prompt", ollama.CONF_MAX_HISTORY: 100, + ollama.CONF_NUM_CTX: 32768, } diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index cb56b398342..f10805e747d 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -578,3 +578,34 @@ async def test_conversation_agent_with_assist( state.attributes[ATTR_SUPPORTED_FEATURES] == conversation.ConversationEntityFeature.CONTROL ) + + +@pytest.mark.parametrize( + ("mock_config_entry_options", "expected_options"), + [ + ({}, {"num_ctx": 8192}), + ({"num_ctx": 16384}, {"num_ctx": 16384}), + ], +) +async def test_options( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + expected_options: dict[str, Any], +) -> None: + """Test that options are passed correctly to ollama client.""" + with patch( + "ollama.AsyncClient.chat", + return_value={"message": {"role": "assistant", "content": "test response"}}, + ) as mock_chat: + await conversation.async_converse( + hass, + "test message", + None, + Context(), + agent_id="conversation.mock_title", + ) + + assert mock_chat.call_count == 1 + args = mock_chat.call_args.kwargs + assert args.get("options") == expected_options From 18212052a4b01707322d1c47327e5fd141f523a6 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 25 Aug 2024 12:24:06 -0700 Subject: [PATCH 1050/1635] Update nest events to include attachment image and video urls (#124554) --- homeassistant/components/nest/__init__.py | 33 ++++++- tests/components/nest/test_events.py | 4 + tests/components/nest/test_media_source.py | 109 ++++++++++++++++++++- 3 files changed, 142 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index 5d3e9652039..da72fdfd53b 100644 --- a/homeassistant/components/nest/__init__.py +++ 
b/homeassistant/components/nest/__init__.py @@ -20,6 +20,7 @@ from google_nest_sdm.exceptions import ( DecodeException, SubscriberException, ) +from google_nest_sdm.traits import TraitType import voluptuous as vol from homeassistant.auth.permissions.const import POLICY_READ @@ -65,6 +66,8 @@ from .const import ( ) from .events import EVENT_NAME_MAP, NEST_EVENT from .media_source import ( + EVENT_MEDIA_API_URL_FORMAT, + EVENT_THUMBNAIL_URL_FORMAT, async_get_media_event_store, async_get_media_source_devices, async_get_transcoder, @@ -136,11 +139,15 @@ class SignalUpdateCallback: """An EventCallback invoked when new events arrive from subscriber.""" def __init__( - self, hass: HomeAssistant, config_reload_cb: Callable[[], Awaitable[None]] + self, + hass: HomeAssistant, + config_reload_cb: Callable[[], Awaitable[None]], + config_entry_id: str, ) -> None: """Initialize EventCallback.""" self._hass = hass self._config_reload_cb = config_reload_cb + self._config_entry_id = config_entry_id async def async_handle_event(self, event_message: EventMessage) -> None: """Process an incoming EventMessage.""" @@ -162,16 +169,36 @@ class SignalUpdateCallback: for api_event_type, image_event in events.items(): if not (event_type := EVENT_NAME_MAP.get(api_event_type)): continue + nest_event_id = image_event.event_token + attachment = { + "image": EVENT_THUMBNAIL_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ), + } + if self._supports_clip(device_id): + attachment["video"] = EVENT_MEDIA_API_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ) message = { "device_id": device_entry.id, "type": event_type, "timestamp": event_message.timestamp, - "nest_event_id": image_event.event_token, + "nest_event_id": nest_event_id, + "attachment": attachment, } if image_event.zones: message["zones"] = image_event.zones self._hass.bus.async_fire(NEST_EVENT, message) + def _supports_clip(self, device_id: str) -> bool: + if not ( + device_manager := self._hass.data[DOMAIN] + .get(self._config_entry_id, {}) + .get(DATA_DEVICE_MANAGER) + ) or not (device := device_manager.devices.get(device_id)): + return False + return TraitType.CAMERA_CLIP_PREVIEW in device.traits + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Nest from a config entry with dispatch between old/new flows.""" @@ -197,7 +224,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_config_reload() -> None: await hass.config_entries.async_reload(entry.entry_id) - update_callback = SignalUpdateCallback(hass, async_config_reload) + update_callback = SignalUpdateCallback(hass, async_config_reload, entry.entry_id) subscriber.set_update_callback(update_callback.async_handle_event) try: await subscriber.start_async() diff --git a/tests/components/nest/test_events.py b/tests/components/nest/test_events.py index 08cf9f775b7..643a2614bbc 100644 --- a/tests/components/nest/test_events.py +++ b/tests/components/nest/test_events.py @@ -186,6 +186,8 @@ async def test_event( "type": expected_type, "timestamp": event_time, } + assert "image" in events[0].data["attachment"] + assert "video" not in events[0].data["attachment"] @pytest.mark.parametrize( @@ -344,6 +346,8 @@ async def test_doorbell_event_thread( "type": "camera_motion", "timestamp": timestamp1.replace(microsecond=0), } + assert "image" in events[0].data["attachment"] + assert "video" in events[0].data["attachment"] @pytest.mark.parametrize( diff --git 
a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index 3cfa4ee6687..4bc3559e308 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -74,7 +74,6 @@ GENERATE_IMAGE_URL_RESPONSE = { } IMAGE_BYTES_FROM_EVENT = b"test url image bytes" IMAGE_AUTHORIZATION_HEADERS = {"Authorization": "Basic g.0.eventToken"} -NEST_EVENT = "nest_event" def frame_image_data(frame_i, total_frames): @@ -1461,3 +1460,111 @@ async def test_camera_image_resize( assert browse.title == "Front: Recent Events" assert not browse.thumbnail assert len(browse.children) == 1 + + +async def test_event_media_attachment( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + device_registry: dr.DeviceRegistry, + subscriber, + auth, + setup_platform, +) -> None: + """Verify that an event media attachment is successfully resolved.""" + await setup_platform() + + assert len(hass.states.async_all()) == 1 + camera = hass.states.get("camera.front") + assert camera is not None + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Capture any events published + received_events = async_capture_events(hass, NEST_EVENT) + + # Set up fake media, and publish image events + auth.responses = [ + aiohttp.web.json_response(GENERATE_IMAGE_URL_RESPONSE), + aiohttp.web.Response(body=IMAGE_BYTES_FROM_EVENT), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event( + EVENT_SESSION_ID, + EVENT_ID, + PERSON_EVENT, + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + assert len(received_events) == 1 + received_event = received_events[0] + attachment = received_event.data.get("attachment") + assert attachment + assert list(attachment.keys()) == ["image"] + assert attachment["image"].startswith("/api/nest/event_media") + assert attachment["image"].endswith("/thumbnail") + + # Download the attachment content and verify it works + client = await hass_client() + response = await client.get(attachment["image"]) + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" + await response.read() + + +@pytest.mark.parametrize("device_traits", [BATTERY_CAMERA_TRAITS]) +async def test_event_clip_media_attachment( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + device_registry: dr.DeviceRegistry, + subscriber, + auth, + setup_platform, + mp4, +) -> None: + """Verify that an event media attachment is successfully resolved.""" + await setup_platform() + + assert len(hass.states.async_all()) == 1 + camera = hass.states.get("camera.front") + assert camera is not None + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Capture any events published + received_events = async_capture_events(hass, NEST_EVENT) + + # Set up fake media, and publish clip events + auth.responses = [ + aiohttp.web.Response(body=mp4.getvalue()), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event_message( + create_battery_event_data(MOTION_EVENT), + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + assert len(received_events) == 1 + received_event = received_events[0] + attachment = received_event.data.get("attachment") + assert attachment + assert list(attachment.keys()) == ["image", "video"] + assert attachment["image"].startswith("/api/nest/event_media") + assert 
attachment["image"].endswith("/thumbnail") + assert attachment["video"].startswith("/api/nest/event_media") + assert not attachment["video"].endswith("/thumbnail") + + # Download the attachment content and verify it works + for content_path in attachment.values(): + client = await hass_client() + response = await client.get(content_path) + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" + await response.read() From 8bc9fd23bbe9d7ddcc3d190b327cdb4a2dc4521b Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Sun, 25 Aug 2024 15:25:20 -0400 Subject: [PATCH 1051/1635] Add Sonos tests for announce and update error handling (#124539) * initial commit * update error message * use match for error message --- .../components/sonos/media_player.py | 7 +- homeassistant/components/sonos/strings.json | 3 + tests/components/sonos/conftest.py | 14 ++++ tests/components/sonos/test_media_player.py | 73 ++++++++++++++++++- 4 files changed, 95 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index ff6981cfc9b..e68d3dfa97a 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -542,8 +542,13 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): raise HomeAssistantError( f"Error when calling Sonos websocket: {exc}" ) from exc - if response["success"]: + if response.get("success"): return + raise HomeAssistantError( + translation_domain=SONOS_DOMAIN, + translation_key="announce_media_error", + translation_placeholders={"media_id": media_id, "response": response}, + ) if spotify.is_spotify_media_type(media_type): media_type = spotify.resolve_spotify_media_type(media_type) diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 5833e5a27f2..7a73378d69b 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -180,6 +180,9 @@ }, "invalid_sonos_playlist": { "message": "Could not find Sonos playlist: {name}" + }, + "announce_media_error": { + "message": "Announcing clip {media_id} failed {response}" } } } diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 996fd4c6663..6abb010557e 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -255,6 +255,19 @@ def soco_sharelink(): yield mock_instance +@pytest.fixture(name="sonos_websocket") +def sonos_websocket(): + """Fixture to mock SonosWebSocket.""" + with patch( + "homeassistant.components.sonos.speaker.SonosWebsocket" + ) as mock_sonos_ws: + mock_instance = AsyncMock() + mock_instance.play_clip = AsyncMock() + mock_instance.play_clip.return_value = [{"success": 1}, {}] + mock_sonos_ws.return_value = mock_instance + yield mock_instance + + @pytest.fixture(name="soco_factory") def soco_factory( music_library, @@ -263,6 +276,7 @@ def soco_factory( battery_info, alarm_clock, sonos_playlists: SearchResult, + sonos_websocket, ): """Create factory for instantiating SoCo mocks.""" factory = SoCoMockFactory( diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index b29ae1b6cfb..fa77293fbde 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -5,13 +5,16 @@ from unittest.mock import patch import pytest from soco.data_structures import SearchResult +from 
sonos_websocket.exception import SonosWebsocketError from syrupy import SnapshotAssertion from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, + ATTR_MEDIA_ANNOUNCE, ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_EXTRA, ATTR_MEDIA_REPEAT, ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_VOLUME_LEVEL, @@ -47,7 +50,7 @@ from homeassistant.const import ( SERVICE_VOLUME_UP, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, @@ -1050,3 +1053,71 @@ async def test_media_transport( blocking=True, ) assert getattr(soco, client_call).call_count == 1 + + +async def test_play_media_announce( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + sonos_websocket, +) -> None: + """Test playing media with the announce.""" + content_id: str = "http://10.0.0.1:8123/local/sounds/doorbell.mp3" + volume: float = 0.30 + + # Test the success path + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: {"volume": volume}, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + sonos_websocket.play_clip.assert_called_with(content_id, volume=volume) + + # Test receiving a websocket exception + sonos_websocket.play_clip.reset_mock() + sonos_websocket.play_clip.side_effect = SonosWebsocketError("Error Message") + with pytest.raises( + HomeAssistantError, match="Error when calling Sonos websocket: Error Message" + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + sonos_websocket.play_clip.assert_called_with(content_id, volume=None) + + # Test receiving a non success result + sonos_websocket.play_clip.reset_mock() + sonos_websocket.play_clip.side_effect = None + retval = {"success": 0} + sonos_websocket.play_clip.return_value = [retval, {}] + with pytest.raises( + HomeAssistantError, match=f"Announcing clip {content_id} failed {retval}" + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 From 9ea41b4d260e0a6ed4d6f389ac50387f148b35d4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 25 Aug 2024 10:45:06 -1000 Subject: [PATCH 1052/1635] Bump yalexs to 8.4.2 (#124593) changelog: https://github.com/bdraco/yalexs/compare/v8.4.1...v8.4.2 If the pubnub connection failed, the integration would fallback to polling. This would could get users banned so we no longer do that. 
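
For illustration only, a minimal sketch of the behavior change described above: keep the
integration push-only and do not schedule a cloud-polling fallback when the push (pubnub)
channel drops. The class and method names below are invented for this sketch and are not
the real yalexs or august integration API.

    import logging

    _LOGGER = logging.getLogger(__name__)


    class HypotheticalActivityStream:
        """Push-based activity updates (illustrative sketch, not real yalexs code)."""

        def __init__(self) -> None:
            self._push_connected = False

        def on_push_status(self, connected: bool) -> None:
            """Handle push channel status changes."""
            self._push_connected = connected
            if connected:
                return
            # Previously a polling fallback would be scheduled here; aggressive
            # cloud polling can get accounts banned, so now we only log and wait
            # for the push channel to reconnect.
            _LOGGER.warning(
                "Push connection lost; waiting for reconnect instead of polling"
            )
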
--- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index e0739aadff0..42f5dfd8409 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.4.1", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.4.2", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 699193d350e..19adc435c90 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2968,7 +2968,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.1 +yalexs==8.4.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 31420e7f7af..a0182161fe7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2348,7 +2348,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.1 +yalexs==8.4.2 # homeassistant.components.yeelight yeelight==0.7.14 From d915fee8339c6c52b0d1bc74f741b5b6bc84ceb3 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 26 Aug 2024 15:47:48 +1200 Subject: [PATCH 1053/1635] Bump aioesphomeapi to 25.2.0 (#124607) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index b2647709e8e..6f2284d5a4b 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==25.1.0", + "aioesphomeapi==25.2.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 19adc435c90..fc626c59c39 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.1.0 +aioesphomeapi==25.2.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0182161fe7..858ba6d1197 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.1.0 +aioesphomeapi==25.2.0 # homeassistant.components.flo aioflo==2021.11.0 From 0dfe12840d10188deb079dc35fd5c4ba4b233121 Mon Sep 17 00:00:00 2001 From: Jeef Date: Sun, 25 Aug 2024 21:57:57 -0600 Subject: [PATCH 1054/1635] Add SimpleFin binary sensor for errors (#122554) * Binary Sensor Support * updated requirements * ng ambr * update strings.json * update snapshot --- .../components/simplefin/__init__.py | 5 +- .../components/simplefin/binary_sensor.py | 68 ++ .../components/simplefin/strings.json | 3 + .../snapshots/test_binary_sensor.ambr | 769 ++++++++++++++++++ .../simplefin/test_binary_sensor.py | 29 + 5 files changed, 873 insertions(+), 1 deletion(-) create mode 100644 
homeassistant/components/simplefin/binary_sensor.py create mode 100644 tests/components/simplefin/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/simplefin/test_binary_sensor.py diff --git a/homeassistant/components/simplefin/__init__.py b/homeassistant/components/simplefin/__init__.py index 0aa33dec9ac..c47b3118415 100644 --- a/homeassistant/components/simplefin/__init__.py +++ b/homeassistant/components/simplefin/__init__.py @@ -11,7 +11,10 @@ from homeassistant.core import HomeAssistant from .const import CONF_ACCESS_URL from .coordinator import SimpleFinDataUpdateCoordinator -PLATFORMS: list[str] = [Platform.SENSOR] +PLATFORMS: list[str] = [ + Platform.BINARY_SENSOR, + Platform.SENSOR, +] type SimpleFinConfigEntry = ConfigEntry[SimpleFinDataUpdateCoordinator] diff --git a/homeassistant/components/simplefin/binary_sensor.py b/homeassistant/components/simplefin/binary_sensor.py new file mode 100644 index 00000000000..5805fc370b6 --- /dev/null +++ b/homeassistant/components/simplefin/binary_sensor.py @@ -0,0 +1,68 @@ +"""Binary Sensor for SimpleFin.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from simplefin4py import Account + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SimpleFinConfigEntry +from .entity import SimpleFinEntity + + +@dataclass(frozen=True, kw_only=True) +class SimpleFinBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes a sensor entity.""" + + value_fn: Callable[[Account], bool] + + +SIMPLEFIN_BINARY_SENSORS: tuple[SimpleFinBinarySensorEntityDescription, ...] 
= ( + SimpleFinBinarySensorEntityDescription( + key="possible_error", + translation_key="possible_error", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda account: account.possible_error, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SimpleFinConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SimpleFIN sensors for config entries.""" + + sf_coordinator = config_entry.runtime_data + accounts = sf_coordinator.data.accounts + + async_add_entities( + SimpleFinBinarySensor( + sf_coordinator, + sensor_description, + account, + ) + for account in accounts + for sensor_description in SIMPLEFIN_BINARY_SENSORS + ) + + +class SimpleFinBinarySensor(SimpleFinEntity, BinarySensorEntity): + """Extends IntellifireEntity with Binary Sensor specific logic.""" + + entity_description: SimpleFinBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Use this to get the correct value.""" + return self.entity_description.value_fn(self.account_data) diff --git a/homeassistant/components/simplefin/strings.json b/homeassistant/components/simplefin/strings.json index d6690e604c5..3ac03fe2cc0 100644 --- a/homeassistant/components/simplefin/strings.json +++ b/homeassistant/components/simplefin/strings.json @@ -21,6 +21,9 @@ } }, "entity": { + "binary_sensor": { + "possible_error": { "name": "Possible error" } + }, "sensor": { "balance": { "name": "Balance" diff --git a/tests/components/simplefin/snapshots/test_binary_sensor.ambr b/tests/components/simplefin/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..be26ae1a03d --- /dev/null +++ b/tests/components/simplefin/snapshots/test_binary_sensor.ambr @@ -0,0 +1,769 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.investments_dr_evil_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_dr_evil_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_dr_evil_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments Dr Evil Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_dr_evil_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_dr_evil_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_dr_evil_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_dr_evil_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments Dr Evil Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_dr_evil_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_my_checking_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments My Checking Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_my_checking_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_my_checking_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments My Checking Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_my_checking_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-entry] + EntityRegistryEntrySnapshot({ + 
'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments NerdCorp Series B Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments NerdCorp Series B Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Problem', + }), + 'context': , + 'entity_id': 
'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'possible_error', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go The Bank Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go The Bank Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/simplefin/test_binary_sensor.py b/tests/components/simplefin/test_binary_sensor.py new file mode 100644 index 00000000000..40c6882153d --- /dev/null +++ b/tests/components/simplefin/test_binary_sensor.py @@ -0,0 +1,29 @@ +"""Test SimpleFin Sensor with Snapshot data.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.simplefin.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From 6e727a49bf50889a802d1c6026bce59b8feef27d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 17:58:20 -1000 Subject: [PATCH 1055/1635] Remove unused constant in dhcp (#124605) --- homeassistant/components/dhcp/__init__.py | 1 - 1 file changed, 1 deletion(-) diff --git a/homeassistant/components/dhcp/__init__.py b/homeassistant/components/dhcp/__init__.py index e830de39f29..0897729ec72 100644 --- a/homeassistant/components/dhcp/__init__.py +++ b/homeassistant/components/dhcp/__init__.py @@ -63,7 +63,6 @@ from .const import DOMAIN CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -FILTER = "udp and (port 67 or 68)" HOSTNAME: Final = "hostname" MAC_ADDRESS: Final = "macaddress" IP_ADDRESS: Final = "ip" From 3035588dfaf9750aaf51700be59e5082b40a955f Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Mon, 26 Aug 2024 13:58:59 +1000 Subject: [PATCH 1056/1635] Dont turn HVAC off with preset in Teslemetry (#124604) Dont turn HVAC off with preset --- homeassistant/components/teslemetry/climate.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 5b093b0c6f1..bd4fb0eba53 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -168,9 +168,8 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): ) ) self._attr_preset_mode = preset_mode - if preset_mode == self._attr_preset_modes[0]: - self._attr_hvac_mode = HVACMode.OFF - else: + if preset_mode != self._attr_preset_modes[0]: + # Changing preset mode will also turn on climate self._attr_hvac_mode = HVACMode.HEAT_COOL self.async_write_ha_state() From 592f60643a8b34bd777a6b3276440fcc61fc7a30 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Sun, 25 Aug 2024 23:50:32 -0500 Subject: [PATCH 1057/1635] Use speex for noise suppression and auto gain (#124591) --- .../assist_pipeline/audio_enhancer.py | 30 +++++++++++++++++-- .../components/assist_pipeline/manifest.json | 2 +- .../components/assist_pipeline/pipeline.py | 4 +-- homeassistant/components/voip/voip.py | 4 +-- homeassistant/package_constraints.txt | 1 + requirements_all.txt | 3 ++ requirements_test_all.txt | 3 ++ 7 files changed, 39 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index c9c60f421b1..ff2b122187a 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -5,6 +5,7 @@ from dataclasses import dataclass import logging from pymicro_vad import MicroVad +from pyspeex_noise import AudioProcessor from .const import BYTES_PER_CHUNK @@ -41,8 +42,8 @@ class AudioEnhancer(ABC): """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" -class MicroVadEnhancer(AudioEnhancer): - """Audio enhancer that just runs microVAD.""" +class MicroVadSpeexEnhancer(AudioEnhancer): + """Audio enhancer that runs microVAD and speex.""" def __init__( self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool @@ -50,6 +51,24 @@ class MicroVadEnhancer(AudioEnhancer): """Initialize audio enhancer.""" super().__init__(auto_gain, noise_suppression, is_vad_enabled) + self.audio_processor: AudioProcessor | None = None + + # Scale from 0-4 + self.noise_suppression = noise_suppression * -15 + + # Scale from 0-31 + self.auto_gain = auto_gain * 300 + + if (self.auto_gain != 0) or (self.noise_suppression != 0): + self.audio_processor = AudioProcessor( + self.auto_gain, 
self.noise_suppression + ) + _LOGGER.debug( + "Initialized speex with auto_gain=%s, noise_suppression=%s", + self.auto_gain, + self.noise_suppression, + ) + self.vad: MicroVad | None = None self.threshold = 0.5 @@ -61,12 +80,17 @@ class MicroVadEnhancer(AudioEnhancer): """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" is_speech: bool | None = None + assert len(audio) == BYTES_PER_CHUNK + if self.vad is not None: # Run VAD - assert len(audio) == BYTES_PER_CHUNK speech_prob = self.vad.Process10ms(audio) is_speech = speech_prob > self.threshold + if self.audio_processor is not None: + # Run noise suppression and auto gain + audio = self.audio_processor.Process10ms(audio).audio + return EnhancedAudioChunk( audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech ) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index 00950b138fd..c22b7391d33 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.1"] + "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.0"] } diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 9fada934ca1..342f811c99b 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -49,7 +49,7 @@ from homeassistant.util import ( ) from homeassistant.util.limited_size_dict import LimitedSizeDict -from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer +from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer from .const import ( BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, @@ -589,7 +589,7 @@ class PipelineRun: # Initialize with audio settings if self.audio_settings.needs_processor and (self.audio_enhancer is None): # Default audio enhancer - self.audio_enhancer = MicroVadEnhancer( + self.audio_enhancer = MicroVadSpeexEnhancer( self.audio_settings.auto_gain_dbfs, self.audio_settings.noise_suppression_level, self.audio_settings.is_vad_enabled, diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 161e938a3b6..be1e58b6eec 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -33,7 +33,7 @@ from homeassistant.components.assist_pipeline import ( ) from homeassistant.components.assist_pipeline.audio_enhancer import ( AudioEnhancer, - MicroVadEnhancer, + MicroVadSpeexEnhancer, ) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, @@ -235,7 +235,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - audio_enhancer = MicroVadEnhancer(0, 0, True) + audio_enhancer = MicroVadSpeexEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 23d759f270d..5f9b9023718 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -49,6 +49,7 @@ pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 +pyspeex-noise==1.0.0 python-slugify==8.0.4 PyTurboJPEG==1.7.1 pyudev==0.24.1 diff 
--git a/requirements_all.txt b/requirements_all.txt index fc626c59c39..5d5d5931818 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2228,6 +2228,9 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 +# homeassistant.components.assist_pipeline +pyspeex-noise==1.0.0 + # homeassistant.components.squeezebox pysqueezebox==0.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 858ba6d1197..ab1d0bc2a1b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1782,6 +1782,9 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 +# homeassistant.components.assist_pipeline +pyspeex-noise==1.0.0 + # homeassistant.components.squeezebox pysqueezebox==0.7.1 From bb6f9ec8449f2f09aa3b766798d72eef7f0cd732 Mon Sep 17 00:00:00 2001 From: Yuxin Wang Date: Mon, 26 Aug 2024 01:09:37 -0400 Subject: [PATCH 1058/1635] Set native value to be None instead of STATE_UNKNOWN for APCUPSD integration (#124609) --- homeassistant/components/apcupsd/sensor.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index ff72208e9ce..f2b5943bdf0 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - STATE_UNKNOWN, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, @@ -484,7 +483,7 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity): # performed) and may disappear again after certain event. So we mark the state as "unknown" # when it becomes unknown after such events. 
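A minimal, hypothetical sketch of the mechanism the hunk above relies on (ExampleUpsSensor and its data dict are illustrative, not part of this patch): a SensorEntity whose native_value is None is what Home Assistant renders as an unknown state, whereas assigning the STATE_UNKNOWN constant would report the literal string "unknown" as if it were a measured value.

    from homeassistant.components.sensor import SensorEntity


    class ExampleUpsSensor(SensorEntity):
        """Hypothetical sensor that reports None when its reading is missing."""

        def __init__(self, data: dict[str, str], key: str) -> None:
            self._data = data  # assumed snapshot of UPS status values
            self._key = key

        @property
        def native_value(self) -> str | None:
            # None -> the state is shown as unknown; no string constant involved
            return self._data.get(self._key)
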
if key not in self.coordinator.data: - self._attr_native_value = STATE_UNKNOWN + self._attr_native_value = None return self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key]) From 0591b5e47b9f1173a8b65b5b48ef562183d9d731 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Mon, 26 Aug 2024 08:30:28 +0200 Subject: [PATCH 1059/1635] Improve code quality for ViCare integration (#124613) * move type to module * set translation key outside of init * align import alias * align constructor parameter order/naming * move static attr init outside init function * add missing types * fix test * revert move of _attributes --- .../components/vicare/binary_sensor.py | 10 ++-- homeassistant/components/vicare/button.py | 6 +-- homeassistant/components/vicare/climate.py | 17 +++---- homeassistant/components/vicare/fan.py | 51 ++++++++++++++++++- homeassistant/components/vicare/number.py | 6 +-- homeassistant/components/vicare/sensor.py | 10 ++-- homeassistant/components/vicare/types.py | 49 ------------------ .../components/vicare/water_heater.py | 20 ++++---- tests/components/vicare/test_types.py | 3 +- 9 files changed, 85 insertions(+), 87 deletions(-) diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 2df8a2f06d3..2c114d15b85 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -10,7 +10,7 @@ import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, ) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -139,8 +139,8 @@ def _build_entities_for_device( return [ ViCareBinarySensor( - device, device_config, + device, description, ) for description in GLOBAL_SENSORS @@ -149,7 +149,7 @@ def _build_entities_for_device( def _build_entities_for_component( - components: list[PyViCareHeatingDeviceWithComponent], + components: list[PyViCareHeatingDeviceComponent], device_config: PyViCareDeviceConfig, entity_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], ) -> list[ViCareBinarySensor]: @@ -157,8 +157,8 @@ def _build_entities_for_component( return [ ViCareBinarySensor( - component, device_config, + component, description, ) for component in components @@ -190,8 +190,8 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): def __init__( self, - api: PyViCareDevice, device_config: PyViCareDeviceConfig, + api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareBinarySensorEntityDescription, ) -> None: """Initialize the sensor.""" diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index c927055dadd..f880c39ddea 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -54,8 +54,8 @@ def _build_entities( return [ ViCareButton( - device.api, device.config, + device.api, description, ) for device in device_list @@ -87,12 +87,12 @@ class ViCareButton(ViCareEntity, ButtonEntity): def __init__( self, - api: PyViCareDevice, device_config: PyViCareDeviceConfig, + device: PyViCareDevice, description: ViCareButtonEntityDescription, ) -> None: """Initialize the button.""" - super().__init__(device_config, api, description.key) + 
super().__init__(device_config, device, description.key) self.entity_description = description def press(self) -> None: diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 1333327609d..df1cde2abca 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -87,10 +87,9 @@ def _build_entities( """Create ViCare climate entities for a device.""" return [ ViCareClimate( + device.config, device.api, circuit, - device.config, - "heating", ) for device in device_list for circuit in get_circuits(device.api) @@ -136,24 +135,22 @@ class ViCareClimate(ViCareEntity, ClimateEntity): _attr_min_temp = VICARE_TEMP_HEATING_MIN _attr_max_temp = VICARE_TEMP_HEATING_MAX _attr_target_temperature_step = PRECISION_WHOLE + _attr_translation_key = "heating" _current_action: bool | None = None _current_mode: str | None = None + _current_program: str | None = None _enable_turn_on_off_backwards_compatibility = False def __init__( self, - api: PyViCareDevice, - circuit: PyViCareHeatingCircuit, device_config: PyViCareDeviceConfig, - translation_key: str, + device: PyViCareDevice, + circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the climate device.""" - super().__init__(device_config, api, circuit.id) + super().__init__(device_config, device, circuit.id) self._circuit = circuit self._attributes: dict[str, Any] = {} - self._current_program = None - self._attr_translation_key = translation_key - self._attributes["vicare_programs"] = self._circuit.getPrograms() self._attr_preset_modes = [ preset @@ -340,7 +337,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): ) from err @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Show Device Attributes.""" return self._attributes diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 088e54c7354..5b9dd2787e8 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -3,6 +3,7 @@ from __future__ import annotations from contextlib import suppress +import enum import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice @@ -28,10 +29,58 @@ from homeassistant.util.percentage import ( from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import VentilationMode, VentilationProgram _LOGGER = logging.getLogger(__name__) + +class VentilationProgram(enum.StrEnum): + """ViCare preset ventilation programs. 
+ + As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 + """ + + LEVEL_ONE = "levelOne" + LEVEL_TWO = "levelTwo" + LEVEL_THREE = "levelThree" + LEVEL_FOUR = "levelFour" + + +class VentilationMode(enum.StrEnum): + """ViCare ventilation modes.""" + + PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) + VENTILATION = "ventilation" # activated by schedule + SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor + SENSOR_OVERRIDE = "sensor_override" # activated by sensor + + @staticmethod + def to_vicare_mode(mode: str | None) -> str | None: + """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" + if mode: + try: + ventilation_mode = VentilationMode(mode) + except ValueError: + # ignore unsupported / unmapped modes + return None + return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None + return None + + @staticmethod + def from_vicare_mode(vicare_mode: str | None) -> str | None: + """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" + for mode in VentilationMode: + if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: + return mode + return None + + +HA_TO_VICARE_MODE_VENTILATION = { + VentilationMode.PERMANENT: "permanent", + VentilationMode.VENTILATION: "ventilation", + VentilationMode.SENSOR_DRIVEN: "sensorDriven", + VentilationMode.SENSOR_OVERRIDE: "sensorOverride", +} + ORDERED_NAMED_FAN_SPEEDS = [ VentilationProgram.LEVEL_ONE, VentilationProgram.LEVEL_TWO, diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index c0564170274..d53b7183327 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -235,8 +235,8 @@ def _build_entities( entities: list[ViCareNumber] = [ ViCareNumber( - device.api, device.config, + device.api, description, ) for device in device_list @@ -247,8 +247,8 @@ def _build_entities( entities.extend( [ ViCareNumber( - circuit, device.config, + circuit, description, ) for device in device_list @@ -283,8 +283,8 @@ class ViCareNumber(ViCareEntity, NumberEntity): def __init__( self, - api: PyViCareHeatingDeviceComponent, device_config: PyViCareDeviceConfig, + api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareNumberEntityDescription, ) -> None: """Initialize the number.""" diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 0271ffc9798..5d51abfbbf6 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -10,7 +10,7 @@ import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, ) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -892,8 +892,8 @@ def _build_entities_for_device( return [ ViCareSensor( - device, device_config, + device, description, ) for description in GLOBAL_SENSORS @@ -902,7 +902,7 @@ def _build_entities_for_device( def _build_entities_for_component( - components: list[PyViCareHeatingDeviceWithComponent], + components: list[PyViCareHeatingDeviceComponent], device_config: PyViCareDeviceConfig, entity_descriptions: tuple[ViCareSensorEntityDescription, ...], ) 
-> list[ViCareSensor]: @@ -910,8 +910,8 @@ def _build_entities_for_component( return [ ViCareSensor( - component, device_config, + component, description, ) for component in components @@ -943,8 +943,8 @@ class ViCareSensor(ViCareEntity, SensorEntity): def __init__( self, - api, device_config: PyViCareDeviceConfig, + api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareSensorEntityDescription, ) -> None: """Initialize the sensor.""" diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 596605fccdd..7e1ec7f8bee 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -64,55 +64,6 @@ VICARE_TO_HA_PRESET_HEATING = { } -class VentilationMode(enum.StrEnum): - """ViCare ventilation modes.""" - - PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) - VENTILATION = "ventilation" # activated by schedule - SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor - SENSOR_OVERRIDE = "sensor_override" # activated by sensor - - @staticmethod - def to_vicare_mode(mode: str | None) -> str | None: - """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" - if mode: - try: - ventilation_mode = VentilationMode(mode) - except ValueError: - # ignore unsupported / unmapped modes - return None - return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None - return None - - @staticmethod - def from_vicare_mode(vicare_mode: str | None) -> str | None: - """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" - for mode in VentilationMode: - if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: - return mode - return None - - -HA_TO_VICARE_MODE_VENTILATION = { - VentilationMode.PERMANENT: "permanent", - VentilationMode.VENTILATION: "ventilation", - VentilationMode.SENSOR_DRIVEN: "sensorDriven", - VentilationMode.SENSOR_OVERRIDE: "sensorOverride", -} - - -class VentilationProgram(enum.StrEnum): - """ViCare preset ventilation programs. 
- - As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 - """ - - LEVEL_ONE = "levelOne" - LEVEL_TWO = "levelTwo" - LEVEL_THREE = "levelThree" - LEVEL_FOUR = "levelFour" - - @dataclass(frozen=True) class ViCareDevice: """Dataclass holding the device api and config.""" diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index 223217f4e13..c76c6ea81aa 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -69,10 +69,9 @@ def _build_entities( return [ ViCareWater( + device.config, device.api, circuit, - device.config, - "domestic_hot_water", ) for device in device_list for circuit in get_circuits(device.api) @@ -104,20 +103,19 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): _attr_min_temp = VICARE_TEMP_WATER_MIN _attr_max_temp = VICARE_TEMP_WATER_MAX _attr_operation_list = list(HA_TO_VICARE_HVAC_DHW) + _attr_translation_key = "domestic_hot_water" + _current_mode: str | None = None def __init__( self, - api: PyViCareDevice, - circuit: PyViCareHeatingCircuit, device_config: PyViCareDeviceConfig, - translation_key: str, + device: PyViCareDevice, + circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the DHW water_heater device.""" - super().__init__(device_config, api, circuit.id) + super().__init__(device_config, device, circuit.id) self._circuit = circuit self._attributes: dict[str, Any] = {} - self._current_mode = None - self._attr_translation_key = translation_key def update(self) -> None: """Let HA know there has been an update from the ViCare API.""" @@ -151,6 +149,8 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): self._attr_target_temperature = temp @property - def current_operation(self): + def current_operation(self) -> str | None: """Return current operation ie. heat, cool, idle.""" - return VICARE_TO_HA_HVAC_DHW.get(self._current_mode) + if self._current_mode is None: + return None + return VICARE_TO_HA_HVAC_DHW.get(self._current_mode, None) diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py index 575e549f0d9..13d8255cf8d 100644 --- a/tests/components/vicare/test_types.py +++ b/tests/components/vicare/test_types.py @@ -3,7 +3,8 @@ import pytest from homeassistant.components.climate import PRESET_COMFORT, PRESET_SLEEP -from homeassistant.components.vicare.types import HeatingProgram, VentilationMode +from homeassistant.components.vicare.fan import VentilationMode +from homeassistant.components.vicare.types import HeatingProgram @pytest.mark.parametrize( From 76ebb0df0852214be410123956186c02032bca30 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 25 Aug 2024 20:49:31 -1000 Subject: [PATCH 1060/1635] Improve performance of generate diffs of state change events (#124601) dict comps are inlined in cpython 3.12+ --- homeassistant/components/websocket_api/messages.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/websocket_api/messages.py b/homeassistant/components/websocket_api/messages.py index 238f8be0c3b..0a8200c5700 100644 --- a/homeassistant/components/websocket_api/messages.py +++ b/homeassistant/components/websocket_api/messages.py @@ -224,9 +224,12 @@ def _state_diff_event( if (old_attributes := old_state.attributes) != ( new_attributes := new_state.attributes ): - for key, value in new_attributes.items(): - if old_attributes.get(key) != value: - additions.setdefault(COMPRESSED_STATE_ATTRIBUTES, {})[key] = value + if added := { + key: value + for key, value in new_attributes.items() + if key not in old_attributes or old_attributes[key] != value + }: + additions[COMPRESSED_STATE_ATTRIBUTES] = added if removed := old_attributes.keys() - new_attributes: # sets are not JSON serializable by default so we convert to list # here if there are any values to avoid jumping into the json_encoder_default From 65216df3a5615ca05a3f4b43ca3a30ed54ff020a Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Mon, 26 Aug 2024 09:40:25 +0200 Subject: [PATCH 1061/1635] Auto recover mqtt config entry secret if Mosquitto add-on was re-installed (#124514) Auto recover mqtt config entry secret if Mosquitto add-on is re-installed --- homeassistant/components/mqtt/config_flow.py | 30 ++++++ tests/components/mqtt/test_config_flow.py | 102 +++++++++++++++++++ 2 files changed, 132 insertions(+) diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index a27e34c364c..ca799ff3653 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -401,6 +401,36 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle re-authentication with MQTT broker.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + if is_hassio(self.hass): + # Check if entry setup matches the add-on discovery config + addon_manager = get_addon_manager(self.hass) + try: + addon_discovery_config = ( + await addon_manager.async_get_addon_discovery_info() + ) + except AddonError: + # Follow manual flow if we have an error + pass + else: + # Check if the addon secrets need to be renewed. + # This will repair the config entry, + # in case the official Mosquitto Broker addon was re-installed. 
+ if ( + entry_data[CONF_BROKER] == addon_discovery_config[CONF_HOST] + and entry_data[CONF_PORT] == addon_discovery_config[CONF_PORT] + and entry_data.get(CONF_USERNAME) + == (username := addon_discovery_config.get(CONF_USERNAME)) + and entry_data.get(CONF_PASSWORD) + != (password := addon_discovery_config.get(CONF_PASSWORD)) + ): + _LOGGER.info( + "Executing autorecovery %s add-on secrets", + addon_manager.addon_name, + ) + return await self.async_step_reauth_confirm( + user_input={CONF_USERNAME: username, CONF_PASSWORD: password} + ) + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 8e29658e017..907e3ef9946 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -1587,6 +1587,108 @@ async def test_step_reauth( await hass.async_block_till_done() +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.usefixtures( + "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" +) +async def test_step_hassio_reauth( + hass: HomeAssistant, mock_try_connection: MagicMock, addon_info: AsyncMock +) -> None: + """Test that the reauth step works in case the Mosquitto broker add-on was re-installed.""" + + # Set up entry data based on the discovery data, but with a stale password + entry_data = { + mqtt.CONF_BROKER: "core-mosquitto", + CONF_PORT: 1883, + CONF_USERNAME: "mock-user", + CONF_PASSWORD: "stale-secret", + } + + addon_info["hostname"] = "core-mosquitto" + + # Prepare the config entry + config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=entry_data) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.data.get(CONF_PASSWORD) == "stale-secret" + + # Start reauth flow + mock_try_connection.reset_mock() + mock_try_connection.return_value = True + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + # Assert the entry is updated automatically + assert config_entry.data.get(CONF_PASSWORD) == "mock-pass" + mock_try_connection.assert_called_once_with( + { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + } + ) + + +@pytest.mark.parametrize( + ("discovery_info", "discovery_info_side_effect", "broker"), + [ + ({"config": ADD_ON_DISCOVERY_INFO.copy()}, AddonError, "core-mosquitto"), + ({"config": ADD_ON_DISCOVERY_INFO.copy()}, None, "broker-not-addon"), + ], +) +@pytest.mark.usefixtures( + "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" +) +async def test_step_hassio_reauth_no_discovery_info( + hass: HomeAssistant, + mock_try_connection: MagicMock, + addon_info: AsyncMock, + broker: str, +) -> None: + """Test hassio reauth flow defaults to manual flow. + + Test that the reauth step defaults to + normal reauth flow if fetching add-on discovery info failed, + or the broker is not the add-on. 
+ """ + + # Set up entry data based on the discovery data, but with a stale password + entry_data = { + mqtt.CONF_BROKER: broker, + CONF_PORT: 1883, + CONF_USERNAME: "mock-user", + CONF_PASSWORD: "wrong-pass", + } + + addon_info["hostname"] = "core-mosquitto" + + # Prepare the config entry + config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=entry_data) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.data.get(CONF_PASSWORD) == "wrong-pass" + + # Start reauth flow + mock_try_connection.reset_mock() + mock_try_connection.return_value = True + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + assert result["context"]["source"] == "reauth" + + # Assert the entry is not updated + assert config_entry.data.get(CONF_PASSWORD) == "wrong-pass" + mock_try_connection.assert_not_called() + + async def test_options_user_connection_fails( hass: HomeAssistant, mock_try_connection_time_out: MagicMock ) -> None: From 302ffe5e56488f2f139686d375e5c82339304c87 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Mon, 26 Aug 2024 10:12:05 +0200 Subject: [PATCH 1062/1635] Revert "Use speex for noise suppression and auto gain" (#124620) Revert "Use speex for noise suppression and auto gain (#124591)" This reverts commit 592f60643a8b34bd777a6b3276440fcc61fc7a30. --- .../assist_pipeline/audio_enhancer.py | 30 ++----------------- .../components/assist_pipeline/manifest.json | 2 +- .../components/assist_pipeline/pipeline.py | 4 +-- homeassistant/components/voip/voip.py | 4 +-- homeassistant/package_constraints.txt | 1 - requirements_all.txt | 3 -- requirements_test_all.txt | 3 -- 7 files changed, 8 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index ff2b122187a..c9c60f421b1 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -5,7 +5,6 @@ from dataclasses import dataclass import logging from pymicro_vad import MicroVad -from pyspeex_noise import AudioProcessor from .const import BYTES_PER_CHUNK @@ -42,8 +41,8 @@ class AudioEnhancer(ABC): """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" -class MicroVadSpeexEnhancer(AudioEnhancer): - """Audio enhancer that runs microVAD and speex.""" +class MicroVadEnhancer(AudioEnhancer): + """Audio enhancer that just runs microVAD.""" def __init__( self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool @@ -51,24 +50,6 @@ class MicroVadSpeexEnhancer(AudioEnhancer): """Initialize audio enhancer.""" super().__init__(auto_gain, noise_suppression, is_vad_enabled) - self.audio_processor: AudioProcessor | None = None - - # Scale from 0-4 - self.noise_suppression = noise_suppression * -15 - - # Scale from 0-31 - self.auto_gain = auto_gain * 300 - - if (self.auto_gain != 0) or (self.noise_suppression != 0): - self.audio_processor = AudioProcessor( - self.auto_gain, self.noise_suppression - ) - _LOGGER.debug( - "Initialized speex with auto_gain=%s, noise_suppression=%s", - self.auto_gain, - self.noise_suppression, - ) - self.vad: MicroVad | None = None self.threshold = 0.5 @@ -80,17 +61,12 @@ class MicroVadSpeexEnhancer(AudioEnhancer): """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" 
is_speech: bool | None = None - assert len(audio) == BYTES_PER_CHUNK - if self.vad is not None: # Run VAD + assert len(audio) == BYTES_PER_CHUNK speech_prob = self.vad.Process10ms(audio) is_speech = speech_prob > self.threshold - if self.audio_processor is not None: - # Run noise suppression and auto gain - audio = self.audio_processor.Process10ms(audio).audio - return EnhancedAudioChunk( audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech ) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index c22b7391d33..00950b138fd 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.0"] + "requirements": ["pymicro-vad==1.0.1"] } diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 342f811c99b..9fada934ca1 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -49,7 +49,7 @@ from homeassistant.util import ( ) from homeassistant.util.limited_size_dict import LimitedSizeDict -from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer +from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer from .const import ( BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, @@ -589,7 +589,7 @@ class PipelineRun: # Initialize with audio settings if self.audio_settings.needs_processor and (self.audio_enhancer is None): # Default audio enhancer - self.audio_enhancer = MicroVadSpeexEnhancer( + self.audio_enhancer = MicroVadEnhancer( self.audio_settings.auto_gain_dbfs, self.audio_settings.noise_suppression_level, self.audio_settings.is_vad_enabled, diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index be1e58b6eec..161e938a3b6 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -33,7 +33,7 @@ from homeassistant.components.assist_pipeline import ( ) from homeassistant.components.assist_pipeline.audio_enhancer import ( AudioEnhancer, - MicroVadSpeexEnhancer, + MicroVadEnhancer, ) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, @@ -235,7 +235,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - audio_enhancer = MicroVadSpeexEnhancer(0, 0, True) + audio_enhancer = MicroVadEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 5f9b9023718..23d759f270d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -49,7 +49,6 @@ pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 -pyspeex-noise==1.0.0 python-slugify==8.0.4 PyTurboJPEG==1.7.1 pyudev==0.24.1 diff --git a/requirements_all.txt b/requirements_all.txt index 5d5d5931818..fc626c59c39 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2228,9 +2228,6 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 -# homeassistant.components.assist_pipeline -pyspeex-noise==1.0.0 - # homeassistant.components.squeezebox 
pysqueezebox==0.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ab1d0bc2a1b..858ba6d1197 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1782,9 +1782,6 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 -# homeassistant.components.assist_pipeline -pyspeex-noise==1.0.0 - # homeassistant.components.squeezebox pysqueezebox==0.7.1 From 7ddd755acc774d4e9e76d84f3005c13ee966e289 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Mon, 26 Aug 2024 10:17:51 +0200 Subject: [PATCH 1063/1635] Handle LCN entity instances only in corresponding platform (#124589) * Handle switch entity instance only in switch platform * Add other platforms --- homeassistant/components/lcn/binary_sensor.py | 69 +++++++++++++------ homeassistant/components/lcn/climate.py | 50 +++++++++----- homeassistant/components/lcn/cover.py | 58 +++++++++++----- homeassistant/components/lcn/light.py | 58 +++++++++++----- homeassistant/components/lcn/scene.py | 50 +++++++++----- homeassistant/components/lcn/sensor.py | 68 +++++++++++------- homeassistant/components/lcn/switch.py | 58 +++++++++++----- homeassistant/components/lcn/websocket.py | 9 ++- 8 files changed, 281 insertions(+), 139 deletions(-) diff --git a/homeassistant/components/lcn/binary_sensor.py b/homeassistant/components/lcn/binary_sensor.py index d7e5798bb91..a0f8e1cf360 100644 --- a/homeassistant/components/lcn/binary_sensor.py +++ b/homeassistant/components/lcn/binary_sensor.py @@ -1,6 +1,7 @@ """Support for LCN binary sensors.""" -from __future__ import annotations +from collections.abc import Iterable +from functools import partial import pypck @@ -25,22 +26,37 @@ from .const import ( from .helpers import DeviceConnectionType, InputType, get_device_connection -def create_lcn_binary_sensor_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS: - return LcnRegulatorLockSensor( - entity_config, config_entry.entry_id, device_connection +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnRegulatorLockSensor | LcnBinarySensor | LcnLockKeysSensor] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry ) - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS: - return LcnBinarySensor(entity_config, config_entry.entry_id, device_connection) - # in KEY - return LcnLockKeysSensor(entity_config, config_entry.entry_id, device_connection) + + if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS: + entities.append( + LcnRegulatorLockSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + elif entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS: + entities.append( + LcnBinarySensor(entity_config, config_entry.entry_id, device_connection) + ) + else: # in KEY + entities.append( + LcnLockKeysSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -49,14 +65,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities 
from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_BINARY_SENSOR: (async_add_entities, create_lcn_binary_sensor_entity)} + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_binary_sensor_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_BINARY_SENSOR + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_BINARY_SENSOR: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_BINARY_SENSOR + ), ) diff --git a/homeassistant/components/lcn/climate.py b/homeassistant/components/lcn/climate.py index d34a872d867..0142894a16b 100644 --- a/homeassistant/components/lcn/climate.py +++ b/homeassistant/components/lcn/climate.py @@ -1,7 +1,7 @@ """Support for LCN climate control.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any, cast import pypck @@ -41,15 +41,24 @@ from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_climate_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnClimate] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - return LcnClimate(entity_config, config_entry.entry_id, device_connection) + entities.append( + LcnClimate(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -58,14 +67,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_CLIMATE: (async_add_entities, create_lcn_climate_entity)} + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_climate_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_CLIMATE + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_CLIMATE: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_CLIMATE + ), ) diff --git a/homeassistant/components/lcn/cover.py b/homeassistant/components/lcn/cover.py index a2f508cee97..1e428a350d6 100644 --- a/homeassistant/components/lcn/cover.py +++ b/homeassistant/components/lcn/cover.py @@ -1,7 +1,7 @@ """Support for LCN covers.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -26,18 +26,29 @@ from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def 
create_lcn_cover_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnOutputsCover | LcnRelayCover] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - if entity_config[CONF_DOMAIN_DATA][CONF_MOTOR] in "OUTPUTS": - return LcnOutputsCover(entity_config, config_entry.entry_id, device_connection) - # in RELAYS - return LcnRelayCover(entity_config, config_entry.entry_id, device_connection) + if entity_config[CONF_DOMAIN_DATA][CONF_MOTOR] in "OUTPUTS": + entities.append( + LcnOutputsCover(entity_config, config_entry.entry_id, device_connection) + ) + else: # in RELAYS + entities.append( + LcnRelayCover(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -46,14 +57,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN cover entities from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_COVER: (async_add_entities, create_lcn_cover_entity)} + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_cover_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_COVER + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_COVER: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_COVER + ), ) diff --git a/homeassistant/components/lcn/light.py b/homeassistant/components/lcn/light.py index b896462c8a1..799ed0036d8 100644 --- a/homeassistant/components/lcn/light.py +++ b/homeassistant/components/lcn/light.py @@ -1,7 +1,7 @@ """Support for LCN lights.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -35,18 +35,29 @@ from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_light_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnOutputLight | LcnRelayLight] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: - return LcnOutputLight(entity_config, config_entry.entry_id, device_connection) - # in RELAY_PORTS - return LcnRelayLight(entity_config, config_entry.entry_id, device_connection) + if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] 
in OUTPUT_PORTS: + entities.append( + LcnOutputLight(entity_config, config_entry.entry_id, device_connection) + ) + else: # in RELAY_PORTS + entities.append( + LcnRelayLight(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -55,14 +66,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN light entities from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_LIGHT: (async_add_entities, create_lcn_light_entity)} + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_light_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_LIGHT + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_LIGHT: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_LIGHT + ), ) diff --git a/homeassistant/components/lcn/scene.py b/homeassistant/components/lcn/scene.py index f9220f676d6..52ec0262b55 100644 --- a/homeassistant/components/lcn/scene.py +++ b/homeassistant/components/lcn/scene.py @@ -1,7 +1,7 @@ """Support for LCN scenes.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -28,15 +28,24 @@ from .helpers import DeviceConnectionType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_scene_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnScene] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - return LcnScene(entity_config, config_entry.entry_id, device_connection) + entities.append( + LcnScene(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -45,14 +54,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_SCENE: (async_add_entities, create_lcn_scene_entity)} + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_scene_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_SCENE + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SCENE: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_SCENE + ), ) diff --git a/homeassistant/components/lcn/sensor.py b/homeassistant/components/lcn/sensor.py index b63ddbef8ad..7e8941a0bf9 100644 --- a/homeassistant/components/lcn/sensor.py +++ 
b/homeassistant/components/lcn/sensor.py @@ -1,7 +1,7 @@ """Support for LCN sensors.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from itertools import chain from typing import cast @@ -34,22 +34,35 @@ from .const import ( from .helpers import DeviceConnectionType, InputType, get_device_connection -def create_lcn_sensor_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in chain( - VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS - ): - return LcnVariableSensor( - entity_config, config_entry.entry_id, device_connection +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnVariableSensor | LcnLedLogicSensor] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry ) - # in LED_PORTS + LOGICOP_PORTS - return LcnLedLogicSensor(entity_config, config_entry.entry_id, device_connection) + + if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in chain( + VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS + ): + entities.append( + LcnVariableSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + else: # in LED_PORTS + LOGICOP_PORTS + entities.append( + LcnLedLogicSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -58,14 +71,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_SENSOR: (async_add_entities, create_lcn_sensor_entity)} + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_sensor_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_SENSOR + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SENSOR: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_SENSOR + ), ) diff --git a/homeassistant/components/lcn/switch.py b/homeassistant/components/lcn/switch.py index 1136e4c27a1..4c316cef547 100644 --- a/homeassistant/components/lcn/switch.py +++ b/homeassistant/components/lcn/switch.py @@ -1,7 +1,7 @@ """Support for LCN switches.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -26,18 +26,29 @@ from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_switch_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_switch_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: 
Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnOutputSwitch | LcnRelaySwitch] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: - return LcnOutputSwitch(entity_config, config_entry.entry_id, device_connection) - # in RELAY_PORTS - return LcnRelaySwitch(entity_config, config_entry.entry_id, device_connection) + if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: + entities.append( + LcnOutputSwitch(entity_config, config_entry.entry_id, device_connection) + ) + else: # in RELAY_PORTS + entities.append( + LcnRelaySwitch(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -46,14 +57,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" - hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( - {DOMAIN_SWITCH: (async_add_entities, create_lcn_switch_entity)} + add_entities = partial( + add_lcn_switch_entities, + hass, + config_entry, + async_add_entities, ) - async_add_entities( - create_lcn_switch_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_SWITCH + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SWITCH: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_SWITCH + ), ) diff --git a/homeassistant/components/lcn/websocket.py b/homeassistant/components/lcn/websocket.py index 6fda20aba93..b418e362b27 100644 --- a/homeassistant/components/lcn/websocket.py +++ b/homeassistant/components/lcn/websocket.py @@ -341,11 +341,10 @@ async def websocket_add_entity( } # Create new entity and add to corresponding component - callbacks = hass.data[DOMAIN][msg["entry_id"]][ADD_ENTITIES_CALLBACKS] - async_add_entities, create_lcn_entity = callbacks[msg[CONF_DOMAIN]] - - entity = create_lcn_entity(hass, entity_config, config_entry) - async_add_entities([entity]) + add_entities = hass.data[DOMAIN][msg["entry_id"]][ADD_ENTITIES_CALLBACKS][ + msg[CONF_DOMAIN] + ] + add_entities([entity_config]) # Add entity config to config_entry entity_configs = [*config_entry.data[CONF_ENTITIES], entity_config] From cafd953f87ea05babccf3c620dec42a2c1d4c95d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 26 Aug 2024 11:10:58 +0200 Subject: [PATCH 1064/1635] Add test showing we prefer tts entity over legacy tts provider (#124624) --- tests/components/tts/test_init.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 7a54ecc26b0..dee8e569ce1 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1838,3 +1838,22 @@ async def test_ttsentity_subclass_properties( if record.exc_info is not None ] ) + + +async def test_default_engine_prefer_entity( + hass: HomeAssistant, + mock_tts_entity: MockTTSEntity, + mock_provider: MockProvider, +) -> None: + """Test async_default_engine.""" + mock_tts_entity._attr_name = "New test" + + await mock_setup(hass, mock_provider) + await mock_config_entry_setup(hass, mock_tts_entity) + await hass.async_block_till_done() + + entity_engine = 
tts.async_resolve_engine(hass, "tts.test") + assert entity_engine == "tts.test" + provider_engine = tts.async_resolve_engine(hass, "test") + assert provider_engine == "test" + assert tts.async_default_engine(hass) == "tts.test" From 106559371c111eb9746d87da5d9d1340a99a5229 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 13:33:55 +0200 Subject: [PATCH 1065/1635] Bump github/codeql-action from 3.26.4 to 3.26.5 (#124615) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.26.4 to 3.26.5. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v3.26.4...v3.26.5) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e9824f42b91..a4653a833c4 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.26.4 + uses: github/codeql-action/init@v3.26.5 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.26.4 + uses: github/codeql-action/analyze@v3.26.5 with: category: "/language:python" From 0a05cdc38151543cf93cc6b3034077b5df66d5f4 Mon Sep 17 00:00:00 2001 From: darkfader Date: Mon, 26 Aug 2024 13:40:40 +0200 Subject: [PATCH 1066/1635] Add conductivity sensor to bthome (#124312) * Add conductivity sensor to bthome * Update icons.json * Update sensor.py --- homeassistant/components/bthome/sensor.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/homeassistant/components/bthome/sensor.py b/homeassistant/components/bthome/sensor.py index 656addad620..64e6d61cefb 100644 --- a/homeassistant/components/bthome/sensor.py +++ b/homeassistant/components/bthome/sensor.py @@ -27,6 +27,7 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfConductivity, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -356,6 +357,16 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=UnitOfVolume.LITERS, state_class=SensorStateClass.TOTAL, ), + # Conductivity (µS/cm) + ( + BTHomeSensorDeviceClass.CONDUCTIVITY, + Units.CONDUCTIVITY, + ): SensorEntityDescription( + key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}", + device_class=SensorDeviceClass.CONDUCTIVITY, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + state_class=SensorStateClass.MEASUREMENT, + ), } From 7b71f024fb47554e98b7fd2278380955168627a7 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 26 Aug 2024 13:43:14 +0200 Subject: [PATCH 1067/1635] Prefer stt entity over legacy stt provider (#124625) * Prefer stt entity over legacy stt provider * Update assist_pipeline tests --- homeassistant/components/stt/__init__.py | 4 ++-- .../assist_pipeline/snapshots/test_init.ambr | 4 ++-- .../snapshots/test_websocket.ambr | 16 +++++++-------- tests/components/assist_pipeline/test_init.py | 20 +++++++++---------- .../assist_pipeline/test_pipeline.py | 14 ++++++------- 
.../assist_pipeline/test_websocket.py | 14 ++++++------- tests/components/stt/test_init.py | 4 ++-- 7 files changed, 38 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/stt/__init__.py b/homeassistant/components/stt/__init__.py index 676d8b8aa76..227c92f2b98 100644 --- a/homeassistant/components/stt/__init__.py +++ b/homeassistant/components/stt/__init__.py @@ -72,9 +72,9 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @callback def async_default_engine(hass: HomeAssistant) -> str | None: """Return the domain or entity id of the default engine.""" - return async_default_provider(hass) or next( + return next( iter(hass.states.async_entity_ids(DOMAIN)), None - ) + ) or async_default_provider(hass) @callback diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 8124ed4ab85..7f29534e473 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -10,7 +10,7 @@ }), dict({ 'data': dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': , 'channel': , @@ -301,7 +301,7 @@ }), dict({ 'data': dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': , 'channel': , diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index fb1ca6db121..7ea6af7e0bd 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -11,7 +11,7 @@ # --- # name: test_audio_pipeline.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -92,7 +92,7 @@ # --- # name: test_audio_pipeline_debug.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -185,7 +185,7 @@ # --- # name: test_audio_pipeline_with_enhancements.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -288,7 +288,7 @@ # --- # name: test_audio_pipeline_with_wake_word_no_timeout.3 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -401,7 +401,7 @@ # --- # name: test_device_capture.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -427,7 +427,7 @@ # --- # name: test_device_capture_override.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -475,7 +475,7 @@ # --- # name: test_device_capture_queue_full.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -649,7 +649,7 @@ # --- # name: test_stt_stream_failed.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 4206a288331..04edab7131f 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -47,7 +47,7 @@ def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: async def test_pipeline_from_audio_stream_auto( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider_entity: MockSttProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -80,9 +80,9 
@@ async def test_pipeline_from_audio_stream_auto( ) assert process_events(events) == snapshot - assert len(mock_stt_provider.received) == 2 - assert mock_stt_provider.received[0].startswith(b"part1") - assert mock_stt_provider.received[1].startswith(b"part2") + assert len(mock_stt_provider_entity.received) == 2 + assert mock_stt_provider_entity.received[0].startswith(b"part1") + assert mock_stt_provider_entity.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_legacy( @@ -319,7 +319,7 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( async def test_pipeline_from_audio_stream_wake_word( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider_entity: MockSttProviderEntity, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, snapshot: SnapshotAssertion, @@ -381,16 +381,16 @@ async def test_pipeline_from_audio_stream_wake_word( # 2. queued audio (from mock wake word entity) # 3. part1 # 4. part2 - assert len(mock_stt_provider.received) > 3 + assert len(mock_stt_provider_entity.received) > 3 first_chunk = bytes( - [c_byte for c in mock_stt_provider.received[:-3] for c_byte in c] + [c_byte for c in mock_stt_provider_entity.received[:-3] for c_byte in c] ) assert first_chunk == wake_chunk_1[len(wake_chunk_1) // 2 :] + wake_chunk_2 - assert mock_stt_provider.received[-3] == b"queued audio" - assert mock_stt_provider.received[-2].startswith(b"part1") - assert mock_stt_provider.received[-1].startswith(b"part2") + assert mock_stt_provider_entity.received[-3] == b"queued audio" + assert mock_stt_provider_entity.received[-2].startswith(b"part1") + assert mock_stt_provider_entity.received[-1].startswith(b"part2") async def test_pipeline_save_audio( diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index 45a661c0f07..ef5d5edff9e 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . 
import MANY_LANGUAGES -from .conftest import MockSttProvider, MockTTSProvider +from .conftest import MockSttProviderEntity, MockTTSProvider from tests.common import flush_store @@ -398,7 +398,7 @@ async def test_default_pipeline_no_stt_tts( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider_entity: MockSttProviderEntity, mock_tts_provider: MockTTSProvider, ha_language: str, ha_country: str | None, @@ -412,7 +412,7 @@ async def test_default_pipeline( hass.config.language = ha_language with ( - patch.object(mock_stt_provider, "_supported_languages", MANY_LANGUAGES), + patch.object(mock_stt_provider_entity, "_supported_languages", MANY_LANGUAGES), patch.object(mock_tts_provider, "_supported_languages", MANY_LANGUAGES), ): assert await async_setup_component(hass, "assist_pipeline", {}) @@ -429,7 +429,7 @@ async def test_default_pipeline( id=pipeline.id, language=pipeline_language, name="Home Assistant", - stt_engine="test", + stt_engine="stt.mock_stt", stt_language=stt_language, tts_engine="test", tts_language=tts_language, @@ -441,10 +441,10 @@ async def test_default_pipeline( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline_unsupported_stt_language( - hass: HomeAssistant, mock_stt_provider: MockSttProvider + hass: HomeAssistant, mock_stt_provider_entity: MockSttProviderEntity ) -> None: """Test async_get_pipeline.""" - with patch.object(mock_stt_provider, "_supported_languages", ["smurfish"]): + with patch.object(mock_stt_provider_entity, "_supported_languages", ["smurfish"]): assert await async_setup_component(hass, "assist_pipeline", {}) pipeline_data: PipelineData = hass.data[DOMAIN] @@ -489,7 +489,7 @@ async def test_default_pipeline_unsupported_tts_language( id=pipeline.id, language="en", name="Home Assistant", - stt_engine="test", + stt_engine="stt.mock_stt", stt_language="en-US", tts_engine=None, tts_language=None, diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index 2da914f4252..f1f68d4a423 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -682,7 +682,7 @@ async def test_stt_provider_missing( ) -> None: """Test events from a pipeline run with a non-existent STT provider.""" with patch( - "homeassistant.components.stt.async_get_provider", + "homeassistant.components.stt.async_get_speech_to_text_entity", return_value=None, ): client = await hass_ws_client(hass) @@ -708,11 +708,11 @@ async def test_stt_provider_bad_metadata( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, - mock_stt_provider, + mock_stt_provider_entity, snapshot: SnapshotAssertion, ) -> None: """Test events from a pipeline run with wrong metadata.""" - with patch.object(mock_stt_provider, "check_metadata", return_value=False): + with patch.object(mock_stt_provider_entity, "check_metadata", return_value=False): client = await hass_ws_client(hass) await client.send_json_auto_id( @@ -743,7 +743,7 @@ async def test_stt_stream_failed( client = await hass_ws_client(hass) with patch( - "tests.components.assist_pipeline.conftest.MockSttProvider.async_process_audio_stream", + "tests.components.assist_pipeline.conftest.MockSttProviderEntity.async_process_audio_stream", side_effect=RuntimeError, ): await client.send_json_auto_id( @@ -1188,7 +1188,7 @@ async def test_get_pipeline( "id": ANY, "language": "en", 
"name": "Home Assistant", - "stt_engine": "test", + "stt_engine": "stt.mock_stt", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", @@ -1213,7 +1213,7 @@ async def test_get_pipeline( "language": "en", "name": "Home Assistant", # It found these defaults - "stt_engine": "test", + "stt_engine": "stt.mock_stt", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", @@ -1297,7 +1297,7 @@ async def test_list_pipelines( "id": ANY, "language": "en", "name": "Home Assistant", - "stt_engine": "test", + "stt_engine": "stt.mock_stt", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index ca2685ff827..a42ac44112e 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -497,7 +497,7 @@ async def test_default_engine_entity( @pytest.mark.parametrize("config_flow_test_domain", ["new_test"]) -async def test_default_engine_prefer_provider( +async def test_default_engine_prefer_entity( hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity, @@ -520,7 +520,7 @@ async def test_default_engine_prefer_provider( provider_engine = async_get_speech_to_text_engine(hass, "test") assert provider_engine is not None assert provider_engine.name == "test" - assert async_default_engine(hass) == "test" + assert async_default_engine(hass) == "stt.new_test" async def test_get_engine_legacy( From 16231da5efe298b5d138ff2f4cc212b9ae5f1dc4 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 01:48:52 -1000 Subject: [PATCH 1068/1635] Bump cached-ipaddress to 0.5.0 (#124602) changelog: https://github.com/bdraco/cached-ipaddress/compare/v0.3.0...v0.5.0 --- homeassistant/components/dhcp/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json index ff81540b0ea..6023e55faf3 100644 --- a/homeassistant/components/dhcp/manifest.json +++ b/homeassistant/components/dhcp/manifest.json @@ -16,6 +16,6 @@ "requirements": [ "aiodhcpwatcher==1.0.2", "aiodiscover==2.1.0", - "cached_ipaddress==0.3.0" + "cached-ipaddress==0.5.0" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 23d759f270d..56187150c45 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -19,7 +19,7 @@ bleak==0.22.2 bluetooth-adapters==0.19.4 bluetooth-auto-recovery==1.4.2 bluetooth-data-tools==1.20.0 -cached_ipaddress==0.3.0 +cached-ipaddress==0.5.0 certifi>=2021.5.30 ciso8601==2.3.1 cryptography==43.0.0 diff --git a/requirements_all.txt b/requirements_all.txt index fc626c59c39..98dd06ad428 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -649,7 +649,7 @@ btsmarthub-devicelist==0.2.3 buienradar==1.0.6 # homeassistant.components.dhcp -cached_ipaddress==0.3.0 +cached-ipaddress==0.5.0 # homeassistant.components.caldav caldav==1.3.9 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 858ba6d1197..da09d09e12a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -560,7 +560,7 @@ bthome-ble==3.9.1 buienradar==1.0.6 # homeassistant.components.dhcp -cached_ipaddress==0.3.0 +cached-ipaddress==0.5.0 # homeassistant.components.caldav caldav==1.3.9 From 743df84569eb66fce9663988cc1f26550996c52b Mon Sep 17 
00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 01:49:26 -1000 Subject: [PATCH 1069/1635] Reduce multiple calls to calculate sensor unit in tplink (#124606) accessing unit on the feature is not cached and it had to be looked up every time --- homeassistant/components/tplink/sensor.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index 3da414d74d3..1307079937f 100644 --- a/homeassistant/components/tplink/sensor.py +++ b/homeassistant/components/tplink/sensor.py @@ -154,7 +154,5 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity): self._attr_native_value = value # Map to homeassistant units and fallback to upstream one if none found - if self._feature.unit is not None: - self._attr_native_unit_of_measurement = UNIT_MAPPING.get( - self._feature.unit, self._feature.unit - ) + if (unit := self._feature.unit) is not None: + self._attr_native_unit_of_measurement = UNIT_MAPPING.get(unit, unit) From 1aa0dbdaf570517fbc5aac55f6faecc07b611159 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Mon, 26 Aug 2024 15:10:43 +0200 Subject: [PATCH 1070/1635] Revert "Revert "Use speex for noise suppression and auto gain"" (#124637) Revert "Revert "Use speex for noise suppression and auto gain" (#124620)" This reverts commit 302ffe5e56488f2f139686d375e5c82339304c87. --- .../assist_pipeline/audio_enhancer.py | 30 +++++++++++++++++-- .../components/assist_pipeline/manifest.json | 2 +- .../components/assist_pipeline/pipeline.py | 4 +-- homeassistant/components/voip/voip.py | 4 +-- homeassistant/package_constraints.txt | 1 + requirements_all.txt | 3 ++ requirements_test_all.txt | 3 ++ 7 files changed, 39 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index c9c60f421b1..ff2b122187a 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -5,6 +5,7 @@ from dataclasses import dataclass import logging from pymicro_vad import MicroVad +from pyspeex_noise import AudioProcessor from .const import BYTES_PER_CHUNK @@ -41,8 +42,8 @@ class AudioEnhancer(ABC): """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" -class MicroVadEnhancer(AudioEnhancer): - """Audio enhancer that just runs microVAD.""" +class MicroVadSpeexEnhancer(AudioEnhancer): + """Audio enhancer that runs microVAD and speex.""" def __init__( self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool @@ -50,6 +51,24 @@ class MicroVadEnhancer(AudioEnhancer): """Initialize audio enhancer.""" super().__init__(auto_gain, noise_suppression, is_vad_enabled) + self.audio_processor: AudioProcessor | None = None + + # Scale from 0-4 + self.noise_suppression = noise_suppression * -15 + + # Scale from 0-31 + self.auto_gain = auto_gain * 300 + + if (self.auto_gain != 0) or (self.noise_suppression != 0): + self.audio_processor = AudioProcessor( + self.auto_gain, self.noise_suppression + ) + _LOGGER.debug( + "Initialized speex with auto_gain=%s, noise_suppression=%s", + self.auto_gain, + self.noise_suppression, + ) + self.vad: MicroVad | None = None self.threshold = 0.5 @@ -61,12 +80,17 @@ class MicroVadEnhancer(AudioEnhancer): """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" is_speech: bool | None = None + assert len(audio) == BYTES_PER_CHUNK + if self.vad is not None: # Run VAD - assert 
len(audio) == BYTES_PER_CHUNK speech_prob = self.vad.Process10ms(audio) is_speech = speech_prob > self.threshold + if self.audio_processor is not None: + # Run noise suppression and auto gain + audio = self.audio_processor.Process10ms(audio).audio + return EnhancedAudioChunk( audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech ) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index 00950b138fd..c22b7391d33 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.1"] + "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.0"] } diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 9fada934ca1..342f811c99b 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -49,7 +49,7 @@ from homeassistant.util import ( ) from homeassistant.util.limited_size_dict import LimitedSizeDict -from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer +from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer from .const import ( BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, @@ -589,7 +589,7 @@ class PipelineRun: # Initialize with audio settings if self.audio_settings.needs_processor and (self.audio_enhancer is None): # Default audio enhancer - self.audio_enhancer = MicroVadEnhancer( + self.audio_enhancer = MicroVadSpeexEnhancer( self.audio_settings.auto_gain_dbfs, self.audio_settings.noise_suppression_level, self.audio_settings.is_vad_enabled, diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 161e938a3b6..be1e58b6eec 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -33,7 +33,7 @@ from homeassistant.components.assist_pipeline import ( ) from homeassistant.components.assist_pipeline.audio_enhancer import ( AudioEnhancer, - MicroVadEnhancer, + MicroVadSpeexEnhancer, ) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, @@ -235,7 +235,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - audio_enhancer = MicroVadEnhancer(0, 0, True) + audio_enhancer = MicroVadSpeexEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 56187150c45..767804e5136 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -49,6 +49,7 @@ pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 +pyspeex-noise==1.0.0 python-slugify==8.0.4 PyTurboJPEG==1.7.1 pyudev==0.24.1 diff --git a/requirements_all.txt b/requirements_all.txt index 98dd06ad428..630e9180798 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2228,6 +2228,9 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 +# homeassistant.components.assist_pipeline +pyspeex-noise==1.0.0 + # homeassistant.components.squeezebox pysqueezebox==0.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index da09d09e12a..476bae2570e 100644 
--- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1782,6 +1782,9 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 +# homeassistant.components.assist_pipeline +pyspeex-noise==1.0.0 + # homeassistant.components.squeezebox pysqueezebox==0.7.1 From f4528b288fbd04302852be41c99f47e2941fb2f2 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 26 Aug 2024 15:28:55 +0200 Subject: [PATCH 1071/1635] Fix overriding name in MockTTSEntity (#124639) * Fix overriding name in MockTTSEntity * Fix --- tests/components/tts/common.py | 5 +---- tests/components/tts/test_init.py | 6 +++--- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 1331f441940..71edf29721f 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -175,10 +175,7 @@ class MockProvider(BaseProvider, Provider): class MockTTSEntity(BaseProvider, TextToSpeechEntity): """Test speech API provider.""" - @property - def name(self) -> str: - """Return the name of the entity.""" - return "Test" + _attr_name = "Test" class MockTTS(MockPlatform): diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index dee8e569ce1..55ff4492e80 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1852,8 +1852,8 @@ async def test_default_engine_prefer_entity( await mock_config_entry_setup(hass, mock_tts_entity) await hass.async_block_till_done() - entity_engine = tts.async_resolve_engine(hass, "tts.test") - assert entity_engine == "tts.test" + entity_engine = tts.async_resolve_engine(hass, "tts.new_test") + assert entity_engine == "tts.new_test" provider_engine = tts.async_resolve_engine(hass, "test") assert provider_engine == "test" - assert tts.async_default_engine(hass) == "tts.test" + assert tts.async_default_engine(hass) == "tts.new_test" From ff029e5efc2e56e085f2495de7309f6a77f9c45b Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Mon, 26 Aug 2024 17:33:47 +0200 Subject: [PATCH 1072/1635] Cleanup not used data field for mqtt hassio config flow confirm step (#124486) --- homeassistant/components/mqtt/strings.json | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 1550d6c8b62..75855f6d9f3 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -78,13 +78,7 @@ }, "hassio_confirm": { "title": "MQTT Broker via Home Assistant add-on", - "description": "Do you want to configure Home Assistant to connect to the MQTT broker provided by the add-on {addon}?", - "data": { - "discovery": "Enable discovery" - }, - "data_description": { - "discovery": "Option to enable MQTT automatic discovery." - } + "description": "Do you want to configure Home Assistant to connect to the MQTT broker provided by the add-on {addon}?" }, "reauth_confirm": { "title": "Re-authentication required with the MQTT broker", @@ -191,7 +185,7 @@ "title": "MQTT options", "description": "Discovery - If discovery is enabled (recommended), Home Assistant will automatically discover devices and entities which publish their configuration on the MQTT broker. 
If discovery is disabled, all configuration must be done manually.\nDiscovery prefix - The prefix a configuration topic for automatic discovery must start with.\nBirth message - The birth message will be sent each time Home Assistant (re)connects to the MQTT broker.\nWill message - The will message will be sent each time Home Assistant loses its connection to the broker, both in case of a clean (e.g. Home Assistant shutting down) and in case of an unclean (e.g. Home Assistant crashing or losing its network connection) disconnect.", "data": { - "discovery": "[%key:component::mqtt::config::step::hassio_confirm::data::discovery%]", + "discovery": "Enable discovery", "discovery_prefix": "Discovery prefix", "birth_enable": "Enable birth message", "birth_topic": "Birth message topic", From 657ff58500bf820f18282d4f1d4d0158b3f7719a Mon Sep 17 00:00:00 2001 From: WebSpider Date: Mon, 26 Aug 2024 17:39:27 +0200 Subject: [PATCH 1073/1635] Fix device class for motion_light blueprint (#124495) * Fix device class for motion_light blueprint. Fixes #124353 * Update to accept both motion and occupancy --- .../components/automation/blueprints/motion_light.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/automation/blueprints/motion_light.yaml b/homeassistant/components/automation/blueprints/motion_light.yaml index 8f5d3f957f9..ad9c6f0286b 100644 --- a/homeassistant/components/automation/blueprints/motion_light.yaml +++ b/homeassistant/components/automation/blueprints/motion_light.yaml @@ -10,8 +10,10 @@ blueprint: selector: entity: filter: - device_class: motion - domain: binary_sensor + - device_class: occupancy + domain: binary_sensor + - device_class: motion + domain: binary_sensor light_target: name: Light selector: From 95fa123a0b519167edaba5323a45a936e2a3744e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 26 Aug 2024 17:57:31 +0200 Subject: [PATCH 1074/1635] Add Airzone main zone mode select (#124566) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * airzone: select: add zone master mode Signed-off-by: Álvaro Fernández Rojas * airzone: select: use MAIN instead of MASTER Signed-off-by: Álvaro Fernández Rojas * airzone: select: call async_add_entities once Signed-off-by: Álvaro Fernández Rojas * airzone: select: add options lambda function Signed-off-by: Álvaro Fernández Rojas * airzone: select: implement requested changes Signed-off-by: Álvaro Fernández Rojas * airzone: select: options_fn: return list Signed-off-by: Álvaro Fernández Rojas --------- Signed-off-by: Álvaro Fernández Rojas --- homeassistant/components/airzone/select.py | 62 +++++++++++++++- homeassistant/components/airzone/strings.json | 11 +++ tests/components/airzone/test_select.py | 70 ++++++++++++++++++- 3 files changed, 139 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/airzone/select.py b/homeassistant/components/airzone/select.py index 493150e5c6a..2bc11bc4228 100644 --- a/homeassistant/components/airzone/select.py +++ b/homeassistant/components/airzone/select.py @@ -2,16 +2,21 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from typing import Any, Final -from aioairzone.common import GrilleAngle, SleepTimeout +from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout from aioairzone.const import ( API_COLD_ANGLE, API_HEAT_ANGLE, + API_MODE, API_SLEEP, AZD_COLD_ANGLE, AZD_HEAT_ANGLE, + AZD_MASTER, + 
AZD_MODE, + AZD_MODES, AZD_SLEEP, AZD_ZONES, ) @@ -33,6 +38,9 @@ class AirzoneSelectDescription(SelectEntityDescription): api_param: str options_dict: dict[str, int] + options_fn: Callable[[dict[str, Any], dict[str, int]], list[str]] = ( + lambda zone_data, value: list(value) + ) GRILLE_ANGLE_DICT: Final[dict[str, int]] = { @@ -42,6 +50,15 @@ GRILLE_ANGLE_DICT: Final[dict[str, int]] = { "40deg": GrilleAngle.DEG_40, } +MODE_DICT: Final[dict[str, int]] = { + "cool": OperationMode.COOLING, + "dry": OperationMode.DRY, + "fan": OperationMode.FAN, + "heat": OperationMode.HEATING, + "heat_cool": OperationMode.AUTO, + "stop": OperationMode.STOP, +} + SLEEP_DICT: Final[dict[str, int]] = { "off": SleepTimeout.SLEEP_OFF, "30m": SleepTimeout.SLEEP_30, @@ -50,6 +67,26 @@ SLEEP_DICT: Final[dict[str, int]] = { } +def main_zone_options( + zone_data: dict[str, Any], + options: dict[str, int], +) -> list[str]: + """Filter available modes.""" + modes = zone_data.get(AZD_MODES, []) + return [k for k, v in options.items() if v in modes] + + +MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( + AirzoneSelectDescription( + api_param=API_MODE, + key=AZD_MODE, + options_dict=MODE_DICT, + options_fn=main_zone_options, + translation_key="modes", + ), +) + + ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( AirzoneSelectDescription( api_param=API_COLD_ANGLE, @@ -95,7 +132,20 @@ async def async_setup_entry( received_zones = set(zones_data) new_zones = received_zones - added_zones if new_zones: - async_add_entities( + entities: list[AirzoneZoneSelect] = [ + AirzoneZoneSelect( + coordinator, + description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in MAIN_ZONE_SELECT_TYPES + if description.key in zones_data.get(system_zone_id) + and zones_data.get(system_zone_id).get(AZD_MASTER) is True + ] + entities += [ AirzoneZoneSelect( coordinator, description, @@ -106,7 +156,8 @@ async def async_setup_entry( for system_zone_id in new_zones for description in ZONE_SELECT_TYPES if description.key in zones_data.get(system_zone_id) - ) + ] + async_add_entities(entities) added_zones.update(new_zones) entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) @@ -153,6 +204,11 @@ class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect): f"{self._attr_unique_id}_{system_zone_id}_{description.key}" ) self.entity_description = description + + self._attr_options = self.entity_description.options_fn( + zone_data, description.options_dict + ) + self.values_dict = {v: k for k, v in description.options_dict.items()} self._async_update_attrs() diff --git a/homeassistant/components/airzone/strings.json b/homeassistant/components/airzone/strings.json index 438304d7f41..cd313b821aa 100644 --- a/homeassistant/components/airzone/strings.json +++ b/homeassistant/components/airzone/strings.json @@ -52,6 +52,17 @@ "40deg": "[%key:component::airzone::entity::select::grille_angles::state::40deg%]" } }, + "modes": { + "name": "Mode", + "state": { + "cool": "[%key:component::climate::entity_component::_::state::cool%]", + "dry": "[%key:component::climate::entity_component::_::state::dry%]", + "fan": "[%key:component::climate::entity_component::_::state::fan_only%]", + "heat": "[%key:component::climate::entity_component::_::state::heat%]", + "heat_cool": "[%key:component::climate::entity_component::_::state::heat_cool%]", + "stop": "Stop" + } + }, "sleep_times": { "name": "Sleep", "state": { diff --git 
a/tests/components/airzone/test_select.py b/tests/components/airzone/test_select.py index 01617eab175..343c033728a 100644 --- a/tests/components/airzone/test_select.py +++ b/tests/components/airzone/test_select.py @@ -2,17 +2,19 @@ from unittest.mock import patch +from aioairzone.common import OperationMode from aioairzone.const import ( API_COLD_ANGLE, API_DATA, API_HEAT_ANGLE, + API_MODE, API_SLEEP, API_SYSTEM_ID, API_ZONE_ID, ) import pytest -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -31,6 +33,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.despacho_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.despacho_mode") + assert state is None + state = hass.states.get("select.despacho_sleep") assert state.state == "off" @@ -40,6 +45,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_1_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.dorm_1_mode") + assert state is None + state = hass.states.get("select.dorm_1_sleep") assert state.state == "off" @@ -49,6 +57,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_2_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.dorm_2_mode") + assert state is None + state = hass.states.get("select.dorm_2_sleep") assert state.state == "off" @@ -58,6 +69,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_ppal_heat_angle") assert state.state == "50deg" + state = hass.states.get("select.dorm_ppal_mode") + assert state is None + state = hass.states.get("select.dorm_ppal_sleep") assert state.state == "30m" @@ -67,6 +81,16 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.salon_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.salon_mode") + assert state.state == "heat" + assert state.attributes.get(ATTR_OPTIONS) == [ + "cool", + "dry", + "fan", + "heat", + "stop", + ] + state = hass.states.get("select.salon_sleep") assert state.state == "off" @@ -115,6 +139,50 @@ async def test_airzone_select_sleep(hass: HomeAssistant) -> None: assert state.state == "30m" +async def test_airzone_select_mode(hass: HomeAssistant) -> None: + """Test select HVAC mode.""" + + await async_init_integration(hass) + + put_hvac_mode = { + API_DATA: [ + { + API_SYSTEM_ID: 1, + API_ZONE_ID: 1, + API_MODE: OperationMode.COOLING, + } + ] + } + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "Invalid", + }, + blocking=True, + ) + + with patch( + "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", + return_value=put_hvac_mode, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "cool", + }, + blocking=True, + ) + + state = hass.states.get("select.salon_mode") + assert state.state == "cool" + + async def test_airzone_select_grille_angle(hass: HomeAssistant) -> None: """Test select sleep.""" From 
9e7aeed848feb2e4ead73553bbe1c19e81f6b098 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 26 Aug 2024 18:25:10 +0200 Subject: [PATCH 1075/1635] Bump nextdns to version 3.2.0 (#124646) --- homeassistant/components/nextdns/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index b65706ef1ce..be9eee5049c 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["nextdns"], "quality_scale": "platinum", - "requirements": ["nextdns==3.1.0"] + "requirements": ["nextdns==3.2.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 630e9180798..f1aa4e6990f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1423,7 +1423,7 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.1.0 +nextdns==3.2.0 # homeassistant.components.nibe_heatpump nibe==2.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 476bae2570e..7154046e3a6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1180,7 +1180,7 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.1.0 +nextdns==3.2.0 # homeassistant.components.nibe_heatpump nibe==2.11.0 From a68cd712c65765102a80d57b556a586d15995246 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 26 Aug 2024 18:45:28 +0200 Subject: [PATCH 1076/1635] Move data entry section translations (#124648) --- homeassistant/components/kitchen_sink/icons.json | 2 +- homeassistant/components/kitchen_sink/strings.json | 2 +- script/hassfest/icons.py | 2 +- script/hassfest/translations.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/kitchen_sink/icons.json b/homeassistant/components/kitchen_sink/icons.json index 85472996819..2947cfa7ec5 100644 --- a/homeassistant/components/kitchen_sink/icons.json +++ b/homeassistant/components/kitchen_sink/icons.json @@ -2,7 +2,7 @@ "options": { "step": { "options_1": { - "section": { + "sections": { "section_1": "mdi:robot" } } diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index e67527d8468..c25964ab2ab 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -12,7 +12,7 @@ "data": {} }, "options_1": { - "section": { + "sections": { "section_1": { "data": { "bool": "Optional boolean", diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index 087d395afeb..10f666b9013 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -51,7 +51,7 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( { "step": { str: { - "section": { + "sections": { str: icon_value_validator, } } diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index c5efd05948f..fa12ce626ad 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -167,7 +167,7 @@ def gen_data_entry_schema( vol.Optional("data_description"): {str: translation_value_validator}, vol.Optional("menu_options"): {str: translation_value_validator}, vol.Optional("submit"): translation_value_validator, - vol.Optional("section"): { + vol.Optional("sections"): { str: { vol.Optional("data"): {str: translation_value_validator}, 
vol.Optional("description"): translation_value_validator, From 547dbf77aaf4026743dc7e4353578caa076a3a69 Mon Sep 17 00:00:00 2001 From: ilan <31193909+iloveicedgreentea@users.noreply.github.com> Date: Mon, 26 Aug 2024 13:26:53 -0400 Subject: [PATCH 1077/1635] Use runtime data instead of hass.data in jvc projector (#124608) * feat: use runtime data instead of hass.data * fix: extend ConfigEntry --- homeassistant/components/jvc_projector/__init__.py | 12 ++++++------ .../components/jvc_projector/binary_sensor.py | 8 +++----- homeassistant/components/jvc_projector/remote.py | 7 +++---- homeassistant/components/jvc_projector/select.py | 8 +++----- homeassistant/components/jvc_projector/sensor.py | 8 +++----- tests/components/jvc_projector/test_coordinator.py | 5 ++--- tests/components/jvc_projector/test_init.py | 2 -- 7 files changed, 20 insertions(+), 30 deletions(-) diff --git a/homeassistant/components/jvc_projector/__init__.py b/homeassistant/components/jvc_projector/__init__.py index 8ce1fb46e3d..09e93127e40 100644 --- a/homeassistant/components/jvc_projector/__init__.py +++ b/homeassistant/components/jvc_projector/__init__.py @@ -15,13 +15,14 @@ from homeassistant.const import ( from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import DOMAIN from .coordinator import JvcProjectorDataUpdateCoordinator +type JVCConfigEntry = ConfigEntry[JvcProjectorDataUpdateCoordinator] + PLATFORMS = [Platform.BINARY_SENSOR, Platform.REMOTE, Platform.SELECT, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: JVCConfigEntry) -> bool: """Set up integration from a config entry.""" device = JvcProjector( host=entry.data[CONF_HOST], @@ -43,7 +44,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = JvcProjectorDataUpdateCoordinator(hass, device) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator async def disconnect(event: Event) -> None: await device.disconnect() @@ -57,9 +58,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: JVCConfigEntry) -> bool: """Unload config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - await hass.data[DOMAIN][entry.entry_id].device.disconnect() - hass.data[DOMAIN].pop(entry.entry_id) + await entry.runtime_data.device.disconnect() return unload_ok diff --git a/homeassistant/components/jvc_projector/binary_sensor.py b/homeassistant/components/jvc_projector/binary_sensor.py index 7e8788aa0a6..6dfac63892b 100644 --- a/homeassistant/components/jvc_projector/binary_sensor.py +++ b/homeassistant/components/jvc_projector/binary_sensor.py @@ -5,22 +5,20 @@ from __future__ import annotations from jvcprojector import const from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import JvcProjectorDataUpdateCoordinator -from .const import DOMAIN +from . 
import JVCConfigEntry, JvcProjectorDataUpdateCoordinator from .entity import JvcProjectorEntity ON_STATUS = (const.ON, const.WARMING) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities([JvcBinarySensor(coordinator)]) diff --git a/homeassistant/components/jvc_projector/remote.py b/homeassistant/components/jvc_projector/remote.py index b69d3b0118b..f90a2816363 100644 --- a/homeassistant/components/jvc_projector/remote.py +++ b/homeassistant/components/jvc_projector/remote.py @@ -10,12 +10,11 @@ from typing import Any from jvcprojector import const from homeassistant.components.remote import RemoteEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . import JVCConfigEntry from .entity import JvcProjectorEntity COMMANDS = { @@ -55,10 +54,10 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities([JvcProjectorRemote(coordinator)], True) diff --git a/homeassistant/components/jvc_projector/select.py b/homeassistant/components/jvc_projector/select.py index 1395637fad1..60c80f98fc0 100644 --- a/homeassistant/components/jvc_projector/select.py +++ b/homeassistant/components/jvc_projector/select.py @@ -9,12 +9,10 @@ from typing import Final from jvcprojector import JvcProjector, const from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import JvcProjectorDataUpdateCoordinator -from .const import DOMAIN +from . 
import JVCConfigEntry, JvcProjectorDataUpdateCoordinator from .entity import JvcProjectorEntity @@ -41,11 +39,11 @@ SELECTS: Final[list[JvcProjectorSelectDescription]] = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( JvcProjectorSelectEntity(coordinator, description) for description in SELECTS diff --git a/homeassistant/components/jvc_projector/sensor.py b/homeassistant/components/jvc_projector/sensor.py index 9be04b367e6..3edf51e4316 100644 --- a/homeassistant/components/jvc_projector/sensor.py +++ b/homeassistant/components/jvc_projector/sensor.py @@ -9,13 +9,11 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import JvcProjectorDataUpdateCoordinator -from .const import DOMAIN +from . import JVCConfigEntry, JvcProjectorDataUpdateCoordinator from .entity import JvcProjectorEntity JVC_SENSORS = ( @@ -36,10 +34,10 @@ JVC_SENSORS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( JvcSensor(coordinator, description) for description in JVC_SENSORS diff --git a/tests/components/jvc_projector/test_coordinator.py b/tests/components/jvc_projector/test_coordinator.py index 24297348653..b9211250aff 100644 --- a/tests/components/jvc_projector/test_coordinator.py +++ b/tests/components/jvc_projector/test_coordinator.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError -from homeassistant.components.jvc_projector import DOMAIN from homeassistant.components.jvc_projector.coordinator import ( INTERVAL_FAST, INTERVAL_SLOW, @@ -29,7 +28,7 @@ async def test_coordinator_update( ) await hass.async_block_till_done() assert mock_device.get_state.call_count == 3 - coordinator = hass.data[DOMAIN][mock_integration.entry_id] + coordinator = mock_integration.runtime_data assert coordinator.update_interval == INTERVAL_SLOW @@ -69,5 +68,5 @@ async def test_coordinator_device_on( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - coordinator = hass.data[DOMAIN][mock_config_entry.entry_id] + coordinator = mock_config_entry.runtime_data assert coordinator.update_interval == INTERVAL_FAST diff --git a/tests/components/jvc_projector/test_init.py b/tests/components/jvc_projector/test_init.py index ef9de41ca32..baf088a5dba 100644 --- a/tests/components/jvc_projector/test_init.py +++ b/tests/components/jvc_projector/test_init.py @@ -38,8 +38,6 @@ async def test_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] - async 
def test_config_entry_connect_error( hass: HomeAssistant, From 156948c4960b34e579a2047eb7b570960767c2fa Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Mon, 26 Aug 2024 19:39:09 +0200 Subject: [PATCH 1078/1635] Fix defaults for cloud STT/TTS (#121229) * Fix defaults for cloud STT/TTS * Prefer entity over legacy provider * Remove unrealistic tests * Add tests which show cloud stt/tts entity is preferred --------- Co-authored-by: Erik --- homeassistant/components/stt/__init__.py | 15 ++++- homeassistant/components/tts/__init__.py | 13 +++-- tests/components/stt/test_init.py | 72 ++++++++++++++++++++---- tests/components/tts/common.py | 10 ++-- tests/components/tts/conftest.py | 24 ++++++-- tests/components/tts/test_init.py | 44 +++++++++++++-- 6 files changed, 144 insertions(+), 34 deletions(-) diff --git a/homeassistant/components/stt/__init__.py b/homeassistant/components/stt/__init__.py index 227c92f2b98..f6c38c1e0b7 100644 --- a/homeassistant/components/stt/__init__.py +++ b/homeassistant/components/stt/__init__.py @@ -72,9 +72,18 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @callback def async_default_engine(hass: HomeAssistant) -> str | None: """Return the domain or entity id of the default engine.""" - return next( - iter(hass.states.async_entity_ids(DOMAIN)), None - ) or async_default_provider(hass) + component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] + + default_entity_id: str | None = None + + for entity in component.entities: + if entity.platform and entity.platform.platform_name == "cloud": + return entity.entity_id + + if default_entity_id is None: + default_entity_id = entity.entity_id + + return default_entity_id or async_default_provider(hass) @callback diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 5286b01f67f..583db4472d4 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -137,15 +137,16 @@ def async_default_engine(hass: HomeAssistant) -> str | None: component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - if "cloud" in manager.providers: - return "cloud" + default_entity_id: str | None = None - entity = next(iter(component.entities), None) + for entity in component.entities: + if entity.platform and entity.platform.platform_name == "cloud": + return entity.entity_id - if entity is not None: - return entity.entity_id + if default_entity_id is None: + default_entity_id = entity.entity_id - return next(iter(manager.providers), None) + return default_entity_id or next(iter(manager.providers), None) @callback diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index a42ac44112e..e5d75d3c4a5 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,6 +1,7 @@ """Test STT component setup.""" -from collections.abc import AsyncIterable, Generator +from collections.abc import AsyncIterable, Generator, Iterable +from contextlib import ExitStack from http import HTTPStatus from pathlib import Path from unittest.mock import AsyncMock @@ -122,20 +123,23 @@ class STTFlow(ConfigFlow): """Test flow.""" -@pytest.fixture(name="config_flow_test_domain") -def config_flow_test_domain_fixture() -> str: +@pytest.fixture(name="config_flow_test_domains") +def config_flow_test_domain_fixture() -> Iterable[str]: """Test domain fixture.""" - return TEST_DOMAIN + return (TEST_DOMAIN,) @pytest.fixture(autouse=True) def config_flow_fixture( - 
hass: HomeAssistant, config_flow_test_domain: str + hass: HomeAssistant, config_flow_test_domains: Iterable[str] ) -> Generator[None]: """Mock config flow.""" - mock_platform(hass, f"{config_flow_test_domain}.config_flow") + for domain in config_flow_test_domains: + mock_platform(hass, f"{domain}.config_flow") - with mock_config_flow(config_flow_test_domain, STTFlow): + with ExitStack() as stack: + for domain in config_flow_test_domains: + stack.enter_context(mock_config_flow(domain, STTFlow)) yield @@ -496,21 +500,25 @@ async def test_default_engine_entity( assert async_default_engine(hass) == f"{DOMAIN}.{TEST_DOMAIN}" -@pytest.mark.parametrize("config_flow_test_domain", ["new_test"]) +@pytest.mark.parametrize("config_flow_test_domains", [("new_test",)]) async def test_default_engine_prefer_entity( hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity, mock_provider: MockProvider, - config_flow_test_domain: str, + config_flow_test_domains: str, ) -> None: - """Test async_default_engine.""" + """Test async_default_engine. + + In this tests there's an entity and a legacy provider. + The test asserts async_default_engine returns the entity. + """ mock_provider_entity.url_path = "stt.new_test" mock_provider_entity._attr_name = "New test" await mock_setup(hass, tmp_path, mock_provider) await mock_config_entry_setup( - hass, tmp_path, mock_provider_entity, test_domain=config_flow_test_domain + hass, tmp_path, mock_provider_entity, test_domain=config_flow_test_domains[0] ) await hass.async_block_till_done() @@ -523,6 +531,48 @@ async def test_default_engine_prefer_entity( assert async_default_engine(hass) == "stt.new_test" +@pytest.mark.parametrize( + "config_flow_test_domains", + [ + # Test different setup order to ensure the default is not influenced + # by setup order. + ("cloud", "new_test"), + ("new_test", "cloud"), + ], +) +async def test_default_engine_prefer_cloud_entity( + hass: HomeAssistant, + tmp_path: Path, + mock_provider: MockProvider, + config_flow_test_domains: str, +) -> None: + """Test async_default_engine. + + In this tests there's an entity from domain cloud, an entity from domain new_test + and a legacy provider. + The test asserts async_default_engine returns the entity from domain cloud. 
+ """ + await mock_setup(hass, tmp_path, mock_provider) + for domain in config_flow_test_domains: + entity = MockProviderEntity() + entity.url_path = f"stt.{domain}" + entity._attr_name = f"{domain} STT entity" + await mock_config_entry_setup(hass, tmp_path, entity, test_domain=domain) + await hass.async_block_till_done() + + for domain in config_flow_test_domains: + entity_engine = async_get_speech_to_text_engine( + hass, f"stt.{domain}_stt_entity" + ) + assert entity_engine is not None + assert entity_engine.name == f"{domain} STT entity" + + provider_engine = async_get_speech_to_text_engine(hass, "test") + assert provider_engine is not None + assert provider_engine.name == "test" + assert async_default_engine(hass) == "stt.cloud_stt_entity" + + async def test_get_engine_legacy( hass: HomeAssistant, tmp_path: Path, mock_provider: MockProvider ) -> None: diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 71edf29721f..4acba401fad 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -215,7 +215,9 @@ async def mock_setup( async def mock_config_entry_setup( - hass: HomeAssistant, tts_entity: MockTTSEntity + hass: HomeAssistant, + tts_entity: MockTTSEntity, + test_domain: str = TEST_DOMAIN, ) -> MockConfigEntry: """Set up a test tts platform via config entry.""" @@ -236,7 +238,7 @@ async def mock_config_entry_setup( mock_integration( hass, MockModule( - TEST_DOMAIN, + test_domain, async_setup_entry=async_setup_entry_init, async_unload_entry=async_unload_entry_init, ), @@ -251,9 +253,9 @@ async def mock_config_entry_setup( async_add_entities([tts_entity]) loaded_platform = MockPlatform(async_setup_entry=async_setup_entry_platform) - mock_platform(hass, f"{TEST_DOMAIN}.{TTS_DOMAIN}", loaded_platform) + mock_platform(hass, f"{test_domain}.{TTS_DOMAIN}", loaded_platform) - config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry = MockConfigEntry(domain=test_domain) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index d9a4499f544..91ddd7742af 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -3,7 +3,8 @@ From http://doc.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures """ -from collections.abc import Generator +from collections.abc import Generator, Iterable +from contextlib import ExitStack from pathlib import Path from unittest.mock import MagicMock @@ -81,12 +82,23 @@ class TTSFlow(ConfigFlow): """Test flow.""" -@pytest.fixture(autouse=True) -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") +@pytest.fixture(name="config_flow_test_domains") +def config_flow_test_domain_fixture() -> Iterable[str]: + """Test domain fixture.""" + return (TEST_DOMAIN,) - with mock_config_flow(TEST_DOMAIN, TTSFlow): + +@pytest.fixture(autouse=True) +def config_flow_fixture( + hass: HomeAssistant, config_flow_test_domains: Iterable[str] +) -> Generator[None]: + """Mock config flow.""" + for domain in config_flow_test_domains: + mock_platform(hass, f"{domain}.config_flow") + + with ExitStack() as stack: + for domain in config_flow_test_domains: + stack.enter_context(mock_config_flow(domain, TTSFlow)) yield diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 
55ff4492e80..05c19622e84 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1389,9 +1389,6 @@ def test_resolve_engine(hass: HomeAssistant, setup: str, engine_id: str) -> None ): assert tts.async_resolve_engine(hass, None) is None - with patch.dict(hass.data[tts.DATA_TTS_MANAGER].providers, {"cloud": object()}): - assert tts.async_resolve_engine(hass, None) == "cloud" - @pytest.mark.parametrize( ("setup", "engine_id"), @@ -1845,7 +1842,11 @@ async def test_default_engine_prefer_entity( mock_tts_entity: MockTTSEntity, mock_provider: MockProvider, ) -> None: - """Test async_default_engine.""" + """Test async_default_engine. + + In this tests there's an entity and a legacy provider. + The test asserts async_default_engine returns the entity. + """ mock_tts_entity._attr_name = "New test" await mock_setup(hass, mock_provider) @@ -1857,3 +1858,38 @@ async def test_default_engine_prefer_entity( provider_engine = tts.async_resolve_engine(hass, "test") assert provider_engine == "test" assert tts.async_default_engine(hass) == "tts.new_test" + + +@pytest.mark.parametrize( + "config_flow_test_domains", + [ + # Test different setup order to ensure the default is not influenced + # by setup order. + ("cloud", "new_test"), + ("new_test", "cloud"), + ], +) +async def test_default_engine_prefer_cloud_entity( + hass: HomeAssistant, + mock_provider: MockProvider, + config_flow_test_domains: str, +) -> None: + """Test async_default_engine. + + In this tests there's an entity from domain cloud, an entity from domain new_test + and a legacy provider. + The test asserts async_default_engine returns the entity from domain cloud. + """ + await mock_setup(hass, mock_provider) + for domain in config_flow_test_domains: + entity = MockTTSEntity(DEFAULT_LANG) + entity._attr_name = f"{domain} TTS entity" + await mock_config_entry_setup(hass, entity, test_domain=domain) + await hass.async_block_till_done() + + for domain in config_flow_test_domains: + entity_engine = tts.async_resolve_engine(hass, f"tts.{domain}_tts_entity") + assert entity_engine == f"tts.{domain}_tts_entity" + provider_engine = tts.async_resolve_engine(hass, "test") + assert provider_engine == "test" + assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" From b9aaba0432b797222b533cd598870e97464ade46 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 26 Aug 2024 20:35:54 +0200 Subject: [PATCH 1079/1635] Prevent duplicating constraints during schema migration (#124616) --- .../components/recorder/migration.py | 12 + tests/components/recorder/test_migrate.py | 284 ++++++++++++++++++ 2 files changed, 296 insertions(+) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index a4a5fa87466..7127a576580 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -695,6 +695,18 @@ def _restore_foreign_key_constraints( _LOGGER.info("Did not find a matching constraint for %s.%s", table, column) continue + inspector = sqlalchemy.inspect(engine) + if any( + foreign_key["name"] and foreign_key["constrained_columns"] == [column] + for foreign_key in inspector.get_foreign_keys(table) + ): + _LOGGER.info( + "The database already has a matching constraint for %s.%s", + table, + column, + ) + continue + if TYPE_CHECKING: assert foreign_table is not None assert foreign_column is not None diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 
05d59facf09..0e473b702ef 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -1028,6 +1028,287 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: engine.dispose() +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_restore_foreign_key_constraints_twice(recorder_db_url: str) -> None: + """Test we can drop and then restore foreign keys. + + This is not supported on SQLite + """ + + constraints_to_recreate = ( + ("events", "data_id", "event_data", "data_id"), + ("states", "event_id", None, None), # This won't be found + ("states", "old_state_id", "states", "state_id"), + ) + + db_engine = recorder_db_url.partition("://")[0] + + expected_dropped_constraints = { + "mysql": [ + ( + "events", + "data_id", + { + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + "postgresql": [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": "events_data_id_fkey", + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "comment": None, + "constrained_columns": ["old_state_id"], + "name": "states_old_state_id_fkey", + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + } + + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] + ] + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_1 == expected_dropped_constraints[db_engine] + + with Session(engine) as session: + session_maker = Mock(return_value=session) + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + + # Check we don't find the constrained columns again (they are removed) + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_2 == [] + + # Restore the constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + # Restore the constraints again + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + # Check we do find a single the constrained columns again (they are restored + # only once, even though we called _restore_foreign_key_constraints twice) + 
matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == expected_dropped_constraints[db_engine] + + engine.dispose() + + +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_drop_duplicated_foreign_key_constraints(recorder_db_url: str) -> None: + """Test we can drop and then restore foreign keys. + + This is not supported on SQLite + """ + + constraints_to_recreate = ( + ("events", "data_id", "event_data", "data_id"), + ("states", "event_id", None, None), # This won't be found + ("states", "old_state_id", "states", "state_id"), + ) + + db_engine = recorder_db_url.partition("://")[0] + + expected_dropped_constraints = { + "mysql": [ + ( + "events", + "data_id", + { + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + "postgresql": [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "comment": None, + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + } + + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] + ] + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + # Create a duplicate of the constraints + inspector = Mock(name="inspector") + inspector.get_foreign_keys = Mock(name="get_foreign_keys", return_value=[]) + with ( + patch( + "homeassistant.components.recorder.migration.sqlalchemy.inspect", + return_value=inspector, + ), + Session(engine) as session, + ): + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + _expected_dropped_constraints = [ + _dropped_constraint + for dropped_constraint in expected_dropped_constraints[db_engine] + for _dropped_constraint in (dropped_constraint, dropped_constraint) + ] + assert matching_constraints_1 == _expected_dropped_constraints + + with Session(engine) as session: + session_maker = Mock(return_value=session) + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + + # Check we don't find the constrained columns again (they are removed) + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] 
+ assert matching_constraints_2 == [] + + # Restore the constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + # Check we do find a single the constrained columns again (they are restored + # only once, even though we called _restore_foreign_key_constraints twice) + matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == expected_dropped_constraints[db_engine] + + engine.dispose() + + def test_restore_foreign_key_constraints_with_error( caplog: pytest.LogCaptureFixture, ) -> None: @@ -1045,6 +1326,9 @@ def test_restore_foreign_key_constraints_with_error( instance = Mock() instance.get_session = Mock(return_value=session) engine = Mock() + inspector = Mock(name="inspector") + inspector.get_foreign_keys = Mock(name="get_foreign_keys", return_value=[]) + engine._sa_instance_state = inspector session_maker = Mock(return_value=session) with pytest.raises(InternalError): From 76182c246d8f2c302d1b85f4443aa3d00b9bb9cc Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 08:37:36 -1000 Subject: [PATCH 1080/1635] Auto configure outbound websocket for sleepy shelly RPC devices (#124545) --- homeassistant/components/shelly/const.py | 3 + .../components/shelly/coordinator.py | 82 ++++++-- homeassistant/components/shelly/utils.py | 17 +- tests/components/shelly/conftest.py | 1 + tests/components/shelly/test_config_flow.py | 178 +++++++++++++++++- 5 files changed, 259 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index 1759f4bdd18..fe4108a1f52 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -254,3 +254,6 @@ VIRTUAL_NUMBER_MODE_MAP = { "field": NumberMode.BOX, "slider": NumberMode.SLIDER, } + + +API_WS_URL = "/api/shelly/ws" diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 6286e515727..03dcdedbb6f 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -64,6 +64,7 @@ from .utils import ( get_host, get_http_port, get_rpc_device_wakeup_period, + get_rpc_ws_url, update_device_fw_info, ) @@ -101,6 +102,9 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( self._pending_platforms: list[Platform] | None = None device_name = device.name if device.initialized else entry.title interval_td = timedelta(seconds=update_interval) + # The device has come online at least once. In the case of a sleeping RPC + # device, this means that the device has connected to the WS server at least once. 
+ self._came_online_once = False super().__init__(hass, LOGGER, name=device_name, update_interval=interval_td) self._debounced_reload: Debouncer[Coroutine[Any, Any, None]] = Debouncer( @@ -184,7 +188,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( if not self._pending_platforms: return True - LOGGER.debug("Device %s is online, resuming setup", self.entry.title) + LOGGER.debug("Device %s is online, resuming setup", self.name) platforms = self._pending_platforms self._pending_platforms = None @@ -372,6 +376,7 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]): """Handle device update.""" LOGGER.debug("Shelly %s handle update, type: %s", self.name, update_type) if update_type is BlockUpdateType.ONLINE: + self._came_online_once = True self.entry.async_create_background_task( self.hass, self._async_device_connect_task(), @@ -472,9 +477,24 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): self._event_listeners: list[Callable[[dict[str, Any]], None]] = [] self._ota_event_listeners: list[Callable[[dict[str, Any]], None]] = [] self._input_event_listeners: list[Callable[[dict[str, Any]], None]] = [] - + self._connect_task: asyncio.Task | None = None entry.async_on_unload(entry.add_update_listener(self._async_update_listener)) + async def async_device_online(self) -> None: + """Handle device going online.""" + if not self.sleep_period: + await self.async_request_refresh() + elif not self._came_online_once or not self.device.initialized: + LOGGER.debug( + "Sleepy device %s is online, trying to poll and configure", self.name + ) + # Zeroconf told us the device is online, try to poll + # the device and if possible, set up the outbound + # websocket so the device will send us updates + # instead of relying on polling it fast enough before + # it goes to sleep again + self._async_handle_rpc_device_online() + def update_sleep_period(self) -> bool: """Check device sleep period & update if changed.""" if ( @@ -598,15 +618,15 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_disconnected(self, reconnect: bool) -> None: """Handle device disconnected.""" - # Sleeping devices send data and disconnect - # There are no disconnect events for sleeping devices - if self.sleep_period: - return - async with self._connection_lock: if not self.connected: # Already disconnected return self.connected = False + # Sleeping devices send data and disconnect + # There are no disconnect events for sleeping devices + # but we do need to make sure self.connected is False + if self.sleep_period: + return self._async_run_disconnected_events() # Try to reconnect right away if triggered by disconnect event if reconnect: @@ -645,6 +665,21 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): """ if not self.sleep_period: await self._async_connect_ble_scanner() + else: + await self._async_setup_outbound_websocket() + + async def _async_setup_outbound_websocket(self) -> None: + """Set up outbound websocket if it is not enabled.""" + config = self.device.config + if ( + (ws_config := config.get("ws")) + and (not ws_config["server"] or not ws_config["enable"]) + and (ws_url := get_rpc_ws_url(self.hass)) + ): + LOGGER.debug( + "Setting up outbound websocket for device %s - %s", self.name, ws_url + ) + await self.device.update_outbound_websocket(ws_url) async def _async_connect_ble_scanner(self) -> None: """Connect BLE scanner.""" @@ -662,6 +697,21 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): await 
async_connect_scanner(self.hass, self, ble_scanner_mode) ) + @callback + def _async_handle_rpc_device_online(self) -> None: + """Handle device going online.""" + if self.device.connected or ( + self._connect_task and not self._connect_task.done() + ): + LOGGER.debug("Device %s already connected/connecting", self.name) + return + self._connect_task = self.entry.async_create_background_task( + self.hass, + self._async_device_connect_task(), + "rpc device online", + eager_start=True, + ) + @callback def _async_handle_update( self, device_: RpcDevice, update_type: RpcUpdateType @@ -669,15 +719,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): """Handle device update.""" LOGGER.debug("Shelly %s handle update, type: %s", self.name, update_type) if update_type is RpcUpdateType.ONLINE: - if self.device.connected: - LOGGER.debug("Device %s already connected", self.name) - return - self.entry.async_create_background_task( - self.hass, - self._async_device_connect_task(), - "rpc device online", - eager_start=True, - ) + self._came_online_once = True + self._async_handle_rpc_device_online() elif update_type is RpcUpdateType.INITIALIZED: self.entry.async_create_background_task( self.hass, self._async_connected(), "rpc device init", eager_start=True @@ -798,14 +841,13 @@ def get_rpc_coordinator_by_device_id( async def async_reconnect_soon(hass: HomeAssistant, entry: ShellyConfigEntry) -> None: """Try to reconnect soon.""" if ( - not entry.data.get(CONF_SLEEP_PERIOD) - and not hass.is_stopping - and entry.state == ConfigEntryState.LOADED + not hass.is_stopping + and entry.state is ConfigEntryState.LOADED and (coordinator := entry.runtime_data.rpc) ): entry.async_create_background_task( hass, - coordinator.async_request_refresh(), + coordinator.async_device_online(), "reconnect soon", eager_start=True, ) diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index 339f6781171..d0a8a1230c5 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -23,6 +23,7 @@ from aioshelly.const import ( RPC_GENERATIONS, ) from aioshelly.rpc_device import RpcDevice, WsServer +from yarl import URL from homeassistant.components import network from homeassistant.components.http import HomeAssistantView @@ -36,9 +37,11 @@ from homeassistant.helpers import ( singleton, ) from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC +from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.util.dt import utcnow from .const import ( + API_WS_URL, BASIC_INPUTS_EVENTS_TYPES, CONF_COAP_PORT, CONF_GEN, @@ -254,7 +257,7 @@ class ShellyReceiver(HomeAssistantView): """Handle pushes from Shelly Gen2 devices.""" requires_auth = False - url = "/api/shelly/ws" + url = API_WS_URL name = "api:shelly:ws" def __init__(self, ws_server: WsServer) -> None: @@ -571,3 +574,15 @@ def async_remove_orphaned_virtual_entities( if orphaned_entities: async_remove_shelly_rpc_entities(hass, platform, mac, orphaned_entities) + + +def get_rpc_ws_url(hass: HomeAssistant) -> str | None: + """Return the RPC websocket URL.""" + try: + raw_url = get_url(hass, prefer_external=False, allow_cloud=False) + except NoURLAvailableError: + LOGGER.debug("URL not available, skipping outbound websocket setup") + return None + url = URL(raw_url) + ws_url = url.with_scheme("wss" if url.scheme == "https" else "ws") + return str(ws_url.joinpath(API_WS_URL.removeprefix("/"))) diff --git a/tests/components/shelly/conftest.py 
b/tests/components/shelly/conftest.py index a2629d21362..34e4ce1379e 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -186,6 +186,7 @@ MOCK_CONFIG = { "device": {"name": "Test name"}, }, "wifi": {"sta": {"enable": True}, "sta1": {"enable": False}}, + "ws": {"enable": False, "server": None}, } MOCK_SHELLY_COAP = { diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index 0c574a33e0c..c0c089f469a 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -4,7 +4,7 @@ from dataclasses import replace from datetime import timedelta from ipaddress import ip_address from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock, call, patch from aioshelly.const import DEFAULT_HTTP_PORT, MODEL_1, MODEL_PLUS_2PM from aioshelly.exceptions import ( @@ -1153,6 +1153,182 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( assert "device did not update" not in caplog.text +async def test_zeroconf_sleeping_device_attempts_configure( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test zeroconf discovery configures a sleeping device outbound websocket.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + entry = MockConfigEntry( + domain="shelly", + unique_id="AABBCCDDEEFF", + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + mock_rpc_device.mock_disconnected() + await hass.async_block_till_done() + + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert "online, resuming setup" in caplog.text + assert len(mock_rpc_device.initialize.mock_calls) == 1 + + with patch( + "homeassistant.components.shelly.config_flow.get_info", + return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=DISCOVERY_INFO, + context={"source": config_entries.SOURCE_ZEROCONF}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + monkeypatch.setattr(mock_rpc_device, "connected", True) + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_initialized() + async_fire_time_changed( + hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) + ) + await hass.async_block_till_done() + assert "device did not update" not in caplog.text + + monkeypatch.setattr(mock_rpc_device, "connected", False) + mock_rpc_device.mock_disconnected() + assert mock_rpc_device.update_outbound_websocket.mock_calls == [ + call("ws://10.10.10.10:8123/api/shelly/ws") + ] + + +async def test_zeroconf_sleeping_device_attempts_configure_ws_disabled( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test zeroconf discovery configures a sleeping device outbound websocket when its disabled.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", 
False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + monkeypatch.setitem( + mock_rpc_device.config, "ws", {"enable": False, "server": "ws://oldha"} + ) + entry = MockConfigEntry( + domain="shelly", + unique_id="AABBCCDDEEFF", + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + mock_rpc_device.mock_disconnected() + await hass.async_block_till_done() + + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert "online, resuming setup" in caplog.text + assert len(mock_rpc_device.initialize.mock_calls) == 1 + + with patch( + "homeassistant.components.shelly.config_flow.get_info", + return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=DISCOVERY_INFO, + context={"source": config_entries.SOURCE_ZEROCONF}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + monkeypatch.setattr(mock_rpc_device, "connected", True) + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_initialized() + async_fire_time_changed( + hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) + ) + await hass.async_block_till_done() + assert "device did not update" not in caplog.text + + monkeypatch.setattr(mock_rpc_device, "connected", False) + mock_rpc_device.mock_disconnected() + assert mock_rpc_device.update_outbound_websocket.mock_calls == [ + call("ws://10.10.10.10:8123/api/shelly/ws") + ] + + +async def test_zeroconf_sleeping_device_attempts_configure_no_url_available( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test zeroconf discovery for sleeping device with no hass url.""" + hass.config.internal_url = None + hass.config.external_url = None + hass.config.api = None + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + entry = MockConfigEntry( + domain="shelly", + unique_id="AABBCCDDEEFF", + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + mock_rpc_device.mock_disconnected() + await hass.async_block_till_done() + + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert "online, resuming setup" in caplog.text + assert len(mock_rpc_device.initialize.mock_calls) == 1 + + with patch( + "homeassistant.components.shelly.config_flow.get_info", + return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=DISCOVERY_INFO, + context={"source": config_entries.SOURCE_ZEROCONF}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + monkeypatch.setattr(mock_rpc_device, "connected", True) + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_initialized() + async_fire_time_changed( + hass, 
dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) + ) + await hass.async_block_till_done() + assert "device did not update" not in caplog.text + + monkeypatch.setattr(mock_rpc_device, "connected", False) + mock_rpc_device.mock_disconnected() + # No url available so no attempt to configure the device + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + async def test_sleeping_device_gen2_with_new_firmware( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: From d8fe3c53774ad7a1a81f611fbf6240ed507a39a2 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 26 Aug 2024 20:59:18 +0200 Subject: [PATCH 1081/1635] Add missing units for `xpercent`, `xfreq` and `xcounts` Shelly sensors (#124288) --- homeassistant/components/shelly/sensor.py | 3 ++ tests/components/shelly/conftest.py | 16 +++++- tests/components/shelly/test_sensor.py | 63 +++++++++++++++++++++-- 3 files changed, 76 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 6cabee6ccba..0d782f46c24 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -1082,6 +1082,7 @@ RPC_SENSORS: Final = { or config[key]["enable"] is False or status[key].get("xpercent") is None ), + unit=lambda config: config["xpercent"]["unit"] or None, ), "pulse_counter": RpcSensorDescription( key="input", @@ -1104,6 +1105,7 @@ RPC_SENSORS: Final = { or config[key]["enable"] is False or status[key]["counts"].get("xtotal") is None ), + unit=lambda config: config["xcounts"]["unit"] or None, ), "counter_frequency": RpcSensorDescription( key="input", @@ -1124,6 +1126,7 @@ RPC_SENSORS: Final = { or config[key]["enable"] is False or status[key].get("xfreq") is None ), + unit=lambda config: config["xfreq"]["unit"] or None, ), "text": RpcSensorDescription( key="text", diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index 34e4ce1379e..a983cbbcda9 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -166,8 +166,20 @@ MOCK_BLOCKS = [ MOCK_CONFIG = { "input:0": {"id": 0, "name": "Test name input 0", "type": "button"}, - "input:1": {"id": 1, "type": "analog", "enable": True}, - "input:2": {"id": 2, "name": "Gas", "type": "count", "enable": True}, + "input:1": { + "id": 1, + "type": "analog", + "enable": True, + "xpercent": {"expr": None, "unit": None}, + }, + "input:2": { + "id": 2, + "name": "Gas", + "type": "count", + "enable": True, + "xcounts": {"expr": None, "unit": None}, + "xfreq": {"expr": None, "unit": None}, + }, "light:0": {"name": "test light_0"}, "light:1": {"name": "test light_1"}, "light:2": {"name": "test light_2"}, diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 6b665b8fea2..ef8a609998a 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -706,10 +706,27 @@ async def test_block_sleeping_update_entity_service( ) +@pytest.mark.parametrize( + ("original_unit", "expected_unit"), + [ + ("m/s", "m/s"), + (None, None), + ("", None), + ], +) async def test_rpc_analog_input_sensors( - hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + original_unit: str | None, + expected_unit: str | None, ) -> None: """Test RPC analog input xpercent sensor.""" + config = 
deepcopy(mock_rpc_device.config) + config["input:1"]["xpercent"] = {"expr": "x*0.2995", "unit": original_unit} + monkeypatch.setattr(mock_rpc_device, "config", config) + await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" @@ -720,7 +737,10 @@ async def test_rpc_analog_input_sensors( assert entry.unique_id == "123456789ABC-input:1-analoginput" entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" - assert hass.states.get(entity_id).state == "8.9" + state = hass.states.get(entity_id) + assert state + assert state.state == "8.9" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit entry = entity_registry.async_get(entity_id) assert entry @@ -764,13 +784,27 @@ async def test_rpc_disabled_xpercent( assert hass.states.get(entity_id) is None +@pytest.mark.parametrize( + ("original_unit", "expected_unit"), + [ + ("l/h", "l/h"), + (None, None), + ("", None), + ], +) async def test_rpc_pulse_counter_sensors( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, monkeypatch: pytest.MonkeyPatch, + original_unit: str | None, + expected_unit: str | None, ) -> None: """Test RPC counter sensor.""" + config = deepcopy(mock_rpc_device.config) + config["input:2"]["xcounts"] = {"expr": "x/10", "unit": original_unit} + monkeypatch.setattr(mock_rpc_device, "config", config) + await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" @@ -784,7 +818,10 @@ async def test_rpc_pulse_counter_sensors( assert entry.unique_id == "123456789ABC-input:2-pulse_counter" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" - assert hass.states.get(entity_id).state == "561.74" + state = hass.states.get(entity_id) + assert state + assert state.state == "561.74" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit entry = entity_registry.async_get(entity_id) assert entry @@ -828,12 +865,27 @@ async def test_rpc_disabled_xtotal_counter( assert hass.states.get(entity_id) is None +@pytest.mark.parametrize( + ("original_unit", "expected_unit"), + [ + ("W", "W"), + (None, None), + ("", None), + ], +) async def test_rpc_pulse_counter_frequency_sensors( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + original_unit: str | None, + expected_unit: str | None, ) -> None: """Test RPC counter sensor.""" + config = deepcopy(mock_rpc_device.config) + config["input:2"]["xfreq"] = {"expr": "x**2", "unit": original_unit} + monkeypatch.setattr(mock_rpc_device, "config", config) + await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency" @@ -847,7 +899,10 @@ async def test_rpc_pulse_counter_frequency_sensors( assert entry.unique_id == "123456789ABC-input:2-counter_frequency" entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - assert hass.states.get(entity_id).state == "6.11" + state = hass.states.get(entity_id) + assert state + assert state.state == "6.11" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit entry = entity_registry.async_get(entity_id) assert entry From 7334fb01250e07f771e3319061af9be06e20e032 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Mon, 26 Aug 2024 21:12:32 +0200 Subject: [PATCH 1082/1635] Add Reolink chime play action (#123245) * Add chime play service * fix supported_feature * finalize * add tests * Adjust to device service * fix issue * Add tests * actions -> services * fix styling * Use conftest fixture for test_chime * Update tests/components/reolink/test_services.py 
Co-authored-by: Joost Lekkerkerker * use ATTR_RINGTONE and rename chime_play to play_chime * Add translatable exceptions * fix styling * Remove option to use entity_id * fixes * Fix translations * fix * fix translation key * remove translation key * use callback for async_setup_services * fix styling * Add test_play_chime_service_unloaded --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/reolink/__init__.py | 44 +++---- homeassistant/components/reolink/icons.json | 3 +- homeassistant/components/reolink/services.py | 80 ++++++++++++ .../components/reolink/services.yaml | 27 ++++ homeassistant/components/reolink/strings.json | 38 ++++++ homeassistant/components/reolink/util.py | 37 +++++- tests/components/reolink/test_select.py | 2 + tests/components/reolink/test_services.py | 116 ++++++++++++++++++ 8 files changed, 315 insertions(+), 32 deletions(-) create mode 100644 homeassistant/components/reolink/services.py create mode 100644 tests/components/reolink/test_services.py diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index a319024633c..f64c6bd9cf3 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -from dataclasses import dataclass from datetime import timedelta import logging @@ -14,13 +13,20 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost +from .services import async_setup_services +from .util import ReolinkData, get_device_uid_and_ch _LOGGER = logging.getLogger(__name__) @@ -40,14 +46,14 @@ DEVICE_UPDATE_INTERVAL = timedelta(seconds=60) FIRMWARE_UPDATE_INTERVAL = timedelta(hours=12) NUM_CRED_ERRORS = 3 +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -@dataclass -class ReolinkData: - """Data for the Reolink integration.""" - host: ReolinkHost - device_coordinator: DataUpdateCoordinator[None] - firmware_coordinator: DataUpdateCoordinator[None] +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + """Set up Reolink shared code.""" + + async_setup_services(hass) + return True async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: @@ -265,28 +271,6 @@ async def async_remove_config_entry_device( return False -def get_device_uid_and_ch( - device: dr.DeviceEntry, host: ReolinkHost -) -> tuple[list[str], int | None, bool]: - """Get the channel and the split device_uid from a reolink DeviceEntry.""" - device_uid = [ - dev_id[1].split("_") for dev_id in device.identifiers if dev_id[0] == DOMAIN - ][0] - - is_chime = False - if len(device_uid) < 2: - # NVR itself - ch = None - elif device_uid[1].startswith("ch") and len(device_uid[1]) <= 5: - ch = int(device_uid[1][2:]) - elif device_uid[1].startswith("chime"): - ch = 
int(device_uid[1][5:]) - is_chime = True - else: - ch = host.api.channel_for_uid(device_uid[1]) - return (device_uid, ch, is_chime) - - def migrate_entity_ids( hass: HomeAssistant, config_entry_id: str, host: ReolinkHost ) -> None: diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 7ca4c2d7f2b..f7729789c4e 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -300,6 +300,7 @@ } }, "services": { - "ptz_move": "mdi:pan" + "ptz_move": "mdi:pan", + "play_chime": "mdi:music" } } diff --git a/homeassistant/components/reolink/services.py b/homeassistant/components/reolink/services.py new file mode 100644 index 00000000000..d5cb402c74b --- /dev/null +++ b/homeassistant/components/reolink/services.py @@ -0,0 +1,80 @@ +"""Reolink additional services.""" + +from __future__ import annotations + +from reolink_aio.api import Chime +from reolink_aio.enums import ChimeToneEnum +from reolink_aio.exceptions import InvalidParameterError, ReolinkError +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DEVICE_ID +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN +from .host import ReolinkHost +from .util import get_device_uid_and_ch + +ATTR_RINGTONE = "ringtone" + + +@callback +def async_setup_services(hass: HomeAssistant) -> None: + """Set up Reolink services.""" + + async def async_play_chime(service_call: ServiceCall) -> None: + """Play a ringtone.""" + service_data = service_call.data + device_registry = dr.async_get(hass) + + for device_id in service_data[ATTR_DEVICE_ID]: + config_entry = None + device = device_registry.async_get(device_id) + if device is not None: + for entry_id in device.config_entries: + config_entry = hass.config_entries.async_get_entry(entry_id) + if config_entry is not None and config_entry.domain == DOMAIN: + break + if ( + config_entry is None + or device is None + or config_entry.state == ConfigEntryState.NOT_LOADED + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="service_entry_ex", + translation_placeholders={"service_name": "play_chime"}, + ) + host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host + (device_uid, chime_id, is_chime) = get_device_uid_and_ch(device, host) + chime: Chime | None = host.api.chime(chime_id) + if not is_chime or chime is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="service_not_chime", + translation_placeholders={"device_name": str(device.name)}, + ) + + ringtone = service_data[ATTR_RINGTONE] + try: + await chime.play(ChimeToneEnum[ringtone].value) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err + + hass.services.async_register( + DOMAIN, + "play_chime", + async_play_chime, + schema=vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): list[str], + vol.Required(ATTR_RINGTONE): vol.In( + [method.name for method in ChimeToneEnum][1:] + ), + } + ), + ) diff --git a/homeassistant/components/reolink/services.yaml b/homeassistant/components/reolink/services.yaml index 42b9af34eb0..fe7fba9cdc7 100644 --- a/homeassistant/components/reolink/services.yaml +++ b/homeassistant/components/reolink/services.yaml @@ 
-16,3 +16,30 @@ ptz_move: min: 1 max: 64 step: 1 + +play_chime: + fields: + device_id: + required: true + selector: + device: + multiple: true + filter: + integration: reolink + model: "Reolink Chime" + ringtone: + required: true + selector: + select: + translation_key: ringtone + options: + - citybird + - originaltune + - pianokey + - loop + - attraction + - hophop + - goodday + - operetta + - moonlight + - waybackhome diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index cad09f71562..3710c3743fa 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -50,6 +50,14 @@ } } }, + "exceptions": { + "service_entry_ex": { + "message": "Reolink {service_name} error: config entry not found or not loaded" + }, + "service_not_chime": { + "message": "Reolink play_chime error: {device_name} is not a chime" + } + }, "issues": { "https_webhook": { "title": "Reolink webhook URL uses HTTPS (SSL)", @@ -86,6 +94,36 @@ "description": "PTZ move speed." } } + }, + "play_chime": { + "name": "Play chime", + "description": "Play a ringtone on a chime.", + "fields": { + "device_id": { + "name": "Target chime", + "description": "The chime to play the ringtone on." + }, + "ringtone": { + "name": "Ringtone", + "description": "Ringtone to play." + } + } + } + }, + "selector": { + "ringtone": { + "options": { + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } } }, "entity": { diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index cf4659224e3..305579e35cb 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -2,11 +2,24 @@ from __future__ import annotations +from dataclasses import dataclass + from homeassistant import config_entries from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . 
import ReolinkData from .const import DOMAIN +from .host import ReolinkHost + + +@dataclass +class ReolinkData: + """Data for the Reolink integration.""" + + host: ReolinkHost + device_coordinator: DataUpdateCoordinator[None] + firmware_coordinator: DataUpdateCoordinator[None] def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) -> bool: @@ -19,3 +32,25 @@ def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) and config_entry.state == config_entries.ConfigEntryState.LOADED and reolink_data.device_coordinator.last_update_success ) + + +def get_device_uid_and_ch( + device: dr.DeviceEntry, host: ReolinkHost +) -> tuple[list[str], int | None, bool]: + """Get the channel and the split device_uid from a reolink DeviceEntry.""" + device_uid = [ + dev_id[1].split("_") for dev_id in device.identifiers if dev_id[0] == DOMAIN + ][0] + + is_chime = False + if len(device_uid) < 2: + # NVR itself + ch = None + elif device_uid[1].startswith("ch") and len(device_uid[1]) <= 5: + ch = int(device_uid[1][2:]) + elif device_uid[1].startswith("chime"): + ch = int(device_uid[1][5:]) + is_chime = True + else: + ch = host.api.channel_for_uid(device_uid[1]) + return (device_uid, ch, is_chime) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 082a543e392..0534f36f4c5 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -118,6 +118,7 @@ async def test_chime_select( entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" assert hass.states.get(entity_id).state == "pianokey" + # Test selecting chime ringtone option test_chime.set_tone = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, @@ -145,6 +146,7 @@ async def test_chime_select( blocking=True, ) + # Test unavailable test_chime.event_info = {} freezer.tick(DEVICE_UPDATE_INTERVAL) async_fire_time_changed(hass) diff --git a/tests/components/reolink/test_services.py b/tests/components/reolink/test_services.py new file mode 100644 index 00000000000..a4b7d8f0da4 --- /dev/null +++ b/tests/components/reolink/test_services.py @@ -0,0 +1,116 @@ +"""Test the Reolink services.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.reolink.const import DOMAIN as REOLINK_DOMAIN +from homeassistant.components.reolink.services import ATTR_RINGTONE +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DEVICE_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_play_chime_service_entity( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime play service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + entity = entity_registry.async_get(entity_id) + assert entity is not None + device_id = entity.device_id + + # Test chime play service 
with device + test_chime.play = AsyncMock() + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + test_chime.play.assert_called_once() + + # Test errors + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: ["invalid_id"], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + test_chime.play = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + test_chime.play = AsyncMock(side_effect=InvalidParameterError("Test error")) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + reolink_connect.chime.return_value = None + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + +async def test_play_chime_service_unloaded( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime play service when config entry is unloaded.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + entity = entity_registry.async_get(entity_id) + assert entity is not None + device_id = entity.device_id + + # Unload the config entry + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + # Test chime play service + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) From b960ebeb8bbf791a8072e8503b417474fec134ac Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 26 Aug 2024 21:17:38 +0200 Subject: [PATCH 1083/1635] Add WS outbound config to Shelly diagnostics (#124654) Add WS Outbound config to diagnostics --- .../components/shelly/diagnostics.py | 7 +++++ tests/components/shelly/test_diagnostics.py | 30 ++++++++++++++++++- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/shelly/diagnostics.py b/homeassistant/components/shelly/diagnostics.py index e70b76a7c00..a5fe1f5b6c0 100644 --- a/homeassistant/components/shelly/diagnostics.py +++ b/homeassistant/components/shelly/diagnostics.py @@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import format_mac from .coordinator import ShellyConfigEntry +from .utils import get_rpc_ws_url TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} @@ -73,6 +74,12 @@ async def async_get_config_entry_diagnostics( device_settings = { k: v for k, v in rpc_coordinator.device.config.items() if k in ["cloud"] } + ws_config = rpc_coordinator.device.config["ws"] + device_settings["ws_outbound_enabled"] = ws_config["enable"] + if 
ws_config["enable"]: + device_settings["ws_outbound_server_valid"] = bool( + ws_config["server"] == get_rpc_ws_url(hass) + ) device_status = { k: v for k, v in rpc_coordinator.device.status.items() diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index 4fc8ea6ca8f..395c7ccfeaf 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -1,5 +1,6 @@ """Tests for Shelly diagnostics platform.""" +from copy import deepcopy from unittest.mock import ANY, Mock, PropertyMock from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT @@ -151,7 +152,7 @@ async def test_rpc_config_entry_diagnostics( "model": MODEL_25, "sw_version": "some fw string", }, - "device_settings": {}, + "device_settings": {"ws_outbound_enabled": False}, "device_status": { "sys": { "available_updates": { @@ -164,3 +165,30 @@ async def test_rpc_config_entry_diagnostics( }, "last_error": "DeviceConnectionError()", } + + +@pytest.mark.parametrize( + ("ws_outbound_server", "ws_outbound_server_valid"), + [("ws://10.10.10.10:8123/api/shelly/ws", True), ("wrong_url", False)], +) +async def test_rpc_config_entry_diagnostics_ws_outbound( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + ws_outbound_server: str, + ws_outbound_server_valid: bool, +) -> None: + """Test config entry diagnostics for rpc device with websocket outbound.""" + config = deepcopy(mock_rpc_device.config) + config["ws"] = {"enable": True, "server": ws_outbound_server} + monkeypatch.setattr(mock_rpc_device, "config", config) + + entry = await init_integration(hass, 2, sleep_period=60) + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert ( + result["device_settings"]["ws_outbound_server_valid"] + == ws_outbound_server_valid + ) From 0fe939cd7cebed6682fb16ec61d97547ad07c4ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexey=20ALERT=20Rubash=D1=91ff?= Date: Mon, 26 Aug 2024 22:31:07 +0300 Subject: [PATCH 1084/1635] Update overkiz Atlantic Water Heater operation mode switching (#124619) * conventional operation state usage * MartinHjelmare indentation request * Manual Mode binary sensor * Removed usage of unconventional operation states * Removed usage of unconventional operation state * STATE_OFF operation mode support --- .../components/overkiz/binary_sensor.py | 18 +++++++--- ...stic_hot_water_production_mlb_component.py | 35 ++++++++----------- 2 files changed, 28 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 8ea86e03e8c..57df3cd4e09 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -115,14 +115,24 @@ BINARY_SENSOR_DESCRIPTIONS: list[OverkizBinarySensorDescription] = [ OverkizBinarySensorDescription( key=OverkizState.MODBUSLINK_DHW_ABSENCE_MODE, name="Absence mode", - value_fn=lambda state: state - in (OverkizCommandParam.ON, OverkizCommandParam.PROG), + value_fn=( + lambda state: state in (OverkizCommandParam.ON, OverkizCommandParam.PROG) + ), ), OverkizBinarySensorDescription( key=OverkizState.MODBUSLINK_DHW_BOOST_MODE, name="Boost mode", - value_fn=lambda state: state - in (OverkizCommandParam.ON, OverkizCommandParam.PROG), + value_fn=( + lambda state: state in (OverkizCommandParam.ON, OverkizCommandParam.PROG) + ), + ), + OverkizBinarySensorDescription( + 
key=OverkizState.MODBUSLINK_DHW_MODE, + name="Manual mode", + value_fn=( + lambda state: state + in (OverkizCommandParam.MANUAL, OverkizCommandParam.MANUAL_ECO_INACTIVE) + ), ), ] diff --git a/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py b/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py index de995a2bd1a..0f57d13433b 100644 --- a/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py +++ b/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py @@ -6,6 +6,7 @@ from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.water_heater import ( STATE_ECO, + STATE_ELECTRIC, STATE_OFF, STATE_PERFORMANCE, WaterHeaterEntity, @@ -28,9 +29,10 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE | WaterHeaterEntityFeature.ON_OFF ) _attr_operation_list = [ - OverkizCommandParam.PERFORMANCE, - OverkizCommandParam.ECO, - OverkizCommandParam.MANUAL, + STATE_ECO, + STATE_OFF, + STATE_PERFORMANCE, + STATE_ELECTRIC, ] def __init__( @@ -116,20 +118,20 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE cast(str, self.executor.select_state(OverkizState.MODBUSLINK_DHW_MODE)) == OverkizCommandParam.MANUAL_ECO_INACTIVE ): - return OverkizCommandParam.MANUAL + # STATE_ELECTRIC is a substitution for OverkizCommandParam.MANUAL + # to keep up with the conventional state usage only + # https://developers.home-assistant.io/docs/core/entity/water-heater/#states + return STATE_ELECTRIC return STATE_OFF async def async_set_operation_mode(self, operation_mode: str) -> None: """Set new operation mode.""" - if operation_mode in (STATE_PERFORMANCE, OverkizCommandParam.BOOST): + if operation_mode == STATE_PERFORMANCE: if self.is_away_mode_on: await self.async_turn_away_mode_off() await self.async_turn_boost_mode_on() - elif operation_mode in ( - OverkizCommandParam.ECO, - OverkizCommandParam.MANUAL_ECO_ACTIVE, - ): + elif operation_mode == STATE_ECO: if self.is_away_mode_on: await self.async_turn_away_mode_off() if self.is_boost_mode_on: @@ -137,10 +139,7 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE await self.executor.async_execute_command( OverkizCommand.SET_DHW_MODE, OverkizCommandParam.AUTO_MODE ) - elif operation_mode in ( - OverkizCommandParam.MANUAL, - OverkizCommandParam.MANUAL_ECO_INACTIVE, - ): + elif operation_mode == STATE_ELECTRIC: if self.is_away_mode_on: await self.async_turn_away_mode_off() if self.is_boost_mode_on: @@ -148,14 +147,8 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE await self.executor.async_execute_command( OverkizCommand.SET_DHW_MODE, OverkizCommandParam.MANUAL_ECO_INACTIVE ) - else: - if self.is_away_mode_on: - await self.async_turn_away_mode_off() - if self.is_boost_mode_on: - await self.async_turn_boost_mode_off() - await self.executor.async_execute_command( - OverkizCommand.SET_DHW_MODE, operation_mode - ) + elif operation_mode == STATE_OFF: + await self.async_turn_away_mode_on() async def async_turn_away_mode_on(self) -> None: """Turn away mode on.""" From 37019d33fd40e02fc1614538f97925c4cc2e241b Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Tue, 27 Aug 2024 10:29:54 +1200 Subject: [PATCH 1085/1635] Bump aioesphomeapi to 
25.2.1 (#124659) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 6f2284d5a4b..454b547cdf4 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==25.2.0", + "aioesphomeapi==25.2.1", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index f1aa4e6990f..6320f05fffb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.2.0 +aioesphomeapi==25.2.1 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7154046e3a6..34b482aa39f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.2.0 +aioesphomeapi==25.2.1 # homeassistant.components.flo aioflo==2021.11.0 From 2fe19c04b98e1c151b46fb7baffe03fa94758056 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Tue, 27 Aug 2024 01:57:25 +0200 Subject: [PATCH 1086/1635] Bump bimmer_connected to 0.16.2 (#124651) --- homeassistant/components/bmw_connected_drive/manifest.json | 2 +- homeassistant/components/bmw_connected_drive/strings.json | 3 ++- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../bmw_connected_drive/snapshots/test_sensor.ambr | 6 ++++++ tests/components/bmw_connected_drive/test_button.py | 2 +- 6 files changed, 12 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 304973b816f..7ee91388d29 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], "quality_scale": "platinum", - "requirements": ["bimmer-connected[china]==0.16.1"] + "requirements": ["bimmer-connected[china]==0.16.2"] } diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 8121ab6f65f..c59900ef4f9 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -148,7 +148,8 @@ "cooling": "Cooling", "heating": "Heating", "inactive": "Inactive", - "standby": "Standby" + "standby": "Standby", + "ventilation": "Ventilation" } }, "front_left_current_pressure": { diff --git a/requirements_all.txt b/requirements_all.txt index 6320f05fffb..99229c45f47 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -559,7 +559,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.1 +bimmer-connected[china]==0.16.2 # homeassistant.components.bizkaibus bizkaibus==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 34b482aa39f..d5dd61bdf6e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -493,7 
+493,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.1 +bimmer-connected[china]==0.16.2 # homeassistant.components.eq3btsmart # homeassistant.components.esphome diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 8a26acd1040..2182ff2bb48 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -929,6 +929,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -968,6 +969,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -1933,6 +1935,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -1972,6 +1975,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -2665,6 +2669,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -2704,6 +2709,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), diff --git a/tests/components/bmw_connected_drive/test_button.py b/tests/components/bmw_connected_drive/test_button.py index 99cabc900fa..88c7990cde9 100644 --- a/tests/components/bmw_connected_drive/test_button.py +++ b/tests/components/bmw_connected_drive/test_button.py @@ -165,7 +165,7 @@ async def test_service_call_success_state_change( ( "button.i4_edrive40_find_vehicle", "device_tracker.i4_edrive40", - {"latitude": 123.456, "longitude": 34.5678, "direction": 121}, + {"latitude": 12.345, "longitude": 34.5678, "direction": 121}, {"latitude": 48.177334, "longitude": 11.556274, "direction": 180}, ), ], From 51fd8e12885d41365bb92bbe7b16b04a68ff4661 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A5le=20Stor=C3=B8=20Hauknes?= Date: Tue, 27 Aug 2024 01:59:28 +0200 Subject: [PATCH 1087/1635] Bump airthings-ble to 0.9.1 (#124658) --- homeassistant/components/airthings_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 1 - 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/airthings_ble/manifest.json b/homeassistant/components/airthings_ble/manifest.json index b86bc314819..6c00fe79e7b 100644 --- a/homeassistant/components/airthings_ble/manifest.json +++ b/homeassistant/components/airthings_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/airthings_ble", "iot_class": "local_polling", - "requirements": ["airthings-ble==0.9.0"] + "requirements": ["airthings-ble==0.9.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 99229c45f47..fa29a830768 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -416,7 +416,7 @@ airgradient==0.8.0 airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.0 +airthings-ble==0.9.1 # homeassistant.components.airthings airthings-cloud==0.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d5dd61bdf6e..c233fae27a0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -398,7 +398,7 @@ airgradient==0.8.0 airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.0 +airthings-ble==0.9.1 # homeassistant.components.airthings airthings-cloud==0.2.0 diff --git a/script/licenses.py b/script/licenses.py index 8a10b9a7530..7bcbfb6179a 
100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -129,7 +129,6 @@ EXCEPTIONS = { "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 "aiovodafone", # https://github.com/chemelli74/aiovodafone/pull/131 - "airthings-ble", # https://github.com/Airthings/airthings-ble/pull/42 "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 "asyncio", # PSF License "chacha20poly1305", # LGPL From 74a12bb8029f856948a6ef80c867ad091f0f4d48 Mon Sep 17 00:00:00 2001 From: Yuxin Wang Date: Tue, 27 Aug 2024 00:50:15 -0400 Subject: [PATCH 1088/1635] Replace LASTSTEST with LAST_S_TEST (#124668) --- homeassistant/components/apcupsd/const.py | 2 +- homeassistant/components/apcupsd/sensor.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/apcupsd/const.py b/homeassistant/components/apcupsd/const.py index 56bf229579d..974c860afb8 100644 --- a/homeassistant/components/apcupsd/const.py +++ b/homeassistant/components/apcupsd/const.py @@ -6,4 +6,4 @@ DOMAIN: Final = "apcupsd" CONNECTION_TIMEOUT: int = 10 # Field name of last self test retrieved from apcupsd. -LASTSTEST: Final = "laststest" +LAST_S_TEST: Final = "laststest" diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index f2b5943bdf0..d4bbfb148e5 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -25,7 +25,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, LASTSTEST +from .const import DOMAIN, LAST_S_TEST from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 @@ -156,8 +156,8 @@ SENSORS: dict[str, SensorEntityDescription] = { device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), - LASTSTEST: SensorEntityDescription( - key=LASTSTEST, + LAST_S_TEST: SensorEntityDescription( + key=LAST_S_TEST, translation_key="last_self_test", ), "lastxfer": SensorEntityDescription( @@ -422,7 +422,7 @@ async def async_setup_entry( # periodical (or manual) self test since last daemon restart. It might not be available # when we set up the integration, and we do not know if it would ever be available. Here we # add it anyway and mark it as unknown initially. 
- for resource in available_resources | {LASTSTEST}: + for resource in available_resources | {LAST_S_TEST}: if resource not in SENSORS: _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper()) continue From 715f4bd2c379424075f80bf25db85ce4a58f73e3 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 10:09:49 +0200 Subject: [PATCH 1089/1635] Set deprecated flag on TTS engines replaced by entities in WS list (#124676) --- homeassistant/components/tts/__init__.py | 5 +++ tests/components/tts/test_init.py | 56 +++++++++++++++++++++++- 2 files changed, 60 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 583db4472d4..5406b11fccb 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -1086,6 +1086,7 @@ def websocket_list_engines( language = msg.get("language") providers = [] provider_info: dict[str, Any] + entity_domains: set[str] = set() for entity in component.entities: provider_info = { @@ -1097,6 +1098,8 @@ def websocket_list_engines( language, entity.supported_languages, country ) providers.append(provider_info) + if entity.platform: + entity_domains.add(entity.platform.platform_name) for engine_id, provider in manager.providers.items(): provider_info = { "engine_id": engine_id, @@ -1106,6 +1109,8 @@ def websocket_list_engines( provider_info["supported_languages"] = language_util.matches( language, provider.supported_languages, country ) + if engine_id in entity_domains: + provider_info["deprecated"] = True providers.append(provider_info) connection.send_message( diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 05c19622e84..dbbcdbd40f6 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -31,6 +31,7 @@ from .common import ( SUPPORT_LANGUAGES, TEST_DOMAIN, MockProvider, + MockTTS, MockTTSEntity, get_media_source_url, mock_config_entry_setup, @@ -38,7 +39,13 @@ from .common import ( retrieve_media, ) -from tests.common import async_mock_service, mock_restore_cache +from tests.common import ( + MockModule, + async_mock_service, + mock_integration, + mock_platform, + mock_restore_cache, +) from tests.typing import ClientSessionGenerator, WebSocketGenerator ORIG_WRITE_TAGS = tts.SpeechManager.write_tags @@ -1635,6 +1642,53 @@ async def test_ws_list_engines( } +async def test_ws_list_engines_deprecated( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_tts_entity: MockTTSEntity, +) -> None: + """Test listing tts engines. + + This test asserts the deprecated flag is set on a legacy engine whose integration + also provides tts entities. 
+ """ + + mock_provider = MockProvider(DEFAULT_LANG) + mock_provider_2 = MockProvider(DEFAULT_LANG) + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.tts", MockTTS(mock_provider)) + mock_integration(hass, MockModule(domain="test_2")) + mock_platform(hass, "test_2.tts", MockTTS(mock_provider_2)) + await async_setup_component( + hass, "tts", {"tts": [{"platform": "test"}, {"platform": "test_2"}]} + ) + await mock_config_entry_setup(hass, mock_tts_entity) + + client = await hass_ws_client() + + await client.send_json_auto_id({"type": "tts/engine/list"}) + + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "providers": [ + { + "engine_id": "tts.test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + { + "deprecated": True, + "engine_id": "test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + { + "engine_id": "test_2", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + ] + } + + @pytest.mark.parametrize( ("setup", "engine_id"), [ From 831a1d7ad191ae9222fc4bae480761ceded5ea89 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 10:34:47 +0200 Subject: [PATCH 1090/1635] Standardize import step variable name (part 1) (#124674) --- homeassistant/components/demo/config_flow.py | 4 ++-- .../components/downloader/config_flow.py | 6 +++--- .../components/dynalite/config_flow.py | 10 +++++----- homeassistant/components/elkm1/config_flow.py | 6 +++--- .../components/enigma2/config_flow.py | 18 +++++++++--------- .../components/feedreader/config_flow.py | 6 +++--- homeassistant/components/fitbit/config_flow.py | 4 ++-- .../components/geniushub/config_flow.py | 8 ++++---- homeassistant/components/google/config_flow.py | 6 +++--- .../components/homekit/config_flow.py | 6 +++--- .../homematicip_cloud/config_flow.py | 8 ++++---- homeassistant/components/hue/config_flow.py | 6 +++--- .../components/kitchen_sink/config_flow.py | 4 ++-- .../components/lg_netcast/config_flow.py | 8 ++++---- .../components/mastodon/config_flow.py | 13 ++++++------- homeassistant/components/otp/config_flow.py | 8 ++++---- homeassistant/components/pyload/config_flow.py | 14 +++++++------- homeassistant/components/rova/config_flow.py | 8 ++++---- .../components/solarlog/config_flow.py | 4 ++-- homeassistant/components/sun/config_flow.py | 4 ++-- .../components/venstar/config_flow.py | 3 +-- homeassistant/components/vera/config_flow.py | 4 ++-- .../components/zwave_js/config_flow.py | 6 +++--- 23 files changed, 81 insertions(+), 83 deletions(-) diff --git a/homeassistant/components/demo/config_flow.py b/homeassistant/components/demo/config_flow.py index 468d9cb042b..c866873732c 100644 --- a/homeassistant/components/demo/config_flow.py +++ b/homeassistant/components/demo/config_flow.py @@ -37,12 +37,12 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Demo", data=import_info) + return self.async_create_entry(title="Demo", data=import_data) class OptionsFlowHandler(OptionsFlow): diff --git 
a/homeassistant/components/downloader/config_flow.py b/homeassistant/components/downloader/config_flow.py index e7191e055a6..61a7ba8fe52 100644 --- a/homeassistant/components/downloader/config_flow.py +++ b/homeassistant/components/downloader/config_flow.py @@ -43,13 +43,13 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" try: - await self._validate_input(user_input) + await self._validate_input(import_data) except DirectoryDoesNotExist: return self.async_abort(reason="directory_does_not_exist") - return self.async_create_entry(title=DEFAULT_NAME, data=user_input) + return self.async_create_entry(title=DEFAULT_NAME, data=import_data) async def _validate_input(self, user_input: dict[str, Any]) -> None: """Validate the user input if the directory exists.""" diff --git a/homeassistant/components/dynalite/config_flow.py b/homeassistant/components/dynalite/config_flow.py index 3ae4828b668..928f7043a49 100644 --- a/homeassistant/components/dynalite/config_flow.py +++ b/homeassistant/components/dynalite/config_flow.py @@ -26,9 +26,9 @@ class DynaliteFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize the Dynalite flow.""" self.host = None - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new bridge as a config entry.""" - LOGGER.debug("Starting async_step_import (deprecated) - %s", import_info) + LOGGER.debug("Starting async_step_import (deprecated) - %s", import_data) # Raise an issue that this is deprecated and has been imported async_create_issue( self.hass, @@ -46,17 +46,17 @@ class DynaliteFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - host = import_info[CONF_HOST] + host = import_data[CONF_HOST] # Check if host already exists for entry in self._async_current_entries(): if entry.data[CONF_HOST] == host: self.hass.config_entries.async_update_entry( - entry, data=dict(import_info) + entry, data=dict(import_data) ) return self.async_abort(reason="already_configured") # New entry - return await self._try_create(import_info) + return await self._try_create(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/elkm1/config_flow.py b/homeassistant/components/elkm1/config_flow.py index 4ab8d1fe181..2f9d3338d76 100644 --- a/homeassistant/components/elkm1/config_flow.py +++ b/homeassistant/components/elkm1/config_flow.py @@ -335,10 +335,10 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" _LOGGER.debug("Elk is importing from yaml") - url = _make_url_from_data(user_input) + url = _make_url_from_data(import_data) if self._url_already_configured(url): return self.async_abort(reason="address_already_configured") @@ -357,7 +357,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): ) self._abort_if_unique_id_configured() - errors, result = await self._async_create_or_error(user_input, True) + errors, result = await self._async_create_or_error(import_data, True) if errors: return self.async_abort(reason=list(errors.values())[0]) assert result is not 
None diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index 71c5830d550..55c0f6fc6ae 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -152,20 +152,20 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) return self.async_create_entry(data=user_input, title=user_input[CONF_HOST]) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle the import step.""" - if CONF_PORT not in user_input: - user_input[CONF_PORT] = DEFAULT_PORT - if CONF_SSL not in user_input: - user_input[CONF_SSL] = DEFAULT_SSL - user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL + if CONF_PORT not in import_data: + import_data[CONF_PORT] = DEFAULT_PORT + if CONF_SSL not in import_data: + import_data[CONF_SSL] = DEFAULT_SSL + import_data[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL - data = {key: user_input[key] for key in user_input if key in self.DATA_KEYS} + data = {key: import_data[key] for key in import_data if key in self.DATA_KEYS} options = { - key: user_input[key] for key in user_input if key in self.OPTIONS_KEYS + key: import_data[key] for key in import_data if key in self.OPTIONS_KEYS } - if errors := await self.validate_user_input(user_input): + if errors := await self.validate_user_input(import_data): async_create_issue( self.hass, DOMAIN, diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index d367432ff8c..4553978a47e 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -115,10 +115,10 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN): options={CONF_MAX_ENTRIES: self._max_entries or DEFAULT_MAX_ENTRIES}, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle an import flow.""" - self._max_entries = user_input[CONF_MAX_ENTRIES] - return await self.async_step_user({CONF_URL: user_input[CONF_URL]}) + self._max_entries = import_data[CONF_MAX_ENTRIES] + return await self.async_step_user({CONF_URL: import_data[CONF_URL]}) async def async_step_reconfigure( self, _: dict[str, Any] | None = None diff --git a/homeassistant/components/fitbit/config_flow.py b/homeassistant/components/fitbit/config_flow.py index 0ae1973b5fb..eff4ba37773 100644 --- a/homeassistant/components/fitbit/config_flow.py +++ b/homeassistant/components/fitbit/config_flow.py @@ -93,6 +93,6 @@ class OAuth2FlowHandler( self._abort_if_unique_id_configured() return self.async_create_entry(title=profile.display_name, data=data) - async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from YAML.""" - return await self.async_oauth_create_entry(data) + return await self.async_oauth_create_entry(import_data) diff --git a/homeassistant/components/geniushub/config_flow.py b/homeassistant/components/geniushub/config_flow.py index 5f026c91ee1..601eac6c2f2 100644 --- a/homeassistant/components/geniushub/config_flow.py +++ b/homeassistant/components/geniushub/config_flow.py @@ -124,12 +124,12 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN): step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA ) - async 
def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import the yaml config.""" - if CONF_HOST in user_input: - result = await self.async_step_local_api(user_input) + if CONF_HOST in import_data: + result = await self.async_step_local_api(import_data) else: - result = await self.async_step_cloud_api(user_input) + result = await self.async_step_cloud_api(import_data) if result["type"] is FlowResultType.FORM: assert result["errors"] return self.async_abort(reason=result["errors"]["base"]) diff --git a/homeassistant/components/google/config_flow.py b/homeassistant/components/google/config_flow.py index 6207303c8a6..726af854f75 100644 --- a/homeassistant/components/google/config_flow.py +++ b/homeassistant/components/google/config_flow.py @@ -94,7 +94,7 @@ class OAuth2FlowHandler( "prompt": "consent", } - async def async_step_import(self, info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import existing auth into a new config entry.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -103,8 +103,8 @@ class OAuth2FlowHandler( ) assert len(implementations) == 1 self.flow_impl = list(implementations.values())[0] - self.external_data = info - return await super().async_step_creation(info) + self.external_data = import_data + return await super().async_step_creation(import_data) async def async_step_auth( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index 78979f73490..f88aa646f04 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -311,12 +311,12 @@ class HomeKitConfigFlow(ConfigFlow, domain=DOMAIN): title=f"{name}:{entry_data[CONF_PORT]}", data=entry_data ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from yaml.""" - if not self._async_is_unique_name_port(user_input): + if not self._async_is_unique_name_port(import_data): return self.async_abort(reason="port_name_in_use") return self.async_create_entry( - title=f"{user_input[CONF_NAME]}:{user_input[CONF_PORT]}", data=user_input + title=f"{import_data[CONF_NAME]}:{import_data[CONF_PORT]}", data=import_data ) @callback diff --git a/homeassistant/components/homematicip_cloud/config_flow.py b/homeassistant/components/homematicip_cloud/config_flow.py index c2277e16c79..a8b17a80aff 100644 --- a/homeassistant/components/homematicip_cloud/config_flow.py +++ b/homeassistant/components/homematicip_cloud/config_flow.py @@ -83,11 +83,11 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="link", errors=errors) - async def async_step_import(self, import_info: dict[str, str]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, str]) -> ConfigFlowResult: """Import a new access point as a config entry.""" - hapid = import_info[HMIPC_HAPID].replace("-", "").upper() - authtoken = import_info[HMIPC_AUTHTOKEN] - name = import_info[HMIPC_NAME] + hapid = import_data[HMIPC_HAPID].replace("-", "").upper() + authtoken = import_data[HMIPC_AUTHTOKEN] + name = import_data[HMIPC_NAME] await self.async_set_unique_id(hapid) 
self._abort_if_unique_id_configured() diff --git a/homeassistant/components/hue/config_flow.py b/homeassistant/components/hue/config_flow.py index fb32f568ee1..e73ae8fe11d 100644 --- a/homeassistant/components/hue/config_flow.py +++ b/homeassistant/components/hue/config_flow.py @@ -258,7 +258,7 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): await self._async_handle_discovery_without_unique_id() return await self.async_step_link() - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new bridge as a config entry. This flow is triggered by `async_setup` for both configured and @@ -268,9 +268,9 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): This flow is also triggered by `async_step_discovery`. """ # Check if host exists, abort if so. - self._async_abort_entries_match({"host": import_info["host"]}) + self._async_abort_entries_match({"host": import_data["host"]}) - bridge = await self._get_bridge(import_info["host"]) + bridge = await self._get_bridge(import_data["host"]) if bridge is None: return self.async_abort(reason="cannot_connect") self.bridge = bridge diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index c561ca29b8a..2e05db71b89 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -34,12 +34,12 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Kitchen Sink", data=import_info) + return self.async_create_entry(title="Kitchen Sink", data=import_data) async def async_step_reauth(self, data): """Reauth step.""" diff --git a/homeassistant/components/lg_netcast/config_flow.py b/homeassistant/components/lg_netcast/config_flow.py index c4e6c75edea..4b1780d41ae 100644 --- a/homeassistant/components/lg_netcast/config_flow.py +++ b/homeassistant/components/lg_netcast/config_flow.py @@ -68,11 +68,11 @@ class LGNetCast(config_entries.ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import configuration from yaml.""" self.device_config = { - CONF_HOST: config[CONF_HOST], - CONF_NAME: config[CONF_NAME], + CONF_HOST: import_data[CONF_HOST], + CONF_NAME: import_data[CONF_NAME], } def _create_issue(): @@ -92,7 +92,7 @@ class LGNetCast(config_entries.ConfigFlow, domain=DOMAIN): ) try: - result: ConfigFlowResult = await self.async_step_authorize(config) + result: ConfigFlowResult = await self.async_step_authorize(import_data) except AbortFlow as err: if err.reason != "already_configured": async_create_issue( diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 4e856275736..5c9419cd12d 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -19,7 +19,6 @@ from homeassistant.helpers.selector import ( 
TextSelectorConfig, TextSelectorType, ) -from homeassistant.helpers.typing import ConfigType from homeassistant.util import slugify from .const import CONF_BASE_URL, DEFAULT_URL, DOMAIN, LOGGER @@ -126,17 +125,17 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): return self.show_user_form(user_input, errors) - async def async_step_import(self, import_config: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" errors: dict[str, str] | None = None LOGGER.debug("Importing Mastodon from configuration.yaml") - base_url = str(import_config.get(CONF_BASE_URL, DEFAULT_URL)) - client_id = str(import_config.get(CONF_CLIENT_ID)) - client_secret = str(import_config.get(CONF_CLIENT_SECRET)) - access_token = str(import_config.get(CONF_ACCESS_TOKEN)) - name = import_config.get(CONF_NAME, None) + base_url = str(import_data.get(CONF_BASE_URL, DEFAULT_URL)) + client_id = str(import_data.get(CONF_CLIENT_ID)) + client_secret = str(import_data.get(CONF_CLIENT_SECRET)) + access_token = str(import_data.get(CONF_ACCESS_TOKEN)) + name = import_data.get(CONF_NAME) instance, account, errors = await self.hass.async_add_executor_job( self.check_connection, diff --git a/homeassistant/components/otp/config_flow.py b/homeassistant/components/otp/config_flow.py index 6aa4532683a..33f63a04d68 100644 --- a/homeassistant/components/otp/config_flow.py +++ b/homeassistant/components/otp/config_flow.py @@ -82,15 +82,15 @@ class TOTPConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from yaml.""" - await self.async_set_unique_id(import_info[CONF_TOKEN]) + await self.async_set_unique_id(import_data[CONF_TOKEN]) self._abort_if_unique_id_configured() return self.async_create_entry( - title=import_info.get(CONF_NAME, DEFAULT_NAME), - data=import_info, + title=import_data.get(CONF_NAME, DEFAULT_NAME), + data=import_data, ) async def async_step_confirm( diff --git a/homeassistant/components/pyload/config_flow.py b/homeassistant/components/pyload/config_flow.py index 2f4f9519d30..79b90ff917d 100644 --- a/homeassistant/components/pyload/config_flow.py +++ b/homeassistant/components/pyload/config_flow.py @@ -133,16 +133,16 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from yaml.""" config = { - CONF_NAME: import_info.get(CONF_NAME), - CONF_HOST: import_info.get(CONF_HOST, DEFAULT_HOST), - CONF_PASSWORD: import_info.get(CONF_PASSWORD, ""), - CONF_PORT: import_info.get(CONF_PORT, DEFAULT_PORT), - CONF_SSL: import_info.get(CONF_SSL, False), - CONF_USERNAME: import_info.get(CONF_USERNAME, ""), + CONF_NAME: import_data.get(CONF_NAME), + CONF_HOST: import_data.get(CONF_HOST, DEFAULT_HOST), + CONF_PASSWORD: import_data.get(CONF_PASSWORD, ""), + CONF_PORT: import_data.get(CONF_PORT, DEFAULT_PORT), + CONF_SSL: import_data.get(CONF_SSL, False), + CONF_USERNAME: import_data.get(CONF_USERNAME, ""), CONF_VERIFY_SSL: False, } diff --git a/homeassistant/components/rova/config_flow.py b/homeassistant/components/rova/config_flow.py index e5e3a31b8af..a28e6202466 100644 --- a/homeassistant/components/rova/config_flow.py +++ 
b/homeassistant/components/rova/config_flow.py @@ -60,11 +60,11 @@ class RovaConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import the yaml config.""" - zip_code = user_input[CONF_ZIP_CODE] - number = user_input[CONF_HOUSE_NUMBER] - suffix = user_input[CONF_HOUSE_NUMBER_SUFFIX] + zip_code = import_data[CONF_ZIP_CODE] + number = import_data[CONF_HOUSE_NUMBER] + suffix = import_data[CONF_HOUSE_NUMBER_SUFFIX] await self.async_set_unique_id(f"{zip_code}{number}{suffix}".strip()) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 7c8401be2b8..7824c98cf5d 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -101,14 +101,14 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): errors=self._errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" user_input = { CONF_HOST: DEFAULT_HOST, CONF_NAME: DEFAULT_NAME, "extended_data": False, - **user_input, + **import_data, } user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) diff --git a/homeassistant/components/sun/config_flow.py b/homeassistant/components/sun/config_flow.py index 30b64c60b9f..16c465be8ad 100644 --- a/homeassistant/components/sun/config_flow.py +++ b/homeassistant/components/sun/config_flow.py @@ -23,6 +23,6 @@ class SunConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user") - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from configuration.yaml.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/venstar/config_flow.py b/homeassistant/components/venstar/config_flow.py index 289f7936676..929f5718c19 100644 --- a/homeassistant/components/venstar/config_flow.py +++ b/homeassistant/components/venstar/config_flow.py @@ -15,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.typing import ConfigType from .const import _LOGGER, DOMAIN, VENSTAR_TIMEOUT @@ -85,7 +84,7 @@ class VenstarConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import entry from configuration.yaml.""" self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) return await self.async_step_user( diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index 181849f46a1..08e7640773b 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -127,7 +127,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initialized by 
import.""" # If there are entities with the legacy unique_id, then this imported config @@ -146,7 +146,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_finish( { - **config, + **import_data, CONF_SOURCE: SOURCE_IMPORT, CONF_LEGACY_UNIQUE_ID: use_legacy_unique_id, } diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index e73fa9fc3a7..6d9f88d93f1 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -366,7 +366,7 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): """Return the options flow.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle imported data. This step will be used when importing data @@ -374,8 +374,8 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): """ # Note that the data comes from the zwave integration. # So we don't use our constants here. - self.s0_legacy_key = data.get("network_key") - self.usb_path = data.get("usb_path") + self.s0_legacy_key = import_data.get("network_key") + self.usb_path = import_data.get("usb_path") return await self.async_step_user() async def async_step_user( From 68cdf8877cf2c62e81a615f0d86b91fbf2e1c1e6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 10:50:13 +0200 Subject: [PATCH 1091/1635] Standardize import step variable name (part 3) (#124680) --- homeassistant/components/canary/config_flow.py | 6 ++---- homeassistant/components/file/config_flow.py | 5 +---- homeassistant/components/hive/config_flow.py | 6 ++---- homeassistant/components/jewish_calendar/config_flow.py | 7 ++----- homeassistant/components/media_extractor/config_flow.py | 4 +--- homeassistant/components/thread/config_flow.py | 4 +--- homeassistant/components/toon/config_flow.py | 6 +++--- 7 files changed, 12 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/canary/config_flow.py b/homeassistant/components/canary/config_flow.py index 6ae7632a7e2..5af7142af8f 100644 --- a/homeassistant/components/canary/config_flow.py +++ b/homeassistant/components/canary/config_flow.py @@ -54,11 +54,9 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return CanaryOptionsFlowHandler(config_entry) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/file/config_flow.py b/homeassistant/components/file/config_flow.py index 8cb58ec1f47..d74e36ce935 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -129,11 +129,8 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle file sensor config flow.""" return await self._async_handle_step(Platform.SENSOR.value, user_input) - async def async_step_import( - self, import_data: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> 
ConfigFlowResult: """Import `file`` config from configuration.yaml.""" - assert import_data is not None self._async_abort_entries_match(import_data) platform = import_data[CONF_PLATFORM] name: str = import_data.get(CONF_NAME, DEFAULT_NAME) diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py index f8cb089834a..d6be2d1efab 100644 --- a/homeassistant/components/hive/config_flow.py +++ b/homeassistant/components/hive/config_flow.py @@ -163,11 +163,9 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): } return await self.async_step_user(data) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import user.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) @staticmethod @callback diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index 8f04d73915f..518db38b3bb 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -30,7 +30,6 @@ from homeassistant.helpers.selector import ( SelectSelector, SelectSelectorConfig, ) -from homeassistant.helpers.typing import ConfigType from .const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -125,11 +124,9 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import( - self, import_config: ConfigType | None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) class JewishCalendarOptionsFlowHandler(OptionsFlowWithConfigEntry): diff --git a/homeassistant/components/media_extractor/config_flow.py b/homeassistant/components/media_extractor/config_flow.py index 4343d0551e0..b91942d7b13 100644 --- a/homeassistant/components/media_extractor/config_flow.py +++ b/homeassistant/components/media_extractor/config_flow.py @@ -25,8 +25,6 @@ class MediaExtractorConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=vol.Schema({})) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Handle import.""" return self.async_create_entry(title="Media extractor", data={}) diff --git a/homeassistant/components/thread/config_flow.py b/homeassistant/components/thread/config_flow.py index b4b6eac0fc8..568b76d4999 100644 --- a/homeassistant/components/thread/config_flow.py +++ b/homeassistant/components/thread/config_flow.py @@ -15,9 +15,7 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, import_data: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Set up by import from async_setup.""" await self._async_handle_discovery_without_unique_id() return self.async_create_entry(title="Thread", data={}) diff --git a/homeassistant/components/toon/config_flow.py b/homeassistant/components/toon/config_flow.py index 40e83c3c9be..af9f7b06850 100644 --- a/homeassistant/components/toon/config_flow.py +++ b/homeassistant/components/toon/config_flow.py @@ -48,7 +48,7 @@ class 
ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): return await self.async_step_agreement() async def async_step_import( - self, config: dict[str, Any] | None = None + self, import_data: dict[str, Any] | None ) -> ConfigFlowResult: """Start a configuration flow based on imported data. @@ -57,8 +57,8 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): the version 1 schema. """ - if config is not None and CONF_MIGRATE in config: - self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) + if import_data is not None and CONF_MIGRATE in import_data: + self.context.update({CONF_MIGRATE: import_data[CONF_MIGRATE]}) else: await self._async_handle_discovery_without_unique_id() From dece7e0f9c2e658bb23d5dacdfc38d221e37078a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:08:33 +0200 Subject: [PATCH 1092/1635] Cleanup unused import in proximity config flow (#124681) * Cleanup unused import in proximity config flow * Cleanup tests --- .../components/proximity/config_flow.py | 6 --- .../components/proximity/test_config_flow.py | 40 +------------------ 2 files changed, 2 insertions(+), 44 deletions(-) diff --git a/homeassistant/components/proximity/config_flow.py b/homeassistant/components/proximity/config_flow.py index d133b14cb6a..1758b182ad7 100644 --- a/homeassistant/components/proximity/config_flow.py +++ b/homeassistant/components/proximity/config_flow.py @@ -117,12 +117,6 @@ class ProximityConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=self._user_form_schema(user_input), ) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Import a yaml config entry.""" - return await self.async_step_user(user_input) - class ProximityOptionsFlow(OptionsFlow): """Handle a option flow.""" diff --git a/tests/components/proximity/test_config_flow.py b/tests/components/proximity/test_config_flow.py index 3ed9f5cba27..626565146d1 100644 --- a/tests/components/proximity/test_config_flow.py +++ b/tests/components/proximity/test_config_flow.py @@ -10,8 +10,8 @@ from homeassistant.components.proximity.const import ( CONF_TRACKED_ENTITIES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -120,42 +120,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: } -async def test_import_flow(hass: HomeAssistant) -> None: - """Test import of yaml configuration.""" - with patch( - "homeassistant.components.proximity.async_setup_entry", return_value=True - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_NAME: "home", - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TOLERANCE: 10, - CONF_UNIT_OF_MEASUREMENT: "km", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_NAME: "home", - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TOLERANCE: 10, - CONF_UNIT_OF_MEASUREMENT: "km", - } - - zone = hass.states.get("zone.home") - assert result["title"] == zone.name - - await hass.async_block_till_done() - - 
assert mock_setup_entry.called - - async def test_abort_duplicated_entry(hass: HomeAssistant) -> None: """Test if we abort on duplicate user input data.""" DATA = { From 823b62d8ab962df2ef130630d2ea36874bf1225b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 11:14:17 +0200 Subject: [PATCH 1093/1635] Include engine name in STT WS responses (#124684) --- homeassistant/components/stt/__init__.py | 1 + tests/components/stt/test_init.py | 29 ++++++++++++++++++------ 2 files changed, 23 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/stt/__init__.py b/homeassistant/components/stt/__init__.py index f6c38c1e0b7..f82d6c2ab93 100644 --- a/homeassistant/components/stt/__init__.py +++ b/homeassistant/components/stt/__init__.py @@ -448,6 +448,7 @@ def websocket_list_engines( for engine_id, provider in legacy_providers.items(): provider_info = { "engine_id": engine_id, + "name": provider.name, "supported_languages": provider.supported_languages, } if language: diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index e5d75d3c4a5..5c98b0f8d57 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -399,8 +399,11 @@ async def test_restore_state( @pytest.mark.parametrize( - ("setup", "engine_id"), - [("mock_setup", "test"), ("mock_config_entry_setup", "stt.test")], + ("setup", "engine_id", "extra_data"), + [ + ("mock_setup", "test", {"name": "test"}), + ("mock_config_entry_setup", "stt.test", {}), + ], indirect=["setup"], ) async def test_ws_list_engines( @@ -408,6 +411,7 @@ async def test_ws_list_engines( hass_ws_client: WebSocketGenerator, setup: MockProvider | MockProviderEntity, engine_id: str, + extra_data: dict[str, str], ) -> None: """Test listing speech-to-text engines.""" client = await hass_ws_client() @@ -419,6 +423,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de", "de-CH", "en"]} + | extra_data ] } @@ -427,7 +432,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []}] + "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en"}) @@ -435,7 +440,9 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data + ] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en-UK"}) @@ -443,7 +450,9 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data + ] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "de"}) @@ -451,7 +460,10 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["de", "de-CH"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["de", "de-CH"]} + | extra_data + ] } await client.send_json_auto_id( @@ -461,7 +473,10 @@ async def test_ws_list_engines( assert 
msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["de-CH", "de"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["de-CH", "de"]} + | extra_data + ] } From ef1d53c207c6dcf0dd0c432878a0e948d0fa40aa Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 11:14:41 +0200 Subject: [PATCH 1094/1635] Include engine name in TTS WS responses (#124683) --- homeassistant/components/tts/__init__.py | 3 +++ tests/components/tts/test_init.py | 34 +++++++++++++++++------- 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 5406b11fccb..70bb2b4c713 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -1103,6 +1103,7 @@ def websocket_list_engines( for engine_id, provider in manager.providers.items(): provider_info = { "engine_id": engine_id, + "name": provider.name, "supported_languages": provider.supported_languages, } if language: @@ -1153,6 +1154,8 @@ def websocket_get_engine( "engine_id": engine_id, "supported_languages": provider.supported_languages, } + if isinstance(provider, Provider): + provider_info["name"] = provider.name connection.send_message( websocket_api.result_message(msg["id"], {"provider": provider_info}) diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index dbbcdbd40f6..1417fcda2a7 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -1565,15 +1565,19 @@ async def test_fetching_in_async( @pytest.mark.parametrize( - ("setup", "engine_id"), + ("setup", "engine_id", "extra_data"), [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + ("mock_setup", "test", {"name": "Test"}), + ("mock_config_entry_setup", "tts.test", {}), ], indirect=["setup"], ) async def test_ws_list_engines( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup: str, + engine_id: str, + extra_data: dict[str, str], ) -> None: """Test listing tts engines and supported languages.""" client = await hass_ws_client() @@ -1588,6 +1592,7 @@ async def test_ws_list_engines( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } + | extra_data ] } @@ -1596,7 +1601,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []}] + "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] } await client.send_json_auto_id({"type": "tts/engine/list", "language": "en"}) @@ -1606,6 +1611,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_US", "en_GB"]} + | extra_data ] } @@ -1616,6 +1622,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_GB", "en_US"]} + | extra_data ] } @@ -1626,6 +1633,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_DE", "de_CH"]} + | extra_data ] } @@ -1638,6 +1646,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_CH", "de_DE"]} + | extra_data ] } @@ -1679,10 +1688,12 
@@ async def test_ws_list_engines_deprecated( { "deprecated": True, "engine_id": "test", + "name": "Test", "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], }, { "engine_id": "test_2", + "name": "Test", "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], }, ] @@ -1690,15 +1701,19 @@ async def test_ws_list_engines_deprecated( @pytest.mark.parametrize( - ("setup", "engine_id"), + ("setup", "engine_id", "extra_data"), [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + ("mock_setup", "test", {"name": "Test"}), + ("mock_config_entry_setup", "tts.test", {}), ], indirect=["setup"], ) async def test_ws_get_engine( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup: str, + engine_id: str, + extra_data: dict[str, str], ) -> None: """Test getting an tts engine.""" client = await hass_ws_client() @@ -1712,6 +1727,7 @@ async def test_ws_get_engine( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } + | extra_data } From 68d6f1c1aab2cf80b51191f2c617c9a7fc81aa7a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 23:15:00 -1000 Subject: [PATCH 1095/1635] Bump zeroconf to 0.133.0 (#124673) changelog: https://github.com/python-zeroconf/python-zeroconf/compare/0.132.2...0.133.0 --- homeassistant/components/zeroconf/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 0a76af3b9c2..8b332400805 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.132.2"] + "requirements": ["zeroconf==0.133.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 767804e5136..ed1a1f88059 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -63,7 +63,7 @@ voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 yarl==1.9.4 -zeroconf==0.132.2 +zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 diff --git a/requirements_all.txt b/requirements_all.txt index fa29a830768..f8d7b908ec3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2998,7 +2998,7 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.132.2 +zeroconf==0.133.0 # homeassistant.components.zeversolar zeversolar==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c233fae27a0..0307433e514 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2372,7 +2372,7 @@ yt-dlp==2024.08.06 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.132.2 +zeroconf==0.133.0 # homeassistant.components.zeversolar zeversolar==0.3.1 From d8161c431fdc5552f57b1614354ba749b1ebbc8f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 23:17:05 -1000 Subject: [PATCH 1096/1635] Add support for using an entityfilter to subscribe_entities (#124641) * Add support for using an entityfilter to subscribe_entities * filter init * fix * coverage --- .../components/websocket_api/commands.py | 55 +++++++++++++------ .../components/websocket_api/test_commands.py | 48 ++++++++++++++++ 2 files changed, 87 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/websocket_api/commands.py b/homeassistant/components/websocket_api/commands.py index f66930c8d00..c9347012183 100644 --- a/homeassistant/components/websocket_api/commands.py +++ b/homeassistant/components/websocket_api/commands.py @@ -36,6 +36,10 @@ from homeassistant.exceptions import ( ) from homeassistant.helpers import config_validation as cv, entity, template from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entityfilter import ( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, + convert_include_exclude_filter, +) from homeassistant.helpers.event import ( TrackTemplate, TrackTemplateResult, @@ -366,14 +370,17 @@ def _send_handle_get_states_response( @callback def _forward_entity_changes( send_message: Callable[[str | bytes | dict[str, Any]], None], - entity_ids: set[str], + entity_ids: set[str] | None, + entity_filter: Callable[[str], bool] | None, user: User, message_id_as_bytes: bytes, event: Event[EventStateChangedData], ) -> None: """Forward entity state changed events to websocket.""" entity_id = event.data["entity_id"] - if entity_ids and entity_id not in entity_ids: + if (entity_ids and entity_id not in entity_ids) or ( + entity_filter and not entity_filter(entity_id) + ): return # We have to lookup the permissions again because the user might have # changed since the subscription was created. @@ -381,7 +388,7 @@ def _forward_entity_changes( if ( not user.is_admin and not permissions.access_all_entities(POLICY_READ) - and not permissions.check_entity(event.data["entity_id"], POLICY_READ) + and not permissions.check_entity(entity_id, POLICY_READ) ): return send_message(messages.cached_state_diff_message(message_id_as_bytes, event)) @@ -392,43 +399,55 @@ def _forward_entity_changes( { vol.Required("type"): "subscribe_entities", vol.Optional("entity_ids"): cv.entity_ids, + **INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.schema, } ) def handle_subscribe_entities( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle subscribe entities command.""" - entity_ids = set(msg.get("entity_ids", [])) + entity_ids = set(msg.get("entity_ids", [])) or None + _filter = convert_include_exclude_filter(msg) + entity_filter = None if _filter.empty_filter else _filter.get_filter() # We must never await between sending the states and listening for # state changed events or we will introduce a race condition # where some states are missed states = _async_get_allowed_states(hass, connection) - message_id_as_bytes = str(msg["id"]).encode() - connection.subscriptions[msg["id"]] = hass.bus.async_listen( + msg_id = msg["id"] + message_id_as_bytes = str(msg_id).encode() + connection.subscriptions[msg_id] = hass.bus.async_listen( EVENT_STATE_CHANGED, partial( _forward_entity_changes, connection.send_message, entity_ids, + entity_filter, connection.user, message_id_as_bytes, ), ) - connection.send_result(msg["id"]) + connection.send_result(msg_id) # JSON serialize here so we can recover if it blows up due to the # state machine containing unserializable data. 
This command is required # to succeed for the UI to show. try: - serialized_states = [ - state.as_compressed_state_json - for state in states - if not entity_ids or state.entity_id in entity_ids - ] + if entity_ids or entity_filter: + serialized_states = [ + state.as_compressed_state_json + for state in states + if (not entity_ids or state.entity_id in entity_ids) + and (not entity_filter or entity_filter(state.entity_id)) + ] + else: + # Fast path when not filtering + serialized_states = [state.as_compressed_state_json for state in states] except (ValueError, TypeError): pass else: - _send_handle_entities_init_response(connection, msg["id"], serialized_states) + _send_handle_entities_init_response( + connection, message_id_as_bytes, serialized_states + ) return serialized_states = [] @@ -443,18 +462,22 @@ def handle_subscribe_entities( ), ) - _send_handle_entities_init_response(connection, msg["id"], serialized_states) + _send_handle_entities_init_response( + connection, message_id_as_bytes, serialized_states + ) def _send_handle_entities_init_response( - connection: ActiveConnection, msg_id: int, serialized_states: list[bytes] + connection: ActiveConnection, + message_id_as_bytes: bytes, + serialized_states: list[bytes], ) -> None: """Send handle entities init response.""" connection.send_message( b"".join( ( b'{"id":', - str(msg_id).encode(), + message_id_as_bytes, b',"type":"event","event":{"a":{', b",".join(serialized_states), b"}}}", diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index 772a8ee793e..54a87e033dc 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -1262,6 +1262,54 @@ async def test_subscribe_unsubscribe_entities_specific_entities( } +async def test_subscribe_unsubscribe_entities_with_filter( + hass: HomeAssistant, + websocket_client: MockHAClientWebSocket, + hass_admin_user: MockUser, +) -> None: + """Test subscribe/unsubscribe entities with an entity filter.""" + hass.states.async_set("switch.not_included", "off") + hass.states.async_set("light.include", "off") + await websocket_client.send_json( + {"id": 7, "type": "subscribe_entities", "include": {"domains": ["light"]}} + ) + + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == const.TYPE_RESULT + assert msg["success"] + + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "a": { + "light.include": { + "a": {}, + "c": ANY, + "lc": ANY, + "s": "off", + } + } + } + hass.states.async_set("switch.not_included", "on") + hass.states.async_set("light.include", "on") + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "c": { + "light.include": { + "+": { + "c": ANY, + "lc": ANY, + "s": "on", + } + } + } + } + + async def test_render_template_renders_template( hass: HomeAssistant, websocket_client ) -> None: From 902d76da3693c84e10ff71dd7fa120fa2f09a22c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 23:17:34 -1000 Subject: [PATCH 1097/1635] Small cleanup to normalize states (#124614) - Remove duplicate state_unit lookup - Use undefined instead of object --- homeassistant/components/sensor/recorder.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index be4dce28546..fce41a13ca6 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -33,6 +33,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity import entity_sources +from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.loader import async_suggest_report_issue from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum @@ -220,13 +221,13 @@ def _normalize_states( LINK_DEV_STATISTICS, ) return None, [] - state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) + return state_unit, fstates converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit] valid_fstates: list[tuple[float, State]] = [] convert: Callable[[float], float] | None = None - last_unit: str | None | object = object() + last_unit: str | None | UndefinedType = UNDEFINED valid_units = converter.VALID_UNITS for fstate, state in fstates: From 4bc19876ca6d2235bd9679a098283f8ed4537e2c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Mon, 26 Aug 2024 23:18:12 -1000 Subject: [PATCH 1098/1635] Small speed up to creating stats database rows (#124587) * Small speed up to creating stats database rows Calling .timestamp() directly is faster on new cpython * more cleanup --- homeassistant/components/recorder/db_schema.py | 4 ++-- homeassistant/components/recorder/models/time.py | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index f84459675ae..6ba9d971f2c 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -693,13 +693,13 @@ class StatisticsBase: @classmethod def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: - """Create object from a statistics with datatime objects.""" + """Create object from a statistics with datetime objects.""" return cls( # type: ignore[call-arg] metadata_id=metadata_id, created=None, created_ts=time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), min=stats.get("min"), max=stats.get("max"), diff --git a/homeassistant/components/recorder/models/time.py b/homeassistant/components/recorder/models/time.py index 6295060c8d3..8f0f89a9ffa 100644 --- a/homeassistant/components/recorder/models/time.py +++ b/homeassistant/components/recorder/models/time.py @@ -65,9 +65,7 @@ def process_datetime_to_timestamp(ts: datetime) -> float: def datetime_to_timestamp_or_none(dt: datetime | None) -> float | None: """Convert a datetime to a timestamp.""" - if dt is None: - return None - return dt_util.utc_to_timestamp(dt) + return None if dt is None else dt.timestamp() def timestamp_to_datetime_or_none(ts: float | None) -> datetime | None: From 0d2f22838aef2b32b4c46275f4c28769cd8e0228 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 26 Aug 2024 23:22:10 -1000 Subject: [PATCH 1099/1635] Reduce complexity of _sorted_statistics_to_dict (#123936) * comp stats queries * tweak * reduce --- .../components/recorder/statistics.py | 76 +++++-------------- 1 file changed, 20 insertions(+), 56 deletions(-) diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 6532935ae0e..ba19c016d19 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -2088,71 +2088,38 @@ def _build_stats( db_rows: list[Row], table_duration_seconds: float, start_ts_idx: int, - mean_idx: int | None, - min_idx: int | None, - max_idx: int | None, - last_reset_ts_idx: int | None, - state_idx: int | None, - sum_idx: int | None, + row_mapping: tuple[tuple[str, int], ...], ) -> list[StatisticsRow]: """Build a list of statistics without unit conversion.""" - result: list[StatisticsRow] = [] - ent_results_append = result.append - for db_row in db_rows: - row: StatisticsRow = { + return [ + { "start": (start_ts := db_row[start_ts_idx]), "end": start_ts + table_duration_seconds, + **{key: db_row[idx] for key, idx in row_mapping}, # type: ignore[typeddict-item] } - if last_reset_ts_idx is not None: - row["last_reset"] = db_row[last_reset_ts_idx] - if mean_idx is not None: - row["mean"] = db_row[mean_idx] - if min_idx is not None: - row["min"] = db_row[min_idx] - if max_idx is not None: - row["max"] = db_row[max_idx] - if state_idx is not None: - row["state"] = db_row[state_idx] - if sum_idx is not None: - row["sum"] = db_row[sum_idx] - ent_results_append(row) - return result + for db_row in db_rows + ] def _build_converted_stats( db_rows: list[Row], table_duration_seconds: float, start_ts_idx: int, - mean_idx: int | None, - min_idx: int | None, - max_idx: int | None, - last_reset_ts_idx: int | None, - state_idx: int | None, - sum_idx: int | None, + row_mapping: tuple[tuple[str, int], ...], convert: Callable[[float | None], float | None] | Callable[[float], float], ) -> list[StatisticsRow]: """Build a list of statistics with unit conversion.""" - result: list[StatisticsRow] = [] - ent_results_append = result.append - for db_row in db_rows: - row: StatisticsRow = { + return [ + { "start": (start_ts := db_row[start_ts_idx]), "end": start_ts + table_duration_seconds, + **{ + key: None if (v := db_row[idx]) is None else convert(v) # type: ignore[typeddict-item] + for key, idx in row_mapping + }, } - if last_reset_ts_idx is not None: - row["last_reset"] = db_row[last_reset_ts_idx] - if mean_idx is not None: - row["mean"] = None if (v := db_row[mean_idx]) is None else convert(v) - if min_idx is not None: - row["min"] = None if (v := db_row[min_idx]) is None else convert(v) - if max_idx is not None: - row["max"] = None if (v := db_row[max_idx]) is None else convert(v) - if state_idx is not None: - row["state"] = None if (v := db_row[state_idx]) is None else convert(v) - if sum_idx is not None: - row["sum"] = None if (v := db_row[sum_idx]) is None else convert(v) - ent_results_append(row) - return result + for db_row in db_rows + ] def _sorted_statistics_to_dict( @@ -2192,14 +2159,11 @@ def _sorted_statistics_to_dict( # Figure out which fields we need to extract from the SQL result # and which indices they have in the result so we can avoid the overhead # of doing a dict lookup for each row - mean_idx = field_map["mean"] if "mean" in types else None - min_idx = field_map["min"] if "min" in types else None - max_idx = field_map["max"] 
if "max" in types else None - last_reset_ts_idx = field_map["last_reset_ts"] if "last_reset" in types else None - state_idx = field_map["state"] if "state" in types else None + if "last_reset_ts" in field_map: + field_map["last_reset"] = field_map.pop("last_reset_ts") sum_idx = field_map["sum"] if "sum" in types else None sum_only = len(types) == 1 and sum_idx is not None - row_idxes = (mean_idx, min_idx, max_idx, last_reset_ts_idx, state_idx, sum_idx) + row_mapping = tuple((key, field_map[key]) for key in types if key in field_map) # Append all statistic entries, and optionally do unit conversion table_duration_seconds = table.duration.total_seconds() for meta_id, db_rows in stats_by_meta_id.items(): @@ -2228,9 +2192,9 @@ def _sorted_statistics_to_dict( else: _stats = _build_sum_stats(*build_args, sum_idx) elif convert: - _stats = _build_converted_stats(*build_args, *row_idxes, convert) + _stats = _build_converted_stats(*build_args, row_mapping, convert) else: - _stats = _build_stats(*build_args, *row_idxes) + _stats = _build_stats(*build_args, row_mapping) result[statistic_id] = _stats From 6b0428774d8af298e5498a3350d1a5bc67ceaefd Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 11:22:35 +0200 Subject: [PATCH 1100/1635] Standardize import step variable name (part 2) (#124679) --- .../components/airvisual_pro/config_flow.py | 6 ++-- homeassistant/components/aws/config_flow.py | 7 ++--- .../components/azure_event_hub/config_flow.py | 14 ++++----- .../components/firmata/config_flow.py | 14 ++++----- homeassistant/components/lcn/config_flow.py | 19 ++++++------ homeassistant/components/mpd/config_flow.py | 20 ++++++------- .../components/panasonic_viera/config_flow.py | 6 ++-- .../components/russound_rio/config_flow.py | 10 +++---- .../components/sleepiq/config_flow.py | 10 +++---- homeassistant/components/tile/config_flow.py | 6 ++-- .../components/traccar_server/config_flow.py | 29 ++++++++---------- homeassistant/components/vizio/config_flow.py | 30 +++++++++---------- 12 files changed, 73 insertions(+), 98 deletions(-) diff --git a/homeassistant/components/airvisual_pro/config_flow.py b/homeassistant/components/airvisual_pro/config_flow.py index ebdbc807b18..da0cca2e6a2 100644 --- a/homeassistant/components/airvisual_pro/config_flow.py +++ b/homeassistant/components/airvisual_pro/config_flow.py @@ -80,11 +80,9 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize.""" self._reauth_entry: ConfigEntry | None = None - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/aws/config_flow.py b/homeassistant/components/aws/config_flow.py index 8c80b0d487d..3175e6bc56c 100644 --- a/homeassistant/components/aws/config_flow.py +++ b/homeassistant/components/aws/config_flow.py @@ -1,6 +1,5 @@ """Config flow for AWS component.""" -from collections.abc import Mapping from typing import Any from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -13,11 +12,9 @@ class AWSFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, user_input: Mapping[str, Any] - ) -> ConfigFlowResult: + async def 
async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="configuration.yaml", data=user_input) + return self.async_create_entry(title="configuration.yaml", data=import_data) diff --git a/homeassistant/components/azure_event_hub/config_flow.py b/homeassistant/components/azure_event_hub/config_flow.py index 264daa683bc..046851e6926 100644 --- a/homeassistant/components/azure_event_hub/config_flow.py +++ b/homeassistant/components/azure_event_hub/config_flow.py @@ -154,17 +154,15 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): options=self._options, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from configuration.yaml.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - if CONF_SEND_INTERVAL in import_config: - self._options[CONF_SEND_INTERVAL] = import_config.pop(CONF_SEND_INTERVAL) - if CONF_MAX_DELAY in import_config: - self._options[CONF_MAX_DELAY] = import_config.pop(CONF_MAX_DELAY) - self._data = import_config + if CONF_SEND_INTERVAL in import_data: + self._options[CONF_SEND_INTERVAL] = import_data.pop(CONF_SEND_INTERVAL) + if CONF_MAX_DELAY in import_data: + self._options[CONF_MAX_DELAY] = import_data.pop(CONF_MAX_DELAY) + self._data = import_data errors = await validate_data(self._data) if errors: return self.async_abort(reason=errors["base"]) diff --git a/homeassistant/components/firmata/config_flow.py b/homeassistant/components/firmata/config_flow.py index 571df351b25..4c0f800fff4 100644 --- a/homeassistant/components/firmata/config_flow.py +++ b/homeassistant/components/firmata/config_flow.py @@ -19,9 +19,7 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a firmata board as a config entry. This flow is triggered by `async_setup` for configured boards. @@ -30,14 +28,14 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): config entry yet (based on entry_id). It validates a connection and then adds the entry. 
""" - name = f"serial-{import_config[CONF_SERIAL_PORT]}" - import_config[CONF_NAME] = name + name = f"serial-{import_data[CONF_SERIAL_PORT]}" + import_data[CONF_NAME] = name # Connect to the board to verify connection and then shutdown # If either fail then we cannot continue _LOGGER.debug("Connecting to Firmata board %s to test connection", name) try: - api = await get_board(import_config) + api = await get_board(import_data) await api.shutdown() except RuntimeError as err: _LOGGER.error("Error connecting to PyMata board %s: %s", name, err) @@ -54,6 +52,4 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="cannot_connect") _LOGGER.debug("Connection test to Firmata board %s successful", name) - return self.async_create_entry( - title=import_config[CONF_NAME], data=import_config - ) + return self.async_create_entry(title=import_data[CONF_NAME], data=import_data) diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index 664f32e5585..c38a16cc21e 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -9,6 +9,7 @@ import pypck import voluptuous as vol from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import ( CONF_BASE, CONF_DEVICES, @@ -107,12 +108,10 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, data: ConfigType - ) -> config_entries.ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import existing configuration from LCN.""" # validate the imported connection parameters - if error := await validate_connection(data): + if error := await validate_connection(import_data): async_create_issue( self.hass, DOMAIN, @@ -144,17 +143,19 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): ) # check if we already have a host with the same address configured - if entry := get_config_entry(self.hass, data): + if entry := get_config_entry(self.hass, import_data): entry.source = config_entries.SOURCE_IMPORT # Cleanup entity and device registry, if we imported from configuration.yaml to # remove orphans when entities were removed from configuration - purge_entity_registry(self.hass, entry.entry_id, data) - purge_device_registry(self.hass, entry.entry_id, data) + purge_entity_registry(self.hass, entry.entry_id, import_data) + purge_device_registry(self.hass, entry.entry_id, import_data) - self.hass.config_entries.async_update_entry(entry, data=data) + self.hass.config_entries.async_update_entry(entry, data=import_data) return self.async_abort(reason="existing_configuration_updated") - return self.async_create_entry(title=f"{data[CONF_HOST]}", data=data) + return self.async_create_entry( + title=f"{import_data[CONF_HOST]}", data=import_data + ) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/mpd/config_flow.py b/homeassistant/components/mpd/config_flow.py index f37ebe5e5e8..36777a205f9 100644 --- a/homeassistant/components/mpd/config_flow.py +++ b/homeassistant/components/mpd/config_flow.py @@ -67,19 +67,17 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing 
configuration.""" - self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) client = MPDClient() client.timeout = 30 client.idletimeout = 10 try: async with timeout(35): - await client.connect(import_config[CONF_HOST], import_config[CONF_PORT]) - if CONF_PASSWORD in import_config: - await client.password(import_config[CONF_PASSWORD]) + await client.connect(import_data[CONF_HOST], import_data[CONF_PORT]) + if CONF_PASSWORD in import_data: + await client.password(import_data[CONF_PASSWORD]) with suppress(mpd.ConnectionError): client.disconnect() except ( @@ -94,10 +92,10 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="unknown") return self.async_create_entry( - title=import_config.get(CONF_NAME, "Music Player Daemon"), + title=import_data.get(CONF_NAME, "Music Player Daemon"), data={ - CONF_HOST: import_config[CONF_HOST], - CONF_PORT: import_config[CONF_PORT], - CONF_PASSWORD: import_config.get(CONF_PASSWORD), + CONF_HOST: import_data[CONF_HOST], + CONF_PORT: import_data[CONF_PORT], + CONF_PASSWORD: import_data.get(CONF_PASSWORD), }, ) diff --git a/homeassistant/components/panasonic_viera/config_flow.py b/homeassistant/components/panasonic_viera/config_flow.py index 0226fb33c9e..b00fee513a6 100644 --- a/homeassistant/components/panasonic_viera/config_flow.py +++ b/homeassistant/components/panasonic_viera/config_flow.py @@ -157,11 +157,9 @@ class PanasonicVieraConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(user_input=import_config) + return await self.async_step_user(user_input=import_data) async def async_load_data(self, config: dict[str, Any]) -> None: """Load the data.""" diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index e25ac7dde2e..df173d29f61 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -80,13 +80,11 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" - self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) - host = import_config[CONF_HOST] - port = import_config.get(CONF_PORT, 9621) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) + host = import_data[CONF_HOST] + port = import_data.get(CONF_PORT, 9621) # Connection logic is repeated here since this method will be removed in future releases russ = Russound(self.hass.loop, host, port) diff --git a/homeassistant/components/sleepiq/config_flow.py b/homeassistant/components/sleepiq/config_flow.py index 4a4813192c3..26f3672d588 100644 --- a/homeassistant/components/sleepiq/config_flow.py +++ b/homeassistant/components/sleepiq/config_flow.py @@ -28,22 +28,20 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self._reauth_entry: ConfigEntry | None = None - async def async_step_import( - self, import_config: dict[str, Any] - ) -> 
ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a SleepIQ account as a config entry. This flow is triggered by 'async_setup' for configured accounts. """ - await self.async_set_unique_id(import_config[CONF_USERNAME].lower()) + await self.async_set_unique_id(import_data[CONF_USERNAME].lower()) self._abort_if_unique_id_configured() - if error := await try_connection(self.hass, import_config): + if error := await try_connection(self.hass, import_data): _LOGGER.error("Could not authenticate with SleepIQ server: %s", error) return self.async_abort(reason=error) return self.async_create_entry( - title=import_config[CONF_USERNAME], data=import_config + title=import_data[CONF_USERNAME], data=import_data ) async def async_step_user( diff --git a/homeassistant/components/tile/config_flow.py b/homeassistant/components/tile/config_flow.py index 108d9b1b300..53425958341 100644 --- a/homeassistant/components/tile/config_flow.py +++ b/homeassistant/components/tile/config_flow.py @@ -71,11 +71,9 @@ class TileFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=self._username, data=data) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/traccar_server/config_flow.py b/homeassistant/components/traccar_server/config_flow.py index 45a43c08685..a4d109030ae 100644 --- a/homeassistant/components/traccar_server/config_flow.py +++ b/homeassistant/components/traccar_server/config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Mapping from typing import Any from pytraccar import ApiClient, ServerModel, TraccarException @@ -161,36 +160,34 @@ class TraccarServerConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_info: Mapping[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import an entry.""" - configured_port = str(import_info[CONF_PORT]) + configured_port = str(import_data[CONF_PORT]) self._async_abort_entries_match( { - CONF_HOST: import_info[CONF_HOST], + CONF_HOST: import_data[CONF_HOST], CONF_PORT: configured_port, } ) - if "all_events" in (imported_events := import_info.get("event", [])): + if "all_events" in (imported_events := import_data.get("event", [])): events = list(EVENTS.values()) else: events = imported_events return self.async_create_entry( - title=f"{import_info[CONF_HOST]}:{configured_port}", + title=f"{import_data[CONF_HOST]}:{configured_port}", data={ - CONF_HOST: import_info[CONF_HOST], + CONF_HOST: import_data[CONF_HOST], CONF_PORT: configured_port, - CONF_SSL: import_info.get(CONF_SSL, False), - CONF_VERIFY_SSL: import_info.get(CONF_VERIFY_SSL, True), - CONF_USERNAME: import_info[CONF_USERNAME], - CONF_PASSWORD: import_info[CONF_PASSWORD], + CONF_SSL: import_data.get(CONF_SSL, False), + CONF_VERIFY_SSL: import_data.get(CONF_VERIFY_SSL, True), + CONF_USERNAME: import_data[CONF_USERNAME], + CONF_PASSWORD: import_data[CONF_PASSWORD], }, options={ - CONF_MAX_ACCURACY: import_info[CONF_MAX_ACCURACY], + CONF_MAX_ACCURACY: import_data[CONF_MAX_ACCURACY], CONF_EVENTS: 
events, - CONF_CUSTOM_ATTRIBUTES: import_info.get("monitored_conditions", []), - CONF_SKIP_ACCURACY_FILTER_FOR: import_info.get( + CONF_CUSTOM_ATTRIBUTES: import_data.get("monitored_conditions", []), + CONF_SKIP_ACCURACY_FILTER_FOR: import_data.get( "skip_accuracy_filter_on", [] ), }, diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index d8b99595f54..c8f1aaa21cb 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -285,9 +285,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" # Check if new config entry matches any existing config entries for entry in self._async_current_entries(): @@ -296,28 +294,28 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): continue if await self.hass.async_add_executor_job( - _host_is_same, entry.data[CONF_HOST], import_config[CONF_HOST] + _host_is_same, entry.data[CONF_HOST], import_data[CONF_HOST] ): updated_options: dict[str, Any] = {} updated_data: dict[str, Any] = {} remove_apps = False - if entry.data[CONF_HOST] != import_config[CONF_HOST]: - updated_data[CONF_HOST] = import_config[CONF_HOST] + if entry.data[CONF_HOST] != import_data[CONF_HOST]: + updated_data[CONF_HOST] = import_data[CONF_HOST] - if entry.data[CONF_NAME] != import_config[CONF_NAME]: - updated_data[CONF_NAME] = import_config[CONF_NAME] + if entry.data[CONF_NAME] != import_data[CONF_NAME]: + updated_data[CONF_NAME] = import_data[CONF_NAME] # Update entry.data[CONF_APPS] if import_config[CONF_APPS] differs, and # pop entry.data[CONF_APPS] if import_config[CONF_APPS] is not specified - if entry.data.get(CONF_APPS) != import_config.get(CONF_APPS): - if not import_config.get(CONF_APPS): + if entry.data.get(CONF_APPS) != import_data.get(CONF_APPS): + if not import_data.get(CONF_APPS): remove_apps = True else: - updated_options[CONF_APPS] = import_config[CONF_APPS] + updated_options[CONF_APPS] = import_data[CONF_APPS] - if entry.data.get(CONF_VOLUME_STEP) != import_config[CONF_VOLUME_STEP]: - updated_options[CONF_VOLUME_STEP] = import_config[CONF_VOLUME_STEP] + if entry.data.get(CONF_VOLUME_STEP) != import_data[CONF_VOLUME_STEP]: + updated_options[CONF_VOLUME_STEP] = import_data[CONF_VOLUME_STEP] if updated_options or updated_data or remove_apps: new_data = entry.data.copy() @@ -345,9 +343,9 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): self._must_show_form = True # Store config key/value pairs that are not configurable in user step so they # don't get lost on user step - if import_config.get(CONF_APPS): - self._apps = copy.deepcopy(import_config[CONF_APPS]) - return await self.async_step_user(user_input=import_config) + if import_data.get(CONF_APPS): + self._apps = copy.deepcopy(import_data[CONF_APPS]) + return await self.async_step_user(user_input=import_data) async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo From 37e2839fa3e2f7653d1d86bde068c2a3aec2e2f4 Mon Sep 17 00:00:00 2001 From: Matrix Date: Tue, 27 Aug 2024 18:03:50 +0800 Subject: [PATCH 1101/1635] Add power entity for yolink plug (#124678) * Add Plug power entity * change state class to total * Add translations and moving icon to icon.json * sort translation key * Fix 
suggestions --- homeassistant/components/yolink/const.py | 4 +++ homeassistant/components/yolink/icons.json | 3 ++ homeassistant/components/yolink/sensor.py | 34 +++++++++++++++++++- homeassistant/components/yolink/strings.json | 6 ++++ 4 files changed, 46 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 686160d9248..6fe1da0d20b 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -23,3 +23,7 @@ DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" +DEV_MODEL_PLUG_YS6602_UC = "YS6602-UC" +DEV_MODEL_PLUG_YS6602_EC = "YS6602-EC" +DEV_MODEL_PLUG_YS6803_UC = "YS6803-UC" +DEV_MODEL_PLUG_YS6803_EC = "YS6803-EC" diff --git a/homeassistant/components/yolink/icons.json b/homeassistant/components/yolink/icons.json index ee9037c864a..1158c9bd8f2 100644 --- a/homeassistant/components/yolink/icons.json +++ b/homeassistant/components/yolink/icons.json @@ -17,6 +17,9 @@ }, "power_failure_alarm_beep": { "default": "mdi:bullhorn" + }, + "water_meter_reading": { + "default": "mdi:gauge" } }, "switch": { diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 77bbccb2f6a..b8f2a77516c 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -40,7 +40,9 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfEnergy, UnitOfLength, + UnitOfPower, UnitOfTemperature, UnitOfVolume, ) @@ -49,6 +51,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage from .const import ( + DEV_MODEL_PLUG_YS6602_EC, + DEV_MODEL_PLUG_YS6602_UC, + DEV_MODEL_PLUG_YS6803_EC, + DEV_MODEL_PLUG_YS6803_UC, DEV_MODEL_TH_SENSOR_YS8004_EC, DEV_MODEL_TH_SENSOR_YS8004_UC, DEV_MODEL_TH_SENSOR_YS8014_EC, @@ -125,6 +131,13 @@ NONE_HUMIDITY_SENSOR_MODELS = [ DEV_MODEL_TH_SENSOR_YS8017_EC, ] +POWER_SUPPORT_MODELS = [ + DEV_MODEL_PLUG_YS6602_UC, + DEV_MODEL_PLUG_YS6602_EC, + DEV_MODEL_PLUG_YS6803_UC, + DEV_MODEL_PLUG_YS6803_EC, +] + def cvt_battery(val: int | None) -> int | None: """Convert battery to percentage.""" @@ -228,13 +241,32 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] 
= ( key="meter_reading", translation_key="water_meter_reading", device_class=SensorDeviceClass.WATER, - icon="mdi:gauge", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, state_class=SensorStateClass.TOTAL_INCREASING, should_update_entity=lambda value: value is not None, exists_fn=lambda device: device.device_type in ATTR_DEVICE_WATER_METER_CONTROLLER, ), + YoLinkSensorEntityDescription( + key="power", + translation_key="current_power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + should_update_entity=lambda value: value is not None, + exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, + value=lambda value: value / 10 if value is not None else None, + ), + YoLinkSensorEntityDescription( + key="watt", + translation_key="power_consumption", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + state_class=SensorStateClass.TOTAL, + should_update_entity=lambda value: value is not None, + exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, + value=lambda value: value / 100 if value is not None else None, + ), ) diff --git a/homeassistant/components/yolink/strings.json b/homeassistant/components/yolink/strings.json index bc8fb435e76..cefc7737a79 100644 --- a/homeassistant/components/yolink/strings.json +++ b/homeassistant/components/yolink/strings.json @@ -51,6 +51,12 @@ "plug_4": { "name": "Plug 4" } }, "sensor": { + "current_power": { + "name": "Current power" + }, + "power_consumption": { + "name": "Power consumption" + }, "power_failure_alarm": { "name": "Power failure alarm", "state": { From 9119884e530db0688e53cd98563a0751f7621240 Mon Sep 17 00:00:00 2001 From: Jon Seager Date: Tue, 27 Aug 2024 12:15:31 +0100 Subject: [PATCH 1102/1635] Add touchlinesl integration (#124557) * touchlinesl: init integration * integration(touchlinesl): address review feedback * integration(touchlinesl): address review feedback * integration(touchlinesl): add a coordinator to manage data updates * integration(touchlinesl): address review feedback * integration(touchline_sl): address feedback (and rename) * integration(touchline_sl): address feedback * integration(touchline_sl): address feedback * integration(touchline_sl): update strings * integration(touchline_sl): address feedback * integration(touchline_sl): address feedback --- CODEOWNERS | 2 + homeassistant/brands/roth.json | 5 + .../components/touchline_sl/__init__.py | 63 +++++++++ .../components/touchline_sl/climate.py | 126 ++++++++++++++++++ .../components/touchline_sl/config_flow.py | 62 +++++++++ .../components/touchline_sl/const.py | 3 + .../components/touchline_sl/coordinator.py | 59 ++++++++ .../components/touchline_sl/manifest.json | 10 ++ .../components/touchline_sl/strings.json | 36 +++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 23 +++- requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/touchline_sl/__init__.py | 1 + tests/components/touchline_sl/conftest.py | 61 +++++++++ .../touchline_sl/test_config_flow.py | 113 ++++++++++++++++ 16 files changed, 565 insertions(+), 6 deletions(-) create mode 100644 homeassistant/brands/roth.json create mode 100644 homeassistant/components/touchline_sl/__init__.py create mode 100644 homeassistant/components/touchline_sl/climate.py create mode 100644 homeassistant/components/touchline_sl/config_flow.py create mode 100644 
homeassistant/components/touchline_sl/const.py create mode 100644 homeassistant/components/touchline_sl/coordinator.py create mode 100644 homeassistant/components/touchline_sl/manifest.json create mode 100644 homeassistant/components/touchline_sl/strings.json create mode 100644 tests/components/touchline_sl/__init__.py create mode 100644 tests/components/touchline_sl/conftest.py create mode 100644 tests/components/touchline_sl/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index 1618b18a8be..20e0b3adced 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1493,6 +1493,8 @@ build.json @home-assistant/supervisor /tests/components/tomorrowio/ @raman325 @lymanepp /homeassistant/components/totalconnect/ @austinmroczek /tests/components/totalconnect/ @austinmroczek +/homeassistant/components/touchline_sl/ @jnsgruk +/tests/components/touchline_sl/ @jnsgruk /homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696 /tests/components/tplink/ @rytilahti @bdraco @sdb9696 /homeassistant/components/tplink_omada/ @MarkGodwin diff --git a/homeassistant/brands/roth.json b/homeassistant/brands/roth.json new file mode 100644 index 00000000000..21542b5b641 --- /dev/null +++ b/homeassistant/brands/roth.json @@ -0,0 +1,5 @@ +{ + "domain": "roth", + "name": "Roth", + "integrations": ["touchline", "touchline_sl"] +} diff --git a/homeassistant/components/touchline_sl/__init__.py b/homeassistant/components/touchline_sl/__init__.py new file mode 100644 index 00000000000..45a85185673 --- /dev/null +++ b/homeassistant/components/touchline_sl/__init__.py @@ -0,0 +1,63 @@ +"""The Roth Touchline SL integration.""" + +from __future__ import annotations + +import asyncio + +from pytouchlinesl import TouchlineSL + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + +PLATFORMS: list[Platform] = [Platform.CLIMATE] + +type TouchlineSLConfigEntry = ConfigEntry[list[TouchlineSLModuleCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: TouchlineSLConfigEntry) -> bool: + """Set up Roth Touchline SL from a config entry.""" + account = TouchlineSL( + username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD] + ) + + coordinators: list[TouchlineSLModuleCoordinator] = [ + TouchlineSLModuleCoordinator(hass, module) for module in await account.modules() + ] + + await asyncio.gather( + *[ + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ] + ) + + device_registry = dr.async_get(hass) + + # Create a new Device for each coorodinator to represent each module + for c in coordinators: + module = c.data.module + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, module.id)}, + name=module.name, + manufacturer="Roth", + model=module.type, + sw_version=module.version, + ) + + entry.runtime_data = coordinators + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: TouchlineSLConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/touchline_sl/climate.py b/homeassistant/components/touchline_sl/climate.py new file mode 100644 index 00000000000..93328823749 --- 
/dev/null +++ b/homeassistant/components/touchline_sl/climate.py @@ -0,0 +1,126 @@ +"""Roth Touchline SL climate integration implementation for Home Assistant.""" + +from typing import Any + +from pytouchlinesl import Zone + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import TouchlineSLConfigEntry +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TouchlineSLConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Touchline devices.""" + coordinators = entry.runtime_data + async_add_entities( + TouchlineSLZone(coordinator=coordinator, zone_id=zone_id) + for coordinator in coordinators + for zone_id in coordinator.data.zones + ) + + +CONSTANT_TEMPERATURE = "constant_temperature" + + +class TouchlineSLZone(CoordinatorEntity[TouchlineSLModuleCoordinator], ClimateEntity): + """Roth Touchline SL Zone.""" + + _attr_has_entity_name = True + _attr_hvac_mode = HVACMode.HEAT + _attr_hvac_modes = [HVACMode.HEAT] + _attr_name = None + _attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "zone" + + def __init__(self, coordinator: TouchlineSLModuleCoordinator, zone_id: int) -> None: + """Construct a Touchline SL climate zone.""" + super().__init__(coordinator) + self.zone_id: int = zone_id + + self._attr_unique_id = ( + f"module-{self.coordinator.data.module.id}-zone-{self.zone_id}" + ) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(zone_id))}, + name=self.zone.name, + manufacturer="Roth", + via_device=(DOMAIN, coordinator.data.module.id), + model="zone", + suggested_area=self.zone.name, + ) + + # Call this in __init__ so data is populated right away, since it's + # already available in the coordinator data. 
+ self.set_attr() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self.set_attr() + super()._handle_coordinator_update() + + @property + def zone(self) -> Zone: + """Return the device object from the coordinator data.""" + return self.coordinator.data.zones[self.zone_id] + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.zone_id in self.coordinator.data.zones + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: + return + + await self.zone.set_temperature(temperature) + await self.coordinator.async_request_refresh() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Assign the zone to a particular global schedule.""" + if not self.zone: + return + + if preset_mode == CONSTANT_TEMPERATURE and self._attr_target_temperature: + await self.zone.set_temperature(temperature=self._attr_target_temperature) + await self.coordinator.async_request_refresh() + return + + if schedule := self.coordinator.data.schedules[preset_mode]: + await self.zone.set_schedule(schedule_id=schedule.id) + await self.coordinator.async_request_refresh() + + def set_attr(self) -> None: + """Populate attributes with data from the coordinator.""" + schedule_names = self.coordinator.data.schedules.keys() + + self._attr_current_temperature = self.zone.temperature + self._attr_target_temperature = self.zone.target_temperature + self._attr_current_humidity = int(self.zone.humidity) + self._attr_preset_modes = [*schedule_names, CONSTANT_TEMPERATURE] + + if self.zone.mode == "constantTemp": + self._attr_preset_mode = CONSTANT_TEMPERATURE + elif self.zone.mode == "globalSchedule": + schedule = self.zone.schedule + self._attr_preset_mode = schedule.name diff --git a/homeassistant/components/touchline_sl/config_flow.py b/homeassistant/components/touchline_sl/config_flow.py new file mode 100644 index 00000000000..91d959b5a0a --- /dev/null +++ b/homeassistant/components/touchline_sl/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Roth Touchline SL integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pytouchlinesl import TouchlineSL +from pytouchlinesl.client import RothAPIError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class TouchlineSLConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Roth Touchline SL.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step that gathers username and password.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + account = TouchlineSL( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + await account.user_id() + except RothAPIError as e: + if e.status == 401: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + unique_account_id = await account.user_id() + await 
self.async_set_unique_id(str(unique_account_id)) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=user_input[CONF_USERNAME], data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/touchline_sl/const.py b/homeassistant/components/touchline_sl/const.py new file mode 100644 index 00000000000..e441e3721b3 --- /dev/null +++ b/homeassistant/components/touchline_sl/const.py @@ -0,0 +1,3 @@ +"""Constants for the Roth Touchline SL integration.""" + +DOMAIN = "touchline_sl" diff --git a/homeassistant/components/touchline_sl/coordinator.py b/homeassistant/components/touchline_sl/coordinator.py new file mode 100644 index 00000000000..cd74ba6130f --- /dev/null +++ b/homeassistant/components/touchline_sl/coordinator.py @@ -0,0 +1,59 @@ +"""Define an object to manage fetching Touchline SL data.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from pytouchlinesl import Module, Zone +from pytouchlinesl.client import RothAPIError +from pytouchlinesl.client.models import GlobalScheduleModel + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class TouchlineSLModuleData: + """Provide type safe way of accessing module data from the coordinator.""" + + module: Module + zones: dict[int, Zone] + schedules: dict[str, GlobalScheduleModel] + + +class TouchlineSLModuleCoordinator(DataUpdateCoordinator[TouchlineSLModuleData]): + """A coordinator to manage the fetching of Touchline SL data.""" + + def __init__(self, hass: HomeAssistant, module: Module) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + logger=_LOGGER, + name=f"Touchline SL ({module.name})", + update_interval=timedelta(seconds=30), + ) + + self.module = module + + async def _async_update_data(self) -> TouchlineSLModuleData: + """Fetch data from the upstream API and pre-process into the right format.""" + try: + zones = await self.module.zones() + schedules = await self.module.schedules() + except RothAPIError as error: + if error.status == 401: + # Trigger a reauthentication if the data update fails due to + # bad authentication. 
+ raise ConfigEntryAuthFailed from error + raise UpdateFailed(error) from error + + return TouchlineSLModuleData( + module=self.module, + zones={z.id: z for z in zones}, + schedules={s.name: s for s in schedules}, + ) diff --git a/homeassistant/components/touchline_sl/manifest.json b/homeassistant/components/touchline_sl/manifest.json new file mode 100644 index 00000000000..8a50b06d613 --- /dev/null +++ b/homeassistant/components/touchline_sl/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "touchline_sl", + "name": "Roth Touchline SL", + "codeowners": ["@jnsgruk"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/touchline_sl", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["pytouchlinesl==0.1.5"] +} diff --git a/homeassistant/components/touchline_sl/strings.json b/homeassistant/components/touchline_sl/strings.json new file mode 100644 index 00000000000..e3a0ef5a741 --- /dev/null +++ b/homeassistant/components/touchline_sl/strings.json @@ -0,0 +1,36 @@ +{ + "config": { + "flow_title": "Touchline SL Setup Flow", + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "user": { + "title": "Login to Touchline SL", + "description": "Your credentials for the Roth Touchline SL mobile app/web service", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "climate": { + "zone": { + "state_attributes": { + "preset_mode": { + "state": { + "constant_temperature": "Constant temperature" + } + } + } + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 5e6d29f29f9..acf0439e842 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -594,6 +594,7 @@ FLOWS = { "tomorrowio", "toon", "totalconnect", + "touchline_sl", "tplink", "tplink_omada", "traccar", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 52215d232ad..d6ce9e0a3aa 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5134,6 +5134,23 @@ "config_flow": true, "iot_class": "local_push" }, + "roth": { + "name": "Roth", + "integrations": { + "touchline": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Roth Touchline" + }, + "touchline_sl": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Roth Touchline SL" + } + } + }, "rova": { "name": "ROVA", "integration_type": "hub", @@ -6297,12 +6314,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "touchline": { - "name": "Roth Touchline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "tplink": { "name": "TP-Link", "integrations": { diff --git a/requirements_all.txt b/requirements_all.txt index f8d7b908ec3..6a833b546bd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2385,6 +2385,9 @@ pytomorrowio==0.3.6 # homeassistant.components.touchline pytouchline==0.7 +# homeassistant.components.touchline_sl +pytouchlinesl==0.1.5 + # homeassistant.components.traccar # 
homeassistant.components.traccar_server pytraccar==2.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0307433e514..93005222ffd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1888,6 +1888,9 @@ pytile==2023.12.0 # homeassistant.components.tomorrowio pytomorrowio==0.3.6 +# homeassistant.components.touchline_sl +pytouchlinesl==0.1.5 + # homeassistant.components.traccar # homeassistant.components.traccar_server pytraccar==2.1.1 diff --git a/tests/components/touchline_sl/__init__.py b/tests/components/touchline_sl/__init__.py new file mode 100644 index 00000000000..c22e9d329db --- /dev/null +++ b/tests/components/touchline_sl/__init__.py @@ -0,0 +1 @@ +"""Tests for the Roth Touchline SL integration.""" diff --git a/tests/components/touchline_sl/conftest.py b/tests/components/touchline_sl/conftest.py new file mode 100644 index 00000000000..4edeb048f5b --- /dev/null +++ b/tests/components/touchline_sl/conftest.py @@ -0,0 +1,61 @@ +"""Common fixtures for the Roth Touchline SL tests.""" + +from collections.abc import Generator +from typing import NamedTuple +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.touchline_sl.const import DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + + +class FakeModule(NamedTuple): + """Fake Module used for unit testing only.""" + + name: str + id: str + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.touchline_sl.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_touchlinesl_client() -> Generator[AsyncMock]: + """Mock a pytouchlinesl client.""" + with ( + patch( + "homeassistant.components.touchline_sl.TouchlineSL", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.touchline_sl.config_flow.TouchlineSL", + new=mock_client, + ), + ): + client = mock_client.return_value + client.user_id.return_value = 12345 + client.modules.return_value = [FakeModule(name="Foobar", id="deadbeef")] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="TouchlineSL", + data={ + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + }, + unique_id="12345", + ) diff --git a/tests/components/touchline_sl/test_config_flow.py b/tests/components/touchline_sl/test_config_flow.py new file mode 100644 index 00000000000..992fa2bdb3e --- /dev/null +++ b/tests/components/touchline_sl/test_config_flow.py @@ -0,0 +1,113 @@ +"""Test the Roth Touchline SL config flow.""" + +from unittest.mock import AsyncMock + +import pytest +from pytouchlinesl.client import RothAPIError + +from homeassistant.components.touchline_sl.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +RESULT_UNIQUE_ID = "12345" + +CONFIG_DATA = { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", +} + + +async def test_config_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_touchlinesl_client: AsyncMock +) -> None: + """Test the happy path where the provided username/password result in a new entry.""" + 
result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == CONFIG_DATA + assert result["result"].unique_id == RESULT_UNIQUE_ID + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error_base"), + [ + (RothAPIError(status=401), "invalid_auth"), + (RothAPIError(status=502), "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_config_flow_failure_api_exceptions( + hass: HomeAssistant, + exception: Exception, + error_base: str, + mock_setup_entry: AsyncMock, + mock_touchlinesl_client: AsyncMock, +) -> None: + """Test for invalid credentials or API connection errors, and that the form can recover.""" + mock_touchlinesl_client.user_id.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_base} + + # "Fix" the problem, and try again. + mock_touchlinesl_client.user_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == CONFIG_DATA + assert result["result"].unique_id == RESULT_UNIQUE_ID + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_failure_adding_non_unique_account( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_touchlinesl_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the config flow fails when user tries to add duplicate accounts.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From a45ba51f890a86367f639e82a785d0ce76dcd503 Mon Sep 17 00:00:00 2001 From: Matrix Date: Tue, 27 Aug 2024 19:17:14 +0800 Subject: [PATCH 1103/1635] Update YoLink FlexFob Automation (#123631) * Change FlexFob Trigger * Update device trigger tests --- homeassistant/components/yolink/const.py | 4 +++ .../components/yolink/device_trigger.py | 31 ++++++++++++++----- homeassistant/components/yolink/entity.py | 1 + .../components/yolink/test_device_trigger.py | 4 +++ 4 files changed, 33 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 6fe1da0d20b..217dd66d063 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -23,6 +23,10 @@ DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" 
DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" +DEV_MODEL_FLEX_FOB_YS3604_UC = "YS3604-UC" +DEV_MODEL_FLEX_FOB_YS3604_EC = "YS3604-EC" +DEV_MODEL_FLEX_FOB_YS3614_UC = "YS3614-UC" +DEV_MODEL_FLEX_FOB_YS3614_EC = "YS3614-EC" DEV_MODEL_PLUG_YS6602_UC = "YS6602-UC" DEV_MODEL_PLUG_YS6602_EC = "YS6602-EC" DEV_MODEL_PLUG_YS6803_UC = "YS6803-UC" diff --git a/homeassistant/components/yolink/device_trigger.py b/homeassistant/components/yolink/device_trigger.py index b7f83623be5..6e247bf858e 100644 --- a/homeassistant/components/yolink/device_trigger.py +++ b/homeassistant/components/yolink/device_trigger.py @@ -16,6 +16,12 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType from . import DOMAIN, YOLINK_EVENT +from .const import ( + DEV_MODEL_FLEX_FOB_YS3604_EC, + DEV_MODEL_FLEX_FOB_YS3604_UC, + DEV_MODEL_FLEX_FOB_YS3614_EC, + DEV_MODEL_FLEX_FOB_YS3614_UC, +) CONF_BUTTON_1 = "button_1" CONF_BUTTON_2 = "button_2" @@ -24,7 +30,7 @@ CONF_BUTTON_4 = "button_4" CONF_SHORT_PRESS = "short_press" CONF_LONG_PRESS = "long_press" -REMOTE_TRIGGER_TYPES = { +FLEX_FOB_4_BUTTONS = { f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", @@ -35,14 +41,24 @@ REMOTE_TRIGGER_TYPES = { f"{CONF_BUTTON_4}_{CONF_LONG_PRESS}", } +FLEX_FOB_2_BUTTONS = { + f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", + f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", + f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", + f"{CONF_BUTTON_2}_{CONF_LONG_PRESS}", +} + TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( - {vol.Required(CONF_TYPE): vol.In(REMOTE_TRIGGER_TYPES)} + {vol.Required(CONF_TYPE): vol.In(FLEX_FOB_4_BUTTONS)} ) -# YoLink Remotes YS3604/YS3605/YS3606/YS3607 -DEVICE_TRIGGER_TYPES: dict[str, set[str]] = { - ATTR_DEVICE_SMART_REMOTER: REMOTE_TRIGGER_TYPES, +# YoLink Remotes YS3604/YS3614 +FLEX_FOB_TRIGGER_TYPES: dict[str, set[str]] = { + DEV_MODEL_FLEX_FOB_YS3604_EC: FLEX_FOB_4_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3604_UC: FLEX_FOB_4_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3614_UC: FLEX_FOB_2_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3614_EC: FLEX_FOB_2_BUTTONS, } @@ -54,7 +70,8 @@ async def async_get_triggers( registry_device = device_registry.async_get(device_id) if not registry_device or registry_device.model != ATTR_DEVICE_SMART_REMOTER: return [] - + if registry_device.model_id not in list(FLEX_FOB_TRIGGER_TYPES.keys()): + return [] return [ { CONF_DEVICE_ID: device_id, @@ -62,7 +79,7 @@ async def async_get_triggers( CONF_PLATFORM: "device", CONF_TYPE: trigger, } - for trigger in DEVICE_TRIGGER_TYPES[ATTR_DEVICE_SMART_REMOTER] + for trigger in FLEX_FOB_TRIGGER_TYPES[registry_device.model_id] ] diff --git a/homeassistant/components/yolink/entity.py b/homeassistant/components/yolink/entity.py index d9ca2968493..0f500b72404 100644 --- a/homeassistant/components/yolink/entity.py +++ b/homeassistant/components/yolink/entity.py @@ -55,6 +55,7 @@ class YoLinkEntity(CoordinatorEntity[YoLinkCoordinator]): identifiers={(DOMAIN, self.coordinator.device.device_id)}, manufacturer=MANUFACTURER, model=self.coordinator.device.device_type, + model_id=self.coordinator.device.device_model_name, name=self.coordinator.device.device_name, ) diff --git a/tests/components/yolink/test_device_trigger.py b/tests/components/yolink/test_device_trigger.py index 6b48b32fd62..c1d3a8acda8 100644 --- a/tests/components/yolink/test_device_trigger.py +++ b/tests/components/yolink/test_device_trigger.py @@ -6,6 +6,7 @@ from yolink.const 
import ATTR_DEVICE_DIMMER, ATTR_DEVICE_SMART_REMOTER from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.yolink import DOMAIN, YOLINK_EVENT +from homeassistant.components.yolink.const import DEV_MODEL_FLEX_FOB_YS3604_UC from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -23,6 +24,7 @@ async def test_get_triggers( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_SMART_REMOTER, + model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) expected_triggers = [ @@ -99,6 +101,7 @@ async def test_get_triggers_exception( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_DIMMER, + model_id=None, ) expected_triggers = [] @@ -123,6 +126,7 @@ async def test_if_fires_on_event( connections={connection}, identifiers={(DOMAIN, mac_address)}, model=ATTR_DEVICE_SMART_REMOTER, + model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) assert await async_setup_component( From 48292beec876fb0f0281e7233dc37a5339dde8b1 Mon Sep 17 00:00:00 2001 From: functionpointer Date: Tue, 27 Aug 2024 13:19:15 +0200 Subject: [PATCH 1104/1635] Update pyTibber to 0.30.1 (#124407) Update to pyTibber==0.30.1 --- homeassistant/components/tibber/__init__.py | 6 +++--- homeassistant/components/tibber/config_flow.py | 6 +++--- homeassistant/components/tibber/coordinator.py | 4 ++-- homeassistant/components/tibber/manifest.json | 2 +- homeassistant/components/tibber/sensor.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 1 - tests/components/tibber/test_config_flow.py | 12 ++++++++---- 9 files changed, 20 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py index 7c44e797780..3d63098b2e9 100644 --- a/homeassistant/components/tibber/__init__.py +++ b/homeassistant/components/tibber/__init__.py @@ -61,13 +61,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ( TimeoutError, aiohttp.ClientError, - tibber.RetryableHttpException, + tibber.RetryableHttpExceptionError, ) as err: raise ConfigEntryNotReady("Unable to connect") from err - except tibber.InvalidLogin as exp: + except tibber.InvalidLoginError as exp: _LOGGER.error("Failed to login. 
%s", exp) return False - except tibber.FatalHttpException: + except tibber.FatalHttpExceptionError: return False await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/tibber/config_flow.py b/homeassistant/components/tibber/config_flow.py index abee3ea50bc..2d4df5107a2 100644 --- a/homeassistant/components/tibber/config_flow.py +++ b/homeassistant/components/tibber/config_flow.py @@ -47,12 +47,12 @@ class TibberConfigFlow(ConfigFlow, domain=DOMAIN): await tibber_connection.update_info() except TimeoutError: errors[CONF_ACCESS_TOKEN] = ERR_TIMEOUT - except tibber.InvalidLogin: + except tibber.InvalidLoginError: errors[CONF_ACCESS_TOKEN] = ERR_TOKEN except ( aiohttp.ClientError, - tibber.RetryableHttpException, - tibber.FatalHttpException, + tibber.RetryableHttpExceptionError, + tibber.FatalHttpExceptionError, ): errors[CONF_ACCESS_TOKEN] = ERR_CLIENT diff --git a/homeassistant/components/tibber/coordinator.py b/homeassistant/components/tibber/coordinator.py index c3746cb9a58..78841f9db91 100644 --- a/homeassistant/components/tibber/coordinator.py +++ b/homeassistant/components/tibber/coordinator.py @@ -49,9 +49,9 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): await self._tibber_connection.fetch_consumption_data_active_homes() await self._tibber_connection.fetch_production_data_active_homes() await self._insert_statistics() - except tibber.RetryableHttpException as err: + except tibber.RetryableHttpExceptionError as err: raise UpdateFailed(f"Error communicating with API ({err.status})") from err - except tibber.FatalHttpException: + except tibber.FatalHttpExceptionError: # Fatal error. Reload config entry to show correct error. self.hass.async_create_task( self.hass.config_entries.async_reload(self.config_entry.entry_id) diff --git a/homeassistant/components/tibber/manifest.json b/homeassistant/components/tibber/manifest.json index 1d8120a4321..527364b6866 100644 --- a/homeassistant/components/tibber/manifest.json +++ b/homeassistant/components/tibber/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["tibber"], "quality_scale": "silver", - "requirements": ["pyTibber==0.28.2"] + "requirements": ["pyTibber==0.30.1"] } diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index a9090add49b..d6f8baa931c 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -411,7 +411,7 @@ class TibberSensorElPrice(TibberSensor): return res = self._tibber_home.current_price_data() - self._attr_native_value, price_level, self._last_updated = res + self._attr_native_value, price_level, self._last_updated, _ = res self._attr_extra_state_attributes["price_level"] = price_level attrs = self._tibber_home.current_attributes() diff --git a/requirements_all.txt b/requirements_all.txt index 6a833b546bd..370012e3e15 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1707,7 +1707,7 @@ pyRFXtrx==0.31.1 pySDCP==1 # homeassistant.components.tibber -pyTibber==0.28.2 +pyTibber==0.30.1 # homeassistant.components.dlink pyW215==0.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 93005222ffd..fd934a4f929 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1381,7 +1381,7 @@ pyElectra==1.2.4 pyRFXtrx==0.31.1 # homeassistant.components.tibber -pyTibber==0.28.2 +pyTibber==0.30.1 # homeassistant.components.dlink pyW215==0.7.0 diff --git a/script/licenses.py b/script/licenses.py index 
7bcbfb6179a..ac9a836396c 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -154,7 +154,6 @@ EXCEPTIONS = { "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 "pigpio", # https://github.com/joan2937/pigpio/pull/608 "pymitv", # MIT - "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 diff --git a/tests/components/tibber/test_config_flow.py b/tests/components/tibber/test_config_flow.py index 28b590a29d2..0c12c4a247b 100644 --- a/tests/components/tibber/test_config_flow.py +++ b/tests/components/tibber/test_config_flow.py @@ -5,7 +5,11 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aiohttp import ClientError import pytest -from tibber import FatalHttpException, InvalidLogin, RetryableHttpException +from tibber import ( + FatalHttpExceptionError, + InvalidLoginError, + RetryableHttpExceptionError, +) from homeassistant import config_entries from homeassistant.components.recorder import Recorder @@ -66,9 +70,9 @@ async def test_create_entry(recorder_mock: Recorder, hass: HomeAssistant) -> Non [ (TimeoutError, ERR_TIMEOUT), (ClientError, ERR_CLIENT), - (InvalidLogin(401), ERR_TOKEN), - (RetryableHttpException(503), ERR_CLIENT), - (FatalHttpException(404), ERR_CLIENT), + (InvalidLoginError(401), ERR_TOKEN), + (RetryableHttpExceptionError(503), ERR_CLIENT), + (FatalHttpExceptionError(404), ERR_CLIENT), ], ) async def test_create_entry_exceptions( From 9cae786f40c9deec0058abf6a3574cb01e7d778b Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 14:20:57 +0200 Subject: [PATCH 1105/1635] Add ConfigEntries.async_get_loaded (#124705) --- homeassistant/config_entries.py | 10 ++++++++ tests/test_config_entries.py | 41 +++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index e48313cab33..f3b0aa03383 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -1742,6 +1742,16 @@ class ConfigEntries: and (include_disabled or not entry.disabled_by) ] + @callback + def async_loaded_entries(self, domain: str) -> list[ConfigEntry]: + """Return loaded entries for a specific domain. + + This will exclude ignored or disabled config entruis. 
+ """ + entries = self._entries.get_entries_for_domain(domain) + + return [entry for entry in entries if entry.state == ConfigEntryState.LOADED] + @callback def async_entry_for_domain_unique_id( self, domain: str, unique_id: str diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index dccebff13e5..3042ccb28d9 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -6001,3 +6001,44 @@ async def test_migration_from_1_2( ] }, } + + +async def test_async_loaded_entries( + hass: HomeAssistant, manager: config_entries.ConfigEntries +) -> None: + """Test that we can get loaded config entries.""" + entry1 = MockConfigEntry(domain="comp") + entry1.add_to_hass(hass) + entry2 = MockConfigEntry(domain="comp", source=config_entries.SOURCE_IGNORE) + entry2.add_to_hass(hass) + entry3 = MockConfigEntry( + domain="comp", disabled_by=config_entries.ConfigEntryDisabler.USER + ) + entry3.add_to_hass(hass) + + mock_setup = AsyncMock(return_value=True) + mock_setup_entry = AsyncMock(return_value=True) + mock_unload_entry = AsyncMock(return_value=True) + + mock_integration( + hass, + MockModule( + "comp", + async_setup=mock_setup, + async_setup_entry=mock_setup_entry, + async_unload_entry=mock_unload_entry, + ), + ) + mock_platform(hass, "comp.config_flow", None) + + assert hass.config_entries.async_loaded_entries("comp") == [] + + assert await manager.async_setup(entry1.entry_id) + assert not await manager.async_setup(entry2.entry_id) + assert not await manager.async_setup(entry3.entry_id) + + assert hass.config_entries.async_loaded_entries("comp") == [entry1] + + assert await hass.config_entries.async_unload(entry1.entry_id) + + assert hass.config_entries.async_loaded_entries("comp") == [] From 53479b592454ccf7d8ab5d60ae36e6fe06230d92 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 15:13:58 +0200 Subject: [PATCH 1106/1635] Adjust name of legacy Home Assistant Cloud TTS provider (#124685) --- homeassistant/components/cloud/tts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py index 8cf18c08314..4dbee10fbaf 100644 --- a/homeassistant/components/cloud/tts.py +++ b/homeassistant/components/cloud/tts.py @@ -221,7 +221,7 @@ class CloudProvider(Provider): def __init__(self, cloud: Cloud[CloudClient]) -> None: """Initialize cloud provider.""" self.cloud = cloud - self.name = "Cloud" + self.name = "Home Assistant Cloud" self._language, self._voice = cloud.client.prefs.tts_default_voice cloud.client.prefs.async_listen_updates(self._sync_prefs) From f802611359ec14dfcdf944bf043f15851f900ea5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 16:04:00 +0200 Subject: [PATCH 1107/1635] Cleanup unused import in somfy_mylink config flow (#124709) --- homeassistant/components/somfy_mylink/config_flow.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index 9a8b5d76d3f..231f93b0cb7 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -116,11 +116,6 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input): - """Handle import.""" - self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) - return await self.async_step_user(user_input) - @staticmethod @callback 
def async_get_options_flow( From 318259689ffdaa000ee7ff4bb1f02e135dfe8fa0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 16:18:11 +0200 Subject: [PATCH 1108/1635] Standardize import step variable name (part 4) (#124692) * Standardize import step variable name (part 4) * One more * Revert geonetnz_volcano * Revert SMS due to coverage * Revert somfy_mylink due to coverage --- homeassistant/components/broadlink/config_flow.py | 6 +++--- homeassistant/components/ecobee/config_flow.py | 2 +- .../components/emulated_roku/config_flow.py | 4 ++-- .../components/geonetnz_quakes/config_flow.py | 4 ++-- .../components/google_assistant/config_flow.py | 10 ++++++---- homeassistant/components/habitica/config_flow.py | 2 +- homeassistant/components/hlk_sw16/config_flow.py | 4 ++-- homeassistant/components/juicenet/config_flow.py | 4 ++-- homeassistant/components/kodi/config_flow.py | 10 ++++++---- homeassistant/components/konnected/config_flow.py | 14 +++++++------- .../components/lutron_caseta/config_flow.py | 10 +++++----- homeassistant/components/octoprint/config_flow.py | 4 ++-- .../components/opentherm_gw/config_flow.py | 8 ++++---- homeassistant/components/sabnzbd/config_flow.py | 2 +- homeassistant/components/spider/config_flow.py | 2 +- .../components/tellduslive/config_flow.py | 10 +++++----- 16 files changed, 50 insertions(+), 46 deletions(-) diff --git a/homeassistant/components/broadlink/config_flow.py b/homeassistant/components/broadlink/config_flow.py index 2d79ba4bea1..5d7acfd8b84 100644 --- a/homeassistant/components/broadlink/config_flow.py +++ b/homeassistant/components/broadlink/config_flow.py @@ -314,10 +314,10 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): step_id="finish", data_schema=vol.Schema(data_schema), errors=errors ) - async def async_step_import(self, import_info): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a device.""" - self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) - return await self.async_step_user(import_info) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/ecobee/config_flow.py b/homeassistant/components/ecobee/config_flow.py index e9a89e0fba5..c0d4d9b03fc 100644 --- a/homeassistant/components/ecobee/config_flow.py +++ b/homeassistant/components/ecobee/config_flow.py @@ -80,7 +80,7 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): description_placeholders={"pin": self._ecobee.pin}, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Import ecobee config from configuration.yaml. Triggered by async_setup only if a config entry doesn't already exist. 
diff --git a/homeassistant/components/emulated_roku/config_flow.py b/homeassistant/components/emulated_roku/config_flow.py index 0e5cc1ba55a..eed0298fc57 100644 --- a/homeassistant/components/emulated_roku/config_flow.py +++ b/homeassistant/components/emulated_roku/config_flow.py @@ -56,6 +56,6 @@ class EmulatedRokuFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow import.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/geonetnz_quakes/config_flow.py b/homeassistant/components/geonetnz_quakes/config_flow.py index ac5a2e8c48e..083ac29b362 100644 --- a/homeassistant/components/geonetnz_quakes/config_flow.py +++ b/homeassistant/components/geonetnz_quakes/config_flow.py @@ -45,9 +45,9 @@ class GeonetnzQuakesFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors or {} ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/google_assistant/config_flow.py b/homeassistant/components/google_assistant/config_flow.py index 9504c623138..5934657f9ae 100644 --- a/homeassistant/components/google_assistant/config_flow.py +++ b/homeassistant/components/google_assistant/config_flow.py @@ -1,6 +1,8 @@ """Config flow for google assistant component.""" -from homeassistant.config_entries import ConfigFlow +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import CONF_PROJECT_ID, DOMAIN @@ -10,10 +12,10 @@ class GoogleAssistantHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - await self.async_set_unique_id(unique_id=user_input[CONF_PROJECT_ID]) + await self.async_set_unique_id(unique_id=import_data[CONF_PROJECT_ID]) self._abort_if_unique_id_configured() return self.async_create_entry( - title=user_input[CONF_PROJECT_ID], data=user_input + title=import_data[CONF_PROJECT_ID], data=import_data ) diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index 742523751a2..a40261c0902 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -82,7 +82,7 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={}, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import habitica config from configuration.yaml.""" async_create_issue( diff --git a/homeassistant/components/hlk_sw16/config_flow.py b/homeassistant/components/hlk_sw16/config_flow.py index df2f32088cf..8dd75561af3 100644 --- a/homeassistant/components/hlk_sw16/config_flow.py +++ b/homeassistant/components/hlk_sw16/config_flow.py @@ -70,9 +70,9 @@ class SW16FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import(self, user_input): + async def 
async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/juicenet/config_flow.py b/homeassistant/components/juicenet/config_flow.py index 393e6842274..8bcee5677e6 100644 --- a/homeassistant/components/juicenet/config_flow.py +++ b/homeassistant/components/juicenet/config_flow.py @@ -69,9 +69,9 @@ class JuiceNetConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) class CannotConnect(exceptions.HomeAssistantError): diff --git a/homeassistant/components/kodi/config_flow.py b/homeassistant/components/kodi/config_flow.py index c740aeb6057..26b5214c733 100644 --- a/homeassistant/components/kodi/config_flow.py +++ b/homeassistant/components/kodi/config_flow.py @@ -226,12 +226,12 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_ws_port_form(errors) - async def async_step_import(self, data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from YAML.""" reason = None try: - await validate_http(self.hass, data) - await validate_ws(self.hass, data) + await validate_http(self.hass, import_data) + await validate_ws(self.hass, import_data) except InvalidAuth: _LOGGER.exception("Invalid Kodi credentials") reason = "invalid_auth" @@ -242,7 +242,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") reason = "unknown" else: - return self.async_create_entry(title=data[CONF_NAME], data=data) + return self.async_create_entry( + title=import_data[CONF_NAME], data=import_data + ) return self.async_abort(reason=reason) diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 6c9a542c53b..48016cd066a 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -202,24 +202,24 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): random.choices(f"{string.ascii_uppercase}{string.digits}", k=20) ) - async def async_step_import(self, device_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a configuration.yaml config. This flow is triggered by `async_setup` for configured panels. 
""" - _LOGGER.debug(device_config) + _LOGGER.debug(import_data) # save the data and confirm connection via user step - await self.async_set_unique_id(device_config["id"]) - self.options = device_config[CONF_DEFAULT_OPTIONS] + await self.async_set_unique_id(import_data["id"]) + self.options = import_data[CONF_DEFAULT_OPTIONS] # config schema ensures we have port if we have host - if device_config.get(CONF_HOST): + if import_data.get(CONF_HOST): # automatically connect if we have host info return await self.async_step_user( user_input={ - CONF_HOST: device_config[CONF_HOST], - CONF_PORT: device_config[CONF_PORT], + CONF_HOST: import_data[CONF_HOST], + CONF_PORT: import_data[CONF_PORT], } ) diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index 0458b8ee185..703fbb813c6 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -166,21 +166,21 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): for asset_key, conf_key in FILE_MAPPING.items(): self.data[conf_key] = TLS_ASSET_TEMPLATE.format(self.bridge_id, asset_key) - async def async_step_import(self, import_info): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new Caseta bridge as a config entry. This flow is triggered by `async_setup`. """ - host = import_info[CONF_HOST] + host = import_data[CONF_HOST] # Store the imported config for other steps in this flow to access. self.data[CONF_HOST] = host # Abort if existing entry with matching host exists. self._async_abort_entries_match({CONF_HOST: self.data[CONF_HOST]}) - self.data[CONF_KEYFILE] = import_info[CONF_KEYFILE] - self.data[CONF_CERTFILE] = import_info[CONF_CERTFILE] - self.data[CONF_CA_CERTS] = import_info[CONF_CA_CERTS] + self.data[CONF_KEYFILE] = import_data[CONF_KEYFILE] + self.data[CONF_CERTFILE] = import_data[CONF_CERTFILE] + self.data[CONF_CA_CERTS] = import_data[CONF_CA_CERTS] if not (lutron_id := await self.async_get_lutron_id()): # Ultimately we won't have a dedicated step for import failure, but diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 22943b85f4e..8e4257ab821 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -160,9 +160,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): """Handle api fetch failure.""" return self.async_abort(reason="auth_failed") - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 1ebf462a5c7..c1d1caa2fb0 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -92,15 +92,15 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): """Handle manual initiation of the config flow.""" return await self.async_step_init(user_input) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import an OpenTherm Gateway device as a config entry. 
This flow is triggered by `async_setup` for configured devices. """ formatted_config = { - CONF_NAME: import_config.get(CONF_NAME, import_config[CONF_ID]), - CONF_DEVICE: import_config[CONF_DEVICE], - CONF_ID: import_config[CONF_ID], + CONF_NAME: import_data.get(CONF_NAME, import_data[CONF_ID]), + CONF_DEVICE: import_data[CONF_DEVICE], + CONF_ID: import_data[CONF_ID], } return await self.async_step_init(info=formatted_config) diff --git a/homeassistant/components/sabnzbd/config_flow.py b/homeassistant/components/sabnzbd/config_flow.py index 944c3f2936c..2637659e91a 100644 --- a/homeassistant/components/sabnzbd/config_flow.py +++ b/homeassistant/components/sabnzbd/config_flow.py @@ -65,7 +65,7 @@ class SABnzbdConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import sabnzbd config from configuration.yaml.""" protocol = "https://" if import_data[CONF_SSL] else "http://" import_data[CONF_URL] = ( diff --git a/homeassistant/components/spider/config_flow.py b/homeassistant/components/spider/config_flow.py index f3076c0c28d..0c305adbc39 100644 --- a/homeassistant/components/spider/config_flow.py +++ b/homeassistant/components/spider/config_flow.py @@ -82,6 +82,6 @@ class SpiderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import spider config from configuration.yaml.""" return await self.async_step_user(import_data) diff --git a/homeassistant/components/tellduslive/config_flow.py b/homeassistant/components/tellduslive/config_flow.py index 6d68c37d821..6b5e7150d67 100644 --- a/homeassistant/components/tellduslive/config_flow.py +++ b/homeassistant/components/tellduslive/config_flow.py @@ -125,14 +125,14 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - self._scan_interval = user_input[KEY_SCAN_INTERVAL] - if user_input[CONF_HOST] != DOMAIN: - self._hosts.append(user_input[CONF_HOST]) + self._scan_interval = import_data[KEY_SCAN_INTERVAL] + if import_data[CONF_HOST] != DOMAIN: + self._hosts.append(import_data[CONF_HOST]) if not await self.hass.async_add_executor_job( os.path.isfile, self.hass.config.path(TELLDUS_CONFIG_FILE) @@ -144,7 +144,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ) host = next(iter(conf)) - if user_input[CONF_HOST] != host: + if import_data[CONF_HOST] != host: return await self.async_step_user() host = CLOUD_NAME if host == "tellduslive" else host From de8bbaadd17a3011fe0b9875b2f66f82ddb6f278 Mon Sep 17 00:00:00 2001 From: Antoine Reversat Date: Tue, 27 Aug 2024 10:26:18 -0400 Subject: [PATCH 1109/1635] Fix review comments in fglair (#124710) * Replace if len() == 0 with if not * Replace list + filter with list comprehensions * Move consts that are only used in one module to said module --- .../components/fujitsu_fglair/climate.py | 52 +++++++++++++++---- .../components/fujitsu_fglair/config_flow.py | 2 +- .../components/fujitsu_fglair/const.py | 39 -------------- .../components/fujitsu_fglair/coordinator.py | 10 ++-- .../components/fujitsu_fglair/test_climate.py | 2 +- 5 files changed, 49 insertions(+), 
56 deletions(-) diff --git a/homeassistant/components/fujitsu_fglair/climate.py b/homeassistant/components/fujitsu_fglair/climate.py index 558f4b73a18..726096eab1a 100644 --- a/homeassistant/components/fujitsu_fglair/climate.py +++ b/homeassistant/components/fujitsu_fglair/climate.py @@ -2,9 +2,23 @@ from typing import Any -from ayla_iot_unofficial.fujitsu_hvac import Capability, FujitsuHVAC +from ayla_iot_unofficial.fujitsu_hvac import ( + Capability, + FanSpeed, + FujitsuHVAC, + OpMode, + SwingMode, +) from homeassistant.components.climate import ( + FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + SWING_BOTH, + SWING_HORIZONTAL, + SWING_OFF, + SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, HVACMode, @@ -16,17 +30,35 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import FGLairConfigEntry -from .const import ( - DOMAIN, - FUJI_TO_HA_FAN, - FUJI_TO_HA_HVAC, - FUJI_TO_HA_SWING, - HA_TO_FUJI_FAN, - HA_TO_FUJI_HVAC, - HA_TO_FUJI_SWING, -) +from .const import DOMAIN from .coordinator import FGLairCoordinator +HA_TO_FUJI_FAN = { + FAN_LOW: FanSpeed.LOW, + FAN_MEDIUM: FanSpeed.MEDIUM, + FAN_HIGH: FanSpeed.HIGH, + FAN_AUTO: FanSpeed.AUTO, +} +FUJI_TO_HA_FAN = {value: key for key, value in HA_TO_FUJI_FAN.items()} + +HA_TO_FUJI_HVAC = { + HVACMode.OFF: OpMode.OFF, + HVACMode.HEAT: OpMode.HEAT, + HVACMode.COOL: OpMode.COOL, + HVACMode.HEAT_COOL: OpMode.AUTO, + HVACMode.DRY: OpMode.DRY, + HVACMode.FAN_ONLY: OpMode.FAN, +} +FUJI_TO_HA_HVAC = {value: key for key, value in HA_TO_FUJI_HVAC.items()} + +HA_TO_FUJI_SWING = { + SWING_OFF: SwingMode.OFF, + SWING_VERTICAL: SwingMode.SWING_VERTICAL, + SWING_HORIZONTAL: SwingMode.SWING_HORIZONTAL, + SWING_BOTH: SwingMode.SWING_BOTH, +} +FUJI_TO_HA_SWING = {value: key for key, value in HA_TO_FUJI_SWING.items()} + async def async_setup_entry( hass: HomeAssistant, diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py index db1975298a8..5021e495656 100644 --- a/homeassistant/components/fujitsu_fglair/config_flow.py +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -70,7 +70,7 @@ class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() errors = await self._async_validate_credentials(user_input) - if len(errors) == 0: + if not errors: return self.async_create_entry( title=f"FGLair ({user_input[CONF_USERNAME]})", data=user_input, diff --git a/homeassistant/components/fujitsu_fglair/const.py b/homeassistant/components/fujitsu_fglair/const.py index 0e93361f20b..a9d485281a3 100644 --- a/homeassistant/components/fujitsu_fglair/const.py +++ b/homeassistant/components/fujitsu_fglair/const.py @@ -6,19 +6,6 @@ from ayla_iot_unofficial.fujitsu_consts import ( # noqa: F401 FGLAIR_APP_ID, FGLAIR_APP_SECRET, ) -from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, OpMode, SwingMode - -from homeassistant.components.climate import ( - FAN_AUTO, - FAN_HIGH, - FAN_LOW, - FAN_MEDIUM, - SWING_BOTH, - SWING_HORIZONTAL, - SWING_OFF, - SWING_VERTICAL, - HVACMode, -) API_TIMEOUT = 10 API_REFRESH = timedelta(minutes=5) @@ -26,29 +13,3 @@ API_REFRESH = timedelta(minutes=5) DOMAIN = "fujitsu_fglair" CONF_EUROPE = "is_europe" - -HA_TO_FUJI_FAN = { - FAN_LOW: FanSpeed.LOW, - FAN_MEDIUM: FanSpeed.MEDIUM, - FAN_HIGH: FanSpeed.HIGH, - FAN_AUTO: FanSpeed.AUTO, -} -FUJI_TO_HA_FAN = {value: key for key, value in HA_TO_FUJI_FAN.items()} - -HA_TO_FUJI_HVAC = { - HVACMode.OFF: 
OpMode.OFF, - HVACMode.HEAT: OpMode.HEAT, - HVACMode.COOL: OpMode.COOL, - HVACMode.HEAT_COOL: OpMode.AUTO, - HVACMode.DRY: OpMode.DRY, - HVACMode.FAN_ONLY: OpMode.FAN, -} -FUJI_TO_HA_HVAC = {value: key for key, value in HA_TO_FUJI_HVAC.items()} - -HA_TO_FUJI_SWING = { - SWING_OFF: SwingMode.OFF, - SWING_VERTICAL: SwingMode.SWING_VERTICAL, - SWING_HORIZONTAL: SwingMode.SWING_HORIZONTAL, - SWING_BOTH: SwingMode.SWING_BOTH, -} -FUJI_TO_HA_SWING = {value: key for key, value in HA_TO_FUJI_SWING.items()} diff --git a/homeassistant/components/fujitsu_fglair/coordinator.py b/homeassistant/components/fujitsu_fglair/coordinator.py index 902464bdd80..eac3cfd6ce5 100644 --- a/homeassistant/components/fujitsu_fglair/coordinator.py +++ b/homeassistant/components/fujitsu_fglair/coordinator.py @@ -47,12 +47,12 @@ class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): except AylaAuthError as e: raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e - if len(listening_entities) == 0: - devices = list(filter(lambda x: isinstance(x, FujitsuHVAC), devices)) + if not listening_entities: + devices = [dev for dev in devices if isinstance(dev, FujitsuHVAC)] else: - devices = list( - filter(lambda x: x.device_serial_number in listening_entities, devices) - ) + devices = [ + dev for dev in devices if dev.device_serial_number in listening_entities + ] try: for dev in devices: diff --git a/tests/components/fujitsu_fglair/test_climate.py b/tests/components/fujitsu_fglair/test_climate.py index fd016e4e226..daddc83a871 100644 --- a/tests/components/fujitsu_fglair/test_climate.py +++ b/tests/components/fujitsu_fglair/test_climate.py @@ -18,7 +18,7 @@ from homeassistant.components.climate import ( SWING_BOTH, HVACMode, ) -from homeassistant.components.fujitsu_fglair.const import ( +from homeassistant.components.fujitsu_fglair.climate import ( HA_TO_FUJI_FAN, HA_TO_FUJI_HVAC, HA_TO_FUJI_SWING, From bcfc7ea48198cc95782399dee2fd68534ef762d5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 16:36:07 +0200 Subject: [PATCH 1110/1635] Cleanup unused import in google config flow (#124711) --- homeassistant/components/google/config_flow.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/homeassistant/components/google/config_flow.py b/homeassistant/components/google/config_flow.py index 726af854f75..98424ef24f5 100644 --- a/homeassistant/components/google/config_flow.py +++ b/homeassistant/components/google/config_flow.py @@ -94,18 +94,6 @@ class OAuth2FlowHandler( "prompt": "consent", } - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import existing auth into a new config entry.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - implementations = await config_entry_oauth2_flow.async_get_implementations( - self.hass, self.DOMAIN - ) - assert len(implementations) == 1 - self.flow_impl = list(implementations.values())[0] - self.external_data = import_data - return await super().async_step_creation(import_data) - async def async_step_auth( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: From c1c158c0aa84fe91f862f33aeddcbfdd0929a11b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 16:48:04 +0200 Subject: [PATCH 1111/1635] Adjust docstring in airvisual_pro config flow (#124712) --- homeassistant/components/airvisual_pro/config_flow.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/homeassistant/components/airvisual_pro/config_flow.py b/homeassistant/components/airvisual_pro/config_flow.py index da0cca2e6a2..db83411b4a4 100644 --- a/homeassistant/components/airvisual_pro/config_flow.py +++ b/homeassistant/components/airvisual_pro/config_flow.py @@ -81,7 +81,7 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): self._reauth_entry: ConfigEntry | None = None async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" + """Import a config entry from `airvisual` integration (see #83882).""" return await self.async_step_user(import_data) async def async_step_reauth( From 1936aeccb99a6897616892e3304c2f1cfd3ce6e2 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Tue, 27 Aug 2024 18:01:00 +0200 Subject: [PATCH 1112/1635] Add a test for async_converse (#124697) --- .../components/conversation/models.py | 11 ++++++ .../conversation/test_agent_manager.py | 34 +++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 tests/components/conversation/test_agent_manager.py diff --git a/homeassistant/components/conversation/models.py b/homeassistant/components/conversation/models.py index 902b52483e0..724e520e6df 100644 --- a/homeassistant/components/conversation/models.py +++ b/homeassistant/components/conversation/models.py @@ -23,11 +23,22 @@ class ConversationInput: """User input to be processed.""" text: str + """User spoken text.""" + context: Context + """Context of the request.""" + conversation_id: str | None + """Unique identifier for the conversation.""" + device_id: str | None + """Unique identifier for the device.""" + language: str + """Language of the request.""" + agent_id: str | None = None + """Agent to use for processing.""" @dataclass(slots=True) diff --git a/tests/components/conversation/test_agent_manager.py b/tests/components/conversation/test_agent_manager.py new file mode 100644 index 00000000000..47b58a522a8 --- /dev/null +++ b/tests/components/conversation/test_agent_manager.py @@ -0,0 +1,34 @@ +"""Test agent manager.""" + +from unittest.mock import patch + +from homeassistant.components.conversation import ConversationResult, async_converse +from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers.intent import IntentResponse + + +async def test_async_converse(hass: HomeAssistant, init_components) -> None: + """Test the async_converse method.""" + context = Context() + with patch( + "homeassistant.components.conversation.default_agent.DefaultAgent.async_process", + return_value=ConversationResult(response=IntentResponse(language="test lang")), + ) as mock_process: + await async_converse( + hass, + text="test command", + conversation_id="test id", + context=context, + language="test lang", + agent_id="conversation.home_assistant", + device_id="test device id", + ) + + assert mock_process.called + conversation_input = mock_process.call_args[0][0] + assert conversation_input.text == "test command" + assert conversation_input.conversation_id == "test id" + assert conversation_input.context is context + assert conversation_input.language == "test lang" + assert conversation_input.agent_id == "conversation.home_assistant" + assert conversation_input.device_id == "test device id" From 0dc1eb87573292594e116483c0b9d3350916634f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Tue, 27 Aug 2024 18:41:27 +0200 Subject: [PATCH 1113/1635] Cleanup unused import in 
zwave_js config flow (#124716) --- .../components/zwave_js/config_flow.py | 12 --- tests/components/zwave_js/test_config_flow.py | 98 ------------------- 2 files changed, 110 deletions(-) diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index 6d9f88d93f1..3e979b224ae 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -366,18 +366,6 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): """Return the options flow.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Handle imported data. - - This step will be used when importing data - during Z-Wave to Z-Wave JS migration. - """ - # Note that the data comes from the zwave integration. - # So we don't use our constants here. - self.s0_legacy_key = import_data.get("network_key") - self.usb_path = import_data.get("usb_path") - return await self.async_step_user() - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index fe16f38257a..a3affb6b977 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -2726,104 +2726,6 @@ async def test_options_addon_not_installed( assert client.disconnect.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) -async def test_import_addon_installed( - hass: HomeAssistant, - supervisor, - addon_installed, - addon_options, - set_addon_options, - start_addon, - get_addon_discovery_info, - serial_port, -) -> None: - """Test import step while add-on already installed on Supervisor.""" - serial_port.device = "/test/imported" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"usb_path": "/test/imported", "network_key": "imported123"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "on_supervisor" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"use_addon": True} - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "configure_addon" - - # the default input should be the imported data - default_input = result["data_schema"]({}) - - assert default_input == { - "usb_path": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - } - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], default_input - ) - - assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - { - "options": { - "device": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - } - }, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - - with ( - patch( - "homeassistant.components.zwave_js.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.zwave_js.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - await hass.async_block_till_done() - result = 
await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() - - assert start_addon.call_args == call(hass, "core_zwave_js") - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TITLE - assert result["data"] == { - "url": "ws://host1:3001", - "usb_path": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - "use_addon": True, - "integration_created_addon": False, - } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_zeroconf(hass: HomeAssistant) -> None: """Test zeroconf discovery.""" From 55c42fde88c7a6989a8489b755375fef92360f35 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 19:05:49 +0200 Subject: [PATCH 1114/1635] Improve validation of entity service schemas (#124102) * Improve validation of entity service schemas * Update tests/helpers/test_entity_platform.py Co-authored-by: Joost Lekkerkerker --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/helpers/config_validation.py | 23 ++++++++++++- homeassistant/helpers/service.py | 13 ++------ tests/helpers/test_config_validation.py | 24 ++++++++++++++ tests/helpers/test_entity_component.py | 38 +++++++++++----------- tests/helpers/test_entity_platform.py | 38 ++++++++++++---------- 5 files changed, 87 insertions(+), 49 deletions(-) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index d7a5d5ae8a1..2904efb75e9 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1305,9 +1305,28 @@ TARGET_SERVICE_FIELDS = { _HAS_ENTITY_SERVICE_FIELD = has_at_least_one_key(*ENTITY_SERVICE_FIELDS) +def is_entity_service_schema(validator: VolSchemaType) -> bool: + """Check if the passed validator is an entity schema validator. + + The validator must be either of: + - A validator returned by cv._make_entity_service_schema + - A validator returned by cv._make_entity_service_schema, wrapped in a vol.Schema + - A validator returned by cv._make_entity_service_schema, wrapped in a vol.All + Nesting is allowed. + """ + if hasattr(validator, "_entity_service_schema"): + return True + if isinstance(validator, (vol.All)): + return any(is_entity_service_schema(val) for val in validator.validators) + if isinstance(validator, (vol.Schema)): + return is_entity_service_schema(validator.schema) + + return False + + def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: """Create an entity service schema.""" - return vol.All( + validator = vol.All( vol.Schema( { # The frontend stores data here. Don't use in core. 
@@ -1319,6 +1338,8 @@ def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: ), _HAS_ENTITY_SERVICE_FIELD, ) + setattr(validator, "_entity_service_schema", True) + return validator BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 0551b5289c5..573073f3809 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -1267,17 +1267,8 @@ def async_register_entity_service( # Do a sanity check to check this is a valid entity service schema, # the check could be extended to require All/Any to have sub schema(s) # with all entity service fields - elif ( - # Don't check All/Any - not isinstance(schema, (vol.All, vol.Any)) - # Don't check All/Any wrapped in schema - and not isinstance(schema.schema, (vol.All, vol.Any)) - and any(key not in schema.schema for key in cv.ENTITY_SERVICE_FIELDS) - ): - raise HomeAssistantError( - "The schema does not include all required keys: " - f"{", ".join(str(key) for key in cv.ENTITY_SERVICE_FIELDS)}" - ) + elif not cv.is_entity_service_schema(schema): + raise HomeAssistantError("The schema is not an entity service schema") service_func: str | HassJob[..., Any] service_func = func if isinstance(func, str) else HassJob(func) diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 973f504df08..57c712e2f10 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -1805,3 +1805,27 @@ async def test_async_validate(hass: HomeAssistant, tmpdir: py.path.local) -> Non "string": [hass.loop_thread_id], } validator_calls = {} + + +async def test_is_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test cv.is_entity_service_schema.""" + for schema in ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + vol.Any(cv.make_entity_service_schema({"some": str})), + ): + assert cv.is_entity_service_schema(schema) is False + + for schema in ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.Schema(vol.All(cv.make_entity_service_schema({"some": str}))), + vol.Schema(vol.Schema(cv.make_entity_service_schema({"some": str}))), + vol.All(cv.make_entity_service_schema({"some": str})), + vol.All(vol.All(cv.make_entity_service_schema({"some": str}))), + vol.All(vol.Schema(cv.make_entity_service_schema({"some": str}))), + ): + assert cv.is_entity_service_schema(schema) is True diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 5ce0292c2ec..8f4ece09a17 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -23,7 +23,7 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError, PlatformNotReady -from homeassistant.helpers import discovery +from homeassistant.helpers import config_validation as cv, discovery from homeassistant.helpers.entity_component import EntityComponent, async_update_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -559,28 +559,28 @@ async def test_register_entity_service( async def test_register_entity_service_non_entity_service_schema( hass: HomeAssistant, ) -> None: - """Test attempting to register a service with an incomplete schema.""" + """Test attempting to 
register a service with a non entity service schema.""" component = EntityComponent(_LOGGER, DOMAIN, hass) - with pytest.raises( - HomeAssistantError, - match=( - "The schema does not include all required keys: entity_id, device_id, area_id, " - "floor_id, label_id" - ), + for schema in ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), ): - component.async_register_entity_service( - "hello", vol.Schema({"some": str}), Mock() - ) + with pytest.raises( + HomeAssistantError, + match=("The schema is not an entity service schema"), + ): + component.async_register_entity_service("hello", schema, Mock()) - # The check currently does not recurse into vol.All or vol.Any allowing these - # non-compliant schemas to pass - component.async_register_entity_service( - "hello", vol.All(vol.Schema({"some": str})), Mock() - ) - component.async_register_entity_service( - "hello", vol.Any(vol.Schema({"some": str})), Mock() - ) + for idx, schema in enumerate( + ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.All(cv.make_entity_service_schema({"some": str})), + ) + ): + component.async_register_entity_service(f"test_service_{idx}", schema, Mock()) async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 2cc3348626c..2b0598cfe9d 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -23,6 +23,7 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError, PlatformNotReady from homeassistant.helpers import ( area_registry as ar, + config_validation as cv, device_registry as dr, entity_platform, entity_registry as er, @@ -1812,31 +1813,32 @@ async def test_register_entity_service_none_schema( async def test_register_entity_service_non_entity_service_schema( hass: HomeAssistant, ) -> None: - """Test attempting to register a service with an incomplete schema.""" + """Test attempting to register a service with a non entity service schema.""" entity_platform = MockEntityPlatform( hass, domain="mock_integration", platform_name="mock_platform", platform=None ) - with pytest.raises( - HomeAssistantError, - match=( - "The schema does not include all required keys: entity_id, device_id, area_id, " - "floor_id, label_id" - ), + for schema in ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + ): + with pytest.raises( + HomeAssistantError, + match="The schema is not an entity service schema", + ): + entity_platform.async_register_entity_service("hello", schema, Mock()) + + for idx, schema in enumerate( + ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.All(cv.make_entity_service_schema({"some": str})), + ) ): entity_platform.async_register_entity_service( - "hello", - vol.Schema({"some": str}), - Mock(), + f"test_service_{idx}", schema, Mock() ) - # The check currently does not recurse into vol.All or vol.Any allowing these - # non-compliant schemas to pass - entity_platform.async_register_entity_service( - "hello", vol.All(vol.Schema({"some": str})), Mock() - ) - entity_platform.async_register_entity_service( - "hello", vol.Any(vol.Schema({"some": str})), Mock() - ) @pytest.mark.parametrize("update_before_add", [True, False]) From 52b6f00363e4dd25702f3413b3645358bf5bbfdf Mon Sep 17 
00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 19:07:04 +0200 Subject: [PATCH 1115/1635] Adjust name of Google Translate TTS provider (#124688) --- homeassistant/components/google_translate/tts.py | 4 ++-- tests/components/google_translate/test_tts.py | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/google_translate/tts.py b/homeassistant/components/google_translate/tts.py index 221c99e7c20..13e0ca4c273 100644 --- a/homeassistant/components/google_translate/tts.py +++ b/homeassistant/components/google_translate/tts.py @@ -74,7 +74,7 @@ class GoogleTTSEntity(TextToSpeechEntity): else: self._lang = lang self._tld = tld - self._attr_name = f"Google {self._lang} {self._tld}" + self._attr_name = f"Google Translate {self._lang} {self._tld}" self._attr_unique_id = config_entry.entry_id @property @@ -130,7 +130,7 @@ class GoogleProvider(Provider): else: self._lang = lang self._tld = tld - self.name = "Google" + self.name = "Google Translate" @property def default_language(self) -> str: diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 95313df6140..1f199a5db97 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -103,7 +103,7 @@ async def mock_config_entry_setup(hass: HomeAssistant, config: dict[str, Any]) - "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -160,7 +160,7 @@ async def test_tts_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_de_com", + ATTR_ENTITY_ID: "tts.google_translate_de_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -216,7 +216,7 @@ async def test_service_say_german_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "de", @@ -273,7 +273,7 @@ async def test_service_say_german_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_co_uk", + ATTR_ENTITY_ID: "tts.google_translate_en_co_uk", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -329,7 +329,7 @@ async def test_service_say_en_uk_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "en-uk", @@ -386,7 +386,7 @@ async def test_service_say_en_uk_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_OPTIONS: {"tld": "co.uk"}, @@ -443,7 +443,7 @@ async def test_service_say_en_couk( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the 
front door.", }, From e2d84f9a58b773de3b28d2f9c55593faffdad475 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 27 Aug 2024 19:07:35 +0200 Subject: [PATCH 1116/1635] Add support for multiple otbr config entries (#124289) * Add support for multiple otbr config entries * Fix test * Drop useless fixture * Address review comments * Change unique id from xa to id * Improve error text * Store data in ConfigEntry.runtime_data * Remove useless function --- homeassistant/components/otbr/__init__.py | 43 ++- homeassistant/components/otbr/config_flow.py | 89 +++-- homeassistant/components/otbr/const.py | 8 - .../components/otbr/silabs_multiprotocol.py | 21 +- homeassistant/components/otbr/strings.json | 1 + homeassistant/components/otbr/util.py | 38 +- .../components/otbr/websocket_api.py | 98 ++--- tests/components/otbr/__init__.py | 1 + tests/components/otbr/conftest.py | 5 +- tests/components/otbr/test_config_flow.py | 334 ++++++++++++++++-- tests/components/otbr/test_init.py | 45 ++- .../otbr/test_silabs_multiprotocol.py | 21 +- tests/components/otbr/test_util.py | 33 +- 13 files changed, 563 insertions(+), 174 deletions(-) diff --git a/homeassistant/components/otbr/__init__.py b/homeassistant/components/otbr/__init__.py index 3e53358a162..4b95be1d40d 100644 --- a/homeassistant/components/otbr/__init__.py +++ b/homeassistant/components/otbr/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + import aiohttp import python_otbr_api @@ -14,22 +16,28 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType from . import websocket_api -from .const import DATA_OTBR, DOMAIN -from .util import OTBRData, update_issues +from .const import DOMAIN +from .util import ( + GetBorderAgentIdNotSupported, + OTBRData, + update_issues, + update_unique_id, +) + +_LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +type OTBRConfigEntry = ConfigEntry[OTBRData] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Open Thread Border Router component.""" websocket_api.async_setup(hass) - if len(config_entries := hass.config_entries.async_entries(DOMAIN)): - for config_entry in config_entries[1:]: - await hass.config_entries.async_remove(config_entry.entry_id) return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> bool: """Set up an Open Thread Border Router config entry.""" api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10) @@ -38,13 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: border_agent_id = await otbrdata.get_border_agent_id() dataset_tlvs = await otbrdata.get_active_dataset_tlvs() extended_address = await otbrdata.get_extended_address() - except ( - HomeAssistantError, - aiohttp.ClientError, - TimeoutError, - ) as err: - raise ConfigEntryNotReady("Unable to connect") from err - if border_agent_id is None: + except GetBorderAgentIdNotSupported: ir.async_create_issue( hass, DOMAIN, @@ -55,6 +57,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: translation_key="get_get_border_agent_id_unsupported", ) return False + except ( + HomeAssistantError, + aiohttp.ClientError, + TimeoutError, + ) as err: + raise ConfigEntryNotReady("Unable to connect") from err + await update_unique_id(hass, entry, border_agent_id) if dataset_tlvs: await 
update_issues(hass, otbrdata, dataset_tlvs) await async_add_dataset( @@ -66,18 +75,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - - hass.data[DATA_OTBR] = otbrdata + entry.runtime_data = otbrdata return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> bool: """Unload a config entry.""" - hass.data.pop(DATA_OTBR) return True -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/otbr/config_flow.py b/homeassistant/components/otbr/config_flow.py index 8cffc0a99e6..c1747981b07 100644 --- a/homeassistant/components/otbr/config_flow.py +++ b/homeassistant/components/otbr/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from contextlib import suppress import logging -from typing import cast +from typing import TYPE_CHECKING, cast import aiohttp import python_otbr_api @@ -33,9 +33,16 @@ from .util import ( get_allowed_channel, ) +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) +class AlreadyConfigured(HomeAssistantError): + """Raised when the router is already configured.""" + + def _is_yellow(hass: HomeAssistant) -> bool: """Return True if Home Assistant is running on a Home Assistant Yellow.""" try: @@ -70,9 +77,8 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _connect_and_set_dataset(self, otbr_url: str) -> None: + async def _set_dataset(self, api: python_otbr_api.OTBR, otbr_url: str) -> None: """Connect to the OTBR and create or apply a dataset if it doesn't have one.""" - api = python_otbr_api.OTBR(otbr_url, async_get_clientsession(self.hass), 10) if await api.get_active_dataset_tlvs() is None: allowed_channel = await get_allowed_channel(self.hass, otbr_url) @@ -89,7 +95,9 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): await api.set_active_dataset_tlvs(bytes.fromhex(thread_dataset_tlv)) else: _LOGGER.debug( - "not importing TLV with channel %s", thread_dataset_channel + "not importing TLV with channel %s for %s", + thread_dataset_channel, + otbr_url, ) pan_id = generate_random_pan_id() await api.create_active_dataset( @@ -101,27 +109,65 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): ) await api.set_enabled(True) + async def _is_border_agent_id_configured(self, border_agent_id: bytes) -> bool: + """Return True if another config entry's OTBR has the same border agent id.""" + config_entry: OTBRConfigEntry + for config_entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + data = config_entry.runtime_data + try: + other_border_agent_id = await data.get_border_agent_id() + except HomeAssistantError: + _LOGGER.debug( + "Could not read border agent id from %s", data.url, exc_info=True + ) + continue + _LOGGER.debug( + "border agent id for existing url %s: %s", + data.url, + other_border_agent_id.hex(), + ) + if border_agent_id == other_border_agent_id: + return True + return False + + async def _connect_and_configure_router(self, otbr_url: str) -> bytes: + """Connect to the router and configure it if needed. + + Will raise if the router's border agent id is in use by another config entry. + Returns the router's border agent id. 
+ """ + api = python_otbr_api.OTBR(otbr_url, async_get_clientsession(self.hass), 10) + border_agent_id = await api.get_border_agent_id() + _LOGGER.debug("border agent id for url %s: %s", otbr_url, border_agent_id.hex()) + + if await self._is_border_agent_id_configured(border_agent_id): + raise AlreadyConfigured + + await self._set_dataset(api, otbr_url) + + return border_agent_id + async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Set up by user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: url = user_input[CONF_URL].rstrip("/") try: - await self._connect_and_set_dataset(url) + border_agent_id = await self._connect_and_configure_router(url) + except AlreadyConfigured: + errors["base"] = "already_configured" except ( python_otbr_api.OTBRError, aiohttp.ClientError, TimeoutError, - ): + ) as exc: + _LOGGER.debug("Failed to communicate with OTBR@%s: %s", url, exc) errors["base"] = "cannot_connect" else: - await self.async_set_unique_id(DOMAIN) + await self.async_set_unique_id(border_agent_id.hex()) return self.async_create_entry( title="Open Thread Border Router", data={CONF_URL: url}, @@ -140,34 +186,35 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): url = f"http://{config['host']}:{config['port']}" config_entry_data = {"url": url} - if self._async_in_progress(include_uninitialized=True): - # We currently don't handle multiple config entries, abort if hassio - # discovers multiple addons with otbr support - return self.async_abort(reason="single_instance_allowed") - if current_entries := self._async_current_entries(): for current_entry in current_entries: if current_entry.source != SOURCE_HASSIO: continue current_url = yarl.URL(current_entry.data["url"]) - if ( + if not (unique_id := current_entry.unique_id): # The first version did not set a unique_id # so if the entry does not have a unique_id # we have to assume it's the first version - current_entry.unique_id - and (current_entry.unique_id != discovery_info.uuid) + # This check can be removed in HA Core 2025.9 + unique_id = discovery_info.uuid + if ( + unique_id != discovery_info.uuid or current_url.host != config["host"] or current_url.port == config["port"] ): continue # Update URL with the new port self.hass.config_entries.async_update_entry( - current_entry, data=config_entry_data + current_entry, + data=config_entry_data, + unique_id=unique_id, # Remove in HA Core 2025.9 ) - return self.async_abort(reason="single_instance_allowed") + return self.async_abort(reason="already_configured") try: - await self._connect_and_set_dataset(url) + await self._connect_and_configure_router(url) + except AlreadyConfigured: + return self.async_abort(reason="already_configured") except ( python_otbr_api.OTBRError, aiohttp.ClientError, diff --git a/homeassistant/components/otbr/const.py b/homeassistant/components/otbr/const.py index cf1678466a4..c38b3cc1250 100644 --- a/homeassistant/components/otbr/const.py +++ b/homeassistant/components/otbr/const.py @@ -2,14 +2,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING - -from homeassistant.util.hass_dict import HassKey - -if TYPE_CHECKING: - from .util import OTBRData - DOMAIN = "otbr" -DATA_OTBR: HassKey[OTBRData] = HassKey(DOMAIN) DEFAULT_CHANNEL = 15 diff --git a/homeassistant/components/otbr/silabs_multiprotocol.py b/homeassistant/components/otbr/silabs_multiprotocol.py index b3a711968fd..d97e6811e6d 100644 --- 
a/homeassistant/components/otbr/silabs_multiprotocol.py +++ b/homeassistant/components/otbr/silabs_multiprotocol.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Coroutine from functools import wraps import logging -from typing import Any, Concatenate +from typing import TYPE_CHECKING, Any, Concatenate import aiohttp from python_otbr_api import tlv_parser @@ -18,9 +18,12 @@ from homeassistant.components.thread import async_add_dataset from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from .const import DATA_OTBR, DOMAIN +from .const import DOMAIN from .util import OTBRData +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) @@ -45,15 +48,13 @@ def async_get_otbr_data[**_P, _R, _R_Def]( hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs ) -> _R | _R_Def: """Fetch OTBR data and pass to orig_func.""" - if DATA_OTBR not in hass.data: - return retval + config_entry: OTBRConfigEntry + for config_entry in hass.config_entries.async_loaded_entries(DOMAIN): + data = config_entry.runtime_data + if is_multiprotocol_url(data.url): + return await orig_func(hass, data, *args, **kwargs) - data = hass.data[DATA_OTBR] - - if not is_multiprotocol_url(data.url): - return retval - - return await orig_func(hass, data, *args, **kwargs) + return retval return async_get_otbr_data_wrapper diff --git a/homeassistant/components/otbr/strings.json b/homeassistant/components/otbr/strings.json index 838ebeb5b8c..bc7812c1db7 100644 --- a/homeassistant/components/otbr/strings.json +++ b/homeassistant/components/otbr/strings.json @@ -9,6 +9,7 @@ } }, "error": { + "already_configured": "The Thread border router is already configured", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { diff --git a/homeassistant/components/otbr/util.py b/homeassistant/components/otbr/util.py index d426ca9ba17..351e23c7736 100644 --- a/homeassistant/components/otbr/util.py +++ b/homeassistant/components/otbr/util.py @@ -7,7 +7,7 @@ import dataclasses from functools import wraps import logging import random -from typing import Any, Concatenate, cast +from typing import TYPE_CHECKING, Any, Concatenate, cast import aiohttp import python_otbr_api @@ -22,12 +22,16 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon multi_pan_addon_using_device, ) from homeassistant.components.homeassistant_yellow import RADIO_DEVICE as YELLOW_RADIO +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from .const import DOMAIN +if TYPE_CHECKING: + from . 
import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) INFO_URL_SKY_CONNECT = ( @@ -48,6 +52,10 @@ INSECURE_PASSPHRASES = ( ) +class GetBorderAgentIdNotSupported(HomeAssistantError): + """Raised from python_otbr_api.GetBorderAgentIdNotSupportedError.""" + + def compose_default_network_name(pan_id: int) -> str: """Generate a default network name.""" return f"ha-thread-{pan_id:04x}" @@ -83,7 +91,7 @@ class OTBRData: entry_id: str @_handle_otbr_error - async def factory_reset(self) -> None: + async def factory_reset(self, hass: HomeAssistant) -> None: """Reset the router.""" try: await self.api.factory_reset() @@ -92,14 +100,19 @@ class OTBRData: "OTBR does not support factory reset, attempting to delete dataset" ) await self.delete_active_dataset() + await update_unique_id( + hass, + hass.config_entries.async_get_entry(self.entry_id), + await self.get_border_agent_id(), + ) @_handle_otbr_error - async def get_border_agent_id(self) -> bytes | None: + async def get_border_agent_id(self) -> bytes: """Get the border agent ID or None if not supported by the router.""" try: return await self.api.get_border_agent_id() - except python_otbr_api.GetBorderAgentIdNotSupportedError: - return None + except python_otbr_api.GetBorderAgentIdNotSupportedError as exc: + raise GetBorderAgentIdNotSupported from exc @_handle_otbr_error async def set_enabled(self, enabled: bool) -> None: @@ -258,3 +271,18 @@ async def update_issues( """Raise or clear repair issues related to network settings.""" await _warn_on_channel_collision(hass, otbrdata, dataset_tlvs) _warn_on_default_network_settings(hass, otbrdata, dataset_tlvs) + + +async def update_unique_id( + hass: HomeAssistant, entry: OTBRConfigEntry | None, border_agent_id: bytes +) -> None: + """Update the config entry's unique_id if not matching.""" + border_agent_id_hex = border_agent_id.hex() + if entry and entry.source == SOURCE_USER and entry.unique_id != border_agent_id_hex: + _LOGGER.debug( + "Updating unique_id of entry %s from %s to %s", + entry.entry_id, + entry.unique_id, + border_agent_id_hex, + ) + hass.config_entries.async_update_entry(entry, unique_id=border_agent_id_hex) diff --git a/homeassistant/components/otbr/websocket_api.py b/homeassistant/components/otbr/websocket_api.py index 577f9cc381d..2bcd0da8f16 100644 --- a/homeassistant/components/otbr/websocket_api.py +++ b/homeassistant/components/otbr/websocket_api.py @@ -2,7 +2,7 @@ from collections.abc import Callable, Coroutine from functools import wraps -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast import python_otbr_api from python_otbr_api import PENDING_DATASET_DELAY_TIMER, tlv_parser @@ -17,7 +17,7 @@ from homeassistant.components.thread import async_add_dataset, async_get_dataset from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from .const import DATA_OTBR, DEFAULT_CHANNEL, DOMAIN +from .const import DEFAULT_CHANNEL, DOMAIN from .util import ( OTBRData, compose_default_network_name, @@ -26,6 +26,9 @@ from .util import ( update_issues, ) +if TYPE_CHECKING: + from . 
import OTBRConfigEntry + @callback def async_setup(hass: HomeAssistant) -> None: @@ -47,41 +50,45 @@ async def websocket_info( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get OTBR info.""" - if DATA_OTBR not in hass.data: + config_entries: list[OTBRConfigEntry] + config_entries = hass.config_entries.async_loaded_entries(DOMAIN) + + if not config_entries: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data = hass.data[DATA_OTBR] + response: dict[str, dict[str, Any]] = {} - try: - border_agent_id = await data.get_border_agent_id() - dataset = await data.get_active_dataset() - dataset_tlvs = await data.get_active_dataset_tlvs() - extended_address = (await data.get_extended_address()).hex() - except HomeAssistantError as exc: - connection.send_error(msg["id"], "otbr_info_failed", str(exc)) - return + for config_entry in config_entries: + data = config_entry.runtime_data + try: + border_agent_id = await data.get_border_agent_id() + dataset = await data.get_active_dataset() + dataset_tlvs = await data.get_active_dataset_tlvs() + extended_address = (await data.get_extended_address()).hex() + except HomeAssistantError as exc: + connection.send_error(msg["id"], "otbr_info_failed", str(exc)) + return - # The border agent ID is checked when the OTBR config entry is setup, - # we can assert it's not None - assert border_agent_id is not None + # The border agent ID is checked when the OTBR config entry is setup, + # we can assert it's not None + assert border_agent_id is not None - extended_pan_id = ( - dataset.extended_pan_id.lower() if dataset and dataset.extended_pan_id else None - ) - connection.send_result( - msg["id"], - { - extended_address: { - "active_dataset_tlvs": dataset_tlvs.hex() if dataset_tlvs else None, - "border_agent_id": border_agent_id.hex(), - "channel": dataset.channel if dataset else None, - "extended_address": extended_address, - "extended_pan_id": extended_pan_id, - "url": data.url, - } - }, - ) + extended_pan_id = ( + dataset.extended_pan_id.lower() + if dataset and dataset.extended_pan_id + else None + ) + response[extended_address] = { + "active_dataset_tlvs": dataset_tlvs.hex() if dataset_tlvs else None, + "border_agent_id": border_agent_id.hex(), + "channel": dataset.channel if dataset else None, + "extended_address": extended_address, + "extended_pan_id": extended_pan_id, + "url": data.url, + } + + connection.send_result(msg["id"], response) def async_get_otbr_data( @@ -99,22 +106,29 @@ def async_get_otbr_data( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Fetch OTBR data and pass to orig_func.""" - if DATA_OTBR not in hass.data: + config_entries: list[OTBRConfigEntry] + config_entries = hass.config_entries.async_loaded_entries(DOMAIN) + + if not config_entries: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data = hass.data[DATA_OTBR] + for config_entry in config_entries: + data = config_entry.runtime_data + try: + extended_address = await data.get_extended_address() + except HomeAssistantError as exc: + connection.send_error( + msg["id"], "get_extended_address_failed", str(exc) + ) + return + if extended_address.hex() != msg["extended_address"]: + continue - try: - extended_address = await data.get_extended_address() - except HomeAssistantError as exc: - connection.send_error(msg["id"], "get_extended_address_failed", str(exc)) - return - if extended_address.hex() != msg["extended_address"]: - 
connection.send_error(msg["id"], "unknown_router", "") + await orig_func(hass, connection, msg, data) return - await orig_func(hass, connection, msg, data) + connection.send_error(msg["id"], "unknown_router", "") return async_check_extended_address_func @@ -144,7 +158,7 @@ async def websocket_create_network( return try: - await data.factory_reset() + await data.factory_reset(hass) except HomeAssistantError as exc: connection.send_error(msg["id"], "factory_reset_failed", str(exc)) return diff --git a/tests/components/otbr/__init__.py b/tests/components/otbr/__init__.py index 2c9daa127c2..7d52318b477 100644 --- a/tests/components/otbr/__init__.py +++ b/tests/components/otbr/__init__.py @@ -31,6 +31,7 @@ DATASET_INSECURE_PASSPHRASE = bytes.fromhex( TEST_BORDER_AGENT_EXTENDED_ADDRESS = bytes.fromhex("AEEB2F594B570BBF") TEST_BORDER_AGENT_ID = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52C") +TEST_BORDER_AGENT_ID_2 = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52D") ROUTER_DISCOVERY_HASS = { "type_": "_meshcop._udp.local.", diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index 3811ff66ebb..5ab3e442183 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -77,16 +77,18 @@ async def otbr_config_entry_multipan_fixture( get_active_dataset_tlvs: AsyncMock, get_border_agent_id: AsyncMock, get_extended_address: AsyncMock, -) -> None: +) -> str: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) + return config_entry.entry_id @pytest.fixture(name="otbr_config_entry_thread") @@ -102,6 +104,7 @@ async def otbr_config_entry_thread_fixture( domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index c4972bb5f83..edd92591b1b 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components import hassio, otbr from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import DATASET_CH15, DATASET_CH16 +from . 
import DATASET_CH15, DATASET_CH16, TEST_BORDER_AGENT_ID, TEST_BORDER_AGENT_ID_2 from tests.common import MockConfigEntry, MockModule, mock_integration from tests.test_util.aiohttp import AiohttpClientMocker @@ -57,12 +57,91 @@ def addon_info_fixture(): "http://custom_url:1234//", ], ) +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_border_agent_id", +) async def test_user_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, url: str ) -> None: """Test the user flow.""" + await _finish_user_flow(hass, url) + + +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_extended_address", +) +async def test_user_flow_additional_entry( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test more than a single entry is allowed.""" + url1 = "http://custom_url:1234" + url2 = "http://custom_url_2:1234" + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) + + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": url2}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID_2.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Do a user flow + await _finish_user_flow(hass) + + +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_extended_address", +) +async def test_user_flow_additional_entry_fail_get_address( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test more than a single entry is allowed. + + This tests the behavior when we can't read the extended address from the existing + config entry.
+ """ + url1 = "http://custom_url:1234" + url2 = "http://custom_url_2:1234" + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) + + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": url2}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID_2.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Do a user flow + aioclient_mock.clear_requests() + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", status=HTTPStatus.NOT_FOUND) + await _finish_user_flow(hass) + assert f"Could not read border agent id from {url2}" in caplog.text + + +async def _finish_user_flow( + hass: HomeAssistant, url: str = "http://custom_url:1234" +) -> None: + """Finish a user flow.""" stripped_url = "http://custom_url:1234" - aioclient_mock.get(f"{stripped_url}/node/dataset/active", text="aa") result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} ) @@ -88,13 +167,56 @@ async def test_user_flow( assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + config_entry = result["result"] assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == otbr.DOMAIN + assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) +async def test_user_flow_additional_entry_same_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test more than a single entry is allowed.""" + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": "http://custom_url:1234"}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Start user flow + url = "http://custom_url:1234" + aioclient_mock.get(f"{url}/node/dataset/active", text="aa") + result = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "user"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "url": url, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "already_configured"} + + +@pytest.mark.usefixtures("get_border_agent_id") async def test_user_flow_router_not_setup( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -158,10 +280,11 @@ async def test_user_flow_router_not_setup( assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == otbr.DOMAIN + assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() -async def test_user_flow_404( +@pytest.mark.usefixtures("get_border_agent_id") +async def test_user_flow_get_dataset_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the user flow.""" @@ -192,7 +315,30 @@ async def test_user_flow_404( aiohttp.ClientError, ], ) -async def 
test_user_flow_connect_error(hass: HomeAssistant, error) -> None: +async def test_user_flow_get_ba_id_connect_error( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error +) -> None: + """Test the user flow.""" + await _test_user_flow_connect_error(hass, "get_border_agent_id", error) + + +@pytest.mark.usefixtures("get_border_agent_id") +@pytest.mark.parametrize( + "error", + [ + TimeoutError, + python_otbr_api.OTBRError, + aiohttp.ClientError, + ], +) +async def test_user_flow_get_dataset_connect_error( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error +) -> None: + """Test the user flow.""" + await _test_user_flow_connect_error(hass, "get_active_dataset_tlvs", error) + + +async def _test_user_flow_connect_error(hass: HomeAssistant, func, error) -> None: """Test the user flow.""" result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} @@ -201,7 +347,7 @@ async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): + with patch(f"python_otbr_api.OTBR.{func}", side_effect=error): result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -212,6 +358,7 @@ async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: assert result["errors"] == {"base": "cannot_connect"} +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -244,6 +391,7 @@ async def test_hassio_discovery_flow( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_yellow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -301,6 +449,7 @@ async def test_hassio_discovery_flow_yellow( ), ], ) +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_sky_connect( device: str, title: str, @@ -346,6 +495,7 @@ async def test_hassio_discovery_flow_sky_connect( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") async def test_hassio_discovery_flow_2x_addons( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -354,6 +504,8 @@ async def test_hassio_discovery_flow_2x_addons( url2 = "http://core-silabs-multiprotocol_2:8081" aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) async def _addon_info(hass: HomeAssistant, slug: str) -> dict[str, Any]: await asyncio.sleep(0) @@ -387,18 +539,107 @@ async def test_hassio_discovery_flow_2x_addons( addon_info.side_effect = _addon_info - with patch( - "homeassistant.components.otbr.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result1 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA - ) - result2 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 - ) + result1 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await 
hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + ) - results = [result1, result2] + results = [result1, result2] + + expected_data = { + "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", + } + expected_data_2 = { + "url": f"http://{HASSIO_DATA_2.config['host']}:{HASSIO_DATA_2.config['port']}", + } + + assert results[0]["type"] is FlowResultType.CREATE_ENTRY + assert ( + results[0]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert results[0]["data"] == expected_data + assert results[0]["options"] == {} + + assert results[1]["type"] is FlowResultType.CREATE_ENTRY + assert ( + results[1]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert results[1]["data"] == expected_data_2 + assert results[1]["options"] == {} + + assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 + + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + assert config_entry.data == expected_data + assert config_entry.options == {} + assert ( + config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert config_entry.unique_id == HASSIO_DATA.uuid + + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[1] + assert config_entry.data == expected_data_2 + assert config_entry.options == {} + assert ( + config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert config_entry.unique_id == HASSIO_DATA_2.uuid + + +@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") +async def test_hassio_discovery_flow_2x_addons_same_ext_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info +) -> None: + """Test the hassio discovery flow when the user has 2 addons with otbr support.""" + url1 = "http://core-silabs-multiprotocol:8081" + url2 = "http://core-silabs-multiprotocol_2:8081" + aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") + aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + + async def _addon_info(hass: HomeAssistant, slug: str) -> dict[str, Any]: + await asyncio.sleep(0) + if slug == "otbr": + return { + "available": True, + "hostname": None, + "options": { + "device": ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port0" + ) + }, + "state": None, + "update_available": False, + "version": None, + } + return { + "available": True, + "hostname": None, + "options": { + "device": ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port1" + ) + }, + "state": None, + "update_available": False, + "version": None, + } + + addon_info.side_effect = _addon_info + + result1 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + ) + + results = [result1, result2] expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -411,9 +652,8 @@ async def test_hassio_discovery_flow_2x_addons( assert results[0]["data"] == expected_data assert results[0]["options"] == {} assert results[1]["type"] is FlowResultType.ABORT - assert results[1]["reason"] == "single_instance_allowed" + assert 
results[1]["reason"] == "already_configured" assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] assert config_entry.data == expected_data @@ -424,6 +664,7 @@ async def test_hassio_discovery_flow_2x_addons( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -481,6 +722,7 @@ async def test_hassio_discovery_flow_router_not_setup( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -533,6 +775,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -596,6 +839,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -610,6 +854,7 @@ async def test_hassio_discovery_flow_404( assert result["reason"] == "unknown" +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port_missing_unique_id( hass: HomeAssistant, ) -> None: @@ -633,7 +878,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -642,6 +887,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( assert config_entry.data == expected_data +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: """Test the port can be updated.""" mock_integration(hass, MockModule("hassio")) @@ -664,7 +910,7 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -673,6 +919,12 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: assert config_entry.data == expected_data +@pytest.mark.usefixtures( + "addon_info", + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) -> None: """Test the port is not updated if we get data for another addon hosting OTBR.""" mock_integration(hass, MockModule("hassio")) @@ -691,22 +943,34 @@ async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + # Another entry will be created + assert 
result["type"] is FlowResultType.CREATE_ENTRY - # Make sure the data was not updated + # Make sure the data of the existing entry was not updated expected_data = { "url": f"http://openthread_border_router:{HASSIO_DATA.config['port']+1}", } - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) assert config_entry.data == expected_data -@pytest.mark.parametrize(("source", "data"), [("hassio", HASSIO_DATA), ("user", None)]) -async def test_config_flow_single_entry( - hass: HomeAssistant, source: str, data: Any +@pytest.mark.parametrize( + ("source", "data", "expected_result"), + [ + ("hassio", HASSIO_DATA, FlowResultType.CREATE_ENTRY), + ("user", None, FlowResultType.FORM), + ], +) +@pytest.mark.usefixtures( + "addon_info", + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) +async def test_config_flow_additional_entry( + hass: HomeAssistant, source: str, data: Any, expected_result: FlowResultType ) -> None: - """Test only a single entry is allowed.""" + """Test more than a single entry is allowed.""" mock_integration(hass, MockModule("hassio")) # Setup the config entry @@ -719,13 +983,11 @@ async def test_config_flow_single_entry( config_entry.add_to_hass(hass) with patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", + "homeassistant.components.otbr.async_setup_entry", return_value=True, - ) as mock_setup_entry: + ): result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": source}, data=data ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - mock_setup_entry.assert_not_called() + assert result["type"] is expected_result diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index 86bab71cbda..ca1cbd6483b 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -11,6 +11,7 @@ from zeroconf.asyncio import AsyncServiceInfo from homeassistant.components import otbr, thread from homeassistant.components.thread import discovery +from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component @@ -18,7 +19,6 @@ from homeassistant.setup import async_setup_component from . 
import ( BASE_URL, CONFIG_ENTRY_DATA_MULTIPAN, - CONFIG_ENTRY_DATA_THREAD, DATASET_CH15, DATASET_CH16, DATASET_INSECURE_NW_KEY, @@ -71,6 +71,7 @@ async def test_import_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) @@ -138,6 +139,7 @@ async def test_import_share_radio_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( @@ -177,6 +179,7 @@ async def test_import_share_radio_no_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( @@ -214,6 +217,7 @@ async def test_import_insecure_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( @@ -252,6 +256,7 @@ async def test_config_entry_not_ready( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) get_active_dataset_tlvs.side_effect = error @@ -268,6 +273,7 @@ async def test_border_agent_id_not_supported( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) get_border_agent_id.side_effect = python_otbr_api.GetBorderAgentIdNotSupportedError @@ -281,6 +287,7 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) mock_api = MagicMock() @@ -314,25 +321,33 @@ async def test_remove_entry( await hass.config_entries.async_remove(config_entry.entry_id) -async def test_remove_extra_entries( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +@pytest.mark.parametrize( + ("source", "unique_id", "updated_unique_id"), + [ + (SOURCE_HASSIO, None, None), + (SOURCE_HASSIO, "abcd", "abcd"), + (SOURCE_USER, None, TEST_BORDER_AGENT_ID.hex()), + (SOURCE_USER, "abcd", TEST_BORDER_AGENT_ID.hex()), + ], +) +async def test_update_unique_id( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + source: str, + unique_id: str | None, + updated_unique_id: str | None, ) -> None: - """Test we remove additional config entries.""" + """Test we update the unique id if extended address has changed.""" - config_entry1 = MockConfigEntry( + config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, + source=source, title="Open Thread Border Router", + unique_id=unique_id, ) - config_entry2 = MockConfigEntry( - data=CONFIG_ENTRY_DATA_THREAD, - domain=otbr.DOMAIN, - options={}, - title="Open Thread Border Router", - ) - config_entry1.add_to_hass(hass) - config_entry2.add_to_hass(hass) - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 + config_entry.add_to_hass(hass) assert await async_setup_component(hass, otbr.DOMAIN, {}) - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 + config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) + assert config_entry.unique_id == updated_unique_id diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index e842f40ad4c..01b1ab63f56 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -5,7 +5,6 @@ from 
unittest.mock import patch import pytest from python_otbr_api import ActiveDataSet, tlv_parser -from homeassistant.components import otbr from homeassistant.components.otbr import ( silabs_multiprotocol as otbr_silabs_multiprotocol, ) @@ -127,10 +126,11 @@ async def test_async_change_channel_no_otbr(hass: HomeAssistant) -> None: async def test_async_change_channel_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL with patch("python_otbr_api.OTBR.set_channel") as mock_set_channel: await otbr_silabs_multiprotocol.async_change_channel(hass, 16, delay=0) mock_set_channel.assert_not_awaited() @@ -184,10 +184,11 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: async def test_async_get_channel_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None mock_get_active_dataset.assert_not_awaited() @@ -198,10 +199,11 @@ async def test_async_get_channel_non_matching_url( [(OTBR_MULTIPAN_URL, True), (OTBR_NON_MULTIPAN_URL, False)], ) async def test_async_using_multipan( - hass: HomeAssistant, otbr_config_entry_multipan, url: str, expected: bool + hass: HomeAssistant, otbr_config_entry_multipan: str, url: str, expected: bool ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = url + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = url assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is expected @@ -213,8 +215,9 @@ async def test_async_using_multipan_no_otbr(hass: HomeAssistant) -> None: async def test_async_using_multipan_non_matching_url( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test async_change_channel when otbr is not configured.""" - hass.data[otbr.DATA_OTBR].url = OTBR_NON_MULTIPAN_URL + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index ec325b8819e..0ed3041bea8 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -1,6 +1,6 @@ """Test OTBR Utility functions.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest import python_otbr_api @@ -31,24 +31,37 @@ async def test_get_allowed_channel( assert await otbr.util.get_allowed_channel(hass, OTBR_NON_MULTIPAN_URL) is None -async def test_factory_reset(hass: HomeAssistant, otbr_config_entry_multipan) -> None: +async def test_factory_reset( + hass: HomeAssistant, + otbr_config_entry_multipan: 
str, + get_border_agent_id: AsyncMock, +) -> None: """Test factory_reset.""" + new_ba_id = b"new_ba_id" + get_border_agent_id.return_value = new_ba_id + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + assert config_entry.unique_id != new_ba_id.hex() with ( patch("python_otbr_api.OTBR.factory_reset") as factory_reset_mock, patch( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() + # Check the unique_id is updated + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + assert config_entry.unique_id == new_ba_id.hex() + async def test_factory_reset_not_supported( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -58,16 +71,17 @@ async def test_factory_reset_not_supported( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_1( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -80,16 +94,17 @@ async def test_factory_reset_error_1( HomeAssistantError, ), ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_2( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -103,7 +118,7 @@ async def test_factory_reset_error_2( HomeAssistantError, ), ): - await hass.data[otbr.DATA_OTBR].factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() From ea04269c49f4711813a9328022721cf4e22f89ab Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 27 Aug 2024 19:48:39 +0200 Subject: [PATCH 1117/1635] Improve test coverage for nextcloud (#123148) * add first data driven tests * remove unused mock * test unique_id migration * test errors during setup * test error during data update * test update entity * system_versionis always available * make use of snapshot_platform helper * use parametrize test for coordinator update errors * apply suggestions * don't touch internals on coordinator tests * rework to use async_get_or_create instead of mock_registry --- homeassistant/components/nextcloud/update.py | 12 +- tests/components/nextcloud/__init__.py | 37 + tests/components/nextcloud/conftest.py | 10 +- tests/components/nextcloud/const.py | 182 + .../snapshots/test_binary_sensor.ambr | 
277 ++ .../nextcloud/snapshots/test_config_flow.ambr | 4 +- .../nextcloud/snapshots/test_sensor.ambr | 3973 +++++++++++++++++ .../nextcloud/snapshots/test_update.ambr | 57 + .../nextcloud/test_binary_sensor.py | 33 + .../components/nextcloud/test_config_flow.py | 35 +- .../components/nextcloud/test_coordinator.py | 69 + tests/components/nextcloud/test_init.py | 95 + tests/components/nextcloud/test_sensor.py | 31 + tests/components/nextcloud/test_update.py | 80 + 14 files changed, 4855 insertions(+), 40 deletions(-) create mode 100644 tests/components/nextcloud/const.py create mode 100644 tests/components/nextcloud/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/nextcloud/snapshots/test_sensor.ambr create mode 100644 tests/components/nextcloud/snapshots/test_update.ambr create mode 100644 tests/components/nextcloud/test_binary_sensor.py create mode 100644 tests/components/nextcloud/test_coordinator.py create mode 100644 tests/components/nextcloud/test_init.py create mode 100644 tests/components/nextcloud/test_sensor.py create mode 100644 tests/components/nextcloud/test_update.py diff --git a/homeassistant/components/nextcloud/update.py b/homeassistant/components/nextcloud/update.py index 8c292e1bba2..5b9de52ad1d 100644 --- a/homeassistant/components/nextcloud/update.py +++ b/homeassistant/components/nextcloud/update.py @@ -32,12 +32,12 @@ class NextcloudUpdateSensor(NextcloudEntity, UpdateEntity): """Represents a Nextcloud update entity.""" @property - def installed_version(self) -> str | None: + def installed_version(self) -> str: """Version installed and in use.""" - return self.coordinator.data.get("system_version") + return self.coordinator.data["system_version"] @property - def latest_version(self) -> str | None: + def latest_version(self) -> str: """Latest version available for install.""" return self.coordinator.data.get( "update_available_version", self.installed_version @@ -46,7 +46,5 @@ class NextcloudUpdateSensor(NextcloudEntity, UpdateEntity): @property def release_url(self) -> str | None: """URL to the full release notes of the latest version available.""" - if self.latest_version: - ver = "-".join(self.latest_version.split(".")[:3]) - return f"https://nextcloud.com/changelog/#{ver}" - return None + ver = "-".join(self.latest_version.split(".")[:3]) + return f"https://nextcloud.com/changelog/#{ver}" diff --git a/tests/components/nextcloud/__init__.py b/tests/components/nextcloud/__init__.py index e2102ed8c25..4bc5a041650 100644 --- a/tests/components/nextcloud/__init__.py +++ b/tests/components/nextcloud/__init__.py @@ -1 +1,38 @@ """Tests for the Nextcloud integration.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.nextcloud.const import DOMAIN +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant + +from .const import MOCKED_ENTRY_ID + +from tests.common import MockConfigEntry + + +def mock_config_entry(config: dict) -> MockConfigEntry: + """Return a mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, title=config[CONF_URL], data=config, entry_id=MOCKED_ENTRY_ID + ) + + +async def init_integration( + hass: HomeAssistant, config: dict, data: dict +) -> MockConfigEntry: + """Set up the nextcloud integration.""" + entry = mock_config_entry(config) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor", + ) as mock_nextcloud_monitor, + ): + mock_nextcloud_monitor.update = Mock(return_value=True) + 
mock_nextcloud_monitor.return_value.data = data + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return entry diff --git a/tests/components/nextcloud/conftest.py b/tests/components/nextcloud/conftest.py index cf3eda55fe1..3234e3773b8 100644 --- a/tests/components/nextcloud/conftest.py +++ b/tests/components/nextcloud/conftest.py @@ -1,19 +1,11 @@ """Fixtrues for the Nextcloud integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, patch import pytest -@pytest.fixture -def mock_nextcloud_monitor() -> Mock: - """Mock of NextcloudMonitor.""" - return Mock( - update=Mock(return_value=True), - ) - - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" diff --git a/tests/components/nextcloud/const.py b/tests/components/nextcloud/const.py new file mode 100644 index 00000000000..2d328292b6f --- /dev/null +++ b/tests/components/nextcloud/const.py @@ -0,0 +1,182 @@ +"""Constants for nextcloud tests.""" + +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL + +MOCKED_ENTRY_ID = "1234567890abcdef" + +VALID_CONFIG = { + CONF_URL: "https://my.nc_url.local", + CONF_USERNAME: "nc_user", + CONF_PASSWORD: "nc_pass", + CONF_VERIFY_SSL: True, +} + +NC_DATA = { + "nextcloud": { + "system": { + "version": "28.0.4.1", + "theme": "", + "enable_avatars": "yes", + "enable_previews": "yes", + "memcache.local": "\\OC\\Memcache\\APCu", + "memcache.distributed": "none", + "filelocking.enabled": "yes", + "memcache.locking": "none", + "debug": "no", + "freespace": 32769138688, + "cpuload": [2.06640625, 1.58447265625, 1.45263671875], + "mem_total": 30728192, + "mem_free": 6753280, + "swap_total": 10484736, + "swap_free": 10484736, + "apps": { + "num_installed": 41, + "num_updates_available": 0, + "app_updates": [], + }, + "update": {"lastupdatedat": 1713048517, "available": False}, + }, + "storage": { + "num_users": 2, + "num_files": 6783, + "num_storages": 4, + "num_storages_local": 1, + "num_storages_home": 2, + "num_storages_other": 1, + }, + "shares": { + "num_shares": 2, + "num_shares_user": 0, + "num_shares_groups": 0, + "num_shares_link": 2, + "num_shares_mail": 0, + "num_shares_room": 0, + "num_shares_link_no_password": 2, + "num_fed_shares_sent": 0, + "num_fed_shares_received": 1, + "permissions_3_17": 1, + "permissions_3_31": 1, + }, + }, + "server": { + "webserver": "Apache/2.4.57 (Debian)", + "php": { + "version": "8.2.18", + "memory_limit": 536870912, + "max_execution_time": 3600, + "upload_max_filesize": 536870912, + "opcache_revalidate_freq": 60, + "opcache": { + "opcache_enabled": True, + "cache_full": False, + "restart_pending": False, + "restart_in_progress": False, + "memory_usage": { + "used_memory": 72027112, + "free_memory": 62190616, + "wasted_memory": 0, + "current_wasted_percentage": 0, + }, + "interned_strings_usage": { + "buffer_size": 33554432, + "used_memory": 12630360, + "free_memory": 20924072, + "number_of_strings": 69242, + }, + "opcache_statistics": { + "num_cached_scripts": 1406, + "num_cached_keys": 2654, + "max_cached_keys": 16229, + "hits": 9739971, + "start_time": 1722222008, + "last_restart_time": 0, + "oom_restarts": 0, + "hash_restarts": 0, + "manual_restarts": 0, + "misses": 1406, + "blacklist_misses": 0, + "blacklist_miss_ratio": 0, + "opcache_hit_rate": 99.9855667222406, + }, + "jit": { + "enabled": True, + "on": True, + "kind": 5, + "opt_level": 5, + 
"opt_flags": 6, + "buffer_size": 134217712, + "buffer_free": 133190688, + }, + }, + "apcu": { + "cache": { + "num_slots": 4099, + "ttl": 0, + "num_hits": 590911, + "num_misses": 55250, + "num_inserts": 55421, + "num_entries": 102, + "expunges": 0, + "start_time": 1722222008, + "mem_size": 175296, + "memory_type": "mmap", + }, + "sma": {"num_seg": 1, "seg_size": 33554312, "avail_mem": 33342368}, + }, + "extensions": [ + "Core", + "date", + "libxml", + "openssl", + "pcre", + "sqlite3", + "zlib", + "ctype", + "curl", + "dom", + "fileinfo", + "filter", + "hash", + "iconv", + "json", + "mbstring", + "SPL", + "session", + "PDO", + "pdo_sqlite", + "standard", + "posix", + "random", + "Reflection", + "Phar", + "SimpleXML", + "tokenizer", + "xml", + "xmlreader", + "xmlwriter", + "mysqlnd", + "apache2handler", + "apcu", + "bcmath", + "exif", + "ftp", + "gd", + "gmp", + "imagick", + "intl", + "ldap", + "memcached", + "pcntl", + "pdo_mysql", + "pdo_pgsql", + "redis", + "sodium", + "sysvsem", + "zip", + "Zend OPcache", + ], + }, + "database": {"type": "sqlite3", "version": "3.40.1", "size": "4784128"}, + }, + "activeUsers": {"last5minutes": 0, "last1hour": 0, "last24hours": 0}, +} diff --git a/tests/components/nextcloud/snapshots/test_binary_sensor.ambr b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..1831419af52 --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr @@ -0,0 +1,277 @@ +# serializer version: 1 +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Avatars enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_enable_avatars', + 'unique_id': '1234567890abcdef#system_enable_avatars', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Avatars enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Debug enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_debug', + 'unique_id': '1234567890abcdef#system_debug', + 'unit_of_measurement': None, 
+ }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Debug enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filelocking enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_filelocking_enabled', + 'unique_id': '1234567890abcdef#system_filelocking.enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Filelocking enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT active', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_on', + 'unique_id': '1234567890abcdef#jit_on', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT active', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_enabled', + 'unique_id': 
'1234567890abcdef#jit_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previews enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_enable_previews', + 'unique_id': '1234567890abcdef#system_enable_previews', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Previews enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextcloud/snapshots/test_config_flow.ambr b/tests/components/nextcloud/snapshots/test_config_flow.ambr index 06c4ce216db..e87db0a25c0 100644 --- a/tests/components/nextcloud/snapshots/test_config_flow.ambr +++ b/tests/components/nextcloud/snapshots/test_config_flow.ambr @@ -2,7 +2,7 @@ # name: test_reauth dict({ 'password': 'other_password', - 'url': 'nc_url', + 'url': 'https://my.nc_url.local', 'username': 'other_user', 'verify_ssl': True, }) @@ -10,7 +10,7 @@ # name: test_user_create_entry dict({ 'password': 'nc_pass', - 'url': 'nc_url', + 'url': 'https://my.nc_url.local', 'username': 'nc_user', 'verify_ssl': True, }) diff --git a/tests/components/nextcloud/snapshots/test_sensor.ambr b/tests/components/nextcloud/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c49ba3496da --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_sensor.ambr @@ -0,0 +1,3973 @@ +# serializer version: 1 +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last 5 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last5minutes', + 'unique_id': '1234567890abcdef#activeUsers_last5minutes', + 'unit_of_measurement': None, + }) +# --- 
+# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last 5 minutes', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last day', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last24hours', + 'unique_id': '1234567890abcdef#activeUsers_last24hours', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last day', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last hour', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last1hour', + 'unique_id': '1234567890abcdef#activeUsers_last1hour', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last hour', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_files', + 'has_entity_name': True, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of files', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_files', + 'unique_id': '1234567890abcdef#storage_num_files', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of files', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_files', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6783', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of group shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_groups', + 'unique_id': '1234567890abcdef#shares_num_shares_groups', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of group shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of link shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_link', + 'unique_id': '1234567890abcdef#shares_num_shares_link', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of link shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_local_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + 
}), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_local_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of local storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_local', + 'unique_id': '1234567890abcdef#storage_num_storages_local', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_local_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of local storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_local_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_mail_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_mail_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of mail shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_mail', + 'unique_id': '1234567890abcdef#shares_num_shares_mail', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_mail_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of mail shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_mail_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_other_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_other_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of other storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_other', + 'unique_id': '1234567890abcdef#storage_num_storages_other', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_other_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of other storages', + 'state_class': , + }), + 'context': , + 'entity_id': 
'sensor.my_nc_url_local_amount_of_other_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_passwordless_link_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_passwordless_link_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of passwordless link shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_link_no_password', + 'unique_id': '1234567890abcdef#shares_num_shares_link_no_password', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_passwordless_link_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of passwordless link shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_passwordless_link_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_room_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_room_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of room shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_room', + 'unique_id': '1234567890abcdef#shares_num_shares_room', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_room_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of room shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_room_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'nextcloud_shares_num_shares', + 'unique_id': '1234567890abcdef#shares_num_shares', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares received', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_fed_shares_received', + 'unique_id': '1234567890abcdef#shares_num_fed_shares_received', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares received', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_sent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_sent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares sent', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_fed_shares_sent', + 'unique_id': '1234567890abcdef#shares_num_fed_shares_sent', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_sent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares sent', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_sent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': 
None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages', + 'unique_id': '1234567890abcdef#storage_num_storages', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages_at_home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages_at_home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of storages at home', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_home', + 'unique_id': '1234567890abcdef#storage_num_storages_home', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages_at_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of storages at home', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_user', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of user', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_users', + 'unique_id': '1234567890abcdef#storage_num_users', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of user', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of user shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_user', + 'unique_id': '1234567890abcdef#shares_num_shares_user', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of user shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_apps_installed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_apps_installed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Apps installed', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_apps_num_installed', + 'unique_id': '1234567890abcdef#system_apps_num_installed', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_apps_installed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Apps installed', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_apps_installed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '41', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_expunges-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_expunges', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache expunges', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_expunges', + 'unique_id': '1234567890abcdef#cache_expunges', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_expunges-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache expunges', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_expunges', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_cache_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_memory_type', + 'unique_id': '1234567890abcdef#cache_memory_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache memory', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'mmap', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cache memory size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_mem_size', + 'unique_id': '1234567890abcdef#cache_mem_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Cache memory size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.175296', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_entires-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_entires', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of entires', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_entries', + 'unique_id': '1234567890abcdef#cache_num_entries', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_entires-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of entires', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_entires', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '102', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_hits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_hits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of hits', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_hits', + 'unique_id': '1234567890abcdef#cache_num_hits', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_hits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of hits', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_hits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '590911', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_inserts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_inserts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of inserts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_inserts', + 'unique_id': '1234567890abcdef#cache_num_inserts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_inserts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of inserts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_inserts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55421', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'nextcloud_cache_num_misses', + 'unique_id': '1234567890abcdef#cache_num_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55250', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_slots-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_slots', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of slots', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_slots', + 'unique_id': '1234567890abcdef#cache_num_slots', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_slots-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of slots', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_slots', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4099', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_start_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cache start time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_start_time', + 'unique_id': '1234567890abcdef#cache_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Cache start time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-07-29T03:00:08+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_ttl', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache ttl', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_ttl', + 'unique_id': '1234567890abcdef#cache_ttl', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache ttl', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_ttl', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_15_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 15 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_15', + 'unique_id': '1234567890abcdef#system_cpuload_15', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 15 minutes', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_15_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.45263671875', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_1_minute', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 1 minute', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_1', + 'unique_id': '1234567890abcdef#system_cpuload_1', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 1 minute', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_1_minute', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.06640625', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_5_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 5 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_5', + 'unique_id': '1234567890abcdef#system_cpuload_5', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 5 minutes', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_5_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.58447265625', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_database_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Database size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_size', + 'unique_id': '1234567890abcdef#database_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Database size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.784128', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_database_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Database type', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_type', + 'unique_id': '1234567890abcdef#database_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Database type', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_type', + 'last_changed': , 
+ 'last_reported': , + 'last_updated': , + 'state': 'sqlite3', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_database_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Database version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_version', + 'unique_id': '1234567890abcdef#database_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Database version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.40.1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_mem_free', + 'unique_id': '1234567890abcdef#system_mem_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.75328', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free space', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_freespace', + 'unique_id': '1234567890abcdef#system_freespace', + 'unit_of_measurement': , + }) +# 
--- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.769138688', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_swap_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_swap_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free swap memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_swap_free', + 'unique_id': '1234567890abcdef#system_swap_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_swap_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free swap memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_swap_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.484736', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_buffer_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_buffer_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned buffer size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_buffer_size', + 'unique_id': '1234567890abcdef#interned_strings_usage_buffer_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_buffer_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned buffer size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_buffer_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.554432', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.my_nc_url_local_interned_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_free_memory', + 'unique_id': '1234567890abcdef#interned_strings_usage_free_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.924072', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_number_of_strings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_number_of_strings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Interned number of strings', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_number_of_strings', + 'unique_id': '1234567890abcdef#interned_strings_usage_number_of_strings', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_number_of_strings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Interned number of strings', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_number_of_strings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '69242', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_used_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_used_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned used memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_used_memory', + 'unique_id': '1234567890abcdef#interned_strings_usage_used_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_used_memory-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned used memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_used_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.63036', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_free-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_free', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'JIT buffer free', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_buffer_free', + 'unique_id': '1234567890abcdef#jit_buffer_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_free-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local JIT buffer free', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_free', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '133.190688', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'JIT buffer size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_buffer_size', + 'unique_id': '1234567890abcdef#jit_buffer_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local JIT buffer size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '134.217712', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_kind-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_kind', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT kind', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_kind', + 'unique_id': '1234567890abcdef#jit_kind', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_kind-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT kind', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_kind', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_flags-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_flags', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT opt flags', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_opt_flags', + 'unique_id': '1234567890abcdef#jit_opt_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_flags-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT opt flags', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_flags', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT opt level', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_opt_level', + 'unique_id': '1234567890abcdef#jit_opt_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT opt level', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_miss_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_miss_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache blacklist miss ratio', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_blacklist_miss_ratio', + 'unique_id': '1234567890abcdef#opcache_statistics_blacklist_miss_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_miss_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache blacklist miss ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_miss_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache blacklist misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_blacklist_misses', + 'unique_id': '1234567890abcdef#opcache_statistics_blacklist_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache blacklist misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_keys-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_keys', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache cached keys', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_num_cached_keys', + 'unique_id': '1234567890abcdef#opcache_statistics_num_cached_keys', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_keys-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache cached keys', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_keys', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2654', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_scripts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': 
set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_scripts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache cached scripts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_num_cached_scripts', + 'unique_id': '1234567890abcdef#opcache_statistics_num_cached_scripts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_scripts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache cached scripts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_scripts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_current_wasted_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_current_wasted_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache current wasted percentage', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_current_wasted_percentage', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_current_wasted_percentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_current_wasted_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache current wasted percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_current_wasted_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_free_memory', + 
'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_free_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.190616', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hash_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hash_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hash restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_hash_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_hash_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hash_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hash restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hash_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hit_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hit_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hit rate', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_opcache_hit_rate', + 'unique_id': '1234567890abcdef#opcache_statistics_opcache_hit_rate', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hit_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hit rate', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hit_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99.9855667222406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hits', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hits', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_hits', + 'unique_id': '1234567890abcdef#opcache_statistics_hits', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hits', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9739971', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_last_restart_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_last_restart_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache last restart time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_last_restart_time', + 'unique_id': '1234567890abcdef#opcache_statistics_last_restart_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_last_restart_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Opcache last restart time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_last_restart_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:00+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_manual_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_manual_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache manual restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_manual_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_manual_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_manual_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache manual restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_manual_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_opcache_max_cached_keys-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_max_cached_keys', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache max cached keys', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_max_cached_keys', + 'unique_id': '1234567890abcdef#opcache_statistics_max_cached_keys', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_max_cached_keys-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache max cached keys', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_max_cached_keys', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16229', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_misses', + 'unique_id': '1234567890abcdef#opcache_statistics_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_out_of_memory_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_out_of_memory_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache out of memory restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_oom_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_oom_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_opcache_out_of_memory_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache out of memory restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_out_of_memory_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_start_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache start time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_start_time', + 'unique_id': '1234567890abcdef#opcache_statistics_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Opcache start time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-07-29T03:00:08+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_used_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_used_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache used memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_used_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_used_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_used_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache used memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_used_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72.027112', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', + 'has_entity_name': True, + 'hidden_by': None, 
+ 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache wasted memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_wasted_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_wasted_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache wasted memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP max execution time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_max_execution_time', + 'unique_id': '1234567890abcdef#server_php_max_execution_time', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'my.nc_url.local PHP max execution time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3600', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP memory limit', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_memory_limit', + 'unique_id': '1234567890abcdef#server_php_memory_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local PHP memory limit', + 'unit_of_measurement': , + }), + 
'context': , + 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '536.870912', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP upload maximum filesize', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_upload_max_filesize', + 'unique_id': '1234567890abcdef#server_php_upload_max_filesize', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local PHP upload maximum filesize', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '536.870912', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'PHP version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_version', + 'unique_id': '1234567890abcdef#server_php_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local PHP version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.2.18', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SMA available memory', + 
'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_avail_mem', + 'unique_id': '1234567890abcdef#sma_avail_mem', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local SMA available memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.342368', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SMA number of segments', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_num_seg', + 'unique_id': '1234567890abcdef#sma_num_seg', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local SMA number of segments', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SMA segment size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_seg_size', + 'unique_id': '1234567890abcdef#sma_seg_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local SMA segment size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.554312', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache distributed', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_distributed', + 'unique_id': '1234567890abcdef#system_memcache.distributed', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache distributed', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache local', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_local', + 'unique_id': '1234567890abcdef#system_memcache.local', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache local', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '\\OC\\Memcache\\APCu', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache locking', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_locking', + 'unique_id': '1234567890abcdef#system_memcache.locking', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache locking', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_system_theme-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_system_theme', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System theme', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_theme', + 'unique_id': '1234567890abcdef#system_theme', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System theme', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_theme', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_system_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_version', + 'unique_id': '1234567890abcdef#system_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.0.4.1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_total_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_mem_total', + 'unique_id': '1234567890abcdef#system_mem_total', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Total memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 
'sensor.my_nc_url_local_total_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.728192', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total swap memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_swap_total', + 'unique_id': '1234567890abcdef#system_swap_total', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Total swap memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.484736', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_updates_available', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Updates available', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_apps_num_updates_available', + 'unique_id': '1234567890abcdef#system_apps_num_updates_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Updates available', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_updates_available', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_webserver', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Webserver', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_webserver', + 'unique_id': 
'1234567890abcdef#server_webserver', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Webserver', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_webserver', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Apache/2.4.57 (Debian)', + }) +# --- diff --git a/tests/components/nextcloud/snapshots/test_update.ambr b/tests/components/nextcloud/snapshots/test_update.ambr new file mode 100644 index 00000000000..1ee6264c204 --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_update.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_async_setup_entry[update.my_nc_url_local_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.my_nc_url_local_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234567890abcdef#update', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[update.my_nc_url_local_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'entity_picture': 'https://brands.home-assistant.io/_/nextcloud/icon.png', + 'friendly_name': 'my.nc_url.local None', + 'in_progress': False, + 'installed_version': '28.0.4.1', + 'latest_version': '28.0.4.1', + 'release_summary': None, + 'release_url': 'https://nextcloud.com/changelog/#28-0-4', + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.my_nc_url_local_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/nextcloud/test_binary_sensor.py b/tests/components/nextcloud/test_binary_sensor.py new file mode 100644 index 00000000000..ff121c53ec3 --- /dev/null +++ b/tests/components/nextcloud/test_binary_sensor.py @@ -0,0 +1,33 @@ +"""Tests for the Nextcloud binary sensors.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch( + "homeassistant.components.nextcloud.PLATFORMS", [Platform.BINARY_SENSOR] + ): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 6 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index c02516fdc99..536464d5253 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for the Nextcloud config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import patch from nextcloudmonitor import ( NextcloudMonitorAuthorizationError, @@ -12,24 +12,19 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.nextcloud.const import DOMAIN from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .const import VALID_CONFIG + from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -VALID_CONFIG = { - CONF_URL: "nc_url", - CONF_USERNAME: "nc_user", - CONF_PASSWORD: "nc_pass", - CONF_VERIFY_SSL: True, -} - async def test_user_create_entry( - hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test that the user step works.""" # start user flow @@ -85,7 +80,7 @@ async def test_user_create_entry( # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -94,17 +89,15 @@ async def test_user_create_entry( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "nc_url" + assert result["title"] == "https://my.nc_url.local" assert result["data"] == snapshot -async def test_user_already_configured( - hass: HomeAssistant, mock_nextcloud_monitor: Mock -) -> None: +async def test_user_already_configured(hass: HomeAssistant) -> None: """Test that errors are shown when duplicates are added.""" entry = MockConfigEntry( domain=DOMAIN, - title="nc_url", + title="https://my.nc_url.local", unique_id="nc_url", data=VALID_CONFIG, ) @@ -119,7 +112,7 @@ async def test_user_already_configured( with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -131,13 +124,11 @@ async def test_user_already_configured( assert result["reason"] == "already_configured" -async def test_reauth( - hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion -) -> None: +async def test_reauth(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: 
"""Test that the re-auth flow works.""" entry = MockConfigEntry( domain=DOMAIN, - title="nc_url", + title="https://my.nc_url.local", unique_id="nc_url", data=VALID_CONFIG, ) @@ -206,7 +197,7 @@ async def test_reauth( # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/nextcloud/test_coordinator.py b/tests/components/nextcloud/test_coordinator.py new file mode 100644 index 00000000000..91f7e7967a3 --- /dev/null +++ b/tests/components/nextcloud/test_coordinator.py @@ -0,0 +1,69 @@ +"""Tests for the Nextcloud coordinator.""" + +from unittest.mock import Mock, patch + +from freezegun.api import FrozenDateTimeFactory +from nextcloudmonitor import ( + NextcloudMonitor, + NextcloudMonitorAuthorizationError, + NextcloudMonitorConnectionError, + NextcloudMonitorError, + NextcloudMonitorRequestError, +) +import pytest + +from homeassistant.components.nextcloud.const import DEFAULT_SCAN_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . import mock_config_entry +from .const import NC_DATA, VALID_CONFIG + +from tests.common import async_fire_time_changed + + +@pytest.mark.parametrize( + ("error"), + [ + (NextcloudMonitorAuthorizationError), + (NextcloudMonitorConnectionError), + (NextcloudMonitorRequestError), + ], +) +async def test_data_update( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, error: NextcloudMonitorError +) -> None: + """Test a coordinator data updates.""" + entry = mock_config_entry(VALID_CONFIG) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor", spec=NextcloudMonitor + ) as mock_nextcloud_monitor, + ): + mock_nextcloud_monitor.return_value.update = Mock( + return_value=True, + side_effect=[None, error, None], + ) + mock_nextcloud_monitor.return_value.data = NC_DATA + assert await hass.config_entries.async_setup(entry.entry_id) + + # Test successful setup and first data fetch + await hass.async_block_till_done(wait_background_tasks=True) + states = hass.states.async_all() + assert (state != STATE_UNAVAILABLE for state in states) + + # Test states get unavailable on error + freezer.tick(DEFAULT_SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + states = hass.states.async_all() + assert (state == STATE_UNAVAILABLE for state in states) + + # Test successful data fetch + freezer.tick(DEFAULT_SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + states = hass.states.async_all() + assert (state != STATE_UNAVAILABLE for state in states) diff --git a/tests/components/nextcloud/test_init.py b/tests/components/nextcloud/test_init.py new file mode 100644 index 00000000000..70c8f545c6b --- /dev/null +++ b/tests/components/nextcloud/test_init.py @@ -0,0 +1,95 @@ +"""Tests for the Nextcloud init.""" + +from unittest.mock import Mock, patch + +from nextcloudmonitor import ( + NextcloudMonitorAuthorizationError, + NextcloudMonitorConnectionError, + NextcloudMonitorError, + NextcloudMonitorRequestError, +) +import pytest + +from homeassistant.components.nextcloud.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_URL, Platform +from homeassistant.core import HomeAssistant +from 
homeassistant.helpers import entity_registry as er + +from . import init_integration, mock_config_entry +from .const import MOCKED_ENTRY_ID, NC_DATA, VALID_CONFIG + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, +) -> None: + """Test a successful setup entry.""" + assert await init_integration(hass, VALID_CONFIG, NC_DATA) + + +async def test_unique_id_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test migration of unique ids to stable ones.""" + + object_id = "my_nc_url_local_system_version" + entity_id = f"{Platform.SENSOR}.{object_id}" + + entry = mock_config_entry(VALID_CONFIG) + entry.add_to_hass(hass) + + entity = entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version", + suggested_object_id=object_id, + config_entry=entry, + ) + + # test old unique id + assert entity.entity_id == entity_id + assert entity.unique_id == f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version" + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor" + ) as mock_nextcloud_monitor, + ): + mock_nextcloud_monitor.update = Mock(return_value=True) + mock_nextcloud_monitor.return_value.data = NC_DATA + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + # test migrated unique id + reg_entry = entity_registry.async_get(entity_id) + assert reg_entry.unique_id == f"{MOCKED_ENTRY_ID}#system_version" + + +@pytest.mark.parametrize( + ("exception", "expected_entry_state"), + [ + (NextcloudMonitorAuthorizationError, ConfigEntryState.SETUP_ERROR), + (NextcloudMonitorConnectionError, ConfigEntryState.SETUP_RETRY), + (NextcloudMonitorRequestError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_setup_entry_errors( + hass: HomeAssistant, + exception: NextcloudMonitorError, + expected_entry_state: ConfigEntryState, +) -> None: + """Test config entry setup errors.""" + + entry = mock_config_entry(VALID_CONFIG) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor", side_effect=exception + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == expected_entry_state diff --git a/tests/components/nextcloud/test_sensor.py b/tests/components/nextcloud/test_sensor.py new file mode 100644 index 00000000000..1ea2c87db11 --- /dev/null +++ b/tests/components/nextcloud/test_sensor.py @@ -0,0 +1,31 @@ +"""Tests for the Nextcloud sensors.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 80 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_update.py b/tests/components/nextcloud/test_update.py new file mode 100644 index 00000000000..d47c9f1df53 --- /dev/null +++ b/tests/components/nextcloud/test_update.py @@ -0,0 +1,80 @@ +"""Tests for the Nextcloud update entity.""" + +from copy import deepcopy +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 1 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_setup_entity_without_update( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test update entity is created w/o available update.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 1 + assert states[0].state == STATE_OFF + assert states[0].attributes["installed_version"] == "28.0.4.1" + assert states[0].attributes["latest_version"] == "28.0.4.1" + assert ( + states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#28-0-4" + ) + + +async def test_setup_entity_with_update( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test update entity is created with available update.""" + data = deepcopy(NC_DATA) + data["nextcloud"]["system"]["update"]["available"] = True + data["nextcloud"]["system"]["update"]["available_version"] = "30.0.0.0" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, data) + + states = hass.states.async_all() + assert len(states) == 1 + assert states[0].state == STATE_ON + assert states[0].attributes["installed_version"] == "28.0.4.1" + assert states[0].attributes["latest_version"] == "30.0.0.0" + assert ( + states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#30-0-0" + ) + + +async def test_setup_no_entity(hass: HomeAssistant) -> None: + """Test no update entity is created, when no data available.""" + data = deepcopy(NC_DATA) + data["nextcloud"]["system"].pop("update") # only nc<28.0.0 + with 
patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, data) + + states = hass.states.async_all() + assert len(states) == 0 From 9e762fa2223bd4c1610785108b7afa29f220b292 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Tue, 27 Aug 2024 13:53:00 -0400 Subject: [PATCH 1118/1635] Bump Nice G.O. to 0.3.5 (#124667) * Bump Nice G.O. to 0.3.1 * Bump to 0.3.5 --- homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index c2ff8370e2a..45dd3c8b5b4 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nice_go", "iot_class": "cloud_push", "loggers": ["nice-go"], - "requirements": ["nice-go==0.3.0"] + "requirements": ["nice-go==0.3.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 370012e3e15..784f2e65c00 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1429,7 +1429,7 @@ nextdns==3.2.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.0 +nice-go==0.3.5 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fd934a4f929..293aa62a4d8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1186,7 +1186,7 @@ nextdns==3.2.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.0 +nice-go==0.3.5 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 From dd0c353afbb1282da55f061c89910a8edf1a823e Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 27 Aug 2024 22:28:54 +0200 Subject: [PATCH 1119/1635] Fix nice_go integration tests (#124736) --- tests/components/nice_go/conftest.py | 3 ++- tests/components/nice_go/snapshots/test_diagnostics.ambr | 1 - tests/components/nice_go/test_diagnostics.py | 4 +++- tests/components/nice_go/test_init.py | 2 +- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py index 31b21083c05..9ed3d0d19cf 100644 --- a/tests/components/nice_go/conftest.py +++ b/tests/components/nice_go/conftest.py @@ -1,6 +1,7 @@ """Common fixtures for the Nice G.O. 
tests.""" from collections.abc import Generator +from datetime import datetime from unittest.mock import AsyncMock, patch from nice_go import Barrier, BarrierState, ConnectionState @@ -71,7 +72,7 @@ def mock_config_entry() -> MockConfigEntry: CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password", CONF_REFRESH_TOKEN: "test-refresh-token", - CONF_REFRESH_TOKEN_CREATION_TIME: 1722184160.738171, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, version=1, unique_id="test-email", diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr index abd3b3103d1..6f9428ed246 100644 --- a/tests/components/nice_go/snapshots/test_diagnostics.ambr +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -26,7 +26,6 @@ 'email': '**REDACTED**', 'password': '**REDACTED**', 'refresh_token': '**REDACTED**', - 'refresh_token_creation_time': 1722184160.738171, }), 'disabled_by': None, 'domain': 'nice_go', diff --git a/tests/components/nice_go/test_diagnostics.py b/tests/components/nice_go/test_diagnostics.py index 1c88c6a8dc6..f91f5748792 100644 --- a/tests/components/nice_go/test_diagnostics.py +++ b/tests/components/nice_go/test_diagnostics.py @@ -26,4 +26,6 @@ async def test_entry_diagnostics( result = await get_diagnostics_for_config_entry( hass, hass_client, mock_config_entry ) - assert result == snapshot(exclude=props("created_at", "modified_at")) + assert result == snapshot( + exclude=props("created_at", "modified_at", "refresh_token_creation_time") + ) diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index 249622d23b0..5568a7ea62a 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -110,7 +110,7 @@ async def test_update_refresh_token( assert mock_nice_go.authenticate.call_count == 0 mock_nice_go.authenticate.return_value = "new-refresh-token" - freezer.tick(timedelta(days=30)) + freezer.tick(timedelta(days=30, seconds=1)) async_fire_time_changed(hass) assert await hass.config_entries.async_reload(mock_config_entry.entry_id) await hass.async_block_till_done() From 467749eb579e8bd7e57ad96c677eee21485d10b8 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Tue, 27 Aug 2024 23:48:13 +0300 Subject: [PATCH 1120/1635] Fix Shelly sleepy RPC setup if device is already awake (#124734) --- homeassistant/components/shelly/__init__.py | 5 ++ .../components/shelly/coordinator.py | 12 +++-- tests/components/shelly/test_init.py | 46 +++++++++++++++++++ 3 files changed, 58 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 1d3f67220fa..e0d9d17d55d 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -290,6 +290,11 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) ) runtime_data.rpc = ShellyRpcCoordinator(hass, entry, device) runtime_data.rpc.async_setup(runtime_data.platforms) + # Try to connect to the device, if we reached here from config flow + # and user woke up the device when adding it, we can continue setup + # otherwise we will wait for the device to wake up + if sleep_period: + await runtime_data.rpc.async_device_online("setup") else: # Restore sensors for sleeping device LOGGER.debug("Setting up offline RPC device %s", entry.title) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 
03dcdedbb6f..918dd920765 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -173,7 +173,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( await self.device.initialize() update_device_fw_info(self.hass, self.device, self.entry) except DeviceConnectionError as err: - LOGGER.error( + LOGGER.debug( "Error connecting to Shelly device %s, error: %r", self.name, err ) return False @@ -480,15 +480,17 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): self._connect_task: asyncio.Task | None = None entry.async_on_unload(entry.add_update_listener(self._async_update_listener)) - async def async_device_online(self) -> None: + async def async_device_online(self, source: str) -> None: """Handle device going online.""" if not self.sleep_period: await self.async_request_refresh() elif not self._came_online_once or not self.device.initialized: LOGGER.debug( - "Sleepy device %s is online, trying to poll and configure", self.name + "Sleepy device %s is online (source: %s), trying to poll and configure", + self.name, + source, ) - # Zeroconf told us the device is online, try to poll + # Source told us the device is online, try to poll # the device and if possible, set up the outbound # websocket so the device will send us updates # instead of relying on polling it fast enough before @@ -847,7 +849,7 @@ async def async_reconnect_soon(hass: HomeAssistant, entry: ShellyConfigEntry) -> ): entry.async_create_background_task( hass, - coordinator.async_device_online(), + coordinator.async_device_online("zeroconf"), "reconnect soon", eager_start=True, ) diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py index 46698c23c0a..b5516485501 100644 --- a/tests/components/shelly/test_init.py +++ b/tests/components/shelly/test_init.py @@ -310,6 +310,52 @@ async def test_sleeping_rpc_device_online_new_firmware( assert entry.data["sleep_period"] == 1500 +async def test_sleeping_rpc_device_online_during_setup( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sleeping device Gen2 woke up by user during setup.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + await init_integration(hass, 2, sleep_period=1000) + await hass.async_block_till_done(wait_background_tasks=True) + + assert "will resume when device is online" in caplog.text + assert "is online (source: setup)" in caplog.text + assert hass.states.get("sensor.test_name_temperature") is not None + + +async def test_sleeping_rpc_device_offline_during_setup( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test sleeping device Gen2 offline during setup.""" + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + monkeypatch.setattr( + mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError) + ) + + # Init integration, should fail since device is offline + await init_integration(hass, 2, sleep_period=1000) + await hass.async_block_till_done(wait_background_tasks=True) + + assert "will resume when device is online" in caplog.text + assert "is online (source: setup)" in caplog.text + assert hass.states.get("sensor.test_name_temperature") is None + + # Create an online event and 
verify that device is init successfully + monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock()) + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert hass.states.get("sensor.test_name_temperature") is not None + + @pytest.mark.parametrize( ("gen", "entity_id"), [ From 5818e2c2d44252f0222820d28b8f9c6aef613b24 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Tue, 27 Aug 2024 17:14:41 -0400 Subject: [PATCH 1121/1635] Yellow firmware selection options flow (#122868) * Implement Yellow config flow for firmware selection * Use the probed firmware type when setting up Yellow * Add translation strings * Ensure (most) existing `init` tests pass * Remove multi-PAN setup config flow unit tests * Get existing config flow unit tests passing * Add unit tests for uninstalling multi-PAN and such * Consolidate entity creation for Yellow and clean up steps * Be explicit with multiple inheritance overrides * Address review comments --- .../homeassistant_yellow/__init__.py | 84 +++-- .../homeassistant_yellow/config_flow.py | 183 +++++++-- .../components/homeassistant_yellow/const.py | 3 + .../homeassistant_yellow/strings.json | 53 ++- .../homeassistant_yellow/test_config_flow.py | 346 +++++++++--------- .../homeassistant_yellow/test_init.py | 185 ++-------- 6 files changed, 481 insertions(+), 373 deletions(-) diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index 14c2de2c9a1..04abe5a1dca 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -2,18 +2,24 @@ from __future__ import annotations +import logging + from homeassistant.components.hassio import get_os_info, is_hassio from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( check_multi_pan_addon, - get_zigbee_socket, - multi_pan_addon_using_device, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + guess_firmware_type, ) from homeassistant.config_entries import SOURCE_HARDWARE, ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import discovery_flow -from .const import RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA +from .const import FIRMWARE, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA + +_LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -27,34 +33,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # The hassio integration has not yet fetched data from the supervisor raise ConfigEntryNotReady - board: str | None - if (board := os_info.get("board")) is None or board != "yellow": + if os_info.get("board") != "yellow": # Not running on a Home Assistant Yellow, Home Assistant may have been migrated hass.async_create_task(hass.config_entries.async_remove(entry.entry_id)) return False - try: - await check_multi_pan_addon(hass) - except HomeAssistantError as err: - raise ConfigEntryNotReady from err + firmware = ApplicationType(entry.data[FIRMWARE]) - if not await multi_pan_addon_using_device(hass, RADIO_DEVICE): - hw_discovery_data = ZHA_HW_DISCOVERY_DATA - else: - hw_discovery_data = { - "name": "Yellow Multiprotocol", - "port": { - "path": get_zigbee_socket(), - }, - "radio_type": "ezsp", - } + if firmware is ApplicationType.CPC: + 
try: + await check_multi_pan_addon(hass) + except HomeAssistantError as err: + raise ConfigEntryNotReady from err - discovery_flow.async_create_flow( - hass, - "zha", - context={"source": SOURCE_HARDWARE}, - data=hw_discovery_data, - ) + if firmware is ApplicationType.EZSP: + discovery_flow.async_create_flow( + hass, + "zha", + context={"source": SOURCE_HARDWARE}, + data=ZHA_HW_DISCOVERY_DATA, + ) return True @@ -62,3 +60,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return True + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + + _LOGGER.debug( + "Migrating from version %s.%s", config_entry.version, config_entry.minor_version + ) + + if config_entry.version == 1: + if config_entry.minor_version == 1: + # Add-on startup with type service get started before Core, always (e.g. the + # Multi-Protocol add-on). Probing the firmware would interfere with the add-on, + # so we can't safely probe here. Instead, we must make an educated guess! + firmware_guess = await guess_firmware_type(hass, RADIO_DEVICE) + + new_data = {**config_entry.data} + new_data[FIRMWARE] = firmware_guess.firmware_type.value + + hass.config_entries.async_update_entry( + config_entry, + data=new_data, + version=1, + minor_version=2, + ) + + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True + + # This means the user has downgraded from a future version + return False diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index d2212a968db..ce6475ceb3c 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -2,11 +2,13 @@ from __future__ import annotations +from abc import ABC, abstractmethod import asyncio import logging -from typing import Any +from typing import Any, final import aiohttp +from universal_silabs_flasher.const import ApplicationType import voluptuous as vol from homeassistant.components.hassio import ( @@ -15,12 +17,25 @@ from homeassistant.components.hassio import ( async_reboot_host, async_set_yellow_settings, ) -from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + BaseFirmwareConfigFlow, + BaseFirmwareOptionsFlow, +) +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + OptionsFlowHandler as MultiprotocolOptionsFlowHandler, + SerialPortSettings as MultiprotocolSerialPortSettings, +) +from homeassistant.config_entries import ( + SOURCE_HARDWARE, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback -from homeassistant.helpers import selector +from homeassistant.helpers import discovery_flow, selector -from .const import DOMAIN, ZHA_HW_DISCOVERY_DATA +from .const import DOMAIN, FIRMWARE, RADIO_DEVICE, ZHA_DOMAIN, ZHA_HW_DISCOVERY_DATA +from .hardware import BOARD_NAME _LOGGER = logging.getLogger(__name__) @@ -33,18 +48,30 @@ STEP_HW_SETTINGS_SCHEMA = vol.Schema( ) -class HomeAssistantYellowConfigFlow(ConfigFlow, domain=DOMAIN): +class 
HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): """Handle a config flow for Home Assistant Yellow.""" VERSION = 1 + MINOR_VERSION = 2 + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate config flow.""" + super().__init__(*args, **kwargs) + + self._device = RADIO_DEVICE @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> HomeAssistantYellowOptionsFlow: + ) -> OptionsFlow: """Return the options flow.""" - return HomeAssistantYellowOptionsFlow(config_entry) + firmware_type = ApplicationType(config_entry.data[FIRMWARE]) + + if firmware_type is ApplicationType.CPC: + return HomeAssistantYellowMultiPanOptionsFlowHandler(config_entry) + + return HomeAssistantYellowOptionsFlowHandler(config_entry) async def async_step_system( self, data: dict[str, Any] | None = None @@ -53,30 +80,56 @@ class HomeAssistantYellowConfigFlow(ConfigFlow, domain=DOMAIN): if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Home Assistant Yellow", data={}) + # We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this + await self._probe_firmware_type() + + # Kick off ZHA hardware discovery automatically if Zigbee firmware is running + if self._probed_firmware_type is ApplicationType.EZSP: + discovery_flow.async_create_flow( + self.hass, + ZHA_DOMAIN, + context={"source": SOURCE_HARDWARE}, + data=ZHA_HW_DISCOVERY_DATA, + ) + + return self._async_flow_finished() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._probed_firmware_type is not None + + return self.async_create_entry( + title=BOARD_NAME, + data={ + # Assume the firmware type is EZSP if we cannot probe it + FIRMWARE: (self._probed_firmware_type or ApplicationType.EZSP).value, + }, + ) -class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandler): - """Handle an option flow for Home Assistant Yellow.""" +class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC): + """Base Home Assistant Yellow options flow shared between firmware and multi-PAN.""" _hw_settings: dict[str, bool] | None = None + @abstractmethod + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + + @final + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options flow.""" + return await self.async_step_main_menu() + + @final async def async_step_on_supervisor( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle logic when on Supervisor host.""" return await self.async_step_main_menu() - async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: - """Show the main menu.""" - return self.async_show_menu( - step_id="main_menu", - menu_options=[ - "hardware_settings", - "multipan_settings", - ], - ) - async def async_step_hardware_settings( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -133,18 +186,36 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl """Reboot later.""" return self.async_create_entry(data={}) + +class HomeAssistantYellowMultiPanOptionsFlowHandler( + BaseHomeAssistantYellowOptionsFlow, MultiprotocolOptionsFlowHandler +): + """Handle a multi-PAN options flow for Home Assistant Yellow.""" + + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + return 
self.async_show_menu( + step_id="main_menu", + menu_options=[ + "hardware_settings", + "multipan_settings", + ], + ) + async def async_step_multipan_settings( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle multipan settings.""" - return await super().async_step_on_supervisor(user_input) + return await MultiprotocolOptionsFlowHandler.async_step_on_supervisor( + self, user_input + ) async def _async_serial_port_settings( self, - ) -> silabs_multiprotocol_addon.SerialPortSettings: + ) -> MultiprotocolSerialPortSettings: """Return the radio serial port settings.""" - return silabs_multiprotocol_addon.SerialPortSettings( - device="/dev/ttyAMA1", + return MultiprotocolSerialPortSettings( + device=RADIO_DEVICE, baudrate="115200", flow_control=True, ) @@ -163,4 +234,64 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl def _hardware_name(self) -> str: """Return the name of the hardware.""" - return "Home Assistant Yellow" + return BOARD_NAME + + async def async_step_flashing_complete( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Finish flashing and update the config entry.""" + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + FIRMWARE: ApplicationType.EZSP.value, + }, + ) + + return await super().async_step_flashing_complete(user_input) + + +class HomeAssistantYellowOptionsFlowHandler( + BaseHomeAssistantYellowOptionsFlow, BaseFirmwareOptionsFlow +): + """Handle a firmware options flow for Home Assistant Yellow.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + + self._hardware_name = BOARD_NAME + self._device = RADIO_DEVICE + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + return self.async_show_menu( + step_id="main_menu", + menu_options=[ + "hardware_settings", + "firmware_settings", + ], + ) + + async def async_step_firmware_settings( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle firmware configuration settings.""" + return await super().async_step_pick_firmware() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + FIRMWARE: self._probed_firmware_type.value, + }, + ) + + return self.async_create_entry(title="", data={}) diff --git a/homeassistant/components/homeassistant_yellow/const.py b/homeassistant/components/homeassistant_yellow/const.py index 8f1f9a4c2b8..79753ae9b9e 100644 --- a/homeassistant/components/homeassistant_yellow/const.py +++ b/homeassistant/components/homeassistant_yellow/const.py @@ -12,3 +12,6 @@ ZHA_HW_DISCOVERY_DATA = { }, "radio_type": "efr32", } + +FIRMWARE = "firmware" +ZHA_DOMAIN = "zha" diff --git a/homeassistant/components/homeassistant_yellow/strings.json b/homeassistant/components/homeassistant_yellow/strings.json index 95442d31500..fd3be3586b1 100644 --- a/homeassistant/components/homeassistant_yellow/strings.json +++ b/homeassistant/components/homeassistant_yellow/strings.json @@ -42,6 +42,7 @@ "main_menu": { "menu_options": { "hardware_settings": "[%key:component::homeassistant_yellow::options::step::hardware_settings::title%]", + 
"firmware_settings": "Switch between Zigbee or Thread firmware.", "multipan_settings": "Configure IEEE 802.15.4 radio multiprotocol support" } }, @@ -79,6 +80,46 @@ "start_flasher_addon": { "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::title%]", "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::description%]" + }, + "pick_firmware": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]", + "menu_options": { + "pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]", + "pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]" + } + }, + "install_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_flasher_addon::description%]" + }, + "run_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::run_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::run_zigbee_flasher_addon::description%]" + }, + "zigbee_flasher_failed": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_flasher_failed::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_flasher_failed::description%]" + }, + "confirm_zigbee": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]" + }, + "install_otbr_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]" + }, + "start_otbr_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]" + }, + "otbr_failed": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::description%]" + }, + "confirm_otbr": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]" } }, "error": { @@ -93,11 +134,19 @@ "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", "read_hw_settings_error": "Failed to read hardware settings", "write_hw_settings_error": "Failed to write hardware settings", - "zha_migration_failed": 
"[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]" + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", + "not_hassio_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::not_hassio_thread%]", + "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]", + "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]", + "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]" }, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", - "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]" + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_zigbee_flasher_addon%]", + "run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]", + "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]" } } } diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 95d7df89c9d..949e58e61b6 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -6,8 +6,17 @@ from unittest.mock import Mock, patch import pytest from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.homeassistant_yellow.const import DOMAIN -from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_ZIGBEE, +) +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + CONF_DISABLE_MULTI_PAN, + get_flasher_addon_manager, + get_multiprotocol_addon_manager, +) +from homeassistant.components.homeassistant_hardware.util import ApplicationType +from homeassistant.components.homeassistant_yellow.const import DOMAIN, RADIO_DEVICE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -57,22 +66,28 @@ async def test_config_flow(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - with patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", - return_value=True, - ) as mock_setup_entry: + with ( + patch( + "homeassistant.components.homeassistant_yellow.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", + 
return_value=ApplicationType.EZSP, + ), + ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Home Assistant Yellow" - assert result["data"] == {} + assert result["data"] == {"firmware": "ezsp"} assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == {} + assert config_entry.data == {"firmware": "ezsp"} assert config_entry.options == {} assert config_entry.title == "Home Assistant Yellow" @@ -84,10 +99,12 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -104,165 +121,6 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: mock_setup_entry.assert_not_called() -async def test_option_flow_install_multi_pan_addon( - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - {"next_step_id": "multipan_settings"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": "/dev/ttyAMA1", - "baudrate": "115200", - "flow_control": True, - } - }, - ) - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_option_flow_install_multi_pan_addon_zha( - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon when a zha config entry exists.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = 
MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - - zha_config_entry = MockConfigEntry( - data={"device": {"path": "/dev/ttyAMA1"}, "radio_type": "ezsp"}, - domain=ZHA_DOMAIN, - options={}, - title="Yellow", - ) - zha_config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - {"next_step_id": "multipan_settings"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": "/dev/ttyAMA1", - "baudrate": "115200", - "flow_control": True, - } - }, - ) - # Check the ZHA config entry data is updated - assert zha_config_entry.data == { - "device": { - "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - } - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - @pytest.mark.parametrize( ("reboot_menu_choice", "reboot_calls"), [("reboot_now", 1), ("reboot_later", 0)], @@ -281,10 +139,12 @@ async def test_option_flow_led_settings( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -327,10 +187,12 @@ async def test_option_flow_led_settings_unchanged( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -359,10 +221,12 @@ async def test_option_flow_led_settings_fail_1(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -391,10 +255,12 @@ async def test_option_flow_led_settings_fail_2( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -418,3 +284,139 @@ async def test_option_flow_led_settings_fail_2( ) 
assert result["type"] is FlowResultType.ABORT assert result["reason"] == "write_hw_settings_error" + + +async def test_firmware_options_flow(hass: HomeAssistant) -> None: + """Test the firmware options flow for Yellow.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + config_entry = MockConfigEntry( + data={"firmware": ApplicationType.SPINEL}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "main_menu" + assert "firmware_settings" in result["menu_options"] + + # Pick firmware settings + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "firmware_settings"}, + ) + + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == "Home Assistant Yellow" + + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", + autospec=True, + side_effect=mock_async_step_pick_firmware_zigbee, + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"] is True + + assert config_entry.data == { + "firmware": "ezsp", + } + + +async def test_options_flow_multipan_uninstall(hass: HomeAssistant) -> None: + """Test options flow for when multi-PAN firmware is installed.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + config_entry = MockConfigEntry( + data={"firmware": ApplicationType.CPC}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + # Multi-PAN addon is running + mock_multipan_manager = Mock(spec_set=await get_multiprotocol_addon_manager(hass)) + mock_multipan_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": RADIO_DEVICE}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + mock_flasher_manager = Mock(spec_set=get_flasher_addon_manager(hass)) + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_multiprotocol_addon_manager", + return_value=mock_multipan_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "main_menu" + assert "multipan_settings" in 
result["menu_options"] + + # Pick multi-PAN settings + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "multipan_settings"}, + ) + + # Pick the uninstall option + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "uninstall_addon"}, + ) + + # Check the box + result = await hass.config_entries.options.async_configure( + result["flow_id"], user_input={CONF_DISABLE_MULTI_PAN: True} + ) + + # Finish the flow + result = await hass.config_entries.options.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.options.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # We've reverted the firmware back to Zigbee + assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_yellow/test_init.py b/tests/components/homeassistant_yellow/test_init.py index ec3ba4e7005..5d534dad1e7 100644 --- a/tests/components/homeassistant_yellow/test_init.py +++ b/tests/components/homeassistant_yellow/test_init.py @@ -6,10 +6,14 @@ import pytest from homeassistant.components import zha from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.hassio.handler import HassioAPIError +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareGuess, +) from homeassistant.components.homeassistant_yellow.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockModule, mock_integration @@ -27,10 +31,12 @@ async def test_setup_entry( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -42,6 +48,14 @@ async def test_setup_entry( "homeassistant.components.onboarding.async_is_onboarded", return_value=onboarded, ), + patch( + "homeassistant.components.homeassistant_yellow.guess_firmware_type", + return_value=FirmwareGuess( # Nothing is setup + is_running=False, + firmware_type=ApplicationType.EZSP, + source="unknown", + ), + ), ): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -74,118 +88,12 @@ async def test_setup_zha(hass: HomeAssistant, addon_store_info) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ) as mock_get_os_info, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get_os_info.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert 
len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": "hardware", - "path": "/dev/ttyAMA1", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "Yellow" - - -async def test_setup_zha_multipan( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - addon_info.return_value["options"]["device"] = "/dev/ttyAMA1" - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ) as mock_get_os_info, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get_os_info.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - "path": "socket://core-silabs-multiprotocol:9999", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "Yellow Multiprotocol" - - -async def test_setup_zha_multipan_other_device( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - addon_info.return_value["options"]["device"] = "/dev/not_yellow_radio" - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -229,10 +137,12 @@ async def test_setup_entry_no_hassio(hass: HomeAssistant) -> None: """Test setup of a config entry without hassio.""" # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -254,10 +164,12 @@ async def test_setup_entry_wrong_board(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) assert 
len(hass.config_entries.async_entries()) == 1 @@ -280,10 +192,12 @@ async def test_setup_entry_wait_hassio(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with patch( @@ -303,14 +217,15 @@ async def test_setup_entry_addon_info_fails( """Test setup of a config entry when fetching addon info fails.""" mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - addon_store_info.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.CPC}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -319,41 +234,15 @@ async def test_setup_entry_addon_info_fails( return_value={"board": "yellow"}, ), patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False + "homeassistant.components.onboarding.async_is_onboarded", + return_value=False, + ), + patch( + "homeassistant.components.homeassistant_yellow.check_multi_pan_addon", + side_effect=HomeAssistantError("Boom"), ), ): assert not await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_entry_addon_not_running( - hass: HomeAssistant, addon_installed, start_addon -) -> None: - """Test the addon is started if it is not running.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - start_addon.assert_called_once() From ca17c70109ac26959b9bcb48ea222ba54e00fbf2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 11:42:42 -1000 Subject: [PATCH 1122/1635] Remove socketio constraints (#124730) We have been stuck on an old version of socketio for a few years now due to https://github.com/jkeljo/sisyphus-control/issues/6 and no solution has been reached. 
The constraint is blocking any integration from using socketio 4.x servers as the old version only supports socketio 3.x servers which have reached EOL (https://socket.io/docs/v3/) --- homeassistant/package_constraints.txt | 6 ------ script/gen_requirements_all.py | 6 ------ 2 files changed, 12 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ed1a1f88059..b652484e675 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -109,12 +109,6 @@ hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env numpy==1.26.0 -# Prevent dependency conflicts between sisyphus-control and aioambient -# until upper bounds for sisyphus-control have been updated -# https://github.com/jkeljo/sisyphus-control/issues/6 -python-engineio>=3.13.1,<4.0 -python-socketio>=4.6.0,<5.0 - # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 multidict>=6.0.2 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index 6ce97468699..e8fdce6fa15 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -130,12 +130,6 @@ hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env numpy==1.26.0 -# Prevent dependency conflicts between sisyphus-control and aioambient -# until upper bounds for sisyphus-control have been updated -# https://github.com/jkeljo/sisyphus-control/issues/6 -python-engineio>=3.13.1,<4.0 -python-socketio>=4.6.0,<5.0 - # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 multidict>=6.0.2 From fa084143ef2c9a3d7ae3f33a2cd96bac6787fb5a Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 11:58:11 -1000 Subject: [PATCH 1123/1635] Bump dbus-fast to 2.24.0 (#124737) changelog: https://github.com/Bluetooth-Devices/dbus-fast/compare/v2.23.0...v2.24.0 --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 93b07cb120c..027e2450bb4 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -19,7 +19,7 @@ "bluetooth-adapters==0.19.4", "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.20.0", - "dbus-fast==2.23.0", + "dbus-fast==2.24.0", "habluetooth==3.3.2" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index b652484e675..3695317f064 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -23,7 +23,7 @@ cached-ipaddress==0.5.0 certifi>=2021.5.30 ciso8601==2.3.1 cryptography==43.0.0 -dbus-fast==2.23.0 +dbus-fast==2.24.0 fnv-hash-fast==1.0.2 ha-av==10.1.1 ha-ffmpeg==3.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 784f2e65c00..24be22033ae 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -703,7 +703,7 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.23.0 +dbus-fast==2.24.0 # homeassistant.components.debugpy debugpy==1.8.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 293aa62a4d8..49354157ea5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -599,7 +599,7 @@ datadog==0.15.0 datapoint==0.9.9 # 
homeassistant.components.bluetooth -dbus-fast==2.23.0 +dbus-fast==2.24.0 # homeassistant.components.debugpy debugpy==1.8.1 From e84d9e21f7a5e4d68a70029fe34bae2079b240f7 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Wed, 28 Aug 2024 10:15:41 +1200 Subject: [PATCH 1124/1635] Handle single state requests from ESPHome (#124660) Co-authored-by: J. Nick Koston --- homeassistant/components/esphome/manager.py | 14 ++++++++++- tests/components/esphome/conftest.py | 14 ++++++++++- tests/components/esphome/test_manager.py | 28 +++++++++++++++++++++ 3 files changed, 54 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index 7629d1fa9cd..93e8d7b5bc2 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -329,6 +329,15 @@ class ESPHomeManager: entity_id, attribute, hass.states.get(entity_id) ) + @callback + def async_on_state_request( + self, entity_id: str, attribute: str | None = None + ) -> None: + """Forward state for requested entity.""" + self._send_home_assistant_state( + entity_id, attribute, self.hass.states.get(entity_id) + ) + def _handle_pipeline_finished(self) -> None: self.entry_data.async_set_assist_pipeline_state(False) @@ -526,7 +535,10 @@ class ESPHomeManager: cli.subscribe_states(entry_data.async_update_state) cli.subscribe_service_calls(self.async_on_service_call) - cli.subscribe_home_assistant_states(self.async_on_state_subscription) + cli.subscribe_home_assistant_states( + self.async_on_state_subscription, + self.async_on_state_request, + ) entry_data.async_save_to_store() _async_check_firmware_version(hass, device_info, api_version) diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index ea4099560cd..b3966875a31 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -205,6 +205,7 @@ class MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] + self.home_assistant_state_request_callback: Callable[[str, str | None], None] self.voice_assistant_handle_start_callback: Callable[ [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], @@ -268,9 +269,11 @@ class MockESPHomeDevice: def set_home_assistant_state_subscription_callback( self, on_state_sub: Callable[[str, str | None], None], + on_state_request: Callable[[str, str | None], None], ) -> None: """Set the state call callback.""" self.home_assistant_state_subscription_callback = on_state_sub + self.home_assistant_state_request_callback = on_state_request def mock_home_assistant_state_subscription( self, entity_id: str, attribute: str | None @@ -278,6 +281,12 @@ class MockESPHomeDevice: """Mock a state subscription.""" self.home_assistant_state_subscription_callback(entity_id, attribute) + def mock_home_assistant_state_request( + self, entity_id: str, attribute: str | None + ) -> None: + """Mock a state request.""" + self.home_assistant_state_request_callback(entity_id, attribute) + def set_subscribe_voice_assistant_callbacks( self, handle_start: Callable[ @@ -378,9 +387,12 @@ async def _mock_generic_device_entry( def _subscribe_home_assistant_states( on_state_sub: Callable[[str, str | None], None], + on_state_request: Callable[[str, str | None], None], ) -> None: """Subscribe to home assistant states.""" - mock_device.set_home_assistant_state_subscription_callback(on_state_sub) + 
mock_device.set_home_assistant_state_subscription_callback( + on_state_sub, on_state_request + ) def _subscribe_voice_assistant( *, diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 9d2a906466e..a14c83bf265 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -721,6 +721,34 @@ async def test_state_subscription( assert mock_client.send_home_assistant_state.mock_calls == [] +async def test_state_request( + mock_client: APIClient, + hass: HomeAssistant, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test ESPHome requests state change.""" + device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) + await hass.async_block_till_done() + hass.states.async_set("binary_sensor.test", "on", {"bool": True, "float": 3.0}) + device.mock_home_assistant_state_request("binary_sensor.test", None) + await hass.async_block_till_done() + assert mock_client.send_home_assistant_state.mock_calls == [ + call("binary_sensor.test", None, "on") + ] + mock_client.send_home_assistant_state.reset_mock() + hass.states.async_set("binary_sensor.test", "off", {"bool": False, "float": 5.0}) + await hass.async_block_till_done() + assert mock_client.send_home_assistant_state.mock_calls == [] + + async def test_debug_logging( mock_client: APIClient, hass: HomeAssistant, From 136f0e423e3324f5eade78aab984109f27cb7945 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 12:33:17 -1000 Subject: [PATCH 1125/1635] Bump aioambient to 2024.08.0 (#124729) --- homeassistant/components/ambient_network/manifest.json | 2 +- homeassistant/components/ambient_station/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/ambient_network/manifest.json b/homeassistant/components/ambient_network/manifest.json index 553adb240b0..4800ffcb29d 100644 --- a/homeassistant/components/ambient_network/manifest.json +++ b/homeassistant/components/ambient_network/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aioambient"], - "requirements": ["aioambient==2024.01.0"] + "requirements": ["aioambient==2024.08.0"] } diff --git a/homeassistant/components/ambient_station/manifest.json b/homeassistant/components/ambient_station/manifest.json index 046ab9f73e9..a14de5f37c5 100644 --- a/homeassistant/components/ambient_station/manifest.json +++ b/homeassistant/components/ambient_station/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["aioambient"], - "requirements": ["aioambient==2024.01.0"] + "requirements": ["aioambient==2024.08.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 24be22033ae..ddf4463d693 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -183,7 +183,7 @@ aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.01.0 +aioambient==2024.08.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 49354157ea5..e7c74ccf400 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -171,7 +171,7 @@ aioairzone==0.8.2 # 
homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.01.0 +aioambient==2024.08.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 From 5bd17c91985592c5e5a7284bb08ac0e086e5bc8f Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 12:34:30 -1000 Subject: [PATCH 1126/1635] Bump aioshelly to 11.3.0 (#124741) --- homeassistant/components/shelly/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index da3bbc4bb6e..a384255705c 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.2.4"], + "requirements": ["aioshelly==11.3.0"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index ddf4463d693..031baffc60f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -359,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.4 +aioshelly==11.3.0 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e7c74ccf400..5c860c386db 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -341,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.4 +aioshelly==11.3.0 # homeassistant.components.skybell aioskybell==22.7.0 From 1b304e60d926ceffbe79e25c5065af233fc4c059 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 27 Aug 2024 12:53:48 -1000 Subject: [PATCH 1127/1635] Disable sisyphus integration (#124742) --- homeassistant/components/sisyphus/__init__.py | 3 ++- homeassistant/components/sisyphus/manifest.json | 1 + homeassistant/components/sisyphus/media_player.py | 3 ++- homeassistant/components/sisyphus/ruff.toml | 5 +++++ requirements_all.txt | 3 --- 5 files changed, 10 insertions(+), 5 deletions(-) create mode 100644 homeassistant/components/sisyphus/ruff.toml diff --git a/homeassistant/components/sisyphus/__init__.py b/homeassistant/components/sisyphus/__init__.py index da8d670d412..1fc440f260d 100644 --- a/homeassistant/components/sisyphus/__init__.py +++ b/homeassistant/components/sisyphus/__init__.py @@ -1,9 +1,10 @@ """Support for controlling Sisyphus Kinetic Art Tables.""" +# mypy: ignore-errors import asyncio import logging -from sisyphus_control import Table +# from sisyphus_control import Table import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP, Platform diff --git a/homeassistant/components/sisyphus/manifest.json b/homeassistant/components/sisyphus/manifest.json index dbb40344d66..f1d90cebbd3 100644 --- a/homeassistant/components/sisyphus/manifest.json +++ b/homeassistant/components/sisyphus/manifest.json @@ -2,6 +2,7 @@ "domain": "sisyphus", "name": "Sisyphus", "codeowners": ["@jkeljo"], + "disabled": "This integration is disabled because it uses an old version of socketio.", "documentation": "https://www.home-assistant.io/integrations/sisyphus", "iot_class": "local_push", "loggers": ["sisyphus_control"], diff --git a/homeassistant/components/sisyphus/media_player.py b/homeassistant/components/sisyphus/media_player.py index 3884a83928a..0248bbeac32 100644 --- a/homeassistant/components/sisyphus/media_player.py +++ b/homeassistant/components/sisyphus/media_player.py @@ -1,10 +1,11 @@ """Support for track controls on the Sisyphus Kinetic Art Table.""" +# mypy: ignore-errors from __future__ import annotations import aiohttp -from sisyphus_control import Track +# from sisyphus_control import Track from homeassistant.components.media_player import ( MediaPlayerEntity, MediaPlayerEntityFeature, diff --git a/homeassistant/components/sisyphus/ruff.toml b/homeassistant/components/sisyphus/ruff.toml new file mode 100644 index 00000000000..38f6f586aef --- /dev/null +++ b/homeassistant/components/sisyphus/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + +lint.extend-ignore = [ + "F821" +] diff --git a/requirements_all.txt b/requirements_all.txt index 031baffc60f..53f7ed08480 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2618,9 +2618,6 @@ simplepush==2.2.3 # homeassistant.components.simplisafe simplisafe-python==2024.01.0 -# homeassistant.components.sisyphus -sisyphus-control==3.1.3 - # homeassistant.components.slack slackclient==2.5.0 From 3533ac163c4fe8e62254d7f66a3c8519ea332e78 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Tue, 27 Aug 2024 19:57:08 -0400 Subject: [PATCH 1128/1635] Remove unnecessary assertion for the Yellow firmware type (#124747) --- homeassistant/components/homeassistant_yellow/config_flow.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index ce6475ceb3c..1f4d150e49b 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ 
b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -96,8 +96,6 @@ class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): def _async_flow_finished(self) -> ConfigFlowResult: """Create the config entry.""" - assert self._probed_firmware_type is not None - return self.async_create_entry( title=BOARD_NAME, data={ From 8fee1975b40e1b06e3d9b6152a23e9426e875c15 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Tue, 27 Aug 2024 19:26:46 -0500 Subject: [PATCH 1129/1635] Bump pyspeex-noise to 1.0.2 (#124721) Co-authored-by: TheJulianJES --- homeassistant/components/assist_pipeline/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index c22b7391d33..1b93ecd9eef 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.0"] + "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 3695317f064..79c846d6c11 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -49,7 +49,7 @@ pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 -pyspeex-noise==1.0.0 +pyspeex-noise==1.0.2 python-slugify==8.0.4 PyTurboJPEG==1.7.1 pyudev==0.24.1 diff --git a/requirements_all.txt b/requirements_all.txt index 53f7ed08480..78028cf31d4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2229,7 +2229,7 @@ pysoma==0.0.12 pyspcwebgw==0.7.0 # homeassistant.components.assist_pipeline -pyspeex-noise==1.0.0 +pyspeex-noise==1.0.2 # homeassistant.components.squeezebox pysqueezebox==0.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5c860c386db..18c9dea0cf7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1783,7 +1783,7 @@ pysoma==0.0.12 pyspcwebgw==0.7.0 # homeassistant.components.assist_pipeline -pyspeex-noise==1.0.0 +pyspeex-noise==1.0.2 # homeassistant.components.squeezebox pysqueezebox==0.7.1 From e447d8302479dc91ccd12e08fb801d199d76fa48 Mon Sep 17 00:00:00 2001 From: functionpointer Date: Wed, 28 Aug 2024 03:04:08 +0200 Subject: [PATCH 1130/1635] Use ssl util in Tibber (#123369) Tibber: Use homeassistant.util.ssl --- homeassistant/components/tibber/__init__.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py index 3d63098b2e9..ce05b8070f6 100644 --- a/homeassistant/components/tibber/__init__.py +++ b/homeassistant/components/tibber/__init__.py @@ -18,7 +18,7 @@ from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from homeassistant.util import dt as dt_util +from homeassistant.util import dt as dt_util, ssl as ssl_util from .const import DATA_HASS_CONFIG, DOMAIN from .services import async_setup_services @@ -47,6 +47,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: 
access_token=entry.data[CONF_ACCESS_TOKEN], websession=async_get_clientsession(hass), time_zone=dt_util.get_default_time_zone(), + ssl=ssl_util.get_default_context(), ) hass.data[DOMAIN] = tibber_connection From 16dd6b1712f55a19dece113e0a8d32da2d3d0309 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 15:12:52 -1000 Subject: [PATCH 1131/1635] Disable control4 integration (#124746) --- homeassistant/components/control4/__init__.py | 8 +++++--- homeassistant/components/control4/config_flow.py | 9 ++++++--- homeassistant/components/control4/director_utils.py | 8 ++++---- homeassistant/components/control4/light.py | 7 ++++--- homeassistant/components/control4/manifest.json | 1 + homeassistant/components/control4/media_player.py | 7 ++++--- homeassistant/components/control4/ruff.toml | 5 +++++ requirements_all.txt | 3 --- requirements_test_all.txt | 3 --- tests/components/control4/ruff.toml | 5 +++++ tests/components/control4/test_config_flow.py | 10 ++++++---- 11 files changed, 40 insertions(+), 26 deletions(-) create mode 100644 homeassistant/components/control4/ruff.toml create mode 100644 tests/components/control4/ruff.toml diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index a3d0cebd1fc..d4432a83c84 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -1,5 +1,7 @@ """The Control4 integration.""" +# mypy: ignore-errors + from __future__ import annotations import json @@ -7,10 +9,10 @@ import logging from typing import Any from aiohttp import client_exceptions -from pyControl4.account import C4Account -from pyControl4.director import C4Director -from pyControl4.error_handling import BadCredentials +# from pyControl4.account import C4Account +# from pyControl4.director import C4Director +# from pyControl4.error_handling import BadCredentials from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index aa7839b4383..40652767019 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -1,14 +1,17 @@ """Config flow for Control4 integration.""" +# mypy: ignore-errors + from __future__ import annotations import logging from typing import Any from aiohttp.client_exceptions import ClientError -from pyControl4.account import C4Account -from pyControl4.director import C4Director -from pyControl4.error_handling import NotFound, Unauthorized + +# from pyControl4.account import C4Account +# from pyControl4.director import C4Director +# from pyControl4.error_handling import NotFound, Unauthorized import voluptuous as vol from homeassistant.config_entries import ( diff --git a/homeassistant/components/control4/director_utils.py b/homeassistant/components/control4/director_utils.py index 10e9486ee89..92ca9f9d615 100644 --- a/homeassistant/components/control4/director_utils.py +++ b/homeassistant/components/control4/director_utils.py @@ -1,13 +1,13 @@ """Provides data updates from the Control4 controller for platforms.""" +# mypy: ignore-errors from collections import defaultdict import logging from typing import Any -from pyControl4.account import C4Account -from pyControl4.director import C4Director -from pyControl4.error_handling import BadToken - +# from pyControl4.account import C4Account +# from pyControl4.director import C4Director +# from 
pyControl4.error_handling import BadToken from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/control4/light.py b/homeassistant/components/control4/light.py index d7cfd44dc43..4d20f966cc2 100644 --- a/homeassistant/components/control4/light.py +++ b/homeassistant/components/control4/light.py @@ -1,5 +1,7 @@ """Platform for Control4 Lights.""" +# mypy: ignore-errors + from __future__ import annotations import asyncio @@ -7,9 +9,8 @@ from datetime import timedelta import logging from typing import Any -from pyControl4.error_handling import C4Exception -from pyControl4.light import C4Light - +# from pyControl4.error_handling import C4Exception +# from pyControl4.light import C4Light from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_TRANSITION, diff --git a/homeassistant/components/control4/manifest.json b/homeassistant/components/control4/manifest.json index 765f0dce78c..7c846617bf0 100644 --- a/homeassistant/components/control4/manifest.json +++ b/homeassistant/components/control4/manifest.json @@ -3,6 +3,7 @@ "name": "Control4", "codeowners": ["@lawtancool"], "config_flow": true, + "disabled": "This integration is disabled because it uses an old version of socketio.", "documentation": "https://www.home-assistant.io/integrations/control4", "iot_class": "local_polling", "loggers": ["pyControl4"], diff --git a/homeassistant/components/control4/media_player.py b/homeassistant/components/control4/media_player.py index 72aa44faaed..21c26fa1152 100644 --- a/homeassistant/components/control4/media_player.py +++ b/homeassistant/components/control4/media_player.py @@ -1,5 +1,7 @@ """Platform for Control4 Rooms Media Players.""" +# mypy: ignore-errors + from __future__ import annotations from dataclasses import dataclass @@ -8,9 +10,8 @@ import enum import logging from typing import Any -from pyControl4.error_handling import C4Exception -from pyControl4.room import C4Room - +# from pyControl4.error_handling import C4Exception +# from pyControl4.room import C4Room from homeassistant.components.media_player import ( MediaPlayerDeviceClass, MediaPlayerEntity, diff --git a/homeassistant/components/control4/ruff.toml b/homeassistant/components/control4/ruff.toml new file mode 100644 index 00000000000..38f6f586aef --- /dev/null +++ b/homeassistant/components/control4/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + +lint.extend-ignore = [ + "F821" +] diff --git a/requirements_all.txt b/requirements_all.txt index 78028cf31d4..f29766ad260 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1685,9 +1685,6 @@ pyAtome==0.1.1 # homeassistant.components.hdmi_cec pyCEC==0.5.2 -# homeassistant.components.control4 -pyControl4==1.1.0 - # homeassistant.components.duotecno pyDuotecno==2024.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 18c9dea0cf7..68460b9ec98 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1368,9 +1368,6 @@ py-synologydsm-api==2.5.2 # homeassistant.components.hdmi_cec pyCEC==0.5.2 -# homeassistant.components.control4 -pyControl4==1.1.0 - # homeassistant.components.duotecno pyDuotecno==2024.5.1 diff --git a/tests/components/control4/ruff.toml b/tests/components/control4/ruff.toml new file mode 100644 index 00000000000..38f6f586aef --- /dev/null +++ b/tests/components/control4/ruff.toml @@ -0,0 +1,5 @@ +extend = "../../../pyproject.toml" + 
+lint.extend-ignore = [ + "F821" +] diff --git a/tests/components/control4/test_config_flow.py b/tests/components/control4/test_config_flow.py index 9a1b392f61c..01456cc0598 100644 --- a/tests/components/control4/test_config_flow.py +++ b/tests/components/control4/test_config_flow.py @@ -1,11 +1,13 @@ """Test the Control4 config flow.""" +# mypy: ignore-errors from unittest.mock import AsyncMock, patch -from pyControl4.account import C4Account -from pyControl4.director import C4Director -from pyControl4.error_handling import Unauthorized +import pytest +# from pyControl4.account import C4Account +# from pyControl4.director import C4Director +# from pyControl4.error_handling import Unauthorized from homeassistant import config_entries from homeassistant.components.control4.const import DEFAULT_SCAN_INTERVAL, DOMAIN from homeassistant.const import ( @@ -16,7 +18,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType - from tests.common import MockConfigEntry @@ -41,6 +42,7 @@ def _get_mock_c4_director(): return c4_director_mock +@pytest.skip(allow_module_level=True) async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" From 9d3895d69a919ed45cf2b8b80da43a1bf890bcbe Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 15:36:00 -1000 Subject: [PATCH 1132/1635] Bump yalexs to 8.5.4 (#124672) --- homeassistant/components/august/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/august/mocks.py | 5 +---- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 42f5dfd8409..a20ec5a2205 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -28,5 +28,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.4.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.5.4", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index f29766ad260..8bac05a047a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2968,7 +2968,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.2 +yalexs==8.5.4 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 68460b9ec98..71f4a1ea012 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2351,7 +2351,7 @@ yalesmartalarmclient==0.4.0 yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.2 +yalexs==8.5.4 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index c2ab8ce743c..43cc4957445 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -82,10 +82,7 @@ async def _mock_setup_august( ) entry.add_to_hass(hass) with ( - patch( - "yalexs.manager.data.async_create_pubnub", - return_value=AsyncMock(), - ), + patch.object(pubnub_mock, "run"), patch("yalexs.manager.data.AugustPubNub", return_value=pubnub_mock), ): assert await hass.config_entries.async_setup(entry.entry_id) From e720a14dc484bd523e8a9121faaf1ecf29b9f1f9 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Wed, 28 Aug 2024 07:49:05 +0200 Subject: [PATCH 1133/1635] Cleanup removed 
`schema` option from mqtt vacuum platform (#124722) --- homeassistant/components/mqtt/vacuum.py | 30 +------- tests/components/mqtt/test_legacy_vacuum.py | 83 --------------------- tests/components/mqtt/test_vacuum.py | 27 ------- 3 files changed, 3 insertions(+), 137 deletions(-) delete mode 100644 tests/components/mqtt/test_legacy_vacuum.py diff --git a/homeassistant/components/mqtt/vacuum.py b/homeassistant/components/mqtt/vacuum.py index c9898465184..87d6c9dd744 100644 --- a/homeassistant/components/mqtt/vacuum.py +++ b/homeassistant/components/mqtt/vacuum.py @@ -1,10 +1,5 @@ """Support for MQTT vacuums.""" -# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 -# and was removed with HA Core 2024.2.0 -# The use of the schema attribute with MQTT vacuum was deprecated with HA Core 2024.2 -# the attribute will be remove with HA Core 2024.8 - from __future__ import annotations import logging @@ -38,15 +33,12 @@ from homeassistant.util.json import json_loads_object from . import subscription from .config import MQTT_BASE_SCHEMA -from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_SCHEMA, CONF_STATE_TOPIC +from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_STATE_TOPIC from .mixins import MqttEntity, async_setup_entity_entry_helper from .models import ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic -LEGACY = "legacy" -STATE = "state" - BATTERY = "battery_level" FAN_SPEED = "fan_speed" STATE = "state" @@ -149,7 +141,7 @@ MQTT_VACUUM_ATTRIBUTES_BLOCKED = frozenset( MQTT_VACUUM_DOCS_URL = "https://www.home-assistant.io/integrations/vacuum.mqtt/" -VACUUM_BASE_SCHEMA = MQTT_BASE_SCHEMA.extend( +PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend( { vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All( cv.ensure_list, [cv.string] @@ -173,26 +165,10 @@ VACUUM_BASE_SCHEMA = MQTT_BASE_SCHEMA.extend( ), vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean, - vol.Optional(CONF_SCHEMA): vol.All(vol.Lower, vol.Any(LEGACY, STATE)), } ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) -DISCOVERY_SCHEMA = vol.All( - VACUUM_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), - # Do not fail a config is the schema option is still present, - # De option was deprecated with HA Core 2024.2 and removed with HA Core 2024.8. - # As we allow extra options, and we will remove this check silently - # with HA Core 2025.8.0, we will only warn, - # if a adiscovery config still uses this option. - cv.removed(CONF_SCHEMA, raise_if_present=False), -) - -PLATFORM_SCHEMA_MODERN = vol.All( - VACUUM_BASE_SCHEMA, - # The schema options was removed with HA Core 2024.8, - # the cleanup is planned for HA Core 2025.8. 
- cv.removed(CONF_SCHEMA, raise_if_present=True), -) +DISCOVERY_SCHEMA = PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.ALLOW_EXTRA) async def async_setup_entry( diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py deleted file mode 100644 index 9b45b65d2cc..00000000000 --- a/tests/components/mqtt/test_legacy_vacuum.py +++ /dev/null @@ -1,83 +0,0 @@ -"""The tests for the Legacy Mqtt vacuum platform.""" - -# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 -# and was removed with HA Core 2024.2.0 -# cleanup is planned with HA Core 2025.2 - -import json - -import pytest - -from homeassistant.components import mqtt, vacuum -from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import DiscoveryInfoType - -from tests.common import async_fire_mqtt_message -from tests.typing import MqttMockHAClientGenerator - -DEFAULT_CONFIG = {mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}} - - -@pytest.mark.parametrize( - ("hass_config", "removed"), - [ - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "legacy"}}}, True), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}}, False), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, True), - ], -) -async def test_removed_support_yaml( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - removed: bool, -) -> None: - """Test that the removed support validation for the legacy schema works.""" - assert await mqtt_mock_entry() - entity = hass.states.get("vacuum.test") - - if removed: - assert entity is None - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - else: - assert entity is not None - - -@pytest.mark.parametrize( - ("config", "removed"), - [ - ({"name": "test", "schema": "legacy"}, True), - ({"name": "test"}, False), - ({"name": "test", "schema": "state"}, True), - ], -) -async def test_removed_support_discovery( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - config: DiscoveryInfoType, - removed: bool, -) -> None: - """Test that the removed support validation for the legacy schema works.""" - assert await mqtt_mock_entry() - - config_payload = json.dumps(config) - async_fire_mqtt_message(hass, "homeassistant/vacuum/test/config", config_payload) - await hass.async_block_till_done() - - entity = hass.states.get("vacuum.test") - assert entity is not None - - if removed: - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - else: - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" not in caplog.text - ) diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index 7fc4ff981fd..fbffe062261 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -2,7 +2,6 @@ from copy import deepcopy import json -import logging from typing import Any from unittest.mock import patch @@ -102,32 +101,6 @@ CONFIG_ALL_SERVICES = help_custom_config( ) -async def test_warning_schema_option( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the warning on use of deprecated schema option.""" - await mqtt_mock_entry() - # Send discovery message with deprecated schema option - async_fire_mqtt_message( - hass, - 
f"homeassistant/{vacuum.DOMAIN}/bla/config", - '{"name": "test", "schema": "state", "o": {"name": "Bla2MQTT", "sw": "0.99", "url":"https://example.com/support"}}', - ) - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get("vacuum.test") - # We do not fail if the schema option is still in the payload, but we log an error - assert state is not None - with caplog.at_level(logging.WARNING): - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - - @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_default_supported_features( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator From 42388450e1cc293e18af1e2305ba734f6f6da011 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 19:54:28 -1000 Subject: [PATCH 1134/1635] Restore control4 integration (#124750) * Revert "Disable control4 integration (#124746)" This reverts commit 16dd6b1712f55a19dece113e0a8d32da2d3d0309. * Restore control4 integration reverts #124746 and updates the lib instead changelog: https://github.com/lawtancool/pyControl4/compare/v1.1.0...v1.1.3 Note that there is no release yet, see https://github.com/lawtancool/pyControl4/pull/32 * Apply suggestions from code review --- homeassistant/components/control4/__init__.py | 8 +++----- homeassistant/components/control4/config_flow.py | 9 +++------ homeassistant/components/control4/director_utils.py | 8 ++++---- homeassistant/components/control4/light.py | 7 +++---- homeassistant/components/control4/manifest.json | 3 +-- homeassistant/components/control4/media_player.py | 7 +++---- homeassistant/components/control4/ruff.toml | 5 ----- requirements_all.txt | 3 +++ requirements_test_all.txt | 3 +++ tests/components/control4/ruff.toml | 5 ----- tests/components/control4/test_config_flow.py | 10 ++++------ 11 files changed, 27 insertions(+), 41 deletions(-) delete mode 100644 homeassistant/components/control4/ruff.toml delete mode 100644 tests/components/control4/ruff.toml diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index d4432a83c84..a3d0cebd1fc 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -1,7 +1,5 @@ """The Control4 integration.""" -# mypy: ignore-errors - from __future__ import annotations import json @@ -9,10 +7,10 @@ import logging from typing import Any from aiohttp import client_exceptions +from pyControl4.account import C4Account +from pyControl4.director import C4Director +from pyControl4.error_handling import BadCredentials -# from pyControl4.account import C4Account -# from pyControl4.director import C4Director -# from pyControl4.error_handling import BadCredentials from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_HOST, diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index 40652767019..aa7839b4383 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -1,17 +1,14 @@ """Config flow for Control4 integration.""" -# mypy: ignore-errors - from __future__ import annotations import logging from typing import Any from aiohttp.client_exceptions import ClientError - -# from pyControl4.account import C4Account -# from pyControl4.director import C4Director -# from pyControl4.error_handling import 
NotFound, Unauthorized +from pyControl4.account import C4Account +from pyControl4.director import C4Director +from pyControl4.error_handling import NotFound, Unauthorized import voluptuous as vol from homeassistant.config_entries import ( diff --git a/homeassistant/components/control4/director_utils.py b/homeassistant/components/control4/director_utils.py index 92ca9f9d615..10e9486ee89 100644 --- a/homeassistant/components/control4/director_utils.py +++ b/homeassistant/components/control4/director_utils.py @@ -1,13 +1,13 @@ """Provides data updates from the Control4 controller for platforms.""" -# mypy: ignore-errors from collections import defaultdict import logging from typing import Any -# from pyControl4.account import C4Account -# from pyControl4.director import C4Director -# from pyControl4.error_handling import BadToken +from pyControl4.account import C4Account +from pyControl4.director import C4Director +from pyControl4.error_handling import BadToken + from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant diff --git a/homeassistant/components/control4/light.py b/homeassistant/components/control4/light.py index 4d20f966cc2..d7cfd44dc43 100644 --- a/homeassistant/components/control4/light.py +++ b/homeassistant/components/control4/light.py @@ -1,7 +1,5 @@ """Platform for Control4 Lights.""" -# mypy: ignore-errors - from __future__ import annotations import asyncio @@ -9,8 +7,9 @@ from datetime import timedelta import logging from typing import Any -# from pyControl4.error_handling import C4Exception -# from pyControl4.light import C4Light +from pyControl4.error_handling import C4Exception +from pyControl4.light import C4Light + from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_TRANSITION, diff --git a/homeassistant/components/control4/manifest.json b/homeassistant/components/control4/manifest.json index 7c846617bf0..3088ebf8672 100644 --- a/homeassistant/components/control4/manifest.json +++ b/homeassistant/components/control4/manifest.json @@ -3,11 +3,10 @@ "name": "Control4", "codeowners": ["@lawtancool"], "config_flow": true, - "disabled": "This integration is disabled because it uses an old version of socketio.", "documentation": "https://www.home-assistant.io/integrations/control4", "iot_class": "local_polling", "loggers": ["pyControl4"], - "requirements": ["pyControl4==1.1.0"], + "requirements": ["pyControl4==1.2.0"], "ssdp": [ { "st": "c4:director" diff --git a/homeassistant/components/control4/media_player.py b/homeassistant/components/control4/media_player.py index 21c26fa1152..72aa44faaed 100644 --- a/homeassistant/components/control4/media_player.py +++ b/homeassistant/components/control4/media_player.py @@ -1,7 +1,5 @@ """Platform for Control4 Rooms Media Players.""" -# mypy: ignore-errors - from __future__ import annotations from dataclasses import dataclass @@ -10,8 +8,9 @@ import enum import logging from typing import Any -# from pyControl4.error_handling import C4Exception -# from pyControl4.room import C4Room +from pyControl4.error_handling import C4Exception +from pyControl4.room import C4Room + from homeassistant.components.media_player import ( MediaPlayerDeviceClass, MediaPlayerEntity, diff --git a/homeassistant/components/control4/ruff.toml b/homeassistant/components/control4/ruff.toml deleted file mode 100644 index 38f6f586aef..00000000000 --- a/homeassistant/components/control4/ruff.toml +++ /dev/null @@ -1,5 +0,0 @@ 
-extend = "../../../pyproject.toml" - -lint.extend-ignore = [ - "F821" -] diff --git a/requirements_all.txt b/requirements_all.txt index 8bac05a047a..4633f0b093c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1685,6 +1685,9 @@ pyAtome==0.1.1 # homeassistant.components.hdmi_cec pyCEC==0.5.2 +# homeassistant.components.control4 +pyControl4==1.2.0 + # homeassistant.components.duotecno pyDuotecno==2024.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 71f4a1ea012..8688b7a6dcc 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1368,6 +1368,9 @@ py-synologydsm-api==2.5.2 # homeassistant.components.hdmi_cec pyCEC==0.5.2 +# homeassistant.components.control4 +pyControl4==1.2.0 + # homeassistant.components.duotecno pyDuotecno==2024.5.1 diff --git a/tests/components/control4/ruff.toml b/tests/components/control4/ruff.toml deleted file mode 100644 index 38f6f586aef..00000000000 --- a/tests/components/control4/ruff.toml +++ /dev/null @@ -1,5 +0,0 @@ -extend = "../../../pyproject.toml" - -lint.extend-ignore = [ - "F821" -] diff --git a/tests/components/control4/test_config_flow.py b/tests/components/control4/test_config_flow.py index 01456cc0598..9a1b392f61c 100644 --- a/tests/components/control4/test_config_flow.py +++ b/tests/components/control4/test_config_flow.py @@ -1,13 +1,11 @@ """Test the Control4 config flow.""" -# mypy: ignore-errors from unittest.mock import AsyncMock, patch -import pytest +from pyControl4.account import C4Account +from pyControl4.director import C4Director +from pyControl4.error_handling import Unauthorized -# from pyControl4.account import C4Account -# from pyControl4.director import C4Director -# from pyControl4.error_handling import Unauthorized from homeassistant import config_entries from homeassistant.components.control4.const import DEFAULT_SCAN_INTERVAL, DOMAIN from homeassistant.const import ( @@ -18,6 +16,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType + from tests.common import MockConfigEntry @@ -42,7 +41,6 @@ def _get_mock_c4_director(): return c4_director_mock -@pytest.skip(allow_module_level=True) async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" From 4108b7ada65a27877d67758371de0f34aca02fb7 Mon Sep 17 00:00:00 2001 From: Sergey Dudanov Date: Wed, 28 Aug 2024 10:45:40 +0400 Subject: [PATCH 1135/1635] Remove unused normalized units from unit converters (#122797) remove unused normalized units in unit converters --- homeassistant/util/unit_conversion.py | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 2b9f73afab7..d5586704fc5 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -68,7 +68,6 @@ class BaseUnitConverter: """Define the format of a conversion utility.""" UNIT_CLASS: str - NORMALIZED_UNIT: str | None VALID_UNITS: set[str | None] _UNIT_CONVERSION: dict[str | None, float] @@ -125,7 +124,6 @@ class DataRateConverter(BaseUnitConverter): """Utility to convert data rate values.""" UNIT_CLASS = "data_rate" - NORMALIZED_UNIT = UnitOfDataRate.BITS_PER_SECOND # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfDataRate.BITS_PER_SECOND: 1, @@ -147,7 +145,6 @@ class DistanceConverter(BaseUnitConverter): """Utility to convert distance values.""" UNIT_CLASS = "distance" - NORMALIZED_UNIT = 
UnitOfLength.METERS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfLength.METERS: 1, UnitOfLength.MILLIMETERS: 1 / _MM_TO_M, @@ -174,7 +171,6 @@ class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "conductivity" - NORMALIZED_UNIT = UnitOfConductivity.MICROSIEMENS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfConductivity.MICROSIEMENS: 1, UnitOfConductivity.MILLISIEMENS: 1e-3, @@ -187,7 +183,6 @@ class ElectricCurrentConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "electric_current" - NORMALIZED_UNIT = UnitOfElectricCurrent.AMPERE _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricCurrent.AMPERE: 1, UnitOfElectricCurrent.MILLIAMPERE: 1e3, @@ -199,7 +194,6 @@ class ElectricPotentialConverter(BaseUnitConverter): """Utility to convert electric potential values.""" UNIT_CLASS = "voltage" - NORMALIZED_UNIT = UnitOfElectricPotential.VOLT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricPotential.VOLT: 1, UnitOfElectricPotential.MILLIVOLT: 1e3, @@ -214,7 +208,6 @@ class EnergyConverter(BaseUnitConverter): """Utility to convert energy values.""" UNIT_CLASS = "energy" - NORMALIZED_UNIT = UnitOfEnergy.KILO_WATT_HOUR _UNIT_CONVERSION: dict[str | None, float] = { UnitOfEnergy.WATT_HOUR: 1 * 1000, UnitOfEnergy.KILO_WATT_HOUR: 1, @@ -235,7 +228,6 @@ class InformationConverter(BaseUnitConverter): """Utility to convert information values.""" UNIT_CLASS = "information" - NORMALIZED_UNIT = UnitOfInformation.BITS # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfInformation.BITS: 1, @@ -267,7 +259,6 @@ class MassConverter(BaseUnitConverter): """Utility to convert mass values.""" UNIT_CLASS = "mass" - NORMALIZED_UNIT = UnitOfMass.GRAMS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfMass.MICROGRAMS: 1 * 1000 * 1000, UnitOfMass.MILLIGRAMS: 1 * 1000, @@ -292,7 +283,6 @@ class PowerConverter(BaseUnitConverter): """Utility to convert power values.""" UNIT_CLASS = "power" - NORMALIZED_UNIT = UnitOfPower.WATT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, @@ -307,7 +297,6 @@ class PressureConverter(BaseUnitConverter): """Utility to convert pressure values.""" UNIT_CLASS = "pressure" - NORMALIZED_UNIT = UnitOfPressure.PA _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPressure.PA: 1, UnitOfPressure.HPA: 1 / 100, @@ -338,7 +327,6 @@ class SpeedConverter(BaseUnitConverter): """Utility to convert speed values.""" UNIT_CLASS = "speed" - NORMALIZED_UNIT = UnitOfSpeed.METERS_PER_SECOND _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolumetricFlux.INCHES_PER_DAY: _DAYS_TO_SECS / _IN_TO_M, UnitOfVolumetricFlux.INCHES_PER_HOUR: _HRS_TO_SECS / _IN_TO_M, @@ -433,7 +421,6 @@ class TemperatureConverter(BaseUnitConverter): """Utility to convert temperature values.""" UNIT_CLASS = "temperature" - NORMALIZED_UNIT = UnitOfTemperature.CELSIUS VALID_UNITS = { UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT, @@ -564,7 +551,6 @@ class UnitlessRatioConverter(BaseUnitConverter): """Utility to convert unitless ratios.""" UNIT_CLASS = "unitless" - NORMALIZED_UNIT = None _UNIT_CONVERSION: dict[str | None, float] = { None: 1, CONCENTRATION_PARTS_PER_BILLION: 1000000000, @@ -581,7 +567,6 @@ class VolumeConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume" - NORMALIZED_UNIT = UnitOfVolume.CUBIC_METERS # Units in terms of m³ _UNIT_CONVERSION: dict[str | None, float] = { 
UnitOfVolume.LITERS: 1 / _L_TO_CUBIC_METER, @@ -607,7 +592,6 @@ class VolumeFlowRateConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume_flow_rate" - NORMALIZED_UNIT = UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR # Units in terms of m³/h _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR: 1, @@ -630,7 +614,6 @@ class DurationConverter(BaseUnitConverter): """Utility to convert duration values.""" UNIT_CLASS = "duration" - NORMALIZED_UNIT = UnitOfTime.SECONDS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfTime.MICROSECONDS: 1000000, UnitOfTime.MILLISECONDS: 1000, From a63c5e67257c6b2c1f06692edb62f945db7c7d08 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 27 Aug 2024 21:00:20 -1000 Subject: [PATCH 1136/1635] Cache shelly coordinator properties that never change (#124756) --- homeassistant/components/shelly/coordinator.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 918dd920765..012f6b43dc7 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Callable, Coroutine from dataclasses import dataclass from datetime import timedelta +from functools import cached_property from typing import Any, cast from aioshelly.ble import async_ensure_ble_enabled, async_stop_scanner @@ -120,12 +121,12 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop) ) - @property + @cached_property def model(self) -> str: """Model of the device.""" return cast(str, self.entry.data["model"]) - @property + @cached_property def mac(self) -> str: """Mac address of the device.""" return cast(str, self.entry.unique_id) From f9bf7f7e05f7e7719b3a65ec993ef66eed245d5f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 27 Aug 2024 21:00:31 -1000 Subject: [PATCH 1137/1635] Small cleanups to shelly (#124758) --- homeassistant/components/shelly/entity.py | 6 +++--- homeassistant/components/shelly/number.py | 8 ++++---- homeassistant/components/shelly/sensor.py | 10 ++++++---- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index 980a39feaba..aea060e09e2 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -488,7 +488,7 @@ class ShellyRestAttributeEntity(CoordinatorEntity[ShellyBlockCoordinator]): @property def attribute_value(self) -> StateType: """Value of sensor.""" - if callable(self.entity_description.value): + if self.entity_description.value is not None: self._last_value = self.entity_description.value( self.block_coordinator.device.status, self._last_value ) @@ -518,7 +518,7 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): id_key = key.split(":")[-1] self._id = int(id_key) if id_key.isnumeric() else None - if callable(description.unit): + if description.unit is not None: self._attr_native_unit_of_measurement = description.unit( coordinator.device.config[key] ) @@ -544,7 +544,7 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): @property def attribute_value(self) -> StateType: """Value of sensor.""" - if callable(self.entity_description.value): + if self.entity_description.value is not None: # using "get" here since subkey might not exist (e.g. "errors" sub_key) self._last_value = self.entity_description.value( self.status.get(self.entity_description.sub_key), self._last_value diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index 67c33faf150..1e0f5b020ac 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -207,17 +207,17 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity): """Initialize sensor.""" super().__init__(coordinator, key, attribute, description) - if callable(description.max_fn): + if description.max_fn is not None: self._attr_native_max_value = description.max_fn( coordinator.device.config[key] ) - if callable(description.min_fn): + if description.min_fn is not None: self._attr_native_min_value = description.min_fn( coordinator.device.config[key] ) - if callable(description.step_fn): + if description.step_fn is not None: self._attr_native_step = description.step_fn(coordinator.device.config[key]) - if callable(description.mode_fn): + if description.mode_fn is not None: self._attr_mode = description.mode_fn(coordinator.device.config[key]) @property diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 0d782f46c24..1ef174119e4 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -1266,13 +1266,15 @@ class RpcSensor(ShellyRpcAttributeEntity, SensorEntity): @property def native_value(self) -> StateType: """Return value of sensor.""" - if not self.option_map: - return self.attribute_value + attribute_value = self.attribute_value - if not isinstance(self.attribute_value, str): + if not self.option_map: + return attribute_value + + if not isinstance(attribute_value, str): return None - return self.option_map[self.attribute_value] + return self.option_map[attribute_value] class BlockSleepingSensor(ShellySleepingBlockAttributeEntity, RestoreSensor): From 
1add00a68daf0a966f7491a0455589e9d9eed947 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 09:25:56 +0200 Subject: [PATCH 1138/1635] Deduplicate STT mocks (#124754) --- tests/components/assist_pipeline/conftest.py | 76 ++-------- tests/components/assist_pipeline/test_init.py | 26 ++-- .../assist_pipeline/test_pipeline.py | 6 +- .../assist_pipeline/test_websocket.py | 2 +- tests/components/stt/common.py | 89 +++++++++++- tests/components/stt/test_init.py | 137 +++++------------- 6 files changed, 147 insertions(+), 189 deletions(-) diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index b7bf83a7ed0..c03874c16af 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -36,6 +36,7 @@ from tests.common import ( mock_integration, mock_platform, ) +from tests.components.stt.common import MockSTTProvider, MockSTTProviderEntity _TRANSCRIPT = "test transcript" @@ -47,67 +48,6 @@ def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" -class BaseProvider: - """Mock STT provider.""" - - _supported_languages = ["en-US"] - - def __init__(self, text: str) -> None: - """Init test provider.""" - self.text = text - self.received: list[bytes] = [] - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return self._supported_languages - - @property - def supported_formats(self) -> list[stt.AudioFormats]: - """Return a list of supported formats.""" - return [stt.AudioFormats.WAV] - - @property - def supported_codecs(self) -> list[stt.AudioCodecs]: - """Return a list of supported codecs.""" - return [stt.AudioCodecs.PCM] - - @property - def supported_bit_rates(self) -> list[stt.AudioBitRates]: - """Return a list of supported bitrates.""" - return [stt.AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[stt.AudioSampleRates]: - """Return a list of supported samplerates.""" - return [stt.AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[stt.AudioChannels]: - """Return a list of supported channels.""" - return [stt.AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: stt.SpeechMetadata, stream: AsyncIterable[bytes] - ) -> stt.SpeechResult: - """Process an audio stream.""" - async for data in stream: - if not data: - break - self.received.append(data) - return stt.SpeechResult(self.text, stt.SpeechResultState.SUCCESS) - - -class MockSttProvider(BaseProvider, stt.Provider): - """Mock provider.""" - - -class MockSttProviderEntity(BaseProvider, stt.SpeechToTextEntity): - """Mock provider entity.""" - - _attr_name = "Mock STT" - - class MockTTSProvider(tts.Provider): """Mock TTS provider.""" @@ -166,15 +106,17 @@ async def mock_tts_provider() -> MockTTSProvider: @pytest.fixture -async def mock_stt_provider() -> MockSttProvider: +async def mock_stt_provider() -> MockSTTProvider: """Mock STT provider.""" - return MockSttProvider(_TRANSCRIPT) + return MockSTTProvider(supported_languages=["en-US"], text=_TRANSCRIPT) @pytest.fixture -def mock_stt_provider_entity() -> MockSttProviderEntity: +def mock_stt_provider_entity() -> MockSTTProviderEntity: """Test provider entity fixture.""" - return MockSttProviderEntity(_TRANSCRIPT) + entity = MockSTTProviderEntity(supported_languages=["en-US"], text=_TRANSCRIPT) + entity._attr_name = "Mock STT" + return entity class 
MockSttPlatform(MockPlatform): @@ -290,8 +232,8 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: @pytest.fixture async def init_supporting_components( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider: MockSTTProvider, + mock_stt_provider_entity: MockSTTProviderEntity, mock_tts_provider: MockTTSProvider, mock_wake_word_provider_entity: MockWakeWordEntity, mock_wake_word_provider_entity2: MockWakeWordEntity2, diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 04edab7131f..31cc1268098 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -22,8 +22,8 @@ from homeassistant.setup import async_setup_component from .conftest import ( BYTES_ONE_SECOND, - MockSttProvider, - MockSttProviderEntity, + MockSTTProvider, + MockSTTProviderEntity, MockTTSProvider, MockWakeWordEntity, make_10ms_chunk, @@ -47,7 +47,7 @@ def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: async def test_pipeline_from_audio_stream_auto( hass: HomeAssistant, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -88,7 +88,7 @@ async def test_pipeline_from_audio_stream_auto( async def test_pipeline_from_audio_stream_legacy( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -153,7 +153,7 @@ async def test_pipeline_from_audio_stream_legacy( async def test_pipeline_from_audio_stream_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -218,7 +218,7 @@ async def test_pipeline_from_audio_stream_entity( async def test_pipeline_from_audio_stream_no_stt( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -281,7 +281,7 @@ async def test_pipeline_from_audio_stream_no_stt( async def test_pipeline_from_audio_stream_unknown_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -319,7 +319,7 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( async def test_pipeline_from_audio_stream_wake_word( hass: HomeAssistant, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, snapshot: SnapshotAssertion, @@ -395,7 +395,7 @@ async def test_pipeline_from_audio_stream_wake_word( async def test_pipeline_save_audio( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -474,7 +474,7 @@ async def test_pipeline_save_audio( async def test_pipeline_saved_audio_with_device_id( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, 
init_supporting_components, snapshot: SnapshotAssertion, @@ -529,7 +529,7 @@ async def test_pipeline_saved_audio_with_device_id( async def test_pipeline_saved_audio_write_error( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -578,7 +578,7 @@ async def test_pipeline_saved_audio_write_error( async def test_pipeline_saved_audio_empty_queue( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -641,7 +641,7 @@ async def test_pipeline_saved_audio_empty_queue( async def test_wake_word_detection_aborted( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index ef5d5edff9e..50d0fc9bed8 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . import MANY_LANGUAGES -from .conftest import MockSttProviderEntity, MockTTSProvider +from .conftest import MockSTTProviderEntity, MockTTSProvider from tests.common import flush_store @@ -398,7 +398,7 @@ async def test_default_pipeline_no_stt_tts( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline( hass: HomeAssistant, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, mock_tts_provider: MockTTSProvider, ha_language: str, ha_country: str | None, @@ -441,7 +441,7 @@ async def test_default_pipeline( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline_unsupported_stt_language( - hass: HomeAssistant, mock_stt_provider_entity: MockSttProviderEntity + hass: HomeAssistant, mock_stt_provider_entity: MockSTTProviderEntity ) -> None: """Test async_get_pipeline.""" with patch.object(mock_stt_provider_entity, "_supported_languages", ["smurfish"]): diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index f1f68d4a423..e339ee74fbb 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -743,7 +743,7 @@ async def test_stt_stream_failed( client = await hass_ws_client(hass) with patch( - "tests.components.assist_pipeline.conftest.MockSttProviderEntity.async_process_audio_stream", + "tests.components.assist_pipeline.conftest.MockSTTProviderEntity.async_process_audio_stream", side_effect=RuntimeError, ): await client.send_json_auto_id( diff --git a/tests/components/stt/common.py b/tests/components/stt/common.py index e6c36c5b350..f964fca6b67 100644 --- a/tests/components/stt/common.py +++ b/tests/components/stt/common.py @@ -2,11 +2,22 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from collections.abc import AsyncIterable, Callable, Coroutine from pathlib import Path from typing import Any -from homeassistant.components.stt import Provider +from homeassistant.components.stt import ( + AudioBitRates, + AudioChannels, 
+ AudioCodecs, + AudioFormats, + AudioSampleRates, + Provider, + SpeechMetadata, + SpeechResult, + SpeechResultState, + SpeechToTextEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -14,6 +25,80 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockPlatform, mock_platform +TEST_DOMAIN = "test" + + +class BaseProvider: + """Mock STT provider.""" + + fail_process_audio = False + + def __init__( + self, *, supported_languages: list[str] | None = None, text: str = "test_result" + ) -> None: + """Init test provider.""" + self._supported_languages = supported_languages or ["de", "de-CH", "en"] + self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] + self.received: list[bytes] = [] + self.text = text + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._supported_languages + + @property + def supported_formats(self) -> list[AudioFormats]: + """Return a list of supported formats.""" + return [AudioFormats.WAV, AudioFormats.OGG] + + @property + def supported_codecs(self) -> list[AudioCodecs]: + """Return a list of supported codecs.""" + return [AudioCodecs.PCM, AudioCodecs.OPUS] + + @property + def supported_bit_rates(self) -> list[AudioBitRates]: + """Return a list of supported bitrates.""" + return [AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[AudioSampleRates]: + """Return a list of supported samplerates.""" + return [AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[AudioChannels]: + """Return a list of supported channels.""" + return [AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] + ) -> SpeechResult: + """Process an audio stream.""" + self.calls.append((metadata, stream)) + async for data in stream: + if not data: + break + self.received.append(data) + if self.fail_process_audio: + return SpeechResult(None, SpeechResultState.ERROR) + + return SpeechResult(self.text, SpeechResultState.SUCCESS) + + +class MockSTTProvider(BaseProvider, Provider): + """Mock provider.""" + + url_path = TEST_DOMAIN + + +class MockSTTProviderEntity(BaseProvider, SpeechToTextEntity): + """Mock provider entity.""" + + url_path = "stt.test" + _attr_name = "test" + class MockSTTPlatform(MockPlatform): """Help to set up test stt service.""" diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index 5c98b0f8d57..92225123995 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,6 +1,6 @@ """Test STT component setup.""" -from collections.abc import AsyncIterable, Generator, Iterable +from collections.abc import Generator, Iterable from contextlib import ExitStack from http import HTTPStatus from pathlib import Path @@ -10,16 +10,6 @@ import pytest from homeassistant.components.stt import ( DOMAIN, - AudioBitRates, - AudioChannels, - AudioCodecs, - AudioFormats, - AudioSampleRates, - Provider, - SpeechMetadata, - SpeechResult, - SpeechResultState, - SpeechToTextEntity, async_default_engine, async_get_provider, async_get_speech_to_text_engine, @@ -29,7 +19,13 @@ from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from 
.common import mock_stt_entity_platform, mock_stt_platform +from .common import ( + TEST_DOMAIN, + MockSTTProvider, + MockSTTProviderEntity, + mock_stt_entity_platform, + mock_stt_platform, +) from tests.common import ( MockConfigEntry, @@ -41,82 +37,17 @@ from tests.common import ( ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -TEST_DOMAIN = "test" - - -class BaseProvider: - """Mock provider.""" - - fail_process_audio = False - - def __init__(self) -> None: - """Init test provider.""" - self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return ["de", "de-CH", "en"] - - @property - def supported_formats(self) -> list[AudioFormats]: - """Return a list of supported formats.""" - return [AudioFormats.WAV, AudioFormats.OGG] - - @property - def supported_codecs(self) -> list[AudioCodecs]: - """Return a list of supported codecs.""" - return [AudioCodecs.PCM, AudioCodecs.OPUS] - - @property - def supported_bit_rates(self) -> list[AudioBitRates]: - """Return a list of supported bitrates.""" - return [AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[AudioSampleRates]: - """Return a list of supported samplerates.""" - return [AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[AudioChannels]: - """Return a list of supported channels.""" - return [AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] - ) -> SpeechResult: - """Process an audio stream.""" - self.calls.append((metadata, stream)) - if self.fail_process_audio: - return SpeechResult(None, SpeechResultState.ERROR) - - return SpeechResult("test_result", SpeechResultState.SUCCESS) - - -class MockProvider(BaseProvider, Provider): - """Mock provider.""" - - url_path = TEST_DOMAIN - - -class MockProviderEntity(BaseProvider, SpeechToTextEntity): - """Mock provider entity.""" - - url_path = "stt.test" - _attr_name = "test" - @pytest.fixture -def mock_provider() -> MockProvider: +def mock_provider() -> MockSTTProvider: """Test provider fixture.""" - return MockProvider() + return MockSTTProvider() @pytest.fixture -def mock_provider_entity() -> MockProviderEntity: +def mock_provider_entity() -> MockSTTProviderEntity: """Test provider entity fixture.""" - return MockProviderEntity() + return MockSTTProviderEntity() class STTFlow(ConfigFlow): @@ -148,14 +79,14 @@ async def setup_fixture( hass: HomeAssistant, tmp_path: Path, request: pytest.FixtureRequest, -) -> MockProvider | MockProviderEntity: +) -> MockSTTProvider | MockSTTProviderEntity: """Set up the test environment.""" - provider: MockProvider | MockProviderEntity + provider: MockSTTProvider | MockSTTProviderEntity if request.param == "mock_setup": - provider = MockProvider() + provider = MockSTTProvider() await mock_setup(hass, tmp_path, provider) elif request.param == "mock_config_entry_setup": - provider = MockProviderEntity() + provider = MockSTTProviderEntity() await mock_config_entry_setup(hass, tmp_path, provider) else: raise RuntimeError("Invalid setup fixture") @@ -166,7 +97,7 @@ async def setup_fixture( async def mock_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Set up a test provider.""" mock_stt_platform( @@ -182,7 +113,7 @@ async def mock_setup( async def mock_config_entry_setup( hass: HomeAssistant, tmp_path: 
Path, - mock_provider_entity: MockProviderEntity, + mock_provider_entity: MockSTTProviderEntity, test_domain: str = TEST_DOMAIN, ) -> MockConfigEntry: """Set up a test provider via config entry.""" @@ -234,7 +165,7 @@ async def mock_config_entry_setup( async def test_get_provider_info( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test engine that doesn't exist.""" client = await hass_client() @@ -256,7 +187,7 @@ async def test_get_provider_info( async def test_non_existing_provider( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test streaming to engine that doesn't exist.""" client = await hass_client() @@ -282,7 +213,7 @@ async def test_non_existing_provider( async def test_stream_audio( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test streaming audio and getting response.""" client = await hass_client() @@ -343,7 +274,7 @@ async def test_metadata_errors( header: str | None, status: int, error: str, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test metadata errors.""" client = await hass_client() @@ -359,7 +290,7 @@ async def test_metadata_errors( async def test_get_provider( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Test we can get STT providers.""" await mock_setup(hass, tmp_path, mock_provider) @@ -370,7 +301,7 @@ async def test_get_provider( async def test_config_entry_unload( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test we can unload config entry.""" config_entry = await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -382,7 +313,7 @@ async def test_config_entry_unload( async def test_restore_state( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, + mock_provider_entity: MockSTTProviderEntity, ) -> None: """Test we restore state in the integration.""" entity_id = f"{DOMAIN}.{TEST_DOMAIN}" @@ -409,7 +340,7 @@ async def test_restore_state( async def test_ws_list_engines( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, engine_id: str, extra_data: dict[str, str], ) -> None: @@ -491,7 +422,7 @@ async def test_default_engine_none(hass: HomeAssistant, tmp_path: Path) -> None: async def test_default_engine( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Test async_default_engine.""" mock_stt_platform( @@ -507,7 +438,7 @@ async def test_default_engine( async def test_default_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test async_default_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -519,8 +450,8 @@ async def test_default_engine_entity( async def test_default_engine_prefer_entity( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, - 
mock_provider: MockProvider, + mock_provider_entity: MockSTTProviderEntity, + mock_provider: MockSTTProvider, config_flow_test_domains: str, ) -> None: """Test async_default_engine. @@ -558,7 +489,7 @@ async def test_default_engine_prefer_entity( async def test_default_engine_prefer_cloud_entity( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, config_flow_test_domains: str, ) -> None: """Test async_default_engine. @@ -569,7 +500,7 @@ async def test_default_engine_prefer_cloud_entity( """ await mock_setup(hass, tmp_path, mock_provider) for domain in config_flow_test_domains: - entity = MockProviderEntity() + entity = MockSTTProviderEntity() entity.url_path = f"stt.{domain}" entity._attr_name = f"{domain} STT entity" await mock_config_entry_setup(hass, tmp_path, entity, test_domain=domain) @@ -589,7 +520,7 @@ async def test_default_engine_prefer_cloud_entity( async def test_get_engine_legacy( - hass: HomeAssistant, tmp_path: Path, mock_provider: MockProvider + hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider ) -> None: """Test async_get_speech_to_text_engine.""" mock_stt_platform( @@ -614,7 +545,7 @@ async def test_get_engine_legacy( async def test_get_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test async_get_speech_to_text_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) From b085ac929698a89be508a0fdda39dce997c6d9c4 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 09:48:17 +0200 Subject: [PATCH 1139/1635] Standardize import step variable name in geonetnz_volcano (#124699) --- .../geonetnz_volcano/config_flow.py | 4 +-- .../geonetnz_volcano/test_config_flow.py | 30 +++++++++---------- 2 files changed, 16 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/geonetnz_volcano/config_flow.py b/homeassistant/components/geonetnz_volcano/config_flow.py index 12c7157b7e4..45a074d215c 100644 --- a/homeassistant/components/geonetnz_volcano/config_flow.py +++ b/homeassistant/components/geonetnz_volcano/config_flow.py @@ -47,9 +47,9 @@ class GeonetnzVolcanoFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors or {} ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/tests/components/geonetnz_volcano/test_config_flow.py b/tests/components/geonetnz_volcano/test_config_flow.py index b074bdffa20..110fb3b0a9e 100644 --- a/tests/components/geonetnz_volcano/test_config_flow.py +++ b/tests/components/geonetnz_volcano/test_config_flow.py @@ -3,7 +3,8 @@ from datetime import timedelta from unittest.mock import patch -from homeassistant.components.geonetnz_volcano import config_flow +from homeassistant.components.geonetnz_volcano import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -20,19 +21,18 @@ async def test_duplicate_error(hass: HomeAssistant, config_entry) -> None: conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} 
config_entry.add_to_hass(hass) - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - result = await flow.async_step_user(user_input=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) assert result["errors"] == {"base": "already_configured"} async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=None + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -48,9 +48,6 @@ async def test_step_import(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=4), } - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -60,7 +57,9 @@ async def test_step_import(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await flow.async_step_import(import_config=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=conf + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { @@ -78,9 +77,6 @@ async def test_step_user(hass: HomeAssistant) -> None: hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25} - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -90,7 +86,9 @@ async def test_step_user(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await flow.async_step_user(user_input=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { From 1f3c99dff39712de3bf354ae5fb5506c3dbc7db2 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 09:51:25 +0200 Subject: [PATCH 1140/1635] Standardize import step variable name in cert_expiry (#124696) --- homeassistant/components/cert_expiry/config_flow.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/cert_expiry/config_flow.py b/homeassistant/components/cert_expiry/config_flow.py index 8f937ef61ea..22d443c700d 100644 --- a/homeassistant/components/cert_expiry/config_flow.py +++ b/homeassistant/components/cert_expiry/config_flow.py @@ -95,12 +95,9 @@ class CertexpiryConfigFlow(ConfigFlow, domain=DOMAIN): errors=self._errors, ) - async def async_step_import( - self, - user_input: Mapping[str, Any] | None = None, - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry. 
Only host was required in the yaml file all other fields are optional """ - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) From bcc66c9a86b9ed09d7fadaae51fa7f88698adac9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 09:51:49 +0200 Subject: [PATCH 1141/1635] Standardize import step variable name (part 5) (#124698) * Standardize import step variable name (part 5) * Revert point * Adjust soma tests --- .../components/enocean/config_flow.py | 8 +-- homeassistant/components/heos/config_flow.py | 4 +- .../components/smartthings/config_flow.py | 4 +- homeassistant/components/soma/config_flow.py | 4 +- .../components/songpal/config_flow.py | 8 +-- .../components/yeelight/config_flow.py | 12 ++-- tests/components/soma/test_config_flow.py | 56 +++++++++++-------- 7 files changed, 54 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/enocean/config_flow.py b/homeassistant/components/enocean/config_flow.py index b68026a34ba..3105b3ab595 100644 --- a/homeassistant/components/enocean/config_flow.py +++ b/homeassistant/components/enocean/config_flow.py @@ -22,17 +22,17 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): self.dongle_path = None self.discovery_info = None - async def async_step_import(self, data=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a yaml configuration.""" - if not await self.validate_enocean_conf(data): + if not await self.validate_enocean_conf(import_data): LOGGER.warning( "Cannot import yaml configuration: %s is not a valid dongle path", - data[CONF_DEVICE], + import_data[CONF_DEVICE], ) return self.async_abort(reason="invalid_dongle_path") - return self.create_enocean_entry(data) + return self.create_enocean_entry(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index 968f677df23..57ed51a3c05 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -43,9 +43,9 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): # Show selection form return self.async_show_form(step_id="user") - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Occurs when an entry is setup through config.""" - host = user_input[CONF_HOST] + host = import_data[CONF_HOST] # raise_on_progress is False here in case ssdp discovers # heos first which would block the import await self.async_set_unique_id(DOMAIN, raise_on_progress=False) diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index 9072683328d..df5b7a8acfa 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -54,9 +54,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): self.location_id = None self.endpoints_initialized = False - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Occurs when a previously entry setup fails and is re-initiated.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git 
a/homeassistant/components/soma/config_flow.py b/homeassistant/components/soma/config_flow.py index 23aabf5a5e0..586567611f7 100644 --- a/homeassistant/components/soma/config_flow.py +++ b/homeassistant/components/soma/config_flow.py @@ -67,8 +67,8 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Connection to SOMA Connect failed with KeyError") return self.async_abort(reason="connection_error") - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle flow start from existing config section.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - return await self.async_step_creation(user_input) + return await self.async_step_creation(import_data) diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index 0724646a594..9ccf7a8f19c 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -126,10 +126,10 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_init() - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - name = user_input.get(CONF_NAME) - endpoint = user_input.get(CONF_ENDPOINT) + name = import_data.get(CONF_NAME) + endpoint = import_data.get(CONF_ENDPOINT) parsed_url = urlparse(endpoint) # Try to connect to test the endpoint @@ -146,4 +146,4 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): self.conf = SongpalConfig(name, parsed_url.hostname, endpoint) - return await self.async_step_init(user_input) + return await self.async_step_init(import_data) diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index b4bb7da9a22..1b36fba59df 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -244,21 +244,21 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_DEVICE): vol.In(devices_name)}), ) - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import step.""" - host = user_input[CONF_HOST] + host = import_data[CONF_HOST] try: await self._async_try_connect(host, raise_on_progress=False) except CannotConnect: _LOGGER.error("Failed to import %s: cannot connect", host) return self.async_abort(reason="cannot_connect") - if CONF_NIGHTLIGHT_SWITCH_TYPE in user_input: - user_input[CONF_NIGHTLIGHT_SWITCH] = ( - user_input.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) + if CONF_NIGHTLIGHT_SWITCH_TYPE in import_data: + import_data[CONF_NIGHTLIGHT_SWITCH] = ( + import_data.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) == NIGHTLIGHT_SWITCH_TYPE_LIGHT ) self._abort_if_unique_id_configured() - return self.async_create_entry(title=user_input[CONF_NAME], data=user_input) + return self.async_create_entry(title=import_data[CONF_NAME], data=import_data) async def _async_try_connect(self, host, raise_on_progress=True): """Set up with options.""" diff --git a/tests/components/soma/test_config_flow.py b/tests/components/soma/test_config_flow.py index 8b8548bfe3e..67109e37c6d 100644 --- a/tests/components/soma/test_config_flow.py +++ b/tests/components/soma/test_config_flow.py @@ -5,7 +5,8 @@ from unittest.mock import patch from api.soma_api import SomaApi from 
requests import RequestException -from homeassistant.components.soma import DOMAIN, config_flow +from homeassistant.components.soma import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,57 +18,66 @@ MOCK_PORT = 3000 async def test_form(hass: HomeAssistant) -> None: """Test user form showing.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM async def test_import_abort(hass: HomeAssistant) -> None: """Test configuration from YAML aborting with existing entity.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await flow.async_step_import() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_setup" async def test_import_create(hass: HomeAssistant) -> None: """Test configuration from YAML.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY async def test_error_status(hass: HomeAssistant) -> None: """Test Connect successfully returning error status.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "error"}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "result_error" async def test_key_error(hass: HomeAssistant) -> None: """Test Connect returning empty string.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass + with patch.object(SomaApi, "list_devices", return_value={}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" async def test_exception(hass: HomeAssistant) -> None: """Test if RequestException fires when no connection can be made.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", side_effect=RequestException()): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" @@ -75,8 +85,10 @@ async def test_exception(hass: HomeAssistant) -> None: async def test_full_flow(hass: HomeAssistant) -> None: """Check classic use case.""" hass.data[DOMAIN] = {} - flow = 
config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await flow.async_step_user({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY From e9830f0835e2b9fe7cdbd8c4ede7ccba3b416917 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 28 Aug 2024 11:13:37 +0200 Subject: [PATCH 1142/1635] Bump reolink_aio to 0.9.8 (#124763) --- homeassistant/components/reolink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 9671a4b4fc1..b90f7f4a045 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": ["reolink-aio==0.9.7"] + "requirements": ["reolink-aio==0.9.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4633f0b093c..3e4fedbc15b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2504,7 +2504,7 @@ renault-api==0.2.5 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.7 +reolink-aio==0.9.8 # homeassistant.components.idteck_prox rfk101py==0.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8688b7a6dcc..ab1aa13ec3c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1986,7 +1986,7 @@ renault-api==0.2.5 renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.7 +reolink-aio==0.9.8 # homeassistant.components.rflink rflink==0.0.66 From c772c4a2d524b90c48e76a7db67f4751250e09f8 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 11:15:26 +0200 Subject: [PATCH 1143/1635] Allow specifying icons for service sections (#124656) * Allow specifying icons for service sections * Improve kitchen_sink example --- .../components/kitchen_sink/__init__.py | 22 ++++++++++++- .../components/kitchen_sink/icons.json | 8 +++++ .../components/kitchen_sink/services.yaml | 32 +++++++++++++++++++ .../components/kitchen_sink/strings.json | 30 +++++++++++++++++ homeassistant/helpers/icon.py | 26 +++++++++++++-- script/hassfest/icons.py | 19 ++++++++++- tests/components/kitchen_sink/test_init.py | 22 +++++++++++++ tests/helpers/test_icon.py | 4 +-- 8 files changed, 157 insertions(+), 6 deletions(-) create mode 100644 homeassistant/components/kitchen_sink/services.yaml diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index 94dfca77410..2c3887bb383 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -9,6 +9,8 @@ from __future__ import annotations import datetime from random import random +import voluptuous as vol + from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance from homeassistant.components.recorder.models import StatisticData, StatisticMetaData from homeassistant.components.recorder.statistics import ( @@ -18,7 +20,7 @@ from homeassistant.components.recorder.statistics import ( ) from homeassistant.config_entries import SOURCE_IMPORT, 
ConfigEntry from homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType @@ -40,6 +42,15 @@ COMPONENTS_WITH_DEMO_PLATFORM = [ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +SCHEMA_SERVICE_TEST_SERVICE_1 = vol.Schema( + { + vol.Required("field_1"): vol.Coerce(int), + vol.Required("field_2"): vol.In(["off", "auto", "cool"]), + vol.Optional("field_3"): vol.Coerce(int), + vol.Optional("field_4"): vol.In(["forwards", "reverse"]), + } +) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the demo environment.""" @@ -48,6 +59,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: DOMAIN, context={"source": SOURCE_IMPORT}, data={} ) ) + + @callback + def service_handler(call: ServiceCall | None = None) -> None: + """Do nothing.""" + + hass.services.async_register( + DOMAIN, "test_service_1", service_handler, SCHEMA_SERVICE_TEST_SERVICE_1 + ) + return True diff --git a/homeassistant/components/kitchen_sink/icons.json b/homeassistant/components/kitchen_sink/icons.json index 2947cfa7ec5..565d595d9c7 100644 --- a/homeassistant/components/kitchen_sink/icons.json +++ b/homeassistant/components/kitchen_sink/icons.json @@ -7,5 +7,13 @@ } } } + }, + "services": { + "test_service_1": { + "service": "mdi:flask", + "sections": { + "advanced_fields": "mdi:test-tube" + } + } } } diff --git a/homeassistant/components/kitchen_sink/services.yaml b/homeassistant/components/kitchen_sink/services.yaml new file mode 100644 index 00000000000..c65495095dc --- /dev/null +++ b/homeassistant/components/kitchen_sink/services.yaml @@ -0,0 +1,32 @@ +test_service_1: + fields: + field_1: + required: true + selector: + number: + min: 0 + max: 60 + unit_of_measurement: seconds + field_2: + required: true + selector: + select: + options: + - "off" + - "auto" + - "cool" + advanced_fields: + collapsed: true + fields: + field_3: + selector: + number: + min: 0 + max: 24 + unit_of_measurement: hours + field_4: + selector: + select: + options: + - "forward" + - "reverse" diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index c25964ab2ab..b10534eac00 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -71,5 +71,35 @@ "title": "This is not a fixable problem", "description": "This issue is never going to give up." 
} + }, + "services": { + "test_service_1": { + "name": "Test service 1", + "description": "Fake service for testing", + "fields": { + "field_1": { + "name": "Field 1", + "description": "Number of seconds" + }, + "field_2": { + "name": "Field 2", + "description": "Mode" + }, + "field_3": { + "name": "Field 3", + "description": "Number of hours" + }, + "field_4": { + "name": "Field 4", + "description": "Direction" + } + }, + "sections": { + "advanced_fields": { + "name": "Advanced options", + "description": "Some very advanced things" + } + } + } } } diff --git a/homeassistant/helpers/icon.py b/homeassistant/helpers/icon.py index e759719f667..ce8205eb915 100644 --- a/homeassistant/helpers/icon.py +++ b/homeassistant/helpers/icon.py @@ -7,7 +7,7 @@ from collections.abc import Iterable from functools import lru_cache import logging import pathlib -from typing import Any +from typing import Any, cast from homeassistant.core import HomeAssistant, callback from homeassistant.loader import Integration, async_get_integrations @@ -21,12 +21,34 @@ ICON_CACHE: HassKey[_IconsCache] = HassKey("icon_cache") _LOGGER = logging.getLogger(__name__) +def convert_shorthand_service_icon( + value: str | dict[str, str | dict[str, str]], +) -> dict[str, str | dict[str, str]]: + """Convert shorthand service icon to dict.""" + if isinstance(value, str): + return {"service": value} + return value + + +def _load_icons_file( + icons_file: pathlib.Path, +) -> dict[str, Any]: + """Load and parse an icons.json file.""" + icons = load_json_object(icons_file) + if "services" not in icons: + return icons + services = cast(dict[str, str | dict[str, str | dict[str, str]]], icons["services"]) + for service, service_icons in services.items(): + services[service] = convert_shorthand_service_icon(service_icons) + return icons + + def _load_icons_files( icons_files: dict[str, pathlib.Path], ) -> dict[str, dict[str, Any]]: """Load and parse icons.json files.""" return { - component: load_json_object(icons_file) + component: _load_icons_file(icons_file) for component, icons_file in icons_files.items() } diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index 10f666b9013..92d42efb842 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -9,6 +9,7 @@ import voluptuous as vol from voluptuous.humanize import humanize_error import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.icon import convert_shorthand_service_icon from .model import Config, Integration from .translations import translation_key_validator @@ -60,6 +61,22 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( ) +SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( + vol.All( + convert_shorthand_service_icon, + vol.Schema( + { + vol.Optional("service"): icon_value_validator, + vol.Optional("sections"): cv.schema_with_slug_keys( + icon_value_validator, slug_validator=translation_key_validator + ), + } + ), + ), + slug_validator=translation_key_validator, +) + + def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: """Create an icon schema.""" @@ -91,7 +108,7 @@ def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: {str: {"fix_flow": DATA_ENTRY_ICONS_SCHEMA}} ), vol.Optional("options"): DATA_ENTRY_ICONS_SCHEMA, - vol.Optional("services"): state_validator, + vol.Optional("services"): SERVICE_ICONS_SCHEMA, } ) diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 0575141bb3b..b832577a48a 100644 --- 
a/tests/components/kitchen_sink/test_init.py +++ b/tests/components/kitchen_sink/test_init.py @@ -5,6 +5,7 @@ from http import HTTPStatus from unittest.mock import ANY import pytest +import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.recorder import get_instance @@ -324,3 +325,24 @@ async def test_issues_created( }, ] } + + +async def test_service( + hass: HomeAssistant, +) -> None: + """Test we can call the service.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + with pytest.raises(vol.error.MultipleInvalid): + await hass.services.async_call(DOMAIN, "test_service_1", blocking=True) + + await hass.services.async_call( + DOMAIN, "test_service_1", {"field_1": 1, "field_2": "auto"}, blocking=True + ) + + await hass.services.async_call( + DOMAIN, + "test_service_1", + {"field_1": 1, "field_2": "auto", "field_3": 1, "field_4": "forwards"}, + blocking=True, + ) diff --git a/tests/helpers/test_icon.py b/tests/helpers/test_icon.py index 732f9971ac0..e0dc89f5322 100644 --- a/tests/helpers/test_icon.py +++ b/tests/helpers/test_icon.py @@ -101,7 +101,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: # Test services icons are available icons = await icon.async_get_icons(hass, "services") assert len(icons) == 1 - assert icons["switch"]["turn_off"] == "mdi:toggle-switch-variant-off" + assert icons["switch"]["turn_off"] == {"service": "mdi:toggle-switch-variant-off"} # Ensure icons file for platform isn't loaded, as that isn't supported icons = await icon.async_get_icons(hass, "entity") @@ -126,7 +126,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: icons = await icon.async_get_icons(hass, "services") assert len(icons) == 2 - assert icons["test_package"]["enable_god_mode"] == "mdi:shield" + assert icons["test_package"]["enable_god_mode"] == {"service": "mdi:shield"} # Load another one hass.config.components.add("test_embedded") From 10b3119b4a6eca9f1e2449758062b606b81961e6 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 28 Aug 2024 11:27:34 +0200 Subject: [PATCH 1144/1635] Use Position instead of Angle for TiltOnlyDevice in motion blinds (#123521) Use Position instead of Angle for TiltOnlyBlinds --- .../components/motion_blinds/cover.py | 56 ++++++++++++++++--- 1 file changed, 48 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/motion_blinds/cover.py b/homeassistant/components/motion_blinds/cover.py index 72b78915bad..e60e7fa0ae8 100644 --- a/homeassistant/components/motion_blinds/cover.py +++ b/homeassistant/components/motion_blinds/cover.py @@ -330,23 +330,63 @@ class MotionTiltOnlyDevice(MotionTiltDevice): """Return current position of cover.""" return None + @property + def current_cover_tilt_position(self) -> int | None: + """Return current angle of cover. + + None is unknown, 0 is closed/minimum tilt, 100 is fully open/maximum tilt. 
+ """ + if self._blind.position is None: + if self._blind.angle is None: + return None + return self._blind.angle * 100 / 180 + + return self._blind.position + @property def is_closed(self) -> bool | None: """Return if the cover is closed or not.""" - if self._blind.angle is None: - return None - return self._blind.angle == 0 + if self._blind.position is None: + if self._blind.angle is None: + return None + return self._blind.angle == 0 + + return self._blind.position == 0 + + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Open the cover tilt.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Open) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Close the cover tilt.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Close) + + async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: + """Move the cover tilt to a specific position.""" + angle = kwargs[ATTR_TILT_POSITION] + if self._blind.position is None: + angle = angle * 180 / 100 + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_angle, angle) + else: + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_position, angle) async def async_set_absolute_position(self, **kwargs): """Move the cover to a specific absolute position (see TDBU).""" angle = kwargs.get(ATTR_TILT_POSITION) - if angle is not None: + if angle is None: + return + + if self._blind.position is None: angle = angle * 180 / 100 async with self._api_lock: - await self.hass.async_add_executor_job( - self._blind.Set_angle, - angle, - ) + await self.hass.async_add_executor_job(self._blind.Set_angle, angle) + else: + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_position, angle) class MotionTDBUDevice(MotionBaseDevice): From 41e66edd14bdbdbb9a8e837db4b9df7dff300051 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 28 Aug 2024 12:31:30 +0200 Subject: [PATCH 1145/1635] Set default name for KNX outgoing telegram source (#124439) --- homeassistant/components/knx/telegrams.py | 4 +++- tests/components/knx/test_telegrams.py | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knx/telegrams.py b/homeassistant/components/knx/telegrams.py index a96d841a07d..f4b31fd11f9 100644 --- a/homeassistant/components/knx/telegrams.py +++ b/homeassistant/components/knx/telegrams.py @@ -9,7 +9,7 @@ from xknx import XKNX from xknx.dpt import DPTArray, DPTBase, DPTBinary from xknx.dpt.dpt import DPTComplexData, DPTEnumData from xknx.exceptions import XKNXException -from xknx.telegram import Telegram +from xknx.telegram import Telegram, TelegramDirection from xknx.telegram.apci import GroupValueResponse, GroupValueWrite from homeassistant.core import HomeAssistant @@ -119,6 +119,8 @@ class Telegrams: device := self.project.devices.get(f"{telegram.source_address}") ) is not None: src_name = f"{device['manufacturer_name']} {device['name']}" + elif telegram.direction is TelegramDirection.OUTGOING: + src_name = "Home Assistant" if isinstance(telegram.payload, (GroupValueWrite, GroupValueResponse)): payload_data = telegram.payload.value.value diff --git a/tests/components/knx/test_telegrams.py b/tests/components/knx/test_telegrams.py index 2eda718f5ac..69e3208879c 100644 --- a/tests/components/knx/test_telegrams.py +++ b/tests/components/knx/test_telegrams.py @@ -39,7 +39,7 @@ MOCK_TELEGRAMS = [ "dpt_name": None, "payload": [1, 2, 
3, 4], "source": "0.0.0", - "source_name": "", + "source_name": "Home Assistant", "telegramtype": "GroupValueWrite", "timestamp": MOCK_TIMESTAMP, "unit": None, From 8504a16e836645f8b2cf6b505dd9631aba951145 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 28 Aug 2024 12:34:28 +0200 Subject: [PATCH 1146/1635] Use KNX group address format from project (#124084) --- homeassistant/components/knx/__init__.py | 1 + homeassistant/components/knx/project.py | 14 +++++++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index a401ee2ccac..01d5294639c 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -297,6 +297,7 @@ class KNXModule: self.config_store = KNXConfigStore(hass=hass, config_entry=entry) self.xknx = XKNX( + address_format=self.project.get_address_format(), connection_config=self.connection_config(), rate_limit=self.entry.data[CONF_KNX_RATE_LIMIT], state_updater=self.entry.data[CONF_KNX_STATE_UPDATER], diff --git a/homeassistant/components/knx/project.py b/homeassistant/components/knx/project.py index b5bafe00724..04cac68aab0 100644 --- a/homeassistant/components/knx/project.py +++ b/homeassistant/components/knx/project.py @@ -8,12 +8,13 @@ from typing import Final from xknx import XKNX from xknx.dpt import DPTBase -from xknx.telegram.address import DeviceAddressableType +from xknx.telegram.address import DeviceAddressableType, GroupAddress, GroupAddressType from xknxproject import XKNXProj from xknxproject.models import ( Device, DPTType, GroupAddress as GroupAddressModel, + GroupAddressStyle as XknxProjectGroupAddressStyle, KNXProject as KNXProjectModel, ProjectInfo, ) @@ -90,6 +91,7 @@ class KNXProject: if project := data or await self._store.async_load(): self.devices = project["devices"] self.info = project["info"] + GroupAddress.address_format = self.get_address_format() xknx.group_address_dpt.clear() xknx_ga_dict: dict[DeviceAddressableType, DPTType] = {} @@ -133,3 +135,13 @@ class KNXProject: async def get_knxproject(self) -> KNXProjectModel | None: """Load the project file from local storage.""" return await self._store.async_load() + + def get_address_format(self) -> GroupAddressType: + """Return the address format for group addresses used in the project.""" + if self.info: + match self.info["group_address_style"]: + case XknxProjectGroupAddressStyle.TWOLEVEL.value: + return GroupAddressType.SHORT + case XknxProjectGroupAddressStyle.FREE.value: + return GroupAddressType.FREE + return GroupAddressType.LONG From d1681fac72b335420e2a483717881ecb2c79d5e0 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 28 Aug 2024 12:54:30 +0200 Subject: [PATCH 1147/1635] Improve mediabrowser names for DUO lens Reolink cameras (#124766) * Improve playback of DUO lens cameras like TrackMix * fix styling * Adjust tests accordingly --- .../components/reolink/media_source.py | 25 +++++++++++++------ tests/components/reolink/test_media_source.py | 21 ++++++++-------- 2 files changed, 29 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index ae865b77913..3c5d60030a3 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -174,10 +174,7 @@ class ReolinkVODMediaSource(MediaSource): if len(ch_id) > 3: ch = host.api.channel_for_uid(ch_id) - if ( - host.api.api_version("recReplay", 
int(ch)) < 1 - or not host.api.hdd_info - ): + if not host.api.supported(int(ch), "replay") or not host.api.hdd_info: # playback stream not supported by this camera or no storage installed continue @@ -281,12 +278,16 @@ class ReolinkVODMediaSource(MediaSource): config_entry_id, channel, "sub" ) + title = host.api.camera_name(channel) + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"RESs|{config_entry_id}|{channel}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=host.api.camera_name(channel), + title=title, can_play=False, can_expand=True, children=children, @@ -328,12 +329,16 @@ class ReolinkVODMediaSource(MediaSource): for day in status.days ] + title = f"{host.api.camera_name(channel)} {res_name(stream)}" + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel} {res_name(stream)}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"DAYS|{config_entry_id}|{channel}|{stream}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=f"{host.api.camera_name(channel)} {res_name(stream)}", + title=title, can_play=False, can_expand=True, children=children, @@ -388,12 +393,18 @@ class ReolinkVODMediaSource(MediaSource): ) ) + title = ( + f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}" + ) + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel} {res_name(stream)} {year}/{month}/{day}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"FILES|{config_entry_id}|{channel}|{stream}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}", + title=title, can_play=False, can_expand=True, children=children, diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index cbc9bf51705..31985bd10f7 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -130,7 +130,7 @@ async def test_browsing( ) -> None: """Test browsing the Reolink three.""" entry_id = config_entry.entry_id - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 reolink_connect.model = "Reolink TrackMix PoE" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): @@ -162,7 +162,7 @@ async def test_browsing( browse_res_AT_sub_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_sub" browse_res_AT_main_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_main" assert browse.domain == DOMAIN - assert browse.title == TEST_NVR_NAME + assert browse.title == f"{TEST_NVR_NAME} lens 0" assert browse.identifier == browse_resolution_id assert browse.children[0].identifier == browse_res_sub_id assert browse.children[1].identifier == browse_res_main_id @@ -178,19 +178,19 @@ async def test_browsing( browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_sub_id}") assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Low res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Low res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_sub_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Autotrack low res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack low res." 
browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_main_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Autotrack high res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack high res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_main_id}" @@ -200,7 +200,7 @@ async def test_browsing( browse_day_0_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}" browse_day_1_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY2}" assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} High res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 High res." assert browse.identifier == browse_days_id assert browse.children[0].identifier == browse_day_0_id assert browse.children[1].identifier == browse_day_1_id @@ -220,7 +220,8 @@ async def test_browsing( browse_file_id = f"FILE|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}" assert browse.domain == DOMAIN assert ( - browse.title == f"{TEST_NVR_NAME} High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" + browse.title + == f"{TEST_NVR_NAME} lens 0 High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" ) assert browse.identifier == browse_files_id assert browse.children[0].identifier == browse_file_id @@ -272,7 +273,7 @@ async def test_browsing_rec_playback_unsupported( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera which does not support playback of recordings.""" - reolink_connect.api_version.return_value = 0 + reolink_connect.supported.return_value = 0 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -293,7 +294,7 @@ async def test_browsing_errors( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera errors.""" - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(config_entry.entry_id) @@ -312,7 +313,7 @@ async def test_browsing_not_loaded( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera integration which is not loaded.""" - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): assert await hass.config_entries.async_setup(config_entry.entry_id) From 0a942423376f2d18c6a03169e905faa818faf1a9 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 12:56:22 +0200 Subject: [PATCH 1148/1635] Improve config flow type hints in vesync (#124351) --- .../components/vesync/config_flow.py | 38 ++++++++++--------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/vesync/config_flow.py b/homeassistant/components/vesync/config_flow.py index 15f9f548e35..6115cb9ee76 100644 --- a/homeassistant/components/vesync/config_flow.py +++ b/homeassistant/components/vesync/config_flow.py @@ -1,40 +1,42 @@ """Config flow utilities.""" -from collections import OrderedDict +from typing import Any from pyvesync import VeSync import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback +import 
homeassistant.helpers.config_validation as cv from .const import DOMAIN +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + } +) + class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - def __init__(self) -> None: - """Instantiate config flow.""" - self._username = None - self._password = None - self.data_schema = OrderedDict() - self.data_schema[vol.Required(CONF_USERNAME)] = str - self.data_schema[vol.Required(CONF_PASSWORD)] = str - @callback - def _show_form(self, errors=None): + def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: """Show form to the user.""" return self.async_show_form( step_id="user", - data_schema=vol.Schema(self.data_schema), + data_schema=DATA_SCHEMA, errors=errors if errors else {}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -42,15 +44,15 @@ class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): if not user_input: return self._show_form() - self._username = user_input[CONF_USERNAME] - self._password = user_input[CONF_PASSWORD] + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] - manager = VeSync(self._username, self._password) + manager = VeSync(username, password) login = await self.hass.async_add_executor_job(manager.login) if not login: return self._show_form(errors={"base": "invalid_auth"}) return self.async_create_entry( - title=self._username, - data={CONF_USERNAME: self._username, CONF_PASSWORD: self._password}, + title=username, + data={CONF_USERNAME: username, CONF_PASSWORD: password}, ) From 35d318818a030453d619604d982f0fd67d5bb15f Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 12:59:12 +0200 Subject: [PATCH 1149/1635] Improve config flow type hints in sense (#124350) --- homeassistant/components/sense/config_flow.py | 29 ++++++++++++------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/sense/config_flow.py b/homeassistant/components/sense/config_flow.py index dab80b99e1a..222c6b30f79 100644 --- a/homeassistant/components/sense/config_flow.py +++ b/homeassistant/components/sense/config_flow.py @@ -3,7 +3,7 @@ from collections.abc import Mapping from functools import partial import logging -from typing import Any +from typing import TYPE_CHECKING, Any from sense_energy import ( ASyncSenseable, @@ -34,13 +34,12 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Init Config .""" - self._gateway = None - self._auth_data = {} - super().__init__() + self._gateway: ASyncSenseable | None = None + self._auth_data: dict[str, Any] = {} - async def validate_input(self, data): + async def validate_input(self, data: Mapping[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. 
@@ -59,6 +58,8 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): client_session=client_session, ) ) + if TYPE_CHECKING: + assert self._gateway self._gateway.rate_limit = ACTIVE_UPDATE_RATE await self._gateway.authenticate( self._auth_data[CONF_EMAIL], self._auth_data[CONF_PASSWORD] @@ -79,7 +80,9 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_update_reload_and_abort(existing_entry, data=self._auth_data) - async def validate_input_and_create_entry(self, user_input, errors): + async def validate_input_and_create_entry( + self, user_input: Mapping[str, Any], errors: dict[str, str] + ) -> ConfigFlowResult | None: """Validate the input and create the entry from the data.""" try: await self.validate_input(user_input) @@ -118,9 +121,11 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if result := await self.validate_input_and_create_entry(user_input, errors): return result @@ -136,9 +141,11 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): self._auth_data = dict(entry_data) return await self.async_step_reauth_validate(entry_data) - async def async_step_reauth_validate(self, user_input=None): + async def async_step_reauth_validate( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauth and validation.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if result := await self.validate_input_and_create_entry(user_input, errors): return result From 18b49a6f621e8f06296b3c307fd6d7fe4ef25bef Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 12:59:53 +0200 Subject: [PATCH 1150/1635] Cleanup unused import in solarlog config flow (#124713) --- .../components/solarlog/config_flow.py | 17 -------- tests/components/solarlog/test_config_flow.py | 41 +++---------------- 2 files changed, 6 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 7824c98cf5d..4587cb7d886 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -101,23 +101,6 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): errors=self._errors, ) - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry.""" - - user_input = { - CONF_HOST: DEFAULT_HOST, - CONF_NAME: DEFAULT_NAME, - "extended_data": False, - **import_data, - } - - user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) - - if self._host_in_configuration_exists(user_input[CONF_HOST]): - return self.async_abort(reason="already_configured") - - return await self.async_step_user(user_input) - async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index f71282a7c9b..b06f2ac0587 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -7,7 +7,7 @@ from solarlog_cli.solarlog_exceptions import SolarLogConnectionError, SolarLogEr from homeassistant import config_entries from homeassistant.components.solarlog import config_flow -from 
homeassistant.components.solarlog.const import DEFAULT_HOST, DOMAIN +from homeassistant.components.solarlog.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,35 +123,6 @@ async def test_form_exceptions( assert result["data"]["extended_data"] is False -async def test_import(hass: HomeAssistant, test_connect) -> None: - """Test import step.""" - flow = init_config_flow(hass) - - # import with only host - result = await flow.async_step_import({CONF_HOST: HOST}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog" - assert result["data"][CONF_HOST] == HOST - - # import with only name - result = await flow.async_step_import({CONF_NAME: NAME}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == DEFAULT_HOST - - # import with host and name - result = await flow.async_step_import({CONF_HOST: HOST, CONF_NAME: NAME}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == HOST - - async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None: """Test we abort if the device is already setup.""" flow = init_config_flow(hass) @@ -160,11 +131,11 @@ async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None ).add_to_hass(hass) # Should fail, same HOST different NAME (default) - result = await flow.async_step_import( + result = await flow.async_step_user( {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_HOST: "already_configured"} # Should fail, same HOST and NAME result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME}) @@ -172,7 +143,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None assert result["errors"] == {CONF_HOST: "already_configured"} # SHOULD pass, diff HOST (without http://), different NAME - result = await flow.async_step_import( + result = await flow.async_step_user( {CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} ) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -180,7 +151,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None assert result["data"][CONF_HOST] == "http://2.2.2.2" # SHOULD pass, diff HOST, same NAME - result = await flow.async_step_import( + result = await flow.async_step_user( {CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME, "extended_data": False} ) await hass.async_block_till_done() From 8ff8ed7f76002966e23a1742552426a3f5265fa6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:00:10 +0200 Subject: [PATCH 1151/1635] Cleanup unused import in upb config flow (#124694) * Cleanup unused import in upb config flow * More cleanup --- homeassistant/components/upb/config_flow.py | 12 ------- tests/components/upb/test_config_flow.py | 39 --------------------- 2 files changed, 51 deletions(-) diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index 
6efd3a685ed..d9f111049fd 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -79,10 +79,6 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the UPB config flow.""" - self.importing = False - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -105,9 +101,6 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(network_id) self._abort_if_unique_id_configured() - if self.importing: - return self.async_create_entry(title=info["title"], data=user_input) - return self.async_create_entry( title=info["title"], data={ @@ -120,11 +113,6 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): - """Handle import.""" - self.importing = True - return await self.async_step_user(user_input) - def _url_already_configured(self, url): """See if we already have a UPB PIM matching user input configured.""" existing_hosts = { diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index 5f28f1d9b17..59a4e97d22b 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -114,42 +114,3 @@ async def test_form_user_with_already_configured(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" await hass.async_block_till_done() - - -async def test_form_import(hass: HomeAssistant) -> None: - """Test we get the form with import source.""" - - with ( - mocked_upb(), - patch( - "homeassistant.components.upb.async_setup_entry", return_value=True - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"host": "tcp://42.4.2.42", "file_path": "upb.upe"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "UPB" - - assert result["data"] == {"host": "tcp://42.4.2.42", "file_path": "upb.upe"} - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_junk_input(hass: HomeAssistant) -> None: - """Test we get the form with import source.""" - - with mocked_upb(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"foo": "goo", "goo": "foo"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} - - await hass.async_block_till_done() From d4ae592a85a0b3b631605dce27c0ceb34c14aa46 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:00:30 +0200 Subject: [PATCH 1152/1635] Improve config flow type hints in sms (#124352) * Improve config flow type hints in sms * Drop async_step_import --- homeassistant/components/sms/config_flow.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/sms/config_flow.py b/homeassistant/components/sms/config_flow.py index aec9674da9d..d2188a94632 100644 --- a/homeassistant/components/sms/config_flow.py +++ b/homeassistant/components/sms/config_flow.py @@ -1,11 +1,12 @@ """Config flow for SMS integration.""" import logging +from typing import Any import gammu import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from 
homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_DEVICE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -26,7 +27,7 @@ DATA_SCHEMA = vol.Schema( ) -async def get_imei_from_config(hass: HomeAssistant, data): +async def get_imei_from_config(hass: HomeAssistant, data: dict[str, Any]) -> str: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. @@ -56,7 +57,9 @@ class SMSFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -79,10 +82,6 @@ class SMSFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): - """Handle import.""" - return await self.async_step_user(user_input) - class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" From 51a5a78eb5c48af93525ffc9233d7abfbe8b4703 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:01:00 +0200 Subject: [PATCH 1153/1635] Standardize reauth step variable name in config flows (#124760) --- .../components/devolo_home_network/config_flow.py | 6 ++++-- homeassistant/components/ezviz/config_flow.py | 2 +- homeassistant/components/frontier_silicon/config_flow.py | 6 ++++-- homeassistant/components/hydrawise/config_flow.py | 2 +- homeassistant/components/justnimbus/config_flow.py | 2 +- homeassistant/components/jvc_projector/config_flow.py | 2 +- homeassistant/components/kitchen_sink/config_flow.py | 5 ++++- homeassistant/components/lidarr/config_flow.py | 2 +- homeassistant/components/mikrotik/config_flow.py | 4 +++- homeassistant/components/octoprint/config_flow.py | 8 +++++--- homeassistant/components/osoenergy/config_flow.py | 4 ++-- homeassistant/components/radarr/config_flow.py | 4 +++- homeassistant/components/simplisafe/config_flow.py | 4 +++- homeassistant/components/tailwind/config_flow.py | 4 +++- homeassistant/components/tessie/config_flow.py | 2 +- homeassistant/components/thethingsnetwork/config_flow.py | 2 +- homeassistant/components/tuya/config_flow.py | 4 +++- homeassistant/components/volvooncall/config_flow.py | 2 +- 18 files changed, 42 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/devolo_home_network/config_flow.py b/homeassistant/components/devolo_home_network/config_flow.py index 63d86d46e8a..fca72471693 100644 --- a/homeassistant/components/devolo_home_network/config_flow.py +++ b/homeassistant/components/devolo_home_network/config_flow.py @@ -112,10 +112,12 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={"host_name": title}, ) - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauthentication.""" if entry := self.hass.config_entries.async_get_entry(self.context["entry_id"]): - self.context[CONF_HOST] = data[CONF_IP_ADDRESS] + self.context[CONF_HOST] = entry_data[CONF_IP_ADDRESS] self.context["title_placeholders"][PRODUCT] = ( entry.runtime_data.device.product ) diff --git a/homeassistant/components/ezviz/config_flow.py 
b/homeassistant/components/ezviz/config_flow.py index 2b47b120cf8..66425c675cc 100644 --- a/homeassistant/components/ezviz/config_flow.py +++ b/homeassistant/components/ezviz/config_flow.py @@ -319,7 +319,7 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauthentication with password.""" diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index 103323ff575..8a3c5fe086f 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -172,9 +172,11 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): step_id="confirm", description_placeholders={"name": self._name} ) - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._webfsapi_url = config[CONF_WEBFSAPI_URL] + self._webfsapi_url = entry_data[CONF_WEBFSAPI_URL] self._reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index ab9ebbb065d..a5e7d616fcf 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -90,7 +90,7 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth after updating config to username/password.""" self.reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/justnimbus/config_flow.py b/homeassistant/components/justnimbus/config_flow.py index 0520c558266..8c816c1ac1b 100644 --- a/homeassistant/components/justnimbus/config_flow.py +++ b/homeassistant/components/justnimbus/config_flow.py @@ -77,7 +77,7 @@ class JustNimbusConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self.reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/jvc_projector/config_flow.py b/homeassistant/components/jvc_projector/config_flow.py index 7fbfb17a976..253aa640f71 100644 --- a/homeassistant/components/jvc_projector/config_flow.py +++ b/homeassistant/components/jvc_projector/config_flow.py @@ -74,7 +74,7 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth on password authentication error.""" self._reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index 2e05db71b89..9a0b78c80e6 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any import voluptuous as vol @@ -41,7 +42,9 @@ class 
KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title="Kitchen Sink", data=import_data) - async def async_step_reauth(self, data): + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Reauth step.""" return await self.async_step_reauth_confirm() diff --git a/homeassistant/components/lidarr/config_flow.py b/homeassistant/components/lidarr/config_flow.py index 05d6900bb41..bc7a40c976e 100644 --- a/homeassistant/components/lidarr/config_flow.py +++ b/homeassistant/components/lidarr/config_flow.py @@ -29,7 +29,7 @@ class LidarrConfigFlow(ConfigFlow, domain=DOMAIN): self.entry: LidarrConfigEntry | None = None async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) diff --git a/homeassistant/components/mikrotik/config_flow.py b/homeassistant/components/mikrotik/config_flow.py index fe0d020d373..6035565acf1 100644 --- a/homeassistant/components/mikrotik/config_flow.py +++ b/homeassistant/components/mikrotik/config_flow.py @@ -83,7 +83,9 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 8e4257ab821..706670738a6 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -215,13 +215,15 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauthorization request from Octoprint.""" - self._reauth_data = dict(config) + self._reauth_data = dict(entry_data) self.context.update( { - "title_placeholders": {CONF_HOST: config[CONF_HOST]}, + "title_placeholders": {CONF_HOST: entry_data[CONF_HOST]}, } ) diff --git a/homeassistant/components/osoenergy/config_flow.py b/homeassistant/components/osoenergy/config_flow.py index e0afc5292ae..0642250e9ed 100644 --- a/homeassistant/components/osoenergy/config_flow.py +++ b/homeassistant/components/osoenergy/config_flow.py @@ -69,9 +69,9 @@ class OSOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): return None async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Re Authenticate a user.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - data = {CONF_API_KEY: user_input[CONF_API_KEY]} + data = {CONF_API_KEY: entry_data[CONF_API_KEY]} return await self.async_step_user(data) diff --git a/homeassistant/components/radarr/config_flow.py b/homeassistant/components/radarr/config_flow.py index 3bf0796a9a8..c748c63e992 100644 --- a/homeassistant/components/radarr/config_flow.py +++ b/homeassistant/components/radarr/config_flow.py @@ -26,7 +26,9 @@ class RadarrConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 entry: RadarrConfigEntry | None = None - async def 
async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) diff --git a/homeassistant/components/simplisafe/config_flow.py b/homeassistant/components/simplisafe/config_flow.py index c0d98c5644f..6fdbd351a29 100644 --- a/homeassistant/components/simplisafe/config_flow.py +++ b/homeassistant/components/simplisafe/config_flow.py @@ -69,7 +69,9 @@ class SimpliSafeFlowHandler(ConfigFlow, domain=DOMAIN): """Define the config flow to handle options.""" return SimpliSafeOptionsFlowHandler(config_entry) - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self._reauth = True return await self.async_step_user() diff --git a/homeassistant/components/tailwind/config_flow.py b/homeassistant/components/tailwind/config_flow.py index 1cb94625266..13682a3e9c4 100644 --- a/homeassistant/components/tailwind/config_flow.py +++ b/homeassistant/components/tailwind/config_flow.py @@ -144,7 +144,9 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle initiation of re-authentication with a Tailwind device.""" self.reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/tessie/config_flow.py b/homeassistant/components/tessie/config_flow.py index 1cbc070e463..bee518ce95f 100644 --- a/homeassistant/components/tessie/config_flow.py +++ b/homeassistant/components/tessie/config_flow.py @@ -66,7 +66,7 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth.""" self._reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/thethingsnetwork/config_flow.py b/homeassistant/components/thethingsnetwork/config_flow.py index cbb780e7064..7480e4cb1d9 100644 --- a/homeassistant/components/thethingsnetwork/config_flow.py +++ b/homeassistant/components/thethingsnetwork/config_flow.py @@ -89,7 +89,7 @@ class TTNFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" diff --git a/homeassistant/components/tuya/config_flow.py b/homeassistant/components/tuya/config_flow.py index bdef321de7a..104c3b7c9fa 100644 --- a/homeassistant/components/tuya/config_flow.py +++ b/homeassistant/components/tuya/config_flow.py @@ -146,7 +146,9 @@ class TuyaConfigFlow(ConfigFlow, domain=DOMAIN): data=entry_data, ) - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle initiation of re-authentication with Tuya.""" self.__reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/volvooncall/config_flow.py 
b/homeassistant/components/volvooncall/config_flow.py index 80358a28ced..b3a1745351b 100644 --- a/homeassistant/components/volvooncall/config_flow.py +++ b/homeassistant/components/volvooncall/config_flow.py @@ -107,7 +107,7 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._reauth_entry = self.hass.config_entries.async_get_entry( From 274d98f4d73712dac2b5df4bf025a1a322cf17a5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:02:21 +0200 Subject: [PATCH 1154/1635] Standardize reauth step variable name in permobil (#124764) --- homeassistant/components/permobil/config_flow.py | 11 +++-------- tests/components/permobil/test_config_flow.py | 3 +++ 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/permobil/config_flow.py b/homeassistant/components/permobil/config_flow.py index cb47640e55f..f7f247a412e 100644 --- a/homeassistant/components/permobil/config_flow.py +++ b/homeassistant/components/permobil/config_flow.py @@ -161,17 +161,12 @@ class PermobilConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=self.data[CONF_EMAIL], data=self.data) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert reauth_entry - try: - email: str = reauth_entry.data[CONF_EMAIL] - region: str = reauth_entry.data[CONF_REGION] + email: str = entry_data[CONF_EMAIL] + region: str = entry_data[CONF_REGION] self.p_api.set_email(email) self.p_api.set_region(region) self.data = { diff --git a/tests/components/permobil/test_config_flow.py b/tests/components/permobil/test_config_flow.py index ea39e678459..4474340f811 100644 --- a/tests/components/permobil/test_config_flow.py +++ b/tests/components/permobil/test_config_flow.py @@ -287,6 +287,7 @@ async def test_config_flow_reauth_success( result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "reauth", "entry_id": mock_entry.entry_id}, + data=mock_entry.data, ) assert result["type"] is FlowResultType.FORM @@ -329,6 +330,7 @@ async def test_config_flow_reauth_fail_invalid_code( result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "reauth", "entry_id": mock_entry.entry_id}, + data=mock_entry.data, ) assert result["type"] is FlowResultType.FORM @@ -366,6 +368,7 @@ async def test_config_flow_reauth_fail_code_request( result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "reauth", "entry_id": reauth_entry.entry_id}, + data=mock_entry.data, ) assert result["type"] is FlowResultType.ABORT From 14eec2e57ab43b39df072b1de3d72cae66d89f2b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:08:26 +0200 Subject: [PATCH 1155/1635] Add start_reauth helper method to MockConfigEntry (#124767) * Add start_reauth helper method to MockConfigEntry * Two more --- tests/common.py | 15 ++++++++++- tests/components/abode/test_config_flow.py | 13 ++++----- .../components/airvisual/test_config_flow.py | 10 +++---- .../airvisual_pro/test_config_flow.py | 16 ++++------- 
tests/components/august/test_config_flow.py | 8 ++---- tests/components/awair/test_config_flow.py | 14 +++------- tests/components/axis/test_config_flow.py | 8 +----- .../azure_devops/test_config_flow.py | 27 +++++-------------- 8 files changed, 41 insertions(+), 70 deletions(-) diff --git a/tests/common.py b/tests/common.py index 893c9ffcd67..523bd12d590 100644 --- a/tests/common.py +++ b/tests/common.py @@ -47,7 +47,7 @@ from homeassistant.components.device_automation import ( # noqa: F401 _async_get_device_automation_capabilities as async_get_device_automation_capabilities, ) from homeassistant.config import IntegrationConfigInfo, async_process_component_config -from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import ( DEVICE_DEFAULT_NAME, EVENT_HOMEASSISTANT_CLOSE, @@ -1054,6 +1054,19 @@ class MockConfigEntry(config_entries.ConfigEntry): """ self._async_set_state(hass, state, reason) + async def start_reauth_flow(self, hass: HomeAssistant) -> ConfigFlowResult: + """Start a reauthentication flow.""" + return await hass.config_entries.flow.async_init( + self.domain, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": self.entry_id, + "title_placeholders": {"name": self.title}, + "unique_id": self.unique_id, + }, + data=self.data, + ) + def patch_yaml_files(files_dict, endswith=True): """Patch load_yaml with a dictionary of yaml files.""" diff --git a/tests/components/abode/test_config_flow.py b/tests/components/abode/test_config_flow.py index 265a77560f7..a37fb8cbe33 100644 --- a/tests/components/abode/test_config_flow.py +++ b/tests/components/abode/test_config_flow.py @@ -12,7 +12,7 @@ from requests.exceptions import ConnectTimeout from homeassistant.components.abode import config_flow from homeassistant.components.abode.const import CONF_POLLING, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -161,18 +161,15 @@ async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth flow.""" conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id="user@email.com", data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) with patch("homeassistant.components.abode.config_flow.Abode"): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=conf, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airvisual/test_config_flow.py b/tests/components/airvisual/test_config_flow.py index b9643b17c07..e38fc64587e 100644 --- a/tests/components/airvisual/test_config_flow.py +++ b/tests/components/airvisual/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.airvisual import ( INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SHOW_ON_MAP from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,6 +33,8 
@@ from .conftest import ( TEST_STATE, ) +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -146,12 +148,10 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config_entry, setup_config_entry + hass: HomeAssistant, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config_entry.data - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airvisual_pro/test_config_flow.py b/tests/components/airvisual_pro/test_config_flow.py index 803a335f52c..9298b8cf528 100644 --- a/tests/components/airvisual_pro/test_config_flow.py +++ b/tests/components/airvisual_pro/test_config_flow.py @@ -10,11 +10,13 @@ from pyairvisual.node import ( import pytest from homeassistant.components.airvisual_pro.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -98,22 +100,14 @@ async def test_step_import(hass: HomeAssistant, config, setup_airvisual_pro) -> async def test_reauth( hass: HomeAssistant, config, - config_entry, + config_entry: MockConfigEntry, connect_errors, connect_mock, pro, setup_airvisual_pro, ) -> None: """Test re-auth (including errors).""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=config, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index fdebb8d5c46..e0ccee55f10 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -248,9 +248,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -294,9 +292,7 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/awair/test_config_flow.py b/tests/components/awair/test_config_flow.py index ab9f5faa425..ac17cf41448 100644 --- a/tests/components/awair/test_config_flow.py +++ b/tests/components/awair/test_config_flow.py @@ -7,7 +7,7 @@ from aiohttp.client_exceptions import ClientConnectorError from python_awair.exceptions import AuthError, AwairError from homeassistant.components.awair.const import DOMAIN -from 
homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -136,11 +136,7 @@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, - data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -180,11 +176,7 @@ async def test_reauth_error(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, - data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 5ceb6588fbd..8591b4583c1 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -17,7 +17,6 @@ from homeassistant.components.axis.const import ( ) from homeassistant.config_entries import ( SOURCE_DHCP, - SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER, @@ -205,12 +204,7 @@ async def test_reauth_flow_update_configuration( assert config_entry_setup.data[CONF_USERNAME] == "root" assert config_entry_setup.data[CONF_PASSWORD] == "pass" - result = await hass.config_entries.flow.async_init( - AXIS_DOMAIN, - context={"source": SOURCE_REAUTH}, - data=config_entry_setup.data, - ) - + result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/azure_devops/test_config_flow.py b/tests/components/azure_devops/test_config_flow.py index 45dc10802b9..9ebc9991939 100644 --- a/tests/components/azure_devops/test_config_flow.py +++ b/tests/components/azure_devops/test_config_flow.py @@ -53,18 +53,14 @@ async def test_authorization_error( async def test_reauth_authorization_error( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps authorization error.""" mock_devops_client.authorize.return_value = False mock_devops_client.authorized = False - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -108,17 +104,14 @@ async def test_connection_error( async def test_reauth_connection_error( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps connection error.""" mock_devops_client.authorize.side_effect = aiohttp.ClientError mock_devops_client.authorized = False - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - 
data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -174,11 +167,7 @@ async def test_reauth_project_error( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -205,11 +194,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" From 45eebf32856d521dc70741def9bfd69b4bafb6a8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:09:21 +0200 Subject: [PATCH 1156/1635] Use reauth_confirm in sharkiq (#124762) --- .../components/sharkiq/config_flow.py | 10 ++++- homeassistant/components/sharkiq/strings.json | 2 +- tests/components/sharkiq/test_config_flow.py | 37 +++++++++++-------- 3 files changed, 31 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/sharkiq/config_flow.py b/homeassistant/components/sharkiq/config_flow.py index 492b8f2a365..87367fcf093 100644 --- a/homeassistant/components/sharkiq/config_flow.py +++ b/homeassistant/components/sharkiq/config_flow.py @@ -116,9 +116,15 @@ class SharkIqConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth if login is invalid.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by reauthentication.""" errors: dict[str, str] = {} if user_input is not None: @@ -134,7 +140,7 @@ class SharkIqConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason=errors["base"]) return self.async_show_form( - step_id="reauth", + step_id="reauth_confirm", data_schema=SHARKIQ_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/sharkiq/strings.json b/homeassistant/components/sharkiq/strings.json index 63d4f6af48b..40b569e13b7 100644 --- a/homeassistant/components/sharkiq/strings.json +++ b/homeassistant/components/sharkiq/strings.json @@ -13,7 +13,7 @@ "region": "Shark IQ uses different services in the EU. Select your region to connect to the correct service for your account." 
} }, - "reauth": { + "reauth_confirm": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", diff --git a/tests/components/sharkiq/test_config_flow.py b/tests/components/sharkiq/test_config_flow.py index cf75bff1686..ae037834c57 100644 --- a/tests/components/sharkiq/test_config_flow.py +++ b/tests/components/sharkiq/test_config_flow.py @@ -96,18 +96,22 @@ async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str) async def test_reauth_success(hass: HomeAssistant) -> None: """Test reauth flow.""" - with patch("sharkiq.AylaApi.async_sign_in", return_value=True): - mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) - mock_config.add_to_hass(hass) + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, + data=mock_config.data, + ) + + with patch("sharkiq.AylaApi.async_sign_in", return_value=True): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=CONFIG ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" @pytest.mark.parametrize( @@ -127,13 +131,16 @@ async def test_reauth( msg: str, ) -> None: """Test reauth failures.""" - with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, - ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, + data=CONFIG, + ) + with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=CONFIG + ) msg_value = result[msg_field] if msg_field == "errors": msg_value = msg_value.get("base") From 163795e73a4180671cf8cf0776c1c0de7074a834 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:09:45 +0200 Subject: [PATCH 1157/1635] Use reauth_confirm in weatherflow_cloud (#124761) --- .../components/weatherflow_cloud/config_flow.py | 10 ++++++++-- .../components/weatherflow_cloud/strings.json | 2 +- .../weatherflow_cloud/test_config_flow.py | 13 ++++++++----- 3 files changed, 17 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index e8972c320ed..cbb83b6f25b 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -33,9 +33,15 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow 
initiated by reauthentication.""" errors = {} if user_input is not None: @@ -54,7 +60,7 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="reauth", + step_id="reauth_confirm", data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}), errors=errors, ) diff --git a/homeassistant/components/weatherflow_cloud/strings.json b/homeassistant/components/weatherflow_cloud/strings.json index df561c8b753..f707cbb0353 100644 --- a/homeassistant/components/weatherflow_cloud/strings.json +++ b/homeassistant/components/weatherflow_cloud/strings.json @@ -7,7 +7,7 @@ "api_token": "Personal api token" } }, - "reauth": { + "reauth_confirm": { "description": "Reauthenticate with WeatherFlow", "data": { "api_token": "[%key:component::weatherflow_cloud::config::step::user::data::api_token%]" diff --git a/tests/components/weatherflow_cloud/test_config_flow.py b/tests/components/weatherflow_cloud/test_config_flow.py index 7ade007ceac..3a1f41563fe 100644 --- a/tests/components/weatherflow_cloud/test_config_flow.py +++ b/tests/components/weatherflow_cloud/test_config_flow.py @@ -111,15 +111,18 @@ async def test_reauth(hass: HomeAssistant, mock_get_stations_401_error) -> None: assert not await hass.config_entries.async_setup(entry.entry_id) assert entry.state is ConfigEntryState.SETUP_ERROR - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, data=None - ) - assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data={CONF_API_TOKEN: "SAME_SAME"}, + data=entry.data, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "SAME_SAME"} ) assert result["reason"] == "reauth_successful" assert result["type"] is FlowResultType.ABORT + assert entry.data[CONF_API_TOKEN] == "SAME_SAME" From 633ff0ea42a327f38e54959c972bcf148dc4062c Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 28 Aug 2024 13:14:34 +0200 Subject: [PATCH 1158/1635] Handle KNX expose conversion exceptions and unavailable states (#124776) --- homeassistant/components/knx/expose.py | 24 +++++++++++++++------ tests/components/knx/test_expose.py | 29 ++++++++++++++++++++++---- 2 files changed, 43 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/knx/expose.py b/homeassistant/components/knx/expose.py index 921af6ba4a9..82bee48ba69 100644 --- a/homeassistant/components/knx/expose.py +++ b/homeassistant/components/knx/expose.py @@ -125,6 +125,8 @@ class KNXExposeSensor: def _get_expose_value(self, state: State | None) -> bool | int | float | str | None: """Extract value from state.""" if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + if self.expose_default is None: + return None value = self.expose_default elif self.expose_attribute is not None: _attr = state.attributes.get(self.expose_attribute) @@ -154,12 +156,22 @@ class KNXExposeSensor: if value is not None and ( isinstance(self.device.sensor_value, RemoteValueSensor) ): - if issubclass(self.device.sensor_value.dpt_class, DPTNumeric): - return float(value) - if issubclass(self.device.sensor_value.dpt_class, DPTString): - # DPT 16.000 only allows up to 14 Bytes - return str(value)[:14] - return value + try: + if issubclass(self.device.sensor_value.dpt_class, DPTNumeric): 
+ return float(value) + if issubclass(self.device.sensor_value.dpt_class, DPTString): + # DPT 16.000 only allows up to 14 Bytes + return str(value)[:14] + except (ValueError, TypeError) as err: + _LOGGER.warning( + 'Could not expose %s %s value "%s" to KNX: Conversion failed: %s', + self.entity_id, + self.expose_attribute or "state", + value, + err, + ) + return None + return value # type: ignore[no-any-return] async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None: """Handle entity change.""" diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index c4d0acf0ce2..0fd790a3e33 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -108,6 +108,11 @@ async def test_expose_attribute(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_telegram_count(0) + # Ignore "unavailable" state + hass.states.async_set(entity_id, "unavailable", {attribute: None}) + await hass.async_block_till_done() + await knx.assert_telegram_count(0) + async def test_expose_attribute_with_default( hass: HomeAssistant, knx: KNXTestKit @@ -131,7 +136,7 @@ async def test_expose_attribute_with_default( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (0,)) - # Change state to "on"; no attribute + # Change state to "on"; no attribute -> default hass.states.async_set(entity_id, "on", {}) await hass.async_block_till_done() await knx.assert_write("1/1/8", (0,)) @@ -146,6 +151,11 @@ async def test_expose_attribute_with_default( await hass.async_block_till_done() await knx.assert_no_telegram() + # Use default for "unavailable" state + hass.states.async_set(entity_id, "unavailable") + await hass.async_block_till_done() + await knx.assert_write("1/1/8", (0,)) + # Change state and attribute hass.states.async_set(entity_id, "on", {attribute: 3}) await hass.async_block_till_done() @@ -290,8 +300,18 @@ async def test_expose_value_template( assert "Error rendering value template for KNX expose" in caplog.text +@pytest.mark.parametrize( + "invalid_attribute", + [ + 101.0, + "invalid", # can't cast to float + ], +) async def test_expose_conversion_exception( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, knx: KNXTestKit + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + knx: KNXTestKit, + invalid_attribute: str, ) -> None: """Test expose throws exception.""" @@ -313,16 +333,17 @@ async def test_expose_conversion_exception( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (3,)) + caplog.clear() # Change attribute: Expect no exception hass.states.async_set( entity_id, "on", - {attribute: 101}, + {attribute: invalid_attribute}, ) await hass.async_block_till_done() await knx.assert_no_telegram() assert ( - 'Could not expose fake.entity fake_attribute value "101.0" to KNX:' + f'Could not expose fake.entity fake_attribute value "{invalid_attribute}" to KNX:' in caplog.text ) From a0089685dd2f98f21a0965b07fff957e8f6dfe41 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:19:58 +0200 Subject: [PATCH 1159/1635] Simplify aussie_broadband reauth flow (#124774) * Simplify aussie_broadband config flow * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * Update config_flow.py * Update config_flow.py --------- Co-authored-by: Joost Lekkerkerker --- .../aussie_broadband/config_flow.py | 14 +++---- .../aussie_broadband/test_config_flow.py | 39 ++++++------------- 2 files changed, 16 
insertions(+), 37 deletions(-) diff --git a/homeassistant/components/aussie_broadband/config_flow.py b/homeassistant/components/aussie_broadband/config_flow.py index b434ab69ae5..65507d57e8b 100644 --- a/homeassistant/components/aussie_broadband/config_flow.py +++ b/homeassistant/components/aussie_broadband/config_flow.py @@ -99,15 +99,11 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN): } if not (errors := await self.async_auth(data)): - entry = await self.async_set_unique_id(self._reauth_username.lower()) - if entry: - self.hass.config_entries.async_update_entry( - entry, - data=data, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry(title=self._reauth_username, data=data) + entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + assert entry + return self.async_update_reload_and_abort(entry, data=data) return self.async_show_form( step_id="reauth_confirm", diff --git a/tests/components/aussie_broadband/test_config_flow.py b/tests/components/aussie_broadband/test_config_flow.py index 6ee674ab0f4..b79ed41b251 100644 --- a/tests/components/aussie_broadband/test_config_flow.py +++ b/tests/components/aussie_broadband/test_config_flow.py @@ -13,6 +13,8 @@ from homeassistant.data_entry_flow import FlowResultType from .common import FAKE_DATA, FAKE_SERVICES +from tests.common import MockConfigEntry + TEST_USERNAME = FAKE_DATA[CONF_USERNAME] TEST_PASSWORD = FAKE_DATA[CONF_PASSWORD] @@ -163,39 +165,20 @@ async def test_form_network_issue(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant) -> None: """Test reauth flow.""" - - # Test reauth but the entry doesn't exist - result1 = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FAKE_DATA + mock_entry = MockConfigEntry( + domain=DOMAIN, + data=FAKE_DATA, + unique_id=FAKE_DATA[CONF_USERNAME], ) - - with ( - patch("aussiebb.asyncio.AussieBB.__init__", return_value=None), - patch("aussiebb.asyncio.AussieBB.login", return_value=True), - patch( - "aussiebb.asyncio.AussieBB.get_services", return_value=[FAKE_SERVICES[0]] - ), - patch( - "homeassistant.components.aussie_broadband.async_setup_entry", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], - { - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == TEST_USERNAME - assert result2["data"] == FAKE_DATA + mock_entry.add_to_hass(hass) # Test failed reauth result5 = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": mock_entry.entry_id, + }, data=FAKE_DATA, ) assert result5["step_id"] == "reauth_confirm" From a0ffa69b49cdc2c57034c1640ca292e4ce4d9b12 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 13:29:18 +0200 Subject: [PATCH 1160/1635] Standardize reauth step variable name in aseko_pool_live (#124765) --- homeassistant/components/aseko_pool_live/config_flow.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aseko_pool_live/config_flow.py b/homeassistant/components/aseko_pool_live/config_flow.py index cd2f0e4ac7f..ce6de3683d5 100644 --- a/homeassistant/components/aseko_pool_live/config_flow.py 
+++ b/homeassistant/components/aseko_pool_live/config_flow.py @@ -101,7 +101,7 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" @@ -109,10 +109,10 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): self.context["entry_id"] ) - return await self.async_step_reauth_confirm(user_input) + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( - self, user_input: Mapping | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" From fce2e21c9fb3e2749568499c917d4556dacf556d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 13:47:02 +0200 Subject: [PATCH 1161/1635] Update icons.json to new service schema part 1 (#124768) --- homeassistant/components/abode/icons.json | 12 +++-- homeassistant/components/adguard/icons.json | 20 +++++-- homeassistant/components/ads/icons.json | 4 +- .../components/advantage_air/icons.json | 4 +- homeassistant/components/aftership/icons.json | 8 ++- homeassistant/components/agent_dvr/icons.json | 20 +++++-- .../components/alarm_control_panel/icons.json | 28 +++++++--- .../components/alarmdecoder/icons.json | 8 ++- homeassistant/components/alert/icons.json | 12 +++-- homeassistant/components/amcrest/icons.json | 44 ++++++++++++---- homeassistant/components/androidtv/icons.json | 16 ++++-- .../components/automation/icons.json | 20 +++++-- homeassistant/components/backup/icons.json | 4 +- homeassistant/components/bayesian/icons.json | 4 +- homeassistant/components/blackbird/icons.json | 4 +- homeassistant/components/blink/icons.json | 20 +++++-- homeassistant/components/bluesound/icons.json | 16 ++++-- .../components/bluetooth_tracker/icons.json | 4 +- homeassistant/components/bond/icons.json | 28 +++++++--- homeassistant/components/bring/icons.json | 4 +- homeassistant/components/browser/icons.json | 4 +- homeassistant/components/button/icons.json | 4 +- homeassistant/components/calendar/icons.json | 12 +++-- homeassistant/components/camera/icons.json | 28 +++++++--- homeassistant/components/cast/icons.json | 4 +- homeassistant/components/channels/icons.json | 12 +++-- homeassistant/components/climate/icons.json | 40 ++++++++++---- homeassistant/components/cloud/icons.json | 8 ++- .../components/cloudflare/icons.json | 4 +- .../components/color_extractor/icons.json | 4 +- .../components/command_line/icons.json | 4 +- .../components/conversation/icons.json | 8 ++- homeassistant/components/counter/icons.json | 16 ++++-- homeassistant/components/cover/icons.json | 40 ++++++++++---- homeassistant/components/date/icons.json | 4 +- homeassistant/components/datetime/icons.json | 4 +- homeassistant/components/debugpy/icons.json | 4 +- homeassistant/components/deconz/icons.json | 12 +++-- homeassistant/components/demo/icons.json | 4 +- homeassistant/components/denonavr/icons.json | 12 +++-- .../components/device_tracker/icons.json | 4 +- homeassistant/components/dominos/icons.json | 4 +- .../components/downloader/icons.json | 4 +- homeassistant/components/duckdns/icons.json | 4 +- homeassistant/components/dynalite/icons.json | 8 ++- .../components/easyenergy/icons.json | 12 +++-- homeassistant/components/ebusd/icons.json | 4 +- homeassistant/components/ecobee/icons.json | 28 +++++++--- homeassistant/components/ecovacs/icons.json | 4 +- 
homeassistant/components/elgato/icons.json | 4 +- homeassistant/components/elkm1/icons.json | 52 ++++++++++++++----- .../components/energyzero/icons.json | 8 ++- .../components/environment_canada/icons.json | 4 +- .../components/envisalink/icons.json | 8 ++- homeassistant/components/epson/icons.json | 4 +- homeassistant/components/evohome/icons.json | 20 +++++-- homeassistant/components/ezviz/icons.json | 8 ++- homeassistant/components/fan/icons.json | 36 +++++++++---- homeassistant/components/ffmpeg/icons.json | 12 +++-- homeassistant/components/filter/icons.json | 4 +- homeassistant/components/flo/icons.json | 16 ++++-- homeassistant/components/flume/icons.json | 4 +- homeassistant/components/flux_led/icons.json | 12 +++-- homeassistant/components/foscam/icons.json | 8 ++- .../components/foursquare/icons.json | 4 +- homeassistant/components/freebox/icons.json | 4 +- homeassistant/components/fritz/icons.json | 16 ++++-- homeassistant/components/frontend/icons.json | 8 ++- .../components/fully_kiosk/icons.json | 12 +++-- homeassistant/components/generic/icons.json | 4 +- 70 files changed, 621 insertions(+), 207 deletions(-) diff --git a/homeassistant/components/abode/icons.json b/homeassistant/components/abode/icons.json index 00175628d9a..4ce4e55cab6 100644 --- a/homeassistant/components/abode/icons.json +++ b/homeassistant/components/abode/icons.json @@ -7,8 +7,14 @@ } }, "services": { - "capture_image": "mdi:camera", - "change_setting": "mdi:cog", - "trigger_automation": "mdi:play" + "capture_image": { + "service": "mdi:camera" + }, + "change_setting": { + "service": "mdi:cog" + }, + "trigger_automation": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/adguard/icons.json b/homeassistant/components/adguard/icons.json index 9c5df8a4a45..18527c0ed98 100644 --- a/homeassistant/components/adguard/icons.json +++ b/homeassistant/components/adguard/icons.json @@ -66,10 +66,20 @@ } }, "services": { - "add_url": "mdi:link-plus", - "remove_url": "mdi:link-off", - "enable_url": "mdi:link-variant", - "disable_url": "mdi:link-variant-off", - "refresh": "mdi:refresh" + "add_url": { + "service": "mdi:link-plus" + }, + "remove_url": { + "service": "mdi:link-off" + }, + "enable_url": { + "service": "mdi:link-variant" + }, + "disable_url": { + "service": "mdi:link-variant-off" + }, + "refresh": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/ads/icons.json b/homeassistant/components/ads/icons.json index 5ab8041fe9b..3732f16bf1a 100644 --- a/homeassistant/components/ads/icons.json +++ b/homeassistant/components/ads/icons.json @@ -1,5 +1,7 @@ { "services": { - "write_data_by_name": "mdi:pencil" + "write_data_by_name": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/advantage_air/icons.json b/homeassistant/components/advantage_air/icons.json index a4168f440cf..8651c9d9eaf 100644 --- a/homeassistant/components/advantage_air/icons.json +++ b/homeassistant/components/advantage_air/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_time_to": "mdi:timer-cog" + "set_time_to": { + "service": "mdi:timer-cog" + } } } diff --git a/homeassistant/components/aftership/icons.json b/homeassistant/components/aftership/icons.json index 1222ab0873d..105d3cef3ec 100644 --- a/homeassistant/components/aftership/icons.json +++ b/homeassistant/components/aftership/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "add_tracking": "mdi:package-variant-plus", - "remove_tracking": "mdi:package-variant-minus" + "add_tracking": { + "service": 
"mdi:package-variant-plus" + }, + "remove_tracking": { + "service": "mdi:package-variant-minus" + } } } diff --git a/homeassistant/components/agent_dvr/icons.json b/homeassistant/components/agent_dvr/icons.json index 6550d01641e..7dfb4a847f6 100644 --- a/homeassistant/components/agent_dvr/icons.json +++ b/homeassistant/components/agent_dvr/icons.json @@ -1,9 +1,19 @@ { "services": { - "start_recording": "mdi:record-rec", - "stop_recording": "mdi:stop", - "enable_alerts": "mdi:bell-alert", - "disable_alerts": "mdi:bell-off", - "snapshot": "mdi:camera" + "start_recording": { + "service": "mdi:record-rec" + }, + "stop_recording": { + "service": "mdi:stop" + }, + "enable_alerts": { + "service": "mdi:bell-alert" + }, + "disable_alerts": { + "service": "mdi:bell-off" + }, + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/alarm_control_panel/icons.json b/homeassistant/components/alarm_control_panel/icons.json index 915448a9962..0295699bae9 100644 --- a/homeassistant/components/alarm_control_panel/icons.json +++ b/homeassistant/components/alarm_control_panel/icons.json @@ -15,12 +15,26 @@ } }, "services": { - "alarm_arm_away": "mdi:shield-lock", - "alarm_arm_home": "mdi:shield-home", - "alarm_arm_night": "mdi:shield-moon", - "alarm_arm_custom_bypass": "mdi:security", - "alarm_disarm": "mdi:shield-off", - "alarm_trigger": "mdi:bell-ring", - "alarm_arm_vacation": "mdi:shield-airplane" + "alarm_arm_away": { + "service": "mdi:shield-lock" + }, + "alarm_arm_home": { + "service": "mdi:shield-home" + }, + "alarm_arm_night": { + "service": "mdi:shield-moon" + }, + "alarm_arm_custom_bypass": { + "service": "mdi:security" + }, + "alarm_disarm": { + "service": "mdi:shield-off" + }, + "alarm_trigger": { + "service": "mdi:bell-ring" + }, + "alarm_arm_vacation": { + "service": "mdi:shield-airplane" + } } } diff --git a/homeassistant/components/alarmdecoder/icons.json b/homeassistant/components/alarmdecoder/icons.json index 80835a049c8..ccb89749d2d 100644 --- a/homeassistant/components/alarmdecoder/icons.json +++ b/homeassistant/components/alarmdecoder/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "alarm_keypress": "mdi:dialpad", - "alarm_toggle_chime": "mdi:abc" + "alarm_keypress": { + "service": "mdi:dialpad" + }, + "alarm_toggle_chime": { + "service": "mdi:abc" + } } } diff --git a/homeassistant/components/alert/icons.json b/homeassistant/components/alert/icons.json index 7f5258706d2..5d8613ec592 100644 --- a/homeassistant/components/alert/icons.json +++ b/homeassistant/components/alert/icons.json @@ -1,7 +1,13 @@ { "services": { - "toggle": "mdi:bell-ring", - "turn_off": "mdi:bell-off", - "turn_on": "mdi:bell-alert" + "toggle": { + "service": "mdi:bell-ring" + }, + "turn_off": { + "service": "mdi:bell-off" + }, + "turn_on": { + "service": "mdi:bell-alert" + } } } diff --git a/homeassistant/components/amcrest/icons.json b/homeassistant/components/amcrest/icons.json index efba49d6b56..e284bc15259 100644 --- a/homeassistant/components/amcrest/icons.json +++ b/homeassistant/components/amcrest/icons.json @@ -1,15 +1,37 @@ { "services": { - "enable_recording": "mdi:record-rec", - "disable_recording": "mdi:stop", - "enable_audio": "mdi:volume-high", - "disable_audio": "mdi:volume-off", - "enable_motion_recording": "mdi:motion-sensor", - "disable_motion_recording": "mdi:motion-sensor-off", - "goto_preset": "mdi:pan", - "set_color_bw": "mdi:palette", - "start_tour": "mdi:panorama", - "stop_tour": "mdi:panorama-outline", - "ptz_control": "mdi:pan" + "enable_recording": { + 
"service": "mdi:record-rec" + }, + "disable_recording": { + "service": "mdi:stop" + }, + "enable_audio": { + "service": "mdi:volume-high" + }, + "disable_audio": { + "service": "mdi:volume-off" + }, + "enable_motion_recording": { + "service": "mdi:motion-sensor" + }, + "disable_motion_recording": { + "service": "mdi:motion-sensor-off" + }, + "goto_preset": { + "service": "mdi:pan" + }, + "set_color_bw": { + "service": "mdi:palette" + }, + "start_tour": { + "service": "mdi:panorama" + }, + "stop_tour": { + "service": "mdi:panorama-outline" + }, + "ptz_control": { + "service": "mdi:pan" + } } } diff --git a/homeassistant/components/androidtv/icons.json b/homeassistant/components/androidtv/icons.json index 0127d60a72e..d7c646dfdfc 100644 --- a/homeassistant/components/androidtv/icons.json +++ b/homeassistant/components/androidtv/icons.json @@ -1,8 +1,16 @@ { "services": { - "adb_command": "mdi:console", - "download": "mdi:download", - "upload": "mdi:upload", - "learn_sendevent": "mdi:remote" + "adb_command": { + "service": "mdi:console" + }, + "download": { + "service": "mdi:download" + }, + "upload": { + "service": "mdi:upload" + }, + "learn_sendevent": { + "service": "mdi:remote" + } } } diff --git a/homeassistant/components/automation/icons.json b/homeassistant/components/automation/icons.json index 9b68825ffd1..f1e0f26ef65 100644 --- a/homeassistant/components/automation/icons.json +++ b/homeassistant/components/automation/icons.json @@ -9,10 +9,20 @@ } }, "services": { - "turn_on": "mdi:robot", - "turn_off": "mdi:robot-off", - "toggle": "mdi:robot", - "trigger": "mdi:robot", - "reload": "mdi:reload" + "turn_on": { + "service": "mdi:robot" + }, + "turn_off": { + "service": "mdi:robot-off" + }, + "toggle": { + "service": "mdi:robot" + }, + "trigger": { + "service": "mdi:robot" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/backup/icons.json b/homeassistant/components/backup/icons.json index cba4fb22831..bd5ff4a81ee 100644 --- a/homeassistant/components/backup/icons.json +++ b/homeassistant/components/backup/icons.json @@ -1,5 +1,7 @@ { "services": { - "create": "mdi:cloud-upload" + "create": { + "service": "mdi:cloud-upload" + } } } diff --git a/homeassistant/components/bayesian/icons.json b/homeassistant/components/bayesian/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/bayesian/icons.json +++ b/homeassistant/components/bayesian/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/blackbird/icons.json b/homeassistant/components/blackbird/icons.json index f080fb5f857..815a45ba174 100644 --- a/homeassistant/components/blackbird/icons.json +++ b/homeassistant/components/blackbird/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_all_zones": "mdi:home-sound-in" + "set_all_zones": { + "service": "mdi:home-sound-in" + } } } diff --git a/homeassistant/components/blink/icons.json b/homeassistant/components/blink/icons.json index 615a3c4c6dc..bea67b25f6d 100644 --- a/homeassistant/components/blink/icons.json +++ b/homeassistant/components/blink/icons.json @@ -12,10 +12,20 @@ } }, "services": { - "record": "mdi:video-box", - "trigger_camera": "mdi:image-refresh", - "save_video": "mdi:file-video", - "save_recent_clips": "mdi:file-video", - "send_pin": "mdi:two-factor-authentication" + "record": { + "service": "mdi:video-box" + }, + "trigger_camera": { + "service": "mdi:image-refresh" + }, + "save_video": { + "service": 
"mdi:file-video" + }, + "save_recent_clips": { + "service": "mdi:file-video" + }, + "send_pin": { + "service": "mdi:two-factor-authentication" + } } } diff --git a/homeassistant/components/bluesound/icons.json b/homeassistant/components/bluesound/icons.json index 8c886f12dfd..2c5e95291c1 100644 --- a/homeassistant/components/bluesound/icons.json +++ b/homeassistant/components/bluesound/icons.json @@ -1,8 +1,16 @@ { "services": { - "join": "mdi:link-variant", - "unjoin": "mdi:link-variant-off", - "set_sleep_timer": "mdi:sleep", - "clear_sleep_timer": "mdi:sleep-off" + "join": { + "service": "mdi:link-variant" + }, + "unjoin": { + "service": "mdi:link-variant-off" + }, + "set_sleep_timer": { + "service": "mdi:sleep" + }, + "clear_sleep_timer": { + "service": "mdi:sleep-off" + } } } diff --git a/homeassistant/components/bluetooth_tracker/icons.json b/homeassistant/components/bluetooth_tracker/icons.json index 650bf0b6d19..217f1240893 100644 --- a/homeassistant/components/bluetooth_tracker/icons.json +++ b/homeassistant/components/bluetooth_tracker/icons.json @@ -1,5 +1,7 @@ { "services": { - "update": "mdi:update" + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/bond/icons.json b/homeassistant/components/bond/icons.json index 35743d20e65..48b351b1c76 100644 --- a/homeassistant/components/bond/icons.json +++ b/homeassistant/components/bond/icons.json @@ -96,12 +96,26 @@ } }, "services": { - "set_fan_speed_tracked_state": "mdi:fan", - "set_switch_power_tracked_state": "mdi:toggle-switch-variant", - "set_light_power_tracked_state": "mdi:lightbulb", - "set_light_brightness_tracked_state": "mdi:lightbulb-on", - "start_increasing_brightness": "mdi:brightness-7", - "start_decreasing_brightness": "mdi:brightness-1", - "stop": "mdi:stop" + "set_fan_speed_tracked_state": { + "service": "mdi:fan" + }, + "set_switch_power_tracked_state": { + "service": "mdi:toggle-switch-variant" + }, + "set_light_power_tracked_state": { + "service": "mdi:lightbulb" + }, + "set_light_brightness_tracked_state": { + "service": "mdi:lightbulb-on" + }, + "start_increasing_brightness": { + "service": "mdi:brightness-7" + }, + "start_decreasing_brightness": { + "service": "mdi:brightness-1" + }, + "stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index 1c6c3bdeca0..6b79fab3c94 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "send_message": "mdi:cellphone-message" + "send_message": { + "service": "mdi:cellphone-message" + } } } diff --git a/homeassistant/components/browser/icons.json b/homeassistant/components/browser/icons.json index 7c971009fd7..680aaf14b86 100644 --- a/homeassistant/components/browser/icons.json +++ b/homeassistant/components/browser/icons.json @@ -1,5 +1,7 @@ { "services": { - "browse_url": "mdi:web" + "browse_url": { + "service": "mdi:web" + } } } diff --git a/homeassistant/components/button/icons.json b/homeassistant/components/button/icons.json index 71956124d7f..1364fb2d056 100644 --- a/homeassistant/components/button/icons.json +++ b/homeassistant/components/button/icons.json @@ -14,6 +14,8 @@ } }, "services": { - "press": "mdi:gesture-tap-button" + "press": { + "service": "mdi:gesture-tap-button" + } } } diff --git a/homeassistant/components/calendar/icons.json b/homeassistant/components/calendar/icons.json index e4e526fe75c..9b8df3ec6d3 100644 --- 
a/homeassistant/components/calendar/icons.json +++ b/homeassistant/components/calendar/icons.json @@ -9,8 +9,14 @@ } }, "services": { - "create_event": "mdi:calendar-plus", - "get_events": "mdi:calendar-month", - "list_events": "mdi:calendar-month" + "create_event": { + "service": "mdi:calendar-plus" + }, + "get_events": { + "service": "mdi:calendar-month" + }, + "list_events": { + "service": "mdi:calendar-month" + } } } diff --git a/homeassistant/components/camera/icons.json b/homeassistant/components/camera/icons.json index 37e71c80a67..982074cd553 100644 --- a/homeassistant/components/camera/icons.json +++ b/homeassistant/components/camera/icons.json @@ -8,12 +8,26 @@ } }, "services": { - "disable_motion_detection": "mdi:motion-sensor-off", - "enable_motion_detection": "mdi:motion-sensor", - "play_stream": "mdi:play", - "record": "mdi:record-rec", - "snapshot": "mdi:camera", - "turn_off": "mdi:video-off", - "turn_on": "mdi:video" + "disable_motion_detection": { + "service": "mdi:motion-sensor-off" + }, + "enable_motion_detection": { + "service": "mdi:motion-sensor" + }, + "play_stream": { + "service": "mdi:play" + }, + "record": { + "service": "mdi:record-rec" + }, + "snapshot": { + "service": "mdi:camera" + }, + "turn_off": { + "service": "mdi:video-off" + }, + "turn_on": { + "service": "mdi:video" + } } } diff --git a/homeassistant/components/cast/icons.json b/homeassistant/components/cast/icons.json index e19ea0b07b2..a43411eaad3 100644 --- a/homeassistant/components/cast/icons.json +++ b/homeassistant/components/cast/icons.json @@ -1,5 +1,7 @@ { "services": { - "show_lovelace_view": "mdi:view-dashboard" + "show_lovelace_view": { + "service": "mdi:view-dashboard" + } } } diff --git a/homeassistant/components/channels/icons.json b/homeassistant/components/channels/icons.json index cbbda1ef623..ad5504a5422 100644 --- a/homeassistant/components/channels/icons.json +++ b/homeassistant/components/channels/icons.json @@ -1,7 +1,13 @@ { "services": { - "seek_forward": "mdi:skip-forward", - "seek_backward": "mdi:skip-backward", - "seek_by": "mdi:timer-check-outline" + "seek_forward": { + "service": "mdi:skip-forward" + }, + "seek_backward": { + "service": "mdi:skip-backward" + }, + "seek_by": { + "service": "mdi:timer-check-outline" + } } } diff --git a/homeassistant/components/climate/icons.json b/homeassistant/components/climate/icons.json index ea6c504ce25..c9a8d12d01b 100644 --- a/homeassistant/components/climate/icons.json +++ b/homeassistant/components/climate/icons.json @@ -56,15 +56,35 @@ } }, "services": { - "set_fan_mode": "mdi:fan", - "set_humidity": "mdi:water-percent", - "set_swing_mode": "mdi:arrow-oscillating", - "set_temperature": "mdi:thermometer", - "set_aux_heat": "mdi:radiator", - "set_preset_mode": "mdi:sofa", - "set_hvac_mode": "mdi:hvac", - "turn_on": "mdi:power-on", - "turn_off": "mdi:power-off", - "toggle": "mdi:toggle-switch" + "set_fan_mode": { + "service": "mdi:fan" + }, + "set_humidity": { + "service": "mdi:water-percent" + }, + "set_swing_mode": { + "service": "mdi:arrow-oscillating" + }, + "set_temperature": { + "service": "mdi:thermometer" + }, + "set_aux_heat": { + "service": "mdi:radiator" + }, + "set_preset_mode": { + "service": "mdi:sofa" + }, + "set_hvac_mode": { + "service": "mdi:hvac" + }, + "turn_on": { + "service": "mdi:power-on" + }, + "turn_off": { + "service": "mdi:power-off" + }, + "toggle": { + "service": "mdi:toggle-switch" + } } } diff --git a/homeassistant/components/cloud/icons.json b/homeassistant/components/cloud/icons.json index 
06ee7eb2f19..32888fa75c7 100644 --- a/homeassistant/components/cloud/icons.json +++ b/homeassistant/components/cloud/icons.json @@ -1,6 +1,10 @@ { "services": { - "remote_connect": "mdi:cloud", - "remote_disconnect": "mdi:cloud-off" + "remote_connect": { + "service": "mdi:cloud" + }, + "remote_disconnect": { + "service": "mdi:cloud-off" + } } } diff --git a/homeassistant/components/cloudflare/icons.json b/homeassistant/components/cloudflare/icons.json index 6bf6d773fc3..2d452716c94 100644 --- a/homeassistant/components/cloudflare/icons.json +++ b/homeassistant/components/cloudflare/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_records": "mdi:dns" + "update_records": { + "service": "mdi:dns" + } } } diff --git a/homeassistant/components/color_extractor/icons.json b/homeassistant/components/color_extractor/icons.json index 07b449ffc54..9dab17a9f3b 100644 --- a/homeassistant/components/color_extractor/icons.json +++ b/homeassistant/components/color_extractor/icons.json @@ -1,5 +1,7 @@ { "services": { - "turn_on": "mdi:lightbulb-on" + "turn_on": { + "service": "mdi:lightbulb-on" + } } } diff --git a/homeassistant/components/command_line/icons.json b/homeassistant/components/command_line/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/command_line/icons.json +++ b/homeassistant/components/command_line/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/conversation/icons.json b/homeassistant/components/conversation/icons.json index b39a1603b15..658783f9ae2 100644 --- a/homeassistant/components/conversation/icons.json +++ b/homeassistant/components/conversation/icons.json @@ -1,6 +1,10 @@ { "services": { - "process": "mdi:message-processing", - "reload": "mdi:reload" + "process": { + "service": "mdi:message-processing" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/counter/icons.json b/homeassistant/components/counter/icons.json index 1e0ef54bbb7..59cd0bb7121 100644 --- a/homeassistant/components/counter/icons.json +++ b/homeassistant/components/counter/icons.json @@ -1,8 +1,16 @@ { "services": { - "decrement": "mdi:numeric-negative-1", - "increment": "mdi:numeric-positive-1", - "reset": "mdi:refresh", - "set_value": "mdi:counter" + "decrement": { + "service": "mdi:numeric-negative-1" + }, + "increment": { + "service": "mdi:numeric-positive-1" + }, + "reset": { + "service": "mdi:refresh" + }, + "set_value": { + "service": "mdi:counter" + } } } diff --git a/homeassistant/components/cover/icons.json b/homeassistant/components/cover/icons.json index f2edaaa0893..91775fe634d 100644 --- a/homeassistant/components/cover/icons.json +++ b/homeassistant/components/cover/icons.json @@ -78,15 +78,35 @@ } }, "services": { - "close_cover": "mdi:arrow-down-box", - "close_cover_tilt": "mdi:arrow-bottom-left", - "open_cover": "mdi:arrow-up-box", - "open_cover_tilt": "mdi:arrow-top-right", - "set_cover_position": "mdi:arrow-down-box", - "set_cover_tilt_position": "mdi:arrow-top-right", - "stop_cover": "mdi:stop", - "stop_cover_tilt": "mdi:stop", - "toggle": "mdi:arrow-up-down", - "toggle_cover_tilt": "mdi:arrow-top-right-bottom-left" + "close_cover": { + "service": "mdi:arrow-down-box" + }, + "close_cover_tilt": { + "service": "mdi:arrow-bottom-left" + }, + "open_cover": { + "service": "mdi:arrow-up-box" + }, + "open_cover_tilt": { + "service": "mdi:arrow-top-right" + }, + "set_cover_position": { + "service": "mdi:arrow-down-box" 
+ }, + "set_cover_tilt_position": { + "service": "mdi:arrow-top-right" + }, + "stop_cover": { + "service": "mdi:stop" + }, + "stop_cover_tilt": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:arrow-up-down" + }, + "toggle_cover_tilt": { + "service": "mdi:arrow-top-right-bottom-left" + } } } diff --git a/homeassistant/components/date/icons.json b/homeassistant/components/date/icons.json index 80ec2691285..b139b897210 100644 --- a/homeassistant/components/date/icons.json +++ b/homeassistant/components/date/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:calendar-edit" + "set_value": { + "service": "mdi:calendar-edit" + } } } diff --git a/homeassistant/components/datetime/icons.json b/homeassistant/components/datetime/icons.json index 563d03e2a8f..d7e9fca8e5c 100644 --- a/homeassistant/components/datetime/icons.json +++ b/homeassistant/components/datetime/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:calendar-edit" + "set_value": { + "service": "mdi:calendar-edit" + } } } diff --git a/homeassistant/components/debugpy/icons.json b/homeassistant/components/debugpy/icons.json index b3bb4dde23a..88086382059 100644 --- a/homeassistant/components/debugpy/icons.json +++ b/homeassistant/components/debugpy/icons.json @@ -1,5 +1,7 @@ { "services": { - "start": "mdi:play" + "start": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/deconz/icons.json b/homeassistant/components/deconz/icons.json index 5b22daee53f..a7fb0859eec 100644 --- a/homeassistant/components/deconz/icons.json +++ b/homeassistant/components/deconz/icons.json @@ -1,7 +1,13 @@ { "services": { - "configure": "mdi:cog", - "device_refresh": "mdi:refresh", - "remove_orphaned_entries": "mdi:bookmark-remove" + "configure": { + "service": "mdi:cog" + }, + "device_refresh": { + "service": "mdi:refresh" + }, + "remove_orphaned_entries": { + "service": "mdi:bookmark-remove" + } } } diff --git a/homeassistant/components/demo/icons.json b/homeassistant/components/demo/icons.json index d9e1d405490..17425a6d119 100644 --- a/homeassistant/components/demo/icons.json +++ b/homeassistant/components/demo/icons.json @@ -75,6 +75,8 @@ } }, "services": { - "randomize_device_tracker_data": "mdi:dice-multiple" + "randomize_device_tracker_data": { + "service": "mdi:dice-multiple" + } } } diff --git a/homeassistant/components/denonavr/icons.json b/homeassistant/components/denonavr/icons.json index ec6bc0854f9..33d7f1bd3d9 100644 --- a/homeassistant/components/denonavr/icons.json +++ b/homeassistant/components/denonavr/icons.json @@ -1,7 +1,13 @@ { "services": { - "get_command": "mdi:console", - "set_dynamic_eq": "mdi:tune", - "update_audyssey": "mdi:waveform" + "get_command": { + "service": "mdi:console" + }, + "set_dynamic_eq": { + "service": "mdi:tune" + }, + "update_audyssey": { + "service": "mdi:waveform" + } } } diff --git a/homeassistant/components/device_tracker/icons.json b/homeassistant/components/device_tracker/icons.json index c89053701ba..4e5b82576cf 100644 --- a/homeassistant/components/device_tracker/icons.json +++ b/homeassistant/components/device_tracker/icons.json @@ -8,6 +8,8 @@ } }, "services": { - "see": "mdi:account-eye" + "see": { + "service": "mdi:account-eye" + } } } diff --git a/homeassistant/components/dominos/icons.json b/homeassistant/components/dominos/icons.json index d88bfb2542f..ca33ac91dfd 100644 --- a/homeassistant/components/dominos/icons.json +++ b/homeassistant/components/dominos/icons.json @@ -1,5 +1,7 @@ { "services": { - "order": "mdi:pizza" + 
"order": { + "service": "mdi:pizza" + } } } diff --git a/homeassistant/components/downloader/icons.json b/homeassistant/components/downloader/icons.json index 2a78df93ca7..8f8b5bb2688 100644 --- a/homeassistant/components/downloader/icons.json +++ b/homeassistant/components/downloader/icons.json @@ -1,5 +1,7 @@ { "services": { - "download_file": "mdi:download" + "download_file": { + "service": "mdi:download" + } } } diff --git a/homeassistant/components/duckdns/icons.json b/homeassistant/components/duckdns/icons.json index 79ec18d13ff..c5d0b5329dc 100644 --- a/homeassistant/components/duckdns/icons.json +++ b/homeassistant/components/duckdns/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_txt": "mdi:text-box-edit-outline" + "set_txt": { + "service": "mdi:text-box-edit-outline" + } } } diff --git a/homeassistant/components/dynalite/icons.json b/homeassistant/components/dynalite/icons.json index dedbb1be3ac..27949197b53 100644 --- a/homeassistant/components/dynalite/icons.json +++ b/homeassistant/components/dynalite/icons.json @@ -1,6 +1,10 @@ { "services": { - "request_area_preset": "mdi:texture-box", - "request_channel_level": "mdi:satellite-uplink" + "request_area_preset": { + "service": "mdi:texture-box" + }, + "request_channel_level": { + "service": "mdi:satellite-uplink" + } } } diff --git a/homeassistant/components/easyenergy/icons.json b/homeassistant/components/easyenergy/icons.json index 90cbec17a65..501483eb932 100644 --- a/homeassistant/components/easyenergy/icons.json +++ b/homeassistant/components/easyenergy/icons.json @@ -13,8 +13,14 @@ } }, "services": { - "get_gas_prices": "mdi:gas-station", - "get_energy_usage_prices": "mdi:transmission-tower-import", - "get_energy_return_prices": "mdi:transmission-tower-export" + "get_gas_prices": { + "service": "mdi:gas-station" + }, + "get_energy_usage_prices": { + "service": "mdi:transmission-tower-import" + }, + "get_energy_return_prices": { + "service": "mdi:transmission-tower-export" + } } } diff --git a/homeassistant/components/ebusd/icons.json b/homeassistant/components/ebusd/icons.json index 642be37a43b..ebfa3673a0c 100644 --- a/homeassistant/components/ebusd/icons.json +++ b/homeassistant/components/ebusd/icons.json @@ -1,5 +1,7 @@ { "services": { - "write": "mdi:pencil" + "write": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/ecobee/icons.json b/homeassistant/components/ecobee/icons.json index 3e736d0dc68..f24f1f7cfe5 100644 --- a/homeassistant/components/ecobee/icons.json +++ b/homeassistant/components/ecobee/icons.json @@ -1,11 +1,25 @@ { "services": { - "create_vacation": "mdi:umbrella-beach", - "delete_vacation": "mdi:umbrella-beach-outline", - "resume_program": "mdi:play", - "set_fan_min_on_time": "mdi:fan-clock", - "set_dst_mode": "mdi:sun-clock", - "set_mic_mode": "mdi:microphone", - "set_occupancy_modes": "mdi:eye-settings" + "create_vacation": { + "service": "mdi:umbrella-beach" + }, + "delete_vacation": { + "service": "mdi:umbrella-beach-outline" + }, + "resume_program": { + "service": "mdi:play" + }, + "set_fan_min_on_time": { + "service": "mdi:fan-clock" + }, + "set_dst_mode": { + "service": "mdi:sun-clock" + }, + "set_mic_mode": { + "service": "mdi:microphone" + }, + "set_occupancy_modes": { + "service": "mdi:eye-settings" + } } } diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index 0c7178ced84..6097f43a4e4 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -145,6 +145,8 @@ 
} }, "services": { - "raw_get_positions": "mdi:map-marker-radius-outline" + "raw_get_positions": { + "service": "mdi:map-marker-radius-outline" + } } } diff --git a/homeassistant/components/elgato/icons.json b/homeassistant/components/elgato/icons.json index 1b5eaf3763a..d2c286594c7 100644 --- a/homeassistant/components/elgato/icons.json +++ b/homeassistant/components/elgato/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "identify": "mdi:crosshairs-question" + "identify": { + "service": "mdi:crosshairs-question" + } } } diff --git a/homeassistant/components/elkm1/icons.json b/homeassistant/components/elkm1/icons.json index 3bb9ea8c87d..54827e4b6ef 100644 --- a/homeassistant/components/elkm1/icons.json +++ b/homeassistant/components/elkm1/icons.json @@ -10,18 +10,44 @@ } }, "services": { - "alarm_bypass": "mdi:shield-off", - "alarm_clear_bypass": "mdi:shield", - "alarm_arm_home_instant": "mdi:shield-lock", - "alarm_arm_night_instant": "mdi:shield-moon", - "alarm_arm_vacation": "mdi:beach", - "alarm_display_message": "mdi:message-alert", - "set_time": "mdi:clock-edit", - "speak_phrase": "mdi:message-processing", - "speak_word": "mdi:message-minus", - "sensor_counter_refresh": "mdi:refresh", - "sensor_counter_set": "mdi:counter", - "sensor_zone_bypass": "mdi:shield-off", - "sensor_zone_trigger": "mdi:shield" + "alarm_bypass": { + "service": "mdi:shield-off" + }, + "alarm_clear_bypass": { + "service": "mdi:shield" + }, + "alarm_arm_home_instant": { + "service": "mdi:shield-lock" + }, + "alarm_arm_night_instant": { + "service": "mdi:shield-moon" + }, + "alarm_arm_vacation": { + "service": "mdi:beach" + }, + "alarm_display_message": { + "service": "mdi:message-alert" + }, + "set_time": { + "service": "mdi:clock-edit" + }, + "speak_phrase": { + "service": "mdi:message-processing" + }, + "speak_word": { + "service": "mdi:message-minus" + }, + "sensor_counter_refresh": { + "service": "mdi:refresh" + }, + "sensor_counter_set": { + "service": "mdi:counter" + }, + "sensor_zone_bypass": { + "service": "mdi:shield-off" + }, + "sensor_zone_trigger": { + "service": "mdi:shield" + } } } diff --git a/homeassistant/components/energyzero/icons.json b/homeassistant/components/energyzero/icons.json index bac061dd318..802f8ef6916 100644 --- a/homeassistant/components/energyzero/icons.json +++ b/homeassistant/components/energyzero/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "get_gas_prices": "mdi:gas-station", - "get_energy_prices": "mdi:lightning-bolt" + "get_gas_prices": { + "service": "mdi:gas-station" + }, + "get_energy_prices": { + "service": "mdi:lightning-bolt" + } } } diff --git a/homeassistant/components/environment_canada/icons.json b/homeassistant/components/environment_canada/icons.json index 5e23a96bcfb..c3562ce1840 100644 --- a/homeassistant/components/environment_canada/icons.json +++ b/homeassistant/components/environment_canada/icons.json @@ -19,6 +19,8 @@ } }, "services": { - "set_radar_type": "mdi:radar" + "set_radar_type": { + "service": "mdi:radar" + } } } diff --git a/homeassistant/components/envisalink/icons.json b/homeassistant/components/envisalink/icons.json index 20696067f76..b25e988f478 100644 --- a/homeassistant/components/envisalink/icons.json +++ b/homeassistant/components/envisalink/icons.json @@ -1,6 +1,10 @@ { "services": { - "alarm_keypress": "mdi:alarm-panel", - "invoke_custom_function": "mdi:console" + "alarm_keypress": { + "service": "mdi:alarm-panel" + }, + "invoke_custom_function": { + "service": "mdi:console" + } } } diff --git 
a/homeassistant/components/epson/icons.json b/homeassistant/components/epson/icons.json index a9237edcfd1..d41ddebcdce 100644 --- a/homeassistant/components/epson/icons.json +++ b/homeassistant/components/epson/icons.json @@ -1,5 +1,7 @@ { "services": { - "select_cmode": "mdi:palette" + "select_cmode": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/evohome/icons.json b/homeassistant/components/evohome/icons.json index cd0005e2546..54488440e60 100644 --- a/homeassistant/components/evohome/icons.json +++ b/homeassistant/components/evohome/icons.json @@ -1,9 +1,19 @@ { "services": { - "set_system_mode": "mdi:pencil", - "reset_system": "mdi:refresh", - "refresh_system": "mdi:refresh", - "set_zone_override": "mdi:motion-sensor", - "clear_zone_override": "mdi:motion-sensor-off" + "set_system_mode": { + "service": "mdi:pencil" + }, + "reset_system": { + "service": "mdi:refresh" + }, + "refresh_system": { + "service": "mdi:refresh" + }, + "set_zone_override": { + "service": "mdi:motion-sensor" + }, + "clear_zone_override": { + "service": "mdi:motion-sensor-off" + } } } diff --git a/homeassistant/components/ezviz/icons.json b/homeassistant/components/ezviz/icons.json index 89b4747ed69..e4a2e49a22c 100644 --- a/homeassistant/components/ezviz/icons.json +++ b/homeassistant/components/ezviz/icons.json @@ -26,7 +26,11 @@ } }, "services": { - "set_alarm_detection_sensibility": "mdi:motion-sensor", - "wake_device": "mdi:sleep-off" + "set_alarm_detection_sensibility": { + "service": "mdi:motion-sensor" + }, + "wake_device": { + "service": "mdi:sleep-off" + } } } diff --git a/homeassistant/components/fan/icons.json b/homeassistant/components/fan/icons.json index 60edbce5f01..caf80775f80 100644 --- a/homeassistant/components/fan/icons.json +++ b/homeassistant/components/fan/icons.json @@ -20,14 +20,32 @@ } }, "services": { - "decrease_speed": "mdi:fan-minus", - "increase_speed": "mdi:fan-plus", - "oscillate": "mdi:arrow-oscillating", - "set_direction": "mdi:rotate-3d-variant", - "set_percentage": "mdi:fan", - "set_preset_mode": "mdi:fan-auto", - "toggle": "mdi:fan", - "turn_off": "mdi:fan-off", - "turn_on": "mdi:fan" + "decrease_speed": { + "service": "mdi:fan-minus" + }, + "increase_speed": { + "service": "mdi:fan-plus" + }, + "oscillate": { + "service": "mdi:arrow-oscillating" + }, + "set_direction": { + "service": "mdi:rotate-3d-variant" + }, + "set_percentage": { + "service": "mdi:fan" + }, + "set_preset_mode": { + "service": "mdi:fan-auto" + }, + "toggle": { + "service": "mdi:fan" + }, + "turn_off": { + "service": "mdi:fan-off" + }, + "turn_on": { + "service": "mdi:fan" + } } } diff --git a/homeassistant/components/ffmpeg/icons.json b/homeassistant/components/ffmpeg/icons.json index a23f024599c..780eb071af1 100644 --- a/homeassistant/components/ffmpeg/icons.json +++ b/homeassistant/components/ffmpeg/icons.json @@ -1,7 +1,13 @@ { "services": { - "restart": "mdi:restart", - "start": "mdi:play", - "stop": "mdi:stop" + "restart": { + "service": "mdi:restart" + }, + "start": { + "service": "mdi:play" + }, + "stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/filter/icons.json b/homeassistant/components/filter/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/filter/icons.json +++ b/homeassistant/components/filter/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/flo/icons.json b/homeassistant/components/flo/icons.json 
index 3164781c1b4..4bd0380c56c 100644 --- a/homeassistant/components/flo/icons.json +++ b/homeassistant/components/flo/icons.json @@ -10,9 +10,17 @@ } }, "services": { - "set_sleep_mode": "mdi:sleep", - "set_away_mode": "mdi:home-off", - "set_home_mode": "mdi:home", - "run_health_test": "mdi:heart-flash" + "set_sleep_mode": { + "service": "mdi:sleep" + }, + "set_away_mode": { + "service": "mdi:home-off" + }, + "set_home_mode": { + "service": "mdi:home" + }, + "run_health_test": { + "service": "mdi:heart-flash" + } } } diff --git a/homeassistant/components/flume/icons.json b/homeassistant/components/flume/icons.json index 631c0645ed3..90830943689 100644 --- a/homeassistant/components/flume/icons.json +++ b/homeassistant/components/flume/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "list_notifications": "mdi:bell" + "list_notifications": { + "service": "mdi:bell" + } } } diff --git a/homeassistant/components/flux_led/icons.json b/homeassistant/components/flux_led/icons.json index 873fcd7c441..07c27869ff7 100644 --- a/homeassistant/components/flux_led/icons.json +++ b/homeassistant/components/flux_led/icons.json @@ -54,8 +54,14 @@ } }, "services": { - "set_custom_effect": "mdi:creation", - "set_zones": "mdi:texture-box", - "set_music_mode": "mdi:music" + "set_custom_effect": { + "service": "mdi:creation" + }, + "set_zones": { + "service": "mdi:texture-box" + }, + "set_music_mode": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/foscam/icons.json b/homeassistant/components/foscam/icons.json index 0c7dba9a4df..437575024d1 100644 --- a/homeassistant/components/foscam/icons.json +++ b/homeassistant/components/foscam/icons.json @@ -1,6 +1,10 @@ { "services": { - "ptz": "mdi:pan", - "ptz_preset": "mdi:target-variant" + "ptz": { + "service": "mdi:pan" + }, + "ptz_preset": { + "service": "mdi:target-variant" + } } } diff --git a/homeassistant/components/foursquare/icons.json b/homeassistant/components/foursquare/icons.json index cf60ed9f247..8e2b4e91d5f 100644 --- a/homeassistant/components/foursquare/icons.json +++ b/homeassistant/components/foursquare/icons.json @@ -1,5 +1,7 @@ { "services": { - "checkin": "mdi:map-marker" + "checkin": { + "service": "mdi:map-marker" + } } } diff --git a/homeassistant/components/freebox/icons.json b/homeassistant/components/freebox/icons.json index 81361d2c990..f4184f0673e 100644 --- a/homeassistant/components/freebox/icons.json +++ b/homeassistant/components/freebox/icons.json @@ -1,5 +1,7 @@ { "services": { - "reboot": "mdi:restart" + "reboot": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/fritz/icons.json b/homeassistant/components/fritz/icons.json index d2154dc7232..481568a4c2c 100644 --- a/homeassistant/components/fritz/icons.json +++ b/homeassistant/components/fritz/icons.json @@ -51,9 +51,17 @@ } }, "services": { - "reconnect": "mdi:connection", - "reboot": "mdi:refresh", - "cleanup": "mdi:broom", - "set_guest_wifi_password": "mdi:form-textbox-password" + "reconnect": { + "service": "mdi:connection" + }, + "reboot": { + "service": "mdi:refresh" + }, + "cleanup": { + "service": "mdi:broom" + }, + "set_guest_wifi_password": { + "service": "mdi:form-textbox-password" + } } } diff --git a/homeassistant/components/frontend/icons.json b/homeassistant/components/frontend/icons.json index 9fbe4d5b9b0..b4bcdef6194 100644 --- a/homeassistant/components/frontend/icons.json +++ b/homeassistant/components/frontend/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_theme": "mdi:palette-swatch", - "reload_themes": 
"mdi:reload" + "set_theme": { + "service": "mdi:palette-swatch" + }, + "reload_themes": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/fully_kiosk/icons.json b/homeassistant/components/fully_kiosk/icons.json index 760698f7ac8..0166679abe2 100644 --- a/homeassistant/components/fully_kiosk/icons.json +++ b/homeassistant/components/fully_kiosk/icons.json @@ -1,7 +1,13 @@ { "services": { - "load_url": "mdi:link", - "set_config": "mdi:cog", - "start_application": "mdi:rocket-launch" + "load_url": { + "service": "mdi:link" + }, + "set_config": { + "service": "mdi:cog" + }, + "start_application": { + "service": "mdi:rocket-launch" + } } } diff --git a/homeassistant/components/generic/icons.json b/homeassistant/components/generic/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/generic/icons.json +++ b/homeassistant/components/generic/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } From cffa8b4febfb39124153d5cac38ed8399a4bd53c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 13:47:41 +0200 Subject: [PATCH 1162/1635] Update icons.json to new service schema part 2 (#124769) --- .../components/generic_thermostat/icons.json | 4 +- homeassistant/components/geniushub/icons.json | 12 ++- homeassistant/components/google/icons.json | 8 +- .../components/google_assistant/icons.json | 4 +- .../google_assistant_sdk/icons.json | 4 +- .../icons.json | 4 +- .../components/google_mail/icons.json | 4 +- .../components/google_sheets/icons.json | 4 +- homeassistant/components/group/icons.json | 12 ++- homeassistant/components/guardian/icons.json | 12 ++- homeassistant/components/habitica/icons.json | 4 +- homeassistant/components/harmony/icons.json | 8 +- homeassistant/components/hassio/icons.json | 44 +++++++--- homeassistant/components/hdmi_cec/icons.json | 24 +++-- homeassistant/components/heos/icons.json | 8 +- .../components/history_stats/icons.json | 4 +- homeassistant/components/hive/icons.json | 12 ++- .../components/home_connect/icons.json | 28 ++++-- .../components/homeassistant/icons.json | 52 ++++++++--- homeassistant/components/homekit/icons.json | 12 ++- homeassistant/components/homematic/icons.json | 24 +++-- .../components/homematicip_cloud/icons.json | 36 ++++++-- homeassistant/components/homeworks/icons.json | 4 +- homeassistant/components/html5/icons.json | 4 +- .../components/huawei_lte/icons.json | 8 +- homeassistant/components/hue/icons.json | 8 +- .../components/humidifier/icons.json | 20 +++-- .../components/husqvarna_automower/icons.json | 8 +- homeassistant/components/hydrawise/icons.json | 12 ++- homeassistant/components/icloud/icons.json | 16 +++- homeassistant/components/ifttt/icons.json | 8 +- homeassistant/components/ihc/icons.json | 16 +++- .../components/image_processing/icons.json | 4 +- homeassistant/components/imap/icons.json | 16 +++- .../components/input_boolean/icons.json | 16 +++- .../components/input_button/icons.json | 8 +- .../components/input_datetime/icons.json | 8 +- .../components/input_number/icons.json | 16 +++- .../components/input_select/icons.json | 28 ++++-- .../components/input_text/icons.json | 8 +- homeassistant/components/insteon/icons.json | 44 +++++++--- .../components/intent_script/icons.json | 4 +- homeassistant/components/iperf3/icons.json | 4 +- homeassistant/components/isy994/icons.json | 32 +++++-- homeassistant/components/izone/icons.json | 8 +- homeassistant/components/keba/icons.json | 32 +++++-- 
homeassistant/components/kef/icons.json | 32 +++++-- homeassistant/components/keyboard/icons.json | 24 +++-- .../components/keymitt_ble/icons.json | 4 +- homeassistant/components/knx/icons.json | 20 +++-- homeassistant/components/kodi/icons.json | 8 +- homeassistant/components/lametric/icons.json | 8 +- .../components/lawn_mower/icons.json | 12 ++- homeassistant/components/lcn/icons.json | 52 ++++++++--- homeassistant/components/lifx/icons.json | 36 ++++++-- homeassistant/components/light/icons.json | 12 ++- .../components/litterrobot/icons.json | 4 +- .../components/local_file/icons.json | 4 +- homeassistant/components/lock/icons.json | 12 ++- homeassistant/components/logbook/icons.json | 4 +- homeassistant/components/logger/icons.json | 8 +- homeassistant/components/lovelace/icons.json | 4 +- homeassistant/components/lyric/icons.json | 4 +- homeassistant/components/matrix/icons.json | 4 +- homeassistant/components/mealie/icons.json | 20 +++-- .../components/media_extractor/icons.json | 8 +- .../components/media_player/icons.json | 88 ++++++++++++++----- homeassistant/components/melcloud/icons.json | 8 +- .../components/microsoft_face/icons.json | 24 +++-- homeassistant/components/mill/icons.json | 4 +- 70 files changed, 795 insertions(+), 265 deletions(-) diff --git a/homeassistant/components/generic_thermostat/icons.json b/homeassistant/components/generic_thermostat/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/generic_thermostat/icons.json +++ b/homeassistant/components/generic_thermostat/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/geniushub/icons.json b/homeassistant/components/geniushub/icons.json index 41697b419a8..c8a59dedbbd 100644 --- a/homeassistant/components/geniushub/icons.json +++ b/homeassistant/components/geniushub/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_zone_mode": "mdi:auto-mode", - "set_zone_override": "mdi:thermometer-lines", - "set_switch_override": "mdi:toggle-switch-variant" + "set_zone_mode": { + "service": "mdi:auto-mode" + }, + "set_zone_override": { + "service": "mdi:thermometer-lines" + }, + "set_switch_override": { + "service": "mdi:toggle-switch-variant" + } } } diff --git a/homeassistant/components/google/icons.json b/homeassistant/components/google/icons.json index 6dbad61b43d..e4f25442546 100644 --- a/homeassistant/components/google/icons.json +++ b/homeassistant/components/google/icons.json @@ -1,6 +1,10 @@ { "services": { - "add_event": "mdi:calendar-plus", - "create_event": "mdi:calendar-plus" + "add_event": { + "service": "mdi:calendar-plus" + }, + "create_event": { + "service": "mdi:calendar-plus" + } } } diff --git a/homeassistant/components/google_assistant/icons.json b/homeassistant/components/google_assistant/icons.json index 3bcab03d2c2..a522103328a 100644 --- a/homeassistant/components/google_assistant/icons.json +++ b/homeassistant/components/google_assistant/icons.json @@ -1,5 +1,7 @@ { "services": { - "request_sync": "mdi:sync" + "request_sync": { + "service": "mdi:sync" + } } } diff --git a/homeassistant/components/google_assistant_sdk/icons.json b/homeassistant/components/google_assistant_sdk/icons.json index bf1420b2e3f..75747c43f5b 100644 --- a/homeassistant/components/google_assistant_sdk/icons.json +++ b/homeassistant/components/google_assistant_sdk/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_text_command": "mdi:comment-text-outline" + "send_text_command": { + "service": 
"mdi:comment-text-outline" + } } } diff --git a/homeassistant/components/google_generative_ai_conversation/icons.json b/homeassistant/components/google_generative_ai_conversation/icons.json index 6544532783a..6ac3cc3b21c 100644 --- a/homeassistant/components/google_generative_ai_conversation/icons.json +++ b/homeassistant/components/google_generative_ai_conversation/icons.json @@ -1,5 +1,7 @@ { "services": { - "generate_content": "mdi:receipt-text" + "generate_content": { + "service": "mdi:receipt-text" + } } } diff --git a/homeassistant/components/google_mail/icons.json b/homeassistant/components/google_mail/icons.json index 599ccffe3c7..d0a6eb33715 100644 --- a/homeassistant/components/google_mail/icons.json +++ b/homeassistant/components/google_mail/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_vacation": "mdi:beach" + "set_vacation": { + "service": "mdi:beach" + } } } diff --git a/homeassistant/components/google_sheets/icons.json b/homeassistant/components/google_sheets/icons.json index c8010a690be..e2b6ed57579 100644 --- a/homeassistant/components/google_sheets/icons.json +++ b/homeassistant/components/google_sheets/icons.json @@ -1,5 +1,7 @@ { "services": { - "append_sheet": "mdi:google-spreadsheet" + "append_sheet": { + "service": "mdi:google-spreadsheet" + } } } diff --git a/homeassistant/components/group/icons.json b/homeassistant/components/group/icons.json index 8cca94e08e1..577d1effac0 100644 --- a/homeassistant/components/group/icons.json +++ b/homeassistant/components/group/icons.json @@ -1,7 +1,13 @@ { "services": { - "reload": "mdi:reload", - "set": "mdi:home-group-plus", - "remove": "mdi:home-group-remove" + "reload": { + "service": "mdi:reload" + }, + "set": { + "service": "mdi:home-group-plus" + }, + "remove": { + "service": "mdi:home-group-remove" + } } } diff --git a/homeassistant/components/guardian/icons.json b/homeassistant/components/guardian/icons.json index 4740366e993..fe44eb0460b 100644 --- a/homeassistant/components/guardian/icons.json +++ b/homeassistant/components/guardian/icons.json @@ -18,8 +18,14 @@ } }, "services": { - "pair_sensor": "mdi:link-variant", - "unpair_sensor": "mdi:link-variant-remove", - "upgrade_firmware": "mdi:update" + "pair_sensor": { + "service": "mdi:link-variant" + }, + "unpair_sensor": { + "service": "mdi:link-variant-remove" + }, + "upgrade_firmware": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index 710b8c9d25b..662cf1d84a5 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -88,6 +88,8 @@ } }, "services": { - "api_call": "mdi:console" + "api_call": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/harmony/icons.json b/homeassistant/components/harmony/icons.json index f96fd985323..b6fe0d8c42e 100644 --- a/homeassistant/components/harmony/icons.json +++ b/homeassistant/components/harmony/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "sync": "mdi:sync", - "change_channel": "mdi:remote-tv" + "sync": { + "service": "mdi:sync" + }, + "change_channel": { + "service": "mdi:remote-tv" + } } } diff --git a/homeassistant/components/hassio/icons.json b/homeassistant/components/hassio/icons.json index c55820b58f2..64f032d9f80 100644 --- a/homeassistant/components/hassio/icons.json +++ b/homeassistant/components/hassio/icons.json @@ -10,16 +10,38 @@ } }, "services": { - "addon_start": "mdi:play", - "addon_restart": "mdi:restart", - "addon_stdin": 
"mdi:console", - "addon_stop": "mdi:stop", - "addon_update": "mdi:update", - "host_reboot": "mdi:restart", - "host_shutdown": "mdi:power", - "backup_full": "mdi:content-save", - "backup_partial": "mdi:content-save", - "restore_full": "mdi:backup-restore", - "restore_partial": "mdi:backup-restore" + "addon_start": { + "service": "mdi:play" + }, + "addon_restart": { + "service": "mdi:restart" + }, + "addon_stdin": { + "service": "mdi:console" + }, + "addon_stop": { + "service": "mdi:stop" + }, + "addon_update": { + "service": "mdi:update" + }, + "host_reboot": { + "service": "mdi:restart" + }, + "host_shutdown": { + "service": "mdi:power" + }, + "backup_full": { + "service": "mdi:content-save" + }, + "backup_partial": { + "service": "mdi:content-save" + }, + "restore_full": { + "service": "mdi:backup-restore" + }, + "restore_partial": { + "service": "mdi:backup-restore" + } } } diff --git a/homeassistant/components/hdmi_cec/icons.json b/homeassistant/components/hdmi_cec/icons.json index 0bfcb98eea2..93647a6bb12 100644 --- a/homeassistant/components/hdmi_cec/icons.json +++ b/homeassistant/components/hdmi_cec/icons.json @@ -1,10 +1,22 @@ { "services": { - "power_on": "mdi:power", - "select_device": "mdi:television", - "send_command": "mdi:console", - "standby": "mdi:power-standby", - "update": "mdi:update", - "volume": "mdi:volume-high" + "power_on": { + "service": "mdi:power" + }, + "select_device": { + "service": "mdi:television" + }, + "send_command": { + "service": "mdi:console" + }, + "standby": { + "service": "mdi:power-standby" + }, + "update": { + "service": "mdi:update" + }, + "volume": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/heos/icons.json b/homeassistant/components/heos/icons.json index 69c434c8287..23c2c8faeaf 100644 --- a/homeassistant/components/heos/icons.json +++ b/homeassistant/components/heos/icons.json @@ -1,6 +1,10 @@ { "services": { - "sign_in": "mdi:login", - "sign_out": "mdi:logout" + "sign_in": { + "service": "mdi:login" + }, + "sign_out": { + "service": "mdi:logout" + } } } diff --git a/homeassistant/components/history_stats/icons.json b/homeassistant/components/history_stats/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/history_stats/icons.json +++ b/homeassistant/components/history_stats/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/hive/icons.json b/homeassistant/components/hive/icons.json index 2704317779c..e4c06556906 100644 --- a/homeassistant/components/hive/icons.json +++ b/homeassistant/components/hive/icons.json @@ -18,8 +18,14 @@ } }, "services": { - "boost_heating_on": "mdi:radiator", - "boost_heating_off": "mdi:radiator-off", - "boost_hot_water": "mdi:water-boiler" + "boost_heating_on": { + "service": "mdi:radiator" + }, + "boost_heating_off": { + "service": "mdi:radiator-off" + }, + "boost_hot_water": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index 48965cc554a..33617f5472e 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -1,11 +1,25 @@ { "services": { - "start_program": "mdi:play", - "select_program": "mdi:form-select", - "pause_program": "mdi:pause", - "resume_program": "mdi:play-pause", - "set_option_active": "mdi:gesture-tap", - "set_option_selected": "mdi:gesture-tap", - "change_setting": 
"mdi:cog" + "start_program": { + "service": "mdi:play" + }, + "select_program": { + "service": "mdi:form-select" + }, + "pause_program": { + "service": "mdi:pause" + }, + "resume_program": { + "service": "mdi:play-pause" + }, + "set_option_active": { + "service": "mdi:gesture-tap" + }, + "set_option_selected": { + "service": "mdi:gesture-tap" + }, + "change_setting": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/homeassistant/icons.json b/homeassistant/components/homeassistant/icons.json index ec4d5729918..f08fa8d969b 100644 --- a/homeassistant/components/homeassistant/icons.json +++ b/homeassistant/components/homeassistant/icons.json @@ -1,17 +1,43 @@ { "services": { - "check_config": "mdi:receipt-text-check", - "reload_core_config": "mdi:receipt-text-send", - "restart": "mdi:restart", - "set_location": "mdi:map-marker", - "stop": "mdi:stop", - "toggle": "mdi:toggle-switch", - "turn_on": "mdi:power-on", - "turn_off": "mdi:power-off", - "update_entity": "mdi:update", - "reload_custom_templates": "mdi:palette-swatch", - "reload_config_entry": "mdi:reload", - "save_persistent_states": "mdi:content-save", - "reload_all": "mdi:reload" + "check_config": { + "service": "mdi:receipt-text-check" + }, + "reload_core_config": { + "service": "mdi:receipt-text-send" + }, + "restart": { + "service": "mdi:restart" + }, + "set_location": { + "service": "mdi:map-marker" + }, + "stop": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:toggle-switch" + }, + "turn_on": { + "service": "mdi:power-on" + }, + "turn_off": { + "service": "mdi:power-off" + }, + "update_entity": { + "service": "mdi:update" + }, + "reload_custom_templates": { + "service": "mdi:palette-swatch" + }, + "reload_config_entry": { + "service": "mdi:reload" + }, + "save_persistent_states": { + "service": "mdi:content-save" + }, + "reload_all": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/homekit/icons.json b/homeassistant/components/homekit/icons.json index fb0461eb5d8..7d8ddf131ef 100644 --- a/homeassistant/components/homekit/icons.json +++ b/homeassistant/components/homekit/icons.json @@ -1,7 +1,13 @@ { "services": { - "reload": "mdi:reload", - "reset_accessory": "mdi:cog-refresh", - "unpair": "mdi:link-variant-off" + "reload": { + "service": "mdi:reload" + }, + "reset_accessory": { + "service": "mdi:cog-refresh" + }, + "unpair": { + "service": "mdi:link-variant-off" + } } } diff --git a/homeassistant/components/homematic/icons.json b/homeassistant/components/homematic/icons.json index 998c9a385ba..9e58bbe3a90 100644 --- a/homeassistant/components/homematic/icons.json +++ b/homeassistant/components/homematic/icons.json @@ -1,10 +1,22 @@ { "services": { - "virtualkey": "mdi:keyboard", - "set_variable_value": "mdi:console", - "set_device_value": "mdi:television", - "reconnect": "mdi:wifi-refresh", - "set_install_mode": "mdi:cog", - "put_paramset": "mdi:cog" + "virtualkey": { + "service": "mdi:keyboard" + }, + "set_variable_value": { + "service": "mdi:console" + }, + "set_device_value": { + "service": "mdi:television" + }, + "reconnect": { + "service": "mdi:wifi-refresh" + }, + "set_install_mode": { + "service": "mdi:cog" + }, + "put_paramset": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/homematicip_cloud/icons.json b/homeassistant/components/homematicip_cloud/icons.json index 73c60ea8cdd..53a39d8213c 100644 --- a/homeassistant/components/homematicip_cloud/icons.json +++ b/homeassistant/components/homematicip_cloud/icons.json @@ -1,13 +1,31 @@ { 
"services": { - "activate_eco_mode_with_duration": "mdi:leaf", - "activate_eco_mode_with_period": "mdi:leaf", - "activate_vacation": "mdi:compass", - "deactivate_eco_mode": "mdi:leaf-off", - "deactivate_vacation": "mdi:compass-off", - "set_active_climate_profile": "mdi:home-thermometer", - "dump_hap_config": "mdi:database-export", - "reset_energy_counter": "mdi:reload", - "set_home_cooling_mode": "mdi:snowflake" + "activate_eco_mode_with_duration": { + "service": "mdi:leaf" + }, + "activate_eco_mode_with_period": { + "service": "mdi:leaf" + }, + "activate_vacation": { + "service": "mdi:compass" + }, + "deactivate_eco_mode": { + "service": "mdi:leaf-off" + }, + "deactivate_vacation": { + "service": "mdi:compass-off" + }, + "set_active_climate_profile": { + "service": "mdi:home-thermometer" + }, + "dump_hap_config": { + "service": "mdi:database-export" + }, + "reset_energy_counter": { + "service": "mdi:reload" + }, + "set_home_cooling_mode": { + "service": "mdi:snowflake" + } } } diff --git a/homeassistant/components/homeworks/icons.json b/homeassistant/components/homeworks/icons.json index f53b447d96e..fc39b2ef455 100644 --- a/homeassistant/components/homeworks/icons.json +++ b/homeassistant/components/homeworks/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_command": "mdi:console" + "send_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/html5/icons.json b/homeassistant/components/html5/icons.json index c3d6e27efda..d0a6013dd12 100644 --- a/homeassistant/components/html5/icons.json +++ b/homeassistant/components/html5/icons.json @@ -1,5 +1,7 @@ { "services": { - "dismiss": "mdi:bell-off" + "dismiss": { + "service": "mdi:bell-off" + } } } diff --git a/homeassistant/components/huawei_lte/icons.json b/homeassistant/components/huawei_lte/icons.json index d105702bf51..a338cc65ed4 100644 --- a/homeassistant/components/huawei_lte/icons.json +++ b/homeassistant/components/huawei_lte/icons.json @@ -53,7 +53,11 @@ } }, "services": { - "resume_integration": "mdi:play-pause", - "suspend_integration": "mdi:pause" + "resume_integration": { + "service": "mdi:play-pause" + }, + "suspend_integration": { + "service": "mdi:pause" + } } } diff --git a/homeassistant/components/hue/icons.json b/homeassistant/components/hue/icons.json index 9371ae5843e..31464308b0a 100644 --- a/homeassistant/components/hue/icons.json +++ b/homeassistant/components/hue/icons.json @@ -1,6 +1,10 @@ { "services": { - "hue_activate_scene": "mdi:palette", - "activate_scene": "mdi:palette" + "hue_activate_scene": { + "service": "mdi:palette" + }, + "activate_scene": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/humidifier/icons.json b/homeassistant/components/humidifier/icons.json index 2c67f759195..15951df432d 100644 --- a/homeassistant/components/humidifier/icons.json +++ b/homeassistant/components/humidifier/icons.json @@ -33,10 +33,20 @@ } }, "services": { - "set_humidity": "mdi:water-percent", - "set_mode": "mdi:air-humidifier", - "toggle": "mdi:air-humidifier", - "turn_off": "mdi:air-humidifier-off", - "turn_on": "mdi:air-humidifier" + "set_humidity": { + "service": "mdi:water-percent" + }, + "set_mode": { + "service": "mdi:air-humidifier" + }, + "toggle": { + "service": "mdi:air-humidifier" + }, + "turn_off": { + "service": "mdi:air-humidifier-off" + }, + "turn_on": { + "service": "mdi:air-humidifier" + } } } diff --git a/homeassistant/components/husqvarna_automower/icons.json b/homeassistant/components/husqvarna_automower/icons.json index 
9dc1cbeb667..bcaf1826260 100644 --- a/homeassistant/components/husqvarna_automower/icons.json +++ b/homeassistant/components/husqvarna_automower/icons.json @@ -34,7 +34,11 @@ } }, "services": { - "override_schedule": "mdi:debug-step-over", - "override_schedule_work_area": "mdi:land-fields" + "override_schedule": { + "service": "mdi:debug-step-over" + }, + "override_schedule_work_area": { + "service": "mdi:land-fields" + } } } diff --git a/homeassistant/components/hydrawise/icons.json b/homeassistant/components/hydrawise/icons.json index 1d1d349dbf9..5baf76454b7 100644 --- a/homeassistant/components/hydrawise/icons.json +++ b/homeassistant/components/hydrawise/icons.json @@ -31,8 +31,14 @@ } }, "services": { - "start_watering": "mdi:sprinkler-variant", - "suspend": "mdi:pause-circle-outline", - "resume": "mdi:play" + "start_watering": { + "service": "mdi:sprinkler-variant" + }, + "suspend": { + "service": "mdi:pause-circle-outline" + }, + "resume": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/icloud/icons.json b/homeassistant/components/icloud/icons.json index 4ed856aabc1..16280a063e3 100644 --- a/homeassistant/components/icloud/icons.json +++ b/homeassistant/components/icloud/icons.json @@ -1,8 +1,16 @@ { "services": { - "update": "mdi:update", - "play_sound": "mdi:speaker-wireless", - "display_message": "mdi:message-alert", - "lost_device": "mdi:devices" + "update": { + "service": "mdi:update" + }, + "play_sound": { + "service": "mdi:speaker-wireless" + }, + "display_message": { + "service": "mdi:message-alert" + }, + "lost_device": { + "service": "mdi:devices" + } } } diff --git a/homeassistant/components/ifttt/icons.json b/homeassistant/components/ifttt/icons.json index b943478a70b..a90d76f664a 100644 --- a/homeassistant/components/ifttt/icons.json +++ b/homeassistant/components/ifttt/icons.json @@ -1,6 +1,10 @@ { "services": { - "push_alarm_state": "mdi:security", - "trigger": "mdi:play" + "push_alarm_state": { + "service": "mdi:security" + }, + "trigger": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/ihc/icons.json b/homeassistant/components/ihc/icons.json index 73aab5f80d8..3842d1a48a6 100644 --- a/homeassistant/components/ihc/icons.json +++ b/homeassistant/components/ihc/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_runtime_value_bool": "mdi:toggle-switch", - "set_runtime_value_int": "mdi:numeric", - "set_runtime_value_float": "mdi:numeric", - "pulse": "mdi:pulse" + "set_runtime_value_bool": { + "service": "mdi:toggle-switch" + }, + "set_runtime_value_int": { + "service": "mdi:numeric" + }, + "set_runtime_value_float": { + "service": "mdi:numeric" + }, + "pulse": { + "service": "mdi:pulse" + } } } diff --git a/homeassistant/components/image_processing/icons.json b/homeassistant/components/image_processing/icons.json index b19d29c186d..ae95718e381 100644 --- a/homeassistant/components/image_processing/icons.json +++ b/homeassistant/components/image_processing/icons.json @@ -1,5 +1,7 @@ { "services": { - "scan": "mdi:qrcode-scan" + "scan": { + "service": "mdi:qrcode-scan" + } } } diff --git a/homeassistant/components/imap/icons.json b/homeassistant/components/imap/icons.json index 6672f9a4a7f..17a11d0fe22 100644 --- a/homeassistant/components/imap/icons.json +++ b/homeassistant/components/imap/icons.json @@ -10,9 +10,17 @@ } }, "services": { - "seen": "mdi:email-open-outline", - "move": "mdi:email-arrow-right-outline", - "delete": "mdi:trash-can-outline", - "fetch": "mdi:email-sync-outline" + "seen": { + "service": 
"mdi:email-open-outline" + }, + "move": { + "service": "mdi:email-arrow-right-outline" + }, + "delete": { + "service": "mdi:trash-can-outline" + }, + "fetch": { + "service": "mdi:email-sync-outline" + } } } diff --git a/homeassistant/components/input_boolean/icons.json b/homeassistant/components/input_boolean/icons.json index dc595a60fba..088c9094b3f 100644 --- a/homeassistant/components/input_boolean/icons.json +++ b/homeassistant/components/input_boolean/icons.json @@ -8,9 +8,17 @@ } }, "services": { - "toggle": "mdi:toggle-switch", - "turn_off": "mdi:toggle-switch-off", - "turn_on": "mdi:toggle-switch", - "reload": "mdi:reload" + "toggle": { + "service": "mdi:toggle-switch" + }, + "turn_off": { + "service": "mdi:toggle-switch-off" + }, + "turn_on": { + "service": "mdi:toggle-switch" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_button/icons.json b/homeassistant/components/input_button/icons.json index 226b8ede110..20d41b4934a 100644 --- a/homeassistant/components/input_button/icons.json +++ b/homeassistant/components/input_button/icons.json @@ -1,6 +1,10 @@ { "services": { - "press": "mdi:gesture-tap-button", - "reload": "mdi:reload" + "press": { + "service": "mdi:gesture-tap-button" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_datetime/icons.json b/homeassistant/components/input_datetime/icons.json index de899023cf2..f3676f02220 100644 --- a/homeassistant/components/input_datetime/icons.json +++ b/homeassistant/components/input_datetime/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_datetime": "mdi:calendar-clock", - "reload": "mdi:reload" + "set_datetime": { + "service": "mdi:calendar-clock" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_number/icons.json b/homeassistant/components/input_number/icons.json index d1423838491..9f90582308b 100644 --- a/homeassistant/components/input_number/icons.json +++ b/homeassistant/components/input_number/icons.json @@ -1,8 +1,16 @@ { "services": { - "decrement": "mdi:minus", - "increment": "mdi:plus", - "set_value": "mdi:numeric", - "reload": "mdi:reload" + "decrement": { + "service": "mdi:minus" + }, + "increment": { + "service": "mdi:plus" + }, + "set_value": { + "service": "mdi:numeric" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_select/icons.json b/homeassistant/components/input_select/icons.json index 03b477ddb36..6ef5cfaf96a 100644 --- a/homeassistant/components/input_select/icons.json +++ b/homeassistant/components/input_select/icons.json @@ -1,11 +1,25 @@ { "services": { - "select_next": "mdi:skip-next", - "select_option": "mdi:check", - "select_previous": "mdi:skip-previous", - "select_first": "mdi:skip-backward", - "select_last": "mdi:skip-forward", - "set_options": "mdi:cog", - "reload": "mdi:reload" + "select_next": { + "service": "mdi:skip-next" + }, + "select_option": { + "service": "mdi:check" + }, + "select_previous": { + "service": "mdi:skip-previous" + }, + "select_first": { + "service": "mdi:skip-backward" + }, + "select_last": { + "service": "mdi:skip-forward" + }, + "set_options": { + "service": "mdi:cog" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_text/icons.json b/homeassistant/components/input_text/icons.json index 0190e4ffba2..8fca66668bc 100644 --- a/homeassistant/components/input_text/icons.json +++ b/homeassistant/components/input_text/icons.json @@ -1,6 +1,10 @@ 
{ "services": { - "set_value": "mdi:form-textbox", - "reload": "mdi:reload" + "set_value": { + "service": "mdi:form-textbox" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/insteon/icons.json b/homeassistant/components/insteon/icons.json index 4d015e13b0d..530006ca7d4 100644 --- a/homeassistant/components/insteon/icons.json +++ b/homeassistant/components/insteon/icons.json @@ -1,15 +1,37 @@ { "services": { - "add_all_link": "mdi:link-variant", - "delete_all_link": "mdi:link-variant-remove", - "load_all_link_database": "mdi:database", - "print_all_link_database": "mdi:database-export", - "print_im_all_link_database": "mdi:database-export", - "x10_all_units_off": "mdi:power-off", - "x10_all_lights_on": "mdi:lightbulb-on", - "x10_all_lights_off": "mdi:lightbulb-off", - "scene_on": "mdi:palette", - "scene_off": "mdi:palette-outline", - "add_default_links": "mdi:link-variant-plus" + "add_all_link": { + "service": "mdi:link-variant" + }, + "delete_all_link": { + "service": "mdi:link-variant-remove" + }, + "load_all_link_database": { + "service": "mdi:database" + }, + "print_all_link_database": { + "service": "mdi:database-export" + }, + "print_im_all_link_database": { + "service": "mdi:database-export" + }, + "x10_all_units_off": { + "service": "mdi:power-off" + }, + "x10_all_lights_on": { + "service": "mdi:lightbulb-on" + }, + "x10_all_lights_off": { + "service": "mdi:lightbulb-off" + }, + "scene_on": { + "service": "mdi:palette" + }, + "scene_off": { + "service": "mdi:palette-outline" + }, + "add_default_links": { + "service": "mdi:link-variant-plus" + } } } diff --git a/homeassistant/components/intent_script/icons.json b/homeassistant/components/intent_script/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/intent_script/icons.json +++ b/homeassistant/components/intent_script/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/iperf3/icons.json b/homeassistant/components/iperf3/icons.json index 3ef7e301ed6..f6ebe1aee2f 100644 --- a/homeassistant/components/iperf3/icons.json +++ b/homeassistant/components/iperf3/icons.json @@ -1,5 +1,7 @@ { "services": { - "speedtest": "mdi:speedometer" + "speedtest": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/isy994/icons.json b/homeassistant/components/isy994/icons.json index 27b2ea6954e..9c6e7fa78df 100644 --- a/homeassistant/components/isy994/icons.json +++ b/homeassistant/components/isy994/icons.json @@ -1,12 +1,28 @@ { "services": { - "send_raw_node_command": "mdi:console-line", - "send_node_command": "mdi:console", - "get_zwave_parameter": "mdi:download", - "set_zwave_parameter": "mdi:upload", - "set_zwave_lock_user_code": "mdi:upload-lock", - "delete_zwave_lock_user_code": "mdi:lock-remove", - "rename_node": "mdi:pencil", - "send_program_command": "mdi:console" + "send_raw_node_command": { + "service": "mdi:console-line" + }, + "send_node_command": { + "service": "mdi:console" + }, + "get_zwave_parameter": { + "service": "mdi:download" + }, + "set_zwave_parameter": { + "service": "mdi:upload" + }, + "set_zwave_lock_user_code": { + "service": "mdi:upload-lock" + }, + "delete_zwave_lock_user_code": { + "service": "mdi:lock-remove" + }, + "rename_node": { + "service": "mdi:pencil" + }, + "send_program_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/izone/icons.json b/homeassistant/components/izone/icons.json 
index e02cd57c141..bb38db27839 100644 --- a/homeassistant/components/izone/icons.json +++ b/homeassistant/components/izone/icons.json @@ -1,6 +1,10 @@ { "services": { - "airflow_min": "mdi:fan-minus", - "airflow_max": "mdi:fan-plus" + "airflow_min": { + "service": "mdi:fan-minus" + }, + "airflow_max": { + "service": "mdi:fan-plus" + } } } diff --git a/homeassistant/components/keba/icons.json b/homeassistant/components/keba/icons.json index 7f64bf7fb34..6de43a84cf6 100644 --- a/homeassistant/components/keba/icons.json +++ b/homeassistant/components/keba/icons.json @@ -1,12 +1,28 @@ { "services": { - "request_data": "mdi:database-arrow-down", - "authorize": "mdi:lock", - "deauthorize": "mdi:lock-open", - "set_energy": "mdi:flash", - "set_current": "mdi:flash", - "enable": "mdi:flash", - "disable": "mdi:fash-off", - "set_failsafe": "mdi:message-alert" + "request_data": { + "service": "mdi:database-arrow-down" + }, + "authorize": { + "service": "mdi:lock" + }, + "deauthorize": { + "service": "mdi:lock-open" + }, + "set_energy": { + "service": "mdi:flash" + }, + "set_current": { + "service": "mdi:flash" + }, + "enable": { + "service": "mdi:flash" + }, + "disable": { + "service": "mdi:fash-off" + }, + "set_failsafe": { + "service": "mdi:message-alert" + } } } diff --git a/homeassistant/components/kef/icons.json b/homeassistant/components/kef/icons.json index eeb6dd099ce..e259e91eb1b 100644 --- a/homeassistant/components/kef/icons.json +++ b/homeassistant/components/kef/icons.json @@ -1,12 +1,28 @@ { "services": { - "update_dsp": "mdi:update", - "set_mode": "mdi:cog", - "set_desk_db": "mdi:volume-high", - "set_wall_db": "mdi:volume-high", - "set_treble_db": "mdi:volume-high", - "set_high_hz": "mdi:sine-wave", - "set_low_hz": "mdi:cosine-wave", - "set_sub_db": "mdi:volume-high" + "update_dsp": { + "service": "mdi:update" + }, + "set_mode": { + "service": "mdi:cog" + }, + "set_desk_db": { + "service": "mdi:volume-high" + }, + "set_wall_db": { + "service": "mdi:volume-high" + }, + "set_treble_db": { + "service": "mdi:volume-high" + }, + "set_high_hz": { + "service": "mdi:sine-wave" + }, + "set_low_hz": { + "service": "mdi:cosine-wave" + }, + "set_sub_db": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/keyboard/icons.json b/homeassistant/components/keyboard/icons.json index 8186b2684dd..03b6210bf41 100644 --- a/homeassistant/components/keyboard/icons.json +++ b/homeassistant/components/keyboard/icons.json @@ -1,10 +1,22 @@ { "services": { - "volume_up": "mdi:volume-high", - "volume_down": "mdi:volume-low", - "volume_mute": "mdi:volume-off", - "media_play_pause": "mdi:play-pause", - "media_next_track": "mdi:skip-next", - "media_prev_track": "mdi:skip-previous" + "volume_up": { + "service": "mdi:volume-high" + }, + "volume_down": { + "service": "mdi:volume-low" + }, + "volume_mute": { + "service": "mdi:volume-off" + }, + "media_play_pause": { + "service": "mdi:play-pause" + }, + "media_next_track": { + "service": "mdi:skip-next" + }, + "media_prev_track": { + "service": "mdi:skip-previous" + } } } diff --git a/homeassistant/components/keymitt_ble/icons.json b/homeassistant/components/keymitt_ble/icons.json index 77450fbf026..d265d96b395 100644 --- a/homeassistant/components/keymitt_ble/icons.json +++ b/homeassistant/components/keymitt_ble/icons.json @@ -1,5 +1,7 @@ { "services": { - "calibrate": "mdi:wrench" + "calibrate": { + "service": "mdi:wrench" + } } } diff --git a/homeassistant/components/knx/icons.json b/homeassistant/components/knx/icons.json index 
2aee34219f6..756b6ab9f9e 100644 --- a/homeassistant/components/knx/icons.json +++ b/homeassistant/components/knx/icons.json @@ -36,10 +36,20 @@ } }, "services": { - "send": "mdi:email-arrow-right", - "read": "mdi:email-search", - "event_register": "mdi:home-import-outline", - "exposure_register": "mdi:home-export-outline", - "reload": "mdi:reload" + "send": { + "service": "mdi:email-arrow-right" + }, + "read": { + "service": "mdi:email-search" + }, + "event_register": { + "service": "mdi:home-import-outline" + }, + "exposure_register": { + "service": "mdi:home-export-outline" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/kodi/icons.json b/homeassistant/components/kodi/icons.json index 07bd246e92d..d9c32630961 100644 --- a/homeassistant/components/kodi/icons.json +++ b/homeassistant/components/kodi/icons.json @@ -1,6 +1,10 @@ { "services": { - "add_to_playlist": "mdi:playlist-plus", - "call_method": "mdi:console" + "add_to_playlist": { + "service": "mdi:playlist-plus" + }, + "call_method": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/lametric/icons.json b/homeassistant/components/lametric/icons.json index 7e1841272cf..229770c96dc 100644 --- a/homeassistant/components/lametric/icons.json +++ b/homeassistant/components/lametric/icons.json @@ -39,7 +39,11 @@ } }, "services": { - "chart": "mdi:chart-areaspline-variant", - "message": "mdi:message" + "chart": { + "service": "mdi:chart-areaspline-variant" + }, + "message": { + "service": "mdi:message" + } } } diff --git a/homeassistant/components/lawn_mower/icons.json b/homeassistant/components/lawn_mower/icons.json index b25bf927fcd..2fa1f79efa1 100644 --- a/homeassistant/components/lawn_mower/icons.json +++ b/homeassistant/components/lawn_mower/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "dock": "mdi:home-import-outline", - "pause": "mdi:pause", - "start_mowing": "mdi:play" + "dock": { + "service": "mdi:home-import-outline" + }, + "pause": { + "service": "mdi:pause" + }, + "start_mowing": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/lcn/icons.json b/homeassistant/components/lcn/icons.json index c8b451a79ea..944c3938a92 100644 --- a/homeassistant/components/lcn/icons.json +++ b/homeassistant/components/lcn/icons.json @@ -1,17 +1,43 @@ { "services": { - "output_abs": "mdi:brightness-auto", - "output_rel": "mdi:brightness-7", - "output_toggle": "mdi:toggle-switch", - "relays": "mdi:light-switch-off", - "led": "mdi:led-on", - "var_abs": "mdi:wrench", - "var_reset": "mdi:reload", - "var_rel": "mdi:wrench", - "lock_regulator": "mdi:lock", - "send_keys": "mdi:alarm-panel", - "lock_keys": "mdi:lock", - "dyn_text": "mdi:form-textbox", - "pck": "mdi:package-variant-closed" + "output_abs": { + "service": "mdi:brightness-auto" + }, + "output_rel": { + "service": "mdi:brightness-7" + }, + "output_toggle": { + "service": "mdi:toggle-switch" + }, + "relays": { + "service": "mdi:light-switch-off" + }, + "led": { + "service": "mdi:led-on" + }, + "var_abs": { + "service": "mdi:wrench" + }, + "var_reset": { + "service": "mdi:reload" + }, + "var_rel": { + "service": "mdi:wrench" + }, + "lock_regulator": { + "service": "mdi:lock" + }, + "send_keys": { + "service": "mdi:alarm-panel" + }, + "lock_keys": { + "service": "mdi:lock" + }, + "dyn_text": { + "service": "mdi:form-textbox" + }, + "pck": { + "service": "mdi:package-variant-closed" + } } } diff --git a/homeassistant/components/lifx/icons.json b/homeassistant/components/lifx/icons.json index 
e32fdb5e06b..58a7c89e266 100644 --- a/homeassistant/components/lifx/icons.json +++ b/homeassistant/components/lifx/icons.json @@ -1,13 +1,31 @@ { "services": { - "set_hev_cycle_state": "mdi:led-on", - "set_state": "mdi:led-on", - "effect_pulse": "mdi:pulse", - "effect_colorloop": "mdi:looks", - "effect_move": "mdi:cube-send", - "effect_flame": "mdi:fire", - "effect_morph": "mdi:shape-outline", - "effect_sky": "mdi:clouds", - "effect_stop": "mdi:stop" + "set_hev_cycle_state": { + "service": "mdi:led-on" + }, + "set_state": { + "service": "mdi:led-on" + }, + "effect_pulse": { + "service": "mdi:pulse" + }, + "effect_colorloop": { + "service": "mdi:looks" + }, + "effect_move": { + "service": "mdi:cube-send" + }, + "effect_flame": { + "service": "mdi:fire" + }, + "effect_morph": { + "service": "mdi:shape-outline" + }, + "effect_sky": { + "service": "mdi:clouds" + }, + "effect_stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/light/icons.json b/homeassistant/components/light/icons.json index 5113834e575..df98def090e 100644 --- a/homeassistant/components/light/icons.json +++ b/homeassistant/components/light/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "toggle": "mdi:lightbulb", - "turn_off": "mdi:lightbulb-off", - "turn_on": "mdi:lightbulb-on" + "toggle": { + "service": "mdi:lightbulb" + }, + "turn_off": { + "service": "mdi:lightbulb-off" + }, + "turn_on": { + "service": "mdi:lightbulb-on" + } } } diff --git a/homeassistant/components/litterrobot/icons.json b/homeassistant/components/litterrobot/icons.json index 333f309e9e8..482031f8424 100644 --- a/homeassistant/components/litterrobot/icons.json +++ b/homeassistant/components/litterrobot/icons.json @@ -40,6 +40,8 @@ } }, "services": { - "set_sleep_mode": "mdi:sleep" + "set_sleep_mode": { + "service": "mdi:sleep" + } } } diff --git a/homeassistant/components/local_file/icons.json b/homeassistant/components/local_file/icons.json index c9c92fa86c8..7b0067c6a44 100644 --- a/homeassistant/components/local_file/icons.json +++ b/homeassistant/components/local_file/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_file_path": "mdi:cog" + "update_file_path": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/lock/icons.json b/homeassistant/components/lock/icons.json index 009bd84a372..0b1befde9ff 100644 --- a/homeassistant/components/lock/icons.json +++ b/homeassistant/components/lock/icons.json @@ -13,8 +13,14 @@ } }, "services": { - "lock": "mdi:lock", - "open": "mdi:door-open", - "unlock": "mdi:lock-open-variant" + "lock": { + "service": "mdi:lock" + }, + "open": { + "service": "mdi:door-open" + }, + "unlock": { + "service": "mdi:lock-open-variant" + } } } diff --git a/homeassistant/components/logbook/icons.json b/homeassistant/components/logbook/icons.json index cd2cde8600c..a8af6427b8c 100644 --- a/homeassistant/components/logbook/icons.json +++ b/homeassistant/components/logbook/icons.json @@ -1,5 +1,7 @@ { "services": { - "log": "mdi:file-document" + "log": { + "service": "mdi:file-document" + } } } diff --git a/homeassistant/components/logger/icons.json b/homeassistant/components/logger/icons.json index 305dd3ece91..1542e1e5ad3 100644 --- a/homeassistant/components/logger/icons.json +++ b/homeassistant/components/logger/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_default_level": "mdi:cog-outline", - "set_level": "mdi:cog-outline" + "set_default_level": { + "service": "mdi:cog-outline" + }, + "set_level": { + "service": "mdi:cog-outline" + } } } diff --git 
a/homeassistant/components/lovelace/icons.json b/homeassistant/components/lovelace/icons.json index fe0a0e114ae..8261dc2d0c9 100644 --- a/homeassistant/components/lovelace/icons.json +++ b/homeassistant/components/lovelace/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload_resources": "mdi:reload" + "reload_resources": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/lyric/icons.json b/homeassistant/components/lyric/icons.json index 555215f8685..edb61c3f8e2 100644 --- a/homeassistant/components/lyric/icons.json +++ b/homeassistant/components/lyric/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "set_hold_time": "mdi:timer-pause" + "set_hold_time": { + "service": "mdi:timer-pause" + } } } diff --git a/homeassistant/components/matrix/icons.json b/homeassistant/components/matrix/icons.json index 4fc56ebe0ff..a8b83e67303 100644 --- a/homeassistant/components/matrix/icons.json +++ b/homeassistant/components/matrix/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_message": "mdi:matrix" + "send_message": { + "service": "mdi:matrix" + } } } diff --git a/homeassistant/components/mealie/icons.json b/homeassistant/components/mealie/icons.json index 16176391701..d7e29cc8bbe 100644 --- a/homeassistant/components/mealie/icons.json +++ b/homeassistant/components/mealie/icons.json @@ -24,10 +24,20 @@ } }, "services": { - "get_mealplan": "mdi:food", - "get_recipe": "mdi:map", - "import_recipe": "mdi:map-search", - "set_random_mealplan": "mdi:dice-multiple", - "set_mealplan": "mdi:food" + "get_mealplan": { + "service": "mdi:food" + }, + "get_recipe": { + "service": "mdi:map" + }, + "import_recipe": { + "service": "mdi:map-search" + }, + "set_random_mealplan": { + "service": "mdi:dice-multiple" + }, + "set_mealplan": { + "service": "mdi:food" + } } } diff --git a/homeassistant/components/media_extractor/icons.json b/homeassistant/components/media_extractor/icons.json index 7abc4410b19..611db7c944c 100644 --- a/homeassistant/components/media_extractor/icons.json +++ b/homeassistant/components/media_extractor/icons.json @@ -1,6 +1,10 @@ { "services": { - "play_media": "mdi:play", - "extract_media_url": "mdi:link" + "play_media": { + "service": "mdi:play" + }, + "extract_media_url": { + "service": "mdi:link" + } } } diff --git a/homeassistant/components/media_player/icons.json b/homeassistant/components/media_player/icons.json index 847ce5989d6..c11211c38ec 100644 --- a/homeassistant/components/media_player/icons.json +++ b/homeassistant/components/media_player/icons.json @@ -32,27 +32,71 @@ } }, "services": { - "clear_playlist": "mdi:playlist-remove", - "join": "mdi:group", - "media_next_track": "mdi:skip-next", - "media_pause": "mdi:pause", - "media_play": "mdi:play", - "media_play_pause": "mdi:play-pause", - "media_previous_track": "mdi:skip-previous", - "media_seek": "mdi:fast-forward", - "media_stop": "mdi:stop", - "play_media": "mdi:play", - "repeat_set": "mdi:repeat", - "select_sound_mode": "mdi:surround-sound", - "select_source": "mdi:import", - "shuffle_set": "mdi:shuffle", - "toggle": "mdi:play-pause", - "turn_off": "mdi:power", - "turn_on": "mdi:power", - "unjoin": "mdi:ungroup", - "volume_down": "mdi:volume-minus", - "volume_mute": "mdi:volume-mute", - "volume_set": "mdi:volume-medium", - "volume_up": "mdi:volume-plus" + "clear_playlist": { + "service": "mdi:playlist-remove" + }, + "join": { + "service": "mdi:group" + }, + "media_next_track": { + "service": "mdi:skip-next" + }, + "media_pause": { + "service": "mdi:pause" + }, + "media_play": { + "service": "mdi:play" + 
}, + "media_play_pause": { + "service": "mdi:play-pause" + }, + "media_previous_track": { + "service": "mdi:skip-previous" + }, + "media_seek": { + "service": "mdi:fast-forward" + }, + "media_stop": { + "service": "mdi:stop" + }, + "play_media": { + "service": "mdi:play" + }, + "repeat_set": { + "service": "mdi:repeat" + }, + "select_sound_mode": { + "service": "mdi:surround-sound" + }, + "select_source": { + "service": "mdi:import" + }, + "shuffle_set": { + "service": "mdi:shuffle" + }, + "toggle": { + "service": "mdi:play-pause" + }, + "turn_off": { + "service": "mdi:power" + }, + "turn_on": { + "service": "mdi:power" + }, + "unjoin": { + "service": "mdi:ungroup" + }, + "volume_down": { + "service": "mdi:volume-minus" + }, + "volume_mute": { + "service": "mdi:volume-mute" + }, + "volume_set": { + "service": "mdi:volume-medium" + }, + "volume_up": { + "service": "mdi:volume-plus" + } } } diff --git a/homeassistant/components/melcloud/icons.json b/homeassistant/components/melcloud/icons.json index de3eb3c0ba2..b91696b5b35 100644 --- a/homeassistant/components/melcloud/icons.json +++ b/homeassistant/components/melcloud/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "set_vane_horizontal": "mdi:arrow-left-right", - "set_vane_vertical": "mdi:arrow-up-down" + "set_vane_horizontal": { + "service": "mdi:arrow-left-right" + }, + "set_vane_vertical": { + "service": "mdi:arrow-up-down" + } } } diff --git a/homeassistant/components/microsoft_face/icons.json b/homeassistant/components/microsoft_face/icons.json index 826e390197a..6e61676224d 100644 --- a/homeassistant/components/microsoft_face/icons.json +++ b/homeassistant/components/microsoft_face/icons.json @@ -1,10 +1,22 @@ { "services": { - "create_group": "mdi:account-multiple-plus", - "create_person": "mdi:account-plus", - "delete_group": "mdi:account-multiple-remove", - "delete_person": "mdi:account-remove", - "face_person": "mdi:face-man", - "train_group": "mdi:account-multiple-check" + "create_group": { + "service": "mdi:account-multiple-plus" + }, + "create_person": { + "service": "mdi:account-plus" + }, + "delete_group": { + "service": "mdi:account-multiple-remove" + }, + "delete_person": { + "service": "mdi:account-remove" + }, + "face_person": { + "service": "mdi:face-man" + }, + "train_group": { + "service": "mdi:account-multiple-check" + } } } diff --git a/homeassistant/components/mill/icons.json b/homeassistant/components/mill/icons.json index 13d6bb650c1..f2595f28057 100644 --- a/homeassistant/components/mill/icons.json +++ b/homeassistant/components/mill/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_room_temperature": "mdi:thermometer" + "set_room_temperature": { + "service": "mdi:thermometer" + } } } From 1a2d013c979d9e4b5e788b638408371bb0cbf3ed Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 13:48:01 +0200 Subject: [PATCH 1163/1635] Update icons.json to new service schema part 3 (#124770) --- homeassistant/components/min_max/icons.json | 4 +- homeassistant/components/minio/icons.json | 12 +++-- homeassistant/components/modbus/icons.json | 20 ++++++-- .../components/modern_forms/icons.json | 16 +++++-- homeassistant/components/monoprice/icons.json | 8 +++- .../components/motion_blinds/icons.json | 4 +- homeassistant/components/motioneye/icons.json | 12 +++-- homeassistant/components/mqtt/icons.json | 12 +++-- homeassistant/components/neato/icons.json | 4 +- .../components/ness_alarm/icons.json | 8 +++- homeassistant/components/netatmo/icons.json | 40 ++++++++++++---- .../components/netgear_lte/icons.json 
| 16 +++++-- homeassistant/components/nexia/icons.json | 12 +++-- .../components/nissan_leaf/icons.json | 8 +++- homeassistant/components/notify/icons.json | 12 +++-- homeassistant/components/nuki/icons.json | 8 +++- homeassistant/components/number/icons.json | 4 +- homeassistant/components/nws/icons.json | 4 +- homeassistant/components/nx584/icons.json | 8 +++- homeassistant/components/nzbget/icons.json | 12 +++-- homeassistant/components/octoprint/icons.json | 4 +- homeassistant/components/ombi/icons.json | 12 +++-- homeassistant/components/omnilogic/icons.json | 4 +- homeassistant/components/onvif/icons.json | 4 +- .../components/openai_conversation/icons.json | 4 +- homeassistant/components/openhome/icons.json | 4 +- .../components/opentherm_gw/icons.json | 48 ++++++++++++++----- .../persistent_notification/icons.json | 12 +++-- homeassistant/components/person/icons.json | 4 +- homeassistant/components/pi_hole/icons.json | 4 +- homeassistant/components/picnic/icons.json | 4 +- homeassistant/components/pilight/icons.json | 4 +- homeassistant/components/plex/icons.json | 8 +++- homeassistant/components/profiler/icons.json | 48 ++++++++++++++----- homeassistant/components/prosegur/icons.json | 4 +- homeassistant/components/ps4/icons.json | 4 +- .../components/python_script/icons.json | 4 +- .../components/qbittorrent/icons.json | 8 +++- homeassistant/components/qvr_pro/icons.json | 8 +++- homeassistant/components/rachio/icons.json | 24 +++++++--- homeassistant/components/rainbird/icons.json | 8 +++- .../components/rainmachine/icons.json | 44 ++++++++++++----- homeassistant/components/recorder/icons.json | 16 +++++-- .../components/remember_the_milk/icons.json | 8 +++- homeassistant/components/remote/icons.json | 24 +++++++--- homeassistant/components/renault/icons.json | 12 +++-- homeassistant/components/renson/icons.json | 12 +++-- homeassistant/components/reolink/icons.json | 8 +++- homeassistant/components/rest/icons.json | 4 +- .../components/rest_command/icons.json | 4 +- homeassistant/components/rflink/icons.json | 4 +- homeassistant/components/rfxtrx/icons.json | 4 +- homeassistant/components/ring/icons.json | 4 +- homeassistant/components/roborock/icons.json | 4 +- homeassistant/components/roku/icons.json | 4 +- homeassistant/components/roon/icons.json | 4 +- homeassistant/components/route53/icons.json | 4 +- homeassistant/components/sabnzbd/icons.json | 12 +++-- homeassistant/components/scene/icons.json | 20 ++++++-- homeassistant/components/schedule/icons.json | 4 +- .../components/screenlogic/icons.json | 12 +++-- homeassistant/components/script/icons.json | 16 +++++-- homeassistant/components/select/icons.json | 20 ++++++-- homeassistant/components/sensibo/icons.json | 20 ++++++-- .../components/seventeentrack/icons.json | 4 +- homeassistant/components/sharkiq/icons.json | 4 +- .../components/shopping_list/icons.json | 32 +++++++++---- .../components/simplisafe/icons.json | 12 +++-- homeassistant/components/siren/icons.json | 12 +++-- homeassistant/components/smarttub/icons.json | 16 +++++-- 70 files changed, 594 insertions(+), 198 deletions(-) diff --git a/homeassistant/components/min_max/icons.json b/homeassistant/components/min_max/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/min_max/icons.json +++ b/homeassistant/components/min_max/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/minio/icons.json 
b/homeassistant/components/minio/icons.json index 16deb1a168d..dce148a23de 100644 --- a/homeassistant/components/minio/icons.json +++ b/homeassistant/components/minio/icons.json @@ -1,7 +1,13 @@ { "services": { - "get": "mdi:cloud-download", - "put": "mdi:cloud-upload", - "remove": "mdi:delete" + "get": { + "service": "mdi:cloud-download" + }, + "put": { + "service": "mdi:cloud-upload" + }, + "remove": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/modbus/icons.json b/homeassistant/components/modbus/icons.json index eeaeff6403b..05ee76fd44e 100644 --- a/homeassistant/components/modbus/icons.json +++ b/homeassistant/components/modbus/icons.json @@ -1,9 +1,19 @@ { "services": { - "reload": "mdi:reload", - "write_coil": "mdi:pencil", - "write_register": "mdi:database-edit", - "stop": "mdi:stop", - "restart": "mdi:restart" + "reload": { + "service": "mdi:reload" + }, + "write_coil": { + "service": "mdi:pencil" + }, + "write_register": { + "service": "mdi:database-edit" + }, + "stop": { + "service": "mdi:stop" + }, + "restart": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/modern_forms/icons.json b/homeassistant/components/modern_forms/icons.json index e5df55dc15e..544e48e17f1 100644 --- a/homeassistant/components/modern_forms/icons.json +++ b/homeassistant/components/modern_forms/icons.json @@ -26,9 +26,17 @@ } }, "services": { - "set_light_sleep_timer": "mdi:timer", - "clear_light_sleep_timer": "mdi:timer-cancel", - "set_fan_sleep_timer": "mdi:timer", - "clear_fan_sleep_timer": "mdi:timer-cancel" + "set_light_sleep_timer": { + "service": "mdi:timer" + }, + "clear_light_sleep_timer": { + "service": "mdi:timer-cancel" + }, + "set_fan_sleep_timer": { + "service": "mdi:timer" + }, + "clear_fan_sleep_timer": { + "service": "mdi:timer-cancel" + } } } diff --git a/homeassistant/components/monoprice/icons.json b/homeassistant/components/monoprice/icons.json index 22610cc2a47..d560c7bcfa8 100644 --- a/homeassistant/components/monoprice/icons.json +++ b/homeassistant/components/monoprice/icons.json @@ -1,6 +1,10 @@ { "services": { - "snapshot": "mdi:content-copy", - "restore": "mdi:content-paste" + "snapshot": { + "service": "mdi:content-copy" + }, + "restore": { + "service": "mdi:content-paste" + } } } diff --git a/homeassistant/components/motion_blinds/icons.json b/homeassistant/components/motion_blinds/icons.json index 9e1cd613e5b..e50e50130f7 100644 --- a/homeassistant/components/motion_blinds/icons.json +++ b/homeassistant/components/motion_blinds/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "set_absolute_position": "mdi:set-square" + "set_absolute_position": { + "service": "mdi:set-square" + } } } diff --git a/homeassistant/components/motioneye/icons.json b/homeassistant/components/motioneye/icons.json index b0a4ea8dcb1..7cc93d528e8 100644 --- a/homeassistant/components/motioneye/icons.json +++ b/homeassistant/components/motioneye/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_text_overlay": "mdi:text-box-outline", - "action": "mdi:gesture-tap-button", - "snapshot": "mdi:camera" + "set_text_overlay": { + "service": "mdi:text-box-outline" + }, + "action": { + "service": "mdi:gesture-tap-button" + }, + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/mqtt/icons.json b/homeassistant/components/mqtt/icons.json index 1979359c5a1..73cbf22b629 100644 --- a/homeassistant/components/mqtt/icons.json +++ b/homeassistant/components/mqtt/icons.json @@ -1,7 +1,13 @@ { "services": { - "publish": "mdi:publish", 
- "dump": "mdi:database-export", - "reload": "mdi:reload" + "publish": { + "service": "mdi:publish" + }, + "dump": { + "service": "mdi:database-export" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/neato/icons.json b/homeassistant/components/neato/icons.json index ca50d5a9bc7..eb18a7e3196 100644 --- a/homeassistant/components/neato/icons.json +++ b/homeassistant/components/neato/icons.json @@ -1,5 +1,7 @@ { "services": { - "custom_cleaning": "mdi:broom" + "custom_cleaning": { + "service": "mdi:broom" + } } } diff --git a/homeassistant/components/ness_alarm/icons.json b/homeassistant/components/ness_alarm/icons.json index ea17fd2b299..29d8ae1c8f5 100644 --- a/homeassistant/components/ness_alarm/icons.json +++ b/homeassistant/components/ness_alarm/icons.json @@ -1,6 +1,10 @@ { "services": { - "aux": "mdi:audio-input-stereo-minijack", - "panic": "mdi:fire" + "aux": { + "service": "mdi:audio-input-stereo-minijack" + }, + "panic": { + "service": "mdi:fire" + } } } diff --git a/homeassistant/components/netatmo/icons.json b/homeassistant/components/netatmo/icons.json index 31b1740ab21..70a51542126 100644 --- a/homeassistant/components/netatmo/icons.json +++ b/homeassistant/components/netatmo/icons.json @@ -34,15 +34,35 @@ } }, "services": { - "set_camera_light": "mdi:led-on", - "set_schedule": "mdi:calendar-clock", - "set_preset_mode_with_end_datetime": "mdi:calendar-clock", - "set_temperature_with_end_datetime": "mdi:thermometer", - "set_temperature_with_time_period": "mdi:thermometer", - "clear_temperature_setting": "mdi:thermometer", - "set_persons_home": "mdi:home", - "set_person_away": "mdi:walk", - "register_webhook": "mdi:link-variant", - "unregister_webhook": "mdi:link-variant-off" + "set_camera_light": { + "service": "mdi:led-on" + }, + "set_schedule": { + "service": "mdi:calendar-clock" + }, + "set_preset_mode_with_end_datetime": { + "service": "mdi:calendar-clock" + }, + "set_temperature_with_end_datetime": { + "service": "mdi:thermometer" + }, + "set_temperature_with_time_period": { + "service": "mdi:thermometer" + }, + "clear_temperature_setting": { + "service": "mdi:thermometer" + }, + "set_persons_home": { + "service": "mdi:home" + }, + "set_person_away": { + "service": "mdi:walk" + }, + "register_webhook": { + "service": "mdi:link-variant" + }, + "unregister_webhook": { + "service": "mdi:link-variant-off" + } } } diff --git a/homeassistant/components/netgear_lte/icons.json b/homeassistant/components/netgear_lte/icons.json index 543d9bf4690..703d330512b 100644 --- a/homeassistant/components/netgear_lte/icons.json +++ b/homeassistant/components/netgear_lte/icons.json @@ -31,9 +31,17 @@ } }, "services": { - "delete_sms": "mdi:delete", - "set_option": "mdi:cog", - "connect_lte": "mdi:wifi", - "disconnect_lte": "mdi:wifi-off" + "delete_sms": { + "service": "mdi:delete" + }, + "set_option": { + "service": "mdi:cog" + }, + "connect_lte": { + "service": "mdi:wifi" + }, + "disconnect_lte": { + "service": "mdi:wifi-off" + } } } diff --git a/homeassistant/components/nexia/icons.json b/homeassistant/components/nexia/icons.json index 620d1a42c03..a2157f5c035 100644 --- a/homeassistant/components/nexia/icons.json +++ b/homeassistant/components/nexia/icons.json @@ -20,8 +20,14 @@ } }, "services": { - "set_aircleaner_mode": "mdi:air-filter", - "set_humidify_setpoint": "mdi:water-percent", - "set_hvac_run_mode": "mdi:hvac" + "set_aircleaner_mode": { + "service": "mdi:air-filter" + }, + "set_humidify_setpoint": { + "service": "mdi:water-percent" + }, + 
"set_hvac_run_mode": { + "service": "mdi:hvac" + } } } diff --git a/homeassistant/components/nissan_leaf/icons.json b/homeassistant/components/nissan_leaf/icons.json index 5da03ed5f1a..832fce90c08 100644 --- a/homeassistant/components/nissan_leaf/icons.json +++ b/homeassistant/components/nissan_leaf/icons.json @@ -1,6 +1,10 @@ { "services": { - "start_charge": "mdi:flash", - "update": "mdi:update" + "start_charge": { + "service": "mdi:flash" + }, + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/notify/icons.json b/homeassistant/components/notify/icons.json index ace8ee0c96b..e5ab34031f7 100644 --- a/homeassistant/components/notify/icons.json +++ b/homeassistant/components/notify/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "notify": "mdi:bell-ring", - "persistent_notification": "mdi:bell-badge", - "send_message": "mdi:message-arrow-right" + "notify": { + "service": "mdi:bell-ring" + }, + "persistent_notification": { + "service": "mdi:bell-badge" + }, + "send_message": { + "service": "mdi:message-arrow-right" + } } } diff --git a/homeassistant/components/nuki/icons.json b/homeassistant/components/nuki/icons.json index f74603cb9dc..ea1ff9c4fed 100644 --- a/homeassistant/components/nuki/icons.json +++ b/homeassistant/components/nuki/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "lock_n_go": "mdi:lock-clock", - "set_continuous_mode": "mdi:bell-cog" + "lock_n_go": { + "service": "mdi:lock-clock" + }, + "set_continuous_mode": { + "service": "mdi:bell-cog" + } } } diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index d74aa1bf408..a122aaecb09 100644 --- a/homeassistant/components/number/icons.json +++ b/homeassistant/components/number/icons.json @@ -149,6 +149,8 @@ } }, "services": { - "set_value": "mdi:numeric" + "set_value": { + "service": "mdi:numeric" + } } } diff --git a/homeassistant/components/nws/icons.json b/homeassistant/components/nws/icons.json index 8f91388a3ef..2aef3a2e614 100644 --- a/homeassistant/components/nws/icons.json +++ b/homeassistant/components/nws/icons.json @@ -1,5 +1,7 @@ { "services": { - "get_forecasts_extra": "mdi:weather-cloudy-clock" + "get_forecasts_extra": { + "service": "mdi:weather-cloudy-clock" + } } } diff --git a/homeassistant/components/nx584/icons.json b/homeassistant/components/nx584/icons.json index 76e5ae82e09..3bd8e485bfd 100644 --- a/homeassistant/components/nx584/icons.json +++ b/homeassistant/components/nx584/icons.json @@ -1,6 +1,10 @@ { "services": { - "bypass_zone": "mdi:wrench", - "unbypass_zone": "mdi:wrench" + "bypass_zone": { + "service": "mdi:wrench" + }, + "unbypass_zone": { + "service": "mdi:wrench" + } } } diff --git a/homeassistant/components/nzbget/icons.json b/homeassistant/components/nzbget/icons.json index a693e9fec86..ca4f4d584ae 100644 --- a/homeassistant/components/nzbget/icons.json +++ b/homeassistant/components/nzbget/icons.json @@ -1,7 +1,13 @@ { "services": { - "pause": "mdi:pause", - "resume": "mdi:play", - "set_speed": "mdi:speedometer" + "pause": { + "service": "mdi:pause" + }, + "resume": { + "service": "mdi:play" + }, + "set_speed": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/octoprint/icons.json b/homeassistant/components/octoprint/icons.json index 972ecabb765..720718fcede 100644 --- a/homeassistant/components/octoprint/icons.json +++ b/homeassistant/components/octoprint/icons.json @@ -1,5 +1,7 @@ { "services": { - "printer_connect": "mdi:lan-connect" + "printer_connect": { + "service": 
"mdi:lan-connect" + } } } diff --git a/homeassistant/components/ombi/icons.json b/homeassistant/components/ombi/icons.json index 4b3e32a1e13..15b8af56188 100644 --- a/homeassistant/components/ombi/icons.json +++ b/homeassistant/components/ombi/icons.json @@ -1,7 +1,13 @@ { "services": { - "submit_movie_request": "mdi:movie-roll", - "submit_tv_request": "mdi:television-classic", - "submit_music_request": "mdi:music" + "submit_movie_request": { + "service": "mdi:movie-roll" + }, + "submit_tv_request": { + "service": "mdi:television-classic" + }, + "submit_music_request": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/omnilogic/icons.json b/homeassistant/components/omnilogic/icons.json index ee5b5102177..8f0f13fe652 100644 --- a/homeassistant/components/omnilogic/icons.json +++ b/homeassistant/components/omnilogic/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_pump_speed": "mdi:water-pump" + "set_pump_speed": { + "service": "mdi:water-pump" + } } } diff --git a/homeassistant/components/onvif/icons.json b/homeassistant/components/onvif/icons.json index 4db9a9f9e49..d42985d34e8 100644 --- a/homeassistant/components/onvif/icons.json +++ b/homeassistant/components/onvif/icons.json @@ -13,6 +13,8 @@ } }, "services": { - "ptz": "mdi:pan" + "ptz": { + "service": "mdi:pan" + } } } diff --git a/homeassistant/components/openai_conversation/icons.json b/homeassistant/components/openai_conversation/icons.json index 7f736a5ff3b..3abecd640d1 100644 --- a/homeassistant/components/openai_conversation/icons.json +++ b/homeassistant/components/openai_conversation/icons.json @@ -1,5 +1,7 @@ { "services": { - "generate_image": "mdi:image-sync" + "generate_image": { + "service": "mdi:image-sync" + } } } diff --git a/homeassistant/components/openhome/icons.json b/homeassistant/components/openhome/icons.json index 081e97c3489..d75659f17da 100644 --- a/homeassistant/components/openhome/icons.json +++ b/homeassistant/components/openhome/icons.json @@ -1,5 +1,7 @@ { "services": { - "invoke_pin": "mdi:alarm-panel" + "invoke_pin": { + "service": "mdi:alarm-panel" + } } } diff --git a/homeassistant/components/opentherm_gw/icons.json b/homeassistant/components/opentherm_gw/icons.json index 13dbe0a70a1..37942aa0e63 100644 --- a/homeassistant/components/opentherm_gw/icons.json +++ b/homeassistant/components/opentherm_gw/icons.json @@ -1,16 +1,40 @@ { "services": { - "reset_gateway": "mdi:reload", - "set_central_heating_ovrd": "mdi:heat-wave", - "set_clock": "mdi:clock", - "set_control_setpoint": "mdi:thermometer-lines", - "set_hot_water_ovrd": "mdi:thermometer-lines", - "set_hot_water_setpoint": "mdi:thermometer-lines", - "set_gpio_mode": "mdi:cable-data", - "set_led_mode": "mdi:led-on", - "set_max_modulation": "mdi:thermometer-lines", - "set_outside_temperature": "mdi:thermometer-lines", - "set_setback_temperature": "mdi:thermometer-lines", - "send_transparent_command": "mdi:console" + "reset_gateway": { + "service": "mdi:reload" + }, + "set_central_heating_ovrd": { + "service": "mdi:heat-wave" + }, + "set_clock": { + "service": "mdi:clock" + }, + "set_control_setpoint": { + "service": "mdi:thermometer-lines" + }, + "set_hot_water_ovrd": { + "service": "mdi:thermometer-lines" + }, + "set_hot_water_setpoint": { + "service": "mdi:thermometer-lines" + }, + "set_gpio_mode": { + "service": "mdi:cable-data" + }, + "set_led_mode": { + "service": "mdi:led-on" + }, + "set_max_modulation": { + "service": "mdi:thermometer-lines" + }, + "set_outside_temperature": { + "service": "mdi:thermometer-lines" + }, 
+ "set_setback_temperature": { + "service": "mdi:thermometer-lines" + }, + "send_transparent_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/persistent_notification/icons.json b/homeassistant/components/persistent_notification/icons.json index 9c782bd7b21..30847357a47 100644 --- a/homeassistant/components/persistent_notification/icons.json +++ b/homeassistant/components/persistent_notification/icons.json @@ -1,7 +1,13 @@ { "services": { - "create": "mdi:message-badge", - "dismiss": "mdi:bell-off", - "dismiss_all": "mdi:notification-clear-all" + "create": { + "service": "mdi:message-badge" + }, + "dismiss": { + "service": "mdi:bell-off" + }, + "dismiss_all": { + "service": "mdi:notification-clear-all" + } } } diff --git a/homeassistant/components/person/icons.json b/homeassistant/components/person/icons.json index fbfd5be75d2..f645d9c2090 100644 --- a/homeassistant/components/person/icons.json +++ b/homeassistant/components/person/icons.json @@ -8,6 +8,8 @@ } }, "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/pi_hole/icons.json b/homeassistant/components/pi_hole/icons.json index 58f20da5a2d..3a45f8ab454 100644 --- a/homeassistant/components/pi_hole/icons.json +++ b/homeassistant/components/pi_hole/icons.json @@ -36,6 +36,8 @@ } }, "services": { - "disable": "mdi:server-off" + "disable": { + "service": "mdi:server-off" + } } } diff --git a/homeassistant/components/picnic/icons.json b/homeassistant/components/picnic/icons.json index d8f99153f33..78803b6d263 100644 --- a/homeassistant/components/picnic/icons.json +++ b/homeassistant/components/picnic/icons.json @@ -57,6 +57,8 @@ } }, "services": { - "add_product": "mdi:cart-plus" + "add_product": { + "service": "mdi:cart-plus" + } } } diff --git a/homeassistant/components/pilight/icons.json b/homeassistant/components/pilight/icons.json index c1b8e741e45..cbc48cf2105 100644 --- a/homeassistant/components/pilight/icons.json +++ b/homeassistant/components/pilight/icons.json @@ -1,5 +1,7 @@ { "services": { - "send": "mdi:send" + "send": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/plex/icons.json b/homeassistant/components/plex/icons.json index 03bc835d2f6..2d3a7342ad2 100644 --- a/homeassistant/components/plex/icons.json +++ b/homeassistant/components/plex/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "refresh_library": "mdi:refresh", - "scan_for_clients": "mdi:database-refresh" + "refresh_library": { + "service": "mdi:refresh" + }, + "scan_for_clients": { + "service": "mdi:database-refresh" + } } } diff --git a/homeassistant/components/profiler/icons.json b/homeassistant/components/profiler/icons.json index 4dda003c186..c1f996b6eb1 100644 --- a/homeassistant/components/profiler/icons.json +++ b/homeassistant/components/profiler/icons.json @@ -1,16 +1,40 @@ { "services": { - "start": "mdi:play", - "memory": "mdi:memory", - "start_log_objects": "mdi:invoice-text-plus", - "stop_log_objects": "mdi:invoice-text-remove", - "dump_log_objects": "mdi:invoice-export-outline", - "start_log_object_sources": "mdi:play", - "stop_log_object_sources": "mdi:stop", - "lru_stats": "mdi:chart-areaspline", - "log_current_tasks": "mdi:format-list-bulleted", - "log_thread_frames": "mdi:format-list-bulleted", - "log_event_loop_scheduled": "mdi:calendar-clock", - "set_asyncio_debug": "mdi:bug-check" + "start": { + "service": "mdi:play" + }, + "memory": { + "service": "mdi:memory" + }, + "start_log_objects": { + "service": 
"mdi:invoice-text-plus" + }, + "stop_log_objects": { + "service": "mdi:invoice-text-remove" + }, + "dump_log_objects": { + "service": "mdi:invoice-export-outline" + }, + "start_log_object_sources": { + "service": "mdi:play" + }, + "stop_log_object_sources": { + "service": "mdi:stop" + }, + "lru_stats": { + "service": "mdi:chart-areaspline" + }, + "log_current_tasks": { + "service": "mdi:format-list-bulleted" + }, + "log_thread_frames": { + "service": "mdi:format-list-bulleted" + }, + "log_event_loop_scheduled": { + "service": "mdi:calendar-clock" + }, + "set_asyncio_debug": { + "service": "mdi:bug-check" + } } } diff --git a/homeassistant/components/prosegur/icons.json b/homeassistant/components/prosegur/icons.json index 33cddefdaea..8f175ab9056 100644 --- a/homeassistant/components/prosegur/icons.json +++ b/homeassistant/components/prosegur/icons.json @@ -1,5 +1,7 @@ { "services": { - "request_image": "mdi:image-sync" + "request_image": { + "service": "mdi:image-sync" + } } } diff --git a/homeassistant/components/ps4/icons.json b/homeassistant/components/ps4/icons.json index 8da5909213b..21f8405f816 100644 --- a/homeassistant/components/ps4/icons.json +++ b/homeassistant/components/ps4/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "send_command": "mdi:console" + "send_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/python_script/icons.json b/homeassistant/components/python_script/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/python_script/icons.json +++ b/homeassistant/components/python_script/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/qbittorrent/icons.json b/homeassistant/components/qbittorrent/icons.json index 68fc1020dae..cede127ebe8 100644 --- a/homeassistant/components/qbittorrent/icons.json +++ b/homeassistant/components/qbittorrent/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "get_torrents": "mdi:file-arrow-up-down-outline", - "get_all_torrents": "mdi:file-arrow-up-down-outline" + "get_torrents": { + "service": "mdi:file-arrow-up-down-outline" + }, + "get_all_torrents": { + "service": "mdi:file-arrow-up-down-outline" + } } } diff --git a/homeassistant/components/qvr_pro/icons.json b/homeassistant/components/qvr_pro/icons.json index 556a8d40752..3b57387d251 100644 --- a/homeassistant/components/qvr_pro/icons.json +++ b/homeassistant/components/qvr_pro/icons.json @@ -1,6 +1,10 @@ { "services": { - "start_record": "mdi:record-rec", - "stop_record": "mdi:stop" + "start_record": { + "service": "mdi:record-rec" + }, + "stop_record": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/rachio/icons.json b/homeassistant/components/rachio/icons.json index dfab8788fc8..df30929ab4c 100644 --- a/homeassistant/components/rachio/icons.json +++ b/homeassistant/components/rachio/icons.json @@ -10,11 +10,23 @@ } }, "services": { - "set_zone_moisture_percent": "mdi:water-percent", - "start_multiple_zone_schedule": "mdi:play", - "pause_watering": "mdi:pause", - "resume_watering": "mdi:play", - "stop_watering": "mdi:stop", - "start_watering": "mdi:water" + "set_zone_moisture_percent": { + "service": "mdi:water-percent" + }, + "start_multiple_zone_schedule": { + "service": "mdi:play" + }, + "pause_watering": { + "service": "mdi:pause" + }, + "resume_watering": { + "service": "mdi:play" + }, + "stop_watering": { + "service": "mdi:stop" + }, + "start_watering": { + "service": "mdi:water" + } } } 
diff --git a/homeassistant/components/rainbird/icons.json b/homeassistant/components/rainbird/icons.json index 79d2256f184..61c09f74e88 100644 --- a/homeassistant/components/rainbird/icons.json +++ b/homeassistant/components/rainbird/icons.json @@ -22,7 +22,11 @@ } }, "services": { - "start_irrigation": "mdi:water", - "set_rain_delay": "mdi:water-sync" + "start_irrigation": { + "service": "mdi:water" + }, + "set_rain_delay": { + "service": "mdi:water-sync" + } } } diff --git a/homeassistant/components/rainmachine/icons.json b/homeassistant/components/rainmachine/icons.json index 32988081a18..ca85d81346e 100644 --- a/homeassistant/components/rainmachine/icons.json +++ b/homeassistant/components/rainmachine/icons.json @@ -70,16 +70,38 @@ } }, "services": { - "pause_watering": "mdi:pause", - "restrict_watering": "mdi:cancel", - "start_program": "mdi:play", - "start_zone": "mdi:play", - "stop_all": "mdi:stop", - "stop_program": "mdi:stop", - "stop_zone": "mdi:stop", - "unpause_watering": "mdi:play-pause", - "push_flow_meter_data": "mdi:database-arrow-up", - "push_weather_data": "mdi:database-arrow-up", - "unrestrict_watering": "mdi:check" + "pause_watering": { + "service": "mdi:pause" + }, + "restrict_watering": { + "service": "mdi:cancel" + }, + "start_program": { + "service": "mdi:play" + }, + "start_zone": { + "service": "mdi:play" + }, + "stop_all": { + "service": "mdi:stop" + }, + "stop_program": { + "service": "mdi:stop" + }, + "stop_zone": { + "service": "mdi:stop" + }, + "unpause_watering": { + "service": "mdi:play-pause" + }, + "push_flow_meter_data": { + "service": "mdi:database-arrow-up" + }, + "push_weather_data": { + "service": "mdi:database-arrow-up" + }, + "unrestrict_watering": { + "service": "mdi:check" + } } } diff --git a/homeassistant/components/recorder/icons.json b/homeassistant/components/recorder/icons.json index 1090401abd5..9e41637184a 100644 --- a/homeassistant/components/recorder/icons.json +++ b/homeassistant/components/recorder/icons.json @@ -1,8 +1,16 @@ { "services": { - "purge": "mdi:database-sync", - "purge_entities": "mdi:database-sync", - "disable": "mdi:database-off", - "enable": "mdi:database" + "purge": { + "service": "mdi:database-sync" + }, + "purge_entities": { + "service": "mdi:database-sync" + }, + "disable": { + "service": "mdi:database-off" + }, + "enable": { + "service": "mdi:database" + } } } diff --git a/homeassistant/components/remember_the_milk/icons.json b/homeassistant/components/remember_the_milk/icons.json index 3ca17113fb8..04502aea5ef 100644 --- a/homeassistant/components/remember_the_milk/icons.json +++ b/homeassistant/components/remember_the_milk/icons.json @@ -1,6 +1,10 @@ { "services": { - "create_task": "mdi:check", - "complete_task": "mdi:check-all" + "create_task": { + "service": "mdi:check" + }, + "complete_task": { + "service": "mdi:check-all" + } } } diff --git a/homeassistant/components/remote/icons.json b/homeassistant/components/remote/icons.json index 07526a4bc79..43a7f6ee7b6 100644 --- a/homeassistant/components/remote/icons.json +++ b/homeassistant/components/remote/icons.json @@ -8,11 +8,23 @@ } }, "services": { - "delete_command": "mdi:delete", - "learn_command": "mdi:school", - "send_command": "mdi:remote", - "toggle": "mdi:remote", - "turn_off": "mdi:remote-off", - "turn_on": "mdi:remote" + "delete_command": { + "service": "mdi:delete" + }, + "learn_command": { + "service": "mdi:school" + }, + "send_command": { + "service": "mdi:remote" + }, + "toggle": { + "service": "mdi:remote" + }, + "turn_off": { + "service": 
"mdi:remote-off" + }, + "turn_on": { + "service": "mdi:remote" + } } } diff --git a/homeassistant/components/renault/icons.json b/homeassistant/components/renault/icons.json index 75356fda411..883725eb601 100644 --- a/homeassistant/components/renault/icons.json +++ b/homeassistant/components/renault/icons.json @@ -64,8 +64,14 @@ } }, "services": { - "ac_start": "mdi:hvac", - "ac_cancel": "mdi:hvac-off", - "charge_set_schedules": "mdi:calendar-clock" + "ac_start": { + "service": "mdi:hvac" + }, + "ac_cancel": { + "service": "mdi:hvac-off" + }, + "charge_set_schedules": { + "service": "mdi:calendar-clock" + } } } diff --git a/homeassistant/components/renson/icons.json b/homeassistant/components/renson/icons.json index b7b1fdfdd8c..b558759a0dd 100644 --- a/homeassistant/components/renson/icons.json +++ b/homeassistant/components/renson/icons.json @@ -17,8 +17,14 @@ } }, "services": { - "set_timer_level": "mdi:timer", - "set_breeze": "mdi:weather-windy", - "set_pollution_settings": "mdi:air-filter" + "set_timer_level": { + "service": "mdi:timer" + }, + "set_breeze": { + "service": "mdi:weather-windy" + }, + "set_pollution_settings": { + "service": "mdi:air-filter" + } } } diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index f7729789c4e..f1c6f88a0f0 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -300,7 +300,11 @@ } }, "services": { - "ptz_move": "mdi:pan", - "play_chime": "mdi:music" + "ptz_move": { + "service": "mdi:pan" + }, + "play_chime": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/rest/icons.json b/homeassistant/components/rest/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/rest/icons.json +++ b/homeassistant/components/rest/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/rest_command/icons.json b/homeassistant/components/rest_command/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/rest_command/icons.json +++ b/homeassistant/components/rest_command/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/rflink/icons.json b/homeassistant/components/rflink/icons.json index 988b048eee7..de2942f44ac 100644 --- a/homeassistant/components/rflink/icons.json +++ b/homeassistant/components/rflink/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_command": "mdi:send" + "send_command": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/rfxtrx/icons.json b/homeassistant/components/rfxtrx/icons.json index c1b8e741e45..cbc48cf2105 100644 --- a/homeassistant/components/rfxtrx/icons.json +++ b/homeassistant/components/rfxtrx/icons.json @@ -1,5 +1,7 @@ { "services": { - "send": "mdi:send" + "send": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/ring/icons.json b/homeassistant/components/ring/icons.json index 9dd31fd0fd1..5820fbf77c8 100644 --- a/homeassistant/components/ring/icons.json +++ b/homeassistant/components/ring/icons.json @@ -39,6 +39,8 @@ } }, "services": { - "update": "mdi:refresh" + "update": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/roborock/icons.json b/homeassistant/components/roborock/icons.json index 6a615ab82a1..c7df6d35460 100644 --- a/homeassistant/components/roborock/icons.json +++ 
b/homeassistant/components/roborock/icons.json @@ -119,6 +119,8 @@ } }, "services": { - "get_maps": "mdi:floor-plan" + "get_maps": { + "service": "mdi:floor-plan" + } } } diff --git a/homeassistant/components/roku/icons.json b/homeassistant/components/roku/icons.json index 02e5d1e5698..355b5a715e5 100644 --- a/homeassistant/components/roku/icons.json +++ b/homeassistant/components/roku/icons.json @@ -32,6 +32,8 @@ } }, "services": { - "search": "mdi:magnify" + "search": { + "service": "mdi:magnify" + } } } diff --git a/homeassistant/components/roon/icons.json b/homeassistant/components/roon/icons.json index 571ca3f45a2..1e1dd42b765 100644 --- a/homeassistant/components/roon/icons.json +++ b/homeassistant/components/roon/icons.json @@ -1,5 +1,7 @@ { "services": { - "transfer": "mdi:monitor-multiple" + "transfer": { + "service": "mdi:monitor-multiple" + } } } diff --git a/homeassistant/components/route53/icons.json b/homeassistant/components/route53/icons.json index 30a854991f0..5afe13ce949 100644 --- a/homeassistant/components/route53/icons.json +++ b/homeassistant/components/route53/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_records": "mdi:database-refresh" + "update_records": { + "service": "mdi:database-refresh" + } } } diff --git a/homeassistant/components/sabnzbd/icons.json b/homeassistant/components/sabnzbd/icons.json index a693e9fec86..ca4f4d584ae 100644 --- a/homeassistant/components/sabnzbd/icons.json +++ b/homeassistant/components/sabnzbd/icons.json @@ -1,7 +1,13 @@ { "services": { - "pause": "mdi:pause", - "resume": "mdi:play", - "set_speed": "mdi:speedometer" + "pause": { + "service": "mdi:pause" + }, + "resume": { + "service": "mdi:play" + }, + "set_speed": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/scene/icons.json b/homeassistant/components/scene/icons.json index 563c0f31ddc..b08d06fb434 100644 --- a/homeassistant/components/scene/icons.json +++ b/homeassistant/components/scene/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "turn_on": "mdi:power", - "reload": "mdi:reload", - "apply": "mdi:check", - "create": "mdi:plus", - "delete": "mdi:delete" + "turn_on": { + "service": "mdi:power" + }, + "reload": { + "service": "mdi:reload" + }, + "apply": { + "service": "mdi:check" + }, + "create": { + "service": "mdi:plus" + }, + "delete": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/schedule/icons.json b/homeassistant/components/schedule/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/schedule/icons.json +++ b/homeassistant/components/schedule/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/screenlogic/icons.json b/homeassistant/components/screenlogic/icons.json index d8d021c20e6..ef8dc46f61d 100644 --- a/homeassistant/components/screenlogic/icons.json +++ b/homeassistant/components/screenlogic/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_color_mode": "mdi:palette", - "start_super_chlorination": "mdi:pool", - "stop_super_chlorination": "mdi:pool" + "set_color_mode": { + "service": "mdi:palette" + }, + "start_super_chlorination": { + "service": "mdi:pool" + }, + "stop_super_chlorination": { + "service": "mdi:pool" + } } } diff --git a/homeassistant/components/script/icons.json b/homeassistant/components/script/icons.json index d253d0fd829..7e160941c05 100644 --- a/homeassistant/components/script/icons.json +++ b/homeassistant/components/script/icons.json @@ -8,9 +8,17 
@@ } }, "services": { - "reload": "mdi:reload", - "turn_on": "mdi:script-text-play", - "turn_off": "mdi:script-text", - "toggle": "mdi:script-text" + "reload": { + "service": "mdi:reload" + }, + "turn_on": { + "service": "mdi:script-text-play" + }, + "turn_off": { + "service": "mdi:script-text" + }, + "toggle": { + "service": "mdi:script-text" + } } } diff --git a/homeassistant/components/select/icons.json b/homeassistant/components/select/icons.json index 1b440d2a1de..fbd1d4568f1 100644 --- a/homeassistant/components/select/icons.json +++ b/homeassistant/components/select/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "select_first": "mdi:format-list-bulleted", - "select_last": "mdi:format-list-bulleted", - "select_next": "mdi:format-list-bulleted", - "select_option": "mdi:format-list-bulleted", - "select_previous": "mdi:format-list-bulleted" + "select_first": { + "service": "mdi:format-list-bulleted" + }, + "select_last": { + "service": "mdi:format-list-bulleted" + }, + "select_next": { + "service": "mdi:format-list-bulleted" + }, + "select_option": { + "service": "mdi:format-list-bulleted" + }, + "select_previous": { + "service": "mdi:format-list-bulleted" + } } } diff --git a/homeassistant/components/sensibo/icons.json b/homeassistant/components/sensibo/icons.json index e26840e48eb..ccab3c198d2 100644 --- a/homeassistant/components/sensibo/icons.json +++ b/homeassistant/components/sensibo/icons.json @@ -45,10 +45,20 @@ } }, "services": { - "assume_state": "mdi:shape-outline", - "enable_timer": "mdi:timer-play", - "enable_pure_boost": "mdi:air-filter", - "full_state": "mdi:shape", - "enable_climate_react": "mdi:wizard-hat" + "assume_state": { + "service": "mdi:shape-outline" + }, + "enable_timer": { + "service": "mdi:timer-play" + }, + "enable_pure_boost": { + "service": "mdi:air-filter" + }, + "full_state": { + "service": "mdi:shape" + }, + "enable_climate_react": { + "service": "mdi:wizard-hat" + } } } diff --git a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json index 78ca65edc4d..94ca8cd535a 100644 --- a/homeassistant/components/seventeentrack/icons.json +++ b/homeassistant/components/seventeentrack/icons.json @@ -28,6 +28,8 @@ } }, "services": { - "get_packages": "mdi:package" + "get_packages": { + "service": "mdi:package" + } } } diff --git a/homeassistant/components/sharkiq/icons.json b/homeassistant/components/sharkiq/icons.json index 13fd58ce66d..e58a317f503 100644 --- a/homeassistant/components/sharkiq/icons.json +++ b/homeassistant/components/sharkiq/icons.json @@ -1,5 +1,7 @@ { "services": { - "clean_room": "mdi:robot-vacuum" + "clean_room": { + "service": "mdi:robot-vacuum" + } } } diff --git a/homeassistant/components/shopping_list/icons.json b/homeassistant/components/shopping_list/icons.json index 7de3eb1b948..9b3d8a08a79 100644 --- a/homeassistant/components/shopping_list/icons.json +++ b/homeassistant/components/shopping_list/icons.json @@ -7,13 +7,29 @@ } }, "services": { - "add_item": "mdi:cart-plus", - "remove_item": "mdi:cart-remove", - "complete_item": "mdi:cart-check", - "incomplete_item": "mdi:cart-off", - "complete_all": "mdi:cart-check", - "incomplete_all": "mdi:cart-off", - "clear_completed_items": "mdi:cart-remove", - "sort": "mdi:sort" + "add_item": { + "service": "mdi:cart-plus" + }, + "remove_item": { + "service": "mdi:cart-remove" + }, + "complete_item": { + "service": "mdi:cart-check" + }, + "incomplete_item": { + "service": "mdi:cart-off" + }, + "complete_all": { + "service": "mdi:cart-check" + 
},
+    "incomplete_all": {
+      "service": "mdi:cart-off"
+    },
+    "clear_completed_items": {
+      "service": "mdi:cart-remove"
+    },
+    "sort": {
+      "service": "mdi:sort"
+    }
   }
 }
diff --git a/homeassistant/components/simplisafe/icons.json b/homeassistant/components/simplisafe/icons.json
index 60ddb7f0982..8552993210f 100644
--- a/homeassistant/components/simplisafe/icons.json
+++ b/homeassistant/components/simplisafe/icons.json
@@ -1,7 +1,13 @@
 {
   "services": {
-    "remove_pin": "mdi:alarm-panel-outline",
-    "set_pin": "mdi:alarm-panel",
-    "set_system_properties": "mdi:cog"
+    "remove_pin": {
+      "service": "mdi:alarm-panel-outline"
+    },
+    "set_pin": {
+      "service": "mdi:alarm-panel"
+    },
+    "set_system_properties": {
+      "service": "mdi:cog"
+    }
   }
 }
diff --git a/homeassistant/components/siren/icons.json b/homeassistant/components/siren/icons.json
index 0083a2540c7..75caf6417da 100644
--- a/homeassistant/components/siren/icons.json
+++ b/homeassistant/components/siren/icons.json
@@ -5,8 +5,14 @@
     }
   },
   "services": {
-    "toggle": "mdi:bullhorn",
-    "turn_off": "mdi:bullhorn",
-    "turn_on": "mdi:bullhorn"
+    "toggle": {
+      "service": "mdi:bullhorn"
+    },
+    "turn_off": {
+      "service": "mdi:bullhorn"
+    },
+    "turn_on": {
+      "service": "mdi:bullhorn"
+    }
   }
 }
diff --git a/homeassistant/components/smarttub/icons.json b/homeassistant/components/smarttub/icons.json
index 7ae96d03383..2b89445754c 100644
--- a/homeassistant/components/smarttub/icons.json
+++ b/homeassistant/components/smarttub/icons.json
@@ -1,8 +1,16 @@
 {
   "services": {
-    "set_primary_filtration": "mdi:filter",
-    "set_secondary_filtration": "mdi:filter-multiple",
-    "snooze_reminder": "mdi:timer-pause",
-    "reset_reminder": "mdi:timer-sync"
+    "set_primary_filtration": {
+      "service": "mdi:filter"
+    },
+    "set_secondary_filtration": {
+      "service": "mdi:filter-multiple"
+    },
+    "snooze_reminder": {
+      "service": "mdi:timer-pause"
+    },
+    "reset_reminder": {
+      "service": "mdi:timer-sync"
+    }
   }
 }

From 38ef216894a125f6eca79f4fcef85cb41bed9c4a Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Wed, 28 Aug 2024 13:48:13 +0200
Subject: [PATCH 1164/1635] Update icons.json to new service schema part 4 (#124771)

---
 homeassistant/components/smtp/icons.json | 4 +-
 homeassistant/components/snapcast/icons.json | 20 +++-
 homeassistant/components/snips/icons.json | 16 +++-
 homeassistant/components/snooz/icons.json | 8 +-
 homeassistant/components/songpal/icons.json | 4 +-
 homeassistant/components/sonos/icons.json | 28 ++++--
 .../components/soundtouch/icons.json | 16 +++-
 .../components/squeezebox/icons.json | 16 +++-
 homeassistant/components/starline/icons.json | 12 ++-
 .../components/statistics/icons.json | 4 +-
 .../components/streamlabswater/icons.json | 4 +-
 homeassistant/components/subaru/icons.json | 4 +-
 .../components/surepetcare/icons.json | 8 +-
 .../swiss_public_transport/icons.json | 4 +-
 homeassistant/components/switch/icons.json | 12 ++-
 .../components/switcher_kis/icons.json | 8 +-
 .../components/synology_dsm/icons.json | 8 +-
 .../components/system_bridge/icons.json | 28 ++++--
 .../components/system_log/icons.json | 8 +-
 homeassistant/components/tado/icons.json | 16 +++-
 homeassistant/components/telegram/icons.json | 4 +-
 .../components/telegram_bot/icons.json | 56 ++++++++---
 homeassistant/components/template/icons.json | 4 +-
 .../components/teslemetry/icons.json | 25 +++--
 homeassistant/components/text/icons.json | 4 +-
 homeassistant/components/tibber/icons.json | 4 +-
 homeassistant/components/time/icons.json | 4 +-
 homeassistant/components/timer/icons.json | 24 +++--
 homeassistant/components/todo/icons.json | 20 +++-
 homeassistant/components/todoist/icons.json | 4 +-
 homeassistant/components/toon/icons.json | 4 +-
 .../components/totalconnect/icons.json | 8 +-
 homeassistant/components/tplink/icons.json | 8 +-
 .../components/transmission/icons.json | 16 +++-
 homeassistant/components/trend/icons.json | 4 +-
 homeassistant/components/tts/icons.json | 12 ++-
 homeassistant/components/unifi/icons.json | 8 +-
 .../components/unifiprotect/icons.json | 16 +++-
 homeassistant/components/universal/icons.json | 4 +-
 homeassistant/components/upb/icons.json | 32 +++++--
 homeassistant/components/update/icons.json | 12 ++-
 .../components/utility_meter/icons.json | 8 +-
 homeassistant/components/vacuum/icons.json | 48 +++++++---
 homeassistant/components/vallox/icons.json | 12 ++-
 homeassistant/components/valve/icons.json | 20 +++-
 homeassistant/components/velbus/icons.json | 16 +++-
 homeassistant/components/velux/icons.json | 4 +-
 homeassistant/components/verisure/icons.json | 12 ++-
 homeassistant/components/vesync/icons.json | 4 +-
 homeassistant/components/vicare/icons.json | 4 +-
 homeassistant/components/vizio/icons.json | 4 +-
 .../components/wake_on_lan/icons.json | 4 +-
 .../components/water_heater/icons.json | 20 +++-
 .../components/waze_travel_time/icons.json | 4 +-
 homeassistant/components/weather/icons.json | 8 +-
 homeassistant/components/webostv/icons.json | 12 ++-
 homeassistant/components/wemo/icons.json | 8 +-
 homeassistant/components/wilight/icons.json | 12 ++-
 homeassistant/components/workday/icons.json | 4 +-
 .../components/xiaomi_aqara/icons.json | 16 +++-
 .../components/xiaomi_miio/icons.json | 96 ++++++++++++++-----
 homeassistant/components/yamaha/icons.json | 12 ++-
 homeassistant/components/yardian/icons.json | 4 +-
 homeassistant/components/yeelight/icons.json | 32 +++++--
 homeassistant/components/yolink/icons.json | 4 +-
 homeassistant/components/zha/icons.json | 48 +++++++---
 homeassistant/components/zone/icons.json | 4 +-
 .../components/zoneminder/icons.json | 4 +-
 homeassistant/components/zwave_js/icons.json | 48 +++++++---
 69 files changed, 729 insertions(+), 244 deletions(-)

diff --git a/homeassistant/components/smtp/icons.json b/homeassistant/components/smtp/icons.json
index a03163179cb..a9829425570 100644
--- a/homeassistant/components/smtp/icons.json
+++ b/homeassistant/components/smtp/icons.json
@@ -1,5 +1,7 @@
 {
   "services": {
-    "reload": "mdi:reload"
+    "reload": {
+      "service": "mdi:reload"
+    }
   }
 }
diff --git a/homeassistant/components/snapcast/icons.json b/homeassistant/components/snapcast/icons.json
index bdc20665282..d6511d768e2 100644
--- a/homeassistant/components/snapcast/icons.json
+++ b/homeassistant/components/snapcast/icons.json
@@ -1,9 +1,19 @@
 {
   "services": {
-    "join": "mdi:music-note-plus",
-    "unjoin": "mdi:music-note-minus",
-    "snapshot": "mdi:camera",
-    "restore": "mdi:camera-retake",
-    "set_latency": "mdi:camera-timer"
+    "join": {
+      "service": "mdi:music-note-plus"
+    },
+    "unjoin": {
+      "service": "mdi:music-note-minus"
+    },
+    "snapshot": {
+      "service": "mdi:camera"
+    },
+    "restore": {
+      "service": "mdi:camera-retake"
+    },
+    "set_latency": {
+      "service": "mdi:camera-timer"
+    }
   }
 }
diff --git a/homeassistant/components/snips/icons.json b/homeassistant/components/snips/icons.json
index 0d465465fe4..9c86a7ad5b3 100644
--- a/homeassistant/components/snips/icons.json
+++ b/homeassistant/components/snips/icons.json
@@ -1,8 +1,16 @@
 {
   "services": {
-    "feedback_off": "mdi:message-alert",
-    "feedback_on": "mdi:message-alert",
-    "say":
"mdi:chat", - "say_action": "mdi:account-voice" + "feedback_off": { + "service": "mdi:message-alert" + }, + "feedback_on": { + "service": "mdi:message-alert" + }, + "say": { + "service": "mdi:chat" + }, + "say_action": { + "service": "mdi:account-voice" + } } } diff --git a/homeassistant/components/snooz/icons.json b/homeassistant/components/snooz/icons.json index d9cccfff4ea..be7d2714a20 100644 --- a/homeassistant/components/snooz/icons.json +++ b/homeassistant/components/snooz/icons.json @@ -1,6 +1,10 @@ { "services": { - "transition_on": "mdi:blur", - "transition_off": "mdi:blur-off" + "transition_on": { + "service": "mdi:blur" + }, + "transition_off": { + "service": "mdi:blur-off" + } } } diff --git a/homeassistant/components/songpal/icons.json b/homeassistant/components/songpal/icons.json index 1c831fbbd00..6e7cf359c23 100644 --- a/homeassistant/components/songpal/icons.json +++ b/homeassistant/components/songpal/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_sound_setting": "mdi:volume-high" + "set_sound_setting": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/sonos/icons.json b/homeassistant/components/sonos/icons.json index e2545358ba6..e7403b45453 100644 --- a/homeassistant/components/sonos/icons.json +++ b/homeassistant/components/sonos/icons.json @@ -44,12 +44,26 @@ } }, "services": { - "snapshot": "mdi:camera", - "restore": "mdi:camera-retake", - "set_sleep_timer": "mdi:alarm", - "clear_sleep_timer": "mdi:alarm-off", - "play_queue": "mdi:play", - "remove_from_queue": "mdi:playlist-remove", - "update_alarm": "mdi:alarm" + "snapshot": { + "service": "mdi:camera" + }, + "restore": { + "service": "mdi:camera-retake" + }, + "set_sleep_timer": { + "service": "mdi:alarm" + }, + "clear_sleep_timer": { + "service": "mdi:alarm-off" + }, + "play_queue": { + "service": "mdi:play" + }, + "remove_from_queue": { + "service": "mdi:playlist-remove" + }, + "update_alarm": { + "service": "mdi:alarm" + } } } diff --git a/homeassistant/components/soundtouch/icons.json b/homeassistant/components/soundtouch/icons.json index 0dd41f4f881..721a5c77032 100644 --- a/homeassistant/components/soundtouch/icons.json +++ b/homeassistant/components/soundtouch/icons.json @@ -1,8 +1,16 @@ { "services": { - "play_everywhere": "mdi:play", - "create_zone": "mdi:plus", - "add_zone_slave": "mdi:plus", - "remove_zone_slave": "mdi:minus" + "play_everywhere": { + "service": "mdi:play" + }, + "create_zone": { + "service": "mdi:plus" + }, + "add_zone_slave": { + "service": "mdi:plus" + }, + "remove_zone_slave": { + "service": "mdi:minus" + } } } diff --git a/homeassistant/components/squeezebox/icons.json b/homeassistant/components/squeezebox/icons.json index d58f0d5634d..b11311e1292 100644 --- a/homeassistant/components/squeezebox/icons.json +++ b/homeassistant/components/squeezebox/icons.json @@ -1,8 +1,16 @@ { "services": { - "call_method": "mdi:console", - "call_query": "mdi:database", - "sync": "mdi:sync", - "unsync": "mdi:sync-off" + "call_method": { + "service": "mdi:console" + }, + "call_query": { + "service": "mdi:database" + }, + "sync": { + "service": "mdi:sync" + }, + "unsync": { + "service": "mdi:sync-off" + } } } diff --git a/homeassistant/components/starline/icons.json b/homeassistant/components/starline/icons.json index b98c4178af1..8a4f85a89bf 100644 --- a/homeassistant/components/starline/icons.json +++ b/homeassistant/components/starline/icons.json @@ -72,8 +72,14 @@ } }, "services": { - "update_state": "mdi:reload", - "set_scan_interval": "mdi:timer", - 
"set_scan_obd_interval": "mdi:timer" + "update_state": { + "service": "mdi:reload" + }, + "set_scan_interval": { + "service": "mdi:timer" + }, + "set_scan_obd_interval": { + "service": "mdi:timer" + } } } diff --git a/homeassistant/components/statistics/icons.json b/homeassistant/components/statistics/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/statistics/icons.json +++ b/homeassistant/components/statistics/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/streamlabswater/icons.json b/homeassistant/components/streamlabswater/icons.json index aebe224b35e..0cc64fd24cb 100644 --- a/homeassistant/components/streamlabswater/icons.json +++ b/homeassistant/components/streamlabswater/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_away_mode": "mdi:home" + "set_away_mode": { + "service": "mdi:home" + } } } diff --git a/homeassistant/components/subaru/icons.json b/homeassistant/components/subaru/icons.json index f6c3597c3c3..ca8648296c7 100644 --- a/homeassistant/components/subaru/icons.json +++ b/homeassistant/components/subaru/icons.json @@ -24,6 +24,8 @@ } }, "services": { - "unlock_specific_door": "mdi:lock-open-variant" + "unlock_specific_door": { + "service": "mdi:lock-open-variant" + } } } diff --git a/homeassistant/components/surepetcare/icons.json b/homeassistant/components/surepetcare/icons.json index 1db15b599df..0daad594c48 100644 --- a/homeassistant/components/surepetcare/icons.json +++ b/homeassistant/components/surepetcare/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_lock_state": "mdi:lock", - "set_pet_location": "mdi:dog" + "set_lock_state": { + "service": "mdi:lock" + }, + "set_pet_location": { + "service": "mdi:dog" + } } } diff --git a/homeassistant/components/swiss_public_transport/icons.json b/homeassistant/components/swiss_public_transport/icons.json index 7c2e5436834..0f868c18c1f 100644 --- a/homeassistant/components/swiss_public_transport/icons.json +++ b/homeassistant/components/swiss_public_transport/icons.json @@ -25,6 +25,8 @@ } }, "services": { - "fetch_connections": "mdi:bus-clock" + "fetch_connections": { + "service": "mdi:bus-clock" + } } } diff --git a/homeassistant/components/switch/icons.json b/homeassistant/components/switch/icons.json index fbc1af5a126..10299a2ffc8 100644 --- a/homeassistant/components/switch/icons.json +++ b/homeassistant/components/switch/icons.json @@ -20,8 +20,14 @@ } }, "services": { - "toggle": "mdi:toggle-switch-variant", - "turn_off": "mdi:toggle-switch-variant-off", - "turn_on": "mdi:toggle-switch-variant" + "toggle": { + "service": "mdi:toggle-switch-variant" + }, + "turn_off": { + "service": "mdi:toggle-switch-variant-off" + }, + "turn_on": { + "service": "mdi:toggle-switch-variant" + } } } diff --git a/homeassistant/components/switcher_kis/icons.json b/homeassistant/components/switcher_kis/icons.json index 4d3576f1a99..6ca8e0e8351 100644 --- a/homeassistant/components/switcher_kis/icons.json +++ b/homeassistant/components/switcher_kis/icons.json @@ -24,7 +24,11 @@ } }, "services": { - "set_auto_off": "mdi:progress-clock", - "turn_on_with_timer": "mdi:timer" + "set_auto_off": { + "service": "mdi:progress-clock" + }, + "turn_on_with_timer": { + "service": "mdi:timer" + } } } diff --git a/homeassistant/components/synology_dsm/icons.json b/homeassistant/components/synology_dsm/icons.json index 8e6d2b17f02..3c4d028dc7a 100644 --- a/homeassistant/components/synology_dsm/icons.json +++ 
b/homeassistant/components/synology_dsm/icons.json @@ -78,7 +78,11 @@ } }, "services": { - "reboot": "mdi:restart", - "shutdown": "mdi:power" + "reboot": { + "service": "mdi:restart" + }, + "shutdown": { + "service": "mdi:power" + } } } diff --git a/homeassistant/components/system_bridge/icons.json b/homeassistant/components/system_bridge/icons.json index cc648889f0b..a03f77049a3 100644 --- a/homeassistant/components/system_bridge/icons.json +++ b/homeassistant/components/system_bridge/icons.json @@ -1,11 +1,25 @@ { "services": { - "get_process_by_id": "mdi:console", - "get_processes_by_name": "mdi:console", - "open_path": "mdi:folder-open", - "open_url": "mdi:web", - "send_keypress": "mdi:keyboard", - "send_text": "mdi:keyboard", - "power_command": "mdi:power" + "get_process_by_id": { + "service": "mdi:console" + }, + "get_processes_by_name": { + "service": "mdi:console" + }, + "open_path": { + "service": "mdi:folder-open" + }, + "open_url": { + "service": "mdi:web" + }, + "send_keypress": { + "service": "mdi:keyboard" + }, + "send_text": { + "service": "mdi:keyboard" + }, + "power_command": { + "service": "mdi:power" + } } } diff --git a/homeassistant/components/system_log/icons.json b/homeassistant/components/system_log/icons.json index 436a6c34808..fe269c5154d 100644 --- a/homeassistant/components/system_log/icons.json +++ b/homeassistant/components/system_log/icons.json @@ -1,6 +1,10 @@ { "services": { - "clear": "mdi:delete", - "write": "mdi:pencil" + "clear": { + "service": "mdi:delete" + }, + "write": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/tado/icons.json b/homeassistant/components/tado/icons.json index 83ef6d4b332..c799bef0260 100644 --- a/homeassistant/components/tado/icons.json +++ b/homeassistant/components/tado/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_climate_timer": "mdi:timer", - "set_water_heater_timer": "mdi:timer", - "set_climate_temperature_offset": "mdi:thermometer", - "add_meter_reading": "mdi:counter" + "set_climate_timer": { + "service": "mdi:timer" + }, + "set_water_heater_timer": { + "service": "mdi:timer" + }, + "set_climate_temperature_offset": { + "service": "mdi:thermometer" + }, + "add_meter_reading": { + "service": "mdi:counter" + } } } diff --git a/homeassistant/components/telegram/icons.json b/homeassistant/components/telegram/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/telegram/icons.json +++ b/homeassistant/components/telegram/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/telegram_bot/icons.json b/homeassistant/components/telegram_bot/icons.json index f410d387435..0acf20d561a 100644 --- a/homeassistant/components/telegram_bot/icons.json +++ b/homeassistant/components/telegram_bot/icons.json @@ -1,18 +1,46 @@ { "services": { - "send_message": "mdi:send", - "send_photo": "mdi:camera", - "send_sticker": "mdi:sticker", - "send_animation": "mdi:animation", - "send_video": "mdi:video", - "send_voice": "mdi:microphone", - "send_document": "mdi:file-document", - "send_location": "mdi:map-marker", - "send_poll": "mdi:poll", - "edit_message": "mdi:pencil", - "edit_caption": "mdi:pencil", - "edit_replymarkup": "mdi:pencil", - "answer_callback_query": "mdi:check", - "delete_message": "mdi:delete" + "send_message": { + "service": "mdi:send" + }, + "send_photo": { + "service": "mdi:camera" + }, + "send_sticker": { + "service": "mdi:sticker" + }, + "send_animation": { + "service": 
"mdi:animation" + }, + "send_video": { + "service": "mdi:video" + }, + "send_voice": { + "service": "mdi:microphone" + }, + "send_document": { + "service": "mdi:file-document" + }, + "send_location": { + "service": "mdi:map-marker" + }, + "send_poll": { + "service": "mdi:poll" + }, + "edit_message": { + "service": "mdi:pencil" + }, + "edit_caption": { + "service": "mdi:pencil" + }, + "edit_replymarkup": { + "service": "mdi:pencil" + }, + "answer_callback_query": { + "service": "mdi:check" + }, + "delete_message": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/template/icons.json b/homeassistant/components/template/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/template/icons.json +++ b/homeassistant/components/template/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/teslemetry/icons.json b/homeassistant/components/teslemetry/icons.json index aea98e95e0b..1912d2265f6 100644 --- a/homeassistant/components/teslemetry/icons.json +++ b/homeassistant/components/teslemetry/icons.json @@ -129,7 +129,6 @@ "off": "mdi:car-seat" } }, - "components_customer_preferred_export_rule": { "default": "mdi:transmission-tower", "state": { @@ -259,11 +258,23 @@ } }, "services": { - "navigation_gps_request": "mdi:crosshairs-gps", - "set_scheduled_charging": "mdi:timeline-clock-outline", - "set_scheduled_departure": "mdi:home-clock", - "speed_limit": "mdi:car-speed-limiter", - "valet_mode": "mdi:speedometer-slow", - "time_of_use": "mdi:clock-time-eight-outline" + "navigation_gps_request": { + "service": "mdi:crosshairs-gps" + }, + "set_scheduled_charging": { + "service": "mdi:timeline-clock-outline" + }, + "set_scheduled_departure": { + "service": "mdi:home-clock" + }, + "speed_limit": { + "service": "mdi:car-speed-limiter" + }, + "valet_mode": { + "service": "mdi:speedometer-slow" + }, + "time_of_use": { + "service": "mdi:clock-time-eight-outline" + } } } diff --git a/homeassistant/components/text/icons.json b/homeassistant/components/text/icons.json index 355c439ec33..9448c9a7325 100644 --- a/homeassistant/components/text/icons.json +++ b/homeassistant/components/text/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:form-textbox" + "set_value": { + "service": "mdi:form-textbox" + } } } diff --git a/homeassistant/components/tibber/icons.json b/homeassistant/components/tibber/icons.json index c6cdd9b0e25..ddc8c735145 100644 --- a/homeassistant/components/tibber/icons.json +++ b/homeassistant/components/tibber/icons.json @@ -1,5 +1,7 @@ { "services": { - "get_prices": "mdi:cash" + "get_prices": { + "service": "mdi:cash" + } } } diff --git a/homeassistant/components/time/icons.json b/homeassistant/components/time/icons.json index c08e457e04d..f172c28ae0d 100644 --- a/homeassistant/components/time/icons.json +++ b/homeassistant/components/time/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:clock-edit" + "set_value": { + "service": "mdi:clock-edit" + } } } diff --git a/homeassistant/components/timer/icons.json b/homeassistant/components/timer/icons.json index 1e352f7280b..a5319688646 100644 --- a/homeassistant/components/timer/icons.json +++ b/homeassistant/components/timer/icons.json @@ -1,10 +1,22 @@ { "services": { - "start": "mdi:play", - "pause": "mdi:pause", - "cancel": "mdi:cancel", - "finish": "mdi:check", - "change": "mdi:pencil", - "reload": "mdi:reload" + "start": { + "service": "mdi:play" + }, + "pause": { 
+ "service": "mdi:pause" + }, + "cancel": { + "service": "mdi:cancel" + }, + "finish": { + "service": "mdi:check" + }, + "change": { + "service": "mdi:pencil" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/todo/icons.json b/homeassistant/components/todo/icons.json index 05c9af74630..4040a0c6b8f 100644 --- a/homeassistant/components/todo/icons.json +++ b/homeassistant/components/todo/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "add_item": "mdi:clipboard-plus", - "get_items": "mdi:clipboard-arrow-down", - "remove_completed_items": "mdi:clipboard-remove", - "remove_item": "mdi:clipboard-minus", - "update_item": "mdi:clipboard-edit" + "add_item": { + "service": "mdi:clipboard-plus" + }, + "get_items": { + "service": "mdi:clipboard-arrow-down" + }, + "remove_completed_items": { + "service": "mdi:clipboard-remove" + }, + "remove_item": { + "service": "mdi:clipboard-minus" + }, + "update_item": { + "service": "mdi:clipboard-edit" + } } } diff --git a/homeassistant/components/todoist/icons.json b/homeassistant/components/todoist/icons.json index d3b881d480c..73778f1ca23 100644 --- a/homeassistant/components/todoist/icons.json +++ b/homeassistant/components/todoist/icons.json @@ -1,5 +1,7 @@ { "services": { - "new_task": "mdi:checkbox-marked-circle-plus-outline" + "new_task": { + "service": "mdi:checkbox-marked-circle-plus-outline" + } } } diff --git a/homeassistant/components/toon/icons.json b/homeassistant/components/toon/icons.json index 650bf0b6d19..217f1240893 100644 --- a/homeassistant/components/toon/icons.json +++ b/homeassistant/components/toon/icons.json @@ -1,5 +1,7 @@ { "services": { - "update": "mdi:update" + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/totalconnect/icons.json b/homeassistant/components/totalconnect/icons.json index cb62a79c7bb..a21df03e15d 100644 --- a/homeassistant/components/totalconnect/icons.json +++ b/homeassistant/components/totalconnect/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "arm_away_instant": "mdi:shield-lock", - "arm_home_instant": "mdi:shield-home" + "arm_away_instant": { + "service": "mdi:shield-lock" + }, + "arm_home_instant": { + "service": "mdi:shield-home" + } } } diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 3da3b4806d3..96ea8f41bb7 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -109,7 +109,11 @@ } }, "services": { - "sequence_effect": "mdi:playlist-play", - "random_effect": "mdi:shuffle-variant" + "sequence_effect": { + "service": "mdi:playlist-play" + }, + "random_effect": { + "service": "mdi:shuffle-variant" + } } } diff --git a/homeassistant/components/transmission/icons.json b/homeassistant/components/transmission/icons.json index 56ae46f933d..4458f510951 100644 --- a/homeassistant/components/transmission/icons.json +++ b/homeassistant/components/transmission/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_torrent": "mdi:download", - "remove_torrent": "mdi:download-off", - "start_torrent": "mdi:play", - "stop_torrent": "mdi:stop" + "add_torrent": { + "service": "mdi:download" + }, + "remove_torrent": { + "service": "mdi:download-off" + }, + "start_torrent": { + "service": "mdi:play" + }, + "stop_torrent": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/trend/icons.json b/homeassistant/components/trend/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/trend/icons.json +++ 
b/homeassistant/components/trend/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/tts/icons.json b/homeassistant/components/tts/icons.json index cda5f877b25..8cfae7cc8e9 100644 --- a/homeassistant/components/tts/icons.json +++ b/homeassistant/components/tts/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "clear_cache": "mdi:delete", - "say": "mdi:speaker-message", - "speak": "mdi:speaker-message" + "clear_cache": { + "service": "mdi:delete" + }, + "say": { + "service": "mdi:speaker-message" + }, + "speak": { + "service": "mdi:speaker-message" + } } } diff --git a/homeassistant/components/unifi/icons.json b/homeassistant/components/unifi/icons.json index 2d5017a3187..b089d8eff9c 100644 --- a/homeassistant/components/unifi/icons.json +++ b/homeassistant/components/unifi/icons.json @@ -1,6 +1,10 @@ { "services": { - "reconnect_client": "mdi:sync", - "remove_clients": "mdi:delete" + "reconnect_client": { + "service": "mdi:sync" + }, + "remove_clients": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/unifiprotect/icons.json b/homeassistant/components/unifiprotect/icons.json index bb713d4ee79..5e80e3095b3 100644 --- a/homeassistant/components/unifiprotect/icons.json +++ b/homeassistant/components/unifiprotect/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_doorbell_text": "mdi:message-plus", - "remove_doorbell_text": "mdi:message-minus", - "set_chime_paired_doorbells": "mdi:bell-cog", - "remove_privacy_zone": "mdi:eye-minus" + "add_doorbell_text": { + "service": "mdi:message-plus" + }, + "remove_doorbell_text": { + "service": "mdi:message-minus" + }, + "set_chime_paired_doorbells": { + "service": "mdi:bell-cog" + }, + "remove_privacy_zone": { + "service": "mdi:eye-minus" + } } } diff --git a/homeassistant/components/universal/icons.json b/homeassistant/components/universal/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/universal/icons.json +++ b/homeassistant/components/universal/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/upb/icons.json b/homeassistant/components/upb/icons.json index 187f0f60970..0274233da52 100644 --- a/homeassistant/components/upb/icons.json +++ b/homeassistant/components/upb/icons.json @@ -1,12 +1,28 @@ { "services": { - "light_fade_start": "mdi:transition", - "light_fade_stop": "mdi:transition-masked", - "light_blink": "mdi:eye", - "link_deactivate": "mdi:link-off", - "link_goto": "mdi:link-variant", - "link_fade_start": "mdi:transition", - "link_fade_stop": "mdi:transition-masked", - "link_blink": "mdi:eye" + "light_fade_start": { + "service": "mdi:transition" + }, + "light_fade_stop": { + "service": "mdi:transition-masked" + }, + "light_blink": { + "service": "mdi:eye" + }, + "link_deactivate": { + "service": "mdi:link-off" + }, + "link_goto": { + "service": "mdi:link-variant" + }, + "link_fade_start": { + "service": "mdi:transition" + }, + "link_fade_stop": { + "service": "mdi:transition-masked" + }, + "link_blink": { + "service": "mdi:eye" + } } } diff --git a/homeassistant/components/update/icons.json b/homeassistant/components/update/icons.json index 96920c96253..89af07de67f 100644 --- a/homeassistant/components/update/icons.json +++ b/homeassistant/components/update/icons.json @@ -8,8 +8,14 @@ } }, "services": { - "clear_skipped": "mdi:package", - "install": "mdi:package-down", - "skip": 
"mdi:package-check" + "clear_skipped": { + "service": "mdi:package" + }, + "install": { + "service": "mdi:package-down" + }, + "skip": { + "service": "mdi:package-check" + } } } diff --git a/homeassistant/components/utility_meter/icons.json b/homeassistant/components/utility_meter/icons.json index 3c447b4a810..2539b73d168 100644 --- a/homeassistant/components/utility_meter/icons.json +++ b/homeassistant/components/utility_meter/icons.json @@ -12,7 +12,11 @@ } }, "services": { - "reset": "mdi:numeric-0-box-outline", - "calibrate": "mdi:auto-fix" + "reset": { + "service": "mdi:numeric-0-box-outline" + }, + "calibrate": { + "service": "mdi:auto-fix" + } } } diff --git a/homeassistant/components/vacuum/icons.json b/homeassistant/components/vacuum/icons.json index 25f0cfd03ef..4169729efec 100644 --- a/homeassistant/components/vacuum/icons.json +++ b/homeassistant/components/vacuum/icons.json @@ -5,17 +5,41 @@ } }, "services": { - "clean_spot": "mdi:target-variant", - "locate": "mdi:map-marker", - "pause": "mdi:pause", - "return_to_base": "mdi:home-import-outline", - "send_command": "mdi:send", - "set_fan_speed": "mdi:fan", - "start": "mdi:play", - "start_pause": "mdi:play-pause", - "stop": "mdi:stop", - "toggle": "mdi:play-pause", - "turn_off": "mdi:stop", - "turn_on": "mdi:play" + "clean_spot": { + "service": "mdi:target-variant" + }, + "locate": { + "service": "mdi:map-marker" + }, + "pause": { + "service": "mdi:pause" + }, + "return_to_base": { + "service": "mdi:home-import-outline" + }, + "send_command": { + "service": "mdi:send" + }, + "set_fan_speed": { + "service": "mdi:fan" + }, + "start": { + "service": "mdi:play" + }, + "start_pause": { + "service": "mdi:play-pause" + }, + "stop": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:play-pause" + }, + "turn_off": { + "service": "mdi:stop" + }, + "turn_on": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/vallox/icons.json b/homeassistant/components/vallox/icons.json index 67b41d216d2..f6beb55f1da 100644 --- a/homeassistant/components/vallox/icons.json +++ b/homeassistant/components/vallox/icons.json @@ -37,8 +37,14 @@ } }, "services": { - "set_profile_fan_speed_home": "mdi:home", - "set_profile_fan_speed_away": "mdi:walk", - "set_profile_fan_speed_boost": "mdi:speedometer" + "set_profile_fan_speed_home": { + "service": "mdi:home" + }, + "set_profile_fan_speed_away": { + "service": "mdi:walk" + }, + "set_profile_fan_speed_boost": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 2c887ebf273..c9c6b632dcb 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -17,10 +17,20 @@ } }, "services": { - "close_valve": "mdi:valve-closed", - "open_valve": "mdi:valve-open", - "set_valve_position": "mdi:valve", - "stop_valve": "mdi:stop", - "toggle": "mdi:valve-open" + "close_valve": { + "service": "mdi:valve-closed" + }, + "open_valve": { + "service": "mdi:valve-open" + }, + "set_valve_position": { + "service": "mdi:valve" + }, + "stop_valve": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:valve-open" + } } } diff --git a/homeassistant/components/velbus/icons.json b/homeassistant/components/velbus/icons.json index a806782d189..a46f5e5fbf1 100644 --- a/homeassistant/components/velbus/icons.json +++ b/homeassistant/components/velbus/icons.json @@ -1,8 +1,16 @@ { "services": { - "sync_clock": "mdi:clock", - "scan": "mdi:magnify", - "clear_cache": 
"mdi:delete", - "set_memo_text": "mdi:note-text" + "sync_clock": { + "service": "mdi:clock" + }, + "scan": { + "service": "mdi:magnify" + }, + "clear_cache": { + "service": "mdi:delete" + }, + "set_memo_text": { + "service": "mdi:note-text" + } } } diff --git a/homeassistant/components/velux/icons.json b/homeassistant/components/velux/icons.json index a16e7b50093..78cb5b14838 100644 --- a/homeassistant/components/velux/icons.json +++ b/homeassistant/components/velux/icons.json @@ -1,5 +1,7 @@ { "services": { - "reboot_gateway": "mdi:restart" + "reboot_gateway": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/verisure/icons.json b/homeassistant/components/verisure/icons.json index 35f6960b1e8..809cf004a3f 100644 --- a/homeassistant/components/verisure/icons.json +++ b/homeassistant/components/verisure/icons.json @@ -1,7 +1,13 @@ { "services": { - "capture_smartcam": "mdi:camera", - "enable_autolock": "mdi:lock", - "disable_autolock": "mdi:lock-off" + "capture_smartcam": { + "service": "mdi:camera" + }, + "enable_autolock": { + "service": "mdi:lock" + }, + "disable_autolock": { + "service": "mdi:lock-off" + } } } diff --git a/homeassistant/components/vesync/icons.json b/homeassistant/components/vesync/icons.json index a4bf4afd410..cfdefb2ed09 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_devices": "mdi:update" + "update_devices": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/vicare/icons.json b/homeassistant/components/vicare/icons.json index 2f40d8a8822..9d0f27a863c 100644 --- a/homeassistant/components/vicare/icons.json +++ b/homeassistant/components/vicare/icons.json @@ -88,6 +88,8 @@ } }, "services": { - "set_vicare_mode": "mdi:cog" + "set_vicare_mode": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/vizio/icons.json b/homeassistant/components/vizio/icons.json index ccdaf816bb0..be6f727de6f 100644 --- a/homeassistant/components/vizio/icons.json +++ b/homeassistant/components/vizio/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_setting": "mdi:cog" + "update_setting": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/wake_on_lan/icons.json b/homeassistant/components/wake_on_lan/icons.json index 6426c478157..f083b0342f4 100644 --- a/homeassistant/components/wake_on_lan/icons.json +++ b/homeassistant/components/wake_on_lan/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_magic_packet": "mdi:cube-send" + "send_magic_packet": { + "service": "mdi:cube-send" + } } } diff --git a/homeassistant/components/water_heater/icons.json b/homeassistant/components/water_heater/icons.json index af6996374c5..bc80128c6a3 100644 --- a/homeassistant/components/water_heater/icons.json +++ b/homeassistant/components/water_heater/icons.json @@ -22,10 +22,20 @@ } }, "services": { - "set_away_mode": "mdi:account-arrow-right", - "set_operation_mode": "mdi:water-boiler", - "set_temperature": "mdi:thermometer", - "turn_off": "mdi:water-boiler-off", - "turn_on": "mdi:water-boiler" + "set_away_mode": { + "service": "mdi:account-arrow-right" + }, + "set_operation_mode": { + "service": "mdi:water-boiler" + }, + "set_temperature": { + "service": "mdi:thermometer" + }, + "turn_off": { + "service": "mdi:water-boiler-off" + }, + "turn_on": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/waze_travel_time/icons.json b/homeassistant/components/waze_travel_time/icons.json index 
fa95e8fdd8a..98e6f26774c 100644 --- a/homeassistant/components/waze_travel_time/icons.json +++ b/homeassistant/components/waze_travel_time/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "get_travel_times": "mdi:timelapse" + "get_travel_times": { + "service": "mdi:timelapse" + } } } diff --git a/homeassistant/components/weather/icons.json b/homeassistant/components/weather/icons.json index cc53861e700..04b3c1d3df8 100644 --- a/homeassistant/components/weather/icons.json +++ b/homeassistant/components/weather/icons.json @@ -21,7 +21,11 @@ } }, "services": { - "get_forecast": "mdi:weather-cloudy-clock", - "get_forecasts": "mdi:weather-cloudy-clock" + "get_forecast": { + "service": "mdi:weather-cloudy-clock" + }, + "get_forecasts": { + "service": "mdi:weather-cloudy-clock" + } } } diff --git a/homeassistant/components/webostv/icons.json b/homeassistant/components/webostv/icons.json index deb9729a99f..edc058d099f 100644 --- a/homeassistant/components/webostv/icons.json +++ b/homeassistant/components/webostv/icons.json @@ -1,7 +1,13 @@ { "services": { - "button": "mdi:button-pointer", - "command": "mdi:console", - "select_sound_output": "mdi:volume-source" + "button": { + "service": "mdi:button-pointer" + }, + "command": { + "service": "mdi:console" + }, + "select_sound_output": { + "service": "mdi:volume-source" + } } } diff --git a/homeassistant/components/wemo/icons.json b/homeassistant/components/wemo/icons.json index c5ddf5912d6..af5024afcff 100644 --- a/homeassistant/components/wemo/icons.json +++ b/homeassistant/components/wemo/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_humidity": "mdi:water-percent", - "reset_filter_life": "mdi:refresh" + "set_humidity": { + "service": "mdi:water-percent" + }, + "reset_filter_life": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/wilight/icons.json b/homeassistant/components/wilight/icons.json index 3c5d0112de1..48bcae2a301 100644 --- a/homeassistant/components/wilight/icons.json +++ b/homeassistant/components/wilight/icons.json @@ -10,8 +10,14 @@ } }, "services": { - "set_watering_time": "mdi:timer", - "set_pause_time": "mdi:timer-pause", - "set_trigger": "mdi:gesture-tap-button" + "set_watering_time": { + "service": "mdi:timer" + }, + "set_pause_time": { + "service": "mdi:timer-pause" + }, + "set_trigger": { + "service": "mdi:gesture-tap-button" + } } } diff --git a/homeassistant/components/workday/icons.json b/homeassistant/components/workday/icons.json index 10d3c93a288..ec5c64dce97 100644 --- a/homeassistant/components/workday/icons.json +++ b/homeassistant/components/workday/icons.json @@ -1,5 +1,7 @@ { "services": { - "check_date": "mdi:calendar-check" + "check_date": { + "service": "mdi:calendar-check" + } } } diff --git a/homeassistant/components/xiaomi_aqara/icons.json b/homeassistant/components/xiaomi_aqara/icons.json index 4975414833d..62149b0dd40 100644 --- a/homeassistant/components/xiaomi_aqara/icons.json +++ b/homeassistant/components/xiaomi_aqara/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_device": "mdi:cellphone-link", - "play_ringtone": "mdi:music", - "remove_device": "mdi:cellphone-link", - "stop_ringtone": "mdi:music-off" + "add_device": { + "service": "mdi:cellphone-link" + }, + "play_ringtone": { + "service": "mdi:music" + }, + "remove_device": { + "service": "mdi:cellphone-link" + }, + "stop_ringtone": { + "service": "mdi:music-off" + } } } diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index 2e5084a1f6c..cc0800f1d9d 100644 
--- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -14,29 +14,77 @@ } }, "services": { - "fan_reset_filter": "mdi:refresh", - "fan_set_extra_features": "mdi:cog", - "light_set_scene": "mdi:palette", - "light_set_delayed_turn_off": "mdi:timer", - "light_reminder_on": "mdi:alarm", - "light_reminder_off": "mdi:alarm-off", - "light_night_light_mode_on": "mdi:weather-night", - "light_night_light_mode_off": "mdi:weather-sunny", - "light_eyecare_mode_on": "mdi:eye", - "light_eyecare_mode_off": "mdi:eye-off", - "remote_learn_command": "mdi:remote", - "remote_set_led_on": "mdi:led-on", - "remote_set_led_off": "mdi:led-off", - "switch_set_wifi_led_on": "mdi:wifi", - "switch_set_wifi_led_off": "mdi:wifi-off", - "switch_set_power_price": "mdi:currency-usd", - "switch_set_power_mode": "mdi:power", - "vacuum_remote_control_start": "mdi:play", - "vacuum_remote_control_stop": "mdi:stop", - "vacuum_remote_control_move": "mdi:remote", - "vacuum_remote_control_move_step": "mdi:remote", - "vacuum_clean_zone": "mdi:map-marker", - "vacuum_goto": "mdi:map-marker", - "vacuum_clean_segment": "mdi:map-marker" + "fan_reset_filter": { + "service": "mdi:refresh" + }, + "fan_set_extra_features": { + "service": "mdi:cog" + }, + "light_set_scene": { + "service": "mdi:palette" + }, + "light_set_delayed_turn_off": { + "service": "mdi:timer" + }, + "light_reminder_on": { + "service": "mdi:alarm" + }, + "light_reminder_off": { + "service": "mdi:alarm-off" + }, + "light_night_light_mode_on": { + "service": "mdi:weather-night" + }, + "light_night_light_mode_off": { + "service": "mdi:weather-sunny" + }, + "light_eyecare_mode_on": { + "service": "mdi:eye" + }, + "light_eyecare_mode_off": { + "service": "mdi:eye-off" + }, + "remote_learn_command": { + "service": "mdi:remote" + }, + "remote_set_led_on": { + "service": "mdi:led-on" + }, + "remote_set_led_off": { + "service": "mdi:led-off" + }, + "switch_set_wifi_led_on": { + "service": "mdi:wifi" + }, + "switch_set_wifi_led_off": { + "service": "mdi:wifi-off" + }, + "switch_set_power_price": { + "service": "mdi:currency-usd" + }, + "switch_set_power_mode": { + "service": "mdi:power" + }, + "vacuum_remote_control_start": { + "service": "mdi:play" + }, + "vacuum_remote_control_stop": { + "service": "mdi:stop" + }, + "vacuum_remote_control_move": { + "service": "mdi:remote" + }, + "vacuum_remote_control_move_step": { + "service": "mdi:remote" + }, + "vacuum_clean_zone": { + "service": "mdi:map-marker" + }, + "vacuum_goto": { + "service": "mdi:map-marker" + }, + "vacuum_clean_segment": { + "service": "mdi:map-marker" + } } } diff --git a/homeassistant/components/yamaha/icons.json b/homeassistant/components/yamaha/icons.json index f7075508b0d..40eceda3b3e 100644 --- a/homeassistant/components/yamaha/icons.json +++ b/homeassistant/components/yamaha/icons.json @@ -1,7 +1,13 @@ { "services": { - "enable_output": "mdi:audio-input-stereo-minijack", - "menu_cursor": "mdi:cursor-default", - "select_scene": "mdi:palette" + "enable_output": { + "service": "mdi:audio-input-stereo-minijack" + }, + "menu_cursor": { + "service": "mdi:cursor-default" + }, + "select_scene": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/yardian/icons.json b/homeassistant/components/yardian/icons.json index 79bcc32adf2..4ca3d83bd15 100644 --- a/homeassistant/components/yardian/icons.json +++ b/homeassistant/components/yardian/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "start_irrigation": "mdi:water" + "start_irrigation": { + 
"service": "mdi:water" + } } } diff --git a/homeassistant/components/yeelight/icons.json b/homeassistant/components/yeelight/icons.json index bf0d0c497f0..898637e752c 100644 --- a/homeassistant/components/yeelight/icons.json +++ b/homeassistant/components/yeelight/icons.json @@ -7,13 +7,29 @@ } }, "services": { - "set_mode": "mdi:cog", - "set_color_scene": "mdi:palette", - "set_hsv_scene": "mdi:palette", - "set_color_temp_scene": "mdi:palette", - "set_color_flow_scene": "mdi:palette", - "set_auto_delay_off_scene": "mdi:timer", - "start_flow": "mdi:play", - "set_music_mode": "mdi:music" + "set_mode": { + "service": "mdi:cog" + }, + "set_color_scene": { + "service": "mdi:palette" + }, + "set_hsv_scene": { + "service": "mdi:palette" + }, + "set_color_temp_scene": { + "service": "mdi:palette" + }, + "set_color_flow_scene": { + "service": "mdi:palette" + }, + "set_auto_delay_off_scene": { + "service": "mdi:timer" + }, + "start_flow": { + "service": "mdi:play" + }, + "set_music_mode": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/yolink/icons.json b/homeassistant/components/yolink/icons.json index 1158c9bd8f2..c58d219a2e0 100644 --- a/homeassistant/components/yolink/icons.json +++ b/homeassistant/components/yolink/icons.json @@ -29,6 +29,8 @@ } }, "services": { - "play_on_speaker_hub": "mdi:speaker" + "play_on_speaker_hub": { + "service": "mdi:speaker" + } } } diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 9b060e8105a..65ad029a66d 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -162,17 +162,41 @@ } }, "services": { - "permit": "mdi:cellphone-link", - "remove": "mdi:cellphone-remove", - "reconfigure_device": "mdi:cellphone-cog", - "set_zigbee_cluster_attribute": "mdi:cog", - "issue_zigbee_cluster_command": "mdi:console", - "issue_zigbee_group_command": "mdi:console", - "warning_device_squawk": "mdi:alert", - "warning_device_warn": "mdi:alert", - "clear_lock_user_code": "mdi:lock-remove", - "enable_lock_user_code": "mdi:lock", - "disable_lock_user_code": "mdi:lock-off", - "set_lock_user_code": "mdi:lock" + "permit": { + "service": "mdi:cellphone-link" + }, + "remove": { + "service": "mdi:cellphone-remove" + }, + "reconfigure_device": { + "service": "mdi:cellphone-cog" + }, + "set_zigbee_cluster_attribute": { + "service": "mdi:cog" + }, + "issue_zigbee_cluster_command": { + "service": "mdi:console" + }, + "issue_zigbee_group_command": { + "service": "mdi:console" + }, + "warning_device_squawk": { + "service": "mdi:alert" + }, + "warning_device_warn": { + "service": "mdi:alert" + }, + "clear_lock_user_code": { + "service": "mdi:lock-remove" + }, + "enable_lock_user_code": { + "service": "mdi:lock" + }, + "disable_lock_user_code": { + "service": "mdi:lock-off" + }, + "set_lock_user_code": { + "service": "mdi:lock" + } } } diff --git a/homeassistant/components/zone/icons.json b/homeassistant/components/zone/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/zone/icons.json +++ b/homeassistant/components/zone/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/zoneminder/icons.json b/homeassistant/components/zoneminder/icons.json index 8ca180d7399..3f9f6410a22 100644 --- a/homeassistant/components/zoneminder/icons.json +++ b/homeassistant/components/zoneminder/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_run_state": "mdi:cog" + 
"set_run_state": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/zwave_js/icons.json b/homeassistant/components/zwave_js/icons.json index 2956cf2c6e0..b52255e09d1 100644 --- a/homeassistant/components/zwave_js/icons.json +++ b/homeassistant/components/zwave_js/icons.json @@ -57,17 +57,41 @@ } }, "services": { - "bulk_set_partial_config_parameters": "mdi:cogs", - "clear_lock_usercode": "mdi:eraser", - "invoke_cc_api": "mdi:api", - "multicast_set_value": "mdi:list-box", - "ping": "mdi:crosshairs-gps", - "refresh_notifications": "mdi:bell", - "refresh_value": "mdi:refresh", - "reset_meter": "mdi:meter-electric", - "set_config_parameter": "mdi:cog", - "set_lock_configuration": "mdi:shield-lock", - "set_lock_usercode": "mdi:lock-smart", - "set_value": "mdi:form-textbox" + "bulk_set_partial_config_parameters": { + "service": "mdi:cogs" + }, + "clear_lock_usercode": { + "service": "mdi:eraser" + }, + "invoke_cc_api": { + "service": "mdi:api" + }, + "multicast_set_value": { + "service": "mdi:list-box" + }, + "ping": { + "service": "mdi:crosshairs-gps" + }, + "refresh_notifications": { + "service": "mdi:bell" + }, + "refresh_value": { + "service": "mdi:refresh" + }, + "reset_meter": { + "service": "mdi:meter-electric" + }, + "set_config_parameter": { + "service": "mdi:cog" + }, + "set_lock_configuration": { + "service": "mdi:shield-lock" + }, + "set_lock_usercode": { + "service": "mdi:lock-smart" + }, + "set_value": { + "service": "mdi:form-textbox" + } } } From cff4e4669426e077fc263d93d0f269adabe69e76 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 13:48:49 +0200 Subject: [PATCH 1165/1635] Deduplicate TTS mocks (#124773) --- tests/components/assist_pipeline/conftest.py | 47 +++----------------- tests/components/tts/common.py | 12 ++--- tests/components/tts/conftest.py | 8 ++-- tests/components/tts/test_init.py | 28 ++++++------ tests/components/tts/test_legacy.py | 8 ++-- tests/components/tts/test_media_source.py | 4 +- 6 files changed, 37 insertions(+), 70 deletions(-) diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index c03874c16af..0f6872edbfe 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -23,7 +23,7 @@ from homeassistant.components.assist_pipeline.pipeline import ( ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component @@ -37,6 +37,7 @@ from tests.common import ( mock_platform, ) from tests.components.stt.common import MockSTTProvider, MockSTTProviderEntity +from tests.components.tts.common import MockTTSProvider _TRANSCRIPT = "test transcript" @@ -48,46 +49,6 @@ def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" -class MockTTSProvider(tts.Provider): - """Mock TTS provider.""" - - name = "Test" - _supported_languages = ["en-US"] - _supported_voices = { - "en-US": [ - tts.Voice("james_earl_jones", "James Earl Jones"), - tts.Voice("fran_drescher", "Fran Drescher"), - ] - } - _supported_options = ["voice", "age", tts.ATTR_AUDIO_OUTPUT] - - @property - def default_language(self) -> str: - """Return the default language.""" - return "en" - - 
@property - def supported_languages(self) -> list[str]: - """Return list of supported languages.""" - return self._supported_languages - - @callback - def async_get_supported_voices(self, language: str) -> list[tts.Voice] | None: - """Return a list of supported voices for a language.""" - return self._supported_voices.get(language) - - @property - def supported_options(self) -> list[str]: - """Return list of supported options like voice, emotions.""" - return self._supported_options - - def get_tts_audio( - self, message: str, language: str, options: dict[str, Any] - ) -> tts.TtsAudioType: - """Load TTS data.""" - return ("mp3", b"") - - class MockTTSPlatform(MockPlatform): """A mock TTS platform.""" @@ -102,7 +63,9 @@ class MockTTSPlatform(MockPlatform): @pytest.fixture async def mock_tts_provider() -> MockTTSProvider: """Mock TTS provider.""" - return MockTTSProvider() + provider = MockTTSProvider("en") + provider._supported_languages = ["en-US"] + return provider @pytest.fixture diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 4acba401fad..b1eae12d694 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -130,6 +130,8 @@ class BaseProvider: def __init__(self, lang: str) -> None: """Initialize test provider.""" self._lang = lang + self._supported_languages = SUPPORT_LANGUAGES + self._supported_options = ["voice", "age"] @property def default_language(self) -> str: @@ -139,7 +141,7 @@ class BaseProvider: @property def supported_languages(self) -> list[str]: """Return list of supported languages.""" - return SUPPORT_LANGUAGES + return self._supported_languages @callback def async_get_supported_voices(self, language: str) -> list[Voice] | None: @@ -154,7 +156,7 @@ class BaseProvider: @property def supported_options(self) -> list[str]: """Return list of supported options like voice, emotions.""" - return ["voice", "age"] + return self._supported_options def get_tts_audio( self, message: str, language: str, options: dict[str, Any] @@ -163,7 +165,7 @@ class BaseProvider: return ("mp3", b"") -class MockProvider(BaseProvider, Provider): +class MockTTSProvider(BaseProvider, Provider): """Test speech API provider.""" def __init__(self, lang: str) -> None: @@ -185,7 +187,7 @@ class MockTTS(MockPlatform): {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) - def __init__(self, provider: MockProvider, **kwargs: Any) -> None: + def __init__(self, provider: MockTTSProvider, **kwargs: Any) -> None: """Initialize.""" super().__init__(**kwargs) self._provider = provider @@ -202,7 +204,7 @@ class MockTTS(MockPlatform): async def mock_setup( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Set up a test provider.""" mock_integration(hass, MockModule(domain=TEST_DOMAIN)) diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index 91ddd7742af..16c24f006d7 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -17,9 +17,9 @@ from homeassistant.core import HomeAssistant from .common import ( DEFAULT_LANG, TEST_DOMAIN, - MockProvider, MockTTS, MockTTSEntity, + MockTTSProvider, mock_config_entry_setup, mock_setup, ) @@ -67,9 +67,9 @@ async def mock_tts(hass: HomeAssistant, mock_provider) -> None: @pytest.fixture -def mock_provider() -> MockProvider: +def mock_provider() -> MockTTSProvider: """Test TTS provider.""" - return MockProvider(DEFAULT_LANG) + return MockTTSProvider(DEFAULT_LANG) @pytest.fixture @@ -106,7 
+106,7 @@ def config_flow_fixture( async def setup_fixture( hass: HomeAssistant, request: pytest.FixtureRequest, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, mock_tts_entity: MockTTSEntity, ) -> None: """Set up the test environment.""" diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index 1417fcda2a7..cf04fbb175b 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -30,9 +30,9 @@ from .common import ( DEFAULT_LANG, SUPPORT_LANGUAGES, TEST_DOMAIN, - MockProvider, MockTTS, MockTTSEntity, + MockTTSProvider, get_media_source_url, mock_config_entry_setup, mock_setup, @@ -220,7 +220,7 @@ async def test_service( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -281,7 +281,7 @@ async def test_service_default_language( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("en_US"), MockTTSEntity("en_US"))], + [(MockTTSProvider("en_US"), MockTTSEntity("en_US"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -511,7 +511,7 @@ async def test_service_options( ).is_file() -class MockProviderWithDefaults(MockProvider): +class MockProviderWithDefaults(MockTTSProvider): """Mock provider with default options.""" @property @@ -854,7 +854,7 @@ async def test_service_receive_voice( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -1015,7 +1015,7 @@ async def test_service_without_cache( ).is_file() -class MockProviderBoom(MockProvider): +class MockProviderBoom(MockTTSProvider): """Mock provider that blows up.""" def get_tts_audio( @@ -1041,7 +1041,7 @@ class MockEntityBoom(MockTTSEntity): async def test_setup_legacy_cache_dir( hass: HomeAssistant, mock_tts_cache_dir: Path, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Set up a TTS platform with cache and call service without cache.""" calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @@ -1106,7 +1106,7 @@ async def test_setup_cache_dir( await hass.async_block_till_done() -class MockProviderEmpty(MockProvider): +class MockProviderEmpty(MockTTSProvider): """Mock provider with empty get_tts_audio.""" def get_tts_audio( @@ -1178,7 +1178,7 @@ async def test_service_get_tts_error( async def test_load_cache_legacy_retrieve_without_mem_cache( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: @@ -1426,7 +1426,7 @@ async def test_legacy_fetching_in_async( """Test async fetching of data for a legacy provider.""" tts_audio: asyncio.Future[bytes] = asyncio.Future() - class ProviderWithAsyncFetching(MockProvider): + class ProviderWithAsyncFetching(MockTTSProvider): """Provider that supports audio output option.""" @property @@ -1662,8 +1662,8 @@ async def test_ws_list_engines_deprecated( also provides tts entities. 
""" - mock_provider = MockProvider(DEFAULT_LANG) - mock_provider_2 = MockProvider(DEFAULT_LANG) + mock_provider = MockTTSProvider(DEFAULT_LANG) + mock_provider_2 = MockTTSProvider(DEFAULT_LANG) mock_integration(hass, MockModule(domain="test")) mock_platform(hass, "test.tts", MockTTS(mock_provider)) mock_integration(hass, MockModule(domain="test_2")) @@ -1910,7 +1910,7 @@ async def test_ttsentity_subclass_properties( async def test_default_engine_prefer_entity( hass: HomeAssistant, mock_tts_entity: MockTTSEntity, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Test async_default_engine. @@ -1941,7 +1941,7 @@ async def test_default_engine_prefer_entity( ) async def test_default_engine_prefer_cloud_entity( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, config_flow_test_domains: str, ) -> None: """Test async_default_engine. diff --git a/tests/components/tts/test_legacy.py b/tests/components/tts/test_legacy.py index 0d7f99e8cd1..22e8ac35f16 100644 --- a/tests/components/tts/test_legacy.py +++ b/tests/components/tts/test_legacy.py @@ -17,7 +17,7 @@ from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from .common import SUPPORT_LANGUAGES, MockProvider, MockTTS +from .common import SUPPORT_LANGUAGES, MockTTS, MockTTSProvider from tests.common import ( MockModule, @@ -75,7 +75,9 @@ async def test_invalid_platform( async def test_platform_setup_without_provider( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_provider: MockProvider + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_provider: MockTTSProvider, ) -> None: """Test platform setup without provider returned.""" @@ -109,7 +111,7 @@ async def test_platform_setup_without_provider( async def test_platform_setup_with_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Test platform setup with an error during setup.""" diff --git a/tests/components/tts/test_media_source.py b/tests/components/tts/test_media_source.py index 4c10d8f0b08..ba856fd9622 100644 --- a/tests/components/tts/test_media_source.py +++ b/tests/components/tts/test_media_source.py @@ -12,8 +12,8 @@ from homeassistant.setup import async_setup_component from .common import ( DEFAULT_LANG, - MockProvider, MockTTSEntity, + MockTTSProvider, mock_config_entry_setup, mock_setup, retrieve_media, @@ -28,7 +28,7 @@ class MSEntity(MockTTSEntity): get_tts_audio = MagicMock(return_value=("mp3", b"")) -class MSProvider(MockProvider): +class MSProvider(MockTTSProvider): """Test speech API provider.""" get_tts_audio = MagicMock(return_value=("mp3", b"")) From c4e5d6755194fd8678b63d9ae8022b5475906e7f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 14:14:45 +0200 Subject: [PATCH 1166/1635] Enforce new service icon schema for core integrations (#124772) Co-authored-by: Paulus Schoutsen --- script/hassfest/icons.py | 27 +++++++++++++++++++++++---- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index 92d42efb842..f6bcd865c23 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -61,7 +61,20 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( ) -SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( +CORE_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( + vol.Schema( + { + 
vol.Optional("service"): icon_value_validator,
+            vol.Optional("sections"): cv.schema_with_slug_keys(
+                icon_value_validator, slug_validator=translation_key_validator
+            ),
+        }
+    ),
+    slug_validator=translation_key_validator,
+)
+
+
+CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys(
     vol.All(
         convert_shorthand_service_icon,
         vol.Schema(
@@ -77,7 +90,9 @@ SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys(
 )
 
 
-def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema:
+def icon_schema(
+    core_integration: bool, integration_type: str, no_entity_platform: bool
+) -> vol.Schema:
     """Create an icon schema."""
 
     state_validator = cv.schema_with_slug_keys(
@@ -108,7 +123,9 @@ def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema:
                 {str: {"fix_flow": DATA_ENTRY_ICONS_SCHEMA}}
             ),
             vol.Optional("options"): DATA_ENTRY_ICONS_SCHEMA,
-            vol.Optional("services"): SERVICE_ICONS_SCHEMA,
+            vol.Optional("services"): CORE_SERVICE_ICONS_SCHEMA
+            if core_integration
+            else CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA,
         }
     )
 
@@ -163,7 +180,9 @@ def validate_icon_file(config: Config, integration: Integration) -> None:
 
     no_entity_platform = integration.domain in ("notify", "image_processing")
 
-    schema = icon_schema(integration.integration_type, no_entity_platform)
+    schema = icon_schema(
+        integration.core, integration.integration_type, no_entity_platform
+    )
 
     try:
         schema(icons)

From f8ac952cd7f5e2b36d259bed9f2652adfb9f86f8 Mon Sep 17 00:00:00 2001
From: Matrix
Date: Wed, 28 Aug 2024 20:18:55 +0800
Subject: [PATCH 1167/1635] Add YoLink lock V2 support (#124202)

* Add Lock V2 Support
* Change as suggestions
---
 homeassistant/components/yolink/lock.py | 39 +++++++++++++++++++------
 1 file changed, 30 insertions(+), 9 deletions(-)

diff --git a/homeassistant/components/yolink/lock.py b/homeassistant/components/yolink/lock.py
index 177a8808de1..d675fd8cf06 100644
--- a/homeassistant/components/yolink/lock.py
+++ b/homeassistant/components/yolink/lock.py
@@ -1,11 +1,11 @@
-"""YoLink Lock."""
+"""YoLink Lock V1/V2."""
 
 from __future__ import annotations
 
 from typing import Any
 
 from yolink.client_request import ClientRequest
-from yolink.const import ATTR_DEVICE_LOCK
+from yolink.const import ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2
 
 from homeassistant.components.lock import LockEntity
 from homeassistant.config_entries import ConfigEntry
@@ -27,7 +27,8 @@ async def async_setup_entry(
     entities = [
         YoLinkLockEntity(config_entry, device_coordinator)
         for device_coordinator in device_coordinators.values()
-        if device_coordinator.device.device_type == ATTR_DEVICE_LOCK
+        if device_coordinator.device.device_type
+        in [ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2]
     ]
     async_add_entities(entities)
 
@@ -50,21 +51,41 @@ class YoLinkLockEntity(YoLinkEntity, LockEntity):
     def update_entity_state(self, state: dict[str, Any]) -> None:
         """Update HA Entity State."""
         state_value = state.get("state")
-        self._attr_is_locked = (
-            state_value == "locked" if state_value is not None else None
-        )
+        if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2:
+            self._attr_is_locked = (
+                state_value["lock"] == "locked" if state_value is not None else None
+            )
+        else:
+            self._attr_is_locked = (
+                state_value == "locked" if state_value is not None else None
+            )
         self.async_write_ha_state()
 
     async def call_lock_state_change(self, state: str) -> None:
         """Call setState api to change lock state."""
-        await self.call_device(ClientRequest("setState", {"state": state}))
+        if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2:
+            await self.call_device(
+                ClientRequest("setState", {"state": {"lock": state}})
+            )
+        else:
+            await self.call_device(ClientRequest("setState", {"state": state}))
         self._attr_is_locked = state == "lock"
         self.async_write_ha_state()
 
     async def async_lock(self, **kwargs: Any) -> None:
         """Lock device."""
-        await self.call_lock_state_change("lock")
+        state_param = (
+            "locked"
+            if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2
+            else "lock"
+        )
+        await self.call_lock_state_change(state_param)
 
     async def async_unlock(self, **kwargs: Any) -> None:
         """Unlock device."""
-        await self.call_lock_state_change("unlock")
+        state_param = (
+            "unlocked"
+            if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2
+            else "unlock"
+        )
+        await self.call_lock_state_change(state_param)

From 0afae45bc5c652d47866f11ebc0da68b084587b1 Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Wed, 28 Aug 2024 14:27:01 +0200
Subject: [PATCH 1168/1635] Trigger full CI run on Blueprint integration changes (#124778)

---
 .core_files.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.core_files.yaml b/.core_files.yaml
index 3f92ed87a84..4a11d5da27c 100644
--- a/.core_files.yaml
+++ b/.core_files.yaml
@@ -61,6 +61,7 @@ components: &components
   - homeassistant/components/auth/**
   - homeassistant/components/automation/**
   - homeassistant/components/backup/**
+  - homeassistant/components/blueprint/**
   - homeassistant/components/bluetooth/**
   - homeassistant/components/cloud/**
   - homeassistant/components/config/**

From 11370979e5c07e028a537b6502f480fe211f93bc Mon Sep 17 00:00:00 2001
From: Michael <35783820+mib1185@users.noreply.github.com>
Date: Wed, 28 Aug 2024 14:54:30 +0200
Subject: [PATCH 1169/1635] Fix translation for AVM Fritz!Box Tools (#124784)

add missing ssl option to strings
---
 homeassistant/components/fritz/strings.json | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json
index 3b6c60ed48f..6be393cc636 100644
--- a/homeassistant/components/fritz/strings.json
+++ b/homeassistant/components/fritz/strings.json
@@ -7,7 +7,8 @@
         "description": "Discovered FRITZ!Box: {name}\n\nSet up FRITZ!Box Tools to control your {name}",
         "data": {
           "username": "[%key:common::config_flow::data::username%]",
-          "password": "[%key:common::config_flow::data::password%]"
+          "password": "[%key:common::config_flow::data::password%]",
+          "ssl": "[%key:common::config_flow::data::ssl%]"
         }
       },
       "reauth_confirm": {

From e39b3796f34523d29ee82575d7d3b2f602618dad Mon Sep 17 00:00:00 2001
From: Brett Adams
Date: Wed, 28 Aug 2024 22:58:16 +1000
Subject: [PATCH 1170/1635] Fix OAuth reauth in Tesla Fleet (#124744)

* Fix auth failure
* Test
* Fix test
* Only reauth on 401
* Cover 401 and others
* Update homeassistant/components/tesla_fleet/strings.json

Co-authored-by: Jan Bouwhuis

---------

Co-authored-by: Jan Bouwhuis
---
 .../components/tesla_fleet/__init__.py    |  8 ++-
 .../components/tesla_fleet/config_flow.py |  5 +-
 .../components/tesla_fleet/strings.json   |  2 +-
 tests/components/tesla_fleet/conftest.py  |  2 +-
 tests/components/tesla_fleet/test_init.py | 49 ++++++++++++++++++-
 5 files changed, 61 insertions(+), 5 deletions(-)

diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py
index 47a2a9173a5..183e7e753b5 100644
--- a/homeassistant/components/tesla_fleet/__init__.py
+++ b/homeassistant/components/tesla_fleet/__init__.py
@@ -3,6 +3,7 @@
 import asyncio
 from typing
import Final +from aiohttp.client_exceptions import ClientResponseError import jwt from tesla_fleet_api import EnergySpecific, TeslaFleetApi, VehicleSpecific from tesla_fleet_api.const import Scope @@ -66,7 +67,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - async def _refresh_token() -> str: async with refresh_lock: - await oauth_session.async_ensure_token_valid() + try: + await oauth_session.async_ensure_token_valid() + except ClientResponseError as e: + if e.status == 401: + raise ConfigEntryAuthFailed from e + raise ConfigEntryNotReady from e token: str = oauth_session.token[CONF_ACCESS_TOKEN] return token diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index 0ffdca1aec6..64b88792387 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ b/homeassistant/components/tesla_fleet/config_flow.py @@ -83,5 +83,8 @@ class OAuth2FlowHandler( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - return self.async_show_form(step_id="reauth_confirm") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"name": "Tesla Fleet"}, + ) return await self.async_step_user() diff --git a/homeassistant/components/tesla_fleet/strings.json b/homeassistant/components/tesla_fleet/strings.json index 6e74714ddd5..d4848836689 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -19,7 +19,7 @@ }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "The Withings integration needs to re-authenticate your account" + "description": "The {name} integration needs to re-authenticate your account" } }, "create_entry": { diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 49f0be9cca7..615c62fe16e 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -124,7 +124,7 @@ def mock_site_info() -> Generator[AsyncMock]: yield mock_live_status -@pytest.fixture(autouse=True) +@pytest.fixture def mock_find_server() -> Generator[AsyncMock]: """Mock Tesla Fleet find server method.""" with patch( diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index b5eb21d1cdd..9dcac4ec388 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -1,7 +1,9 @@ """Test the Tesla Fleet init.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch +from aiohttp import RequestInfo +from aiohttp.client_exceptions import ClientResponseError from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion @@ -16,6 +18,7 @@ from tesla_fleet_api.exceptions import ( VehicleOffline, ) +from homeassistant.components.tesla_fleet.const import AUTHORIZE_URL from homeassistant.components.tesla_fleet.coordinator import ( ENERGY_INTERVAL, ENERGY_INTERVAL_SECONDS, @@ -72,6 +75,50 @@ async def test_init_error( assert normal_config_entry.state is state +async def test_oauth_refresh_expired( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Test init with expired Oauth token.""" + + # Patch the token refresh to raise an error + with patch( + "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + 
RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=401 + ), + ) as mock_async_ensure_token_valid: + # Trigger an unmocked function call + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + + mock_async_ensure_token_valid.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_oauth_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Test init with Oauth refresh failure.""" + + # Patch the token refresh to raise an error + with patch( + "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=400 + ), + ) as mock_async_ensure_token_valid: + # Trigger an unmocked function call + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + + mock_async_ensure_token_valid.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY + + # Test devices async def test_devices( hass: HomeAssistant, From 174f22aa2f918e5aa3eda4c724cfc2090a0a3e0f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 28 Aug 2024 15:42:15 +0200 Subject: [PATCH 1171/1635] Prevent nesting sections in data entry flows (#124645) --- homeassistant/helpers/config_validation.py | 12 ++++++++++- tests/test_data_entry_flow.py | 24 ++++++++++++++++++++++ 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 2904efb75e9..3d3de40a2c6 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -1090,6 +1090,11 @@ def key_dependency[_KT: Hashable, _VT]( def custom_serializer(schema: Any) -> Any: + """Serialize additional types for voluptuous_serialize.""" + return _custom_serializer(schema, allow_section=True) + + +def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: """Serialize additional types for voluptuous_serialize.""" from .. import data_entry_flow # pylint: disable=import-outside-toplevel from . 
import selector # pylint: disable=import-outside-toplevel @@ -1104,10 +1109,15 @@ def custom_serializer(schema: Any) -> Any: return {"type": "boolean"} if isinstance(schema, data_entry_flow.section): + if not allow_section: + raise ValueError("Nesting expandable sections is not supported") return { "type": "expandable", "schema": voluptuous_serialize.convert( - schema.schema, custom_serializer=custom_serializer + schema.schema, + custom_serializer=functools.partial( + _custom_serializer, allow_section=False + ), ), "expanded": not schema.options["collapsed"], } diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 967b2565206..01b6a530105 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -1098,3 +1098,27 @@ def test_section_in_serializer() -> None: ], "type": "expandable", } + + +def test_nested_section_in_serializer() -> None: + """Test section with custom_serializer.""" + with pytest.raises( + ValueError, match="Nesting expandable sections is not supported" + ): + cv.custom_serializer( + data_entry_flow.section( + vol.Schema( + { + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional("option_1", default=False): bool, + vol.Required("option_2"): int, + } + ) + ) + } + ), + {"collapsed": False}, + ) + ) From 9d633f20876fdd8a129066c1efad0b97daf1e257 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 15:47:35 +0200 Subject: [PATCH 1172/1635] Use start_reauth helper method in integration tests (h-l) (#124787) --- tests/components/hive/test_config_flow.py | 18 +------ .../components/homewizard/test_config_flow.py | 16 +------ .../components/honeywell/test_config_flow.py | 41 ++-------------- .../components/huawei_lte/test_config_flow.py | 10 +--- tests/components/hyperion/__init__.py | 4 +- tests/components/hyperion/test_config_flow.py | 18 ++----- tests/components/icloud/test_config_flow.py | 16 ++----- tests/components/imap/test_config_flow.py | 30 ++---------- .../ista_ecotrend/test_config_flow.py | 22 ++------- tests/components/isy994/test_config_flow.py | 5 +- tests/components/jellyfin/test_config_flow.py | 36 ++------------ .../components/justnimbus/test_config_flow.py | 9 +--- .../jvc_projector/test_config_flow.py | 47 +++---------------- .../lacrosse_view/test_config_flow.py | 11 +---- .../components/lamarzocco/test_config_flow.py | 17 +------ tests/components/lametric/test_config_flow.py | 47 ++----------------- .../components/laundrify/test_config_flow.py | 7 ++- tests/components/lidarr/test_config_flow.py | 22 ++++----- .../linear_garage_door/test_config_flow.py | 13 +---- .../litterrobot/test_config_flow.py | 22 ++------- tests/components/lyric/test_config_flow.py | 4 +- 21 files changed, 62 insertions(+), 353 deletions(-) diff --git a/tests/components/hive/test_config_flow.py b/tests/components/hive/test_config_flow.py index fd6eb564a39..e5dba49dcc1 100644 --- a/tests/components/hive/test_config_flow.py +++ b/tests/components/hive/test_config_flow.py @@ -246,14 +246,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] 
== {"base": "invalid_password"} @@ -305,14 +298,7 @@ async def test_reauth_2fa_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} diff --git a/tests/components/homewizard/test_config_flow.py b/tests/components/homewizard/test_config_flow.py index 8d12a8a1787..442659f2aad 100644 --- a/tests/components/homewizard/test_config_flow.py +++ b/tests/components/homewizard/test_config_flow.py @@ -341,13 +341,7 @@ async def test_reauth_flow( """Test reauth flow while API is enabled.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -367,13 +361,7 @@ async def test_reauth_error( mock_homewizardenergy.device.side_effect = DisabledError mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/honeywell/test_config_flow.py b/tests/components/honeywell/test_config_flow.py index 7cd987f0d83..ed9c86f5e10 100644 --- a/tests/components/honeywell/test_config_flow.py +++ b/tests/components/honeywell/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.honeywell.const import ( CONF_HEAT_AWAY_TEMPERATURE, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,21 +129,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: unique_id="test-username", ) mock_entry.add_to_hass(hass) - with patch( - "homeassistant.components.honeywell.async_setup_entry", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - - await hass.async_block_till_done() + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -177,16 +163,7 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, client: MagicMock) -> ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - await hass.async_block_till_done() + result = await 
mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -226,17 +203,7 @@ async def test_reauth_flow_connnection_error( unique_id="test-username", ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - await hass.async_block_till_done() - + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/huawei_lte/test_config_flow.py b/tests/components/huawei_lte/test_config_flow.py index 862af02963c..a9a147eb17e 100644 --- a/tests/components/huawei_lte/test_config_flow.py +++ b/tests/components/huawei_lte/test_config_flow.py @@ -385,15 +385,7 @@ async def test_reauth( ) entry.add_to_hass(hass) - context = { - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - } - result = await hass.config_entries.flow.async_init( - DOMAIN, context=context, data=entry.data - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["data_schema"] is not None diff --git a/tests/components/hyperion/__init__.py b/tests/components/hyperion/__init__.py index 72aba96e81f..36137ce0ddd 100644 --- a/tests/components/hyperion/__init__.py +++ b/tests/components/hyperion/__init__.py @@ -124,9 +124,9 @@ def add_test_config_entry( hass: HomeAssistant, data: dict[str, Any] | None = None, options: dict[str, Any] | None = None, -) -> ConfigEntry: +) -> MockConfigEntry: """Add a test config entry.""" - config_entry: MockConfigEntry = MockConfigEntry( + config_entry = MockConfigEntry( entry_id=TEST_CONFIG_ENTRY_ID, domain=DOMAIN, data=data diff --git a/tests/components/hyperion/test_config_flow.py b/tests/components/hyperion/test_config_flow.py index fb4fa1fe671..4109fe0f653 100644 --- a/tests/components/hyperion/test_config_flow.py +++ b/tests/components/hyperion/test_config_flow.py @@ -20,7 +20,7 @@ from homeassistant.components.hyperion.const import ( DOMAIN, ) from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, @@ -861,12 +861,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ), patch("homeassistant.components.hyperion.async_setup_entry", return_value=True), ): - result = await _init_flow( - hass, - source=SOURCE_REAUTH, - data=config_data, - ) - await hass.async_block_till_done() + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM result = await _configure_flow( @@ -886,18 +881,13 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: CONF_PORT: TEST_PORT, } - add_test_config_entry(hass, data=config_data) + config_entry = add_test_config_entry(hass, data=config_data) client = create_mock_client() client.async_client_connect = AsyncMock(return_value=False) with patch( "homeassistant.components.hyperion.client.HyperionClient", return_value=client ): - result = await _init_flow( - hass, - source=SOURCE_REAUTH, - data=config_data, - ) - await 
hass.async_block_till_done() + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" diff --git a/tests/components/icloud/test_config_flow.py b/tests/components/icloud/test_config_flow.py index ec8d11f1135..c0bc5d7ed2e 100644 --- a/tests/components/icloud/test_config_flow.py +++ b/tests/components/icloud/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.icloud.const import ( DEFAULT_WITH_FAMILY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -386,12 +386,7 @@ async def test_password_update( ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data={**MOCK_CONFIG}, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -410,12 +405,7 @@ async def test_password_update_wrong_password(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data={**MOCK_CONFIG}, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM with patch( diff --git a/tests/components/imap/test_config_flow.py b/tests/components/imap/test_config_flow.py index 459cecec4a6..fb97bf0505d 100644 --- a/tests/components/imap/test_config_flow.py +++ b/tests/components/imap/test_config_flow.py @@ -215,15 +215,7 @@ async def test_reauth_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["description_placeholders"] == {CONF_USERNAME: "email@email.com"} @@ -256,15 +248,7 @@ async def test_reauth_failed(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -294,15 +278,7 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/ista_ecotrend/test_config_flow.py b/tests/components/ista_ecotrend/test_config_flow.py index b702b0331e8..d6c88c51c99 100644 --- a/tests/components/ista_ecotrend/test_config_flow.py +++ b/tests/components/ista_ecotrend/test_config_flow.py @@ -6,7 +6,7 @@ from pyecotrend_ista import 
LoginError, ServerError import pytest from homeassistant.components.ista_ecotrend.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -98,15 +98,7 @@ async def test_reauth( ista_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": ista_config_entry.entry_id, - "unique_id": ista_config_entry.unique_id, - }, - ) - + result = await ista_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -148,15 +140,7 @@ async def test_reauth_error_and_recover( ista_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": ista_config_entry.entry_id, - "unique_id": ista_config_entry.unique_id, - }, - ) - + result = await ista_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/isy994/test_config_flow.py b/tests/components/isy994/test_config_flow.py index 411439e2e70..34e267fe904 100644 --- a/tests/components/isy994/test_config_flow.py +++ b/tests/components/isy994/test_config_flow.py @@ -644,10 +644,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": MOCK_UUID}, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/jellyfin/test_config_flow.py b/tests/components/jellyfin/test_config_flow.py index c84a12d26a5..a8ffbcbf46c 100644 --- a/tests/components/jellyfin/test_config_flow.py +++ b/tests/components/jellyfin/test_config_flow.py @@ -222,14 +222,7 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -272,14 +265,7 @@ async def test_reauth_cannot_connect( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -339,14 +325,7 @@ async def test_reauth_invalid( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is 
FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -400,14 +379,7 @@ async def test_reauth_exception( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/justnimbus/test_config_flow.py b/tests/components/justnimbus/test_config_flow.py index f66693a752c..330b05bf48c 100644 --- a/tests/components/justnimbus/test_config_flow.py +++ b/tests/components/justnimbus/test_config_flow.py @@ -125,14 +125,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, - data=FIXTURE_OLD_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/jvc_projector/test_config_flow.py b/tests/components/jvc_projector/test_config_flow.py index 282411540a4..d7eb0995bbd 100644 --- a/tests/components/jvc_projector/test_config_flow.py +++ b/tests/components/jvc_projector/test_config_flow.py @@ -6,7 +6,7 @@ from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError import pytest from homeassistant.components.jvc_projector.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -163,14 +163,7 @@ async def test_reauth_config_flow_success( hass: HomeAssistant, mock_device: AsyncMock, mock_integration: MockConfigEntry ) -> None: """Test reauth config flow success.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -194,14 +187,7 @@ async def test_reauth_config_flow_auth_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorAuthError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -218,14 +204,7 @@ async def test_reauth_config_flow_auth_error( mock_device.connect.side_effect = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -249,14 +228,7 @@ async def 
test_reauth_config_flow_connect_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorConnectError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -273,14 +245,7 @@ async def test_reauth_config_flow_connect_error( mock_device.connect.side_effect = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/lacrosse_view/test_config_flow.py b/tests/components/lacrosse_view/test_config_flow.py index 5a48b3d15fe..9ca7fb78bdd 100644 --- a/tests/components/lacrosse_view/test_config_flow.py +++ b/tests/components/lacrosse_view/test_config_flow.py @@ -251,16 +251,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - }, - data=data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index 92ecd0a13f4..39896926c61 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -7,12 +7,7 @@ from lmcloud.models import LaMarzoccoDeviceInfo from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN -from homeassistant.config_entries import ( - SOURCE_BLUETOOTH, - SOURCE_REAUTH, - SOURCE_USER, - ConfigEntryState, -) +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -247,15 +242,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 2a21423ad03..3fbe606c7f1 100644 --- a/tests/components/lametric/test_config_flow.py +++ b/tests/components/lametric/test_config_flow.py @@ -20,12 +20,7 @@ from homeassistant.components.ssdp import ( ATTR_UPNP_SERIAL, SsdpServiceInfo, ) -from homeassistant.config_entries import ( - SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_SSDP, SOURCE_USER from homeassistant.const import 
CONF_API_KEY, CONF_DEVICE, CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -753,15 +748,7 @@ async def test_reauth_cloud_import( """Test reauth flow importing api keys from the cloud.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] @@ -817,15 +804,7 @@ async def test_reauth_cloud_abort_device_not_found( mock_config_entry.add_to_hass(hass) hass.config_entries.async_update_entry(mock_config_entry, unique_id="UKNOWN_DEVICE") - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] @@ -872,15 +851,7 @@ async def test_reauth_manual( """Test reauth flow with manual entry.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] @@ -914,15 +885,7 @@ async def test_reauth_manual_sky( """Test reauth flow with manual entry for LaMetric Sky.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] diff --git a/tests/components/laundrify/test_config_flow.py b/tests/components/laundrify/test_config_flow.py index 69a4b957cf5..8bb8211195c 100644 --- a/tests/components/laundrify/test_config_flow.py +++ b/tests/components/laundrify/test_config_flow.py @@ -3,7 +3,7 @@ from laundrify_aio import exceptions from homeassistant.components.laundrify.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CODE, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -95,9 +95,8 @@ async def test_form_unkown_exception( async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth form is shown.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH} - ) + config_entry = create_entry(hass) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/lidarr/test_config_flow.py b/tests/components/lidarr/test_config_flow.py index e44b03cd2a2..0097e66fe24 100644 --- a/tests/components/lidarr/test_config_flow.py +++ b/tests/components/lidarr/test_config_flow.py @@ -1,13 +1,15 @@ """Test Lidarr config flow.""" from homeassistant.components.lidarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER 
+from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import CONF_DATA, MOCK_INPUT, ComponentSetup +from tests.common import MockConfigEntry + async def test_flow_user_form(hass: HomeAssistant, connection) -> None: """Test that the user set up form is served.""" @@ -95,20 +97,14 @@ async def test_flow_user_unknown_error(hass: HomeAssistant, unknown) -> None: async def test_flow_reauth( - hass: HomeAssistant, setup_integration: ComponentSetup, connection + hass: HomeAssistant, + setup_integration: ComponentSetup, + connection, + config_entry: MockConfigEntry, ) -> None: """Test reauth.""" await setup_integration() - entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( @@ -123,4 +119,4 @@ async def test_flow_reauth( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" - assert entry.data[CONF_API_KEY] == "abc123" + assert config_entry.data[CONF_API_KEY] == "abc123" diff --git a/tests/components/linear_garage_door/test_config_flow.py b/tests/components/linear_garage_door/test_config_flow.py index 4599bd24aef..64bdc589194 100644 --- a/tests/components/linear_garage_door/test_config_flow.py +++ b/tests/components/linear_garage_door/test_config_flow.py @@ -6,7 +6,7 @@ from linear_garage_door.errors import InvalidLoginError import pytest from homeassistant.components.linear_garage_door.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -61,16 +61,7 @@ async def test_reauth( ) -> None: """Test reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/litterrobot/test_config_flow.py b/tests/components/litterrobot/test_config_flow.py index 5ffb78c7782..9420d3cb8a8 100644 --- a/tests/components/litterrobot/test_config_flow.py +++ b/tests/components/litterrobot/test_config_flow.py @@ -7,7 +7,7 @@ from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginExcepti from homeassistant import config_entries from homeassistant.components import litterrobot -from homeassistant.const import CONF_PASSWORD, CONF_SOURCE +from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,15 +124,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: ) entry.add_to_hass(hass) - result 
= await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -164,15 +156,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/lyric/test_config_flow.py b/tests/components/lyric/test_config_flow.py index 1e0ae04f741..e1916924e9f 100644 --- a/tests/components/lyric/test_config_flow.py +++ b/tests/components/lyric/test_config_flow.py @@ -126,9 +126,7 @@ async def test_reauthentication_flow( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=old_entry.data - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 From 99335a07e5e476441c07e96fd2db007c72850e69 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 15:47:57 +0200 Subject: [PATCH 1173/1635] Use start_reauth helper method in integration tests (e-g) (#124785) * Use start_reauth helper method in integration tests (e-g) * Include fireservicerota --- tests/components/efergy/test_config_flow.py | 19 ++--- .../electric_kiwi/test_config_flow.py | 14 +--- tests/components/elmax/test_config_flow.py | 76 ++----------------- .../enphase_envoy/test_config_flow.py | 10 +-- tests/components/esphome/test_config_flow.py | 76 +++---------------- tests/components/esphome/test_dashboard.py | 13 +--- tests/components/fibaro/test_config_flow.py | 30 +------- .../fireservicerota/test_config_flow.py | 19 +---- tests/components/fitbit/test_config_flow.py | 16 +--- tests/components/flume/test_config_flow.py | 6 +- tests/components/fritz/test_config_flow.py | 31 ++------ tests/components/fritzbox/test_config_flow.py | 28 +------ .../frontier_silicon/test_config_flow.py | 20 +---- .../fujitsu_fglair/test_config_flow.py | 32 +------- tests/components/fyta/test_config_flow.py | 6 +- .../components/geocaching/test_config_flow.py | 6 +- tests/components/glances/test_config_flow.py | 20 +---- tests/components/google/test_config_flow.py | 18 +---- 18 files changed, 59 insertions(+), 381 deletions(-) diff --git a/tests/components/efergy/test_config_flow.py b/tests/components/efergy/test_config_flow.py index 9a66c42bc9a..8b77bbdc7ab 100644 --- a/tests/components/efergy/test_config_flow.py +++ b/tests/components/efergy/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from pyefergy import exceptions from homeassistant.components.efergy.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -76,20 +76,11 @@ async def test_flow_user_unknown(hass: HomeAssistant) -> None: 
async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with _patch_efergy(), _patch_setup(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - new_conf = {CONF_API_KEY: "1234567890"} result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/electric_kiwi/test_config_flow.py b/tests/components/electric_kiwi/test_config_flow.py index bf248aafb13..d23e70422dd 100644 --- a/tests/components/electric_kiwi/test_config_flow.py +++ b/tests/components/electric_kiwi/test_config_flow.py @@ -18,7 +18,6 @@ from homeassistant.components.electric_kiwi.const import ( OAUTH2_TOKEN, SCOPE_VALUES, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -160,16 +159,11 @@ async def test_reauthentication( setup_credentials: None, ) -> None: """Test Electric Kiwi reauthentication.""" + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": DOMAIN} - ) - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert "flow_id" in flows[0] - - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( hass, diff --git a/tests/components/elmax/test_config_flow.py b/tests/components/elmax/test_config_flow.py index 85e14dd0a3f..7a4d9755fa5 100644 --- a/tests/components/elmax/test_config_flow.py +++ b/tests/components/elmax/test_config_flow.py @@ -21,7 +21,6 @@ from homeassistant.components.elmax.const import ( CONF_ELMAX_USERNAME, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -544,20 +543,7 @@ async def test_show_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -577,24 +563,11 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.elmax.async_setup_entry", return_value=True, ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: 
MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -624,24 +597,11 @@ async def test_reauth_panel_disappeared(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.list_control_panels", return_value=[], ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -670,24 +630,11 @@ async def test_reauth_invalid_pin(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.get_panel_status", side_effect=ElmaxBadPinError(), ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -716,24 +663,11 @@ async def test_reauth_bad_login(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.login", side_effect=ElmaxBadLoginError(), ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index c2cc02fcc7c..f61a0054ed9 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -14,7 +14,6 @@ from homeassistant.components.enphase_envoy.const import ( OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, ) from homeassistant.config_entries import ( - SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER, SOURCE_ZEROCONF, @@ -636,14 +635,7 @@ async def test_reauth( ) -> None: """Test we reauth auth.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 68af6665380..2f91921e7f2 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -798,14 +798,7 @@ async def 
test_reauth_initiation(hass: HomeAssistant, mock_client) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -821,14 +814,7 @@ async def test_reauth_confirm_valid( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") result = await hass.config_entries.flow.async_configure( @@ -875,14 +861,7 @@ async def test_reauth_fixed_via_dashboard( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -896,7 +875,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( hass: HomeAssistant, mock_client, mock_dashboard: dict[str, Any], - mock_config_entry, + mock_config_entry: MockConfigEntry, mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard with password removed.""" @@ -918,14 +897,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -938,21 +910,14 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( async def test_reauth_fixed_via_remove_password( hass: HomeAssistant, mock_client, - mock_config_entry, + mock_config_entry: MockConfigEntry, mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically by seeing password removed.""" mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -981,14 +946,7 @@ async def test_reauth_fixed_via_dashboard_at_confirm( mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": 
entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM, result assert result["step_id"] == "reauth_confirm" @@ -1027,14 +985,7 @@ async def test_reauth_confirm_invalid( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( @@ -1070,14 +1021,7 @@ async def test_reauth_confirm_invalid_with_unique_id( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/esphome/test_dashboard.py b/tests/components/esphome/test_dashboard.py index da805eb2eee..1641804e458 100644 --- a/tests/components/esphome/test_dashboard.py +++ b/tests/components/esphome/test_dashboard.py @@ -6,7 +6,7 @@ from unittest.mock import patch from aioesphomeapi import DeviceInfo, InvalidAuthAPIError from homeassistant.components.esphome import CONF_NOISE_PSK, coordinator, dashboard -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,7 +150,7 @@ async def test_new_info_reload_config_entries( async def test_new_dashboard_fix_reauth( - hass: HomeAssistant, mock_client, mock_config_entry, mock_dashboard + hass: HomeAssistant, mock_client, mock_config_entry: MockConfigEntry, mock_dashboard ) -> None: """Test config entries waiting for reauth are triggered.""" mock_client.device_info.side_effect = ( @@ -162,14 +162,7 @@ async def test_new_dashboard_fix_reauth( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert len(mock_get_encryption_key.mock_calls) == 0 diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py index b6b4e3992cd..508bb81973d 100644 --- a/tests/components/fibaro/test_config_flow.py +++ b/tests/components/fibaro/test_config_flow.py @@ -183,15 +183,7 @@ async def test_reauth_success( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -211,15 +203,7 @@ async def 
test_reauth_connect_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -244,15 +228,7 @@ async def test_reauth_auth_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/fireservicerota/test_config_flow.py b/tests/components/fireservicerota/test_config_flow.py index 539906d800b..5555a8d649c 100644 --- a/tests/components/fireservicerota/test_config_flow.py +++ b/tests/components/fireservicerota/test_config_flow.py @@ -120,23 +120,8 @@ async def test_reauth(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_USERNAME] ) entry.add_to_hass(hass) - with patch( - "homeassistant.components.fireservicerota.config_flow.FireServiceRota" - ) as mock_fsr: - mock_fireservicerota = mock_fsr.return_value - mock_fireservicerota.request_tokens.return_value = MOCK_TOKEN_INFO - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - }, - data=MOCK_CONF, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM with ( patch( diff --git a/tests/components/fitbit/test_config_flow.py b/tests/components/fitbit/test_config_flow.py index d5f3d09abdd..6f717459486 100644 --- a/tests/components/fitbit/test_config_flow.py +++ b/tests/components/fitbit/test_config_flow.py @@ -472,13 +472,7 @@ async def test_reauth_flow( assert len(entries) == 1 # config_entry.req initiates reauth - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -546,13 +540,7 @@ async def test_reauth_wrong_user_id( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index 915299223e9..87fe3a2bbf0 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -124,11 +124,7 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": 
config_entries.SOURCE_REAUTH, "unique_id": "test@test.org"}, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index a54acbb0ac0..deefe7e4e77 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -23,12 +23,7 @@ from homeassistant.components.fritz.const import ( FRITZ_AUTH_EXCEPTIONS, ) from homeassistant.components.ssdp import ATTR_UPNP_UDN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -310,6 +305,9 @@ async def test_reauth_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -335,15 +333,6 @@ async def test_reauth_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -376,20 +365,14 @@ async def test_reauth_not_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.fritz.config_flow.FritzConnection", side_effect=side_effect, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index 72d36a8ab63..fd53bd2e637 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -12,12 +12,7 @@ from requests.exceptions import HTTPError from homeassistant.components import ssdp from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,12 +124,7 @@ async def test_reauth_success(hass: HomeAssistant, fritz: Mock) -> None: """Test starting a reauthentication flow.""" mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - 
result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -158,12 +148,7 @@ async def test_reauth_auth_failed(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -186,12 +171,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/frontier_silicon/test_config_flow.py b/tests/components/frontier_silicon/test_config_flow.py index 04bd1febdf8..a6c1ba1e74f 100644 --- a/tests/components/frontier_silicon/test_config_flow.py +++ b/tests/components/frontier_silicon/test_config_flow.py @@ -356,15 +356,7 @@ async def test_reauth_flow(hass: HomeAssistant, config_entry: MockConfigEntry) - config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" @@ -395,15 +387,7 @@ async def test_reauth_flow_friendly_name_error( config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" diff --git a/tests/components/fujitsu_fglair/test_config_flow.py b/tests/components/fujitsu_fglair/test_config_flow.py index fc6afd9b8f0..2828cf95339 100644 --- a/tests/components/fujitsu_fglair/test_config_flow.py +++ b/tests/components/fujitsu_fglair/test_config_flow.py @@ -6,7 +6,7 @@ from ayla_iot_unofficial import AylaAuthError import pytest from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType @@ -116,20 +116,7 @@ async def test_reauth_success( """Test reauth flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - 
DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": "test"}, - }, - data={ - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -164,20 +151,7 @@ async def test_reauth_exceptions( """Test reauth flow when an exception occurs.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": "test"}, - }, - data={ - CONF_USERNAME: TEST_USERNAME, - CONF_PASSWORD: TEST_PASSWORD, - CONF_EUROPE: False, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fyta/test_config_flow.py b/tests/components/fyta/test_config_flow.py index df0626d0af0..e47b78aa893 100644 --- a/tests/components/fyta/test_config_flow.py +++ b/tests/components/fyta/test_config_flow.py @@ -158,11 +158,7 @@ async def test_reauth( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/geocaching/test_config_flow.py b/tests/components/geocaching/test_config_flow.py index 0c2ce66b513..5db89de0868 100644 --- a/tests/components/geocaching/test_config_flow.py +++ b/tests/components/geocaching/test_config_flow.py @@ -14,7 +14,7 @@ from homeassistant.components.geocaching.const import ( ENVIRONMENT, ENVIRONMENT_URLS, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -195,9 +195,7 @@ async def test_reauthentication( """Test Geocaching reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH} - ) + result = await mock_config_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/glances/test_config_flow.py b/tests/components/glances/test_config_flow.py index a7d6934e32d..0fabc387a4f 100644 --- a/tests/components/glances/test_config_flow.py +++ b/tests/components/glances/test_config_flow.py @@ -89,15 +89,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - glances.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_USER_INPUT, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["description_placeholders"] == {"username": "username"} @@ -128,15 +120,7 @@ async def test_reauth_fails( entry.add_to_hass(hass) mock_api.return_value.get_ha_sensor_data.side_effect = [error, 
HA_SENSOR_DATA] - result = await hass.config_entries.flow.async_init( - glances.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_USER_INPUT, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["description_placeholders"] == {"username": "username"} diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index f4a6c97f50d..b7962921ffd 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -497,14 +497,7 @@ async def test_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -761,14 +754,7 @@ async def test_web_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" From bdd3aa8e399c0c2ec8cfcf8f5f53b3ea41460523 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 15:48:36 +0200 Subject: [PATCH 1174/1635] Use start_reauth helper method in config flow tests (a-d) (#124780) * Use start_reauth helper method in integration tests (a-d) * Revert broadlink * Revert bthome --- .../aseko_pool_live/test_config_flow.py | 16 +------- .../aussie_broadband/test_config_flow.py | 9 +---- tests/components/blink/test_config_flow.py | 9 +++-- .../blue_current/test_config_flow.py | 19 +++------ .../bmw_connected_drive/test_config_flow.py | 10 +---- .../components/bosch_shc/test_config_flow.py | 6 +-- tests/components/braviatv/test_config_flow.py | 14 ++----- tests/components/bring/test_config_flow.py | 22 ++-------- tests/components/brunt/test_config_flow.py | 10 +---- tests/components/caldav/test_config_flow.py | 16 +------- .../components/cloudflare/test_config_flow.py | 12 +----- tests/components/comelit/test_config_flow.py | 26 ++++-------- tests/components/deconz/test_config_flow.py | 14 +------ tests/components/deluge/test_config_flow.py | 13 +----- .../devolo_home_control/test_config_flow.py | 40 +++++++------------ .../devolo_home_network/test_config_flow.py | 13 +----- tests/components/discord/__init__.py | 3 +- tests/components/discord/test_config_flow.py | 13 +----- .../dormakaba_dkey/test_config_flow.py | 6 +-- 19 files changed, 59 insertions(+), 212 deletions(-) diff --git a/tests/components/aseko_pool_live/test_config_flow.py b/tests/components/aseko_pool_live/test_config_flow.py index 4307e527cee..e4dedf36da4 100644 --- a/tests/components/aseko_pool_live/test_config_flow.py +++ b/tests/components/aseko_pool_live/test_config_flow.py @@ -133,13 +133,7 @@ async def test_async_step_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": 
config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -179,13 +173,7 @@ async def test_async_step_reauth_exception( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", diff --git a/tests/components/aussie_broadband/test_config_flow.py b/tests/components/aussie_broadband/test_config_flow.py index b79ed41b251..76e96c5cc02 100644 --- a/tests/components/aussie_broadband/test_config_flow.py +++ b/tests/components/aussie_broadband/test_config_flow.py @@ -173,14 +173,7 @@ async def test_reauth(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) # Test failed reauth - result5 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=FAKE_DATA, - ) + result5 = await mock_entry.start_reauth_flow(hass) assert result5["step_id"] == "reauth_confirm" with ( diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py index 9c3193ec7d6..c89ab65ea1d 100644 --- a/tests/components/blink/test_config_flow.py +++ b/tests/components/blink/test_config_flow.py @@ -10,6 +10,8 @@ from homeassistant.components.blink import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -292,10 +294,11 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_shows_user_step(hass: HomeAssistant) -> None: """Test reauth shows the user form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, + mock_entry = MockConfigEntry( + domain=DOMAIN, data={"username": "blink@example.com", "password": "invalid_password"}, ) + mock_entry.add_to_hass(hass) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/blue_current/test_config_flow.py b/tests/components/blue_current/test_config_flow.py index 33346990425..a9dea70431f 100644 --- a/tests/components/blue_current/test_config_flow.py +++ b/tests/components/blue_current/test_config_flow.py @@ -129,6 +129,11 @@ async def test_reauth( expected_api_token: str, ) -> None: """Test reauth flow.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with ( patch( "homeassistant.components.blue_current.config_flow.Client.validate_api_token", @@ -146,20 +151,6 @@ async def test_reauth( lambda self, on_data, on_open: hass.loop.create_future(), ), ): - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data={"api_token": "123"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={"api_token": "1234567890"}, diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f346cd70b26..f71730fcc17 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -188,15 +188,7 @@ async def test_reauth(hass: HomeAssistant) -> None: assert config_entry.data == config_entry_with_wrong_password["data"] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} diff --git a/tests/components/bosch_shc/test_config_flow.py b/tests/components/bosch_shc/test_config_flow.py index 2c43ec0a370..eaabe112807 100644 --- a/tests/components/bosch_shc/test_config_flow.py +++ b/tests/components/bosch_shc/test_config_flow.py @@ -646,11 +646,7 @@ async def test_reauth(hass: HomeAssistant) -> None: title="shc012345", ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/braviatv/test_config_flow.py b/tests/components/braviatv/test_config_flow.py index 6fc02dbd36f..7a4f93f7f16 100644 --- a/tests/components/braviatv/test_config_flow.py +++ b/tests/components/braviatv/test_config_flow.py @@ -17,7 +17,7 @@ from homeassistant.components.braviatv.const import ( DOMAIN, NICKNAME_PREFIX, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_HOST, CONF_MAC, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -405,6 +405,9 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: title="TV-Model", ) config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "authorize" with ( patch("pybravia.BraviaClient.connect"), @@ -421,15 +424,6 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: return_value={}, ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=config_entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "authorize" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_USE_PSK: use_psk} ) diff --git a/tests/components/bring/test_config_flow.py b/tests/components/bring/test_config_flow.py index d307e0ccbbe..8d215a5d3ee 100644 --- a/tests/components/bring/test_config_flow.py +++ b/tests/components/bring/test_config_flow.py @@ -10,7 +10,7 @@ from bring_api.exceptions import ( import pytest from homeassistant.components.bring.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries 
import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,15 +123,7 @@ async def test_flow_reauth( bring_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": bring_config_entry.entry_id, - "unique_id": bring_config_entry.unique_id, - }, - ) - + result = await bring_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -171,15 +163,7 @@ async def test_flow_reauth_error_and_recover( bring_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": bring_config_entry.entry_id, - "unique_id": bring_config_entry.unique_id, - }, - ) - + result = await bring_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/brunt/test_config_flow.py b/tests/components/brunt/test_config_flow.py index 2796882a3c1..7a805a9ee52 100644 --- a/tests/components/brunt/test_config_flow.py +++ b/tests/components/brunt/test_config_flow.py @@ -110,15 +110,7 @@ async def test_reauth( unique_id="test-username", ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=None, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" with patch( diff --git a/tests/components/caldav/test_config_flow.py b/tests/components/caldav/test_config_flow.py index 0079e59a931..bf22fb0bd9c 100644 --- a/tests/components/caldav/test_config_flow.py +++ b/tests/components/caldav/test_config_flow.py @@ -106,13 +106,7 @@ async def test_reauth_success( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -147,13 +141,7 @@ async def test_reauth_failure( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/cloudflare/test_config_flow.py b/tests/components/cloudflare/test_config_flow.py index 1278113c0c7..f34a423833c 100644 --- a/tests/components/cloudflare/test_config_flow.py +++ b/tests/components/cloudflare/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock import pycfdns from homeassistant.components.cloudflare.const import CONF_RECORDS, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -151,15 +151,7 @@ async def test_reauth_flow(hass: HomeAssistant, 
cfupdate_flow: MagicMock) -> Non entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/comelit/test_config_flow.py b/tests/components/comelit/test_config_flow.py index 333bf09bd20..eeaea0e41e9 100644 --- a/tests/components/comelit/test_config_flow.py +++ b/tests/components/comelit/test_config_flow.py @@ -7,7 +7,7 @@ from aiocomelit import CannotAuthenticate, CannotConnect import pytest from homeassistant.components.comelit.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -100,6 +100,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -113,15 +116,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: ): mock_request_get.return_value.status_code = 200 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -147,6 +141,9 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch("aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect), @@ -155,15 +152,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> ), patch("homeassistant.components.comelit.async_setup_entry"), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index 49711962407..8555a6e333b 100644 --- a/tests/components/deconz/test_config_flow.py +++ b/tests/components/deconz/test_config_flow.py @@ -22,12 +22,7 @@ from homeassistant.components.deconz.const import ( ) from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL -from homeassistant.config_entries import ( - SOURCE_HASSIO, - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_HASSIO, 
SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -407,12 +402,7 @@ async def test_reauth_flow_update_configuration( config_entry_setup: MockConfigEntry, ) -> None: """Verify reauth flow can update gateway API key.""" - result = await hass.config_entries.flow.async_init( - DECONZ_DOMAIN, - data=config_entry_setup.data, - context={"source": SOURCE_REAUTH}, - ) - + result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/deluge/test_config_flow.py b/tests/components/deluge/test_config_flow.py index 37229d4a72e..c336fc81cc6 100644 --- a/tests/components/deluge/test_config_flow.py +++ b/tests/components/deluge/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant.components.deluge.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -113,16 +113,7 @@ async def test_flow_reauth(hass: HomeAssistant, api) -> None: entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/devolo_home_control/test_config_flow.py b/tests/components/devolo_home_control/test_config_flow.py index 48f9bf31f4f..7c9bfdeff63 100644 --- a/tests/components/devolo_home_control/test_config_flow.py +++ b/tests/components/devolo_home_control/test_config_flow.py @@ -164,21 +164,17 @@ async def test_zeroconf_wrong_device(hass: HomeAssistant) -> None: async def test_form_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) - + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -205,20 +201,17 @@ async def test_form_reauth(hass: HomeAssistant) -> None: @pytest.mark.parametrize("credentials_valid", [False]) async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: """Test if we get the error message on invalid credentials.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", 
"mydevolo_url": "https://test_mydevolo_url.test", }, ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -230,20 +223,17 @@ async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: async def test_form_uuid_change_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 5aa2bfa274e..5234d0f073e 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -179,18 +179,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: entry = configure_integration(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": { - CONF_NAME: DISCOVERY_INFO.hostname.split(".")[0], - }, - }, - data=entry.data, - ) - + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/discord/__init__.py b/tests/components/discord/__init__.py index bf7c188b7b5..1d81388d1e3 100644 --- a/tests/components/discord/__init__.py +++ b/tests/components/discord/__init__.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, Mock, patch import nextcord from homeassistant.components.discord.const import DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import HomeAssistant @@ -22,7 +21,7 @@ CONF_DATA = { } -def create_entry(hass: HomeAssistant) -> ConfigEntry: +def create_entry(hass: HomeAssistant) -> MockConfigEntry: """Add config entry in Home Assistant.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/discord/test_config_flow.py b/tests/components/discord/test_config_flow.py index 9b37179e86d..e9a1344c555 100644 --- a/tests/components/discord/test_config_flow.py +++ b/tests/components/discord/test_config_flow.py @@ -4,7 +4,7 @@ import nextcord from homeassistant import config_entries from homeassistant.components.discord.const import DOMAIN -from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE +from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,16 +123,7 @@ async def test_flow_user_unknown_error(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test a reauth flow.""" entry = create_entry(hass) - result = await hass.config_entries.flow.async_init( - 
DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/dormakaba_dkey/test_config_flow.py b/tests/components/dormakaba_dkey/test_config_flow.py index 499e5844949..8d8140d609a 100644 --- a/tests/components/dormakaba_dkey/test_config_flow.py +++ b/tests/components/dormakaba_dkey/test_config_flow.py @@ -310,11 +310,7 @@ async def test_reauth(hass: HomeAssistant) -> None: data={"address": DKEY_DISCOVERY_INFO.address}, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" From 731aaaafe2627552cc8519d967f42f5f4499325a Mon Sep 17 00:00:00 2001 From: Aidan Timson Date: Wed, 28 Aug 2024 15:01:32 +0100 Subject: [PATCH 1175/1635] Update aioazuredevops to 2.2.1 (#124788) * Update aioazuredevops to 2.2.1 * Update test --- homeassistant/components/azure_devops/coordinator.py | 2 +- homeassistant/components/azure_devops/data.py | 2 +- homeassistant/components/azure_devops/manifest.json | 2 +- homeassistant/components/azure_devops/sensor.py | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/azure_devops/__init__.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/azure_devops/coordinator.py b/homeassistant/components/azure_devops/coordinator.py index 22dbe32c103..2460a9bbfce 100644 --- a/homeassistant/components/azure_devops/coordinator.py +++ b/homeassistant/components/azure_devops/coordinator.py @@ -6,7 +6,7 @@ import logging from typing import Final from aioazuredevops.client import DevOpsClient -from aioazuredevops.models.builds import Build +from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project import aiohttp diff --git a/homeassistant/components/azure_devops/data.py b/homeassistant/components/azure_devops/data.py index 6d9e2069b67..c2da38ccc09 100644 --- a/homeassistant/components/azure_devops/data.py +++ b/homeassistant/components/azure_devops/data.py @@ -2,7 +2,7 @@ from dataclasses import dataclass -from aioazuredevops.models.builds import Build +from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project diff --git a/homeassistant/components/azure_devops/manifest.json b/homeassistant/components/azure_devops/manifest.json index 48ceee5f9d8..5086e44ab0f 100644 --- a/homeassistant/components/azure_devops/manifest.json +++ b/homeassistant/components/azure_devops/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/azure_devops", "iot_class": "cloud_polling", "loggers": ["aioazuredevops"], - "requirements": ["aioazuredevops==2.1.1"] + "requirements": ["aioazuredevops==2.2.1"] } diff --git a/homeassistant/components/azure_devops/sensor.py b/homeassistant/components/azure_devops/sensor.py index 029d3d875dc..7b7af1dd666 100644 --- a/homeassistant/components/azure_devops/sensor.py +++ b/homeassistant/components/azure_devops/sensor.py @@ -8,7 +8,7 @@ from datetime import datetime import logging from typing import Any -from aioazuredevops.models.builds import Build +from 
aioazuredevops.models.build import Build from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/requirements_all.txt b/requirements_all.txt index 3e4fedbc15b..62def94cdeb 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -201,7 +201,7 @@ aioasuswrt==1.4.0 aioautomower==2024.8.0 # homeassistant.components.azure_devops -aioazuredevops==2.1.1 +aioazuredevops==2.2.1 # homeassistant.components.baf aiobafi6==0.9.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ab1aa13ec3c..fe296036c50 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -189,7 +189,7 @@ aioasuswrt==1.4.0 aioautomower==2024.8.0 # homeassistant.components.azure_devops -aioazuredevops==2.1.1 +aioazuredevops==2.2.1 # homeassistant.components.baf aiobafi6==0.9.0 diff --git a/tests/components/azure_devops/__init__.py b/tests/components/azure_devops/__init__.py index d636a6fda6d..cc4732b1495 100644 --- a/tests/components/azure_devops/__init__.py +++ b/tests/components/azure_devops/__init__.py @@ -2,7 +2,7 @@ from typing import Final -from aioazuredevops.models.builds import Build, BuildDefinition +from aioazuredevops.models.build import Build, BuildDefinition from aioazuredevops.models.core import Project from homeassistant.components.azure_devops.const import CONF_ORG, CONF_PAT, CONF_PROJECT From 57a73d1b1b8417f1c257f7fb58466a2cd3c78eb7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:19:31 +0200 Subject: [PATCH 1176/1635] Use start_reauth helper method in integration tests (m-o) (#124790) --- tests/components/mealie/test_config_flow.py | 20 ++-------- tests/components/meater/test_config_flow.py | 6 +-- tests/components/melcloud/test_config_flow.py | 32 ++------------- .../components/microbees/test_config_flow.py | 20 ++-------- tests/components/mikrotik/test_config_flow.py | 27 ++----------- tests/components/monzo/test_config_flow.py | 18 +-------- .../components/motioneye/test_config_flow.py | 9 +---- tests/components/mqtt/test_config_flow.py | 9 +---- tests/components/myuplink/test_config_flow.py | 9 +---- tests/components/nam/test_config_flow.py | 25 +++--------- tests/components/nanoleaf/test_config_flow.py | 10 +---- tests/components/neato/test_config_flow.py | 9 ++--- .../components/nextcloud/test_config_flow.py | 8 +--- tests/components/notion/test_config_flow.py | 16 +++----- tests/components/nuki/test_config_flow.py | 16 ++------ .../components/octoprint/test_config_flow.py | 10 +---- tests/components/onvif/test_config_flow.py | 6 +-- .../openexchangerates/test_config_flow.py | 11 +---- tests/components/openuv/test_config_flow.py | 10 ++--- .../components/osoenergy/test_config_flow.py | 10 +---- tests/components/overkiz/test_config_flow.py | 40 ++----------------- .../components/ovo_energy/test_config_flow.py | 35 +++++++--------- 22 files changed, 70 insertions(+), 286 deletions(-) diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index f2886578744..777d25fdef5 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -6,7 +6,7 @@ from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import 
CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -152,11 +152,7 @@ async def test_reauth_flow( """Test reauth flow.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -179,11 +175,7 @@ async def test_reauth_flow_wrong_account( """Test reauth flow with wrong account.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -218,11 +210,7 @@ async def test_reauth_flow_exceptions( await setup_integration(hass, mock_config_entry) mock_mealie_client.get_user_info.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/meater/test_config_flow.py b/tests/components/meater/test_config_flow.py index b8c1be15268..9049cf4ac9a 100644 --- a/tests/components/meater/test_config_flow.py +++ b/tests/components/meater/test_config_flow.py @@ -123,11 +123,7 @@ async def test_reauth_flow(hass: HomeAssistant, mock_meater) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/melcloud/test_config_flow.py b/tests/components/melcloud/test_config_flow.py index c1c6c10ac4c..74b16aab6ed 100644 --- a/tests/components/melcloud/test_config_flow.py +++ b/tests/components/melcloud/test_config_flow.py @@ -9,7 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.melcloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE +from homeassistant.config_entries import SOURCE_RECONFIGURE from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -166,15 +166,7 @@ async def test_token_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -212,15 +204,7 @@ async def test_form_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - 
"entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", @@ -270,15 +254,7 @@ async def test_client_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", diff --git a/tests/components/microbees/test_config_flow.py b/tests/components/microbees/test_config_flow.py index d168dcd5017..f4e074d000d 100644 --- a/tests/components/microbees/test_config_flow.py +++ b/tests/components/microbees/test_config_flow.py @@ -6,7 +6,7 @@ from microBeesPy import MicroBeesException import pytest from homeassistant.components.microbees.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -144,14 +144,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -205,14 +198,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, config_entry) microbees.return_value.getMyProfile.return_value.id = 12345 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/mikrotik/test_config_flow.py b/tests/components/mikrotik/test_config_flow.py index f34fde0c9a5..d95a6488fc7 100644 --- a/tests/components/mikrotik/test_config_flow.py +++ b/tests/components/mikrotik/test_config_flow.py @@ -175,14 +175,7 @@ async def test_reauth_success(hass: HomeAssistant, api) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -207,14 +200,7 @@ async def test_reauth_failed(hass: HomeAssistant, auth_error) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -240,14 +226,7 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant, conn_error) -> None ) entry.add_to_hass(hass) - result = 
await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/monzo/test_config_flow.py b/tests/components/monzo/test_config_flow.py index b7d0de9cdc3..63daa2bfb43 100644 --- a/tests/components/monzo/test_config_flow.py +++ b/tests/components/monzo/test_config_flow.py @@ -154,14 +154,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -223,14 +216,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/motioneye/test_config_flow.py b/tests/components/motioneye/test_config_flow.py index 816fb31933a..d2ec91b08e3 100644 --- a/tests/components/motioneye/test_config_flow.py +++ b/tests/components/motioneye/test_config_flow.py @@ -264,14 +264,7 @@ async def test_reauth(hass: HomeAssistant) -> None: config_entry = create_mock_motioneye_config_entry(hass, data=config_data) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 907e3ef9946..d2f399899b1 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -1551,14 +1551,7 @@ async def test_step_reauth( assert result["context"]["source"] == "reauth" # Show the form - result = await hass.config_entries.flow.async_init( - mqtt.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index 3ae32575257..c24d26057de 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -105,14 +105,7 @@ async def test_flow_reauth( assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await 
mock_config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/nam/test_config_flow.py b/tests/components/nam/test_config_flow.py index b96eddfd18b..f3465e59fb6 100644 --- a/tests/components/nam/test_config_flow.py +++ b/tests/components/nam/test_config_flow.py @@ -9,7 +9,6 @@ import pytest from homeassistant.components import zeroconf from homeassistant.components.nam.const import DOMAIN from homeassistant.config_entries import ( - SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER, SOURCE_ZEROCONF, @@ -122,6 +121,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -133,15 +135,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: return_value="aa:bb:cc:dd:ee:ff", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, @@ -160,20 +153,14 @@ async def test_reauth_unsuccessful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", side_effect=ApiError("API Error"), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, diff --git a/tests/components/nanoleaf/test_config_flow.py b/tests/components/nanoleaf/test_config_flow.py index eaa1c60dcd4..97a314b0bf4 100644 --- a/tests/components/nanoleaf/test_config_flow.py +++ b/tests/components/nanoleaf/test_config_flow.py @@ -297,15 +297,7 @@ async def test_reauth(hass: HomeAssistant) -> None: return_value=True, ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/neato/test_config_flow.py b/tests/components/neato/test_config_flow.py index 1b86c4e9980..c5289927d91 100644 --- a/tests/components/neato/test_config_flow.py +++ b/tests/components/neato/test_config_flow.py @@ -111,16 +111,15 @@ async def test_reauth( hass, NEATO_DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) ) - MockConfigEntry( + entry = MockConfigEntry( entry_id="my_entry", domain=NEATO_DOMAIN, data={"username": "abcdef", "password": "123456", "vendor": "neato"}, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Should show form - result = await hass.config_entries.flow.async_init( - "neato", context={"source": config_entries.SOURCE_REAUTH} - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert 
result["step_id"] == "reauth_confirm" diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index 536464d5253..16b6bf3bc04 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -11,7 +11,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.nextcloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -135,11 +135,7 @@ async def test_reauth(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: entry.add_to_hass(hass) # start reauth flow - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/notion/test_config_flow.py b/tests/components/notion/test_config_flow.py index 2cc5e3f04b7..15c211c19cb 100644 --- a/tests/components/notion/test_config_flow.py +++ b/tests/components/notion/test_config_flow.py @@ -6,13 +6,15 @@ from aionotion.errors import InvalidCredentialsError, NotionError import pytest from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_REFRESH_TOKEN, TEST_USER_UUID, TEST_USERNAME +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -90,21 +92,13 @@ async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> Non async def test_reauth( hass: HomeAssistant, config, - config_entry, + config_entry: MockConfigEntry, errors, get_client_with_exception, mock_aionotion, ) -> None: """Test that re-auth works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=config, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" # Test errors that can arise when getting a Notion API client: diff --git a/tests/components/nuki/test_config_flow.py b/tests/components/nuki/test_config_flow.py index cdd429c40c5..d4ddc261f1e 100644 --- a/tests/components/nuki/test_config_flow.py +++ b/tests/components/nuki/test_config_flow.py @@ -210,9 +210,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: """Test starting a reauthentication flow.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -241,9 +239,7 @@ async def test_reauth_invalid_auth(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with invalid auth.""" entry = await 
setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -265,9 +261,7 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with cannot connect.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -289,9 +283,7 @@ async def test_reauth_unknown_exception(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with an unknown exception.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/octoprint/test_config_flow.py b/tests/components/octoprint/test_config_flow.py index 738fbea0887..e0696486718 100644 --- a/tests/components/octoprint/test_config_flow.py +++ b/tests/components/octoprint/test_config_flow.py @@ -580,15 +580,7 @@ async def test_reauth_form(hass: HomeAssistant) -> None: unique_id="1234", ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "entry_id": entry.entry_id, - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/onvif/test_config_flow.py b/tests/components/onvif/test_config_flow.py index c0e5a6fe545..f7200aa7a00 100644 --- a/tests/components/onvif/test_config_flow.py +++ b/tests/components/onvif/test_config_flow.py @@ -769,11 +769,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: """Test reauthenticate.""" entry, _, _ = await setup_onvif_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert ( diff --git a/tests/components/openexchangerates/test_config_flow.py b/tests/components/openexchangerates/test_config_flow.py index ec06c662201..0d4744c057a 100644 --- a/tests/components/openexchangerates/test_config_flow.py +++ b/tests/components/openexchangerates/test_config_flow.py @@ -200,16 +200,7 @@ async def test_reauth( ) -> None: """Test we can reauthenticate the config entry.""" mock_config_entry.add_to_hass(hass) - flow_context = { - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - } - - result = await hass.config_entries.flow.async_init( - DOMAIN, context=flow_context, data=mock_config_entry.data - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git 
a/tests/components/openuv/test_config_flow.py b/tests/components/openuv/test_config_flow.py index 3d31cf53250..182f66c887f 100644 --- a/tests/components/openuv/test_config_flow.py +++ b/tests/components/openuv/test_config_flow.py @@ -7,7 +7,7 @@ import pytest import voluptuous as vol from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_ELEVATION, @@ -19,6 +19,8 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_API_KEY, TEST_ELEVATION, TEST_LATITUDE, TEST_LONGITUDE +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -105,12 +107,10 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/osoenergy/test_config_flow.py b/tests/components/osoenergy/test_config_flow.py index d9db5888cc3..0b7a3c30cf2 100644 --- a/tests/components/osoenergy/test_config_flow.py +++ b/tests/components/osoenergy/test_config_flow.py @@ -65,15 +65,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", return_value=None, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - "entry_id": mock_config.entry_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} diff --git a/tests/components/overkiz/test_config_flow.py b/tests/components/overkiz/test_config_flow.py index 50870ae85fe..cef5ef350a9 100644 --- a/tests/components/overkiz/test_config_flow.py +++ b/tests/components/overkiz/test_config_flow.py @@ -573,15 +573,7 @@ async def test_cloud_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -623,15 +615,7 @@ async def test_cloud_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -672,15 +656,7 @@ async def test_local_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - 
DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" @@ -731,15 +707,7 @@ async def test_local_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index 00899e745b9..c3f77ca5007 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -121,15 +121,15 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on authorization error.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -147,15 +147,15 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", side_effect=aiohttp.ClientError, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -173,20 +173,15 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: async def test_reauth_flow(hass: HomeAssistant) -> None: """Test reauth works.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT - ) - mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" From 5824d06fd772249ca789695c1196a86bd4c48c80 Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Wed, 28 Aug 2024 10:19:48 -0400 Subject: 
[PATCH 1177/1635] Add get_queue action for Sonos (#124707) * initial commit * use constants * use constants * update typing * add queue fixture * remove blank line * update docstring * update icons * use list comprehension --- homeassistant/components/sonos/icons.json | 3 ++ .../components/sonos/media_player.py | 30 +++++++++++++++++-- homeassistant/components/sonos/services.yaml | 5 ++++ homeassistant/components/sonos/strings.json | 4 +++ tests/components/sonos/conftest.py | 19 +++++++++++- .../sonos/fixtures/sonos_queue.json | 30 +++++++++++++++++++ .../sonos/snapshots/test_media_player.ambr | 18 +++++++++++ tests/components/sonos/test_media_player.py | 23 ++++++++++++++ 8 files changed, 129 insertions(+), 3 deletions(-) create mode 100644 tests/components/sonos/fixtures/sonos_queue.json diff --git a/homeassistant/components/sonos/icons.json b/homeassistant/components/sonos/icons.json index e7403b45453..45027d8eabd 100644 --- a/homeassistant/components/sonos/icons.json +++ b/homeassistant/components/sonos/icons.json @@ -64,6 +64,9 @@ }, "update_alarm": { "service": "mdi:alarm" + }, + "get_queue": { + "service": "mdi:queue-first-in-last-out" } } } diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index e68d3dfa97a..75527bdcb72 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -14,7 +14,7 @@ from soco.core import ( PLAY_MODE_BY_MEANING, PLAY_MODES, ) -from soco.data_structures import DidlFavorite +from soco.data_structures import DidlFavorite, DidlMusicTrack from soco.ms_data_structures import MusicServiceItem from sonos_websocket.exception import SonosWebsocketError import voluptuous as vol @@ -22,8 +22,12 @@ import voluptuous as vol from homeassistant.components import media_source, spotify from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, + ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_ARTIST, + ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_TITLE, BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEnqueue, @@ -38,7 +42,7 @@ from homeassistant.components.plex import PLEX_URI_SCHEME from homeassistant.components.plex.services import process_plex_payload from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TIME -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform, service from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -88,6 +92,7 @@ SERVICE_CLEAR_TIMER = "clear_sleep_timer" SERVICE_UPDATE_ALARM = "update_alarm" SERVICE_PLAY_QUEUE = "play_queue" SERVICE_REMOVE_FROM_QUEUE = "remove_from_queue" +SERVICE_GET_QUEUE = "get_queue" ATTR_SLEEP_TIME = "sleep_time" ATTR_ALARM_ID = "alarm_id" @@ -190,6 +195,13 @@ async def async_setup_entry( "remove_from_queue", ) + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + None, + "get_queue", + supports_response=SupportsResponse.ONLY, + ) + class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Representation of a Sonos entity.""" @@ -741,6 +753,20 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Remove item from the queue.""" self.coordinator.soco.remove_from_queue(queue_position) + @soco_error() + def get_queue(self) -> list[dict]: + 
"""Get the queue.""" + queue: list[DidlMusicTrack] = self.coordinator.soco.get_queue(max_items=0) + return [ + { + ATTR_MEDIA_TITLE: track.title, + ATTR_MEDIA_ALBUM_NAME: track.album, + ATTR_MEDIA_ARTIST: track.creator, + ATTR_MEDIA_CONTENT_ID: track.get_uri(), + } + for track in queue + ] + @property def extra_state_attributes(self) -> dict[str, Any]: """Return entity specific state attributes.""" diff --git a/homeassistant/components/sonos/services.yaml b/homeassistant/components/sonos/services.yaml index f6df83ef6ed..6d6e7ef83f9 100644 --- a/homeassistant/components/sonos/services.yaml +++ b/homeassistant/components/sonos/services.yaml @@ -63,6 +63,11 @@ remove_from_queue: max: 10000 mode: box +get_queue: + target: + entity: + domain: media_player + update_alarm: target: device: diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 7a73378d69b..264420ef758 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -172,6 +172,10 @@ "description": "Enable or disable including grouped rooms." } } + }, + "get_queue": { + "name": "Get queue", + "description": "Returns the contents of the queue." } }, "exceptions": { diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index 6abb010557e..04b35e2c021 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -10,7 +10,12 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from soco import SoCo from soco.alarms import Alarms -from soco.data_structures import DidlFavorite, DidlPlaylistContainer, SearchResult +from soco.data_structures import ( + DidlFavorite, + DidlMusicTrack, + DidlPlaylistContainer, + SearchResult, +) from soco.events_base import Event as SonosEvent from homeassistant.components import ssdp, zeroconf @@ -185,6 +190,7 @@ class SoCoMockFactory: battery_info, alarm_clock, sonos_playlists: SearchResult, + sonos_queue: list[DidlMusicTrack], ) -> None: """Initialize the mock factory.""" self.mock_list: dict[str, MockSoCo] = {} @@ -194,6 +200,7 @@ class SoCoMockFactory: self.battery_info = battery_info self.alarm_clock = alarm_clock self.sonos_playlists = sonos_playlists + self.sonos_queue = sonos_queue def cache_mock( self, mock_soco: MockSoCo, ip_address: str, name: str = "Zone A" @@ -207,6 +214,7 @@ class SoCoMockFactory: mock_soco.get_current_track_info.return_value = self.current_track_info mock_soco.music_source_from_uri = SoCo.music_source_from_uri mock_soco.get_sonos_playlists.return_value = self.sonos_playlists + mock_soco.get_queue.return_value = self.sonos_queue my_speaker_info = self.speaker_info.copy() my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid @@ -277,6 +285,7 @@ def soco_factory( alarm_clock, sonos_playlists: SearchResult, sonos_websocket, + sonos_queue: list[DidlMusicTrack], ): """Create factory for instantiating SoCo mocks.""" factory = SoCoMockFactory( @@ -286,6 +295,7 @@ def soco_factory( battery_info, alarm_clock, sonos_playlists, + sonos_queue=sonos_queue, ) with ( patch("homeassistant.components.sonos.SoCo", new=factory.get_mock), @@ -370,6 +380,13 @@ def sonos_playlists_fixture() -> SearchResult: return SearchResult(playlists_list, "sonos_playlists", 1, 1, 0) +@pytest.fixture(name="sonos_queue") +def sonos_queue() -> list[DidlMusicTrack]: + """Create sonos queue fixture.""" + queue = load_json_value_fixture("sonos_queue.json", "sonos") + return [DidlMusicTrack.from_dict(track) for track 
in queue] + + class MockMusicServiceItem: """Mocks a Soco MusicServiceItem.""" diff --git a/tests/components/sonos/fixtures/sonos_queue.json b/tests/components/sonos/fixtures/sonos_queue.json new file mode 100644 index 00000000000..50689a00e1d --- /dev/null +++ b/tests/components/sonos/fixtures/sonos_queue.json @@ -0,0 +1,30 @@ +[ + { + "title": "Something", + "album": "Abbey Road", + "creator": "The Beatles", + "item_id": "Q:0/1", + "parent_id": "Q:0", + "original_track_number": 3, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + }, + { + "title": "Come Together", + "album": "Abbey Road", + "creator": "The Beatles", + "item_id": "Q:0/2", + "parent_id": "Q:0", + "original_track_number": 1, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + } +] diff --git a/tests/components/sonos/snapshots/test_media_player.ambr b/tests/components/sonos/snapshots/test_media_player.ambr index 9c43bceb43b..f382d341de6 100644 --- a/tests/components/sonos/snapshots/test_media_player.ambr +++ b/tests/components/sonos/snapshots/test_media_player.ambr @@ -56,3 +56,21 @@ 'state': 'idle', }) # --- +# name: test_media_get_queue + dict({ + 'media_player.zone_a': list([ + dict({ + 'media_album_name': 'Abbey Road', + 'media_artist': 'The Beatles', + 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3', + 'media_title': 'Something', + }), + dict({ + 'media_album_name': 'Abbey Road', + 'media_artist': 'The Beatles', + 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3', + 'media_title': 'Come Together', + }), + ]), + }) +# --- diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index fa77293fbde..ac877f47904 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -32,6 +32,7 @@ from homeassistant.components.sonos.const import ( ) from homeassistant.components.sonos.media_player import ( LONG_SERVICE_TIMEOUT, + SERVICE_GET_QUEUE, SERVICE_RESTORE, SERVICE_SNAPSHOT, VOLUME_INCREMENT, @@ -1121,3 +1122,25 @@ async def test_play_media_announce( blocking=True, ) assert sonos_websocket.play_clip.call_count == 1 + + +async def test_media_get_queue( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + soco_factory, + snapshot: SnapshotAssertion, +) -> None: + """Test getting the media queue.""" + soco_mock = soco_factory.mock_list.get("192.168.42.2") + result = await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + return_response=True, + ) + soco_mock.get_queue.assert_called_with(max_items=0) + assert result == snapshot From ef4caa951c9c6b570513ec2837cad899c80b95c7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:20:47 +0200 Subject: [PATCH 1178/1635] Use start_reauth helper method in integration tests (p-r) (#124792) --- .../components/philips_js/test_config_flow.py | 12 +---- tests/components/picnic/test_config_flow.py | 27 +++++----- tests/components/plex/test_config_flow.py | 13 +---- .../components/powerwall/test_config_flow.py | 6 +-- tests/components/prosegur/test_config_flow.py | 20 +------- 
.../components/purpleair/test_config_flow.py | 16 ++---- tests/components/pushover/test_config_flow.py | 27 ++-------- tests/components/pvoutput/test_config_flow.py | 32 ++---------- tests/components/pyload/test_config_flow.py | 25 ++-------- tests/components/radarr/test_config_flow.py | 12 +---- tests/components/renault/test_config_flow.py | 17 ++----- tests/components/reolink/test_config_flow.py | 11 +--- tests/components/ridwell/test_config_flow.py | 8 +-- tests/components/risco/test_config_flow.py | 18 +++---- .../ruckus_unleashed/test_config_flow.py | 50 ++----------------- tests/components/rympro/test_config_flow.py | 24 +++------ 16 files changed, 61 insertions(+), 257 deletions(-) diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index d7f539db9cf..80d05961813 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -60,7 +60,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry) -> None: async def test_reauth( - hass: HomeAssistant, mock_setup_entry, mock_config_entry, mock_tv + hass: HomeAssistant, mock_setup_entry, mock_config_entry: MockConfigEntry, mock_tv ) -> None: """Test we get the form.""" @@ -69,15 +69,7 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) assert len(mock_setup_entry.mock_calls) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/picnic/test_config_flow.py b/tests/components/picnic/test_config_flow.py index 9ba18dac9a9..8d668b28c16 100644 --- a/tests/components/picnic/test_config_flow.py +++ b/tests/components/picnic/test_config_flow.py @@ -170,16 +170,15 @@ async def test_step_reauth(hass: HomeAssistant, picnic_api) -> None: # Create a mocked config entry conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id=picnic_api().get_user()["user_id"], data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -210,16 +209,15 @@ async def test_step_reauth_failed(hass: HomeAssistant) -> None: user_id = "f29-2a6-o32n" conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id=user_id, data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -249,16 +247,15 @@ async def test_step_reauth_different_account(hass: HomeAssistant, picnic_api) -> # Create a mocked config entry, unique_id should be different that the user id in the api response conf = 
{CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id="3fpawh-ues-af3ho", data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 202d62d70e0..c4ec108bb6b 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -26,7 +26,6 @@ from homeassistant.components.plex.const import ( ) from homeassistant.config_entries import ( SOURCE_INTEGRATION_DISCOVERY, - SOURCE_REAUTH, SOURCE_USER, ConfigEntryState, ) @@ -744,11 +743,7 @@ async def test_reauth( """Test setup and reauthorization of a Plex token.""" entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flow_id = result["flow_id"] with ( @@ -795,11 +790,7 @@ async def test_reauth_multiple_servers_available( entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flow_id = result["flow_id"] diff --git a/tests/components/powerwall/test_config_flow.py b/tests/components/powerwall/test_config_flow.py index db0ef2e9884..5074a289d19 100644 --- a/tests/components/powerwall/test_config_flow.py +++ b/tests/components/powerwall/test_config_flow.py @@ -336,11 +336,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/prosegur/test_config_flow.py b/tests/components/prosegur/test_config_flow.py index 9362cecc289..7c3f399ee09 100644 --- a/tests/components/prosegur/test_config_flow.py +++ b/tests/components/prosegur/test_config_flow.py @@ -143,15 +143,7 @@ async def test_reauth_flow(hass: HomeAssistant, mock_list_contracts) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -208,15 +200,7 @@ async def test_reauth_flow_error(hass: HomeAssistant, exception, base_error) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.prosegur.config_flow.Installation.list", diff --git a/tests/components/purpleair/test_config_flow.py b/tests/components/purpleair/test_config_flow.py index 
2345d98b5e1..998cb2b7878 100644 --- a/tests/components/purpleair/test_config_flow.py +++ b/tests/components/purpleair/test_config_flow.py @@ -6,13 +6,15 @@ from aiopurpleair.errors import InvalidApiKeyError, PurpleAirError import pytest from homeassistant.components.purpleair import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr from .conftest import TEST_API_KEY, TEST_SENSOR_INDEX1, TEST_SENSOR_INDEX2 +from tests.common import MockConfigEntry + TEST_LATITUDE = 51.5285582 TEST_LONGITUDE = -0.2416796 @@ -127,19 +129,11 @@ async def test_reauth( mock_aiopurpleair, check_api_key_errors, check_api_key_mock, - config_entry, + config_entry: MockConfigEntry, setup_config_entry, ) -> None: """Test re-auth (including errors).""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data={"api_key": TEST_API_KEY}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/pushover/test_config_flow.py b/tests/components/pushover/test_config_flow.py index 14347084288..58485bfb427 100644 --- a/tests/components/pushover/test_config_flow.py +++ b/tests/components/pushover/test_config_flow.py @@ -149,14 +149,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -180,14 +173,7 @@ async def test_reauth_failed(hass: HomeAssistant, mock_pushover: MagicMock) -> N ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -223,14 +209,7 @@ async def test_reauth_with_existing_config(hass: HomeAssistant) -> None: ) entry2.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry2.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/pvoutput/test_config_flow.py b/tests/components/pvoutput/test_config_flow.py index 20e99f8e497..fc4335de00d 100644 --- a/tests/components/pvoutput/test_config_flow.py +++ b/tests/components/pvoutput/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from pvo import PVOutputAuthenticationError, PVOutputConnectionError from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant 
from homeassistant.data_entry_flow import FlowResultType @@ -150,15 +150,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -192,15 +184,7 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -244,15 +228,7 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/pyload/test_config_flow.py b/tests/components/pyload/test_config_flow.py index 8c775412371..b4ff63e79f9 100644 --- a/tests/components/pyload/test_config_flow.py +++ b/tests/components/pyload/test_config_flow.py @@ -6,12 +6,7 @@ from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError import pytest from homeassistant.components.pyload.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import ( - SOURCE_IMPORT, - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -180,14 +175,7 @@ async def test_reauth( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -222,14 +210,7 @@ async def test_reauth_errors( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/radarr/test_config_flow.py b/tests/components/radarr/test_config_flow.py index 407b7b50c48..0ff93536957 100644 --- a/tests/components/radarr/test_config_flow.py +++ b/tests/components/radarr/test_config_flow.py @@ -6,7 +6,7 @@ from aiopyarr import exceptions import pytest from homeassistant.components.radarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries 
import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -143,15 +143,7 @@ async def test_full_reauth_flow_implementation( ) -> None: """Test the manual reauth flow from start to finish.""" entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/renault/test_config_flow.py b/tests/components/renault/test_config_flow.py index 7d40cf69314..69bfdf0842e 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -13,15 +13,12 @@ from homeassistant.components.renault.const import ( CONF_LOCALE, DOMAIN, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import aiohttp_client -from .const import MOCK_CONFIG - -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -220,19 +217,11 @@ async def test_config_flow_duplicate( assert len(mock_setup_entry.mock_calls) == 0 -async def test_reauth(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=MOCK_CONFIG, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["description_placeholders"] == {CONF_USERNAME: "email@test.com"} diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 716e66b8d6c..926baf324bc 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -327,16 +327,7 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - const.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "title_placeholders": {"name": TEST_NVR_NAME}, - "unique_id": format_mac(TEST_MAC), - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/ridwell/test_config_flow.py b/tests/components/ridwell/test_config_flow.py index 601ac182670..6dd00344c5b 100644 --- a/tests/components/ridwell/test_config_flow.py +++ b/tests/components/ridwell/test_config_flow.py @@ -13,6 +13,8 @@ from homeassistant.data_entry_flow import 
FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("get_client_response", "errors"), @@ -65,12 +67,10 @@ async def test_duplicate_error(hass: HomeAssistant, config, setup_config_entry) async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test a full reauth flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "new_password"}, diff --git a/tests/components/risco/test_config_flow.py b/tests/components/risco/test_config_flow.py index 9fade18ea96..cff5f80e6c4 100644 --- a/tests/components/risco/test_config_flow.py +++ b/tests/components/risco/test_config_flow.py @@ -154,14 +154,12 @@ async def test_form_cloud_already_exists(hass: HomeAssistant) -> None: assert result3["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: +async def test_form_reauth( + hass: HomeAssistant, cloud_config_entry: MockConfigEntry +) -> None: """Test reauthenticate.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=cloud_config_entry.data, - ) + result = await cloud_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -194,15 +192,11 @@ async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: async def test_form_reauth_with_new_username( - hass: HomeAssistant, cloud_config_entry + hass: HomeAssistant, cloud_config_entry: MockConfigEntry ) -> None: """Test reauthenticate with new username.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=cloud_config_entry.data, - ) + result = await cloud_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/ruckus_unleashed/test_config_flow.py b/tests/components/ruckus_unleashed/test_config_flow.py index 5bfe2d941d5..89bd72d99e4 100644 --- a/tests/components/ruckus_unleashed/test_config_flow.py +++ b/tests/components/ruckus_unleashed/test_config_flow.py @@ -83,15 +83,7 @@ async def test_form_user_reauth(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -121,15 +113,7 @@ async def test_form_user_reauth_different_unique_id(hass: HomeAssistant) -> None entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -161,15 +145,7 @@ async def test_form_user_reauth_invalid_auth(hass: 
HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -201,15 +177,7 @@ async def test_form_user_reauth_cannot_connect(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -241,15 +209,7 @@ async def test_form_user_reauth_general_exception(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/rympro/test_config_flow.py b/tests/components/rympro/test_config_flow.py index e92b7c23357..7770889bdeb 100644 --- a/tests/components/rympro/test_config_flow.py +++ b/tests/components/rympro/test_config_flow.py @@ -160,17 +160,10 @@ async def test_form_already_exists(hass: HomeAssistant, config_entry) -> None: assert result2["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: +async def test_form_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test reauthentication.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -203,17 +196,12 @@ async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_reauth_with_new_account(hass: HomeAssistant, config_entry) -> None: +async def test_form_reauth_with_new_account( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test reauthentication with new account.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None From 26006f8036e6e5a53560ea354f119d9763b12779 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:34:09 +0200 Subject: [PATCH 1179/1635] Use start_reauth helper method in broadlink and bthome (#124783) * Use start_reauth helper method in broadlink reauth tests * Also include bthome --- tests/common.py | 12 +++++++++--- tests/components/broadlink/test_config_flow.py | 17 +++-------------- tests/components/bthome/test_config_flow.py | 11 +---------- 3 files changed, 13 insertions(+), 27 deletions(-) diff --git 
a/tests/common.py b/tests/common.py index 523bd12d590..c2d561551ca 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1054,7 +1054,12 @@ class MockConfigEntry(config_entries.ConfigEntry): """ self._async_set_state(hass, state, reason) - async def start_reauth_flow(self, hass: HomeAssistant) -> ConfigFlowResult: + async def start_reauth_flow( + self, + hass: HomeAssistant, + context: dict[str, Any] | None = None, + data: dict[str, Any] | None = None, + ) -> ConfigFlowResult: """Start a reauthentication flow.""" return await hass.config_entries.flow.async_init( self.domain, @@ -1063,8 +1068,9 @@ class MockConfigEntry(config_entries.ConfigEntry): "entry_id": self.entry_id, "title_placeholders": {"name": self.title}, "unique_id": self.unique_id, - }, - data=self.data, + } + | (context or {}), + data=self.data | (data or {}), ) diff --git a/tests/components/broadlink/test_config_flow.py b/tests/components/broadlink/test_config_flow.py index 2def8c0b3b9..f31cb380631 100644 --- a/tests/components/broadlink/test_config_flow.py +++ b/tests/components/broadlink/test_config_flow.py @@ -734,13 +734,9 @@ async def test_flow_reauth_works(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) - + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reset" @@ -770,12 +766,8 @@ async def test_flow_reauth_invalid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} - with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) device.mac = get_device("Office").mac mock_api = device.get_mock_api() @@ -804,12 +796,9 @@ async def test_flow_reauth_valid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) device.host = "192.168.1.128" mock_api = device.get_mock_api() diff --git a/tests/components/bthome/test_config_flow.py b/tests/components/bthome/test_config_flow.py index acf490d341e..faf2f1c9ef5 100644 --- a/tests/components/bthome/test_config_flow.py +++ b/tests/components/bthome/test_config_flow.py @@ -563,16 +563,7 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - }, - data=entry.data | {"device": device}, - ) + result = await 
entry.start_reauth_flow(hass, data={"device": device}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" From a4bfb0e92c987545a91ea48af5f8f03e94c708d0 Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Wed, 28 Aug 2024 16:35:44 +0200 Subject: [PATCH 1180/1635] Update frontend to 20240828.0 (#124791) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 035b087e481..0e1d443553d 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240809.0"] + "requirements": ["home-assistant-frontend==20240828.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 79c846d6c11..3aa1b7a298a 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.3.2 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240809.0 +home-assistant-frontend==20240828.0 home-assistant-intents==2024.8.7 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 62def94cdeb..457fe3a836c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1102,7 +1102,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240809.0 +home-assistant-frontend==20240828.0 # homeassistant.components.conversation home-assistant-intents==2024.8.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fe296036c50..eab788fabf1 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -925,7 +925,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240809.0 +home-assistant-frontend==20240828.0 # homeassistant.components.conversation home-assistant-intents==2024.8.7 From 45bb2cdd82fb90fe1627741891ee77b8c487a96b Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 28 Aug 2024 16:38:12 +0200 Subject: [PATCH 1181/1635] Build hassfest docker image and pushlish it on beta/stable releases (#124706) --- .github/workflows/builder.yml | 53 ++++++++++++++++++++++++++++ script/gen_requirements_all.py | 20 +++++++++-- script/hassfest/__main__.py | 11 ++++-- script/hassfest/docker/Dockerfile | 22 ++++++++++++ script/hassfest/docker/entrypoint.sh | 16 +++++++++ script/hassfest/model.py | 10 ++++-- tests/hassfest/test_requirements.py | 9 ++++- tests/hassfest/test_version.py | 15 ++++++-- 8 files changed, 145 insertions(+), 11 deletions(-) create mode 100644 script/hassfest/docker/Dockerfile create mode 100755 script/hassfest/docker/entrypoint.sh diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index e1f95c5c0a9..d206f8fe8c8 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -482,3 +482,56 @@ jobs: export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}" twine upload dist/* --skip-existing + + hassfest-image: + name: Build and test hassfest image + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write 
+ id-token: write + needs: ["init", "build_base"] + if: github.repository_owner == 'home-assistant' + env: + HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest + HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }} + steps: + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + + - name: Login to GitHub Container Registry + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build Docker image + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: ./script/hassfest/docker + build-args: BASE_IMAGE=ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }} + load: true + tags: ${{ env.HASSFEST_IMAGE_TAG }} + + - name: Run hassfest against core + run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components + + - name: Push Docker image + if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' + id: push + uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 + with: + context: ./script/hassfest/docker + build-args: BASE_IMAGE=ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }} + push: true + tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest + + - name: Generate artifact attestation + if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true' + uses: actions/attest-build-provenance@6149ea5740be74af77f260b9db67e633f6b0a9a1 # v1.4.2 + with: + subject-name: ${{ env.HASSFEST_IMAGE_NAME }} + subject-digest: ${{ steps.push.outputs.digest }} + push-to-registry: true diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index e8fdce6fa15..b2165289ad8 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -15,7 +15,7 @@ import tomllib from typing import Any from homeassistant.util.yaml.loader import load_yaml -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration # Requirements which can't be installed on all systems because they rely on additional # system packages. 
Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out @@ -270,7 +270,9 @@ def gather_recursive_requirements( seen = set() seen.add(domain) - integration = Integration(Path(f"homeassistant/components/{domain}")) + integration = Integration( + Path(f"homeassistant/components/{domain}"), _get_hassfest_config() + ) integration.load_manifest() reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE} for dep_domain in integration.dependencies: @@ -336,7 +338,8 @@ def gather_requirements_from_manifests( errors: list[str], reqs: dict[str, list[str]] ) -> None: """Gather all of the requirements from manifests.""" - integrations = Integration.load_dir(Path("homeassistant/components")) + config = _get_hassfest_config() + integrations = Integration.load_dir(config.core_integrations_path, config) for domain in sorted(integrations): integration = integrations[domain] @@ -584,6 +587,17 @@ def main(validate: bool, ci: bool) -> int: return 0 +def _get_hassfest_config() -> Config: + """Get hassfest config.""" + return Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + core_integrations_path=Path("homeassistant/components"), + ) + + if __name__ == "__main__": _VAL = sys.argv[-1] == "validate" _CI = sys.argv[-1] == "ci" diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index ea3c56200a2..b48871b4651 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -107,6 +107,12 @@ def get_config() -> Config: default=ALL_PLUGIN_NAMES, help="Comma-separate list of plugins to run. Valid plugin names: %(default)s", ) + parser.add_argument( + "--core-integrations-path", + type=pathlib.Path, + default=pathlib.Path("homeassistant/components"), + help="Path to core integrations", + ) parsed = parser.parse_args() if parsed.action is None: @@ -129,6 +135,7 @@ def get_config() -> Config: action=parsed.action, requirements=parsed.requirements, plugins=set(parsed.plugins), + core_integrations_path=parsed.core_integrations_path, ) @@ -146,12 +153,12 @@ def main() -> int: integrations = {} for int_path in config.specific_integrations: - integration = Integration(int_path) + integration = Integration(int_path, config) integration.load_manifest() integrations[integration.domain] = integration else: - integrations = Integration.load_dir(pathlib.Path("homeassistant/components")) + integrations = Integration.load_dir(config.core_integrations_path, config) plugins += HASS_PLUGINS for plugin in plugins: diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile new file mode 100644 index 00000000000..8921d92307e --- /dev/null +++ b/script/hassfest/docker/Dockerfile @@ -0,0 +1,22 @@ +ARG BASE_IMAGE=ghcr.io/home-assistant/home-assistant:beta +FROM $BASE_IMAGE + +SHELL ["/bin/bash", "-o", "pipefail", "-c"] + +COPY entrypoint.sh /entrypoint.sh + +RUN \ + uv pip install stdlib-list==0.10.0 \ + $(grep -e "^pipdeptree" -e "^tqdm" /usr/src/homeassistant/requirements_test.txt) \ + $(grep -e "^ruff" /usr/src/homeassistant/requirements_test_pre_commit.txt) + +WORKDIR "/github/workspace" +ENTRYPOINT ["/entrypoint.sh"] + +LABEL "name"="hassfest" +LABEL "maintainer"="Home Assistant " + +LABEL "com.github.actions.name"="hassfest" +LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" +LABEL "com.github.actions.icon"="terminal" +LABEL "com.github.actions.color"="gray-dark" diff --git a/script/hassfest/docker/entrypoint.sh 
b/script/hassfest/docker/entrypoint.sh new file mode 100755 index 00000000000..33330f63161 --- /dev/null +++ b/script/hassfest/docker/entrypoint.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bashio +declare -a integrations +declare integration_path + +shopt -s globstar nullglob +for manifest in **/manifest.json; do + manifest_path=$(realpath "${manifest}") + integrations+=(--integration-path "${manifest_path%/*}") +done + +if [[ ${#integrations[@]} -eq 0 ]]; then + bashio::exit.nok "No integrations found!" +fi + +cd /usr/src/homeassistant +exec python3 -m script.hassfest --action validate "${integrations[@]}" "$@" \ No newline at end of file diff --git a/script/hassfest/model.py b/script/hassfest/model.py index 736fb6874be..63e9b025ed4 100644 --- a/script/hassfest/model.py +++ b/script/hassfest/model.py @@ -29,6 +29,7 @@ class Config: root: pathlib.Path action: Literal["validate", "generate"] requirements: bool + core_integrations_path: pathlib.Path errors: list[Error] = field(default_factory=list) cache: dict[str, Any] = field(default_factory=dict) plugins: set[str] = field(default_factory=set) @@ -105,7 +106,7 @@ class Integration: """Represent an integration in our validator.""" @classmethod - def load_dir(cls, path: pathlib.Path) -> dict[str, Integration]: + def load_dir(cls, path: pathlib.Path, config: Config) -> dict[str, Integration]: """Load all integrations in a directory.""" assert path.is_dir() integrations: dict[str, Integration] = {} @@ -123,13 +124,14 @@ class Integration: ) continue - integration = cls(fil) + integration = cls(fil, config) integration.load_manifest() integrations[integration.domain] = integration return integrations path: pathlib.Path + _config: Config _manifest: dict[str, Any] | None = None manifest_path: pathlib.Path | None = None errors: list[Error] = field(default_factory=list) @@ -150,7 +152,9 @@ class Integration: @property def core(self) -> bool: """Core integration.""" - return self.path.as_posix().startswith("homeassistant/components") + return self.path.as_posix().startswith( + self._config.core_integrations_path.as_posix() + ) @property def disabled(self) -> str | None: diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py index f3b008a6113..433e63d904c 100644 --- a/tests/hassfest/test_requirements.py +++ b/tests/hassfest/test_requirements.py @@ -4,7 +4,7 @@ from pathlib import Path import pytest -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration from script.hassfest.requirements import validate_requirements_format @@ -13,6 +13,13 @@ def integration(): """Fixture for hassfest integration model.""" return Integration( path=Path("homeassistant/components/test"), + _config=Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + core_integrations_path=Path("homeassistant/components"), + ), _manifest={ "domain": "test", "documentation": "https://example.com", diff --git a/tests/hassfest/test_version.py b/tests/hassfest/test_version.py index bfe15018fe2..30677356101 100644 --- a/tests/hassfest/test_version.py +++ b/tests/hassfest/test_version.py @@ -1,5 +1,7 @@ """Tests for hassfest version.""" +from pathlib import Path + import pytest import voluptuous as vol @@ -7,13 +9,22 @@ from script.hassfest.manifest import ( CUSTOM_INTEGRATION_MANIFEST_SCHEMA, validate_version, ) -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration @pytest.fixture def integration(): """Fixture for 
hassfest integration model.""" - integration = Integration("") + integration = Integration( + "", + _config=Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + core_integrations_path=Path("homeassistant/components"), + ), + ) integration._manifest = { "domain": "test", "documentation": "https://example.com", From dd52f4c84a914c1326003d09cd2f7cf1a937aad6 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 28 Aug 2024 16:41:23 +0200 Subject: [PATCH 1182/1635] Remove deprecated mailbox platform (#123740) * Remove deprecated mailbox platform * Remove from const * Remove from type hints * Remove from pyproject --- .strict-typing | 1 - homeassistant/components/mailbox/__init__.py | 291 ------------------ .../components/mailbox/manifest.json | 9 - homeassistant/components/mailbox/strings.json | 9 - homeassistant/const.py | 1 - mypy.ini | 10 - pylint/plugins/hass_enforce_type_hints.py | 33 -- pyproject.toml | 1 - tests/components/mailbox/__init__.py | 1 - tests/components/mailbox/test_init.py | 225 -------------- 10 files changed, 581 deletions(-) delete mode 100644 homeassistant/components/mailbox/__init__.py delete mode 100644 homeassistant/components/mailbox/manifest.json delete mode 100644 homeassistant/components/mailbox/strings.json delete mode 100644 tests/components/mailbox/__init__.py delete mode 100644 tests/components/mailbox/test_init.py diff --git a/.strict-typing b/.strict-typing index 07aed7b4ca1..2566a5349c2 100644 --- a/.strict-typing +++ b/.strict-typing @@ -294,7 +294,6 @@ homeassistant.components.london_underground.* homeassistant.components.lookin.* homeassistant.components.luftdaten.* homeassistant.components.madvr.* -homeassistant.components.mailbox.* homeassistant.components.manual.* homeassistant.components.map.* homeassistant.components.mastodon.* diff --git a/homeassistant/components/mailbox/__init__.py b/homeassistant/components/mailbox/__init__.py deleted file mode 100644 index e0438342a54..00000000000 --- a/homeassistant/components/mailbox/__init__.py +++ /dev/null @@ -1,291 +0,0 @@ -"""Support for Voice mailboxes.""" - -from __future__ import annotations - -import asyncio -from contextlib import suppress -from datetime import timedelta -from http import HTTPStatus -import logging -from typing import Any, Final - -from aiohttp import web -from aiohttp.web_exceptions import HTTPNotFound - -from homeassistant.components import frontend -from homeassistant.components.http import HomeAssistantView -from homeassistant.config import config_per_platform -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv, discovery -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.setup import async_prepare_setup_platform - -_LOGGER = logging.getLogger(__name__) - -DOMAIN: Final = "mailbox" - -EVENT: Final = "mailbox_updated" -CONTENT_TYPE_MPEG: Final = "audio/mpeg" -CONTENT_TYPE_NONE: Final = "none" - -SCAN_INTERVAL = timedelta(seconds=30) - -CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Track states and offer events for mailboxes.""" - mailboxes: list[Mailbox] = [] - 
frontend.async_register_built_in_panel(hass, "mailbox", "mailbox", "mdi:mailbox") - hass.http.register_view(MailboxPlatformsView(mailboxes)) - hass.http.register_view(MailboxMessageView(mailboxes)) - hass.http.register_view(MailboxMediaView(mailboxes)) - hass.http.register_view(MailboxDeleteView(mailboxes)) - - async def async_setup_platform( - p_type: str, - p_config: ConfigType | None = None, - discovery_info: DiscoveryInfoType | None = None, - ) -> None: - """Set up a mailbox platform.""" - if p_config is None: - p_config = {} - if discovery_info is None: - discovery_info = {} - - platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type) - - if platform is None: - _LOGGER.error("Unknown mailbox platform specified") - return - - if p_type not in ["asterisk_cdr", "asterisk_mbox", "demo"]: - # Asterisk integration will raise a repair issue themselves - # For demo we don't create one - async_create_issue( - hass, - DOMAIN, - f"deprecated_mailbox_{p_type}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_mailbox_integration", - translation_placeholders={ - "integration_domain": p_type, - }, - ) - - _LOGGER.info("Setting up %s.%s", DOMAIN, p_type) - mailbox = None - try: - if hasattr(platform, "async_get_handler"): - mailbox = await platform.async_get_handler( - hass, p_config, discovery_info - ) - elif hasattr(platform, "get_handler"): - mailbox = await hass.async_add_executor_job( - platform.get_handler, hass, p_config, discovery_info - ) - else: - raise HomeAssistantError("Invalid mailbox platform.") # noqa: TRY301 - - if mailbox is None: - _LOGGER.error("Failed to initialize mailbox platform %s", p_type) - return - - except Exception: - _LOGGER.exception("Error setting up platform %s", p_type) - return - - mailboxes.append(mailbox) - mailbox_entity = MailboxEntity(mailbox) - component = EntityComponent[MailboxEntity]( - logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL - ) - component.register_shutdown() - await component.async_add_entities([mailbox_entity]) - - for p_type, p_config in config_per_platform(config, DOMAIN): - if p_type is not None: - hass.async_create_task( - async_setup_platform(p_type, p_config), eager_start=True - ) - - async def async_platform_discovered( - platform: str, info: DiscoveryInfoType | None - ) -> None: - """Handle for discovered platform.""" - await async_setup_platform(platform, discovery_info=info) - - discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered) - - return True - - -class MailboxEntity(Entity): - """Entity for each mailbox platform to provide a badge display.""" - - def __init__(self, mailbox: Mailbox) -> None: - """Initialize mailbox entity.""" - self.mailbox = mailbox - self.message_count = 0 - - async def async_added_to_hass(self) -> None: - """Complete entity initialization.""" - - @callback - def _mailbox_updated(event: Event) -> None: - self.async_schedule_update_ha_state(True) - - self.hass.bus.async_listen(EVENT, _mailbox_updated) - self.async_schedule_update_ha_state(True) - - @property - def state(self) -> str: - """Return the state of the binary sensor.""" - return str(self.message_count) - - @property - def name(self) -> str: - """Return the name of the entity.""" - return self.mailbox.name - - async def async_update(self) -> None: - """Retrieve messages from platform.""" - messages = await self.mailbox.async_get_messages() - self.message_count = len(messages) - - -class Mailbox: - """Represent a 
mailbox device.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize mailbox object.""" - self.hass = hass - self.name = name - - @callback - def async_update(self) -> None: - """Send event notification of updated mailbox.""" - self.hass.bus.async_fire(EVENT) - - @property - def media_type(self) -> str: - """Return the supported media type.""" - raise NotImplementedError - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return False - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return False - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - raise NotImplementedError - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - raise NotImplementedError - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - raise NotImplementedError - - -class StreamError(Exception): - """Media streaming exception.""" - - -class MailboxView(HomeAssistantView): - """Base mailbox view.""" - - def __init__(self, mailboxes: list[Mailbox]) -> None: - """Initialize a basic mailbox view.""" - self.mailboxes = mailboxes - - def get_mailbox(self, platform: str) -> Mailbox: - """Retrieve the specified mailbox.""" - for mailbox in self.mailboxes: - if mailbox.name == platform: - return mailbox - raise HTTPNotFound - - -class MailboxPlatformsView(MailboxView): - """View to return the list of mailbox platforms.""" - - url = "/api/mailbox/platforms" - name = "api:mailbox:platforms" - - async def get(self, request: web.Request) -> web.Response: - """Retrieve list of platforms.""" - return self.json( - [ - { - "name": mailbox.name, - "has_media": mailbox.has_media, - "can_delete": mailbox.can_delete, - } - for mailbox in self.mailboxes - ] - ) - - -class MailboxMessageView(MailboxView): - """View to return the list of messages.""" - - url = "/api/mailbox/messages/{platform}" - name = "api:mailbox:messages" - - async def get(self, request: web.Request, platform: str) -> web.Response: - """Retrieve messages.""" - mailbox = self.get_mailbox(platform) - messages = await mailbox.async_get_messages() - return self.json(messages) - - -class MailboxDeleteView(MailboxView): - """View to delete selected messages.""" - - url = "/api/mailbox/delete/{platform}/{msgid}" - name = "api:mailbox:delete" - - async def delete(self, request: web.Request, platform: str, msgid: str) -> None: - """Delete items.""" - mailbox = self.get_mailbox(platform) - await mailbox.async_delete(msgid) - - -class MailboxMediaView(MailboxView): - """View to return a media file.""" - - url = r"/api/mailbox/media/{platform}/{msgid}" - name = "api:asteriskmbox:media" - - async def get( - self, request: web.Request, platform: str, msgid: str - ) -> web.Response: - """Retrieve media.""" - mailbox = self.get_mailbox(platform) - - with suppress(asyncio.CancelledError, TimeoutError): - async with asyncio.timeout(10): - try: - stream = await mailbox.async_get_media(msgid) - except StreamError as err: - _LOGGER.error("Error getting media: %s", err) - return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) - if stream: - return web.Response(body=stream, content_type=mailbox.media_type) - - return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) diff --git a/homeassistant/components/mailbox/manifest.json b/homeassistant/components/mailbox/manifest.json deleted file mode 100644 index 
43dd133654c..00000000000 --- a/homeassistant/components/mailbox/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "mailbox", - "name": "Mailbox", - "codeowners": [], - "dependencies": ["http"], - "documentation": "https://www.home-assistant.io/integrations/mailbox", - "integration_type": "entity", - "quality_scale": "internal" -} diff --git a/homeassistant/components/mailbox/strings.json b/homeassistant/components/mailbox/strings.json deleted file mode 100644 index 01746e3e98d..00000000000 --- a/homeassistant/components/mailbox/strings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "title": "Mailbox", - "issues": { - "deprecated_mailbox": { - "title": "The mailbox platform is being removed", - "description": "The mailbox platform is being removed. Please report it to the author of the \"{integration_domain}\" custom integration." - } - } -} diff --git a/homeassistant/const.py b/homeassistant/const.py index 953f65efce2..8384a6d44bd 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -60,7 +60,6 @@ class Platform(StrEnum): LAWN_MOWER = "lawn_mower" LIGHT = "light" LOCK = "lock" - MAILBOX = "mailbox" MEDIA_PLAYER = "media_player" NOTIFY = "notify" NUMBER = "number" diff --git a/mypy.ini b/mypy.ini index 2a361f56397..a312a77122f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2696,16 +2696,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.mailbox.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.manual.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index e1812de44d3..13499134668 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -1761,39 +1761,6 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ], ), ], - "mailbox": [ - ClassTypeHintMatch( - base_class="Mailbox", - matches=[ - TypeHintMatch( - function_name="media_type", - return_type="str", - ), - TypeHintMatch( - function_name="can_delete", - return_type="bool", - ), - TypeHintMatch( - function_name="has_media", - return_type="bool", - ), - TypeHintMatch( - function_name="async_get_media", - arg_types={1: "str"}, - return_type="bytes", - ), - TypeHintMatch( - function_name="async_get_messages", - return_type="list[dict[str, Any]]", - ), - TypeHintMatch( - function_name="async_delete", - arg_types={1: "str"}, - return_type="bool", - ), - ], - ), - ], "media_player": [ ClassTypeHintMatch( base_class="Entity", diff --git a/pyproject.toml b/pyproject.toml index 10bc26f1a0a..74329da38b8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -844,7 +844,6 @@ voluptuous = "vol" "homeassistant.components.lawn_mower.PLATFORM_SCHEMA" = "LAWN_MOWER_PLATFORM_SCHEMA" "homeassistant.components.light.PLATFORM_SCHEMA" = "LIGHT_PLATFORM_SCHEMA" "homeassistant.components.lock.PLATFORM_SCHEMA" = "LOCK_PLATFORM_SCHEMA" -"homeassistant.components.mailbox.PLATFORM_SCHEMA" = "MAILBOX_PLATFORM_SCHEMA" "homeassistant.components.media_player.PLATFORM_SCHEMA" = "MEDIA_PLAYER_PLATFORM_SCHEMA" "homeassistant.components.notify.PLATFORM_SCHEMA" = "NOTIFY_PLATFORM_SCHEMA" "homeassistant.components.number.PLATFORM_SCHEMA" = "NUMBER_PLATFORM_SCHEMA" diff --git a/tests/components/mailbox/__init__.py 
b/tests/components/mailbox/__init__.py deleted file mode 100644 index 5e212354579..00000000000 --- a/tests/components/mailbox/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The tests for mailbox platforms.""" diff --git a/tests/components/mailbox/test_init.py b/tests/components/mailbox/test_init.py deleted file mode 100644 index 6fcf9176aae..00000000000 --- a/tests/components/mailbox/test_init.py +++ /dev/null @@ -1,225 +0,0 @@ -"""The tests for the mailbox component.""" - -from datetime import datetime -from hashlib import sha1 -from http import HTTPStatus -from typing import Any - -from aiohttp.test_utils import TestClient -import pytest - -from homeassistant.components import mailbox -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.setup import async_setup_component -from homeassistant.util import dt as dt_util - -from tests.common import MockModule, mock_integration, mock_platform -from tests.typing import ClientSessionGenerator - -MAILBOX_NAME = "TestMailbox" -MEDIA_DATA = b"3f67c4ea33b37d1710f" -MESSAGE_TEXT = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - - -def _create_message(idx: int) -> dict[str, Any]: - """Create a sample message.""" - msgtime = dt_util.as_timestamp(datetime(2010, 12, idx + 1, 13, 17, 00)) - msgtxt = f"Message {idx + 1}. {MESSAGE_TEXT}" - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - return { - "info": { - "origtime": int(msgtime), - "callerid": "John Doe <212-555-1212>", - "duration": "10", - }, - "text": msgtxt, - "sha": msgsha, - } - - -class TestMailbox(mailbox.Mailbox): - """Test Mailbox, with 10 sample messages.""" - - # This class doesn't contain any tests! Skip pytest test collection. 
- __test__ = False - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Test mailbox.""" - super().__init__(hass, name) - self._messages: dict[str, dict[str, Any]] = {} - for idx in range(10): - msg = _create_message(idx) - msgsha = msg["sha"] - self._messages[msgsha] = msg - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return mailbox.CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - if msgid not in self._messages: - raise mailbox.StreamError("Message not found") - - return MEDIA_DATA - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - return sorted( - self._messages.values(), - key=lambda item: item["info"]["origtime"], # type: ignore[no-any-return] - reverse=True, - ) - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - if msgid in self._messages: - del self._messages[msgid] - self.async_update() - return True - - -class MockMailbox: - """A mock mailbox platform.""" - - async def async_get_handler( - self, - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> mailbox.Mailbox: - """Set up the Test mailbox.""" - return TestMailbox(hass, MAILBOX_NAME) - - -@pytest.fixture -def mock_mailbox(hass: HomeAssistant) -> None: - """Mock mailbox.""" - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.mailbox", MockMailbox()) - - -@pytest.fixture -async def mock_http_client( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_mailbox: None -) -> TestClient: - """Start the Home Assistant HTTP component.""" - assert await async_setup_component( - hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} - ) - return await hass_client() - - -async def test_get_platforms_from_mailbox(mock_http_client: TestClient) -> None: - """Get platforms from mailbox.""" - url = "/api/mailbox/platforms" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 1 - assert result[0].get("name") == "TestMailbox" - - -async def test_get_messages_from_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - url = "/api/mailbox/messages/TestMailbox" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 10 - - -async def test_get_media_from_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - mp3sha = "7cad61312c7b66f619295be2da8c7ac73b4968f1" - msgtxt = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - - url = f"/api/mailbox/media/TestMailbox/{msgsha}" - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - data = await req.read() - assert sha1(data).hexdigest() == mp3sha - - -async def test_delete_from_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - msgtxt1 = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - msgtxt2 = "Message 3. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
" - msgsha1 = sha1(msgtxt1.encode("utf-8")).hexdigest() - msgsha2 = sha1(msgtxt2.encode("utf-8")).hexdigest() - - for msg in (msgsha1, msgsha2): - url = f"/api/mailbox/delete/TestMailbox/{msg}" - req = await mock_http_client.delete(url) - assert req.status == HTTPStatus.OK - - url = "/api/mailbox/messages/TestMailbox" - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 8 - - -async def test_get_messages_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - url = "/api/mailbox/messages/mailbox.invalid_mailbox" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_get_media_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/media/mailbox.invalid_mailbox/{msgsha}" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_get_media_from_invalid_msgid(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/media/TestMailbox/{msgsha}" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.INTERNAL_SERVER_ERROR - - -async def test_delete_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/delete/mailbox.invalid_mailbox/{msgsha}" - - req = await mock_http_client.delete(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_repair_issue_is_created( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, mock_mailbox: None -) -> None: - """Test repair issue is created.""" - assert await async_setup_component( - hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} - ) - await hass.async_block_till_done() - assert ( - mailbox.DOMAIN, - "deprecated_mailbox_test", - ) in issue_registry.issues From 7652c35ee6a0a168b71b8f012c9385a1de66b9a6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:47:10 +0200 Subject: [PATCH 1183/1635] Use start_reauth helper method in integration tests (t) (#124794) --- .../components/tailscale/test_config_flow.py | 32 ++------------- tests/components/tailwind/test_config_flow.py | 27 ++----------- .../tankerkoenig/test_config_flow.py | 15 +++---- tests/components/tautulli/test_config_flow.py | 21 ++-------- tests/components/tedee/test_config_flow.py | 15 +------ .../tesla_fleet/test_config_flow.py | 22 ++-------- .../components/teslemetry/test_config_flow.py | 19 +-------- tests/components/tessie/test_config_flow.py | 19 +-------- .../thethingsnetwork/test_config_flow.py | 16 +++----- tests/components/tile/test_config_flow.py | 10 ++--- .../totalconnect/test_config_flow.py | 6 +-- .../tplink_omada/test_config_flow.py | 18 +-------- tests/components/tractive/test_config_flow.py | 40 ++----------------- .../trafikverket_camera/test_config_flow.py | 20 +--------- .../trafikverket_ferry/test_config_flow.py | 20 +--------- .../trafikverket_train/test_config_flow.py | 30 ++------------ .../test_config_flow.py | 18 +-------- .../transmission/test_config_flow.py | 27 ++----------- tests/components/tuya/test_config_flow.py | 32 ++------------- tests/components/twitch/test_config_flow.py | 20 ++-------- 20 files changed, 61 insertions(+), 366 
deletions(-) diff --git a/tests/components/tailscale/test_config_flow.py b/tests/components/tailscale/test_config_flow.py index 86daa40d8dc..3a67f46a496 100644 --- a/tests/components/tailscale/test_config_flow.py +++ b/tests/components/tailscale/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from tailscale import TailscaleAuthenticationError, TailscaleConnectionError from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -128,15 +128,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -170,15 +162,7 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -222,15 +206,7 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/tailwind/test_config_flow.py b/tests/components/tailwind/test_config_flow.py index f70ab6e27ff..d2d15172718 100644 --- a/tests/components/tailwind/test_config_flow.py +++ b/tests/components/tailwind/test_config_flow.py @@ -14,12 +14,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.tailwind.const import DOMAIN -from homeassistant.config_entries import ( - SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -311,15 +306,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) assert mock_config_entry.data[CONF_TOKEN] == "123456" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) 
assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -354,15 +341,7 @@ async def test_reauth_flow_errors( mock_config_entry.add_to_hass(hass) mock_tailwind.status.side_effect = side_effect - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index 022b49fd3f8..bb1e943bbb9 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components.tankerkoenig.const import ( CONF_STATIONS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -162,6 +162,10 @@ async def test_user_no_stations(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test starting a flow by user to re-auth.""" config_entry.add_to_hass(hass) + # re-auth initialized + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -171,15 +175,6 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", ) as mock_nearby_stations, ): - # re-auth initialized - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=config_entry.data, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - # re-auth unsuccessful mock_nearby_stations.side_effect = TankerkoenigInvalidKeyError("Booom!") result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tautulli/test_config_flow.py b/tests/components/tautulli/test_config_flow.py index ca563cfad77..722fd0a7616 100644 --- a/tests/components/tautulli/test_config_flow.py +++ b/tests/components/tautulli/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from pytautulli import exceptions from homeassistant.components.tautulli.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -156,15 +156,7 @@ async def test_flow_reauth( """Test reauth flow.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -193,14 +185,7 @@ async def test_flow_reauth_error( 
"""Test reauth flow with invalid authentication.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) with patch_config_flow_tautulli(AsyncMock()) as tautullimock: tautullimock.side_effect = exceptions.PyTautulliAuthenticationException result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index d5dc5d4efcf..0fa3d62c26e 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -10,7 +10,7 @@ from pytedee_async import ( import pytest from homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -122,18 +122,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data={ - CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, - CONF_HOST: "192.168.1.42", - }, - ) + reauth_result = await mock_config_entry.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], diff --git a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index 81ba92f1e9c..b49e090cd5d 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -16,7 +16,7 @@ from homeassistant.components.tesla_fleet.const import ( SCOPES, TOKEN_URL, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -211,15 +211,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -267,15 +259,7 @@ async def test_reauth_account_mismatch( old_entry = MockConfigEntry(domain=DOMAIN, unique_id="baduid", version=1, data={}) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) diff --git a/tests/components/teslemetry/test_config_flow.py b/tests/components/teslemetry/test_config_flow.py index 
fa35142dc07..03e46c6a8be 100644 --- a/tests/components/teslemetry/test_config_flow.py +++ b/tests/components/teslemetry/test_config_flow.py @@ -94,14 +94,7 @@ async def test_reauth(hass: HomeAssistant, mock_metadata) -> None: ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=BAD_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -144,15 +137,7 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=BAD_CONFIG, - ) + result = await mock_entry.start_reauth_flow(hass) mock_metadata.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tessie/test_config_flow.py b/tests/components/tessie/test_config_flow.py index 043086971fa..d51d467002d 100644 --- a/tests/components/tessie/test_config_flow.py +++ b/tests/components/tessie/test_config_flow.py @@ -143,14 +143,7 @@ async def test_reauth( ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=TEST_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -194,15 +187,7 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=TEST_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], diff --git a/tests/components/thethingsnetwork/test_config_flow.py b/tests/components/thethingsnetwork/test_config_flow.py index 107d84e099b..99c4a080e17 100644 --- a/tests/components/thethingsnetwork/test_config_flow.py +++ b/tests/components/thethingsnetwork/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from ttn_client import TTNAuthError from homeassistant.components.thethingsnetwork.const import CONF_APP_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -12,6 +12,8 @@ from homeassistant.data_entry_flow import FlowResultType from . 
import init_integration from .conftest import API_KEY, APP_ID, HOST +from tests.common import MockConfigEntry + USER_DATA = {CONF_HOST: HOST, CONF_APP_ID: APP_ID, CONF_API_KEY: API_KEY} @@ -92,21 +94,13 @@ async def test_duplicate_entry( async def test_step_reauth( - hass: HomeAssistant, mock_ttnclient, mock_config_entry + hass: HomeAssistant, mock_ttnclient, mock_config_entry: MockConfigEntry ) -> None: """Test that the reauth step works.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": APP_ID, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_DATA, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert not result["errors"] diff --git a/tests/components/tile/test_config_flow.py b/tests/components/tile/test_config_flow.py index 87fe976ca3f..849be41d560 100644 --- a/tests/components/tile/test_config_flow.py +++ b/tests/components/tile/test_config_flow.py @@ -6,13 +6,15 @@ import pytest from pytile.errors import InvalidAuthError, TileError from homeassistant.components.tile import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("mock_login_response", "errors"), @@ -77,12 +79,10 @@ async def test_import_entry(hass: HomeAssistant, config, mock_pytile) -> None: async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/totalconnect/test_config_flow.py b/tests/components/totalconnect/test_config_flow.py index 98de748faea..a0be52afb3b 100644 --- a/tests/components/totalconnect/test_config_flow.py +++ b/tests/components/totalconnect/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components.totalconnect.const import ( CONF_USERCODES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -141,9 +141,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tplink_omada/test_config_flow.py b/tests/components/tplink_omada/test_config_flow.py index 08606fe126c..28ef0da170f 100644 --- a/tests/components/tplink_omada/test_config_flow.py +++ 
b/tests/components/tplink_omada/test_config_flow.py @@ -251,14 +251,7 @@ async def test_async_step_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -298,14 +291,7 @@ async def test_async_step_reauth_invalid_auth(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tractive/test_config_flow.py b/tests/components/tractive/test_config_flow.py index 5cedb51e5af..691bf671afd 100644 --- a/tests/components/tractive/test_config_flow.py +++ b/tests/components/tractive/test_config_flow.py @@ -110,15 +110,7 @@ async def test_reauthentication(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -151,15 +143,7 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -189,15 +173,7 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -227,15 +203,7 @@ async def test_reauthentication_failure_no_existing_entry(hass: HomeAssistant) - ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 2e9e34f4c35..dd75f5e6838 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -208,15 +208,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -280,15 +272,7 @@ async def test_reauth_flow_error( entry.add_to_hass(hass) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", diff --git a/tests/components/trafikverket_ferry/test_config_flow.py b/tests/components/trafikverket_ferry/test_config_flow.py index 1c170a917cc..916f9c9f2ec 100644 --- a/tests/components/trafikverket_ferry/test_config_flow.py +++ b/tests/components/trafikverket_ferry/test_config_flow.py @@ -128,15 +128,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -203,15 +195,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.trafikverket_ferry.config_flow.TrafikverketFerry.async_get_next_ferry_stop", diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 83cc5a89016..3090a9fe337 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -246,15 +246,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -328,15 +320,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( @@ -418,15 +402,7 @@ async def test_reauth_flow_error_departures( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( diff --git 
a/tests/components/trafikverket_weatherstation/test_config_flow.py b/tests/components/trafikverket_weatherstation/test_config_flow.py index 771336301ff..738d6a8ceac 100644 --- a/tests/components/trafikverket_weatherstation/test_config_flow.py +++ b/tests/components/trafikverket_weatherstation/test_config_flow.py @@ -116,14 +116,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -182,14 +175,7 @@ async def test_reauth_flow_fails( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/transmission/test_config_flow.py b/tests/components/transmission/test_config_flow.py index e6c523bf1f6..b318862047e 100644 --- a/tests/components/transmission/test_config_flow.py +++ b/tests/components/transmission/test_config_flow.py @@ -160,14 +160,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=transmission.DOMAIN, data=MOCK_CONFIG_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -197,14 +190,7 @@ async def test_reauth_failed(hass: HomeAssistant, mock_api: MagicMock) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -232,14 +218,7 @@ async def test_reauth_failed_connection_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tuya/test_config_flow.py b/tests/components/tuya/test_config_flow.py index 6e971262bc8..247aec02cd1 100644 --- a/tests/components/tuya/test_config_flow.py +++ b/tests/components/tuya/test_config_flow.py @@ -8,7 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -145,15 +145,7 @@ async def test_reauth_flow( """Test the reauthentication 
configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "scan" @@ -185,15 +177,7 @@ async def test_reauth_flow_migration( assert CONF_APP_TYPE in mock_old_config_entry.data assert CONF_USER_CODE not in mock_old_config_entry.data - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_old_config_entry.unique_id, - "entry_id": mock_old_config_entry.entry_id, - }, - data=mock_old_config_entry.data, - ) + result = await mock_old_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_user_code" @@ -229,15 +213,7 @@ async def test_reauth_flow_failed_qr_code( """Test an error occurring while retrieving the QR code.""" mock_old_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_old_config_entry.unique_id, - "entry_id": mock_old_config_entry.entry_id, - }, - data=mock_old_config_entry.data, - ) + result = await mock_old_config_entry.start_reauth_flow(hass) # Something went wrong getting the QR code (like an invalid user code) mock_tuya_login_control.qr_code.return_value["success"] = False diff --git a/tests/components/twitch/test_config_flow.py b/tests/components/twitch/test_config_flow.py index 6935943a4d3..fc53b17551c 100644 --- a/tests/components/twitch/test_config_flow.py +++ b/tests/components/twitch/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.twitch.const import ( DOMAIN, OAUTH2_AUTHORIZE, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -109,14 +109,7 @@ async def test_reauth( ) -> None: """Check reauth flow.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -184,14 +177,7 @@ async def test_reauth_wrong_account( twitch_mock.return_value.get_users = lambda *args, **kwargs: get_generator( "get_users_2.json", TwitchUser ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" From 9bcc31a9fe198b56926ccdb1f3f6838e0b718442 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:49:05 +0200 Subject: [PATCH 1184/1635] Use start_reauth helper method in integration tests (s) (#124793) --- .../components/samsungtv/test_config_flow.py | 30 +++----------- tests/components/sense/test_config_flow.py | 8 +--- 
tests/components/sensibo/test_config_flow.py | 30 ++------------ tests/components/sfr_box/test_config_flow.py | 10 +---- tests/components/sharkiq/test_config_flow.py | 15 +++---- tests/components/shelly/test_config_flow.py | 39 +++++------------- .../components/simplisafe/test_config_flow.py | 22 +++++----- tests/components/skybell/test_config_flow.py | 23 ++--------- tests/components/sleepiq/test_config_flow.py | 14 +------ tests/components/smarttub/test_config_flow.py | 20 +--------- tests/components/sonarr/test_config_flow.py | 12 +----- tests/components/spotify/test_config_flow.py | 22 ++-------- .../steam_online/test_config_flow.py | 18 +++------ tests/components/sunweg/test_config_flow.py | 9 +---- .../surepetcare/test_config_flow.py | 40 ++----------------- .../synology_dsm/test_config_flow.py | 26 +----------- 16 files changed, 59 insertions(+), 279 deletions(-) diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py index 6c325ae3b04..43d8c81d000 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -1749,11 +1749,7 @@ async def test_form_reauth_legacy(hass: HomeAssistant) -> None: """Test reauthenticate legacy.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_OLD_ENTRY) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1773,11 +1769,7 @@ async def test_form_reauth_websocket(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1798,11 +1790,7 @@ async def test_form_reauth_websocket_cannot_connect( """Test reauthenticate websocket when we cannot connect on the first attempt.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1830,11 +1818,7 @@ async def test_form_reauth_websocket_not_supported(hass: HomeAssistant) -> None: """Test reauthenticate websocket when the device is not supported.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1863,11 +1847,7 @@ async def test_form_reauth_encrypted(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert 
result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/sense/test_config_flow.py b/tests/components/sense/test_config_flow.py index e564603ea87..0ba8d94e17b 100644 --- a/tests/components/sense/test_config_flow.py +++ b/tests/components/sense/test_config_flow.py @@ -268,9 +268,7 @@ async def test_reauth_no_form(hass: HomeAssistant, mock_sense) -> None: "homeassistant.config_entries.ConfigEntries.async_reload", return_value=True, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=MOCK_CONFIG - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" @@ -288,9 +286,7 @@ async def test_reauth_password(hass: HomeAssistant, mock_sense) -> None: mock_sense.return_value.authenticate.side_effect = SenseAuthenticationException # Reauth success without user input - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM mock_sense.return_value.authenticate.side_effect = None diff --git a/tests/components/sensibo/test_config_flow.py b/tests/components/sensibo/test_config_flow.py index e994402b09f..3f53495f0f2 100644 --- a/tests/components/sensibo/test_config_flow.py +++ b/tests/components/sensibo/test_config_flow.py @@ -192,15 +192,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -254,15 +246,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", @@ -338,15 +322,7 @@ async def test_flow_reauth_no_username_or_device( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sfr_box/test_config_flow.py b/tests/components/sfr_box/test_config_flow.py index 08c12e9817b..6bf610de661 100644 --- a/tests/components/sfr_box/test_config_flow.py +++ b/tests/components/sfr_box/test_config_flow.py @@ -207,15 +207,7 @@ async def test_reauth(hass: HomeAssistant, config_entry_with_auth: ConfigEntry) """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry_with_auth.entry_id, - "unique_id": config_entry_with_auth.unique_id, - }, - data=config_entry_with_auth.data, - ) + result = await 
config_entry_with_auth.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} diff --git a/tests/components/sharkiq/test_config_flow.py b/tests/components/sharkiq/test_config_flow.py index ae037834c57..22a77678c0d 100644 --- a/tests/components/sharkiq/test_config_flow.py +++ b/tests/components/sharkiq/test_config_flow.py @@ -99,11 +99,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) with patch("sharkiq.AylaApi.async_sign_in", return_value=True): result = await hass.config_entries.flow.async_configure( @@ -131,11 +127,10 @@ async def test_reauth( msg: str, ) -> None: """Test reauth failures.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, - ) + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) + + result = await mock_config.start_reauth_flow(hass) with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/shelly/test_config_flow.py b/tests/components/shelly/test_config_flow.py index c0c089f469a..f03d90dbabb 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -23,7 +23,7 @@ from homeassistant.components.shelly.const import ( BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE +from homeassistant.config_entries import SOURCE_RECONFIGURE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -819,20 +819,15 @@ async def test_reauth_successful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -858,6 +853,9 @@ async def test_reauth_unsuccessful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -873,15 +871,6 @@ async def test_reauth_unsuccessful( new=AsyncMock(side_effect=InvalidAuthError), ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is 
FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -897,20 +886,14 @@ async def test_reauth_get_info_error(hass: HomeAssistant) -> None: domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": 2} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", side_effect=DeviceConnectionError, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"password": "test2 password"}, diff --git a/tests/components/simplisafe/test_config_flow.py b/tests/components/simplisafe/test_config_flow.py index dde7e37b891..9270fc43c30 100644 --- a/tests/components/simplisafe/test_config_flow.py +++ b/tests/components/simplisafe/test_config_flow.py @@ -8,11 +8,13 @@ from simplipy.errors import InvalidCredentialsError, SimplipyError from homeassistant.components.simplisafe import DOMAIN from homeassistant.components.simplisafe.config_flow import CONF_AUTH_CODE -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_CODE, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + VALID_AUTH_CODE = "code12345123451234512345123451234512345123451" @@ -90,13 +92,11 @@ async def test_options_flow(config_entry, hass: HomeAssistant) -> None: assert config_entry.options == {CONF_CODE: "4321"} -async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) -> None: +async def test_step_reauth( + config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe +) -> None: """Test the re-auth step.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "user" with ( @@ -118,14 +118,10 @@ async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) @pytest.mark.parametrize("unique_id", ["some_other_id"]) async def test_step_reauth_wrong_account( - config_entry, hass: HomeAssistant, setup_simplisafe + config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe ) -> None: """Test the re-auth step where the wrong account is used during login.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "user" with ( diff --git a/tests/components/skybell/test_config_flow.py b/tests/components/skybell/test_config_flow.py index cb62f808efc..f415fef077e 100644 --- a/tests/components/skybell/test_config_flow.py +++ b/tests/components/skybell/test_config_flow.py @@ -5,10 +5,9 @@ from unittest.mock import patch from aioskybell import exceptions import pytest -from homeassistant import config_entries from 
homeassistant.components.skybell.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_SOURCE +from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,15 +103,7 @@ async def test_step_reauth(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -130,15 +121,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, skybell_mock) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sleepiq/test_config_flow.py b/tests/components/sleepiq/test_config_flow.py index af08f5aa9fe..26007d42e7d 100644 --- a/tests/components/sleepiq/test_config_flow.py +++ b/tests/components/sleepiq/test_config_flow.py @@ -101,19 +101,7 @@ async def test_reauth_password(hass: HomeAssistant) -> None: # set up initially entry = await setup_platform(hass) - with patch( - "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", - side_effect=SleepIQLoginException, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", diff --git a/tests/components/smarttub/test_config_flow.py b/tests/components/smarttub/test_config_flow.py index c625f217405..5832841641c 100644 --- a/tests/components/smarttub/test_config_flow.py +++ b/tests/components/smarttub/test_config_flow.py @@ -66,15 +66,7 @@ async def test_reauth_success(hass: HomeAssistant, smarttub_api, account) -> Non ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -107,15 +99,7 @@ async def test_reauth_wrong_account(hass: HomeAssistant, smarttub_api, account) # we try to reauth account #2, and the user successfully authenticates to account #1 account.id = mock_entry1.unique_id - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry2.unique_id, - "entry_id": mock_entry2.entry_id, - }, - data=mock_entry2.data, - ) + result = await mock_entry2.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == 
"reauth_confirm" diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index 6bd14e8b581..118d5020cba 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.sonarr.const import ( DEFAULT_WANTED_MAX_ITEMS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -96,15 +96,7 @@ async def test_full_reauth_flow_implementation( """Test the manual reauth flow from start to finish.""" entry = init_integration - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/spotify/test_config_flow.py b/tests/components/spotify/test_config_flow.py index 6040fcd84f2..09feb4a6e83 100644 --- a/tests/components/spotify/test_config_flow.py +++ b/tests/components/spotify/test_config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components.application_credentials import ( async_import_client_credential, ) from homeassistant.components.spotify.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -201,15 +201,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -269,15 +261,7 @@ async def test_reauth_account_mismatch( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) diff --git a/tests/components/steam_online/test_config_flow.py b/tests/components/steam_online/test_config_flow.py index a5bce80d890..140a8309ff9 100644 --- a/tests/components/steam_online/test_config_flow.py +++ b/tests/components/steam_online/test_config_flow.py @@ -5,8 +5,8 @@ from unittest.mock import patch import steam from homeassistant.components.steam_online.const import CONF_ACCOUNTS, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER -from homeassistant.const import CONF_API_KEY, CONF_SOURCE +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import 
entity_registry as er @@ -111,18 +111,10 @@ async def test_flow_user_already_configured(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch_interface(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={}, diff --git a/tests/components/sunweg/test_config_flow.py b/tests/components/sunweg/test_config_flow.py index 80b6a946749..8103003d7fb 100644 --- a/tests/components/sunweg/test_config_flow.py +++ b/tests/components/sunweg/test_config_flow.py @@ -69,14 +69,7 @@ async def test_reauth(hass: HomeAssistant, plant_fixture, inverter_fixture) -> N assert entries[0].data[CONF_USERNAME] == SUNWEG_MOCK_ENTRY.data[CONF_USERNAME] assert entries[0].data[CONF_PASSWORD] == SUNWEG_MOCK_ENTRY.data[CONF_PASSWORD] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/surepetcare/test_config_flow.py b/tests/components/surepetcare/test_config_flow.py index c3c13195aca..c4055ebe658 100644 --- a/tests/components/surepetcare/test_config_flow.py +++ b/tests/components/surepetcare/test_config_flow.py @@ -155,15 +155,7 @@ async def test_reauthentication(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -192,15 +184,7 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -230,15 +214,7 @@ async def test_reauthentication_cannot_connect(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -268,15 +244,7 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - 
"entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index 1574526a701..e5494b7179f 100644 --- a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -21,12 +21,7 @@ from homeassistant.components.synology_dsm.const import ( DEFAULT_SNAPSHOT_QUALITY, DOMAIN, ) -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -297,24 +292,7 @@ async def test_reauth(hass: HomeAssistant, service: MagicMock) -> None: ) entry.add_to_hass(hass) - with patch( - "homeassistant.config_entries.ConfigEntries.async_reload", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - "title_placeholders": {"name": entry.title}, - }, - data={ - CONF_HOST: HOST, - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" From edad766fd3c30cd88676898e5219fff394e9c7ff Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 16:51:16 +0200 Subject: [PATCH 1185/1635] Use start_reauth helper method in integration tests (u-z) (#124796) --- tests/components/unifi/test_config_flow.py | 21 +--------- .../unifiprotect/test_config_flow.py | 8 +--- .../uptimerobot/test_config_flow.py | 40 ++----------------- tests/components/verisure/test_config_flow.py | 30 ++------------ tests/components/vicare/test_config_flow.py | 8 +--- .../components/vlc_telnet/test_config_flow.py | 20 +--------- .../vodafone_station/test_config_flow.py | 27 ++++--------- .../volvooncall/test_config_flow.py | 8 +--- tests/components/vulcan/test_config_flow.py | 22 +++++----- tests/components/wallbox/test_config_flow.py | 16 +------- tests/components/watttime/test_config_flow.py | 15 +++---- .../weatherflow_cloud/test_config_flow.py | 8 +--- tests/components/webostv/test_config_flow.py | 12 +----- .../components/whirlpool/test_config_flow.py | 36 ++--------------- tests/components/withings/test_config_flow.py | 20 ++-------- .../components/xiaomi_ble/test_config_flow.py | 11 +---- .../xiaomi_miio/test_config_flow.py | 6 +-- .../yale_smart_alarm/test_config_flow.py | 20 +--------- .../components/yalexs_ble/test_config_flow.py | 6 +-- tests/components/yolink/test_config_flow.py | 10 +---- 20 files changed, 59 insertions(+), 285 deletions(-) diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py index 1d745511dc5..71b196550da 100644 --- a/tests/components/unifi/test_config_flow.py +++ b/tests/components/unifi/test_config_flow.py @@ -24,7 +24,6 @@ from homeassistant.components.unifi.const import ( CONF_TRACK_WIRED_CLIENTS, DOMAIN as UNIFI_DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -302,15 +301,7 @@ async def test_reauth_flow_update_configuration( """Verify reauth flow can update hub configuration.""" config_entry = 
config_entry_setup - result = await hass.config_entries.flow.async_init( - UNIFI_DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -344,15 +335,7 @@ async def test_reauth_flow_update_configuration_on_not_loaded_entry( with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError): config_entry = await config_entry_factory() - result = await hass.config_entries.flow.async_init( - UNIFI_DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/unifiprotect/test_config_flow.py b/tests/components/unifiprotect/test_config_flow.py index 5d02e1cf098..8bfdc004092 100644 --- a/tests/components/unifiprotect/test_config_flow.py +++ b/tests/components/unifiprotect/test_config_flow.py @@ -224,13 +224,7 @@ async def test_form_reauth_auth( ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) diff --git a/tests/components/uptimerobot/test_config_flow.py b/tests/components/uptimerobot/test_config_flow.py index 1cf0a358a87..3ba5ad696a6 100644 --- a/tests/components/uptimerobot/test_config_flow.py +++ b/tests/components/uptimerobot/test_config_flow.py @@ -168,15 +168,7 @@ async def test_reauthentication( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -209,15 +201,7 @@ async def test_reauthentication_failure( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -253,15 +237,7 @@ async def test_reauthentication_failure_no_existing_entry( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -294,15 +270,7 @@ async def test_reauthentication_failure_account_not_matching( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - 
result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/verisure/test_config_flow.py b/tests/components/verisure/test_config_flow.py index cf478b093c0..e6dd11669d1 100644 --- a/tests/components/verisure/test_config_flow.py +++ b/tests/components/verisure/test_config_flow.py @@ -352,15 +352,7 @@ async def test_reauth_flow( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -395,15 +387,7 @@ async def test_reauth_flow_with_mfa( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -466,15 +450,7 @@ async def test_reauth_flow_errors( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) mock_verisure_config_flow.login.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/vicare/test_config_flow.py b/tests/components/vicare/test_config_flow.py index b823bb72dc9..a522cf75d5d 100644 --- a/tests/components/vicare/test_config_flow.py +++ b/tests/components/vicare/test_config_flow.py @@ -11,7 +11,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import dhcp from homeassistant.components.vicare.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,11 +104,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=VALID_CONFIG, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/vlc_telnet/test_config_flow.py b/tests/components/vlc_telnet/test_config_flow.py index 
54edafab14a..d29a2c06beb 100644 --- a/tests/components/vlc_telnet/test_config_flow.py +++ b/tests/components/vlc_telnet/test_config_flow.py @@ -153,15 +153,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry_data, - ) + result = await entry.start_reauth_flow(hass) with ( patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), @@ -209,15 +201,7 @@ async def test_reauth_errors( entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry_data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 0492d32070f..3a54f250871 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.components.vodafone_station.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,6 +124,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -136,15 +139,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: "homeassistant.components.vodafone_station.async_setup_entry", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -172,6 +166,10 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + with ( patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.login", @@ -184,15 +182,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> "homeassistant.components.vodafone_station.async_setup_entry", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/volvooncall/test_config_flow.py b/tests/components/volvooncall/test_config_flow.py index 8bf8bcc7412..5268432c17e 100644 --- a/tests/components/volvooncall/test_config_flow.py +++ b/tests/components/volvooncall/test_config_flow.py @@ -153,13 +153,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) first_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": first_entry.entry_id, - }, - ) + result = await first_entry.start_reauth_flow(hass) # the first form is just the confirmation prompt assert result["type"] is FlowResultType.FORM diff --git a/tests/components/vulcan/test_config_flow.py b/tests/components/vulcan/test_config_flow.py index 3311f3c71b2..a72e77b32e8 100644 --- a/tests/components/vulcan/test_config_flow.py +++ b/tests/components/vulcan/test_config_flow.py @@ -137,14 +137,13 @@ async def test_config_flow_reauth_success( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - MockConfigEntry( + entry = MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "0"}, - ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -176,14 +175,13 @@ async def test_config_flow_reauth_without_matching_entries( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - MockConfigEntry( + entry = MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "1"}, - ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -206,9 +204,13 @@ async def test_config_flow_reauth_with_errors( """Test reauth config flow with errors.""" mock_keystore.return_value = fake_keystore mock_account.return_value = fake_account - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} + entry = MockConfigEntry( + domain=const.DOMAIN, + unique_id="0", + data={"student_id": "0"}, ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/wallbox/test_config_flow.py b/tests/components/wallbox/test_config_flow.py index c0ff0b19c94..cc38576eb2f 100644 --- a/tests/components/wallbox/test_config_flow.py +++ b/tests/components/wallbox/test_config_flow.py @@ -160,13 +160,7 @@ async def test_form_reauth(hass: HomeAssistant, entry: MockConfigEntry) -> None: status_code=200, ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -201,13 +195,7 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) status_code=200, ) - result = await 
hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/watttime/test_config_flow.py b/tests/components/watttime/test_config_flow.py index f8eee6b48bf..5087717491f 100644 --- a/tests/components/watttime/test_config_flow.py +++ b/tests/components/watttime/test_config_flow.py @@ -25,6 +25,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("exc", "error"), @@ -144,21 +146,16 @@ async def test_show_form_user(hass: HomeAssistant) -> None: async def test_step_reauth( - hass: HomeAssistant, config_auth, config_coordinates, config_entry, setup_watttime + hass: HomeAssistant, + config_entry: MockConfigEntry, + setup_watttime, ) -> None: """Test a full reauth flow.""" + result = await config_entry.start_reauth_flow(hass) with patch( "homeassistant.components.watttime.async_setup_entry", return_value=True, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data={ - **config_auth, - **config_coordinates, - }, - ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "password"}, diff --git a/tests/components/weatherflow_cloud/test_config_flow.py b/tests/components/weatherflow_cloud/test_config_flow.py index 3a1f41563fe..9dc5ad1322d 100644 --- a/tests/components/weatherflow_cloud/test_config_flow.py +++ b/tests/components/weatherflow_cloud/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.weatherflow_cloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -111,11 +111,7 @@ async def test_reauth(hass: HomeAssistant, mock_get_stations_401_error) -> None: assert not await hass.config_entries.async_setup(entry.entry_id) assert entry.state is ConfigEntryState.SETUP_ERROR - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py index 406bb9c8804..9b2983aab47 100644 --- a/tests/components/webostv/test_config_flow.py +++ b/tests/components/webostv/test_config_flow.py @@ -302,11 +302,7 @@ async def test_reauth_successful( entry = await setup_webostv(hass) assert client - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) @@ -339,11 +335,7 @@ async def test_reauth_errors( entry = await setup_webostv(hass) assert client - result = await hass.config_entries.flow.async_init( - 
DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/whirlpool/test_config_flow.py b/tests/components/whirlpool/test_config_flow.py index e3896a436d4..1240e1303e1 100644 --- a/tests/components/whirlpool/test_config_flow.py +++ b/tests/components/whirlpool/test_config_flow.py @@ -235,15 +235,7 @@ async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -294,21 +286,7 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> Non ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "new-password", - "region": region[0], - "brand": brand[0], - }, - ) - + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -345,15 +323,7 @@ async def test_reauth_flow_connnection_error( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py index 20bef90a31e..39c8340a78e 100644 --- a/tests/components/withings/test_config_flow.py +++ b/tests/components/withings/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.withings.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -145,14 +145,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -207,14 +200,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = 
await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/xiaomi_ble/test_config_flow.py b/tests/components/xiaomi_ble/test_config_flow.py index b61615e0f79..f690665608b 100644 --- a/tests/components/xiaomi_ble/test_config_flow.py +++ b/tests/components/xiaomi_ble/test_config_flow.py @@ -1083,16 +1083,7 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - }, - data=entry.data | {"device": device}, - ) + result = await entry.start_reauth_flow(hass, data={"device": device}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/xiaomi_miio/test_config_flow.py b/tests/components/xiaomi_miio/test_config_flow.py index 707da4bff12..146526c69a5 100644 --- a/tests/components/xiaomi_miio/test_config_flow.py +++ b/tests/components/xiaomi_miio/test_config_flow.py @@ -976,11 +976,7 @@ async def test_reauth(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - const.DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index 4ef201d2122..d5651503768 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -132,15 +132,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -202,15 +194,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", diff --git a/tests/components/yalexs_ble/test_config_flow.py b/tests/components/yalexs_ble/test_config_flow.py index 15552fdec5f..5d57095ccd5 100644 --- a/tests/components/yalexs_ble/test_config_flow.py +++ b/tests/components/yalexs_ble/test_config_flow.py @@ -945,11 +945,7 @@ async def test_reauth(hass: HomeAssistant) -> None: unique_id=YALE_ACCESS_LOCK_DISCOVERY_INFO.address, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_validate" diff --git a/tests/components/yolink/test_config_flow.py b/tests/components/yolink/test_config_flow.py index d7ba09e4269..1dd71368d73 100644 --- a/tests/components/yolink/test_config_flow.py +++ b/tests/components/yolink/test_config_flow.py @@ -172,15 +172,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 From 03ead27f6c5130504f1083846175ef783139cf56 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 28 Aug 2024 05:46:03 -1000 Subject: [PATCH 1186/1635] Split august and yale integrations (#124677) * Split august and yale integrations [part 1] (#122253) * merge with dev * Remove unused constant * Remove yale IPv6 workaround (#123409) * Convert yale to use oauth (#123806) Co-authored-by: Joost Lekkerkerker * Update yale for switch from pubnub to websockets (#124675) --------- Co-authored-by: Joost Lekkerkerker --- CODEOWNERS | 2 + homeassistant/brands/yale.json | 8 +- homeassistant/components/august/manifest.json | 4 - homeassistant/components/yale/__init__.py | 81 +++ .../yale/application_credentials.py | 15 + .../components/yale/binary_sensor.py | 189 +++++++ homeassistant/components/yale/button.py | 32 ++ homeassistant/components/yale/camera.py | 90 +++ homeassistant/components/yale/config_flow.py | 57 ++ homeassistant/components/yale/const.py | 47 ++ homeassistant/components/yale/data.py | 52 ++ homeassistant/components/yale/diagnostics.py | 49 ++ homeassistant/components/yale/entity.py | 115 ++++ homeassistant/components/yale/event.py | 104 ++++ homeassistant/components/yale/gateway.py | 43 ++ homeassistant/components/yale/icons.json | 9 + homeassistant/components/yale/lock.py | 147 +++++ homeassistant/components/yale/manifest.json | 16 + homeassistant/components/yale/sensor.py | 210 +++++++ homeassistant/components/yale/strings.json | 71 +++ homeassistant/components/yale/util.py | 83 +++ .../generated/application_credentials.py | 1 + homeassistant/generated/config_flows.py | 1 + homeassistant/generated/dhcp.py | 10 +- homeassistant/generated/integrations.json | 6 + requirements_all.txt | 2 + requirements_test_all.txt | 2 + tests/components/yale/__init__.py | 12 + tests/components/yale/conftest.py | 59 ++ .../fixtures/get_activity.bridge_offline.json | 36 ++ .../fixtures/get_activity.bridge_online.json | 36 ++ .../get_activity.doorbell_motion.json | 58 ++ .../yale/fixtures/get_activity.jammed.json | 36 ++ .../yale/fixtures/get_activity.lock.json | 36 ++ .../get_activity.lock_from_autorelock.json | 36 ++ .../get_activity.lock_from_bluetooth.json | 36 ++ .../get_activity.lock_from_keypad.json | 37 ++ .../get_activity.lock_from_manual.json | 39 ++ .../yale/fixtures/get_activity.locking.json | 36 ++ .../get_activity.unlock_from_manual.json | 39 ++ .../get_activity.unlock_from_tag.json | 39 ++ .../yale/fixtures/get_activity.unlocking.json | 36 ++ .../yale/fixtures/get_doorbell.json | 81 +++ .../yale/fixtures/get_doorbell.nobattery.json | 78 +++ .../yale/fixtures/get_doorbell.offline.json | 126 
+++++ .../fixtures/get_lock.doorsense_init.json | 92 ++++ .../fixtures/get_lock.low_keypad_battery.json | 92 ++++ .../yale/fixtures/get_lock.offline.json | 57 ++ .../yale/fixtures/get_lock.online.json | 92 ++++ .../get_lock.online.unknown_state.json | 59 ++ .../get_lock.online_missing_doorsense.json | 50 ++ .../get_lock.online_with_doorsense.json | 52 ++ .../fixtures/get_lock.online_with_keys.json | 100 ++++ .../get_lock.online_with_unlatch.json | 94 ++++ tests/components/yale/fixtures/get_locks.json | 16 + tests/components/yale/fixtures/jwt | 1 + tests/components/yale/fixtures/lock_open.json | 26 + .../fixtures/lock_with_doorbell.online.json | 100 ++++ tests/components/yale/fixtures/reauth_jwt | 1 + .../yale/fixtures/reauth_jwt_wrong_account | 1 + .../yale/fixtures/unlock_closed.json | 26 + tests/components/yale/mocks.py | 515 ++++++++++++++++++ .../yale/snapshots/test_diagnostics.ambr | 125 +++++ tests/components/yale/test_binary_sensor.py | 390 +++++++++++++ tests/components/yale/test_button.py | 24 + tests/components/yale/test_camera.py | 93 ++++ tests/components/yale/test_config_flow.py | 207 +++++++ tests/components/yale/test_diagnostics.py | 31 ++ tests/components/yale/test_event.py | 174 ++++++ tests/components/yale/test_init.py | 238 ++++++++ tests/components/yale/test_lock.py | 501 +++++++++++++++++ tests/components/yale/test_sensor.py | 362 ++++++++++++ 72 files changed, 5811 insertions(+), 10 deletions(-) create mode 100644 homeassistant/components/yale/__init__.py create mode 100644 homeassistant/components/yale/application_credentials.py create mode 100644 homeassistant/components/yale/binary_sensor.py create mode 100644 homeassistant/components/yale/button.py create mode 100644 homeassistant/components/yale/camera.py create mode 100644 homeassistant/components/yale/config_flow.py create mode 100644 homeassistant/components/yale/const.py create mode 100644 homeassistant/components/yale/data.py create mode 100644 homeassistant/components/yale/diagnostics.py create mode 100644 homeassistant/components/yale/entity.py create mode 100644 homeassistant/components/yale/event.py create mode 100644 homeassistant/components/yale/gateway.py create mode 100644 homeassistant/components/yale/icons.json create mode 100644 homeassistant/components/yale/lock.py create mode 100644 homeassistant/components/yale/manifest.json create mode 100644 homeassistant/components/yale/sensor.py create mode 100644 homeassistant/components/yale/strings.json create mode 100644 homeassistant/components/yale/util.py create mode 100644 tests/components/yale/__init__.py create mode 100644 tests/components/yale/conftest.py create mode 100644 tests/components/yale/fixtures/get_activity.bridge_offline.json create mode 100644 tests/components/yale/fixtures/get_activity.bridge_online.json create mode 100644 tests/components/yale/fixtures/get_activity.doorbell_motion.json create mode 100644 tests/components/yale/fixtures/get_activity.jammed.json create mode 100644 tests/components/yale/fixtures/get_activity.lock.json create mode 100644 tests/components/yale/fixtures/get_activity.lock_from_autorelock.json create mode 100644 tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json create mode 100644 tests/components/yale/fixtures/get_activity.lock_from_keypad.json create mode 100644 tests/components/yale/fixtures/get_activity.lock_from_manual.json create mode 100644 tests/components/yale/fixtures/get_activity.locking.json create mode 100644 tests/components/yale/fixtures/get_activity.unlock_from_manual.json 
create mode 100644 tests/components/yale/fixtures/get_activity.unlock_from_tag.json create mode 100644 tests/components/yale/fixtures/get_activity.unlocking.json create mode 100644 tests/components/yale/fixtures/get_doorbell.json create mode 100644 tests/components/yale/fixtures/get_doorbell.nobattery.json create mode 100644 tests/components/yale/fixtures/get_doorbell.offline.json create mode 100644 tests/components/yale/fixtures/get_lock.doorsense_init.json create mode 100644 tests/components/yale/fixtures/get_lock.low_keypad_battery.json create mode 100644 tests/components/yale/fixtures/get_lock.offline.json create mode 100644 tests/components/yale/fixtures/get_lock.online.json create mode 100644 tests/components/yale/fixtures/get_lock.online.unknown_state.json create mode 100644 tests/components/yale/fixtures/get_lock.online_missing_doorsense.json create mode 100644 tests/components/yale/fixtures/get_lock.online_with_doorsense.json create mode 100644 tests/components/yale/fixtures/get_lock.online_with_keys.json create mode 100644 tests/components/yale/fixtures/get_lock.online_with_unlatch.json create mode 100644 tests/components/yale/fixtures/get_locks.json create mode 100644 tests/components/yale/fixtures/jwt create mode 100644 tests/components/yale/fixtures/lock_open.json create mode 100644 tests/components/yale/fixtures/lock_with_doorbell.online.json create mode 100644 tests/components/yale/fixtures/reauth_jwt create mode 100644 tests/components/yale/fixtures/reauth_jwt_wrong_account create mode 100644 tests/components/yale/fixtures/unlock_closed.json create mode 100644 tests/components/yale/mocks.py create mode 100644 tests/components/yale/snapshots/test_diagnostics.ambr create mode 100644 tests/components/yale/test_binary_sensor.py create mode 100644 tests/components/yale/test_button.py create mode 100644 tests/components/yale/test_camera.py create mode 100644 tests/components/yale/test_config_flow.py create mode 100644 tests/components/yale/test_diagnostics.py create mode 100644 tests/components/yale/test_event.py create mode 100644 tests/components/yale/test_init.py create mode 100644 tests/components/yale/test_lock.py create mode 100644 tests/components/yale/test_sensor.py diff --git a/CODEOWNERS b/CODEOWNERS index 20e0b3adced..6f118ca1ba8 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1660,6 +1660,8 @@ build.json @home-assistant/supervisor /tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG /homeassistant/components/xiaomi_tv/ @simse /homeassistant/components/xmpp/ @fabaff @flowolf +/homeassistant/components/yale/ @bdraco +/tests/components/yale/ @bdraco /homeassistant/components/yale_smart_alarm/ @gjohansson-ST /tests/components/yale_smart_alarm/ @gjohansson-ST /homeassistant/components/yalexs_ble/ @bdraco diff --git a/homeassistant/brands/yale.json b/homeassistant/brands/yale.json index 53dc9b43569..a0e7c6bd453 100644 --- a/homeassistant/brands/yale.json +++ b/homeassistant/brands/yale.json @@ -1,5 +1,11 @@ { "domain": "yale", "name": "Yale", - "integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"] + "integrations": [ + "august", + "yale_smart_alarm", + "yalexs_ble", + "yale_home", + "yale" + ] } diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index a20ec5a2205..fe630638cf2 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -4,10 +4,6 @@ "codeowners": ["@bdraco"], "config_flow": true, "dhcp": [ - { - "hostname": 
"yale-connect-plus", - "macaddress": "00177A*" - }, { "hostname": "connect", "macaddress": "D86162*" diff --git a/homeassistant/components/yale/__init__.py b/homeassistant/components/yale/__init__.py new file mode 100644 index 00000000000..f7a4a6e0f4d --- /dev/null +++ b/homeassistant/components/yale/__init__.py @@ -0,0 +1,81 @@ +"""Support for Yale devices.""" + +from __future__ import annotations + +from pathlib import Path +from typing import cast + +from aiohttp import ClientResponseError +from yalexs.const import Brand +from yalexs.exceptions import YaleApiError +from yalexs.manager.const import CONF_BRAND +from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation +from yalexs.manager.gateway import Config as YaleXSConfig + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow, device_registry as dr + +from .const import DOMAIN, PLATFORMS +from .data import YaleData +from .gateway import YaleGateway +from .util import async_create_yale_clientsession + +type YaleConfigEntry = ConfigEntry[YaleData] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up yale from a config entry.""" + session = async_create_yale_clientsession(hass) + implementation = ( + await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry + ) + ) + oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + yale_gateway = YaleGateway(Path(hass.config.config_dir), session, oauth_session) + try: + await async_setup_yale(hass, entry, yale_gateway) + except (RequireValidation, InvalidAuth) as err: + raise ConfigEntryAuthFailed from err + except TimeoutError as err: + raise ConfigEntryNotReady("Timed out connecting to yale api") from err + except (YaleApiError, ClientResponseError, CannotConnect) as err: + raise ConfigEntryNotReady from err + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_setup_yale( + hass: HomeAssistant, entry: YaleConfigEntry, yale_gateway: YaleGateway +) -> None: + """Set up the yale component.""" + config = cast(YaleXSConfig, entry.data) + await yale_gateway.async_setup({**config, CONF_BRAND: Brand.YALE_GLOBAL}) + await yale_gateway.async_authenticate() + await yale_gateway.async_refresh_access_token_if_needed() + data = entry.runtime_data = YaleData(hass, yale_gateway) + entry.async_on_unload( + hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, data.async_stop) + ) + entry.async_on_unload(data.async_stop) + await data.async_setup() + + +async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: YaleConfigEntry, device_entry: dr.DeviceEntry +) -> bool: + """Remove yale config entry from a device if its no longer present.""" + return not any( + identifier + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + and config_entry.runtime_data.get_device(identifier[1]) + ) diff --git a/homeassistant/components/yale/application_credentials.py b/homeassistant/components/yale/application_credentials.py new file mode 100644 index 00000000000..31b5b7a92c7 --- 
/dev/null +++ b/homeassistant/components/yale/application_credentials.py @@ -0,0 +1,15 @@ +"""application_credentials platform the yale integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +OAUTH2_AUTHORIZE = "https://oauth.aaecosystem.com/authorize" +OAUTH2_TOKEN = "https://oauth.aaecosystem.com/access_token" + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer( + authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) diff --git a/homeassistant/components/yale/binary_sensor.py b/homeassistant/components/yale/binary_sensor.py new file mode 100644 index 00000000000..cbc0b48b177 --- /dev/null +++ b/homeassistant/components/yale/binary_sensor.py @@ -0,0 +1,189 @@ +"""Support for Yale binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta +from functools import partial +import logging + +from yalexs.activity import Activity, ActivityType +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail, LockDoorStatus +from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL +from yalexs.util import update_lock_detail_from_activity + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_later + +from . import YaleConfigEntry, YaleData +from .entity import YaleDescriptionEntity +from .util import ( + retrieve_ding_activity, + retrieve_doorbell_motion_activity, + retrieve_online_state, + retrieve_time_based_activity, +) + +_LOGGER = logging.getLogger(__name__) + +TIME_TO_RECHECK_DETECTION = timedelta( + seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds() * 3 +) + + +@dataclass(frozen=True, kw_only=True) +class YaleDoorbellBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Yale binary_sensor entity.""" + + value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] + is_time_based: bool + + +SENSOR_TYPE_DOOR = BinarySensorEntityDescription( + key="open", + device_class=BinarySensorDeviceClass.DOOR, +) + +SENSOR_TYPES_VIDEO_DOORBELL = ( + YaleDoorbellBinarySensorEntityDescription( + key="motion", + device_class=BinarySensorDeviceClass.MOTION, + value_fn=retrieve_doorbell_motion_activity, + is_time_based=True, + ), + YaleDoorbellBinarySensorEntityDescription( + key="image capture", + translation_key="image_capture", + value_fn=partial( + retrieve_time_based_activity, {ActivityType.DOORBELL_IMAGE_CAPTURE} + ), + is_time_based=True, + ), + YaleDoorbellBinarySensorEntityDescription( + key="online", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=retrieve_online_state, + is_time_based=False, + ), +) + + +SENSOR_TYPES_DOORBELL: tuple[YaleDoorbellBinarySensorEntityDescription, ...] 
= ( + YaleDoorbellBinarySensorEntityDescription( + key="ding", + translation_key="ding", + device_class=BinarySensorDeviceClass.OCCUPANCY, + value_fn=retrieve_ding_activity, + is_time_based=True, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Yale binary sensors.""" + data = config_entry.runtime_data + entities: list[BinarySensorEntity] = [] + + for lock in data.locks: + detail = data.get_device_detail(lock.device_id) + if detail.doorsense: + entities.append(YaleDoorBinarySensor(data, lock, SENSOR_TYPE_DOOR)) + + if detail.doorbell: + entities.extend( + YaleDoorbellBinarySensor(data, lock, description) + for description in SENSOR_TYPES_DOORBELL + ) + + for doorbell in data.doorbells: + entities.extend( + YaleDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + ) + + async_add_entities(entities) + + +class YaleDoorBinarySensor(YaleDescriptionEntity, BinarySensorEntity): + """Representation of an Yale Door binary sensor.""" + + _attr_device_class = BinarySensorDeviceClass.DOOR + description: BinarySensorEntityDescription + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + if door_activity := self._get_latest({ActivityType.DOOR_OPERATION}): + update_lock_detail_from_activity(self._detail, door_activity) + if door_activity.was_pushed: + self._detail.set_online(True) + + if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): + update_lock_detail_from_activity(self._detail, bridge_activity) + self._attr_available = self._detail.bridge_is_online + self._attr_is_on = self._detail.door_state == LockDoorStatus.OPEN + + +class YaleDoorbellBinarySensor(YaleDescriptionEntity, BinarySensorEntity): + """Representation of an Yale binary sensor.""" + + entity_description: YaleDoorbellBinarySensorEntityDescription + _check_for_off_update_listener: Callable[[], None] | None = None + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + self._cancel_any_pending_updates() + self._attr_is_on = bool( + self.entity_description.value_fn(self._data, self._detail) + ) + + if self.entity_description.is_time_based: + self._attr_available = retrieve_online_state(self._data, self._detail) + self._schedule_update_to_recheck_turn_off_sensor() + else: + self._attr_available = True + + @callback + def _async_scheduled_update(self, now: datetime) -> None: + """Timer callback for sensor update.""" + self._check_for_off_update_listener = None + self._update_from_data() + if not self.is_on: + self.async_write_ha_state() + + def _schedule_update_to_recheck_turn_off_sensor(self) -> None: + """Schedule an update to recheck the sensor to see if it is ready to turn off.""" + # If the sensor is already off there is nothing to do + if not self.is_on: + return + self._check_for_off_update_listener = async_call_later( + self.hass, TIME_TO_RECHECK_DETECTION, self._async_scheduled_update + ) + + def _cancel_any_pending_updates(self) -> None: + """Cancel any updates to recheck a sensor to see if it is ready to turn off.""" + if not self._check_for_off_update_listener: + return + _LOGGER.debug("%s: canceled pending update", self.entity_id) + self._check_for_off_update_listener() + self._check_for_off_update_listener = None + + async def async_will_remove_from_hass(self) -> None: + """When removing cancel any scheduled 
updates.""" + self._cancel_any_pending_updates() + await super().async_will_remove_from_hass() diff --git a/homeassistant/components/yale/button.py b/homeassistant/components/yale/button.py new file mode 100644 index 00000000000..de0cff4f0c8 --- /dev/null +++ b/homeassistant/components/yale/button.py @@ -0,0 +1,32 @@ +"""Support for Yale buttons.""" + +from homeassistant.components.button import ButtonEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry +from .entity import YaleEntityMixin + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale lock wake buttons.""" + data = config_entry.runtime_data + async_add_entities(YaleWakeLockButton(data, lock, "wake") for lock in data.locks) + + +class YaleWakeLockButton(YaleEntityMixin, ButtonEntity): + """Representation of an Yale lock wake button.""" + + _attr_translation_key = "wake" + + async def async_press(self) -> None: + """Wake the device.""" + await self._data.async_status_async(self._device_id, self._hyper_bridge) + + @callback + def _update_from_data(self) -> None: + """Nothing to update as buttons are stateless.""" diff --git a/homeassistant/components/yale/camera.py b/homeassistant/components/yale/camera.py new file mode 100644 index 00000000000..500239d7f3a --- /dev/null +++ b/homeassistant/components/yale/camera.py @@ -0,0 +1,90 @@ +"""Support for Yale doorbell camera.""" + +from __future__ import annotations + +import logging + +from aiohttp import ClientSession +from yalexs.activity import ActivityType +from yalexs.doorbell import Doorbell +from yalexs.util import update_doorbell_image_from_activity + +from homeassistant.components.camera import Camera +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry, YaleData +from .const import DEFAULT_NAME, DEFAULT_TIMEOUT +from .entity import YaleEntityMixin + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale cameras.""" + data = config_entry.runtime_data + # Create an aiohttp session instead of using the default one since the + # default one is likely to trigger yale's WAF if another integration + # is also using Cloudflare + session = aiohttp_client.async_create_clientsession(hass) + async_add_entities( + YaleCamera(data, doorbell, session, DEFAULT_TIMEOUT) + for doorbell in data.doorbells + ) + + +class YaleCamera(YaleEntityMixin, Camera): + """An implementation of an Yale security camera.""" + + _attr_translation_key = "camera" + _attr_motion_detection_enabled = True + _attr_brand = DEFAULT_NAME + _image_url: str | None = None + _image_content: bytes | None = None + + def __init__( + self, data: YaleData, device: Doorbell, session: ClientSession, timeout: int + ) -> None: + """Initialize an Yale security camera.""" + super().__init__(data, device, "camera") + self._timeout = timeout + self._session = session + self._attr_model = self._detail.model + + @property + def is_recording(self) -> bool: + """Return true if the device is recording.""" + return self._device.has_subscription + + async def _async_update(self): + """Update device.""" + _LOGGER.debug("async_update called %s", self._detail.device_name) + await self._data.refresh_camera_by_id(self._device_id) + self._update_from_data() + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + if doorbell_activity := self._get_latest( + {ActivityType.DOORBELL_MOTION, ActivityType.DOORBELL_IMAGE_CAPTURE} + ): + update_doorbell_image_from_activity(self._detail, doorbell_activity) + + async def async_camera_image( + self, width: int | None = None, height: int | None = None + ) -> bytes | None: + """Return bytes of camera image.""" + self._update_from_data() + + if self._image_url is not self._detail.image_url: + self._image_content = await self._data.async_get_doorbell_image( + self._device_id, self._session, timeout=self._timeout + ) + self._image_url = self._detail.image_url + + return self._image_content diff --git a/homeassistant/components/yale/config_flow.py b/homeassistant/components/yale/config_flow.py new file mode 100644 index 00000000000..cdd44754103 --- /dev/null +++ b/homeassistant/components/yale/config_flow.py @@ -0,0 +1,57 @@ +"""Config flow for Yale integration.""" + +from collections.abc import Mapping +import logging +from typing import Any + +import jwt + +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN): + """Handle a config flow for Yale.""" + + VERSION = 1 + DOMAIN = DOMAIN + reauth_entry: ConfigEntry | None = None + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return _LOGGER + + async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_user() + + def _async_get_user_id_from_access_token(self, encoded: str) -> 
str: + """Get user ID from access token.""" + decoded = jwt.decode( + encoded, + "", + verify=False, + options={"verify_signature": False}, + algorithms=["HS256"], + ) + return decoded["userId"] + + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: + """Create an entry for the flow.""" + user_id = self._async_get_user_id_from_access_token( + data["token"]["access_token"] + ) + if entry := self.reauth_entry: + if entry.unique_id != user_id: + return self.async_abort(reason="reauth_invalid_user") + return self.async_update_reload_and_abort(entry, data=data) + await self.async_set_unique_id(user_id) + return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/yale/const.py b/homeassistant/components/yale/const.py new file mode 100644 index 00000000000..630d15f7230 --- /dev/null +++ b/homeassistant/components/yale/const.py @@ -0,0 +1,47 @@ +"""Constants for Yale devices.""" + +from yalexs.const import Brand + +from homeassistant.const import Platform + +DEFAULT_TIMEOUT = 25 + +CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file" +CONF_BRAND = "brand" +CONF_LOGIN_METHOD = "login_method" +CONF_INSTALL_ID = "install_id" + +VERIFICATION_CODE_KEY = "verification_code" + +DEFAULT_BRAND = Brand.YALE_HOME + +MANUFACTURER = "Yale Home Inc." + +DEFAULT_NAME = "Yale" +DOMAIN = "yale" + +OPERATION_METHOD_AUTORELOCK = "autorelock" +OPERATION_METHOD_REMOTE = "remote" +OPERATION_METHOD_KEYPAD = "keypad" +OPERATION_METHOD_MANUAL = "manual" +OPERATION_METHOD_TAG = "tag" +OPERATION_METHOD_MOBILE_DEVICE = "mobile" + +ATTR_OPERATION_AUTORELOCK = "autorelock" +ATTR_OPERATION_METHOD = "method" +ATTR_OPERATION_REMOTE = "remote" +ATTR_OPERATION_KEYPAD = "keypad" +ATTR_OPERATION_MANUAL = "manual" +ATTR_OPERATION_TAG = "tag" + +LOGIN_METHODS = ["phone", "email"] +DEFAULT_LOGIN_METHOD = "email" + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CAMERA, + Platform.EVENT, + Platform.LOCK, + Platform.SENSOR, +] diff --git a/homeassistant/components/yale/data.py b/homeassistant/components/yale/data.py new file mode 100644 index 00000000000..12736f7733d --- /dev/null +++ b/homeassistant/components/yale/data.py @@ -0,0 +1,52 @@ +"""Support for Yale devices.""" + +from __future__ import annotations + +from yalexs.lock import LockDetail +from yalexs.manager.data import YaleXSData +from yalexs_ble import YaleXSBLEDiscovery + +from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import discovery_flow + +from .gateway import YaleGateway + +YALEXS_BLE_DOMAIN = "yalexs_ble" + + +@callback +def _async_trigger_ble_lock_discovery( + hass: HomeAssistant, locks_with_offline_keys: list[LockDetail] +) -> None: + """Update keys for the yalexs-ble integration if available.""" + for lock_detail in locks_with_offline_keys: + discovery_flow.async_create_flow( + hass, + YALEXS_BLE_DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data=YaleXSBLEDiscovery( + { + "name": lock_detail.device_name, + "address": lock_detail.mac_address, + "serial": lock_detail.serial_number, + "key": lock_detail.offline_key, + "slot": lock_detail.offline_slot, + } + ), + ) + + +class YaleData(YaleXSData): + """yale data object.""" + + def __init__(self, hass: HomeAssistant, yale_gateway: YaleGateway) -> None: + """Init yale data object.""" + self._hass = hass + super().__init__(yale_gateway, HomeAssistantError) + + 
@callback + def async_offline_key_discovered(self, detail: LockDetail) -> None: + """Handle offline key discovery.""" + _async_trigger_ble_lock_discovery(self._hass, [detail]) diff --git a/homeassistant/components/yale/diagnostics.py b/homeassistant/components/yale/diagnostics.py new file mode 100644 index 00000000000..ef8d837b82e --- /dev/null +++ b/homeassistant/components/yale/diagnostics.py @@ -0,0 +1,49 @@ +"""Diagnostics support for yale.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.core import HomeAssistant + +from . import YaleConfigEntry +from .const import CONF_BRAND, DEFAULT_BRAND + +TO_REDACT = { + "HouseID", + "OfflineKeys", + "installUserID", + "invitations", + "key", + "pins", + "pubsubChannel", + "recentImage", + "remoteOperateSecret", + "users", + "zWaveDSK", + "contentToken", +} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: YaleConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = entry.runtime_data + + return { + "locks": { + lock.device_id: async_redact_data( + data.get_device_detail(lock.device_id).raw, TO_REDACT + ) + for lock in data.locks + }, + "doorbells": { + doorbell.device_id: async_redact_data( + data.get_device_detail(doorbell.device_id).raw, TO_REDACT + ) + for doorbell in data.doorbells + }, + "brand": entry.data.get(CONF_BRAND, DEFAULT_BRAND), + } diff --git a/homeassistant/components/yale/entity.py b/homeassistant/components/yale/entity.py new file mode 100644 index 00000000000..7105fda861c --- /dev/null +++ b/homeassistant/components/yale/entity.py @@ -0,0 +1,115 @@ +"""Base class for Yale entity.""" + +from abc import abstractmethod + +from yalexs.activity import Activity, ActivityType +from yalexs.doorbell import Doorbell, DoorbellDetail +from yalexs.keypad import KeypadDetail +from yalexs.lock import Lock, LockDetail +from yalexs.util import get_configuration_url + +from homeassistant.const import ATTR_CONNECTIONS +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, EntityDescription + +from . 
import DOMAIN, YaleData +from .const import MANUFACTURER + +DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] + + +class YaleEntityMixin(Entity): + """Base implementation for Yale device.""" + + _attr_should_poll = False + _attr_has_entity_name = True + + def __init__( + self, data: YaleData, device: Doorbell | Lock | KeypadDetail, unique_id: str + ) -> None: + """Initialize an Yale device.""" + super().__init__() + self._data = data + self._stream = data.activity_stream + self._device = device + detail = self._detail + self._device_id = device.device_id + self._attr_unique_id = f"{device.device_id}_{unique_id}" + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self._device_id)}, + manufacturer=MANUFACTURER, + model=detail.model, + name=device.device_name, + sw_version=detail.firmware_version, + suggested_area=_remove_device_types(device.device_name, DEVICE_TYPES), + configuration_url=get_configuration_url(data.brand), + ) + if isinstance(detail, LockDetail) and (mac := detail.mac_address): + self._attr_device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_BLUETOOTH, mac)} + + @property + def _detail(self) -> DoorbellDetail | LockDetail: + return self._data.get_device_detail(self._device.device_id) + + @property + def _hyper_bridge(self) -> bool: + """Check if the lock has a paired hyper bridge.""" + return bool(self._detail.bridge and self._detail.bridge.hyper_bridge) + + @callback + def _get_latest(self, activity_types: set[ActivityType]) -> Activity | None: + """Get the latest activity for the device.""" + return self._stream.get_latest_device_activity(self._device_id, activity_types) + + @callback + def _update_from_data_and_write_state(self) -> None: + self._update_from_data() + self.async_write_ha_state() + + @abstractmethod + def _update_from_data(self) -> None: + """Update the entity state from the data object.""" + + async def async_added_to_hass(self) -> None: + """Subscribe to updates.""" + self.async_on_remove( + self._data.async_subscribe_device_id( + self._device_id, self._update_from_data_and_write_state + ) + ) + self.async_on_remove( + self._stream.async_subscribe_device_id( + self._device_id, self._update_from_data_and_write_state + ) + ) + self._update_from_data() + + +class YaleDescriptionEntity(YaleEntityMixin): + """An Yale entity with a description.""" + + def __init__( + self, + data: YaleData, + device: Doorbell | Lock | KeypadDetail, + description: EntityDescription, + ) -> None: + """Initialize an Yale entity with a description.""" + super().__init__(data, device, description.key) + self.entity_description = description + + +def _remove_device_types(name: str, device_types: list[str]) -> str: + """Strip device types from a string. + + Yale stores the name as Master Bed Lock + or Master Bed Door. We can come up with a + reasonable suggestion by removing the supported + device types from the string. 
+ """ + lower_name = name.lower() + for device_type in device_types: + lower_name = lower_name.removesuffix(f" {device_type}") + return name[: len(lower_name)] diff --git a/homeassistant/components/yale/event.py b/homeassistant/components/yale/event.py new file mode 100644 index 00000000000..7014c5dafbf --- /dev/null +++ b/homeassistant/components/yale/event.py @@ -0,0 +1,104 @@ +"""Support for yale events.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from yalexs.activity import Activity +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry, YaleData +from .entity import YaleDescriptionEntity +from .util import ( + retrieve_ding_activity, + retrieve_doorbell_motion_activity, + retrieve_online_state, +) + + +@dataclass(kw_only=True, frozen=True) +class YaleEventEntityDescription(EventEntityDescription): + """Describe yale event entities.""" + + value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] + + +TYPES_VIDEO_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( + YaleEventEntityDescription( + key="motion", + translation_key="motion", + device_class=EventDeviceClass.MOTION, + event_types=["motion"], + value_fn=retrieve_doorbell_motion_activity, + ), +) + + +TYPES_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( + YaleEventEntityDescription( + key="doorbell", + translation_key="doorbell", + device_class=EventDeviceClass.DOORBELL, + event_types=["ring"], + value_fn=retrieve_ding_activity, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the yale event platform.""" + data = config_entry.runtime_data + entities: list[YaleEventEntity] = [] + + for lock in data.locks: + detail = data.get_device_detail(lock.device_id) + if detail.doorbell: + entities.extend( + YaleEventEntity(data, lock, description) + for description in TYPES_DOORBELL + ) + + for doorbell in data.doorbells: + entities.extend( + YaleEventEntity(data, doorbell, description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + ) + + async_add_entities(entities) + + +class YaleEventEntity(YaleDescriptionEntity, EventEntity): + """An yale event entity.""" + + entity_description: YaleEventEntityDescription + _attr_has_entity_name = True + _last_activity: Activity | None = None + + @callback + def _update_from_data(self) -> None: + """Update from data.""" + self._attr_available = retrieve_online_state(self._data, self._detail) + current_activity = self.entity_description.value_fn(self._data, self._detail) + if not current_activity or current_activity == self._last_activity: + return + self._last_activity = current_activity + event_types = self.entity_description.event_types + if TYPE_CHECKING: + assert event_types is not None + self._trigger_event(event_type=event_types[0]) + self.async_write_ha_state() diff --git a/homeassistant/components/yale/gateway.py b/homeassistant/components/yale/gateway.py new file mode 100644 index 00000000000..cd7796182d2 --- /dev/null +++ b/homeassistant/components/yale/gateway.py @@ -0,0 +1,43 @@ +"""Handle Yale connection setup and authentication.""" + 
+import logging +from pathlib import Path + +from aiohttp import ClientSession +from yalexs.authenticator_common import Authentication, AuthenticationState +from yalexs.manager.gateway import Gateway + +from homeassistant.helpers import config_entry_oauth2_flow + +_LOGGER = logging.getLogger(__name__) + + +class YaleGateway(Gateway): + """Handle the connection to Yale.""" + + def __init__( + self, + config_path: Path, + aiohttp_session: ClientSession, + oauth_session: config_entry_oauth2_flow.OAuth2Session, + ) -> None: + """Init the connection.""" + super().__init__(config_path, aiohttp_session) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Get access token.""" + await self._oauth_session.async_ensure_token_valid() + return self._oauth_session.token["access_token"] + + async def async_refresh_access_token_if_needed(self) -> None: + """Refresh the access token if needed.""" + await self._oauth_session.async_ensure_token_valid() + + async def async_authenticate(self) -> Authentication: + """Authenticate with the details provided to setup.""" + await self._oauth_session.async_ensure_token_valid() + self.authentication = Authentication( + AuthenticationState.AUTHENTICATED, None, None, None + ) + return self.authentication diff --git a/homeassistant/components/yale/icons.json b/homeassistant/components/yale/icons.json new file mode 100644 index 00000000000..b654b6d912a --- /dev/null +++ b/homeassistant/components/yale/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "binary_sensor": { + "image_capture": { + "default": "mdi:file-image" + } + } + } +} diff --git a/homeassistant/components/yale/lock.py b/homeassistant/components/yale/lock.py new file mode 100644 index 00000000000..36d865bf527 --- /dev/null +++ b/homeassistant/components/yale/lock.py @@ -0,0 +1,147 @@ +"""Support for Yale lock.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +import logging +from typing import Any + +from aiohttp import ClientResponseError +from yalexs.activity import ActivityType, ActivityTypes +from yalexs.lock import Lock, LockStatus +from yalexs.util import get_latest_activity, update_lock_detail_from_activity + +from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature +from homeassistant.const import ATTR_BATTERY_LEVEL +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity +import homeassistant.util.dt as dt_util + +from . 
import YaleConfigEntry, YaleData +from .entity import YaleEntityMixin + +_LOGGER = logging.getLogger(__name__) + +LOCK_JAMMED_ERR = 531 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale locks.""" + data = config_entry.runtime_data + async_add_entities(YaleLock(data, lock) for lock in data.locks) + + +class YaleLock(YaleEntityMixin, RestoreEntity, LockEntity): + """Representation of an Yale lock.""" + + _attr_name = None + _lock_status: LockStatus | None = None + + def __init__(self, data: YaleData, device: Lock) -> None: + """Initialize the lock.""" + super().__init__(data, device, "lock") + if self._detail.unlatch_supported: + self._attr_supported_features = LockEntityFeature.OPEN + + async def async_lock(self, **kwargs: Any) -> None: + """Lock the device.""" + if self._data.push_updates_connected: + await self._data.async_lock_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_lock) + + async def async_open(self, **kwargs: Any) -> None: + """Open/unlatch the device.""" + if self._data.push_updates_connected: + await self._data.async_unlatch_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_unlatch) + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock the device.""" + if self._data.push_updates_connected: + await self._data.async_unlock_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_unlock) + + async def _call_lock_operation( + self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]] + ) -> None: + try: + activities = await lock_operation(self._device_id) + except ClientResponseError as err: + if err.status == LOCK_JAMMED_ERR: + self._detail.lock_status = LockStatus.JAMMED + self._detail.lock_status_datetime = dt_util.utcnow() + else: + raise + else: + for lock_activity in activities: + update_lock_detail_from_activity(self._detail, lock_activity) + + if self._update_lock_status_from_detail(): + _LOGGER.debug( + "async_signal_device_id_update (from lock operation): %s", + self._device_id, + ) + self._data.async_signal_device_id_update(self._device_id) + + def _update_lock_status_from_detail(self) -> bool: + self._attr_available = self._detail.bridge_is_online + + if self._lock_status != self._detail.lock_status: + self._lock_status = self._detail.lock_status + return True + return False + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + detail = self._detail + if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): + self._attr_changed_by = lock_activity.operated_by + lock_activity_without_operator = self._get_latest( + {ActivityType.LOCK_OPERATION_WITHOUT_OPERATOR} + ) + if latest_activity := get_latest_activity( + lock_activity_without_operator, lock_activity + ): + if latest_activity.was_pushed: + self._detail.set_online(True) + update_lock_detail_from_activity(detail, latest_activity) + + if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): + update_lock_detail_from_activity(detail, bridge_activity) + + self._update_lock_status_from_detail() + lock_status = self._lock_status + if lock_status is None or lock_status is LockStatus.UNKNOWN: + self._attr_is_locked = None + else: + self._attr_is_locked = lock_status is LockStatus.LOCKED + self._attr_is_jammed = lock_status is 
LockStatus.JAMMED + self._attr_is_locking = lock_status is LockStatus.LOCKING + self._attr_is_unlocking = lock_status in ( + LockStatus.UNLOCKING, + LockStatus.UNLATCHING, + ) + self._attr_extra_state_attributes = {ATTR_BATTERY_LEVEL: detail.battery_level} + if keypad := detail.keypad: + self._attr_extra_state_attributes["keypad_battery_level"] = ( + keypad.battery_level + ) + + async def async_added_to_hass(self) -> None: + """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" + await super().async_added_to_hass() + + if not (last_state := await self.async_get_last_state()): + return + + if ATTR_CHANGED_BY in last_state.attributes: + self._attr_changed_by = last_state.attributes[ATTR_CHANGED_BY] diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json new file mode 100644 index 00000000000..2dc84758610 --- /dev/null +++ b/homeassistant/components/yale/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "yale", + "name": "Yale", + "codeowners": ["@bdraco"], + "config_flow": true, + "dhcp": [ + { + "hostname": "yale-connect-plus", + "macaddress": "00177A*" + } + ], + "documentation": "https://www.home-assistant.io/integrations/yale", + "iot_class": "cloud_push", + "loggers": ["pubnub", "yalexs"], + "requirements": ["yalexs==8.5.4", "yalexs-ble==2.4.3"] +} diff --git a/homeassistant/components/yale/sensor.py b/homeassistant/components/yale/sensor.py new file mode 100644 index 00000000000..f1931c112cb --- /dev/null +++ b/homeassistant/components/yale/sensor.py @@ -0,0 +1,210 @@ +"""Support for Yale sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any, Generic, TypeVar, cast + +from yalexs.activity import ActivityType, LockOperationActivity +from yalexs.doorbell import Doorbell +from yalexs.keypad import KeypadDetail +from yalexs.lock import LockDetail + +from homeassistant.components.sensor import ( + RestoreSensor, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + ATTR_ENTITY_PICTURE, + PERCENTAGE, + STATE_UNAVAILABLE, + EntityCategory, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry +from .const import ( + ATTR_OPERATION_AUTORELOCK, + ATTR_OPERATION_KEYPAD, + ATTR_OPERATION_MANUAL, + ATTR_OPERATION_METHOD, + ATTR_OPERATION_REMOTE, + ATTR_OPERATION_TAG, + OPERATION_METHOD_AUTORELOCK, + OPERATION_METHOD_KEYPAD, + OPERATION_METHOD_MANUAL, + OPERATION_METHOD_MOBILE_DEVICE, + OPERATION_METHOD_REMOTE, + OPERATION_METHOD_TAG, +) +from .entity import YaleDescriptionEntity, YaleEntityMixin + + +def _retrieve_device_battery_state(detail: LockDetail) -> int: + """Get the latest state of the sensor.""" + return detail.battery_level + + +def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: + """Get the latest state of the sensor.""" + return detail.battery_percentage + + +_T = TypeVar("_T", LockDetail, KeypadDetail) + + +@dataclass(frozen=True, kw_only=True) +class YaleSensorEntityDescription(SensorEntityDescription, Generic[_T]): + """Mixin for required keys.""" + + value_fn: Callable[[_T], int | None] + + +SENSOR_TYPE_DEVICE_BATTERY = YaleSensorEntityDescription[LockDetail]( + key="device_battery", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=_retrieve_device_battery_state, +) + +SENSOR_TYPE_KEYPAD_BATTERY = YaleSensorEntityDescription[KeypadDetail]( + key="linked_keypad_battery", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=_retrieve_linked_keypad_battery_state, +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Yale sensors.""" + data = config_entry.runtime_data + entities: list[SensorEntity] = [] + + for device in data.locks: + detail = data.get_device_detail(device.device_id) + entities.append(YaleOperatorSensor(data, device, "lock_operator")) + if SENSOR_TYPE_DEVICE_BATTERY.value_fn(detail): + entities.append( + YaleBatterySensor[LockDetail](data, device, SENSOR_TYPE_DEVICE_BATTERY) + ) + if keypad := detail.keypad: + entities.append( + YaleBatterySensor[KeypadDetail]( + data, keypad, SENSOR_TYPE_KEYPAD_BATTERY + ) + ) + + entities.extend( + YaleBatterySensor[Doorbell](data, device, SENSOR_TYPE_DEVICE_BATTERY) + for device in data.doorbells + if SENSOR_TYPE_DEVICE_BATTERY.value_fn(data.get_device_detail(device.device_id)) + ) + + async_add_entities(entities) + + +class YaleOperatorSensor(YaleEntityMixin, RestoreSensor): + """Representation of an Yale lock operation sensor.""" + + _attr_translation_key = "operator" + _operated_remote: bool | None = None + _operated_keypad: bool | None = None + _operated_manual: bool | None = None + _operated_tag: bool | None = None + _operated_autorelock: bool | None = None + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + self._attr_available = True + if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): + lock_activity = cast(LockOperationActivity, lock_activity) + self._attr_native_value = lock_activity.operated_by + self._operated_remote = lock_activity.operated_remote + self._operated_keypad = lock_activity.operated_keypad + self._operated_manual = lock_activity.operated_manual + self._operated_tag = lock_activity.operated_tag + self._operated_autorelock = lock_activity.operated_autorelock + self._attr_entity_picture = lock_activity.operator_thumbnail_url + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the device specific state attributes.""" + attributes: 
dict[str, Any] = {} + + if self._operated_remote is not None: + attributes[ATTR_OPERATION_REMOTE] = self._operated_remote + if self._operated_keypad is not None: + attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad + if self._operated_manual is not None: + attributes[ATTR_OPERATION_MANUAL] = self._operated_manual + if self._operated_tag is not None: + attributes[ATTR_OPERATION_TAG] = self._operated_tag + if self._operated_autorelock is not None: + attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock + + if self._operated_remote: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE + elif self._operated_keypad: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD + elif self._operated_manual: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MANUAL + elif self._operated_tag: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_TAG + elif self._operated_autorelock: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK + else: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MOBILE_DEVICE + + return attributes + + async def async_added_to_hass(self) -> None: + """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" + await super().async_added_to_hass() + + last_state = await self.async_get_last_state() + last_sensor_state = await self.async_get_last_sensor_data() + if ( + not last_state + or not last_sensor_state + or last_state.state == STATE_UNAVAILABLE + ): + return + + self._attr_native_value = last_sensor_state.native_value + last_attrs = last_state.attributes + if ATTR_ENTITY_PICTURE in last_attrs: + self._attr_entity_picture = last_attrs[ATTR_ENTITY_PICTURE] + if ATTR_OPERATION_REMOTE in last_attrs: + self._operated_remote = last_attrs[ATTR_OPERATION_REMOTE] + if ATTR_OPERATION_KEYPAD in last_attrs: + self._operated_keypad = last_attrs[ATTR_OPERATION_KEYPAD] + if ATTR_OPERATION_MANUAL in last_attrs: + self._operated_manual = last_attrs[ATTR_OPERATION_MANUAL] + if ATTR_OPERATION_TAG in last_attrs: + self._operated_tag = last_attrs[ATTR_OPERATION_TAG] + if ATTR_OPERATION_AUTORELOCK in last_attrs: + self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] + + +class YaleBatterySensor(YaleDescriptionEntity, SensorEntity, Generic[_T]): + """Representation of an Yale sensor.""" + + entity_description: YaleSensorEntityDescription[_T] + _attr_device_class = SensorDeviceClass.BATTERY + _attr_native_unit_of_measurement = PERCENTAGE + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + self._attr_native_value = self.entity_description.value_fn(self._detail) + self._attr_available = self._attr_native_value is not None diff --git a/homeassistant/components/yale/strings.json b/homeassistant/components/yale/strings.json new file mode 100644 index 00000000000..3fb1345a3b0 --- /dev/null +++ b/homeassistant/components/yale/strings.json @@ -0,0 +1,71 @@ +{ + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": 
"[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_invalid_user": "Reauthenticate must use the same account." + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + }, + "entity": { + "binary_sensor": { + "ding": { + "name": "Doorbell ding" + }, + "image_capture": { + "name": "Image capture" + } + }, + "button": { + "wake": { + "name": "Wake" + } + }, + "camera": { + "camera": { + "name": "[%key:component::camera::title%]" + } + }, + "sensor": { + "operator": { + "name": "Operator" + } + }, + "event": { + "doorbell": { + "state_attributes": { + "event_type": { + "state": { + "ring": "Ring" + } + } + } + }, + "motion": { + "state_attributes": { + "event_type": { + "state": { + "motion": "Motion" + } + } + } + } + } + } +} diff --git a/homeassistant/components/yale/util.py b/homeassistant/components/yale/util.py new file mode 100644 index 00000000000..d8bdaab4a66 --- /dev/null +++ b/homeassistant/components/yale/util.py @@ -0,0 +1,83 @@ +"""Yale util functions.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from functools import partial + +import aiohttp +from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail +from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import aiohttp_client + +from . 
import YaleData
+
+TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds())
+
+
+@callback
+def async_create_yale_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession:
+    """Create an aiohttp session for the yale integration."""
+    # Create an aiohttp session instead of using the default one since the
+    # default one is likely to trigger yale's WAF if another integration
+    # is also using Cloudflare
+    return aiohttp_client.async_create_clientsession(hass)
+
+
+def retrieve_time_based_activity(
+    activities: set[ActivityType], data: YaleData, detail: DoorbellDetail | LockDetail
+) -> Activity | None:
+    """Return the latest matching activity within the detection window."""
+    stream = data.activity_stream
+    if latest := stream.get_latest_device_activity(detail.device_id, activities):
+        return _activity_time_based(latest)
+    return None
+
+
+_RING_ACTIVITIES = {ActivityType.DOORBELL_DING}
+
+
+def retrieve_ding_activity(
+    data: YaleData, detail: DoorbellDetail | LockDetail
+) -> Activity | None:
+    """Get the ring/ding state."""
+    stream = data.activity_stream
+    latest = stream.get_latest_device_activity(detail.device_id, _RING_ACTIVITIES)
+    if latest is None or (
+        data.push_updates_connected and latest.action == ACTION_DOORBELL_CALL_MISSED
+    ):
+        return None
+    return _activity_time_based(latest)
+
+
+retrieve_doorbell_motion_activity = partial(
+    retrieve_time_based_activity, {ActivityType.DOORBELL_MOTION}
+)
+
+
+def _activity_time_based(latest: Activity) -> Activity | None:
+    """Return the activity if it is still within the detection window."""
+    start = latest.activity_start_time
+    end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION
+    if start <= _native_datetime() <= end:
+        return latest
+    return None
+
+
+def _native_datetime() -> datetime:
+    """Return time in the format yale uses without timezone."""
+    return datetime.now()
+
+
+def retrieve_online_state(data: YaleData, detail: DoorbellDetail | LockDetail) -> bool:
+    """Return whether the device should be considered available."""
+    # The doorbell will go into standby mode when there is no motion
+    # for a short while. 
It will wake by itself when needed so we need + # to consider is available or we will not report motion or dings + if isinstance(detail, DoorbellDetail): + return detail.is_online or detail.is_standby + return detail.bridge_is_online diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index dc30f9d76f0..75fd489bad3 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -29,6 +29,7 @@ APPLICATION_CREDENTIALS = [ "twitch", "withings", "xbox", + "yale", "yolink", "youtube", ] diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index acf0439e842..afa906fd371 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -664,6 +664,7 @@ FLOWS = { "xiaomi_aqara", "xiaomi_ble", "xiaomi_miio", + "yale", "yale_smart_alarm", "yalexs_ble", "yamaha_musiccast", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index f6df799d01e..8f5964f1618 100644 --- a/homeassistant/generated/dhcp.py +++ b/homeassistant/generated/dhcp.py @@ -12,11 +12,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "airzone", "macaddress": "E84F25*", }, - { - "domain": "august", - "hostname": "yale-connect-plus", - "macaddress": "00177A*", - }, { "domain": "august", "hostname": "connect", @@ -1094,6 +1089,11 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "wiz", "hostname": "wiz_*", }, + { + "domain": "yale", + "hostname": "yale-connect-plus", + "macaddress": "00177A*", + }, { "domain": "yeelight", "hostname": "yeelink-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index d6ce9e0a3aa..e204170a06f 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -7007,6 +7007,12 @@ "config_flow": false, "supported_by": "august", "name": "Yale Home" + }, + "yale": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Yale" } } }, diff --git a/requirements_all.txt b/requirements_all.txt index 457fe3a836c..1660c94ac11 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2967,10 +2967,12 @@ xs1-api-client==3.0.0 yalesmartalarmclient==0.4.0 # homeassistant.components.august +# homeassistant.components.yale # homeassistant.components.yalexs_ble yalexs-ble==2.4.3 # homeassistant.components.august +# homeassistant.components.yale yalexs==8.5.4 # homeassistant.components.yeelight diff --git a/requirements_test_all.txt b/requirements_test_all.txt index eab788fabf1..e4cda97ea71 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2350,10 +2350,12 @@ xmltodict==0.13.0 yalesmartalarmclient==0.4.0 # homeassistant.components.august +# homeassistant.components.yale # homeassistant.components.yalexs_ble yalexs-ble==2.4.3 # homeassistant.components.august +# homeassistant.components.yale yalexs==8.5.4 # homeassistant.components.yeelight diff --git a/tests/components/yale/__init__.py b/tests/components/yale/__init__.py new file mode 100644 index 00000000000..f0604940686 --- /dev/null +++ b/tests/components/yale/__init__.py @@ -0,0 +1,12 @@ +"""Tests for the yale component.""" + +MOCK_CONFIG_ENTRY_DATA = { + "auth_implementation": "cloud", + "token": { + "access_token": "access_token", + "expires_in": 1, + "refresh_token": "refresh_token", + "expires_at": 2, + "service": "yale", + }, +} diff --git 
a/tests/components/yale/conftest.py b/tests/components/yale/conftest.py new file mode 100644 index 00000000000..c890087ad12 --- /dev/null +++ b/tests/components/yale/conftest.py @@ -0,0 +1,59 @@ +"""Yale tests conftest.""" + +from unittest.mock import patch + +import pytest +from yalexs.manager.ratelimit import _RateLimitChecker + +from homeassistant.components.yale.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .mocks import mock_client_credentials, mock_config_entry + +from tests.common import MockConfigEntry, load_fixture + + +@pytest.fixture(name="mock_discovery", autouse=True) +def mock_discovery_fixture(): + """Mock discovery to avoid loading the whole bluetooth stack.""" + with patch( + "homeassistant.components.yale.data.discovery_flow.async_create_flow" + ) as mock_discovery: + yield mock_discovery + + +@pytest.fixture(name="disable_ratelimit_checks", autouse=True) +def disable_ratelimit_checks_fixture(): + """Disable rate limit checks.""" + with patch.object(_RateLimitChecker, "register_wakeup"): + yield + + +@pytest.fixture(name="mock_config_entry") +def mock_config_entry_fixture(jwt: str) -> MockConfigEntry: + """Return the default mocked config entry.""" + return mock_config_entry(jwt=jwt) + + +@pytest.fixture(name="jwt") +def load_jwt_fixture() -> str: + """Load Fixture data.""" + return load_fixture("jwt", DOMAIN).strip("\n") + + +@pytest.fixture(name="reauth_jwt") +def load_reauth_jwt_fixture() -> str: + """Load Fixture data.""" + return load_fixture("reauth_jwt", DOMAIN).strip("\n") + + +@pytest.fixture(name="reauth_jwt_wrong_account") +def load_reauth_jwt_wrong_account_fixture() -> str: + """Load Fixture data.""" + return load_fixture("reauth_jwt_wrong_account", DOMAIN).strip("\n") + + +@pytest.fixture(name="client_credentials", autouse=True) +async def mock_client_credentials_fixture(hass: HomeAssistant) -> None: + """Mock client credentials.""" + await mock_client_credentials(hass) diff --git a/tests/components/yale/fixtures/get_activity.bridge_offline.json b/tests/components/yale/fixtures/get_activity.bridge_offline.json new file mode 100644 index 00000000000..9c2ded96665 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.bridge_offline.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "associated_bridge_offline", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.bridge_online.json b/tests/components/yale/fixtures/get_activity.bridge_online.json new file mode 100644 index 00000000000..6f8b5e6a4a6 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.bridge_online.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": 
"Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "associated_bridge_online", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.doorbell_motion.json b/tests/components/yale/fixtures/get_activity.doorbell_motion.json new file mode 100644 index 00000000000..cf0f231a49a --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.doorbell_motion.json @@ -0,0 +1,58 @@ +[ + { + "otherUser": { + "FirstName": "Unknown", + "UserName": "deleteduser", + "LastName": "User", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "dateTime": 1582663119959, + "deviceID": "K98GiDT45GUL", + "info": { + "videoUploadProgress": "in_progress", + "image": { + "resource_type": "image", + "etag": "fdsf", + "created_at": "2020-02-25T20:38:39Z", + "type": "upload", + "format": "jpg", + "version": 1582663119, + "secure_url": "https://res.cloudinary.com/updated_image.jpg", + "signature": "fdfdfd", + "url": "http://res.cloudinary.com/updated_image.jpg", + "bytes": 48545, + "placeholder": false, + "original_filename": "file", + "width": 720, + "tags": [], + "public_id": "xnsj5gphpzij9brifpf4", + "height": 576 + }, + "dvrID": "dvr", + "videoAvailable": false, + "hasSubscription": false + }, + "callingUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "house": { + "houseName": "K98GiDT45GUL", + "houseID": "na" + }, + "action": "doorbell_motion_detected", + "deviceType": "doorbell", + "entities": { + "otherUser": "deleted", + "house": "na", + "device": "K98GiDT45GUL", + "activity": "de5585cfd4eae900bb5ba3dc", + "callingUser": "deleted" + }, + "deviceName": "Front Door" + } +] diff --git a/tests/components/yale/fixtures/get_activity.jammed.json b/tests/components/yale/fixtures/get_activity.jammed.json new file mode 100644 index 00000000000..782a13f9c73 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.jammed.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "jammed", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock.json b/tests/components/yale/fixtures/get_activity.lock.json new file mode 100644 index 00000000000..b40e7d61ccf --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": 
"mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json new file mode 100644 index 00000000000..38c26ffb7dd --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "Relock", + "UserID": "automaticrelock", + "FirstName": "Auto" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json new file mode 100644 index 00000000000..bfbc621e064 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_keypad.json b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json new file mode 100644 index 00000000000..1b1e13e67dd --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json @@ -0,0 +1,37 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } 
+] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_manual.json b/tests/components/yale/fixtures/get_activity.lock_from_manual.json new file mode 100644 index 00000000000..e2fc195cfda --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_manual.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": true, + "tag": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.locking.json b/tests/components/yale/fixtures/get_activity.locking.json new file mode 100644 index 00000000000..ad2df6f7e91 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.locking.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "locking", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_manual.json b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json new file mode 100644 index 00000000000..e8bf95818ce --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": true, + "tag": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_tag.json b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json new file mode 100644 index 00000000000..57876428677 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + 
"callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": false, + "tag": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlocking.json b/tests/components/yale/fixtures/get_activity.unlocking.json new file mode 100644 index 00000000000..0fbd0be3eb8 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlocking.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlocking", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_doorbell.json b/tests/components/yale/fixtures/get_doorbell.json new file mode 100644 index 00000000000..32714211618 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.json @@ -0,0 +1,81 @@ +{ + "status_timestamp": 1512811834532, + "appID": "august-iphone", + "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", + "recentImage": { + "original_filename": "file", + "placeholder": false, + "bytes": 24476, + "height": 640, + "format": "jpg", + "width": 480, + "version": 1512892814, + "resource_type": "image", + "etag": "54966926be2e93f77d498a55f247661f", + "tags": [], + "public_id": "qqqqt4ctmxwsysylaaaa", + "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", + "created_at": "2017-12-10T08:01:35Z", + "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", + "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", + "type": "upload" + }, + "settings": { + "keepEncoderRunning": true, + "videoResolution": "640x480", + "minACNoScaling": 40, + "irConfiguration": 8448272, + "directLink": true, + "overlayEnabled": true, + "notify_when_offline": true, + "micVolume": 100, + "bitrateCeiling": 512000, + "initialBitrate": 384000, + "IVAEnabled": false, + "turnOffCamera": false, + "ringSoundEnabled": true, + "JPGQuality": 70, + "motion_notifications": true, + "speakerVolume": 92, + "buttonpush_notifications": true, + "ABREnabled": true, + "debug": false, + "batteryLowThreshold": 3.1, + "batteryRun": false, + "IREnabled": true, + "batteryUseThreshold": 3.4 + }, + "doorbellServerURL": "https://doorbells.august.com", + "name": "Front Door", + "createdAt": "2016-11-26T22:27:11.176Z", + "installDate": "2016-11-26T22:27:11.176Z", + "serialNumber": "tBXZR0Z35E", + "dvrSubscriptionSetupDone": true, + "caps": ["reconnect"], + "doorbellID": "K98GiDT45GUL", + "HouseID": "mockhouseid1", + "telemetry": { + "signal_level": -56, + "date": 
"2017-12-10 08:05:12", + "battery_soc": 96, + "battery": 4.061763, + "steady_ac_in": 22.196405, + "BSSID": "88:ee:00:dd:aa:11", + "SSID": "foo_ssid", + "updated_at": "2017-12-10T08:05:13.650Z", + "temperature": 28.25, + "wifi_freq": 5745, + "load_average": "0.50 0.47 0.35 1/154 9345", + "link_quality": 54, + "battery_soh": 95, + "uptime": "16168.75 13830.49", + "ip_addr": "10.0.1.11", + "doorbell_low_battery": false, + "ac_in": 23.856874 + }, + "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", + "status": "doorbell_call_status_online", + "firmwareVersion": "2.3.0-RC153+201711151527", + "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", + "updatedAt": "2017-12-10T08:05:13.650Z" +} diff --git a/tests/components/yale/fixtures/get_doorbell.nobattery.json b/tests/components/yale/fixtures/get_doorbell.nobattery.json new file mode 100644 index 00000000000..2a7f1e2d3b2 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.nobattery.json @@ -0,0 +1,78 @@ +{ + "status_timestamp": 1512811834532, + "appID": "august-iphone", + "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", + "recentImage": { + "original_filename": "file", + "placeholder": false, + "bytes": 24476, + "height": 640, + "format": "jpg", + "width": 480, + "version": 1512892814, + "resource_type": "image", + "etag": "54966926be2e93f77d498a55f247661f", + "tags": [], + "public_id": "qqqqt4ctmxwsysylaaaa", + "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", + "created_at": "2017-12-10T08:01:35Z", + "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", + "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", + "type": "upload" + }, + "settings": { + "keepEncoderRunning": true, + "videoResolution": "640x480", + "minACNoScaling": 40, + "irConfiguration": 8448272, + "directLink": true, + "overlayEnabled": true, + "notify_when_offline": true, + "micVolume": 100, + "bitrateCeiling": 512000, + "initialBitrate": 384000, + "IVAEnabled": false, + "turnOffCamera": false, + "ringSoundEnabled": true, + "JPGQuality": 70, + "motion_notifications": true, + "speakerVolume": 92, + "buttonpush_notifications": true, + "ABREnabled": true, + "debug": false, + "batteryLowThreshold": 3.1, + "batteryRun": false, + "IREnabled": true, + "batteryUseThreshold": 3.4 + }, + "doorbellServerURL": "https://doorbells.august.com", + "name": "Front Door", + "createdAt": "2016-11-26T22:27:11.176Z", + "installDate": "2016-11-26T22:27:11.176Z", + "serialNumber": "tBXZR0Z35E", + "dvrSubscriptionSetupDone": true, + "caps": ["reconnect"], + "doorbellID": "K98GiDT45GUL", + "HouseID": "3dd2accaea08", + "telemetry": { + "signal_level": -56, + "date": "2017-12-10 08:05:12", + "steady_ac_in": 22.196405, + "BSSID": "88:ee:00:dd:aa:11", + "SSID": "foo_ssid", + "updated_at": "2017-12-10T08:05:13.650Z", + "temperature": 28.25, + "wifi_freq": 5745, + "load_average": "0.50 0.47 0.35 1/154 9345", + "link_quality": 54, + "uptime": "16168.75 13830.49", + "ip_addr": "10.0.1.11", + "doorbell_low_battery": false, + "ac_in": 23.856874 + }, + "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", + "status": "doorbell_call_status_online", + "firmwareVersion": "2.3.0-RC153+201711151527", + "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", + "updatedAt": "2017-12-10T08:05:13.650Z" +} diff --git a/tests/components/yale/fixtures/get_doorbell.offline.json b/tests/components/yale/fixtures/get_doorbell.offline.json new file mode 100644 index 00000000000..13a8483c995 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.offline.json @@ -0,0 +1,126 @@ +{ + 
"recentImage": { + "tags": [], + "height": 576, + "public_id": "fdsfds", + "bytes": 50013, + "resource_type": "image", + "original_filename": "file", + "version": 1582242766, + "format": "jpg", + "signature": "fdsfdsf", + "created_at": "2020-02-20T23:52:46Z", + "type": "upload", + "placeholder": false, + "url": "http://res.cloudinary.com/august-com/image/upload/ccc/ccccc.jpg", + "secure_url": "https://res.cloudinary.com/august-com/image/upload/cc/cccc.jpg", + "etag": "zds", + "width": 720 + }, + "firmwareVersion": "3.1.0-HYDRC75+201909251139", + "doorbellServerURL": "https://doorbells.august.com", + "installUserID": "mock", + "caps": ["reconnect", "webrtc", "tcp_wakeup"], + "messagingProtocol": "pubnub", + "createdAt": "2020-02-12T03:52:28.719Z", + "invitations": [], + "appID": "august-iphone-v5", + "HouseID": "houseid1", + "doorbellID": "tmt100", + "name": "Front Door", + "settings": { + "batteryUseThreshold": 3.4, + "brightness": 50, + "batteryChargeCurrent": 60, + "overCurrentThreshold": -250, + "irLedBrightness": 40, + "videoResolution": "720x576", + "pirPulseCounter": 1, + "contrast": 50, + "micVolume": 50, + "directLink": true, + "auto_contrast_mode": 0, + "saturation": 50, + "motion_notifications": true, + "pirSensitivity": 20, + "pirBlindTime": 7, + "notify_when_offline": false, + "nightModeAlsThreshold": 10, + "minACNoScaling": 40, + "DVRRecordingTimeout": 15, + "turnOffCamera": false, + "debug": false, + "keepEncoderRunning": true, + "pirWindowTime": 0, + "bitrateCeiling": 2000000, + "backlight_comp": false, + "buttonpush_notifications": true, + "buttonpush_notifications_partners": false, + "minimumSnapshotInterval": 30, + "pirConfiguration": 272, + "batteryLowThreshold": 3.1, + "sharpness": 50, + "ABREnabled": true, + "hue": 50, + "initialBitrate": 1000000, + "ringSoundEnabled": true, + "IVAEnabled": false, + "overlayEnabled": true, + "speakerVolume": 92, + "ringRepetitions": 3, + "powerProfilePreset": -1, + "irConfiguration": 16836880, + "JPGQuality": 70, + "IREnabled": true + }, + "updatedAt": "2020-02-20T23:58:21.580Z", + "serialNumber": "abc", + "installDate": "2019-02-12T03:52:28.719Z", + "dvrSubscriptionSetupDone": true, + "pubsubChannel": "mock", + "chimes": [ + { + "updatedAt": "2020-02-12T03:55:38.805Z", + "_id": "cccc", + "type": 1, + "serialNumber": "ccccc", + "doorbellID": "tmt100", + "name": "Living Room", + "chimeID": "cccc", + "createdAt": "2020-02-12T03:55:38.805Z", + "firmware": "3.1.16" + } + ], + "telemetry": { + "battery": 3.985, + "battery_soc": 81, + "load_average": "0.45 0.18 0.07 4/98 831", + "ip_addr": "192.168.100.174", + "BSSID": "snp", + "uptime": "96.55 70.59", + "SSID": "bob", + "updated_at": "2020-02-20T23:53:09.586Z", + "dtim_period": 0, + "wifi_freq": 2462, + "date": "2020-02-20 11:47:36", + "BSSIDManufacturer": "Ubiquiti - Ubiquiti Networks Inc.", + "battery_temp": 22, + "battery_avg_cur": -291, + "beacon_interval": 0, + "signal_level": -49, + "battery_soh": 95, + "doorbell_low_battery": false + }, + "secChipCertSerial": "", + "tcpKeepAlive": { + "keepAliveUUID": "mock", + "wakeUp": { + "token": "wakemeup", + "lastUpdated": 1582242723931 + } + }, + "statusUpdatedAtMs": 1582243101579, + "status": "doorbell_offline", + "type": "hydra1", + "HouseName": "housename" +} diff --git a/tests/components/yale/fixtures/get_lock.doorsense_init.json b/tests/components/yale/fixtures/get_lock.doorsense_init.json new file mode 100644 index 00000000000..1132cc61a8d --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.doorsense_init.json @@ -0,0 +1,92 @@ +{ 
+ "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "init", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": false, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.low_keypad_battery.json b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json new file mode 100644 index 00000000000..43b5513a527 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": 
"2.27.0", + "battery": {}, + "batteryLevel": "Low", + "batteryRaw": 128 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.offline.json b/tests/components/yale/fixtures/get_lock.offline.json new file mode 100644 index 00000000000..50d3d345ef8 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.offline.json @@ -0,0 +1,57 @@ +{ + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "houseid", + "HouseName": "MockName", + "LockID": "ABC", + "LockName": "Test", + "LockStatus": { + "status": "unknown" + }, + "OfflineKeys": { + "created": [], + "createdhk": [ + { + "UserID": "mock-user-id", + "created": "2000-00-00T00:00:00.447Z", + "key": "mockkey", + "slot": 12 + } + ], + "deleted": [], + "loaded": [] + }, + "SerialNumber": "ABC", + "Type": 3, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": -1, + "cameras": [], + "currentFirmwareVersion": "undefined-1.59.0-1.13.2", + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minGPSAccuracyRequired": 80, + "minimumGeofence": 100 + } + }, + "homeKitEnabled": false, + "isGalileo": false, + "macAddress": "a:b:c", + "parametersToSet": {}, + "pubsubChannel": "mockpubsub", + "ruleHash": {}, + "skuNumber": "AUG-X", + "supportsEntryCodes": false, + "users": { + "mockuserid": { + "FirstName": "MockName", + "LastName": "House", + "UserType": "superuser", + "identifiers": ["phone:+15558675309", "email:mockme@mock.org"] + } + }, + "zWaveDSK": "1-2-3-4", + "zWaveEnabled": true +} diff --git a/tests/components/yale/fixtures/get_lock.online.json b/tests/components/yale/fixtures/get_lock.online.json new file mode 100644 index 00000000000..7abadeef4b6 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + 
"homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online.unknown_state.json b/tests/components/yale/fixtures/get_lock.online.unknown_state.json new file mode 100644 index 00000000000..abc8b40a132 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online.unknown_state.json @@ -0,0 +1,59 @@ +{ + "LockName": "Side Door", + "Type": 1001, + "Created": "2019-10-07T01:49:06.831Z", + "Updated": "2019-10-07T01:49:06.831Z", + "LockID": "BROKENID", + "HouseID": "abc", + "HouseName": "dog", + "Calibrated": false, + "timeZone": "America/Chicago", + "battery": 0.9524716174964851, + "hostLockInfo": { + "serialNumber": "YR", + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770 + }, + "supportsEntryCodes": true, + "skuNumber": "AUG-MD01", + "macAddress": "MAC", + "SerialNumber": "M1FXZ00EZ9", + "LockStatus": { + "status": "unknown_error_during_connect", + "dateTime": "2020-02-22T02:48:11.741Z", + "isLockStatusChanged": true, + "valid": true, + "doorState": "closed" + }, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "id", + "mfgBridgeID": "id", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "operative": true, + "status": { + "current": "online", + "updated": "2020-02-21T15:06:47.001Z", + "lastOnline": "2020-02-21T15:06:47.001Z", + "lastOffline": "2020-02-06T17:33:21.265Z" + }, + "hyperBridge": true + }, + "parametersToSet": {}, + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json new file mode 100644 index 
00000000000..84822df9b89 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json @@ -0,0 +1,50 @@ +{ + "Bridge": { + "_id": "bridgeid", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "hyperBridge": true, + "mfgBridgeID": "C5WY200WSH", + "operative": true, + "status": { + "current": "online", + "lastOffline": "2000-00-00T00:00:00.447Z", + "lastOnline": "2000-00-00T00:00:00.447Z", + "updated": "2000-00-00T00:00:00.447Z" + } + }, + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "123", + "HouseName": "Test", + "LockID": "missing_doorsense_id", + "LockName": "Online door missing doorsense", + "LockStatus": { + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": false, + "status": "locked", + "valid": true + }, + "SerialNumber": "XY", + "Type": 1001, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": 0.922, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "hostLockInfo": { + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770, + "serialNumber": "ABC" + }, + "isGalileo": false, + "macAddress": "12:22", + "pins": { + "created": [], + "loaded": [] + }, + "skuNumber": "AUG-MD01", + "supportsEntryCodes": true, + "timeZone": "Pacific/Hawaii", + "zWaveEnabled": false +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_doorsense.json b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json new file mode 100644 index 00000000000..d9b413708ca --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json @@ -0,0 +1,52 @@ +{ + "Bridge": { + "_id": "bridgeid", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "hyperBridge": true, + "mfgBridgeID": "C5WY200WSH", + "operative": true, + "status": { + "current": "online", + "lastOffline": "2000-00-00T00:00:00.447Z", + "lastOnline": "2000-00-00T00:00:00.447Z", + "updated": "2000-00-00T00:00:00.447Z" + } + }, + "pubsubChannel": "pubsub", + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "mockhouseid1", + "HouseName": "Test", + "LockID": "online_with_doorsense", + "LockName": "Online door with doorsense", + "LockStatus": { + "dateTime": "2017-12-10T04:48:30.272Z", + "doorState": "open", + "isLockStatusChanged": false, + "status": "locked", + "valid": true + }, + "SerialNumber": "XY", + "Type": 1001, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": 0.922, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "hostLockInfo": { + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770, + "serialNumber": "ABC" + }, + "isGalileo": false, + "macAddress": "12:22", + "pins": { + "created": [], + "loaded": [] + }, + "skuNumber": "AUG-MD01", + "supportsEntryCodes": true, + "timeZone": "Pacific/Hawaii", + "zWaveEnabled": false +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_keys.json b/tests/components/yale/fixtures/get_lock.online_with_keys.json new file mode 100644 index 00000000000..4efcba44d09 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_keys.json @@ -0,0 +1,100 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8064", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + 
"LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a9", + "serialNumber": "K1GXB0054L", + "lockID": "92412D1B44004595B5DEB134E151A8D4", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "slot": 1, + "key": "kkk01d4300c1dcxxx1c330f794941111", + "created": "2017-12-10T03:12:09.215Z", + "loaded": "2017-12-10T03:12:54.391Z" + } + ], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_unlatch.json b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json new file mode 100644 index 00000000000..288ab1a2f28 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json @@ -0,0 +1,94 @@ +{ + "LockName": "Lock online with unlatch supported", + "Type": 17, + "Created": "2024-03-14T18:03:09.003Z", + "Updated": "2024-03-14T18:03:09.003Z", + "LockID": "online_with_unlatch", + "HouseID": "mockhouseid1", + "HouseName": "Zuhause", + "Calibrated": false, + "timeZone": "Europe/Berlin", + "battery": 0.61, + "batteryInfo": { + "level": 0.61, + "warningState": "lock_state_battery_warning_none", + "infoUpdatedDate": "2024-04-30T17:55:09.045Z", + "lastChangeDate": "2024-03-15T07:04:00.000Z", + "lastChangeVoltage": 8350, + "state": "Mittel", + "icon": "https://app-resources.aaecosystem.com/images/lock_battery_state_medium.png" + }, + "hostHardwareID": "xxx", + "supportsEntryCodes": true, + "remoteOperateSecret": "xxxx", + "skuNumber": "NONE", + "macAddress": "DE:AD:BE:00:00:00", + "SerialNumber": "LPOC000000", + "LockStatus": { + "status": "locked", + "dateTime": "2024-04-30T18:41:25.673Z", + "isLockStatusChanged": false, + "valid": true, + "doorState": "init" + }, + "currentFirmwareVersion": "1.0.4", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "65f33445529187c78a100000", + "mfgBridgeID": "LPOCH0004Y", + "deviceModel": 
"august-lock", + "firmwareVersion": "1.0.4", + "operative": true, + "status": { + "current": "online", + "lastOnline": "2024-04-30T18:41:27.971Z", + "updated": "2024-04-30T18:41:27.971Z", + "lastOffline": "2024-04-25T14:41:40.118Z" + }, + "locks": [ + { + "_id": "656858c182e6c7c555faf758", + "LockID": "68895DD075A1444FAD4C00B273EEEF28", + "macAddress": "DE:AD:BE:EF:0B:BC" + } + ], + "hyperBridge": true + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "created": "2024-03-14T18:03:09.034Z", + "key": "055281d4aa9bd7b68c7b7bb78e2f34ca", + "slot": 1, + "UserID": "b4b44424-0000-0000-0000-25c224dad337", + "loaded": "2024-03-14T18:03:33.470Z" + } + ], + "deleted": [] + }, + "parametersToSet": {}, + "users": { + "b4b44424-0000-0000-0000-25c224dad337": { + "UserType": "superuser", + "FirstName": "m10x", + "LastName": "m10x", + "identifiers": ["phone:+494444444", "email:m10x@example.com"] + } + }, + "pubsubChannel": "pubsub", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + }, + "accessSchedulesAllowed": true +} diff --git a/tests/components/yale/fixtures/get_locks.json b/tests/components/yale/fixtures/get_locks.json new file mode 100644 index 00000000000..3fab55f82c9 --- /dev/null +++ b/tests/components/yale/fixtures/get_locks.json @@ -0,0 +1,16 @@ +{ + "A6697750D607098BAE8D6BAA11EF8063": { + "LockName": "Front Door Lock", + "UserType": "superuser", + "macAddress": "2E:BA:C4:14:3F:09", + "HouseID": "000000000000", + "HouseName": "A House" + }, + "A6697750D607098BAE8D6BAA11EF9999": { + "LockName": "Back Door Lock", + "UserType": "user", + "macAddress": "2E:BA:C4:14:3F:88", + "HouseID": "000000000011", + "HouseName": "A House" + } +} diff --git a/tests/components/yale/fixtures/jwt b/tests/components/yale/fixtures/jwt new file mode 100644 index 00000000000..d64f31b9bb2 --- /dev/null +++ b/tests/components/yale/fixtures/jwt @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.BdRo-dEr-osbDQGB2XzlI-mIj4gqULtapODt-sj-eA8 diff --git a/tests/components/yale/fixtures/lock_open.json b/tests/components/yale/fixtures/lock_open.json new file mode 100644 index 00000000000..b6cfe3c90fc --- /dev/null +++ b/tests/components/yale/fixtures/lock_open.json @@ -0,0 +1,26 @@ +{ + "status": "kAugLockState_Locked", + "resultsFromOperationCache": false, + "retryCount": 1, + "info": { + "wlanRSSI": -54, + "lockType": "lock_version_1001", + "lockStatusChanged": false, + "serialNumber": "ABC", + "serial": "123", + "action": "lock", + "context": { + "startDate": "2020-02-19T01:59:39.516Z", + "retryCount": 1, + "transactionID": "mock" + }, + 
"bridgeID": "mock", + "wlanSNR": 41, + "startTime": "2020-02-19T01:59:39.517Z", + "duration": 5149, + "lockID": "ABC", + "rssi": -77 + }, + "totalTime": 5162, + "doorState": "kAugDoorState_Open" +} diff --git a/tests/components/yale/fixtures/lock_with_doorbell.online.json b/tests/components/yale/fixtures/lock_with_doorbell.online.json new file mode 100644 index 00000000000..bb2367d1111 --- /dev/null +++ b/tests/components/yale/fixtures/lock_with_doorbell.online.json @@ -0,0 +1,100 @@ +{ + "LockName": "Front Door Lock", + "Type": 7, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "slot": 1, + "key": "kkk01d4300c1dcxxx1c330f794941111", + "created": "2017-12-10T03:12:09.215Z", + "loaded": "2017-12-10T03:12:54.391Z" + } + ], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/reauth_jwt b/tests/components/yale/fixtures/reauth_jwt new file mode 100644 index 00000000000..4db8d061b68 --- /dev/null +++ b/tests/components/yale/fixtures/reauth_jwt @@ -0,0 +1 @@ 
+eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjI3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.DtkHscsvbTE-SyKW3RxwXFQIKMf0xJwfPZN1X3JesqA diff --git a/tests/components/yale/fixtures/reauth_jwt_wrong_account b/tests/components/yale/fixtures/reauth_jwt_wrong_account new file mode 100644 index 00000000000..b0b62438178 --- /dev/null +++ b/tests/components/yale/fixtures/reauth_jwt_wrong_account @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6IjQ0NDQ0NDQ0LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.PenDp4JUIBQZEx2BFxaCqV1-6yMuUPtmnB6jq1wpoX8 diff --git a/tests/components/yale/fixtures/unlock_closed.json b/tests/components/yale/fixtures/unlock_closed.json new file mode 100644 index 00000000000..f676c005a17 --- /dev/null +++ b/tests/components/yale/fixtures/unlock_closed.json @@ -0,0 +1,26 @@ +{ + "status": "kAugLockState_Unlocked", + "resultsFromOperationCache": false, + "retryCount": 1, + "info": { + "wlanRSSI": -54, + "lockType": "lock_version_1001", + "lockStatusChanged": false, + "serialNumber": "ABC", + "serial": "123", + "action": "lock", + "context": { + "startDate": "2020-02-19T01:59:39.516Z", + "retryCount": 1, + "transactionID": "mock" + }, + "bridgeID": "mock", + "wlanSNR": 41, + "startTime": "2020-02-19T01:59:39.517Z", + "duration": 5149, + "lockID": "ABC", + "rssi": -77 + }, + "totalTime": 5162, + "doorState": "kAugDoorState_Closed" +} diff --git a/tests/components/yale/mocks.py b/tests/components/yale/mocks.py new file mode 100644 index 00000000000..03ab3609002 --- /dev/null +++ b/tests/components/yale/mocks.py @@ -0,0 +1,515 @@ +"""Mocks for the yale component.""" + +from __future__ import annotations + +from collections.abc import Iterable +from contextlib import contextmanager +import json +import os +import time +from typing import Any +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +from yalexs.activity import ( + ACTIVITY_ACTIONS_BRIDGE_OPERATION, + ACTIVITY_ACTIONS_DOOR_OPERATION, + ACTIVITY_ACTIONS_DOORBELL_DING, + 
ACTIVITY_ACTIONS_DOORBELL_MOTION, + ACTIVITY_ACTIONS_DOORBELL_VIEW, + ACTIVITY_ACTIONS_LOCK_OPERATION, + SOURCE_LOCK_OPERATE, + SOURCE_LOG, + Activity, + BridgeOperationActivity, + DoorbellDingActivity, + DoorbellMotionActivity, + DoorbellViewActivity, + DoorOperationActivity, + LockOperationActivity, +) +from yalexs.api_async import ApiAsync +from yalexs.authenticator_common import Authentication, AuthenticationState +from yalexs.const import Brand +from yalexs.doorbell import Doorbell, DoorbellDetail +from yalexs.lock import Lock, LockDetail +from yalexs.manager.ratelimit import _RateLimitChecker +from yalexs.manager.socketio import SocketIORunner + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, load_fixture + +USER_ID = "a76c25e5-49aa-4c14-cd0c-48a6931e2081" + + +def _mock_get_config( + brand: Brand = Brand.YALE_GLOBAL, jwt: str | None = None +) -> dict[str, Any]: + """Return a default yale config.""" + return { + DOMAIN: { + "auth_implementation": "yale", + "token": { + "access_token": jwt or "access_token", + "expires_in": 1, + "refresh_token": "refresh_token", + "expires_at": time.time() + 3600, + "service": "yale", + }, + } + } + + +def _mock_authenticator(auth_state: AuthenticationState) -> Authentication: + """Mock a yale authenticator.""" + authenticator = MagicMock() + type(authenticator).state = PropertyMock(return_value=auth_state) + return authenticator + + +def _timetoken() -> str: + return str(time.time_ns())[:-2] + + +async def mock_yale_config_entry( + hass: HomeAssistant, +) -> MockConfigEntry: + """Mock yale config entry and client credentials.""" + entry = mock_config_entry() + entry.add_to_hass(hass) + return entry + + +def mock_config_entry(jwt: str | None = None) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=_mock_get_config(jwt=jwt)[DOMAIN], + options={}, + unique_id=USER_ID, + ) + + +async def mock_client_credentials(hass: HomeAssistant) -> ClientCredential: + """Mock client credentials.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("1", "2"), + DOMAIN, + ) + + +@contextmanager +def patch_yale_setup(): + """Patch yale setup process.""" + with ( + patch("yalexs.manager.gateway.ApiAsync") as api_mock, + patch.object(_RateLimitChecker, "register_wakeup") as authenticate_mock, + patch("yalexs.manager.data.SocketIORunner") as socketio_mock, + patch.object(socketio_mock, "run"), + patch( + "homeassistant.components.yale.config_entry_oauth2_flow.async_get_config_entry_implementation" + ), + ): + yield api_mock, authenticate_mock, socketio_mock + + +async def _mock_setup_yale( + hass: HomeAssistant, + api_instance: ApiAsync, + socketio_mock: SocketIORunner, + authenticate_side_effect: MagicMock, +) -> ConfigEntry: + """Set up yale integration.""" + entry = await mock_yale_config_entry(hass) + with patch_yale_setup() as patched_setup: + api_mock, authenticate_mock, sockio_mock_ = patched_setup + authenticate_mock.side_effect = authenticate_side_effect + sockio_mock_.return_value = socketio_mock + api_mock.return_value = api_instance + await
hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry + + +async def _create_yale_with_devices( + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail] | None = None, + api_call_side_effects: dict[str, Any] | None = None, + activities: list[Any] | None = None, + brand: Brand = Brand.YALE_GLOBAL, + authenticate_side_effect: MagicMock | None = None, +) -> tuple[ConfigEntry, SocketIORunner]: + entry, _, socketio = await _create_yale_api_with_devices( + hass, + devices, + api_call_side_effects, + activities, + brand, + authenticate_side_effect, + ) + return entry, socketio + + +async def _create_yale_api_with_devices( + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail] | None = None, + api_call_side_effects: dict[str, Any] | None = None, + activities: dict[str, Any] | None = None, + brand: Brand = Brand.YALE_GLOBAL, + authenticate_side_effect: MagicMock | None = None, +) -> tuple[ConfigEntry, ApiAsync, SocketIORunner]: + if api_call_side_effects is None: + api_call_side_effects = {} + if devices is None: + devices = () + + update_api_call_side_effects(api_call_side_effects, devices, activities) + + api_instance = await make_mock_api(api_call_side_effects, brand) + socketio = SocketIORunner( + MagicMock( + api=api_instance, async_get_access_token=AsyncMock(return_value="token") + ) + ) + socketio.run = AsyncMock() + + entry = await _mock_setup_yale( + hass, + api_instance, + socketio, + authenticate_side_effect=authenticate_side_effect, + ) + + return entry, api_instance, socketio + + +def update_api_call_side_effects( + api_call_side_effects: dict[str, Any], + devices: Iterable[LockDetail | DoorbellDetail], + activities: dict[str, Any] | None = None, +) -> None: + """Update side effects dict from devices and activities.""" + + device_data = {"doorbells": [], "locks": []} + for device in devices or (): + if isinstance(device, LockDetail): + device_data["locks"].append( + {"base": _mock_yale_lock(device.device_id), "detail": device} + ) + elif isinstance(device, DoorbellDetail): + device_data["doorbells"].append( + { + "base": _mock_yale_doorbell( + deviceid=device.device_id, + brand=device._data.get("brand", Brand.YALE_GLOBAL), + ), + "detail": device, + } + ) + else: + raise ValueError # noqa: TRY004 + + def _get_device_detail(device_type, device_id): + for device in device_data[device_type]: + if device["detail"].device_id == device_id: + return device["detail"] + raise ValueError + + def _get_base_devices(device_type): + return [device["base"] for device in device_data[device_type]] + + def get_lock_detail_side_effect(access_token, device_id): + return _get_device_detail("locks", device_id) + + def get_doorbell_detail_side_effect(access_token, device_id): + return _get_device_detail("doorbells", device_id) + + def get_operable_locks_side_effect(access_token): + return _get_base_devices("locks") + + def get_doorbells_side_effect(access_token): + return _get_base_devices("doorbells") + + def get_house_activities_side_effect(access_token, house_id, limit=10): + if activities is not None: + return activities + return [] + + def lock_return_activities_side_effect(access_token, device_id): + lock = _get_device_detail("locks", device_id) + return [ + # There is a check to prevent out of order events + # so we set the doorclosed & lock event in the future + # to prevent a race condition where we reject the event + # because it happened before the dooropen & unlock event. 
+ _mock_lock_operation_activity(lock, "lock", 2000), + _mock_door_operation_activity(lock, "doorclosed", 2000), + ] + + def unlock_return_activities_side_effect(access_token, device_id): + lock = _get_device_detail("locks", device_id) + return [ + _mock_lock_operation_activity(lock, "unlock", 0), + _mock_door_operation_activity(lock, "dooropen", 0), + ] + + api_call_side_effects.setdefault("get_lock_detail", get_lock_detail_side_effect) + api_call_side_effects.setdefault( + "get_doorbell_detail", get_doorbell_detail_side_effect + ) + api_call_side_effects.setdefault( + "get_operable_locks", get_operable_locks_side_effect + ) + api_call_side_effects.setdefault("get_doorbells", get_doorbells_side_effect) + api_call_side_effects.setdefault( + "get_house_activities", get_house_activities_side_effect + ) + api_call_side_effects.setdefault( + "lock_return_activities", lock_return_activities_side_effect + ) + api_call_side_effects.setdefault( + "unlock_return_activities", unlock_return_activities_side_effect + ) + api_call_side_effects.setdefault( + "async_unlatch_return_activities", unlock_return_activities_side_effect + ) + + +async def make_mock_api( + api_call_side_effects: dict[str, Any], + brand: Brand = Brand.YALE_GLOBAL, +) -> ApiAsync: + """Make a mock ApiAsync instance.""" + api_instance = MagicMock(name="Api", brand=brand) + + if api_call_side_effects["get_lock_detail"]: + type(api_instance).async_get_lock_detail = AsyncMock( + side_effect=api_call_side_effects["get_lock_detail"] + ) + + if api_call_side_effects["get_operable_locks"]: + type(api_instance).async_get_operable_locks = AsyncMock( + side_effect=api_call_side_effects["get_operable_locks"] + ) + + if api_call_side_effects["get_doorbells"]: + type(api_instance).async_get_doorbells = AsyncMock( + side_effect=api_call_side_effects["get_doorbells"] + ) + + if api_call_side_effects["get_doorbell_detail"]: + type(api_instance).async_get_doorbell_detail = AsyncMock( + side_effect=api_call_side_effects["get_doorbell_detail"] + ) + + if api_call_side_effects["get_house_activities"]: + type(api_instance).async_get_house_activities = AsyncMock( + side_effect=api_call_side_effects["get_house_activities"] + ) + + if api_call_side_effects["lock_return_activities"]: + type(api_instance).async_lock_return_activities = AsyncMock( + side_effect=api_call_side_effects["lock_return_activities"] + ) + + if api_call_side_effects["unlock_return_activities"]: + type(api_instance).async_unlock_return_activities = AsyncMock( + side_effect=api_call_side_effects["unlock_return_activities"] + ) + + if api_call_side_effects["async_unlatch_return_activities"]: + type(api_instance).async_unlatch_return_activities = AsyncMock( + side_effect=api_call_side_effects["async_unlatch_return_activities"] + ) + + api_instance.async_unlock_async = AsyncMock() + api_instance.async_lock_async = AsyncMock() + api_instance.async_status_async = AsyncMock() + api_instance.async_get_user = AsyncMock(return_value={"UserID": "abc"}) + api_instance.async_unlatch_async = AsyncMock() + api_instance.async_unlatch = AsyncMock() + api_instance.async_add_websocket_subscription = AsyncMock() + + return api_instance + + +def _mock_yale_authentication( + token_text: str, token_timestamp: float, state: AuthenticationState +) -> Authentication: + authentication = MagicMock(name="yalexs.authentication") + type(authentication).state = PropertyMock(return_value=state) + type(authentication).access_token = PropertyMock(return_value=token_text) + type(authentication).access_token_expires = 
PropertyMock( + return_value=token_timestamp + ) + return authentication + + +def _mock_yale_lock(lockid: str = "mocklockid1", houseid: str = "mockhouseid1") -> Lock: + return Lock(lockid, _mock_yale_lock_data(lockid=lockid, houseid=houseid)) + + +def _mock_yale_doorbell( + deviceid="mockdeviceid1", houseid="mockhouseid1", brand=Brand.YALE_GLOBAL +) -> Doorbell: + return Doorbell( + deviceid, + _mock_yale_doorbell_data(deviceid=deviceid, houseid=houseid, brand=brand), + ) + + +def _mock_yale_doorbell_data( + deviceid: str = "mockdeviceid1", + houseid: str = "mockhouseid1", + brand: Brand = Brand.YALE_GLOBAL, +) -> dict[str, Any]: + return { + "_id": deviceid, + "DeviceID": deviceid, + "name": f"{deviceid} Name", + "HouseID": houseid, + "UserType": "owner", + "serialNumber": "mockserial", + "battery": 90, + "status": "standby", + "currentFirmwareVersion": "mockfirmware", + "Bridge": { + "_id": "bridgeid1", + "firmwareVersion": "mockfirm", + "operative": True, + }, + "LockStatus": {"doorState": "open"}, + } + + +def _mock_yale_lock_data( + lockid: str = "mocklockid1", houseid: str = "mockhouseid1" +) -> dict[str, Any]: + return { + "_id": lockid, + "LockID": lockid, + "LockName": f"{lockid} Name", + "HouseID": houseid, + "UserType": "owner", + "SerialNumber": "mockserial", + "battery": 90, + "currentFirmwareVersion": "mockfirmware", + "Bridge": { + "_id": "bridgeid1", + "firmwareVersion": "mockfirm", + "operative": True, + }, + "LockStatus": {"doorState": "open"}, + } + + +async def _mock_operative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online.json") + + +async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") + + +async def _mock_inoperative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.offline.json") + + +async def _mock_activities_from_fixture( + hass: HomeAssistant, path: str +) -> list[Activity]: + json_dict = await _load_json_fixture(hass, path) + activities = [] + for activity_json in json_dict: + activity = _activity_from_dict(activity_json) + if activity: + activities.append(activity) + + return activities + + +async def _mock_lock_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: + json_dict = await _load_json_fixture(hass, path) + return LockDetail(json_dict) + + +async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: + json_dict = await _load_json_fixture(hass, path) + return DoorbellDetail(json_dict) + + +async def _load_json_fixture(hass: HomeAssistant, path: str) -> dict[str, Any]: + fixture = await hass.async_add_executor_job( + load_fixture, os.path.join("yale", path) + ) + return json.loads(fixture) + + +async def _mock_doorsense_enabled_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") + + +async def _mock_doorsense_missing_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") + + +async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") + + +def _mock_lock_operation_activity( + lock: Lock, action: str, offset: float +) -> LockOperationActivity: + return LockOperationActivity( + SOURCE_LOCK_OPERATE, + { + "dateTime": (time.time() + offset) * 1000, 
+ "deviceID": lock.device_id, + "deviceType": "lock", + "action": action, + }, + ) + + +def _mock_door_operation_activity( + lock: Lock, action: str, offset: float +) -> DoorOperationActivity: + return DoorOperationActivity( + SOURCE_LOCK_OPERATE, + { + "dateTime": (time.time() + offset) * 1000, + "deviceID": lock.device_id, + "deviceType": "lock", + "action": action, + }, + ) + + +def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: + action = activity_dict.get("action") + + activity_dict["dateTime"] = time.time() * 1000 + + if action in ACTIVITY_ACTIONS_DOORBELL_DING: + return DoorbellDingActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOORBELL_MOTION: + return DoorbellMotionActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOORBELL_VIEW: + return DoorbellViewActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_LOCK_OPERATION: + return LockOperationActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOOR_OPERATION: + return DoorOperationActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_BRIDGE_OPERATION: + return BridgeOperationActivity(SOURCE_LOG, activity_dict) + return None diff --git a/tests/components/yale/snapshots/test_diagnostics.ambr b/tests/components/yale/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..fd31bc0ec91 --- /dev/null +++ b/tests/components/yale/snapshots/test_diagnostics.ambr @@ -0,0 +1,125 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'brand': 'yale_home', + 'doorbells': dict({ + 'K98GiDT45GUL': dict({ + 'HouseID': '**REDACTED**', + 'LockID': 'BBBB1F5F11114C24CCCC97571DD6AAAA', + 'appID': 'august-iphone', + 'caps': list([ + 'reconnect', + ]), + 'createdAt': '2016-11-26T22:27:11.176Z', + 'doorbellID': 'K98GiDT45GUL', + 'doorbellServerURL': 'https://doorbells.august.com', + 'dvrSubscriptionSetupDone': True, + 'firmwareVersion': '2.3.0-RC153+201711151527', + 'installDate': '2016-11-26T22:27:11.176Z', + 'installUserID': '**REDACTED**', + 'name': 'Front Door', + 'pubsubChannel': '**REDACTED**', + 'recentImage': '**REDACTED**', + 'serialNumber': 'tBXZR0Z35E', + 'settings': dict({ + 'ABREnabled': True, + 'IREnabled': True, + 'IVAEnabled': False, + 'JPGQuality': 70, + 'batteryLowThreshold': 3.1, + 'batteryRun': False, + 'batteryUseThreshold': 3.4, + 'bitrateCeiling': 512000, + 'buttonpush_notifications': True, + 'debug': False, + 'directLink': True, + 'initialBitrate': 384000, + 'irConfiguration': 8448272, + 'keepEncoderRunning': True, + 'micVolume': 100, + 'minACNoScaling': 40, + 'motion_notifications': True, + 'notify_when_offline': True, + 'overlayEnabled': True, + 'ringSoundEnabled': True, + 'speakerVolume': 92, + 'turnOffCamera': False, + 'videoResolution': '640x480', + }), + 'status': 'doorbell_call_status_online', + 'status_timestamp': 1512811834532, + 'telemetry': dict({ + 'BSSID': '88:ee:00:dd:aa:11', + 'SSID': 'foo_ssid', + 'ac_in': 23.856874, + 'battery': 4.061763, + 'battery_soc': 96, + 'battery_soh': 95, + 'date': '2017-12-10 08:05:12', + 'doorbell_low_battery': False, + 'ip_addr': '10.0.1.11', + 'link_quality': 54, + 'load_average': '0.50 0.47 0.35 1/154 9345', + 'signal_level': -56, + 'steady_ac_in': 22.196405, + 'temperature': 28.25, + 'updated_at': '2017-12-10T08:05:13.650Z', + 'uptime': '16168.75 13830.49', + 'wifi_freq': 5745, + }), + 'updatedAt': '2017-12-10T08:05:13.650Z', + }), + }), + 'locks': dict({ + 'online_with_doorsense': dict({ + 'Bridge': dict({ + '_id': 'bridgeid', + 'deviceModel': 
'august-connect', + 'firmwareVersion': '2.2.1', + 'hyperBridge': True, + 'mfgBridgeID': 'C5WY200WSH', + 'operative': True, + 'status': dict({ + 'current': 'online', + 'lastOffline': '2000-00-00T00:00:00.447Z', + 'lastOnline': '2000-00-00T00:00:00.447Z', + 'updated': '2000-00-00T00:00:00.447Z', + }), + }), + 'Calibrated': False, + 'Created': '2000-00-00T00:00:00.447Z', + 'HouseID': '**REDACTED**', + 'HouseName': 'Test', + 'LockID': 'online_with_doorsense', + 'LockName': 'Online door with doorsense', + 'LockStatus': dict({ + 'dateTime': '2017-12-10T04:48:30.272Z', + 'doorState': 'open', + 'isLockStatusChanged': False, + 'status': 'locked', + 'valid': True, + }), + 'SerialNumber': 'XY', + 'Type': 1001, + 'Updated': '2000-00-00T00:00:00.447Z', + 'battery': 0.922, + 'currentFirmwareVersion': 'undefined-4.3.0-1.8.14', + 'homeKitEnabled': True, + 'hostLockInfo': dict({ + 'manufacturer': 'yale', + 'productID': 1536, + 'productTypeID': 32770, + 'serialNumber': 'ABC', + }), + 'isGalileo': False, + 'macAddress': '12:22', + 'pins': '**REDACTED**', + 'pubsubChannel': '**REDACTED**', + 'skuNumber': 'AUG-MD01', + 'supportsEntryCodes': True, + 'timeZone': 'Pacific/Hawaii', + 'zWaveEnabled': False, + }), + }), + }) +# --- diff --git a/tests/components/yale/test_binary_sensor.py b/tests/components/yale/test_binary_sensor.py new file mode 100644 index 00000000000..ad4d4155e5b --- /dev/null +++ b/tests/components/yale/test_binary_sensor.py @@ -0,0 +1,390 @@ +"""The binary_sensor tests for the yale platform.""" + +import datetime +from unittest.mock import patch + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_UNLOCK, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, +) + +from tests.common import async_fire_time_changed + + +async def test_doorsense(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + await _create_yale_with_devices(hass, [lock_one]) + + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() + + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + await hass.async_block_till_done() + + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_OFF + + +async def test_lock_bridge_offline(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + activities = await 
_mock_activities_from_fixture( + hass, "get_activity.bridge_offline.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_UNAVAILABLE + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF + binary_sensor_k98gidt45gul_name_online = hass.states.get( + "binary_sensor.k98gidt45gul_name_connectivity" + ) + assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_doorbell_ding" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF + + +async def test_create_doorbell_offline(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + binary_sensor_tmt100_name_motion = hass.states.get( + "binary_sensor.tmt100_name_motion" + ) + assert binary_sensor_tmt100_name_motion.state == STATE_UNAVAILABLE + binary_sensor_tmt100_name_online = hass.states.get( + "binary_sensor.tmt100_name_connectivity" + ) + assert binary_sensor_tmt100_name_online.state == STATE_OFF + binary_sensor_tmt100_name_ding = hass.states.get( + "binary_sensor.tmt100_name_doorbell_ding" + ) + assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE + + +async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_yale_with_devices(hass, [doorbell_one], activities=activities) + + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON + binary_sensor_k98gidt45gul_name_online = hass.states.get( + "binary_sensor.k98gidt45gul_name_connectivity" + ) + assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_doorbell_ding" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.yale.util._native_datetime", 
+ return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + + +async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a doorbell that can be updated via socketio.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_doorbell_ding" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + + listener = list(socketio._listeners)[0] + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "imagecapture", + "data": { + "result": { + "created_at": "2021-03-16T01:07:08.817Z", + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/zip/images/zip.jpeg" + ), + }, + }, + }, + ) + + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_ON + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ) + + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_motion = hass.states.get( + "binary_sensor.k98gidt45gul_name_motion" + ) + assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON + + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_doorbell_ding" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.yale.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( + "binary_sensor.k98gidt45gul_name_image_capture" + ) + assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "buttonpush", + }, + ) + + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_doorbell_ding" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + 
"homeassistant.components.yale.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + binary_sensor_k98gidt45gul_name_ding = hass.states.get( + "binary_sensor.k98gidt45gul_name_doorbell_ding" + ) + assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF + + +async def test_doorbell_device_registry( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test creation of a lock with doorsense and bridge ands up in the registry.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + reg_device = device_registry.async_get_device(identifiers={("yale", "tmt100")}) + assert reg_device.model == "hydra1" + assert reg_device.name == "tmt100 Name" + assert reg_device.manufacturer == "Yale Home Inc." + assert reg_device.sw_version == "3.1.0-HYDRC75+201909251139" + + +async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + config_entry, socketio = await _create_yale_with_devices( + hass, [lock_one], activities=activities + ) + + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Unlocking", "doorState": "closed"}, + ) + + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_OFF + + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Locking", "doorState": "open"}, + ) + + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + socketio.connected = True + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + # Ensure socketio status is always preserved + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Unlocking", "doorState": "open"}, + ) + + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + 
"binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + await hass.async_block_till_done() + binary_sensor_online_with_doorsense_name = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_yale_with_devices(hass, [lock_one]) + + ding_sensor = hass.states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + ) + assert ding_sensor.state == STATE_OFF diff --git a/tests/components/yale/test_button.py b/tests/components/yale/test_button.py new file mode 100644 index 00000000000..ebd22f1da59 --- /dev/null +++ b/tests/components/yale/test_button.py @@ -0,0 +1,24 @@ +"""The button tests for the yale platform.""" + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from .mocks import _create_yale_api_with_devices, _mock_lock_from_fixture + + +async def test_wake_lock(hass: HomeAssistant) -> None: + """Test creation of a lock and wake it.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + _, api_instance, _ = await _create_yale_api_with_devices(hass, [lock_one]) + entity_id = "button.online_with_doorsense_name_wake" + binary_sensor_online_with_doorsense_name = hass.states.get(entity_id) + assert binary_sensor_online_with_doorsense_name is not None + api_instance.async_status_async.reset_mock() + await hass.services.async_call( + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + await hass.async_block_till_done() + api_instance.async_status_async.assert_called_once() diff --git a/tests/components/yale/test_camera.py b/tests/components/yale/test_camera.py new file mode 100644 index 00000000000..502945b19c1 --- /dev/null +++ b/tests/components/yale/test_camera.py @@ -0,0 +1,93 @@ +"""The camera tests for the yale platform.""" + +from http import HTTPStatus +from unittest.mock import patch + +from yalexs.const import Brand +from yalexs.doorbell import ContentTokenExpired + +from homeassistant.const import STATE_IDLE +from homeassistant.core import HomeAssistant + +from .mocks import _create_yale_with_devices, _mock_doorbell_from_fixture + +from tests.typing import ClientSessionGenerator + + +async def test_create_doorbell( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + with patch.object( + doorbell_one, "async_get_doorbell_image", create=False, return_value="image" + ): + await _create_yale_with_devices(hass, [doorbell_one], brand=Brand.YALE_GLOBAL) + + camera_k98gidt45gul_name_camera = hass.states.get( + "camera.k98gidt45gul_name_camera" + ) + assert camera_k98gidt45gul_name_camera.state == STATE_IDLE + + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert 
resp.status == HTTPStatus.OK + body = await resp.text() + assert body == "image" + + +async def test_doorbell_refresh_content_token_recover( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test camera image content token expired.""" + doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + with patch.object( + doorbell_two, + "async_get_doorbell_image", + create=False, + side_effect=[ContentTokenExpired, "image"], + ): + await _create_yale_with_devices( + hass, + [doorbell_two], + brand=Brand.YALE_GLOBAL, + ) + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == HTTPStatus.OK + body = await resp.text() + assert body == "image" + + +async def test_doorbell_refresh_content_token_fail( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test camera image content token expired.""" + doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + with patch.object( + doorbell_two, + "async_get_doorbell_image", + create=False, + side_effect=ContentTokenExpired, + ): + await _create_yale_with_devices( + hass, + [doorbell_two], + brand=Brand.YALE_GLOBAL, + ) + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/tests/components/yale/test_config_flow.py b/tests/components/yale/test_config_flow.py new file mode 100644 index 00000000000..a62aa2d38f9 --- /dev/null +++ b/tests/components/yale/test_config_flow.py @@ -0,0 +1,207 @@ +"""Test the yale config flow.""" + +from collections.abc import Generator +from unittest.mock import Mock, patch + +import pytest + +from homeassistant import config_entries +from homeassistant.components.yale.application_credentials import ( + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.components.yale.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .mocks import USER_ID + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +CLIENT_ID = "1" + + +@pytest.fixture +def mock_setup_entry() -> Generator[Mock, None, None]: + """Patch setup entry.""" + with patch( + "homeassistant.components.yale.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + jwt: str, + mock_setup_entry: Mock, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await hass_client_no_auth() + resp = await 
client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": jwt, + "scope": "any", + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": "mock-user-id", + "expires_at": 1697753347, + }, + ) + + await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.unique_id == USER_ID + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, + reauth_jwt: str, + mock_setup_entry: Mock, +) -> None: + """Test the reauthentication case updates the existing config entry.""" + + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": reauth_jwt, + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": USER_ID, + "token_type": "Bearer", + "expires_at": 1697753347, + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.unique_id == USER_ID + assert "token" in mock_config_entry.data + # Verify access token is refreshed + assert mock_config_entry.data["token"]["access_token"] == reauth_jwt + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth_wrong_account( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, + reauth_jwt_wrong_account: str, + jwt: str, + mock_setup_entry: Mock, +) -> None: + """Test the reauthentication aborts, if user tries to reauthenticate with another account.""" + assert mock_config_entry.data["token"]["access_token"] == jwt + + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + 
assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": reauth_jwt_wrong_account, + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "token_type": "Bearer", + "expires_at": 1697753347, + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_invalid_user" + + assert mock_config_entry.unique_id == USER_ID + assert "token" in mock_config_entry.data + # Verify access token is like before + assert mock_config_entry.data["token"]["access_token"] == jwt diff --git a/tests/components/yale/test_diagnostics.py b/tests/components/yale/test_diagnostics.py new file mode 100644 index 00000000000..e5fd6b1c1a7 --- /dev/null +++ b/tests/components/yale/test_diagnostics.py @@ -0,0 +1,31 @@ +"""Test yale diagnostics.""" + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from .mocks import ( + _create_yale_api_with_devices, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, +) + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating diagnostics for a config entry.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + entry, _, _ = await _create_yale_api_with_devices(hass, [lock_one, doorbell_one]) + diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag == snapshot diff --git a/tests/components/yale/test_event.py b/tests/components/yale/test_event.py new file mode 100644 index 00000000000..f2f205289ff --- /dev/null +++ b/tests/components/yale/test_event.py @@ -0,0 +1,174 @@ +"""The event tests for the yale.""" + +import datetime +from unittest.mock import patch + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, +) + +from tests.common import async_fire_time_changed + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + +async def test_create_doorbell_offline(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + motion_state = hass.states.get("event.tmt100_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNAVAILABLE + doorbell_state = 
hass.states.get("event.tmt100_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNAVAILABLE + + +async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_yale_with_devices(hass, [doorbell_one], activities=activities) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.yale.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state.state == isotime + + +async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a doorbell that can be updated via socketio.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + listener = list(socketio._listeners)[0] + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ) + + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.yale.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "buttonpush", + }, + ) + + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != 
STATE_UNKNOWN + isotime = motion_state.state + + new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) + native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) + with patch( + "homeassistant.components.yale.util._native_datetime", + return_value=native_time, + ): + async_fire_time_changed(hass, new_time) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + assert motion_state.state == isotime + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_yale_with_devices(hass, [lock_one]) + + doorbell_state = hass.states.get( + "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" + ) + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/yale/test_init.py b/tests/components/yale/test_init.py new file mode 100644 index 00000000000..c9cb4be5882 --- /dev/null +++ b/tests/components/yale/test_init.py @@ -0,0 +1,238 @@ +"""The tests for the yale platform.""" + +from unittest.mock import Mock + +from aiohttp import ClientResponseError +import pytest +from yalexs.exceptions import InvalidAuth, YaleApiError + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_OPEN, + SERVICE_UNLOCK, + STATE_LOCKED, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from .mocks import ( + _create_yale_with_devices, + _mock_doorsense_enabled_yale_lock_detail, + _mock_doorsense_missing_yale_lock_detail, + _mock_inoperative_yale_lock_detail, + _mock_lock_with_offline_key, + _mock_operative_yale_lock_detail, +) + +from tests.typing import WebSocketGenerator + + +async def test_yale_api_is_failing(hass: HomeAssistant) -> None: + """Config entry state is SETUP_RETRY when yale api is failing.""" + + config_entry, socketio = await _create_yale_with_devices( + hass, + authenticate_side_effect=YaleApiError( + "offline", ClientResponseError(None, None, status=500) + ), + ) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_yale_is_offline(hass: HomeAssistant) -> None: + """Config entry state is SETUP_RETRY when yale is offline.""" + + config_entry, socketio = await _create_yale_with_devices( + hass, authenticate_side_effect=TimeoutError + ) + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_yale_late_auth_failure(hass: HomeAssistant) -> None: + """Test we can detect a late auth failure.""" + config_entry, socketio = await _create_yale_with_devices( + hass, + authenticate_side_effect=InvalidAuth( + "authfailed", ClientResponseError(None, None, status=401) + ), + ) + + assert config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + + assert flows[0]["step_id"] == "pick_implementation" + + +async def test_unlock_throws_yale_api_http_error(hass: HomeAssistant) -> None: + """Test unlock throws correct error on http error.""" + 
mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + aiohttp_client_response_exception = ClientResponseError(None, None, status=400) + + def _unlock_return_activities_side_effect(access_token, device_id): + raise YaleApiError( + "This should bubble up as its user consumable", + aiohttp_client_response_exception, + ) + + await _create_yale_with_devices( + hass, + [mocked_lock_detail], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + last_err = None + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + try: + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + except HomeAssistantError as err: + last_err = err + assert str(last_err) == ( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ) + + +async def test_lock_throws_yale_api_http_error(hass: HomeAssistant) -> None: + """Test lock throws correct error on http error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + aiohttp_client_response_exception = ClientResponseError(None, None, status=400) + + def _lock_return_activities_side_effect(access_token, device_id): + raise YaleApiError( + "This should bubble up as its user consumable", + aiohttp_client_response_exception, + ) + + await _create_yale_with_devices( + hass, + [mocked_lock_detail], + api_call_side_effects={ + "lock_return_activities": _lock_return_activities_side_effect + }, + ) + last_err = None + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + try: + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + except HomeAssistantError as err: + last_err = err + assert str(last_err) == ( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ) + + +async def test_open_throws_hass_service_not_supported_error( + hass: HomeAssistant, +) -> None: + """Test open throws correct error on entity does not support this service error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [mocked_lock_detail]) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises(HomeAssistantError): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + +async def test_inoperative_locks_are_filtered_out(hass: HomeAssistant) -> None: + """Ensure inoperative locks do not get setup.""" + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [yale_operative_lock, yale_inoperative_lock]) + + lock_abc_name = hass.states.get("lock.abc_name") + assert lock_abc_name is None + lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get( + "lock.a6697750d607098bae8d6baa11ef8063_name" + ) + assert lock_a6697750d607098bae8d6baa11ef8063_name.state == STATE_LOCKED + + +async def test_lock_has_doorsense(hass: HomeAssistant) -> None: + """Check to see if a lock has doorsense.""" + doorsenselock = await _mock_doorsense_enabled_yale_lock_detail(hass) + nodoorsenselock = await _mock_doorsense_missing_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [doorsenselock, nodoorsenselock]) + + binary_sensor_online_with_doorsense_name_open = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name_open.state == STATE_ON + 
binary_sensor_missing_doorsense_id_name_open = hass.states.get( + "binary_sensor.missing_with_doorsense_name_door" + ) + assert binary_sensor_missing_doorsense_id_name_open is None + + +async def test_load_unload(hass: HomeAssistant) -> None: + """Config entry can be unloaded.""" + + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_operative_lock, yale_inoperative_lock] + ) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_load_triggers_ble_discovery( + hass: HomeAssistant, mock_discovery: Mock +) -> None: + """Test that loading a lock that supports offline ble operation passes the keys to yalexe_ble.""" + + yale_lock_with_key = await _mock_lock_with_offline_key(hass) + yale_lock_without_key = await _mock_operative_yale_lock_detail(hass) + + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_lock_with_key, yale_lock_without_key] + ) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + assert len(mock_discovery.mock_calls) == 1 + assert mock_discovery.mock_calls[0].kwargs["data"] == { + "name": "Front Door Lock", + "address": None, + "serial": "X2FSW05DGA", + "key": "kkk01d4300c1dcxxx1c330f794941111", + "slot": 1, + } + + +async def test_device_remove_devices( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test we can only remove a device that no longer exists.""" + assert await async_setup_component(hass, "config", {}) + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_operative_lock] + ) + entity = entity_registry.entities["lock.a6697750d607098bae8d6baa11ef8063_name"] + + device_entry = device_registry.async_get(entity.device_id) + client = await hass_ws_client(hass) + response = await client.remove_device(device_entry.id, config_entry.entry_id) + assert not response["success"] + + dead_device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "remove-device-id")}, + ) + response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) + assert response["success"] diff --git a/tests/components/yale/test_lock.py b/tests/components/yale/test_lock.py new file mode 100644 index 00000000000..b449be9153d --- /dev/null +++ b/tests/components/yale/test_lock.py @@ -0,0 +1,501 @@ +"""The lock tests for the yale platform.""" + +import datetime + +from aiohttp import ClientResponseError +from freezegun.api import FrozenDateTimeFactory +import pytest +from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME + +from homeassistant.components.lock import ( + DOMAIN as LOCK_DOMAIN, + STATE_JAMMED, + STATE_LOCKING, + STATE_UNLOCKING, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_OPEN, + SERVICE_UNLOCK, + STATE_LOCKED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + STATE_UNLOCKED, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er +import homeassistant.util.dt as dt_util + +from .mocks import ( + 
_create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, + _mock_lock_with_unlatch, + _mock_operative_yale_lock_detail, +) + +from tests.common import async_fire_time_changed + + +async def test_lock_device_registry( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test creation of a lock with doorsense and bridge ands up in the registry.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [lock_one]) + + reg_device = device_registry.async_get_device( + identifiers={("yale", "online_with_doorsense")} + ) + assert reg_device.model == "AUG-MD01" + assert reg_device.sw_version == "undefined-4.3.0-1.8.14" + assert reg_device.name == "online_with_doorsense Name" + assert reg_device.manufacturer == "Yale Home Inc." + + +async def test_lock_changed_by(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + assert ( + lock_online_with_doorsense_name.attributes.get("changed_by") + == "Your favorite elven princess" + ) + + +async def test_state_locking(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is locking.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKING + + +async def test_state_unlocking(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is unlocking.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlocking.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + + +async def test_state_jammed(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is jammed.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_JAMMED + + +async def test_one_lock_operation( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [lock_one]) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + assert 
lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + # No activity means it will be unavailable until the activity feed has data + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + assert ( + hass.states.get("sensor.online_with_doorsense_name_operator").state + == STATE_UNKNOWN + ) + + +async def test_open_lock_operation(hass: HomeAssistant) -> None: + """Test open lock operation using the open service.""" + lock_with_unlatch = await _mock_lock_with_unlatch(hass) + await _create_yale_with_devices(hass, [lock_with_unlatch]) + + lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") + assert lock_online_with_unlatch_name.state == STATE_LOCKED + + data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + await hass.async_block_till_done() + + lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") + assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + + +async def test_open_lock_operation_socketio_connected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test open lock operation using the open service when socketio is connected.""" + lock_with_unlatch = await _mock_lock_with_unlatch(hass) + assert lock_with_unlatch.pubsub_channel == "pubsub" + + _, socketio = await _create_yale_with_devices(hass, [lock_with_unlatch]) + socketio.connected = True + + lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") + assert lock_online_with_unlatch_name.state == STATE_LOCKED + + data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + await hass.async_block_till_done() + + listener = list(socketio._listeners)[0] + listener( + lock_with_unlatch.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") + assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + await hass.async_block_till_done() + + +async def test_one_lock_operation_socketio_connected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lock and unlock operations are async when socketio is connected.""" + lock_one = await 
_mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + _, socketio = await _create_yale_with_devices(hass, [lock_one]) + socketio.connected = True + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=1), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + await hass.async_block_till_done() + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Locked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + # No activity means it will be unavailable until the activity feed has data + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + assert ( + hass.states.get("sensor.online_with_doorsense_name_operator").state + == STATE_UNKNOWN + ) + + freezer.tick(INITIAL_LOCK_RESYNC_TIME) + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + + +async def test_lock_jammed(hass: HomeAssistant) -> None: + """Test lock gets jammed on unlock.""" + + def _unlock_return_activities_side_effect(access_token, device_id): + raise ClientResponseError(None, None, status=531) + + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices( + hass, + [lock_one], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + await hass.async_block_till_done() + + lock_online_with_doorsense_name = 
hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_JAMMED + + +async def test_lock_throws_exception_on_unknown_status_code( + hass: HomeAssistant, +) -> None: + """Test lock throws exception.""" + + def _unlock_return_activities_side_effect(access_token, device_id): + raise ClientResponseError(None, None, status=500) + + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices( + hass, + [lock_one], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 + assert ( + lock_online_with_doorsense_name.attributes.get("friendly_name") + == "online_with_doorsense Name" + ) + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + with pytest.raises(ClientResponseError): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + +async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_lock_from_fixture( + hass, + "get_lock.online.unknown_state.json", + ) + await _create_yale_with_devices(hass, [lock_one]) + + lock_brokenid_name = hass.states.get("lock.brokenid_name") + + assert lock_brokenid_name.state == STATE_UNKNOWN + + +async def test_lock_bridge_offline(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_offline.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_UNAVAILABLE + + +async def test_lock_bridge_online(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_online.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + +async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + config_entry, socketio = await _create_yale_with_devices( + hass, [lock_one], activities=activities + ) + socketio.connected = True + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_LOCKED + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow(), + { + "status": "kAugLockState_Unlocking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_online_with_doorsense_name = 
hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + + listener( + lock_one.device_id, + dt_util.utcnow(), + { + "status": "kAugLockState_Locking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING + + socketio.connected = True + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING + + # Ensure socketio status is always preserved + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) + await hass.async_block_till_done() + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_LOCKING + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + await hass.async_block_till_done() + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_open_throws_hass_service_not_supported_error( + hass: HomeAssistant, +) -> None: + """Test open throws correct error on entity does not support this service error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [mocked_lock_detail]) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises(HomeAssistantError, match="does not support this service"): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/components/yale/test_sensor.py b/tests/components/yale/test_sensor.py new file mode 100644 index 00000000000..caf8781b4ad --- /dev/null +++ b/tests/components/yale/test_sensor.py @@ -0,0 +1,362 @@ +"""The sensor tests for the yale platform.""" + +from typing import Any + +from homeassistant import core as ha +from homeassistant.const import ( + ATTR_ENTITY_PICTURE, + ATTR_UNIT_OF_MEASUREMENT, + PERCENTAGE, + STATE_UNKNOWN, +) +from homeassistant.core import CoreState, HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, +) + +from tests.common import mock_restore_cache_with_extra_data + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a 
doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + sensor_k98gidt45gul_name_battery = hass.states.get( + "sensor.k98gidt45gul_name_battery" + ) + assert sensor_k98gidt45gul_name_battery.state == "96" + assert ( + sensor_k98gidt45gul_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + ) + + +async def test_create_doorbell_offline( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") + assert sensor_tmt100_name_battery.state == "81" + assert sensor_tmt100_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + + entry = entity_registry.async_get("sensor.tmt100_name_battery") + assert entry + assert entry.unique_id == "tmt100_device_battery" + + +async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is hardwired without a battery.""" + doorbell_one = await _mock_doorbell_from_fixture( + hass, "get_doorbell.nobattery.json" + ) + await _create_yale_with_devices(hass, [doorbell_one]) + + sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") + assert sensor_tmt100_name_battery is None + + +async def test_create_lock_with_linked_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with a linked keypad that both have a battery.""" + lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") + await _create_yale_with_devices(hass, [lock_one]) + + sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" + assert ( + sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ + "unit_of_measurement" + ] + == PERCENTAGE + ) + entry = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert entry + assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" + + state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert state.state == "62" + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") + assert entry + assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" + + +async def test_create_lock_with_low_battery_linked_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with a linked keypad that both have a battery.""" + lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") + await _create_yale_with_devices(hass, [lock_one]) + + sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" + assert ( + sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ + "unit_of_measurement" + ] + == PERCENTAGE + ) + entry = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert entry + assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" 
+ + state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert state.state == "10" + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") + assert entry + assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" + + # No activity means it will be unavailable until someone unlocks/locks it + lock_operator_sensor = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_operator" + ) + assert ( + lock_operator_sensor.unique_id + == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" + ) + assert ( + hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state + == STATE_UNKNOWN + ) + + +async def test_lock_operator_bluetooth( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_bluetooth.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes["manual"] is False + assert state.attributes["tag"] is False + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is False + assert state.attributes["method"] == "mobile" + + +async def test_lock_operator_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_keypad.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes["manual"] is False + assert state.attributes["tag"] is False + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is True + assert state.attributes["autorelock"] is False + assert state.attributes["method"] == "keypad" + + +async def test_lock_operator_remote( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes["manual"] is False + assert state.attributes["tag"] is False + assert state.attributes["remote"] is True + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is False + assert 
state.attributes["method"] == "remote" + + +async def test_lock_operator_manual( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_manual.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes["manual"] is True + assert state.attributes["tag"] is False + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is False + assert state.attributes["method"] == "manual" + + +async def test_lock_operator_autorelock( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_autorelock.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Auto Relock" + assert state.attributes["manual"] is False + assert state.attributes["tag"] is False + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is True + assert state.attributes["method"] == "autorelock" + + +async def test_unlock_operator_manual( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock manually.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlock_from_manual.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes["manual"] is True + assert state.attributes["tag"] is False + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is False + assert state.attributes["method"] == "manual" + + +async def test_unlock_operator_tag( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with a tag.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlock_from_tag.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert 
state.attributes["manual"] is False + assert state.attributes["tag"] is True + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is False + assert state.attributes["method"] == "tag" + + +async def test_restored_state( + hass: HomeAssistant, hass_storage: dict[str, Any] +) -> None: + """Test restored state.""" + + entity_id = "sensor.online_with_doorsense_name_operator" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + fake_state = ha.State( + entity_id, + state="Tag Unlock", + attributes={ + "method": "tag", + "manual": False, + "remote": False, + "keypad": False, + "tag": True, + "autorelock": False, + ATTR_ENTITY_PICTURE: "image.png", + }, + ) + + # Home assistant is not running yet + hass.set_state(CoreState.not_running) + mock_restore_cache_with_extra_data( + hass, + [ + ( + fake_state, + {"native_value": "Tag Unlock", "native_unit_of_measurement": None}, + ) + ], + ) + + await _create_yale_with_devices(hass, [lock_one]) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == "Tag Unlock" + assert state.attributes["method"] == "tag" + assert state.attributes[ATTR_ENTITY_PICTURE] == "image.png" From 9153d16a6d980940c2d34341c4fefe2e285fd3a9 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Wed, 28 Aug 2024 18:01:41 +0200 Subject: [PATCH 1187/1635] Enable Ruff TCH rules (#124396) * Enable Ruff TCH rules * Ignore TCH001-003 --------- Co-authored-by: Martin Hjelmare --- pyproject.toml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 74329da38b8..b4d3bf46916 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -768,6 +768,7 @@ select = [ "SLOT", # flake8-slots "T100", # Trace found: {name} used "T20", # flake8-print + "TCH", # flake8-type-checking "TID251", # Banned imports "TRY", # tryceratops "UP", # pyupgrade @@ -800,6 +801,12 @@ ignore = [ "SIM103", # Return the condition {condition} directly "SIM108", # Use ternary operator {contents} instead of if-else-block "SIM115", # Use context handler for opening files + + # Moving imports into type-checking blocks can mess with pytest.patch() + "TCH001", # Move application import {} into a type-checking block + "TCH002", # Move third-party import {} into a type-checking block + "TCH003", # Move standard library import {} into a type-checking block + "TRY003", # Avoid specifying long messages outside the exception class "TRY400", # Use `logging.exception` instead of `logging.error` # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 From 25cdd737a9274abc8aabb063914dc8ac8a60238e Mon Sep 17 00:00:00 2001 From: Kristian Haugene Date: Wed, 28 Aug 2024 18:05:27 +0200 Subject: [PATCH 1188/1635] Add current intraday price ranking to Tibber price sensor (#124595) --- homeassistant/components/tibber/sensor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index d6f8baa931c..09b36f41929 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -383,6 +383,7 @@ class TibberSensorElPrice(TibberSensor): "off_peak_1": None, "peak": None, "off_peak_2": None, + "intraday_price_ranking": None, } self._attr_icon = ICON self._attr_unique_id = self._tibber_home.home_id @@ -411,8 +412,9 @@ class TibberSensorElPrice(TibberSensor): return res = 
self._tibber_home.current_price_data() - self._attr_native_value, price_level, self._last_updated, _ = res + self._attr_native_value, price_level, self._last_updated, price_rank = res self._attr_extra_state_attributes["price_level"] = price_level + self._attr_extra_state_attributes["intraday_price_ranking"] = price_rank attrs = self._tibber_home.current_attributes() self._attr_extra_state_attributes.update(attrs) From 1650cee16c7abe2bd85ef27886d649335a93eb3e Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 28 Aug 2024 18:10:38 +0200 Subject: [PATCH 1189/1635] Check KNX integration is loaded on websocket calls (#123178) --- homeassistant/components/knx/websocket.py | 110 +++++++++++++++++++--- tests/components/knx/test_websocket.py | 27 ++++++ 2 files changed, 124 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/knx/websocket.py b/homeassistant/components/knx/websocket.py index 4af3012741a..5c21a941484 100644 --- a/homeassistant/components/knx/websocket.py +++ b/homeassistant/components/knx/websocket.py @@ -2,7 +2,10 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Final +import asyncio +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import TYPE_CHECKING, Any, Final, overload import knx_frontend as knx_panel import voluptuous as vol @@ -77,21 +80,92 @@ async def register_panel(hass: HomeAssistant) -> None: ) +type KnxWebSocketCommandHandler = Callable[ + [HomeAssistant, KNXModule, websocket_api.ActiveConnection, dict[str, Any]], None +] +type KnxAsyncWebSocketCommandHandler = Callable[ + [HomeAssistant, KNXModule, websocket_api.ActiveConnection, dict[str, Any]], + Awaitable[None], +] + + +@overload +def provide_knx( + func: KnxAsyncWebSocketCommandHandler, +) -> websocket_api.const.AsyncWebSocketCommandHandler: ... +@overload +def provide_knx( + func: KnxWebSocketCommandHandler, +) -> websocket_api.const.WebSocketCommandHandler: ... 
+ + +def provide_knx( + func: KnxAsyncWebSocketCommandHandler | KnxWebSocketCommandHandler, +) -> ( + websocket_api.const.AsyncWebSocketCommandHandler + | websocket_api.const.WebSocketCommandHandler +): + """Websocket decorator to provide a KNXModule instance.""" + + def _send_not_loaded_error( + connection: websocket_api.ActiveConnection, msg_id: int + ) -> None: + connection.send_error( + msg_id, + websocket_api.const.ERR_HOME_ASSISTANT_ERROR, + "KNX integration not loaded.", + ) + + if asyncio.iscoroutinefunction(func): + + @wraps(func) + async def with_knx( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Add KNX Module to call function.""" + try: + knx: KNXModule = hass.data[DOMAIN] + except KeyError: + _send_not_loaded_error(connection, msg["id"]) + return + await func(hass, knx, connection, msg) + + else: + + @wraps(func) + def with_knx( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Add KNX Module to call function.""" + try: + knx: KNXModule = hass.data[DOMAIN] + except KeyError: + _send_not_loaded_error(connection, msg["id"]) + return + func(hass, knx, connection, msg) + + return with_knx + + @websocket_api.require_admin @websocket_api.websocket_command( { vol.Required("type"): "knx/info", } ) +@provide_knx @callback def ws_info( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] - _project_info = None if project_info := knx.project.info: _project_info = { @@ -119,13 +193,14 @@ def ws_info( } ) @websocket_api.async_response +@provide_knx async def ws_get_knx_project( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get KNX project.""" - knx: KNXModule = hass.data[DOMAIN] knxproject = await knx.project.get_knxproject() connection.send_result( msg["id"], @@ -145,13 +220,14 @@ async def ws_get_knx_project( } ) @websocket_api.async_response +@provide_knx async def ws_project_file_process( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] try: await knx.project.process_project_file( xknx=knx.xknx, @@ -175,13 +251,14 @@ async def ws_project_file_process( } ) @websocket_api.async_response +@provide_knx async def ws_project_file_remove( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] await knx.project.remove_project_file() connection.send_result(msg["id"]) @@ -192,14 +269,15 @@ async def ws_project_file_remove( vol.Required("type"): "knx/group_monitor_info", } ) +@provide_knx @callback def ws_group_monitor_info( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command of group monitor.""" - knx: KNXModule = hass.data[DOMAIN] recent_telegrams = [*knx.telegrams.recent_telegrams] connection.send_result( msg["id"], @@ -272,8 +350,10 @@ def ws_validate_entity( } ) @websocket_api.async_response +@provide_knx async def ws_create_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: @@ -283,7 +363,6 @@ async def ws_create_entity( except EntityStoreValidationException as exc: connection.send_result(msg["id"], 
exc.validation_error) return - knx: KNXModule = hass.data[DOMAIN] try: entity_id = await knx.config_store.create_entity( # use validation result so defaults are applied @@ -308,8 +387,10 @@ async def ws_create_entity( } ) @websocket_api.async_response +@provide_knx async def ws_update_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: @@ -319,7 +400,6 @@ async def ws_update_entity( except EntityStoreValidationException as exc: connection.send_result(msg["id"], exc.validation_error) return - knx: KNXModule = hass.data[DOMAIN] try: await knx.config_store.update_entity( validated_data[CONF_PLATFORM], @@ -344,13 +424,14 @@ async def ws_update_entity( } ) @websocket_api.async_response +@provide_knx async def ws_delete_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Delete entity from entity store and remove it.""" - knx: KNXModule = hass.data[DOMAIN] try: await knx.config_store.delete_entity(msg[CONF_ENTITY_ID]) except ConfigStoreException as err: @@ -367,14 +448,15 @@ async def ws_delete_entity( vol.Required("type"): "knx/get_entity_entries", } ) +@provide_knx @callback def ws_get_entity_entries( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Get entities configured from entity store.""" - knx: KNXModule = hass.data[DOMAIN] entity_entries = [ entry.extended_dict for entry in knx.config_store.get_entity_entries() ] @@ -388,14 +470,15 @@ def ws_get_entity_entries( vol.Required(CONF_ENTITY_ID): str, } ) +@provide_knx @callback def ws_get_entity_config( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Get entity configuration from entity store.""" - knx: KNXModule = hass.data[DOMAIN] try: config_info = knx.config_store.get_entity_config(msg[CONF_ENTITY_ID]) except ConfigStoreException as err: @@ -414,14 +497,15 @@ def ws_get_entity_config( vol.Optional("area_id"): str, } ) +@provide_knx @callback def ws_create_device( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Create a new KNX device.""" - knx: KNXModule = hass.data[DOMAIN] identifier = f"knx_vdev_{ulid_now()}" device_registry = dr.async_get(hass) _device = device_registry.async_get_or_create( diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index 309ea111709..e747b0daade 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -3,6 +3,8 @@ from typing import Any from unittest.mock import patch +import pytest + from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY from homeassistant.const import CONF_NAME @@ -355,3 +357,28 @@ async def test_knx_subscribe_telegrams_command_project( ) assert res["event"]["direction"] == "Incoming" assert res["event"]["timestamp"] is not None + + +@pytest.mark.parametrize( + "endpoint", + [ + "knx/info", # sync ws-command + "knx/get_knx_project", # async ws-command + ], +) +async def test_websocket_when_config_entry_unloaded( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + endpoint: str, +) -> None: + """Test websocket connection when config entry is unloaded.""" + await knx.setup_integration({}) + await hass.config_entries.async_unload(knx.mock_config_entry.entry_id) + client = 
await hass_ws_client(hass) + + await client.send_json_auto_id({"type": endpoint}) + res = await client.receive_json() + assert not res["success"] + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"] == "KNX integration not loaded." From 2dce876a860cbe163119297290eaa9a560441644 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 28 Aug 2024 18:51:50 +0200 Subject: [PATCH 1190/1635] Bump version to 2024.10.0dev0 (#124808) --- .github/workflows/ci.yaml | 2 +- homeassistant/const.py | 2 +- pyproject.toml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f6ffa439d9b..b62fff06c0c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -40,7 +40,7 @@ env: CACHE_VERSION: 10 UV_CACHE_VERSION: 1 MYPY_CACHE_VERSION: 8 - HA_SHORT_VERSION: "2024.9" + HA_SHORT_VERSION: "2024.10" DEFAULT_PYTHON: "3.12" ALL_PYTHON_VERSIONS: "['3.12']" # 10.3 is the oldest supported version diff --git a/homeassistant/const.py b/homeassistant/const.py index 8384a6d44bd..1ee73408f98 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -23,7 +23,7 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 9 +MINOR_VERSION: Final = 10 PATCH_VERSION: Final = "0.dev0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" diff --git a/pyproject.toml b/pyproject.toml index b4d3bf46916..596c0297131 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.9.0.dev0" +version = "2024.10.0.dev0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." 
readme = "README.rst" @@ -801,7 +801,7 @@ ignore = [ "SIM103", # Return the condition {condition} directly "SIM108", # Use ternary operator {contents} instead of if-else-block "SIM115", # Use context handler for opening files - + # Moving imports into type-checking blocks can mess with pytest.patch() "TCH001", # Move application import {} into a type-checking block "TCH002", # Move third-party import {} into a type-checking block From c04912914726eec2c7c73508d73bdfbbe6f223aa Mon Sep 17 00:00:00 2001 From: Blake Bryant Date: Wed, 28 Aug 2024 10:16:05 -0700 Subject: [PATCH 1191/1635] Add Deako integration (#121132) * Deako integration using pydeako * fix: address feedback - make unit tests more e2e - use runtime_data to store connection * fix: address feedback part 2 - added better type safety for Deako config entries - refactored the config flow tests to use a conftest mock instead of directly patching - removed pytest.mark.asyncio test decorators * fix: address feedback pt 3 - simplify config entry type - add test for single_instance_allowed - remove light.py get_state(), only used once, no need to be separate function * fix: ruff format * Update homeassistant/components/deako/__init__.py Co-authored-by: Joost Lekkerkerker --------- Co-authored-by: Joost Lekkerkerker --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/deako/__init__.py | 59 ++++++ homeassistant/components/deako/config_flow.py | 26 +++ homeassistant/components/deako/const.py | 5 + homeassistant/components/deako/light.py | 96 +++++++++ homeassistant/components/deako/manifest.json | 13 ++ homeassistant/components/deako/strings.json | 13 ++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 7 + homeassistant/generated/zeroconf.py | 5 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/deako/__init__.py | 1 + tests/components/deako/conftest.py | 45 ++++ .../deako/snapshots/test_light.ambr | 168 +++++++++++++++ tests/components/deako/test_config_flow.py | 80 ++++++++ tests/components/deako/test_init.py | 87 ++++++++ tests/components/deako/test_light.py | 192 ++++++++++++++++++ 20 files changed, 817 insertions(+) create mode 100644 homeassistant/components/deako/__init__.py create mode 100644 homeassistant/components/deako/config_flow.py create mode 100644 homeassistant/components/deako/const.py create mode 100644 homeassistant/components/deako/light.py create mode 100644 homeassistant/components/deako/manifest.json create mode 100644 homeassistant/components/deako/strings.json create mode 100644 tests/components/deako/__init__.py create mode 100644 tests/components/deako/conftest.py create mode 100644 tests/components/deako/snapshots/test_light.ambr create mode 100644 tests/components/deako/test_config_flow.py create mode 100644 tests/components/deako/test_init.py create mode 100644 tests/components/deako/test_light.py diff --git a/.strict-typing b/.strict-typing index 2566a5349c2..c8aa9878413 100644 --- a/.strict-typing +++ b/.strict-typing @@ -139,6 +139,7 @@ homeassistant.components.cpuspeed.* homeassistant.components.crownstone.* homeassistant.components.date.* homeassistant.components.datetime.* +homeassistant.components.deako.* homeassistant.components.deconz.* homeassistant.components.default_config.* homeassistant.components.demo.* diff --git a/CODEOWNERS b/CODEOWNERS index 6f118ca1ba8..990ed679d2b 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -294,6 +294,8 @@ build.json @home-assistant/supervisor /tests/components/date/ 
@home-assistant/core /homeassistant/components/datetime/ @home-assistant/core /tests/components/datetime/ @home-assistant/core +/homeassistant/components/deako/ @sebirdman @balake @deakolights +/tests/components/deako/ @sebirdman @balake @deakolights /homeassistant/components/debugpy/ @frenck /tests/components/debugpy/ @frenck /homeassistant/components/deconz/ @Kane610 diff --git a/homeassistant/components/deako/__init__.py b/homeassistant/components/deako/__init__.py new file mode 100644 index 00000000000..fdcf09fad60 --- /dev/null +++ b/homeassistant/components/deako/__init__.py @@ -0,0 +1,59 @@ +"""The deako integration.""" + +from __future__ import annotations + +import logging + +from pydeako.deako import Deako, DeviceListTimeout, FindDevicesTimeout +from pydeako.discover import DeakoDiscoverer + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady + +_LOGGER: logging.Logger = logging.getLogger(__name__) + +PLATFORMS: list[Platform] = [Platform.LIGHT] + +type DeakoConfigEntry = ConfigEntry[Deako] + + +async def async_setup_entry(hass: HomeAssistant, entry: DeakoConfigEntry) -> bool: + """Set up deako.""" + _zc = await zeroconf.async_get_instance(hass) + discoverer = DeakoDiscoverer(_zc) + + connection = Deako(discoverer.get_address) + + await connection.connect() + try: + await connection.find_devices() + except DeviceListTimeout as exc: # device list never received + _LOGGER.warning("Device not responding to device list") + await connection.disconnect() + raise ConfigEntryNotReady(exc) from exc + except FindDevicesTimeout as exc: # total devices expected not received + _LOGGER.warning("Device not responding to device requests") + await connection.disconnect() + raise ConfigEntryNotReady(exc) from exc + + # If deako devices are advertising on mdns, we should be able to get at least one device + devices = connection.get_devices() + if len(devices) == 0: + await connection.disconnect() + raise ConfigEntryNotReady(devices) + + entry.runtime_data = connection + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: DeakoConfigEntry) -> bool: + """Unload a config entry.""" + await entry.runtime_data.disconnect() + + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/deako/config_flow.py b/homeassistant/components/deako/config_flow.py new file mode 100644 index 00000000000..d0676fa81d9 --- /dev/null +++ b/homeassistant/components/deako/config_flow.py @@ -0,0 +1,26 @@ +"""Config flow for deako.""" + +from pydeako.discover import DeakoDiscoverer, DevicesNotFoundException + +from homeassistant.components import zeroconf +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_flow + +from .const import DOMAIN, NAME + + +async def _async_has_devices(hass: HomeAssistant) -> bool: + """Return if there are devices that can be discovered.""" + _zc = await zeroconf.async_get_instance(hass) + discoverer = DeakoDiscoverer(_zc) + + try: + await discoverer.get_address() + except DevicesNotFoundException: + return False + else: + # address exists, there's at least one device + return True + + +config_entry_flow.register_discovery_flow(DOMAIN, NAME, _async_has_devices) diff --git a/homeassistant/components/deako/const.py 
b/homeassistant/components/deako/const.py new file mode 100644 index 00000000000..f6b688b9b07 --- /dev/null +++ b/homeassistant/components/deako/const.py @@ -0,0 +1,5 @@ +"""Constants for Deako.""" + +# Base component constants +NAME = "Deako" +DOMAIN = "deako" diff --git a/homeassistant/components/deako/light.py b/homeassistant/components/deako/light.py new file mode 100644 index 00000000000..c7ff8765402 --- /dev/null +++ b/homeassistant/components/deako/light.py @@ -0,0 +1,96 @@ +"""Binary sensor platform for integration_blueprint.""" + +from typing import Any + +from pydeako.deako import Deako + +from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import DeakoConfigEntry +from .const import DOMAIN + +# Model names +MODEL_SMART = "smart" +MODEL_DIMMER = "dimmer" + + +async def async_setup_entry( + hass: HomeAssistant, + config: DeakoConfigEntry, + add_entities: AddEntitiesCallback, +) -> None: + """Configure the platform.""" + client = config.runtime_data + + add_entities([DeakoLightEntity(client, uuid) for uuid in client.get_devices()]) + + +class DeakoLightEntity(LightEntity): + """Deako LightEntity class.""" + + _attr_has_entity_name = True + _attr_name = None + _attr_is_on = False + _attr_available = True + + client: Deako + + def __init__(self, client: Deako, uuid: str) -> None: + """Save connection reference.""" + self.client = client + self._attr_unique_id = uuid + + dimmable = client.is_dimmable(uuid) + + model = MODEL_SMART + self._attr_color_mode = ColorMode.ONOFF + if dimmable: + model = MODEL_DIMMER + self._attr_color_mode = ColorMode.BRIGHTNESS + + self._attr_supported_color_modes = {self._attr_color_mode} + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, uuid)}, + name=client.get_name(uuid), + manufacturer="Deako", + model=model, + ) + + client.set_state_callback(uuid, self.on_update) + self.update() # set initial state + + def on_update(self) -> None: + """State update callback.""" + self.update() + self.schedule_update_ha_state() + + async def control_device(self, power: bool, dim: int | None = None) -> None: + """Control entity state via client.""" + assert self._attr_unique_id is not None + await self.client.control_device(self._attr_unique_id, power, dim) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the light.""" + dim = None + if ATTR_BRIGHTNESS in kwargs: + dim = round(kwargs[ATTR_BRIGHTNESS] / 2.55, 0) + await self.control_device(True, dim) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the device.""" + await self.control_device(False) + + def update(self) -> None: + """Call to update state.""" + assert self._attr_unique_id is not None + state = self.client.get_state(self._attr_unique_id) or {} + self._attr_is_on = bool(state.get("power", False)) + if ( + self._attr_supported_color_modes is not None + and ColorMode.BRIGHTNESS in self._attr_supported_color_modes + ): + self._attr_brightness = int(round(state.get("dim", 0) * 2.55)) diff --git a/homeassistant/components/deako/manifest.json b/homeassistant/components/deako/manifest.json new file mode 100644 index 00000000000..e8f6f235107 --- /dev/null +++ b/homeassistant/components/deako/manifest.json @@ -0,0 +1,13 @@ +{ + "domain": "deako", + "name": "Deako", + "codeowners": ["@sebirdman", "@balake", "@deakolights"], + "config_flow": 
true, + "dependencies": ["zeroconf"], + "documentation": "https://www.home-assistant.io/integrations/deako", + "iot_class": "local_polling", + "loggers": ["pydeako"], + "requirements": ["pydeako==0.4.0"], + "single_config_entry": true, + "zeroconf": ["_deako._tcp.local."] +} diff --git a/homeassistant/components/deako/strings.json b/homeassistant/components/deako/strings.json new file mode 100644 index 00000000000..6bb292d74a9 --- /dev/null +++ b/homeassistant/components/deako/strings.json @@ -0,0 +1,13 @@ +{ + "config": { + "step": { + "confirm": { + "description": "Please confirm setting up the Deako integration" + } + }, + "abort": { + "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]", + "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]" + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index afa906fd371..ee6658a2515 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -114,6 +114,7 @@ FLOWS = { "cpuspeed", "crownstone", "daikin", + "deako", "deconz", "deluge", "denonavr", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index e204170a06f..aad03b25390 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1091,6 +1091,13 @@ "config_flow": false, "iot_class": "local_polling" }, + "deako": { + "name": "Deako", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling", + "single_config_entry": true + }, "debugpy": { "name": "Remote Python Debugger", "integration_type": "service", diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 389a4435910..3d5b0b4cfa1 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -423,6 +423,11 @@ ZEROCONF = { "domain": "forked_daapd", }, ], + "_deako._tcp.local.": [ + { + "domain": "deako", + }, + ], "_devialet-http._tcp.local.": [ { "domain": "devialet", diff --git a/mypy.ini b/mypy.ini index a312a77122f..c7a31d7354c 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1145,6 +1145,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.deako.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.deconz.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 1660c94ac11..c8c7412abe2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1803,6 +1803,9 @@ pydaikin==2.13.4 # homeassistant.components.danfoss_air pydanfossair==0.1.0 +# homeassistant.components.deako +pydeako==0.4.0 + # homeassistant.components.deconz pydeconz==116 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e4cda97ea71..871957558da 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1447,6 +1447,9 @@ pycsspeechtts==1.0.8 # homeassistant.components.daikin pydaikin==2.13.4 +# homeassistant.components.deako +pydeako==0.4.0 + # homeassistant.components.deconz pydeconz==116 diff --git a/tests/components/deako/__init__.py b/tests/components/deako/__init__.py new file mode 100644 index 00000000000..248a389f2e6 --- /dev/null +++ 
b/tests/components/deako/__init__.py @@ -0,0 +1 @@ +"""Tests for the Deako integration.""" diff --git a/tests/components/deako/conftest.py b/tests/components/deako/conftest.py new file mode 100644 index 00000000000..659634b8784 --- /dev/null +++ b/tests/components/deako/conftest.py @@ -0,0 +1,45 @@ +"""deako session fixtures.""" + +from collections.abc import Generator +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.components.deako.const import DOMAIN + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + ) + + +@pytest.fixture(autouse=True) +def pydeako_deako_mock() -> Generator[MagicMock]: + """Mock pydeako deako client.""" + with patch("homeassistant.components.deako.Deako", autospec=True) as mock: + yield mock + + +@pytest.fixture(autouse=True) +def pydeako_discoverer_mock(mock_async_zeroconf: MagicMock) -> Generator[MagicMock]: + """Mock pydeako discovery client.""" + with ( + patch("homeassistant.components.deako.DeakoDiscoverer", autospec=True) as mock, + patch("homeassistant.components.deako.config_flow.DeakoDiscoverer", new=mock), + ): + yield mock + + +@pytest.fixture +def mock_deako_setup() -> Generator[MagicMock]: + """Mock async_setup_entry for config flow tests.""" + with patch( + "homeassistant.components.deako.async_setup_entry", + return_value=True, + ) as mock_setup: + yield mock_setup diff --git a/tests/components/deako/snapshots/test_light.ambr b/tests/components/deako/snapshots/test_light.ambr new file mode 100644 index 00000000000..7bc170654e1 --- /dev/null +++ b/tests/components/deako/snapshots/test_light.ambr @@ -0,0 +1,168 @@ +# serializer version: 1 +# name: test_dimmable_light_props[light.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.kitchen', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deako', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uuid', + 'unit_of_measurement': None, + }) +# --- +# name: test_dimmable_light_props[light.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 127, + 'color_mode': , + 'friendly_name': 'kitchen', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_light_initial_props[light.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.kitchen', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 
'deako', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uuid', + 'unit_of_measurement': None, + }) +# --- +# name: test_light_initial_props[light.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'kitchen', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_light_setup_with_device[light.some_device-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.some_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'deako', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'some_device', + 'unit_of_measurement': None, + }) +# --- +# name: test_light_setup_with_device[light.some_device-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 1, + 'color_mode': , + 'friendly_name': 'some device', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.some_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/deako/test_config_flow.py b/tests/components/deako/test_config_flow.py new file mode 100644 index 00000000000..21b10eaaa36 --- /dev/null +++ b/tests/components/deako/test_config_flow.py @@ -0,0 +1,80 @@ +"""Tests for the deako component config flow.""" + +from unittest.mock import MagicMock + +from pydeako.discover import DevicesNotFoundException + +from homeassistant.components.deako.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_found( + hass: HomeAssistant, + pydeako_discoverer_mock: MagicMock, + mock_deako_setup: MagicMock, +) -> None: + """Test finding a Deako device.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + # Confirmation form + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + pydeako_discoverer_mock.return_value.get_address.assert_called_once() + + mock_deako_setup.assert_called_once() + + +async def test_not_found( + hass: HomeAssistant, + pydeako_discoverer_mock: MagicMock, + mock_deako_setup: MagicMock, +) -> None: + """Test not finding any Deako devices.""" + pydeako_discoverer_mock.return_value.get_address.side_effect = ( + DevicesNotFoundException() + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + # Confirmation form + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + await 
hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "no_devices_found" + pydeako_discoverer_mock.return_value.get_address.assert_called_once() + + mock_deako_setup.assert_not_called() + + +async def test_already_configured( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_deako_setup: MagicMock, +) -> None: + """Test flow aborts when already configured.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "single_instance_allowed" + + mock_deako_setup.assert_not_called() diff --git a/tests/components/deako/test_init.py b/tests/components/deako/test_init.py new file mode 100644 index 00000000000..b4c0e8bb1f7 --- /dev/null +++ b/tests/components/deako/test_init.py @@ -0,0 +1,87 @@ +"""Tests for the deako component init.""" + +from unittest.mock import MagicMock + +from pydeako.deako import DeviceListTimeout, FindDevicesTimeout + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_deako_async_setup_entry( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + pydeako_discoverer_mock: MagicMock, +) -> None: + """Test successful setup entry.""" + pydeako_deako_mock.return_value.get_devices.return_value = { + "id1": {}, + "id2": {}, + } + + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + pydeako_deako_mock.assert_called_once_with( + pydeako_discoverer_mock.return_value.get_address + ) + pydeako_deako_mock.return_value.connect.assert_called_once() + pydeako_deako_mock.return_value.find_devices.assert_called_once() + pydeako_deako_mock.return_value.get_devices.assert_called() + + assert mock_config_entry.runtime_data == pydeako_deako_mock.return_value + + +async def test_deako_async_setup_entry_device_list_timeout( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + pydeako_discoverer_mock: MagicMock, +) -> None: + """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises DeviceListTimeout.""" + + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.find_devices.side_effect = DeviceListTimeout() + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + pydeako_deako_mock.assert_called_once_with( + pydeako_discoverer_mock.return_value.get_address + ) + pydeako_deako_mock.return_value.connect.assert_called_once() + pydeako_deako_mock.return_value.find_devices.assert_called_once() + pydeako_deako_mock.return_value.disconnect.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_deako_async_setup_entry_find_devices_timeout( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + pydeako_discoverer_mock: MagicMock, +) -> None: + """Test async_setup_entry raises ConfigEntryNotReady when pydeako raises FindDevicesTimeout.""" + + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.find_devices.side_effect = FindDevicesTimeout() + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + 
pydeako_deako_mock.assert_called_once_with( + pydeako_discoverer_mock.return_value.get_address + ) + pydeako_deako_mock.return_value.connect.assert_called_once() + pydeako_deako_mock.return_value.find_devices.assert_called_once() + pydeako_deako_mock.return_value.disconnect.assert_called_once() + + assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/deako/test_light.py b/tests/components/deako/test_light.py new file mode 100644 index 00000000000..b969c7f71cb --- /dev/null +++ b/tests/components/deako/test_light.py @@ -0,0 +1,192 @@ +"""Tests for the light module.""" + +from unittest.mock import MagicMock + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_light_setup_with_device( + hass: HomeAssistant, + pydeako_deako_mock: MagicMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test light platform setup with device returned.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "some_device": {}, + } + pydeako_deako_mock.return_value.get_name.return_value = "some device" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_light_initial_props( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test on/off light is setup with accurate initial properties.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + pydeako_deako_mock.return_value.get_state.return_value = { + "power": False, + } + pydeako_deako_mock.return_value.is_dimmable.return_value = False + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_dimmable_light_props( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test dimmable on/off light is setup with accurate initial properties.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + pydeako_deako_mock.return_value.get_state.return_value = { + "power": True, + "dim": 50, + } + pydeako_deako_mock.return_value.is_dimmable.return_value = True + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_light_power_change_on( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + 
pydeako_deako_mock: MagicMock, +) -> None: + """Test turing on a deako device.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.kitchen"}, + blocking=True, + ) + + pydeako_deako_mock.return_value.control_device.assert_called_once_with( + "uuid", True, None + ) + + +async def test_light_power_change_off( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, +) -> None: + """Test turing off a deako device.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.kitchen"}, + blocking=True, + ) + + pydeako_deako_mock.return_value.control_device.assert_called_once_with( + "uuid", False, None + ) + + +@pytest.mark.parametrize( + ("dim_input", "expected_dim_value"), + [ + (3, 1), + (255, 100), + (127, 50), + ], +) +async def test_light_brightness_change( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + pydeako_deako_mock: MagicMock, + dim_input: int, + expected_dim_value: int, +) -> None: + """Test turing on a deako device.""" + mock_config_entry.add_to_hass(hass) + + pydeako_deako_mock.return_value.get_devices.return_value = { + "uuid": { + "name": "kitchen", + } + } + pydeako_deako_mock.return_value.get_name.return_value = "kitchen" + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: "light.kitchen", + ATTR_BRIGHTNESS: dim_input, + }, + blocking=True, + ) + + pydeako_deako_mock.return_value.control_device.assert_called_once_with( + "uuid", True, expected_dim_value + ) From af8131e68f95ebb33fdb9f0dbc826143fe323cab Mon Sep 17 00:00:00 2001 From: Fredrik Erlandsson Date: Wed, 28 Aug 2024 19:19:04 +0200 Subject: [PATCH 1192/1635] Bump pydaikin to 2.13.5 (#124802) bump pydaikin version --- homeassistant/components/daikin/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index 0d93c0e25ad..c395ee35cad 100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.4"], + "requirements": ["pydaikin==2.13.5"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index c8c7412abe2..0a9fb74c4dc 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1798,7 +1798,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.13.4 +pydaikin==2.13.5 # homeassistant.components.danfoss_air pydanfossair==0.1.0 
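The Deako light tests above also pin down the brightness mapping used by light.py: Home Assistant's 0-255 brightness is divided by 2.55 before being handed to pydeako's control_device, and the device's 0-100 dim level is multiplied by 2.55 when state is read back. A small standalone check of that arithmetic follows; the helper names are invented for illustration and are not part of the integration.

def ha_to_deako(brightness: int) -> int:
    """Convert Home Assistant brightness (0-255) to a Deako dim level (0-100)."""
    return round(brightness / 2.55)


def deako_to_ha(dim: int) -> int:
    """Convert a Deako dim level (0-100) back to Home Assistant brightness."""
    return int(round(dim * 2.55))


# Mirrors the parametrized test cases above ...
assert ha_to_deako(3) == 1
assert ha_to_deako(127) == 50
assert ha_to_deako(255) == 100
# ... and the dimmable-light snapshot, where dim=50 is reported as brightness 127.
assert deako_to_ha(50) == 127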
diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 871957558da..920895f5a7b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1445,7 +1445,7 @@ pycoolmasternet-async==0.2.2 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.4 +pydaikin==2.13.5 # homeassistant.components.deako pydeako==0.4.0 From 7d61dd13d9f94a98914fdacbd3c593dc04830476 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 20:42:50 +0200 Subject: [PATCH 1193/1635] Use reauth_confirm in discovergy (#124782) --- homeassistant/components/discovergy/config_flow.py | 12 ++++++++++-- homeassistant/components/discovergy/strings.json | 2 +- tests/components/discovergy/test_config_flow.py | 12 +++--------- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/discovergy/config_flow.py b/homeassistant/components/discovergy/config_flow.py index 5e17f0764b7..47a78ff4308 100644 --- a/homeassistant/components/discovergy/config_flow.py +++ b/homeassistant/components/discovergy/config_flow.py @@ -70,8 +70,16 @@ class DiscovergyConfigFlow(ConfigFlow, domain=DOMAIN): self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle the initial step.""" - self._existing_entry = await self.async_set_unique_id(self.context["unique_id"]) - return await self._validate_and_save(entry_data, step_id="reauth") + self._existing_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the reauth step.""" + return await self._validate_and_save(user_input, step_id="reauth_confirm") async def _validate_and_save( self, user_input: Mapping[str, Any] | None = None, step_id: str = "user" diff --git a/homeassistant/components/discovergy/strings.json b/homeassistant/components/discovergy/strings.json index 34c21bc1cfe..9a91fa92dc4 100644 --- a/homeassistant/components/discovergy/strings.json +++ b/homeassistant/components/discovergy/strings.json @@ -7,7 +7,7 @@ "password": "[%key:common::config_flow::data::password%]" } }, - "reauth": { + "reauth_confirm": { "data": { "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" diff --git a/tests/components/discovergy/test_config_flow.py b/tests/components/discovergy/test_config_flow.py index 2464ba3846f..470ef65fccd 100644 --- a/tests/components/discovergy/test_config_flow.py +++ b/tests/components/discovergy/test_config_flow.py @@ -6,7 +6,7 @@ from pydiscovergy.error import DiscovergyClientError, HTTPError, InvalidLogin import pytest from homeassistant.components.discovergy.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -49,15 +49,9 @@ async def test_reauth( ) -> None: """Test reauth flow.""" config_entry.add_to_hass(hass) - - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data=None, - ) - + init_result = await config_entry.start_reauth_flow(hass) assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth" + assert 
init_result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.discovergy.async_setup_entry", From 2900fa733d97ac649ce2da8551cefbe88d3ac190 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 28 Aug 2024 20:43:11 +0200 Subject: [PATCH 1194/1635] Use reauth_confirm in co2signal (#124781) --- homeassistant/components/co2signal/config_flow.py | 11 +++++++++-- homeassistant/components/co2signal/strings.json | 2 +- tests/components/co2signal/test_config_flow.py | 11 ++--------- tests/components/co2signal/test_sensor.py | 2 +- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/co2signal/config_flow.py b/homeassistant/components/co2signal/config_flow.py index bf5d645638f..3313d01be85 100644 --- a/homeassistant/components/co2signal/config_flow.py +++ b/homeassistant/components/co2signal/config_flow.py @@ -131,16 +131,23 @@ class ElectricityMapsConfigFlow(ConfigFlow, domain=DOMAIN): self._reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] ) + return await self.async_step_reauth_confirm() + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the reauth step.""" data_schema = vol.Schema( { vol.Required(CONF_API_KEY): cv.string, } ) - return await self._validate_and_create("reauth", data_schema, entry_data) + return await self._validate_and_create( + "reauth_confirm", data_schema, user_input + ) async def _validate_and_create( - self, step_id: str, data_schema: vol.Schema, data: Mapping[str, Any] + self, step_id: str, data_schema: vol.Schema, data: Mapping[str, Any] | None ) -> ConfigFlowResult: """Validate data and show form if it is invalid.""" errors: dict[str, str] = {} diff --git a/homeassistant/components/co2signal/strings.json b/homeassistant/components/co2signal/strings.json index 7444cde73d7..a4ec916bd42 100644 --- a/homeassistant/components/co2signal/strings.json +++ b/homeassistant/components/co2signal/strings.json @@ -19,7 +19,7 @@ "country_code": "Country code" } }, - "reauth": { + "reauth_confirm": { "data": { "api_key": "[%key:common::config_flow::data::access_token%]" } diff --git a/tests/components/co2signal/test_config_flow.py b/tests/components/co2signal/test_config_flow.py index 7397b6e2355..ad61ae4f897 100644 --- a/tests/components/co2signal/test_config_flow.py +++ b/tests/components/co2signal/test_config_flow.py @@ -198,17 +198,10 @@ async def test_reauth( """Test reauth flow.""" config_entry.add_to_hass(hass) - init_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=None, - ) + init_result = await config_entry.start_reauth_flow(hass) assert init_result["type"] is FlowResultType.FORM - assert init_result["step_id"] == "reauth" + assert init_result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.co2signal.async_setup_entry", diff --git a/tests/components/co2signal/test_sensor.py b/tests/components/co2signal/test_sensor.py index e9f46e483d1..fddda17f3ed 100644 --- a/tests/components/co2signal/test_sensor.py +++ b/tests/components/co2signal/test_sensor.py @@ -109,4 +109,4 @@ async def test_sensor_reauth_triggered( assert (flows := hass.config_entries.flow.async_progress()) assert len(flows) == 1 - assert flows[0]["step_id"] == "reauth" + assert flows[0]["step_id"] == "reauth_confirm" From 70488ffd15a7fa10b2348c4843adc6e914e2233c Mon Sep 17 00:00:00 2001 
From: "J. Nick Koston" Date: Wed, 28 Aug 2024 09:00:52 -1000 Subject: [PATCH 1195/1635] Address yale review comments (#124810) Co-authored-by: Joost Lekkerkerker --- homeassistant/components/yale/__init__.py | 2 +- .../components/yale/binary_sensor.py | 11 +- homeassistant/components/yale/button.py | 4 +- homeassistant/components/yale/camera.py | 4 +- homeassistant/components/yale/config_flow.py | 5 +- homeassistant/components/yale/const.py | 4 - homeassistant/components/yale/diagnostics.py | 5 +- homeassistant/components/yale/entity.py | 4 +- homeassistant/components/yale/event.py | 28 +- homeassistant/components/yale/lock.py | 4 +- homeassistant/components/yale/manifest.json | 2 +- homeassistant/components/yale/sensor.py | 21 +- homeassistant/components/yale/util.py | 7 +- tests/components/yale/__init__.py | 11 - .../yale/snapshots/test_binary_sensor.ambr | 33 +++ .../yale/snapshots/test_diagnostics.ambr | 2 +- tests/components/yale/test_binary_sensor.py | 252 ++++++------------ tests/components/yale/test_config_flow.py | 76 +++++- tests/components/yale/test_event.py | 44 ++- tests/components/yale/test_init.py | 31 ++- 20 files changed, 267 insertions(+), 283 deletions(-) create mode 100644 tests/components/yale/snapshots/test_binary_sensor.ambr diff --git a/homeassistant/components/yale/__init__.py b/homeassistant/components/yale/__init__.py index f7a4a6e0f4d..1cbd9c87b57 100644 --- a/homeassistant/components/yale/__init__.py +++ b/homeassistant/components/yale/__init__.py @@ -26,7 +26,7 @@ from .util import async_create_yale_clientsession type YaleConfigEntry = ConfigEntry[YaleData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: """Set up yale from a config entry.""" session = async_create_yale_clientsession(hass) implementation = ( diff --git a/homeassistant/components/yale/binary_sensor.py b/homeassistant/components/yale/binary_sensor.py index cbc0b48b177..dbb00ad7d42 100644 --- a/homeassistant/components/yale/binary_sensor.py +++ b/homeassistant/components/yale/binary_sensor.py @@ -109,12 +109,11 @@ async def async_setup_entry( for description in SENSOR_TYPES_DOORBELL ) - for doorbell in data.doorbells: - entities.extend( - YaleDoorbellBinarySensor(data, doorbell, description) - for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL - ) - + entities.extend( + YaleDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) diff --git a/homeassistant/components/yale/button.py b/homeassistant/components/yale/button.py index de0cff4f0c8..b04ad638f0c 100644 --- a/homeassistant/components/yale/button.py +++ b/homeassistant/components/yale/button.py @@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import YaleConfigEntry -from .entity import YaleEntityMixin +from .entity import YaleEntity async def async_setup_entry( @@ -18,7 +18,7 @@ async def async_setup_entry( async_add_entities(YaleWakeLockButton(data, lock, "wake") for lock in data.locks) -class YaleWakeLockButton(YaleEntityMixin, ButtonEntity): +class YaleWakeLockButton(YaleEntity, ButtonEntity): """Representation of an Yale lock wake button.""" _attr_translation_key = "wake" diff --git a/homeassistant/components/yale/camera.py b/homeassistant/components/yale/camera.py index 500239d7f3a..217e8f5f6fd 100644 --- a/homeassistant/components/yale/camera.py +++ b/homeassistant/components/yale/camera.py @@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import YaleConfigEntry, YaleData from .const import DEFAULT_NAME, DEFAULT_TIMEOUT -from .entity import YaleEntityMixin +from .entity import YaleEntity _LOGGER = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def async_setup_entry( ) -class YaleCamera(YaleEntityMixin, Camera): +class YaleCamera(YaleEntity, Camera): """An implementation of an Yale security camera.""" _attr_translation_key = "camera" diff --git a/homeassistant/components/yale/config_flow.py b/homeassistant/components/yale/config_flow.py index cdd44754103..6cbc9543ea4 100644 --- a/homeassistant/components/yale/config_flow.py +++ b/homeassistant/components/yale/config_flow.py @@ -26,7 +26,9 @@ class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain= """Return logger.""" return _LOGGER - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self.reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] @@ -54,4 +56,5 @@ class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain= return self.async_abort(reason="reauth_invalid_user") return self.async_update_reload_and_abort(entry, data=data) await self.async_set_unique_id(user_id) + self._abort_if_unique_id_configured() return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/yale/const.py b/homeassistant/components/yale/const.py index 630d15f7230..3da4fb1dfb4 100644 --- a/homeassistant/components/yale/const.py +++ b/homeassistant/components/yale/const.py @@ -1,7 +1,5 @@ """Constants for Yale devices.""" -from yalexs.const import Brand - from homeassistant.const import Platform DEFAULT_TIMEOUT = 25 @@ -13,8 +11,6 @@ CONF_INSTALL_ID = "install_id" VERIFICATION_CODE_KEY = "verification_code" -DEFAULT_BRAND = Brand.YALE_HOME - MANUFACTURER = "Yale Home Inc." DEFAULT_NAME = "Yale" diff --git a/homeassistant/components/yale/diagnostics.py b/homeassistant/components/yale/diagnostics.py index ef8d837b82e..7e7f6179e7a 100644 --- a/homeassistant/components/yale/diagnostics.py +++ b/homeassistant/components/yale/diagnostics.py @@ -4,11 +4,12 @@ from __future__ import annotations from typing import Any +from yalexs.const import Brand + from homeassistant.components.diagnostics import async_redact_data from homeassistant.core import HomeAssistant from . 
import YaleConfigEntry -from .const import CONF_BRAND, DEFAULT_BRAND TO_REDACT = { "HouseID", @@ -45,5 +46,5 @@ async def async_get_config_entry_diagnostics( ) for doorbell in data.doorbells }, - "brand": entry.data.get(CONF_BRAND, DEFAULT_BRAND), + "brand": Brand.YALE_GLOBAL.value, } diff --git a/homeassistant/components/yale/entity.py b/homeassistant/components/yale/entity.py index 7105fda861c..152070c0be3 100644 --- a/homeassistant/components/yale/entity.py +++ b/homeassistant/components/yale/entity.py @@ -20,7 +20,7 @@ from .const import MANUFACTURER DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] -class YaleEntityMixin(Entity): +class YaleEntity(Entity): """Base implementation for Yale device.""" _attr_should_poll = False @@ -87,7 +87,7 @@ class YaleEntityMixin(Entity): self._update_from_data() -class YaleDescriptionEntity(YaleEntityMixin): +class YaleDescriptionEntity(YaleEntity): """An Yale entity with a description.""" def __init__( diff --git a/homeassistant/components/yale/event.py b/homeassistant/components/yale/event.py index 7014c5dafbf..935ba7376f8 100644 --- a/homeassistant/components/yale/event.py +++ b/homeassistant/components/yale/event.py @@ -63,22 +63,17 @@ async def async_setup_entry( ) -> None: """Set up the yale event platform.""" data = config_entry.runtime_data - entities: list[YaleEventEntity] = [] - - for lock in data.locks: - detail = data.get_device_detail(lock.device_id) - if detail.doorbell: - entities.extend( - YaleEventEntity(data, lock, description) - for description in TYPES_DOORBELL - ) - - for doorbell in data.doorbells: - entities.extend( - YaleEventEntity(data, doorbell, description) - for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL - ) - + entities: list[YaleEventEntity] = [ + YaleEventEntity(data, lock, description) + for description in TYPES_DOORBELL + for lock in data.locks + if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell + ] + entities.extend( + YaleEventEntity(data, doorbell, description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) @@ -86,7 +81,6 @@ class YaleEventEntity(YaleDescriptionEntity, EventEntity): """An yale event entity.""" entity_description: YaleEventEntityDescription - _attr_has_entity_name = True _last_activity: Activity | None = None @callback diff --git a/homeassistant/components/yale/lock.py b/homeassistant/components/yale/lock.py index 36d865bf527..b911c92ba0f 100644 --- a/homeassistant/components/yale/lock.py +++ b/homeassistant/components/yale/lock.py @@ -19,7 +19,7 @@ from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.util.dt as dt_util from . 
import YaleConfigEntry, YaleData -from .entity import YaleEntityMixin +from .entity import YaleEntity _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,7 @@ async def async_setup_entry( async_add_entities(YaleLock(data, lock) for lock in data.locks) -class YaleLock(YaleEntityMixin, RestoreEntity, LockEntity): +class YaleLock(YaleEntity, RestoreEntity, LockEntity): """Representation of an Yale lock.""" _attr_name = None diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 2dc84758610..d6da9ba3993 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -11,6 +11,6 @@ ], "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", - "loggers": ["pubnub", "yalexs"], + "loggers": ["socketio", "engineio", "yalexs"], "requirements": ["yalexs==8.5.4", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yale/sensor.py b/homeassistant/components/yale/sensor.py index f1931c112cb..bb3d4317277 100644 --- a/homeassistant/components/yale/sensor.py +++ b/homeassistant/components/yale/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar, cast +from typing import Any, cast from yalexs.activity import ActivityType, LockOperationActivity from yalexs.doorbell import Doorbell @@ -42,7 +42,7 @@ from .const import ( OPERATION_METHOD_REMOTE, OPERATION_METHOD_TAG, ) -from .entity import YaleDescriptionEntity, YaleEntityMixin +from .entity import YaleDescriptionEntity, YaleEntity def _retrieve_device_battery_state(detail: LockDetail) -> int: @@ -55,14 +55,13 @@ def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: return detail.battery_percentage -_T = TypeVar("_T", LockDetail, KeypadDetail) - - @dataclass(frozen=True, kw_only=True) -class YaleSensorEntityDescription(SensorEntityDescription, Generic[_T]): +class YaleSensorEntityDescription[T: LockDetail | KeypadDetail]( + SensorEntityDescription +): """Mixin for required keys.""" - value_fn: Callable[[_T], int | None] + value_fn: Callable[[T], int | None] SENSOR_TYPE_DEVICE_BATTERY = YaleSensorEntityDescription[LockDetail]( @@ -112,7 +111,7 @@ async def async_setup_entry( async_add_entities(entities) -class YaleOperatorSensor(YaleEntityMixin, RestoreSensor): +class YaleOperatorSensor(YaleEntity, RestoreSensor): """Representation of an Yale lock operation sensor.""" _attr_translation_key = "operator" @@ -196,10 +195,12 @@ class YaleOperatorSensor(YaleEntityMixin, RestoreSensor): self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] -class YaleBatterySensor(YaleDescriptionEntity, SensorEntity, Generic[_T]): +class YaleBatterySensor[T: LockDetail | KeypadDetail]( + YaleDescriptionEntity, SensorEntity +): """Representation of an Yale sensor.""" - entity_description: YaleSensorEntityDescription[_T] + entity_description: YaleSensorEntityDescription[T] _attr_device_class = SensorDeviceClass.BATTERY _attr_native_unit_of_measurement = PERCENTAGE diff --git a/homeassistant/components/yale/util.py b/homeassistant/components/yale/util.py index d8bdaab4a66..3462c576fd9 100644 --- a/homeassistant/components/yale/util.py +++ b/homeassistant/components/yale/util.py @@ -63,16 +63,11 @@ def _activity_time_based(latest: Activity) -> Activity | None: """Get the latest state of the sensor.""" start = latest.activity_start_time end = latest.activity_end_time + 
TIME_TO_DECLARE_DETECTION - if start <= _native_datetime() <= end: + if start <= datetime.now() <= end: return latest return None -def _native_datetime() -> datetime: - """Return time in the format yale uses without timezone.""" - return datetime.now() - - def retrieve_online_state(data: YaleData, detail: DoorbellDetail | LockDetail) -> bool: """Get the latest state of the sensor.""" # The doorbell will go into standby mode when there is no motion diff --git a/tests/components/yale/__init__.py b/tests/components/yale/__init__.py index f0604940686..7f72d348042 100644 --- a/tests/components/yale/__init__.py +++ b/tests/components/yale/__init__.py @@ -1,12 +1 @@ """Tests for the yale component.""" - -MOCK_CONFIG_ENTRY_DATA = { - "auth_implementation": "cloud", - "token": { - "access_token": "access_token", - "expires_in": 1, - "refresh_token": "refresh_token", - "expires_at": 2, - "service": "yale", - }, -} diff --git a/tests/components/yale/snapshots/test_binary_sensor.ambr b/tests/components/yale/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..e294cb7c76c --- /dev/null +++ b/tests/components/yale/snapshots/test_binary_sensor.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_doorbell_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'tmt100_name', + 'config_entries': , + 'configuration_url': 'https://account.aaecosystem.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'yale', + 'tmt100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Yale Home Inc.', + 'model': 'hydra1', + 'model_id': None, + 'name': 'tmt100 Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'tmt100 Name', + 'sw_version': '3.1.0-HYDRC75+201909251139', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/yale/snapshots/test_diagnostics.ambr b/tests/components/yale/snapshots/test_diagnostics.ambr index fd31bc0ec91..c3d8d8e2aaa 100644 --- a/tests/components/yale/snapshots/test_diagnostics.ambr +++ b/tests/components/yale/snapshots/test_diagnostics.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'brand': 'yale_home', + 'brand': 'yale_global', 'doorbells': dict({ 'K98GiDT45GUL': dict({ 'HouseID': '**REDACTED**', diff --git a/tests/components/yale/test_binary_sensor.py b/tests/components/yale/test_binary_sensor.py index ad4d4155e5b..811c845e359 100644 --- a/tests/components/yale/test_binary_sensor.py +++ b/tests/components/yale/test_binary_sensor.py @@ -1,7 +1,9 @@ """The binary_sensor tests for the yale platform.""" import datetime -from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.const import ( @@ -33,28 +35,19 @@ async def test_doorsense(hass: HomeAssistant) -> None: hass, "get_lock.online_with_doorsense.json" ) await _create_yale_with_devices(hass, [lock_one]) - - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + states = hass.states + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await 
hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -66,112 +59,78 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: hass, "get_activity.bridge_offline.json" ) await _create_yale_with_devices(hass, [lock_one], activities=activities) - - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + states = hass.states + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state + == STATE_UNAVAILABLE ) - assert binary_sensor_online_with_doorsense_name.state == STATE_UNAVAILABLE async def test_create_doorbell(hass: HomeAssistant) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_yale_with_devices(hass, [doorbell_one]) - - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF async def test_create_doorbell_offline(hass: HomeAssistant) -> None: """Test creation of a doorbell that is offline.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_yale_with_devices(hass, [doorbell_one]) - - binary_sensor_tmt100_name_motion = hass.states.get( - "binary_sensor.tmt100_name_motion" + states = hass.states + assert 
states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE + assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF + assert ( + states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE ) - assert binary_sensor_tmt100_name_motion.state == STATE_UNAVAILABLE - binary_sensor_tmt100_name_online = hass.states.get( - "binary_sensor.tmt100_name_connectivity" - ) - assert binary_sensor_tmt100_name_online.state == STATE_OFF - binary_sensor_tmt100_name_ding = hass.states.get( - "binary_sensor.tmt100_name_doorbell_ding" - ) - assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( hass, "get_activity.doorbell_motion.json" ) await _create_yale_with_devices(hass, [doorbell_one], activities=activities) - - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" - ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.yale.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF -async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_socketio( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via socketio.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert 
binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF
     listener = list(socketio._listeners)[0]
     listener(
@@ -192,10 +151,7 @@ async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None:
     await hass.async_block_till_done()
-    binary_sensor_k98gidt45gul_name_image_capture = hass.states.get(
-        "binary_sensor.k98gidt45gul_name_image_capture"
-    )
-    assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_ON
+    assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON
     listener(
         doorbell_one.device_id,
@@ -226,29 +182,18 @@ async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None:
     await hass.async_block_till_done()
-    binary_sensor_k98gidt45gul_name_motion = hass.states.get(
-        "binary_sensor.k98gidt45gul_name_motion"
+    assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON
+    assert (
+        states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF
     )
-    assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON
-    binary_sensor_k98gidt45gul_name_ding = hass.states.get(
-        "binary_sensor.k98gidt45gul_name_doorbell_ding"
+    freezer.tick(40)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert (
+        states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF
     )
-    assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF
-
-    new_time = dt_util.utcnow() + datetime.timedelta(seconds=40)
-    native_time = datetime.datetime.now() + datetime.timedelta(seconds=40)
-    with patch(
-        "homeassistant.components.yale.util._native_datetime",
-        return_value=native_time,
-    ):
-        async_fire_time_changed(hass, new_time)
-        await hass.async_block_till_done()
-
-    binary_sensor_k98gidt45gul_name_image_capture = hass.states.get(
-        "binary_sensor.k98gidt45gul_name_image_capture"
-    )
-    assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF
     listener(
         doorbell_one.device_id,
@@ -260,37 +205,28 @@ async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None:
     await hass.async_block_till_done()
-    binary_sensor_k98gidt45gul_name_ding = hass.states.get(
-        "binary_sensor.k98gidt45gul_name_doorbell_ding"
-    )
-    assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON
-    new_time = dt_util.utcnow() + datetime.timedelta(seconds=40)
-    native_time = datetime.datetime.now() + datetime.timedelta(seconds=40)
-    with patch(
-        "homeassistant.components.yale.util._native_datetime",
-        return_value=native_time,
-    ):
-        async_fire_time_changed(hass, new_time)
-        await hass.async_block_till_done()
+    assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON
-    binary_sensor_k98gidt45gul_name_ding = hass.states.get(
-        "binary_sensor.k98gidt45gul_name_doorbell_ding"
+    freezer.tick(40)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert (
+        states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF
    )
-    assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF
 async def test_doorbell_device_registry(
-    hass: HomeAssistant, device_registry: dr.DeviceRegistry
+    hass: HomeAssistant,
+    device_registry: dr.DeviceRegistry,
+    snapshot: SnapshotAssertion,
 ) -> None:
     """Test creation of a lock with doorsense and bridge ends up in the registry."""
     doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json")
     await _create_yale_with_devices(hass, [doorbell_one])
     reg_device = device_registry.async_get_device(identifiers={("yale", "tmt100")})
-    assert reg_device.model == "hydra1"
-    assert
reg_device.name == "tmt100 Name" - assert reg_device.manufacturer == "Yale Home Inc." - assert reg_device.sw_version == "3.1.0-HYDRC75+201909251139" + assert reg_device == snapshot async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: @@ -302,11 +238,8 @@ async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: config_entry, socketio = await _create_yale_with_devices( hass, [lock_one], activities=activities ) - - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + states = hass.states + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON listener = list(socketio._listeners)[0] listener( @@ -316,10 +249,10 @@ async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF listener( lock_one.device_id, @@ -328,33 +261,22 @@ async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON socketio.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON # Ensure socketio status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON listener( lock_one.device_id, @@ -363,17 +285,11 @@ async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert 
binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() @@ -383,8 +299,10 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: """Test creation of a lock with a doorbell.""" lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") await _create_yale_with_devices(hass, [lock_one]) - - ding_sensor = hass.states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + states = hass.states + assert ( + states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + ).state + == STATE_OFF ) - assert ding_sensor.state == STATE_OFF diff --git a/tests/components/yale/test_config_flow.py b/tests/components/yale/test_config_flow.py index a62aa2d38f9..163f8240553 100644 --- a/tests/components/yale/test_config_flow.py +++ b/tests/components/yale/test_config_flow.py @@ -1,16 +1,16 @@ """Test the yale config flow.""" from collections.abc import Generator -from unittest.mock import Mock, patch +from unittest.mock import ANY, Mock, patch import pytest -from homeassistant import config_entries from homeassistant.components.yale.application_credentials import ( OAUTH2_AUTHORIZE, OAUTH2_TOKEN, ) from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -44,7 +44,7 @@ async def test_full_flow( ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) state = config_entry_oauth2_flow._encode_jwt( hass, @@ -78,13 +78,81 @@ async def test_full_flow( }, ) - await hass.config_entries.flow.async_configure(result["flow_id"]) + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert len(mock_setup_entry.mock_calls) == 1 entry = hass.config_entries.async_entries(DOMAIN)[0] assert entry.unique_id == USER_ID + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == USER_ID + assert entry.data == { + "auth_implementation": "yale", + "token": { + "access_token": jwt, + "expires_at": ANY, + "expires_in": ANY, + "refresh_token": "mock-refresh-token", + "scope": "any", + "user_id": "mock-user-id", + }, + } + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow_already_exists( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + jwt: str, + mock_setup_entry: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Check full flow for a user that already exists.""" + + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await 
hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": jwt, + "scope": "any", + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": "mock-user-id", + "expires_at": 1697753347, + }, + ) + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + @pytest.mark.usefixtures("client_credentials") @pytest.mark.usefixtures("current_request_with_host") diff --git a/tests/components/yale/test_event.py b/tests/components/yale/test_event.py index f2f205289ff..7aeb9d8f12b 100644 --- a/tests/components/yale/test_event.py +++ b/tests/components/yale/test_event.py @@ -1,7 +1,6 @@ """The event tests for the yale.""" -import datetime -from unittest.mock import patch +from freezegun.api import FrozenDateTimeFactory from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -42,7 +41,9 @@ async def test_create_doorbell_offline(hass: HomeAssistant) -> None: assert doorbell_state.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( @@ -58,19 +59,16 @@ async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: assert doorbell_state is not None assert doorbell_state.state == STATE_UNKNOWN - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.yale.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() motion_state = hass.states.get("event.k98gidt45gul_name_motion") assert motion_state.state == isotime -async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_socketio( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via socketio.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") @@ -119,14 +117,9 @@ async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None: assert motion_state.state != STATE_UNKNOWN isotime = motion_state.state - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.yale.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() motion_state = hass.states.get("event.k98gidt45gul_name_motion") assert motion_state is not None @@ -147,14 +140,9 @@ async def test_doorbell_update_via_socketio(hass: HomeAssistant) -> None: assert doorbell_state.state != STATE_UNKNOWN isotime = motion_state.state - new_time = 
dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.yale.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") assert doorbell_state is not None diff --git a/tests/components/yale/test_init.py b/tests/components/yale/test_init.py index c9cb4be5882..4f0a853710c 100644 --- a/tests/components/yale/test_init.py +++ b/tests/components/yale/test_init.py @@ -89,16 +89,15 @@ async def test_unlock_throws_yale_api_http_error(hass: HomeAssistant) -> None: "unlock_return_activities": _unlock_return_activities_side_effect }, ) - last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - try: + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - except HomeAssistantError as err: - last_err = err - assert str(last_err) == ( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ) async def test_lock_throws_yale_api_http_error(hass: HomeAssistant) -> None: @@ -119,16 +118,15 @@ async def test_lock_throws_yale_api_http_error(hass: HomeAssistant) -> None: "lock_return_activities": _lock_return_activities_side_effect }, ) - last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - try: + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - except HomeAssistantError as err: - last_err = err - assert str(last_err) == ( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ) async def test_open_throws_hass_service_not_supported_error( @@ -185,6 +183,7 @@ async def test_load_unload(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED async def test_load_triggers_ble_discovery( From 5825e8fee83ed362d3283106fae4a1d749bbb99c Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 28 Aug 2024 09:01:17 -1000 Subject: [PATCH 1196/1635] Redirect virtual integration yale_home to point to yale (#124817) --- homeassistant/components/yale_home/manifest.json | 2 +- homeassistant/generated/integrations.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/yale_home/manifest.json b/homeassistant/components/yale_home/manifest.json index 0e45b0da7d0..c497fa3fe34 100644 --- a/homeassistant/components/yale_home/manifest.json +++ b/homeassistant/components/yale_home/manifest.json @@ -2,5 +2,5 @@ "domain": "yale_home", "name": "Yale Home", "integration_type": "virtual", - "supported_by": "august" + "supported_by": "yale" } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index aad03b25390..bb81b6a5b04 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -7012,7 +7012,7 @@ "yale_home": { "integration_type": "virtual", "config_flow": false, - "supported_by": "august", + "supported_by": "yale", "name": "Yale Home" }, "yale": { From 2b20b2a80b8a007474052a00d9c0e9d9dd5b97ef Mon Sep 17 00:00:00 2001 From: Fredrik Erlandsson Date: Wed, 28 Aug 2024 21:10:49 +0200 Subject: [PATCH 1197/1635] Bump tellduslive to 0.10.12 (#124816) * Bump tellduslive version * update licenses.py too --- homeassistant/components/tellduslive/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/licenses.py | 1 - 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tellduslive/manifest.json b/homeassistant/components/tellduslive/manifest.json index 929d502971f..dc1389c15c5 100644 --- a/homeassistant/components/tellduslive/manifest.json +++ b/homeassistant/components/tellduslive/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/tellduslive", "iot_class": "cloud_polling", "quality_scale": "silver", - "requirements": ["tellduslive==0.10.11"] + "requirements": ["tellduslive==0.10.12"] } diff --git a/requirements_all.txt b/requirements_all.txt index 0a9fb74c4dc..5081fe2fd36 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2742,7 +2742,7 @@ tellcore-net==0.4 tellcore-py==1.1.2 # homeassistant.components.tellduslive -tellduslive==0.10.11 +tellduslive==0.10.12 # homeassistant.components.lg_soundbar temescal==0.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 920895f5a7b..5ec8e6cba40 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2167,7 +2167,7 @@ systembridgemodels==4.2.4 tailscale==0.6.1 # homeassistant.components.tellduslive -tellduslive==0.10.11 +tellduslive==0.10.12 # homeassistant.components.lg_soundbar temescal==0.5 diff --git a/script/licenses.py b/script/licenses.py index ac9a836396c..84797372309 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -165,7 +165,6 @@ EXCEPTIONS = { "sensirion-ble", # https://github.com/akx/sensirion-ble/pull/9 "sharp_aquos_rc", # https://github.com/jmoore987/sharp_aquos_rc/pull/14 "tapsaff", # https://github.com/bazwilliams/python-taps-aff/pull/5 - "tellduslive", # https://github.com/molobrakos/tellduslive/pull/24 "tellsticknet", # https://github.com/molobrakos/tellsticknet/pull/33 "vincenty", # Public domain "zeversolar", # https://github.com/kvanzuijlen/zeversolar/pull/46 From ada6b7875c2a7f4a54ba9dd93c70a93047d71874 Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Wed, 28 Aug 2024 21:40:57 +0100 Subject: [PATCH 1198/1635] Add 
evohome test for setup (#123129) * allow for different systems * installation is a load_json_*fixture param * allow installation to be parameterized * test setup of various systems * add more fixtures * test setup of integration * tweak test * tweak const * add expected state/services * extend setup test * tidy up * tidy up tweaks * code tweaks * refactor expected results dicts * woops * refatcor serialize * refactor test * tweak * tweak code * rename symbol * ensure actual I/O remains blocked * tweak * typo * use constants * Update conftest.py Co-authored-by: Martin Hjelmare * change filename * add config fixture * config is a fixture * config is a fixture now 2 * lint * lint * refactor * lint * lint * restore email addr * use const * use snapshots instead of helper class * doctweak * correct snapshot --------- Co-authored-by: Martin Hjelmare --- tests/components/evohome/conftest.py | 126 ++- tests/components/evohome/const.py | 9 + .../fixtures/{ => default}/schedule_dhw.json | 0 .../fixtures/{ => default}/schedule_zone.json | 0 .../{ => default}/status_2738909.json | 0 .../fixtures/{ => default}/user_account.json | 0 .../{ => default}/user_locations.json | 4 +- .../fixtures/h032585/status_111111.json | 31 + .../fixtures/h032585/temperatures.json | 3 + .../fixtures/h032585/user_locations.json | 79 ++ .../fixtures/h099625/status_111111.json | 44 + .../fixtures/h099625/user_locations.json | 113 +++ .../fixtures/minimal/status_2738909.json | 28 + .../fixtures/minimal/user_locations.json | 120 +++ .../fixtures/system_004/status_3164610.json | 33 + .../fixtures/system_004/user_locations.json | 99 ++ .../evohome/snapshots/test_init.ambr | 863 ++++++++++++++++++ tests/components/evohome/test_init.py | 25 + tests/components/evohome/test_storage.py | 30 +- 19 files changed, 1549 insertions(+), 58 deletions(-) rename tests/components/evohome/fixtures/{ => default}/schedule_dhw.json (100%) rename tests/components/evohome/fixtures/{ => default}/schedule_zone.json (100%) rename tests/components/evohome/fixtures/{ => default}/status_2738909.json (100%) rename tests/components/evohome/fixtures/{ => default}/user_account.json (100%) rename tests/components/evohome/fixtures/{ => default}/user_locations.json (99%) create mode 100644 tests/components/evohome/fixtures/h032585/status_111111.json create mode 100644 tests/components/evohome/fixtures/h032585/temperatures.json create mode 100644 tests/components/evohome/fixtures/h032585/user_locations.json create mode 100644 tests/components/evohome/fixtures/h099625/status_111111.json create mode 100644 tests/components/evohome/fixtures/h099625/user_locations.json create mode 100644 tests/components/evohome/fixtures/minimal/status_2738909.json create mode 100644 tests/components/evohome/fixtures/minimal/user_locations.json create mode 100644 tests/components/evohome/fixtures/system_004/status_3164610.json create mode 100644 tests/components/evohome/fixtures/system_004/user_locations.json create mode 100644 tests/components/evohome/snapshots/test_init.ambr create mode 100644 tests/components/evohome/test_init.py diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py index 260330896b7..82c5cd76024 100644 --- a/tests/components/evohome/conftest.py +++ b/tests/components/evohome/conftest.py @@ -2,8 +2,10 @@ from __future__ import annotations +from collections.abc import Callable from datetime import datetime, timedelta -from typing import Any, Final +from http import HTTPMethod +from typing import Any from unittest.mock import 
MagicMock, patch from aiohttp import ClientSession @@ -16,75 +18,112 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from homeassistant.util.json import JsonArrayType, JsonObjectType -from .const import ACCESS_TOKEN, REFRESH_TOKEN +from .const import ACCESS_TOKEN, REFRESH_TOKEN, USERNAME from tests.common import load_json_array_fixture, load_json_object_fixture -TEST_CONFIG: Final = { - CONF_USERNAME: "username", - CONF_PASSWORD: "password", -} - -def user_account_config_fixture() -> JsonObjectType: +def user_account_config_fixture(install: str) -> JsonObjectType: """Load JSON for the config of a user's account.""" - return load_json_object_fixture("user_account.json", DOMAIN) + try: + return load_json_object_fixture(f"{install}/user_account.json", DOMAIN) + except FileNotFoundError: + return load_json_object_fixture("default/user_account.json", DOMAIN) -def user_locations_config_fixture() -> JsonArrayType: +def user_locations_config_fixture(install: str) -> JsonArrayType: """Load JSON for the config of a user's installation (a list of locations).""" - return load_json_array_fixture("user_locations.json", DOMAIN) + return load_json_array_fixture(f"{install}/user_locations.json", DOMAIN) -def location_status_fixture(loc_id: str) -> JsonObjectType: +def location_status_fixture(install: str, loc_id: str | None = None) -> JsonObjectType: """Load JSON for the status of a specific location.""" - return load_json_object_fixture(f"status_{loc_id}.json", DOMAIN) + if loc_id is None: + _install = load_json_array_fixture(f"{install}/user_locations.json", DOMAIN) + loc_id = _install[0]["locationInfo"]["locationId"] # type: ignore[assignment, call-overload, index] + return load_json_object_fixture(f"{install}/status_{loc_id}.json", DOMAIN) -def dhw_schedule_fixture() -> JsonObjectType: +def dhw_schedule_fixture(install: str) -> JsonObjectType: """Load JSON for the schedule of a domesticHotWater zone.""" - return load_json_object_fixture("schedule_dhw.json", DOMAIN) + try: + return load_json_object_fixture(f"{install}/schedule_dhw.json", DOMAIN) + except FileNotFoundError: + return load_json_object_fixture("default/schedule_dhw.json", DOMAIN) -def zone_schedule_fixture() -> JsonObjectType: +def zone_schedule_fixture(install: str) -> JsonObjectType: """Load JSON for the schedule of a temperatureZone zone.""" - return load_json_object_fixture("schedule_zone.json", DOMAIN) + try: + return load_json_object_fixture(f"{install}/schedule_zone.json", DOMAIN) + except FileNotFoundError: + return load_json_object_fixture("default/schedule_zone.json", DOMAIN) -async def mock_get( - self: Broker, url: str, **kwargs: Any -) -> JsonArrayType | JsonObjectType: - """Return the JSON for a HTTP get of a given URL.""" +def mock_get_factory(install: str) -> Callable: + """Return a get method for a specified installation.""" - # a proxy for the behaviour of the real web API - if self.refresh_token is None: - self.refresh_token = f"new_{REFRESH_TOKEN}" + async def mock_get( + self: Broker, url: str, **kwargs: Any + ) -> JsonArrayType | JsonObjectType: + """Return the JSON for a HTTP get of a given URL.""" - if self.access_token_expires is None or self.access_token_expires < datetime.now(): - self.access_token = f"new_{ACCESS_TOKEN}" - self.access_token_expires = datetime.now() + timedelta(minutes=30) + # a proxy for the behaviour of the real web API + if self.refresh_token is None: + self.refresh_token = f"new_{REFRESH_TOKEN}" - # assume a valid GET, and return the JSON 
for that web API - if url == "userAccount": # userAccount - return user_account_config_fixture() + if ( + self.access_token_expires is None + or self.access_token_expires < datetime.now() + ): + self.access_token = f"new_{ACCESS_TOKEN}" + self.access_token_expires = datetime.now() + timedelta(minutes=30) - if url.startswith("location"): - if "installationInfo" in url: # location/installationInfo?userId={id} - return user_locations_config_fixture() - if "location" in url: # location/{id}/status - return location_status_fixture("2738909") + # assume a valid GET, and return the JSON for that web API + if url == "userAccount": # userAccount + return user_account_config_fixture(install) - elif "schedule" in url: - if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule - return dhw_schedule_fixture() - if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule - return zone_schedule_fixture() + if url.startswith("location"): + if "installationInfo" in url: # location/installationInfo?userId={id} + return user_locations_config_fixture(install) + if "location" in url: # location/{id}/status + return location_status_fixture(install) - pytest.xfail(f"Unexpected URL: {url}") + elif "schedule" in url: + if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule + return dhw_schedule_fixture(install) + if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule + return zone_schedule_fixture(install) + + pytest.fail(f"Unexpected request: {HTTPMethod.GET} {url}") + + return mock_get -@patch("evohomeasync2.broker.Broker.get", mock_get) -async def setup_evohome(hass: HomeAssistant, test_config: dict[str, str]) -> MagicMock: +async def block_request( + self: Broker, method: HTTPMethod, url: str, **kwargs: Any +) -> None: + """Fail if the code attempts any actual I/O via aiohttp.""" + + pytest.fail(f"Unexpected request: {method} {url}") + + +@pytest.fixture +def evo_config() -> dict[str, str]: + "Return a default/minimal configuration." + return { + CONF_USERNAME: USERNAME, + CONF_PASSWORD: "password", + } + + +@patch("evohomeasync.broker.Broker._make_request", block_request) +@patch("evohomeasync2.broker.Broker._client", block_request) +async def setup_evohome( + hass: HomeAssistant, + test_config: dict[str, str], + install: str = "default", +) -> MagicMock: """Set up the evohome integration and return its client. The class is mocked here to check the client was instantiated with the correct args. @@ -93,6 +132,7 @@ async def setup_evohome(hass: HomeAssistant, test_config: dict[str, str]) -> Mag with ( patch("homeassistant.components.evohome.evo.EvohomeClient") as mock_client, patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), + patch("evohomeasync2.broker.Broker.get", mock_get_factory(install)), ): mock_client.side_effect = EvohomeClient diff --git a/tests/components/evohome/const.py b/tests/components/evohome/const.py index 0b298db533a..c25a259e602 100644 --- a/tests/components/evohome/const.py +++ b/tests/components/evohome/const.py @@ -8,3 +8,12 @@ ACCESS_TOKEN: Final = "at_1dc7z657UKzbhKA..." REFRESH_TOKEN: Final = "rf_jg68ZCKYdxEI3fF..." SESSION_ID: Final = "F7181186..." 
USERNAME: Final = "test_user@gmail.com" + +# The h-numbers refer to issues in HA's core repo +TEST_INSTALLS: Final = ( + "minimal", # evohome (single zone, no DHW) + "default", # evohome (multi-zone, with DHW & ghost zones) + "h032585", # VisionProWifi (no preset_mode for TCS) + "h099625", # RoundThermostat + "system_004", # RoundModulation +) diff --git a/tests/components/evohome/fixtures/schedule_dhw.json b/tests/components/evohome/fixtures/default/schedule_dhw.json similarity index 100% rename from tests/components/evohome/fixtures/schedule_dhw.json rename to tests/components/evohome/fixtures/default/schedule_dhw.json diff --git a/tests/components/evohome/fixtures/schedule_zone.json b/tests/components/evohome/fixtures/default/schedule_zone.json similarity index 100% rename from tests/components/evohome/fixtures/schedule_zone.json rename to tests/components/evohome/fixtures/default/schedule_zone.json diff --git a/tests/components/evohome/fixtures/status_2738909.json b/tests/components/evohome/fixtures/default/status_2738909.json similarity index 100% rename from tests/components/evohome/fixtures/status_2738909.json rename to tests/components/evohome/fixtures/default/status_2738909.json diff --git a/tests/components/evohome/fixtures/user_account.json b/tests/components/evohome/fixtures/default/user_account.json similarity index 100% rename from tests/components/evohome/fixtures/user_account.json rename to tests/components/evohome/fixtures/default/user_account.json diff --git a/tests/components/evohome/fixtures/user_locations.json b/tests/components/evohome/fixtures/default/user_locations.json similarity index 99% rename from tests/components/evohome/fixtures/user_locations.json rename to tests/components/evohome/fixtures/default/user_locations.json index cf59aa9ae8a..f2f4091a2dc 100644 --- a/tests/components/evohome/fixtures/user_locations.json +++ b/tests/components/evohome/fixtures/default/user_locations.json @@ -246,7 +246,7 @@ }, { "zoneId": "3450733", - "modelType": "xx", + "modelType": "xxx", "setpointCapabilities": { "maxHeatSetpoint": 35.0, "minHeatSetpoint": 5.0, @@ -268,7 +268,7 @@ "setpointValueResolution": 0.5 }, "name": "Spare Room", - "zoneType": "xx" + "zoneType": "xxx" } ], "dhw": { diff --git a/tests/components/evohome/fixtures/h032585/status_111111.json b/tests/components/evohome/fixtures/h032585/status_111111.json new file mode 100644 index 00000000000..0ea535c2461 --- /dev/null +++ b/tests/components/evohome/fixtures/h032585/status_111111.json @@ -0,0 +1,31 @@ +{ + "locationId": "111111", + "gateways": [ + { + "gatewayId": "222222", + "temperatureControlSystems": [ + { + "systemId": "416856", + "zones": [ + { + "zoneId": "416856", + "temperatureStatus": { + "temperature": 21.5, + "isAvailable": true + }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 21.5, + "setpointMode": "FollowSchedule" + }, + "name": "THERMOSTAT" + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "Heat", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/h032585/temperatures.json b/tests/components/evohome/fixtures/h032585/temperatures.json new file mode 100644 index 00000000000..a2015c94f46 --- /dev/null +++ b/tests/components/evohome/fixtures/h032585/temperatures.json @@ -0,0 +1,3 @@ +{ + "416856": 21.5 +} diff --git a/tests/components/evohome/fixtures/h032585/user_locations.json b/tests/components/evohome/fixtures/h032585/user_locations.json new file mode 100644 index 00000000000..b4ea2e5c420 --- 
/dev/null +++ b/tests/components/evohome/fixtures/h032585/user_locations.json @@ -0,0 +1,79 @@ +[ + { + "locationInfo": { + "locationId": "111111", + "name": "My Home", + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "222222", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "416856", + "modelType": "VisionProWifiRetail", + "zones": [ + { + "zoneId": "416856", + "modelType": "VisionProWifiRetail", + "setpointCapabilities": { + "vacationHoldCapabilities": { + "isChangeable": true, + "isCancelable": true, + "minDuration": "1.00:00:00", + "maxDuration": "365.23:45:00", + "timingResolution": "00:15:00" + }, + "maxHeatSetpoint": 32.0, + "minHeatSetpoint": 4.5, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride", + "VacationHold" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:15:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 4, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:15:00", + "setpointValueResolution": 0.5 + }, + "name": "THERMOSTAT", + "zoneType": "Thermostat" + } + ], + "allowedSystemModes": [ + { + "systemMode": "Off", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Heat", + "canBePermanent": true, + "canBeTemporary": false + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/h099625/status_111111.json b/tests/components/evohome/fixtures/h099625/status_111111.json new file mode 100644 index 00000000000..149d8aba783 --- /dev/null +++ b/tests/components/evohome/fixtures/h099625/status_111111.json @@ -0,0 +1,44 @@ +{ + "locationId": "111111", + "gateways": [ + { + "gatewayId": "222222", + "temperatureControlSystems": [ + { + "systemId": "8557535", + "zones": [ + { + "zoneId": "8557539", + "temperatureStatus": { + "temperature": 21.5, + "isAvailable": true + }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 21.5, + "setpointMode": "FollowSchedule" + }, + "name": "THERMOSTAT" + }, + { + "zoneId": "8557541", + "temperatureStatus": { + "temperature": 21.5, + "isAvailable": true + }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 21.5, + "setpointMode": "FollowSchedule" + }, + "name": "THERMOSTAT" + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "Auto", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/h099625/user_locations.json b/tests/components/evohome/fixtures/h099625/user_locations.json new file mode 100644 index 00000000000..cc32caccc73 --- /dev/null +++ b/tests/components/evohome/fixtures/h099625/user_locations.json @@ -0,0 +1,113 @@ +[ + { + "locationInfo": { + "locationId": "111111", + "name": "My Home", + "timeZone": { + "timeZoneId": "FLEStandardTime", + "displayName": "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius", + "offsetMinutes": 120, + "currentOffsetMinutes": 180, + "supportsDaylightSaving": true + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "222222", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "8557535", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": 
"8557539", + "modelType": "RoundWireless", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Thermostat", + "zoneType": "Thermostat" + }, + { + "zoneId": "8557541", + "modelType": "RoundWireless", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Thermostat 2", + "zoneType": "Thermostat" + } + ], + "allowedSystemModes": [ + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/minimal/status_2738909.json b/tests/components/evohome/fixtures/minimal/status_2738909.json new file mode 100644 index 00000000000..4b344314a67 --- /dev/null +++ b/tests/components/evohome/fixtures/minimal/status_2738909.json @@ -0,0 +1,28 @@ +{ + "locationId": "2738909", + "gateways": [ + { + "gatewayId": "2499896", + "temperatureControlSystems": [ + { + "systemId": "3432522", + "zones": [ + { + "zoneId": "3432576", + "name": "Main Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "activeFaults": [] + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/minimal/user_locations.json b/tests/components/evohome/fixtures/minimal/user_locations.json new file mode 100644 index 00000000000..932686d8728 --- /dev/null +++ b/tests/components/evohome/fixtures/minimal/user_locations.json @@ -0,0 +1,120 @@ +[ + { + "locationInfo": { + "locationId": "2738909", + "name": "My Home", + "streetAddress": "1 Main Street", + "city": "London", + "country": "UnitedKingdom", + "postcode": "E1 1AA", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": 
"2499896", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "3432522", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "3432576", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Room", + "zoneType": "RadiatorZone" + } + ], + "allowedSystemModes": [ + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithReset", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "DayOff", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "Custom", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/fixtures/system_004/status_3164610.json b/tests/components/evohome/fixtures/system_004/status_3164610.json new file mode 100644 index 00000000000..a9ef3f6ee28 --- /dev/null +++ b/tests/components/evohome/fixtures/system_004/status_3164610.json @@ -0,0 +1,33 @@ +{ + "locationId": "3164610", + "gateways": [ + { + "gatewayId": "2938388", + "temperatureControlSystems": [ + { + "systemId": "4187769", + "zones": [ + { + "zoneId": "4187768", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 15.0, + "setpointMode": "PermanentOverride" + }, + "name": "Thermostat" + } + ], + "activeFaults": [], + "systemModeStatus": { "mode": "Auto", "isPermanent": true } + } + ], + "activeFaults": [ + { + "faultType": "GatewayCommunicationLost", + "since": "2023-05-04T18:47:36.7727046" + } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/system_004/user_locations.json b/tests/components/evohome/fixtures/system_004/user_locations.json new file mode 100644 index 00000000000..9defab8b6ee --- /dev/null +++ b/tests/components/evohome/fixtures/system_004/user_locations.json @@ -0,0 +1,99 @@ +[ + { + "locationInfo": { + "locationId": "3164610", + "name": "Living room", + "streetAddress": "1 Main Road", + "city": "Boomtown", + "country": "Netherlands", + "postcode": "1234XX", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "WEuropeStandardTime", + "displayName": "(UTC+01:00) Amsterdam, Berlijn, Bern, Rome, Stockholm, Wenen", + "offsetMinutes": 60, + "currentOffsetMinutes": 120, + "supportsDaylightSaving": true + }, + "locationOwner": { + 
"userId": "2624305", + "username": "user_2624305@gmail.com", + "firstname": "Chris", + "lastname": "Jones" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": "2938388", + "mac": "00D02D5A7000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "4187769", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "4187768", + "modelType": "RoundModulation", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 0, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Thermostat", + "zoneType": "Thermostat" + } + ], + "allowedSystemModes": [ + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/snapshots/test_init.ambr b/tests/components/evohome/snapshots/test_init.ambr new file mode 100644 index 00000000000..210c45354a5 --- /dev/null +++ b/tests/components/evohome/snapshots/test_init.ambr @@ -0,0 +1,863 @@ +# serializer version: 1 +# name: test_entities[default] + list([ + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.7, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Dead Zone', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': False, + }), + 'zone_id': '3432521', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.dead_zone', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': 
list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + dict({ + 'faultType': 'TempZoneActuatorCommunicationLost', + 'since': '2022-03-02T15:56:01', + }), + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Front Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'temporary', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + dict({ + 'faultType': 'TempZoneActuatorLowBattery', + 'since': '2022-03-02T04:50:20', + }), + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'TemporaryOverride', + 'target_heat_temperature': 21.0, + 'until': '2022-03-07T11:00:00-08:00', + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432577', + }), + 'supported_features': , + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.front_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 20.0, + }), + 'zone_id': '3432578', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20.0, + 'friendly_name': 'Bathroom Dn', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 
'temperature': 20.0, + }), + 'zone_id': '3432579', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.bathroom_dn', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.0, + 'friendly_name': 'Main Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 16.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.0, + }), + 'zone_id': '3432580', + }), + 'supported_features': , + 'temperature': 16.0, + }), + 'context': , + 'entity_id': 'climate.main_bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Kids Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '3449703', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.kids_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'away_mode': 'on', + 'current_temperature': 23, + 'friendly_name': 'Domestic Hot Water', + 'icon': 'mdi:thermometer-lines', + 'max_temp': 60, + 'min_temp': 43, + 'operation_list': list([ + 'auto', + 'on', + 'off', + ]), + 'operation_mode': 'off', + 'status': dict({ + 'active_faults': list([ + ]), + 'dhw_id': '3933910', + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T22:30:00-07:00', + 'next_sp_state': 'On', + 'this_sp_from': '2024-08-14T14:30:00-07:00', + 'this_sp_state': 'Off', + }), + 'state_status': dict({ + 'mode': 'PermanentOverride', + 'state': 'Off', + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 23.0, + }), + }), + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }), + ]) +# --- +# name: test_entities[h032585] + list([ + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '416856', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Heat', + }), + }), + 'supported_features': , + }), + 'context': , + 
'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 4.5, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '416856', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + ]) +# --- +# name: test_entities[h099625] + list([ + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 'eco', + 'away', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '8557535', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Auto', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T21:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T13:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '8557539', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T21:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T13:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '8557541', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + ]) 
+# --- +# name: test_entities[h118169] + list([ + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '333333', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Heat', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'THERMOSTAT', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 32.0, + 'min_temp': 4.5, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 21.5, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 21.5, + }), + 'zone_id': '444444', + }), + 'supported_features': , + 'temperature': 21.5, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + ]) +# --- +# name: test_entities[minimal] + list([ + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'My Home', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': 'eco', + 'preset_modes': list([ + 'Reset', + 'eco', + 'away', + 'home', + 'Custom', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 'system_id': '3432522', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'AutoWithEco', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.my_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.0, + 'friendly_name': 'Main Room', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'FollowSchedule', + 'target_heat_temperature': 17.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T23:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T15:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.0, + }), + 'zone_id': '3432576', + }), + 'supported_features': , + 'temperature': 17.0, + }), + 'context': , + 'entity_id': 'climate.main_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + ]) +# --- +# name: test_entities[system_004] + list([ + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Living room', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:thermostat', + 'max_temp': 35, + 'min_temp': 7, + 'preset_mode': None, + 'preset_modes': list([ + 'eco', + 'away', + ]), + 'status': dict({ + 'active_system_faults': list([ + ]), + 
'system_id': '4187769', + 'system_mode_status': dict({ + 'is_permanent': True, + 'mode': 'Auto', + }), + }), + 'supported_features': , + }), + 'context': , + 'entity_id': 'climate.living_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 19.5, + 'friendly_name': 'Thermostat', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 35.0, + 'min_temp': 5.0, + 'preset_mode': 'permanent', + 'preset_modes': list([ + 'none', + 'temporary', + 'permanent', + ]), + 'status': dict({ + 'active_faults': list([ + ]), + 'setpoint_status': dict({ + 'setpoint_mode': 'PermanentOverride', + 'target_heat_temperature': 15.0, + }), + 'setpoints': dict({ + 'next_sp_from': '2024-08-14T22:00:00-07:00', + 'next_sp_temp': 18.1, + 'this_sp_from': '2024-08-14T14:00:00-07:00', + 'this_sp_temp': 15.9, + }), + 'temperature_status': dict({ + 'is_available': True, + 'temperature': 19.5, + }), + 'zone_id': '4187768', + }), + 'supported_features': , + 'temperature': 15.0, + }), + 'context': , + 'entity_id': 'climate.thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }), + ]) +# --- diff --git a/tests/components/evohome/test_init.py b/tests/components/evohome/test_init.py new file mode 100644 index 00000000000..b717b19e6cd --- /dev/null +++ b/tests/components/evohome/test_init.py @@ -0,0 +1,25 @@ +"""The tests for evohome.""" + +from __future__ import annotations + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from .conftest import setup_evohome +from .const import TEST_INSTALLS + + +@pytest.mark.parametrize("install", TEST_INSTALLS) +async def test_entities( + hass: HomeAssistant, + evo_config: dict[str, str], + install: str, + snapshot: SnapshotAssertion, +) -> None: + """Test entities and state after setup of a Honeywell TCC-compatible system.""" + + await setup_evohome(hass, evo_config, install=install) + + assert hass.states.async_all() == snapshot diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py index e87b847a9ff..32cd49a1539 100644 --- a/tests/components/evohome/test_storage.py +++ b/tests/components/evohome/test_storage.py @@ -8,7 +8,6 @@ from typing import Any, Final, NotRequired, TypedDict import pytest from homeassistant.components.evohome import ( - CONF_PASSWORD, CONF_USERNAME, DOMAIN, STORAGE_KEY, @@ -56,11 +55,6 @@ ACCESS_TOKEN_EXP_DTM, ACCESS_TOKEN_EXP_STR = dt_pair(dt_util.now() + timedelta(h USERNAME_DIFF: Final = f"not_{USERNAME}" USERNAME_SAME: Final = USERNAME -TEST_CONFIG: Final = { - CONF_USERNAME: USERNAME_SAME, - CONF_PASSWORD: "password", -} - TEST_DATA: Final[dict[str, _TokenStoreT]] = { "sans_session_id": { SZ_USERNAME: USERNAME_SAME, @@ -93,13 +87,14 @@ DOMAIN_STORAGE_BASE: Final = { async def test_auth_tokens_null( hass: HomeAssistant, hass_storage: dict[str, Any], + evo_config: dict[str, str], idx: str, ) -> None: """Test loading/saving authentication tokens when no cached tokens in the store.""" hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA_NULL[idx]} - mock_client = await setup_evohome(hass, TEST_CONFIG) + mock_client = await setup_evohome(hass, evo_config, install="minimal") # Confirm client was instantiated without tokens, as cache was empty... 
assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs @@ -120,13 +115,16 @@ async def test_auth_tokens_null( @pytest.mark.parametrize("idx", TEST_DATA) async def test_auth_tokens_same( - hass: HomeAssistant, hass_storage: dict[str, Any], idx: str + hass: HomeAssistant, + hass_storage: dict[str, Any], + evo_config: dict[str, str], + idx: str, ) -> None: """Test loading/saving authentication tokens when matching username.""" hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} - mock_client = await setup_evohome(hass, TEST_CONFIG) + mock_client = await setup_evohome(hass, evo_config, install="minimal") # Confirm client was instantiated with the cached tokens... assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN @@ -146,7 +144,10 @@ async def test_auth_tokens_same( @pytest.mark.parametrize("idx", TEST_DATA) async def test_auth_tokens_past( - hass: HomeAssistant, hass_storage: dict[str, Any], idx: str + hass: HomeAssistant, + hass_storage: dict[str, Any], + evo_config: dict[str, str], + idx: str, ) -> None: """Test loading/saving authentication tokens with matching username, but expired.""" @@ -158,7 +159,7 @@ async def test_auth_tokens_past( hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": test_data} - mock_client = await setup_evohome(hass, TEST_CONFIG) + mock_client = await setup_evohome(hass, evo_config, install="minimal") # Confirm client was instantiated with the cached tokens... assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN @@ -181,14 +182,17 @@ async def test_auth_tokens_past( @pytest.mark.parametrize("idx", TEST_DATA) async def test_auth_tokens_diff( - hass: HomeAssistant, hass_storage: dict[str, Any], idx: str + hass: HomeAssistant, + hass_storage: dict[str, Any], + evo_config: dict[str, str], + idx: str, ) -> None: """Test loading/saving authentication tokens when unmatched username.""" hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]} mock_client = await setup_evohome( - hass, TEST_CONFIG | {CONF_USERNAME: USERNAME_DIFF} + hass, evo_config | {CONF_USERNAME: USERNAME_DIFF}, install="minimal" ) # Confirm client was instantiated without tokens, as username was different... 
From c7cfd56b720be8212af2686ecfa5b8cad6ee299b Mon Sep 17 00:00:00 2001 From: AlCalzone Date: Thu, 29 Aug 2024 00:01:53 +0200 Subject: [PATCH 1199/1635] Support Z-Wave JS dimming lights using color intensity (#122639) * Z-Wave JS: support non-dimmable color lights * remove black_is_off light, support on/off/color * fix: tests for on/off light * fix: typo * remove commented out old test code * add test for off and on * support colored lights without separate brightness control * add test for color-only light * refactor: extract color only light * fix: preserve color when changing brightness * extend tests * refactor again * refactor scale check * refactor: remove impossible check * review feedback * review feedback --------- Co-authored-by: Martin Hjelmare --- .../components/zwave_js/discovery.py | 55 +- homeassistant/components/zwave_js/light.py | 281 +++++-- tests/components/zwave_js/test_light.py | 752 ++++++++++++------ 3 files changed, 736 insertions(+), 352 deletions(-) diff --git a/homeassistant/components/zwave_js/discovery.py b/homeassistant/components/zwave_js/discovery.py index 6e750ee8b2d..6de5a56dc33 100644 --- a/homeassistant/components/zwave_js/discovery.py +++ b/homeassistant/components/zwave_js/discovery.py @@ -238,6 +238,12 @@ SWITCH_BINARY_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SWITCH_BINARY}, property={CURRENT_VALUE_PROPERTY} ) +COLOR_SWITCH_CURRENT_VALUE_SCHEMA = ZWaveValueDiscoverySchema( + command_class={CommandClass.SWITCH_COLOR}, + property={CURRENT_COLOR_PROPERTY}, + property_key={None}, +) + SIREN_TONE_SCHEMA = ZWaveValueDiscoverySchema( command_class={CommandClass.SOUND_SWITCH}, property={TONE_ID_PROPERTY}, @@ -762,33 +768,6 @@ DISCOVERY_SCHEMAS = [ }, ), ), - # HomeSeer HSM-200 v1 - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="black_is_off", - manufacturer_id={0x001E}, - product_id={0x0001}, - product_type={0x0004}, - primary_value=ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, - ), - absent_values=[SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA], - ), - # Logic Group ZDB5100 - ZWaveDiscoverySchema( - platform=Platform.LIGHT, - hint="black_is_off", - manufacturer_id={0x0234}, - product_id={0x0121}, - product_type={0x0003}, - primary_value=ZWaveValueDiscoverySchema( - command_class={CommandClass.SWITCH_COLOR}, - property={CURRENT_COLOR_PROPERTY}, - property_key={None}, - ), - ), # ====== START OF GENERIC MAPPING SCHEMAS ======= # locks # Door Lock CC @@ -1014,10 +993,11 @@ DISCOVERY_SCHEMAS = [ ), entity_category=EntityCategory.CONFIG, ), - # binary switches + # binary switches without color support ZWaveDiscoverySchema( platform=Platform.SWITCH, primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + absent_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], ), # switch for Indicator CC ZWaveDiscoverySchema( @@ -1111,6 +1091,25 @@ DISCOVERY_SCHEMAS = [ # catch any device with multilevel CC as light # NOTE: keep this at the bottom of the discovery scheme, # to handle all others that need the multilevel CC first + # + # Colored light (legacy device) that can only be controlled through Color Switch CC. + ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="color_onoff", + primary_value=COLOR_SWITCH_CURRENT_VALUE_SCHEMA, + absent_values=[ + SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, + ], + ), + # Colored light that can be turned on or off with the Binary Switch CC. 
+ ZWaveDiscoverySchema( + platform=Platform.LIGHT, + hint="color_onoff", + primary_value=SWITCH_BINARY_CURRENT_VALUE_SCHEMA, + required_values=[COLOR_SWITCH_CURRENT_VALUE_SCHEMA], + ), + # Dimmable light with or without color support. ZWaveDiscoverySchema( platform=Platform.LIGHT, primary_value=SWITCH_MULTILEVEL_CURRENT_VALUE_SCHEMA, diff --git a/homeassistant/components/zwave_js/light.py b/homeassistant/components/zwave_js/light.py index 020f1b66b3d..4a044ca3f52 100644 --- a/homeassistant/components/zwave_js/light.py +++ b/homeassistant/components/zwave_js/light.py @@ -76,8 +76,8 @@ async def async_setup_entry( driver = client.driver assert driver is not None # Driver is ready before platforms are loaded. - if info.platform_hint == "black_is_off": - async_add_entities([ZwaveBlackIsOffLight(config_entry, driver, info)]) + if info.platform_hint == "color_onoff": + async_add_entities([ZwaveColorOnOffLight(config_entry, driver, info)]) else: async_add_entities([ZwaveLight(config_entry, driver, info)]) @@ -111,9 +111,10 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._supports_color = False self._supports_rgbw = False self._supports_color_temp = False + self._supports_dimming = False + self._color_mode: str | None = None self._hs_color: tuple[float, float] | None = None self._rgbw_color: tuple[int, int, int, int] | None = None - self._color_mode: str | None = None self._color_temp: int | None = None self._min_mireds = 153 # 6500K as a safe default self._max_mireds = 370 # 2700K as a safe default @@ -129,15 +130,28 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): ) self._supported_color_modes: set[ColorMode] = set() + self._target_brightness: Value | None = None + # get additional (optional) values and set features - # If the command class is Basic, we must geenerate a name that includes - # the command class name to avoid ambiguity - self._target_brightness = self.get_zwave_value( - TARGET_VALUE_PROPERTY, - CommandClass.SWITCH_MULTILEVEL, - add_to_watched_value_ids=False, - ) - if self.info.primary_value.command_class == CommandClass.BASIC: + if self.info.primary_value.command_class == CommandClass.SWITCH_BINARY: + # This light can not be dimmed separately from the color channels + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_BINARY, + add_to_watched_value_ids=False, + ) + self._supports_dimming = False + elif self.info.primary_value.command_class == CommandClass.SWITCH_MULTILEVEL: + # This light can be dimmed separately from the color channels + self._target_brightness = self.get_zwave_value( + TARGET_VALUE_PROPERTY, + CommandClass.SWITCH_MULTILEVEL, + add_to_watched_value_ids=False, + ) + self._supports_dimming = True + elif self.info.primary_value.command_class == CommandClass.BASIC: + # If the command class is Basic, we must generate a name that includes + # the command class name to avoid ambiguity self._attr_name = self.generate_name( include_value_name=True, alternate_value_name="Basic" ) @@ -146,6 +160,13 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): CommandClass.BASIC, add_to_watched_value_ids=False, ) + self._supports_dimming = True + + self._current_color = self.get_zwave_value( + CURRENT_COLOR_PROPERTY, + CommandClass.SWITCH_COLOR, + value_property_key=None, + ) self._target_color = self.get_zwave_value( TARGET_COLOR_PROPERTY, CommandClass.SWITCH_COLOR, @@ -216,7 +237,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): @property def rgbw_color(self) -> tuple[int, int, int, int] | None: - """Return the hs color.""" + 
"""Return the RGBW color.""" return self._rgbw_color @property @@ -243,11 +264,39 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): """Turn the device on.""" transition = kwargs.get(ATTR_TRANSITION) + brightness = kwargs.get(ATTR_BRIGHTNESS) + + hs_color = kwargs.get(ATTR_HS_COLOR) + color_temp = kwargs.get(ATTR_COLOR_TEMP) + rgbw = kwargs.get(ATTR_RGBW_COLOR) + + new_colors = self._get_new_colors(hs_color, color_temp, rgbw) + if new_colors is not None: + await self._async_set_colors(new_colors, transition) + + # set brightness (or turn on if dimming is not supported) + await self._async_set_brightness(brightness, transition) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the light off.""" + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + + def _get_new_colors( + self, + hs_color: tuple[float, float] | None, + color_temp: int | None, + rgbw: tuple[int, int, int, int] | None, + brightness_scale: float | None = None, + ) -> dict[ColorComponent, int] | None: + """Determine the new color dict to set.""" # RGB/HS color - hs_color = kwargs.get(ATTR_HS_COLOR) if hs_color is not None and self._supports_color: red, green, blue = color_util.color_hs_to_RGB(*hs_color) + if brightness_scale is not None: + red = round(red * brightness_scale) + green = round(green * brightness_scale) + blue = round(blue * brightness_scale) colors = { ColorComponent.RED: red, ColorComponent.GREEN: green, @@ -257,10 +306,9 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): # turn of white leds when setting rgb colors[ColorComponent.WARM_WHITE] = 0 colors[ColorComponent.COLD_WHITE] = 0 - await self._async_set_colors(colors, transition) + return colors # Color temperature - color_temp = kwargs.get(ATTR_COLOR_TEMP) if color_temp is not None and self._supports_color_temp: # Limit color temp to min/max values cold = max( @@ -275,20 +323,18 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): ), ) warm = 255 - cold - await self._async_set_colors( - { - # turn off color leds when setting color temperature - ColorComponent.RED: 0, - ColorComponent.GREEN: 0, - ColorComponent.BLUE: 0, - ColorComponent.WARM_WHITE: warm, - ColorComponent.COLD_WHITE: cold, - }, - transition, - ) + colors = { + ColorComponent.WARM_WHITE: warm, + ColorComponent.COLD_WHITE: cold, + } + if self._supports_color: + # turn off color leds when setting color temperature + colors[ColorComponent.RED] = 0 + colors[ColorComponent.GREEN] = 0 + colors[ColorComponent.BLUE] = 0 + return colors # RGBW - rgbw = kwargs.get(ATTR_RGBW_COLOR) if rgbw is not None and self._supports_rgbw: rgbw_channels = { ColorComponent.RED: rgbw[0], @@ -300,17 +346,15 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): if self._cold_white: rgbw_channels[ColorComponent.COLD_WHITE] = rgbw[3] - await self._async_set_colors(rgbw_channels, transition) - # set brightness - await self._async_set_brightness(kwargs.get(ATTR_BRIGHTNESS), transition) + return rgbw_channels - async def async_turn_off(self, **kwargs: Any) -> None: - """Turn the light off.""" - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + return None async def _async_set_colors( - self, colors: dict[ColorComponent, int], transition: float | None = None + self, + colors: dict[ColorComponent, int], + transition: float | None = None, ) -> None: """Set (multiple) defined colors to given value(s).""" # prefer the (new) combined color property @@ -361,9 +405,14 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): zwave_transition = {TRANSITION_DURATION_OPTION: "default"} # setting a 
value requires setting targetValue - await self._async_set_value( - self._target_brightness, zwave_brightness, zwave_transition - ) + if self._supports_dimming: + await self._async_set_value( + self._target_brightness, zwave_brightness, zwave_transition + ) + else: + await self._async_set_value( + self._target_brightness, zwave_brightness > 0, zwave_transition + ) # We do an optimistic state update when setting to a previous value # to avoid waiting for the value to be updated from the device which is # typically delayed and causes a confusing UX. @@ -427,15 +476,8 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): """Calculate light colors.""" (red_val, green_val, blue_val, ww_val, cw_val) = self._get_color_values() - # prefer the (new) combined color property - # https://github.com/zwave-js/node-zwave-js/pull/1782 - combined_color_val = self.get_zwave_value( - CURRENT_COLOR_PROPERTY, - CommandClass.SWITCH_COLOR, - value_property_key=None, - ) - if combined_color_val and isinstance(combined_color_val.value, dict): - multi_color = combined_color_val.value + if self._current_color and isinstance(self._current_color.value, dict): + multi_color = self._current_color.value else: multi_color = {} @@ -486,11 +528,10 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity): self._color_mode = ColorMode.RGBW -class ZwaveBlackIsOffLight(ZwaveLight): - """Representation of a Z-Wave light where setting the color to black turns it off. +class ZwaveColorOnOffLight(ZwaveLight): + """Representation of a colored Z-Wave light with an optional binary switch to turn on/off. - Currently only supports lights with RGB, no color temperature, and no white - channels. + Dimming for RGB lights is realized by scaling the color channels. """ def __init__( @@ -499,61 +540,137 @@ class ZwaveBlackIsOffLight(ZwaveLight): """Initialize the light.""" super().__init__(config_entry, driver, info) - self._last_color: dict[str, int] | None = None - self._supported_color_modes.discard(ColorMode.BRIGHTNESS) + self._last_on_color: dict[ColorComponent, int] | None = None + self._last_brightness: int | None = None @property - def brightness(self) -> int: - """Return the brightness of this light between 0..255.""" - return 255 + def brightness(self) -> int | None: + """Return the brightness of this light between 0..255. - @property - def is_on(self) -> bool | None: - """Return true if device is on (brightness above 0).""" + Z-Wave multilevel switches use a range of [0, 99] to control brightness. 
+ """ if self.info.primary_value.value is None: return None - return any(value != 0 for value in self.info.primary_value.value.values()) + if self._target_brightness and self.info.primary_value.value is False: + # Binary switch exists and is turned off + return 0 + + # Brightness is encoded in the color channels by scaling them lower than 255 + color_values = [ + v.value + for v in self._get_color_values() + if v is not None and v.value is not None + ] + return max(color_values) if color_values else 0 async def async_turn_on(self, **kwargs: Any) -> None: """Turn the device on.""" + if ( kwargs.get(ATTR_RGBW_COLOR) is not None or kwargs.get(ATTR_COLOR_TEMP) is not None - or kwargs.get(ATTR_HS_COLOR) is not None ): + # RGBW and color temp are not supported in this mode, + # delegate to the parent class await super().async_turn_on(**kwargs) return transition = kwargs.get(ATTR_TRANSITION) - # turn on light to last color if known, otherwise set to white - if self._last_color is not None: - await self._async_set_colors( - { - ColorComponent.RED: self._last_color["red"], - ColorComponent.GREEN: self._last_color["green"], - ColorComponent.BLUE: self._last_color["blue"], - }, - transition, - ) - else: - await self._async_set_colors( - { + brightness = kwargs.get(ATTR_BRIGHTNESS) + hs_color = kwargs.get(ATTR_HS_COLOR) + new_colors: dict[ColorComponent, int] | None = None + scale: float | None = None + + if brightness is None and hs_color is None: + # Turned on without specifying brightness or color + if self._last_on_color is not None: + if self._target_brightness: + # Color is already set, use the binary switch to turn on + await self._async_set_brightness(None, transition) + return + + # Preserve the previous color + new_colors = self._last_on_color + elif self._supports_color: + # Turned on for the first time. 
Make it white + new_colors = { ColorComponent.RED: 255, ColorComponent.GREEN: 255, ColorComponent.BLUE: 255, - }, - transition, + } + elif brightness is not None: + # If brightness gets set, preserve the color and mix it with the new brightness + if self.color_mode == ColorMode.HS: + scale = brightness / 255 + if ( + self._last_on_color is not None + and None not in self._last_on_color.values() + ): + # Changed brightness from 0 to >0 + old_brightness = max(self._last_on_color.values()) + new_scale = brightness / old_brightness + scale = new_scale + new_colors = {} + for color, value in self._last_on_color.items(): + new_colors[color] = round(value * new_scale) + elif hs_color is None and self._color_mode == ColorMode.HS: + hs_color = self._hs_color + elif hs_color is not None and brightness is None: + # Turned on by using the color controls + current_brightness = self.brightness + if current_brightness == 0 and self._last_brightness is not None: + # Use the last brightness value if the light is currently off + scale = self._last_brightness / 255 + elif current_brightness is not None: + scale = current_brightness / 255 + + # Reset last color until turning off again + self._last_on_color = None + + if new_colors is None: + new_colors = self._get_new_colors( + hs_color=hs_color, color_temp=None, rgbw=None, brightness_scale=scale ) + if new_colors is not None: + await self._async_set_colors(new_colors, transition) + + # Turn the binary switch on if there is one + await self._async_set_brightness(brightness, transition) + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - self._last_color = self.info.primary_value.value - await self._async_set_colors( - { + + # Remember last color and brightness to restore it when turning on + self._last_brightness = self.brightness + if self._current_color and isinstance(self._current_color.value, dict): + red = self._current_color.value.get(COLOR_SWITCH_COMBINED_RED) + green = self._current_color.value.get(COLOR_SWITCH_COMBINED_GREEN) + blue = self._current_color.value.get(COLOR_SWITCH_COMBINED_BLUE) + + last_color: dict[ColorComponent, int] = {} + if red is not None: + last_color[ColorComponent.RED] = red + if green is not None: + last_color[ColorComponent.GREEN] = green + if blue is not None: + last_color[ColorComponent.BLUE] = blue + + if last_color: + self._last_on_color = last_color + + if self._target_brightness: + # Turn off the binary switch only + await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + else: + # turn off all color channels + colors = { ColorComponent.RED: 0, ColorComponent.GREEN: 0, ColorComponent.BLUE: 0, - }, - kwargs.get(ATTR_TRANSITION), - ) - await self._async_set_brightness(0, kwargs.get(ATTR_TRANSITION)) + } + + await self._async_set_colors( + colors, + kwargs.get(ATTR_TRANSITION), + ) diff --git a/tests/components/zwave_js/test_light.py b/tests/components/zwave_js/test_light.py index 376bd700a2a..4c725c6dc29 100644 --- a/tests/components/zwave_js/test_light.py +++ b/tests/components/zwave_js/test_light.py @@ -8,6 +8,7 @@ from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_MODE, ATTR_COLOR_TEMP, + ATTR_HS_COLOR, ATTR_MAX_MIREDS, ATTR_MIN_MIREDS, ATTR_RGB_COLOR, @@ -37,8 +38,8 @@ from .common import ( ZEN_31_ENTITY, ) -HSM200_V1_ENTITY = "light.hsm200" ZDB5100_ENTITY = "light.matrix_office" +HSM200_V1_ENTITY = "light.hsm200" async def test_light( @@ -510,14 +511,388 @@ async def test_light_none_color_value( assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == 
["hs"] -async def test_black_is_off( +async def test_light_on_off_color( + hass: HomeAssistant, client, logic_group_zdb5100, integration +) -> None: + """Test the light entity for RGB lights without dimming support.""" + node = logic_group_zdb5100 + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + async def update_color(red: int, green: int, blue: int) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 2, # red + "newValue": red, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "red", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 3, # green + "newValue": green, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "green", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "propertyKey": 4, # blue + "newValue": blue, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "blue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 1, + "property": "currentColor", + "newValue": { + "red": red, + "green": green, + "blue": blue, + }, + "prevValue": None, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + async def update_switch_state(state: bool) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Binary Switch", + "commandClass": 37, + "endpoint": 1, + "property": "currentValue", + "newValue": state, + "prevValue": None, + "propertyName": "currentValue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + # Turn on the light. 
Since this is the first call, the light should default to white + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 255, + "green": 255, + "blue": 255, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + # Force the light to turn off + await update_switch_state(False) + + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_OFF + + # Force the light to turn on (green) + await update_color(0, 255, 0) + await update_switch_state(True) + + state = hass.states.get(ZDB5100_ENTITY) + assert state.state == STATE_ON + + client.async_send_command.reset_mock() + + # Set the brightness to 128. This should be encoded in the color value + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_BRIGHTNESS: 128}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 0, + "green": 128, + "blue": 0, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Force the light to turn on (green, 50%) + await update_color(0, 128, 0) + + # Set the color to red. This should preserve the previous brightness value + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_HS_COLOR: (0, 100)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 2 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 1, + "property": "targetColor", + } + assert args["value"] == { + "red": 128, + "green": 0, + "blue": 0, + } + + args = client.async_send_command.call_args_list[1][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + client.async_send_command.reset_mock() + + # Force the light to turn on (red, 50%) + await update_color(128, 0, 0) + + # Turn the device off. 
This should only affect the binary switch, not the color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is False + + client.async_send_command.reset_mock() + + # Force the light to turn off + await update_switch_state(False) + + # Turn the device on again. This should only affect the binary switch, not the color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ZDB5100_ENTITY}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 37, + "endpoint": 1, + "property": "targetValue", + } + assert args["value"] is True + + +async def test_light_color_only( hass: HomeAssistant, client, express_controls_ezmultipli, integration ) -> None: - """Test the black is off light entity.""" + """Test the light entity for RGB lights with Color Switch CC only.""" node = express_controls_ezmultipli state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON + async def update_color(red: int, green: int, blue: int) -> None: + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 2, # red + "newValue": red, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "red", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 3, # green + "newValue": green, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "green", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "propertyKey": 4, # blue + "newValue": blue, + "prevValue": None, + "propertyName": "currentColor", + "propertyKeyName": "blue", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + + event = Event( + type="value updated", + data={ + "source": "node", + "event": "value updated", + "nodeId": node.node_id, + "args": { + "commandClassName": "Color Switch", + "commandClass": 51, + "endpoint": 0, + "property": "currentColor", + "newValue": { + "red": red, + "green": green, + "blue": blue, + }, + "prevValue": None, + "propertyName": "currentColor", + }, + }, + ) + node.receive_event(event) + await hass.async_block_till_done() + # Attempt to turn on the light and ensure it defaults to white await hass.services.async_call( LIGHT_DOMAIN, @@ -539,64 +914,14 @@ async def 
test_black_is_off( client.async_send_command.reset_mock() # Force the light to turn off - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + await update_color(0, 0, 0) + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_OFF - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 0, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() + # Force the light to turn on (50% green) + await update_color(0, 128, 0) + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_ON @@ -619,6 +944,9 @@ async def test_black_is_off( client.async_send_command.reset_mock() + # Force the light to turn off + await update_color(0, 0, 0) + # Assert that the last color is restored await hass.services.async_call( LIGHT_DOMAIN, @@ -635,11 +963,131 @@ async def test_black_is_off( "endpoint": 0, "property": "targetColor", } - assert args["value"] == {"red": 0, "green": 255, "blue": 0} + assert args["value"] == {"red": 0, "green": 128, "blue": 0} client.async_send_command.reset_mock() - # Force the light to turn on + # Force the light to turn on (50% green) + await update_color(0, 128, 0) + + state = hass.states.get(HSM200_V1_ENTITY) + assert state.state == STATE_ON + + client.async_send_command.reset_mock() + + # Assert that the brightness is preserved when changing colors + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_RGB_COLOR: (255, 0, 0)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 128, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + # Force the light to turn on (50% red) + await update_color(128, 0, 0) + + state = hass.states.get(HSM200_V1_ENTITY) + assert state.state == STATE_ON + + # Assert that the color is preserved when changing brightness + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 69}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 69, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + await update_color(69, 0, 0) + + # Turn off again + await 
hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, + blocking=True, + ) + await update_color(0, 0, 0) + + client.async_send_command.reset_mock() + + # Assert that the color is preserved when turning on with brightness + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_BRIGHTNESS: 123}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 123, "green": 0, "blue": 0} + + client.async_send_command.reset_mock() + + await update_color(123, 0, 0) + + # Turn off again + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY}, + blocking=True, + ) + await update_color(0, 0, 0) + + client.async_send_command.reset_mock() + + # Assert that the brightness is preserved when turning on with color + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: HSM200_V1_ENTITY, ATTR_HS_COLOR: (240, 100)}, + blocking=True, + ) + assert len(client.async_send_command.call_args_list) == 1 + args = client.async_send_command.call_args_list[0][0][0] + assert args["command"] == "node.set_value" + assert args["nodeId"] == node.node_id + assert args["valueId"] == { + "commandClass": 51, + "endpoint": 0, + "property": "targetColor", + } + assert args["value"] == {"red": 0, "green": 0, "blue": 123} + + client.async_send_command.reset_mock() + + # Clear the color value to trigger an unknown state event = Event( type="value updated", data={ @@ -652,17 +1100,14 @@ async def test_black_is_off( "endpoint": 0, "property": "currentColor", "newValue": None, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, + "prevValue": None, "propertyName": "currentColor", }, }, ) node.receive_event(event) await hass.async_block_till_done() + state = hass.states.get(HSM200_V1_ENTITY) assert state.state == STATE_UNKNOWN @@ -687,183 +1132,6 @@ async def test_black_is_off( assert args["value"] == {"red": 255, "green": 76, "blue": 255} -async def test_black_is_off_zdb5100( - hass: HomeAssistant, client, logic_group_zdb5100, integration -) -> None: - """Test the black is off light entity.""" - node = logic_group_zdb5100 - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF - - # Attempt to turn on the light and ensure it defaults to white - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 255, "green": 255, "blue": 255} - - client.async_send_command.reset_mock() - - # Force the light to turn off - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "prevValue": { - "red": 0, 
- "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_OFF - - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "prevValue": { - "red": 0, - "green": 0, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_ON - - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 0, "green": 0, "blue": 0} - - client.async_send_command.reset_mock() - - # Assert that the last color is restored - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 0, "green": 255, "blue": 0} - - client.async_send_command.reset_mock() - - # Force the light to turn on - event = Event( - type="value updated", - data={ - "source": "node", - "event": "value updated", - "nodeId": node.node_id, - "args": { - "commandClassName": "Color Switch", - "commandClass": 51, - "endpoint": 1, - "property": "currentColor", - "newValue": None, - "prevValue": { - "red": 0, - "green": 255, - "blue": 0, - }, - "propertyName": "currentColor", - }, - }, - ) - node.receive_event(event) - await hass.async_block_till_done() - state = hass.states.get(ZDB5100_ENTITY) - assert state.state == STATE_UNKNOWN - - client.async_send_command.reset_mock() - - # Assert that call fails if attribute is added to service call - await hass.services.async_call( - LIGHT_DOMAIN, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: ZDB5100_ENTITY, ATTR_RGBW_COLOR: (255, 76, 255, 0)}, - blocking=True, - ) - assert len(client.async_send_command.call_args_list) == 1 - args = client.async_send_command.call_args_list[0][0][0] - assert args["command"] == "node.set_value" - assert args["nodeId"] == node.node_id - assert args["valueId"] == { - "commandClass": 51, - "endpoint": 1, - "property": "targetColor", - } - assert args["value"] == {"red": 255, "green": 76, "blue": 255} - - async def test_basic_cc_light( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 5f810d908f61b06d72e306b4416e5dff7ec63860 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 28 Aug 2024 12:28:41 -1000 Subject: [PATCH 1200/1635] Add missing dependencies to yale (#124821) * Add missing dependencies to yale * try another way * Revert "try another way" This reverts commit fbb731a33491bf51290fd98acde7b532ea39fb88. * patch out cloud setup --- homeassistant/components/yale/manifest.json | 1 + tests/components/yale/conftest.py | 13 +++++++++++++ 2 files changed, 14 insertions(+) diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index d6da9ba3993..115036b96d5 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -3,6 +3,7 @@ "name": "Yale", "codeowners": ["@bdraco"], "config_flow": true, + "dependencies": ["application_credentials", "cloud"], "dhcp": [ { "hostname": "yale-connect-plus", diff --git a/tests/components/yale/conftest.py b/tests/components/yale/conftest.py index c890087ad12..3e633430846 100644 --- a/tests/components/yale/conftest.py +++ b/tests/components/yale/conftest.py @@ -57,3 +57,16 @@ def load_reauth_jwt_wrong_account_fixture() -> str: async def mock_client_credentials_fixture(hass: HomeAssistant) -> None: """Mock client credentials.""" await mock_client_credentials(hass) + + +@pytest.fixture(name="skip_cloud", autouse=True) +def skip_cloud_fixture(): + """Skip setting up cloud. + + Cloud already has its own tests for account link. + + We do not need to test it here as we only need to test our + usage of the oauth2 helpers. + """ + with patch("homeassistant.components.cloud.async_setup", return_value=True): + yield From 3d39f6ce88487998ffb5fb22bcfdd53ddc79c66a Mon Sep 17 00:00:00 2001 From: David Bonnes Date: Wed, 28 Aug 2024 23:34:20 +0100 Subject: [PATCH 1201/1635] Fix evohome test by setting datetime to match snapshot (#124824) * initial commit * freeze time instead * use fixture instead of API --- .../evohome/snapshots/test_init.ambr | 104 +++++++++--------- tests/components/evohome/test_init.py | 5 + 2 files changed, 57 insertions(+), 52 deletions(-) diff --git a/tests/components/evohome/snapshots/test_init.ambr b/tests/components/evohome/snapshots/test_init.ambr index 210c45354a5..8e5338ecb9b 100644 --- a/tests/components/evohome/snapshots/test_init.ambr +++ b/tests/components/evohome/snapshots/test_init.ambr @@ -62,10 +62,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': False, @@ -110,10 +110,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -160,10 +160,10 @@ 'until': '2022-03-07T11:00:00-08:00', }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': 
dict({ 'is_available': True, @@ -205,10 +205,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -250,10 +250,10 @@ 'target_heat_temperature': 16.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -295,10 +295,10 @@ 'target_heat_temperature': 16.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -340,10 +340,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -380,10 +380,10 @@ ]), 'dhw_id': '3933910', 'setpoints': dict({ - 'next_sp_from': '2024-08-14T22:30:00-07:00', - 'next_sp_state': 'On', - 'this_sp_from': '2024-08-14T14:30:00-07:00', - 'this_sp_state': 'Off', + 'next_sp_from': '2024-07-10T05:00:00-07:00', + 'next_sp_state': 'Off', + 'this_sp_from': '2024-07-10T04:00:00-07:00', + 'this_sp_state': 'On', }), 'state_status': dict({ 'mode': 'PermanentOverride', @@ -463,10 +463,10 @@ 'target_heat_temperature': 21.5, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -546,10 +546,10 @@ 'target_heat_temperature': 21.5, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T21:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T13:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T12:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-09T22:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -591,10 +591,10 @@ 'target_heat_temperature': 21.5, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T21:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T13:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T12:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-09T22:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -755,10 +755,10 @@ 'target_heat_temperature': 17.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T23:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': 
'2024-08-14T15:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T14:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-10T00:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, @@ -838,10 +838,10 @@ 'target_heat_temperature': 15.0, }), 'setpoints': dict({ - 'next_sp_from': '2024-08-14T22:00:00-07:00', - 'next_sp_temp': 18.1, - 'this_sp_from': '2024-08-14T14:00:00-07:00', - 'this_sp_temp': 15.9, + 'next_sp_from': '2024-07-10T13:10:00-07:00', + 'next_sp_temp': 18.6, + 'this_sp_from': '2024-07-09T23:00:00-07:00', + 'this_sp_temp': 16.0, }), 'temperature_status': dict({ 'is_available': True, diff --git a/tests/components/evohome/test_init.py b/tests/components/evohome/test_init.py index b717b19e6cd..ad688d04882 100644 --- a/tests/components/evohome/test_init.py +++ b/tests/components/evohome/test_init.py @@ -2,6 +2,7 @@ from __future__ import annotations +from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion @@ -17,9 +18,13 @@ async def test_entities( evo_config: dict[str, str], install: str, snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, ) -> None: """Test entities and state after setup of a Honeywell TCC-compatible system.""" + # some extended state attrs are relative the current time + freezer.move_to("2024-07-10 12:00:00+00:00") + await setup_evohome(hass, evo_config, install=install) assert hass.states.async_all() == snapshot From 4b59ef4733b8c4e219914b89e4b7df76fc7098ce Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 28 Aug 2024 15:47:11 -1000 Subject: [PATCH 1202/1635] Set GoogleEntity entity_id in constructor (#124830) --- homeassistant/components/google_assistant/helpers.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/google_assistant/helpers.py b/homeassistant/components/google_assistant/helpers.py index 7f8f7a68ffa..76869487ee3 100644 --- a/homeassistant/components/google_assistant/helpers.py +++ b/homeassistant/components/google_assistant/helpers.py @@ -521,7 +521,7 @@ def supported_traits_for_state(state: State) -> list[type[trait._Trait]]: class GoogleEntity: """Adaptation of Entity expressed in Google's terms.""" - __slots__ = ("hass", "config", "state", "_traits") + __slots__ = ("hass", "config", "state", "entity_id", "_traits") def __init__( self, hass: HomeAssistant, config: AbstractConfig, state: State @@ -530,17 +530,13 @@ class GoogleEntity: self.hass = hass self.config = config self.state = state + self.entity_id = state.entity_id self._traits: list[trait._Trait] | None = None def __repr__(self) -> str: """Return the representation.""" return f"" - @property - def entity_id(self): - """Return entity ID.""" - return self.state.entity_id - @callback def traits(self) -> list[trait._Trait]: """Return traits for entity.""" From 7f4fca63ed78f1cdcbf7e70f08a73e0cb9d7c353 Mon Sep 17 00:00:00 2001 From: Jeremy Cook <8317651+jm-cook@users.noreply.github.com> Date: Thu, 29 Aug 2024 07:49:05 +0200 Subject: [PATCH 1203/1635] SmartThings edge driver for heatit thermostats does not require cooling setpoint (#123188) * remove cooling setpoint requirement for thermostats. Air conditioning remains unchanged * remove cooling setpoint requirement for thermostats. Air conditioning remains unchanged * versions should not be set on core integrations. * Added tests for minimal smartthings thermostat with no cooling. * Added tests for minimal smartthings thermostat with no cooling. 
* Formatted tests with ruff format --- .../components/smartthings/climate.py | 1 - tests/components/smartthings/test_climate.py | 42 +++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/smartthings/climate.py b/homeassistant/components/smartthings/climate.py index c3929ababc1..0598e549f24 100644 --- a/homeassistant/components/smartthings/climate.py +++ b/homeassistant/components/smartthings/climate.py @@ -143,7 +143,6 @@ def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None: # Or must have all of these thermostat capabilities thermostat_capabilities = [ Capability.temperature_measurement, - Capability.thermostat_cooling_setpoint, Capability.thermostat_heating_setpoint, Capability.thermostat_mode, ] diff --git a/tests/components/smartthings/test_climate.py b/tests/components/smartthings/test_climate.py index e4b8cb6d373..d39ee2d6bed 100644 --- a/tests/components/smartthings/test_climate.py +++ b/tests/components/smartthings/test_climate.py @@ -88,6 +88,26 @@ def basic_thermostat_fixture(device_factory): return device +@pytest.fixture(name="minimal_thermostat") +def minimal_thermostat_fixture(device_factory): + """Fixture returns a minimal thermostat without cooling.""" + device = device_factory( + "Minimal Thermostat", + capabilities=[ + Capability.temperature_measurement, + Capability.thermostat_heating_setpoint, + Capability.thermostat_mode, + ], + status={ + Attribute.heating_setpoint: 68, + Attribute.thermostat_mode: "off", + Attribute.supported_thermostat_modes: ["off", "heat"], + }, + ) + device.status.attributes[Attribute.temperature] = Status(70, "F", None) + return device + + @pytest.fixture(name="thermostat") def thermostat_fixture(device_factory): """Fixture returns a fully-featured thermostat.""" @@ -310,6 +330,28 @@ async def test_basic_thermostat_entity_state( assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.1 # celsius +async def test_minimal_thermostat_entity_state( + hass: HomeAssistant, minimal_thermostat +) -> None: + """Tests the state attributes properly match the thermostat type.""" + await setup_platform(hass, CLIMATE_DOMAIN, devices=[minimal_thermostat]) + state = hass.states.get("climate.minimal_thermostat") + assert state.state == HVACMode.OFF + assert ( + state.attributes[ATTR_SUPPORTED_FEATURES] + == ClimateEntityFeature.TARGET_TEMPERATURE_RANGE + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TURN_ON + ) + assert ATTR_HVAC_ACTION not in state.attributes + assert sorted(state.attributes[ATTR_HVAC_MODES]) == [ + HVACMode.HEAT, + HVACMode.OFF, + ] + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 21.1 # celsius + + async def test_thermostat_entity_state(hass: HomeAssistant, thermostat) -> None: """Tests the state attributes properly match the thermostat type.""" await setup_platform(hass, CLIMATE_DOMAIN, devices=[thermostat]) From 3b6128d590dac63ce0d13154805fd72653ae87ec Mon Sep 17 00:00:00 2001 From: Tobias Sauerwein Date: Thu, 29 Aug 2024 07:59:07 +0200 Subject: [PATCH 1204/1635] Bump pyatmo to 8.1.0 (#124340) --- homeassistant/components/netatmo/manifest.json | 2 +- homeassistant/components/netatmo/select.py | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../netatmo/snapshots/test_sensor.ambr | 18 +++++++++--------- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/netatmo/manifest.json b/homeassistant/components/netatmo/manifest.json index 
98734bcb742..0a32777b527 100644 --- a/homeassistant/components/netatmo/manifest.json +++ b/homeassistant/components/netatmo/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pyatmo"], - "requirements": ["pyatmo==8.0.3"] + "requirements": ["pyatmo==8.1.0"] } diff --git a/homeassistant/components/netatmo/select.py b/homeassistant/components/netatmo/select.py index 3fe098a75a9..92568b73e80 100644 --- a/homeassistant/components/netatmo/select.py +++ b/homeassistant/components/netatmo/select.py @@ -72,7 +72,7 @@ class NetatmoScheduleSelect(NetatmoBaseEntity, SelectEntity): self._attr_current_option = getattr(self.home.get_selected_schedule(), "name") self._attr_options = [ - schedule.name for schedule in self.home.schedules.values() + schedule.name for schedule in self.home.schedules.values() if schedule.name ] async def async_added_to_hass(self) -> None: @@ -128,5 +128,5 @@ class NetatmoScheduleSelect(NetatmoBaseEntity, SelectEntity): self.home.schedules ) self._attr_options = [ - schedule.name for schedule in self.home.schedules.values() + schedule.name for schedule in self.home.schedules.values() if schedule.name ] diff --git a/requirements_all.txt b/requirements_all.txt index 5081fe2fd36..2b3d8bb93d8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1741,7 +1741,7 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.0.3 +pyatmo==8.1.0 # homeassistant.components.apple_tv pyatv==0.15.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5ec8e6cba40..de47e944688 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1409,7 +1409,7 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.0.3 +pyatmo==8.1.0 # homeassistant.components.apple_tv pyatv==0.15.0 diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index bc2a18d918d..0d13a88cd67 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1159,7 +1159,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.cold_water_power-entry] @@ -1508,7 +1508,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.gas_power-entry] @@ -3257,7 +3257,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.hot_water_power-entry] @@ -3896,7 +3896,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_1_power-entry] @@ -3995,7 +3995,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_2_power-entry] @@ -4094,7 +4094,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_3_power-entry] @@ -4193,7 +4193,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_4_power-entry] @@ -4292,7 +4292,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_5_power-entry] @@ -5622,7 
+5622,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.total_power-entry] From 1101e7ef64e14ee7d5abc188e0348485e770582d Mon Sep 17 00:00:00 2001 From: AutonomousOwl <116417295+AutonomousOwl@users.noreply.github.com> Date: Thu, 29 Aug 2024 02:34:13 -0400 Subject: [PATCH 1205/1635] Update utility_account_id in Opower to be lowercase in statistic id (#124837) Update utility_account_id to be lowercase in statistic id --- homeassistant/components/opower/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index d0795ae4e15..9cef4e4a252 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -98,7 +98,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): account.meter_type.name.lower(), # Some utilities like AEP have "-" in their account id. # Replace it with "_" to avoid "Invalid statistic_id" - account.utility_account_id.replace("-", "_"), + account.utility_account_id.replace("-", "_").lower(), ) ) cost_statistic_id = f"{DOMAIN}:{id_prefix}_energy_cost" From 1cb9690001c71dd9b6016304fa5c12de499f9dca Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Thu, 29 Aug 2024 10:52:57 +0200 Subject: [PATCH 1206/1635] Cleanup unused `hass_storage` mocks in mqtt tests (#124846) --- tests/components/mqtt/test_client.py | 5 ----- tests/components/mqtt/test_init.py | 5 ----- 2 files changed, 10 deletions(-) diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index dcded7d187a..31c062b1abd 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -37,11 +37,6 @@ from tests.common import ( from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, MqttMockPahoClient -@pytest.fixture(autouse=True) -def mock_storage(hass_storage: dict[str, Any]) -> None: - """Autouse hass_storage for the TestCase tests.""" - - def help_assert_message( msg: ReceiveMessage, topic: str | None = None, diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 5dab5689518..8f7f7ed6289 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -77,11 +77,6 @@ class _DebugInfo(TypedDict): config: _DebugDeviceInfo -@pytest.fixture(autouse=True) -def mock_storage(hass_storage: dict[str, Any]) -> None: - """Autouse hass_storage for the TestCase tests.""" - - async def test_command_template_value(hass: HomeAssistant) -> None: """Test the rendering of MQTT command template.""" From eac7794741add5396af01abdd09984746e26e034 Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Thu, 29 Aug 2024 05:29:54 -0400 Subject: [PATCH 1207/1635] Fix sonos get_queue service call to restrict to sonos media_player entities (#124815) add sonos to filter --- homeassistant/components/sonos/services.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/sonos/services.yaml b/homeassistant/components/sonos/services.yaml index 6d6e7ef83f9..89706428899 100644 --- a/homeassistant/components/sonos/services.yaml +++ b/homeassistant/components/sonos/services.yaml @@ -66,6 +66,7 @@ remove_from_queue: get_queue: target: entity: + integration: sonos domain: media_player update_alarm: From a4e9e4b23badb47c80f29f6b529d8b792fff0018 Mon Sep 17 00:00:00 2001 From: Erik 
Montnemery Date: Thu, 29 Aug 2024 11:31:19 +0200 Subject: [PATCH 1208/1635] Tweak exception message in yaml loader (#124841) --- homeassistant/util/yaml/loader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index a56cf126f79..31efced60f6 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -385,7 +385,7 @@ def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: return _add_reference(loaded_yaml, loader, node) except FileNotFoundError as exc: raise HomeAssistantError( - f"{node.start_mark}: Unable to read file {fname}." + f"{node.start_mark}: Unable to read file {fname}" ) from exc From c4fd1cfc8f527928a017b10628b54dc1b46924b7 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Thu, 29 Aug 2024 11:23:04 +0100 Subject: [PATCH 1209/1635] Fix Mastodon migrate config entry log warning (#124848) Fix migrate config entry --- homeassistant/components/mastodon/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index 0d680170f3d..e8d23434248 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -97,10 +97,9 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: LOGGER.error("Migration failed with error %s", ex) return False - entry.minor_version = 2 - hass.config_entries.async_update_entry( entry, + minor_version=2, unique_id=slugify(construct_mastodon_username(instance, account)), ) From 354f4491c86741d4eecb2bc00e46f628ed0126d9 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 29 Aug 2024 13:03:47 +0200 Subject: [PATCH 1210/1635] Avoid unnecessary copying of variables when setting up automations (#124844) --- homeassistant/components/automation/__init__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index 8ab9c478bc4..2081ea938ae 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -991,15 +991,15 @@ async def _create_automation_entities( # Add trigger variables to variables variables = None - if CONF_TRIGGER_VARIABLES in config_block: + if CONF_TRIGGER_VARIABLES in config_block and CONF_VARIABLES in config_block: variables = ScriptVariables( dict(config_block[CONF_TRIGGER_VARIABLES].as_dict()) ) - if CONF_VARIABLES in config_block: - if variables: - variables.variables.update(config_block[CONF_VARIABLES].as_dict()) - else: - variables = config_block[CONF_VARIABLES] + variables.variables.update(config_block[CONF_VARIABLES].as_dict()) + elif CONF_TRIGGER_VARIABLES in config_block: + variables = config_block[CONF_TRIGGER_VARIABLES] + elif CONF_VARIABLES in config_block: + variables = config_block[CONF_VARIABLES] entity = AutomationEntity( automation_id, From 34680becaac608a7ea6297f50f3c2a7cf7189160 Mon Sep 17 00:00:00 2001 From: Fredrik Erlandsson Date: Thu, 29 Aug 2024 13:20:57 +0200 Subject: [PATCH 1211/1635] Bump pydaikin to 2.13.6 (#124852) --- homeassistant/components/daikin/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json index c395ee35cad..88c29a20435 
100644 --- a/homeassistant/components/daikin/manifest.json +++ b/homeassistant/components/daikin/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/daikin", "iot_class": "local_polling", "loggers": ["pydaikin"], - "requirements": ["pydaikin==2.13.5"], + "requirements": ["pydaikin==2.13.6"], "zeroconf": ["_dkapi._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 2b3d8bb93d8..1892eccc8f1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1798,7 +1798,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.13.5 +pydaikin==2.13.6 # homeassistant.components.danfoss_air pydanfossair==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index de47e944688..0918fb4d0f2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1445,7 +1445,7 @@ pycoolmasternet-async==0.2.2 pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.5 +pydaikin==2.13.6 # homeassistant.components.deako pydeako==0.4.0 From 681fe3485db8089570fb0eb1c2d54fe994404608 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 29 Aug 2024 17:24:04 +0200 Subject: [PATCH 1212/1635] Improve config flow type hints (a-f) (#124859) --- .../components/broadlink/config_flow.py | 28 +++++++++++-------- .../components/control4/config_flow.py | 14 +++++++--- .../components/dexcom/config_flow.py | 4 ++- .../components/ecobee/config_flow.py | 8 +++--- .../components/emonitor/config_flow.py | 7 +++-- .../components/emulated_roku/config_flow.py | 4 +-- .../components/enocean/config_flow.py | 10 +++++-- .../components/forked_daapd/config_flow.py | 4 ++- .../components/freedompro/config_flow.py | 6 ++-- 9 files changed, 53 insertions(+), 32 deletions(-) diff --git a/homeassistant/components/broadlink/config_flow.py b/homeassistant/components/broadlink/config_flow.py index 5d7acfd8b84..c9b2fb46608 100644 --- a/homeassistant/components/broadlink/config_flow.py +++ b/homeassistant/components/broadlink/config_flow.py @@ -5,7 +5,7 @@ import errno from functools import partial import logging import socket -from typing import TYPE_CHECKING, Any +from typing import Any import broadlink as blk from broadlink.exceptions import ( @@ -37,9 +37,7 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the Broadlink flow.""" - self.device: blk.Device | None = None + device: blk.Device async def async_set_device( self, device: blk.Device, raise_on_progress: bool = True @@ -131,8 +129,6 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): ) return await self.async_step_auth() - if TYPE_CHECKING: - assert self.device if device.mac == self.device.mac: await self.async_set_device(device, raise_on_progress=False) return await self.async_step_auth() @@ -158,10 +154,10 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_auth(self): + async def async_step_auth(self) -> ConfigFlowResult: """Authenticate to the device.""" device = self.device - errors = {} + errors: dict[str, str] = {} try: await self.hass.async_add_executor_job(device.auth) @@ -211,7 +207,11 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): ) return self.async_show_form(step_id="auth", errors=errors) - async def async_step_reset(self, user_input=None, errors=None): + async def async_step_reset( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) 
-> ConfigFlowResult: """Guide the user to unlock the device manually. We are unable to authenticate because the device is locked. @@ -234,7 +234,9 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): {CONF_HOST: device.host[0], CONF_TIMEOUT: device.timeout} ) - async def async_step_unlock(self, user_input=None): + async def async_step_unlock( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Unlock the device. The authentication succeeded, but the device is locked. @@ -288,10 +290,12 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_finish(self, user_input=None): + async def async_step_finish( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Choose a name for the device and create config entry.""" device = self.device - errors = {} + errors: dict[str, str] = {} # Abort reauthentication flow. self._abort_if_unique_id_configured( diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index aa7839b4383..77ae2c98c7d 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp.client_exceptions import ClientError from pyControl4.account import C4Account @@ -23,7 +23,7 @@ from homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_USERNAME, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import aiohttp_client, config_validation as cv from homeassistant.helpers.device_registry import format_mac @@ -49,7 +49,9 @@ DATA_SCHEMA = vol.Schema( class Control4Validator: """Validates that config details can be used to authenticate and communicate with Control4.""" - def __init__(self, host, username, password, hass): + def __init__( + self, host: str, username: str, password: str, hass: HomeAssistant + ) -> None: """Initialize.""" self.host = host self.username = username @@ -126,6 +128,8 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: controller_unique_id = hub.controller_unique_id + if TYPE_CHECKING: + assert hub.controller_unique_id mac = (controller_unique_id.split("_", 3))[2] formatted_mac = format_mac(mac) await self.async_set_unique_id(formatted_mac) @@ -160,7 +164,9 @@ class OptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 17bd1b3f7a8..c3ed43c8e9a 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -79,7 +79,9 @@ class DexcomOptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git 
a/homeassistant/components/ecobee/config_flow.py b/homeassistant/components/ecobee/config_flow.py index c0d4d9b03fc..f7709c68d91 100644 --- a/homeassistant/components/ecobee/config_flow.py +++ b/homeassistant/components/ecobee/config_flow.py @@ -23,9 +23,7 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the ecobee flow.""" - self._ecobee: Ecobee | None = None + _ecobee: Ecobee async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -59,7 +57,9 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_authorize(self, user_input=None): + async def async_step_authorize( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Present the user with the PIN so that the app can be authorized on ecobee.com.""" errors = {} diff --git a/homeassistant/components/emonitor/config_flow.py b/homeassistant/components/emonitor/config_flow.py index b90b1477f87..b924c7df522 100644 --- a/homeassistant/components/emonitor/config_flow.py +++ b/homeassistant/components/emonitor/config_flow.py @@ -34,10 +34,11 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + discovered_info: dict[str, str] + def __init__(self) -> None: """Initialize Emonitor ConfigFlow.""" self.discovered_ip: str | None = None - self.discovered_info: dict[str, str] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -87,7 +88,9 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Attempt to confirm.""" if user_input is not None: return self.async_create_entry( diff --git a/homeassistant/components/emulated_roku/config_flow.py b/homeassistant/components/emulated_roku/config_flow.py index eed0298fc57..725987418da 100644 --- a/homeassistant/components/emulated_roku/config_flow.py +++ b/homeassistant/components/emulated_roku/config_flow.py @@ -6,13 +6,13 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from .const import CONF_LISTEN_PORT, DEFAULT_NAME, DEFAULT_PORT, DOMAIN @callback -def configured_servers(hass): +def configured_servers(hass: HomeAssistant) -> set[str]: """Return a set of the configured servers.""" return { entry.data[CONF_NAME] for entry in hass.config_entries.async_entries(DOMAIN) diff --git a/homeassistant/components/enocean/config_flow.py b/homeassistant/components/enocean/config_flow.py index 3105b3ab595..fef633d94c3 100644 --- a/homeassistant/components/enocean/config_flow.py +++ b/homeassistant/components/enocean/config_flow.py @@ -43,12 +43,14 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_detect() - async def async_step_detect(self, user_input=None): + async def async_step_detect( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Propose a list of detected dongles.""" errors = {} if user_input is not None: if user_input[CONF_DEVICE] == self.MANUAL_PATH_VALUE: - return await self.async_step_manual(None) + return await self.async_step_manual() if await self.validate_enocean_conf(user_input): return self.create_enocean_entry(user_input) 
errors = {CONF_DEVICE: ERROR_INVALID_DONGLE_PATH} @@ -64,7 +66,9 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_manual(self, user_input=None): + async def async_step_manual( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Request manual USB dongle path.""" default_value = None errors = {} diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 1f76fe21bad..5f061aa4be1 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -56,7 +56,9 @@ class ForkedDaapdOptionsFlowHandler(OptionsFlow): """Initialize.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="options", data=user_input) diff --git a/homeassistant/components/freedompro/config_flow.py b/homeassistant/components/freedompro/config_flow.py index f986cd05904..48d075f8a87 100644 --- a/homeassistant/components/freedompro/config_flow.py +++ b/homeassistant/components/freedompro/config_flow.py @@ -19,19 +19,19 @@ STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) class Hub: """Freedompro Hub class.""" - def __init__(self, hass, api_key): + def __init__(self, hass: HomeAssistant, api_key: str) -> None: """Freedompro Hub class init.""" self._hass = hass self._api_key = api_key - async def authenticate(self): + async def authenticate(self) -> dict[str, Any]: """Freedompro Hub class authenticate.""" return await get_list( aiohttp_client.async_get_clientsession(self._hass), self._api_key ) -async def validate_input(hass: HomeAssistant, api_key): +async def validate_input(hass: HomeAssistant, api_key: str) -> None: """Validate api key.""" hub = Hub(hass, api_key) result = await hub.authenticate() From c36fc70ab4c6374c07c05ebf4b63ca133134d1d1 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Thu, 29 Aug 2024 17:24:25 +0200 Subject: [PATCH 1213/1635] Update frontend to 20240829.0 (#124864) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 0e1d443553d..7e934c887fa 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240828.0"] + "requirements": ["home-assistant-frontend==20240829.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 3aa1b7a298a..e6a5d6746f5 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.3.2 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240828.0 +home-assistant-frontend==20240829.0 home-assistant-intents==2024.8.7 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 1892eccc8f1..96274a06631 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -1102,7 +1102,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240828.0 +home-assistant-frontend==20240829.0 # homeassistant.components.conversation home-assistant-intents==2024.8.7 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0918fb4d0f2..7c968bb479c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -925,7 +925,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240828.0 +home-assistant-frontend==20240829.0 # homeassistant.components.conversation home-assistant-intents==2024.8.7 From 149aebb0bcbef7814255064dd3a263c05226dc1d Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Thu, 29 Aug 2024 17:25:04 +0200 Subject: [PATCH 1214/1635] Add missing translation key in Knocki (#124862) --- homeassistant/components/knocki/strings.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/homeassistant/components/knocki/strings.json b/homeassistant/components/knocki/strings.json index b7a7daad1fc..8f5d0161166 100644 --- a/homeassistant/components/knocki/strings.json +++ b/homeassistant/components/knocki/strings.json @@ -11,6 +11,9 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" } }, "entity": { From a04970bd54a9cadfc4b61c97d9b4328c7393e51d Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 29 Aug 2024 07:32:13 -1000 Subject: [PATCH 1215/1635] Address august review comments (#124819) * Address august review comments Followup to https://github.com/home-assistant/core/pull/124677 * cleanup loop * drop mixin name * event entity add cleanup * remove duplicate prop * pep0695 type * remove some not needed block till done * cleanup august tests * switch to freezegun * snapshots for dev reg * SOURCE_USER nit * snapshots * pytest.raises * not loaded check --- homeassistant/components/august/__init__.py | 2 +- .../components/august/binary_sensor.py | 11 +- homeassistant/components/august/button.py | 4 +- homeassistant/components/august/camera.py | 4 +- homeassistant/components/august/entity.py | 4 +- homeassistant/components/august/event.py | 28 +- homeassistant/components/august/lock.py | 4 +- homeassistant/components/august/sensor.py | 21 +- homeassistant/components/august/util.py | 7 +- .../august/snapshots/test_binary_sensor.ambr | 33 +++ .../august/snapshots/test_lock.ambr | 37 +++ tests/components/august/test_binary_sensor.py | 239 ++++++------------ tests/components/august/test_button.py | 1 - tests/components/august/test_camera.py | 10 +- tests/components/august/test_config_flow.py | 14 +- tests/components/august/test_event.py | 46 ++-- tests/components/august/test_init.py | 32 +-- tests/components/august/test_lock.py | 166 ++++-------- tests/components/august/test_sensor.py | 71 ++---- 19 files changed, 312 insertions(+), 422 deletions(-) create mode 100644 tests/components/august/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/august/snapshots/test_lock.ambr diff --git a/homeassistant/components/august/__init__.py b/homeassistant/components/august/__init__.py index 53aa3cdffd8..47a7f75611a 100644 --- a/homeassistant/components/august/__init__.py +++ b/homeassistant/components/august/__init__.py @@ -24,7 +24,7 @@ from .util import async_create_august_clientsession type AugustConfigEntry = 
ConfigEntry[AugustData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Set up August from a config entry.""" session = async_create_august_clientsession(hass) august_gateway = AugustGateway(Path(hass.config.config_dir), session) diff --git a/homeassistant/components/august/binary_sensor.py b/homeassistant/components/august/binary_sensor.py index 6a56692bcd6..fb877252010 100644 --- a/homeassistant/components/august/binary_sensor.py +++ b/homeassistant/components/august/binary_sensor.py @@ -109,12 +109,11 @@ async def async_setup_entry( for description in SENSOR_TYPES_DOORBELL ) - for doorbell in data.doorbells: - entities.extend( - AugustDoorbellBinarySensor(data, doorbell, description) - for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL - ) - + entities.extend( + AugustDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) diff --git a/homeassistant/components/august/button.py b/homeassistant/components/august/button.py index 406475db601..79f2b67888a 100644 --- a/homeassistant/components/august/button.py +++ b/homeassistant/components/august/button.py @@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AugustConfigEntry -from .entity import AugustEntityMixin +from .entity import AugustEntity async def async_setup_entry( @@ -18,7 +18,7 @@ async def async_setup_entry( async_add_entities(AugustWakeLockButton(data, lock, "wake") for lock in data.locks) -class AugustWakeLockButton(AugustEntityMixin, ButtonEntity): +class AugustWakeLockButton(AugustEntity, ButtonEntity): """Representation of an August lock wake button.""" _attr_translation_key = "wake" diff --git a/homeassistant/components/august/camera.py b/homeassistant/components/august/camera.py index 4e569e2a91e..f4398455256 100644 --- a/homeassistant/components/august/camera.py +++ b/homeassistant/components/august/camera.py @@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AugustConfigEntry, AugustData from .const import DEFAULT_NAME, DEFAULT_TIMEOUT -from .entity import AugustEntityMixin +from .entity import AugustEntity _LOGGER = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def async_setup_entry( ) -class AugustCamera(AugustEntityMixin, Camera): +class AugustCamera(AugustEntity, Camera): """An implementation of an August security camera.""" _attr_translation_key = "camera" diff --git a/homeassistant/components/august/entity.py b/homeassistant/components/august/entity.py index babf5c587fb..28c722354ba 100644 --- a/homeassistant/components/august/entity.py +++ b/homeassistant/components/august/entity.py @@ -20,7 +20,7 @@ from .const import MANUFACTURER DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] -class AugustEntityMixin(Entity): +class AugustEntity(Entity): """Base implementation for August device.""" _attr_should_poll = False @@ -87,7 +87,7 @@ class AugustEntityMixin(Entity): self._update_from_data() -class AugustDescriptionEntity(AugustEntityMixin): +class AugustDescriptionEntity(AugustEntity): """An August entity with a description.""" def __init__( diff --git a/homeassistant/components/august/event.py b/homeassistant/components/august/event.py index b65f72272a3..49b14630337 100644 --- a/homeassistant/components/august/event.py +++ b/homeassistant/components/august/event.py @@ -63,22 +63,17 @@ async def async_setup_entry( ) -> None: """Set up the august event platform.""" data = config_entry.runtime_data - entities: list[AugustEventEntity] = [] - - for lock in data.locks: - detail = data.get_device_detail(lock.device_id) - if detail.doorbell: - entities.extend( - AugustEventEntity(data, lock, description) - for description in TYPES_DOORBELL - ) - - for doorbell in data.doorbells: - entities.extend( - AugustEventEntity(data, doorbell, description) - for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL - ) - + entities: list[AugustEventEntity] = [ + AugustEventEntity(data, lock, description) + for description in TYPES_DOORBELL + for lock in data.locks + if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell + ] + entities.extend( + AugustEventEntity(data, doorbell, description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) @@ -86,7 +81,6 @@ class AugustEventEntity(AugustDescriptionEntity, EventEntity): """An august event entity.""" entity_description: AugustEventEntityDescription - _attr_has_entity_name = True _last_activity: Activity | None = None @callback diff --git a/homeassistant/components/august/lock.py b/homeassistant/components/august/lock.py index 5382c710229..fe5d90371ad 100644 --- a/homeassistant/components/august/lock.py +++ b/homeassistant/components/august/lock.py @@ -19,7 +19,7 @@ from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.util.dt as dt_util from . 
import AugustConfigEntry, AugustData -from .entity import AugustEntityMixin +from .entity import AugustEntity _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,7 @@ async def async_setup_entry( async_add_entities(AugustLock(data, lock) for lock in data.locks) -class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity): +class AugustLock(AugustEntity, RestoreEntity, LockEntity): """Representation of an August lock.""" _attr_name = None diff --git a/homeassistant/components/august/sensor.py b/homeassistant/components/august/sensor.py index 7a4c1a92358..b7c0d618492 100644 --- a/homeassistant/components/august/sensor.py +++ b/homeassistant/components/august/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar, cast +from typing import Any, cast from yalexs.activity import ActivityType, LockOperationActivity from yalexs.doorbell import Doorbell @@ -42,7 +42,7 @@ from .const import ( OPERATION_METHOD_REMOTE, OPERATION_METHOD_TAG, ) -from .entity import AugustDescriptionEntity, AugustEntityMixin +from .entity import AugustDescriptionEntity, AugustEntity def _retrieve_device_battery_state(detail: LockDetail) -> int: @@ -55,14 +55,13 @@ def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: return detail.battery_percentage -_T = TypeVar("_T", LockDetail, KeypadDetail) - - @dataclass(frozen=True, kw_only=True) -class AugustSensorEntityDescription(SensorEntityDescription, Generic[_T]): +class AugustSensorEntityDescription[T: LockDetail | KeypadDetail]( + SensorEntityDescription +): """Mixin for required keys.""" - value_fn: Callable[[_T], int | None] + value_fn: Callable[[T], int | None] SENSOR_TYPE_DEVICE_BATTERY = AugustSensorEntityDescription[LockDetail]( @@ -114,7 +113,7 @@ async def async_setup_entry( async_add_entities(entities) -class AugustOperatorSensor(AugustEntityMixin, RestoreSensor): +class AugustOperatorSensor(AugustEntity, RestoreSensor): """Representation of an August lock operation sensor.""" _attr_translation_key = "operator" @@ -198,10 +197,12 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreSensor): self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] -class AugustBatterySensor(AugustDescriptionEntity, SensorEntity, Generic[_T]): +class AugustBatterySensor[T: LockDetail | KeypadDetail]( + AugustDescriptionEntity, SensorEntity +): """Representation of an August sensor.""" - entity_description: AugustSensorEntityDescription[_T] + entity_description: AugustSensorEntityDescription[T] _attr_device_class = SensorDeviceClass.BATTERY _attr_native_unit_of_measurement = PERCENTAGE diff --git a/homeassistant/components/august/util.py b/homeassistant/components/august/util.py index 6972913ba22..5449d048613 100644 --- a/homeassistant/components/august/util.py +++ b/homeassistant/components/august/util.py @@ -63,16 +63,11 @@ def _activity_time_based(latest: Activity) -> Activity | None: """Get the latest state of the sensor.""" start = latest.activity_start_time end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION - if start <= _native_datetime() <= end: + if start <= datetime.now() <= end: return latest return None -def _native_datetime() -> datetime: - """Return time in the format august uses without timezone.""" - return datetime.now() - - def retrieve_online_state( data: AugustData, detail: DoorbellDetail | LockDetail ) -> bool: diff --git a/tests/components/august/snapshots/test_binary_sensor.ambr 
b/tests/components/august/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6e95b0ce552 --- /dev/null +++ b/tests/components/august/snapshots/test_binary_sensor.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_doorbell_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'tmt100_name', + 'config_entries': , + 'configuration_url': 'https://account.august.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'august', + 'tmt100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'August Home Inc.', + 'model': 'hydra1', + 'model_id': None, + 'name': 'tmt100 Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'tmt100 Name', + 'sw_version': '3.1.0-HYDRC75+201909251139', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/august/snapshots/test_lock.ambr b/tests/components/august/snapshots/test_lock.ambr new file mode 100644 index 00000000000..6aad3a140ca --- /dev/null +++ b/tests/components/august/snapshots/test_lock.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'online_with_doorsense_name', + 'config_entries': , + 'configuration_url': 'https://account.august.com', + 'connections': set({ + tuple( + 'bluetooth', + '12:22', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'august', + 'online_with_doorsense', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'August Home Inc.', + 'model': 'AUG-MD01', + 'model_id': None, + 'name': 'online_with_doorsense Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'online_with_doorsense Name', + 'sw_version': 'undefined-4.3.0-1.8.14', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/august/test_binary_sensor.py b/tests/components/august/test_binary_sensor.py index 33d582de8d8..4ae300ae56b 100644 --- a/tests/components/august/test_binary_sensor.py +++ b/tests/components/august/test_binary_sensor.py @@ -1,8 +1,10 @@ """The binary_sensor tests for the august platform.""" import datetime -from unittest.mock import Mock, patch +from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion from yalexs.pubnub_async import AugustPubNub from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN @@ -36,28 +38,20 @@ async def test_doorsense(hass: HomeAssistant) -> None: hass, "get_lock.online_with_doorsense.json" ) await _create_august_with_devices(hass, [lock_one]) + states = hass.states - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.services.async_call(LOCK_DOMAIN, 
SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -69,113 +63,82 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: hass, "get_activity.bridge_offline.json" ) await _create_august_with_devices(hass, [lock_one], activities=activities) - - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + states = hass.states + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state + == STATE_UNAVAILABLE ) - assert binary_sensor_online_with_doorsense_name.state == STATE_UNAVAILABLE async def test_create_doorbell(hass: HomeAssistant) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) + states = hass.states - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF async def test_create_doorbell_offline(hass: HomeAssistant) -> None: """Test creation of a doorbell that is offline.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) + states = hass.states - binary_sensor_tmt100_name_motion = hass.states.get( - "binary_sensor.tmt100_name_motion" + assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE + assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF + assert ( + states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE ) - assert binary_sensor_tmt100_name_motion.state == STATE_UNAVAILABLE - binary_sensor_tmt100_name_online = hass.states.get( 
- "binary_sensor.tmt100_name_connectivity" - ) - assert binary_sensor_tmt100_name_online.state == STATE_OFF - binary_sensor_tmt100_name_ding = hass.states.get( - "binary_sensor.tmt100_name_doorbell_ding" - ) - assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( hass, "get_activity.doorbell_motion.json" ) await _create_august_with_devices(hass, [doorbell_one], activities=activities) + states = hass.states - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" - ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF -async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_pubnub( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via pubnub.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") pubnub = AugustPubNub() await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF pubnub.message( pubnub, @@ -198,10 +161,7 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert 
binary_sensor_k98gidt45gul_name_image_capture.state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON pubnub.message( pubnub, @@ -235,29 +195,19 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF pubnub.message( pubnub, @@ -271,37 +221,25 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF async def test_doorbell_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) reg_device = device_registry.async_get_device(identifiers={("august", "tmt100")}) - assert reg_device.model == "hydra1" - assert reg_device.name == "tmt100 Name" - assert reg_device.manufacturer == "August Home Inc." 
- assert reg_device.sw_version == "3.1.0-HYDRC75+201909251139" + assert reg_device == snapshot async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: @@ -314,11 +252,9 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: config_entry = await _create_august_with_devices( hass, [lock_one], activities=activities, pubnub=pubnub ) + states = hass.states - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.message( pubnub, @@ -330,10 +266,9 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF pubnub.message( pubnub, @@ -344,33 +279,22 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.message( pubnub, @@ -381,17 +305,11 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert 
states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() @@ -402,7 +320,10 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") await _create_august_with_devices(hass, [lock_one]) - ding_sensor = hass.states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + states = hass.states + assert ( + states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + ).state + == STATE_OFF ) - assert ding_sensor.state == STATE_OFF diff --git a/tests/components/august/test_button.py b/tests/components/august/test_button.py index 8ae2bc8a70d..948b59b2286 100644 --- a/tests/components/august/test_button.py +++ b/tests/components/august/test_button.py @@ -20,5 +20,4 @@ async def test_wake_lock(hass: HomeAssistant) -> None: await hass.services.async_call( BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True ) - await hass.async_block_till_done() api_instance.async_status_async.assert_called_once() diff --git a/tests/components/august/test_camera.py b/tests/components/august/test_camera.py index 539a26cc30f..5ab7d49c3b8 100644 --- a/tests/components/august/test_camera.py +++ b/tests/components/august/test_camera.py @@ -25,14 +25,10 @@ async def test_create_doorbell( ): await _create_august_with_devices(hass, [doorbell_one], brand=Brand.AUGUST) - camera_k98gidt45gul_name_camera = hass.states.get( - "camera.k98gidt45gul_name_camera" - ) - assert camera_k98gidt45gul_name_camera.state == STATE_IDLE + camera_state = hass.states.get("camera.k98gidt45gul_name_camera") + assert camera_state.state == STATE_IDLE - url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ - "entity_picture" - ] + url = camera_state.attributes["entity_picture"] client = await hass_client_no_auth() resp = await client.get(url) diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index e0ccee55f10..9902901d29f 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -5,7 +5,6 @@ from unittest.mock import patch from yalexs.authenticator_common import ValidationResult from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation -from homeassistant import config_entries from homeassistant.components.august.const import ( CONF_ACCESS_TOKEN_CACHE_FILE, CONF_BRAND, @@ -14,6 +13,7 @@ from homeassistant.components.august.const import ( DOMAIN, VERIFICATION_CODE_KEY, ) +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -25,7 +25,7 @@ async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -66,7 +66,7 @@ async def test_form(hass: HomeAssistant) -> None: async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with 
patch( @@ -90,7 +90,7 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None: async def test_user_unexpected_exception(hass: HomeAssistant) -> None: """Test we handle an unexpected exception.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with patch( @@ -115,7 +115,7 @@ async def test_user_unexpected_exception(hass: HomeAssistant) -> None: async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with patch( @@ -138,7 +138,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: async def test_form_needs_validate(hass: HomeAssistant) -> None: """Test we present validation when we need to validate.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with ( @@ -367,7 +367,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/august/test_event.py b/tests/components/august/test_event.py index 61b7560f462..0bb482c5b89 100644 --- a/tests/components/august/test_event.py +++ b/tests/components/august/test_event.py @@ -1,13 +1,12 @@ """The event tests for the august.""" -import datetime -from unittest.mock import Mock, patch +from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from yalexs.pubnub_async import AugustPubNub from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util from .mocks import ( _create_august_with_devices, @@ -45,7 +44,9 @@ async def test_create_doorbell_offline(hass: HomeAssistant) -> None: assert doorbell_state.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( @@ -61,19 +62,16 @@ async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: assert doorbell_state is not None assert doorbell_state.state == STATE_UNKNOWN - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() motion_state = hass.states.get("event.k98gidt45gul_name_motion") assert motion_state.state == isotime -async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_pubnub( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via pubnub.""" doorbell_one = await 
_mock_doorbell_from_fixture(hass, "get_doorbell.json") pubnub = AugustPubNub() @@ -125,14 +123,9 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: assert motion_state.state != STATE_UNKNOWN isotime = motion_state.state - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() motion_state = hass.states.get("event.k98gidt45gul_name_motion") assert motion_state is not None @@ -155,14 +148,9 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: assert doorbell_state.state != STATE_UNKNOWN isotime = motion_state.state - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") assert doorbell_state is not None diff --git a/tests/components/august/test_init.py b/tests/components/august/test_init.py index 8261e32d668..954436f209a 100644 --- a/tests/components/august/test_init.py +++ b/tests/components/august/test_init.py @@ -122,16 +122,16 @@ async def test_unlock_throws_august_api_http_error(hass: HomeAssistant) -> None: "unlock_return_activities": _unlock_return_activities_side_effect }, ) - last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - try: + + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - except HomeAssistantError as err: - last_err = err - assert str(last_err) == ( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ) async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None: @@ -152,16 +152,15 @@ async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None: "lock_return_activities": _lock_return_activities_side_effect }, ) - last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - try: + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - except HomeAssistantError as err: - last_err = err - assert str(last_err) == ( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ) async def test_open_throws_hass_service_not_supported_error( @@ -371,6 +370,7 @@ async def test_load_unload(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED async def test_load_triggers_ble_discovery( diff --git a/tests/components/august/test_lock.py b/tests/components/august/test_lock.py index 8bb71826d24..e786cebf3e1 100644 --- 
a/tests/components/august/test_lock.py +++ b/tests/components/august/test_lock.py @@ -6,6 +6,7 @@ from unittest.mock import Mock from aiohttp import ClientResponseError from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME from yalexs.pubnub_async import AugustPubNub @@ -43,7 +44,7 @@ from tests.common import async_fire_time_changed async def test_lock_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) @@ -52,10 +53,7 @@ async def test_lock_device_registry( reg_device = device_registry.async_get_device( identifiers={("august", "online_with_doorsense")} ) - assert reg_device.model == "AUG-MD01" - assert reg_device.sw_version == "undefined-4.3.0-1.8.14" - assert reg_device.name == "online_with_doorsense Name" - assert reg_device.manufacturer == "August Home Inc." + assert reg_device == snapshot async def test_lock_changed_by(hass: HomeAssistant) -> None: @@ -65,14 +63,10 @@ async def test_lock_changed_by(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert ( - lock_online_with_doorsense_name.attributes.get("changed_by") - == "Your favorite elven princess" - ) + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["changed_by"] == "Your favorite elven princess" async def test_state_locking(hass: HomeAssistant) -> None: @@ -82,9 +76,7 @@ async def test_state_locking(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKING async def test_state_unlocking(hass: HomeAssistant) -> None: @@ -96,9 +88,7 @@ async def test_state_unlocking(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING async def test_state_jammed(hass: HomeAssistant) -> None: @@ -108,9 +98,7 @@ async def test_state_jammed(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_JAMMED + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_JAMMED async def test_one_lock_operation( @@ -119,35 +107,27 @@ async def test_one_lock_operation( """Test creation 
of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) await _create_august_with_devices(hass, [lock_one]) + states = hass.states - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -155,8 +135,7 @@ async def test_one_lock_operation( ) assert lock_operator_sensor assert ( - hass.states.get("sensor.online_with_doorsense_name_operator").state - == STATE_UNKNOWN + states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN ) @@ -170,7 +149,6 @@ async def test_open_lock_operation(hass: HomeAssistant) -> None: data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") assert lock_online_with_unlatch_name.state == STATE_UNLOCKED @@ -189,12 +167,10 @@ async def test_open_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_with_unlatch], pubnub=pubnub) pubnub.connected = True - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -209,8 +185,7 @@ async def test_open_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + assert 
hass.states.get("lock.online_with_unlatch_name").state == STATE_UNLOCKED await hass.async_block_till_done() @@ -227,19 +202,15 @@ async def test_one_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_one], pubnub=pubnub) pubnub.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -254,17 +225,13 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -279,8 +246,8 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -306,8 +273,8 @@ async def test_one_lock_operation_pubnub_connected( ) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED async def test_lock_jammed(hass: HomeAssistant) -> None: @@ -325,22 +292,18 @@ async def test_lock_jammed(hass: HomeAssistant) -> None: }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense 
Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_JAMMED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_JAMMED async def test_lock_throws_exception_on_unknown_status_code( @@ -360,15 +323,12 @@ async def test_lock_throws_exception_on_unknown_status_code( }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} with pytest.raises(ClientResponseError): @@ -383,9 +343,7 @@ async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one]) - lock_brokenid_name = hass.states.get("lock.brokenid_name") - - assert lock_brokenid_name.state == STATE_UNKNOWN + assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -397,9 +355,7 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNAVAILABLE + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE async def test_lock_bridge_online(hass: HomeAssistant) -> None: @@ -411,14 +367,13 @@ async def test_lock_bridge_online(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKED async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) + states = hass.states assert lock_one.pubsub_channel == "pubsub" pubnub = AugustPubNub() @@ -428,9 +383,7 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: ) pubnub.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED pubnub.message( pubnub, @@ -446,8 +399,7 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state 
== STATE_UNLOCKING pubnub.message( pubnub, @@ -463,25 +415,21 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKING pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING pubnub.message( pubnub, @@ -496,13 +444,11 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 67223e9dff0..2d72d287ce3 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -28,13 +28,9 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) - sensor_k98gidt45gul_name_battery = hass.states.get( - "sensor.k98gidt45gul_name_battery" - ) - assert sensor_k98gidt45gul_name_battery.state == "96" - assert ( - sensor_k98gidt45gul_name_battery.attributes["unit_of_measurement"] == PERCENTAGE - ) + battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") + assert battery_state.state == "96" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE async def test_create_doorbell_offline( @@ -44,9 +40,9 @@ async def test_create_doorbell_offline( doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = 
hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery.state == "81" - assert sensor_tmt100_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + battery_state = hass.states.get("sensor.tmt100_name_battery") + assert battery_state.state == "81" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get("sensor.tmt100_name_battery") assert entry @@ -60,8 +56,7 @@ async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery is None + assert hass.states.get("sensor.tmt100_name_battery") is None async def test_create_lock_with_linked_keypad( @@ -71,25 +66,21 @@ async def test_create_lock_with_linked_keypad( lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") await _create_august_with_devices(hass, [lock_one]) - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + battery_state = hass.states.get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "62" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "62" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" @@ -101,42 +92,32 @@ async def test_create_lock_with_low_battery_linked_keypad( """Test creation of a lock with a linked keypad that both have a battery.""" lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") await _create_august_with_devices(hass, [lock_one]) + states = hass.states - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + battery_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_battery") + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "10" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "10" + assert 
keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE
     entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery")
     assert entry
     assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery"
     # No activity means it will be unavailable until someone unlocks/locks it
-    lock_operator_sensor = entity_registry.async_get(
+    operator_entry = entity_registry.async_get(
         "sensor.a6697750d607098bae8d6baa11ef8063_name_operator"
     )
-    assert (
-        lock_operator_sensor.unique_id
-        == "A6697750D607098BAE8D6BAA11EF8063_lock_operator"
-    )
-    assert (
-        hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state
-        == STATE_UNKNOWN
-    )
+    assert operator_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_lock_operator"
+
+    operator_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator")
+    assert operator_state.state == STATE_UNKNOWN

 async def test_lock_operator_bluetooth(

From ef452427e38b400cfaeeacae09d2f359a12efb34 Mon Sep 17 00:00:00 2001
From: Robert Resch
Date: Thu, 29 Aug 2024 19:34:19 +0200
Subject: [PATCH 1216/1635] Bump PyTurboJPEG to 1.7.5 (#124865)

---
 homeassistant/components/camera/manifest.json | 2 +-
 homeassistant/components/stream/manifest.json | 2 +-
 homeassistant/package_constraints.txt | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/homeassistant/components/camera/manifest.json b/homeassistant/components/camera/manifest.json
index b1df158a260..9c56d97f910 100644
--- a/homeassistant/components/camera/manifest.json
+++ b/homeassistant/components/camera/manifest.json
@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/camera",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["PyTurboJPEG==1.7.1"]
+  "requirements": ["PyTurboJPEG==1.7.5"]
 }
diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json
index 37158aa5fe3..dffd6d65a6e 100644
--- a/homeassistant/components/stream/manifest.json
+++ b/homeassistant/components/stream/manifest.json
@@ -7,5 +7,5 @@
   "integration_type": "system",
   "iot_class": "local_push",
   "quality_scale": "internal",
-  "requirements": ["PyTurboJPEG==1.7.1", "ha-av==10.1.1", "numpy==1.26.0"]
+  "requirements": ["PyTurboJPEG==1.7.5", "ha-av==10.1.1", "numpy==1.26.0"]
 }
diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt
index e6a5d6746f5..329b2535855 100644
--- a/homeassistant/package_constraints.txt
+++ b/homeassistant/package_constraints.txt
@@ -51,7 +51,7 @@ pyOpenSSL==24.2.1
 pyserial==3.5
 pyspeex-noise==1.0.2
 python-slugify==8.0.4
-PyTurboJPEG==1.7.1
+PyTurboJPEG==1.7.5
 pyudev==0.24.1
 PyYAML==6.0.2
 requests==2.32.3
diff --git a/requirements_all.txt b/requirements_all.txt
index 96274a06631..1497033bd81 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -97,7 +97,7 @@ PyTransportNSW==0.1.1

 # homeassistant.components.camera
 # homeassistant.components.stream
-PyTurboJPEG==1.7.1
+PyTurboJPEG==1.7.5

 # homeassistant.components.vicare
 PyViCare-neo==0.2.1
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 7c968bb479c..bbeaf4cfcdb 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -91,7 +91,7 @@ PyTransportNSW==0.1.1

 # homeassistant.components.camera
 # homeassistant.components.stream
-PyTurboJPEG==1.7.1
+PyTurboJPEG==1.7.5

 # homeassistant.components.vicare
 PyViCare-neo==0.2.1
From ff9937f942828c18c82e8e6ff66ca9f0004fcea8 Mon Sep 17 00:00:00 2001
From: Michael Hansen
Date: Thu, 29 Aug 2024 13:29:11 -0500
Subject: [PATCH 1217/1635] Bump intents to 2024.8.29 (#124874)

---
 homeassistant/components/conversation/manifest.json | 2 +-
 homeassistant/package_constraints.txt | 2 +-
 requirements_all.txt | 2 +-
 requirements_test_all.txt | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json
index d7a308b8b2b..5a689485b29 100644
--- a/homeassistant/components/conversation/manifest.json
+++ b/homeassistant/components/conversation/manifest.json
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"]
+  "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.29"]
 }
diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt
index 329b2535855..c01b23ab4e4 100644
--- a/homeassistant/package_constraints.txt
+++ b/homeassistant/package_constraints.txt
@@ -32,7 +32,7 @@ hass-nabucasa==0.81.1
 hassil==1.7.4
 home-assistant-bluetooth==1.12.2
 home-assistant-frontend==20240829.0
-home-assistant-intents==2024.8.7
+home-assistant-intents==2024.8.29
 httpx==0.27.0
 ifaddr==0.2.0
 Jinja2==3.1.4
diff --git a/requirements_all.txt b/requirements_all.txt
index 1497033bd81..301fd44af3a 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1105,7 +1105,7 @@ holidays==0.55
 home-assistant-frontend==20240829.0

 # homeassistant.components.conversation
-home-assistant-intents==2024.8.7
+home-assistant-intents==2024.8.29

 # homeassistant.components.home_connect
 homeconnect==0.8.0
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index bbeaf4cfcdb..8c674932b29 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -928,7 +928,7 @@ holidays==0.55
 home-assistant-frontend==20240829.0

 # homeassistant.components.conversation
-home-assistant-intents==2024.8.7
+home-assistant-intents==2024.8.29

 # homeassistant.components.home_connect
 homeconnect==0.8.0
From 175ffe29f6363e3adb3827d1777c312fa6599952 Mon Sep 17 00:00:00 2001
From: "J.
Nick Koston" Date: Thu, 29 Aug 2024 13:07:21 -1000 Subject: [PATCH 1218/1635] Bump yalexs to 8.5.5 (#124891) changelog: https://github.com/bdraco/yalexs/compare/v8.5.4...v8.5.5 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index fe630638cf2..5f317a20834 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -24,5 +24,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.5.4", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.5.5", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 115036b96d5..9bee7df2e00 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.5.4", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.5.5", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 301fd44af3a..e86b82791ec 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2976,7 +2976,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.5.4 +yalexs==8.5.5 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8c674932b29..493fde89f89 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2359,7 +2359,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.5.4 +yalexs==8.5.5 # homeassistant.components.yeelight yeelight==0.7.14 From 7eeebf198b2c2c2c53ddc47d43b3834c416f4311 Mon Sep 17 00:00:00 2001 From: TheJulianJES Date: Fri, 30 Aug 2024 02:13:47 +0200 Subject: [PATCH 1219/1635] Fix ZHA group removal entity registry cleanup (#124889) * Fix ZHA cleanup entity registry parameter * Fix missing `gateway` when accessing coordinator device * Get `ZHADeviceProxy` for coordinator device --- homeassistant/components/zha/helpers.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index a5446af7e76..f70c8a9cb3e 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -802,21 +802,24 @@ class ZHAGatewayProxy(EventBase): ) def _cleanup_group_entity_registry_entries( - self, zigpy_group: zigpy.group.Group + self, zha_group_proxy: ZHAGroupProxy ) -> None: """Remove entity registry entries for group entities when the groups are removed from HA.""" # first we collect the potential unique ids for entities that could be created from this group possible_entity_unique_ids = [ - f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" + f"{domain}_zha_group_0x{zha_group_proxy.group.group_id:04x}" for domain in GROUP_ENTITY_DOMAINS ] # then we get all group entity entries tied to the coordinator entity_registry = er.async_get(self.hass) - assert self.coordinator_zha_device + assert 
self.gateway.coordinator_zha_device + coordinator_proxy = self.device_proxies[ + self.gateway.coordinator_zha_device.ieee + ] all_group_entity_entries = er.async_entries_for_device( entity_registry, - self.coordinator_zha_device.device_id, + coordinator_proxy.device_id, include_disabled_entities=True, ) From 4dfc11a1401ee79dc61196a3c82c388a23cb6a0d Mon Sep 17 00:00:00 2001 From: Tony <29752086+ms264556@users.noreply.github.com> Date: Fri, 30 Aug 2024 14:03:51 +1200 Subject: [PATCH 1220/1635] Bump aioruckus to v0.41 removing blocking call to load_default_certs from ruckus_unleashed integration (#123974) * fix ruckusd_unleashed blocking call to load_default_certs * remove extra loggers, bump aioruckus ver for debian packagers --- homeassistant/components/ruckus_unleashed/manifest.json | 4 ++-- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/ruckus_unleashed/manifest.json b/homeassistant/components/ruckus_unleashed/manifest.json index edaf0aa95d2..039840efc14 100644 --- a/homeassistant/components/ruckus_unleashed/manifest.json +++ b/homeassistant/components/ruckus_unleashed/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/ruckus_unleashed", "integration_type": "hub", "iot_class": "local_polling", - "loggers": ["aioruckus", "xmltodict"], - "requirements": ["aioruckus==0.34"] + "loggers": ["aioruckus"], + "requirements": ["aioruckus==0.41"] } diff --git a/requirements_all.txt b/requirements_all.txt index e86b82791ec..405eb9c2cd7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -347,7 +347,7 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.34 +aioruckus==0.41 # homeassistant.components.russound_rio aiorussound==2.3.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 493fde89f89..6c101b2db4c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -329,7 +329,7 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.34 +aioruckus==0.41 # homeassistant.components.russound_rio aiorussound==2.3.2 From 7bb93d4f3e2daaa8919d1c84b0ce68f5c0fade10 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 30 Aug 2024 04:05:27 +0200 Subject: [PATCH 1221/1635] Deduplicate warning messages in recorder DB migration (#124845) --- .../components/recorder/migration.py | 44 ++++++++----------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 7127a576580..3da0bc9abb1 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -128,6 +128,11 @@ MIGRATION_NOTE_OFFLINE = ( "Home Assistant will not start until the upgrade is completed. Please be patient " "and do not turn off or restart Home Assistant while the upgrade is in progress!" ) +MIGRATION_NOTE_MINUTES = ( + "Note: this may take several minutes on large databases and slow machines. " + "Please be patient!" +) +MIGRATION_NOTE_WHILE = "This will take a while; please be patient!" _EMPTY_ENTITY_ID = "missing.entity_id" _EMPTY_EVENT_TYPE = "missing_event_type" @@ -373,11 +378,10 @@ def _create_index( index = index_list[0] _LOGGER.debug("Creating %s index", index_name) _LOGGER.warning( - "Adding index `%s` to table `%s`. 
Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!", + "Adding index `%s` to table `%s`. %s", index_name, table_name, + MIGRATION_NOTE_MINUTES, ) with session_scope(session=session_maker()) as session: try: @@ -422,11 +426,10 @@ def _drop_index( DO NOT USE THIS FUNCTION IN ANY OPERATION THAT TAKES USER INPUT. """ _LOGGER.warning( - "Dropping index `%s` from table `%s`. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!", + "Dropping index `%s` from table `%s`. %s", index_name, table_name, + MIGRATION_NOTE_MINUTES, ) index_to_drop: str | None = None with session_scope(session=session_maker()) as session: @@ -472,13 +475,10 @@ def _add_columns( ) -> None: """Add columns to a table.""" _LOGGER.warning( - ( - "Adding columns %s to table %s. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!" - ), + "Adding columns %s to table %s. %s", ", ".join(column.split(" ")[0] for column in columns_def), table_name, + MIGRATION_NOTE_MINUTES, ) columns_def = [f"ADD {col_def}" for col_def in columns_def] @@ -534,13 +534,10 @@ def _modify_columns( return _LOGGER.warning( - ( - "Modifying columns %s in table %s. Note: this can take several " - "minutes on large databases and slow machines. Please " - "be patient!" - ), + "Modifying columns %s in table %s. %s", ", ".join(column.split(" ")[0] for column in columns_def), table_name, + MIGRATION_NOTE_MINUTES, ) if engine.dialect.name == SupportedDialect.POSTGRESQL: @@ -1781,10 +1778,9 @@ def _migrate_statistics_columns_to_timestamp_removing_duplicates( except IntegrityError as ex: _LOGGER.error( "Statistics table contains duplicate entries: %s; " - "Cleaning up duplicates and trying again; " - "This will take a while; " - "Please be patient!", + "Cleaning up duplicates and trying again; %s", ex, + MIGRATION_NOTE_WHILE, ) # There may be duplicated statistics entries, delete duplicates # and try again @@ -1812,10 +1808,9 @@ def _correct_table_character_set_and_collation( """Correct issues detected by validate_db_schema.""" # Attempt to convert the table to utf8mb4 _LOGGER.warning( - "Updating character set and collation of table %s to utf8mb4. " - "Note: this can take several minutes on large databases and slow " - "machines. Please be patient!", + "Updating character set and collation of table %s to utf8mb4. %s", table, + MIGRATION_NOTE_MINUTES, ) with ( contextlib.suppress(SQLAlchemyError), @@ -2736,10 +2731,7 @@ def rebuild_sqlite_table( orig_name = table_table.name temp_name = f"{table_table.name}_temp_{int(time())}" - _LOGGER.warning( - "Rebuilding SQLite table %s; This will take a while; Please be patient!", - orig_name, - ) + _LOGGER.warning("Rebuilding SQLite table %s; %s", orig_name, MIGRATION_NOTE_WHILE) try: # 12 step SQLite table rebuild From 3e0bd44d2acbdf31e7bea1e521709b479a45b272 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Thu, 29 Aug 2024 16:19:12 -1000 Subject: [PATCH 1222/1635] Bump aioesphomeapi to 25.3.1 (#124890) changelog: https://github.com/esphome/aioesphomeapi/compare/v25.2.1...v25.3.1 --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 454b547cdf4..9d42b7206e3 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==25.2.1", + "aioesphomeapi==25.3.1", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 405eb9c2cd7..d04dcee2c88 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.2.1 +aioesphomeapi==25.3.1 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6c101b2db4c..5ebeb167d47 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.2.1 +aioesphomeapi==25.3.1 # homeassistant.components.flo aioflo==2021.11.0 From cf90e77e57d3f6bb0cac1df4b169d269d6fac68e Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 29 Aug 2024 21:35:19 -1000 Subject: [PATCH 1223/1635] Add a repair issue for Yale Home users using the August integration (#124895) The Yale Home brand will stop working with the August integration very soon. Users must migrate to the Yale integration to avoid an interruption in service. 
--- homeassistant/components/august/__init__.py | 32 +++++++++++++++++-- .../components/august/config_flow.py | 10 ++++-- homeassistant/components/august/strings.json | 6 ++++ tests/components/august/test_config_flow.py | 4 +-- tests/components/august/test_init.py | 28 +++++++++++++++- 5 files changed, 73 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/august/__init__.py b/homeassistant/components/august/__init__.py index 47a7f75611a..434db46384b 100644 --- a/homeassistant/components/august/__init__.py +++ b/homeassistant/components/august/__init__.py @@ -6,15 +6,16 @@ from pathlib import Path from typing import cast from aiohttp import ClientResponseError +from yalexs.const import Brand from yalexs.exceptions import AugustApiAIOHTTPError from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from yalexs.manager.gateway import Config as YaleXSConfig from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, issue_registry as ir from .const import DOMAIN, PLATFORMS from .data import AugustData @@ -24,6 +25,26 @@ from .util import async_create_august_clientsession type AugustConfigEntry = ConfigEntry[AugustData] +@callback +def _async_create_yale_brand_migration_issue( + hass: HomeAssistant, entry: AugustConfigEntry +) -> None: + """Create an issue for a brand migration.""" + ir.async_create_issue( + hass, + DOMAIN, + "yale_brand_migration", + breaks_in_ha_version="2024.9", + learn_more_url="https://www.home-assistant.io/integrations/yale", + translation_key="yale_brand_migration", + is_fixable=False, + severity=ir.IssueSeverity.CRITICAL, + translation_placeholders={ + "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale" + }, + ) + + async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Set up August from a config entry.""" session = async_create_august_clientsession(hass) @@ -40,6 +61,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo return True +async def async_remove_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> None: + """Remove an August config entry.""" + ir.async_delete_issue(hass, DOMAIN, "yale_brand_migration") + + async def async_unload_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -51,6 +77,8 @@ async def async_setup_august( """Set up the August component.""" config = cast(YaleXSConfig, entry.data) await august_gateway.async_setup(config) + if august_gateway.api.brand == Brand.YALE_HOME: + _async_create_yale_brand_migration_issue(hass, entry) await august_gateway.async_authenticate() await august_gateway.async_refresh_access_token_if_needed() data = entry.runtime_data = AugustData(hass, august_gateway) diff --git a/homeassistant/components/august/config_flow.py b/homeassistant/components/august/config_flow.py index 2a1a20a9dc4..58c3549fe4d 100644 --- a/homeassistant/components/august/config_flow.py +++ b/homeassistant/components/august/config_flow.py @@ -9,7 +9,7 @@ from typing import Any import aiohttp import voluptuous as vol from yalexs.authenticator_common 
import ValidationResult -from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND +from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND, Brand from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -28,6 +28,12 @@ from .const import ( from .gateway import AugustGateway from .util import async_create_august_clientsession +# The Yale Home Brand is not supported by the August integration +# anymore and should migrate to the Yale integration +AVAILABLE_BRANDS = BRANDS_WITHOUT_OAUTH.copy() +del AVAILABLE_BRANDS[Brand.YALE_HOME] + + _LOGGER = logging.getLogger(__name__) @@ -118,7 +124,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): vol.Required( CONF_BRAND, default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND), - ): vol.In(BRANDS_WITHOUT_OAUTH), + ): vol.In(AVAILABLE_BRANDS), vol.Required( CONF_LOGIN_METHOD, default=self._user_auth_details.get( diff --git a/homeassistant/components/august/strings.json b/homeassistant/components/august/strings.json index 772a8dca479..589a494590b 100644 --- a/homeassistant/components/august/strings.json +++ b/homeassistant/components/august/strings.json @@ -1,4 +1,10 @@ { + "issues": { + "yale_brand_migration": { + "title": "Yale Home has a new integration", + "description": "Add the [Yale integration]({migrate_url}), and remove the August integration as soon as possible to avoid an interruption in service. The Yale Home brand will stop working with the August integration soon and will be removed in a future release." + } + }, "config": { "error": { "unhandled": "Unhandled error: {error}", diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index 9902901d29f..b3138342b8c 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -385,7 +385,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_BRAND: "yale_home", + CONF_BRAND: "yale_access", CONF_LOGIN_METHOD: "email", CONF_USERNAME: "my@email.tld", CONF_PASSWORD: "test-password", @@ -396,4 +396,4 @@ async def test_switching_brands(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" assert len(mock_setup_entry.mock_calls) == 1 - assert entry.data[CONF_BRAND] == "yale_home" + assert entry.data[CONF_BRAND] == "yale_access" diff --git a/tests/components/august/test_init.py b/tests/components/august/test_init.py index 954436f209a..1bbe8033ec8 100644 --- a/tests/components/august/test_init.py +++ b/tests/components/august/test_init.py @@ -5,6 +5,7 @@ from unittest.mock import Mock, patch from aiohttp import ClientResponseError import pytest from yalexs.authenticator_common import AuthenticationState +from yalexs.const import Brand from yalexs.exceptions import AugustApiAIOHTTPError from homeassistant.components.august.const import DOMAIN @@ -20,7 +21,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from homeassistant.setup import async_setup_component from .mocks import ( @@ -420,3 +425,24 @@ async def test_device_remove_devices( ) 
response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) assert response["success"] + + +async def test_brand_migration_issue(hass: HomeAssistant) -> None: + """Test creating and removing the brand migration issue.""" + august_operative_lock = await _mock_operative_august_lock_detail(hass) + config_entry = await _create_august_with_devices( + hass, [august_operative_lock], brand=Brand.YALE_HOME + ) + + assert config_entry.state is ConfigEntryState.LOADED + + issue_reg = ir.async_get(hass) + issue_entry = issue_reg.async_get_issue(DOMAIN, "yale_brand_migration") + assert issue_entry + assert issue_entry.severity == ir.IssueSeverity.CRITICAL + assert issue_entry.translation_placeholders == { + "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale" + } + + await hass.config_entries.async_remove(config_entry.entry_id) + assert not issue_reg.async_get_issue(DOMAIN, "yale_brand_migration") From df60e59a9541276a8f2a3110fe771f95c841a8f3 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 29 Aug 2024 21:37:19 -1000 Subject: [PATCH 1224/1635] Address yale review comments part 2 (#124887) * Remove some unneeded block till done * Additional state check cleanups and snapshots * Use more snapshots in yale tests --- .../components/yale/snapshots/test_lock.ambr | 37 ++++ .../yale/snapshots/test_sensor.ambr | 95 +++++++++ tests/components/yale/test_button.py | 1 - tests/components/yale/test_lock.py | 193 ++++++------------ tests/components/yale/test_sensor.py | 106 +++------- 5 files changed, 226 insertions(+), 206 deletions(-) create mode 100644 tests/components/yale/snapshots/test_lock.ambr create mode 100644 tests/components/yale/snapshots/test_sensor.ambr diff --git a/tests/components/yale/snapshots/test_lock.ambr b/tests/components/yale/snapshots/test_lock.ambr new file mode 100644 index 00000000000..b1a9f6a4d86 --- /dev/null +++ b/tests/components/yale/snapshots/test_lock.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'online_with_doorsense_name', + 'config_entries': , + 'configuration_url': 'https://account.aaecosystem.com', + 'connections': set({ + tuple( + 'bluetooth', + '12:22', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'yale', + 'online_with_doorsense', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Yale Home Inc.', + 'model': 'AUG-MD01', + 'model_id': None, + 'name': 'online_with_doorsense Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'online_with_doorsense Name', + 'sw_version': 'undefined-4.3.0-1.8.14', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/yale/snapshots/test_sensor.ambr b/tests/components/yale/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a425cfa90de --- /dev/null +++ b/tests/components/yale/snapshots/test_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_lock_operator_autorelock + ReadOnlyDict({ + 'autorelock': True, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'autorelock', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_keypad + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': True, + 'manual': False, + 'method': 'keypad', + 'remote': False, + 'tag': False, + }) +# --- +# 
name: test_lock_operator_manual + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': True, + 'method': 'manual', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_remote + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'remote', + 'remote': True, + 'tag': False, + }) +# --- +# name: test_restored_state + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'autorelock': False, + 'entity_picture': 'image.png', + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'tag', + 'remote': False, + 'tag': True, + }), + 'context': , + 'entity_id': 'sensor.online_with_doorsense_name_operator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Tag Unlock', + }) +# --- +# name: test_unlock_operator_manual + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': True, + 'method': 'manual', + 'remote': False, + 'tag': False, + }), + 'context': , + 'entity_id': 'sensor.online_with_doorsense_name_operator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Your favorite elven princess', + }) +# --- +# name: test_unlock_operator_tag + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'tag', + 'remote': False, + 'tag': True, + }) +# --- diff --git a/tests/components/yale/test_button.py b/tests/components/yale/test_button.py index ebd22f1da59..92d3ecef859 100644 --- a/tests/components/yale/test_button.py +++ b/tests/components/yale/test_button.py @@ -20,5 +20,4 @@ async def test_wake_lock(hass: HomeAssistant) -> None: await hass.services.async_call( BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True ) - await hass.async_block_till_done() api_instance.async_status_async.assert_called_once() diff --git a/tests/components/yale/test_lock.py b/tests/components/yale/test_lock.py index b449be9153d..2bbb7408953 100644 --- a/tests/components/yale/test_lock.py +++ b/tests/components/yale/test_lock.py @@ -5,6 +5,7 @@ import datetime from aiohttp import ClientResponseError from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME from homeassistant.components.lock import ( @@ -41,7 +42,7 @@ from tests.common import async_fire_time_changed async def test_lock_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -50,10 +51,7 @@ async def test_lock_device_registry( reg_device = device_registry.async_get_device( identifiers={("yale", "online_with_doorsense")} ) - assert reg_device.model == "AUG-MD01" - assert reg_device.sw_version == "undefined-4.3.0-1.8.14" - assert reg_device.name == "online_with_doorsense Name" - assert reg_device.manufacturer == "Yale Home Inc." 
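
The new .ambr files above are syrupy snapshots; the test rewrites in this patch swap long chains of per-attribute assertions for a single comparison against the stored snapshot. A minimal sketch of the pattern, assuming the usual Home Assistant hass fixture and a previously recorded snapshot; the entity id is illustrative only.

from syrupy import SnapshotAssertion

from homeassistant.core import HomeAssistant


async def test_operator_attributes(
    hass: HomeAssistant, snapshot: SnapshotAssertion
) -> None:
    """Compare the whole attribute dict against the stored .ambr snapshot."""
    state = hass.states.get("sensor.example_name_operator")  # illustrative entity id
    assert state
    # One assertion covers every attribute; running pytest with --snapshot-update
    # (re)writes the corresponding .ambr entry.
    assert state.attributes == snapshot
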
+ assert reg_device == snapshot async def test_lock_changed_by(hass: HomeAssistant) -> None: @@ -63,14 +61,9 @@ async def test_lock_changed_by(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") await _create_yale_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert ( - lock_online_with_doorsense_name.attributes.get("changed_by") - == "Your favorite elven princess" - ) + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["changed_by"] == "Your favorite elven princess" async def test_state_locking(hass: HomeAssistant) -> None: @@ -80,9 +73,7 @@ async def test_state_locking(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") await _create_yale_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKING async def test_state_unlocking(hass: HomeAssistant) -> None: @@ -106,9 +97,7 @@ async def test_state_jammed(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") await _create_yale_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_JAMMED + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_JAMMED async def test_one_lock_operation( @@ -118,44 +107,31 @@ async def test_one_lock_operation( lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) await _create_yale_with_devices(hass, [lock_one]) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = 
hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor - assert ( - hass.states.get("sensor.online_with_doorsense_name_operator").state - == STATE_UNKNOWN - ) + assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") + operator_state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert operator_state.state == STATE_UNKNOWN async def test_open_lock_operation(hass: HomeAssistant) -> None: @@ -163,15 +139,12 @@ async def test_open_lock_operation(hass: HomeAssistant) -> None: lock_with_unlatch = await _mock_lock_with_unlatch(hass) await _create_yale_with_devices(hass, [lock_with_unlatch]) - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_UNLOCKED async def test_open_lock_operation_socketio_connected( @@ -186,12 +159,10 @@ async def test_open_lock_operation_socketio_connected( _, socketio = await _create_yale_with_devices(hass, [lock_with_unlatch]) socketio.connected = True - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() listener = list(socketio._listeners)[0] listener( @@ -205,8 +176,7 @@ async def test_open_lock_operation_socketio_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_UNLOCKED await hass.async_block_till_done() @@ -218,23 +188,18 @@ async def test_one_lock_operation_socketio_connected( """Test lock and unlock operations are async when socketio is connected.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) assert lock_one.pubsub_channel == "pubsub" + states = hass.states _, socketio = await _create_yale_with_devices(hass, [lock_one]) socketio.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + 
assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() listener = list(socketio._listeners)[0] listener( @@ -248,17 +213,12 @@ async def test_one_lock_operation_socketio_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED - - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() listener( lock_one.device_id, @@ -271,17 +231,12 @@ async def test_one_lock_operation_socketio_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data - lock_operator_sensor = entity_registry.async_get( - "sensor.online_with_doorsense_name_operator" - ) - assert lock_operator_sensor + assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") assert ( - hass.states.get("sensor.online_with_doorsense_name_operator").state - == STATE_UNKNOWN + states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN ) freezer.tick(INITIAL_LOCK_RESYNC_TIME) @@ -296,8 +251,7 @@ async def test_one_lock_operation_socketio_connected( await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKED async def test_lock_jammed(hass: HomeAssistant) -> None: @@ -315,22 +269,16 @@ async def test_lock_jammed(hass: HomeAssistant) -> None: }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + states = hass.states + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_JAMMED + assert 
states.get("lock.online_with_doorsense_name").state == STATE_JAMMED async def test_lock_throws_exception_on_unknown_status_code( @@ -350,15 +298,10 @@ async def test_lock_throws_exception_on_unknown_status_code( }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} with pytest.raises(ClientResponseError): @@ -373,9 +316,7 @@ async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: ) await _create_yale_with_devices(hass, [lock_one]) - lock_brokenid_name = hass.states.get("lock.brokenid_name") - - assert lock_brokenid_name.state == STATE_UNKNOWN + assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -387,9 +328,8 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: ) await _create_yale_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNAVAILABLE + states = hass.states + assert states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE async def test_lock_bridge_online(hass: HomeAssistant) -> None: @@ -401,9 +341,8 @@ async def test_lock_bridge_online(hass: HomeAssistant) -> None: ) await _create_yale_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + states = hass.states + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: @@ -416,10 +355,9 @@ async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: hass, [lock_one], activities=activities ) socketio.connected = True + states = hass.states - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED listener = list(socketio._listeners)[0] listener( @@ -433,8 +371,7 @@ async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING listener( lock_one.device_id, @@ -447,25 +384,21 @@ async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING 
async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING socketio.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING # Ensure socketio status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING listener( lock_one.device_id, @@ -478,13 +411,11 @@ async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/yale/test_sensor.py b/tests/components/yale/test_sensor.py index caf8781b4ad..5d724b4bb9d 100644 --- a/tests/components/yale/test_sensor.py +++ b/tests/components/yale/test_sensor.py @@ -2,6 +2,8 @@ from typing import Any +from syrupy import SnapshotAssertion + from homeassistant import core as ha from homeassistant.const import ( ATTR_ENTITY_PICTURE, @@ -28,13 +30,9 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_yale_with_devices(hass, [doorbell_one]) - sensor_k98gidt45gul_name_battery = hass.states.get( - "sensor.k98gidt45gul_name_battery" - ) - assert sensor_k98gidt45gul_name_battery.state == "96" - assert ( - sensor_k98gidt45gul_name_battery.attributes["unit_of_measurement"] == PERCENTAGE - ) + battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") + assert battery_state.state == "96" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE async def test_create_doorbell_offline( @@ -44,9 +42,9 @@ async def test_create_doorbell_offline( doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_yale_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery.state == "81" - assert sensor_tmt100_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + battery_state = hass.states.get("sensor.tmt100_name_battery") + assert battery_state.state == "81" + assert 
battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get("sensor.tmt100_name_battery") assert entry @@ -71,25 +69,21 @@ async def test_create_lock_with_linked_keypad( lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") await _create_yale_with_devices(hass, [lock_one]) - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + battery_state = hass.states.get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "62" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "62" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" @@ -102,16 +96,11 @@ async def test_create_lock_with_low_battery_linked_keypad( lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") await _create_yale_with_devices(hass, [lock_one]) - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + battery_state = hass.states.get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) @@ -166,7 +155,7 @@ async def test_lock_operator_bluetooth( async def test_lock_operator_keypad( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: """Test operation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -183,16 +172,11 @@ async def test_lock_operator_keypad( state = hass.states.get("sensor.online_with_doorsense_name_operator") assert state.state == "Your favorite elven princess" - assert state.attributes["manual"] is False - assert state.attributes["tag"] is False - assert state.attributes["remote"] is False - assert state.attributes["keypad"] is True - assert state.attributes["autorelock"] is False - assert state.attributes["method"] == "keypad" + assert state.attributes == snapshot async def test_lock_operator_remote( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: """Test operation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -207,16 +191,11 @@ async def test_lock_operator_remote( state 
= hass.states.get("sensor.online_with_doorsense_name_operator") assert state.state == "Your favorite elven princess" - assert state.attributes["manual"] is False - assert state.attributes["tag"] is False - assert state.attributes["remote"] is True - assert state.attributes["keypad"] is False - assert state.attributes["autorelock"] is False - assert state.attributes["method"] == "remote" + assert state.attributes == snapshot async def test_lock_operator_manual( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: """Test operation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -232,16 +211,11 @@ async def test_lock_operator_manual( assert lock_operator_sensor state = hass.states.get("sensor.online_with_doorsense_name_operator") assert state.state == "Your favorite elven princess" - assert state.attributes["manual"] is True - assert state.attributes["tag"] is False - assert state.attributes["remote"] is False - assert state.attributes["keypad"] is False - assert state.attributes["autorelock"] is False - assert state.attributes["method"] == "manual" + assert state.attributes == snapshot async def test_lock_operator_autorelock( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: """Test operation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -258,16 +232,11 @@ async def test_lock_operator_autorelock( state = hass.states.get("sensor.online_with_doorsense_name_operator") assert state.state == "Auto Relock" - assert state.attributes["manual"] is False - assert state.attributes["tag"] is False - assert state.attributes["remote"] is False - assert state.attributes["keypad"] is False - assert state.attributes["autorelock"] is True - assert state.attributes["method"] == "autorelock" + assert state.attributes == snapshot async def test_unlock_operator_manual( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: """Test operation of a lock manually.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -284,16 +253,11 @@ async def test_unlock_operator_manual( state = hass.states.get("sensor.online_with_doorsense_name_operator") assert state.state == "Your favorite elven princess" - assert state.attributes["manual"] is True - assert state.attributes["tag"] is False - assert state.attributes["remote"] is False - assert state.attributes["keypad"] is False - assert state.attributes["autorelock"] is False - assert state.attributes["method"] == "manual" + assert state == snapshot async def test_unlock_operator_tag( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: """Test operation of a lock with a tag.""" lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) @@ -310,16 +274,11 @@ async def test_unlock_operator_tag( state = hass.states.get("sensor.online_with_doorsense_name_operator") assert state.state == "Your favorite elven princess" - assert state.attributes["manual"] is False - assert state.attributes["tag"] is True - assert state.attributes["remote"] is False - assert state.attributes["keypad"] is False - assert state.attributes["autorelock"] is 
False - assert state.attributes["method"] == "tag" + assert state.attributes == snapshot async def test_restored_state( - hass: HomeAssistant, hass_storage: dict[str, Any] + hass: HomeAssistant, hass_storage: dict[str, Any], snapshot: SnapshotAssertion ) -> None: """Test restored state.""" @@ -358,5 +317,4 @@ async def test_restored_state( state = hass.states.get(entity_id) assert state.state == "Tag Unlock" - assert state.attributes["method"] == "tag" - assert state.attributes[ATTR_ENTITY_PICTURE] == "image.png" + assert state == snapshot From 600c6a0dcba3285c9b163c121f6bbaf63a57862a Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Fri, 30 Aug 2024 10:05:28 +0200 Subject: [PATCH 1225/1635] Bump lmcloud to 1.2.1 (#124908) --- homeassistant/components/lamarzocco/__init__.py | 1 + homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index dfcaa54047d..02e47ecd78e 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -53,6 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - cloud_client = LaMarzoccoCloudClient( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], + client=get_async_client(hass), ) # initialize local API diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 73d14250525..37a4e1d0c99 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -22,5 +22,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["lmcloud"], - "requirements": ["lmcloud==1.1.13"] + "requirements": ["lmcloud==1.2.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index d04dcee2c88..171a4ae9cdd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1282,7 +1282,7 @@ linear-garage-door==0.2.9 linode-api==4.1.9b1 # homeassistant.components.lamarzocco -lmcloud==1.1.13 +lmcloud==1.2.1 # homeassistant.components.google_maps locationsharinglib==5.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5ebeb167d47..8c9d9225c65 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1060,7 +1060,7 @@ libsoundtouch==0.8 linear-garage-door==0.2.9 # homeassistant.components.lamarzocco -lmcloud==1.1.13 +lmcloud==1.2.1 # homeassistant.components.london_underground london-tube-status==0.5 From f5e0382123885262c221334f41833a2e80d1cf2b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 10:29:25 +0200 Subject: [PATCH 1226/1635] Bump github/codeql-action from 3.26.5 to 3.26.6 (#124898) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3.26.5 to 3.26.6. - [Release notes](https://github.com/github/codeql-action/releases) - [Changelog](https://github.com/github/codeql-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/github/codeql-action/compare/v3.26.5...v3.26.6) --- updated-dependencies: - dependency-name: github/codeql-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index a4653a833c4..33c7d6a2711 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.26.5 + uses: github/codeql-action/init@v3.26.6 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.26.5 + uses: github/codeql-action/analyze@v3.26.6 with: category: "/language:python" From 252f05e0f76b93c730bdfca11b7a3573e322e745 Mon Sep 17 00:00:00 2001 From: Willem-Jan van Rootselaar Date: Fri, 30 Aug 2024 10:41:07 +0200 Subject: [PATCH 1227/1635] Update diagnostics for BSBLan (#124508) * update diagnostics to include static and make room for multiple coordinator data objects * fix mac address is not stored in config_entry but on device --- .../components/bsblan/diagnostics.py | 5 +- homeassistant/components/bsblan/entity.py | 6 +- .../bsblan/snapshots/test_diagnostics.ambr | 88 +++++++++++-------- 3 files changed, 60 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/bsblan/diagnostics.py b/homeassistant/components/bsblan/diagnostics.py index 3b42d47e1d3..b4ff67f4fbf 100644 --- a/homeassistant/components/bsblan/diagnostics.py +++ b/homeassistant/components/bsblan/diagnostics.py @@ -20,5 +20,8 @@ async def async_get_config_entry_diagnostics( return { "info": data.info.to_dict(), "device": data.device.to_dict(), - "state": data.coordinator.data.state.to_dict(), + "coordinator_data": { + "state": data.coordinator.data.state.to_dict(), + }, + "static": data.static.to_dict(), } diff --git a/homeassistant/components/bsblan/entity.py b/homeassistant/components/bsblan/entity.py index 0c507938794..252c397f4f2 100644 --- a/homeassistant/components/bsblan/entity.py +++ b/homeassistant/components/bsblan/entity.py @@ -22,10 +22,10 @@ class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]): def __init__(self, coordinator: BSBLanUpdateCoordinator, data: BSBLanData) -> None: """Initialize BSBLan entity.""" super().__init__(coordinator, data) - host = self.coordinator.config_entry.data["host"] - mac = self.coordinator.config_entry.data["mac"] + host = coordinator.config_entry.data["host"] + mac = data.device.MAC self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, data.device.MAC)}, + identifiers={(DOMAIN, mac)}, connections={(CONNECTION_NETWORK_MAC, format_mac(mac))}, name=data.device.name, manufacturer="BSBLAN Inc.", diff --git a/tests/components/bsblan/snapshots/test_diagnostics.ambr b/tests/components/bsblan/snapshots/test_diagnostics.ambr index b172d26c249..c9a82edf4e2 100644 --- a/tests/components/bsblan/snapshots/test_diagnostics.ambr +++ b/tests/components/bsblan/snapshots/test_diagnostics.ambr @@ -1,6 +1,52 @@ # serializer version: 1 # name: test_diagnostics dict({ + 'coordinator_data': dict({ + 'state': dict({ + 'current_temperature': dict({ + 'data_type': 0, + 'desc': '', + 'name': 'Room temp 1 actual value', + 'unit': '°C', + 'value': '18.6', + }), + 'hvac_action': dict({ + 'data_type': 1, + 'desc': 'Raumtemp’begrenzung', + 'name': 'Status heating circuit 1', + 'unit': '', + 'value': '122', + }), + 'hvac_mode': dict({ + 'data_type': 1, + 'desc': 'Komfort', + 'name': 'Operating mode', + 'unit': '', + 'value': 'heat', + 
}), + 'hvac_mode2': dict({ + 'data_type': 1, + 'desc': 'Reduziert', + 'name': 'Operating mode', + 'unit': '', + 'value': '2', + }), + 'room1_thermostat_mode': dict({ + 'data_type': 1, + 'desc': 'Kein Bedarf', + 'name': 'Raumthermostat 1', + 'unit': '', + 'value': '0', + }), + 'target_temperature': dict({ + 'data_type': 0, + 'desc': '', + 'name': 'Room temperature Comfort setpoint', + 'unit': '°C', + 'value': '18.5', + }), + }), + }), 'device': dict({ 'MAC': '00:80:41:19:69:90', 'name': 'BSB-LAN', @@ -30,48 +76,20 @@ 'value': 'RVS21.831F/127', }), }), - 'state': dict({ - 'current_temperature': dict({ + 'static': dict({ + 'max_temp': dict({ 'data_type': 0, 'desc': '', - 'name': 'Room temp 1 actual value', + 'name': 'Summer/winter changeover temp heat circuit 1', 'unit': '°C', - 'value': '18.6', + 'value': '20.0', }), - 'hvac_action': dict({ - 'data_type': 1, - 'desc': 'Raumtemp’begrenzung', - 'name': 'Status heating circuit 1', - 'unit': '', - 'value': '122', - }), - 'hvac_mode': dict({ - 'data_type': 1, - 'desc': 'Komfort', - 'name': 'Operating mode', - 'unit': '', - 'value': 'heat', - }), - 'hvac_mode2': dict({ - 'data_type': 1, - 'desc': 'Reduziert', - 'name': 'Operating mode', - 'unit': '', - 'value': '2', - }), - 'room1_thermostat_mode': dict({ - 'data_type': 1, - 'desc': 'Kein Bedarf', - 'name': 'Raumthermostat 1', - 'unit': '', - 'value': '0', - }), - 'target_temperature': dict({ + 'min_temp': dict({ 'data_type': 0, 'desc': '', - 'name': 'Room temperature Comfort setpoint', + 'name': 'Room temp frost protection setpoint', 'unit': '°C', - 'value': '18.5', + 'value': '8.0', }), }), }) From cc4340b80ceacaecb4ffb8d5d5e3bcb2909ca504 Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 30 Aug 2024 10:50:18 +0200 Subject: [PATCH 1228/1635] Remove update call from init in ViCare integration (#124905) fix --- homeassistant/components/vicare/sensor.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 5d51abfbbf6..4ac3c504d9a 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -932,7 +932,9 @@ async def async_setup_entry( await hass.async_add_executor_job( _build_entities, device_list, - ) + ), + # run update to have device_class set depending on unit_of_measurement + True, ) @@ -950,8 +952,6 @@ class ViCareSensor(ViCareEntity, SensorEntity): """Initialize the sensor.""" super().__init__(device_config, api, description.key) self.entity_description = description - # run update to have device_class set depending on unit_of_measurement - self.update() @property def available(self) -> bool: From a9975071c3e868edaa2358b6161e12697e60afc3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 30 Aug 2024 10:53:06 +0200 Subject: [PATCH 1229/1635] Bump actions/setup-python from 5.1.1 to 5.2.0 (#124899) Bumps [actions/setup-python](https://github.com/actions/setup-python) from 5.1.1 to 5.2.0. - [Release notes](https://github.com/actions/setup-python/releases) - [Commits](https://github.com/actions/setup-python/compare/v5.1.1...v5.2.0) --- updated-dependencies: - dependency-name: actions/setup-python dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/builder.yml | 6 +++--- .github/workflows/ci.yaml | 32 +++++++++++++++--------------- .github/workflows/translations.yml | 2 +- .github/workflows/wheels.yml | 2 +- 4 files changed, 21 insertions(+), 21 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index d206f8fe8c8..ab64f9e5519 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -32,7 +32,7 @@ jobs: fetch-depth: 0 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -116,7 +116,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} if: needs.init.outputs.channel == 'dev' - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -453,7 +453,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index b62fff06c0c..24b204f3f55 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -234,7 +234,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -279,7 +279,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -319,7 +319,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -359,7 +359,7 @@ jobs: - name: Check out code from GitHub uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 id: python with: python-version: ${{ env.DEFAULT_PYTHON }} @@ -454,7 +454,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -538,7 +538,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -571,7 +571,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -605,7 +605,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -648,7 +648,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ 
env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -695,7 +695,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -740,7 +740,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -815,7 +815,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true @@ -879,7 +879,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -999,7 +999,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1125,7 +1125,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ matrix.python-version }} check-latest: true @@ -1271,7 +1271,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ matrix.python-version }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ matrix.python-version }} check-latest: true diff --git a/.github/workflows/translations.yml b/.github/workflows/translations.yml index 0ab95510480..4b3907e6cb9 100644 --- a/.github/workflows/translations.yml +++ b/.github/workflows/translations.yml @@ -22,7 +22,7 @@ jobs: uses: actions/checkout@v4.1.7 - name: Set up Python ${{ env.DEFAULT_PYTHON }} - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 694208d30ac..735163e3b12 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -36,7 +36,7 @@ jobs: - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python - uses: actions/setup-python@v5.1.1 + uses: actions/setup-python@v5.2.0 with: python-version: ${{ env.DEFAULT_PYTHON }} check-latest: true From 4940968cd59ee1d13586a697d5757a9bbe3928ab Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:02:29 +0200 Subject: [PATCH 1230/1635] Bump lmcloud 1.2.2 (#124911) bump lmcloud 1.2.2 --- homeassistant/components/lamarzocco/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 37a4e1d0c99..181a2b9ab9b 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -22,5 +22,5 @@ "integration_type": "device", "iot_class": 
"cloud_polling", "loggers": ["lmcloud"], - "requirements": ["lmcloud==1.2.1"] + "requirements": ["lmcloud==1.2.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 171a4ae9cdd..4fd2c8932f4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1282,7 +1282,7 @@ linear-garage-door==0.2.9 linode-api==4.1.9b1 # homeassistant.components.lamarzocco -lmcloud==1.2.1 +lmcloud==1.2.2 # homeassistant.components.google_maps locationsharinglib==5.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8c9d9225c65..63721bc9360 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1060,7 +1060,7 @@ libsoundtouch==0.8 linear-garage-door==0.2.9 # homeassistant.components.lamarzocco -lmcloud==1.2.1 +lmcloud==1.2.2 # homeassistant.components.london_underground london-tube-status==0.5 From 6833af6286da53b60988154a8bde74f285b6024a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:04:58 +0200 Subject: [PATCH 1231/1635] Improve config flow type hints (n-p) (#124909) --- .../components/netgear/config_flow.py | 10 +++++++-- homeassistant/components/nuki/config_flow.py | 11 +++++++--- .../components/octoprint/config_flow.py | 18 +++++++++------ .../components/omnilogic/config_flow.py | 4 +++- homeassistant/components/onvif/config_flow.py | 10 ++++++--- .../components/opentherm_gw/config_flow.py | 10 ++++++--- .../components/plaato/config_flow.py | 18 ++++++++++----- .../components/progettihwsw/config_flow.py | 10 ++++++--- .../components/prosegur/config_flow.py | 6 +++-- homeassistant/components/ps4/config_flow.py | 22 ++++++++++++------- 10 files changed, 81 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index 55112c6662c..fba934af38d 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -67,7 +67,9 @@ class OptionsFlowHandler(OptionsFlow): """Init object.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, int] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) @@ -109,7 +111,11 @@ class NetgearFlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow.""" return OptionsFlowHandler(config_entry) - async def _show_setup_form(self, user_input=None, errors=None): + async def _show_setup_form( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Show the setup form to the user.""" if not user_input: user_input = {} diff --git a/homeassistant/components/nuki/config_flow.py b/homeassistant/components/nuki/config_flow.py index 3b8015827f1..4a9789c7e51 100644 --- a/homeassistant/components/nuki/config_flow.py +++ b/homeassistant/components/nuki/config_flow.py @@ -12,6 +12,7 @@ import voluptuous as vol from homeassistant.components import dhcp from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TOKEN +from homeassistant.core import HomeAssistant from .const import CONF_ENCRYPT_TOKEN, DEFAULT_PORT, DEFAULT_TIMEOUT, DOMAIN from .helpers import CannotConnect, InvalidAuth, parse_id @@ -34,7 +35,7 @@ REAUTH_SCHEMA = vol.Schema( ) -async def validate_input(hass, data): +async def validate_input(hass: 
HomeAssistant, data: dict[str, Any]) -> dict[str, Any]: """Validate the user input allows us to connect. Data has the keys from USER_SCHEMA with values provided by the user. @@ -99,7 +100,9 @@ class NukiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Dialog that inform the user that reauth is required.""" errors = {} if user_input is None: @@ -140,7 +143,9 @@ class NukiConfigFlow(ConfigFlow, domain=DOMAIN): step_id="reauth_confirm", data_schema=REAUTH_SCHEMA, errors=errors ) - async def async_step_validate(self, user_input=None): + async def async_step_validate( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle init step of a flow.""" data_schema = self.discovery_schema or USER_SCHEMA diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 706670738a6..cd8706f2350 100644 --- a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations import asyncio from collections.abc import Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any import aiohttp from pyoctoprintapi import ApiError, OctoprintClient, OctoprintException @@ -104,7 +104,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): self._user_input = user_input return await self.async_step_get_api_key() - async def async_step_get_api_key(self, user_input=None): + async def async_step_get_api_key( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Get an Application Api Key.""" if not self.api_key_task: self.api_key_task = self.hass.async_create_task( @@ -130,7 +132,7 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_progress_done(next_step_id="user") - async def _finish_config(self, user_input: dict): + async def _finish_config(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Finish the configuration setup.""" existing_entry = await self.async_set_unique_id(self.unique_id) if existing_entry is not None: @@ -156,7 +158,7 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=user_input[CONF_HOST], data=user_input) - async def async_step_auth_failed(self, user_input): + async def async_step_auth_failed(self, user_input: None) -> ConfigFlowResult: """Handle api fetch failure.""" return self.async_abort(reason="auth_failed") @@ -252,15 +254,17 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): self._user_input = self._reauth_data return await self.async_step_get_api_key() - async def _async_get_auth_key(self): + async def _async_get_auth_key(self) -> None: """Get application api key.""" + if TYPE_CHECKING: + assert self._user_input is not None octoprint = self._get_octoprint_client(self._user_input) self._user_input[CONF_API_KEY] = await octoprint.request_app_key( "Home Assistant", self._user_input[CONF_USERNAME], 300 ) - def _get_octoprint_client(self, user_input: dict) -> OctoprintClient: + def _get_octoprint_client(self, user_input: dict[str, Any]) -> OctoprintClient: """Build an octoprint client from the user_input.""" verify_ssl = user_input.get(CONF_VERIFY_SSL, True) @@ -281,7 +285,7 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): path=user_input[CONF_PATH], ) - def 
async_remove(self): + def async_remove(self) -> None: """Detach the session.""" for session in self._sessions: session.detach() diff --git a/homeassistant/components/omnilogic/config_flow.py b/homeassistant/components/omnilogic/config_flow.py index 166e4414767..77bca0039a9 100644 --- a/homeassistant/components/omnilogic/config_flow.py +++ b/homeassistant/components/omnilogic/config_flow.py @@ -88,7 +88,9 @@ class OptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage options.""" if user_input is not None: diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index 30184d1abc3..f4e3f11d0b7 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -198,7 +198,9 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): hass.async_create_task(self.hass.config_entries.async_reload(entry_id)) return self.async_abort(reason="already_configured") - async def async_step_device(self, user_input=None): + async def async_step_device( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle WS-Discovery. Let user choose between discovered devices and manual configuration. @@ -395,11 +397,13 @@ class OnvifOptionsFlowHandler(OptionsFlow): self.config_entry = config_entry self.options = dict(config_entry.options) - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the ONVIF options.""" return await self.async_step_onvif_devices() - async def async_step_onvif_devices(self, user_input=None): + async def async_step_onvif_devices( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the ONVIF devices options.""" if user_input is not None: self.options[CONF_EXTRA_ARGUMENTS] = user_input[CONF_EXTRA_ARGUMENTS] diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index c1d1caa2fb0..a5ac116ac11 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -50,7 +50,9 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OpenThermGwOptionsFlow(config_entry) - async def async_step_init(self, info=None): + async def async_step_init( + self, info: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle config flow initiation.""" if info: name = info[CONF_NAME] @@ -104,7 +106,7 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): } return await self.async_step_init(info=formatted_config) - def _show_form(self, errors=None): + def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: """Show the config flow form with possible errors.""" return self.async_show_form( step_id="init", @@ -132,7 +134,9 @@ class OpenThermGwOptionsFlow(OptionsFlow): """Initialize the options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the opentherm_gw options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/plaato/config_flow.py 
b/homeassistant/components/plaato/config_flow.py index 3ada4fdc312..74967c417a4 100644 --- a/homeassistant/components/plaato/config_flow.py +++ b/homeassistant/components/plaato/config_flow.py @@ -71,7 +71,9 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_api_method(self, user_input=None): + async def async_step_api_method( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle device type step.""" device_type = self._init_info[CONF_DEVICE_TYPE] @@ -90,7 +92,9 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): return await self._show_api_method_form(device_type) - async def async_step_webhook(self, user_input=None): + async def async_step_webhook( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Validate config step.""" use_webhook = self._init_info[CONF_USE_WEBHOOK] @@ -136,8 +140,8 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): ) async def _show_api_method_form( - self, device_type: PlaatoDeviceType, errors: dict | None = None - ): + self, device_type: PlaatoDeviceType, errors: dict[str, str] | None = None + ) -> ConfigFlowResult: data_schema = vol.Schema({vol.Optional(CONF_TOKEN, default=""): str}) if device_type == PlaatoDeviceType.Airlock: @@ -186,7 +190,7 @@ class PlaatoOptionsFlowHandler(OptionsFlow): self._config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the options.""" use_webhook = self._config_entry.data.get(CONF_USE_WEBHOOK, False) if use_webhook: @@ -215,7 +219,9 @@ class PlaatoOptionsFlowHandler(OptionsFlow): ), ) - async def async_step_webhook(self, user_input=None): + async def async_step_webhook( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options for webhook device.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index 95596b940a4..2202678da9b 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,6 +1,6 @@ """Config flow for ProgettiHWSW Automation integration.""" -from typing import Any +from typing import TYPE_CHECKING, Any from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI import voluptuous as vol @@ -42,9 +42,13 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize class variables.""" self.s1_in: dict[str, Any] | None = None - async def async_step_relay_modes(self, user_input=None): + async def async_step_relay_modes( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Manage relay modes step.""" - errors = {} + errors: dict[str, str] = {} + if TYPE_CHECKING: + assert self.s1_in is not None if user_input is not None: whole_data = user_input whole_data.update(self.s1_in) diff --git a/homeassistant/components/prosegur/config_flow.py b/homeassistant/components/prosegur/config_flow.py index 7a8f67cef7d..7bd87e405ef 100644 --- a/homeassistant/components/prosegur/config_flow.py +++ b/homeassistant/components/prosegur/config_flow.py @@ -116,9 +116,11 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): ) return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: 
"""Handle re-authentication with Prosegur.""" - errors = {} + errors: dict[str, str] = {} if user_input: try: diff --git a/homeassistant/components/ps4/config_flow.py b/homeassistant/components/ps4/config_flow.py index cdbf02dcc90..877fb595fc0 100644 --- a/homeassistant/components/ps4/config_flow.py +++ b/homeassistant/components/ps4/config_flow.py @@ -48,13 +48,13 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" self.helper = Helper() - self.creds = None + self.creds: str | None = None self.name = None self.host = None self.region = None - self.pin = None + self.pin: str | None = None self.m_device = None - self.location = None + self.location: location.LocationInfo | None = None self.device_list: list[str] = [] async def async_step_user( @@ -69,7 +69,9 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason=reason) return await self.async_step_creds() - async def async_step_creds(self, user_input=None): + async def async_step_creds( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Return PS4 credentials from 2nd Screen App.""" errors = {} if user_input is not None: @@ -85,7 +87,9 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="creds", errors=errors) - async def async_step_mode(self, user_input=None): + async def async_step_mode( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Prompt for mode.""" errors = {} mode = [CONF_AUTO, CONF_MANUAL] @@ -100,7 +104,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): if not errors: return await self.async_step_link() - mode_schema = OrderedDict() + mode_schema = OrderedDict[vol.Marker, Any]() mode_schema[vol.Required(CONF_MODE, default=CONF_AUTO)] = vol.In(list(mode)) mode_schema[vol.Optional(CONF_IP_ADDRESS)] = str @@ -108,7 +112,9 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): step_id="mode", data_schema=vol.Schema(mode_schema), errors=errors ) - async def async_step_link(self, user_input=None): + async def async_step_link( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Prompt user input. Create or edit entry.""" regions = sorted(COUNTRIES.keys()) default_region = None @@ -193,7 +199,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): default_region = country # Show User Input form. 
- link_schema = OrderedDict() + link_schema = OrderedDict[vol.Marker, Any]() link_schema[vol.Required(CONF_IP_ADDRESS)] = vol.In(list(self.device_list)) link_schema[vol.Required(CONF_REGION, default=default_region)] = vol.In( list(regions) From 74fa30e59d883faff09018e69352120146590c9c Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:05:18 +0200 Subject: [PATCH 1232/1635] Improve config flow type hints (g-m) (#124907) --- .../geonetnz_volcano/config_flow.py | 4 ++-- .../components/hlk_sw16/config_flow.py | 5 ++-- .../components/insteon/config_flow.py | 24 ++++++++++++++----- .../components/iotawatt/config_flow.py | 4 +++- .../components/kitchen_sink/config_flow.py | 4 +++- .../components/kmtronic/config_flow.py | 4 +++- homeassistant/components/kodi/config_flow.py | 20 +++++++++++----- .../components/lutron_caseta/config_flow.py | 8 +++++-- homeassistant/components/mill/config_flow.py | 8 +++++-- .../components/monoprice/config_flow.py | 4 +++- 10 files changed, 61 insertions(+), 24 deletions(-) diff --git a/homeassistant/components/geonetnz_volcano/config_flow.py b/homeassistant/components/geonetnz_volcano/config_flow.py index 45a074d215c..cf3d5bc1139 100644 --- a/homeassistant/components/geonetnz_volcano/config_flow.py +++ b/homeassistant/components/geonetnz_volcano/config_flow.py @@ -12,7 +12,7 @@ from homeassistant.const import ( CONF_SCAN_INTERVAL, CONF_UNIT_SYSTEM, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM @@ -26,7 +26,7 @@ from .const import ( @callback -def configured_instances(hass): +def configured_instances(hass: HomeAssistant) -> set[str]: """Return a set of configured GeoNet NZ Volcano instances.""" return { f"{entry.data[CONF_LATITUDE]}, {entry.data[CONF_LONGITUDE]}" diff --git a/homeassistant/components/hlk_sw16/config_flow.py b/homeassistant/components/hlk_sw16/config_flow.py index 8dd75561af3..34ee1ebd0e7 100644 --- a/homeassistant/components/hlk_sw16/config_flow.py +++ b/homeassistant/components/hlk_sw16/config_flow.py @@ -4,6 +4,7 @@ import asyncio from typing import Any from hlk_sw16 import create_hlk_sw16_connection +from hlk_sw16.protocol import SW16Client import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -27,7 +28,7 @@ DATA_SCHEMA = vol.Schema( ) -async def connect_client(hass, user_input): +async def connect_client(hass: HomeAssistant, user_input: dict[str, Any]) -> SW16Client: """Connect the HLK-SW16 client.""" client_aw = create_hlk_sw16_connection( host=user_input[CONF_HOST], @@ -41,7 +42,7 @@ async def connect_client(hass, user_input): return await client_aw -async def validate_input(hass: HomeAssistant, user_input): +async def validate_input(hass: HomeAssistant, user_input: dict[str, Any]) -> None: """Validate the user input allows us to connect.""" try: client = await connect_client(hass, user_input) diff --git a/homeassistant/components/insteon/config_flow.py b/homeassistant/components/insteon/config_flow.py index 7a701db1b82..6b048004ba1 100644 --- a/homeassistant/components/insteon/config_flow.py +++ b/homeassistant/components/insteon/config_flow.py @@ -64,7 +64,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): modem_types = [STEP_PLM, STEP_HUB_V1, STEP_HUB_V2] return self.async_show_menu(step_id="user", menu_options=modem_types) - async def async_step_plm(self, 
user_input=None): + async def async_step_plm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the PLM modem type.""" errors = {} if user_input is not None: @@ -83,7 +85,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): step_id=STEP_PLM, data_schema=data_schema, errors=errors ) - async def async_step_plm_manually(self, user_input=None): + async def async_step_plm_manually( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the PLM modem type manually.""" errors = {} schema_defaults = {} @@ -97,15 +101,21 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): step_id=STEP_PLM_MANUALLY, data_schema=data_schema, errors=errors ) - async def async_step_hubv1(self, user_input=None): + async def async_step_hubv1( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the Hub v1 modem type.""" return await self._async_setup_hub(hub_version=1, user_input=user_input) - async def async_step_hubv2(self, user_input=None): + async def async_step_hubv2( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Set up the Hub v2 modem type.""" return await self._async_setup_hub(hub_version=2, user_input=user_input) - async def _async_setup_hub(self, hub_version, user_input): + async def _async_setup_hub( + self, hub_version: int, user_input: dict[str, Any] | None + ) -> ConfigFlowResult: """Set up the Hub versions 1 and 2.""" errors = {} if user_input is not None: @@ -144,7 +154,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(DEFAULT_DISCOVERY_UNIQUE_ID) return await self.async_step_confirm_usb() - async def async_step_confirm_usb(self, user_input=None) -> ConfigFlowResult: + async def async_step_confirm_usb( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm a USB discovery.""" if user_input is not None: return await self.async_step_plm({CONF_DEVICE: self._device_path}) diff --git a/homeassistant/components/iotawatt/config_flow.py b/homeassistant/components/iotawatt/config_flow.py index 187423c7d8b..668844a1c5c 100644 --- a/homeassistant/components/iotawatt/config_flow.py +++ b/homeassistant/components/iotawatt/config_flow.py @@ -75,7 +75,9 @@ class IOTaWattConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_auth(self, user_input=None): + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Authenticate user if authentication is enabled on the IoTaWatt device.""" if user_input is None: user_input = {} diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index 9a0b78c80e6..8cff9321729 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -48,7 +48,9 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): """Reauth step.""" return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Reauth confirm step.""" if user_input is None: return self.async_show_form(step_id="reauth_confirm") diff --git a/homeassistant/components/kmtronic/config_flow.py b/homeassistant/components/kmtronic/config_flow.py index f83d102ac05..6bf0b878f72 100644 --- 
a/homeassistant/components/kmtronic/config_flow.py +++ b/homeassistant/components/kmtronic/config_flow.py @@ -106,7 +106,9 @@ class KMTronicOptionsFlow(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/kodi/config_flow.py b/homeassistant/components/kodi/config_flow.py index 26b5214c733..ef0798220dd 100644 --- a/homeassistant/components/kodi/config_flow.py +++ b/homeassistant/components/kodi/config_flow.py @@ -140,7 +140,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() - async def async_step_discovery_confirm(self, user_input=None): + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is None: return self.async_show_form( @@ -178,7 +180,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_user_form(errors) - async def async_step_credentials(self, user_input=None): + async def async_step_credentials( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle username and password input.""" errors = {} @@ -203,7 +207,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_credentials_form(errors) - async def async_step_ws_port(self, user_input=None): + async def async_step_ws_port( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle websocket port of discovered node.""" errors = {} @@ -249,7 +255,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason=reason) @callback - def _show_credentials_form(self, errors=None): + def _show_credentials_form( + self, errors: dict[str, str] | None = None + ) -> ConfigFlowResult: schema = vol.Schema( { vol.Optional( @@ -262,7 +270,7 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="credentials", data_schema=schema, errors=errors or {} + step_id="credentials", data_schema=schema, errors=errors ) @callback @@ -304,7 +312,7 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): ) @callback - def _get_data(self): + def _get_data(self) -> dict[str, Any]: return { CONF_NAME: self._name, CONF_HOST: self._host, diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index 703fbb813c6..cd566b767fb 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -95,7 +95,9 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by homekit discovery.""" return await self.async_step_zeroconf(discovery_info) - async def async_step_link(self, user_input=None): + async def async_step_link( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle pairing with the hub.""" errors = {} # Abort if existing entry with matching host exists. 
@@ -198,7 +200,9 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() return self.async_create_entry(title=ENTRY_DEFAULT_TITLE, data=self.data) - async def async_step_import_failed(self, user_input=None): + async def async_step_import_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Make failed import surfaced to user.""" self.context["title_placeholders"] = {CONF_NAME: self.data[CONF_HOST]} diff --git a/homeassistant/components/mill/config_flow.py b/homeassistant/components/mill/config_flow.py index db1b2711575..7b2e5c3c4d5 100644 --- a/homeassistant/components/mill/config_flow.py +++ b/homeassistant/components/mill/config_flow.py @@ -43,7 +43,9 @@ class MillConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_local() return await self.async_step_cloud() - async def async_step_local(self, user_input=None): + async def async_step_local( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the local step.""" data_schema = vol.Schema({vol.Required(CONF_IP_ADDRESS): str}) if user_input is None: @@ -75,7 +77,9 @@ class MillConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_cloud(self, user_input=None): + async def async_step_cloud( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the cloud step.""" data_schema = vol.Schema( {vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str} diff --git a/homeassistant/components/monoprice/config_flow.py b/homeassistant/components/monoprice/config_flow.py index 5f0b1bf27b5..cac673e38c1 100644 --- a/homeassistant/components/monoprice/config_flow.py +++ b/homeassistant/components/monoprice/config_flow.py @@ -139,7 +139,9 @@ class MonopriceOptionsFlowHandler(OptionsFlow): return previous - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry( From df2ea1e87506fb6b660c415f45b0991846691373 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:21:05 +0200 Subject: [PATCH 1233/1635] Improve type hints in nina config flow (#124910) * Improve type hints in nina config flow * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/nina/config_flow.py | 52 ++++++++++---------- tests/components/nina/test_config_flow.py | 2 +- 2 files changed, 28 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/nina/config_flow.py b/homeassistant/components/nina/config_flow.py index 1fee6430ffc..e048ce81be3 100644 --- a/homeassistant/components/nina/config_flow.py +++ b/homeassistant/components/nina/config_flow.py @@ -182,9 +182,11 @@ class OptionsFlowHandler(OptionsFlow): if name not in self.data: self.data[name] = [] - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" - errors: dict[str, Any] = {} + errors: dict[str, str] = {} if not self._all_region_codes_sorted: nina: Nina = Nina(async_get_clientsession(self.hass)) @@ -244,33 +246,33 @@ class OptionsFlowHandler(OptionsFlow): self.config_entry, data=user_input ) - return self.async_create_entry(title="", data=None) + return self.async_create_entry(title="", data={}) errors["base"] 
= "no_selection" + schema: VolDictType = { + **{ + vol.Optional(region, default=self.data[region]): cv.multi_select( + self.regions[region] + ) + for region in CONST_REGIONS + }, + vol.Required( + CONF_MESSAGE_SLOTS, + default=self.data[CONF_MESSAGE_SLOTS], + ): vol.All(int, vol.Range(min=1, max=20)), + vol.Optional( + CONF_HEADLINE_FILTER, + default=self.data[CONF_HEADLINE_FILTER], + ): cv.string, + vol.Optional( + CONF_AREA_FILTER, + default=self.data[CONF_AREA_FILTER], + ): cv.string, + } + return self.async_show_form( step_id="init", - data_schema=vol.Schema( - { - **{ - vol.Optional( - region, default=self.data[region] - ): cv.multi_select(self.regions[region]) - for region in CONST_REGIONS - }, - vol.Required( - CONF_MESSAGE_SLOTS, - default=self.data[CONF_MESSAGE_SLOTS], - ): vol.All(int, vol.Range(min=1, max=20)), - vol.Optional( - CONF_HEADLINE_FILTER, - default=self.data[CONF_HEADLINE_FILTER], - ): cv.string, - vol.Optional( - CONF_AREA_FILTER, - default=self.data[CONF_AREA_FILTER], - ): cv.string, - } - ), + data_schema=vol.Schema(schema), errors=errors, ) diff --git a/tests/components/nina/test_config_flow.py b/tests/components/nina/test_config_flow.py index 23ee8cbf797..6bc17cdf674 100644 --- a/tests/components/nina/test_config_flow.py +++ b/tests/components/nina/test_config_flow.py @@ -188,7 +188,7 @@ async def test_options_flow_init(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] is None + assert result["data"] == {} assert dict(config_entry.data) == { CONF_HEADLINE_FILTER: deepcopy(DUMMY_DATA[CONF_HEADLINE_FILTER]), From 19cbc1b258db345808587c96957cc284d7e4d1db Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:22:07 +0200 Subject: [PATCH 1234/1635] Improve type hints in plex config flow (#124914) --- homeassistant/components/plex/config_flow.py | 59 ++++++++++++++------ 1 file changed, 41 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index 7162e517e23..fcd5751effb 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -35,7 +35,7 @@ from homeassistant.const import ( CONF_URL, CONF_VERIFY_SSL, ) -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery_flow from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv @@ -71,7 +71,7 @@ _LOGGER = logging.getLogger(__package__) @callback -def configured_servers(hass): +def configured_servers(hass: HomeAssistant) -> set[str]: """Return a set of the configured Plex servers.""" return { entry.data[CONF_SERVER_IDENTIFIER] @@ -79,7 +79,7 @@ def configured_servers(hass): } -async def async_discover(hass): +async def async_discover(hass: HomeAssistant) -> None: """Scan for available Plex servers.""" gdm = GDM() await hass.async_add_executor_job(gdm.scan) @@ -97,6 +97,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + available_servers: list[tuple[str, str, str]] + plexauth: PlexAuth + @staticmethod @callback def async_get_options_flow( @@ -108,28 +111,34 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the Plex flow.""" self.current_login: dict[str, Any] = {} - self.available_servers = None - self.plexauth = None self.token = None self.client_id = None self._manual = 
False self._reauth_config: dict[str, Any] | None = None - async def async_step_user(self, user_input=None, errors=None): + async def async_step_user( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if user_input is not None: - return await self.async_step_plex_website_auth() + return await self._async_step_plex_website_auth() if self.show_advanced_options: return await self.async_step_user_advanced(errors=errors) return self.async_show_form(step_id="user", errors=errors) - async def async_step_user_advanced(self, user_input=None, errors=None): + async def async_step_user_advanced( + self, + user_input: dict[str, str] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Handle an advanced mode flow initialized by the user.""" if user_input is not None: if user_input.get("setup_method") == MANUAL_SETUP_STRING: self._manual = True return await self.async_step_manual_setup() - return await self.async_step_plex_website_auth() + return await self._async_step_plex_website_auth() data_schema = vol.Schema( { @@ -142,7 +151,11 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user_advanced", data_schema=data_schema, errors=errors ) - async def async_step_manual_setup(self, user_input=None, errors=None): + async def async_step_manual_setup( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Begin manual configuration.""" if user_input is not None and errors is None: user_input.pop(CONF_URL, None) @@ -264,7 +277,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=url, data=data) - async def async_step_select_server(self, user_input=None): + async def async_step_select_server( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Use selected Plex server.""" config = dict(self.current_login) if user_input is not None: @@ -292,7 +307,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): errors={}, ) - async def async_step_integration_discovery(self, discovery_info): + async def async_step_integration_discovery( + self, discovery_info: dict[str, Any] + ) -> ConfigFlowResult: """Handle GDM discovery.""" machine_identifier = discovery_info["data"]["Resource-Identifier"] await self.async_set_unique_id(machine_identifier) @@ -305,7 +322,7 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): } return await self.async_step_user() - async def async_step_plex_website_auth(self): + async def _async_step_plex_website_auth(self) -> ConfigFlowResult: """Begin external auth flow on Plex website.""" self.hass.http.register_view(PlexAuthorizationCallbackView) if (req := http.current_request.get()) is None: @@ -329,7 +346,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): auth_url = self.plexauth.auth_url(forward_url) return self.async_external_step(step_id="obtain_token", url=auth_url) - async def async_step_obtain_token(self, user_input=None): + async def async_step_obtain_token( + self, user_input: None = None + ) -> ConfigFlowResult: """Obtain token after external auth completed.""" token = await self.plexauth.token(10) @@ -340,11 +359,13 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): self.client_id = self.plexauth.client_identifier return self.async_external_step_done(next_step_id="use_external_token") - async def async_step_timed_out(self, user_input=None): + async def async_step_timed_out(self, user_input: None = None) 
-> ConfigFlowResult: """Abort flow when time expires.""" return self.async_abort(reason="token_request_timeout") - async def async_step_use_external_token(self, user_input=None): + async def async_step_use_external_token( + self, user_input: None = None + ) -> ConfigFlowResult: """Continue server validation with external token.""" server_config = {CONF_TOKEN: self.token} return await self.async_step_server_validate(server_config) @@ -367,11 +388,13 @@ class PlexOptionsFlowHandler(OptionsFlow): self.options = copy.deepcopy(dict(config_entry.options)) self.server_id = config_entry.data[CONF_SERVER_IDENTIFIER] - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the Plex options.""" return await self.async_step_plex_mp_settings() - async def async_step_plex_mp_settings(self, user_input=None): + async def async_step_plex_mp_settings( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the Plex media_player options.""" plex_server = get_plex_server(self.hass, self.server_id) From 9e2360791d1d52c53d4131de4a86809cc940b6bd Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:22:48 +0200 Subject: [PATCH 1235/1635] Add hot water target temp number entity in ViCare integration (#123633) * add DHW target temp number entity * Update number.py * Update strings.json * Update strings.json * update test snapshot * fix snapshot --- homeassistant/components/vicare/number.py | 12 ++++ homeassistant/components/vicare/strings.json | 3 + .../vicare/snapshots/test_number.ambr | 57 +++++++++++++++++++ 3 files changed, 72 insertions(+) diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index d53b7183327..a6bb849ce62 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -50,6 +50,18 @@ class ViCareNumberEntityDescription(NumberEntityDescription, ViCareRequiredKeysM DEVICE_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] 
= ( + ViCareNumberEntityDescription( + key="dhw_temperature", + translation_key="dhw_temperature", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_getter=lambda api: api.getDomesticHotWaterConfiguredTemperature(), + value_setter=lambda api, value: api.setDomesticHotWaterTemperature(value), + min_value_getter=lambda api: api.getDomesticHotWaterMinTemperature(), + max_value_getter=lambda api: api.getDomesticHotWaterMaxTemperature(), + native_step=1, + ), ViCareNumberEntityDescription( key="dhw_secondary_temperature", translation_key="dhw_secondary_temperature", diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 0452a560cb8..1466baab8f3 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -105,6 +105,9 @@ "comfort_heating_temperature": { "name": "[%key:component::vicare::entity::number::comfort_temperature::name%]" }, + "dhw_temperature": { + "name": "DHW temperature" + }, "dhw_secondary_temperature": { "name": "DHW secondary temperature" } diff --git a/tests/components/vicare/snapshots/test_number.ambr b/tests/components/vicare/snapshots/test_number.ambr index a55c29ab8c1..e6e87ce5dc7 100644 --- a/tests/components/vicare/snapshots/test_number.ambr +++ b/tests/components/vicare/snapshots/test_number.ambr @@ -565,3 +565,60 @@ 'state': 'unavailable', }) # --- +# name: test_all_entities[number.model0_dhw_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_dhw_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dhw_temperature', + 'unique_id': 'gateway0-dhw_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_dhw_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_dhw_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- From ffabd5d7db225cbf1de066f0b6953009d1aa606e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:24:06 +0200 Subject: [PATCH 1236/1635] Improve type hints in konnected config flow (#124904) --- .../components/konnected/config_flow.py | 64 ++++++++++++------- 1 file changed, 42 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 48016cd066a..18e113e146b 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -7,7 +7,7 @@ import copy import logging import random import string -from typing import Any +from typing import TYPE_CHECKING, Any from 
urllib.parse import urlparse import voluptuous as vol @@ -227,8 +227,12 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() return await self.async_step_import_confirm() - async def async_step_import_confirm(self, user_input=None): + async def async_step_import_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the user wants to import the config entry.""" + if TYPE_CHECKING: + assert self.unique_id is not None if user_input is None: return self.async_show_form( step_id="import_confirm", @@ -349,7 +353,9 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Attempt to link with the Konnected panel. Given a configured host, will ask the user to confirm and finalize @@ -401,8 +407,8 @@ class OptionsFlowHandler(OptionsFlow): self.current_opt = self.entry.options or self.entry.data[CONF_DEFAULT_OPTIONS] # as config proceeds we'll build up new options and then replace what's in the config entry - self.new_opt: dict[str, dict[str, Any]] = {CONF_IO: {}} - self.active_cfg = None + self.new_opt: dict[str, Any] = {CONF_IO: {}} + self.active_cfg: str | None = None self.io_cfg: dict[str, Any] = {} self.current_states: list[dict[str, Any]] = [] self.current_state = 1 @@ -419,13 +425,17 @@ class OptionsFlowHandler(OptionsFlow): {}, ) - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" return await self.async_step_options_io() - async def async_step_options_io(self, user_input=None): + async def async_step_options_io( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Configure legacy panel IO or first half of pro IO.""" - errors = {} + errors: dict[str, str] = {} current_io = self.current_opt.get(CONF_IO, {}) if user_input is not None: @@ -508,9 +518,11 @@ class OptionsFlowHandler(OptionsFlow): return self.async_abort(reason="not_konn_panel") - async def async_step_options_io_ext(self, user_input=None): + async def async_step_options_io_ext( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the extended IO for pro.""" - errors = {} + errors: dict[str, str] = {} current_io = self.current_opt.get(CONF_IO, {}) if user_input is not None: @@ -566,10 +578,12 @@ class OptionsFlowHandler(OptionsFlow): return self.async_abort(reason="not_konn_panel") - async def async_step_options_binary(self, user_input=None): + async def async_step_options_binary( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the IO options for binary sensors.""" - errors = {} - if user_input is not None: + errors: dict[str, str] = {} + if user_input is not None and self.active_cfg is not None: zone = {"zone": self.active_cfg} zone.update(user_input) self.new_opt[CONF_BINARY_SENSORS] = [ @@ -602,7 +616,7 @@ class OptionsFlowHandler(OptionsFlow): description_placeholders={ "zone": f"Zone {self.active_cfg}" if len(self.active_cfg) < 3 - else self.active_cfg.upper + else self.active_cfg.upper() }, errors=errors, ) @@ -635,17 +649,19 @@ class OptionsFlowHandler(OptionsFlow): description_placeholders={ "zone": f"Zone {self.active_cfg}" if len(self.active_cfg) < 3 - else self.active_cfg.upper + else 
self.active_cfg.upper() }, errors=errors, ) return await self.async_step_options_digital() - async def async_step_options_digital(self, user_input=None): + async def async_step_options_digital( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the IO options for digital sensors.""" - errors = {} - if user_input is not None: + errors: dict[str, str] = {} + if user_input is not None and self.active_cfg is not None: zone = {"zone": self.active_cfg} zone.update(user_input) self.new_opt[CONF_SENSORS] = [*self.new_opt.get(CONF_SENSORS, []), zone] @@ -710,10 +726,12 @@ class OptionsFlowHandler(OptionsFlow): return await self.async_step_options_switch() - async def async_step_options_switch(self, user_input=None): + async def async_step_options_switch( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the IO options for switches.""" - errors = {} - if user_input is not None: + errors: dict[str, str] = {} + if user_input is not None and self.active_cfg is not None: zone = {"zone": self.active_cfg} zone.update(user_input) del zone[CONF_MORE_STATES] @@ -825,7 +843,9 @@ class OptionsFlowHandler(OptionsFlow): return await self.async_step_options_misc() - async def async_step_options_misc(self, user_input=None): + async def async_step_options_misc( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Allow the user to configure the LED behavior.""" errors = {} if user_input is not None: From 1906155c18e8398097b5e08d07e57faf3712e071 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:24:34 +0200 Subject: [PATCH 1237/1635] Improve type hints in mobile_app config flow (#124906) --- homeassistant/components/mobile_app/config_flow.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/mobile_app/config_flow.py b/homeassistant/components/mobile_app/config_flow.py index bd72b2d7f42..33c0442b529 100644 --- a/homeassistant/components/mobile_app/config_flow.py +++ b/homeassistant/components/mobile_app/config_flow.py @@ -28,7 +28,9 @@ class MobileAppFlowHandler(ConfigFlow, domain=DOMAIN): reason="install_app", description_placeholders=placeholders ) - async def async_step_registration(self, user_input=None): + async def async_step_registration( + self, user_input: dict[str, Any] + ) -> ConfigFlowResult: """Handle a flow initialized during registration.""" if ATTR_DEVICE_ID in user_input: # Unique ID is combi of app + device ID. 
From febb3820309a9cb0cb4e8e6ee109d5500fde4bee Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:25:08 +0200 Subject: [PATCH 1238/1635] Improve type hints in hvv_departures config flow (#124902) --- .../components/hvv_departures/config_flow.py | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py index a02796dbffb..3e1b98d9a38 100644 --- a/homeassistant/components/hvv_departures/config_flow.py +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -49,10 +49,11 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + hub: GTIHub + data: dict[str, Any] + def __init__(self) -> None: """Initialize component.""" - self.hub: GTIHub | None = None - self.data: dict[str, Any] | None = None self.stations: dict[str, Any] = {} async def async_step_user( @@ -86,7 +87,9 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=SCHEMA_STEP_USER, errors=errors ) - async def async_step_station(self, user_input=None): + async def async_step_station( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the step where the user inputs his/her station.""" if user_input is not None: errors = {} @@ -116,7 +119,9 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="station", data_schema=SCHEMA_STEP_STATION) - async def async_step_station_select(self, user_input=None): + async def async_step_station_select( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the step where the user inputs his/her station.""" schema = vol.Schema({vol.Required(CONF_STATION): vol.In(list(self.stations))}) @@ -148,7 +153,9 @@ class OptionsFlowHandler(OptionsFlow): self.options = dict(config_entry.options) self.departure_filters: dict[str, Any] = {} - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" errors = {} if not self.departure_filters: @@ -177,7 +184,7 @@ class OptionsFlowHandler(OptionsFlow): if not errors: self.departure_filters = { str(i): departure_filter - for i, departure_filter in enumerate(departure_list.get("filter")) + for i, departure_filter in enumerate(departure_list["filter"]) } if user_input is not None and not errors: @@ -195,7 +202,7 @@ class OptionsFlowHandler(OptionsFlow): old_filter = [ i for (i, f) in self.departure_filters.items() - if f in self.config_entry.options.get(CONF_FILTER) + if f in self.config_entry.options[CONF_FILTER] ] else: old_filter = [] From afa02dcce9335d07370469550f157c966021947a Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:25:29 +0200 Subject: [PATCH 1239/1635] Improve type hints in growatt_server config flow (#124901) --- homeassistant/components/growatt_server/config_flow.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/growatt_server/config_flow.py b/homeassistant/components/growatt_server/config_flow.py index 8123d7ff067..e676d8fae32 100644 --- a/homeassistant/components/growatt_server/config_flow.py +++ b/homeassistant/components/growatt_server/config_flow.py @@ -23,9 +23,10 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + api: growattServer.GrowattApi + 
def __init__(self) -> None: """Initialise growatt server flow.""" - self.api: growattServer.GrowattApi | None = None self.user_id = None self.data: dict[str, Any] = {} @@ -70,7 +71,9 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): self.data = user_input return await self.async_step_plant() - async def async_step_plant(self, user_input=None): + async def async_step_plant( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle adding a "plant" to Home Assistant.""" plant_info = await self.hass.async_add_executor_job( self.api.plant_list, self.user_id @@ -86,7 +89,8 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="plant", data_schema=data_schema) - if user_input is None and len(plant_info["data"]) == 1: + if user_input is None: + # single plant => mark it as selected user_input = {CONF_PLANT_ID: plant_info["data"][0]["plantId"]} user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]] From 69a9aa4594505bd28923ade30a9274b28e5017a1 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:25:58 +0200 Subject: [PATCH 1240/1635] Improve type hints in icloud config flow (#124900) --- .../components/icloud/config_flow.py | 39 +++++++++++++------ 1 file changed, 27 insertions(+), 12 deletions(-) diff --git a/homeassistant/components/icloud/config_flow.py b/homeassistant/components/icloud/config_flow.py index 544f751dc0b..efcef15b4d0 100644 --- a/homeassistant/components/icloud/config_flow.py +++ b/homeassistant/components/icloud/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping import logging import os -from typing import Any +from typing import TYPE_CHECKING, Any from pyicloud import PyiCloudService from pyicloud.exceptions import ( @@ -200,11 +200,17 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): return await self._validate_and_create_entry(user_input, "reauth_confirm") - async def async_step_trusted_device(self, user_input=None, errors=None): + async def async_step_trusted_device( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """We need a trusted device.""" if errors is None: errors = {} + if TYPE_CHECKING: + assert self.api is not None trusted_devices = await self.hass.async_add_executor_job( getattr, self.api, "trusted_devices" ) @@ -216,7 +222,7 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): if user_input is None: return await self._show_trusted_device_form( - trusted_devices_for_form, user_input, errors + trusted_devices_for_form, errors ) self._trusted_device = trusted_devices[int(user_input[CONF_TRUSTED_DEVICE])] @@ -229,18 +235,18 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): errors[CONF_TRUSTED_DEVICE] = "send_verification_code" return await self._show_trusted_device_form( - trusted_devices_for_form, user_input, errors + trusted_devices_for_form, errors ) return await self.async_step_verification_code() async def _show_trusted_device_form( - self, trusted_devices, user_input=None, errors=None - ): + self, trusted_devices, errors: dict[str, str] | None = None + ) -> ConfigFlowResult: """Show the trusted_device form to the user.""" return self.async_show_form( - step_id=CONF_TRUSTED_DEVICE, + step_id="trusted_device", data_schema=vol.Schema( { vol.Required(CONF_TRUSTED_DEVICE): vol.All( @@ -251,13 +257,20 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors or {}, ) - async def 
async_step_verification_code(self, user_input=None, errors=None): + async def async_step_verification_code( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Ask the verification code to the user.""" if errors is None: errors = {} if user_input is None: - return await self._show_verification_code_form(user_input, errors) + return await self._show_verification_code_form(errors) + + if TYPE_CHECKING: + assert self.api is not None self._verification_code = user_input[CONF_VERIFICATION_CODE] @@ -310,11 +323,13 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): } ) - async def _show_verification_code_form(self, user_input=None, errors=None): + async def _show_verification_code_form( + self, errors: dict[str, str] | None = None + ) -> ConfigFlowResult: """Show the verification_code form to the user.""" return self.async_show_form( - step_id=CONF_VERIFICATION_CODE, + step_id="verification_code", data_schema=vol.Schema({vol.Required(CONF_VERIFICATION_CODE): str}), - errors=errors or {}, + errors=errors, ) From 6781a76de2c5c9d080df1f2c0e98dc683caa569c Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 29 Aug 2024 23:36:31 -1000 Subject: [PATCH 1241/1635] Speed up ssdp domain matching (#124842) * Speed up ssdp domain matching Switch all() expression to dict.items() <= dict.items() * rewrite as setcomp --- homeassistant/components/ssdp/__init__.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/ssdp/__init__.py b/homeassistant/components/ssdp/__init__.py index 7ca2f3e9318..f5e2a012730 100644 --- a/homeassistant/components/ssdp/__init__.py +++ b/homeassistant/components/ssdp/__init__.py @@ -284,16 +284,13 @@ class IntegrationMatchers: def async_matching_domains(self, info_with_desc: CaseInsensitiveDict) -> set[str]: """Find domains matching the passed CaseInsensitiveDict.""" assert self._match_by_key is not None - domains = set() - for key, matchers_by_key in self._match_by_key.items(): - if not (match_value := info_with_desc.get(key)): - continue - for domain, matcher in matchers_by_key.get(match_value, []): - if domain in domains: - continue - if all(info_with_desc.get(k) == v for (k, v) in matcher.items()): - domains.add(domain) - return domains + return { + domain + for key, matchers_by_key in self._match_by_key.items() + if (match_value := info_with_desc.get(key)) + for domain, matcher in matchers_by_key.get(match_value, ()) + if info_with_desc.items() >= matcher.items() + } class Scanner: From f394dfb8d061b96f74c92dc13e79bd9ea43c5ea5 Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Fri, 30 Aug 2024 11:38:07 +0200 Subject: [PATCH 1242/1635] Handle CancelledError in bluesound integration (#124873) Catch CancledError in async_will_remove_from_hass --- homeassistant/components/bluesound/media_player.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 92f47977ee5..1ed53d7bfc5 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -309,7 +309,7 @@ class BluesoundPlayer(MediaPlayerEntity): return True - async def _start_poll_command(self): + async def _poll_loop(self): """Loop which polls the status of the player.""" while True: try: @@ -335,7 +335,7 @@ class BluesoundPlayer(MediaPlayerEntity): await super().async_added_to_hass() self._polling_task = 
self.hass.async_create_background_task( - self._start_poll_command(), + self._poll_loop(), name=f"bluesound.polling_{self.host}:{self.port}", ) @@ -345,7 +345,9 @@ class BluesoundPlayer(MediaPlayerEntity): assert self._polling_task is not None if self._polling_task.cancel(): - await self._polling_task + # the sleeps in _poll_loop will raise CancelledError + with suppress(CancelledError): + await self._polling_task self.hass.data[DATA_BLUESOUND].remove(self) From aeb95c45091760be4f1dbdb873b8404d503b1a3b Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Fri, 30 Aug 2024 06:43:29 -0400 Subject: [PATCH 1243/1635] Bump pysqueezebox to v0.8.1 (#124856) --- homeassistant/components/squeezebox/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/squeezebox/manifest.json b/homeassistant/components/squeezebox/manifest.json index 40bc8f36d22..c43225f94cd 100644 --- a/homeassistant/components/squeezebox/manifest.json +++ b/homeassistant/components/squeezebox/manifest.json @@ -12,5 +12,5 @@ "documentation": "https://www.home-assistant.io/integrations/squeezebox", "iot_class": "local_polling", "loggers": ["pysqueezebox"], - "requirements": ["pysqueezebox==0.7.1"] + "requirements": ["pysqueezebox==0.8.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 4fd2c8932f4..ead909dbcc2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2235,7 +2235,7 @@ pyspcwebgw==0.7.0 pyspeex-noise==1.0.2 # homeassistant.components.squeezebox -pysqueezebox==0.7.1 +pysqueezebox==0.8.1 # homeassistant.components.stiebel_eltron pystiebeleltron==0.0.1.dev2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 63721bc9360..a0bc05e14b3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1789,7 +1789,7 @@ pyspcwebgw==0.7.0 pyspeex-noise==1.0.2 # homeassistant.components.squeezebox -pysqueezebox==0.7.1 +pysqueezebox==0.8.1 # homeassistant.components.suez_water pysuez==0.2.0 From f3da9de744ce5942abeeb1fb57043e8649a931db Mon Sep 17 00:00:00 2001 From: Jeef Date: Fri, 30 Aug 2024 04:45:08 -0600 Subject: [PATCH 1244/1635] Bump weatherflow4py to 0.2.23 (#124072) patch weatherflow for new data --- homeassistant/components/weatherflow_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 354b9642c06..aaa5bce2e16 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", "loggers": ["weatherflow4py"], - "requirements": ["weatherflow4py==0.2.21"] + "requirements": ["weatherflow4py==0.2.23"] } diff --git a/requirements_all.txt b/requirements_all.txt index ead909dbcc2..9fd599fba93 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2918,7 +2918,7 @@ watchdog==2.3.1 waterfurnace==1.1.0 # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.21 +weatherflow4py==0.2.23 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a0bc05e14b3..0acc2a9f916 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt 
@@ -2307,7 +2307,7 @@ wallbox==0.7.0 watchdog==2.3.1 # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.21 +weatherflow4py==0.2.23 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 From 54188b4128c795054a59f9e1bd3edf98af66f28b Mon Sep 17 00:00:00 2001 From: Michael Arthur Date: Fri, 30 Aug 2024 22:59:13 +1200 Subject: [PATCH 1245/1635] Add returning activity to Husqvarna lawn mower (#124511) * add returning activity to husqvarna lawn mower * Update test, fix bug with comparison operator --- homeassistant/components/husqvarna_automower/lawn_mower.py | 3 ++- tests/components/husqvarna_automower/test_lawn_mower.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/husqvarna_automower/lawn_mower.py b/homeassistant/components/husqvarna_automower/lawn_mower.py index ac0f1fd6af2..eeabaa09f79 100644 --- a/homeassistant/components/husqvarna_automower/lawn_mower.py +++ b/homeassistant/components/husqvarna_automower/lawn_mower.py @@ -26,7 +26,6 @@ DOCKED_ACTIVITIES = (MowerActivities.PARKED_IN_CS, MowerActivities.CHARGING) MOWING_ACTIVITIES = ( MowerActivities.MOWING, MowerActivities.LEAVING, - MowerActivities.GOING_HOME, ) PAUSED_STATES = [ MowerStates.PAUSED, @@ -107,6 +106,8 @@ class AutomowerLawnMowerEntity(AutomowerAvailableEntity, LawnMowerEntity): return LawnMowerActivity.PAUSED if mower_attributes.mower.activity in MOWING_ACTIVITIES: return LawnMowerActivity.MOWING + if mower_attributes.mower.activity == MowerActivities.GOING_HOME: + return LawnMowerActivity.RETURNING if (mower_attributes.mower.state == "RESTRICTED") or ( mower_attributes.mower.activity in DOCKED_ACTIVITIES ): diff --git a/tests/components/husqvarna_automower/test_lawn_mower.py b/tests/components/husqvarna_automower/test_lawn_mower.py index 2ae427e0e1e..552a3a6a9cf 100644 --- a/tests/components/husqvarna_automower/test_lawn_mower.py +++ b/tests/components/husqvarna_automower/test_lawn_mower.py @@ -44,6 +44,7 @@ async def test_lawn_mower_states( ("UNKNOWN", "PAUSED", LawnMowerActivity.PAUSED), ("MOWING", "NOT_APPLICABLE", LawnMowerActivity.MOWING), ("NOT_APPLICABLE", "ERROR", LawnMowerActivity.ERROR), + ("GOING_HOME", "IN_OPERATION", LawnMowerActivity.RETURNING), ): values[TEST_MOWER_ID].mower.activity = activity values[TEST_MOWER_ID].mower.state = state From 397198c6d090287f9d3ab5c17f5424ac131586e2 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Fri, 30 Aug 2024 13:09:10 +0200 Subject: [PATCH 1246/1635] Optimize hassfest image (#124855) * Optimize hassfest docker image * Adjust CI * Use dynamic uv version * Remove workaround --- .github/workflows/builder.yml | 10 +- script/hassfest/docker.py | 134 +++++++++++++++--- script/hassfest/docker/Dockerfile | 35 +++-- .../hassfest/docker/Dockerfile.dockerignore | 8 ++ script/hassfest/docker/entrypoint.sh | 22 +-- 5 files changed, 163 insertions(+), 46 deletions(-) create mode 100644 script/hassfest/docker/Dockerfile.dockerignore diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index ab64f9e5519..910e179cd8e 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -491,7 +491,7 @@ jobs: packages: write attestations: write id-token: write - needs: ["init", "build_base"] + needs: ["init"] if: github.repository_owner == 'home-assistant' env: HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest @@ -510,8 +510,8 @@ jobs: - name: Build Docker image uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 with: - context: ./script/hassfest/docker - 
build-args: BASE_IMAGE=ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }} + context: . # So action will not pull the repository again + file: ./script/hassfest/docker/Dockerfile load: true tags: ${{ env.HASSFEST_IMAGE_TAG }} @@ -523,8 +523,8 @@ jobs: id: push uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0 with: - context: ./script/hassfest/docker - build-args: BASE_IMAGE=ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }} + context: . # So action will not pull the repository again + file: ./script/hassfest/docker/Dockerfile push: true tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index e38a238be7d..6e39a5c350b 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -1,7 +1,12 @@ """Generate and validate the dockerfile.""" +from dataclasses import dataclass +from pathlib import Path + from homeassistant import core +from homeassistant.const import Platform from homeassistant.util import executor, thread +from script.gen_requirements_all import gather_recursive_requirements from .model import Config, Integration from .requirements import PACKAGE_REGEX, PIP_VERSION_RANGE_SEPARATOR @@ -20,7 +25,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv=={uv_version} +RUN pip3 install uv=={uv} WORKDIR /usr/src @@ -61,30 +66,105 @@ COPY rootfs / WORKDIR /config """ +_HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. +# +# To update, run python3 -m script.hassfest -p docker +FROM python:alpine3.20 -def _get_uv_version() -> str: - with open("requirements_test.txt") as fp: +ENV \ + UV_SYSTEM_PYTHON=true \ + UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" + +SHELL ["/bin/sh", "-o", "pipefail", "-c"] +ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] +WORKDIR "/github/workspace" + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:{uv} /uv /bin/uv + +COPY . /usr/src/homeassistant + +RUN \ + # Required for PyTurboJPEG + apk add --no-cache libturbojpeg \ + && cd /usr/src/homeassistant \ + && uv pip install \ + --no-build \ + --no-cache \ + -c homeassistant/package_constraints.txt \ + -r requirements.txt \ + stdlib-list==0.10.0 pipdeptree=={pipdeptree} tqdm=={tqdm} ruff=={ruff} \ + {required_components_packages} + +LABEL "name"="hassfest" +LABEL "maintainer"="Home Assistant " + +LABEL "com.github.actions.name"="hassfest" +LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" +LABEL "com.github.actions.icon"="terminal" +LABEL "com.github.actions.color"="gray-dark" +""" + + +def _get_package_versions(file: str, packages: set[str]) -> dict[str, str]: + package_versions: dict[str, str] = {} + with open(file, encoding="UTF-8") as fp: for _, line in enumerate(fp): + if package_versions.keys() == packages: + return package_versions + if match := PACKAGE_REGEX.match(line): pkg, sep, version = match.groups() - if pkg != "uv": + if pkg not in packages: continue if sep != "==" or not version: raise RuntimeError( - 'Requirement uv need to be pinned "uv==".' + f'Requirement {pkg} need to be pinned "{pkg}==".' 
) for part in version.split(";", 1)[0].split(","): version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) if version_part: - return version_part.group(2) + package_versions[pkg] = version_part.group(2) + break - raise RuntimeError("Invalid uv requirement in requirements_test.txt") + if package_versions.keys() == packages: + return package_versions + + raise RuntimeError("At least one package was not found in the requirements file.") -def _generate_dockerfile() -> str: +@dataclass +class File: + """File.""" + + content: str + path: Path + + +def _generate_hassfest_dockerimage( + config: Config, timeout: int, package_versions: dict[str, str] +) -> File: + packages = set() + already_checked_domains = set() + for platform in Platform: + packages.update( + gather_recursive_requirements(platform.value, already_checked_domains) + ) + + return File( + _HASSFEST_TEMPLATE.format( + timeout=timeout, + required_components_packages=" ".join(sorted(packages)), + **package_versions, + ), + config.root / "script/hassfest/docker/Dockerfile", + ) + + +def _generate_files(config: Config) -> list[File]: timeout = ( core.STOPPING_STAGE_SHUTDOWN_TIMEOUT + core.STOP_STAGE_SHUTDOWN_TIMEOUT @@ -93,27 +173,39 @@ def _generate_dockerfile() -> str: + executor.EXECUTOR_SHUTDOWN_TIMEOUT + thread.THREADING_SHUTDOWN_TIMEOUT + 10 + ) * 1000 + + package_versions = _get_package_versions( + "requirements_test.txt", {"pipdeptree", "tqdm", "uv"} ) - return DOCKERFILE_TEMPLATE.format( - timeout=timeout * 1000, uv_version=_get_uv_version() + package_versions |= _get_package_versions( + "requirements_test_pre_commit.txt", {"ruff"} ) + return [ + File( + DOCKERFILE_TEMPLATE.format(timeout=timeout, **package_versions), + config.root / "Dockerfile", + ), + _generate_hassfest_dockerimage(config, timeout, package_versions), + ] + def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate dockerfile.""" - dockerfile_content = _generate_dockerfile() - config.cache["dockerfile"] = dockerfile_content + docker_files = _generate_files(config) + config.cache["docker"] = docker_files - dockerfile_path = config.root / "Dockerfile" - if dockerfile_path.read_text() != dockerfile_content: - config.add_error( - "docker", - "File Dockerfile is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + for file in docker_files: + if file.content != file.path.read_text(): + config.add_error( + "docker", + f"File {file.path} is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dockerfile.""" - dockerfile_path = config.root / "Dockerfile" - dockerfile_path.write_text(config.cache["dockerfile"]) + for file in _generate_files(config): + file.path.write_text(file.content) diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 8921d92307e..4fc60c0c621 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -1,17 +1,32 @@ -ARG BASE_IMAGE=ghcr.io/home-assistant/home-assistant:beta -FROM $BASE_IMAGE +# Automatically generated by hassfest. 
+# +# To update, run python3 -m script.hassfest -p docker +FROM python:alpine3.20 -SHELL ["/bin/bash", "-o", "pipefail", "-c"] +ENV \ + UV_SYSTEM_PYTHON=true \ + UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" -COPY entrypoint.sh /entrypoint.sh +SHELL ["/bin/sh", "-o", "pipefail", "-c"] +ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] +WORKDIR "/github/workspace" + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:0.2.27 /uv /bin/uv + +COPY . /usr/src/homeassistant RUN \ - uv pip install stdlib-list==0.10.0 \ - $(grep -e "^pipdeptree" -e "^tqdm" /usr/src/homeassistant/requirements_test.txt) \ - $(grep -e "^ruff" /usr/src/homeassistant/requirements_test_pre_commit.txt) - -WORKDIR "/github/workspace" -ENTRYPOINT ["/entrypoint.sh"] + # Required for PyTurboJPEG + apk add --no-cache libturbojpeg \ + && cd /usr/src/homeassistant \ + && uv pip install \ + --no-build \ + --no-cache \ + -c homeassistant/package_constraints.txt \ + -r requirements.txt \ + stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 ruff==0.6.2 \ + PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.8.29 mutagen==1.47.0 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/script/hassfest/docker/Dockerfile.dockerignore b/script/hassfest/docker/Dockerfile.dockerignore new file mode 100644 index 00000000000..75ed4f0e5d3 --- /dev/null +++ b/script/hassfest/docker/Dockerfile.dockerignore @@ -0,0 +1,8 @@ +# Ignore everything except the specified files +* + +!homeassistant/ +!requirements.txt +!script/ +script/hassfest/docker/ +!script/hassfest/docker/entrypoint.sh diff --git a/script/hassfest/docker/entrypoint.sh b/script/hassfest/docker/entrypoint.sh index 33330f63161..7b75eb186d2 100755 --- a/script/hassfest/docker/entrypoint.sh +++ b/script/hassfest/docker/entrypoint.sh @@ -1,16 +1,18 @@ -#!/usr/bin/env bashio -declare -a integrations -declare integration_path +#!/bin/sh -shopt -s globstar nullglob -for manifest in **/manifest.json; do +integrations="" +integration_path="" + +# Enable recursive globbing using find +for manifest in $(find . -name "manifest.json"); do manifest_path=$(realpath "${manifest}") - integrations+=(--integration-path "${manifest_path%/*}") + integrations="$integrations --integration-path ${manifest_path%/*}" done -if [[ ${#integrations[@]} -eq 0 ]]; then - bashio::exit.nok "No integrations found!" +if [ -z "$integrations" ]; then + echo "Error: No integrations found!" + exit 1 fi -cd /usr/src/homeassistant -exec python3 -m script.hassfest --action validate "${integrations[@]}" "$@" \ No newline at end of file +cd /usr/src/homeassistant || exit 1 +exec python3 -m script.hassfest --action validate $integrations "$@" From 5bd736029f7b4eea3ed70786abe9b9ee4ce8116c Mon Sep 17 00:00:00 2001 From: "Lektri.co" <137074859+Lektrico@users.noreply.github.com> Date: Fri, 30 Aug 2024 14:20:15 +0300 Subject: [PATCH 1247/1635] Add lektrico integration (#102371) * Add Lektrico Integration * Make the changes proposed by Lash-L: new coordinator.py, new entity.py; use: translation_key, last_update_sucess, PlatformNotReady; remove: global variables * Replace FlowResult with ConfigFlowResult and add tests. * Remove unused lines. * Remove Options from condif_flow * Fix ruff and mypy. * Fix CODEOWNERS. * Run python3 -m script.hassfest. * Correct rebase mistake. * Make modifications suggested by emontnemery. * Add pytest fixtures. * Remove meaningless patches. * Update .coveragerc * Replace CONF_FRIENDLY_NAME with CONF_NAME. 
* Remove underscores. * Update tests. * Update test file with is and no config_entries. . * Set serial_number in DeviceInfo and add return type of the async_update_data to DataUpdateCoordinator. * Use suggested_unit_of_measurement for KILO_WATT and replace Any in value_fn (sensor file). * Add device class duration to charging_time sensor. * Change raising PlatformNotReady to raising IntegrationError. * Test the unique id of the entry. * Rename PF Lx with Power factor Lx and remove PF from strings.json. * Remove comment. * Make state and limit reason sensors to be enum sensors. * Use result variable to check unique_id in test. * Remove CONF_NAME from entry and __init__ from LektricoFlowHandler. * Remove session parameter from LektricoDeviceDataUpdateCoordinator. * Use config_entry: ConfigEntry in coordinator. * Replace Connected,NeedAuth with Waiting for Authentication. * Use lektricowifi 0.0.29. * Use lektricowifi 0.0.39 * Use lektricowifi 0.0.40 * Use lektricowifi 0.0.41 * Replace hass.data with entry.runtime_data * Delete .coveragerc * Restructure the user step * Fix tests * Add returned value of _async_update_data to class DataUpdateCoordinator * Use hw_version at DeviceInfo * Remove a variable * Use StateType * Replace friendly_name with device_name * Use sentence case in translation strings * Uncomment and fix test_discovered_zeroconf * Add type LektricoConfigEntry * Remove commented code * Remove the type of coordinator in sensor async_setup_entry * Make zeroconf test end in ABORT, not FORM * Remove all async_block_till_done from tests * End test_user_setup_device_offline with CREATE_ENTRY * Patch the full Device * Add snapshot tests * Overwrite the type LektricoSensorEntityDescription outside of the constructor * Test separate already_configured for zeroconf --------- Co-authored-by: mihaela.tarjoianu Co-authored-by: Erik Montnemery --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/components/lektrico/__init__.py | 51 ++ .../components/lektrico/config_flow.py | 138 +++++ homeassistant/components/lektrico/const.py | 9 + .../components/lektrico/coordinator.py | 52 ++ homeassistant/components/lektrico/entity.py | 33 ++ .../components/lektrico/manifest.json | 16 + homeassistant/components/lektrico/sensor.py | 324 +++++++++++ .../components/lektrico/strings.json | 101 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + homeassistant/generated/zeroconf.py | 4 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/lektrico/__init__.py | 13 + tests/components/lektrico/conftest.py | 92 +++ .../lektrico/fixtures/current_measures.json | 16 + .../lektrico/fixtures/get_config.json | 5 + .../lektrico/fixtures/get_info.json | 13 + .../lektrico/snapshots/test_init.ambr | 33 ++ .../lektrico/snapshots/test_sensor.ambr | 534 ++++++++++++++++++ tests/components/lektrico/test_config_flow.py | 173 ++++++ tests/components/lektrico/test_init.py | 29 + tests/components/lektrico/test_sensor.py | 31 + 26 files changed, 1693 insertions(+) create mode 100644 homeassistant/components/lektrico/__init__.py create mode 100644 homeassistant/components/lektrico/config_flow.py create mode 100644 homeassistant/components/lektrico/const.py create mode 100644 homeassistant/components/lektrico/coordinator.py create mode 100644 homeassistant/components/lektrico/entity.py create mode 100644 homeassistant/components/lektrico/manifest.json create mode 100644 homeassistant/components/lektrico/sensor.py create mode 100644 
homeassistant/components/lektrico/strings.json create mode 100644 tests/components/lektrico/__init__.py create mode 100644 tests/components/lektrico/conftest.py create mode 100644 tests/components/lektrico/fixtures/current_measures.json create mode 100644 tests/components/lektrico/fixtures/get_config.json create mode 100644 tests/components/lektrico/fixtures/get_info.json create mode 100644 tests/components/lektrico/snapshots/test_init.ambr create mode 100644 tests/components/lektrico/snapshots/test_sensor.ambr create mode 100644 tests/components/lektrico/test_config_flow.py create mode 100644 tests/components/lektrico/test_init.py create mode 100644 tests/components/lektrico/test_sensor.py diff --git a/.strict-typing b/.strict-typing index c8aa9878413..a65ccf3ec88 100644 --- a/.strict-typing +++ b/.strict-typing @@ -279,6 +279,7 @@ homeassistant.components.lawn_mower.* homeassistant.components.lcn.* homeassistant.components.ld2410_ble.* homeassistant.components.led_ble.* +homeassistant.components.lektrico.* homeassistant.components.lidarr.* homeassistant.components.lifx.* homeassistant.components.light.* diff --git a/CODEOWNERS b/CODEOWNERS index 990ed679d2b..97a1a1e49a1 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -799,6 +799,8 @@ build.json @home-assistant/supervisor /tests/components/leaone/ @bdraco /homeassistant/components/led_ble/ @bdraco /tests/components/led_ble/ @bdraco +/homeassistant/components/lektrico/ @lektrico +/tests/components/lektrico/ @lektrico /homeassistant/components/lg_netcast/ @Drafteed @splinter98 /tests/components/lg_netcast/ @Drafteed @splinter98 /homeassistant/components/lidarr/ @tkdrob diff --git a/homeassistant/components/lektrico/__init__.py b/homeassistant/components/lektrico/__init__.py new file mode 100644 index 00000000000..70dbecca77a --- /dev/null +++ b/homeassistant/components/lektrico/__init__.py @@ -0,0 +1,51 @@ +"""The Lektrico Charging Station integration.""" + +from __future__ import annotations + +from lektricowifi import Device + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ATTR_SERIAL_NUMBER, CONF_TYPE, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import LektricoDeviceDataUpdateCoordinator + +# List the platforms that charger supports. +CHARGERS_PLATFORMS = [Platform.SENSOR] + +# List the platforms that load balancer device supports. 
+LB_DEVICES_PLATFORMS = [Platform.SENSOR] + +type LektricoConfigEntry = ConfigEntry[LektricoDeviceDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: LektricoConfigEntry) -> bool: + """Set up Lektrico Charging Station from a config entry.""" + coordinator = LektricoDeviceDataUpdateCoordinator( + hass, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, _get_platforms(entry)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + + return await hass.config_entries.async_unload_platforms( + entry, _get_platforms(entry) + ) + + +def _get_platforms(entry: ConfigEntry) -> list[Platform]: + """Return the platforms for this type of device.""" + _device_type: str = entry.data[CONF_TYPE] + if _device_type in (Device.TYPE_1P7K, Device.TYPE_3P22K): + return CHARGERS_PLATFORMS + return LB_DEVICES_PLATFORMS diff --git a/homeassistant/components/lektrico/config_flow.py b/homeassistant/components/lektrico/config_flow.py new file mode 100644 index 00000000000..7091856f4fd --- /dev/null +++ b/homeassistant/components/lektrico/config_flow.py @@ -0,0 +1,138 @@ +"""Config flow for Lektrico Charging Station.""" + +from __future__ import annotations + +from typing import Any + +from lektricowifi import Device, DeviceConnectionError +import voluptuous as vol + +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) +from homeassistant.core import callback +from homeassistant.helpers.httpx_client import get_async_client + +from .const import DOMAIN + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + + +class LektricoFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a Lektrico config flow.""" + + VERSION = 1 + + _host: str + _name: str + _serial_number: str + _board_revision: str + _device_type: str + + async def async_step_user( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors = None + + if user_input is not None: + self._host = user_input[CONF_HOST] + + # obtain serial number + try: + await self._get_lektrico_device_settings_and_treat_unique_id() + return self._async_create_entry() + except DeviceConnectionError: + errors = {CONF_HOST: "cannot_connect"} + + return self._async_show_setup_form(user_input=user_input, errors=errors) + + @callback + def _async_show_setup_form( + self, + user_input: dict[str, Any] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: + """Show the setup form to the user.""" + if user_input is None: + user_input = {} + + schema = self.add_suggested_values_to_schema(STEP_USER_DATA_SCHEMA, user_input) + + return self.async_show_form( + step_id="user", + data_schema=schema, + errors=errors or {}, + ) + + @callback + def _async_create_entry(self) -> ConfigFlowResult: + return self.async_create_entry( + title=self._name, + data={ + CONF_HOST: self._host, + ATTR_SERIAL_NUMBER: self._serial_number, + CONF_TYPE: self._device_type, + ATTR_HW_VERSION: self._board_revision, + }, + ) + + async def async_step_zeroconf( + self, discovery_info: ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle 
zeroconf discovery.""" + self._host = discovery_info.host # 192.168.100.11 + + # read settings from the device + try: + await self._get_lektrico_device_settings_and_treat_unique_id() + except DeviceConnectionError: + return self.async_abort(reason="cannot_connect") + + self.context["title_placeholders"] = { + "serial_number": self._serial_number, + "name": self._name, + } + + return await self.async_step_confirm() + + async def _get_lektrico_device_settings_and_treat_unique_id(self) -> None: + """Get device's serial number from a Lektrico device.""" + device = Device( + _host=self._host, + asyncClient=get_async_client(self.hass), + ) + + settings = await device.device_config() + self._serial_number = str(settings["serial_number"]) + self._device_type = settings["type"] + self._board_revision = settings["board_revision"] + self._name = f"{settings["type"]}_{self._serial_number}" + + # Check if already configured + # Set unique id + await self.async_set_unique_id(self._serial_number, raise_on_progress=True) + # Abort if already configured, but update the last-known host + self._abort_if_unique_id_configured( + updates={CONF_HOST: self._host}, reload_on_update=True + ) + + async def async_step_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: + """Allow the user to confirm adding the device.""" + + if user_input is not None: + return self._async_create_entry() + + self._set_confirm_only() + return self.async_show_form(step_id="confirm") diff --git a/homeassistant/components/lektrico/const.py b/homeassistant/components/lektrico/const.py new file mode 100644 index 00000000000..d3fc52f61be --- /dev/null +++ b/homeassistant/components/lektrico/const.py @@ -0,0 +1,9 @@ +"""Constants for the Lektrico Charging Station integration.""" + +from logging import Logger, getLogger + +# Integration domain +DOMAIN = "lektrico" + +# Logger +LOGGER: Logger = getLogger(__package__) diff --git a/homeassistant/components/lektrico/coordinator.py b/homeassistant/components/lektrico/coordinator.py new file mode 100644 index 00000000000..7c72a00e2d3 --- /dev/null +++ b/homeassistant/components/lektrico/coordinator.py @@ -0,0 +1,52 @@ +"""Coordinator for the Lektrico Charging Station integration.""" + +from __future__ import annotations + +from datetime import timedelta +from typing import Any + +from lektricowifi import Device, DeviceConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + +SCAN_INTERVAL = timedelta(seconds=10) + + +class LektricoDeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Data update coordinator for Lektrico device.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, device_name: str) -> None: + """Initialize a Lektrico Device.""" + super().__init__( + hass, + LOGGER, + name=device_name, + update_interval=SCAN_INTERVAL, + ) + self.device = Device( + self.config_entry.data[CONF_HOST], + asyncClient=get_async_client(hass), + ) + self.serial_number: str = self.config_entry.data[ATTR_SERIAL_NUMBER] + self.board_revision: str = self.config_entry.data[ATTR_HW_VERSION] + self.device_type: str = self.config_entry.data[CONF_TYPE] + + async def _async_update_data(self) -> 
dict[str, Any]: + """Async Update device state.""" + try: + return await self.device.device_info(self.device_type) + except DeviceConnectionError as lek_ex: + raise UpdateFailed(lek_ex) from lek_ex diff --git a/homeassistant/components/lektrico/entity.py b/homeassistant/components/lektrico/entity.py new file mode 100644 index 00000000000..1a5e08febe3 --- /dev/null +++ b/homeassistant/components/lektrico/entity.py @@ -0,0 +1,33 @@ +"""Entity classes for the Lektrico integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import LektricoDeviceDataUpdateCoordinator +from .const import DOMAIN + + +class LektricoEntity(CoordinatorEntity[LektricoDeviceDataUpdateCoordinator]): + """Define an Lektrico entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.serial_number)}, + model=coordinator.device_type.upper(), + name=device_name, + manufacturer="Lektrico", + sw_version=coordinator.data["fw_version"], + hw_version=coordinator.board_revision, + serial_number=coordinator.serial_number, + ) diff --git a/homeassistant/components/lektrico/manifest.json b/homeassistant/components/lektrico/manifest.json new file mode 100644 index 00000000000..5aef09f3845 --- /dev/null +++ b/homeassistant/components/lektrico/manifest.json @@ -0,0 +1,16 @@ +{ + "domain": "lektrico", + "name": "Lektrico Charging Station", + "codeowners": ["@lektrico"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/lektrico", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["lektricowifi==0.0.41"], + "zeroconf": [ + { + "type": "_http._tcp.local.", + "name": "lektrico*" + } + ] +} diff --git a/homeassistant/components/lektrico/sensor.py b/homeassistant/components/lektrico/sensor.py new file mode 100644 index 00000000000..a8a929d974f --- /dev/null +++ b/homeassistant/components/lektrico/sensor.py @@ -0,0 +1,324 @@ +"""Support for Lektrico charging station sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from lektricowifi import Device + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + ATTR_SERIAL_NUMBER, + CONF_TYPE, + PERCENTAGE, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfEnergy, + UnitOfPower, + UnitOfTemperature, + UnitOfTime, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import IntegrationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import LektricoConfigEntry, LektricoDeviceDataUpdateCoordinator +from .entity import LektricoEntity + + +@dataclass(frozen=True, kw_only=True) +class LektricoSensorEntityDescription(SensorEntityDescription): + """A class that describes the Lektrico sensor entities.""" + + value_fn: Callable[[dict[str, Any]], StateType] + + +SENSORS_FOR_CHARGERS: tuple[LektricoSensorEntityDescription, ...] 
= ( + LektricoSensorEntityDescription( + key="state", + device_class=SensorDeviceClass.ENUM, + options=[ + "available", + "connected", + "need_auth", + "paused", + "charging", + "error", + "updating_firmware", + ], + translation_key="state", + value_fn=lambda data: str(data["charger_state"]), + ), + LektricoSensorEntityDescription( + key="charging_time", + translation_key="charging_time", + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + value_fn=lambda data: int(data["charging_time"]), + ), + LektricoSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["instant_power"]), + ), + LektricoSensorEntityDescription( + key="energy", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda data: float(data["session_energy"]) / 1000, + ), + LektricoSensorEntityDescription( + key="temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda data: float(data["temperature"]), + ), + LektricoSensorEntityDescription( + key="lifetime_energy", + translation_key="lifetime_energy", + state_class=SensorStateClass.TOTAL_INCREASING, + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value_fn=lambda data: int(data["total_charged_energy"]), + ), + LektricoSensorEntityDescription( + key="installation_current", + translation_key="installation_current", + device_class=SensorDeviceClass.CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: int(data["install_current"]), + ), + LektricoSensorEntityDescription( + key="limit_reason", + translation_key="limit_reason", + device_class=SensorDeviceClass.ENUM, + options=[ + "no_limit", + "installation_current", + "user_limit", + "dynamic_limit", + "schedule", + "em_offline", + "em", + "ocpp", + ], + value_fn=lambda data: str(data["current_limit_reason"]), + ), +) + +SENSORS_FOR_LB_DEVICES: tuple[LektricoSensorEntityDescription, ...] = ( + LektricoSensorEntityDescription( + key="breaker_current", + translation_key="breaker_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: int(data["breaker_curent"]), + ), +) + +SENSORS_FOR_1_PHASE: tuple[LektricoSensorEntityDescription, ...] = ( + LektricoSensorEntityDescription( + key="voltage", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l1"]), + ), + LektricoSensorEntityDescription( + key="current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l1"]), + ), +) + +SENSORS_FOR_3_PHASE: tuple[LektricoSensorEntityDescription, ...] 
= ( + LektricoSensorEntityDescription( + key="voltage_l1", + translation_key="voltage_l1", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l1"]), + ), + LektricoSensorEntityDescription( + key="voltage_l2", + translation_key="voltage_l2", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l2"]), + ), + LektricoSensorEntityDescription( + key="voltage_l3", + translation_key="voltage_l3", + device_class=SensorDeviceClass.VOLTAGE, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_fn=lambda data: float(data["voltage_l3"]), + ), + LektricoSensorEntityDescription( + key="current_l1", + translation_key="current_l1", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l1"]), + ), + LektricoSensorEntityDescription( + key="current_l2", + translation_key="current_l2", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l2"]), + ), + LektricoSensorEntityDescription( + key="current_l3", + translation_key="current_l3", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_fn=lambda data: float(data["current_l3"]), + ), +) + + +SENSORS_FOR_LB_1_PHASE: tuple[LektricoSensorEntityDescription, ...] = ( + LektricoSensorEntityDescription( + key="power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l1"]), + ), + LektricoSensorEntityDescription( + key="pf", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l1"]) * 100, + ), +) + + +SENSORS_FOR_LB_3_PHASE: tuple[LektricoSensorEntityDescription, ...] 
= ( + LektricoSensorEntityDescription( + key="power_l1", + translation_key="power_l1", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l1"]), + ), + LektricoSensorEntityDescription( + key="power_l2", + translation_key="power_l2", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l2"]), + ), + LektricoSensorEntityDescription( + key="power_l3", + translation_key="power_l3", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + value_fn=lambda data: float(data["power_l3"]), + ), + LektricoSensorEntityDescription( + key="pf_l1", + translation_key="pf_l1", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l1"]) * 100, + ), + LektricoSensorEntityDescription( + key="pf_l2", + translation_key="pf_l2", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l2"]) * 100, + ), + LektricoSensorEntityDescription( + key="pf_l3", + translation_key="pf_l3", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: float(data["power_factor_l3"]) * 100, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: LektricoConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Lektrico charger based on a config entry.""" + coordinator = entry.runtime_data + + sensors_to_be_used: tuple[LektricoSensorEntityDescription, ...] 
+ if coordinator.device_type == Device.TYPE_1P7K: + sensors_to_be_used = SENSORS_FOR_CHARGERS + SENSORS_FOR_1_PHASE + elif coordinator.device_type == Device.TYPE_3P22K: + sensors_to_be_used = SENSORS_FOR_CHARGERS + SENSORS_FOR_3_PHASE + elif coordinator.device_type == Device.TYPE_EM: + sensors_to_be_used = ( + SENSORS_FOR_LB_DEVICES + SENSORS_FOR_1_PHASE + SENSORS_FOR_LB_1_PHASE + ) + elif coordinator.device_type == Device.TYPE_3EM: + sensors_to_be_used = ( + SENSORS_FOR_LB_DEVICES + SENSORS_FOR_3_PHASE + SENSORS_FOR_LB_3_PHASE + ) + else: + raise IntegrationError + + async_add_entities( + LektricoSensor( + description, + coordinator, + f"{entry.data[CONF_TYPE]}_{entry.data[ATTR_SERIAL_NUMBER]}", + ) + for description in sensors_to_be_used + ) + + +class LektricoSensor(LektricoEntity, SensorEntity): + """The entity class for Lektrico charging stations sensors.""" + + entity_description: LektricoSensorEntityDescription + + def __init__( + self, + description: LektricoSensorEntityDescription, + coordinator: LektricoDeviceDataUpdateCoordinator, + device_name: str, + ) -> None: + """Initialize Lektrico charger.""" + super().__init__(coordinator, device_name) + self.entity_description = description + self._attr_unique_id = f"{coordinator.serial_number}_{description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/lektrico/strings.json b/homeassistant/components/lektrico/strings.json new file mode 100644 index 00000000000..767987e7e64 --- /dev/null +++ b/homeassistant/components/lektrico/strings.json @@ -0,0 +1,101 @@ +{ + "config": { + "step": { + "user": { + "description": "Set required parameters to connect to your device", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "device_name": "[%key:common::config_flow::data::name%]" + } + }, + "zeroconf_confirm": { + "description": "Do you want to add the Lektrico Charger with serial number `{serial_number}` to Home Assistant?", + "title": "Discovered Lektrico Charger device" + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" + } + }, + "entity": { + "sensor": { + "state": { + "name": "State", + "state": { + "available": "Available", + "connected": "Connected", + "need_auth": "Waiting for authentication", + "paused": "Paused", + "charging": "Charging", + "error": "Error", + "updating_firmware": "Updating firmware" + } + }, + "charging_time": { + "name": "Charging time" + }, + "lifetime_energy": { + "name": "Lifetime energy" + }, + "installation_current": { + "name": "Installation current" + }, + "limit_reason": { + "name": "Limit reason", + "state": { + "no_limit": "No limit", + "installation_current": "Installation current", + "user_limit": "User limit", + "dynamic_limit": "Dynamic limit", + "schedule": "Schedule", + "em_offline": "EM offline", + "em": "EM", + "ocpp": "OCPP" + } + }, + "breaker_current": { + "name": "Breaker current" + }, + "voltage_l1": { + "name": "Voltage L1" + }, + "voltage_l2": { + "name": "Voltage L2" + }, + "voltage_l3": { + "name": "Voltage L3" + }, + "current_l1": { + "name": "Current L1" + }, + "current_l2": { + "name": "Current L2" + }, + "current_l3": { + "name": "Current L3" + }, + "power_l1": { + "name": "Power L1" + }, + 
"power_l2": { + "name": "Power L2" + }, + "power_l3": { + "name": "Power L3" + }, + "pf_l1": { + "name": "Power factor L1" + }, + "pf_l2": { + "name": "Power factor L2" + }, + "pf_l3": { + "name": "Power factor L3" + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index ee6658a2515..0ca3335725f 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -315,6 +315,7 @@ FLOWS = { "ld2410_ble", "leaone", "led_ble", + "lektrico", "lg_netcast", "lg_soundbar", "lidarr", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index bb81b6a5b04..2e9199a3b0a 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3211,6 +3211,12 @@ "integration_type": "virtual", "supported_by": "netatmo" }, + "lektrico": { + "name": "Lektrico Charging Station", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "leviton": { "name": "Leviton", "iot_standards": [ diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 3d5b0b4cfa1..36b0da4a9f4 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -527,6 +527,10 @@ ZEROCONF = { "domain": "bosch_shc", "name": "bosch shc*", }, + { + "domain": "lektrico", + "name": "lektrico*", + }, { "domain": "loqed", "name": "loqed*", diff --git a/mypy.ini b/mypy.ini index c7a31d7354c..102ae5c8aa9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2546,6 +2546,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.lektrico.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.lidarr.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 9fd599fba93..42962c759aa 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1254,6 +1254,9 @@ leaone-ble==0.1.0 # homeassistant.components.led_ble led-ble==1.0.2 +# homeassistant.components.lektrico +lektricowifi==0.0.41 + # homeassistant.components.foscam libpyfoscam==1.2.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0acc2a9f916..6ba5ecbea6c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1047,6 +1047,9 @@ leaone-ble==0.1.0 # homeassistant.components.led_ble led-ble==1.0.2 +# homeassistant.components.lektrico +lektricowifi==0.0.41 + # homeassistant.components.foscam libpyfoscam==1.2.2 diff --git a/tests/components/lektrico/__init__.py b/tests/components/lektrico/__init__.py new file mode 100644 index 00000000000..449da2b35c4 --- /dev/null +++ b/tests/components/lektrico/__init__.py @@ -0,0 +1,13 @@ +"""Tests for Lektrico integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/lektrico/conftest.py b/tests/components/lektrico/conftest.py new file mode 100644 index 00000000000..fd840b0c290 --- 
/dev/null +++ b/tests/components/lektrico/conftest.py @@ -0,0 +1,92 @@ +"""Fixtures for Lektrico Charging Station integration tests.""" + +from collections.abc import Generator +from ipaddress import ip_address +import json +from unittest.mock import AsyncMock, patch + +import pytest + +from homeassistant.components.lektrico.const import DOMAIN +from homeassistant.components.zeroconf import ZeroconfServiceInfo +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) + +from tests.common import MockConfigEntry, load_fixture + +MOCKED_DEVICE_IP_ADDRESS = "192.168.100.10" +MOCKED_DEVICE_SERIAL_NUMBER = "500006" +MOCKED_DEVICE_TYPE = "1p7k" +MOCKED_DEVICE_BOARD_REV = "B" + +MOCKED_DEVICE_ZC_NAME = "Lektrico-1p7k-500006._http._tcp" +MOCKED_DEVICE_ZC_TYPE = "_http._tcp.local." +MOCKED_DEVICE_ZEROCONF_DATA = ZeroconfServiceInfo( + ip_address=ip_address(MOCKED_DEVICE_IP_ADDRESS), + ip_addresses=[ip_address(MOCKED_DEVICE_IP_ADDRESS)], + hostname=f"{MOCKED_DEVICE_ZC_NAME.lower()}.local.", + port=80, + type=MOCKED_DEVICE_ZC_TYPE, + name=MOCKED_DEVICE_ZC_NAME, + properties={ + "id": "1p7k_500006", + "fw_id": "20230109-124642/v1.22-36-g56a3edd-develop-dirty", + }, +) + + +@pytest.fixture +def mock_device() -> Generator[AsyncMock]: + """Mock a Lektrico device.""" + with ( + patch( + "homeassistant.components.lektrico.Device", + autospec=True, + ) as mock_device, + patch( + "homeassistant.components.lektrico.config_flow.Device", + new=mock_device, + ), + patch( + "homeassistant.components.lektrico.coordinator.Device", + new=mock_device, + ), + ): + device = mock_device.return_value + + device.device_config.return_value = json.loads( + load_fixture("get_config.json", DOMAIN) + ) + device.device_info.return_value = json.loads( + load_fixture("get_info.json", DOMAIN) + ) + + yield device + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Mock setup entry.""" + with patch( + "homeassistant.components.lektrico.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + CONF_TYPE: MOCKED_DEVICE_TYPE, + ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, + ATTR_HW_VERSION: "B", + }, + unique_id=MOCKED_DEVICE_SERIAL_NUMBER, + ) diff --git a/tests/components/lektrico/fixtures/current_measures.json b/tests/components/lektrico/fixtures/current_measures.json new file mode 100644 index 00000000000..1175b49f63c --- /dev/null +++ b/tests/components/lektrico/fixtures/current_measures.json @@ -0,0 +1,16 @@ +{ + "charger_state": "Available", + "charging_time": 0, + "instant_power": 0, + "session_energy": 0.0, + "temperature": 34.5, + "total_charged_energy": 0, + "install_current": 6, + "current_limit_reason": "Installation current", + "voltage_l1": 220.0, + "current_l1": 0.0, + "type": "1p7k", + "serial_number": "500006", + "board_revision": "B", + "fw_version": "1.44" +} diff --git a/tests/components/lektrico/fixtures/get_config.json b/tests/components/lektrico/fixtures/get_config.json new file mode 100644 index 00000000000..175475004ec --- /dev/null +++ b/tests/components/lektrico/fixtures/get_config.json @@ -0,0 +1,5 @@ +{ + "type": "1p7k", + "serial_number": "500006", + "board_revision": "B" +} diff --git a/tests/components/lektrico/fixtures/get_info.json b/tests/components/lektrico/fixtures/get_info.json new file mode 100644 index 
00000000000..a8f2a56b8d8 --- /dev/null +++ b/tests/components/lektrico/fixtures/get_info.json @@ -0,0 +1,13 @@ +{ + "charger_state": "available", + "charging_time": 0, + "instant_power": 0, + "session_energy": 0.0, + "temperature": 34.5, + "total_charged_energy": 0, + "install_current": 6, + "current_limit_reason": "installation_current", + "voltage_l1": 220.0, + "current_l1": 0.0, + "fw_version": "1.44" +} diff --git a/tests/components/lektrico/snapshots/test_init.ambr b/tests/components/lektrico/snapshots/test_init.ambr new file mode 100644 index 00000000000..63739e1c9d8 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': None, + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': 'B', + 'id': , + 'identifiers': set({ + tuple( + 'lektrico', + '500006', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Lektrico', + 'model': '1P7K', + 'model_id': None, + 'name': '1p7k_500006', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': '500006', + 'suggested_area': None, + 'sw_version': '1.44', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/lektrico/snapshots/test_sensor.ambr b/tests/components/lektrico/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..7df5df70218 --- /dev/null +++ b/tests/components/lektrico/snapshots/test_sensor.ambr @@ -0,0 +1,534 @@ +# serializer version: 1 +# name: test_all_entities[sensor.1p7k_500006_charging_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_charging_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Charging time', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'charging_time', + 'unique_id': '500006_charging_time', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_charging_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': '1p7k_500006 Charging time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_charging_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Current', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_current', + 'unit_of_measurement': , + }) +# --- +# name: 
test_all_entities[sensor.1p7k_500006_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': '1p7k_500006 Current', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_energy', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': '1p7k_500006 Energy', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_installation_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_installation_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Installation current', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'installation_current', + 'unique_id': '500006_installation_current', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_installation_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': '1p7k_500006 Installation current', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_installation_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_lifetime_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_lifetime_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lifetime energy', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_energy', + 'unique_id': '500006_lifetime_energy', + 'unit_of_measurement': , + }) +# --- +# name: 
test_all_entities[sensor.1p7k_500006_lifetime_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': '1p7k_500006 Lifetime energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_lifetime_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_limit_reason-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_limit', + 'installation_current', + 'user_limit', + 'dynamic_limit', + 'schedule', + 'em_offline', + 'em', + 'ocpp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_limit_reason', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Limit reason', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'limit_reason', + 'unique_id': '500006_limit_reason', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_limit_reason-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': '1p7k_500006 Limit reason', + 'options': list([ + 'no_limit', + 'installation_current', + 'user_limit', + 'dynamic_limit', + 'schedule', + 'em_offline', + 'em', + 'ocpp', + ]), + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_limit_reason', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'installation_current', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': '1p7k_500006 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0000', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'available', + 'connected', + 'need_auth', + 'paused', + 'charging', + 'error', + 'updating_firmware', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_state', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'state', + 'unique_id': '500006_state', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': '1p7k_500006 State', + 'options': list([ + 'available', + 'connected', + 'need_auth', + 'paused', + 'charging', + 'error', + 'updating_firmware', + ]), + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'available', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': '1p7k_500006 Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '34.5', + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_voltage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.1p7k_500006_voltage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Voltage', + 'platform': 'lektrico', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '500006_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.1p7k_500006_voltage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': '1p7k_500006 Voltage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.1p7k_500006_voltage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '220.0', + }) +# --- diff --git a/tests/components/lektrico/test_config_flow.py b/tests/components/lektrico/test_config_flow.py new file mode 100644 index 00000000000..15ab5f7cdda --- /dev/null +++ b/tests/components/lektrico/test_config_flow.py @@ -0,0 +1,173 @@ +"""Tests for the Lektrico Charging Station config flow.""" + +import dataclasses +from ipaddress import ip_address + +from 
lektricowifi import DeviceConnectionError + +from homeassistant.components.lektrico.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import ( + ATTR_HW_VERSION, + ATTR_SERIAL_NUMBER, + CONF_HOST, + CONF_TYPE, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import ( + MOCKED_DEVICE_BOARD_REV, + MOCKED_DEVICE_IP_ADDRESS, + MOCKED_DEVICE_SERIAL_NUMBER, + MOCKED_DEVICE_TYPE, + MOCKED_DEVICE_ZEROCONF_DATA, +) + +from tests.common import MockConfigEntry + + +async def test_user_setup(hass: HomeAssistant, mock_device, mock_setup_entry) -> None: + """Test manually setting up.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == SOURCE_USER + assert "flow_id" in result + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result.get("type") is FlowResultType.CREATE_ENTRY + assert result.get("title") == f"{MOCKED_DEVICE_TYPE}_{MOCKED_DEVICE_SERIAL_NUMBER}" + assert result.get("data") == { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, + CONF_TYPE: MOCKED_DEVICE_TYPE, + ATTR_HW_VERSION: MOCKED_DEVICE_BOARD_REV, + } + assert "result" in result + assert len(mock_setup_entry.mock_calls) == 1 + assert result.get("result").unique_id == MOCKED_DEVICE_SERIAL_NUMBER + + +async def test_user_setup_already_exists( + hass: HomeAssistant, mock_device, mock_config_entry: MockConfigEntry +) -> None: + """Test manually setting up when the device already exists.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_user_setup_device_offline(hass: HomeAssistant, mock_device) -> None: + """Test manually setting up when device is offline.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + + mock_device.device_config.side_effect = DeviceConnectionError + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_HOST: "cannot_connect"} + assert result["step_id"] == "user" + + mock_device.device_config.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_discovered_zeroconf( + hass: HomeAssistant, mock_device, mock_setup_entry +) -> None: + """Test we can setup when discovered from zeroconf.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=MOCKED_DEVICE_ZEROCONF_DATA, + ) + assert result["type"] is 
FlowResultType.FORM + assert result["errors"] is None + assert result.get("step_id") == "confirm" + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + CONF_HOST: MOCKED_DEVICE_IP_ADDRESS, + ATTR_SERIAL_NUMBER: MOCKED_DEVICE_SERIAL_NUMBER, + CONF_TYPE: MOCKED_DEVICE_TYPE, + ATTR_HW_VERSION: MOCKED_DEVICE_BOARD_REV, + } + assert result2["title"] == f"{MOCKED_DEVICE_TYPE}_{MOCKED_DEVICE_SERIAL_NUMBER}" + + +async def test_zeroconf_setup_already_exists( + hass: HomeAssistant, mock_device, mock_config_entry: MockConfigEntry +) -> None: + """Test we abort zeroconf flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + zc_data_new_ip = dataclasses.replace(MOCKED_DEVICE_ZEROCONF_DATA) + zc_data_new_ip.ip_address = ip_address(MOCKED_DEVICE_IP_ADDRESS) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=zc_data_new_ip, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_discovered_zeroconf_device_connection_error( + hass: HomeAssistant, mock_device +) -> None: + """Test we can setup when discovered from zeroconf but device went offline.""" + + mock_device.device_config.side_effect = DeviceConnectionError + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=MOCKED_DEVICE_ZEROCONF_DATA, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" diff --git a/tests/components/lektrico/test_init.py b/tests/components/lektrico/test_init.py new file mode 100644 index 00000000000..93068ffe531 --- /dev/null +++ b/tests/components/lektrico/test_init.py @@ -0,0 +1,29 @@ +"""Tests for the Lektrico integration.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.lektrico.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/lektrico/test_sensor.py b/tests/components/lektrico/test_sensor.py new file mode 100644 index 00000000000..756f149d3ad --- /dev/null +++ b/tests/components/lektrico/test_sensor.py @@ -0,0 +1,31 @@ +"""Tests for the Lektrico sensor platform.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_device: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.lektrico.CHARGERS_PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) From 7f405686d13b8ac995a8f8676e6ecfe0ae7ae7af Mon Sep 17 00:00:00 2001 From: shapournemati-iotty <130070037+shapournemati-iotty@users.noreply.github.com> Date: Fri, 30 Aug 2024 13:30:56 +0200 Subject: [PATCH 1248/1635] Add shapournemati to iotty codeowners (#123649) * add shapournemati to codeowners for improved support * update codeowners with hassfest script * update codeowners with hassfest script --- CODEOWNERS | 4 ++-- homeassistant/components/iotty/manifest.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index 97a1a1e49a1..c31056089de 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -709,8 +709,8 @@ build.json @home-assistant/supervisor /tests/components/ios/ @robbiet480 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard /tests/components/iotawatt/ @gtdiehl @jyavenard -/homeassistant/components/iotty/ @pburgio -/tests/components/iotty/ @pburgio +/homeassistant/components/iotty/ @pburgio @shapournemati-iotty +/tests/components/iotty/ @pburgio @shapournemati-iotty /homeassistant/components/iperf3/ @rohankapoorcom /homeassistant/components/ipma/ @dgomes /tests/components/ipma/ @dgomes diff --git a/homeassistant/components/iotty/manifest.json b/homeassistant/components/iotty/manifest.json index 87aa49799b2..66baddc6b47 100644 --- a/homeassistant/components/iotty/manifest.json +++ b/homeassistant/components/iotty/manifest.json @@ -1,7 +1,7 @@ { "domain": "iotty", "name": "iotty", - "codeowners": ["@pburgio"], + "codeowners": ["@pburgio", "@shapournemati-iotty"], "config_flow": true, "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/iotty", From 32babd39589bbfba8baa2df872c418e1e7f2427c Mon Sep 17 00:00:00 2001 From: Jeef Date: Fri, 30 Aug 2024 05:32:07 -0600 Subject: [PATCH 1249/1635] Clean up Weatherflow Cloud (#124643) cleanup --- homeassistant/components/weatherflow_cloud/sensor.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py index 1c7fa5fb377..aeab955878f 100644 --- a/homeassistant/components/weatherflow_cloud/sensor.py +++ b/homeassistant/components/weatherflow_cloud/sensor.py @@ -180,11 +180,9 @@ async def async_setup_entry( entry.entry_id ] - stations = coordinator.data.keys() - async_add_entities( WeatherFlowCloudSensor(coordinator, sensor_description, station_id) - for station_id in stations + for station_id in coordinator.data for sensor_description in WF_SENSORS ) From c9335598db008f551ad913697fd9306f5883fc01 Mon Sep 17 00:00:00 2001 From: tdfountain <174762217+tdfountain@users.noreply.github.com> Date: Fri, 30 Aug 2024 05:32:32 -0700 Subject: [PATCH 1250/1635] Alphabetize keys list for nut sensor icons (#124188) Alphabetize keys list for sensor icons --- homeassistant/components/nut/icons.json | 126 ++++++++++++------------ 1 file changed, 
63 insertions(+), 63 deletions(-) diff --git a/homeassistant/components/nut/icons.json b/homeassistant/components/nut/icons.json index a4125d8633f..e0f78d6400b 100644 --- a/homeassistant/components/nut/icons.json +++ b/homeassistant/components/nut/icons.json @@ -1,59 +1,11 @@ { "entity": { "sensor": { - "ups_status_display": { + "battery_alarm_threshold": { "default": "mdi:information-outline" }, - "ups_status": { - "default": "mdi:information-outline" - }, - "ups_alarm": { - "default": "mdi:alarm" - }, - "ups_load": { - "default": "mdi:gauge" - }, - "ups_load_high": { - "default": "mdi:gauge" - }, - "ups_id": { - "default": "mdi:information-outline" - }, - "ups_test_result": { - "default": "mdi:information-outline" - }, - "ups_test_date": { - "default": "mdi:calendar" - }, - "ups_display_language": { - "default": "mdi:information-outline" - }, - "ups_contacts": { - "default": "mdi:information-outline" - }, - "ups_efficiency": { - "default": "mdi:gauge" - }, - "ups_beeper_status": { - "default": "mdi:information-outline" - }, - "ups_type": { - "default": "mdi:information-outline" - }, - "ups_watchdog_status": { - "default": "mdi:information-outline" - }, - "ups_start_auto": { - "default": "mdi:information-outline" - }, - "ups_start_battery": { - "default": "mdi:information-outline" - }, - "ups_start_reboot": { - "default": "mdi:information-outline" - }, - "ups_shutdown": { - "default": "mdi:information-outline" + "battery_capacity": { + "default": "mdi:flash" }, "battery_charge_low": { "default": "mdi:gauge" @@ -67,12 +19,6 @@ "battery_charger_status": { "default": "mdi:information-outline" }, - "battery_capacity": { - "default": "mdi:flash" - }, - "battery_alarm_threshold": { - "default": "mdi:information-outline" - }, "battery_date": { "default": "mdi:calendar" }, @@ -88,19 +34,19 @@ "battery_type": { "default": "mdi:information-outline" }, - "input_sensitivity": { - "default": "mdi:information-outline" - }, - "input_transfer_reason": { + "input_bypass_phases": { "default": "mdi:information-outline" }, "input_frequency_status": { "default": "mdi:information-outline" }, - "input_bypass_phases": { + "input_phases": { "default": "mdi:information-outline" }, - "input_phases": { + "input_sensitivity": { + "default": "mdi:information-outline" + }, + "input_transfer_reason": { "default": "mdi:information-outline" }, "output_l1_power_percent": { @@ -114,6 +60,60 @@ }, "output_phases": { "default": "mdi:information-outline" + }, + "ups_alarm": { + "default": "mdi:alarm" + }, + "ups_beeper_status": { + "default": "mdi:information-outline" + }, + "ups_contacts": { + "default": "mdi:information-outline" + }, + "ups_display_language": { + "default": "mdi:information-outline" + }, + "ups_efficiency": { + "default": "mdi:gauge" + }, + "ups_id": { + "default": "mdi:information-outline" + }, + "ups_load": { + "default": "mdi:gauge" + }, + "ups_load_high": { + "default": "mdi:gauge" + }, + "ups_shutdown": { + "default": "mdi:information-outline" + }, + "ups_start_auto": { + "default": "mdi:information-outline" + }, + "ups_start_battery": { + "default": "mdi:information-outline" + }, + "ups_start_reboot": { + "default": "mdi:information-outline" + }, + "ups_status": { + "default": "mdi:information-outline" + }, + "ups_status_display": { + "default": "mdi:information-outline" + }, + "ups_test_date": { + "default": "mdi:calendar" + }, + "ups_test_result": { + "default": "mdi:information-outline" + }, + "ups_type": { + "default": "mdi:information-outline" + }, + "ups_watchdog_status": { + "default": 
"mdi:information-outline" } } } From 928ff7c78c657ad690a4b9a76cd9147acf45802d Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 30 Aug 2024 14:32:57 +0200 Subject: [PATCH 1251/1635] Add 100% coverage of Reolink sensor platform (#124472) * Add 100% sensor test coverage * use DOMAIN instead of const.DOMAIN * snake_case * better split tests * styling * Use entity_registry_enabled_by_default fixture --- tests/components/reolink/test_sensor.py | 62 +++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 tests/components/reolink/test_sensor.py diff --git a/tests/components/reolink/test_sensor.py b/tests/components/reolink/test_sensor.py new file mode 100644 index 00000000000..df164634355 --- /dev/null +++ b/tests/components/reolink/test_sensor.py @@ -0,0 +1,62 @@ +"""Test the Reolink sensor platform.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.core import HomeAssistant + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test sensor entities.""" + reolink_connect.ptz_pan_position.return_value = 1200 + reolink_connect.wifi_connection = True + reolink_connect.wifi_signal = 3 + reolink_connect.hdd_list = [0] + reolink_connect.hdd_storage.return_value = 95 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_ptz_pan_position" + assert hass.states.get(entity_id).state == "1200" + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_wi_fi_signal" + assert hass.states.get(entity_id).state == "3" + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_sd_0_storage" + assert hass.states.get(entity_id).state == "95" + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_hdd_sensors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test hdd sensor entity.""" + reolink_connect.hdd_list = [0] + reolink_connect.hdd_type.return_value = "HDD" + reolink_connect.hdd_storage.return_value = 85 + reolink_connect.hdd_available.return_value = False + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SENSOR}.{TEST_NVR_NAME}_hdd_0_storage" + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE From b6dc410464048b65f5ee1f52d3e12bac71f58163 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 30 Aug 2024 14:34:17 +0200 Subject: [PATCH 1252/1635] Add 100% coverage of Reolink light platform (#124382) * Add 100% light test coverage * review comments * fix * use STATE_ON * split tests --- homeassistant/components/reolink/light.py | 3 +- tests/components/reolink/test_light.py | 146 ++++++++++++++++++++++ 2 files changed, 147 insertions(+), 2 deletions(-) create mode 100644 tests/components/reolink/test_light.py diff --git a/homeassistant/components/reolink/light.py 
b/homeassistant/components/reolink/light.py index 877bf80080b..fe34cccc0c4 100644 --- a/homeassistant/components/reolink/light.py +++ b/homeassistant/components/reolink/light.py @@ -108,8 +108,7 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity): @property def brightness(self) -> int | None: """Return the brightness of this light between 0.255.""" - if self.entity_description.get_brightness_fn is None: - return None + assert self.entity_description.get_brightness_fn is not None bright_pct = self.entity_description.get_brightness_fn( self._host.api, self._channel diff --git a/tests/components/reolink/test_light.py b/tests/components/reolink/test_light.py new file mode 100644 index 00000000000..c495a0ff25e --- /dev/null +++ b/tests/components/reolink/test_light.py @@ -0,0 +1,146 @@ +"""Test the Reolink light platform.""" + +from unittest.mock import MagicMock, call, patch + +import pytest +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_ON, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_light_state( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light entity state with floodlight.""" + reolink_connect.whiteled_state.return_value = True + reolink_connect.whiteled_brightness.return_value = 100 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes["brightness"] == 255 + + +async def test_light_brightness_none( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light entity with floodlight and brightness returning None.""" + reolink_connect.whiteled_state.return_value = True + reolink_connect.whiteled_brightness.return_value = None + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + assert state.attributes["brightness"] is None + + +async def test_light_turn_off( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light turn off service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + 
reolink_connect.set_whiteled.assert_called_with(0, state=False) + + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + +async def test_light_turn_on( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test light turn on service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.LIGHT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.LIGHT}.{TEST_NVR_NAME}_floodlight" + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, + blocking=True, + ) + reolink_connect.set_whiteled.assert_has_calls( + [call(0, brightness=20), call(0, state=True)] + ) + + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, + blocking=True, + ) + + reolink_connect.set_whiteled.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 51}, + blocking=True, + ) From 6589216ed35dc4f0b3f38196981bf2030faba2f4 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 30 Aug 2024 14:34:49 +0200 Subject: [PATCH 1253/1635] Add 100% coverage of Reolink camera platform (#124381) * Add 100% camera test coverage * review comments * use DOMAIN instead of const.DOMAIN * use entity_registry_enabled_by_default fixture * fixes --- tests/components/reolink/conftest.py | 1 + .../components/reolink/test_binary_sensor.py | 4 +- tests/components/reolink/test_camera.py | 63 +++++++++++++++++++ 3 files changed, 66 insertions(+), 2 deletions(-) create mode 100644 tests/components/reolink/test_camera.py diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index ddea36cb292..ed6ff4c4ec7 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -38,6 +38,7 @@ TEST_USE_HTTPS = True TEST_HOST_MODEL = "RLN8-410" TEST_ITEM_NUMBER = "P000" TEST_CAM_MODEL = "RLC-123" +TEST_DUO_MODEL = "Reolink Duo PoE" @pytest.fixture diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py index e02742afe1d..0872c3ab3b2 100644 --- a/tests/components/reolink/test_binary_sensor.py +++ b/tests/components/reolink/test_binary_sensor.py @@ -10,7 +10,7 @@ from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import TEST_NVR_NAME, TEST_UID +from .conftest import TEST_DUO_MODEL, TEST_NVR_NAME, TEST_UID from tests.common import MockConfigEntry, async_fire_time_changed from tests.typing import ClientSessionGenerator @@ -25,7 +25,7 @@ async def test_motion_sensor( entity_registry: er.EntityRegistry, ) -> None: 
"""Test binary sensor entity with motion sensor.""" - reolink_connect.model = "Reolink Duo PoE" + reolink_connect.model = TEST_DUO_MODEL reolink_connect.motion_detected.return_value = True with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): assert await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/reolink/test_camera.py b/tests/components/reolink/test_camera.py new file mode 100644 index 00000000000..96bb5a099c9 --- /dev/null +++ b/tests/components/reolink/test_camera.py @@ -0,0 +1,63 @@ +"""Test the Reolink camera platform.""" + +from unittest.mock import MagicMock, patch + +import pytest +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.camera import async_get_image, async_get_stream_source +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_IDLE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_DUO_MODEL, TEST_NVR_NAME + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_camera( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test camera entity with fluent.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.CAMERA}.{TEST_NVR_NAME}_fluent" + assert hass.states.get(entity_id).state == STATE_IDLE + + # check getting a image from the camera + reolink_connect.get_snapshot.return_value = b"image" + assert (await async_get_image(hass, entity_id)).content == b"image" + + reolink_connect.get_snapshot.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await async_get_image(hass, entity_id) + + # check getting the stream source + assert await async_get_stream_source(hass, entity_id) is not None + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_camera_no_stream_source( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test camera entity with no stream source.""" + reolink_connect.model = TEST_DUO_MODEL + reolink_connect.get_stream_source.return_value = None + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.CAMERA}.{TEST_NVR_NAME}_snapshots_fluent_lens_0" + assert hass.states.get(entity_id).state == STATE_IDLE From a5bacf5652ce9b9ae0cdcf54e32795270cbaf8f0 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 30 Aug 2024 14:39:12 +0200 Subject: [PATCH 1254/1635] Add 100% coverage of Reolink switch platform (#124482) * Add 100% switch test coverage * use DOMAIN instead of const.DOMAIN * Split tests and use parametrize * Revert "Split tests and use parametrize" This reverts commit 50d2184ce67b1ac95bd1517cb4963707f9c7954a. 
* fixes --- tests/components/reolink/conftest.py | 1 + tests/components/reolink/test_switch.py | 228 ++++++++++++++++++++++-- 2 files changed, 215 insertions(+), 14 deletions(-) diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index ed6ff4c4ec7..be87aac9291 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -33,6 +33,7 @@ TEST_UID = "ABC1234567D89EFG" TEST_UID_CAM = "DEF7654321D89GHT" TEST_PORT = 1234 TEST_NVR_NAME = "test_reolink_name" +TEST_CAM_NAME = "test_reolink_cam" TEST_NVR_NAME2 = "test2_reolink_name" TEST_USE_HTTPS = True TEST_HOST_MODEL = "RLN8-410" diff --git a/tests/components/reolink/test_switch.py b/tests/components/reolink/test_switch.py index ebf805b593d..7f8d606555d 100644 --- a/tests/components/reolink/test_switch.py +++ b/tests/components/reolink/test_switch.py @@ -1,15 +1,31 @@ """Test the Reolink switch platform.""" -from unittest.mock import MagicMock, patch +from unittest.mock import AsyncMock, MagicMock, patch -from homeassistant.components.reolink import const -from homeassistant.const import Platform +from freezegun.api import FrozenDateTimeFactory +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_OFF, + STATE_ON, + Platform, +) from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import entity_registry as er, issue_registry as ir -from .conftest import TEST_UID +from .conftest import TEST_CAM_NAME, TEST_NVR_NAME, TEST_UID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed async def test_cleanup_hdr_switch_( @@ -27,23 +43,21 @@ async def test_cleanup_hdr_switch_( entity_registry.async_get_or_create( domain=domain, - platform=const.DOMAIN, + platform=DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, disabled_by=er.RegistryEntryDisabler.USER, ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) # setup CH 0 and host entities/device with patch("homeassistant.components.reolink.PLATFORMS", [domain]): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None - ) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None async def test_hdr_switch_deprecated_repair_issue( @@ -62,20 +76,206 @@ async def test_hdr_switch_deprecated_repair_issue( entity_registry.async_get_or_create( domain=domain, - platform=const.DOMAIN, + platform=DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, disabled_by=None, ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) # setup CH 0 and host entities/device with patch("homeassistant.components.reolink.PLATFORMS", [domain]): assert await hass.config_entries.async_setup(config_entry.entry_id) 
await hass.async_block_till_done() - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) - assert (const.DOMAIN, "hdr_switch_deprecated") in issue_registry.issues + assert (DOMAIN, "hdr_switch_deprecated") in issue_registry.issues + + +async def test_switch( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, +) -> None: + """Test switch entity.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SWITCH}.{TEST_CAM_NAME}_record" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.recording_enabled.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test switch turn on + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_recording.assert_called_with(0, True) + + reolink_connect.set_recording.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # test switch turn off + reolink_connect.set_recording.side_effect = None + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_recording.assert_called_with(0, False) + + reolink_connect.set_recording.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + +async def test_host_switch( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, +) -> None: + """Test host switch entity.""" + reolink_connect.camera_name.return_value = TEST_CAM_NAME + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SWITCH}.{TEST_NVR_NAME}_record" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.recording_enabled.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test switch turn on + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_recording.assert_called_with(None, True) + + reolink_connect.set_recording.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # test switch turn off + reolink_connect.set_recording.side_effect = None + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + 
{ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_recording.assert_called_with(None, False) + + reolink_connect.set_recording.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + +async def test_chime_switch( + hass: HomeAssistant, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, + test_chime: Chime, +) -> None: + """Test host switch entity.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SWITCH}.test_chime_led" + assert hass.states.get(entity_id).state == STATE_ON + + test_chime.led_state = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test switch turn on + test_chime.set_option = AsyncMock() + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + test_chime.set_option.assert_called_with(led=True) + + test_chime.set_option.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + # test switch turn off + test_chime.set_option.side_effect = None + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + test_chime.set_option.assert_called_with(led=False) + + test_chime.set_option.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) From c47b37af4fe963f78d2f6125ae0f0b598227b5fa Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 30 Aug 2024 14:40:28 +0200 Subject: [PATCH 1255/1635] Use snapshot in Axis camera tests (#122677) --- .../axis/snapshots/test_camera.ambr | 101 ++++++++++++++++++ tests/components/axis/test_camera.py | 90 ++++++++-------- 2 files changed, 149 insertions(+), 42 deletions(-) create mode 100644 tests/components/axis/snapshots/test_camera.ambr diff --git a/tests/components/axis/snapshots/test_camera.ambr b/tests/components/axis/snapshots/test_camera.ambr new file mode 100644 index 00000000000..564ff96b3d8 --- /dev/null +++ b/tests/components/axis/snapshots/test_camera.ambr @@ -0,0 +1,101 @@ +# serializer version: 1 +# name: test_camera[config_entry_options0-][camera.home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-camera', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_camera[config_entry_options0-][camera.home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1', + 'entity_picture': '/api/camera_proxy/camera.home?token=1', + 'friendly_name': 'home', + 'frontend_stream_type': , + 'supported_features': , + }), + 'context': , + 'entity_id': 'camera.home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_camera[config_entry_options1-streamprofile=profile_1][camera.home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'camera', + 'entity_category': None, + 'entity_id': 'camera.home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'axis', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:40:8c:12:34:56-camera', + 'unit_of_measurement': None, + }) +# --- +# name: test_camera[config_entry_options1-streamprofile=profile_1][camera.home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'access_token': '1', + 'entity_picture': '/api/camera_proxy/camera.home?token=1', + 'friendly_name': 'home', + 'frontend_stream_type': , + 'supported_features': , + }), + 'context': , + 'entity_id': 'camera.home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- diff --git a/tests/components/axis/test_camera.py b/tests/components/axis/test_camera.py index 00fe4391b0c..91e24a8c0c0 100644 --- a/tests/components/axis/test_camera.py +++ b/tests/components/axis/test_camera.py @@ -1,58 +1,31 @@ """Axis camera platform tests.""" +from unittest.mock import patch + import pytest +from syrupy import SnapshotAssertion from homeassistant.components import camera from homeassistant.components.axis.const import CONF_STREAM_PROFILE from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN -from homeassistant.const import STATE_IDLE +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from .conftest import ConfigEntryFactoryType from .const import MAC, NAME - -@pytest.mark.usefixtures("config_entry_setup") -async def test_camera(hass: HomeAssistant) -> None: - """Test that Axis camera platform is loaded properly.""" - assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 - - entity_id = f"{CAMERA_DOMAIN}.{NAME}" - - cam = hass.states.get(entity_id) - assert cam.state == STATE_IDLE - assert cam.name == NAME - - camera_entity = camera._get_camera_from_entity_id(hass, entity_id) - assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" - assert camera_entity.mjpeg_source == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi" - assert ( - await camera_entity.stream_source() - == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264" - ) +from tests.common import snapshot_platform -@pytest.mark.parametrize("config_entry_options", [{CONF_STREAM_PROFILE: "profile_1"}]) -@pytest.mark.usefixtures("config_entry_setup") -async def test_camera_with_stream_profile(hass: HomeAssistant) -> None: - """Test that Axis camera entity is using the correct path with stream profike.""" - assert len(hass.states.async_entity_ids(CAMERA_DOMAIN)) == 1 - - entity_id = 
f"{CAMERA_DOMAIN}.{NAME}" - - cam = hass.states.get(entity_id) - assert cam.state == STATE_IDLE - assert cam.name == NAME - - camera_entity = camera._get_camera_from_entity_id(hass, entity_id) - assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" - assert ( - camera_entity.mjpeg_source - == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi?streamprofile=profile_1" - ) - assert ( - await camera_entity.stream_source() - == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264&streamprofile=profile_1" - ) +@pytest.fixture(autouse=True) +def mock_getrandbits(): + """Mock camera access token which normally is randomized.""" + with patch( + "homeassistant.components.camera.SystemRandom.getrandbits", + return_value=1, + ): + yield PROPERTY_DATA = f"""root.Properties.API.HTTP.Version=3 @@ -66,6 +39,39 @@ root.Properties.System.SerialNumber={MAC} """ # No image format data to signal camera support +@pytest.mark.parametrize( + ("config_entry_options", "stream_profile"), + [ + ({}, ""), + ({CONF_STREAM_PROFILE: "profile_1"}, "streamprofile=profile_1"), + ], +) +async def test_camera( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + snapshot: SnapshotAssertion, + stream_profile: str, +) -> None: + """Test that Axis camera platform is loaded properly.""" + with patch("homeassistant.components.deconz.PLATFORMS", [Platform.CAMERA]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + entity_id = f"{CAMERA_DOMAIN}.{NAME}" + camera_entity = camera._get_camera_from_entity_id(hass, entity_id) + assert camera_entity.image_source == "http://1.2.3.4:80/axis-cgi/jpg/image.cgi" + assert ( + camera_entity.mjpeg_source == "http://1.2.3.4:80/axis-cgi/mjpg/video.cgi" + f"{"" if not stream_profile else f"?{stream_profile}"}" + ) + assert ( + await camera_entity.stream_source() + == "rtsp://root:pass@1.2.3.4/axis-media/media.amp?videocodec=h264" + f"{"" if not stream_profile else f"&{stream_profile}"}" + ) + + @pytest.mark.parametrize("param_properties_payload", [PROPERTY_DATA]) @pytest.mark.usefixtures("config_entry_setup") async def test_camera_disabled(hass: HomeAssistant) -> None: From 6467c8d6111c76fd1fcf8ec4443d3a5c19a76a56 Mon Sep 17 00:00:00 2001 From: puddly <32534428+puddly@users.noreply.github.com> Date: Fri, 30 Aug 2024 08:48:09 -0400 Subject: [PATCH 1256/1635] Bump ZHA to 0.0.32 (#124804) * Always prefer XY color mode in ZHA Remove a few more HS remnants * Use new ZHA OTA format * Bump ZHA to 0.0.32 * Fix existing OTA unit tests * Fix schema conversion test to account for new command parameters * Update snapshot with new `zcl_type` kwarg * Migrate existing entities to icon translations * Remove "no longer compatible" test * Test that the library release summary is correctly exposed to ZHA * Revert "Always prefer XY color mode in ZHA" This reverts commit 8fb7789ea8ddb6ed2a287aed5010374c0452f6c9. 
* Test `release_notes`, not `release_summary` --- homeassistant/components/zha/icons.json | 39 ++++++ homeassistant/components/zha/manifest.json | 2 +- homeassistant/components/zha/update.py | 11 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../zha/snapshots/test_diagnostics.ambr | 18 +-- tests/components/zha/test_helpers.py | 10 +- tests/components/zha/test_update.py | 121 ++++++++---------- 8 files changed, 117 insertions(+), 88 deletions(-) diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 65ad029a66d..9d5254fe237 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -86,6 +86,18 @@ }, "presence_detection_timeout": { "default": "mdi:timer-edit" + }, + "exercise_trigger_time": { + "default": "mdi:clock" + }, + "external_temperature_sensor": { + "default": "mdi:thermometer" + }, + "load_room_mean": { + "default": "mdi:scale-balance" + }, + "regulation_setpoint_offset": { + "default": "mdi:thermostat" } }, "select": { @@ -94,6 +106,9 @@ }, "keypad_lockout": { "default": "mdi:lock" + }, + "exercise_day_of_week": { + "default": "mdi:wrench-clock" } }, "sensor": { @@ -132,6 +147,15 @@ }, "hooks_state": { "default": "mdi:hook" + }, + "open_window_detected": { + "default": "mdi:window-open" + }, + "load_estimate": { + "default": "mdi:scale-balance" + }, + "preheat_time": { + "default": "mdi:radiator" } }, "switch": { @@ -158,6 +182,21 @@ }, "hooks_locked": { "default": "mdi:lock" + }, + "external_window_sensor": { + "default": "mdi:window-open" + }, + "use_internal_window_detection": { + "default": "mdi:window-open" + }, + "prioritize_external_temperature_sensor": { + "default": "mdi:thermometer" + }, + "heat_available": { + "default": "mdi:water-boiler" + }, + "use_load_balancing": { + "default": "mdi:scale-balance" } } }, diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index a5e57fcb1ec..df60829a1e2 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.31"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.32"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index e12d048b190..3a857f9d89b 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -95,6 +95,7 @@ class ZHAFirmwareUpdateEntity( UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS | UpdateEntityFeature.SPECIFIC_VERSION + | UpdateEntityFeature.RELEASE_NOTES ) def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: @@ -143,6 +144,14 @@ class ZHAFirmwareUpdateEntity( """ return self.entity_data.entity.release_summary + async def async_release_notes(self) -> str | None: + """Return full release notes. + + This is suitable for a long changelog that does not fit in the release_summary + property. The returned string can contain markdown. 
+ """ + return self.entity_data.entity.release_notes + @property def release_url(self) -> str | None: """URL to the full release notes of the latest version available.""" @@ -155,7 +164,7 @@ class ZHAFirmwareUpdateEntity( ) -> None: """Install an update.""" try: - await self.entity_data.entity.async_install(version=version, backup=backup) + await self.entity_data.entity.async_install(version=version) except ZHAException as exc: raise HomeAssistantError(exc) from exc finally: diff --git a/requirements_all.txt b/requirements_all.txt index 42962c759aa..1bfe1756173 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3012,7 +3012,7 @@ zeroconf==0.133.0 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.31 +zha==0.0.32 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6ba5ecbea6c..4a63a54d0b8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2389,7 +2389,7 @@ zeroconf==0.133.0 zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.31 +zha==0.0.32 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr index 67655aebc8c..e0da54e2492 100644 --- a/tests/components/zha/snapshots/test_diagnostics.ambr +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -162,19 +162,19 @@ '0x0500': dict({ 'attributes': dict({ '0x0000': dict({ - 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0001': dict({ - 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0002': dict({ - 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0010': dict({ - 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': list([ 50, 79, @@ -187,15 +187,15 @@ ]), }), '0x0011': dict({ - 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0x0012': dict({ - 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", 'value': None, }), '0x0013': dict({ - 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, access=, mandatory=False, 
is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", 'value': None, }), }), @@ -208,11 +208,11 @@ '0x0501': dict({ 'attributes': dict({ '0xfffd': dict({ - 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", 'value': None, }), '0xfffe': dict({ - 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", 'value': None, }), }), diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index 13c03c17cf7..d3392685437 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -60,16 +60,14 @@ async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: "required": True, }, { - "type": "integer", - "valueMin": 0, - "valueMax": 255, + "type": "multi_select", + "options": ["Execute if off present"], "name": "options_mask", "optional": True, }, { - "type": "integer", - "valueMin": 0, - "valueMax": 255, + "type": "multi_select", + "options": ["Execute if off"], "name": "options_override", "optional": True, }, diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index 6a1a19b407f..e2a614915f9 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -3,8 +3,11 @@ from unittest.mock import AsyncMock, call, patch import pytest +from zha.application.platforms.update import ( + FirmwareUpdateEntity as ZhaFirmwareUpdateEntity, +) from zigpy.exceptions import DeliveryError -from zigpy.ota import OtaImageWithMetadata +from zigpy.ota import OtaImagesResult, OtaImageWithMetadata import zigpy.ota.image as firmware from zigpy.ota.providers import BaseOtaImageMetadata from zigpy.profiles import zha @@ -43,6 +46,8 @@ from homeassistant.setup import async_setup_component from .common import find_entity_id, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.typing import WebSocketGenerator + @pytest.fixture(autouse=True) def update_platform_only(): @@ -119,8 +124,11 @@ async def setup_test_data( ), ) - cluster.endpoint.device.application.ota.get_ota_image = AsyncMock( - return_value=None if file_not_found else fw_image + cluster.endpoint.device.application.ota.get_ota_images = AsyncMock( + return_value=OtaImagesResult( + upgrades=() if file_not_found else (fw_image,), + downgrades=(), + ) ) zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) zha_device_proxy.device.async_update_sw_build_id(installed_fw_version) @@ -544,81 +552,56 @@ async def test_firmware_update_raises( ) -async def test_firmware_update_no_longer_compatible( +async def test_update_release_notes( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, setup_zha, zigpy_device_mock, ) -> None: - """Test ZHA update platform - firmware update is no longer valid.""" + """Test ZHA update platform release notes.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - hass, zigpy_device_mock + + 
gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", ) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + zha_lib_entity = next( + e + for e in zha_device.device.platform_entities.values() + if isinstance(e, ZhaFirmwareUpdateEntity) + ) + zha_lib_entity._attr_release_notes = "Some lengthy release notes" + zha_lib_entity.maybe_emit_state_changed_event() + await hass.async_block_till_done() + entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - # simulate an image available notification - await cluster._handle_query_next_image( - foundation.ZCLHeader.cluster( - tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id - ), - general.QueryNextImageCommand( - fw_image.firmware.header.field_control, - zha_device.device.manufacturer_code, - fw_image.firmware.header.image_type, - installed_fw_version, - fw_image.firmware.header.header_version, - ), + ws_client = await hass_ws_client(hass) + await ws_client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": entity_id, + } ) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - attrs = state.attributes - assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] - assert ( - attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" - ) - - new_version = 0x99999999 - - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) - if isinstance(cmd, general.Ota.ImageNotifyCommand): - zha_device.device.device.packet_received( - make_packet( - zha_device.device.device, - cluster, - general.Ota.ServerCommandDefs.query_next_image.name, - field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, - manufacturer_code=fw_image.firmware.header.manufacturer_id, - image_type=fw_image.firmware.header.image_type, - # The device reports that it is no longer compatible! 
- current_file_version=new_version, - hardware_version=1, - ) - ) - - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - { - ATTR_ENTITY_ID: entity_id, - }, - blocking=True, - ) - - # We updated the currently installed firmware version, as it is no longer valid - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - attrs = state.attributes - assert attrs[ATTR_INSTALLED_VERSION] == f"0x{new_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] - assert attrs[ATTR_LATEST_VERSION] == f"0x{new_version:08x}" + result = await ws_client.receive_json() + assert result["success"] is True + assert result["result"] == "Some lengthy release notes" From d7fb245213a846ec9aafab663cd7d2409cc9b211 Mon Sep 17 00:00:00 2001 From: LG-ThinQ-Integration Date: Fri, 30 Aug 2024 22:12:49 +0900 Subject: [PATCH 1257/1635] Add LG ThinQ Integration (#123860) * Add manifest.json * add switch entity * Add tests * fix function's name * adjust the changes after running scipt * Update homeassistant/components/lgthinq/__init__.py Accept the suggested change about format. Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/__init__.py Accept suggested change for log removal Co-authored-by: Franck Nijhof * Delete homeassistant/components/lgthinq/services.yaml * Update homeassistant/components/lgthinq/switch.py Accpet suggested change for log removal Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/strings.json Accept suggested change for service removal Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/manifest.json Accept suggested change for spaces removal Co-authored-by: Franck Nijhof * Delete homeassistant/components/lgthinq/icons.json * Update __init__.py Remove unnecessary check code * Modification to pass ruff-format * Modification for mypy issues * Remove service registry and related code * Update strings.json Modification to pass the prettier issues * Update manifest.json Modification to pass the prettier issues * Update homeassistant/components/lgthinq/__init__.py Remove the unnecessary log. Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/__init__.py Remove unnecessary log. Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/__init__.py Remove unnecessary code. 
Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/__init__.py Co-authored-by: Franck Nijhof * Modifications for the review and related autocheck * Update homeassistant/components/lgthinq/config_flow.py Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/config_flow.py Co-authored-by: Franck Nijhof * Modifications for reviews and autocheck * Modifications for the reviews and autocheck * Update homeassistant/components/lgthinq/const.py Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/const.py Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/const.py Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/device.py Co-authored-by: Franck Nijhof * Update homeassistant/components/lgthinq/device.py Co-authored-by: Franck Nijhof * Remove type definition after Final * Update const.py Do not use Final for DOMAIN * Refactoring for reviews - remove thinq.py - remove type definition - remove entry name in config flow - put config flow steps into a single step * Update tests - remove region * Refactoring for reviews - move property.py into PyPI library - replace error_code handling with try/catch - remove http response handling - remove generic - remove unnecessary class or map instance - refactor adding entities logic * Refactoring - remove unused code - change import path * Update tests * Refactoring for reviews 1. Use coordinator extended class instead of LGDevice 2. Rename entity_helper.py to entity.py 3. Move entity description to each entity file 4. Remove dynamic device creation code * Refactoring for reviews * Update requirements * Fix for reviews * Modify tests for reviews * Update for reviews * Remove property info and description class * Update tests/components/lgthinq/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update tests/components/lgthinq/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/lgthinq/entity.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/lgthinq/switch.py Co-authored-by: Joost Lekkerkerker * Update tests/components/lgthinq/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update for reviews * Update homeassistant/components/lgthinq/switch.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/lgthinq/switch.py Co-authored-by: Joost Lekkerkerker * Update for reviews * Fix ruff issues * Fix ruff check * Fix for reviews * Fix ruff check * Fix for reviews * Fix prettier failure and hassfest failure --------- Co-authored-by: Jangwon Lee Co-authored-by: yunseon.park Co-authored-by: nahyun.lee Co-authored-by: Franck Nijhof Co-authored-by: Joost Lekkerkerker --- CODEOWNERS | 2 + homeassistant/components/lgthinq/__init__.py | 105 +++++++++++++ .../components/lgthinq/config_flow.py | 103 +++++++++++++ homeassistant/components/lgthinq/const.py | 82 ++++++++++ .../components/lgthinq/coordinator.py | 142 ++++++++++++++++++ homeassistant/components/lgthinq/entity.py | 80 ++++++++++ homeassistant/components/lgthinq/icons.json | 9 ++ .../components/lgthinq/manifest.json | 11 ++ homeassistant/components/lgthinq/strings.json | 28 ++++ homeassistant/components/lgthinq/switch.py | 118 +++++++++++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/lgthinq/__init__.py | 1 + tests/components/lgthinq/conftest.py | 86 +++++++++++ tests/components/lgthinq/const.py | 8 + 
tests/components/lgthinq/test_config_flow.py | 66 ++++++++ 18 files changed, 854 insertions(+) create mode 100644 homeassistant/components/lgthinq/__init__.py create mode 100644 homeassistant/components/lgthinq/config_flow.py create mode 100644 homeassistant/components/lgthinq/const.py create mode 100644 homeassistant/components/lgthinq/coordinator.py create mode 100644 homeassistant/components/lgthinq/entity.py create mode 100644 homeassistant/components/lgthinq/icons.json create mode 100644 homeassistant/components/lgthinq/manifest.json create mode 100644 homeassistant/components/lgthinq/strings.json create mode 100644 homeassistant/components/lgthinq/switch.py create mode 100644 tests/components/lgthinq/__init__.py create mode 100644 tests/components/lgthinq/conftest.py create mode 100644 tests/components/lgthinq/const.py create mode 100644 tests/components/lgthinq/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index c31056089de..0ebc49eda50 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -803,6 +803,8 @@ build.json @home-assistant/supervisor /tests/components/lektrico/ @lektrico /homeassistant/components/lg_netcast/ @Drafteed @splinter98 /tests/components/lg_netcast/ @Drafteed @splinter98 +/homeassistant/components/lgthinq/ @LG-ThinQ-Integration +/tests/components/lgthinq/ @LG-ThinQ-Integration /homeassistant/components/lidarr/ @tkdrob /tests/components/lidarr/ @tkdrob /homeassistant/components/lifx/ @Djelibeybi diff --git a/homeassistant/components/lgthinq/__init__.py b/homeassistant/components/lgthinq/__init__.py new file mode 100644 index 00000000000..259d494902e --- /dev/null +++ b/homeassistant/components/lgthinq/__init__.py @@ -0,0 +1,105 @@ +"""Support for LG ThinQ Connect device.""" + +from __future__ import annotations + +import asyncio +import logging + +from thinqconnect import ThinQApi, ThinQAPIException + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY, Platform +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import CONF_CONNECT_CLIENT_ID +from .coordinator import DeviceDataUpdateCoordinator, async_setup_device_coordinator + +type ThinqConfigEntry = ConfigEntry[dict[str, DeviceDataUpdateCoordinator]] + +PLATFORMS = [Platform.SWITCH] + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool: + """Set up an entry.""" + access_token = entry.data[CONF_ACCESS_TOKEN] + client_id = entry.data[CONF_CONNECT_CLIENT_ID] + country_code = entry.data[CONF_COUNTRY] + + thinq_api = ThinQApi( + session=async_get_clientsession(hass), + access_token=access_token, + country_code=country_code, + client_id=client_id, + ) + + # Setup coordinators and register devices. + await async_setup_coordinators(hass, entry, thinq_api) + + # Set up all platforms for this device/entry. + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + # Clean up devices they are no longer in use. + async_cleanup_device_registry(hass, entry) + + return True + + +async def async_setup_coordinators( + hass: HomeAssistant, + entry: ThinqConfigEntry, + thinq_api: ThinQApi, +) -> None: + """Set up coordinators and register devices.""" + entry.runtime_data = {} + + # Get a device list from the server. 
+ try: + device_list = await thinq_api.async_get_device_list() + except ThinQAPIException as exc: + raise ConfigEntryNotReady(exc.message) from exc + + if not device_list: + return + + # Setup coordinator per device. + coordinator_list: list[DeviceDataUpdateCoordinator] = [] + task_list = [ + hass.async_create_task(async_setup_device_coordinator(hass, thinq_api, device)) + for device in device_list + ] + task_result = await asyncio.gather(*task_list) + for coordinators in task_result: + if coordinators: + coordinator_list += coordinators + + for coordinator in coordinator_list: + entry.runtime_data[coordinator.unique_id] = coordinator + + +@callback +def async_cleanup_device_registry(hass: HomeAssistant, entry: ThinqConfigEntry) -> None: + """Clean up device registry.""" + new_device_unique_ids = [ + coordinator.unique_id for coordinator in entry.runtime_data.values() + ] + device_registry = dr.async_get(hass) + existing_entries = dr.async_entries_for_config_entry( + device_registry, entry.entry_id + ) + + # Remove devices that no longer exist. + for old_entry in existing_entries: + old_unique_id = next(iter(old_entry.identifiers))[1] + if old_unique_id not in new_device_unique_ids: + device_registry.async_remove_device(old_entry.id) + _LOGGER.debug("Remove device_registry: device_id=%s", old_entry.id) + + +async def async_unload_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool: + """Unload the entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/lgthinq/config_flow.py b/homeassistant/components/lgthinq/config_flow.py new file mode 100644 index 00000000000..cdb41916688 --- /dev/null +++ b/homeassistant/components/lgthinq/config_flow.py @@ -0,0 +1,103 @@ +"""Config flow for LG ThinQ.""" + +from __future__ import annotations + +import logging +from typing import Any +import uuid + +from thinqconnect import ThinQApi, ThinQAPIException +from thinqconnect.country import Country +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.selector import CountrySelector, CountrySelectorConfig + +from .const import ( + CLIENT_PREFIX, + CONF_CONNECT_CLIENT_ID, + DEFAULT_COUNTRY, + DOMAIN, + THINQ_DEFAULT_NAME, + THINQ_PAT_URL, +) + +SUPPORTED_COUNTRIES = [country.value for country in Country] + +_LOGGER = logging.getLogger(__name__) + + +class ThinQFlowHandler(ConfigFlow, domain=DOMAIN): + """Handle a config flow.""" + + VERSION = 1 + + def _get_default_country_code(self) -> str: + """Get the default country code based on config.""" + country = self.hass.config.country + if country is not None and country in SUPPORTED_COUNTRIES: + return country + + return DEFAULT_COUNTRY + + async def _validate_and_create_entry( + self, access_token: str, country_code: str + ) -> ConfigFlowResult: + """Create an entry for the flow.""" + connect_client_id = f"{CLIENT_PREFIX}-{uuid.uuid4()!s}" + + # To verify PAT, create an api to retrieve the device list. + await ThinQApi( + session=async_get_clientsession(self.hass), + access_token=access_token, + country_code=country_code, + client_id=connect_client_id, + ).async_get_device_list() + + # If verification succeeds, create an entry.
+ return self.async_create_entry( + title=THINQ_DEFAULT_NAME, + data={ + CONF_ACCESS_TOKEN: access_token, + CONF_CONNECT_CLIENT_ID: connect_client_id, + CONF_COUNTRY: country_code, + }, + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + + if user_input is not None: + access_token = user_input[CONF_ACCESS_TOKEN] + country_code = user_input[CONF_COUNTRY] + + # Check if PAT is already configured. + await self.async_set_unique_id(access_token) + self._abort_if_unique_id_configured() + + try: + return await self._validate_and_create_entry(access_token, country_code) + except ThinQAPIException: + errors["base"] = "token_unauthorized" + + return self.async_show_form( + step_id="user", + data_schema=vol.Schema( + { + vol.Required(CONF_ACCESS_TOKEN): cv.string, + vol.Required( + CONF_COUNTRY, default=self._get_default_country_code() + ): CountrySelector( + CountrySelectorConfig(countries=SUPPORTED_COUNTRIES) + ), + } + ), + description_placeholders={"pat_url": THINQ_PAT_URL}, + errors=errors, + ) diff --git a/homeassistant/components/lgthinq/const.py b/homeassistant/components/lgthinq/const.py new file mode 100644 index 00000000000..9b9b162bb06 --- /dev/null +++ b/homeassistant/components/lgthinq/const.py @@ -0,0 +1,82 @@ +"""Constants for LG ThinQ.""" + +# Base component constants. +from typing import Final + +from thinqconnect import ( + AirConditionerDevice, + AirPurifierDevice, + AirPurifierFanDevice, + CeilingFanDevice, + CooktopDevice, + DehumidifierDevice, + DeviceType, + DishWasherDevice, + DryerDevice, + HomeBrewDevice, + HoodDevice, + HumidifierDevice, + KimchiRefrigeratorDevice, + MicrowaveOvenDevice, + OvenDevice, + PlantCultivatorDevice, + RefrigeratorDevice, + RobotCleanerDevice, + StickCleanerDevice, + StylerDevice, + SystemBoilerDevice, + WashcomboMainDevice, + WashcomboMiniDevice, + WasherDevice, + WashtowerDevice, + WashtowerDryerDevice, + WashtowerWasherDevice, + WaterHeaterDevice, + WaterPurifierDevice, + WineCellarDevice, +) + +# Common +DOMAIN = "lgthinq" +COMPANY = "LGE" +THINQ_DEFAULT_NAME: Final = "LG ThinQ" +THINQ_PAT_URL: Final = "https://connect-pat.lgthinq.com" + +# Config Flow +CLIENT_PREFIX: Final = "home-assistant" +CONF_CONNECT_CLIENT_ID: Final = "connect_client_id" +DEFAULT_COUNTRY: Final = "US" + +THINQ_DEVICE_ADDED: Final = "thinq_device_added" + +DEVICE_TYPE_API_MAP: Final = { + DeviceType.AIR_CONDITIONER: AirConditionerDevice, + DeviceType.AIR_PURIFIER_FAN: AirPurifierFanDevice, + DeviceType.AIR_PURIFIER: AirPurifierDevice, + DeviceType.CEILING_FAN: CeilingFanDevice, + DeviceType.COOKTOP: CooktopDevice, + DeviceType.DEHUMIDIFIER: DehumidifierDevice, + DeviceType.DISH_WASHER: DishWasherDevice, + DeviceType.DRYER: DryerDevice, + DeviceType.HOME_BREW: HomeBrewDevice, + DeviceType.HOOD: HoodDevice, + DeviceType.HUMIDIFIER: HumidifierDevice, + DeviceType.KIMCHI_REFRIGERATOR: KimchiRefrigeratorDevice, + DeviceType.MICROWAVE_OVEN: MicrowaveOvenDevice, + DeviceType.OVEN: OvenDevice, + DeviceType.PLANT_CULTIVATOR: PlantCultivatorDevice, + DeviceType.REFRIGERATOR: RefrigeratorDevice, + DeviceType.ROBOT_CLEANER: RobotCleanerDevice, + DeviceType.STICK_CLEANER: StickCleanerDevice, + DeviceType.STYLER: StylerDevice, + DeviceType.SYSTEM_BOILER: SystemBoilerDevice, + DeviceType.WASHER: WasherDevice, + DeviceType.WASHCOMBO_MAIN: WashcomboMainDevice, + DeviceType.WASHCOMBO_MINI: WashcomboMiniDevice, + DeviceType.WASHTOWER_DRYER: 
WashtowerDryerDevice, + DeviceType.WASHTOWER: WashtowerDevice, + DeviceType.WASHTOWER_WASHER: WashtowerWasherDevice, + DeviceType.WATER_HEATER: WaterHeaterDevice, + DeviceType.WATER_PURIFIER: WaterPurifierDevice, + DeviceType.WINE_CELLAR: WineCellarDevice, +} diff --git a/homeassistant/components/lgthinq/coordinator.py b/homeassistant/components/lgthinq/coordinator.py new file mode 100644 index 00000000000..1a23b70d8a7 --- /dev/null +++ b/homeassistant/components/lgthinq/coordinator.py @@ -0,0 +1,142 @@ +"""DataUpdateCoordinator for the LG ThinQ device.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import ConnectBaseDevice, DeviceType, ThinQApi, ThinQAPIException + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DEVICE_TYPE_API_MAP, DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """LG Device's Data Update Coordinator.""" + + def __init__( + self, + hass: HomeAssistant, + device_api: ConnectBaseDevice, + *, + sub_id: str | None = None, + ) -> None: + """Initialize data coordinator.""" + super().__init__( + hass, + _LOGGER, + name=f"{DOMAIN}_{device_api.device_id}", + ) + + # For washTower's washer or dryer + self.sub_id = sub_id + + # The device name is usually set to 'alias'. + # But, if the sub_id exists, it will be set to 'alias {sub_id}'. + # e.g. alias='MyWashTower', sub_id='dryer' then 'MyWashTower dryer'. + self.device_name = ( + f"{device_api.alias} {self.sub_id}" if self.sub_id else device_api.alias + ) + + # The unique id is usually set to 'device_id'. + # But, if the sub_id exists, it will be set to 'device_id_{sub_id}'. + # e.g. device_id='TQSXXXX', sub_id='dryer' then 'TQSXXXX_dryer'. + self.unique_id = ( + f"{device_api.device_id}_{self.sub_id}" + if self.sub_id + else device_api.device_id + ) + + # Get the api instance. + self.device_api = device_api.get_sub_device(self.sub_id) or device_api + + async def _async_update_data(self) -> dict[str, Any]: + """Request to the server to update the status from full response data.""" + try: + data = await self.device_api.thinq_api.async_get_device_status( + self.device_api.device_id + ) + except ThinQAPIException as exc: + raise UpdateFailed(exc) from exc + + # Full response data into the device api. + self.device_api.set_status(data) + return data + + +async def async_setup_device_coordinator( + hass: HomeAssistant, thinq_api: ThinQApi, device: dict[str, Any] +) -> list[DeviceDataUpdateCoordinator] | None: + """Create DeviceDataUpdateCoordinator and device_api per device.""" + device_id = device["deviceId"] + device_info = device["deviceInfo"] + + # Get an appropriate class constructor for the device type. + device_type = device_info.get("deviceType") + constructor = DEVICE_TYPE_API_MAP.get(device_type) + if constructor is None: + _LOGGER.error( + "Failed to setup device(%s): not supported device. type=%s", + device_id, + device_type, + ) + return None + + # Get a device profile from the server. + try: + profile = await thinq_api.async_get_device_profile(device_id) + except ThinQAPIException: + _LOGGER.warning("Failed to setup device(%s): no profile", device_id) + return None + + device_group_id = device_info.get("groupId") + + # Create new device api instance. 
+ device_api: ConnectBaseDevice = ( + constructor( + thinq_api=thinq_api, + device_id=device_id, + device_type=device_type, + model_name=device_info.get("modelName"), + alias=device_info.get("alias"), + group_id=device_group_id, + reportable=device_info.get("reportable"), + profile=profile, + ) + if device_group_id + else constructor( + thinq_api=thinq_api, + device_id=device_id, + device_type=device_type, + model_name=device_info.get("modelName"), + alias=device_info.get("alias"), + reportable=device_info.get("reportable"), + profile=profile, + ) + ) + + # Create a list of sub-devices from the profile. + # Note that some devices may have more than two device profiles. + # In this case we should create multiple lg device instance. + # e.g. 'WashTower-Single-Unit' = 'WashTower{dryer}' + 'WashTower{washer}'. + device_sub_ids = ( + list(profile.keys()) + if device_type == DeviceType.WASHTOWER and "property" not in profile + else [None] + ) + + # Create new device coordinator instances. + coordinator_list: list[DeviceDataUpdateCoordinator] = [] + for sub_id in device_sub_ids: + coordinator = DeviceDataUpdateCoordinator(hass, device_api, sub_id=sub_id) + await coordinator.async_config_entry_first_refresh() + + # Finally add a device coordinator into the result list. + coordinator_list.append(coordinator) + _LOGGER.debug("Setup device's coordinator: %s", coordinator) + + return coordinator_list diff --git a/homeassistant/components/lgthinq/entity.py b/homeassistant/components/lgthinq/entity.py new file mode 100644 index 00000000000..151687aabb8 --- /dev/null +++ b/homeassistant/components/lgthinq/entity.py @@ -0,0 +1,80 @@ +"""Base class for ThinQ entities.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import ThinQAPIException +from thinqconnect.integration.homeassistant.property import Property as ThinQProperty + +from homeassistant.core import callback +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import COMPANY, DOMAIN +from .coordinator import DeviceDataUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) + + +class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): + """The base implementation of all lg thinq entities.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: DeviceDataUpdateCoordinator, + entity_description: EntityDescription, + property: ThinQProperty, + ) -> None: + """Initialize an entity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + self.property = property + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, coordinator.unique_id)}, + manufacturer=COMPANY, + model=coordinator.device_api.model_name, + name=coordinator.device_name, + ) + + # Set the unique key. If there exist a location, add the prefix location name. + unique_key = ( + f"{entity_description.key}" + if property.location is None + else f"{property.location}_{entity_description.key}" + ) + self._attr_unique_id = f"{coordinator.unique_id}_{unique_key}" + + # Update initial status. 
+ self._update_status() + + async def async_post_value(self, value: Any) -> None: + """Post the value of entity to server.""" + try: + await self.property.async_post_value(value) + except ThinQAPIException as exc: + raise ServiceValidationError( + exc.message, + translation_domain=DOMAIN, + translation_key=exc.code, + ) from exc + finally: + await self.coordinator.async_request_refresh() + + def _update_status(self) -> None: + """Update status itself. + + All inherited classes can update their own status in here. + """ + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self._update_status() + self.async_write_ha_state() diff --git a/homeassistant/components/lgthinq/icons.json b/homeassistant/components/lgthinq/icons.json new file mode 100644 index 00000000000..6a4ff48494a --- /dev/null +++ b/homeassistant/components/lgthinq/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "switch": { + "operation_power": { + "default": "mdi:power" + } + } + } +} diff --git a/homeassistant/components/lgthinq/manifest.json b/homeassistant/components/lgthinq/manifest.json new file mode 100644 index 00000000000..641c78844f9 --- /dev/null +++ b/homeassistant/components/lgthinq/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "lgthinq", + "name": "LG ThinQ", + "codeowners": ["@LG-ThinQ-Integration"], + "config_flow": true, + "dependencies": [], + "documentation": "https://www.home-assistant.io/integrations/lgthinq/", + "iot_class": "cloud_push", + "loggers": ["thinqconnect"], + "requirements": ["thinqconnect==0.9.5"] +} diff --git a/homeassistant/components/lgthinq/strings.json b/homeassistant/components/lgthinq/strings.json new file mode 100644 index 00000000000..6334fd9a893 --- /dev/null +++ b/homeassistant/components/lgthinq/strings.json @@ -0,0 +1,28 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]" + }, + "error": { + "token_unauthorized": "The token is invalid or unauthorized." + }, + "step": { + "user": { + "title": "Connect to ThinQ", + "description": "Please enter a ThinQ [PAT(Personal Access Token)]({pat_url}) created with your LG ThinQ account.", + "data": { + "access_token": "Personal Access Token", + "country": "Country" + } + } + } + }, + "entity": { + "switch": { + "operation_power": { + "name": "Power" + } + } + } +} diff --git a/homeassistant/components/lgthinq/switch.py b/homeassistant/components/lgthinq/switch.py new file mode 100644 index 00000000000..ee7dfdb02d7 --- /dev/null +++ b/homeassistant/components/lgthinq/switch.py @@ -0,0 +1,118 @@ +"""Support for switch entities.""" + +from __future__ import annotations + +import logging +from typing import Any + +from thinqconnect import PROPERTY_WRITABLE, DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration.homeassistant.property import create_properties + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ThinqConfigEntry +from .entity import ThinQEntity + +OPERATION_SWITCH_DESC: dict[ThinQProperty, SwitchEntityDescription] = { + ThinQProperty.AIR_FAN_OPERATION_MODE: SwitchEntityDescription( + key=ThinQProperty.AIR_FAN_OPERATION_MODE, + translation_key="operation_power", + ), + ThinQProperty.AIR_PURIFIER_OPERATION_MODE: SwitchEntityDescription( + key=ThinQProperty.AIR_PURIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ThinQProperty.BOILER_OPERATION_MODE: SwitchEntityDescription( + key=ThinQProperty.BOILER_OPERATION_MODE, + translation_key="operation_power", + ), + ThinQProperty.DEHUMIDIFIER_OPERATION_MODE: SwitchEntityDescription( + key=ThinQProperty.DEHUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), + ThinQProperty.HUMIDIFIER_OPERATION_MODE: SwitchEntityDescription( + key=ThinQProperty.HUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), +} + +DEVIE_TYPE_SWITCH_MAP: dict[DeviceType, tuple[SwitchEntityDescription, ...]] = { + DeviceType.AIR_PURIFIER_FAN: ( + OPERATION_SWITCH_DESC[ThinQProperty.AIR_FAN_OPERATION_MODE], + ), + DeviceType.AIR_PURIFIER: ( + OPERATION_SWITCH_DESC[ThinQProperty.AIR_PURIFIER_OPERATION_MODE], + ), + DeviceType.DEHUMIDIFIER: ( + OPERATION_SWITCH_DESC[ThinQProperty.DEHUMIDIFIER_OPERATION_MODE], + ), + DeviceType.HUMIDIFIER: ( + OPERATION_SWITCH_DESC[ThinQProperty.HUMIDIFIER_OPERATION_MODE], + ), + DeviceType.SYSTEM_BOILER: ( + OPERATION_SWITCH_DESC[ThinQProperty.BOILER_OPERATION_MODE], + ), +} + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for switch platform.""" + entities: list[ThinQSwitchEntity] = [] + for coordinator in entry.runtime_data.values(): + if ( + descriptions := DEVIE_TYPE_SWITCH_MAP.get( + coordinator.device_api.device_type + ) + ) is not None: + for description in descriptions: + properties = create_properties( + device_api=coordinator.device_api, + key=description.key, + children_keys=None, + rw_type=PROPERTY_WRITABLE, + ) + if not properties: + continue + + entities.extend( + ThinQSwitchEntity(coordinator, description, prop) + for prop in properties + ) + + if entities: + async_add_entities(entities) + + +class ThinQSwitchEntity(ThinQEntity, SwitchEntity): + """Represent a thinq switch platform.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + self._attr_is_on = self.property.get_value_as_bool() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch.""" + _LOGGER.debug("[%s] async_turn_on", self.name) + await self.async_post_value("POWER_ON") + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch.""" + _LOGGER.debug("[%s] async_turn_off", self.name) + await self.async_post_value("POWER_OFF") diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 0ca3335725f..1756a896d25 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -318,6 +318,7 @@ FLOWS = { "lektrico", "lg_netcast", "lg_soundbar", + "lgthinq", "lidarr", "lifx", "linear_garage_door", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 2e9199a3b0a..d7cfe503dd9 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3246,6 +3246,12 
@@ } } }, + "lgthinq": { + "name": "LG ThinQ", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" + }, "lidarr": { "name": "Lidarr", "integration_type": "service", diff --git a/requirements_all.txt b/requirements_all.txt index 1bfe1756173..8f89d72d9a0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2785,6 +2785,9 @@ thermoworks-smoke==0.1.8 # homeassistant.components.thingspeak thingspeak==1.0.0 +# homeassistant.components.lgthinq +thinqconnect==0.9.5 + # homeassistant.components.tikteck tikteck==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4a63a54d0b8..a1862a1340d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2198,6 +2198,9 @@ thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 +# homeassistant.components.lgthinq +thinqconnect==0.9.5 + # homeassistant.components.tilt_ble tilt-ble==0.2.3 diff --git a/tests/components/lgthinq/__init__.py b/tests/components/lgthinq/__init__.py new file mode 100644 index 00000000000..68ffb960f71 --- /dev/null +++ b/tests/components/lgthinq/__init__.py @@ -0,0 +1 @@ +"""Tests for the lgthinq integration.""" diff --git a/tests/components/lgthinq/conftest.py b/tests/components/lgthinq/conftest.py new file mode 100644 index 00000000000..321c770ee8d --- /dev/null +++ b/tests/components/lgthinq/conftest.py @@ -0,0 +1,86 @@ +"""Configure tests for the LGThinQ integration.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from thinqconnect import ThinQAPIException + +from homeassistant.components.lgthinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY + +from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT, MOCK_UUID + +from tests.common import MockConfigEntry + + +def mock_thinq_api_response( + *, + status: int = 200, + body: dict | None = None, + error_code: str | None = None, + error_message: str | None = None, +) -> MagicMock: + """Create a mock thinq api response.""" + response = MagicMock() + response.status = status + response.body = body + response.error_code = error_code + response.error_message = error_message + return response + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Create a mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=f"Test {DOMAIN}", + unique_id=MOCK_PAT, + data={ + CONF_ACCESS_TOKEN: MOCK_PAT, + CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, + CONF_COUNTRY: MOCK_COUNTRY, + }, + ) + + +@pytest.fixture +def mock_uuid() -> Generator[AsyncMock]: + """Mock a uuid.""" + with ( + patch("uuid.uuid4", autospec=True, return_value=MOCK_UUID) as mock_uuid, + patch( + "homeassistant.components.lgthinq.config_flow.uuid.uuid4", + new=mock_uuid, + ), + ): + yield mock_uuid.return_value + + +@pytest.fixture +def mock_thinq_api() -> Generator[AsyncMock]: + """Mock a thinq api.""" + with ( + patch("thinqconnect.ThinQApi", autospec=True) as mock_api, + patch( + "homeassistant.components.lgthinq.config_flow.ThinQApi", + new=mock_api, + ), + ): + thinq_api = mock_api.return_value + thinq_api.async_get_device_list = AsyncMock( + return_value=mock_thinq_api_response(status=200, body={}) + ) + yield thinq_api + + +@pytest.fixture +def mock_invalid_thinq_api(mock_thinq_api: AsyncMock) -> AsyncMock: + """Mock an invalid thinq api.""" + mock_thinq_api.async_get_device_list = AsyncMock( + side_effect=ThinQAPIException( + code="1309", message="Not 
allowed api call", headers=None + ) + ) + return mock_thinq_api diff --git a/tests/components/lgthinq/const.py b/tests/components/lgthinq/const.py new file mode 100644 index 00000000000..f46baa61c38 --- /dev/null +++ b/tests/components/lgthinq/const.py @@ -0,0 +1,8 @@ +"""Constants for lgthinq test.""" + +from typing import Final + +MOCK_PAT: Final[str] = "123abc4567de8f90g123h4ij56klmn789012p345rst6uvw789xy" +MOCK_UUID: Final[str] = "1b3deabc-123d-456d-987d-2a1c7b3bdb67" +MOCK_CONNECT_CLIENT_ID: Final[str] = f"home-assistant-{MOCK_UUID}" +MOCK_COUNTRY: Final[str] = "KR" diff --git a/tests/components/lgthinq/test_config_flow.py b/tests/components/lgthinq/test_config_flow.py new file mode 100644 index 00000000000..457549ccb7e --- /dev/null +++ b/tests/components/lgthinq/test_config_flow.py @@ -0,0 +1,66 @@ +"""Test the lgthinq config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.lgthinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT + +from tests.common import MockConfigEntry + + +async def test_config_flow( + hass: HomeAssistant, mock_thinq_api: AsyncMock, mock_uuid: AsyncMock +) -> None: + """Test that an thinq entry is normally created.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_ACCESS_TOKEN: MOCK_PAT, + CONF_COUNTRY: MOCK_COUNTRY, + CONF_CONNECT_CLIENT_ID: MOCK_CONNECT_CLIENT_ID, + } + + mock_thinq_api.async_get_device_list.assert_called_once() + + +async def test_config_flow_invalid_pat( + hass: HomeAssistant, mock_invalid_thinq_api: AsyncMock +) -> None: + """Test that an thinq flow should be aborted with an invalid PAT.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "token_unauthorized"} + mock_invalid_thinq_api.async_get_device_list.assert_called_once() + + +async def test_config_flow_already_configured( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_thinq_api: AsyncMock +) -> None: + """Test that thinq flow should be aborted when already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_ACCESS_TOKEN: MOCK_PAT, CONF_COUNTRY: MOCK_COUNTRY}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" From a8b55a16fd1cc3aa30249581d338836c3c42511f Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Fri, 30 Aug 2024 16:24:27 +0200 Subject: [PATCH 1258/1635] Add 100% coverage of Reolink host.py (#124577) * Add 100% host test coverage * Add missing test --- homeassistant/components/reolink/host.py | 14 +- tests/components/reolink/test_config_flow.py | 2 + tests/components/reolink/test_host.py | 313 
++++++++++++++++++- 3 files changed, 320 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/reolink/host.py b/homeassistant/components/reolink/host.py index 310188b720e..0df4918be76 100644 --- a/homeassistant/components/reolink/host.py +++ b/homeassistant/components/reolink/host.py @@ -437,7 +437,15 @@ class ReolinkHost: self._long_poll_task.cancel() self._long_poll_task = None - await self._api.unsubscribe(sub_type=SubType.long_poll) + try: + await self._api.unsubscribe(sub_type=SubType.long_poll) + except ReolinkError as err: + _LOGGER.error( + "Reolink error while unsubscribing from host %s:%s: %s", + self._api.host, + self._api.port, + err, + ) async def stop(self, event=None) -> None: """Disconnect the API.""" @@ -511,9 +519,7 @@ class ReolinkHost: ) if sub_type == SubType.push: await self.subscribe() - else: - await self._api.subscribe(self._webhook_url, sub_type) - return + return timer = self._api.renewtimer(sub_type) _LOGGER.debug( diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 926baf324bc..2d55f62ec74 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -94,6 +94,8 @@ async def test_config_flow_errors( reolink_connect.is_admin = False reolink_connect.user_level = "guest" + reolink_connect.unsubscribe.side_effect = ReolinkError("Test error") + reolink_connect.logout.side_effect = ReolinkError("Test error") result = await hass.config_entries.flow.async_configure( result["flow_id"], { diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py index c4096a4582f..64c3fe5c1b7 100644 --- a/tests/components/reolink/test_host.py +++ b/tests/components/reolink/test_host.py @@ -1,28 +1,43 @@ """Test the Reolink host.""" from asyncio import CancelledError -from unittest.mock import AsyncMock, MagicMock +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError +from freezegun.api import FrozenDateTimeFactory import pytest +from reolink_aio.enums import SubType +from reolink_aio.exceptions import NotSupportedError, ReolinkError, SubscriptionError -from homeassistant.components.reolink import const +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.reolink.host import ( + FIRST_ONVIF_LONG_POLL_TIMEOUT, + FIRST_ONVIF_TIMEOUT, + LONG_POLL_COOLDOWN, + LONG_POLL_ERROR_COOLDOWN, + POLL_INTERVAL_NO_PUSH, +) from homeassistant.components.webhook import async_handle_webhook from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.network import NoURLAvailableError from homeassistant.util.aiohttp import MockRequest from .conftest import TEST_UID -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator async def test_webhook_callback( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, entity_registry: er.EntityRegistry, @@ -32,7 +47,7 @@ async def test_webhook_callback( await 
hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED - webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + webhook_id = f"{DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" signal_all = MagicMock() signal_ch = MagicMock() @@ -46,6 +61,10 @@ async def test_webhook_callback( await client.post(f"/api/webhook/{webhook_id}") signal_all.assert_called_once() + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + # test webhook callback all channels with failure to read motion_state signal_all.reset_mock() reolink_connect.get_motion_state_all_ch.return_value = False @@ -59,7 +78,9 @@ async def test_webhook_callback( # test webhook callback single channel with error in event callback signal_ch.reset_mock() - reolink_connect.ONVIF_event_callback.side_effect = Exception("Test error") + reolink_connect.ONVIF_event_callback = AsyncMock( + side_effect=Exception("Test error") + ) await client.post(f"/api/webhook/{webhook_id}", data="test_data") signal_ch.assert_not_called() @@ -81,3 +102,285 @@ async def test_webhook_callback( with pytest.raises(CancelledError): await async_handle_webhook(hass, webhook_id, request) signal_all.assert_not_called() + + +async def test_no_mac( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test setup of host with no mac.""" + reolink_connect.mac_address = None + assert not await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_subscribe_error( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test error when subscribing to ONVIF does not block startup.""" + reolink_connect.subscribe.side_effect = ReolinkError("Test Error") + reolink_connect.subscribed.return_value = False + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_subscribe_unsuccesfull( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test that a unsuccessful ONVIF subscription does not block startup.""" + reolink_connect.subscribed.return_value = False + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_initial_ONVIF_not_supported( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test setup when initial ONVIF is not supported.""" + + def test_supported(ch, key): + """Test supported function.""" + if key == "initial_ONVIF_state": + return False + return True + + reolink_connect.supported = test_supported + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_ONVIF_not_supported( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test setup is not blocked when ONVIF API returns NotSupportedError.""" + + def test_supported(ch, key): + """Test supported function.""" + if key == "initial_ONVIF_state": + return False + return True + + reolink_connect.supported = test_supported + reolink_connect.subscribed.return_value = False + 
reolink_connect.subscribe.side_effect = NotSupportedError("Test error") + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + +async def test_renew( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test renew of the ONVIF subscription.""" + reolink_connect.renewtimer.return_value = 1 + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.renew.assert_called() + + reolink_connect.renew.side_effect = SubscriptionError("Test error") + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.subscribe.assert_called() + + reolink_connect.subscribe.reset_mock() + reolink_connect.subscribe.side_effect = SubscriptionError("Test error") + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.subscribe.assert_called() + + +async def test_long_poll_renew_fail( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test ONVIF long polling errors while renewing.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.subscribe.side_effect = NotSupportedError("Test error") + + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # ensure long polling continues + reolink_connect.pull_point_request.assert_called() + + +async def test_register_webhook_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test errors while registering the webhook.""" + with patch( + "homeassistant.components.reolink.host.get_url", + side_effect=NoURLAvailableError("Test error"), + ): + assert await hass.config_entries.async_setup(config_entry.entry_id) is False + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_long_poll_stop_when_push( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test ONVIF long polling stops when ONVIF push comes in.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + # start ONVIF long polling because ONVIF push did not came in + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # simulate ONVIF push callback + client = await hass_client_no_auth() + reolink_connect.ONVIF_event_callback.return_value = None + webhook_id = f"{DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + await client.post(f"/api/webhook/{webhook_id}") + + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.unsubscribe.assert_called_with(sub_type=SubType.long_poll) + + 
+async def test_long_poll_errors( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test errors during ONVIF long polling.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + reolink_connect.pull_point_request.side_effect = ReolinkError("Test error") + + # start ONVIF long polling because ONVIF push did not came in + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.pull_point_request.assert_called_once() + reolink_connect.pull_point_request.side_effect = Exception("Test error") + + freezer.tick(timedelta(seconds=LONG_POLL_ERROR_COOLDOWN)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + freezer.tick(timedelta(seconds=LONG_POLL_COOLDOWN)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + reolink_connect.unsubscribe.assert_called_with(sub_type=SubType.long_poll) + + +async def test_fast_polling_errors( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test errors during ONVIF fast polling.""" + reolink_connect.get_motion_state_all_ch.side_effect = ReolinkError("Test error") + reolink_connect.pull_point_request.side_effect = ReolinkError("Test error") + + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + # start ONVIF long polling because ONVIF push did not came in + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # start ONVIF fast polling because ONVIF long polling did not came in + freezer.tick(timedelta(seconds=FIRST_ONVIF_LONG_POLL_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert reolink_connect.get_motion_state_all_ch.call_count == 1 + + freezer.tick(timedelta(seconds=POLL_INTERVAL_NO_PUSH)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # fast polling continues despite errors + assert reolink_connect.get_motion_state_all_ch.call_count == 2 + + +async def test_diagnostics_event_connection( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + reolink_connect: MagicMock, + config_entry: MockConfigEntry, +) -> None: + """Test Reolink diagnostics event connection return values.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert diag["event connection"] == "Fast polling" + + # start ONVIF long polling because ONVIF push did not came in + freezer.tick(timedelta(seconds=FIRST_ONVIF_TIMEOUT)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert diag["event connection"] == "ONVIF long polling" + + # simulate ONVIF push callback + client = await hass_client_no_auth() + reolink_connect.ONVIF_event_callback.return_value = None + webhook_id = f"{DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + await 
client.post(f"/api/webhook/{webhook_id}") + + diag = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert diag["event connection"] == "ONVIF push" From 5e93394ae763516278b7f0be57bdc75409d2b8ff Mon Sep 17 00:00:00 2001 From: TimL Date: Sat, 31 Aug 2024 00:25:30 +1000 Subject: [PATCH 1259/1635] Ensure smilight fixtures select correct platform for tests (#124305) * Fix return type hint for setup_integration * Ensure platform fixture selects tested platform --- tests/components/smlight/conftest.py | 23 ++++++++++++++++++++--- tests/components/smlight/test_sensor.py | 4 ++-- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py index 0338bf4b672..93493daf51d 100644 --- a/tests/components/smlight/conftest.py +++ b/tests/components/smlight/conftest.py @@ -1,13 +1,14 @@ """Common fixtures for the SMLIGHT Zigbee tests.""" -from collections.abc import Generator +from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch from pysmlight.web import Info, Sensors import pytest +from homeassistant.components.smlight import PLATFORMS from homeassistant.components.smlight.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_object_fixture @@ -31,6 +32,19 @@ def mock_config_entry() -> MockConfigEntry: ) +@pytest.fixture +def platforms() -> list[Platform]: + """Platforms, which should be loaded during the test.""" + return PLATFORMS + + +@pytest.fixture(autouse=True) +async def mock_patch_platforms(platforms: list[str]) -> AsyncGenerator[None, None]: + """Fixture to set up platforms for tests.""" + with patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms): + yield + + @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Override async_setup_entry.""" @@ -64,7 +78,10 @@ def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: yield api -async def setup_integration(hass: HomeAssistant, mock_config_entry: MockConfigEntry): +async def setup_integration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> MockConfigEntry: """Set up the integration.""" mock_config_entry.add_to_hass(hass) diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py index 4d16a73a0a7..e1239c99e32 100644 --- a/tests/components/smlight/test_sensor.py +++ b/tests/components/smlight/test_sensor.py @@ -19,9 +19,9 @@ pytestmark = [ @pytest.fixture -def platforms() -> Platform | list[Platform]: +def platforms() -> list[Platform]: """Platforms, which should be loaded during the test.""" - return Platform.SENSOR + return [Platform.SENSOR] @pytest.mark.usefixtures("entity_registry_enabled_by_default") From c01bb44757f312123d5fabac75bb8a7ed826cb2a Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 30 Aug 2024 07:27:19 -0700 Subject: [PATCH 1260/1635] Add Google Photos integration (#124835) * Add Google Photos integration * Mark credentials typing * Add code review suggestions to simpilfy google_photos * Update tests/components/google_photos/conftest.py Co-authored-by: Joost Lekkerkerker * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * Fix comment typo * Update test fixtures from review feedback * Remove unnecessary test for 
services * Remove keyword argument --------- Co-authored-by: Joost Lekkerkerker --- .strict-typing | 1 + CODEOWNERS | 2 + homeassistant/brands/google.json | 1 + .../components/google_photos/__init__.py | 45 +++ homeassistant/components/google_photos/api.py | 143 +++++++++ .../google_photos/application_credentials.py | 23 ++ .../components/google_photos/config_flow.py | 54 ++++ .../components/google_photos/const.py | 10 + .../components/google_photos/exceptions.py | 7 + .../components/google_photos/manifest.json | 10 + .../components/google_photos/media_source.py | 283 ++++++++++++++++++ .../components/google_photos/strings.json | 29 ++ .../generated/application_credentials.py | 1 + homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 + requirements_all.txt | 1 + requirements_test_all.txt | 1 + tests/components/google_photos/__init__.py | 1 + tests/components/google_photos/conftest.py | 121 ++++++++ .../fixtures/api_not_enabled_response.json | 17 ++ .../fixtures/list_mediaitems.json | 35 +++ .../fixtures/list_mediaitems_empty.json | 5 + .../google_photos/fixtures/not_dict.json | 1 + .../google_photos/test_config_flow.py | 205 +++++++++++++ tests/components/google_photos/test_init.py | 109 +++++++ .../google_photos/test_media_source.py | 199 ++++++++++++ 27 files changed, 1321 insertions(+) create mode 100644 homeassistant/components/google_photos/__init__.py create mode 100644 homeassistant/components/google_photos/api.py create mode 100644 homeassistant/components/google_photos/application_credentials.py create mode 100644 homeassistant/components/google_photos/config_flow.py create mode 100644 homeassistant/components/google_photos/const.py create mode 100644 homeassistant/components/google_photos/exceptions.py create mode 100644 homeassistant/components/google_photos/manifest.json create mode 100644 homeassistant/components/google_photos/media_source.py create mode 100644 homeassistant/components/google_photos/strings.json create mode 100644 tests/components/google_photos/__init__.py create mode 100644 tests/components/google_photos/conftest.py create mode 100644 tests/components/google_photos/fixtures/api_not_enabled_response.json create mode 100644 tests/components/google_photos/fixtures/list_mediaitems.json create mode 100644 tests/components/google_photos/fixtures/list_mediaitems_empty.json create mode 100644 tests/components/google_photos/fixtures/not_dict.json create mode 100644 tests/components/google_photos/test_config_flow.py create mode 100644 tests/components/google_photos/test_init.py create mode 100644 tests/components/google_photos/test_media_source.py diff --git a/.strict-typing b/.strict-typing index a65ccf3ec88..d77c12293c4 100644 --- a/.strict-typing +++ b/.strict-typing @@ -209,6 +209,7 @@ homeassistant.components.glances.* homeassistant.components.goalzero.* homeassistant.components.google.* homeassistant.components.google_assistant_sdk.* +homeassistant.components.google_photos.* homeassistant.components.google_sheets.* homeassistant.components.gpsd.* homeassistant.components.greeneye_monitor.* diff --git a/CODEOWNERS b/CODEOWNERS index 0ebc49eda50..8ae6aa367b5 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -554,6 +554,8 @@ build.json @home-assistant/supervisor /tests/components/google_generative_ai_conversation/ @tronikos /homeassistant/components/google_mail/ @tkdrob /tests/components/google_mail/ @tkdrob +/homeassistant/components/google_photos/ @allenporter +/tests/components/google_photos/ @allenporter 
/homeassistant/components/google_sheets/ @tkdrob /tests/components/google_sheets/ @tkdrob /homeassistant/components/google_tasks/ @allenporter diff --git a/homeassistant/brands/google.json b/homeassistant/brands/google.json index 7c6ebc044e9..460c92076d8 100644 --- a/homeassistant/brands/google.json +++ b/homeassistant/brands/google.json @@ -9,6 +9,7 @@ "google_generative_ai_conversation", "google_mail", "google_maps", + "google_photos", "google_pubsub", "google_sheets", "google_tasks", diff --git a/homeassistant/components/google_photos/__init__.py b/homeassistant/components/google_photos/__init__.py new file mode 100644 index 00000000000..ab1ee4a63a4 --- /dev/null +++ b/homeassistant/components/google_photos/__init__.py @@ -0,0 +1,45 @@ +"""The Google Photos integration.""" + +from __future__ import annotations + +from aiohttp import ClientError, ClientResponseError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow + +from . import api +from .const import DOMAIN + +type GooglePhotosConfigEntry = ConfigEntry[api.AsyncConfigEntryAuth] + +__all__ = [ + "DOMAIN", +] + + +async def async_setup_entry( + hass: HomeAssistant, entry: GooglePhotosConfigEntry +) -> bool: + """Set up Google Photos from a config entry.""" + implementation = ( + await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry + ) + ) + session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + auth = api.AsyncConfigEntryAuth(hass, session) + try: + await auth.async_get_access_token() + except (ClientResponseError, ClientError) as err: + raise ConfigEntryNotReady from err + entry.runtime_data = auth + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: GooglePhotosConfigEntry +) -> bool: + """Unload a config entry.""" + return True diff --git a/homeassistant/components/google_photos/api.py b/homeassistant/components/google_photos/api.py new file mode 100644 index 00000000000..2fa6ee2d8f6 --- /dev/null +++ b/homeassistant/components/google_photos/api.py @@ -0,0 +1,143 @@ +"""API for Google Photos bound to Home Assistant OAuth.""" + +from abc import ABC, abstractmethod +from functools import partial +import logging +from typing import Any, cast + +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import Resource, build +from googleapiclient.errors import HttpError +from googleapiclient.http import BatchHttpRequest, HttpRequest + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from .exceptions import GooglePhotosApiError + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_PAGE_SIZE = 20 + +# Only included necessary fields to limit response sizes +GET_MEDIA_ITEM_FIELDS = ( + "id,baseUrl,mimeType,filename,mediaMetadata(width,height,photo,video)" +) +LIST_MEDIA_ITEM_FIELDS = f"nextPageToken,mediaItems({GET_MEDIA_ITEM_FIELDS})" + + +class AuthBase(ABC): + """Base class for Google Photos authentication library. + + Provides an asyncio interface around the blocking client library. 
+ """ + + def __init__( + self, + hass: HomeAssistant, + ) -> None: + """Initialize Google Photos auth.""" + self._hass = hass + + @abstractmethod + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + + async def get_user_info(self) -> dict[str, Any]: + """Get the user profile info.""" + service = await self._get_profile_service() + cmd: HttpRequest = service.userinfo().get() + return await self._execute(cmd) + + async def get_media_item(self, media_item_id: str) -> dict[str, Any]: + """Get all MediaItem resources.""" + service = await self._get_photos_service() + cmd: HttpRequest = service.mediaItems().get( + media_item_id, fields=GET_MEDIA_ITEM_FIELDS + ) + return await self._execute(cmd) + + async def list_media_items( + self, page_size: int | None = None, page_token: str | None = None + ) -> dict[str, Any]: + """Get all MediaItem resources.""" + service = await self._get_photos_service() + cmd: HttpRequest = service.mediaItems().list( + pageSize=(page_size or DEFAULT_PAGE_SIZE), + pageToken=page_token, + fields=LIST_MEDIA_ITEM_FIELDS, + ) + return await self._execute(cmd) + + async def _get_photos_service(self) -> Resource: + """Get current photos library API resource.""" + token = await self.async_get_access_token() + return await self._hass.async_add_executor_job( + partial( + build, + "photoslibrary", + "v1", + credentials=Credentials(token=token), # type: ignore[no-untyped-call] + static_discovery=False, + ) + ) + + async def _get_profile_service(self) -> Resource: + """Get current profile service API resource.""" + token = await self.async_get_access_token() + return await self._hass.async_add_executor_job( + partial(build, "oauth2", "v2", credentials=Credentials(token=token)) # type: ignore[no-untyped-call] + ) + + async def _execute(self, request: HttpRequest | BatchHttpRequest) -> dict[str, Any]: + try: + result = await self._hass.async_add_executor_job(request.execute) + except HttpError as err: + raise GooglePhotosApiError( + f"Google Photos API responded with error ({err.status_code}): {err.reason}" + ) from err + if not isinstance(result, dict): + raise GooglePhotosApiError( + f"Google Photos API replied with unexpected response: {result}" + ) + if error := result.get("error"): + message = error.get("message", "Unknown Error") + raise GooglePhotosApiError(f"Google Photos API response: {message}") + return cast(dict[str, Any], result) + + +class AsyncConfigEntryAuth(AuthBase): + """Provide Google Photos authentication tied to an OAuth2 based config entry.""" + + def __init__( + self, + hass: HomeAssistant, + oauth_session: config_entry_oauth2_flow.OAuth2Session, + ) -> None: + """Initialize AsyncConfigEntryAuth.""" + super().__init__(hass) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + if not self._oauth_session.valid_token: + await self._oauth_session.async_ensure_token_valid() + return cast(str, self._oauth_session.token[CONF_ACCESS_TOKEN]) + + +class AsyncConfigFlowAuth(AuthBase): + """An API client used during the config flow with a fixed token.""" + + def __init__( + self, + hass: HomeAssistant, + token: str, + ) -> None: + """Initialize ConfigFlowAuth.""" + super().__init__(hass) + self._token = token + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + return self._token diff --git a/homeassistant/components/google_photos/application_credentials.py b/homeassistant/components/google_photos/application_credentials.py new 
file mode 100644 index 00000000000..fc6cdbd272d --- /dev/null +++ b/homeassistant/components/google_photos/application_credentials.py @@ -0,0 +1,23 @@ +"""application_credentials platform the Google Photos integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer( + authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) + + +async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]: + """Return description placeholders for the credentials dialog.""" + return { + "oauth_consent_url": "https://console.cloud.google.com/apis/credentials/consent", + "more_info_url": "https://www.home-assistant.io/integrations/google_photos/", + "oauth_creds_url": "https://console.cloud.google.com/apis/credentials", + } diff --git a/homeassistant/components/google_photos/config_flow.py b/homeassistant/components/google_photos/config_flow.py new file mode 100644 index 00000000000..9bc4b35b6b4 --- /dev/null +++ b/homeassistant/components/google_photos/config_flow.py @@ -0,0 +1,54 @@ +"""Config flow for Google Photos.""" + +import logging +from typing import Any + +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN +from homeassistant.helpers import config_entry_oauth2_flow + +from . import api +from .const import DOMAIN, OAUTH2_SCOPES +from .exceptions import GooglePhotosApiError + + +class OAuth2FlowHandler( + config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN +): + """Config flow to handle Google Photos OAuth2 authentication.""" + + DOMAIN = DOMAIN + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) + + @property + def extra_authorize_data(self) -> dict[str, Any]: + """Extra data that needs to be appended to the authorize url.""" + return { + "scope": " ".join(OAUTH2_SCOPES), + # Add params to ensure we get back a refresh token + "access_type": "offline", + "prompt": "consent", + } + + async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: + """Create an entry for the flow.""" + client = api.AsyncConfigFlowAuth(self.hass, data[CONF_TOKEN][CONF_ACCESS_TOKEN]) + try: + user_resource_info = await client.get_user_info() + await client.list_media_items() + except GooglePhotosApiError as ex: + return self.async_abort( + reason="access_not_configured", + description_placeholders={"message": str(ex)}, + ) + except Exception: + self.logger.exception("Unknown error occurred") + return self.async_abort(reason="unknown") + user_id = user_resource_info["id"] + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_configured() + return self.async_create_entry(title=user_resource_info["name"], data=data) diff --git a/homeassistant/components/google_photos/const.py b/homeassistant/components/google_photos/const.py new file mode 100644 index 00000000000..7752f817608 --- /dev/null +++ b/homeassistant/components/google_photos/const.py @@ -0,0 +1,10 @@ +"""Constants for the Google Photos integration.""" + +DOMAIN = "google_photos" + +OAUTH2_AUTHORIZE = "https://accounts.google.com/o/oauth2/v2/auth" +OAUTH2_TOKEN = "https://oauth2.googleapis.com/token" +OAUTH2_SCOPES = [ + 
"https://www.googleapis.com/auth/photoslibrary.readonly", + "https://www.googleapis.com/auth/userinfo.profile", +] diff --git a/homeassistant/components/google_photos/exceptions.py b/homeassistant/components/google_photos/exceptions.py new file mode 100644 index 00000000000..b1a40688677 --- /dev/null +++ b/homeassistant/components/google_photos/exceptions.py @@ -0,0 +1,7 @@ +"""Exceptions for Google Photos api calls.""" + +from homeassistant.exceptions import HomeAssistantError + + +class GooglePhotosApiError(HomeAssistantError): + """Error talking to the Google Photos API.""" diff --git a/homeassistant/components/google_photos/manifest.json b/homeassistant/components/google_photos/manifest.json new file mode 100644 index 00000000000..3299b437d29 --- /dev/null +++ b/homeassistant/components/google_photos/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "google_photos", + "name": "Google Photos", + "codeowners": ["@allenporter"], + "config_flow": true, + "dependencies": ["application_credentials"], + "documentation": "https://www.home-assistant.io/integrations/google_photos", + "iot_class": "cloud_polling", + "requirements": ["google-api-python-client==2.71.0"] +} diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py new file mode 100644 index 00000000000..e6011cb0e61 --- /dev/null +++ b/homeassistant/components/google_photos/media_source.py @@ -0,0 +1,283 @@ +"""Media source for Google Photos.""" + +from dataclasses import dataclass +import logging +from typing import Any, cast + +from homeassistant.components.media_player import MediaClass, MediaType +from homeassistant.components.media_source import ( + BrowseError, + BrowseMediaSource, + MediaSource, + MediaSourceItem, + PlayMedia, +) +from homeassistant.core import HomeAssistant + +from . import GooglePhotosConfigEntry +from .const import DOMAIN +from .exceptions import GooglePhotosApiError + +_LOGGER = logging.getLogger(__name__) + +# Media Sources do not support paging, so we only show a subset of recent +# photos when displaying the users library. We fetch a minimum of 50 photos +# unless we run out, but in pages of 100 at a time given sometimes responses +# may only contain a handful of items Fetches at least 50 photos. +MAX_PHOTOS = 50 +PAGE_SIZE = 100 + +THUMBNAIL_SIZE = 256 +LARGE_IMAGE_SIZE = 2048 + + +# Markers for parts of PhotosIdentifier url pattern. +# The PhotosIdentifier can be in the following forms: +# config-entry-id +# config-entry-id/a/album-media-id +# config-entry-id/p/photo-media-id +# +# The album-media-id can contain special reserved folder names for use by +# this integration for virtual folders like the `recent` album. +PHOTO_SOURCE_IDENTIFIER_PHOTO = "p" +PHOTO_SOURCE_IDENTIFIER_ALBUM = "a" + +# Currently supports a single album of recent photos +RECENT_PHOTOS_ALBUM = "recent" +RECENT_PHOTOS_TITLE = "Recent Photos" + + +@dataclass +class PhotosIdentifier: + """Google Photos item identifier in a media source URL.""" + + config_entry_id: str + """Identifies the account for the media item.""" + + album_media_id: str | None = None + """Identifies the album contents to show. + + Not present at the same time as `photo_media_id`. + """ + + photo_media_id: str | None = None + """Identifies an indiviidual photo or video. + + Not present at the same time as `album_media_id`. + """ + + def as_string(self) -> str: + """Serialize the identiifer as a string. + + This is the opposite if parse_identifier(). 
+ """ + if self.photo_media_id is None: + if self.album_media_id is None: + return self.config_entry_id + return f"{self.config_entry_id}/{PHOTO_SOURCE_IDENTIFIER_ALBUM}/{self.album_media_id}" + return f"{self.config_entry_id}/{PHOTO_SOURCE_IDENTIFIER_PHOTO}/{self.photo_media_id}" + + +def parse_identifier(identifier: str) -> PhotosIdentifier: + """Parse a PhotosIdentifier form a string. + + This is the opposite of as_string(). + """ + parts = identifier.split("/") + if len(parts) == 1: + return PhotosIdentifier(parts[0]) + if len(parts) != 3: + raise BrowseError(f"Invalid identifier: {identifier}") + if parts[1] == PHOTO_SOURCE_IDENTIFIER_PHOTO: + return PhotosIdentifier(parts[0], photo_media_id=parts[2]) + return PhotosIdentifier(parts[0], album_media_id=parts[2]) + + +async def async_get_media_source(hass: HomeAssistant) -> MediaSource: + """Set up Synology media source.""" + return GooglePhotosMediaSource(hass) + + +class GooglePhotosMediaSource(MediaSource): + """Provide Google Photos as media sources.""" + + name = "Google Photos" + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize Google Photos source.""" + super().__init__(DOMAIN) + self.hass = hass + + async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia: + """Resolve media identifier to a url. + + This will resolve a specific media item to a url for the full photo or video contents. + """ + identifier = parse_identifier(item.identifier) + if identifier.photo_media_id is None: + raise BrowseError( + f"Could not resolve identifier without a photo_media_id: {identifier}" + ) + entry = self._async_config_entry(identifier.config_entry_id) + client = entry.runtime_data + media_item = await client.get_media_item( + media_item_id=identifier.photo_media_id + ) + is_video = media_item["mediaMetadata"].get("video") is not None + return PlayMedia( + url=( + _video_url(media_item) + if is_video + else _media_url(media_item, LARGE_IMAGE_SIZE) + ), + mime_type=media_item["mimeType"], + ) + + async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource: + """Return details about the media source. + + This renders the multi-level album structure for an account, its albums, + or the contents of an album. This will return a BrowseMediaSource with a + single level of children at the next level of the hierarchy. + """ + if not item.identifier: + # Top level view that lists all accounts. + return BrowseMediaSource( + domain=DOMAIN, + identifier=None, + media_class=MediaClass.DIRECTORY, + media_content_type=MediaClass.IMAGE, + title="Google Photos", + can_play=False, + can_expand=True, + children_media_class=MediaClass.DIRECTORY, + children=[ + _build_account(entry, PhotosIdentifier(cast(str, entry.unique_id))) + for entry in self.hass.config_entries.async_loaded_entries(DOMAIN) + ], + ) + + # Determine the configuration entry for this item + identifier = parse_identifier(item.identifier) + entry = self._async_config_entry(identifier.config_entry_id) + client = entry.runtime_data + + if identifier.album_media_id is None: + source = _build_account(entry, identifier) + source.children = [ + _build_album( + RECENT_PHOTOS_TITLE, + PhotosIdentifier( + identifier.config_entry_id, album_media_id=RECENT_PHOTOS_ALBUM + ), + ) + ] + return source + + # Currently only supports listing a single album of recent photos. 
+ if identifier.album_media_id != RECENT_PHOTOS_ALBUM: + raise BrowseError(f"Unsupported album: {identifier}") + + # Fetch recent items + media_items: list[dict[str, Any]] = [] + page_token: str | None = None + while len(media_items) < MAX_PHOTOS: + try: + result = await client.list_media_items( + page_size=PAGE_SIZE, page_token=page_token + ) + except GooglePhotosApiError as err: + raise BrowseError(f"Error listing media items: {err}") from err + media_items.extend(result["mediaItems"]) + page_token = result.get("nextPageToken") + if page_token is None: + break + + # Render the grid of media item results + source = _build_account(entry, PhotosIdentifier(cast(str, entry.unique_id))) + source.children = [ + _build_media_item( + PhotosIdentifier( + identifier.config_entry_id, photo_media_id=media_item["id"] + ), + media_item, + ) + for media_item in media_items + ] + return source + + def _async_config_entry(self, config_entry_id: str) -> GooglePhotosConfigEntry: + """Return a config entry with the specified id.""" + entry = self.hass.config_entries.async_entry_for_domain_unique_id( + DOMAIN, config_entry_id + ) + if not entry: + raise BrowseError( + f"Could not find config entry for identifier: {config_entry_id}" + ) + return entry + + +def _build_account( + config_entry: GooglePhotosConfigEntry, + identifier: PhotosIdentifier, +) -> BrowseMediaSource: + """Build the root node for a Google Photos account for a config entry.""" + return BrowseMediaSource( + domain=DOMAIN, + identifier=identifier.as_string(), + media_class=MediaClass.DIRECTORY, + media_content_type=MediaClass.IMAGE, + title=config_entry.title, + can_play=False, + can_expand=True, + ) + + +def _build_album(title: str, identifier: PhotosIdentifier) -> BrowseMediaSource: + """Build an album node.""" + return BrowseMediaSource( + domain=DOMAIN, + identifier=identifier.as_string(), + media_class=MediaClass.ALBUM, + media_content_type=MediaClass.ALBUM, + title=title, + can_play=False, + can_expand=True, + ) + + +def _build_media_item( + identifier: PhotosIdentifier, media_item: dict[str, Any] +) -> BrowseMediaSource: + """Build the node for an individual photos or video.""" + is_video = media_item["mediaMetadata"].get("video") is not None + return BrowseMediaSource( + domain=DOMAIN, + identifier=identifier.as_string(), + media_class=MediaClass.IMAGE if not is_video else MediaClass.VIDEO, + media_content_type=MediaType.IMAGE if not is_video else MediaType.VIDEO, + title=media_item["filename"], + can_play=is_video, + can_expand=False, + thumbnail=_media_url(media_item, THUMBNAIL_SIZE), + ) + + +def _media_url(media_item: dict[str, Any], max_size: int) -> str: + """Return a media item url with the specified max thumbnail size on the longest edge. + + See https://developers.google.com/photos/library/guides/access-media-items#base-urls + """ + width = media_item["mediaMetadata"]["width"] + height = media_item["mediaMetadata"]["height"] + key = "h" if height > width else "w" + return f"{media_item["baseUrl"]}={key}{max_size}" + + +def _video_url(media_item: dict[str, Any]) -> str: + """Return a video url for the item. 
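The URL helpers here only append parameters to the Google-provided baseUrl, as described in the linked base-URLs guide: a width or height bound on the longest edge for images, and dv for video downloads. Applied to the fixture items used by the tests below, the results work out as follows; this is an illustrative sketch that calls the private helpers directly and is not part of the patch.

# Illustrative: expected output of the private URL helpers for the test fixture items.
landscape_photo = {
    "baseUrl": "http://img.example.com/id1",
    "mediaMetadata": {"width": 1600, "height": 768},
}
video_item = {"baseUrl": "http://img.example.com/id2"}

# Width (1600) is the longest edge, so the "w" parameter is chosen.
assert _media_url(landscape_photo, LARGE_IMAGE_SIZE) == "http://img.example.com/id1=w2048"
assert _media_url(landscape_photo, THUMBNAIL_SIZE) == "http://img.example.com/id1=w256"
# Videos resolve to the "=dv" (download video) form instead.
assert _video_url(video_item) == "http://img.example.com/id2=dv"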
+ + See https://developers.google.com/photos/library/guides/access-media-items#base-urls + """ + return f"{media_item["baseUrl"]}=dv" diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json new file mode 100644 index 00000000000..57bce01d9f8 --- /dev/null +++ b/homeassistant/components/google_photos/strings.json @@ -0,0 +1,29 @@ +{ + "application_credentials": { + "description": "Follow the [instructions]({more_info_url}) for [OAuth consent screen]({oauth_consent_url}) to give Home Assistant access to your Google Photos. You also need to create Application Credentials linked to your account:\n1. Go to [Credentials]({oauth_creds_url}) and click **Create Credentials**.\n1. From the drop-down list select **OAuth client ID**.\n1. Select **Web application** for the Application Type.\n\n" + }, + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "access_not_configured": "Unable to access the Google API:\n\n{message}", + "unknown": "[%key:common::config_flow::error::unknown%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + } +} diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index 75fd489bad3..efb6f426d36 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -10,6 +10,7 @@ APPLICATION_CREDENTIALS = [ "google", "google_assistant_sdk", "google_mail", + "google_photos", "google_sheets", "google_tasks", "home_connect", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 1756a896d25..d4342d80d41 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -224,6 +224,7 @@ FLOWS = { "google_assistant_sdk", "google_generative_ai_conversation", "google_mail", + "google_photos", "google_sheets", "google_tasks", "google_translate", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index d7cfe503dd9..8091d48ca4d 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2280,6 +2280,12 @@ "iot_class": "cloud_polling", "name": "Google Maps" }, + "google_photos": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Google Photos" + }, "google_pubsub": { "integration_type": "hub", "config_flow": false, diff --git a/mypy.ini b/mypy.ini index 102ae5c8aa9..817060ac869 100644 --- a/mypy.ini +++ b/mypy.ini @@ 
-1846,6 +1846,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.google_photos.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.google_sheets.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 8f89d72d9a0..18bdde48625 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -979,6 +979,7 @@ goalzero==0.2.2 goodwe==0.3.6 # homeassistant.components.google_mail +# homeassistant.components.google_photos # homeassistant.components.google_tasks google-api-python-client==2.71.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a1862a1340d..2a1e3e718eb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -829,6 +829,7 @@ goalzero==0.2.2 goodwe==0.3.6 # homeassistant.components.google_mail +# homeassistant.components.google_photos # homeassistant.components.google_tasks google-api-python-client==2.71.0 diff --git a/tests/components/google_photos/__init__.py b/tests/components/google_photos/__init__.py new file mode 100644 index 00000000000..fa345811216 --- /dev/null +++ b/tests/components/google_photos/__init__.py @@ -0,0 +1 @@ +"""Tests for the Google Photos integration.""" diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py new file mode 100644 index 00000000000..874e55f0d33 --- /dev/null +++ b/tests/components/google_photos/conftest.py @@ -0,0 +1,121 @@ +"""Test fixtures for Google Photos.""" + +from collections.abc import Awaitable, Callable, Generator +import time +from typing import Any +from unittest.mock import Mock, patch + +import pytest + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.google_photos.const import DOMAIN, OAUTH2_SCOPES +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, load_json_array_fixture + +USER_IDENTIFIER = "user-identifier-1" +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" +FAKE_ACCESS_TOKEN = "some-access-token" +FAKE_REFRESH_TOKEN = "some-refresh-token" + + +@pytest.fixture(name="expires_at") +def mock_expires_at() -> int: + """Fixture to set the oauth token expiration time.""" + return time.time() + 3600 + + +@pytest.fixture(name="token_entry") +def mock_token_entry(expires_at: int) -> dict[str, Any]: + """Fixture for OAuth 'token' data for a ConfigEntry.""" + return { + "access_token": FAKE_ACCESS_TOKEN, + "refresh_token": FAKE_REFRESH_TOKEN, + "scope": " ".join(OAUTH2_SCOPES), + "token_type": "Bearer", + "expires_at": expires_at, + } + + +@pytest.fixture(name="config_entry") +def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: + """Fixture for a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="config-entry-id-123", + data={ + "auth_implementation": DOMAIN, + "token": token_entry, + }, + title="Account Name", + ) + + +@pytest.fixture(autouse=True) +async def setup_credentials(hass: HomeAssistant) -> None: + """Fixture to setup credentials.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + 
ClientCredential(CLIENT_ID, CLIENT_SECRET), + ) + + +@pytest.fixture(name="fixture_name") +def mock_fixture_name() -> str | None: + """Provide a json fixture file to load for list media item api responses.""" + return None + + +@pytest.fixture(name="setup_api") +def mock_setup_api(fixture_name: str) -> Generator[Mock, None, None]: + """Set up fake Google Photos API responses from fixtures.""" + with patch("homeassistant.components.google_photos.api.build") as mock: + mock.return_value.userinfo.return_value.get.return_value.execute.return_value = { + "id": USER_IDENTIFIER, + "name": "Test Name", + } + + responses = ( + load_json_array_fixture(fixture_name, DOMAIN) if fixture_name else [] + ) + + queue = list(responses) + + def list_media_items(**kwargs: Any) -> Mock: + mock = Mock() + mock.execute.return_value = queue.pop(0) + return mock + + mock.return_value.mediaItems.return_value.list = list_media_items + + # Mock a point lookup by reading contents of the fixture above + def get_media_item(media_item_id: str, **kwargs: Any) -> Mock: + for response in responses: + for media_item in response["mediaItems"]: + if media_item["id"] == media_item_id: + mock = Mock() + mock.execute.return_value = media_item + return mock + return None + + mock.return_value.mediaItems.return_value.get = get_media_item + yield mock + + +@pytest.fixture(name="setup_integration") +async def mock_setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> Callable[[], Awaitable[bool]]: + """Fixture to set up the integration.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/google_photos/fixtures/api_not_enabled_response.json b/tests/components/google_photos/fixtures/api_not_enabled_response.json new file mode 100644 index 00000000000..8933fcdc7bd --- /dev/null +++ b/tests/components/google_photos/fixtures/api_not_enabled_response.json @@ -0,0 +1,17 @@ +[ + { + "error": { + "code": 403, + "message": "Google Photos API has not been used in project 0 before or it is disabled. Enable it by visiting https://console.developers.google.com/apis/library/photoslibrary.googleapis.com/overview?project=0 then retry. If you enabled this API recently, wait a few minutes for the action to propagate to our systems and retry.", + "errors": [ + { + "message": "Google Photos API has not been used in project 0 before or it is disabled. Enable it by visiting https://console.developers.google.com/apis/library/photoslibrary.googleapis.com/overview?project=0 then retry. 
If you enabled this API recently, wait a few minutes for the action to propagate to our systems and retry.", + "domain": "usageLimits", + "reason": "accessNotConfigured", + "extendedHelp": "https://console.developers.google.com" + } + ], + "status": "PERMISSION_DENIED" + } + } +] diff --git a/tests/components/google_photos/fixtures/list_mediaitems.json b/tests/components/google_photos/fixtures/list_mediaitems.json new file mode 100644 index 00000000000..8e470a2fc04 --- /dev/null +++ b/tests/components/google_photos/fixtures/list_mediaitems.json @@ -0,0 +1,35 @@ +[ + { + "mediaItems": [ + { + "id": "id1", + "description": "some-descripton", + "productUrl": "http://example.com/id1", + "baseUrl": "http://img.example.com/id1", + "mimeType": "image/jpeg", + "mediaMetadata": { + "creationTime": "2014-10-02T15:01:23Z", + "width": 1600, + "height": 768 + }, + "filename": "example1.jpg" + }, + { + "id": "id2", + "description": "some-descripton", + "productUrl": "http://example.com/id2", + "baseUrl": "http://img.example.com/id2", + "mimeType": "video/mp4", + "mediaMetadata": { + "creationTime": "2014-10-02T16:01:23Z", + "width": 1600, + "height": 768, + "video": { + "cameraMake": "Pixel" + } + }, + "filename": "example2.mp4" + } + ] + } +] diff --git a/tests/components/google_photos/fixtures/list_mediaitems_empty.json b/tests/components/google_photos/fixtures/list_mediaitems_empty.json new file mode 100644 index 00000000000..bf6a4da855f --- /dev/null +++ b/tests/components/google_photos/fixtures/list_mediaitems_empty.json @@ -0,0 +1,5 @@ +[ + { + "mediaItems": [] + } +] diff --git a/tests/components/google_photos/fixtures/not_dict.json b/tests/components/google_photos/fixtures/not_dict.json new file mode 100644 index 00000000000..05e325337d2 --- /dev/null +++ b/tests/components/google_photos/fixtures/not_dict.json @@ -0,0 +1 @@ +["not a dictionary"] diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py new file mode 100644 index 00000000000..e9f2a68f2f5 --- /dev/null +++ b/tests/components/google_photos/test_config_flow.py @@ -0,0 +1,205 @@ +"""Test the Google Photos config flow.""" + +from unittest.mock import Mock, patch + +from googleapiclient.errors import HttpError +from httplib2 import Response +import pytest + +from homeassistant import config_entries +from homeassistant.components.google_photos.const import ( + DOMAIN, + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .conftest import USER_IDENTIFIER + +from tests.common import load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" + + +@pytest.mark.usefixtures("current_request_with_host", "setup_api") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + 
"&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.google_photos.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + config_entry = result["result"] + assert config_entry.unique_id == USER_IDENTIFIER + assert config_entry.title == "Test Name" + config_entry_data = dict(config_entry.data) + assert "token" in config_entry_data + assert "expires_at" in config_entry_data["token"] + del config_entry_data["token"]["expires_at"] + assert config_entry_data == { + "auth_implementation": DOMAIN, + "token": { + "access_token": "mock-access-token", + "expires_in": 60, + "refresh_token": "mock-refresh-token", + "type": "Bearer", + }, + } + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "current_request_with_host", + "setup_credentials", +) +async def test_api_not_enabled( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + setup_api: Mock, +) -> None: + """Check flow aborts if api is not enabled.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + setup_api.return_value.mediaItems.return_value.list = Mock() + setup_api.return_value.mediaItems.return_value.list.return_value.execute.side_effect = HttpError( + Response({"status": "403"}), + bytes(load_fixture("google_photos/api_not_enabled_response.json"), "utf-8"), + ) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "access_not_configured" + assert result["description_placeholders"]["message"].endswith( + "Google Photos API has not been used in project 0 before or it is disabled. 
Enable it by visiting https://console.developers.google.com/apis/library/photoslibrary.googleapis.com/overview?project=0 then retry. If you enabled this API recently, wait a few minutes for the action to propagate to our systems and retry." + ) + + +@pytest.mark.usefixtures("current_request_with_host", "setup_credentials") +async def test_general_exception( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, +) -> None: + """Check flow aborts if exception happens.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": "mock-refresh-token", + "access_token": "mock-access-token", + "type": "Bearer", + "expires_in": 60, + }, + ) + + with patch( + "homeassistant.components.google_photos.api.build", + side_effect=Exception, + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "unknown" diff --git a/tests/components/google_photos/test_init.py b/tests/components/google_photos/test_init.py new file mode 100644 index 00000000000..a2f835c8611 --- /dev/null +++ b/tests/components/google_photos/test_init.py @@ -0,0 +1,109 @@ +"""Tests for Google Photos.""" + +import http +import time + +from aiohttp import ClientError +import pytest + +from homeassistant.components.google_photos.const import OAUTH2_TOKEN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.mark.usefixtures("setup_integration") +async def test_setup( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test successful setup and unload.""" + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.fixture(name="refresh_token_status") +def mock_refresh_token_status() -> http.HTTPStatus: + """Fixture to set a token refresh status.""" + return http.HTTPStatus.OK + + +@pytest.fixture(name="refresh_token_exception") +def mock_refresh_token_exception() -> Exception | None: + """Fixture to set a token refresh status.""" + return None + + +@pytest.fixture(name="refresh_token") +def mock_refresh_token( + aioclient_mock: AiohttpClientMocker, + refresh_token_status: http.HTTPStatus, + refresh_token_exception: Exception | None, +) -> MockConfigEntry: + """Fixture to simulate a token refresh response.""" + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + 
exc=refresh_token_exception, + status=refresh_token_status, + json={ + "access_token": "updated-access-token", + "refresh_token": "updated-refresh-token", + "expires_at": time.time() + 3600, + "expires_in": 3600, + }, + ) + + +@pytest.mark.usefixtures("refresh_token", "setup_integration") +@pytest.mark.parametrize("expires_at", [time.time() - 3600], ids=["expired"]) +async def test_expired_token_refresh_success( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test expired token is refreshed.""" + + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.data["token"]["access_token"] == "updated-access-token" + assert config_entry.data["token"]["expires_in"] == 3600 + + +@pytest.mark.usefixtures("refresh_token", "setup_integration") +@pytest.mark.parametrize( + ("expires_at", "refresh_token_status", "refresh_token_exception", "expected_state"), + [ + ( + time.time() - 3600, + http.HTTPStatus.NOT_FOUND, + None, + ConfigEntryState.SETUP_RETRY, + ), + ( + time.time() - 3600, + http.HTTPStatus.INTERNAL_SERVER_ERROR, + None, + ConfigEntryState.SETUP_RETRY, + ), + ( + time.time() - 3600, + None, + ClientError("Client exception raised"), + ConfigEntryState.SETUP_RETRY, + ), + ], + ids=["unauthorized", "internal_server_error", "client_error"], +) +async def test_expired_token_refresh_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + expected_state: ConfigEntryState, +) -> None: + """Test failure while refreshing token with a transient error.""" + + assert config_entry.state is expected_state diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py new file mode 100644 index 00000000000..31c84f4811c --- /dev/null +++ b/tests/components/google_photos/test_media_source.py @@ -0,0 +1,199 @@ +"""Test the Google Photos media source.""" + +from typing import Any +from unittest.mock import Mock + +from googleapiclient.errors import HttpError +from httplib2 import Response +import pytest + +from homeassistant.components.google_photos.const import DOMAIN +from homeassistant.components.media_source import ( + URI_SCHEME, + BrowseError, + async_browse_media, + async_resolve_media, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture(autouse=True) +async def setup_components(hass: HomeAssistant) -> None: + """Fixture to initialize the integration.""" + await async_setup_component(hass, "media_source", {}) + + +@pytest.mark.usefixtures("setup_integration") +async def test_no_config_entries( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test a media source with no active config entry.""" + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert browse.can_expand + assert not browse.children + + +@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.parametrize( + ("fixture_name", "expected_results", "expected_medias"), + [ + ("list_mediaitems_empty.json", [], []), + ( + "list_mediaitems.json", + [ + ("config-entry-id-123/p/id1", "example1.jpg"), + ("config-entry-id-123/p/id2", "example2.mp4"), + ], + [ + ("http://img.example.com/id1=w2048", "image/jpeg"), + ("http://img.example.com/id2=dv", "video/mp4"), + 
], + ), + ], +) +async def test_recent_items( + hass: HomeAssistant, + expected_results: list[tuple[str, str]], + expected_medias: list[tuple[str, str]], +) -> None: + """Test a media source with no eligible camera devices.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert [(child.identifier, child.title) for child in browse.children] == [ + ("config-entry-id-123", "Account Name") + ] + + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123") + assert browse.domain == DOMAIN + assert browse.identifier == "config-entry-id-123" + assert browse.title == "Account Name" + assert [(child.identifier, child.title) for child in browse.children] == [ + ("config-entry-id-123/a/recent", "Recent Photos") + ] + + browse = await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/recent" + ) + assert browse.domain == DOMAIN + assert browse.identifier == "config-entry-id-123" + assert browse.title == "Account Name" + assert [ + (child.identifier, child.title) for child in browse.children + ] == expected_results + + media = [ + await async_resolve_media( + hass, f"{URI_SCHEME}{DOMAIN}/{child.identifier}", None + ) + for child in browse.children + ] + assert [ + (play_media.url, play_media.mime_type) for play_media in media + ] == expected_medias + + +@pytest.mark.usefixtures("setup_integration", "setup_api") +async def test_invalid_config_entry(hass: HomeAssistant) -> None: + """Test browsing to a config entry that does not exist.""" + with pytest.raises(BrowseError, match="Could not find config entry"): + await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/invalid-config-entry") + + +@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +async def test_invalid_album_id(hass: HomeAssistant) -> None: + """Test browsing to an album id that does not exist.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert [(child.identifier, child.title) for child in browse.children] == [ + ("config-entry-id-123", "Account Name") + ] + + with pytest.raises(BrowseError, match="Unsupported album"): + await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/invalid-album-id" + ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize( + ("identifier", "expected_error"), + [ + ("invalid-config-entry", "without a photo_media_id"), + ("too/many/slashes/in/path", "Invalid identifier"), + ], +) +async def test_missing_photo_id( + hass: HomeAssistant, identifier: str, expected_error: str +) -> None: + """Test parsing an invalid media identifier.""" + with pytest.raises(BrowseError, match=expected_error): + await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/{identifier}", None) + + +@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.parametrize( + "side_effect", + [ + HttpError(Response({"status": "403"}), b""), + ], +) +async def test_list_media_items_failure( + hass: HomeAssistant, + setup_api: Any, + side_effect: HttpError | Response, +) -> None: + """Test browsing to an album id that does not exist.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google 
Photos" + assert [(child.identifier, child.title) for child in browse.children] == [ + ("config-entry-id-123", "Account Name") + ] + + setup_api.return_value.mediaItems.return_value.list = Mock() + setup_api.return_value.mediaItems.return_value.list.return_value.execute.side_effect = side_effect + + with pytest.raises(BrowseError, match="Error listing media items"): + await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/recent" + ) + + +@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.parametrize( + "fixture_name", + [ + "api_not_enabled_response.json", + "not_dict.json", + ], +) +async def test_media_items_error_parsing_response(hass: HomeAssistant) -> None: + """Test browsing to an album id that does not exist.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert [(child.identifier, child.title) for child in browse.children] == [ + ("config-entry-id-123", "Account Name") + ] + with pytest.raises(BrowseError, match="Error listing media items"): + await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/recent" + ) From 240bd6c3bf9324920cda0ab8742f680c9a38f4ab Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 30 Aug 2024 16:41:48 +0200 Subject: [PATCH 1261/1635] Bump aiomealie to 0.9.0 (#124924) * Bump aiomealie to 0.9.0 * Bump aiomealie to 0.9.0 --- homeassistant/components/mealie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../mealie/snapshots/test_diagnostics.ambr | 29 +++++++++++++++ .../mealie/snapshots/test_services.ambr | 37 +++++++++++++++++++ 5 files changed, 69 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index 75093577b0f..4a277cbd09b 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.8.1"] + "requirements": ["aiomealie==0.9.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 18bdde48625..48dbabd47d3 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -288,7 +288,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.8.1 +aiomealie==0.9.0 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2a1e3e718eb..4c70d7bc4c4 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -270,7 +270,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.8.1 +aiomealie==0.9.0 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr index e6c72c950cc..a694c72fcf6 100644 --- a/tests/components/mealie/snapshots/test_diagnostics.ambr +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ -10,6 +10,7 @@ 'description': None, 'entry_type': 'breakfast', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -18,6 +19,7 @@ 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and 
gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'JeQ2', 'name': 'Roast Chicken', 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', @@ -35,6 +37,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -43,6 +46,7 @@ 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -58,6 +62,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -66,6 +71,7 @@ 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'En9o', 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', @@ -81,6 +87,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -89,6 +96,7 @@ 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'Kn62', 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', @@ -104,6 +112,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -112,6 +121,7 @@ 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. 
It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'ibL6', 'name': 'Pampered Chef Double Chocolate Mocha Trifle', 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', @@ -127,6 +137,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -135,6 +146,7 @@ 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'beGq', 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', @@ -150,6 +162,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -158,6 +171,7 @@ 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -173,6 +187,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -181,6 +196,7 @@ 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -196,6 +212,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -204,6 +221,7 @@ 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '5G1v', 'name': 'Miso Udon Noodles with Spinach and Tofu', 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', @@ -219,6 +237,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -227,6 +246,7 @@ 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. 
On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'rrNL', 'name': 'Mousse de saumon', 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', @@ -242,6 +262,7 @@ 'description': 'Dineren met de boys', 'entry_type': 'dinner', 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-21', @@ -257,6 +278,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -265,6 +287,7 @@ 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'INQz', 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', @@ -280,6 +303,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -288,6 +312,7 @@ 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nj5M', 'name': 'Boeuf bourguignon : la vraie recette (2)', 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', @@ -303,6 +328,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -311,6 +337,7 @@ 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -328,6 +355,7 @@ 'description': None, 'entry_type': 'side', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -336,6 +364,7 @@ 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr index 3ae158f1d2d..4f9ee6a5c09 100644 --- a/tests/components/mealie/snapshots/test_services.ambr +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -5,6 +5,7 @@ 'date_added': datetime.date(2024, 6, 29), 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. 
The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'household_id': None, 'image': 'SuPW', 'ingredients': list([ dict({ @@ -196,11 +197,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -216,11 +219,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 229, 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'JeQ2', 'name': 'Roast Chicken', 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', @@ -236,11 +241,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 226, 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. 
Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'INQz', 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', @@ -256,11 +263,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 224, 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nj5M', 'name': 'Boeuf bourguignon : la vraie recette (2)', 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', @@ -276,11 +285,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 222, 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'En9o', 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', @@ -296,11 +307,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 221, 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'Kn62', 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', @@ -316,11 +329,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 220, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -336,11 +351,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 219, 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. 
Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'ibL6', 'name': 'Pampered Chef Double Chocolate Mocha Trifle', 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', @@ -356,11 +373,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 217, 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'beGq', 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', @@ -376,11 +395,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 216, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -396,11 +417,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 212, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -416,11 +439,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 211, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -436,11 +461,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 196, 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '5G1v', 'name': 'Miso Udon Noodles with Spinach and Tofu', 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', @@ -456,11 +483,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 195, 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'rrNL', 'name': 'Mousse de saumon', 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', @@ -476,6 +505,7 @@ 'description': 'Dineren met de boys', 'entry_type': , 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 21), 'mealplan_id': 1, 'recipe': None, @@ -491,6 +521,7 @@ 'date_added': datetime.date(2024, 6, 29), 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'household_id': None, 'image': 'SuPW', 'ingredients': list([ dict({ @@ -681,11 +712,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. 
En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -705,11 +738,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -729,11 +764,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', From 1d05a917f953554937b5f860f4472fe06bcd4c09 Mon Sep 17 00:00:00 2001 From: Aidan Timson Date: Fri, 30 Aug 2024 15:45:46 +0100 Subject: [PATCH 1262/1635] Add work items per type and state counter sensors to Azure DevOps (#119737) * Add work item data * Add work item sensors * Add icon * Add test fixtures * Add none return tests * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * Apply suggestion * Use icon translations * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * Update test --------- Co-authored-by: Joost Lekkerkerker --- .../components/azure_devops/coordinator.py | 54 ++++++++++++ homeassistant/components/azure_devops/data.py | 2 + .../components/azure_devops/icons.json | 3 + .../components/azure_devops/sensor.py | 85 ++++++++++++++++++- .../components/azure_devops/strings.json | 3 + tests/components/azure_devops/__init__.py | 52 ++++++++++++ tests/components/azure_devops/conftest.py | 16 +++- tests/components/azure_devops/test_init.py | 45 ++++++++++ 8 files changed, 253 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/azure_devops/coordinator.py b/homeassistant/components/azure_devops/coordinator.py index 2460a9bbfce..21fb76560c3 100644 --- a/homeassistant/components/azure_devops/coordinator.py +++ b/homeassistant/components/azure_devops/coordinator.py @@ -6,8 +6,14 @@ import logging from typing import Final from aioazuredevops.client import DevOpsClient +from aioazuredevops.helper import ( + WorkItemTypeAndState, + work_item_types_states_filter, + work_items_by_type_and_state, +) from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project +from aioazuredevops.models.work_item_type import Category import aiohttp from homeassistant.config_entries import ConfigEntry @@ -20,6 +26,7 @@ from .const import CONF_ORG, DOMAIN from .data import AzureDevOpsData BUILDS_QUERY: Final = "?queryOrder=queueTimeDescending&maxBuildsPerDefinition=1" +IGNORED_CATEGORIES: Final[list[Category]] = [Category.COMPLETED, Category.REMOVED] def ado_exception_none_handler(func: Callable) -> Callable: @@ -105,13 +112,60 @@ class AzureDevOpsDataUpdateCoordinator(DataUpdateCoordinator[AzureDevOpsData]): BUILDS_QUERY, ) + @ado_exception_none_handler + async def _get_work_items( + self, project_name: str + ) -> list[WorkItemTypeAndState] | None: + """Get the work items.""" + + if ( + work_item_types := await self.client.get_work_item_types( + self.organization, + project_name, + ) + ) is None: + # If no work item types are returned, return an empty list + return [] + + if ( + work_item_ids := await self.client.get_work_item_ids( + self.organization, + project_name, + # Filter out completed and removed work items so we only get active work items + states=work_item_types_states_filter( + work_item_types, + ignored_categories=IGNORED_CATEGORIES, + ), + ) + ) is None: + # If no work item ids are returned, return an empty list + return [] + + if ( + work_items := await self.client.get_work_items( + self.organization, + project_name, + work_item_ids, + ) + ) is None: + # If no work items are returned, return an empty list + return [] + + return work_items_by_type_and_state( + work_item_types, + work_items, + ignored_categories=IGNORED_CATEGORIES, + ) + 
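# A minimal, self-contained sketch of the idea behind the new per-type/state
# work item counters: active work items are bucketed by (type, state) and each
# sensor's value is simply the size of its bucket. SketchWorkItem and
# count_by_type_and_state are simplified stand-ins for illustration only, not
# the aioazuredevops models or the work_items_by_type_and_state helper used above.
from collections import Counter
from dataclasses import dataclass


@dataclass
class SketchWorkItem:
    """A stripped-down work item carrying only what the counter needs."""

    work_item_type: str
    state: str


def count_by_type_and_state(
    items: list[SketchWorkItem], ignored_states: set[str]
) -> dict[tuple[str, str], int]:
    """Return {(type, state): count}, skipping items in an ignored state."""
    return Counter(
        (item.work_item_type, item.state)
        for item in items
        if item.state not in ignored_states
    )


# Two active bugs and one resolved bug produce two buckets; closed items are dropped.
counts = count_by_type_and_state(
    [
        SketchWorkItem("Bug", "Active"),
        SketchWorkItem("Bug", "Active"),
        SketchWorkItem("Bug", "Resolved"),
        SketchWorkItem("Bug", "Closed"),
    ],
    ignored_states={"Closed", "Removed"},
)
assert counts[("Bug", "Active")] == 2
assert counts[("Bug", "Resolved")] == 1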
async def _async_update_data(self) -> AzureDevOpsData: """Fetch data from Azure DevOps.""" # Get the builds from the project builds = await self._get_builds(self.project.name) + work_items = await self._get_work_items(self.project.name) return AzureDevOpsData( organization=self.organization, project=self.project, builds=builds, + work_items=work_items, ) diff --git a/homeassistant/components/azure_devops/data.py b/homeassistant/components/azure_devops/data.py index c2da38ccc09..ff34bc90c24 100644 --- a/homeassistant/components/azure_devops/data.py +++ b/homeassistant/components/azure_devops/data.py @@ -2,6 +2,7 @@ from dataclasses import dataclass +from aioazuredevops.helper import WorkItemTypeAndState from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project @@ -13,3 +14,4 @@ class AzureDevOpsData: organization: str project: Project builds: list[Build] + work_items: list[WorkItemTypeAndState] diff --git a/homeassistant/components/azure_devops/icons.json b/homeassistant/components/azure_devops/icons.json index de720b46106..ea6b4c632ea 100644 --- a/homeassistant/components/azure_devops/icons.json +++ b/homeassistant/components/azure_devops/icons.json @@ -3,6 +3,9 @@ "sensor": { "latest_build": { "default": "mdi:pipe" + }, + "work_item_count": { + "default": "mdi:ticket" } } } diff --git a/homeassistant/components/azure_devops/sensor.py b/homeassistant/components/azure_devops/sensor.py index 7b7af1dd666..fd47115214a 100644 --- a/homeassistant/components/azure_devops/sensor.py +++ b/homeassistant/components/azure_devops/sensor.py @@ -8,6 +8,7 @@ from datetime import datetime import logging from typing import Any +from aioazuredevops.helper import WorkItemState, WorkItemTypeAndState from aioazuredevops.models.build import Build from homeassistant.components.sensor import ( @@ -29,12 +30,19 @@ _LOGGER = logging.getLogger(__name__) @dataclass(frozen=True, kw_only=True) class AzureDevOpsBuildSensorEntityDescription(SensorEntityDescription): - """Class describing Azure DevOps base build sensor entities.""" + """Class describing Azure DevOps build sensor entities.""" attr_fn: Callable[[Build], dict[str, Any] | None] = lambda _: None value_fn: Callable[[Build], datetime | StateType] +@dataclass(frozen=True, kw_only=True) +class AzureDevOpsWorkItemSensorEntityDescription(SensorEntityDescription): + """Class describing Azure DevOps work item sensor entities.""" + + value_fn: Callable[[WorkItemState], datetime | StateType] + + BASE_BUILD_SENSOR_DESCRIPTIONS: tuple[AzureDevOpsBuildSensorEntityDescription, ...] = ( # Attributes are deprecated in 2024.7 and can be removed in 2025.1 AzureDevOpsBuildSensorEntityDescription( @@ -116,6 +124,16 @@ BASE_BUILD_SENSOR_DESCRIPTIONS: tuple[AzureDevOpsBuildSensorEntityDescription, . ), ) +BASE_WORK_ITEM_SENSOR_DESCRIPTIONS: tuple[ + AzureDevOpsWorkItemSensorEntityDescription, ... 
+] = ( + AzureDevOpsWorkItemSensorEntityDescription( + key="work_item_count", + translation_key="work_item_count", + value_fn=lambda work_item_state: len(work_item_state.work_items), + ), +) + def parse_datetime(value: str | None) -> datetime | None: """Parse datetime string.""" @@ -134,7 +152,7 @@ async def async_setup_entry( coordinator = entry.runtime_data initial_builds: list[Build] = coordinator.data.builds - async_add_entities( + entities: list[SensorEntity] = [ AzureDevOpsBuildSensor( coordinator, description, @@ -143,8 +161,22 @@ async def async_setup_entry( for description in BASE_BUILD_SENSOR_DESCRIPTIONS for key, build in enumerate(initial_builds) if build.project and build.definition + ] + + entities.extend( + AzureDevOpsWorkItemSensor( + coordinator, + description, + key, + state_key, + ) + for description in BASE_WORK_ITEM_SENSOR_DESCRIPTIONS + for key, work_item_type_state in enumerate(coordinator.data.work_items) + for state_key, _ in enumerate(work_item_type_state.state_items) ) + async_add_entities(entities) + class AzureDevOpsBuildSensor(AzureDevOpsEntity, SensorEntity): """Define a Azure DevOps build sensor.""" @@ -162,8 +194,8 @@ class AzureDevOpsBuildSensor(AzureDevOpsEntity, SensorEntity): self.entity_description = description self.item_key = item_key self._attr_unique_id = ( - f"{self.coordinator.data.organization}_" - f"{self.build.project.id}_" + f"{coordinator.data.organization}_" + f"{coordinator.data.project.id}_" f"{self.build.definition.build_id}_" f"{description.key}" ) @@ -185,3 +217,48 @@ class AzureDevOpsBuildSensor(AzureDevOpsEntity, SensorEntity): def extra_state_attributes(self) -> Mapping[str, Any] | None: """Return the state attributes of the entity.""" return self.entity_description.attr_fn(self.build) + + +class AzureDevOpsWorkItemSensor(AzureDevOpsEntity, SensorEntity): + """Define a Azure DevOps work item sensor.""" + + entity_description: AzureDevOpsWorkItemSensorEntityDescription + + def __init__( + self, + coordinator: AzureDevOpsDataUpdateCoordinator, + description: AzureDevOpsWorkItemSensorEntityDescription, + wits_key: int, + state_key: int, + ) -> None: + """Initialize.""" + super().__init__(coordinator) + self.entity_description = description + self.wits_key = wits_key + self.state_key = state_key + self._attr_unique_id = ( + f"{coordinator.data.organization}_" + f"{coordinator.data.project.id}_" + f"{self.work_item_type.name}_" + f"{self.work_item_state.name}_" + f"{description.key}" + ) + self._attr_translation_placeholders = { + "item_type": self.work_item_type.name, + "item_state": self.work_item_state.name, + } + + @property + def work_item_type(self) -> WorkItemTypeAndState: + """Return the work item.""" + return self.coordinator.data.work_items[self.wits_key] + + @property + def work_item_state(self) -> WorkItemState: + """Return the work item state.""" + return self.work_item_type.state_items[self.state_key] + + @property + def native_value(self) -> datetime | StateType: + """Return the state.""" + return self.entity_description.value_fn(self.work_item_state) diff --git a/homeassistant/components/azure_devops/strings.json b/homeassistant/components/azure_devops/strings.json index 8a17169fb6b..c5304270396 100644 --- a/homeassistant/components/azure_devops/strings.json +++ b/homeassistant/components/azure_devops/strings.json @@ -60,6 +60,9 @@ }, "url": { "name": "{definition_name} latest build url" + }, + "work_item_count": { + "name": "{item_type} {item_state} work items" } } }, diff --git 
a/tests/components/azure_devops/__init__.py b/tests/components/azure_devops/__init__.py index cc4732b1495..6414fe0257c 100644 --- a/tests/components/azure_devops/__init__.py +++ b/tests/components/azure_devops/__init__.py @@ -1,9 +1,12 @@ """Tests for the Azure DevOps integration.""" +from datetime import datetime from typing import Final from aioazuredevops.models.build import Build, BuildDefinition from aioazuredevops.models.core import Project +from aioazuredevops.models.work_item import WorkItem, WorkItemFields +from aioazuredevops.models.work_item_type import Category, Icon, State, WorkItemType from homeassistant.components.azure_devops.const import CONF_ORG, CONF_PAT, CONF_PROJECT from homeassistant.core import HomeAssistant @@ -77,6 +80,55 @@ DEVOPS_BUILD_MISSING_PROJECT_DEFINITION = Build( build_id=9876, ) +DEVOPS_WORK_ITEM_TYPES = [ + WorkItemType( + name="Bug", + reference_name="System.Bug", + description="Bug", + color="ff0000", + icon=Icon(id="1234", url="https://example.com/icon.png"), + is_disabled=False, + xml_form="", + fields=[], + field_instances=[], + transitions={}, + states=[ + State(name="New", color="ff0000", category=Category.PROPOSED), + State(name="Active", color="ff0000", category=Category.IN_PROGRESS), + State(name="Resolved", color="ff0000", category=Category.RESOLVED), + State(name="Closed", color="ff0000", category=Category.COMPLETED), + ], + url="", + ) +] + +DEVOPS_WORK_ITEM_IDS = [1] + +DEVOPS_WORK_ITEMS = [ + WorkItem( + id=1, + rev=1, + fields=WorkItemFields( + area_path="", + team_project="", + iteration_path="", + work_item_type="Bug", + state="New", + reason="New", + assigned_to=None, + created_date=datetime(2021, 1, 1), + created_by=None, + changed_date=datetime(2021, 1, 1), + changed_by=None, + comment_count=0, + title="Test", + microsoft_vsts_common_state_change_date=datetime(2021, 1, 1), + microsoft_vsts_common_priority=1, + ), + url="https://example.com", + ) +] + async def setup_integration( hass: HomeAssistant, diff --git a/tests/components/azure_devops/conftest.py b/tests/components/azure_devops/conftest.py index c65adaa4da5..54c730f9523 100644 --- a/tests/components/azure_devops/conftest.py +++ b/tests/components/azure_devops/conftest.py @@ -7,7 +7,16 @@ import pytest from homeassistant.components.azure_devops.const import DOMAIN -from . import DEVOPS_BUILD, DEVOPS_PROJECT, FIXTURE_USER_INPUT, PAT, UNIQUE_ID +from . 
import ( + DEVOPS_BUILD, + DEVOPS_PROJECT, + DEVOPS_WORK_ITEM_IDS, + DEVOPS_WORK_ITEM_TYPES, + DEVOPS_WORK_ITEMS, + FIXTURE_USER_INPUT, + PAT, + UNIQUE_ID, +) from tests.common import MockConfigEntry @@ -33,8 +42,9 @@ async def mock_devops_client() -> AsyncGenerator[MagicMock]: devops_client.get_project.return_value = DEVOPS_PROJECT devops_client.get_builds.return_value = [DEVOPS_BUILD] devops_client.get_build.return_value = DEVOPS_BUILD - devops_client.get_work_item_ids.return_value = None - devops_client.get_work_items.return_value = None + devops_client.get_work_item_types.return_value = DEVOPS_WORK_ITEM_TYPES + devops_client.get_work_item_ids.return_value = DEVOPS_WORK_ITEM_IDS + devops_client.get_work_items.return_value = DEVOPS_WORK_ITEMS yield devops_client diff --git a/tests/components/azure_devops/test_init.py b/tests/components/azure_devops/test_init.py index a7655042f25..dd512cb12e0 100644 --- a/tests/components/azure_devops/test_init.py +++ b/tests/components/azure_devops/test_init.py @@ -91,3 +91,48 @@ async def test_no_builds( assert mock_devops_client.get_builds.call_count == 1 assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_no_work_item_types( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_devops_client: MagicMock, +) -> None: + """Test a failed update entry.""" + mock_devops_client.get_work_item_types.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert mock_devops_client.get_work_item_types.call_count == 1 + + assert mock_config_entry.state is ConfigEntryState.LOADED + + +async def test_no_work_item_ids( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_devops_client: MagicMock, +) -> None: + """Test a failed update entry.""" + mock_devops_client.get_work_item_ids.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert mock_devops_client.get_work_item_ids.call_count == 1 + + assert mock_config_entry.state is ConfigEntryState.LOADED + + +async def test_no_work_items( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_devops_client: MagicMock, +) -> None: + """Test a failed update entry.""" + mock_devops_client.get_work_items.return_value = None + + await setup_integration(hass, mock_config_entry) + + assert mock_devops_client.get_work_items.call_count == 1 + + assert mock_config_entry.state is ConfigEntryState.LOADED From 20f9b9e412a555e95010217f43c812dfbe4a3b18 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 30 Aug 2024 17:03:24 +0200 Subject: [PATCH 1263/1635] Add inverter-devices to solarlog (#123205) * Add inverter-devices * Minor code adjustments * Update manifest.json Seperate dependency upgrade to seperate PR * Update requirements_all.txt Seperate dependency upgrade to seperate PR * Update requirements_test_all.txt Seperate dependency upgrade to seperate PR * Update homeassistant/components/solarlog/sensor.py Co-authored-by: Joost Lekkerkerker * Split up base class, document SolarLogSensorEntityDescription * Split up sensor types * Update snapshot * Add all devices in config_flow * Remove options flow * Move devices in config_entry from options to data * Correct mock_config_entry * Minor adjustments * Remove enabled_devices from config * Remove obsolete test * Update snapshot * Delete obsolete code snips * Update homeassistant/components/solarlog/sensor.py Co-authored-by: Joost Lekkerkerker * Remove obsolete test in setting up sensors * Update 
homeassistant/components/solarlog/sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/solarlog/entity.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/solarlog/config_flow.py Co-authored-by: Joost Lekkerkerker * Fix typing error --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/solarlog/__init__.py | 6 +- .../components/solarlog/coordinator.py | 9 +- homeassistant/components/solarlog/entity.py | 71 ++++++++++ homeassistant/components/solarlog/sensor.py | 125 +++++++++++------- tests/components/solarlog/conftest.py | 25 +++- .../solarlog/fixtures/solarlog_data.json | 3 +- .../solarlog/snapshots/test_sensor.ambr | 99 ++++++++++---- tests/components/solarlog/test_config_flow.py | 2 +- 8 files changed, 260 insertions(+), 80 deletions(-) create mode 100644 homeassistant/components/solarlog/entity.py diff --git a/homeassistant/components/solarlog/__init__.py b/homeassistant/components/solarlog/__init__.py index 962efa4e190..f23305ca8f2 100644 --- a/homeassistant/components/solarlog/__init__.py +++ b/homeassistant/components/solarlog/__init__.py @@ -7,17 +7,17 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .coordinator import SolarlogData +from .coordinator import SolarLogCoordinator _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR] -type SolarlogConfigEntry = ConfigEntry[SolarlogData] +type SolarlogConfigEntry = ConfigEntry[SolarLogCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: SolarlogConfigEntry) -> bool: """Set up a config entry for solarlog.""" - coordinator = SolarlogData(hass, entry) + coordinator = SolarLogCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index d2963e1950e..96ee00af1ec 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -24,7 +24,7 @@ if TYPE_CHECKING: from . 
import SolarlogConfigEntry -class SolarlogData(update_coordinator.DataUpdateCoordinator): +class SolarLogCoordinator(update_coordinator.DataUpdateCoordinator): """Get and update the latest data.""" def __init__(self, hass: HomeAssistant, entry: SolarlogConfigEntry) -> None: @@ -49,12 +49,19 @@ class SolarlogData(update_coordinator.DataUpdateCoordinator): self.host, extended_data, hass.config.time_zone ) + async def _async_setup(self) -> None: + """Do initialization logic.""" + if self.solarlog.extended_data: + device_list = await self.solarlog.client.get_device_list() + self.solarlog.set_enabled_devices({key: True for key in device_list}) + async def _async_update_data(self): """Update the data from the SolarLog device.""" _LOGGER.debug("Start data update") try: data = await self.solarlog.update_data() + await self.solarlog.update_device_list() except SolarLogConnectionError as err: raise ConfigEntryNotReady(err) from err except SolarLogUpdateError as err: diff --git a/homeassistant/components/solarlog/entity.py b/homeassistant/components/solarlog/entity.py new file mode 100644 index 00000000000..1d91fc8726b --- /dev/null +++ b/homeassistant/components/solarlog/entity.py @@ -0,0 +1,71 @@ +"""Entities for SolarLog integration.""" + +from __future__ import annotations + +from homeassistant.components.sensor import SensorEntityDescription +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.util import slugify + +from .const import DOMAIN +from .coordinator import SolarLogCoordinator + + +class SolarLogBaseEntity(CoordinatorEntity[SolarLogCoordinator]): + """SolarLog base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SolarLogCoordinator, + description: SensorEntityDescription, + ) -> None: + """Initialize the SolarLogCoordinator sensor.""" + super().__init__(coordinator) + + self.entity_description = description + + +class SolarLogCoordinatorEntity(SolarLogBaseEntity): + """Base SolarLog Coordinator entity.""" + + def __init__( + self, + coordinator: SolarLogCoordinator, + description: SensorEntityDescription, + ) -> None: + """Initialize the SolarLogCoordinator sensor.""" + super().__init__(coordinator, description) + + self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + self._attr_device_info = DeviceInfo( + manufacturer="Solar-Log", + model="Controller", + identifiers={(DOMAIN, coordinator.unique_id)}, + name=coordinator.name, + configuration_url=coordinator.host, + ) + + +class SolarLogInverterEntity(SolarLogBaseEntity): + """Base SolarLog inverter entity.""" + + def __init__( + self, + coordinator: SolarLogCoordinator, + description: SensorEntityDescription, + device_id: int, + ) -> None: + """Initialize the SolarLogInverter sensor.""" + super().__init__(coordinator, description) + name = f"{coordinator.unique_id}-{slugify(coordinator.solarlog.device_name(device_id))}" + self._attr_unique_id = f"{name}-{description.key}" + self._attr_device_info = DeviceInfo( + manufacturer="Solar-Log", + model="Inverter", + identifiers={(DOMAIN, name)}, + name=coordinator.solarlog.device_name(device_id), + via_device=(DOMAIN, coordinator.unique_id), + ) + self.device_id = device_id diff --git a/homeassistant/components/solarlog/sensor.py b/homeassistant/components/solarlog/sensor.py index 45961133e8a..cd4a711cdc9 100644 --- a/homeassistant/components/solarlog/sensor.py +++ b/homeassistant/components/solarlog/sensor.py @@ -1,8 +1,11 @@ """Platform 
for solarlog sensors.""" +from __future__ import annotations + from collections.abc import Callable from dataclasses import dataclass from datetime import datetime +from typing import Any from homeassistant.components.sensor import ( SensorDeviceClass, @@ -17,22 +20,22 @@ from homeassistant.const import ( UnitOfPower, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from . import SolarlogConfigEntry, SolarlogData -from .const import DOMAIN +from . import SolarlogConfigEntry +from .entity import SolarLogCoordinatorEntity, SolarLogInverterEntity @dataclass(frozen=True) class SolarLogSensorEntityDescription(SensorEntityDescription): """Describes Solarlog sensor entity.""" - value: Callable[[float | int], float] | Callable[[datetime], datetime] | None = None + value_fn: Callable[[float | int], float] | Callable[[datetime], datetime] = ( + lambda value: value + ) -SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( +SOLARLOG_SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( SolarLogSensorEntityDescription( key="last_updated", translation_key="last_update", @@ -71,28 +74,28 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( translation_key="yield_day", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="yield_yesterday", translation_key="yield_yesterday", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="yield_month", translation_key="yield_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="yield_year", translation_key="yield_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="yield_total", @@ -100,7 +103,7 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="consumption_ac", @@ -114,28 +117,28 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] 
= ( translation_key="consumption_day", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="consumption_yesterday", translation_key="consumption_yesterday", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="consumption_month", translation_key="consumption_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="consumption_year", translation_key="consumption_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="consumption_total", @@ -143,7 +146,7 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value=lambda value: round(value / 1000, 3), + value_fn=lambda value: round(value / 1000, 3), ), SolarLogSensorEntityDescription( key="self_consumption_year", @@ -171,7 +174,7 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value=lambda value: round(value * 100, 1), + value_fn=lambda value: round(value * 100, 1), ), SolarLogSensorEntityDescription( key="efficiency", @@ -179,7 +182,7 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value=lambda value: round(value * 100, 1), + value_fn=lambda value: round(value * 100, 1), ), SolarLogSensorEntityDescription( key="power_available", @@ -194,7 +197,24 @@ SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value=lambda value: round(value * 100, 1), + value_fn=lambda value: round(value * 100, 1), + ), +) + +INVERTER_SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] 
= ( + SolarLogSensorEntityDescription( + key="current_power", + translation_key="current_power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + SolarLogSensorEntityDescription( + key="consumption_year", + translation_key="consumption_year", + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + device_class=SensorDeviceClass.ENERGY, + value_fn=lambda value: round(value / 1000, 3), ), ) @@ -206,39 +226,50 @@ async def async_setup_entry( ) -> None: """Add solarlog entry.""" coordinator = entry.runtime_data - async_add_entities( - SolarlogSensor(coordinator, description) for description in SENSOR_TYPES - ) + + # https://github.com/python/mypy/issues/14294 + + entities: list[SensorEntity] = [ + SolarLogCoordinatorSensor(coordinator, sensor) + for sensor in SOLARLOG_SENSOR_TYPES + ] + + device_data: dict[str, Any] = coordinator.data["devices"] + + if not device_data: + entities.extend( + SolarLogInverterSensor(coordinator, sensor, int(device_id)) + for device_id in device_data + for sensor in INVERTER_SENSOR_TYPES + if sensor.key in device_data[device_id] + ) + + async_add_entities(entities) -class SolarlogSensor(CoordinatorEntity[SolarlogData], SensorEntity): - """Representation of a Sensor.""" - - _attr_has_entity_name = True +class SolarLogCoordinatorSensor(SolarLogCoordinatorEntity, SensorEntity): + """Represents a SolarLog sensor.""" entity_description: SolarLogSensorEntityDescription - def __init__( - self, - coordinator: SolarlogData, - description: SolarLogSensorEntityDescription, - ) -> None: - """Initialize the sensor.""" - super().__init__(coordinator) - self.entity_description = description - self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, coordinator.unique_id)}, - manufacturer="Solar-Log", - name=coordinator.name, - configuration_url=coordinator.host, - ) + @property + def native_value(self) -> float | datetime: + """Return the state for this sensor.""" + + val = self.coordinator.data[self.entity_description.key] + return self.entity_description.value_fn(val) + + +class SolarLogInverterSensor(SolarLogInverterEntity, SensorEntity): + """Represents a SolarLog inverter sensor.""" + + entity_description: SolarLogSensorEntityDescription @property - def native_value(self): - """Return the native sensor value.""" - raw_attr = self.coordinator.data.get(self.entity_description.key) + def native_value(self) -> float | datetime: + """Return the state for this sensor.""" - if self.entity_description.value: - return self.entity_description.value(raw_attr) - return raw_attr + val = self.coordinator.data["devices"][self.device_id][ + self.entity_description.key + ] + return self.entity_description.value_fn(val) diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index c34d0c011a3..44c0e27f9b0 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -1,6 +1,7 @@ """Test helpers.""" from collections.abc import Generator +from datetime import UTC, datetime from unittest.mock import AsyncMock, patch import pytest @@ -35,9 +36,27 @@ def mock_solarlog_connector(): mock_solarlog_api = AsyncMock() mock_solarlog_api.test_connection = AsyncMock(return_value=True) - mock_solarlog_api.update_data.return_value = load_json_object_fixture( - "solarlog_data.json", SOLARLOG_DOMAIN - ) + + data = { + "devices": { + 0: {"consumption_total": 354687, 
"current_power": 5}, + } + } + data |= load_json_object_fixture("solarlog_data.json", SOLARLOG_DOMAIN) + data["last_updated"] = datetime.fromisoformat(data["last_updated"]).astimezone(UTC) + + mock_solarlog_api.update_data.return_value = data + mock_solarlog_api.device_list.return_value = { + 0: {"name": "Inverter 1"}, + 1: {"name": "Inverter 2"}, + } + mock_solarlog_api.device_name = {0: "Inverter 1", 1: "Inverter 2"}.get + mock_solarlog_api.client.get_device_list.return_value = { + 0: {"name": "Inverter 1"}, + 1: {"name": "Inverter 2"}, + } + mock_solarlog_api.client.close = AsyncMock(return_value=None) + with ( patch( "homeassistant.components.solarlog.coordinator.SolarLogConnector", diff --git a/tests/components/solarlog/fixtures/solarlog_data.json b/tests/components/solarlog/fixtures/solarlog_data.json index 4976f4fa8b7..f7077d88d0d 100644 --- a/tests/components/solarlog/fixtures/solarlog_data.json +++ b/tests/components/solarlog/fixtures/solarlog_data.json @@ -20,5 +20,6 @@ "efficiency": 0.9804, "usage": 0.5487, "power_available": 45.13, - "capacity": 0.85 + "capacity": 0.85, + "last_updated": "2024-08-01T15:20:45" } diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index df154a5eb9b..74a397be900 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -1,4 +1,55 @@ # serializer version: 1 +# name: test_all_entities[sensor.inverter_1_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_1-current_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_1_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 1 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- # name: test_all_entities[sensor.solarlog_alternator_loss-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -30,7 +81,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'alternator_loss', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_alternator_loss', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-alternator_loss', 'unit_of_measurement': , }) # --- @@ -81,7 +132,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'capacity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_capacity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-capacity', 'unit_of_measurement': '%', }) # --- @@ -132,7 +183,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_ac', 
'unit_of_measurement': , }) # --- @@ -181,7 +232,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_day', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_day', 'unit_of_measurement': , }) # --- @@ -229,7 +280,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_month', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_month', 'unit_of_measurement': , }) # --- @@ -279,7 +330,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_total', 'unit_of_measurement': , }) # --- @@ -328,7 +379,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_year', 'unit_of_measurement': , }) # --- @@ -376,7 +427,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'consumption_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_yesterday', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-consumption_yesterday', 'unit_of_measurement': , }) # --- @@ -426,7 +477,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'efficiency', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_efficiency', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-efficiency', 'unit_of_measurement': '%', }) # --- @@ -475,7 +526,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'total_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_total_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-total_power', 'unit_of_measurement': , }) # --- @@ -523,7 +574,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'last_update', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_last_updated', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-last_updated', 'unit_of_measurement': None, }) # --- @@ -538,7 +589,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unknown', + 'state': '2024-08-01T15:20:45+00:00', }) # --- # name: test_all_entities[sensor.solarlog_power_ac-entry] @@ -572,7 +623,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-power_ac', 'unit_of_measurement': , }) # --- @@ -623,7 +674,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_available', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_available', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-power_available', 'unit_of_measurement': , }) # --- @@ -674,7 +725,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_dc', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-power_dc', 'unit_of_measurement': , }) # --- @@ -725,7 +776,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'self_consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_self_consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-self_consumption_year', 'unit_of_measurement': 
, }) # --- @@ -776,7 +827,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'usage', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_usage', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-usage', 'unit_of_measurement': '%', }) # --- @@ -827,7 +878,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_ac', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-voltage_ac', 'unit_of_measurement': , }) # --- @@ -878,7 +929,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'voltage_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_dc', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-voltage_dc', 'unit_of_measurement': , }) # --- @@ -927,7 +978,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_day', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_day', 'unit_of_measurement': , }) # --- @@ -975,7 +1026,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_month', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_month', 'unit_of_measurement': , }) # --- @@ -1025,7 +1076,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_total', 'unit_of_measurement': , }) # --- @@ -1074,7 +1125,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_year', 'unit_of_measurement': , }) # --- @@ -1122,7 +1173,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'yield_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_yesterday', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-yield_yesterday', 'unit_of_measurement': , }) # --- diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index b06f2ac0587..b2b2ff9566e 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -67,7 +67,7 @@ async def test_user( # tests with all provided result = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: HOST, CONF_NAME: NAME, "extended_data": False} + result["flow_id"], {CONF_HOST: HOST, CONF_NAME: NAME, "extended_data": True} ) await hass.async_block_till_done() From 50577883dcce85ff2ac136ab4a542baefe8f2b27 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Fri, 30 Aug 2024 17:08:06 +0200 Subject: [PATCH 1264/1635] Add option to login with username/email and password in Habitica integration (#117622) * add login/password authentication * add advanced config flow * remove unused exception classes, fix errors * update username in init * update tests * update strings * combine steps with menu * remove username from entry * update tests * Revert "update tests" This reverts commit 6ac8ad6a26547b623e217db817ec4d0cf8c91f1d. * Revert "remove username from entry" This reverts commit d9323fb72df3f9d41be0a53bb0cbe16be718d005. 
* small changes * remove pylint broad-excep * run habitipy init in executor * Add text selectors * changes --- homeassistant/components/habitica/__init__.py | 16 +- .../components/habitica/config_flow.py | 177 ++++++++++++---- .../components/habitica/strings.json | 24 ++- tests/components/habitica/test_config_flow.py | 197 ++++++++++++++---- tests/components/habitica/test_init.py | 1 + 5 files changed, 318 insertions(+), 97 deletions(-) diff --git a/homeassistant/components/habitica/__init__.py b/homeassistant/components/habitica/__init__.py index 468db8fbc42..bcf8713f9b1 100644 --- a/homeassistant/components/habitica/__init__.py +++ b/homeassistant/components/habitica/__init__.py @@ -15,6 +15,7 @@ from homeassistant.const import ( CONF_NAME, CONF_SENSORS, CONF_URL, + CONF_VERIFY_SSL, Platform, ) from homeassistant.core import HomeAssistant, ServiceCall @@ -125,6 +126,7 @@ async def async_setup_entry( name = call.data[ATTR_NAME] path = call.data[ATTR_PATH] entries = hass.config_entries.async_entries(DOMAIN) + api = None for entry in entries: if entry.data[CONF_NAME] == name: @@ -147,18 +149,16 @@ async def async_setup_entry( EVENT_API_CALL_SUCCESS, {ATTR_NAME: name, ATTR_PATH: path, ATTR_DATA: data} ) - websession = async_get_clientsession(hass) - - url = config_entry.data[CONF_URL] - username = config_entry.data[CONF_API_USER] - password = config_entry.data[CONF_API_KEY] + websession = async_get_clientsession( + hass, verify_ssl=config_entry.data.get(CONF_VERIFY_SSL, True) + ) api = await hass.async_add_executor_job( HAHabitipyAsync, { - "url": url, - "login": username, - "password": password, + "url": config_entry.data[CONF_URL], + "login": config_entry.data[CONF_API_USER], + "password": config_entry.data[CONF_API_KEY], }, ) try: diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index a40261c0902..2947032c41e 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from http import HTTPStatus import logging from typing import Any @@ -10,48 +11,53 @@ from habitipy.aio import HabitipyAsync import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_URL -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.const import ( + CONF_API_KEY, + CONF_PASSWORD, + CONF_URL, + CONF_USERNAME, + CONF_VERIFY_SSL, +) +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) from .const import CONF_API_USER, DEFAULT_URL, DOMAIN -DATA_SCHEMA = vol.Schema( +STEP_ADVANCED_DATA_SCHEMA = vol.Schema( { vol.Required(CONF_API_USER): str, vol.Required(CONF_API_KEY): str, - vol.Optional(CONF_NAME): str, vol.Optional(CONF_URL, default=DEFAULT_URL): str, + vol.Required(CONF_VERIFY_SSL, default=True): bool, + } +) + +STEP_LOGIN_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): TextSelector( + TextSelectorConfig( + type=TextSelectorType.EMAIL, + autocomplete="email", + ) + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig( + type=TextSelectorType.PASSWORD, + 
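# Note on the selector options in this login schema: TextSelectorType.PASSWORD
# renders a masked input in the frontend, and the "email"/"current-password"
# autocomplete tokens are standard HTML hints intended to let browsers and
# password managers autofill the stored Habitica credentials.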
autocomplete="current-password", + ) + ), } ) _LOGGER = logging.getLogger(__name__) -async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, str]: - """Validate the user input allows us to connect.""" - - websession = async_get_clientsession(hass) - api = await hass.async_add_executor_job( - HabitipyAsync, - { - "login": data[CONF_API_USER], - "password": data[CONF_API_KEY], - "url": data[CONF_URL] or DEFAULT_URL, - }, - ) - try: - await api.user.get(session=websession) - return { - "title": f"{data.get('name', 'Default username')}", - CONF_API_USER: data[CONF_API_USER], - } - except ClientResponseError as ex: - raise InvalidAuth from ex - - class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for habitica.""" @@ -62,24 +68,115 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + return self.async_show_menu( + step_id="user", + menu_options=["login", "advanced"], + ) + + async def async_step_login( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Config flow with username/password. + + Simplified configuration setup that retrieves API credentials + from Habitica.com by authenticating with login and password. + """ + errors: dict[str, str] = {} if user_input is not None: try: - info = await validate_input(self.hass, user_input) - except InvalidAuth: - errors = {"base": "invalid_credentials"} + session = async_get_clientsession(self.hass) + api = await self.hass.async_add_executor_job( + HabitipyAsync, + { + "login": "", + "password": "", + "url": DEFAULT_URL, + }, + ) + login_response = await api.user.auth.local.login.post( + session=session, + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + + except ClientResponseError as ex: + if ex.status == HTTPStatus.UNAUTHORIZED: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" except Exception: _LOGGER.exception("Unexpected exception") - errors = {"base": "unknown"} + errors["base"] = "unknown" else: - await self.async_set_unique_id(info[CONF_API_USER]) + await self.async_set_unique_id(login_response["id"]) self._abort_if_unique_id_configured() - return self.async_create_entry(title=info["title"], data=user_input) + return self.async_create_entry( + title=login_response["username"], + data={ + CONF_API_USER: login_response["id"], + CONF_API_KEY: login_response["apiToken"], + CONF_USERNAME: login_response["username"], + CONF_URL: DEFAULT_URL, + CONF_VERIFY_SSL: True, + }, + ) + return self.async_show_form( - step_id="user", - data_schema=DATA_SCHEMA, + step_id="login", + data_schema=self.add_suggested_values_to_schema( + data_schema=STEP_LOGIN_DATA_SCHEMA, suggested_values=user_input + ), + errors=errors, + ) + + async def async_step_advanced( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Advanced configuration with User Id and API Token. + + Advanced configuration allows connecting to Habitica instances + hosted on different domains or to self-hosted instances. 
+ """ + errors: dict[str, str] = {} + if user_input is not None: + try: + session = async_get_clientsession( + self.hass, verify_ssl=user_input.get(CONF_VERIFY_SSL, True) + ) + api = await self.hass.async_add_executor_job( + HabitipyAsync, + { + "login": user_input[CONF_API_USER], + "password": user_input[CONF_API_KEY], + "url": user_input.get(CONF_URL, DEFAULT_URL), + }, + ) + api_response = await api.user.get( + session=session, + userFields="auth", + ) + except ClientResponseError as ex: + if ex.status == HTTPStatus.UNAUTHORIZED: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + await self.async_set_unique_id(user_input[CONF_API_USER]) + self._abort_if_unique_id_configured() + user_input[CONF_USERNAME] = api_response["auth"]["local"]["username"] + return self.async_create_entry( + title=user_input[CONF_USERNAME], data=user_input + ) + + return self.async_show_form( + step_id="advanced", + data_schema=self.add_suggested_values_to_schema( + data_schema=STEP_ADVANCED_DATA_SCHEMA, suggested_values=user_input + ), errors=errors, - description_placeholders={}, ) async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: @@ -98,8 +195,4 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): "integration_title": "Habitica", }, ) - return await self.async_step_user(import_data) - - -class InvalidAuth(HomeAssistantError): - """Error to indicate there is invalid auth.""" + return await self.async_step_advanced(import_data) diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 21d2622245c..c5a54d254cc 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -4,18 +4,32 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" }, "error": { - "invalid_credentials": "[%key:common::config_flow::error::invalid_auth%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", "unknown": "[%key:common::config_flow::error::unknown%]" }, "step": { "user": { + "menu_options": { + "login": "Login to Habitica", + "advanced": "Login to other instances" + }, + "description": "Connect your Habitica profile to allow monitoring of your user's profile and tasks." + }, + "login": { + "data": { + "username": "Email or username (case-sensitive)", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "advanced": { "data": { "url": "[%key:common::config_flow::data::url%]", - "name": "Override for Habitica’s username. Will be used for actions", - "api_user": "Habitica’s API user ID", - "api_key": "[%key:common::config_flow::data::api_key%]" + "api_user": "User ID", + "api_key": "API Token", + "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]" }, - "description": "Connect your Habitica profile to allow monitoring of your user's profile and tasks. 
Note that api_id and api_key must be gotten from https://habitica.com/user/settings/api" + "description": "You can retrieve your `User ID` and `API Token` from **Settings -> Site Data** on Habitica or the instance you want to connect to" } } }, diff --git a/tests/components/habitica/test_config_flow.py b/tests/components/habitica/test_config_flow.py index 4dfc696daf2..09cda3fbb0a 100644 --- a/tests/components/habitica/test_config_flow.py +++ b/tests/components/habitica/test_config_flow.py @@ -3,26 +3,152 @@ from unittest.mock import AsyncMock, MagicMock, patch from aiohttp import ClientResponseError +import pytest from homeassistant import config_entries -from homeassistant.components.habitica.const import DEFAULT_URL, DOMAIN +from homeassistant.components.habitica.const import CONF_API_USER, DEFAULT_URL, DOMAIN +from homeassistant.const import ( + CONF_API_KEY, + CONF_PASSWORD, + CONF_URL, + CONF_USERNAME, + CONF_VERIFY_SSL, +) from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +MOCK_DATA_LOGIN_STEP = { + CONF_USERNAME: "test-email@example.com", + CONF_PASSWORD: "test-password", +} +MOCK_DATA_ADVANCED_STEP = { + CONF_API_USER: "test-api-user", + CONF_API_KEY: "test-api-key", + CONF_URL: DEFAULT_URL, + CONF_VERIFY_SSL: True, +} -async def test_form(hass: HomeAssistant) -> None: + +async def test_form_login(hass: HomeAssistant) -> None: + """Test we get the login form.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.MENU + assert "login" in result["menu_options"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "login"} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "login" + + mock_obj = MagicMock() + mock_obj.user.auth.local.login.post = AsyncMock() + mock_obj.user.auth.local.login.post.return_value = { + "id": "test-api-user", + "apiToken": "test-api-key", + "username": "test-username", + } + with ( + patch( + "homeassistant.components.habitica.config_flow.HabitipyAsync", + return_value=mock_obj, + ), + patch( + "homeassistant.components.habitica.async_setup", return_value=True + ) as mock_setup, + patch( + "homeassistant.components.habitica.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LOGIN_STEP, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == { + **MOCK_DATA_ADVANCED_STEP, + CONF_USERNAME: "test-username", + } + assert len(mock_setup.mock_calls) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (ClientResponseError(MagicMock(), (), status=400), "cannot_connect"), + (ClientResponseError(MagicMock(), (), status=401), "invalid_auth"), + (IndexError(), "unknown"), + ], +) +async def test_form_login_errors(hass: HomeAssistant, raise_error, text_error) -> None: + """Test we handle invalid credentials error.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": "login"} + ) + + mock_obj = MagicMock() + mock_obj.user.auth.local.login.post = AsyncMock(side_effect=raise_error) + with patch( + "homeassistant.components.habitica.config_flow.HabitipyAsync", + return_value=mock_obj, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input=MOCK_DATA_LOGIN_STEP, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": text_error} + + +async def test_form_advanced(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + + assert result["type"] is FlowResultType.MENU + assert "advanced" in result["menu_options"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "advanced"} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "advanced" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "advanced"} + ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} mock_obj = MagicMock() mock_obj.user.get = AsyncMock() + mock_obj.user.get.return_value = {"auth": {"local": {"username": "test-username"}}} with ( patch( @@ -39,29 +165,46 @@ async def test_form(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - {"api_user": "test-api-user", "api_key": "test-api-key"}, + user_input=MOCK_DATA_ADVANCED_STEP, ) await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == "Default username" + assert result2["title"] == "test-username" assert result2["data"] == { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", + **MOCK_DATA_ADVANCED_STEP, + CONF_USERNAME: "test-username", } assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_invalid_credentials(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("raise_error", "text_error"), + [ + (ClientResponseError(MagicMock(), (), status=400), "cannot_connect"), + (ClientResponseError(MagicMock(), (), status=401), "invalid_auth"), + (IndexError(), "unknown"), + ], +) +async def test_form_advanced_errors( + hass: HomeAssistant, raise_error, text_error +) -> None: """Test we handle invalid credentials error.""" + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": "advanced"} + ) + mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(side_effect=ClientResponseError(MagicMock(), ())) + mock_obj.user.get = AsyncMock(side_effect=raise_error) with patch( "homeassistant.components.habitica.config_flow.HabitipyAsync", @@ -69,41 +212,11 @@ async def test_form_invalid_credentials(hass: HomeAssistant) -> None: ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], - { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", - }, + user_input=MOCK_DATA_ADVANCED_STEP, ) assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "invalid_credentials"} - - -async def test_form_unexpected_exception(hass: 
HomeAssistant) -> None: - """Test we handle unexpected exception error.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - mock_obj = MagicMock() - mock_obj.user.get = AsyncMock(side_effect=Exception) - - with patch( - "homeassistant.components.habitica.config_flow.HabitipyAsync", - return_value=mock_obj, - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - "url": DEFAULT_URL, - "api_user": "test-api-user", - "api_key": "test-api-key", - }, - ) - - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "unknown"} + assert result2["errors"] == {"base": text_error} async def test_manual_flow_config_exist(hass: HomeAssistant) -> None: @@ -119,7 +232,7 @@ async def test_manual_flow_config_exist(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" + assert result["step_id"] == "advanced" mock_obj = MagicMock() mock_obj.user.get = AsyncMock(return_value={"api_user": "test-api-user"}) diff --git a/tests/components/habitica/test_init.py b/tests/components/habitica/test_init.py index 4c2b1e2aae6..56f17bc9889 100644 --- a/tests/components/habitica/test_init.py +++ b/tests/components/habitica/test_init.py @@ -52,6 +52,7 @@ def common_requests(aioclient_mock: AiohttpClientMocker) -> AiohttpClientMocker: "https://habitica.com/api/v3/user", json={ "data": { + "auth": {"local": {"username": TEST_USER_NAME}}, "api_user": "test-api-user", "profile": {"name": TEST_USER_NAME}, "stats": { From 28c24e5fefc5fcc4252204c881c293c7ac968b37 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Fri, 30 Aug 2024 11:08:58 -0400 Subject: [PATCH 1265/1635] Bump `nice-go` to 0.3.8 (#124872) * Bump nice-go to 0.3.6 * Bump to 0.3.7 * Bump to 0.3.8 --- homeassistant/components/nice_go/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index 45dd3c8b5b4..884f2eb7b18 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/nice_go", "iot_class": "cloud_push", "loggers": ["nice-go"], - "requirements": ["nice-go==0.3.5"] + "requirements": ["nice-go==0.3.8"] } diff --git a/requirements_all.txt b/requirements_all.txt index 48dbabd47d3..9c873e247a5 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1433,7 +1433,7 @@ nextdns==3.2.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.5 +nice-go==0.3.8 # homeassistant.components.niko_home_control niko-home-control==0.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4c70d7bc4c4..cf3d84208a9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1190,7 +1190,7 @@ nextdns==3.2.0 nibe==2.11.0 # homeassistant.components.nice_go -nice-go==0.3.5 +nice-go==0.3.8 # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 From cb742a677c41c257189d1ea75b370560dabd882e Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 30 Aug 2024 08:31:24 -0700 Subject: [PATCH 1266/1635] Add Google Photos reauth support (#124933) * Add Google Photos reauth support * Update tests/components/google_photos/test_config_flow.py Co-authored-by: Joost Lekkerkerker --------- 
Co-authored-by: Joost Lekkerkerker --- .../components/google_photos/__init__.py | 10 +- .../components/google_photos/config_flow.py | 30 ++- tests/components/google_photos/conftest.py | 31 ++- .../google_photos/test_config_flow.py | 192 ++++++++++++++---- tests/components/google_photos/test_init.py | 4 +- .../google_photos/test_media_source.py | 30 +-- 6 files changed, 230 insertions(+), 67 deletions(-) diff --git a/homeassistant/components/google_photos/__init__.py b/homeassistant/components/google_photos/__init__.py index ab1ee4a63a4..643ad0b41ad 100644 --- a/homeassistant/components/google_photos/__init__.py +++ b/homeassistant/components/google_photos/__init__.py @@ -6,7 +6,7 @@ from aiohttp import ClientError, ClientResponseError from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import config_entry_oauth2_flow from . import api @@ -32,7 +32,13 @@ async def async_setup_entry( auth = api.AsyncConfigEntryAuth(hass, session) try: await auth.async_get_access_token() - except (ClientResponseError, ClientError) as err: + except ClientResponseError as err: + if 400 <= err.status < 500: + raise ConfigEntryAuthFailed( + "OAuth session is not valid, reauth required" + ) from err + raise ConfigEntryNotReady from err + except ClientError as err: raise ConfigEntryNotReady from err entry.runtime_data = auth return True diff --git a/homeassistant/components/google_photos/config_flow.py b/homeassistant/components/google_photos/config_flow.py index 9bc4b35b6b4..93f0347e32f 100644 --- a/homeassistant/components/google_photos/config_flow.py +++ b/homeassistant/components/google_photos/config_flow.py @@ -1,5 +1,6 @@ """Config flow for Google Photos.""" +from collections.abc import Mapping import logging from typing import Any @@ -7,7 +8,7 @@ from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN from homeassistant.helpers import config_entry_oauth2_flow -from . import api +from . 
import GooglePhotosConfigEntry, api from .const import DOMAIN, OAUTH2_SCOPES from .exceptions import GooglePhotosApiError @@ -19,6 +20,8 @@ class OAuth2FlowHandler( DOMAIN = DOMAIN + reauth_entry: GooglePhotosConfigEntry | None = None + @property def logger(self) -> logging.Logger: """Return logger.""" @@ -49,6 +52,31 @@ class OAuth2FlowHandler( self.logger.exception("Unknown error occurred") return self.async_abort(reason="unknown") user_id = user_resource_info["id"] + + if self.reauth_entry: + if self.reauth_entry.unique_id == user_id: + return self.async_update_reload_and_abort( + self.reauth_entry, unique_id=user_id, data=data + ) + return self.async_abort(reason="wrong_account") + await self.async_set_unique_id(user_id) self._abort_if_unique_id_configured() return self.async_create_entry(title=user_resource_info["name"], data=data) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: Mapping[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reauth dialog.""" + if user_input is None: + return self.async_show_form(step_id="reauth_confirm") + return await self.async_step_user() diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py index 874e55f0d33..84ed717895d 100644 --- a/tests/components/google_photos/conftest.py +++ b/tests/components/google_photos/conftest.py @@ -18,16 +18,18 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_json_array_fixture USER_IDENTIFIER = "user-identifier-1" +CONFIG_ENTRY_ID = "user-identifier-1" CLIENT_ID = "1234" CLIENT_SECRET = "5678" FAKE_ACCESS_TOKEN = "some-access-token" FAKE_REFRESH_TOKEN = "some-refresh-token" +EXPIRES_IN = 3600 @pytest.fixture(name="expires_at") def mock_expires_at() -> int: """Fixture to set the oauth token expiration time.""" - return time.time() + 3600 + return time.time() + EXPIRES_IN @pytest.fixture(name="token_entry") @@ -37,17 +39,26 @@ def mock_token_entry(expires_at: int) -> dict[str, Any]: "access_token": FAKE_ACCESS_TOKEN, "refresh_token": FAKE_REFRESH_TOKEN, "scope": " ".join(OAUTH2_SCOPES), - "token_type": "Bearer", + "type": "Bearer", "expires_at": expires_at, + "expires_in": EXPIRES_IN, } +@pytest.fixture(name="config_entry_id") +def mock_config_entry_id() -> str | None: + """Provide a json fixture file to load for list media item api responses.""" + return CONFIG_ENTRY_ID + + @pytest.fixture(name="config_entry") -def mock_config_entry(token_entry: dict[str, Any]) -> MockConfigEntry: +def mock_config_entry( + config_entry_id: str, token_entry: dict[str, Any] +) -> MockConfigEntry: """Fixture for a config entry.""" return MockConfigEntry( domain=DOMAIN, - unique_id="config-entry-id-123", + unique_id=config_entry_id, data={ "auth_implementation": DOMAIN, "token": token_entry, @@ -73,12 +84,20 @@ def mock_fixture_name() -> str | None: return None +@pytest.fixture(name="user_identifier") +def mock_user_identifier() -> str | None: + """Provide a json fixture file to load for list media item api responses.""" + return USER_IDENTIFIER + + @pytest.fixture(name="setup_api") -def mock_setup_api(fixture_name: str) -> Generator[Mock, None, None]: +def mock_setup_api( + fixture_name: str, user_identifier: str +) -> 
Generator[Mock, None, None]: """Set up fake Google Photos API responses from fixtures.""" with patch("homeassistant.components.google_photos.api.build") as mock: mock.return_value.userinfo.return_value.get.return_value.execute.return_value = { - "id": USER_IDENTIFIER, + "id": user_identifier, "name": "Test Name", } diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py index e9f2a68f2f5..4bd933a7eb8 100644 --- a/tests/components/google_photos/test_config_flow.py +++ b/tests/components/google_photos/test_config_flow.py @@ -1,5 +1,7 @@ """Test the Google Photos config flow.""" +from collections.abc import Generator +from typing import Any from unittest.mock import Mock, patch from googleapiclient.errors import HttpError @@ -16,9 +18,9 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow -from .conftest import USER_IDENTIFIER +from .conftest import EXPIRES_IN, FAKE_ACCESS_TOKEN, FAKE_REFRESH_TOKEN, USER_IDENTIFIER -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -26,12 +28,44 @@ CLIENT_ID = "1234" CLIENT_SECRET = "5678" +@pytest.fixture(name="mock_setup") +def mock_setup_entry() -> Generator[Mock, None, None]: + """Fixture to mock out integration setup.""" + with patch( + "homeassistant.components.google_photos.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup + + +@pytest.fixture(name="updated_token_entry", autouse=True) +def mock_updated_token_entry() -> dict[str, Any]: + """Fixture to provide any test specific overrides to token data from the oauth token endpoint.""" + return {} + + +@pytest.fixture(name="mock_oauth_token_request", autouse=True) +def mock_token_request( + aioclient_mock: AiohttpClientMocker, + token_entry: dict[str, any], + updated_token_entry: dict[str, Any], +) -> None: + """Fixture to provide a fake response from the oauth token endpoint.""" + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + **token_entry, + **updated_token_entry, + }, + ) + + @pytest.mark.usefixtures("current_request_with_host", "setup_api") @pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, + mock_setup: Mock, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -59,20 +93,7 @@ async def test_full_flow( assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - }, - ) - - with patch( - "homeassistant.components.google_photos.async_setup_entry", return_value=True - ) as mock_setup: - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.CREATE_ENTRY config_entry = result["result"] assert config_entry.unique_id == USER_IDENTIFIER @@ -84,10 +105,14 @@ async def test_full_flow( assert config_entry_data == { "auth_implementation": DOMAIN, "token": { - "access_token": "mock-access-token", - "expires_in": 60, - 
"refresh_token": "mock-refresh-token", + "access_token": FAKE_ACCESS_TOKEN, + "expires_in": EXPIRES_IN, + "refresh_token": FAKE_REFRESH_TOKEN, "type": "Bearer", + "scope": ( + "https://www.googleapis.com/auth/photoslibrary.readonly" + " https://www.googleapis.com/auth/userinfo.profile" + ), }, } assert len(hass.config_entries.async_entries(DOMAIN)) == 1 @@ -101,7 +126,6 @@ async def test_full_flow( async def test_api_not_enabled( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, setup_api: Mock, ) -> None: """Check flow aborts if api is not enabled.""" @@ -130,16 +154,6 @@ async def test_api_not_enabled( assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - }, - ) - setup_api.return_value.mediaItems.return_value.list = Mock() setup_api.return_value.mediaItems.return_value.list.return_value.execute.side_effect = HttpError( Response({"status": "403"}), @@ -158,7 +172,6 @@ async def test_api_not_enabled( async def test_general_exception( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, - aioclient_mock: AiohttpClientMocker, ) -> None: """Check flow aborts if exception happens.""" result = await hass.config_entries.flow.async_init( @@ -185,16 +198,6 @@ async def test_general_exception( assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" - aioclient_mock.post( - OAUTH2_TOKEN, - json={ - "refresh_token": "mock-refresh-token", - "access_token": "mock-access-token", - "type": "Bearer", - "expires_in": 60, - }, - ) - with patch( "homeassistant.components.google_photos.api.build", side_effect=Exception, @@ -203,3 +206,108 @@ async def test_general_exception( assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" + + +@pytest.mark.usefixtures("current_request_with_host", "setup_api", "setup_integration") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +@pytest.mark.parametrize( + "updated_token_entry", + [ + { + "access_token": "updated-access-token", + } + ], +) +@pytest.mark.parametrize( + ( + "user_identifier", + "abort_reason", + "resulting_access_token", + "expected_setup_calls", + ), + [ + ( + USER_IDENTIFIER, + "reauth_successful", + "updated-access-token", + 1, + ), + ( + "345", + "wrong_account", + FAKE_ACCESS_TOKEN, + 0, + ), + ], +) +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + user_identifier: str, + abort_reason: str, + resulting_access_token: str, + mock_setup: Mock, + expected_setup_calls: int, +) -> None: + """Test the re-authentication case updates the correct config entry.""" + + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + 
"&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/userinfo.profile" + "&access_type=offline&prompt=consent" + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == abort_reason + + assert config_entry.unique_id == USER_IDENTIFIER + assert config_entry.title == "Account Name" + config_entry_data = dict(config_entry.data) + assert "token" in config_entry_data + assert "expires_at" in config_entry_data["token"] + del config_entry_data["token"]["expires_at"] + assert config_entry_data == { + "auth_implementation": DOMAIN, + "token": { + # Verify token is refreshed or not + "access_token": resulting_access_token, + "expires_in": EXPIRES_IN, + "refresh_token": FAKE_REFRESH_TOKEN, + "type": "Bearer", + "scope": ( + "https://www.googleapis.com/auth/photoslibrary.readonly" + " https://www.googleapis.com/auth/userinfo.profile" + ), + }, + } + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == expected_setup_calls diff --git a/tests/components/google_photos/test_init.py b/tests/components/google_photos/test_init.py index a2f835c8611..ea236cfc712 100644 --- a/tests/components/google_photos/test_init.py +++ b/tests/components/google_photos/test_init.py @@ -80,9 +80,9 @@ async def test_expired_token_refresh_success( [ ( time.time() - 3600, - http.HTTPStatus.NOT_FOUND, + http.HTTPStatus.UNAUTHORIZED, None, - ConfigEntryState.SETUP_RETRY, + ConfigEntryState.SETUP_ERROR, # Reauth ), ( time.time() - 3600, diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py index 31c84f4811c..b24b37c10e6 100644 --- a/tests/components/google_photos/test_media_source.py +++ b/tests/components/google_photos/test_media_source.py @@ -17,6 +17,8 @@ from homeassistant.components.media_source import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from .conftest import CONFIG_ENTRY_ID + from tests.common import MockConfigEntry @@ -52,8 +54,8 @@ async def test_no_config_entries( ( "list_mediaitems.json", [ - ("config-entry-id-123/p/id1", "example1.jpg"), - ("config-entry-id-123/p/id2", "example2.mp4"), + (f"{CONFIG_ENTRY_ID}/p/id1", "example1.jpg"), + (f"{CONFIG_ENTRY_ID}/p/id2", "example2.mp4"), ], [ ("http://img.example.com/id1=w2048", "image/jpeg"), @@ -73,22 +75,22 @@ async def test_recent_items( assert browse.identifier is None assert browse.title == "Google Photos" assert [(child.identifier, child.title) for child in browse.children] == [ - ("config-entry-id-123", "Account Name") + (CONFIG_ENTRY_ID, "Account Name") ] - browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123") + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}") assert browse.domain == DOMAIN - assert browse.identifier == "config-entry-id-123" + assert browse.identifier == CONFIG_ENTRY_ID assert browse.title == "Account Name" assert [(child.identifier, child.title) for child in browse.children] == [ - ("config-entry-id-123/a/recent", 
"Recent Photos") + (f"{CONFIG_ENTRY_ID}/a/recent", "Recent Photos") ] browse = await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/recent" + hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" ) assert browse.domain == DOMAIN - assert browse.identifier == "config-entry-id-123" + assert browse.identifier == CONFIG_ENTRY_ID assert browse.title == "Account Name" assert [ (child.identifier, child.title) for child in browse.children @@ -121,12 +123,12 @@ async def test_invalid_album_id(hass: HomeAssistant) -> None: assert browse.identifier is None assert browse.title == "Google Photos" assert [(child.identifier, child.title) for child in browse.children] == [ - ("config-entry-id-123", "Account Name") + (CONFIG_ENTRY_ID, "Account Name") ] with pytest.raises(BrowseError, match="Unsupported album"): await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/invalid-album-id" + hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/invalid-album-id" ) @@ -164,7 +166,7 @@ async def test_list_media_items_failure( assert browse.identifier is None assert browse.title == "Google Photos" assert [(child.identifier, child.title) for child in browse.children] == [ - ("config-entry-id-123", "Account Name") + (CONFIG_ENTRY_ID, "Account Name") ] setup_api.return_value.mediaItems.return_value.list = Mock() @@ -172,7 +174,7 @@ async def test_list_media_items_failure( with pytest.raises(BrowseError, match="Error listing media items"): await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/recent" + hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" ) @@ -191,9 +193,9 @@ async def test_media_items_error_parsing_response(hass: HomeAssistant) -> None: assert browse.identifier is None assert browse.title == "Google Photos" assert [(child.identifier, child.title) for child in browse.children] == [ - ("config-entry-id-123", "Account Name") + (CONFIG_ENTRY_ID, "Account Name") ] with pytest.raises(BrowseError, match="Error listing media items"): await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/config-entry-id-123/a/recent" + hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" ) From 910fb0930ebba96f9b606cf6db2c35bd5293a61d Mon Sep 17 00:00:00 2001 From: tronikos Date: Fri, 30 Aug 2024 08:34:27 -0700 Subject: [PATCH 1267/1635] Attempt to fix IndexError in Opower (#124478) * Change the order of async_add_external_statistics in Opower * Use consumption_statistic_id instead of cost_statistic_id --- homeassistant/components/opower/coordinator.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index 9cef4e4a252..3249cf1a375 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -110,7 +110,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): ) last_stat = await get_instance(self.hass).async_add_executor_job( - get_last_statistics, self.hass, 1, cost_statistic_id, True, set() + get_last_statistics, self.hass, 1, consumption_statistic_id, True, set() ) if not last_stat: _LOGGER.debug("Updating statistic for the first time") @@ -124,7 +124,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): cost_reads = await self._async_get_cost_reads( account, self.api.utility.timezone(), - last_stat[cost_statistic_id][0]["start"], + last_stat[consumption_statistic_id][0]["start"], ) if not cost_reads: _LOGGER.debug("No 
recent usage/cost data. Skipping update") @@ -141,7 +141,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): ) cost_sum = cast(float, stats[cost_statistic_id][0]["sum"]) consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"]) - last_stats_time = stats[cost_statistic_id][0]["start"] + last_stats_time = stats[consumption_statistic_id][0]["start"] cost_statistics = [] consumption_statistics = [] @@ -187,7 +187,17 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else UnitOfVolume.CENTUM_CUBIC_FEET, ) + _LOGGER.debug( + "Adding %s statistics for %s", + len(cost_statistics), + cost_statistic_id, + ) async_add_external_statistics(self.hass, cost_metadata, cost_statistics) + _LOGGER.debug( + "Adding %s statistics for %s", + len(consumption_statistics), + consumption_statistic_id, + ) async_add_external_statistics( self.hass, consumption_metadata, consumption_statistics ) From 7868ffac35d37960708a95b2b55ecee4a2945bc4 Mon Sep 17 00:00:00 2001 From: Louis Christ Date: Fri, 30 Aug 2024 20:21:27 +0200 Subject: [PATCH 1268/1635] Enable strict typing checking for bluesound integration (#123821) * Enable strict typing * Fix types * Update to pyblu 0.5.2 for typing support * Update pyblu to 1.0.0 * Update pyblu to 1.0.1 * Update error handling * Fix tests * Remove return None from methods only returning None --- .strict-typing | 1 + .../components/bluesound/__init__.py | 14 ++--- .../components/bluesound/config_flow.py | 12 ++-- .../components/bluesound/manifest.json | 2 +- .../components/bluesound/media_player.py | 59 ++++++++----------- mypy.ini | 10 ++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../components/bluesound/test_config_flow.py | 8 +-- 9 files changed, 56 insertions(+), 54 deletions(-) diff --git a/.strict-typing b/.strict-typing index d77c12293c4..9e91272c37d 100644 --- a/.strict-typing +++ b/.strict-typing @@ -110,6 +110,7 @@ homeassistant.components.bitcoin.* homeassistant.components.blockchain.* homeassistant.components.blue_current.* homeassistant.components.blueprint.* +homeassistant.components.bluesound.* homeassistant.components.bluetooth.* homeassistant.components.bluetooth_adapters.* homeassistant.components.bluetooth_tracker.* diff --git a/homeassistant/components/bluesound/__init__.py b/homeassistant/components/bluesound/__init__.py index cbe95fc3abf..da74ed042be 100644 --- a/homeassistant/components/bluesound/__init__.py +++ b/homeassistant/components/bluesound/__init__.py @@ -2,8 +2,8 @@ from dataclasses import dataclass -import aiohttp from pyblu import Player, SyncStatus +from pyblu.errors import PlayerUnreachableError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform @@ -22,14 +22,14 @@ PLATFORMS = [Platform.MEDIA_PLAYER] @dataclass -class BluesoundData: +class BluesoundRuntimeData: """Bluesound data class.""" player: Player sync_status: SyncStatus -type BluesoundConfigEntry = ConfigEntry[BluesoundData] +type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData] async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: @@ -51,14 +51,10 @@ async def async_setup_entry( async with Player(host, port, session=session, default_timeout=10) as player: try: sync_status = await player.sync_status(timeout=1) - except TimeoutError as ex: - raise ConfigEntryNotReady( - f"Timeout while connecting to {host}:{port}" - ) from ex - except aiohttp.ClientError as ex: + except PlayerUnreachableError as ex: raise 
ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex - config_entry.runtime_data = BluesoundData(player, sync_status) + config_entry.runtime_data = BluesoundRuntimeData(player, sync_status) await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) diff --git a/homeassistant/components/bluesound/config_flow.py b/homeassistant/components/bluesound/config_flow.py index aae527187d2..050b3ee4eac 100644 --- a/homeassistant/components/bluesound/config_flow.py +++ b/homeassistant/components/bluesound/config_flow.py @@ -3,8 +3,8 @@ import logging from typing import Any -import aiohttp from pyblu import Player, SyncStatus +from pyblu.errors import PlayerUnreachableError import voluptuous as vol from homeassistant.components import zeroconf @@ -43,7 +43,7 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): ) as player: try: sync_status = await player.sync_status(timeout=1) - except (TimeoutError, aiohttp.ClientError): + except PlayerUnreachableError: errors["base"] = "cannot_connect" else: await self.async_set_unique_id( @@ -79,7 +79,7 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): ) as player: try: sync_status = await player.sync_status(timeout=1) - except (TimeoutError, aiohttp.ClientError): + except PlayerUnreachableError: return self.async_abort(reason="cannot_connect") await self.async_set_unique_id( @@ -105,7 +105,7 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): discovery_info.host, self._port, session=session ) as player: sync_status = await player.sync_status(timeout=1) - except (TimeoutError, aiohttp.ClientError): + except PlayerUnreachableError: return self.async_abort(reason="cannot_connect") await self.async_set_unique_id(format_unique_id(sync_status.mac, self._port)) @@ -127,7 +127,9 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN): ) return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None) -> ConfigFlowResult: + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the zeroconf setup.""" assert self._sync_status is not None assert self._host is not None diff --git a/homeassistant/components/bluesound/manifest.json b/homeassistant/components/bluesound/manifest.json index 64b8e8abffc..13514f52893 100644 --- a/homeassistant/components/bluesound/manifest.json +++ b/homeassistant/components/bluesound/manifest.json @@ -6,7 +6,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/bluesound", "iot_class": "local_polling", - "requirements": ["pyblu==0.4.0"], + "requirements": ["pyblu==1.0.1"], "zeroconf": [ { "type": "_musc._tcp.local." diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 1ed53d7bfc5..cd1d9510eaa 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -9,8 +9,8 @@ from datetime import datetime, timedelta import logging from typing import TYPE_CHECKING, Any, NamedTuple -from aiohttp.client_exceptions import ClientError from pyblu import Input, Player, Preset, Status, SyncStatus +from pyblu.errors import PlayerUnreachableError import voluptuous as vol from homeassistant.components import media_source @@ -239,7 +239,7 @@ class BluesoundPlayer(MediaPlayerEntity): self.port = port self._polling_task: Task[None] | None = None # The actual polling task. 
self._id = sync_status.id - self._last_status_update = None + self._last_status_update: datetime | None = None self._sync_status = sync_status self._status: Status | None = None self._inputs: list[Input] = [] @@ -247,7 +247,7 @@ class BluesoundPlayer(MediaPlayerEntity): self._muted = False self._master: BluesoundPlayer | None = None self._is_master = False - self._group_name = None + self._group_name: str | None = None self._group_list: list[str] = [] self._bluesound_device_name = sync_status.name self._player = player @@ -273,14 +273,6 @@ class BluesoundPlayer(MediaPlayerEntity): via_device=(DOMAIN, format_mac(sync_status.mac)), ) - @staticmethod - def _try_get_index(string, search_string): - """Get the index.""" - try: - return string.index(search_string) - except ValueError: - return -1 - async def force_update_sync_status(self) -> bool: """Update the internal status.""" sync_status = await self._player.sync_status() @@ -309,12 +301,12 @@ class BluesoundPlayer(MediaPlayerEntity): return True - async def _poll_loop(self): + async def _poll_loop(self) -> None: """Loop which polls the status of the player.""" while True: try: await self.async_update_status() - except (TimeoutError, ClientError): + except PlayerUnreachableError: _LOGGER.error( "Node %s:%s is offline, retrying later", self.host, self.port ) @@ -324,9 +316,9 @@ class BluesoundPlayer(MediaPlayerEntity): "Stopping the polling of node %s:%s", self.host, self.port ) return - except Exception: + except: # noqa: E722 - this loop should never stop _LOGGER.exception( - "Unexpected error in %s:%s, retrying later", self.host, self.port + "Unexpected error for %s:%s, retrying later", self.host, self.port ) await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT) @@ -356,12 +348,12 @@ class BluesoundPlayer(MediaPlayerEntity): if not self.available: return - with suppress(TimeoutError): + with suppress(PlayerUnreachableError): await self.async_update_sync_status() await self.async_update_presets() await self.async_update_captures() - async def async_update_status(self): + async def async_update_status(self) -> None: """Use the poll session to always get the status of the player.""" etag = None if self._status is not None: @@ -394,11 +386,11 @@ class BluesoundPlayer(MediaPlayerEntity): # the device is playing. This would solve a lot of # problems. 
This change will be done when the # communication is moved to a separate library - with suppress(TimeoutError): + with suppress(PlayerUnreachableError): await self.force_update_sync_status() self.async_write_ha_state() - except (TimeoutError, ClientError): + except PlayerUnreachableError: self._attr_available = False self._last_status_update = None self._status = None @@ -409,7 +401,7 @@ class BluesoundPlayer(MediaPlayerEntity): ) raise - async def async_trigger_sync_on_all(self): + async def async_trigger_sync_on_all(self) -> None: """Trigger sync status update on all devices.""" _LOGGER.debug("Trigger sync status on all devices") @@ -417,7 +409,7 @@ class BluesoundPlayer(MediaPlayerEntity): await player.force_update_sync_status() @Throttle(SYNC_STATUS_INTERVAL) - async def async_update_sync_status(self): + async def async_update_sync_status(self) -> None: """Update sync status.""" await self.force_update_sync_status() @@ -506,8 +498,6 @@ class BluesoundPlayer(MediaPlayerEntity): return None position = self._status.seconds - if position is None: - return None if mediastate == MediaPlayerState.PLAYING: position += (dt_util.utcnow() - self._last_status_update).total_seconds() @@ -524,7 +514,7 @@ class BluesoundPlayer(MediaPlayerEntity): if duration is None: return None - return duration + return int(duration) @property def media_position_updated_at(self) -> datetime | None: @@ -660,7 +650,7 @@ class BluesoundPlayer(MediaPlayerEntity): return shuffle - async def async_join(self, master): + async def async_join(self, master: str) -> None: """Join the player to a group.""" master_device = [ device @@ -711,7 +701,7 @@ class BluesoundPlayer(MediaPlayerEntity): if entity.bluesound_device_name in device_group ] - async def async_unjoin(self): + async def async_unjoin(self) -> None: """Unjoin the player from a group.""" if self._master is None: return @@ -719,11 +709,11 @@ class BluesoundPlayer(MediaPlayerEntity): _LOGGER.debug("Trying to unjoin player: %s", self.id) await self._master.async_remove_slave(self) - async def async_add_slave(self, slave_device: BluesoundPlayer): + async def async_add_slave(self, slave_device: BluesoundPlayer) -> None: """Add slave to master.""" await self._player.add_slave(slave_device.host, slave_device.port) - async def async_remove_slave(self, slave_device: BluesoundPlayer): + async def async_remove_slave(self, slave_device: BluesoundPlayer) -> None: """Remove slave to master.""" await self._player.remove_slave(slave_device.host, slave_device.port) @@ -731,7 +721,7 @@ class BluesoundPlayer(MediaPlayerEntity): """Increase sleep time on player.""" return await self._player.sleep_timer() - async def async_clear_timer(self): + async def async_clear_timer(self) -> None: """Clear sleep timer on player.""" sleep = 1 while sleep > 0: @@ -755,6 +745,9 @@ class BluesoundPlayer(MediaPlayerEntity): if preset.name == source: url = preset.url + if url is None: + raise ServiceValidationError(f"Source {source} not found") + await self._player.play_url(url) async def async_clear_playlist(self) -> None: @@ -826,20 +819,20 @@ class BluesoundPlayer(MediaPlayerEntity): async def async_volume_up(self) -> None: """Volume up the media player.""" if self.volume_level is None: - return None + return new_volume = self.volume_level + 0.01 new_volume = min(1, new_volume) - return await self.async_set_volume_level(new_volume) + await self.async_set_volume_level(new_volume) async def async_volume_down(self) -> None: """Volume down the media player.""" if self.volume_level is None: - return None + 
return new_volume = self.volume_level - 0.01 new_volume = max(0, new_volume) - return await self.async_set_volume_level(new_volume) + await self.async_set_volume_level(new_volume) async def async_set_volume_level(self, volume: float) -> None: """Send volume_up command to media player.""" diff --git a/mypy.ini b/mypy.ini index 817060ac869..873cf1f66bd 100644 --- a/mypy.ini +++ b/mypy.ini @@ -855,6 +855,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.bluesound.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.bluetooth.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 9c873e247a5..570c16db626 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1763,7 +1763,7 @@ pybbox==0.0.5-alpha pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==0.4.0 +pyblu==1.0.1 # homeassistant.components.neato pybotvac==0.0.25 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cf3d84208a9..b1be638d4e9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1428,7 +1428,7 @@ pybalboa==1.0.2 pyblackbird==0.6 # homeassistant.components.bluesound -pyblu==0.4.0 +pyblu==1.0.1 # homeassistant.components.neato pybotvac==0.0.25 diff --git a/tests/components/bluesound/test_config_flow.py b/tests/components/bluesound/test_config_flow.py index 8fecba7017d..53cf40a8d46 100644 --- a/tests/components/bluesound/test_config_flow.py +++ b/tests/components/bluesound/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from aiohttp import ClientConnectionError +from pyblu.errors import PlayerUnreachableError from homeassistant.components.bluesound.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo @@ -49,7 +49,7 @@ async def test_user_flow_cannot_connect( context={"source": SOURCE_USER}, ) - mock_player.sync_status.side_effect = ClientConnectionError + mock_player.sync_status.side_effect = PlayerUnreachableError("Player not reachable") result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -129,7 +129,7 @@ async def test_import_flow_cannot_connect( hass: HomeAssistant, mock_player: AsyncMock ) -> None: """Test we handle cannot connect error.""" - mock_player.sync_status.side_effect = ClientConnectionError + mock_player.sync_status.side_effect = PlayerUnreachableError("Player not reachable") result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_IMPORT}, @@ -200,7 +200,7 @@ async def test_zeroconf_flow_cannot_connect( hass: HomeAssistant, mock_player: AsyncMock ) -> None: """Test we handle cannot connect error.""" - mock_player.sync_status.side_effect = ClientConnectionError + mock_player.sync_status.side_effect = PlayerUnreachableError("Player not reachable") result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_ZEROCONF}, From ed161d3d49ff3277bb6b6366c69f08e0d615ed50 Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Fri, 30 Aug 2024 19:43:28 +0100 Subject: [PATCH 1269/1635] Bump python-kasa to 0.7.2 (#124930) --- homeassistant/components/tplink/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 10b0ef61153..0d9761ec8ce 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -301,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.1"] + "requirements": ["python-kasa[speedups]==0.7.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 570c16db626..c63778c604e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2320,7 +2320,7 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.1 +python-kasa[speedups]==0.7.2 # homeassistant.components.linkplay python-linkplay==0.0.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b1be638d4e9..cd6db30def9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1838,7 +1838,7 @@ python-izone==1.2.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.1 +python-kasa[speedups]==0.7.2 # homeassistant.components.linkplay python-linkplay==0.0.8 From 29a17edaa532f0d4112d006c89dfb895a8abadaf Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Fri, 30 Aug 2024 19:56:30 +0100 Subject: [PATCH 1270/1635] Exclude tplink firmware entities (#124935) Co-authored-by: J. Nick Koston --- homeassistant/components/tplink/entity.py | 2 ++ tests/components/tplink/fixtures/features.json | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index 4ec0480cf82..beb71d4e5ce 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -68,6 +68,8 @@ EXCLUDED_FEATURES = { # update "current_firmware_version", "available_firmware_version", + "update_available", + "check_latest_firmware", } diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index 7cfe979ea25..6d4afd98d15 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -150,6 +150,11 @@ "type": "Sensor", "category": "Debug" }, + "check_latest_firmware": { + "value": "", + "type": "Action", + "category": "Info" + }, "thermostat_mode": { "value": "off", "type": "Sensor", From 460363c4ba2bbe613fafaadc0d8c5333fef9fa74 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 30 Aug 2024 09:05:16 -1000 Subject: [PATCH 1271/1635] Bump aioshelly to 11.4.1 to accomodate shelly GetStatus calls that take a few seconds to respond (#124893) Co-authored-by: Shay Levy --- homeassistant/components/shelly/coordinator.py | 3 +-- homeassistant/components/shelly/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/shelly/test_coordinator.py | 4 ++-- 5 files changed, 6 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 012f6b43dc7..c8e6cc03a06 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -793,8 +793,7 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): LOGGER.debug("Polling Shelly RPC Device - %s", self.name) try: - await self.device.update_status() - await self.device.get_dynamic_components() + await self.device.poll() except (DeviceConnectionError, RpcCallError) as err: raise UpdateFailed(f"Device disconnected: {err!r}") from err except InvalidAuthError: diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index a384255705c..f9fa2d571d1 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.3.0"], + "requirements": ["aioshelly==11.4.1"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index c63778c604e..2b37b515a3c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -359,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.3.0 +aioshelly==11.4.1 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cd6db30def9..3319870591e 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -341,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.3.0 +aioshelly==11.4.1 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index bb9694cf9b4..47c338e3fad 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -678,7 +678,7 @@ async def test_rpc_polling_auth_error( monkeypatch.setattr( mock_rpc_device, - "update_status", + "poll", AsyncMock( side_effect=InvalidAuthError, ), @@ -768,7 +768,7 @@ async def test_rpc_polling_connection_error( monkeypatch.setattr( mock_rpc_device, - "update_status", + "poll", AsyncMock( side_effect=DeviceConnectionError, ), From 8c2e63807cb867182d31d1755a4169b63488c2b0 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 30 Aug 2024 22:02:10 +0200 Subject: [PATCH 1272/1635] Make set_value required in number template (#124917) * Make set_value required in number template * Make set_value required in number template * Fix tests --- homeassistant/components/template/number.py | 9 ++++----- tests/components/template/test_config_flow.py | 20 +++++++++++++++++++ tests/components/template/test_init.py | 10 ++++++++++ tests/components/template/test_number.py | 10 ++++++++++ 4 files changed, 44 insertions(+), 5 deletions(-) diff --git 
a/homeassistant/components/template/number.py b/homeassistant/components/template/number.py index 955600a9b9e..499ddc192cc 100644 --- a/homeassistant/components/template/number.py +++ b/homeassistant/components/template/number.py @@ -70,7 +70,7 @@ NUMBER_CONFIG_SCHEMA = vol.Schema( vol.Required(CONF_NAME): cv.template, vol.Required(CONF_STATE): cv.template, vol.Required(CONF_STEP): cv.template, - vol.Optional(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, + vol.Required(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, vol.Optional(CONF_MIN): cv.template, vol.Optional(CONF_MAX): cv.template, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), @@ -154,11 +154,10 @@ class TemplateNumber(TemplateEntity, NumberEntity): super().__init__(hass, config=config, unique_id=unique_id) assert self._attr_name is not None self._value_template = config[CONF_STATE] - self._command_set_value = ( - Script(hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN) - if config.get(CONF_SET_VALUE, None) is not None - else None + self._command_set_value = Script( + hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN ) + self._step_template = config[CONF_STEP] self._min_value_template = config[CONF_MIN] self._max_value_template = config[CONF_MAX] diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index a62370f4261..f8ab190e664 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -101,11 +101,21 @@ from tests.typing import WebSocketGenerator "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, {}, ), @@ -444,11 +454,21 @@ def get_suggested(schema, key): "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, "state", ), diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index 06d59d4d176..3b4db4bf668 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -322,12 +322,22 @@ async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, { "state": "{{ 11 }}", "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, ), ( diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index c8befc2b8f8..fdca94d9fa4 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -61,6 +61,11 @@ async def test_setup_config_entry( "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": 
{ + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, }, title="My template", ) @@ -522,6 +527,11 @@ async def test_device_id( "min": "{{ 0 }}", "max": "{{ 100 }}", "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, "device_id": device_entry.id, }, title="My template", From 933ae143b3147f9bb1c91f7adacc7ddb15e4fb29 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 30 Aug 2024 10:32:09 -1000 Subject: [PATCH 1273/1635] Bump google-cloud-texttospeech to 2.17.2 (#124938) changelog: https://github.com/googleapis/google-cloud-python/compare/google-cloud-texttospeech-v2.16.3...google-cloud-texttospeech-v2.17.2 --- homeassistant/components/google_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/google_cloud/manifest.json b/homeassistant/components/google_cloud/manifest.json index b4fc3f39b86..052fa79eef4 100644 --- a/homeassistant/components/google_cloud/manifest.json +++ b/homeassistant/components/google_cloud/manifest.json @@ -4,5 +4,5 @@ "codeowners": ["@lufton"], "documentation": "https://www.home-assistant.io/integrations/google_cloud", "iot_class": "cloud_push", - "requirements": ["google-cloud-texttospeech==2.16.3"] + "requirements": ["google-cloud-texttospeech==2.17.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 2b37b515a3c..9fd769185ac 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -987,7 +987,7 @@ google-api-python-client==2.71.0 google-cloud-pubsub==2.13.11 # homeassistant.components.google_cloud -google-cloud-texttospeech==2.16.3 +google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation google-generativeai==0.6.0 From 66ddf44399005870a80bc4b7ede904706109a0b7 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 30 Aug 2024 10:32:23 -1000 Subject: [PATCH 1274/1635] Bump google-cloud-pubsub to 2.23.0 (#124937) changelog: https://github.com/googleapis/python-pubsub/compare/v2.13.11...v2.23.0 --- homeassistant/components/google_pubsub/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/google_pubsub/manifest.json b/homeassistant/components/google_pubsub/manifest.json index f22317404ab..aa13f1808c4 100644 --- a/homeassistant/components/google_pubsub/manifest.json +++ b/homeassistant/components/google_pubsub/manifest.json @@ -4,5 +4,5 @@ "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/google_pubsub", "iot_class": "cloud_push", - "requirements": ["google-cloud-pubsub==2.13.11"] + "requirements": ["google-cloud-pubsub==2.23.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9fd769185ac..7b9666742fe 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -984,7 +984,7 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.13.11 +google-cloud-pubsub==2.23.0 # homeassistant.components.google_cloud google-cloud-texttospeech==2.17.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 3319870591e..2dd8eb468a0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -834,7 +834,7 @@ goodwe==0.3.6 google-api-python-client==2.71.0 # homeassistant.components.google_pubsub -google-cloud-pubsub==2.13.11 +google-cloud-pubsub==2.23.0 # homeassistant.components.google_generative_ai_conversation google-generativeai==0.6.0 From 8cafa1bcdf9d98d8d840b114f33b0264e4e1426d Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 30 Aug 2024 10:33:26 -1000 Subject: [PATCH 1275/1635] Bump google-generativeai to 0.7.2 (#124940) changelog: https://github.com/google-gemini/generative-ai-python/compare/v0.6.0...v0.7.2 --- .../components/google_generative_ai_conversation/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/google_generative_ai_conversation/manifest.json b/homeassistant/components/google_generative_ai_conversation/manifest.json index 9e0dc1ddeab..a15da4906f8 100644 --- a/homeassistant/components/google_generative_ai_conversation/manifest.json +++ b/homeassistant/components/google_generative_ai_conversation/manifest.json @@ -9,5 +9,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["google-generativeai==0.6.0"] + "requirements": ["google-generativeai==0.7.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7b9666742fe..980ba8d94e2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -990,7 +990,7 @@ google-cloud-pubsub==2.23.0 google-cloud-texttospeech==2.17.2 # homeassistant.components.google_generative_ai_conversation -google-generativeai==0.6.0 +google-generativeai==0.7.2 # homeassistant.components.nest google-nest-sdm==5.0.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2dd8eb468a0..f766c8c13d3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -837,7 +837,7 @@ google-api-python-client==2.71.0 google-cloud-pubsub==2.23.0 # homeassistant.components.google_generative_ai_conversation -google-generativeai==0.6.0 +google-generativeai==0.7.2 # homeassistant.components.nest google-nest-sdm==5.0.0 From 0a9e20615ec9a468d398a0d3a52e0462d5c3ba98 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 30 Aug 2024 10:33:57 -1000 Subject: [PATCH 1276/1635] Limit maximum template render output to 256KiB (#124946) * Limit maximum template render output to 256KiB fixes #124931 256KiB is likely to still block the event loop for an unreasonable amont of time but its likely someone is using the template engine for large blocks of data so we want a limit which still allows that but has a reasonable safety to prevent the system from crashing down * Update homeassistant/helpers/template.py --- homeassistant/helpers/template.py | 6 ++++++ tests/helpers/test_template.py | 7 +++++++ 2 files changed, 13 insertions(+) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index e090e0de2d1..0a980db30b4 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -149,6 +149,7 @@ CACHED_TEMPLATE_STATES = 512 EVAL_CACHE_SIZE = 512 MAX_CUSTOM_TEMPLATE_SIZE = 5 * 1024 * 1024 +MAX_TEMPLATE_OUTPUT = 256 * 1024 # 256KiB CACHED_TEMPLATE_LRU: LRU[State, TemplateState] = LRU(CACHED_TEMPLATE_STATES) CACHED_TEMPLATE_NO_COLLECT_LRU: LRU[State, TemplateState] = LRU(CACHED_TEMPLATE_STATES) @@ -604,6 +605,11 @@ class Template: except Exception as err: raise TemplateError(err) from err + if len(render_result) > MAX_TEMPLATE_OUTPUT: + raise TemplateError( + f"Template output exceeded maximum size of {MAX_TEMPLATE_OUTPUT} characters" + ) + render_result = render_result.strip() if not parse_result or self.hass and self.hass.config.legacy_templates: diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 0676ae21ab7..f585b5c3260 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -6281,3 +6281,10 @@ def test_unzip(hass: HomeAssistant, col, expected) -> None: ).async_render({"col": col}) == expected ) + + +def test_template_output_exceeds_maximum_size(hass: HomeAssistant) -> None: + """Test template output exceeds maximum size.""" + tpl = template.Template("{{ 'a' * 1024 * 257 }}", hass) + with pytest.raises(TemplateError): + tpl.async_render() From ac39bf991faf28ee597a223836774481eda7cae7 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 30 Aug 2024 22:34:34 +0200 Subject: [PATCH 1277/1635] Rename lg_thinq domain name (#124926) --- CODEOWNERS | 4 ++-- homeassistant/brands/lg.json | 2 +- .../components/{lgthinq => lg_thinq}/__init__.py | 0 .../components/{lgthinq => lg_thinq}/config_flow.py | 0 .../components/{lgthinq => lg_thinq}/const.py | 2 +- .../components/{lgthinq => lg_thinq}/coordinator.py | 0 .../components/{lgthinq => lg_thinq}/entity.py | 0 .../components/{lgthinq => lg_thinq}/icons.json | 0 .../components/{lgthinq => lg_thinq}/manifest.json | 2 +- .../components/{lgthinq => lg_thinq}/strings.json | 0 .../components/{lgthinq => lg_thinq}/switch.py | 0 homeassistant/generated/config_flows.py | 2 +- homeassistant/generated/integrations.json | 12 ++++++------ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/{lgthinq => lg_thinq}/__init__.py | 0 tests/components/{lgthinq => lg_thinq}/conftest.py | 6 +++--- tests/components/{lgthinq => lg_thinq}/const.py | 0 .../{lgthinq => lg_thinq}/test_config_flow.py | 2 +- 19 files changed, 18 insertions(+), 18 deletions(-) rename homeassistant/components/{lgthinq => lg_thinq}/__init__.py (100%) rename homeassistant/components/{lgthinq => lg_thinq}/config_flow.py (100%) rename homeassistant/components/{lgthinq => lg_thinq}/const.py (99%) rename homeassistant/components/{lgthinq => 
lg_thinq}/coordinator.py (100%) rename homeassistant/components/{lgthinq => lg_thinq}/entity.py (100%) rename homeassistant/components/{lgthinq => lg_thinq}/icons.json (100%) rename homeassistant/components/{lgthinq => lg_thinq}/manifest.json (92%) rename homeassistant/components/{lgthinq => lg_thinq}/strings.json (100%) rename homeassistant/components/{lgthinq => lg_thinq}/switch.py (100%) rename tests/components/{lgthinq => lg_thinq}/__init__.py (100%) rename tests/components/{lgthinq => lg_thinq}/conftest.py (90%) rename tests/components/{lgthinq => lg_thinq}/const.py (100%) rename tests/components/{lgthinq => lg_thinq}/test_config_flow.py (96%) diff --git a/CODEOWNERS b/CODEOWNERS index 8ae6aa367b5..3b250ceb9ab 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -805,8 +805,8 @@ build.json @home-assistant/supervisor /tests/components/lektrico/ @lektrico /homeassistant/components/lg_netcast/ @Drafteed @splinter98 /tests/components/lg_netcast/ @Drafteed @splinter98 -/homeassistant/components/lgthinq/ @LG-ThinQ-Integration -/tests/components/lgthinq/ @LG-ThinQ-Integration +/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration +/tests/components/lg_thinq/ @LG-ThinQ-Integration /homeassistant/components/lidarr/ @tkdrob /tests/components/lidarr/ @tkdrob /homeassistant/components/lifx/ @Djelibeybi diff --git a/homeassistant/brands/lg.json b/homeassistant/brands/lg.json index 350db80b5f3..6b706685f1f 100644 --- a/homeassistant/brands/lg.json +++ b/homeassistant/brands/lg.json @@ -1,5 +1,5 @@ { "domain": "lg", "name": "LG", - "integrations": ["lg_netcast", "lg_soundbar", "webostv"] + "integrations": ["lg_netcast", "lg_thinq", "lg_soundbar", "webostv"] } diff --git a/homeassistant/components/lgthinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py similarity index 100% rename from homeassistant/components/lgthinq/__init__.py rename to homeassistant/components/lg_thinq/__init__.py diff --git a/homeassistant/components/lgthinq/config_flow.py b/homeassistant/components/lg_thinq/config_flow.py similarity index 100% rename from homeassistant/components/lgthinq/config_flow.py rename to homeassistant/components/lg_thinq/config_flow.py diff --git a/homeassistant/components/lgthinq/const.py b/homeassistant/components/lg_thinq/const.py similarity index 99% rename from homeassistant/components/lgthinq/const.py rename to homeassistant/components/lg_thinq/const.py index 9b9b162bb06..811b7c50340 100644 --- a/homeassistant/components/lgthinq/const.py +++ b/homeassistant/components/lg_thinq/const.py @@ -37,7 +37,7 @@ from thinqconnect import ( ) # Common -DOMAIN = "lgthinq" +DOMAIN = "lg_thinq" COMPANY = "LGE" THINQ_DEFAULT_NAME: Final = "LG ThinQ" THINQ_PAT_URL: Final = "https://connect-pat.lgthinq.com" diff --git a/homeassistant/components/lgthinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py similarity index 100% rename from homeassistant/components/lgthinq/coordinator.py rename to homeassistant/components/lg_thinq/coordinator.py diff --git a/homeassistant/components/lgthinq/entity.py b/homeassistant/components/lg_thinq/entity.py similarity index 100% rename from homeassistant/components/lgthinq/entity.py rename to homeassistant/components/lg_thinq/entity.py diff --git a/homeassistant/components/lgthinq/icons.json b/homeassistant/components/lg_thinq/icons.json similarity index 100% rename from homeassistant/components/lgthinq/icons.json rename to homeassistant/components/lg_thinq/icons.json diff --git a/homeassistant/components/lgthinq/manifest.json 
b/homeassistant/components/lg_thinq/manifest.json similarity index 92% rename from homeassistant/components/lgthinq/manifest.json rename to homeassistant/components/lg_thinq/manifest.json index 641c78844f9..0fa447a511b 100644 --- a/homeassistant/components/lgthinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -1,5 +1,5 @@ { - "domain": "lgthinq", + "domain": "lg_thinq", "name": "LG ThinQ", "codeowners": ["@LG-ThinQ-Integration"], "config_flow": true, diff --git a/homeassistant/components/lgthinq/strings.json b/homeassistant/components/lg_thinq/strings.json similarity index 100% rename from homeassistant/components/lgthinq/strings.json rename to homeassistant/components/lg_thinq/strings.json diff --git a/homeassistant/components/lgthinq/switch.py b/homeassistant/components/lg_thinq/switch.py similarity index 100% rename from homeassistant/components/lgthinq/switch.py rename to homeassistant/components/lg_thinq/switch.py diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index d4342d80d41..fcabc463f0a 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -319,7 +319,7 @@ FLOWS = { "lektrico", "lg_netcast", "lg_soundbar", - "lgthinq", + "lg_thinq", "lidarr", "lifx", "linear_garage_door", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 8091d48ca4d..ebfe7e056f2 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3238,6 +3238,12 @@ "iot_class": "local_polling", "name": "LG Netcast" }, + "lg_thinq": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "LG ThinQ" + }, "lg_soundbar": { "integration_type": "hub", "config_flow": true, @@ -3252,12 +3258,6 @@ } } }, - "lgthinq": { - "name": "LG ThinQ", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_push" - }, "lidarr": { "name": "Lidarr", "integration_type": "service", diff --git a/requirements_all.txt b/requirements_all.txt index 980ba8d94e2..27cd2d5abc1 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2786,7 +2786,7 @@ thermoworks-smoke==0.1.8 # homeassistant.components.thingspeak thingspeak==1.0.0 -# homeassistant.components.lgthinq +# homeassistant.components.lg_thinq thinqconnect==0.9.5 # homeassistant.components.tikteck diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f766c8c13d3..08903ae1c6f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2199,7 +2199,7 @@ thermobeacon-ble==0.7.0 # homeassistant.components.thermopro thermopro-ble==0.10.0 -# homeassistant.components.lgthinq +# homeassistant.components.lg_thinq thinqconnect==0.9.5 # homeassistant.components.tilt_ble diff --git a/tests/components/lgthinq/__init__.py b/tests/components/lg_thinq/__init__.py similarity index 100% rename from tests/components/lgthinq/__init__.py rename to tests/components/lg_thinq/__init__.py diff --git a/tests/components/lgthinq/conftest.py b/tests/components/lg_thinq/conftest.py similarity index 90% rename from tests/components/lgthinq/conftest.py rename to tests/components/lg_thinq/conftest.py index 321c770ee8d..cae2de61fa4 100644 --- a/tests/components/lgthinq/conftest.py +++ b/tests/components/lg_thinq/conftest.py @@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest from thinqconnect import ThinQAPIException -from homeassistant.components.lgthinq.const import 
CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY from .const import MOCK_CONNECT_CLIENT_ID, MOCK_COUNTRY, MOCK_PAT, MOCK_UUID @@ -51,7 +51,7 @@ def mock_uuid() -> Generator[AsyncMock]: with ( patch("uuid.uuid4", autospec=True, return_value=MOCK_UUID) as mock_uuid, patch( - "homeassistant.components.lgthinq.config_flow.uuid.uuid4", + "homeassistant.components.lg_thinq.config_flow.uuid.uuid4", new=mock_uuid, ), ): @@ -64,7 +64,7 @@ def mock_thinq_api() -> Generator[AsyncMock]: with ( patch("thinqconnect.ThinQApi", autospec=True) as mock_api, patch( - "homeassistant.components.lgthinq.config_flow.ThinQApi", + "homeassistant.components.lg_thinq.config_flow.ThinQApi", new=mock_api, ), ): diff --git a/tests/components/lgthinq/const.py b/tests/components/lg_thinq/const.py similarity index 100% rename from tests/components/lgthinq/const.py rename to tests/components/lg_thinq/const.py diff --git a/tests/components/lgthinq/test_config_flow.py b/tests/components/lg_thinq/test_config_flow.py similarity index 96% rename from tests/components/lgthinq/test_config_flow.py rename to tests/components/lg_thinq/test_config_flow.py index 457549ccb7e..db0e2d29450 100644 --- a/tests/components/lgthinq/test_config_flow.py +++ b/tests/components/lg_thinq/test_config_flow.py @@ -2,7 +2,7 @@ from unittest.mock import AsyncMock -from homeassistant.components.lgthinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN +from homeassistant.components.lg_thinq.const import CONF_CONNECT_CLIENT_ID, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY from homeassistant.core import HomeAssistant From 26281662b5557a1fb2b435ca3316f27aa015251e Mon Sep 17 00:00:00 2001 From: Alex Yao <33379584+alexyao2015@users.noreply.github.com> Date: Fri, 30 Aug 2024 16:22:14 -0500 Subject: [PATCH 1278/1635] Enable config flow for html5 (#112806) * html5: Enable config flow * Add tests * attempt check create_issue * replace len with call_count * fix config flow tests * test user config * more tests * remove whitespace * Update homeassistant/components/html5/issues.py Co-authored-by: Steven B. <51370195+sdb9696@users.noreply.github.com> * Update homeassistant/components/html5/issues.py Co-authored-by: Steven B. <51370195+sdb9696@users.noreply.github.com> * fix config * Adjust issues log * lint * lint * rename create issue * fix typing * update codeowners * fix test * fix tests * Update issues.py * Update tests/components/html5/test_config_flow.py Co-authored-by: J. Nick Koston * Update tests/components/html5/test_config_flow.py Co-authored-by: J. Nick Koston * Update tests/components/html5/test_config_flow.py Co-authored-by: J. Nick Koston * update from review * remove ternary * fix * fix missing service * fix tests * updates * address review comments * fix indent * fix * fix format * cleanup from review * Restore config schema and use HA issue * Restore config schema and use HA issue --------- Co-authored-by: alexyao2015 Co-authored-by: Steven B. <51370195+sdb9696@users.noreply.github.com> Co-authored-by: J. 
Nick Koston Co-authored-by: Joostlek --- CODEOWNERS | 2 + homeassistant/components/html5/__init__.py | 15 ++ homeassistant/components/html5/config_flow.py | 103 +++++++++ homeassistant/components/html5/const.py | 5 + homeassistant/components/html5/issues.py | 50 +++++ homeassistant/components/html5/manifest.json | 6 +- homeassistant/components/html5/notify.py | 51 +++-- homeassistant/components/html5/strings.json | 27 +++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 5 +- tests/components/html5/test_config_flow.py | 203 ++++++++++++++++++ tests/components/html5/test_init.py | 44 ++++ tests/components/html5/test_notify.py | 22 +- 13 files changed, 497 insertions(+), 37 deletions(-) create mode 100644 homeassistant/components/html5/config_flow.py create mode 100644 homeassistant/components/html5/issues.py create mode 100644 tests/components/html5/test_config_flow.py create mode 100644 tests/components/html5/test_init.py diff --git a/CODEOWNERS b/CODEOWNERS index 3b250ceb9ab..7b8b4ec1106 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -633,6 +633,8 @@ build.json @home-assistant/supervisor /tests/components/homewizard/ @DCSBL /homeassistant/components/honeywell/ @rdfurman @mkmer /tests/components/honeywell/ @rdfurman @mkmer +/homeassistant/components/html5/ @alexyao2015 +/tests/components/html5/ @alexyao2015 /homeassistant/components/http/ @home-assistant/core /tests/components/http/ @home-assistant/core /homeassistant/components/huawei_lte/ @scop @fphammerle diff --git a/homeassistant/components/html5/__init__.py b/homeassistant/components/html5/__init__.py index 88e437ef566..4b85bf8ab8c 100644 --- a/homeassistant/components/html5/__init__.py +++ b/homeassistant/components/html5/__init__.py @@ -1 +1,16 @@ """The html5 component.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import discovery + +from .const import DOMAIN + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up HTML5 from a config entry.""" + await discovery.async_load_platform( + hass, Platform.NOTIFY, DOMAIN, dict(entry.data), {} + ) + return True diff --git a/homeassistant/components/html5/config_flow.py b/homeassistant/components/html5/config_flow.py new file mode 100644 index 00000000000..1dae0102d05 --- /dev/null +++ b/homeassistant/components/html5/config_flow.py @@ -0,0 +1,103 @@ +"""Config flow for the html5 component.""" + +import binascii +from typing import Any, cast + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import ec +from py_vapid import Vapid +from py_vapid.utils import b64urlencode +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_NAME +from homeassistant.core import callback + +from .const import ATTR_VAPID_EMAIL, ATTR_VAPID_PRV_KEY, ATTR_VAPID_PUB_KEY, DOMAIN +from .issues import async_create_html5_issue + + +def vapid_generate_private_key() -> str: + """Generate a VAPID private key.""" + private_key = ec.generate_private_key(ec.SECP256R1(), default_backend()) + return b64urlencode( + binascii.unhexlify(f"{private_key.private_numbers().private_value:x}".zfill(64)) + ) + + +def vapid_get_public_key(private_key: str) -> str: + """Get the VAPID public key from a private key.""" + vapid = 
Vapid.from_string(private_key) + public_key = cast(ec.EllipticCurvePublicKey, vapid.public_key) + return b64urlencode( + public_key.public_bytes( + serialization.Encoding.X962, serialization.PublicFormat.UncompressedPoint + ) + ) + + +class HTML5ConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for HTML5.""" + + @callback + def _async_create_html5_entry( + self: "HTML5ConfigFlow", data: dict[str, str] + ) -> tuple[dict[str, str], ConfigFlowResult | None]: + """Create an HTML5 entry.""" + errors = {} + flow_result = None + + if not data.get(ATTR_VAPID_PRV_KEY): + data[ATTR_VAPID_PRV_KEY] = vapid_generate_private_key() + + # we will always generate the corresponding public key + try: + data[ATTR_VAPID_PUB_KEY] = vapid_get_public_key(data[ATTR_VAPID_PRV_KEY]) + except (ValueError, binascii.Error): + errors[ATTR_VAPID_PRV_KEY] = "invalid_prv_key" + + if not errors: + config = { + ATTR_VAPID_EMAIL: data[ATTR_VAPID_EMAIL], + ATTR_VAPID_PRV_KEY: data[ATTR_VAPID_PRV_KEY], + ATTR_VAPID_PUB_KEY: data[ATTR_VAPID_PUB_KEY], + CONF_NAME: DOMAIN, + } + flow_result = self.async_create_entry(title="HTML5", data=config) + return errors, flow_result + + async def async_step_user( + self: "HTML5ConfigFlow", user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initialized by the user.""" + errors: dict[str, str] = {} + if user_input: + errors, flow_result = self._async_create_html5_entry(user_input) + if flow_result: + return flow_result + else: + user_input = {} + + return self.async_show_form( + data_schema=vol.Schema( + { + vol.Required( + ATTR_VAPID_EMAIL, default=user_input.get(ATTR_VAPID_EMAIL, "") + ): str, + vol.Optional(ATTR_VAPID_PRV_KEY): str, + } + ), + errors=errors, + ) + + async def async_step_import( + self: "HTML5ConfigFlow", import_config: dict + ) -> ConfigFlowResult: + """Handle config import from yaml.""" + _, flow_result = self._async_create_html5_entry(import_config) + if not flow_result: + async_create_html5_issue(self.hass, False) + return self.async_abort(reason="invalid_config") + async_create_html5_issue(self.hass, True) + return flow_result diff --git a/homeassistant/components/html5/const.py b/homeassistant/components/html5/const.py index bf7eaca7e24..75826ab90c9 100644 --- a/homeassistant/components/html5/const.py +++ b/homeassistant/components/html5/const.py @@ -1,4 +1,9 @@ """Constants for the HTML5 component.""" DOMAIN = "html5" +DATA_HASS_CONFIG = "html5_hass_config" SERVICE_DISMISS = "dismiss" + +ATTR_VAPID_PUB_KEY = "vapid_pub_key" +ATTR_VAPID_PRV_KEY = "vapid_prv_key" +ATTR_VAPID_EMAIL = "vapid_email" diff --git a/homeassistant/components/html5/issues.py b/homeassistant/components/html5/issues.py new file mode 100644 index 00000000000..8892562d347 --- /dev/null +++ b/homeassistant/components/html5/issues.py @@ -0,0 +1,50 @@ +"""Issues utility for HTML5.""" + +import logging + +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +SUCCESSFUL_IMPORT_TRANSLATION_KEY = "deprecated_yaml" +FAILED_IMPORT_TRANSLATION_KEY = "deprecated_yaml_import_issue" + +INTEGRATION_TITLE = "HTML5 Push Notifications" + + +@callback +def async_create_html5_issue(hass: HomeAssistant, import_success: bool) -> None: + """Create issues for HTML5.""" + if import_success: + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + 
breaks_in_ha_version="2025.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": INTEGRATION_TITLE, + }, + ) + else: + async_create_issue( + hass, + DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml_import_issue", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": INTEGRATION_TITLE, + }, + ) diff --git a/homeassistant/components/html5/manifest.json b/homeassistant/components/html5/manifest.json index f480086d153..c6cbd826544 100644 --- a/homeassistant/components/html5/manifest.json +++ b/homeassistant/components/html5/manifest.json @@ -1,10 +1,12 @@ { "domain": "html5", "name": "HTML5 Push Notifications", - "codeowners": [], + "codeowners": ["@alexyao2015"], + "config_flow": true, "dependencies": ["http"], "documentation": "https://www.home-assistant.io/integrations/html5", "iot_class": "cloud_push", "loggers": ["http_ece", "py_vapid", "pywebpush"], - "requirements": ["pywebpush==1.14.1"] + "requirements": ["pywebpush==1.14.1"], + "single_config_entry": true } diff --git a/homeassistant/components/html5/notify.py b/homeassistant/components/html5/notify.py index 8082ca37aa3..48cc0598479 100644 --- a/homeassistant/components/html5/notify.py +++ b/homeassistant/components/html5/notify.py @@ -29,6 +29,7 @@ from homeassistant.components.notify import ( PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, BaseNotificationService, ) +from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ATTR_NAME, URL_ROOT from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import HomeAssistantError @@ -38,32 +39,23 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util import ensure_unique_string from homeassistant.util.json import JsonObjectType, load_json_object -from .const import DOMAIN, SERVICE_DISMISS +from .const import ( + ATTR_VAPID_EMAIL, + ATTR_VAPID_PRV_KEY, + ATTR_VAPID_PUB_KEY, + DOMAIN, + SERVICE_DISMISS, +) +from .issues import async_create_html5_issue _LOGGER = logging.getLogger(__name__) REGISTRATIONS_FILE = "html5_push_registrations.conf" -ATTR_VAPID_PUB_KEY = "vapid_pub_key" -ATTR_VAPID_PRV_KEY = "vapid_prv_key" -ATTR_VAPID_EMAIL = "vapid_email" - - -def gcm_api_deprecated(value): - """Warn user that GCM API config is deprecated.""" - if value: - _LOGGER.warning( - "Configuring html5_push_notifications via the GCM api" - " has been deprecated and stopped working since May 29," - " 2019. Use the VAPID configuration instead. 
For instructions," - " see https://www.home-assistant.io/integrations/html5/" - ) - return value - PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend( { - vol.Optional("gcm_sender_id"): vol.All(cv.string, gcm_api_deprecated), + vol.Optional("gcm_sender_id"): cv.string, vol.Optional("gcm_api_key"): cv.string, vol.Required(ATTR_VAPID_PUB_KEY): cv.string, vol.Required(ATTR_VAPID_PRV_KEY): cv.string, @@ -171,15 +163,30 @@ async def async_get_service( discovery_info: DiscoveryInfoType | None = None, ) -> HTML5NotificationService | None: """Get the HTML5 push notification service.""" + if config: + existing_config_entry = hass.config_entries.async_entries(DOMAIN) + if existing_config_entry: + async_create_html5_issue(hass, True) + return None + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + ) + return None + + if discovery_info is None: + return None + json_path = hass.config.path(REGISTRATIONS_FILE) registrations = await hass.async_add_executor_job(_load_config, json_path) - vapid_pub_key = config[ATTR_VAPID_PUB_KEY] - vapid_prv_key = config[ATTR_VAPID_PRV_KEY] - vapid_email = config[ATTR_VAPID_EMAIL] + vapid_pub_key = discovery_info[ATTR_VAPID_PUB_KEY] + vapid_prv_key = discovery_info[ATTR_VAPID_PRV_KEY] + vapid_email = discovery_info[ATTR_VAPID_EMAIL] - def websocket_appkey(hass, connection, msg): + def websocket_appkey(_hass, connection, msg): connection.send_message(websocket_api.result_message(msg["id"], vapid_pub_key)) websocket_api.async_register_command( diff --git a/homeassistant/components/html5/strings.json b/homeassistant/components/html5/strings.json index fa69025c43c..40bdbb36261 100644 --- a/homeassistant/components/html5/strings.json +++ b/homeassistant/components/html5/strings.json @@ -1,4 +1,31 @@ { + "config": { + "step": { + "user": { + "data": { + "vapid_email": "[%key:common::config_flow::data::email%]", + "vapid_prv_key": "VAPID private key" + }, + "data_description": { + "vapid_email": "Email to use for html5 push notifications.", + "vapid_prv_key": "If not specified, one will be automatically generated." + } + } + }, + "error": { + "unknown": "Unknown error", + "invalid_prv_key": "Invalid private key" + }, + "abort": { + "invalid_config": "Invalid configuration" + } + }, + "issues": { + "deprecated_yaml_import_issue": { + "title": "HTML5 YAML configuration import failed", + "description": "Configuring HTML5 push notification using YAML has been deprecated. An automatic import of your existing configuration was attempted, but it failed.\n\nPlease remove the HTML5 push notification YAML configuration from your configuration.yaml file and reconfigure HTML5 push notification again manually." 
+ } + }, "services": { "dismiss": { "name": "Dismiss", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index fcabc463f0a..912df1aee0f 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -253,6 +253,7 @@ FLOWS = { "homewizard", "homeworks", "honeywell", + "html5", "huawei_lte", "hue", "huisbaasje", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index ebfe7e056f2..38958845782 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2633,8 +2633,9 @@ "html5": { "name": "HTML5 Push Notifications", "integration_type": "hub", - "config_flow": false, - "iot_class": "cloud_push" + "config_flow": true, + "iot_class": "cloud_push", + "single_config_entry": true }, "huawei_lte": { "name": "Huawei LTE", diff --git a/tests/components/html5/test_config_flow.py b/tests/components/html5/test_config_flow.py new file mode 100644 index 00000000000..ca0b3da0389 --- /dev/null +++ b/tests/components/html5/test_config_flow.py @@ -0,0 +1,203 @@ +"""Test the HTML5 config flow.""" + +from unittest.mock import patch + +import pytest + +from homeassistant import config_entries, data_entry_flow +from homeassistant.components.html5.const import ( + ATTR_VAPID_EMAIL, + ATTR_VAPID_PRV_KEY, + ATTR_VAPID_PUB_KEY, + DOMAIN, +) +from homeassistant.components.html5.issues import ( + FAILED_IMPORT_TRANSLATION_KEY, + SUCCESSFUL_IMPORT_TRANSLATION_KEY, +) +from homeassistant.const import CONF_NAME +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +import homeassistant.helpers.issue_registry as ir + +MOCK_CONF = { + ATTR_VAPID_EMAIL: "test@example.com", + ATTR_VAPID_PRV_KEY: "h6acSRds8_KR8hT9djD8WucTL06Gfe29XXyZ1KcUjN8", +} +MOCK_CONF_PUB_KEY = "BIUtPN7Rq_8U7RBEqClZrfZ5dR9zPCfvxYPtLpWtRVZTJEc7lzv2dhzDU6Aw1m29Ao0-UA1Uq6XO9Df8KALBKqA" + + +async def test_step_user_success(hass: HomeAssistant) -> None: + """Test a successful user config flow.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_USER}, + data=MOCK_CONF.copy(), + ) + + await hass.async_block_till_done() + + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["data"] == { + ATTR_VAPID_PRV_KEY: MOCK_CONF[ATTR_VAPID_PRV_KEY], + ATTR_VAPID_PUB_KEY: MOCK_CONF_PUB_KEY, + ATTR_VAPID_EMAIL: MOCK_CONF[ATTR_VAPID_EMAIL], + CONF_NAME: DOMAIN, + } + + assert mock_setup_entry.call_count == 1 + + +async def test_step_user_success_generate(hass: HomeAssistant) -> None: + """Test a successful user config flow, generating a key pair.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + conf = {ATTR_VAPID_EMAIL: MOCK_CONF[ATTR_VAPID_EMAIL]} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=conf + ) + + await hass.async_block_till_done() + + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["data"][ATTR_VAPID_EMAIL] == MOCK_CONF[ATTR_VAPID_EMAIL] + + assert mock_setup_entry.call_count == 1 + + +async def test_step_user_new_form(hass: HomeAssistant) -> None: + """Test new user input.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as 
mock_setup_entry: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=None + ) + + await hass.async_block_till_done() + + assert result["type"] is data_entry_flow.FlowResultType.FORM + assert mock_setup_entry.call_count == 0 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], MOCK_CONF + ) + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert mock_setup_entry.call_count == 1 + + +@pytest.mark.parametrize( + ("key", "value"), + [ + (ATTR_VAPID_PRV_KEY, "invalid"), + ], +) +async def test_step_user_form_invalid_key( + hass: HomeAssistant, key: str, value: str +) -> None: + """Test invalid user input.""" + + with patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + bad_conf = MOCK_CONF.copy() + bad_conf[key] = value + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=bad_conf + ) + + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert mock_setup_entry.call_count == 0 + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], MOCK_CONF + ) + assert result["type"] is data_entry_flow.FlowResultType.CREATE_ENTRY + assert mock_setup_entry.call_count == 1 + + +async def test_step_import_good( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test valid import input.""" + + with ( + patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + conf = MOCK_CONF.copy() + conf[ATTR_VAPID_PUB_KEY] = MOCK_CONF_PUB_KEY + conf["random_key"] = "random_value" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=conf + ) + + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["data"] == { + ATTR_VAPID_PRV_KEY: conf[ATTR_VAPID_PRV_KEY], + ATTR_VAPID_PUB_KEY: MOCK_CONF_PUB_KEY, + ATTR_VAPID_EMAIL: conf[ATTR_VAPID_EMAIL], + CONF_NAME: DOMAIN, + } + + assert mock_setup_entry.call_count == 1 + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) + assert issue + assert issue.translation_key == SUCCESSFUL_IMPORT_TRANSLATION_KEY + + +@pytest.mark.parametrize( + ("key", "value"), + [ + (ATTR_VAPID_PRV_KEY, "invalid"), + ], +) +async def test_step_import_bad( + hass: HomeAssistant, issue_registry: ir.IssueRegistry, key: str, value: str +) -> None: + """Test invalid import input.""" + + with ( + patch( + "homeassistant.components.html5.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + bad_conf = MOCK_CONF.copy() + bad_conf[key] = value + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=bad_conf + ) + + await hass.async_block_till_done() + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert mock_setup_entry.call_count == 0 + + assert len(issue_registry.issues) == 1 + issue = issue_registry.async_get_issue(DOMAIN, f"deprecated_yaml_{DOMAIN}") + assert issue + assert issue.translation_key == FAILED_IMPORT_TRANSLATION_KEY diff --git a/tests/components/html5/test_init.py b/tests/components/html5/test_init.py new file mode 100644 index 00000000000..290cb381296 --- /dev/null +++ 
b/tests/components/html5/test_init.py @@ -0,0 +1,44 @@ +"""Test the HTML5 setup.""" + +from homeassistant.core import HomeAssistant +import homeassistant.helpers.issue_registry as ir +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + +NOTIFY_CONF = { + "notify": [ + { + "platform": "html5", + "name": "html5", + "vapid_pub_key": "BIUtPN7Rq_8U7RBEqClZrfZ5dR9zPCfvxYPtLpWtRVZTJEc7lzv2dhzDU6Aw1m29Ao0-UA1Uq6XO9Df8KALBKqA", + "vapid_prv_key": "h6acSRds8_KR8hT9djD8WucTL06Gfe29XXyZ1KcUjN8", + "vapid_email": "test@example.com", + } + ] +} + + +async def test_setup_entry( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test setup of a good config entry.""" + config_entry = MockConfigEntry(domain="html5", data={}) + config_entry.add_to_hass(hass) + assert await async_setup_component(hass, "html5", {}) + + assert len(issue_registry.issues) == 0 + + +async def test_setup_entry_issue( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, +) -> None: + """Test setup of an imported config entry with deprecated YAML.""" + config_entry = MockConfigEntry(domain="html5", data={}) + config_entry.add_to_hass(hass) + assert await async_setup_component(hass, "notify", NOTIFY_CONF) + assert await async_setup_component(hass, "html5", NOTIFY_CONF) + + assert len(issue_registry.issues) == 1 diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index 42ca6067418..85a790c0610 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -94,7 +94,7 @@ async def test_get_service_with_no_json(hass: HomeAssistant) -> None: await async_setup_component(hass, "http", {}) m = mock_open() with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) assert service is not None @@ -109,7 +109,7 @@ async def test_dismissing_message(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -138,7 +138,7 @@ async def test_sending_message(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -169,7 +169,7 @@ async def test_fcm_key_include(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -194,7 +194,7 @@ async def test_fcm_send_with_unknown_priority(mock_wp, hass: HomeAssistant) -> N m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -219,7 +219,7 @@ async def test_fcm_no_targets(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with 
patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -244,7 +244,7 @@ async def test_fcm_additional_data(mock_wp, hass: HomeAssistant) -> None: m = mock_open(read_data=json.dumps(data)) with patch("homeassistant.util.json.open", m, create=True): - service = await html5.async_get_service(hass, VAPID_CONF) + service = await html5.async_get_service(hass, {}, VAPID_CONF) service.hass = hass assert service is not None @@ -479,7 +479,7 @@ async def test_callback_view_with_jwt( mock_wp().send().status_code = 201 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -516,7 +516,7 @@ async def test_send_fcm_without_targets( mock_wp().send().status_code = 201 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -541,7 +541,7 @@ async def test_send_fcm_expired( mock_wp().send().status_code = 410 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) @@ -566,7 +566,7 @@ async def test_send_fcm_expired_save_fails( mock_wp().send().status_code = 410 await hass.services.async_call( "notify", - "notify", + "html5", {"message": "Hello", "target": ["device"], "data": {"icon": "beer.png"}}, blocking=True, ) From 582b7eab66508531c5362b76d6f3a819f4ba01e6 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 01:01:27 -0700 Subject: [PATCH 1279/1635] Add missing translation for Google Photos reauth (#124959) --- homeassistant/components/google_photos/strings.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json index 57bce01d9f8..b44e04287b1 100644 --- a/homeassistant/components/google_photos/strings.json +++ b/homeassistant/components/google_photos/strings.json @@ -18,6 +18,7 @@ "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", "access_not_configured": "Unable to access the Google API:\n\n{message}", "unknown": "[%key:common::config_flow::error::unknown%]", + "wrong_account": "Wrong account: Please authenticate with the right account.", "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]" From c1eb5f8b74a842a8021a633b74648e68dbf3fd90 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 01:01:51 -0700 Subject: [PATCH 1280/1635] Fix Google Photos get media calls (#124958) --- homeassistant/components/google_photos/api.py | 2 +- tests/components/google_photos/conftest.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/google_photos/api.py b/homeassistant/components/google_photos/api.py index 2fa6ee2d8f6..b387326148f 100644 --- a/homeassistant/components/google_photos/api.py +++ b/homeassistant/components/google_photos/api.py @@ -54,7 +54,7 @@ class AuthBase(ABC): """Get all MediaItem resources.""" service = await self._get_photos_service() cmd: HttpRequest = service.mediaItems().get( - media_item_id, fields=GET_MEDIA_ITEM_FIELDS + 
mediaItemId=media_item_id, fields=GET_MEDIA_ITEM_FIELDS ) return await self._execute(cmd) diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py index 84ed717895d..73e506658e6 100644 --- a/tests/components/google_photos/conftest.py +++ b/tests/components/google_photos/conftest.py @@ -115,10 +115,10 @@ def mock_setup_api( mock.return_value.mediaItems.return_value.list = list_media_items # Mock a point lookup by reading contents of the fixture above - def get_media_item(media_item_id: str, **kwargs: Any) -> Mock: + def get_media_item(mediaItemId: str, **kwargs: Any) -> Mock: for response in responses: for media_item in response["mediaItems"]: - if media_item["id"] == media_item_id: + if media_item["id"] == mediaItemId: mock = Mock() mock.execute.return_value = media_item return mock From 3bfcb1ebdd0c00e573789cce9b4ea548d26a83a9 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 30 Aug 2024 22:07:36 -1000 Subject: [PATCH 1281/1635] Restore sisyphus integration (#124749) * Revert "Disable sisyphus integration (#124742)" This reverts commit 1b304e60d926ceffbe79e25c5065af233fc4c059. * Restore sisyphus integration reverts #124742 and updates the lib instead changelog: https://github.com/jkeljo/sisyphus-control/compare/v3.1.3...v3.1.4 release is pending: https://github.com/jkeljo/sisyphus-control/pull/8#issuecomment-2313893689 --- homeassistant/components/sisyphus/__init__.py | 3 +-- homeassistant/components/sisyphus/manifest.json | 3 +-- homeassistant/components/sisyphus/media_player.py | 3 +-- homeassistant/components/sisyphus/ruff.toml | 5 ----- requirements_all.txt | 3 +++ 5 files changed, 6 insertions(+), 11 deletions(-) delete mode 100644 homeassistant/components/sisyphus/ruff.toml diff --git a/homeassistant/components/sisyphus/__init__.py b/homeassistant/components/sisyphus/__init__.py index 1fc440f260d..da8d670d412 100644 --- a/homeassistant/components/sisyphus/__init__.py +++ b/homeassistant/components/sisyphus/__init__.py @@ -1,10 +1,9 @@ """Support for controlling Sisyphus Kinetic Art Tables.""" -# mypy: ignore-errors import asyncio import logging -# from sisyphus_control import Table +from sisyphus_control import Table import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_NAME, EVENT_HOMEASSISTANT_STOP, Platform diff --git a/homeassistant/components/sisyphus/manifest.json b/homeassistant/components/sisyphus/manifest.json index f1d90cebbd3..4e344c0b25e 100644 --- a/homeassistant/components/sisyphus/manifest.json +++ b/homeassistant/components/sisyphus/manifest.json @@ -2,9 +2,8 @@ "domain": "sisyphus", "name": "Sisyphus", "codeowners": ["@jkeljo"], - "disabled": "This integration is disabled because it uses an old version of socketio.", "documentation": "https://www.home-assistant.io/integrations/sisyphus", "iot_class": "local_push", "loggers": ["sisyphus_control"], - "requirements": ["sisyphus-control==3.1.3"] + "requirements": ["sisyphus-control==3.1.4"] } diff --git a/homeassistant/components/sisyphus/media_player.py b/homeassistant/components/sisyphus/media_player.py index 0248bbeac32..3884a83928a 100644 --- a/homeassistant/components/sisyphus/media_player.py +++ b/homeassistant/components/sisyphus/media_player.py @@ -1,11 +1,10 @@ """Support for track controls on the Sisyphus Kinetic Art Table.""" -# mypy: ignore-errors from __future__ import annotations import aiohttp +from sisyphus_control import Track -# from sisyphus_control import Track from homeassistant.components.media_player import ( 
MediaPlayerEntity, MediaPlayerEntityFeature, diff --git a/homeassistant/components/sisyphus/ruff.toml b/homeassistant/components/sisyphus/ruff.toml deleted file mode 100644 index 38f6f586aef..00000000000 --- a/homeassistant/components/sisyphus/ruff.toml +++ /dev/null @@ -1,5 +0,0 @@ -extend = "../../../pyproject.toml" - -lint.extend-ignore = [ - "F821" -] diff --git a/requirements_all.txt b/requirements_all.txt index 27cd2d5abc1..afb7ae7bc77 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2625,6 +2625,9 @@ simplepush==2.2.3 # homeassistant.components.simplisafe simplisafe-python==2024.01.0 +# homeassistant.components.sisyphus +sisyphus-control==3.1.4 + # homeassistant.components.slack slackclient==2.5.0 From 2cab9f7fe9cc374beb5f7b29a40598b2389a26bc Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 01:10:45 -0700 Subject: [PATCH 1282/1635] Address additional Google Photos integration feedback (#124957) * Address review feedback * Fix typing for arguments --- .../components/google_photos/config_flow.py | 2 +- .../components/google_photos/media_source.py | 117 ++++++++++-------- .../google_photos/test_media_source.py | 8 +- 3 files changed, 72 insertions(+), 55 deletions(-) diff --git a/homeassistant/components/google_photos/config_flow.py b/homeassistant/components/google_photos/config_flow.py index 93f0347e32f..e5378f67ffd 100644 --- a/homeassistant/components/google_photos/config_flow.py +++ b/homeassistant/components/google_photos/config_flow.py @@ -42,7 +42,7 @@ class OAuth2FlowHandler( client = api.AsyncConfigFlowAuth(self.hass, data[CONF_TOKEN][CONF_ACCESS_TOKEN]) try: user_resource_info = await client.get_user_info() - await client.list_media_items() + await client.list_media_items(page_size=1) except GooglePhotosApiError as ex: return self.async_abort( reason="access_not_configured", diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py index e6011cb0e61..a2f9383ec5f 100644 --- a/homeassistant/components/google_photos/media_source.py +++ b/homeassistant/components/google_photos/media_source.py @@ -1,6 +1,7 @@ """Media source for Google Photos.""" from dataclasses import dataclass +from enum import StrEnum import logging from typing import Any, cast @@ -28,7 +29,7 @@ MAX_PHOTOS = 50 PAGE_SIZE = 100 THUMBNAIL_SIZE = 256 -LARGE_IMAGE_SIZE = 2048 +LARGE_IMAGE_SIZE = 2160 # Markers for parts of PhotosIdentifier url pattern. @@ -47,6 +48,21 @@ RECENT_PHOTOS_ALBUM = "recent" RECENT_PHOTOS_TITLE = "Recent Photos" +class PhotosIdentifierType(StrEnum): + """Type for a PhotosIdentifier.""" + + PHOTO = "p" + ALBUM = "a" + + @classmethod + def of(cls, name: str) -> "PhotosIdentifierType": + """Parse a PhotosIdentifierType by string value.""" + for enum in PhotosIdentifierType: + if enum.value == name: + return enum + raise ValueError(f"Invalid PhotosIdentifierType: {name}") + + @dataclass class PhotosIdentifier: """Google Photos item identifier in a media source URL.""" @@ -54,47 +70,48 @@ class PhotosIdentifier: config_entry_id: str """Identifies the account for the media item.""" - album_media_id: str | None = None - """Identifies the album contents to show. + id_type: PhotosIdentifierType | None = None + """Type of identifier""" - Not present at the same time as `photo_media_id`. - """ - - photo_media_id: str | None = None - """Identifies an indiviidual photo or video. - - Not present at the same time as `album_media_id`. 
- """ + media_id: str | None = None + """Identifies the album or photo contents to show.""" def as_string(self) -> str: """Serialize the identiifer as a string. - This is the opposite if parse_identifier(). + This is the opposite if of(). """ - if self.photo_media_id is None: - if self.album_media_id is None: - return self.config_entry_id - return f"{self.config_entry_id}/{PHOTO_SOURCE_IDENTIFIER_ALBUM}/{self.album_media_id}" - return f"{self.config_entry_id}/{PHOTO_SOURCE_IDENTIFIER_PHOTO}/{self.photo_media_id}" + if self.id_type is None: + return self.config_entry_id + return f"{self.config_entry_id}/{self.id_type}/{self.media_id}" + @staticmethod + def of(identifier: str) -> "PhotosIdentifier": + """Parse a PhotosIdentifier form a string. -def parse_identifier(identifier: str) -> PhotosIdentifier: - """Parse a PhotosIdentifier form a string. + This is the opposite of as_string(). + """ + parts = identifier.split("/") + _LOGGER.debug("parts=%s", parts) + if len(parts) == 1: + return PhotosIdentifier(parts[0]) + if len(parts) != 3: + raise BrowseError(f"Invalid identifier: {identifier}") + return PhotosIdentifier(parts[0], PhotosIdentifierType.of(parts[1]), parts[2]) - This is the opposite of as_string(). - """ - parts = identifier.split("/") - if len(parts) == 1: - return PhotosIdentifier(parts[0]) - if len(parts) != 3: - raise BrowseError(f"Invalid identifier: {identifier}") - if parts[1] == PHOTO_SOURCE_IDENTIFIER_PHOTO: - return PhotosIdentifier(parts[0], photo_media_id=parts[2]) - return PhotosIdentifier(parts[0], album_media_id=parts[2]) + @staticmethod + def album(config_entry_id: str, media_id: str) -> "PhotosIdentifier": + """Create an album PhotosIdentifier.""" + return PhotosIdentifier(config_entry_id, PhotosIdentifierType.ALBUM, media_id) + + @staticmethod + def photo(config_entry_id: str, media_id: str) -> "PhotosIdentifier": + """Create an album PhotosIdentifier.""" + return PhotosIdentifier(config_entry_id, PhotosIdentifierType.PHOTO, media_id) async def async_get_media_source(hass: HomeAssistant) -> MediaSource: - """Set up Synology media source.""" + """Set up Google Photos media source.""" return GooglePhotosMediaSource(hass) @@ -113,16 +130,20 @@ class GooglePhotosMediaSource(MediaSource): This will resolve a specific media item to a url for the full photo or video contents. 
""" - identifier = parse_identifier(item.identifier) - if identifier.photo_media_id is None: + try: + identifier = PhotosIdentifier.of(item.identifier) + except ValueError as err: + raise BrowseError(f"Could not parse identifier: {item.identifier}") from err + if ( + identifier.media_id is None + or identifier.id_type != PhotosIdentifierType.PHOTO + ): raise BrowseError( - f"Could not resolve identifier without a photo_media_id: {identifier}" + f"Could not resolve identiifer that is not a Photo: {identifier}" ) entry = self._async_config_entry(identifier.config_entry_id) client = entry.runtime_data - media_item = await client.get_media_item( - media_item_id=identifier.photo_media_id - ) + media_item = await client.get_media_item(media_item_id=identifier.media_id) is_video = media_item["mediaMetadata"].get("video") is not None return PlayMedia( url=( @@ -158,24 +179,24 @@ class GooglePhotosMediaSource(MediaSource): ) # Determine the configuration entry for this item - identifier = parse_identifier(item.identifier) + identifier = PhotosIdentifier.of(item.identifier) entry = self._async_config_entry(identifier.config_entry_id) client = entry.runtime_data - if identifier.album_media_id is None: - source = _build_account(entry, identifier) + source = _build_account(entry, identifier) + if identifier.id_type is None: source.children = [ _build_album( RECENT_PHOTOS_TITLE, - PhotosIdentifier( - identifier.config_entry_id, album_media_id=RECENT_PHOTOS_ALBUM + PhotosIdentifier.album( + identifier.config_entry_id, RECENT_PHOTOS_ALBUM ), ) ] return source # Currently only supports listing a single album of recent photos. - if identifier.album_media_id != RECENT_PHOTOS_ALBUM: + if identifier.media_id != RECENT_PHOTOS_ALBUM: raise BrowseError(f"Unsupported album: {identifier}") # Fetch recent items @@ -194,12 +215,9 @@ class GooglePhotosMediaSource(MediaSource): break # Render the grid of media item results - source = _build_account(entry, PhotosIdentifier(cast(str, entry.unique_id))) source.children = [ _build_media_item( - PhotosIdentifier( - identifier.config_entry_id, photo_media_id=media_item["id"] - ), + PhotosIdentifier.photo(identifier.config_entry_id, media_item["id"]), media_item, ) for media_item in media_items @@ -250,7 +268,7 @@ def _build_album(title: str, identifier: PhotosIdentifier) -> BrowseMediaSource: def _build_media_item( identifier: PhotosIdentifier, media_item: dict[str, Any] ) -> BrowseMediaSource: - """Build the node for an individual photos or video.""" + """Build the node for an individual photo or video.""" is_video = media_item["mediaMetadata"].get("video") is not None return BrowseMediaSource( domain=DOMAIN, @@ -269,10 +287,7 @@ def _media_url(media_item: dict[str, Any], max_size: int) -> str: See https://developers.google.com/photos/library/guides/access-media-items#base-urls """ - width = media_item["mediaMetadata"]["width"] - height = media_item["mediaMetadata"]["height"] - key = "h" if height > width else "w" - return f"{media_item["baseUrl"]}={key}{max_size}" + return f"{media_item["baseUrl"]}=h{max_size}" def _video_url(media_item: dict[str, Any]) -> str: diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py index b24b37c10e6..db57ab755c1 100644 --- a/tests/components/google_photos/test_media_source.py +++ b/tests/components/google_photos/test_media_source.py @@ -58,7 +58,7 @@ async def test_no_config_entries( (f"{CONFIG_ENTRY_ID}/p/id2", "example2.mp4"), ], [ - ("http://img.example.com/id1=w2048", 
"image/jpeg"), + ("http://img.example.com/id1=h2160", "image/jpeg"), ("http://img.example.com/id2=dv", "video/mp4"), ], ), @@ -90,7 +90,7 @@ async def test_recent_items( hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" ) assert browse.domain == DOMAIN - assert browse.identifier == CONFIG_ENTRY_ID + assert browse.identifier == f"{CONFIG_ENTRY_ID}/a/recent" assert browse.title == "Account Name" assert [ (child.identifier, child.title) for child in browse.children @@ -136,7 +136,9 @@ async def test_invalid_album_id(hass: HomeAssistant) -> None: @pytest.mark.parametrize( ("identifier", "expected_error"), [ - ("invalid-config-entry", "without a photo_media_id"), + (CONFIG_ENTRY_ID, "not a Photo"), + ("invalid-config-entry/a/example", "not a Photo"), + ("invalid-config-entry/q/example", "Could not parse"), ("too/many/slashes/in/path", "Invalid identifier"), ], ) From 5fa23b1785bb3ea162488f667aef13b60129b35b Mon Sep 17 00:00:00 2001 From: vhkristof Date: Sat, 31 Aug 2024 10:56:23 +0200 Subject: [PATCH 1283/1635] Bump renault-api to v0.2.7 (#124858) * Bump renault-api to v0.2.7 * Updated requirements_all and requirements_test_all --- homeassistant/components/renault/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 6691921e850..716f2086bf1 100644 --- a/homeassistant/components/renault/manifest.json +++ b/homeassistant/components/renault/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["renault_api"], "quality_scale": "platinum", - "requirements": ["renault-api==0.2.5"] + "requirements": ["renault-api==0.2.7"] } diff --git a/requirements_all.txt b/requirements_all.txt index afb7ae7bc77..52c7879cb91 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2505,7 +2505,7 @@ refoss-ha==1.2.4 regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.5 +renault-api==0.2.7 # homeassistant.components.renson renson-endura-delta==1.7.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 08903ae1c6f..20a86de78fd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1987,7 +1987,7 @@ refoss-ha==1.2.4 regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.5 +renault-api==0.2.7 # homeassistant.components.renson renson-endura-delta==1.7.1 From 7210cc1da6224f43abe1e33c2fd605dc6e9daf9b Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 30 Aug 2024 23:03:08 -1000 Subject: [PATCH 1284/1635] Bump yarl to 1.9.6 (#124955) * Bump yarl to 1.9.5 changelog: https://github.com/aio-libs/yarl/compare/v1.9.4...v1.9.5 * remove default port since mocker does exact matching and yarl now normalizes this * 1.9.6 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- tests/components/dremel_3d_printer/conftest.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c01b23ab4e4..30edee058bb 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.4 +yarl==1.9.6 zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index 596c0297131..4376ed63d0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.4", + "yarl==1.9.6", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index ad6a39ddb54..a9e01545b83 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.4 +yarl==1.9.6 diff --git a/tests/components/dremel_3d_printer/conftest.py b/tests/components/dremel_3d_printer/conftest.py index 6490b844dc0..cc70537db3d 100644 --- a/tests/components/dremel_3d_printer/conftest.py +++ b/tests/components/dremel_3d_printer/conftest.py @@ -34,7 +34,7 @@ def connection() -> None: """Mock Dremel 3D Printer connection.""" with requests_mock.Mocker() as mock: mock.post( - f"http://{HOST}:80/command", + f"http://{HOST}/command", response_list=[ {"text": load_fixture("dremel_3d_printer/command_1.json")}, {"text": load_fixture("dremel_3d_printer/command_2.json")}, From 221f9615742f41307b17b7fda7a2ff6a7ccecd7b Mon Sep 17 00:00:00 2001 From: Alan Murray Date: Sat, 31 Aug 2024 19:33:58 +1000 Subject: [PATCH 1285/1635] Bump aiopulse to 0.4.6 (#124964) Non-breaking changes to fix issues: * eliminating hub exceptions raised due to the use of unicode strings. * eliminating hub exceptions raised due to Timers being configured on hub.
--- homeassistant/components/acmeda/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/acmeda/manifest.json b/homeassistant/components/acmeda/manifest.json index a8b3c7c829f..0c35904cac6 100644 --- a/homeassistant/components/acmeda/manifest.json +++ b/homeassistant/components/acmeda/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/acmeda", "iot_class": "local_push", "loggers": ["aiopulse"], - "requirements": ["aiopulse==0.4.4"] + "requirements": ["aiopulse==0.4.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 52c7879cb91..d8e33b96236 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -318,7 +318,7 @@ aiooui==0.1.6 aiopegelonline==0.0.10 # homeassistant.components.acmeda -aiopulse==0.4.4 +aiopulse==0.4.6 # homeassistant.components.purpleair aiopurpleair==2022.12.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 20a86de78fd..d253225d17b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -300,7 +300,7 @@ aiooui==0.1.6 aiopegelonline==0.0.10 # homeassistant.components.acmeda -aiopulse==0.4.4 +aiopulse==0.4.6 # homeassistant.components.purpleair aiopurpleair==2022.12.1 From 36b7e8569ec0f9ca93b079523b60501bb38d254b Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sat, 31 Aug 2024 11:42:22 +0200 Subject: [PATCH 1286/1635] Send entity name or original name to LCN frontend (#124518) * Send name or original name to frontend * Use walrus operator * Fix docstring * Fix mutated config_entry.data --- homeassistant/components/lcn/websocket.py | 28 ++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/lcn/websocket.py b/homeassistant/components/lcn/websocket.py index b418e362b27..65896cc78d1 100644 --- a/homeassistant/components/lcn/websocket.py +++ b/homeassistant/components/lcn/websocket.py @@ -158,7 +158,13 @@ async def websocket_get_entity_configs( else: entity_configs = config_entry.data[CONF_ENTITIES] - connection.send_result(msg["id"], entity_configs) + result_entity_configs = [ + {**entity_config, CONF_NAME: entity.name or entity.original_name} + for entity_config in entity_configs[:] + if (entity := get_entity_entry(hass, entity_config, config_entry)) is not None + ] + + connection.send_result(msg["id"], result_entity_configs) @websocket_api.require_admin @@ -438,3 +444,23 @@ async def async_create_or_update_device_in_config_entry( await async_update_device_config(device_connection, device_config) hass.config_entries.async_update_entry(config_entry, data=data) + + +def get_entity_entry( + hass: HomeAssistant, entity_config: dict, config_entry: ConfigEntry +) -> er.RegistryEntry | None: + """Get entity RegistryEntry from entity_config.""" + entity_registry = er.async_get(hass) + domain_name = entity_config[CONF_DOMAIN] + domain_data = entity_config[CONF_DOMAIN_DATA] + resource = get_resource(domain_name, domain_data).lower() + unique_id = generate_unique_id( + config_entry.entry_id, + entity_config[CONF_ADDRESS], + resource, + ) + if ( + entity_id := entity_registry.async_get_entity_id(domain_name, DOMAIN, unique_id) + ) is None: + return None + return entity_registry.async_get(entity_id) From 2a8feda69116d0884965156ee78d444ab3803fdf Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 31 Aug 2024 12:00:12 +0200 Subject: [PATCH 1287/1635] Define household support in Mealie (#124950) --- 
homeassistant/components/mealie/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/mealie/__init__.py b/homeassistant/components/mealie/__init__.py index 5c9c91729c0..bf0fbcac406 100644 --- a/homeassistant/components/mealie/__init__.py +++ b/homeassistant/components/mealie/__init__.py @@ -48,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo ), ) try: + await client.define_household_support() about = await client.get_about() version = create_version(about.version) except MealieAuthenticationError as error: From 65f007ace7a8346661e97d1f98f81d02b4a61f5b Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sun, 1 Sep 2024 00:28:35 +1000 Subject: [PATCH 1288/1635] Remove HVAC Modes when no scopes in Teslemetry (#124612) * Remove modes when not scoped * Fix inits * Re-add raise * Remove unused raise_for_scope * Set hvac_modes when not scoped * tests --- .../components/teslemetry/climate.py | 36 +++++---- .../teslemetry/snapshots/test_climate.ambr | 79 +++++++++++++++++++ tests/components/teslemetry/test_climate.py | 15 +++- 3 files changed, 112 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index bd4fb0eba53..9fc68688271 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -84,8 +84,10 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): ) -> None: """Initialize the climate.""" self.scoped = Scope.VEHICLE_CMDS in scopes + if not self.scoped: self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] super().__init__( data, @@ -102,6 +104,10 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): else: self._attr_hvac_mode = HVACMode.OFF + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and not self.scoped: + self._attr_hvac_modes = [self._attr_hvac_mode] + self._attr_current_temperature = self.get("climate_state_inside_temp") self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") @@ -114,7 +120,6 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_on(self) -> None: """Set the climate state to on.""" - self.raise_for_scope() await self.wake_up_if_asleep() await handle_vehicle_command(self.api.auto_conditioning_start()) @@ -124,7 +129,6 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_turn_off(self) -> None: """Set the climate state to off.""" - self.raise_for_scope() await self.wake_up_if_asleep() await handle_vehicle_command(self.api.auto_conditioning_stop()) @@ -135,7 +139,6 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - if temp := kwargs.get(ATTR_TEMPERATURE): await self.wake_up_if_asleep() await handle_vehicle_command( @@ -206,20 +209,21 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn ) -> None: """Initialize the climate.""" + self.scoped = Scope.VEHICLE_CMDS in scopes + if self.scoped: + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + else: + self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] + 
super().__init__(data, "climate_state_cabin_overheat_protection") - # Supported Features - self._attr_supported_features = ( - ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF - ) - if self.get("vehicle_config_cop_user_set_temp_supported"): + # Supported Features from data + if self.scoped and self.get("vehicle_config_cop_user_set_temp_supported"): self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE - # Scopes - self.scoped = Scope.VEHICLE_CMDS in scopes - if not self.scoped: - self._attr_supported_features = ClimateEntityFeature(0) - def _async_update_attrs(self) -> None: """Update the attributes of the entity.""" @@ -228,6 +232,10 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn else: self._attr_hvac_mode = COP_MODES.get(state) + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and not self.scoped: + self._attr_hvac_modes = [self._attr_hvac_mode] + if (level := self.get("climate_state_cop_activation_temperature")) is None: self._attr_target_temperature = None else: @@ -245,8 +253,6 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn async def async_set_temperature(self, **kwargs: Any) -> None: """Set the climate temperature.""" - self.raise_for_scope() - if not (temp := kwargs.get(ATTR_TEMPERATURE)): return diff --git a/tests/components/teslemetry/snapshots/test_climate.ambr b/tests/components/teslemetry/snapshots/test_climate.ambr index b65796fe10e..f5a95c7e3f2 100644 --- a/tests/components/teslemetry/snapshots/test_climate.ambr +++ b/tests/components/teslemetry/snapshots/test_climate.ambr @@ -280,6 +280,85 @@ 'state': 'off', }) # --- +# name: test_climate_noscope[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': , + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_noscope[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'teslemetry', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- 
# name: test_climate_offline[climate.test_cabin_overheat_protection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/teslemetry/test_climate.py b/tests/components/teslemetry/test_climate.py index 31a39f1f21a..3cb4b67dc54 100644 --- a/tests/components/teslemetry/test_climate.py +++ b/tests/components/teslemetry/test_climate.py @@ -1,6 +1,6 @@ """Test the Teslemetry climate platform.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch from freezegun.api import FrozenDateTimeFactory import pytest @@ -371,12 +371,21 @@ async def test_asleep_or_offline( async def test_climate_noscope( hass: HomeAssistant, - mock_metadata, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_metadata: AsyncMock, ) -> None: """Tests that the climate entity is correct.""" mock_metadata.return_value = METADATA_NOSCOPE - await setup_platform(hass, [Platform.CLIMATE]) + entry = await setup_platform(hass, [Platform.CLIMATE]) + + entity_entries = er.async_entries_for_config_entry(entity_registry, entry.entry_id) + + assert entity_entries + for entity_entry in entity_entries: + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + entity_id = "climate.test_climate" with pytest.raises(ServiceValidationError): From 9da5dd0090f947eeab79230e2c5b3e23127cc515 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sat, 31 Aug 2024 16:38:06 +0200 Subject: [PATCH 1289/1635] Improve config flow type hints in cast (#124861) --- homeassistant/components/cast/config_flow.py | 34 +++++++++++++------- tests/components/cast/test_config_flow.py | 4 +-- 2 files changed, 24 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 22351f5d2f7..4f7dd59e83e 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -63,7 +63,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_confirm() - async def async_step_config(self, user_input=None): + async def async_step_config( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the setup.""" errors = {} data = {CONF_KNOWN_HOSTS: self._known_hosts} @@ -90,7 +92,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="config", data_schema=vol.Schema(fields), errors=errors ) - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm the setup.""" data = self._get_data() @@ -116,13 +120,15 @@ class CastOptionsFlowHandler(OptionsFlow): self.config_entry = config_entry self.updated_config: dict[str, Any] = {} - async def async_step_init(self, user_input=None): + async def async_step_init(self, user_input: None = None) -> ConfigFlowResult: """Manage the Google Cast options.""" return await self.async_step_basic_options() - async def async_step_basic_options(self, user_input=None): + async def async_step_basic_options( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the Google Cast options.""" - errors = {} + errors: dict[str, str] = {} current_config = self.config_entry.data if user_input is not None: bad_hosts, known_hosts = _string_to_list( @@ -139,9 +145,9 @@ class CastOptionsFlowHandler(OptionsFlow): self.hass.config_entries.async_update_entry( self.config_entry, data=self.updated_config ) - return 
self.async_create_entry(title="", data=None) + return self.async_create_entry(title="", data={}) - fields = {} + fields: dict[vol.Marker, type[str]] = {} suggested_value = _list_to_string(current_config.get(CONF_KNOWN_HOSTS)) _add_with_suggestion(fields, CONF_KNOWN_HOSTS, suggested_value) @@ -152,9 +158,11 @@ class CastOptionsFlowHandler(OptionsFlow): last_step=not self.show_advanced_options, ) - async def async_step_advanced_options(self, user_input=None): + async def async_step_advanced_options( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the Google Cast options.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: bad_cec, ignore_cec = _string_to_list( user_input.get(CONF_IGNORE_CEC, ""), IGNORE_CEC_SCHEMA @@ -169,9 +177,9 @@ class CastOptionsFlowHandler(OptionsFlow): self.hass.config_entries.async_update_entry( self.config_entry, data=self.updated_config ) - return self.async_create_entry(title="", data=None) + return self.async_create_entry(title="", data={}) - fields = {} + fields: dict[vol.Marker, type[str]] = {} current_config = self.config_entry.data suggested_value = _list_to_string(current_config.get(CONF_UUID)) _add_with_suggestion(fields, CONF_UUID, suggested_value) @@ -204,5 +212,7 @@ def _string_to_list(string, schema): return invalid, items -def _add_with_suggestion(fields, key, suggested_value): +def _add_with_suggestion( + fields: dict[vol.Marker, type[str]], key: str, suggested_value: str +) -> None: fields[vol.Optional(key, description={"suggested_value": suggested_value})] = str diff --git a/tests/components/cast/test_config_flow.py b/tests/components/cast/test_config_flow.py index 7dce3f768e2..2dcf007c6d4 100644 --- a/tests/components/cast/test_config_flow.py +++ b/tests/components/cast/test_config_flow.py @@ -250,7 +250,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: user_input=user_input_dict, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] is None + assert result["data"] == {} for other_param in advanced_parameters: if other_param == parameter: continue @@ -264,7 +264,7 @@ async def test_option_flow(hass: HomeAssistant, parameter_data) -> None: user_input={"known_hosts": ""}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] is None + assert result["data"] == {} expected_data = {**orig_data, "known_hosts": []} if parameter in advanced_parameters: expected_data[parameter] = updated From 30aa3a26adb3f9fc1e7b7787c987592710cbcfdb Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sat, 31 Aug 2024 16:40:12 +0200 Subject: [PATCH 1290/1635] Merge coordinators in Airgradient (#124714) --- .../components/airgradient/__init__.py | 40 +++------------- .../components/airgradient/button.py | 17 +++---- .../components/airgradient/coordinator.py | 35 ++++++-------- .../components/airgradient/entity.py | 8 ++++ .../components/airgradient/number.py | 16 +++---- .../components/airgradient/select.py | 18 ++++--- .../components/airgradient/sensor.py | 47 ++++++++++--------- .../components/airgradient/switch.py | 14 +++--- .../components/airgradient/update.py | 11 ++--- .../airgradient/snapshots/test_init.ambr | 2 +- tests/components/airgradient/test_button.py | 2 +- .../airgradient/test_config_flow.py | 2 +- tests/components/airgradient/test_init.py | 2 +- tests/components/airgradient/test_number.py | 2 +- tests/components/airgradient/test_select.py | 2 +- tests/components/airgradient/test_sensor.py | 2 +- 
tests/components/airgradient/test_switch.py | 2 +- 17 files changed, 98 insertions(+), 124 deletions(-) diff --git a/homeassistant/components/airgradient/__init__.py b/homeassistant/components/airgradient/__init__.py index 7ee8ac6a3c7..3b27d6cda5e 100644 --- a/homeassistant/components/airgradient/__init__.py +++ b/homeassistant/components/airgradient/__init__.py @@ -2,18 +2,14 @@ from __future__ import annotations -from dataclasses import dataclass - -from airgradient import AirGradientClient, get_model_name +from airgradient import AirGradientClient from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN -from .coordinator import AirGradientConfigCoordinator, AirGradientMeasurementCoordinator +from .coordinator import AirGradientCoordinator PLATFORMS: list[Platform] = [ Platform.BUTTON, @@ -25,15 +21,7 @@ PLATFORMS: list[Platform] = [ ] -@dataclass -class AirGradientData: - """AirGradient data class.""" - - measurement: AirGradientMeasurementCoordinator - config: AirGradientConfigCoordinator - - -type AirGradientConfigEntry = ConfigEntry[AirGradientData] +type AirGradientConfigEntry = ConfigEntry[AirGradientCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) -> bool: @@ -43,27 +31,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirGradientConfigEntry) entry.data[CONF_HOST], session=async_get_clientsession(hass) ) - measurement_coordinator = AirGradientMeasurementCoordinator(hass, client) - config_coordinator = AirGradientConfigCoordinator(hass, client) + coordinator = AirGradientCoordinator(hass, client) - await measurement_coordinator.async_config_entry_first_refresh() - await config_coordinator.async_config_entry_first_refresh() + await coordinator.async_config_entry_first_refresh() - device_registry = dr.async_get(hass) - device_registry.async_get_or_create( - config_entry_id=entry.entry_id, - identifiers={(DOMAIN, measurement_coordinator.serial_number)}, - manufacturer="AirGradient", - model=get_model_name(measurement_coordinator.data.model), - model_id=measurement_coordinator.data.model, - serial_number=measurement_coordinator.data.serial_number, - sw_version=measurement_coordinator.data.firmware_version, - ) - - entry.runtime_data = AirGradientData( - measurement=measurement_coordinator, - config=config_coordinator, - ) + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/airgradient/button.py b/homeassistant/components/airgradient/button.py index b59188ebdd4..32a9b5adedf 100644 --- a/homeassistant/components/airgradient/button.py +++ b/homeassistant/components/airgradient/button.py @@ -15,8 +15,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN, AirGradientConfigEntry -from .coordinator import AirGradientConfigCoordinator +from . 
import AirGradientConfigEntry +from .const import DOMAIN +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -47,8 +48,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up AirGradient button entities based on a config entry.""" - model = entry.runtime_data.measurement.data.model - coordinator = entry.runtime_data.config + coordinator = entry.runtime_data + model = coordinator.data.measures.model added_entities = False @@ -57,7 +58,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): entities = [AirGradientButton(coordinator, CO2_CALIBRATION)] @@ -67,7 +68,8 @@ async def async_setup_entry( async_add_entities(entities) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -87,11 +89,10 @@ class AirGradientButton(AirGradientEntity, ButtonEntity): """Defines an AirGradient button.""" entity_description: AirGradientButtonEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientButtonEntityDescription, ) -> None: """Initialize airgradient button.""" diff --git a/homeassistant/components/airgradient/coordinator.py b/homeassistant/components/airgradient/coordinator.py index c3def0b1f33..4e1c335019c 100644 --- a/homeassistant/components/airgradient/coordinator.py +++ b/homeassistant/components/airgradient/coordinator.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import dataclass from datetime import timedelta from typing import TYPE_CHECKING @@ -16,7 +17,15 @@ if TYPE_CHECKING: from . 
import AirGradientConfigEntry -class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]): +@dataclass +class AirGradientData: + """Class for AirGradient data.""" + + measures: Measures + config: Config + + +class AirGradientCoordinator(DataUpdateCoordinator[AirGradientData]): """Class to manage fetching AirGradient data.""" config_entry: AirGradientConfigEntry @@ -33,25 +42,11 @@ class AirGradientCoordinator[_DataT](DataUpdateCoordinator[_DataT]): assert self.config_entry.unique_id self.serial_number = self.config_entry.unique_id - async def _async_update_data(self) -> _DataT: + async def _async_update_data(self) -> AirGradientData: try: - return await self._update_data() + measures = await self.client.get_current_measures() + config = await self.client.get_config() except AirGradientError as error: raise UpdateFailed(error) from error - - async def _update_data(self) -> _DataT: - raise NotImplementedError - - -class AirGradientMeasurementCoordinator(AirGradientCoordinator[Measures]): - """Class to manage fetching AirGradient data.""" - - async def _update_data(self) -> Measures: - return await self.client.get_current_measures() - - -class AirGradientConfigCoordinator(AirGradientCoordinator[Config]): - """Class to manage fetching AirGradient data.""" - - async def _update_data(self) -> Config: - return await self.client.get_config() + else: + return AirGradientData(measures, config) diff --git a/homeassistant/components/airgradient/entity.py b/homeassistant/components/airgradient/entity.py index 4de07904bba..588a799610b 100644 --- a/homeassistant/components/airgradient/entity.py +++ b/homeassistant/components/airgradient/entity.py @@ -1,5 +1,7 @@ """Base class for AirGradient entities.""" +from airgradient import get_model_name + from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -15,6 +17,12 @@ class AirGradientEntity(CoordinatorEntity[AirGradientCoordinator]): def __init__(self, coordinator: AirGradientCoordinator) -> None: """Initialize airgradient entity.""" super().__init__(coordinator) + measures = coordinator.data.measures self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, coordinator.serial_number)}, + manufacturer="AirGradient", + model=get_model_name(measures.model), + model_id=measures.model, + serial_number=coordinator.serial_number, + sw_version=measures.firmware_version, ) diff --git a/homeassistant/components/airgradient/number.py b/homeassistant/components/airgradient/number.py index 139357f3753..7fd282ddd8b 100644 --- a/homeassistant/components/airgradient/number.py +++ b/homeassistant/components/airgradient/number.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN -from .coordinator import AirGradientConfigCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -62,8 +62,8 @@ async def async_setup_entry( ) -> None: """Set up AirGradient number entities based on a config entry.""" - model = entry.runtime_data.measurement.data.model - coordinator = entry.runtime_data.config + coordinator = entry.runtime_data + model = coordinator.data.measures.model added_entities = False @@ -72,7 +72,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): entities = [] @@ -84,7 +84,8 @@ async def async_setup_entry( async_add_entities(entities) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -104,11 +105,10 @@ class AirGradientNumber(AirGradientEntity, NumberEntity): """Defines an AirGradient number entity.""" entity_description: AirGradientNumberEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientNumberEntityDescription, ) -> None: """Initialize AirGradient number.""" @@ -119,7 +119,7 @@ class AirGradientNumber(AirGradientEntity, NumberEntity): @property def native_value(self) -> int | None: """Return the state of the number.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) async def async_set_native_value(self, value: float) -> None: """Set the selected value.""" diff --git a/homeassistant/components/airgradient/select.py b/homeassistant/components/airgradient/select.py index 532f7167dff..af56802d842 100644 --- a/homeassistant/components/airgradient/select.py +++ b/homeassistant/components/airgradient/select.py @@ -18,7 +18,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AirGradientConfigEntry from .const import DOMAIN, PM_STANDARD, PM_STANDARD_REVERSE -from .coordinator import AirGradientConfigCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -144,13 +144,11 @@ async def async_setup_entry( ) -> None: """Set up AirGradient select entities based on a config entry.""" - coordinator = entry.runtime_data.config - measurement_coordinator = entry.runtime_data.measurement + coordinator = entry.runtime_data + model = coordinator.data.measures.model async_add_entities([AirGradientSelect(coordinator, CONFIG_CONTROL_ENTITY)]) - model = measurement_coordinator.data.model - added_entities = False @callback @@ -158,7 +156,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): entities: list[AirGradientSelect] = [ @@ -179,7 +177,8 @@ async def async_setup_entry( async_add_entities(entities) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -201,11 +200,10 @@ class AirGradientSelect(AirGradientEntity, SelectEntity): """Defines an AirGradient select entity.""" entity_description: AirGradientSelectEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientSelectEntityDescription, ) -> None: """Initialize AirGradient select.""" @@ -216,7 +214,7 @@ class AirGradientSelect(AirGradientEntity, SelectEntity): @property def current_option(self) -> str | None: """Return the state of the select.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) async def async_select_option(self, option: str) -> None: """Change the selected option.""" diff --git a/homeassistant/components/airgradient/sensor.py b/homeassistant/components/airgradient/sensor.py index f431c49ed2a..497d4cc0488 100644 --- a/homeassistant/components/airgradient/sensor.py +++ b/homeassistant/components/airgradient/sensor.py @@ -32,7 +32,7 @@ from homeassistant.helpers.typing import StateType from . 
import AirGradientConfigEntry from .const import PM_STANDARD, PM_STANDARD_REVERSE -from .coordinator import AirGradientConfigCoordinator, AirGradientMeasurementCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -218,7 +218,7 @@ async def async_setup_entry( ) -> None: """Set up AirGradient sensor entities based on a config entry.""" - coordinator = entry.runtime_data.measurement + coordinator = entry.runtime_data listener: Callable[[], None] | None = None not_setup: set[AirGradientMeasurementSensorEntityDescription] = set( MEASUREMENT_SENSOR_TYPES @@ -232,7 +232,7 @@ async def async_setup_entry( not_setup = set() sensors = [] for description in sensor_descriptions: - if description.value_fn(coordinator.data) is None: + if description.value_fn(coordinator.data.measures) is None: not_setup.add(description) else: sensors.append(AirGradientMeasurementSensor(coordinator, description)) @@ -248,64 +248,65 @@ async def async_setup_entry( add_entities() entities = [ - AirGradientConfigSensor(entry.runtime_data.config, description) + AirGradientConfigSensor(coordinator, description) for description in CONFIG_SENSOR_TYPES ] - if "L" in coordinator.data.model: + if "L" in coordinator.data.measures.model: entities.extend( - AirGradientConfigSensor(entry.runtime_data.config, description) + AirGradientConfigSensor(coordinator, description) for description in CONFIG_LED_BAR_SENSOR_TYPES ) - if "I" in coordinator.data.model: + if "I" in coordinator.data.measures.model: entities.extend( - AirGradientConfigSensor(entry.runtime_data.config, description) + AirGradientConfigSensor(coordinator, description) for description in CONFIG_DISPLAY_SENSOR_TYPES ) async_add_entities(entities) -class AirGradientMeasurementSensor(AirGradientEntity, SensorEntity): +class AirGradientSensor(AirGradientEntity, SensorEntity): """Defines an AirGradient sensor.""" - entity_description: AirGradientMeasurementSensorEntityDescription - coordinator: AirGradientMeasurementCoordinator - def __init__( self, - coordinator: AirGradientMeasurementCoordinator, - description: AirGradientMeasurementSensorEntityDescription, + coordinator: AirGradientCoordinator, + description: SensorEntityDescription, ) -> None: """Initialize airgradient sensor.""" super().__init__(coordinator) self.entity_description = description self._attr_unique_id = f"{coordinator.serial_number}-{description.key}" + +class AirGradientMeasurementSensor(AirGradientSensor): + """Defines an AirGradient sensor.""" + + entity_description: AirGradientMeasurementSensorEntityDescription + @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.measures) -class AirGradientConfigSensor(AirGradientEntity, SensorEntity): +class AirGradientConfigSensor(AirGradientSensor): """Defines an AirGradient sensor.""" entity_description: AirGradientConfigSensorEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientConfigSensorEntityDescription, ) -> None: """Initialize airgradient sensor.""" - super().__init__(coordinator) - self.entity_description = description - self._attr_unique_id = f"{coordinator.serial_number}-{description.key}" + super().__init__(coordinator, description) self._attr_entity_registry_enabled_default = ( - 
coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL ) @property def native_value(self) -> StateType: """Return the state of the sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) diff --git a/homeassistant/components/airgradient/switch.py b/homeassistant/components/airgradient/switch.py index 60c3f83ae5e..329f704e755 100644 --- a/homeassistant/components/airgradient/switch.py +++ b/homeassistant/components/airgradient/switch.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AirGradientConfigEntry from .const import DOMAIN -from .coordinator import AirGradientConfigCoordinator +from .coordinator import AirGradientCoordinator from .entity import AirGradientEntity @@ -46,7 +46,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up AirGradient switch entities based on a config entry.""" - coordinator = entry.runtime_data.config + coordinator = entry.runtime_data added_entities = False @@ -55,7 +55,7 @@ async def async_setup_entry( nonlocal added_entities if ( - coordinator.data.configuration_control is ConfigurationControl.LOCAL + coordinator.data.config.configuration_control is ConfigurationControl.LOCAL and not added_entities ): async_add_entities( @@ -63,7 +63,8 @@ async def async_setup_entry( ) added_entities = True elif ( - coordinator.data.configuration_control is not ConfigurationControl.LOCAL + coordinator.data.config.configuration_control + is not ConfigurationControl.LOCAL and added_entities ): entity_registry = er.async_get(hass) @@ -82,11 +83,10 @@ class AirGradientSwitch(AirGradientEntity, SwitchEntity): """Defines an AirGradient switch entity.""" entity_description: AirGradientSwitchEntityDescription - coordinator: AirGradientConfigCoordinator def __init__( self, - coordinator: AirGradientConfigCoordinator, + coordinator: AirGradientCoordinator, description: AirGradientSwitchEntityDescription, ) -> None: """Initialize AirGradient switch.""" @@ -97,7 +97,7 @@ class AirGradientSwitch(AirGradientEntity, SwitchEntity): @property def is_on(self) -> bool: """Return the state of the switch.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.config) async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" diff --git a/homeassistant/components/airgradient/update.py b/homeassistant/components/airgradient/update.py index 95e64930ea6..eb6708afb67 100644 --- a/homeassistant/components/airgradient/update.py +++ b/homeassistant/components/airgradient/update.py @@ -7,7 +7,7 @@ from homeassistant.components.update import UpdateDeviceClass, UpdateEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AirGradientConfigEntry, AirGradientMeasurementCoordinator +from . 
import AirGradientConfigEntry, AirGradientCoordinator from .entity import AirGradientEntity SCAN_INTERVAL = timedelta(hours=1) @@ -20,18 +20,17 @@ async def async_setup_entry( ) -> None: """Set up Airgradient update platform.""" - data = config_entry.runtime_data + coordinator = config_entry.runtime_data - async_add_entities([AirGradientUpdate(data.measurement)], True) + async_add_entities([AirGradientUpdate(coordinator)], True) class AirGradientUpdate(AirGradientEntity, UpdateEntity): """Representation of Airgradient Update.""" _attr_device_class = UpdateDeviceClass.FIRMWARE - coordinator: AirGradientMeasurementCoordinator - def __init__(self, coordinator: AirGradientMeasurementCoordinator) -> None: + def __init__(self, coordinator: AirGradientCoordinator) -> None: """Initialize the entity.""" super().__init__(coordinator) self._attr_unique_id = f"{coordinator.serial_number}-update" @@ -44,7 +43,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity): @property def installed_version(self) -> str: """Return the installed version of the entity.""" - return self.coordinator.data.firmware_version + return self.coordinator.data.measures.firmware_version async def async_update(self) -> None: """Update the entity.""" diff --git a/tests/components/airgradient/snapshots/test_init.ambr b/tests/components/airgradient/snapshots/test_init.ambr index e47c5b38bbc..72cb12535f1 100644 --- a/tests/components/airgradient/snapshots/test_init.ambr +++ b/tests/components/airgradient/snapshots/test_init.ambr @@ -57,7 +57,7 @@ 'name': 'Airgradient', 'name_by_user': None, 'primary_config_entry': , - 'serial_number': '84fce60bec38', + 'serial_number': '84fce612f5b8', 'suggested_area': None, 'sw_version': '3.1.1', 'via_device_id': None, diff --git a/tests/components/airgradient/test_button.py b/tests/components/airgradient/test_button.py index 7901c3a067b..83de2c2f048 100644 --- a/tests/components/airgradient/test_button.py +++ b/tests/components/airgradient/test_button.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant diff --git a/tests/components/airgradient/test_config_flow.py b/tests/components/airgradient/test_config_flow.py index 8730b18676f..73dbd17a213 100644 --- a/tests/components/airgradient/test_config_flow.py +++ b/tests/components/airgradient/test_config_flow.py @@ -9,7 +9,7 @@ from airgradient import ( ConfigurationControl, ) -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST diff --git a/tests/components/airgradient/test_init.py b/tests/components/airgradient/test_init.py index 408e6f5f3ba..a566254d106 100644 --- a/tests/components/airgradient/test_init.py +++ b/tests/components/airgradient/test_init.py @@ -4,7 +4,7 @@ from unittest.mock import AsyncMock from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.core import HomeAssistant from 
homeassistant.helpers import device_registry as dr diff --git a/tests/components/airgradient/test_number.py b/tests/components/airgradient/test_number.py index 0803c0d437f..7aabda8f81c 100644 --- a/tests/components/airgradient/test_number.py +++ b/tests/components/airgradient/test_number.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.number import ( ATTR_VALUE, DOMAIN as NUMBER_DOMAIN, diff --git a/tests/components/airgradient/test_select.py b/tests/components/airgradient/test_select.py index 61679a15c07..de4a7beaaa7 100644 --- a/tests/components/airgradient/test_select.py +++ b/tests/components/airgradient/test_select.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.select import ( DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/airgradient/test_sensor.py b/tests/components/airgradient/test_sensor.py index c2e53ef4de2..e3fed70839a 100644 --- a/tests/components/airgradient/test_sensor.py +++ b/tests/components/airgradient/test_sensor.py @@ -8,7 +8,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er diff --git a/tests/components/airgradient/test_switch.py b/tests/components/airgradient/test_switch.py index 20a1cb7470b..a0cbdd17d75 100644 --- a/tests/components/airgradient/test_switch.py +++ b/tests/components/airgradient/test_switch.py @@ -7,7 +7,7 @@ from airgradient import Config from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion -from homeassistant.components.airgradient import DOMAIN +from homeassistant.components.airgradient.const import DOMAIN from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, From 3e60d7aa11cc7d89149d06d49437bf24044fdb6d Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Sun, 1 Sep 2024 00:41:00 +1000 Subject: [PATCH 1291/1635] Small code quality fix in Teslemetry (#124603) * Fix cop_mode logic bug * Update climate.py * Fix attributes --- .../components/teslemetry/climate.py | 20 ++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 9fc68688271..9218be4dcb1 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -183,20 +183,28 @@ COP_MODES = { "FanOnly": HVACMode.FAN_ONLY, } +# String to celsius COP_LEVELS = { "Low": 30, "Medium": 35, "High": 40, } +# Celsius to IntEnum +TEMP_LEVELS = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} + class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEntity): """Telemetry vehicle cabin overheat protection entity.""" _attr_precision = 
PRECISION_WHOLE _attr_target_temperature_step = 5 - _attr_min_temp = 30 - _attr_max_temp = 40 + _attr_min_temp = COP_LEVELS["Low"] + _attr_max_temp = COP_LEVELS["High"] _attr_temperature_unit = UnitOfTemperature.CELSIUS _attr_hvac_modes = list(COP_MODES.values()) _enable_turn_on_off_backwards_compatibility = False @@ -256,13 +264,7 @@ class TeslemetryCabinOverheatProtectionEntity(TeslemetryVehicleEntity, ClimateEn if not (temp := kwargs.get(ATTR_TEMPERATURE)): return - if temp == 30: - cop_mode = CabinOverheatProtectionTemp.LOW - elif temp == 35: - cop_mode = CabinOverheatProtectionTemp.MEDIUM - elif temp == 40: - cop_mode = CabinOverheatProtectionTemp.HIGH - else: + if (cop_mode := TEMP_LEVELS.get(temp)) is None: raise ServiceValidationError( translation_domain=DOMAIN, translation_key="invalid_cop_temp", From 81f5068354372ec67f03b4bca8e62318611a41bb Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 08:22:50 -0700 Subject: [PATCH 1292/1635] Clean up Google Photos media source (#124977) * Clean up Google Photos media source * Fix typo --------- Co-authored-by: Martin Hjelmare --- .../components/google_photos/media_source.py | 32 ++++++++----------- 1 file changed, 13 insertions(+), 19 deletions(-) diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py index a2f9383ec5f..cdb6b22a3ed 100644 --- a/homeassistant/components/google_photos/media_source.py +++ b/homeassistant/components/google_photos/media_source.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from enum import StrEnum import logging -from typing import Any, cast +from typing import Any, Self, cast from homeassistant.components.media_player import MediaClass, MediaType from homeassistant.components.media_source import ( @@ -77,37 +77,31 @@ class PhotosIdentifier: """Identifies the album or photo contents to show.""" def as_string(self) -> str: - """Serialize the identiifer as a string. - - This is the opposite if of(). - """ + """Serialize the identifier as a string.""" if self.id_type is None: return self.config_entry_id return f"{self.config_entry_id}/{self.id_type}/{self.media_id}" - @staticmethod - def of(identifier: str) -> "PhotosIdentifier": - """Parse a PhotosIdentifier form a string. - - This is the opposite of as_string(). 
- """ + @classmethod + def of(cls, identifier: str) -> Self: + """Parse a PhotosIdentifier form a string.""" parts = identifier.split("/") _LOGGER.debug("parts=%s", parts) if len(parts) == 1: - return PhotosIdentifier(parts[0]) + return cls(parts[0]) if len(parts) != 3: raise BrowseError(f"Invalid identifier: {identifier}") - return PhotosIdentifier(parts[0], PhotosIdentifierType.of(parts[1]), parts[2]) + return cls(parts[0], PhotosIdentifierType.of(parts[1]), parts[2]) - @staticmethod - def album(config_entry_id: str, media_id: str) -> "PhotosIdentifier": + @classmethod + def album(cls, config_entry_id: str, media_id: str) -> Self: """Create an album PhotosIdentifier.""" - return PhotosIdentifier(config_entry_id, PhotosIdentifierType.ALBUM, media_id) + return cls(config_entry_id, PhotosIdentifierType.ALBUM, media_id) - @staticmethod - def photo(config_entry_id: str, media_id: str) -> "PhotosIdentifier": + @classmethod + def photo(cls, config_entry_id: str, media_id: str) -> Self: """Create an album PhotosIdentifier.""" - return PhotosIdentifier(config_entry_id, PhotosIdentifierType.PHOTO, media_id) + return cls(config_entry_id, PhotosIdentifierType.PHOTO, media_id) async def async_get_media_source(hass: HomeAssistant) -> MediaSource: From 994c2ebca124b6e2610324e8086a6d6495efef93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sat, 31 Aug 2024 17:30:58 +0200 Subject: [PATCH 1293/1635] Update aioairzone-cloud to v0.6.3 (#124978) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álvaro Fernández Rojas --- homeassistant/components/airzone_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index b691770e934..05f854e6caf 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.2"] + "requirements": ["aioairzone-cloud==0.6.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index d8e33b96236..1be3393dfc2 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -176,7 +176,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.2 +aioairzone-cloud==0.6.3 # homeassistant.components.airzone aioairzone==0.8.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d253225d17b..4bd9dfa1e29 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -164,7 +164,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.2 +aioairzone-cloud==0.6.3 # homeassistant.components.airzone aioairzone==0.8.2 From 5cd8e4ab7e879f955a43f36618f757bcf9c6cf7e Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sat, 31 Aug 2024 19:34:41 +0200 Subject: [PATCH 1294/1635] Update mypy-dev to 1.12.0a3 (#124939) * Update mypy-dev to 1.12.0a3 * Fix --- homeassistant/runner.py | 2 +- homeassistant/util/frozen_dataclass_compat.py | 7 ++++++- requirements_test.txt | 2 +- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/homeassistant/runner.py b/homeassistant/runner.py index 
4bac12ec399..102dbafe147 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -175,7 +175,7 @@ def _enable_posix_spawn() -> None: # less efficient. This is a workaround to force posix_spawn() # when using musl since cpython is not aware its supported. tag = next(packaging.tags.sys_tags()) - subprocess._USE_POSIX_SPAWN = "musllinux" in tag.platform # noqa: SLF001 + subprocess._USE_POSIX_SPAWN = "musllinux" in tag.platform # type: ignore[misc] # noqa: SLF001 def run(runtime_config: RuntimeConfig) -> int: diff --git a/homeassistant/util/frozen_dataclass_compat.py b/homeassistant/util/frozen_dataclass_compat.py index 6184e4564eb..81ce9961a0b 100644 --- a/homeassistant/util/frozen_dataclass_compat.py +++ b/homeassistant/util/frozen_dataclass_compat.py @@ -8,7 +8,10 @@ from __future__ import annotations import dataclasses import sys -from typing import Any, dataclass_transform +from typing import TYPE_CHECKING, Any, cast, dataclass_transform + +if TYPE_CHECKING: + from _typeshed import DataclassInstance def _class_fields(cls: type, kw_only: bool) -> list[tuple[str, Any, Any]]: @@ -111,6 +114,8 @@ class FrozenOrThawed(type): """ cls, *_args = args if dataclasses.is_dataclass(cls): + if TYPE_CHECKING: + cls = cast(type[DataclassInstance], cls) return object.__new__(cls) return cls._dataclass(*_args, **kwargs) diff --git a/requirements_test.txt b/requirements_test.txt index 19a60b6aa28..87203daae96 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -11,7 +11,7 @@ astroid==3.2.4 coverage==7.6.0 freezegun==1.5.1 mock-open==1.4.0 -mypy-dev==1.12.0a2 +mypy-dev==1.12.0a3 pre-commit==3.7.1 pydantic==1.10.17 pylint==3.2.6 From 93afc9458ab2841a3383b8e37808040792240e71 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 11:38:45 -0700 Subject: [PATCH 1295/1635] Update nest to only include the image attachment payload for cameras that support fetching media (#124590) Only include the image attachment payload for cameras that support fetching media --- homeassistant/components/nest/__init__.py | 31 +++++++++------- tests/components/nest/test_events.py | 43 ++++++++++++++++++++--- 2 files changed, 57 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index da72fdfd53b..8a1719a9bd5 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -166,38 +166,43 @@ class SignalUpdateCallback: ) if not device_entry: return + supported_traits = self._supported_traits(device_id) for api_event_type, image_event in events.items(): if not (event_type := EVENT_NAME_MAP.get(api_event_type)): continue nest_event_id = image_event.event_token - attachment = { - "image": EVENT_THUMBNAIL_URL_FORMAT.format( - device_id=device_entry.id, event_token=image_event.event_token - ), - } - if self._supports_clip(device_id): - attachment["video"] = EVENT_MEDIA_API_URL_FORMAT.format( - device_id=device_entry.id, event_token=image_event.event_token - ) message = { "device_id": device_entry.id, "type": event_type, "timestamp": event_message.timestamp, "nest_event_id": nest_event_id, - "attachment": attachment, } + if ( + TraitType.CAMERA_EVENT_IMAGE in supported_traits + or TraitType.CAMERA_CLIP_PREVIEW in supported_traits + ): + attachment = { + "image": EVENT_THUMBNAIL_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ) + } + if TraitType.CAMERA_CLIP_PREVIEW in supported_traits: + attachment["video"] = EVENT_MEDIA_API_URL_FORMAT.format( 
+ device_id=device_entry.id, event_token=image_event.event_token + ) + message["attachment"] = attachment if image_event.zones: message["zones"] = image_event.zones self._hass.bus.async_fire(NEST_EVENT, message) - def _supports_clip(self, device_id: str) -> bool: + def _supported_traits(self, device_id: str) -> list[TraitType]: if not ( device_manager := self._hass.data[DOMAIN] .get(self._config_entry_id, {}) .get(DATA_DEVICE_MANAGER) ) or not (device := device_manager.devices.get(device_id)): - return False - return TraitType.CAMERA_CLIP_PREVIEW in device.traits + return [] + return list(device.traits) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/tests/components/nest/test_events.py b/tests/components/nest/test_events.py index 643a2614bbc..e746e5f263f 100644 --- a/tests/components/nest/test_events.py +++ b/tests/components/nest/test_events.py @@ -122,28 +122,28 @@ def create_events(events, device_id=DEVICE_ID, timestamp=None): [ ( "sdm.devices.types.DOORBELL", - ["sdm.devices.traits.DoorbellChime"], + ["sdm.devices.traits.DoorbellChime", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.DoorbellChime.Chime", "Doorbell", "doorbell_chime", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraMotion"], + ["sdm.devices.traits.CameraMotion", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraMotion.Motion", "Camera", "camera_motion", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraPerson"], + ["sdm.devices.traits.CameraPerson", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraPerson.Person", "Camera", "camera_person", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraSound"], + ["sdm.devices.traits.CameraSound", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraSound.Sound", "Camera", "camera_sound", @@ -234,6 +234,41 @@ async def test_camera_multiple_event( } +@pytest.mark.parametrize( + "device_traits", + [(["sdm.devices.traits.CameraMotion"])], +) +async def test_media_not_supported( + hass: HomeAssistant, entity_registry: er.EntityRegistry, subscriber, setup_platform +) -> None: + """Test a pubsub message for a camera person event.""" + events = async_capture_events(hass, NEST_EVENT) + await setup_platform() + entry = entity_registry.async_get("camera.front") + assert entry is not None + + event_map = { + "sdm.devices.events.CameraMotion.Motion": { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + }, + } + + timestamp = utcnow() + await subscriber.async_receive_event(create_events(event_map, timestamp=timestamp)) + await hass.async_block_till_done() + + event_time = timestamp.replace(microsecond=0) + assert len(events) == 1 + assert event_view(events[0].data) == { + "device_id": entry.device_id, + "type": "camera_motion", + "timestamp": event_time, + } + # Media fetching not supported by this device + assert "attachment" not in events[0].data + + async def test_unknown_event(hass: HomeAssistant, subscriber, setup_platform) -> None: """Test a pubsub message for an unknown event type.""" events = async_capture_events(hass, NEST_EVENT) From d3879a36d163aa6307308738fbe5f2985441e053 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 12:11:22 -0700 Subject: [PATCH 1296/1635] Add loggers for Google Photos integration (#124986) --- homeassistant/components/google_photos/manifest.json | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/google_photos/manifest.json 
b/homeassistant/components/google_photos/manifest.json index 3299b437d29..3fefb6cf610 100644 --- a/homeassistant/components/google_photos/manifest.json +++ b/homeassistant/components/google_photos/manifest.json @@ -6,5 +6,6 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/google_photos", "iot_class": "cloud_polling", + "loggers": ["googleapiclient"], "requirements": ["google-api-python-client==2.71.0"] } From ef84a8869e6e5d1772688f9a02f2c91319fe10ee Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 12:16:14 -0700 Subject: [PATCH 1297/1635] Add Google Photos service for uploading content (#124956) * Add Google Photos upload support * Fix format * Merge in scope/reauth changes * Address PR feedback * Fix blocking i/o in async --- .../components/google_photos/__init__.py | 4 + homeassistant/components/google_photos/api.py | 48 +++- .../components/google_photos/const.py | 9 +- .../components/google_photos/icons.json | 7 + .../components/google_photos/media_source.py | 13 +- .../components/google_photos/services.py | 116 ++++++++ .../components/google_photos/services.yaml | 11 + .../components/google_photos/strings.json | 37 +++ tests/components/google_photos/conftest.py | 10 +- .../google_photos/test_config_flow.py | 12 + .../google_photos/test_media_source.py | 20 +- .../components/google_photos/test_services.py | 256 ++++++++++++++++++ 12 files changed, 536 insertions(+), 7 deletions(-) create mode 100644 homeassistant/components/google_photos/icons.json create mode 100644 homeassistant/components/google_photos/services.py create mode 100644 homeassistant/components/google_photos/services.yaml create mode 100644 tests/components/google_photos/test_services.py diff --git a/homeassistant/components/google_photos/__init__.py b/homeassistant/components/google_photos/__init__.py index 643ad0b41ad..ee02c695f16 100644 --- a/homeassistant/components/google_photos/__init__.py +++ b/homeassistant/components/google_photos/__init__.py @@ -11,6 +11,7 @@ from homeassistant.helpers import config_entry_oauth2_flow from . 
import api from .const import DOMAIN +from .services import async_register_services type GooglePhotosConfigEntry = ConfigEntry[api.AsyncConfigEntryAuth] @@ -41,6 +42,9 @@ async def async_setup_entry( except ClientError as err: raise ConfigEntryNotReady from err entry.runtime_data = auth + + async_register_services(hass) + return True diff --git a/homeassistant/components/google_photos/api.py b/homeassistant/components/google_photos/api.py index b387326148f..c5de03d7d21 100644 --- a/homeassistant/components/google_photos/api.py +++ b/homeassistant/components/google_photos/api.py @@ -5,6 +5,7 @@ from functools import partial import logging from typing import Any, cast +from aiohttp.client_exceptions import ClientError from google.oauth2.credentials import Credentials from googleapiclient.discovery import Resource, build from googleapiclient.errors import HttpError @@ -12,7 +13,7 @@ from googleapiclient.http import BatchHttpRequest, HttpRequest from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow from .exceptions import GooglePhotosApiError @@ -25,6 +26,7 @@ GET_MEDIA_ITEM_FIELDS = ( "id,baseUrl,mimeType,filename,mediaMetadata(width,height,photo,video)" ) LIST_MEDIA_ITEM_FIELDS = f"nextPageToken,mediaItems({GET_MEDIA_ITEM_FIELDS})" +UPLOAD_API = "https://photoslibrary.googleapis.com/v1/uploads" class AuthBase(ABC): @@ -70,6 +72,40 @@ class AuthBase(ABC): ) return await self._execute(cmd) + async def upload_content(self, content: bytes, mime_type: str) -> str: + """Upload media content to the API and return an upload token.""" + token = await self.async_get_access_token() + session = aiohttp_client.async_get_clientsession(self._hass) + try: + result = await session.post( + UPLOAD_API, headers=_upload_headers(token, mime_type), data=content + ) + result.raise_for_status() + return await result.text() + except ClientError as err: + raise GooglePhotosApiError(f"Failed to upload content: {err}") from err + + async def create_media_items(self, upload_tokens: list[str]) -> list[str]: + """Create a batch of media items and return the ids.""" + service = await self._get_photos_service() + cmd: HttpRequest = service.mediaItems().batchCreate( + body={ + "newMediaItems": [ + { + "simpleMediaItem": { + "uploadToken": upload_token, + } + for upload_token in upload_tokens + } + ] + } + ) + result = await self._execute(cmd) + return [ + media_item["mediaItem"]["id"] + for media_item in result["newMediaItemResults"] + ] + async def _get_photos_service(self) -> Resource: """Get current photos library API resource.""" token = await self.async_get_access_token() @@ -141,3 +177,13 @@ class AsyncConfigFlowAuth(AuthBase): async def async_get_access_token(self) -> str: """Return a valid access token.""" return self._token + + +def _upload_headers(token: str, mime_type: str) -> dict[str, Any]: + """Create the upload headers.""" + return { + "Authorization": f"Bearer {token}", + "Content-Type": "application/octet-stream", + "X-Goog-Upload-Content-Type": mime_type, + "X-Goog-Upload-Protocol": "raw", + } diff --git a/homeassistant/components/google_photos/const.py b/homeassistant/components/google_photos/const.py index 7752f817608..c629e6feb27 100644 --- a/homeassistant/components/google_photos/const.py +++ b/homeassistant/components/google_photos/const.py @@ -4,7 +4,14 @@ DOMAIN = "google_photos" OAUTH2_AUTHORIZE = 
"https://accounts.google.com/o/oauth2/v2/auth" OAUTH2_TOKEN = "https://oauth2.googleapis.com/token" -OAUTH2_SCOPES = [ + +UPLOAD_SCOPE = "https://www.googleapis.com/auth/photoslibrary.appendonly" +READ_SCOPES = [ "https://www.googleapis.com/auth/photoslibrary.readonly", + "https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata", +] +OAUTH2_SCOPES = [ + *READ_SCOPES, + UPLOAD_SCOPE, "https://www.googleapis.com/auth/userinfo.profile", ] diff --git a/homeassistant/components/google_photos/icons.json b/homeassistant/components/google_photos/icons.json new file mode 100644 index 00000000000..5d51ed4370a --- /dev/null +++ b/homeassistant/components/google_photos/icons.json @@ -0,0 +1,7 @@ +{ + "services": { + "upload": { + "service": "mdi:cloud-upload" + } + } +} diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py index cdb6b22a3ed..9b922ee3201 100644 --- a/homeassistant/components/google_photos/media_source.py +++ b/homeassistant/components/google_photos/media_source.py @@ -16,7 +16,7 @@ from homeassistant.components.media_source import ( from homeassistant.core import HomeAssistant from . import GooglePhotosConfigEntry -from .const import DOMAIN +from .const import DOMAIN, READ_SCOPES from .exceptions import GooglePhotosApiError _LOGGER = logging.getLogger(__name__) @@ -168,7 +168,7 @@ class GooglePhotosMediaSource(MediaSource): children_media_class=MediaClass.DIRECTORY, children=[ _build_account(entry, PhotosIdentifier(cast(str, entry.unique_id))) - for entry in self.hass.config_entries.async_loaded_entries(DOMAIN) + for entry in self._async_config_entries() ], ) @@ -218,6 +218,15 @@ class GooglePhotosMediaSource(MediaSource): ] return source + def _async_config_entries(self) -> list[GooglePhotosConfigEntry]: + """Return all config entries that support photo library reads.""" + entries = [] + for entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + scopes = entry.data["token"]["scope"].split(" ") + if any(scope in scopes for scope in READ_SCOPES): + entries.append(entry) + return entries + def _async_config_entry(self, config_entry_id: str) -> GooglePhotosConfigEntry: """Return a config entry with the specified id.""" entry = self.hass.config_entries.async_entry_for_domain_unique_id( diff --git a/homeassistant/components/google_photos/services.py b/homeassistant/components/google_photos/services.py new file mode 100644 index 00000000000..a895f333962 --- /dev/null +++ b/homeassistant/components/google_photos/services.py @@ -0,0 +1,116 @@ +"""Google Photos services.""" + +from __future__ import annotations + +import asyncio +import mimetypes +from pathlib import Path + +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_FILENAME +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import config_validation as cv + +from . 
import api +from .const import DOMAIN, UPLOAD_SCOPE + +type GooglePhotosConfigEntry = ConfigEntry[api.AsyncConfigEntryAuth] + +__all__ = [ + "DOMAIN", +] + +CONF_CONFIG_ENTRY_ID = "config_entry_id" + +UPLOAD_SERVICE = "upload" +UPLOAD_SERVICE_SCHEMA = vol.Schema( + { + vol.Required(CONF_CONFIG_ENTRY_ID): cv.string, + vol.Required(CONF_FILENAME): vol.All(cv.ensure_list, [cv.string]), + } +) + + +def _read_file_contents( + hass: HomeAssistant, filenames: list[str] +) -> list[tuple[str, bytes]]: + """Read the mime type and contents from each filen.""" + results = [] + for filename in filenames: + if not hass.config.is_allowed_path(filename): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="no_access_to_path", + translation_placeholders={"filename": filename}, + ) + filename_path = Path(filename) + if not filename_path.exists(): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="filename_does_not_exist", + translation_placeholders={"filename": filename}, + ) + mime_type, _ = mimetypes.guess_type(filename) + if mime_type is None or not (mime_type.startswith(("image", "video"))): + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="filename_is_not_image", + translation_placeholders={"filename": filename}, + ) + results.append((mime_type, filename_path.read_bytes())) + return results + + +def async_register_services(hass: HomeAssistant) -> None: + """Register Google Photos services.""" + + async def async_handle_upload(call: ServiceCall) -> ServiceResponse: + """Generate content from text and optionally images.""" + config_entry: GooglePhotosConfigEntry | None = ( + hass.config_entries.async_get_entry(call.data[CONF_CONFIG_ENTRY_ID]) + ) + if not config_entry: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="integration_not_found", + translation_placeholders={"target": DOMAIN}, + ) + scopes = config_entry.data["token"]["scope"].split(" ") + if UPLOAD_SCOPE not in scopes: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="missing_upload_permission", + translation_placeholders={"target": DOMAIN}, + ) + + client_api = config_entry.runtime_data + upload_tasks = [] + file_results = await hass.async_add_executor_job( + _read_file_contents, hass, call.data[CONF_FILENAME] + ) + for mime_type, content in file_results: + upload_tasks.append(client_api.upload_content(content, mime_type)) + upload_tokens = await asyncio.gather(*upload_tasks) + media_ids = await client_api.create_media_items(upload_tokens) + if call.return_response: + return { + "media_items": [{"media_item_id": media_id for media_id in media_ids}] + } + return None + + if not hass.services.has_service(DOMAIN, UPLOAD_SERVICE): + hass.services.async_register( + DOMAIN, + UPLOAD_SERVICE, + async_handle_upload, + schema=UPLOAD_SERVICE_SCHEMA, + supports_response=SupportsResponse.OPTIONAL, + ) diff --git a/homeassistant/components/google_photos/services.yaml b/homeassistant/components/google_photos/services.yaml new file mode 100644 index 00000000000..047305c0bca --- /dev/null +++ b/homeassistant/components/google_photos/services.yaml @@ -0,0 +1,11 @@ +upload: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: google_photos + filename: + required: false + selector: + object: diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json index b44e04287b1..9e88429124e 100644 --- 
a/homeassistant/components/google_photos/strings.json +++ b/homeassistant/components/google_photos/strings.json @@ -26,5 +26,42 @@ "create_entry": { "default": "[%key:common::config_flow::create_entry::authenticated%]" } + }, + "exceptions": { + "integration_not_found": { + "message": "Integration \"{target}\" not found in registry." + }, + "not_loaded": { + "message": "{target} is not loaded." + }, + "no_access_to_path": { + "message": "Cannot read {filename}, no access to path; `allowlist_external_dirs` may need to be adjusted in `configuration.yaml`" + }, + "filename_does_not_exist": { + "message": "`{filename}` does not exist" + }, + "filename_is_not_image": { + "message": "`{filename}` is not an image" + }, + "missing_upload_permission": { + "message": "Home Assistnt was not granted permission to upload to Google Photos" + } + }, + "services": { + "upload": { + "name": "Upload media", + "description": "Upload images or videos to Google Photos.", + "fields": { + "config_entry_id": { + "name": "Integration Id", + "description": "The Google Photos integration id." + }, + "filename": { + "name": "Filename", + "description": "Path to the image or video to upload.", + "example": "/config/www/image.jpg" + } + } + } } } diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py index 73e506658e6..2cdad5d4d10 100644 --- a/tests/components/google_photos/conftest.py +++ b/tests/components/google_photos/conftest.py @@ -32,13 +32,19 @@ def mock_expires_at() -> int: return time.time() + EXPIRES_IN +@pytest.fixture(name="scopes") +def mock_scopes() -> list[str]: + """Fixture to set scopes used during the config entry.""" + return OAUTH2_SCOPES + + @pytest.fixture(name="token_entry") -def mock_token_entry(expires_at: int) -> dict[str, Any]: +def mock_token_entry(expires_at: int, scopes: list[str]) -> dict[str, Any]: """Fixture for OAuth 'token' data for a ConfigEntry.""" return { "access_token": FAKE_ACCESS_TOKEN, "refresh_token": FAKE_REFRESH_TOKEN, - "scope": " ".join(OAUTH2_SCOPES), + "scope": " ".join(scopes), "type": "Bearer", "expires_at": expires_at, "expires_in": EXPIRES_IN, diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py index 4bd933a7eb8..2564a8ed134 100644 --- a/tests/components/google_photos/test_config_flow.py +++ b/tests/components/google_photos/test_config_flow.py @@ -84,6 +84,8 @@ async def test_full_flow( "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" "+https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -111,6 +113,8 @@ async def test_full_flow( "type": "Bearer", "scope": ( "https://www.googleapis.com/auth/photoslibrary.readonly" + " https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + " https://www.googleapis.com/auth/photoslibrary.appendonly" " https://www.googleapis.com/auth/userinfo.profile" ), }, @@ -145,6 +149,8 @@ async def test_api_not_enabled( "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" "+https://www.googleapis.com/auth/userinfo.profile" 
"&access_type=offline&prompt=consent" ) @@ -189,6 +195,8 @@ async def test_general_exception( "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" "+https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -274,6 +282,8 @@ async def test_reauth( "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}" "&scope=https://www.googleapis.com/auth/photoslibrary.readonly" + "+https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + "+https://www.googleapis.com/auth/photoslibrary.appendonly" "+https://www.googleapis.com/auth/userinfo.profile" "&access_type=offline&prompt=consent" ) @@ -305,6 +315,8 @@ async def test_reauth( "type": "Bearer", "scope": ( "https://www.googleapis.com/auth/photoslibrary.readonly" + " https://www.googleapis.com/auth/photoslibrary.readonly.appcreateddata" + " https://www.googleapis.com/auth/photoslibrary.appendonly" " https://www.googleapis.com/auth/userinfo.profile" ), }, diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py index db57ab755c1..ff4993eb3df 100644 --- a/tests/components/google_photos/test_media_source.py +++ b/tests/components/google_photos/test_media_source.py @@ -7,7 +7,7 @@ from googleapiclient.errors import HttpError from httplib2 import Response import pytest -from homeassistant.components.google_photos.const import DOMAIN +from homeassistant.components.google_photos.const import DOMAIN, UPLOAD_SCOPE from homeassistant.components.media_source import ( URI_SCHEME, BrowseError, @@ -46,6 +46,24 @@ async def test_no_config_entries( assert not browse.children +@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.parametrize( + ("scopes"), + [ + [UPLOAD_SCOPE], + ], +) +async def test_no_read_scopes( + hass: HomeAssistant, +) -> None: + """Test a media source with only write scopes configured so no media source exists.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert not browse.children + + @pytest.mark.usefixtures("setup_integration", "setup_api") @pytest.mark.parametrize( ("fixture_name", "expected_results", "expected_medias"), diff --git a/tests/components/google_photos/test_services.py b/tests/components/google_photos/test_services.py new file mode 100644 index 00000000000..198de3295a9 --- /dev/null +++ b/tests/components/google_photos/test_services.py @@ -0,0 +1,256 @@ +"""Tests for Google Photos.""" + +import http +from unittest.mock import Mock, patch + +from googleapiclient.errors import HttpError +from httplib2 import Response +import pytest + +from homeassistant.components.google_photos.api import UPLOAD_API +from homeassistant.components.google_photos.const import DOMAIN, READ_SCOPES +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + setup_api: Mock, 
+) -> None: + """Test service call to upload content.""" + assert hass.services.has_service(DOMAIN, "upload") + + aioclient_mock.post(UPLOAD_API, text="some-upload-token") + setup_api.return_value.mediaItems.return_value.batchCreate.return_value.execute.return_value = { + "newMediaItemResults": [ + { + "status": { + "code": 200, + }, + "mediaItem": { + "id": "new-media-item-id-1", + }, + } + ] + } + + with ( + patch( + "homeassistant.components.google_photos.services.Path.read_bytes", + return_value=b"image bytes", + ), + patch( + "homeassistant.components.google_photos.services.Path.exists", + return_value=True, + ), + patch.object(hass.config, "is_allowed_path", return_value=True), + ): + response = await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.entry_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + assert response == {"media_items": [{"media_item_id": "new-media-item-id-1"}]} + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service_config_entry_not_found( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a config entry that does not exist.""" + with pytest.raises(HomeAssistantError, match="not found in registry"): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": "invalid-config-entry-id", + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_config_entry_not_loaded( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a config entry that is not loaded.""" + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + with pytest.raises(HomeAssistantError, match="not found in registry"): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.unique_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_path_is_not_allowed( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that is not allowed.""" + with ( + patch.object(hass.config, "is_allowed_path", return_value=False), + pytest.raises(HomeAssistantError, match="no access to path"), + ): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.entry_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_filename_does_not_exist( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test upload service call with a filename path that does not exist.""" + with ( + patch.object(hass.config, "is_allowed_path", return_value=True), + patch("pathlib.Path.exists", return_value=False), + pytest.raises(HomeAssistantError, match="does not exist"), + ): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.entry_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service_upload_content_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: 
AiohttpClientMocker, + setup_api: Mock, +) -> None: + """Test service call to upload content.""" + + aioclient_mock.post(UPLOAD_API, status=http.HTTPStatus.SERVICE_UNAVAILABLE) + + with ( + patch( + "homeassistant.components.google_photos.services.Path.read_bytes", + return_value=b"image bytes", + ), + patch( + "homeassistant.components.google_photos.services.Path.exists", + return_value=True, + ), + patch.object(hass.config, "is_allowed_path", return_value=True), + pytest.raises(HomeAssistantError, match="Failed to upload content"), + ): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.entry_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +async def test_upload_service_fails_create( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + setup_api: Mock, +) -> None: + """Test service call to upload content.""" + + aioclient_mock.post(UPLOAD_API, text="some-upload-token") + setup_api.return_value.mediaItems.return_value.batchCreate.return_value.execute.side_effect = HttpError( + Response({"status": "403"}), b"" + ) + + with ( + patch( + "homeassistant.components.google_photos.services.Path.read_bytes", + return_value=b"image bytes", + ), + patch( + "homeassistant.components.google_photos.services.Path.exists", + return_value=True, + ), + patch.object(hass.config, "is_allowed_path", return_value=True), + pytest.raises( + HomeAssistantError, match="Google Photos API responded with error" + ), + ): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.entry_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize( + ("scopes"), + [ + READ_SCOPES, + ], +) +async def test_upload_service_no_scope( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + setup_api: Mock, +) -> None: + """Test service call to upload content but the config entry is read-only.""" + + with pytest.raises(HomeAssistantError, match="not granted permission"): + await hass.services.async_call( + DOMAIN, + "upload", + { + "config_entry_id": config_entry.entry_id, + "filename": "doorbell_snapshot.jpg", + }, + blocking=True, + return_response=True, + ) From 30772da0e1e65af7991f2b2786b8e320f55e15e2 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sat, 31 Aug 2024 14:39:18 -0700 Subject: [PATCH 1298/1635] Add Google Photos media source support for albums and favorites (#124985) --- homeassistant/components/google_photos/api.py | 37 ++++++-- .../components/google_photos/media_source.py | 89 +++++++++++++++---- tests/components/google_photos/conftest.py | 11 ++- .../google_photos/fixtures/list_albums.json | 12 +++ .../google_photos/test_media_source.py | 49 ++++++++-- 5 files changed, 165 insertions(+), 33 deletions(-) create mode 100644 tests/components/google_photos/fixtures/list_albums.json diff --git a/homeassistant/components/google_photos/api.py b/homeassistant/components/google_photos/api.py index c5de03d7d21..0bbb2fe162b 100644 --- a/homeassistant/components/google_photos/api.py +++ b/homeassistant/components/google_photos/api.py @@ -9,7 +9,7 @@ from aiohttp.client_exceptions import ClientError from google.oauth2.credentials import Credentials from googleapiclient.discovery import Resource, build from googleapiclient.errors import HttpError -from 
googleapiclient.http import BatchHttpRequest, HttpRequest +from googleapiclient.http import HttpRequest from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -27,6 +27,9 @@ GET_MEDIA_ITEM_FIELDS = ( ) LIST_MEDIA_ITEM_FIELDS = f"nextPageToken,mediaItems({GET_MEDIA_ITEM_FIELDS})" UPLOAD_API = "https://photoslibrary.googleapis.com/v1/uploads" +LIST_ALBUMS_FIELDS = ( + "nextPageToken,albums(id,title,coverPhotoBaseUrl,coverPhotoMediaItemId)" +) class AuthBase(ABC): @@ -61,14 +64,38 @@ class AuthBase(ABC): return await self._execute(cmd) async def list_media_items( - self, page_size: int | None = None, page_token: str | None = None + self, + page_size: int | None = None, + page_token: str | None = None, + album_id: str | None = None, + favorites: bool = False, ) -> dict[str, Any]: """Get all MediaItem resources.""" service = await self._get_photos_service() - cmd: HttpRequest = service.mediaItems().list( + args: dict[str, Any] = { + "pageSize": (page_size or DEFAULT_PAGE_SIZE), + "pageToken": page_token, + } + cmd: HttpRequest + if album_id is not None or favorites: + if album_id is not None: + args["albumId"] = album_id + if favorites: + args["filters"] = {"featureFilter": {"includedFeatures": "FAVORITES"}} + cmd = service.mediaItems().search(body=args, fields=LIST_MEDIA_ITEM_FIELDS) + else: + cmd = service.mediaItems().list(**args, fields=LIST_MEDIA_ITEM_FIELDS) + return await self._execute(cmd) + + async def list_albums( + self, page_size: int | None = None, page_token: str | None = None + ) -> dict[str, Any]: + """Get all Album resources.""" + service = await self._get_photos_service() + cmd: HttpRequest = service.albums().list( pageSize=(page_size or DEFAULT_PAGE_SIZE), pageToken=page_token, - fields=LIST_MEDIA_ITEM_FIELDS, + fields=LIST_ALBUMS_FIELDS, ) return await self._execute(cmd) @@ -126,7 +153,7 @@ class AuthBase(ABC): partial(build, "oauth2", "v2", credentials=Credentials(token=token)) # type: ignore[no-untyped-call] ) - async def _execute(self, request: HttpRequest | BatchHttpRequest) -> dict[str, Any]: + async def _execute(self, request: HttpRequest) -> dict[str, Any]: try: result = await self._hass.async_add_executor_job(request.execute) except HttpError as err: diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py index 9b922ee3201..a709dd66a0a 100644 --- a/homeassistant/components/google_photos/media_source.py +++ b/homeassistant/components/google_photos/media_source.py @@ -1,7 +1,7 @@ """Media source for Google Photos.""" from dataclasses import dataclass -from enum import StrEnum +from enum import Enum, StrEnum import logging from typing import Any, Self, cast @@ -25,14 +25,41 @@ _LOGGER = logging.getLogger(__name__) # photos when displaying the users library. We fetch a minimum of 50 photos # unless we run out, but in pages of 100 at a time given sometimes responses # may only contain a handful of items Fetches at least 50 photos. -MAX_PHOTOS = 50 +MAX_RECENT_PHOTOS = 50 +MAX_ALBUMS = 50 PAGE_SIZE = 100 THUMBNAIL_SIZE = 256 LARGE_IMAGE_SIZE = 2160 -# Markers for parts of PhotosIdentifier url pattern. 
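For context, the album_id and favorites parameters added to list_media_items above select between the plain list endpoint and the search endpoint with a request body. A small standalone sketch of that selection logic (illustrative only, not part of the patch; argument names mirror the hunk above):

    from typing import Any

    DEFAULT_PAGE_SIZE = 100

    def build_media_items_request(
        page_size: int | None = None,
        page_token: str | None = None,
        album_id: str | None = None,
        favorites: bool = False,
    ) -> tuple[str, dict[str, Any]]:
        """Return the endpoint name and request args, mirroring list_media_items."""
        args: dict[str, Any] = {
            "pageSize": page_size or DEFAULT_PAGE_SIZE,
            "pageToken": page_token,
        }
        if album_id is None and not favorites:
            return "list", args  # service.mediaItems().list(**args, ...)
        if album_id is not None:
            args["albumId"] = album_id
        if favorites:
            args["filters"] = {"featureFilter": {"includedFeatures": "FAVORITES"}}
        return "search", args  # service.mediaItems().search(body=args, ...)

    # Favorites browsing ends up on the search endpoint with a feature filter.
    assert build_media_items_request(favorites=True)[0] == "search"
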
+@dataclass +class SpecialAlbumDetails: + """Details for a Special album.""" + + path: str + title: str + list_args: dict[str, Any] + max_photos: int | None + + +class SpecialAlbum(Enum): + """Special Album types.""" + + RECENT = SpecialAlbumDetails("recent", "Recent Photos", {}, MAX_RECENT_PHOTOS) + FAVORITE = SpecialAlbumDetails( + "favorites", "Favorite Photos", {"favorites": True}, None + ) + + @classmethod + def of(cls, path: str) -> Self | None: + """Parse a PhotosIdentifierType by string value.""" + for enum in cls: + if enum.value.path == path: + return enum + return None + + # The PhotosIdentifier can be in the following forms: # config-entry-id # config-entry-id/a/album-media-id @@ -40,12 +67,6 @@ LARGE_IMAGE_SIZE = 2160 # # The album-media-id can contain special reserved folder names for use by # this integration for virtual folders like the `recent` album. -PHOTO_SOURCE_IDENTIFIER_PHOTO = "p" -PHOTO_SOURCE_IDENTIFIER_ALBUM = "a" - -# Currently supports a single album of recent photos -RECENT_PHOTOS_ALBUM = "recent" -RECENT_PHOTOS_TITLE = "Recent Photos" class PhotosIdentifierType(StrEnum): @@ -86,7 +107,6 @@ class PhotosIdentifier: def of(cls, identifier: str) -> Self: """Parse a PhotosIdentifier form a string.""" parts = identifier.split("/") - _LOGGER.debug("parts=%s", parts) if len(parts) == 1: return cls(parts[0]) if len(parts) != 3: @@ -179,27 +199,50 @@ class GooglePhotosMediaSource(MediaSource): source = _build_account(entry, identifier) if identifier.id_type is None: + result = await client.list_albums(page_size=MAX_ALBUMS) source.children = [ _build_album( - RECENT_PHOTOS_TITLE, + special_album.value.title, PhotosIdentifier.album( - identifier.config_entry_id, RECENT_PHOTOS_ALBUM + identifier.config_entry_id, special_album.value.path ), ) + for special_album in SpecialAlbum + ] + [ + _build_album( + album["title"], + PhotosIdentifier.album( + identifier.config_entry_id, + album["id"], + ), + _cover_photo_url(album, THUMBNAIL_SIZE), + ) + for album in result["albums"] ] return source - # Currently only supports listing a single album of recent photos. 
- if identifier.media_id != RECENT_PHOTOS_ALBUM: - raise BrowseError(f"Unsupported album: {identifier}") + if ( + identifier.id_type != PhotosIdentifierType.ALBUM + or identifier.media_id is None + ): + raise BrowseError(f"Unsupported identifier: {identifier}") + + list_args: dict[str, Any] + if special_album := SpecialAlbum.of(identifier.media_id): + list_args = special_album.value.list_args + else: + list_args = {"album_id": identifier.media_id} - # Fetch recent items media_items: list[dict[str, Any]] = [] page_token: str | None = None - while len(media_items) < MAX_PHOTOS: + while ( + not special_album + or (max_photos := special_album.value.max_photos) is None + or len(media_items) < max_photos + ): try: result = await client.list_media_items( - page_size=PAGE_SIZE, page_token=page_token + **list_args, page_size=PAGE_SIZE, page_token=page_token ) except GooglePhotosApiError as err: raise BrowseError(f"Error listing media items: {err}") from err @@ -255,7 +298,9 @@ def _build_account( ) -def _build_album(title: str, identifier: PhotosIdentifier) -> BrowseMediaSource: +def _build_album( + title: str, identifier: PhotosIdentifier, thumbnail_url: str | None = None +) -> BrowseMediaSource: """Build an album node.""" return BrowseMediaSource( domain=DOMAIN, @@ -265,6 +310,7 @@ def _build_album(title: str, identifier: PhotosIdentifier) -> BrowseMediaSource: title=title, can_play=False, can_expand=True, + thumbnail=thumbnail_url, ) @@ -299,3 +345,8 @@ def _video_url(media_item: dict[str, Any]) -> str: See https://developers.google.com/photos/library/guides/access-media-items#base-urls """ return f"{media_item["baseUrl"]}=dv" + + +def _cover_photo_url(album: dict[str, Any], max_size: int) -> str: + """Return a media item url for the cover photo of the album.""" + return f"{album["coverPhotoBaseUrl"]}=h{max_size}" diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py index 2cdad5d4d10..f7289993258 100644 --- a/tests/components/google_photos/conftest.py +++ b/tests/components/google_photos/conftest.py @@ -15,7 +15,11 @@ from homeassistant.components.google_photos.const import DOMAIN, OAUTH2_SCOPES from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from tests.common import MockConfigEntry, load_json_array_fixture +from tests.common import ( + MockConfigEntry, + load_json_array_fixture, + load_json_object_fixture, +) USER_IDENTIFIER = "user-identifier-1" CONFIG_ENTRY_ID = "user-identifier-1" @@ -119,6 +123,7 @@ def mock_setup_api( return mock mock.return_value.mediaItems.return_value.list = list_media_items + mock.return_value.mediaItems.return_value.search = list_media_items # Mock a point lookup by reading contents of the fixture above def get_media_item(mediaItemId: str, **kwargs: Any) -> Mock: @@ -131,6 +136,10 @@ def mock_setup_api( return None mock.return_value.mediaItems.return_value.get = get_media_item + mock.return_value.albums.return_value.list.return_value.execute.return_value = ( + load_json_object_fixture("list_albums.json", DOMAIN) + ) + yield mock diff --git a/tests/components/google_photos/fixtures/list_albums.json b/tests/components/google_photos/fixtures/list_albums.json new file mode 100644 index 00000000000..57f2873715b --- /dev/null +++ b/tests/components/google_photos/fixtures/list_albums.json @@ -0,0 +1,12 @@ +{ + "albums": [ + { + "id": "album-media-id-1", + "title": "Album title", + "isWriteable": true, + "mediaItemsCount": 7, + "coverPhotoBaseUrl": "http://img.example.com/id3", 
+ "coverPhotoMediaItemId": "cover-photo-media-id-3" + } + ] +} diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py index ff4993eb3df..1028a34aec1 100644 --- a/tests/components/google_photos/test_media_source.py +++ b/tests/components/google_photos/test_media_source.py @@ -65,6 +65,14 @@ async def test_no_read_scopes( @pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.parametrize( + ("album_path", "expected_album_title"), + [ + (f"{CONFIG_ENTRY_ID}/a/recent", "Recent Photos"), + (f"{CONFIG_ENTRY_ID}/a/favorites", "Favorite Photos"), + (f"{CONFIG_ENTRY_ID}/a/album-media-id-1", "Album title"), + ], +) @pytest.mark.parametrize( ("fixture_name", "expected_results", "expected_medias"), [ @@ -82,8 +90,10 @@ async def test_no_read_scopes( ), ], ) -async def test_recent_items( +async def test_browse_albums( hass: HomeAssistant, + album_path: str, + expected_album_title: str, expected_results: list[tuple[str, str]], expected_medias: list[tuple[str, str]], ) -> None: @@ -101,14 +111,14 @@ async def test_recent_items( assert browse.identifier == CONFIG_ENTRY_ID assert browse.title == "Account Name" assert [(child.identifier, child.title) for child in browse.children] == [ - (f"{CONFIG_ENTRY_ID}/a/recent", "Recent Photos") + (f"{CONFIG_ENTRY_ID}/a/recent", "Recent Photos"), + (f"{CONFIG_ENTRY_ID}/a/favorites", "Favorite Photos"), + (f"{CONFIG_ENTRY_ID}/a/album-media-id-1", "Album title"), ] - browse = await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" - ) + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{album_path}") assert browse.domain == DOMAIN - assert browse.identifier == f"{CONFIG_ENTRY_ID}/a/recent" + assert browse.identifier == album_path assert browse.title == "Account Name" assert [ (child.identifier, child.title) for child in browse.children @@ -134,7 +144,25 @@ async def test_invalid_config_entry(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_integration", "setup_api") @pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) -async def test_invalid_album_id(hass: HomeAssistant) -> None: +async def test_browse_invalid_path(hass: HomeAssistant) -> None: + """Test browsing to a photo is not possible.""" + browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") + assert browse.domain == DOMAIN + assert browse.identifier is None + assert browse.title == "Google Photos" + assert [(child.identifier, child.title) for child in browse.children] == [ + (CONFIG_ENTRY_ID, "Account Name") + ] + + with pytest.raises(BrowseError, match="Unsupported identifier"): + await async_browse_media( + hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/p/some-photo-id" + ) + + +@pytest.mark.usefixtures("setup_integration") +@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) +async def test_invalid_album_id(hass: HomeAssistant, setup_api: Mock) -> None: """Test browsing to an album id that does not exist.""" browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") assert browse.domain == DOMAIN @@ -144,7 +172,12 @@ async def test_invalid_album_id(hass: HomeAssistant) -> None: (CONFIG_ENTRY_ID, "Account Name") ] - with pytest.raises(BrowseError, match="Unsupported album"): + setup_api.return_value.mediaItems.return_value.search = Mock() + setup_api.return_value.mediaItems.return_value.search.return_value.execute.side_effect = HttpError( + Response({"status": "404"}), b"" + ) + + with pytest.raises(BrowseError, match="Error 
listing media items"): await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/invalid-album-id" ) From 95a25c72dc363ad9240ae16bc8add225d1bc481c Mon Sep 17 00:00:00 2001 From: Bill Flood Date: Sat, 31 Aug 2024 22:12:24 -0700 Subject: [PATCH 1299/1635] Use constant for default medium type in Mopeka (#125002) - Updated the Mopeka BLE device setup to use const DEFAULT_MEDIUM_TYPE - Fix Spelling error in a coment --- homeassistant/components/mopeka/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mopeka/__init__.py b/homeassistant/components/mopeka/__init__.py index 17a87efd6e6..d73ece581d7 100644 --- a/homeassistant/components/mopeka/__init__.py +++ b/homeassistant/components/mopeka/__init__.py @@ -14,7 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import CONF_MEDIUM_TYPE +from .const import CONF_MEDIUM_TYPE, DEFAULT_MEDIUM_TYPE PLATFORMS: list[Platform] = [Platform.SENSOR] @@ -29,8 +29,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: MopekaConfigEntry) -> bo address = entry.unique_id assert address is not None - # Default sensors configured prior to the intorudction of MediumType - medium_type_str = entry.data.get(CONF_MEDIUM_TYPE, MediumType.PROPANE.value) + # Default sensors configured prior to the introduction of MediumType + medium_type_str = entry.data.get(CONF_MEDIUM_TYPE, DEFAULT_MEDIUM_TYPE) data = MopekaIOTBluetoothDeviceData(MediumType(medium_type_str)) coordinator = entry.runtime_data = PassiveBluetoothProcessorCoordinator( hass, From 68162e1a27896fb7c04d04af1052b9bde90382ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sun, 1 Sep 2024 12:45:59 +0200 Subject: [PATCH 1300/1635] Update aioairzone-cloud to v0.6.4 (#125007) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álvaro Fernández Rojas --- .../components/airzone_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../snapshots/test_diagnostics.ambr | 9 ++++++++ tests/components/airzone_cloud/util.py | 21 +++++++++++++++++++ 5 files changed, 33 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 05f854e6caf..47a06c308ad 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": ["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.3"] + "requirements": ["aioairzone-cloud==0.6.4"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1be3393dfc2..08bd494955a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -176,7 +176,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.3 +aioairzone-cloud==0.6.4 # homeassistant.components.airzone aioairzone==0.8.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4bd9dfa1e29..d3c94d221d3 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -164,7 +164,7 @@ aio-georss-gdacs==0.9 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.3 +aioairzone-cloud==0.6.4 # homeassistant.components.airzone aioairzone==0.8.2 
diff --git a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr index 26a606bde42..2e6463d35a1 100644 --- a/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr +++ b/tests/components/airzone_cloud/snapshots/test_diagnostics.ambr @@ -154,6 +154,9 @@ 'available': True, 'double-set-point': True, 'id': 'aidoo_pro', + 'indoor-exchanger-temperature': 26.0, + 'indoor-return-temperature': 26.0, + 'indoor-work-temperature': 25.0, 'installation': 'installation1', 'is-connected': True, 'mode': 2, @@ -166,6 +169,12 @@ 5, ]), 'name': 'Bron Pro', + 'outdoor-condenser-pressure': 150.0, + 'outdoor-discharge-temperature': 121.0, + 'outdoor-electric-current': 3.0, + 'outdoor-evaporator-pressure': 20.0, + 'outdoor-exchanger-temperature': -25.0, + 'outdoor-temperature': 29.0, 'power': True, 'problems': False, 'speed': 3, diff --git a/tests/components/airzone_cloud/util.py b/tests/components/airzone_cloud/util.py index fb538ea7c8e..52b0ae0bec3 100644 --- a/tests/components/airzone_cloud/util.py +++ b/tests/components/airzone_cloud/util.py @@ -24,12 +24,17 @@ from aioairzone_cloud.const import ( API_CELSIUS, API_CONFIG, API_CONNECTION_DATE, + API_CONSUMPTION_UE, API_CPU_WS, API_DEVICE_ID, API_DEVICES, + API_DISCH_COMP_TEMP_UE, API_DISCONNECTION_DATE, API_DOUBLE_SET_POINT, API_ERRORS, + API_EXCH_HEAT_TEMP_IU, + API_EXCH_HEAT_TEMP_UE, + API_EXT_TEMP, API_FAH, API_FREE, API_FREE_MEM, @@ -46,6 +51,8 @@ from aioairzone_cloud.const import ( API_MODE_AVAIL, API_NAME, API_OLD_ID, + API_PC_UE, + API_PE_UE, API_POWER, API_POWERFUL_MODE, API_RAD_ACTIVE, @@ -69,6 +76,7 @@ from aioairzone_cloud.const import ( API_RANGE_SP_MIN_HOT_AIR, API_RANGE_SP_MIN_STOP_AIR, API_RANGE_SP_MIN_VENT_AIR, + API_RETURN_TEMP, API_SETPOINT, API_SP_AIR_AUTO, API_SP_AIR_COOL, @@ -94,6 +102,7 @@ from aioairzone_cloud.const import ( API_THERMOSTAT_TYPE, API_TYPE, API_WARNINGS, + API_WORK_TEMP, API_WS_CONNECTED, API_WS_FW, API_WS_ID, @@ -266,6 +275,18 @@ GET_WEBSERVER_MOCK_AIDOO_PRO = { def mock_get_device_config(device: Device) -> dict[str, Any]: """Mock API device config.""" + if device.get_id() == "aidoo_pro": + return { + API_CONSUMPTION_UE: 3, + API_DISCH_COMP_TEMP_UE: {API_CELSIUS: 121, API_FAH: -250}, + API_EXCH_HEAT_TEMP_IU: {API_CELSIUS: 26, API_FAH: 79}, + API_EXCH_HEAT_TEMP_UE: {API_CELSIUS: -25, API_FAH: -13}, + API_EXT_TEMP: {API_CELSIUS: 29, API_FAH: 84}, + API_PC_UE: 0.15, + API_PE_UE: 0.02, + API_RETURN_TEMP: {API_CELSIUS: 26, API_FAH: 79}, + API_WORK_TEMP: {API_CELSIUS: 25, API_FAH: 77}, + } if device.get_id() == "system1": return { API_SYSTEM_FW: "3.35", From 1661304f10b0370797bf4a4b45774ca721b326dc Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 1 Sep 2024 12:47:52 +0200 Subject: [PATCH 1301/1635] Bump solarlog_cli to 0.2.2 (#124948) * Add inverter-devices * Minor code adjustments * Update manifest.json Seperate dependency upgrade to seperate PR * Update requirements_all.txt Seperate dependency upgrade to seperate PR * Update requirements_test_all.txt Seperate dependency upgrade to seperate PR * Update homeassistant/components/solarlog/sensor.py Co-authored-by: Joost Lekkerkerker * Split up base class, document SolarLogSensorEntityDescription * Split up sensor types * Update snapshot * Bump solarlog_cli to 0.2.1 * Add strict typing * Bump fyta_cli to 0.6.3 (#124574) * Ensure write access to hassrelease data folder (#124573) Co-authored-by: Robert Resch * Update a roborock blocking call to be 
fully async (#124266) Remove a blocking call in roborock * Add inverter-devices * Split up sensor types * Update snapshot * Bump solarlog_cli to 0.2.1 * Backport/rebase * Tidy up * Simplyfication coordinator.py * Minor adjustments * Ruff * Bump solarlog_cli to 0.2.2 * Update homeassistant/components/solarlog/sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/solarlog/config_flow.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/solarlog/sensor.py Co-authored-by: Joost Lekkerkerker * Update persentage-values in fixture --------- Co-authored-by: Joost Lekkerkerker Co-authored-by: Paulus Schoutsen Co-authored-by: Robert Resch Co-authored-by: Allen Porter --- .strict-typing | 1 + .../components/solarlog/config_flow.py | 23 +- .../components/solarlog/coordinator.py | 19 +- .../components/solarlog/manifest.json | 2 +- homeassistant/components/solarlog/sensor.py | 143 +++++++----- mypy.ini | 10 + requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/solarlog/__init__.py | 2 + tests/components/solarlog/conftest.py | 43 ++-- .../solarlog/fixtures/solarlog_data.json | 8 +- .../solarlog/snapshots/test_sensor.ambr | 218 +++++++++++++++++- tests/components/solarlog/test_config_flow.py | 2 +- 13 files changed, 350 insertions(+), 125 deletions(-) diff --git a/.strict-typing b/.strict-typing index 9e91272c37d..fb35bc5d227 100644 --- a/.strict-typing +++ b/.strict-typing @@ -411,6 +411,7 @@ homeassistant.components.slack.* homeassistant.components.sleepiq.* homeassistant.components.smhi.* homeassistant.components.snooz.* +homeassistant.components.solarlog.* homeassistant.components.sonarr.* homeassistant.components.speedtestdotnet.* homeassistant.components.sql.* diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 4587cb7d886..5d68a16eabe 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -19,7 +19,7 @@ _LOGGER = logging.getLogger(__name__) @callback -def solarlog_entries(hass: HomeAssistant): +def solarlog_entries(hass: HomeAssistant) -> set[str]: """Return the hosts already configured.""" return { entry.data[CONF_HOST] for entry in hass.config_entries.async_entries(DOMAIN) @@ -36,7 +36,7 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self._errors: dict = {} - def _host_in_configuration_exists(self, host) -> bool: + def _host_in_configuration_exists(self, host: str) -> bool: """Return True if host exists in configuration.""" if host in solarlog_entries(self.hass): return True @@ -50,7 +50,7 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): url = ParseResult("http", netloc, path, *url[3:]) return url.geturl() - async def _test_connection(self, host): + async def _test_connection(self, host: str) -> bool: """Check if we can connect to the Solar-Log device.""" solarlog = SolarLogConnector(host) try: @@ -66,11 +66,12 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): return True - async def async_step_user(self, user_input=None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Step when user initializes a integration.""" self._errors = {} if user_input is not None: - # set some defaults in case we need to return to the form user_input[CONF_NAME] = slugify(user_input[CONF_NAME]) user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) @@ -81,20 +82,14 @@ class 
SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): title=user_input[CONF_NAME], data=user_input ) else: - user_input = {} - user_input[CONF_NAME] = DEFAULT_NAME - user_input[CONF_HOST] = DEFAULT_HOST + user_input = {CONF_NAME: DEFAULT_NAME, CONF_HOST: DEFAULT_HOST} return self.async_show_form( step_id="user", data_schema=vol.Schema( { - vol.Required( - CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME) - ): str, - vol.Required( - CONF_HOST, default=user_input.get(CONF_HOST, DEFAULT_HOST) - ): str, + vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str, + vol.Required(CONF_HOST, default=user_input[CONF_HOST]): str, vol.Required("extended_data", default=False): bool, } ), diff --git a/homeassistant/components/solarlog/coordinator.py b/homeassistant/components/solarlog/coordinator.py index 96ee00af1ec..5c9aa540261 100644 --- a/homeassistant/components/solarlog/coordinator.py +++ b/homeassistant/components/solarlog/coordinator.py @@ -12,11 +12,12 @@ from solarlog_cli.solarlog_exceptions import ( SolarLogConnectionError, SolarLogUpdateError, ) +from solarlog_cli.solarlog_models import SolarlogData from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import update_coordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed _LOGGER = logging.getLogger(__name__) @@ -24,7 +25,7 @@ if TYPE_CHECKING: from . import SolarlogConfigEntry -class SolarLogCoordinator(update_coordinator.DataUpdateCoordinator): +class SolarLogCoordinator(DataUpdateCoordinator[SolarlogData]): """Get and update the latest data.""" def __init__(self, hass: HomeAssistant, entry: SolarlogConfigEntry) -> None: @@ -43,29 +44,29 @@ class SolarLogCoordinator(update_coordinator.DataUpdateCoordinator): self.name = entry.title self.host = url.geturl() - extended_data = entry.data["extended_data"] - self.solarlog = SolarLogConnector( - self.host, extended_data, hass.config.time_zone + self.host, entry.data["extended_data"], hass.config.time_zone ) async def _async_setup(self) -> None: """Do initialization logic.""" if self.solarlog.extended_data: - device_list = await self.solarlog.client.get_device_list() + device_list = await self.solarlog.update_device_list() self.solarlog.set_enabled_devices({key: True for key in device_list}) - async def _async_update_data(self): + async def _async_update_data(self) -> SolarlogData: """Update the data from the SolarLog device.""" _LOGGER.debug("Start data update") try: data = await self.solarlog.update_data() - await self.solarlog.update_device_list() + if self.solarlog.extended_data: + await self.solarlog.update_device_list() + data.inverter_data = await self.solarlog.update_inverter_data() except SolarLogConnectionError as err: raise ConfigEntryNotReady(err) from err except SolarLogUpdateError as err: - raise update_coordinator.UpdateFailed(err) from err + raise UpdateFailed(err) from err _LOGGER.debug("Data successfully updated") diff --git a/homeassistant/components/solarlog/manifest.json b/homeassistant/components/solarlog/manifest.json index 0c097b7146d..eb2268e08da 100644 --- a/homeassistant/components/solarlog/manifest.json +++ b/homeassistant/components/solarlog/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/solarlog", "iot_class": "local_polling", "loggers": ["solarlog_cli"], - "requirements": ["solarlog_cli==0.1.6"] + "requirements": ["solarlog_cli==0.2.2"] } diff --git 
a/homeassistant/components/solarlog/sensor.py b/homeassistant/components/solarlog/sensor.py index cd4a711cdc9..498429f70cf 100644 --- a/homeassistant/components/solarlog/sensor.py +++ b/homeassistant/components/solarlog/sensor.py @@ -5,7 +5,8 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass from datetime import datetime -from typing import Any + +from solarlog_cli.solarlog_models import InverterData, SolarlogData from homeassistant.components.sensor import ( SensorDeviceClass, @@ -21,200 +22,219 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from . import SolarlogConfigEntry from .entity import SolarLogCoordinatorEntity, SolarLogInverterEntity -@dataclass(frozen=True) -class SolarLogSensorEntityDescription(SensorEntityDescription): - """Describes Solarlog sensor entity.""" +@dataclass(frozen=True, kw_only=True) +class SolarLogCoordinatorSensorEntityDescription(SensorEntityDescription): + """Describes Solarlog coordinator sensor entity.""" - value_fn: Callable[[float | int], float] | Callable[[datetime], datetime] = ( - lambda value: value - ) + value_fn: Callable[[SolarlogData], StateType | datetime | None] -SOLARLOG_SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( - SolarLogSensorEntityDescription( +@dataclass(frozen=True, kw_only=True) +class SolarLogInverterSensorEntityDescription(SensorEntityDescription): + """Describes Solarlog inverter sensor entity.""" + + value_fn: Callable[[InverterData], float | None] + + +SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] = ( + SolarLogCoordinatorSensorEntityDescription( key="last_updated", translation_key="last_update", device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda data: data.last_updated, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="power_ac", translation_key="power_ac", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.power_ac, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="power_dc", translation_key="power_dc", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.power_dc, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="voltage_ac", translation_key="voltage_ac", native_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.voltage_ac, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="voltage_dc", translation_key="voltage_dc", native_unit_of_measurement=UnitOfElectricPotential.VOLT, device_class=SensorDeviceClass.VOLTAGE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.voltage_dc, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_day", translation_key="yield_day", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.yield_day / 1000, 3), ), - SolarLogSensorEntityDescription( + 
SolarLogCoordinatorSensorEntityDescription( key="yield_yesterday", translation_key="yield_yesterday", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.yield_yesterday / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_month", translation_key="yield_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.yield_month / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_year", translation_key="yield_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.yield_year / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="yield_total", translation_key="yield_total", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.yield_total / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_ac", translation_key="consumption_ac", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.consumption_ac, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_day", translation_key="consumption_day", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.consumption_day / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_yesterday", translation_key="consumption_yesterday", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.consumption_yesterday / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_month", translation_key="consumption_month", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.consumption_month / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_year", translation_key="consumption_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.consumption_year / 1000, 3), ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="consumption_total", translation_key="consumption_total", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda data: round(data.consumption_total / 1000, 3), ), - SolarLogSensorEntityDescription( + 
SolarLogCoordinatorSensorEntityDescription( key="self_consumption_year", translation_key="self_consumption_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, + value_fn=lambda data: data.self_consumption_year, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="total_power", translation_key="total_power", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, + value_fn=lambda data: data.total_power, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="alternator_loss", translation_key="alternator_loss", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.alternator_loss, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="capacity", translation_key="capacity", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda value: round(value * 100, 1), + value_fn=lambda data: data.capacity, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="efficiency", translation_key="efficiency", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda value: round(value * 100, 1), + value_fn=lambda data: data.efficiency, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="power_available", translation_key="power_available", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda data: data.power_available, ), - SolarLogSensorEntityDescription( + SolarLogCoordinatorSensorEntityDescription( key="usage", translation_key="usage", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda value: round(value * 100, 1), + value_fn=lambda data: data.usage, ), ) -INVERTER_SENSOR_TYPES: tuple[SolarLogSensorEntityDescription, ...] = ( - SolarLogSensorEntityDescription( +INVERTER_SENSOR_TYPES: tuple[SolarLogInverterSensorEntityDescription, ...] 
= ( + SolarLogInverterSensorEntityDescription( key="current_power", translation_key="current_power", native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda inverter: inverter.current_power, ), - SolarLogSensorEntityDescription( + SolarLogInverterSensorEntityDescription( key="consumption_year", translation_key="consumption_year", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda value: round(value / 1000, 3), + value_fn=lambda inverter: None + if inverter.consumption_year is None + else round(inverter.consumption_year / 1000, 3), ), ) @@ -227,21 +247,18 @@ async def async_setup_entry( """Add solarlog entry.""" coordinator = entry.runtime_data - # https://github.com/python/mypy/issues/14294 - entities: list[SensorEntity] = [ SolarLogCoordinatorSensor(coordinator, sensor) for sensor in SOLARLOG_SENSOR_TYPES ] - device_data: dict[str, Any] = coordinator.data["devices"] + device_data = coordinator.data.inverter_data - if not device_data: + if device_data: entities.extend( - SolarLogInverterSensor(coordinator, sensor, int(device_id)) + SolarLogInverterSensor(coordinator, sensor, device_id) for device_id in device_data for sensor in INVERTER_SENSOR_TYPES - if sensor.key in device_data[device_id] ) async_add_entities(entities) @@ -250,26 +267,24 @@ async def async_setup_entry( class SolarLogCoordinatorSensor(SolarLogCoordinatorEntity, SensorEntity): """Represents a SolarLog sensor.""" - entity_description: SolarLogSensorEntityDescription + entity_description: SolarLogCoordinatorSensorEntityDescription @property - def native_value(self) -> float | datetime: + def native_value(self) -> StateType | datetime: """Return the state for this sensor.""" - val = self.coordinator.data[self.entity_description.key] - return self.entity_description.value_fn(val) + return self.entity_description.value_fn(self.coordinator.data) class SolarLogInverterSensor(SolarLogInverterEntity, SensorEntity): """Represents a SolarLog inverter sensor.""" - entity_description: SolarLogSensorEntityDescription + entity_description: SolarLogInverterSensorEntityDescription @property - def native_value(self) -> float | datetime: + def native_value(self) -> StateType: """Return the state for this sensor.""" - val = self.coordinator.data["devices"][self.device_id][ - self.entity_description.key - ] - return self.entity_description.value_fn(val) + return self.entity_description.value_fn( + self.coordinator.data.inverter_data[self.device_id] + ) diff --git a/mypy.ini b/mypy.ini index 873cf1f66bd..7fb8c49c8d9 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3866,6 +3866,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.solarlog.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.sonarr.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 08bd494955a..f7d8147a058 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2650,7 +2650,7 @@ soco==0.30.4 solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.1.6 +solarlog_cli==0.2.2 # homeassistant.components.solax solax==3.1.1 diff --git a/requirements_test_all.txt 
b/requirements_test_all.txt index d3c94d221d3..f234b427248 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2093,7 +2093,7 @@ snapcast==2.3.6 soco==0.30.4 # homeassistant.components.solarlog -solarlog_cli==0.1.6 +solarlog_cli==0.2.2 # homeassistant.components.solax solax==3.1.1 diff --git a/tests/components/solarlog/__init__.py b/tests/components/solarlog/__init__.py index 74b19bd297e..c2c0296d9e2 100644 --- a/tests/components/solarlog/__init__.py +++ b/tests/components/solarlog/__init__.py @@ -17,3 +17,5 @@ async def setup_platform( with patch("homeassistant.components.solarlog.PLATFORMS", platforms): await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index 44c0e27f9b0..b363f655c57 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -1,10 +1,10 @@ """Test helpers.""" from collections.abc import Generator -from datetime import UTC, datetime from unittest.mock import AsyncMock, patch import pytest +from solarlog_cli.solarlog_models import InverterData, SolarlogData from homeassistant.components.solarlog.const import DOMAIN as SOLARLOG_DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME @@ -13,6 +13,19 @@ from .const import HOST, NAME from tests.common import MockConfigEntry, load_json_object_fixture +DEVICE_LIST = { + 0: InverterData(name="Inverter 1", enabled=True), + 1: InverterData(name="Inverter 2", enabled=True), +} +INVERTER_DATA = { + 0: InverterData( + name="Inverter 1", enabled=True, consumption_year=354687, current_power=5 + ), + 1: InverterData( + name="Inverter 2", enabled=True, consumption_year=354, current_power=6 + ), +} + @pytest.fixture def mock_config_entry() -> MockConfigEntry: @@ -34,28 +47,18 @@ def mock_config_entry() -> MockConfigEntry: def mock_solarlog_connector(): """Build a fixture for the SolarLog API that connects successfully and returns one device.""" + data = SolarlogData.from_dict( + load_json_object_fixture("solarlog_data.json", SOLARLOG_DOMAIN) + ) + data.inverter_data = INVERTER_DATA + mock_solarlog_api = AsyncMock() - mock_solarlog_api.test_connection = AsyncMock(return_value=True) - - data = { - "devices": { - 0: {"consumption_total": 354687, "current_power": 5}, - } - } - data |= load_json_object_fixture("solarlog_data.json", SOLARLOG_DOMAIN) - data["last_updated"] = datetime.fromisoformat(data["last_updated"]).astimezone(UTC) - + mock_solarlog_api.test_connection.return_value = True mock_solarlog_api.update_data.return_value = data - mock_solarlog_api.device_list.return_value = { - 0: {"name": "Inverter 1"}, - 1: {"name": "Inverter 2"}, - } + mock_solarlog_api.update_device_list.return_value = INVERTER_DATA + mock_solarlog_api.update_inverter_data.return_value = INVERTER_DATA mock_solarlog_api.device_name = {0: "Inverter 1", 1: "Inverter 2"}.get - mock_solarlog_api.client.get_device_list.return_value = { - 0: {"name": "Inverter 1"}, - 1: {"name": "Inverter 2"}, - } - mock_solarlog_api.client.close = AsyncMock(return_value=None) + mock_solarlog_api.device_enabled = {0: True, 1: False}.get with ( patch( diff --git a/tests/components/solarlog/fixtures/solarlog_data.json b/tests/components/solarlog/fixtures/solarlog_data.json index f7077d88d0d..339ab4a4dfc 100644 --- a/tests/components/solarlog/fixtures/solarlog_data.json +++ b/tests/components/solarlog/fixtures/solarlog_data.json @@ -17,9 +17,9 @@ "total_power": 120, 
"self_consumption_year": 545, "alternator_loss": 2, - "efficiency": 0.9804, - "usage": 0.5487, + "efficiency": 98.1, + "usage": 54.8, "power_available": 45.13, - "capacity": 0.85, - "last_updated": "2024-08-01T15:20:45" + "capacity": 85.5, + "last_updated": "2024-08-01T15:20:45Z" } diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 74a397be900..6fccbd89dba 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -1,4 +1,103 @@ # serializer version: 1 +# name: test_all_entities[sensor.inverter_1_consumption_total-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_consumption_total', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption total', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_total', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_1-consumption_total', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_1_consumption_total-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter 1 Consumption total', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1_consumption_total', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '354.687', + }) +# --- +# name: test_all_entities[sensor.inverter_1_consumption_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_1_consumption_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption year', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_1-consumption_year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_1_consumption_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter 1 Consumption year', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_1_consumption_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '354.687', + }) +# --- # name: test_all_entities[sensor.inverter_1_power-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -50,6 +149,105 @@ 'state': '5', }) # --- +# name: test_all_entities[sensor.inverter_2_consumption_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': 
None, + 'entity_id': 'sensor.inverter_2_consumption_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Consumption year', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'consumption_year', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_2-consumption_year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_2_consumption_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Inverter 2 Consumption year', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_2_consumption_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.354', + }) +# --- +# name: test_all_entities[sensor.inverter_2_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.inverter_2_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'solarlog', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'current_power', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-inverter_2-current_power', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.inverter_2_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Inverter 2 Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.inverter_2_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- # name: test_all_entities[sensor.solarlog_alternator_loss-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -98,7 +296,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '2', + 'state': '2.0', }) # --- # name: test_all_entities[sensor.solarlog_capacity-entry] @@ -149,7 +347,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '85.0', + 'state': '85.5', }) # --- # name: test_all_entities[sensor.solarlog_consumption_ac-entry] @@ -494,7 +692,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '98.0', + 'state': '98.1', }) # --- # name: test_all_entities[sensor.solarlog_installed_peak_power-entry] @@ -542,7 +740,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '120', + 'state': '120.0', }) # --- # name: test_all_entities[sensor.solarlog_last_update-entry] @@ -640,7 +838,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '100.0', }) # --- # name: test_all_entities[sensor.solarlog_power_available-entry] @@ -742,7 +940,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '102', + 'state': '102.0', }) # --- # name: test_all_entities[sensor.solarlog_self_consumption_year-entry] @@ -793,7 +991,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '545', + 'state': '545.0', }) # --- # name: test_all_entities[sensor.solarlog_usage-entry] @@ -844,7 +1042,7 @@ 
'last_changed': , 'last_reported': , 'last_updated': , - 'state': '54.9', + 'state': '54.8', }) # --- # name: test_all_entities[sensor.solarlog_voltage_ac-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -895,7 +1093,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '100.0', }) # --- # name: test_all_entities[sensor.solarlog_voltage_dc-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -946,7 +1144,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '100', + 'state': '100.0', }) # --- # name: test_all_entities[sensor.solarlog_yield_day-entry] diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index b2b2ff9566e..223ceec3ebb 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -123,7 +123,7 @@ async def test_form_exceptions( assert result["data"]["extended_data"] is False -async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None: +async def test_abort_if_already_setup(hass: HomeAssistant, test_connect: None) -> None: """Test we abort if the device is already setup.""" flow = init_config_flow(hass) MockConfigEntry( From 12336f5c15e854d9138fd6d819ac00893125df01 Mon Sep 17 00:00:00 2001 From: Jeef Date: Sun, 1 Sep 2024 04:48:38 -0600 Subject: [PATCH 1302/1635] Bump Intellifire to 4.1.9 (#121091) * rebase * Minor patch to fix duplicate DeviceInfo being created - if data hasn't updated yet * rebase * Minor patch to fix duplicate DeviceInfo being created - if data hasn't updated yet * fixing formatting * Update homeassistant/components/intellifire/__init__.py Co-authored-by: Erik Montnemery * Update homeassistant/components/intellifire/__init__.py Co-authored-by: Erik Montnemery * Removing cloud connectivity sensor - leaving local one in * Renaming class to something more useful * addressing pr * Update homeassistant/components/intellifire/__init__.py Co-authored-by: Erik Montnemery * add ruff exception * Fix test annotations * remove access to private variable * Bumping to 4.1.9 instead of 4.1.5 * A renaming * rename * Updated testing * Update __init__.py Co-authored-by: Joost Lekkerkerker * updating strings * Update tests/components/intellifire/conftest.py Co-authored-by: Joost Lekkerkerker * Testing refactor - WIP * everything is passing - cleanup still needed * cleaning up comments * update pr * unrename * Update homeassistant/components/intellifire/coordinator.py Co-authored-by: Joost Lekkerkerker * fixing sentence * fixed fixture and removed error codes * reverted a bad change * fixing strings.json * revert renaming * fix * typing in other pr * adding extra tests - one has a really dumb name * using a real value * added a migration in * Update homeassistant/components/intellifire/config_flow.py Co-authored-by: Joost Lekkerkerker * Update tests/components/intellifire/test_init.py Co-authored-by: Joost Lekkerkerker * cleanup continues * addressing pr * switch back to debug * Update tests/components/intellifire/conftest.py Co-authored-by: Joost Lekkerkerker * some changes * restore property mock because it didn't work otherwise * cleanup has begun * removed extra text * addressing pr stuff * fixed reauth --------- Co-authored-by: Erik Montnemery Co-authored-by: Joost Lekkerkerker --- .../components/intellifire/__init__.py | 168 ++-- .../components/intellifire/binary_sensor.py | 4 +- .../components/intellifire/climate.py | 2 +- .../components/intellifire/config_flow.py | 399 +++++----- homeassistant/components/intellifire/const.py | 19 +- .../components/intellifire/coordinator.py | 50 
+- .../components/intellifire/entity.py | 6 +- homeassistant/components/intellifire/fan.py | 10 +- homeassistant/components/intellifire/light.py | 9 +- .../components/intellifire/manifest.json | 2 +- .../components/intellifire/sensor.py | 53 +- .../components/intellifire/strings.json | 35 +- .../components/intellifire/switch.py | 29 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/intellifire/__init__.py | 12 + tests/components/intellifire/conftest.py | 242 +++++- .../intellifire/fixtures/local_poll.json | 29 + .../intellifire/fixtures/user_data_1.json | 17 + .../intellifire/fixtures/user_data_3.json | 33 + .../snapshots/test_binary_sensor.ambr | 717 ++++++++++++++++++ .../intellifire/snapshots/test_climate.ambr | 66 ++ .../intellifire/snapshots/test_sensor.ambr | 587 ++++++++++++++ .../intellifire/test_binary_sensor.py | 35 + tests/components/intellifire/test_climate.py | 34 + .../intellifire/test_config_flow.py | 415 ++++------ tests/components/intellifire/test_init.py | 111 +++ tests/components/intellifire/test_sensor.py | 35 + 28 files changed, 2445 insertions(+), 678 deletions(-) create mode 100644 tests/components/intellifire/fixtures/local_poll.json create mode 100644 tests/components/intellifire/fixtures/user_data_1.json create mode 100644 tests/components/intellifire/fixtures/user_data_3.json create mode 100644 tests/components/intellifire/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/intellifire/snapshots/test_climate.ambr create mode 100644 tests/components/intellifire/snapshots/test_sensor.ambr create mode 100644 tests/components/intellifire/test_binary_sensor.py create mode 100644 tests/components/intellifire/test_climate.py create mode 100644 tests/components/intellifire/test_init.py create mode 100644 tests/components/intellifire/test_sensor.py diff --git a/homeassistant/components/intellifire/__init__.py b/homeassistant/components/intellifire/__init__.py index 7af472c8745..7609398673b 100644 --- a/homeassistant/components/intellifire/__init__.py +++ b/homeassistant/components/intellifire/__init__.py @@ -2,15 +2,17 @@ from __future__ import annotations -from aiohttp import ClientConnectionError -from intellifire4py import IntellifireControlAsync -from intellifire4py.exceptions import LoginException -from intellifire4py.intellifire import IntellifireAPICloud, IntellifireAPILocal +import asyncio + +from intellifire4py import UnifiedFireplace +from intellifire4py.cloud_interface import IntelliFireCloudInterface +from intellifire4py.model import IntelliFireCommonFireplaceData from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_HOST, + CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, Platform, @@ -18,7 +20,18 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import CONF_USER_ID, DOMAIN, LOGGER +from .const import ( + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, + INIT_WAIT_TIME_SECONDS, + LOGGER, + STARTUP_TIMEOUT, +) from .coordinator import IntellifireDataUpdateCoordinator PLATFORMS = [ @@ -32,79 +45,114 @@ PLATFORMS = [ ] +def _construct_common_data(entry: ConfigEntry) -> IntelliFireCommonFireplaceData: + """Convert config entry data into IntelliFireCommonFireplaceData.""" + + return IntelliFireCommonFireplaceData( + auth_cookie=entry.data[CONF_AUTH_COOKIE], + 
user_id=entry.data[CONF_USER_ID], + web_client_id=entry.data[CONF_WEB_CLIENT_ID], + serial=entry.data[CONF_SERIAL], + api_key=entry.data[CONF_API_KEY], + ip_address=entry.data[CONF_IP_ADDRESS], + read_mode=entry.options[CONF_READ_MODE], + control_mode=entry.options[CONF_CONTROL_MODE], + ) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate entries.""" + LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version == 1: + new = {**config_entry.data} + + if config_entry.minor_version < 2: + username = config_entry.data[CONF_USERNAME] + password = config_entry.data[CONF_PASSWORD] + + # Create a Cloud Interface + async with IntelliFireCloudInterface() as cloud_interface: + await cloud_interface.login_with_credentials( + username=username, password=password + ) + + new_data = cloud_interface.user_data.get_data_for_ip(new[CONF_HOST]) + + if not new_data: + raise ConfigEntryAuthFailed + new[CONF_API_KEY] = new_data.api_key + new[CONF_WEB_CLIENT_ID] = new_data.web_client_id + new[CONF_AUTH_COOKIE] = new_data.auth_cookie + + new[CONF_IP_ADDRESS] = new_data.ip_address + new[CONF_SERIAL] = new_data.serial + + hass.config_entries.async_update_entry( + config_entry, + data=new, + options={CONF_READ_MODE: "local", CONF_CONTROL_MODE: "local"}, + unique_id=new[CONF_SERIAL], + version=1, + minor_version=2, + ) + LOGGER.debug("Pseudo Migration %s successful", config_entry.version) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up IntelliFire from a config entry.""" - LOGGER.debug("Setting up config entry: %s", entry.unique_id) if CONF_USERNAME not in entry.data: - LOGGER.debug("Old config entry format detected: %s", entry.unique_id) + LOGGER.debug("Config entry without username detected: %s", entry.unique_id) raise ConfigEntryAuthFailed - ift_control = IntellifireControlAsync( - fireplace_ip=entry.data[CONF_HOST], - ) try: - await ift_control.login( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], + fireplace: UnifiedFireplace = ( + await UnifiedFireplace.build_fireplace_from_common( + _construct_common_data(entry) + ) ) - except (ConnectionError, ClientConnectionError) as err: - raise ConfigEntryNotReady from err - except LoginException as err: - raise ConfigEntryAuthFailed(err) from err - - finally: - await ift_control.close() - - # Extract API Key and User_ID from ift_control - # Eventually this will migrate to using IntellifireAPICloud - - if CONF_USER_ID not in entry.data or CONF_API_KEY not in entry.data: - LOGGER.info( - "Updating intellifire config entry for %s with api information", - entry.unique_id, - ) - cloud_api = IntellifireAPICloud() - await cloud_api.login( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], - ) - api_key = cloud_api.get_fireplace_api_key() - user_id = cloud_api.get_user_id() - # Update data entry - hass.config_entries.async_update_entry( - entry, - data={ - **entry.data, - CONF_API_KEY: api_key, - CONF_USER_ID: user_id, - }, + LOGGER.debug("Waiting for Fireplace to Initialize") + await asyncio.wait_for( + _async_wait_for_initialization(fireplace), timeout=STARTUP_TIMEOUT ) + except TimeoutError as err: + raise ConfigEntryNotReady( + "Initialization of fireplace timed out after 10 minutes" + ) from err - else: - api_key = entry.data[CONF_API_KEY] - user_id = entry.data[CONF_USER_ID] - - # Instantiate local control - api = 
IntellifireAPILocal( - fireplace_ip=entry.data[CONF_HOST], - api_key=api_key, - user_id=user_id, + # Construct coordinator + data_update_coordinator = IntellifireDataUpdateCoordinator( + hass=hass, fireplace=fireplace ) - # Define the update coordinator - coordinator = IntellifireDataUpdateCoordinator( - hass=hass, - api=api, - ) + LOGGER.debug("Fireplace to Initialized - Awaiting first refresh") + await data_update_coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = data_update_coordinator - await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +async def _async_wait_for_initialization( + fireplace: UnifiedFireplace, timeout=STARTUP_TIMEOUT +): + """Wait for a fireplace to be initialized.""" + while ( + fireplace.data.ipv4_address == "127.0.0.1" and fireplace.data.serial == "unset" + ): + LOGGER.debug(f"Waiting for fireplace to initialize [{fireplace.read_mode}]") + await asyncio.sleep(INIT_WAIT_TIME_SECONDS) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): diff --git a/homeassistant/components/intellifire/binary_sensor.py b/homeassistant/components/intellifire/binary_sensor.py index a1b8865c876..f0a5d84fa62 100644 --- a/homeassistant/components/intellifire/binary_sensor.py +++ b/homeassistant/components/intellifire/binary_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from intellifire4py import IntellifirePollData +from intellifire4py.model import IntelliFirePollData from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -26,7 +26,7 @@ from .entity import IntellifireEntity class IntellifireBinarySensorRequiredKeysMixin: """Mixin for required keys.""" - value_fn: Callable[[IntellifirePollData], bool] + value_fn: Callable[[IntelliFirePollData], bool] @dataclass(frozen=True) diff --git a/homeassistant/components/intellifire/climate.py b/homeassistant/components/intellifire/climate.py index ed4facffc67..4eddde5ff10 100644 --- a/homeassistant/components/intellifire/climate.py +++ b/homeassistant/components/intellifire/climate.py @@ -69,7 +69,7 @@ class IntellifireClimate(IntellifireEntity, ClimateEntity): super().__init__(coordinator, description) if coordinator.data.thermostat_on: - self.last_temp = coordinator.data.thermostat_setpoint_c + self.last_temp = int(coordinator.data.thermostat_setpoint_c) @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/intellifire/config_flow.py b/homeassistant/components/intellifire/config_flow.py index 268fc6623d3..56f0d5ca6a5 100644 --- a/homeassistant/components/intellifire/config_flow.py +++ b/homeassistant/components/intellifire/config_flow.py @@ -7,16 +7,33 @@ from dataclasses import dataclass from typing import Any from aiohttp import ClientConnectionError -from intellifire4py import AsyncUDPFireplaceFinder -from intellifire4py.exceptions import LoginException -from intellifire4py.intellifire import IntellifireAPICloud, IntellifireAPILocal +from intellifire4py.cloud_interface import IntelliFireCloudInterface +from intellifire4py.exceptions import LoginError +from intellifire4py.local_api import IntelliFireAPILocal +from intellifire4py.model import 
IntelliFireCommonFireplaceData import voluptuous as vol from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) -from .const import CONF_USER_ID, DOMAIN, LOGGER +from .const import ( + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, + LOGGER, +) STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -31,17 +48,20 @@ class DiscoveredHostInfo: serial: str | None -async def validate_host_input(host: str, dhcp_mode: bool = False) -> str: +async def _async_poll_local_fireplace_for_serial( + host: str, dhcp_mode: bool = False +) -> str: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. """ LOGGER.debug("Instantiating IntellifireAPI with host: [%s]", host) - api = IntellifireAPILocal(fireplace_ip=host) + api = IntelliFireAPILocal(fireplace_ip=host) await api.poll(suppress_warnings=dhcp_mode) serial = api.data.serial LOGGER.debug("Found a fireplace: %s", serial) + # Return the serial number which will be used to calculate a unique ID for the device/sensors return serial @@ -50,239 +70,206 @@ class IntelliFireConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for IntelliFire.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the Config Flow Handler.""" - self._host: str = "" - self._serial: str = "" - self._not_configured_hosts: list[DiscoveredHostInfo] = [] + + # DHCP Variables + self._dhcp_discovered_serial: str = "" # used only in discovery mode self._discovered_host: DiscoveredHostInfo + self._dhcp_mode = False + self._is_reauth = False + + self._not_configured_hosts: list[DiscoveredHostInfo] = [] self._reauth_needed: DiscoveredHostInfo - async def _find_fireplaces(self): - """Perform UDP discovery.""" - fireplace_finder = AsyncUDPFireplaceFinder() - discovered_hosts = await fireplace_finder.search_fireplace(timeout=12) - configured_hosts = { - entry.data[CONF_HOST] - for entry in self._async_current_entries(include_ignore=False) - if CONF_HOST in entry.data # CONF_HOST will be missing for ignored entries - } + self._configured_serials: list[str] = [] - self._not_configured_hosts = [ - DiscoveredHostInfo(ip, None) - for ip in discovered_hosts - if ip not in configured_hosts - ] - LOGGER.debug("Discovered Hosts: %s", discovered_hosts) - LOGGER.debug("Configured Hosts: %s", configured_hosts) - LOGGER.debug("Not Configured Hosts: %s", self._not_configured_hosts) - - async def validate_api_access_and_create_or_update( - self, *, host: str, username: str, password: str, serial: str - ): - """Validate username/password against api.""" - LOGGER.debug("Attempting login to iftapi with: %s", username) - - ift_cloud = IntellifireAPICloud() - await ift_cloud.login(username=username, password=password) - api_key = ift_cloud.get_fireplace_api_key() - user_id = ift_cloud.get_user_id() - - data = { - CONF_HOST: host, - CONF_PASSWORD: password, - CONF_USERNAME: username, - CONF_API_KEY: api_key, - CONF_USER_ID: user_id, - } - - # Update or Create - existing_entry = await self.async_set_unique_id(serial) - if existing_entry: - 
self.hass.config_entries.async_update_entry(existing_entry, data=data) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry(title=f"Fireplace {serial}", data=data) - - async def async_step_api_config( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Configure API access.""" - - errors = {} - control_schema = vol.Schema( - { - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } - ) - - if user_input is not None: - control_schema = vol.Schema( - { - vol.Required( - CONF_USERNAME, default=user_input.get(CONF_USERNAME, "") - ): str, - vol.Required( - CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "") - ): str, - } - ) - - try: - return await self.validate_api_access_and_create_or_update( - host=self._host, - username=user_input[CONF_USERNAME], - password=user_input[CONF_PASSWORD], - serial=self._serial, - ) - - except (ConnectionError, ClientConnectionError): - errors["base"] = "iftapi_connect" - LOGGER.error( - "Could not connect to iftapi.net over https - verify connectivity" - ) - except LoginException: - errors["base"] = "api_error" - LOGGER.error("Invalid credentials for iftapi.net") - - return self.async_show_form( - step_id="api_config", errors=errors, data_schema=control_schema - ) - - async def _async_validate_ip_and_continue(self, host: str) -> ConfigFlowResult: - """Validate local config and continue.""" - self._async_abort_entries_match({CONF_HOST: host}) - self._serial = await validate_host_input(host) - await self.async_set_unique_id(self._serial, raise_on_progress=False) - self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - # Store current data and jump to next stage - self._host = host - - return await self.async_step_api_config() - - async def async_step_manual_device_entry(self, user_input=None): - """Handle manual input of local IP configuration.""" - LOGGER.debug("STEP: manual_device_entry") - errors = {} - self._host = user_input.get(CONF_HOST) if user_input else None - if user_input is not None: - try: - return await self._async_validate_ip_and_continue(self._host) - except (ConnectionError, ClientConnectionError): - errors["base"] = "cannot_connect" - - return self.async_show_form( - step_id="manual_device_entry", - errors=errors, - data_schema=vol.Schema({vol.Required(CONF_HOST, default=self._host): str}), - ) - - async def async_step_pick_device( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Pick which device to configure.""" - errors = {} - LOGGER.debug("STEP: pick_device") - - if user_input is not None: - if user_input[CONF_HOST] == MANUAL_ENTRY_STRING: - return await self.async_step_manual_device_entry() - - try: - return await self._async_validate_ip_and_continue(user_input[CONF_HOST]) - except (ConnectionError, ClientConnectionError): - errors["base"] = "cannot_connect" - - return self.async_show_form( - step_id="pick_device", - errors=errors, - data_schema=vol.Schema( - { - vol.Required(CONF_HOST): vol.In( - [host.ip for host in self._not_configured_hosts] - + [MANUAL_ENTRY_STRING] - ) - } - ), - ) + # Define a cloud api interface we can use + self.cloud_api_interface = IntelliFireCloudInterface() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Start the user flow.""" - # Launch fireplaces discovery - await self._find_fireplaces() - LOGGER.debug("STEP: user") - if self._not_configured_hosts: - 
LOGGER.debug("Running Step: pick_device") - return await self.async_step_pick_device() - LOGGER.debug("Running Step: manual_device_entry") - return await self.async_step_manual_device_entry() + current_entries = self._async_current_entries(include_ignore=False) + self._configured_serials = [ + entry.data[CONF_SERIAL] for entry in current_entries + ] + + return await self.async_step_cloud_api() + + async def async_step_cloud_api( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Authenticate against IFTAPI Cloud in order to see configured devices. + + Local control of IntelliFire devices requires that the user download the correct API KEY which is only available on the cloud. Cloud control of the devices requires the user has at least once authenticated against the cloud and a set of cookie variables have been stored locally. + + """ + errors: dict[str, str] = {} + LOGGER.debug("STEP: cloud_api") + + if user_input is not None: + try: + async with self.cloud_api_interface as cloud_interface: + await cloud_interface.login_with_credentials( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + + # If login was successful pass username/password to next step + return await self.async_step_pick_cloud_device() + except LoginError: + errors["base"] = "api_error" + + return self.async_show_form( + step_id="cloud_api", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } + ), + ) + + async def async_step_pick_cloud_device( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Step to select a device from the cloud. + + We can only get here if we have logged in. If there is only one device available it will be auto-configured, + else the user will be given a choice to pick a device. 
+ """ + errors: dict[str, str] = {} + LOGGER.debug( + f"STEP: pick_cloud_device: {user_input} - DHCP_MODE[{self._dhcp_mode}" + ) + + if self._dhcp_mode or user_input is not None: + if self._dhcp_mode: + serial = self._dhcp_discovered_serial + LOGGER.debug(f"DHCP Mode detected for serial [{serial}]") + if user_input is not None: + serial = user_input[CONF_SERIAL] + + # Run a unique ID Check prior to anything else + await self.async_set_unique_id(serial) + self._abort_if_unique_id_configured(updates={CONF_SERIAL: serial}) + + # If Serial is Good obtain fireplace and configure + fireplace = self.cloud_api_interface.user_data.get_data_for_serial(serial) + if fireplace: + return await self._async_create_config_entry_from_common_data( + fireplace=fireplace + ) + + # Parse User Data to see if we auto-configure or prompt for selection: + user_data = self.cloud_api_interface.user_data + + available_fireplaces: list[IntelliFireCommonFireplaceData] = [ + fp + for fp in user_data.fireplaces + if fp.serial not in self._configured_serials + ] + + # Abort if all devices have been configured + if not available_fireplaces: + return self.async_abort(reason="no_available_devices") + + # If there is a single fireplace configure it + if len(available_fireplaces) == 1: + if self._is_reauth: + reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self._async_create_config_entry_from_common_data( + fireplace=available_fireplaces[0], existing_entry=reauth_entry + ) + + return await self._async_create_config_entry_from_common_data( + fireplace=available_fireplaces[0] + ) + + return self.async_show_form( + step_id="pick_cloud_device", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_SERIAL): vol.In( + [fp.serial for fp in available_fireplaces] + ) + } + ), + ) + + async def _async_create_config_entry_from_common_data( + self, + fireplace: IntelliFireCommonFireplaceData, + existing_entry: ConfigEntry | None = None, + ) -> ConfigFlowResult: + """Construct a config entry based on an object of IntelliFireCommonFireplaceData.""" + + data = { + CONF_IP_ADDRESS: fireplace.ip_address, + CONF_API_KEY: fireplace.api_key, + CONF_SERIAL: fireplace.serial, + CONF_AUTH_COOKIE: fireplace.auth_cookie, + CONF_WEB_CLIENT_ID: fireplace.web_client_id, + CONF_USER_ID: fireplace.user_id, + CONF_USERNAME: self.cloud_api_interface.user_data.username, + CONF_PASSWORD: self.cloud_api_interface.user_data.password, + } + + options = {CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_LOCAL} + + if existing_entry: + return self.async_update_reload_and_abort( + existing_entry, data=data, options=options + ) + return self.async_create_entry( + title=f"Fireplace {fireplace.serial}", data=data, options=options + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" LOGGER.debug("STEP: reauth") + self._is_reauth = True entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - assert entry.unique_id # populate the expected vars - self._serial = entry.unique_id - self._host = entry.data[CONF_HOST] + self._dhcp_discovered_serial = entry.data[CONF_SERIAL] # type: ignore[union-attr] - placeholders = {CONF_HOST: self._host, "serial": self._serial} + placeholders = {"serial": self._dhcp_discovered_serial} self.context["title_placeholders"] = placeholders - return await self.async_step_api_config() + + return await self.async_step_cloud_api() async def 
async_step_dhcp( self, discovery_info: DhcpServiceInfo ) -> ConfigFlowResult: """Handle DHCP Discovery.""" + self._dhcp_mode = True # Run validation logic on ip - host = discovery_info.ip - LOGGER.debug("STEP: dhcp for host %s", host) + ip_address = discovery_info.ip + LOGGER.debug("STEP: dhcp for ip_address %s", ip_address) - self._async_abort_entries_match({CONF_HOST: host}) + self._async_abort_entries_match({CONF_IP_ADDRESS: ip_address}) try: - self._serial = await validate_host_input(host, dhcp_mode=True) + self._dhcp_discovered_serial = await _async_poll_local_fireplace_for_serial( + ip_address, dhcp_mode=True + ) except (ConnectionError, ClientConnectionError): LOGGER.debug( - "DHCP Discovery has determined %s is not an IntelliFire device", host + "DHCP Discovery has determined %s is not an IntelliFire device", + ip_address, ) return self.async_abort(reason="not_intellifire_device") - await self.async_set_unique_id(self._serial) - self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - self._discovered_host = DiscoveredHostInfo(ip=host, serial=self._serial) - - placeholders = {CONF_HOST: host, "serial": self._serial} - self.context["title_placeholders"] = placeholders - self._set_confirm_only() - - return await self.async_step_dhcp_confirm() - - async def async_step_dhcp_confirm(self, user_input=None): - """Attempt to confirm.""" - - LOGGER.debug("STEP: dhcp_confirm") - # Add the hosts one by one - host = self._discovered_host.ip - serial = self._discovered_host.serial - - if user_input is None: - # Show the confirmation dialog - return self.async_show_form( - step_id="dhcp_confirm", - description_placeholders={CONF_HOST: host, "serial": serial}, - ) - - return self.async_create_entry( - title=f"Fireplace {serial}", - data={CONF_HOST: host}, - ) + return await self.async_step_cloud_api() diff --git a/homeassistant/components/intellifire/const.py b/homeassistant/components/intellifire/const.py index 5c8af1eefe9..f194eeaf4e2 100644 --- a/homeassistant/components/intellifire/const.py +++ b/homeassistant/components/intellifire/const.py @@ -5,11 +5,22 @@ from __future__ import annotations import logging DOMAIN = "intellifire" - -CONF_USER_ID = "user_id" - LOGGER = logging.getLogger(__package__) +DEFAULT_THERMOSTAT_TEMP = 21 + +CONF_USER_ID = "user_id" # part of the cloud cookie +CONF_WEB_CLIENT_ID = "web_client_id" # part of the cloud cookie +CONF_AUTH_COOKIE = "auth_cookie" # part of the cloud cookie CONF_SERIAL = "serial" +CONF_READ_MODE = "cloud_read" +CONF_CONTROL_MODE = "cloud_control" -DEFAULT_THERMOSTAT_TEMP = 21 + +API_MODE_LOCAL = "local" +API_MODE_CLOUD = "cloud" + + +STARTUP_TIMEOUT = 600 + +INIT_WAIT_TIME_SECONDS = 10 diff --git a/homeassistant/components/intellifire/coordinator.py b/homeassistant/components/intellifire/coordinator.py index 0a46ff61435..b4f03f4b5c8 100644 --- a/homeassistant/components/intellifire/coordinator.py +++ b/homeassistant/components/intellifire/coordinator.py @@ -2,27 +2,27 @@ from __future__ import annotations -import asyncio from datetime import timedelta -from aiohttp import ClientConnectionError -from intellifire4py import IntellifirePollData -from intellifire4py.intellifire import IntellifireAPILocal +from intellifire4py import UnifiedFireplace +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData +from intellifire4py.read import IntelliFireDataProvider from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from 
homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

 from .const import DOMAIN, LOGGER


-class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData]):
+class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntelliFirePollData]):
     """Class to manage the polling of the fireplace API."""

     def __init__(
         self,
         hass: HomeAssistant,
-        api: IntellifireAPILocal,
+        fireplace: UnifiedFireplace,
     ) -> None:
         """Initialize the Coordinator."""
         super().__init__(
@@ -31,36 +31,21 @@ class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData
             name=DOMAIN,
             update_interval=timedelta(seconds=15),
         )
-        self._api = api
-    async def _async_update_data(self) -> IntellifirePollData:
-        if not self._api.is_polling_in_background:
-            LOGGER.info("Starting Intellifire Background Polling Loop")
-            await self._api.start_background_polling()
-
-        # Don't return uninitialized poll data
-        async with asyncio.timeout(15):
-            try:
-                await self._api.poll()
-            except (ConnectionError, ClientConnectionError) as exception:
-                raise UpdateFailed from exception
-
-        LOGGER.debug("Failure Count %d", self._api.failed_poll_attempts)
-        if self._api.failed_poll_attempts > 10:
-            LOGGER.debug("Too many polling errors - raising exception")
-            raise UpdateFailed
-
-        return self._api.data
+        self.fireplace = fireplace

     @property
-    def read_api(self) -> IntellifireAPILocal:
+    def read_api(self) -> IntelliFireDataProvider:
         """Return the Status API pointer."""
-        return self._api
+        return self.fireplace.read_api

     @property
-    def control_api(self) -> IntellifireAPILocal:
+    def control_api(self) -> IntelliFireController:
         """Return the control API."""
-        return self._api
+        return self.fireplace.control_api
+
+    async def _async_update_data(self) -> IntelliFirePollData:
+        return self.fireplace.data

     @property
     def device_info(self) -> DeviceInfo:
@@ -69,7 +54,6 @@ class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData
             manufacturer="Hearth and Home",
             model="IFT-WFM",
             name="IntelliFire",
-            identifiers={("IntelliFire", f"{self.read_api.data.serial}]")},
-            sw_version=self.read_api.data.fw_ver_str,
-            configuration_url=f"http://{self._api.fireplace_ip}/poll",
+            identifiers={("IntelliFire", str(self.fireplace.serial))},
+            configuration_url=f"http://{self.fireplace.ip_address}/poll",
         )
diff --git a/homeassistant/components/intellifire/entity.py b/homeassistant/components/intellifire/entity.py
index 3b35c9dabd8..571c4717ac2 100644
--- a/homeassistant/components/intellifire/entity.py
+++ b/homeassistant/components/intellifire/entity.py
@@ -9,7 +9,7 @@ from . import IntellifireDataUpdateCoordinator


 class IntellifireEntity(CoordinatorEntity[IntellifireDataUpdateCoordinator]):
-    """Define a generic class for Intellifire entities."""
+    """Define a generic class for IntelliFire entities."""

     _attr_attribution = "Data provided by unpublished Intellifire API"
     _attr_has_entity_name = True
@@ -22,6 +22,8 @@ class IntellifireEntity(CoordinatorEntity[IntellifireDataUpdateCoordinator]):
         """Class initializer."""
         super().__init__(coordinator=coordinator)
         self.entity_description = description
-        self._attr_unique_id = f"{description.key}_{coordinator.read_api.data.serial}"
+        self._attr_unique_id = f"{description.key}_{coordinator.fireplace.serial}"
+        self.identifiers = ({("IntelliFire", f"{coordinator.fireplace.serial}]")},)
+
         # Configure the Device Info
         self._attr_device_info = self.coordinator.device_info
diff --git a/homeassistant/components/intellifire/fan.py b/homeassistant/components/intellifire/fan.py
index f68827b0a56..dc2fc279a5d 100644
--- a/homeassistant/components/intellifire/fan.py
+++ b/homeassistant/components/intellifire/fan.py
@@ -7,7 +7,8 @@ from dataclasses import dataclass
 import math
 from typing import Any

-from intellifire4py import IntellifireControlAsync, IntellifirePollData
+from intellifire4py.control import IntelliFireController
+from intellifire4py.model import IntelliFirePollData

 from homeassistant.components.fan import (
     FanEntity,
@@ -31,8 +32,8 @@ from .entity import IntellifireEntity
 class IntellifireFanRequiredKeysMixin:
     """Required keys for fan entity."""

-    set_fn: Callable[[IntellifireControlAsync, int], Awaitable]
-    value_fn: Callable[[IntellifirePollData], bool]
+    set_fn: Callable[[IntelliFireController, int], Awaitable]
+    value_fn: Callable[[IntelliFirePollData], int]
     speed_range: tuple[int, int]

@@ -91,7 +92,8 @@ class IntellifireFan(IntellifireEntity, FanEntity):
     def percentage(self) -> int | None:
         """Return fan percentage."""
         return ranged_value_to_percentage(
-            self.entity_description.speed_range, self.coordinator.read_api.data.fanspeed
+            self.entity_description.speed_range,
+            self.coordinator.read_api.data.fanspeed,
         )

     @property
diff --git a/homeassistant/components/intellifire/light.py b/homeassistant/components/intellifire/light.py
index a7f2befaf33..5f25b5de823 100644
--- a/homeassistant/components/intellifire/light.py
+++ b/homeassistant/components/intellifire/light.py
@@ -6,7 +6,8 @@ from collections.abc import Awaitable, Callable
 from dataclasses import dataclass
 from typing import Any

-from intellifire4py import IntellifireControlAsync, IntellifirePollData
+from intellifire4py.control import IntelliFireController
+from intellifire4py.model import IntelliFirePollData

 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
@@ -27,8 +28,8 @@ from .entity import IntellifireEntity
 class IntellifireLightRequiredKeysMixin:
     """Required keys for fan entity."""

-    set_fn: Callable[[IntellifireControlAsync, int], Awaitable]
-    value_fn: Callable[[IntellifirePollData], bool]
+    set_fn: Callable[[IntelliFireController, int], Awaitable]
+    value_fn: Callable[[IntelliFirePollData], int]


 @dataclass(frozen=True)
@@ -56,7 +57,7 @@ class IntellifireLight(IntellifireEntity, LightEntity):
     _attr_supported_color_modes = {ColorMode.BRIGHTNESS}

     @property
-    def brightness(self):
+    def brightness(self) -> int:
         """Return the current brightness 0-255."""
         return 85 * self.entity_description.value_fn(self.coordinator.read_api.data)
diff --git a/homeassistant/components/intellifire/manifest.json b/homeassistant/components/intellifire/manifest.json
index 90d41fcffe7..e3ee663e8fe 100644
--- a/homeassistant/components/intellifire/manifest.json
+++ b/homeassistant/components/intellifire/manifest.json
@@ -11,5 +11,5 @@
   "documentation": "https://www.home-assistant.io/integrations/intellifire",
   "iot_class": "local_polling",
   "loggers": ["intellifire4py"],
-  "requirements": ["intellifire4py==2.2.2"]
+  "requirements": ["intellifire4py==4.1.9"]
 }
diff --git a/homeassistant/components/intellifire/sensor.py b/homeassistant/components/intellifire/sensor.py
index dd3eef9c9b4..eaff89d08e7 100644
--- a/homeassistant/components/intellifire/sensor.py
+++ b/homeassistant/components/intellifire/sensor.py
@@ -6,8 +6,6 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import datetime, timedelta

-from intellifire4py import IntellifirePollData
-
 from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
@@ -29,7 +27,9 @@ from .entity import IntellifireEntity
 class IntellifireSensorRequiredKeysMixin:
     """Mixin for required keys."""

-    value_fn: Callable[[IntellifirePollData], int | str | datetime | None]
+    value_fn: Callable[
+        [IntellifireDataUpdateCoordinator], int | str | datetime | float | None
+    ]


 @dataclass(frozen=True)
@@ -40,16 +40,29 @@ class IntellifireSensorEntityDescription(
     """Describes a sensor entity."""


-def _time_remaining_to_timestamp(data: IntellifirePollData) -> datetime | None:
+def _time_remaining_to_timestamp(
+    coordinator: IntellifireDataUpdateCoordinator,
+) -> datetime | None:
     """Define a sensor that takes into account timezone."""
-    if not (seconds_offset := data.timeremaining_s):
+    if not (seconds_offset := coordinator.data.timeremaining_s):
         return None
     return utcnow() + timedelta(seconds=seconds_offset)


-def _downtime_to_timestamp(data: IntellifirePollData) -> datetime | None:
+def _downtime_to_timestamp(
+    coordinator: IntellifireDataUpdateCoordinator,
+) -> datetime | None:
     """Define a sensor that takes into account a timezone."""
-    if not (seconds_offset := data.downtime):
+    if not (seconds_offset := coordinator.data.downtime):
+        return None
+    return utcnow() - timedelta(seconds=seconds_offset)
+
+
+def _uptime_to_timestamp(
+    coordinator: IntellifireDataUpdateCoordinator,
+) -> datetime | None:
+    """Return a timestamp of how long the sensor has been up."""
+    if not (seconds_offset := coordinator.data.uptime):
         return None
     return utcnow() - timedelta(seconds=seconds_offset)

@@ -60,14 +73,14 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] = (
         translation_key="flame_height",
         state_class=SensorStateClass.MEASUREMENT,
         # UI uses 1-5 for flame height, backing lib uses 0-4
-        value_fn=lambda data: (data.flameheight + 1),
+        value_fn=lambda coordinator: (coordinator.data.flameheight + 1),
     ),
     IntellifireSensorEntityDescription(
         key="temperature",
         state_class=SensorStateClass.MEASUREMENT,
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
-        value_fn=lambda data: data.temperature_c,
+        value_fn=lambda coordinator: coordinator.data.temperature_c,
     ),
     IntellifireSensorEntityDescription(
         key="target_temp",
@@ -75,13 +88,13 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...]
= ( state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda data: data.thermostat_setpoint_c, + value_fn=lambda coordinator: coordinator.data.thermostat_setpoint_c, ), IntellifireSensorEntityDescription( key="fan_speed", translation_key="fan_speed", state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.fanspeed, + value_fn=lambda coordinator: coordinator.data.fanspeed, ), IntellifireSensorEntityDescription( key="timer_end_timestamp", @@ -102,27 +115,27 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] = ( translation_key="uptime", entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: utcnow() - timedelta(seconds=data.uptime), + value_fn=_uptime_to_timestamp, ), IntellifireSensorEntityDescription( key="connection_quality", translation_key="connection_quality", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.connection_quality, + value_fn=lambda coordinator: coordinator.data.connection_quality, entity_registry_enabled_default=False, ), IntellifireSensorEntityDescription( key="ecm_latency", translation_key="ecm_latency", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.ecm_latency, + value_fn=lambda coordinator: coordinator.data.ecm_latency, entity_registry_enabled_default=False, ), IntellifireSensorEntityDescription( key="ipv4_address", translation_key="ipv4_address", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.ipv4_address, + value_fn=lambda coordinator: coordinator.data.ipv4_address, ), ) @@ -134,17 +147,17 @@ async def async_setup_entry( coordinator: IntellifireDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( - IntellifireSensor(coordinator=coordinator, description=description) + IntelliFireSensor(coordinator=coordinator, description=description) for description in INTELLIFIRE_SENSORS ) -class IntellifireSensor(IntellifireEntity, SensorEntity): - """Extends IntellifireEntity with Sensor specific logic.""" +class IntelliFireSensor(IntellifireEntity, SensorEntity): + """Extends IntelliFireEntity with Sensor specific logic.""" entity_description: IntellifireSensorEntityDescription @property - def native_value(self) -> int | str | datetime | None: + def native_value(self) -> int | str | datetime | float | None: """Return the state.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intellifire/strings.json b/homeassistant/components/intellifire/strings.json index 6393a4e070d..2eeb2b50b93 100644 --- a/homeassistant/components/intellifire/strings.json +++ b/homeassistant/components/intellifire/strings.json @@ -1,39 +1,30 @@ { "config": { - "flow_title": "{serial} ({host})", + "flow_title": "{serial}", "step": { - "manual_device_entry": { - "description": "Local Configuration", - "data": { - "host": "Host (IP Address)" - } + "pick_cloud_device": { + "title": "Configure fireplace", + "description": "Select fireplace by serial number:" }, - "api_config": { + "cloud_api": { + "description": "Authenticate against IntelliFire Cloud", + "data_description": { + "username": "Your IntelliFire app username", + "password": "Your IntelliFire app password" + }, "data": { "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } - }, - 
"dhcp_confirm": { - "description": "Do you want to set up {host}\nSerial: {serial}?" - }, - "pick_device": { - "title": "Device Selection", - "description": "The following IntelliFire devices were discovered. Please select which you wish to configure.", - "data": { - "host": "[%key:common::config_flow::data::host%]" - } } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "api_error": "Login failed", - "iftapi_connect": "Error conecting to iftapi.net" + "api_error": "Login failed" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "not_intellifire_device": "Not an IntelliFire Device." + "not_intellifire_device": "Not an IntelliFire device.", + "no_available_devices": "All available devices have already been configured." } }, "entity": { diff --git a/homeassistant/components/intellifire/switch.py b/homeassistant/components/intellifire/switch.py index 00de6d74a9c..ac6096497b6 100644 --- a/homeassistant/components/intellifire/switch.py +++ b/homeassistant/components/intellifire/switch.py @@ -6,16 +6,13 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from intellifire4py import IntellifirePollData -from intellifire4py.intellifire import IntellifireAPILocal - from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import IntellifireDataUpdateCoordinator from .const import DOMAIN -from .coordinator import IntellifireDataUpdateCoordinator from .entity import IntellifireEntity @@ -23,9 +20,9 @@ from .entity import IntellifireEntity class IntellifireSwitchRequiredKeysMixin: """Mixin for required keys.""" - on_fn: Callable[[IntellifireAPILocal], Awaitable] - off_fn: Callable[[IntellifireAPILocal], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + on_fn: Callable[[IntellifireDataUpdateCoordinator], Awaitable] + off_fn: Callable[[IntellifireDataUpdateCoordinator], Awaitable] + value_fn: Callable[[IntellifireDataUpdateCoordinator], bool] @dataclass(frozen=True) @@ -39,16 +36,16 @@ INTELLIFIRE_SWITCHES: tuple[IntellifireSwitchEntityDescription, ...] 
= ( IntellifireSwitchEntityDescription( key="on_off", translation_key="flame", - on_fn=lambda control_api: control_api.flame_on(), - off_fn=lambda control_api: control_api.flame_off(), - value_fn=lambda data: data.is_on, + on_fn=lambda coordinator: coordinator.control_api.flame_on(), + off_fn=lambda coordinator: coordinator.control_api.flame_off(), + value_fn=lambda coordinator: coordinator.read_api.data.is_on, ), IntellifireSwitchEntityDescription( key="pilot", translation_key="pilot_light", - on_fn=lambda control_api: control_api.pilot_on(), - off_fn=lambda control_api: control_api.pilot_off(), - value_fn=lambda data: data.pilot_on, + on_fn=lambda coordinator: coordinator.control_api.pilot_on(), + off_fn=lambda coordinator: coordinator.control_api.pilot_off(), + value_fn=lambda coordinator: coordinator.read_api.data.pilot_on, ), ) @@ -74,15 +71,15 @@ class IntellifireSwitch(IntellifireEntity, SwitchEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the switch.""" - await self.entity_description.on_fn(self.coordinator.control_api) + await self.entity_description.on_fn(self.coordinator) await self.async_update_ha_state(force_refresh=True) async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the switch.""" - await self.entity_description.off_fn(self.coordinator.control_api) + await self.entity_description.off_fn(self.coordinator) await self.async_update_ha_state(force_refresh=True) @property def is_on(self) -> bool | None: """Return the on state.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/requirements_all.txt b/requirements_all.txt index f7d8147a058..9c12227e6ab 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1180,7 +1180,7 @@ inkbird-ble==0.5.8 insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==2.2.2 +intellifire4py==4.1.9 # homeassistant.components.iotty iottycloud==0.1.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f234b427248..c4d7dd1ad44 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -988,7 +988,7 @@ inkbird-ble==0.5.8 insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==2.2.2 +intellifire4py==4.1.9 # homeassistant.components.iotty iottycloud==0.1.3 diff --git a/tests/components/intellifire/__init__.py b/tests/components/intellifire/__init__.py index f655ccc2fa4..50497939f7f 100644 --- a/tests/components/intellifire/__init__.py +++ b/tests/components/intellifire/__init__.py @@ -1 +1,13 @@ """Tests for the IntelliFire integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index cf1e085c10f..251d5bdde48 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -1,14 +1,40 @@ """Fixtures for IntelliFire integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch -from aiohttp.client_reqrep import 
ConnectionKey +from intellifire4py.const import IntelliFireApiMode +from intellifire4py.model import ( + IntelliFireCommonFireplaceData, + IntelliFirePollData, + IntelliFireUserData, +) import pytest +from homeassistant.components.intellifire.const import ( + API_MODE_CLOUD, + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, +) +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) + +from tests.common import MockConfigEntry, load_json_object_fixture + @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Mock setting up a config entry.""" with patch( "homeassistant.components.intellifire.async_setup_entry", return_value=True @@ -17,44 +43,206 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_fireplace_finder_none() -> Generator[MagicMock]: +def mock_fireplace_finder_none() -> Generator[None, MagicMock, None]: """Mock fireplace finder.""" mock_found_fireplaces = Mock() mock_found_fireplaces.ips = [] with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" + "homeassistant.components.intellifire.config_flow.UDPFireplaceFinder.search_fireplace" ): yield mock_found_fireplaces @pytest.fixture -def mock_fireplace_finder_single() -> Generator[MagicMock]: - """Mock fireplace finder.""" - mock_found_fireplaces = Mock() - mock_found_fireplaces.ips = ["192.168.1.69"] - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" - ): - yield mock_found_fireplaces +def mock_config_entry_current() -> MockConfigEntry: + """Return a mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=2, + data={ + CONF_IP_ADDRESS: "192.168.2.108", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", + CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", + CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, + unique_id="3FB284769E4736F30C8973A7ED358123", + ) @pytest.fixture -def mock_intellifire_config_flow() -> Generator[MagicMock]: - """Return a mocked IntelliFire client.""" - data_mock = Mock() - data_mock.serial = "12345" +def mock_config_entry_old() -> MockConfigEntry: + """For migration testing.""" + return MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=1, + title="Fireplace 3FB284769E4736F30C8973A7ED358123", + data={ + CONF_HOST: "192.168.2.108", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + ) + +@pytest.fixture +def mock_common_data_local() -> IntelliFireCommonFireplaceData: + """Fixture for mock common data.""" + return IntelliFireCommonFireplaceData( + auth_cookie="B984F21A6378560019F8A1CDE41B6782", + user_id="52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + web_client_id="FA2B1C3045601234D0AE17D72F8E975", + serial="3FB284769E4736F30C8973A7ED358123", + api_key="B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + ip_address="192.168.2.108", + read_mode=IntelliFireApiMode.LOCAL, + 
control_mode=IntelliFireApiMode.LOCAL, + ) + + +@pytest.fixture +def mock_apis_multifp( + mock_cloud_interface, mock_local_interface, mock_fp +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock], None, None]: + """Multi fireplace version of mocks.""" + return mock_local_interface, mock_cloud_interface, mock_fp + + +@pytest.fixture +def mock_apis_single_fp( + mock_cloud_interface, mock_local_interface, mock_fp +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock], None, None]: + """Single fire place version of the mocks.""" + data_v1 = IntelliFireUserData( + **load_json_object_fixture("user_data_1.json", DOMAIN) + ) + with patch.object( + type(mock_cloud_interface), "user_data", new_callable=PropertyMock + ) as mock_user_data: + mock_user_data.return_value = data_v1 + yield mock_local_interface, mock_cloud_interface, mock_fp + + +@pytest.fixture +def mock_cloud_interface() -> Generator[AsyncMock, None, None]: + """Mock cloud interface to use for testing.""" + user_data = IntelliFireUserData( + **load_json_object_fixture("user_data_3.json", DOMAIN) + ) + + with ( + patch( + "homeassistant.components.intellifire.IntelliFireCloudInterface", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.intellifire.config_flow.IntelliFireCloudInterface", + new=mock_client, + ), + patch( + "intellifire4py.cloud_interface.IntelliFireCloudInterface", + new=mock_client, + ), + ): + # Mock async context manager + mock_client = mock_client.return_value + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + # Mock other async methods if needed + mock_client.login_with_credentials = AsyncMock() + mock_client.poll = AsyncMock() + type(mock_client).user_data = PropertyMock(return_value=user_data) + + yield mock_client # Yielding to the test + + +@pytest.fixture +def mock_local_interface() -> Generator[AsyncMock, None, None]: + """Mock version of IntelliFireAPILocal.""" + poll_data = IntelliFirePollData( + **load_json_object_fixture("intellifire/local_poll.json") + ) with patch( - "homeassistant.components.intellifire.config_flow.IntellifireAPILocal", + "homeassistant.components.intellifire.config_flow.IntelliFireAPILocal", autospec=True, - ) as intellifire_mock: - intellifire = intellifire_mock.return_value - intellifire.data = data_mock - yield intellifire + ) as mock_client: + mock_client = mock_client.return_value + # Mock all instances of the class + type(mock_client).data = PropertyMock(return_value=poll_data) + yield mock_client -def mock_api_connection_error() -> ConnectionError: - """Return a fake a ConnectionError for iftapi.net.""" - ret = ConnectionError() - ret.args = [ConnectionKey("iftapi.net", 443, False, None, None, None, None)] - return ret +@pytest.fixture +def mock_fp(mock_common_data_local) -> Generator[AsyncMock, None, None]: + """Mock fireplace.""" + + local_poll_data = IntelliFirePollData( + **load_json_object_fixture("local_poll.json", DOMAIN) + ) + + assert local_poll_data.connection_quality == 988451 + + with patch( + "homeassistant.components.intellifire.UnifiedFireplace" + ) as mock_unified_fireplace: + # Create an instance of the mock + mock_instance = mock_unified_fireplace.return_value + + # Mock methods and properties of the instance + mock_instance.perform_cloud_poll = AsyncMock() + mock_instance.perform_local_poll = AsyncMock() + + mock_instance.async_validate_connectivity = AsyncMock(return_value=(True, True)) + + type(mock_instance).is_cloud_polling = PropertyMock(return_value=False) + 
type(mock_instance).is_local_polling = PropertyMock(return_value=True) + + mock_instance.get_user_data_as_json.return_value = '{"mock": "data"}' + + mock_instance.ip_address = "192.168.1.100" + mock_instance.api_key = "mock_api_key" + mock_instance.serial = "mock_serial" + mock_instance.user_id = "mock_user_id" + mock_instance.auth_cookie = "mock_auth_cookie" + mock_instance.web_client_id = "mock_web_client_id" + + # Configure the READ Api + mock_instance.read_api = MagicMock() + mock_instance.read_api.poll = MagicMock(return_value=local_poll_data) + mock_instance.read_api.data = local_poll_data + + mock_instance.control_api = MagicMock() + + mock_instance.local_connectivity = True + mock_instance.cloud_connectivity = False + + mock_instance._read_mode = IntelliFireApiMode.LOCAL + mock_instance.read_mode = IntelliFireApiMode.LOCAL + + mock_instance.control_mode = IntelliFireApiMode.LOCAL + mock_instance._control_mode = IntelliFireApiMode.LOCAL + + mock_instance.data = local_poll_data + + mock_instance.set_read_mode = AsyncMock() + mock_instance.set_control_mode = AsyncMock() + + mock_instance.async_validate_connectivity = AsyncMock( + return_value=(True, False) + ) + + # Patch class methods + with patch( + "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", + new_callable=AsyncMock, + return_value=mock_instance, + ): + yield mock_instance diff --git a/tests/components/intellifire/fixtures/local_poll.json b/tests/components/intellifire/fixtures/local_poll.json new file mode 100644 index 00000000000..9dac47c698d --- /dev/null +++ b/tests/components/intellifire/fixtures/local_poll.json @@ -0,0 +1,29 @@ +{ + "name": "", + "serial": "4GC295860E5837G40D9974B7FD459234", + "temperature": 17, + "battery": 0, + "pilot": 1, + "light": 0, + "height": 1, + "fanspeed": 1, + "hot": 0, + "power": 1, + "thermostat": 0, + "setpoint": 0, + "timer": 0, + "timeremaining": 0, + "prepurge": 0, + "feature_light": 0, + "feature_thermostat": 1, + "power_vent": 0, + "feature_fan": 1, + "errors": [], + "fw_version": "0x00030200", + "fw_ver_str": "0.3.2+hw2", + "downtime": 0, + "uptime": 117, + "connection_quality": 988451, + "ecm_latency": 0, + "ipv4_address": "192.168.2.108" +} diff --git a/tests/components/intellifire/fixtures/user_data_1.json b/tests/components/intellifire/fixtures/user_data_1.json new file mode 100644 index 00000000000..501d240662b --- /dev/null +++ b/tests/components/intellifire/fixtures/user_data_1.json @@ -0,0 +1,17 @@ +{ + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "fireplaces": [ + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123" + } + ], + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas" +} diff --git a/tests/components/intellifire/fixtures/user_data_3.json b/tests/components/intellifire/fixtures/user_data_3.json new file mode 100644 index 00000000000..39e9c95abbd --- /dev/null +++ b/tests/components/intellifire/fixtures/user_data_3.json @@ -0,0 +1,33 @@ +{ + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": 
"FA2B1C3045601234D0AE17D72F8E975", + "fireplaces": [ + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123" + }, + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.109", + "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", + "serial": "4GC295860E5837G40D9974B7FD459234" + }, + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.110", + "api_key": "E5D6FC39CCED52F1FB21AFF9BFA6DE56", + "serial": "5HD306971F5938H51EAA85C8GE561345" + } + ], + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas" +} diff --git a/tests/components/intellifire/snapshots/test_binary_sensor.ambr b/tests/components/intellifire/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..34d5836a025 --- /dev/null +++ b/tests/components/intellifire/snapshots/test_binary_sensor.ambr @@ -0,0 +1,717 @@ +# serializer version: 1 +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_accessory_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Accessory error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'accessory_error', + 'unique_id': 'error_accessory_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Accessory error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_accessory_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_disabled_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Disabled error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disabled_error', + 'unique_id': 'error_disabled_mock_serial', + 'unit_of_measurement': None, + }) +# --- 
+# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Disabled error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_disabled_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ECM offline error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ecm_offline_error', + 'unique_id': 'error_ecm_offline_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire ECM offline error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_fan_delay_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fan delay error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_delay_error', + 'unique_id': 'error_fan_delay_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Fan delay error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_fan_delay_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_fan_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fan error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_error', + 'unique_id': 'error_fan_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Fan error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_fan_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_flame', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flame', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame', + 'unique_id': 'on_off_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Flame', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_flame', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_flame_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flame Error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame_error', + 'unique_id': 'error_flame_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Flame Error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_flame_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 
'binary_sensor.intellifire_lights_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lights error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lights_error', + 'unique_id': 'error_lights_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Lights error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_lights_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_maintenance_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maintenance error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'maintenance_error', + 'unique_id': 'error_maintenance_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Maintenance error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_maintenance_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_offline_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Offline error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'offline_error', + 'unique_id': 'error_offline_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Offline error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_offline_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: 
test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pilot flame error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pilot_flame_error', + 'unique_id': 'error_pilot_flame_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Pilot flame error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_pilot_light_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pilot light on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pilot_light_on', + 'unique_id': 'pilot_light_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Pilot light on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_pilot_light_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soft lock out error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'soft_lock_out_error', + 'unique_id': 'error_soft_lock_out_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Soft lock out error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_thermostat_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_on', + 'unique_id': 'thermostat_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Thermostat on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_thermostat_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_timer_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Timer on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_on', + 'unique_id': 'timer_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Timer on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_timer_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/intellifire/snapshots/test_climate.ambr b/tests/components/intellifire/snapshots/test_climate.ambr new file mode 100644 index 00000000000..36f719d2264 --- /dev/null +++ b/tests/components/intellifire/snapshots/test_climate.ambr @@ -0,0 +1,66 @@ +# serializer version: 1 +# name: test_all_sensor_entities[climate.intellifire_thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 37, + 'min_temp': 0, + 'target_temp_step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 
'climate.intellifire_thermostat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'climate_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[climate.intellifire_thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'current_temperature': 17.0, + 'friendly_name': 'IntelliFire Thermostat', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 37, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': 0.0, + }), + 'context': , + 'entity_id': 'climate.intellifire_thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/intellifire/snapshots/test_sensor.ambr b/tests/components/intellifire/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..d5e59e3f00f --- /dev/null +++ b/tests/components/intellifire/snapshots/test_sensor.ambr @@ -0,0 +1,587 @@ +# serializer version: 1 +# name: test_all_sensor_entities[sensor.intellifire_connection_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_connection_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Connection quality', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_quality', + 'unique_id': 'connection_quality_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_connection_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Connection quality', + }), + 'context': , + 'entity_id': 'sensor.intellifire_connection_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '988451', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_downtime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_downtime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Downtime', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'downtime', + 'unique_id': 'downtime_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_downtime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 
'timestamp', + 'friendly_name': 'IntelliFire Downtime', + }), + 'context': , + 'entity_id': 'sensor.intellifire_downtime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_ecm_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'ECM latency', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ecm_latency', + 'unique_id': 'ecm_latency_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire ECM latency', + }), + 'context': , + 'entity_id': 'sensor.intellifire_ecm_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fan Speed', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_speed', + 'unique_id': 'fan_speed_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Fan Speed', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_flame_height-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_flame_height', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flame height', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame_height', + 'unique_id': 'flame_height_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_flame_height-state] + StateSnapshot({ + 'attributes': 
ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Flame height', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_flame_height', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ip_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_address', + 'unique_id': 'ipv4_address_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ip_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire IP address', + }), + 'context': , + 'entity_id': 'sensor.intellifire_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '192.168.2.108', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_local_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_local_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Local connectivity', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'local_connectivity', + 'unique_id': 'local_connectivity_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_local_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Local connectivity', + }), + 'context': , + 'entity_id': 'sensor.intellifire_local_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'True', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'local_connectivity', + 'unique_id': 'local_connectivity_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_all_sensor_entities[sensor.intellifire_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire None', + }), + 'context': , + 'entity_id': 'sensor.intellifire_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'True', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_target_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_target_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Target temperature', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'target_temp', + 'unique_id': 'target_temp_mock_serial', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_target_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'temperature', + 'friendly_name': 'IntelliFire Target temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_target_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'temperature_mock_serial', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'temperature', + 'friendly_name': 'IntelliFire Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_timer_end-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_timer_end', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Timer end', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_end_timestamp', + 'unique_id': 'timer_end_timestamp_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_timer_end-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Timer end', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_timer_end', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'uptime_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Uptime', + }), + 'context': , + 'entity_id': 'sensor.intellifire_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T11:58:03+00:00', + }) +# --- diff --git a/tests/components/intellifire/test_binary_sensor.py b/tests/components/intellifire/test_binary_sensor.py new file mode 100644 index 00000000000..a40f92b84d5 --- /dev/null +++ b/tests/components/intellifire/test_binary_sensor.py @@ -0,0 +1,35 @@ +"""Test IntelliFire Binary Sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_binary_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], +) -> None: + """Test all entities.""" + + with ( + patch( + "homeassistant.components.intellifire.PLATFORMS", [Platform.BINARY_SENSOR] + ), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intellifire/test_climate.py b/tests/components/intellifire/test_climate.py new file mode 100644 index 00000000000..da1b2864791 --- /dev/null +++ b/tests/components/intellifire/test_climate.py @@ -0,0 +1,34 @@ +"""Test climate.""" + +from unittest.mock import patch + +from freezegun import freeze_time +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@freeze_time("2021-01-01T12:00:00Z") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_fp, +) -> None: + """Test all entities.""" + with ( + patch("homeassistant.components.intellifire.PLATFORMS", [Platform.CLIMATE]), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intellifire/test_config_flow.py b/tests/components/intellifire/test_config_flow.py index ba4e2f039a3..f1465c4dcd4 100644 --- a/tests/components/intellifire/test_config_flow.py +++ b/tests/components/intellifire/test_config_flow.py @@ -1,323 +1,168 @@ """Test the IntelliFire config flow.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock -from intellifire4py.exceptions import LoginException +from intellifire4py.exceptions import LoginError from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.intellifire.config_flow import MANUAL_ENTRY_STRING -from homeassistant.components.intellifire.const import CONF_USER_ID, DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.intellifire.const import CONF_SERIAL, DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import mock_api_connection_error - from tests.common import MockConfigEntry -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_no_discovery( +async def test_standard_config_with_single_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_single_fp, ) -> None: - """Test we should 
get the manual discovery form - because no discovered fireplaces.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=[], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM + """Test standard flow with a user who has only a single fireplace.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM assert result["errors"] == {} - assert result["step_id"] == "manual_device_entry" + assert result["step_id"] == "cloud_api" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "api_config" - - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Fireplace 12345" - assert result3["data"] == { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test", - CONF_PASSWORD: "AROONIE", - CONF_API_KEY: "key", - CONF_USER_ID: "intellifire", + # For a single fireplace we just create it + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", } - assert len(mock_setup_entry.mock_calls) == 1 -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(side_effect=mock_api_connection_error()), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_single_discovery( +async def test_standard_config_with_pre_configured_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_config_entry_current, + mock_apis_single_fp, ) -> None: - """Test single fireplace UDP discovery.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) + """What if we try to configure an already configured fireplace.""" + # Configure an existing entry + mock_config_entry_current.add_to_hass(hass) - await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: "192.168.1.69"} + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" + + result = await 
hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "iftapi_connect"} + + # For a single fireplace we just create it + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "no_available_devices" -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(side_effect=LoginException), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_single_discovery_loign_error( +async def test_standard_config_with_single_fireplace_and_bad_credentials( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_single_fp, ) -> None: - """Test single fireplace UDP discovery.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: "192.168.1.69"} - ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, - ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "api_error"} - - -async def test_manual_entry( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple Fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["step_id"] == "pick_device" - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: MANUAL_ENTRY_STRING} - ) - - await hass.async_block_till_done() - assert result2["step_id"] == "manual_device_entry" - - -async def test_multi_discovery( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["step_id"] == "pick_device" - await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} - ) - await hass.async_block_till_done() - assert result["step_id"] == "pick_device" - - -async def test_multi_discovery_cannot_connect( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple fireplace discovery - involving a pick_device step.""" - with patch( - 
"homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - mock_intellifire_config_flow.poll.side_effect = ConnectionError - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pick_device" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_form_cannot_connect_manual_entry( - hass: HomeAssistant, - mock_intellifire_config_flow: MagicMock, - mock_fireplace_finder_single: AsyncMock, -) -> None: - """Test we handle cannot connect error.""" - mock_intellifire_config_flow.poll.side_effect = ConnectionError + """Test bad credentials on a login.""" + mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_single_fp + # Set login error + mock_cloud_interface.login_with_credentials.side_effect = LoginError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "manual_device_entry" + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} + # Erase the error + mock_cloud_interface.login_with_credentials.side_effect = None + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "api_error"} + assert result["step_id"] == "cloud_api" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + ) + # For a single fireplace we just create it + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } -async def test_picker_already_discovered( +async def test_standard_config_with_multiple_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_multifp, ) -> None: - """Test single fireplace UDP discovery.""" - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "host": "192.168.1.3", - }, - title="Fireplace", - unique_id=44444, - ) - entry.add_to_hass(hass) - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.3"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await hass.async_block_till_done() - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: 
"192.168.1.4", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert len(mock_setup_entry.mock_calls) == 0 - - -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_reauth_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test the reauth flow.""" - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "host": "192.168.1.3", - }, - title="Fireplace 1234", - version=1, - unique_id="4444", - ) - entry.add_to_hass(hass) - + """Test multi-fireplace user who must be very rich.""" result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": "reauth", - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, + DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "api_config" - - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert entry.data[CONF_PASSWORD] == "AROONIE" - assert entry.data[CONF_USERNAME] == "test" + # When we have multiple fireplaces we get to pick a serial + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "pick_cloud_device" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_SERIAL: "4GC295860E5837G40D9974B7FD459234"}, + ) + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.109", + "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", + "serial": "4GC295860E5837G40D9974B7FD459234", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } async def test_dhcp_discovery_intellifire_device( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_multifp, ) -> None: """Test successful DHCP Discovery.""" + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -327,26 +172,26 @@ async def test_dhcp_discovery_intellifire_device( hostname="zentrios-Test", ), ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "dhcp_confirm" - result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "dhcp_confirm" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], user_input={} + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - assert result3["title"] == "Fireplace 12345" - assert result3["data"] == {"host": "1.1.1.1"} + assert result["type"] == 
FlowResultType.CREATE_ENTRY async def test_dhcp_discovery_non_intellifire_device( hass: HomeAssistant, - mock_intellifire_config_flow: MagicMock, mock_setup_entry: AsyncMock, + mock_apis_multifp, ) -> None: - """Test failed DHCP Discovery.""" + """Test DHCP discovery of a non-IntelliFire device.""" - mock_intellifire_config_flow.poll.side_effect = ConnectionError + # Patch poll with an exception + mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_multifp + mock_local_interface.poll.side_effect = ConnectionError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -357,6 +202,28 @@ async def test_dhcp_discovery_non_intellifire_device( hostname="zentrios-Evil", ), ) - - assert result["type"] is FlowResultType.ABORT + assert result["type"] == FlowResultType.ABORT assert result["reason"] == "not_intellifire_device" + # Test is finished - the DHCP scanner detected a hostname that "might" be an IntelliFire device, but it was not. + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_config_entry_current: MockConfigEntry, + mock_apis_single_fp, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth.""" + + mock_config_entry_current.add_to_hass(hass) + result = await mock_config_entry_current.start_reauth_flow(hass) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/intellifire/test_init.py b/tests/components/intellifire/test_init.py new file mode 100644 index 00000000000..6d08fda26c3 --- /dev/null +++ b/tests/components/intellifire/test_init.py @@ -0,0 +1,111 @@ +"""Test the IntelliFire integration setup and migration.""" + +from unittest.mock import AsyncMock, patch + +from homeassistant.components.intellifire import CONF_USER_ID +from homeassistant.components.intellifire.const import ( + API_MODE_CLOUD, + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_WEB_CLIENT_ID, + DOMAIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_minor_migration( + hass: HomeAssistant, mock_config_entry_old, mock_apis_single_fp +) -> None: + """With the new library we are going to end up rewriting the config entries.""" + mock_config_entry_old.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_old.entry_id) + + assert mock_config_entry_old.data == { + "ip_address": "192.168.2.108", + "host": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } + + +async def test_minor_migration_error(hass: HomeAssistant, mock_apis_single_fp) -> None: + """Test a minor version migration that fails.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=1, + title="Fireplace of testing", + data={ + CONF_HOST: "11.168.2.218", + 
CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR + + +async def test_init_with_no_username(hass: HomeAssistant, mock_apis_single_fp) -> None: + """Test the case where we completely fail to initialize.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=2, + data={ + CONF_IP_ADDRESS: "192.168.2.108", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", + CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", + CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, + unique_id="3FB284769E4736F30C8973A7ED358123", + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_connectivity_bad( + hass: HomeAssistant, + mock_config_entry_current, + mock_apis_single_fp, +) -> None: + """Test a timeout error on the setup flow.""" + + with patch( + "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", + new_callable=AsyncMock, + side_effect=TimeoutError, + ): + mock_config_entry_current.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_current.entry_id) + + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/intellifire/test_sensor.py b/tests/components/intellifire/test_sensor.py new file mode 100644 index 00000000000..96e344d77fc --- /dev/null +++ b/tests/components/intellifire/test_sensor.py @@ -0,0 +1,35 @@ +"""Test IntelliFire Binary Sensors.""" + +from unittest.mock import AsyncMock, patch + +from freezegun import freeze_time +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@freeze_time("2021-01-01T12:00:00Z") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], +) -> None: + """Test all entities.""" + + with ( + patch("homeassistant.components.intellifire.PLATFORMS", [Platform.SENSOR]), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) From 2f7a39677806302e3427535aac740b26df783e27 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Sun, 1 Sep 2024 13:28:08 +0200 Subject: [PATCH 1303/1635] Split opentherm_gw entities between different devices (#124869) * * Add migration from single device to multiple devices, removing all old entities * Create new devices for Boiler and Thermostat * Add classes for new entities based on the new devices * Split binary_sensor entities into devices * Split sensor entities into different devices * Move climate entity to thermostat device * Fix climate entity away mode * Fix translation placeholders * Allow sensor values with capital letters * * Add EntityCategory * Update and add device_classes * Fix translation keys * Fix climate entity category * Update tests * Handle `available` property in `entity.py` * Improve GPIO state binary_sensor translations * Fix: Updates are already subscribed to in the base entity * Remove entity_id generation from sensor and binary_sensor entities * * Use _attr_name on climate class instead of through entity_description * Add type hints * Rewrite to derive entities for all OpenTherm devices from a single base class * Improve type annotations * Use OpenThermDataSource to access status dict * Move entity_category from entity_description to _attr_entity_category * Move entity descriptions with the same translation_key closer together * Update tests * Add device migration test * * Add missing sensors and binary_sensors back * Improve migration, do not delete old entities from registry * Add comments for migration period * Use single lists for entity descriptions * Avoid changing sensor values, remove translations * * Import only required class from pyotgw * Update tests --- .../components/opentherm_gw/__init__.py | 83 +- .../components/opentherm_gw/binary_sensor.py | 583 +++++--- .../components/opentherm_gw/climate.py | 213 +-- .../components/opentherm_gw/config_flow.py | 3 +- .../components/opentherm_gw/const.py | 43 + .../components/opentherm_gw/entity.py | 38 +- .../components/opentherm_gw/sensor.py | 1317 ++++++++++------- .../components/opentherm_gw/strings.json | 295 ++++ tests/components/opentherm_gw/test_init.py | 82 +- 9 files changed, 1702 insertions(+), 955 deletions(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index 30410f73c2d..f0f5c709d0c 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -4,7 +4,7 @@ import asyncio from datetime import date, datetime import logging -import pyotgw +from pyotgw import OpenThermGateway import pyotgw.vars as gw_vars from serial import SerialException import voluptuous as vol @@ -59,6 +59,8 @@ from .const import ( SERVICE_SET_MAX_MOD, SERVICE_SET_OAT, SERVICE_SET_SB_TEMP, + 
OpenThermDataSource, + OpenThermDeviceIdentifier, ) _LOGGER = logging.getLogger(__name__) @@ -113,6 +115,23 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b del migrate_options[CONF_PRECISION] hass.config_entries.async_update_entry(config_entry, options=migrate_options) + # Migration can be removed in 2025.4.0 + dev_reg = dr.async_get(hass) + if ( + migrate_device := dev_reg.async_get_device( + {(DOMAIN, config_entry.data[CONF_ID])} + ) + ) is not None: + dev_reg.async_update_device( + migrate_device.id, + new_identifiers={ + ( + DOMAIN, + f"{config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}", + ) + }, + ) + config_entry.add_update_listener(options_updated) try: @@ -427,10 +446,9 @@ class OpenThermGatewayHub: self.name = config_entry.data[CONF_NAME] self.climate_config = config_entry.options self.config_entry_id = config_entry.entry_id - self.status = gw_vars.DEFAULT_STATUS self.update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_update" self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_options_update" - self.gateway = pyotgw.OpenThermGateway() + self.gateway = OpenThermGateway() self.gw_version = None async def cleanup(self, event=None) -> None: @@ -441,11 +459,11 @@ class OpenThermGatewayHub: async def connect_and_subscribe(self) -> None: """Connect to serial device and subscribe report handler.""" - self.status = await self.gateway.connect(self.device_path) - if not self.status: + status = await self.gateway.connect(self.device_path) + if not status: await self.cleanup() raise ConnectionError - version_string = self.status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT) + version_string = status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_ABOUT) self.gw_version = version_string[18:] if version_string else None _LOGGER.debug( "Connected to OpenTherm Gateway %s at %s", self.gw_version, self.device_path @@ -453,22 +471,69 @@ class OpenThermGatewayHub: dev_reg = dr.async_get(self.hass) gw_dev = dev_reg.async_get_or_create( config_entry_id=self.config_entry_id, - identifiers={(DOMAIN, self.hub_id)}, - name=self.name, + identifiers={ + (DOMAIN, f"{self.hub_id}-{OpenThermDeviceIdentifier.GATEWAY}") + }, manufacturer="Schelte Bron", model="OpenTherm Gateway", + translation_key="gateway_device", sw_version=self.gw_version, ) if gw_dev.sw_version != self.gw_version: dev_reg.async_update_device(gw_dev.id, sw_version=self.gw_version) + + boiler_device = dev_reg.async_get_or_create( + config_entry_id=self.config_entry_id, + identifiers={(DOMAIN, f"{self.hub_id}-{OpenThermDeviceIdentifier.BOILER}")}, + translation_key="boiler_device", + ) + thermostat_device = dev_reg.async_get_or_create( + config_entry_id=self.config_entry_id, + identifiers={ + (DOMAIN, f"{self.hub_id}-{OpenThermDeviceIdentifier.THERMOSTAT}") + }, + translation_key="thermostat_device", + ) + self.hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, self.cleanup) async def handle_report(status): """Handle reports from the OpenTherm Gateway.""" _LOGGER.debug("Received report: %s", status) - self.status = status async_dispatcher_send(self.hass, self.update_signal, status) + dev_reg.async_update_device( + boiler_device.id, + manufacturer=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_MEMBERID + ), + model_id=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_PRODUCT_TYPE + ), + hw_version=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_PRODUCT_VERSION + ), + sw_version=status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_OT_VERSION + ), + ) + 
+ dev_reg.async_update_device( + thermostat_device.id, + manufacturer=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_MEMBERID + ), + model_id=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_PRODUCT_TYPE + ), + hw_version=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_PRODUCT_VERSION + ), + sw_version=status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_MASTER_OT_VERSION + ), + ) + self.gateway.subscribe(handle_report) @property diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index f978a2695d7..00885a18088 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -5,281 +5,387 @@ from dataclasses import dataclass from pyotgw import vars as gw_vars from homeassistant.components.binary_sensor import ( - ENTITY_ID_FORMAT, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_ID +from homeassistant.const import CONF_ID, EntityCategory from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import OpenThermGatewayHub -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW +from .const import ( + BOILER_DEVICE_DESCRIPTION, + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + THERMOSTAT_DEVICE_DESCRIPTION, + OpenThermDataSource, +) from .entity import OpenThermEntity, OpenThermEntityDescription @dataclass(frozen=True, kw_only=True) class OpenThermBinarySensorEntityDescription( - BinarySensorEntityDescription, OpenThermEntityDescription + OpenThermEntityDescription, BinarySensorEntityDescription ): """Describes opentherm_gw binary sensor entity.""" -BINARY_SENSOR_INFO: tuple[ - tuple[list[str], OpenThermBinarySensorEntityDescription], ... -] = ( - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_CH_ENABLED, - friendly_name_format="Thermostat Central Heating {}", - ), +BINARY_SENSOR_DESCRIPTIONS: tuple[OpenThermBinarySensorEntityDescription, ...] 
= ( + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FAULT_IND, + translation_key="fault_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_DHW_ENABLED, - friendly_name_format="Thermostat Hot Water {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_COOLING_ENABLED, - friendly_name_format="Thermostat Cooling {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_OTC_ENABLED, - friendly_name_format="Thermostat Outside Temperature Correction {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_ACTIVE, + translation_key="hot_water", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_MASTER_CH2_ENABLED, - friendly_name_format="Thermostat Central Heating 2 {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FLAME_ON, + translation_key="flame", + device_class=BinarySensorDeviceClass.HEAT, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_FAULT_IND, - friendly_name_format="Boiler Fault {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, + translation_key="cooling", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH_ACTIVE, - friendly_name_format="Boiler Central Heating {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DIAG_IND, + translation_key="diagnostic_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_ACTIVE, - friendly_name_format="Boiler Hot Water {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_PRESENT, + translation_key="supports_hot_water", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_FLAME_ON, - friendly_name_format="Boiler Flame {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CONTROL_TYPE, + translation_key="control_type", + device_description=BOILER_DEVICE_DESCRIPTION, 
), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, - friendly_name_format="Boiler Cooling {}", - device_class=BinarySensorDeviceClass.COLD, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, + translation_key="supports_cooling", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH2_ACTIVE, - friendly_name_format="Boiler Central Heating 2 {}", - device_class=BinarySensorDeviceClass.HEAT, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_CONFIG, + translation_key="hot_water_config", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DIAG_IND, - friendly_name_format="Boiler Diagnostics {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, + translation_key="supports_pump_control", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_PRESENT, - friendly_name_format="Boiler Hot Water Present {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_PRESENT, + translation_key="supports_ch_2", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CONTROL_TYPE, - friendly_name_format="Boiler Control Type {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_SERVICE_REQ, + translation_key="service_required", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, - friendly_name_format="Boiler Cooling Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_REMOTE_RESET, + translation_key="supports_remote_reset", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_CONFIG, - friendly_name_format="Boiler Hot Water Configuration {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, + translation_key="low_water_pressure", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, - friendly_name_format="Boiler Pump Commands Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_GAS_FAULT, + translation_key="gas_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH2_PRESENT, - friendly_name_format="Boiler Central Heating 2 Present {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, + translation_key="air_pressure_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - 
[gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_SERVICE_REQ, - friendly_name_format="Boiler Service Required {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, + translation_key="water_overtemperature", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_REMOTE_RESET, - friendly_name_format="Boiler Remote Reset Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, + translation_key="supports_central_heating_setpoint_transfer", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, - friendly_name_format="Boiler Low Water Pressure {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_MAX_CH, + translation_key="supports_central_heating_setpoint_writing", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_GAS_FAULT, - friendly_name_format="Boiler Gas Fault {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_DHW, + translation_key="supports_hot_water_setpoint_transfer", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, - friendly_name_format="Boiler Air Pressure Fault {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_DHW, + translation_key="supports_hot_water_setpoint_writing", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, - friendly_name_format="Boiler Water Overtemperature {}", - device_class=BinarySensorDeviceClass.PROBLEM, - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_A_STATE, + translation_key="gpio_state_n", + translation_placeholders={"gpio_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_TRANSFER_DHW, - friendly_name_format="Remote Hot Water Setpoint Transfer Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_B_STATE, + translation_key="gpio_state_n", + translation_placeholders={"gpio_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, - friendly_name_format="Remote Maximum Central Heating Setpoint Write Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_IGNORE_TRANSITIONS, + translation_key="ignore_transitions", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_RW_DHW, - friendly_name_format="Remote Hot Water Setpoint Write Support {}", - ), + 
OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_OVRD_HB, + translation_key="override_high_byte", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_REMOTE_RW_MAX_CH, - friendly_name_format="Remote Central Heating Setpoint Write Support {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_ROVRD_MAN_PRIO, - friendly_name_format="Remote Override Manual Change Priority {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH2_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermBinarySensorEntityDescription( - key=gw_vars.DATA_ROVRD_AUTO_PRIO, - friendly_name_format="Remote Override Program Change Priority {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_DHW_ENABLED, + translation_key="hot_water", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_GPIO_A_STATE, - friendly_name_format="Gateway GPIO A {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_COOLING_ENABLED, + translation_key="cooling", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_GPIO_B_STATE, - friendly_name_format="Gateway GPIO B {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_OTC_ENABLED, + translation_key="outside_temp_correction", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_IGNORE_TRANSITIONS, - friendly_name_format="Gateway Ignore Transitions {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_MAN_PRIO, + translation_key="override_manual_change_prio", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermBinarySensorEntityDescription( - key=gw_vars.OTGW_OVRD_HB, - friendly_name_format="Gateway Override High Byte {}", - ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_AUTO_PRIO, + translation_key="override_program_change_prio", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FAULT_IND, + translation_key="fault_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_ACTIVE, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_ACTIVE, + 
translation_key="hot_water", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FLAME_ON, + translation_key="flame", + device_class=BinarySensorDeviceClass.HEAT, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, + translation_key="cooling", + device_class=BinarySensorDeviceClass.RUNNING, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DIAG_IND, + translation_key="diagnostic_indication", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_PRESENT, + translation_key="supports_hot_water", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CONTROL_TYPE, + translation_key="control_type", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, + translation_key="supports_cooling", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_CONFIG, + translation_key="hot_water_config", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, + translation_key="supports_pump_control", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_PRESENT, + translation_key="supports_ch_2", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_SERVICE_REQ, + translation_key="service_required", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_REMOTE_RESET, + translation_key="supports_remote_reset", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, + translation_key="low_water_pressure", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_GAS_FAULT, + translation_key="gas_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, + translation_key="air_pressure_fault", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, + translation_key="water_overtemperature", + device_class=BinarySensorDeviceClass.PROBLEM, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, + translation_key="supports_central_heating_setpoint_transfer", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_MAX_CH, + translation_key="supports_central_heating_setpoint_writing", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + 
), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_DHW, + translation_key="supports_hot_water_setpoint_transfer", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_DHW, + translation_key="supports_hot_water_setpoint_writing", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "1"}, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH2_ENABLED, + translation_key="central_heating_n", + translation_placeholders={"circuit_number": "2"}, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_DHW_ENABLED, + translation_key="hot_water", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_COOLING_ENABLED, + translation_key="cooling", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_OTC_ENABLED, + translation_key="outside_temp_correction", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_MAN_PRIO, + translation_key="override_manual_change_prio", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_AUTO_PRIO, + translation_key="override_program_change_prio", + device_description=BOILER_DEVICE_DESCRIPTION, ), ) @@ -293,35 +399,22 @@ async def async_setup_entry( gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( - OpenThermBinarySensor(gw_hub, source, description) - for sources, description in BINARY_SENSOR_INFO - for source in sources + OpenThermBinarySensor(gw_hub, description) + for description in BINARY_SENSOR_DESCRIPTIONS ) class OpenThermBinarySensor(OpenThermEntity, BinarySensorEntity): """Represent an OpenTherm Gateway binary sensor.""" + _attr_entity_category = EntityCategory.DIAGNOSTIC entity_description: OpenThermBinarySensorEntityDescription - def __init__( - self, - gw_hub: OpenThermGatewayHub, - source: str, - description: OpenThermBinarySensorEntityDescription, - ) -> None: - """Initialize the binary sensor.""" - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, - f"{description.key}_{source}_{gw_hub.hub_id}", - hass=gw_hub.hass, - ) - super().__init__(gw_hub, source, description) - @callback - def receive_report(self, status: dict[str, dict]) -> None: + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" - self._attr_available = self._gateway.connected - state = status[self._source].get(self.entity_description.key) + state = status[self.entity_description.device_description.data_source].get( + self.entity_description.key + ) self._attr_is_on = None if state is None else bool(state) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index bf295fb1fb7..795a508be12 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -2,50 +2,52 @@ from __future__ import annotations +from dataclasses import dataclass import logging +from 
types import MappingProxyType from typing import Any from pyotgw import vars as gw_vars from homeassistant.components.climate import ( - ENTITY_ID_FORMAT, PRESET_AWAY, PRESET_NONE, ClimateEntity, + ClimateEntityDescription, ClimateEntityFeature, HVACAction, HVACMode, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_TEMPERATURE, - CONF_ID, - PRECISION_HALVES, - PRECISION_TENTHS, - PRECISION_WHOLE, - UnitOfTemperature, -) +from homeassistant.const import ATTR_TEMPERATURE, CONF_ID, UnitOfTemperature from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN +from . import OpenThermGatewayHub from .const import ( - CONF_FLOOR_TEMP, CONF_READ_PRECISION, CONF_SET_PRECISION, CONF_TEMPORARY_OVRD_MODE, DATA_GATEWAYS, DATA_OPENTHERM_GW, + THERMOSTAT_DEVICE_DESCRIPTION, + OpenThermDataSource, ) +from .entity import OpenThermEntity, OpenThermEntityDescription _LOGGER = logging.getLogger(__name__) DEFAULT_FLOOR_TEMP = False +@dataclass(frozen=True, kw_only=True) +class OpenThermClimateEntityDescription( + ClimateEntityDescription, OpenThermEntityDescription +): + """Describes an opentherm_gw climate entity.""" + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -56,6 +58,10 @@ async def async_setup_entry( ents.append( OpenThermClimate( hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]], + OpenThermClimateEntityDescription( + key="thermostat_entity", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), config_entry.options, ) ) @@ -63,98 +69,82 @@ async def async_setup_entry( async_add_entities(ents) -class OpenThermClimate(ClimateEntity): +class OpenThermClimate(OpenThermEntity, ClimateEntity): """Representation of a climate device.""" - _attr_should_poll = False _attr_supported_features = ( ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE ) _attr_temperature_unit = UnitOfTemperature.CELSIUS - _attr_available = False _attr_hvac_modes = [] + _attr_name = None _attr_preset_modes = [] _attr_min_temp = 1 _attr_max_temp = 30 - _hvac_mode = HVACMode.HEAT - _current_temperature: float | None = None - _new_target_temperature: float | None = None - _target_temperature: float | None = None + _attr_hvac_mode = HVACMode.HEAT _away_mode_a: int | None = None _away_mode_b: int | None = None _away_state_a = False _away_state_b = False - _current_operation: HVACAction | None = None _enable_turn_on_off_backwards_compatibility = False + _target_temperature: float | None = None + _new_target_temperature: float | None = None + entity_description: OpenThermClimateEntityDescription - def __init__(self, gw_hub, options): - """Initialize the device.""" - self._gateway = gw_hub - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, gw_hub.hub_id, hass=gw_hub.hass - ) - self.friendly_name = gw_hub.name - self._attr_name = self.friendly_name - self.floor_temp = options.get(CONF_FLOOR_TEMP, DEFAULT_FLOOR_TEMP) - self.temp_read_precision = options.get(CONF_READ_PRECISION) - self.temp_set_precision = options.get(CONF_SET_PRECISION) + def __init__( + self, + gw_hub: OpenThermGatewayHub, + description: OpenThermClimateEntityDescription, + options: MappingProxyType[str, Any], + ) -> None: + """Initialize the 
entity.""" + super().__init__(gw_hub, description) + if CONF_READ_PRECISION in options: + self._attr_precision = options[CONF_READ_PRECISION] + self._attr_target_temperature_step = options.get(CONF_SET_PRECISION) self.temporary_ovrd_mode = options.get(CONF_TEMPORARY_OVRD_MODE, True) - self._unsub_options = None - self._unsub_updates = None - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_hub.hub_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_hub.name, - sw_version=gw_hub.gw_version, - ) self._attr_unique_id = gw_hub.hub_id @callback def update_options(self, entry): """Update climate entity options.""" - self.floor_temp = entry.options[CONF_FLOOR_TEMP] - self.temp_read_precision = entry.options[CONF_READ_PRECISION] - self.temp_set_precision = entry.options[CONF_SET_PRECISION] + self._attr_precision = entry.options[CONF_READ_PRECISION] + self._attr_target_temperature_step = entry.options[CONF_SET_PRECISION] self.temporary_ovrd_mode = entry.options[CONF_TEMPORARY_OVRD_MODE] self.async_write_ha_state() async def async_added_to_hass(self) -> None: """Connect to the OpenTherm Gateway device.""" - _LOGGER.debug("Added OpenTherm Gateway climate device %s", self.friendly_name) - self._unsub_updates = async_dispatcher_connect( - self.hass, self._gateway.update_signal, self.receive_report + await super().async_added_to_hass() + self.async_on_remove( + async_dispatcher_connect( + self.hass, self._gateway.options_update_signal, self.update_options + ) ) - self._unsub_options = async_dispatcher_connect( - self.hass, self._gateway.options_update_signal, self.update_options - ) - - async def async_will_remove_from_hass(self) -> None: - """Unsubscribe from updates from the component.""" - _LOGGER.debug("Removing OpenTherm Gateway climate %s", self.friendly_name) - self._unsub_options() - self._unsub_updates() @callback - def receive_report(self, status): + def receive_report(self, status: dict[OpenThermDataSource, dict]): """Receive and handle a new report from the Gateway.""" - self._attr_available = self._gateway.connected - ch_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE) - flame_on = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON) - cooling_active = status[gw_vars.BOILER].get(gw_vars.DATA_SLAVE_COOLING_ACTIVE) + ch_active = status[OpenThermDataSource.BOILER].get(gw_vars.DATA_SLAVE_CH_ACTIVE) + flame_on = status[OpenThermDataSource.BOILER].get(gw_vars.DATA_SLAVE_FLAME_ON) + cooling_active = status[OpenThermDataSource.BOILER].get( + gw_vars.DATA_SLAVE_COOLING_ACTIVE + ) if ch_active and flame_on: - self._current_operation = HVACAction.HEATING - self._hvac_mode = HVACMode.HEAT + self._attr_hvac_action = HVACAction.HEATING + self._attr_hvac_mode = HVACMode.HEAT elif cooling_active: - self._current_operation = HVACAction.COOLING - self._hvac_mode = HVACMode.COOL + self._attr_hvac_action = HVACAction.COOLING + self._attr_hvac_mode = HVACMode.COOL else: - self._current_operation = HVACAction.IDLE + self._attr_hvac_action = HVACAction.IDLE - self._current_temperature = status[gw_vars.THERMOSTAT].get( + self._attr_current_temperature = status[OpenThermDataSource.THERMOSTAT].get( gw_vars.DATA_ROOM_TEMP ) - temp_upd = status[gw_vars.THERMOSTAT].get(gw_vars.DATA_ROOM_SETPOINT) + temp_upd = status[OpenThermDataSource.THERMOSTAT].get( + gw_vars.DATA_ROOM_SETPOINT + ) if self._target_temperature != temp_upd: self._new_target_temperature = None @@ -162,82 +152,35 @@ class OpenThermClimate(ClimateEntity): # GPIO mode 5: 0 == Away # GPIO mode 6: 1 
== Away - gpio_a_state = status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_A) - if gpio_a_state == 5: - self._away_mode_a = 0 - elif gpio_a_state == 6: - self._away_mode_a = 1 - else: - self._away_mode_a = None - gpio_b_state = status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_B) - if gpio_b_state == 5: - self._away_mode_b = 0 - elif gpio_b_state == 6: - self._away_mode_b = 1 - else: - self._away_mode_b = None - if self._away_mode_a is not None: - self._away_state_a = ( - status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_A_STATE) == self._away_mode_a + gpio_a_state = status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_A) + gpio_b_state = status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_B) + self._away_mode_a = gpio_a_state - 5 if gpio_a_state in (5, 6) else None + self._away_mode_b = gpio_b_state - 5 if gpio_b_state in (5, 6) else None + self._away_state_a = ( + ( + status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_A_STATE) + == self._away_mode_a ) - if self._away_mode_b is not None: - self._away_state_b = ( - status[gw_vars.OTGW].get(gw_vars.OTGW_GPIO_B_STATE) == self._away_mode_b + if self._away_mode_a is not None + else False + ) + self._away_state_b = ( + ( + status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_GPIO_B_STATE) + == self._away_mode_b ) + if self._away_mode_b is not None + else False + ) self.async_write_ha_state() @property - def precision(self): - """Return the precision of the system.""" - if self.temp_read_precision: - return self.temp_read_precision - if self.hass.config.units.temperature_unit == UnitOfTemperature.CELSIUS: - return PRECISION_HALVES - return PRECISION_WHOLE - - @property - def hvac_action(self) -> HVACAction | None: - """Return current HVAC operation.""" - return self._current_operation - - @property - def hvac_mode(self) -> HVACMode: - """Return current HVAC mode.""" - return self._hvac_mode - - def set_hvac_mode(self, hvac_mode: HVACMode) -> None: - """Set the HVAC mode.""" - _LOGGER.warning("Changing HVAC mode is not supported") - - @property - def current_temperature(self): - """Return the current temperature.""" - if self._current_temperature is None: - return None - if self.floor_temp is True: - if self.precision == PRECISION_HALVES: - return int(2 * self._current_temperature) / 2 - if self.precision == PRECISION_TENTHS: - return int(10 * self._current_temperature) / 10 - return int(self._current_temperature) - return self._current_temperature - - @property - def target_temperature(self): + def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" return self._new_target_temperature or self._target_temperature @property - def target_temperature_step(self): - """Return the supported step of target temperature.""" - if self.temp_set_precision: - return self.temp_set_precision - if self.hass.config.units.temperature_unit == UnitOfTemperature.CELSIUS: - return PRECISION_HALVES - return PRECISION_WHOLE - - @property - def preset_mode(self): + def preset_mode(self) -> str: """Return current preset mode.""" if self._away_state_a or self._away_state_b: return PRESET_AWAY diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index a5ac116ac11..3cf8a1c4594 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -34,6 +34,7 @@ from .const import ( CONF_SET_PRECISION, CONF_TEMPORARY_OVRD_MODE, CONNECTION_TIMEOUT, + OpenThermDataSource, ) @@ -74,7 +75,7 @@ class 
OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): await otgw.disconnect() if not status: raise ConnectionError - return status[gw_vars.OTGW].get(gw_vars.OTGW_ABOUT) + return status[OpenThermDataSource.GATEWAY].get(gw_vars.OTGW_ABOUT) try: async with asyncio.timeout(CONNECTION_TIMEOUT): diff --git a/homeassistant/components/opentherm_gw/const.py b/homeassistant/components/opentherm_gw/const.py index c1932c7b2bd..c842ff568ae 100644 --- a/homeassistant/components/opentherm_gw/const.py +++ b/homeassistant/components/opentherm_gw/const.py @@ -1,5 +1,10 @@ """Constants for the opentherm_gw integration.""" +from dataclasses import dataclass +from enum import StrEnum + +from pyotgw import vars as gw_vars + ATTR_GW_ID = "gateway_id" ATTR_LEVEL = "level" ATTR_DHW_OVRD = "dhw_override" @@ -33,3 +38,41 @@ SERVICE_SET_MAX_MOD = "set_max_modulation" SERVICE_SET_OAT = "set_outside_temperature" SERVICE_SET_SB_TEMP = "set_setback_temperature" SERVICE_SEND_TRANSP_CMD = "send_transparent_command" + + +class OpenThermDataSource(StrEnum): + """List valid OpenTherm data sources.""" + + BOILER = gw_vars.BOILER + GATEWAY = gw_vars.OTGW + THERMOSTAT = gw_vars.THERMOSTAT + + +class OpenThermDeviceIdentifier(StrEnum): + """List valid OpenTherm device identifiers.""" + + BOILER = "boiler" + GATEWAY = "gateway" + THERMOSTAT = "thermostat" + + +@dataclass(frozen=True, kw_only=True) +class OpenThermDeviceDescription: + """Describe OpenTherm device properties.""" + + data_source: OpenThermDataSource + device_identifier: OpenThermDeviceIdentifier + + +BOILER_DEVICE_DESCRIPTION = OpenThermDeviceDescription( + data_source=OpenThermDataSource.BOILER, + device_identifier=OpenThermDeviceIdentifier.BOILER, +) +GATEWAY_DEVICE_DESCRIPTION = OpenThermDeviceDescription( + data_source=OpenThermDataSource.GATEWAY, + device_identifier=OpenThermDeviceIdentifier.GATEWAY, +) +THERMOSTAT_DEVICE_DESCRIPTION = OpenThermDeviceDescription( + data_source=OpenThermDataSource.THERMOSTAT, + device_identifier=OpenThermDeviceIdentifier.THERMOSTAT, +) diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py index a1035b946c2..b7110fa9e1b 100644 --- a/homeassistant/components/opentherm_gw/entity.py +++ b/homeassistant/components/opentherm_gw/entity.py @@ -10,7 +10,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import Entity, EntityDescription from . 
import OpenThermGatewayHub -from .const import DOMAIN +from .const import DOMAIN, OpenThermDataSource, OpenThermDeviceDescription _LOGGER = logging.getLogger(__name__) @@ -24,53 +24,49 @@ TRANSLATE_SOURCE = { class OpenThermEntityDescription(EntityDescription): """Describe common opentherm_gw entity properties.""" - friendly_name_format: str + device_description: OpenThermDeviceDescription class OpenThermEntity(Entity): - """Represent an OpenTherm Gateway entity.""" + """Represent an OpenTherm entity.""" + _attr_has_entity_name = True _attr_should_poll = False - _attr_entity_registry_enabled_default = False - _attr_available = False entity_description: OpenThermEntityDescription def __init__( self, gw_hub: OpenThermGatewayHub, - source: str, description: OpenThermEntityDescription, ) -> None: """Initialize the entity.""" self.entity_description = description self._gateway = gw_hub - self._source = source - friendly_name_format = ( - f"{description.friendly_name_format} ({TRANSLATE_SOURCE[source]})" - if TRANSLATE_SOURCE[source] is not None - else description.friendly_name_format - ) - self._attr_name = friendly_name_format.format(gw_hub.name) - self._attr_unique_id = f"{gw_hub.hub_id}-{source}-{description.key}" + self._attr_unique_id = f"{gw_hub.hub_id}-{description.device_description.device_identifier}-{description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_hub.hub_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_hub.name, - sw_version=gw_hub.gw_version, + identifiers={ + ( + DOMAIN, + f"{gw_hub.hub_id}-{description.device_description.device_identifier}", + ) + }, ) async def async_added_to_hass(self) -> None: """Subscribe to updates from the component.""" - _LOGGER.debug("Added OpenTherm Gateway entity %s", self._attr_name) self.async_on_remove( async_dispatcher_connect( self.hass, self._gateway.update_signal, self.receive_report ) ) + @property + def available(self) -> bool: + """Return connection status of the hub to indicate availability.""" + return self._gateway.connected + @callback - def receive_report(self, status: dict[str, dict]) -> None: + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" # Must be implemented at the platform level. raise NotImplementedError diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index fb30b2ce35c..eeadd5c4ee1 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -5,7 +5,6 @@ from dataclasses import dataclass import pyotgw.vars as gw_vars from homeassistant.components.sensor import ( - ENTITY_ID_FORMAT, SensorDeviceClass, SensorEntity, SensorEntityDescription, @@ -15,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_ID, PERCENTAGE, + EntityCategory, UnitOfPower, UnitOfPressure, UnitOfTemperature, @@ -22,11 +22,16 @@ from homeassistant.const import ( UnitOfVolumeFlowRate, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . 
import OpenThermGatewayHub -from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW +from .const import ( + BOILER_DEVICE_DESCRIPTION, + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + THERMOSTAT_DEVICE_DESCRIPTION, + OpenThermDataSource, +) from .entity import OpenThermEntity, OpenThermEntityDescription SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 @@ -36,584 +41,833 @@ SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 class OpenThermSensorEntityDescription( SensorEntityDescription, OpenThermEntityDescription ): - """Describes opentherm_gw sensor entity.""" + """Describes an opentherm_gw sensor entity.""" -SENSOR_INFO: tuple[tuple[list[str], OpenThermSensorEntityDescription], ...] = ( - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CONTROL_SETPOINT, - friendly_name_format="Control Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), +SENSOR_DESCRIPTIONS: tuple[OpenThermSensorEntityDescription, ...] = ( + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_MEMBERID, - friendly_name_format="Thermostat Member ID {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT_2, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MEMBERID, - friendly_name_format="Boiler Member ID {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MEMBERID, + translation_key="manufacturer_id", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_OEM_FAULT, - friendly_name_format="Boiler OEM Fault Code {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OEM_FAULT, + translation_key="oem_fault_code", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_COOLING_CONTROL, - friendly_name_format="Cooling Control Signal {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_COOLING_CONTROL, + translation_key="cooling_control", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - 
OpenThermSensorEntityDescription( - key=gw_vars.DATA_CONTROL_SETPOINT_2, - friendly_name_format="Control Setpoint 2 {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, + translation_key="max_relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_SETPOINT_OVRD, - friendly_name_format="Room Setpoint Override {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_CAPACITY, + translation_key="max_capacity", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, - friendly_name_format="Boiler Maximum Relative Modulation {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, + translation_key="min_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MAX_CAPACITY, - friendly_name_format="Boiler Maximum Capacity {}", - state_class=SensorStateClass.MEASUREMENT, - device_class=SensorDeviceClass.POWER, - native_unit_of_measurement=UnitOfPower.KILO_WATT, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_REL_MOD_LEVEL, + translation_key="relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, - friendly_name_format="Boiler Minimum Modulation Level {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_PRESS, + translation_key="central_heating_pressure", + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_SETPOINT, - friendly_name_format="Room Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - 
suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_FLOW_RATE, + translation_key="hot_water_flow_rate", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_REL_MOD_LEVEL, - friendly_name_format="Relative Modulation Level {}", - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=PERCENTAGE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_WATER_PRESS, - friendly_name_format="Central Heating Water Pressure {}", - device_class=SensorDeviceClass.PRESSURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfPressure.BAR, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP_2, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_FLOW_RATE, - friendly_name_format="Hot Water Flow Rate {}", - device_class=SensorDeviceClass.VOLUME_FLOW_RATE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_SETPOINT_2, - friendly_name_format="Room Setpoint 2 {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP_2, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + 
native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_ROOM_TEMP, - friendly_name_format="Room Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_RETURN_WATER_TEMP, + translation_key="return_water_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_WATER_TEMP, - friendly_name_format="Central Heating Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_STORAGE_TEMP, + translation_key="solar_storage_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_TEMP, - friendly_name_format="Hot Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_COLL_TEMP, + translation_key="solar_collector_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_OUTSIDE_TEMP, - friendly_name_format="Outside Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_EXHAUST_TEMP, + translation_key="exhaust_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_RETURN_WATER_TEMP, - friendly_name_format="Return Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - 
suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, + translation_key="max_hot_water_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SOLAR_STORAGE_TEMP, - friendly_name_format="Solar Storage Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, + translation_key="max_hot_water_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SOLAR_COLL_TEMP, - friendly_name_format="Solar Collector Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MAX_SETP, + translation_key="max_central_heating_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_WATER_TEMP_2, - friendly_name_format="Central Heating 2 Water Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MIN_SETP, + translation_key="max_central_heating_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_TEMP_2, - friendly_name_format="Hot Water 2 Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_SETPOINT, + translation_key="hot_water_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_EXHAUST_TEMP, - friendly_name_format="Exhaust Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - 
state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MAX_CH_SETPOINT, + translation_key="max_central_heating_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, - friendly_name_format="Hot Water Maximum Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OEM_DIAG, + translation_key="oem_diagnostic_code", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, - friendly_name_format="Hot Water Minimum Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_STARTS, + translation_key="total_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH_MAX_SETP, - friendly_name_format="Boiler Maximum Central Heating Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_STARTS, + translation_key="central_heating_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_CH_MIN_SETP, - friendly_name_format="Boiler Minimum Central Heating Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_STARTS, + translation_key="hot_water_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_SETPOINT, - friendly_name_format="Hot Water Setpoint {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_STARTS, + translation_key="hot_water_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MAX_CH_SETPOINT, - friendly_name_format="Maximum Central Heating Setpoint {}", - 
device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_HOURS, + translation_key="total_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_OEM_DIAG, - friendly_name_format="OEM Diagnostic Code {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_HOURS, + translation_key="central_heating_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_TOTAL_BURNER_STARTS, - friendly_name_format="Total Burner Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_HOURS, + translation_key="hot_water_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_PUMP_STARTS, - friendly_name_format="Central Heating Pump Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_HOURS, + translation_key="hot_water_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_PUMP_STARTS, - friendly_name_format="Hot Water Pump Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_BURNER_STARTS, - friendly_name_format="Hot Water Burner Starts {}", - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement="starts", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, + translation_key="product_type", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_TOTAL_BURNER_HOURS, - friendly_name_format="Total Burner Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, + translation_key="product_version", + device_description=BOILER_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_CH_PUMP_HOURS, - 
friendly_name_format="Central Heating Pump Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_MODE, + translation_key="operating_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_PUMP_HOURS, - friendly_name_format="Hot Water Pump Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_DHW_OVRD, + translation_key="hot_water_override_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_DHW_BURNER_HOURS, - friendly_name_format="Hot Water Burner Hours {}", - device_class=SensorDeviceClass.DURATION, - state_class=SensorStateClass.TOTAL, - native_unit_of_measurement=UnitOfTime.HOURS, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_ABOUT, + translation_key="firmware_version", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_OT_VERSION, - friendly_name_format="Thermostat OpenTherm Version {}", - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_BUILD, + translation_key="firmware_build", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_OT_VERSION, - friendly_name_format="Boiler OpenTherm Version {}", - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_CLOCKMHZ, + translation_key="clock_speed", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_PRODUCT_TYPE, - friendly_name_format="Thermostat Product Type {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_A, + translation_key="led_mode_n", + translation_placeholders={"led_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_MASTER_PRODUCT_VERSION, - friendly_name_format="Thermostat Product Version {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_B, + translation_key="led_mode_n", + translation_placeholders={"led_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, - friendly_name_format="Boiler Product Type {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_C, + translation_key="led_mode_n", + translation_placeholders={"led_id": "C"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.BOILER, gw_vars.THERMOSTAT], - OpenThermSensorEntityDescription( - key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, - friendly_name_format="Boiler Product Version {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_D, + translation_key="led_mode_n", + translation_placeholders={"led_id": "D"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - 
OpenThermSensorEntityDescription( - key=gw_vars.OTGW_MODE, - friendly_name_format="Gateway/Monitor Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_E, + translation_key="led_mode_n", + translation_placeholders={"led_id": "E"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_DHW_OVRD, - friendly_name_format="Gateway Hot Water Override Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_F, + translation_key="led_mode_n", + translation_placeholders={"led_id": "F"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_ABOUT, - friendly_name_format="Gateway Firmware Version {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_A, + translation_key="gpio_mode_n", + translation_placeholders={"gpio_id": "A"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_BUILD, - friendly_name_format="Gateway Firmware Build {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_B, + translation_key="gpio_mode_n", + translation_placeholders={"gpio_id": "B"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_CLOCKMHZ, - friendly_name_format="Gateway Clock Speed {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SB_TEMP, + translation_key="setback_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_A, - friendly_name_format="Gateway LED A Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SETP_OVRD_MODE, + translation_key="room_setpoint_override_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_B, - friendly_name_format="Gateway LED B Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SMART_PWR, + translation_key="smart_power_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_C, - friendly_name_format="Gateway LED C Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_THRM_DETECT, + translation_key="thermostat_detection_mode", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_D, - friendly_name_format="Gateway LED D Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_VREF, + translation_key="reference_voltage", + device_description=GATEWAY_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_E, - friendly_name_format="Gateway LED E Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_MEMBERID, + translation_key="manufacturer_id", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_LED_F, - friendly_name_format="Gateway LED F Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_OVRD, + 
translation_key="room_setpoint_override", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_GPIO_A, - friendly_name_format="Gateway GPIO A Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_GPIO_B, - friendly_name_format="Gateway GPIO B Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_2, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_SB_TEMP, - friendly_name_format="Gateway Setback Temperature {}", - device_class=SensorDeviceClass.TEMPERATURE, - state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=UnitOfTemperature.CELSIUS, - suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_TEMP, + translation_key="room_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_SETP_OVRD_MODE, - friendly_name_format="Gateway Room Setpoint Override Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OUTSIDE_TEMP, + translation_key="outside_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_SMART_PWR, - friendly_name_format="Gateway Smart Power Mode {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_THRM_DETECT, - friendly_name_format="Gateway Thermostat Detection {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_TYPE, + translation_key="product_type", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, ), - ( - [gw_vars.OTGW], - OpenThermSensorEntityDescription( - key=gw_vars.OTGW_VREF, - 
friendly_name_format="Gateway Reference Voltage Setting {}", - ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_VERSION, + translation_key="product_version", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT_2, + translation_key="control_setpoint_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MEMBERID, + translation_key="manufacturer_id", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OEM_FAULT, + translation_key="oem_fault_code", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_COOLING_CONTROL, + translation_key="cooling_control", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, + translation_key="max_relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_CAPACITY, + translation_key="max_capacity", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, + translation_key="min_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_REL_MOD_LEVEL, + translation_key="relative_mod_level", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_PRESS, + translation_key="central_heating_pressure", + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_FLOW_RATE, + translation_key="hot_water_flow_rate", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + 
state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP_2, + translation_key="central_heating_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP_2, + translation_key="hot_water_temperature_n", + translation_placeholders={"circuit_number": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_RETURN_WATER_TEMP, + translation_key="return_water_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_STORAGE_TEMP, + translation_key="solar_storage_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_COLL_TEMP, + translation_key="solar_collector_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_EXHAUST_TEMP, + translation_key="exhaust_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + 
key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, + translation_key="max_hot_water_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, + translation_key="max_hot_water_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MAX_SETP, + translation_key="max_central_heating_setpoint_upper", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MIN_SETP, + translation_key="max_central_heating_setpoint_lower", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_SETPOINT, + translation_key="hot_water_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MAX_CH_SETPOINT, + translation_key="max_central_heating_setpoint", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OEM_DIAG, + translation_key="oem_diagnostic_code", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_STARTS, + translation_key="total_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_STARTS, + translation_key="central_heating_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_STARTS, + translation_key="hot_water_pump_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_STARTS, + translation_key="hot_water_burner_starts", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_HOURS, + translation_key="total_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + 
OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_HOURS, + translation_key="central_heating_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_HOURS, + translation_key="hot_water_pump_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_HOURS, + translation_key="hot_water_burner_hours", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, + translation_key="product_type", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, + translation_key="product_version", + device_description=THERMOSTAT_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_MEMBERID, + translation_key="manufacturer_id", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_OVRD, + translation_key="room_setpoint_override", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "1"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_2, + translation_key="room_setpoint_n", + translation_placeholders={"setpoint_id": "2"}, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_TEMP, + translation_key="room_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OUTSIDE_TEMP, + translation_key="outside_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + 
suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_OT_VERSION, + translation_key="opentherm_version", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_TYPE, + translation_key="product_type", + device_description=BOILER_DEVICE_DESCRIPTION, + ), + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_VERSION, + translation_key="product_version", + device_description=BOILER_DEVICE_DESCRIPTION, ), ) @@ -629,37 +883,22 @@ async def async_setup_entry( async_add_entities( OpenThermSensor( gw_hub, - source, description, ) - for sources, description in SENSOR_INFO - for source in sources + for description in SENSOR_DESCRIPTIONS ) class OpenThermSensor(OpenThermEntity, SensorEntity): - """Representation of an OpenTherm Gateway sensor.""" + """Representation of an OpenTherm sensor.""" + _attr_entity_category = EntityCategory.DIAGNOSTIC entity_description: OpenThermSensorEntityDescription - def __init__( - self, - gw_hub: OpenThermGatewayHub, - source: str, - description: OpenThermSensorEntityDescription, - ) -> None: - """Initialize the OpenTherm Gateway sensor.""" - self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, - f"{description.key}_{source}_{gw_hub.hub_id}", - hass=gw_hub.hass, - ) - super().__init__(gw_hub, source, description) - @callback - def receive_report(self, status: dict[str, dict]) -> None: + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" - self._attr_available = self._gateway.connected - value = status[self._source].get(self.entity_description.key) - self._attr_native_value = value + self._attr_native_value = status[ + self.entity_description.device_description.data_source + ].get(self.entity_description.key) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index 9eb97539df9..006ccd1909b 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -1,4 +1,8 @@ { + "common": { + "state_not_supported": "Not supported", + "state_supported": "Supported" + }, "config": { "step": { "init": { @@ -16,6 +20,297 @@ "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]" } }, + "device": { + "boiler_device": { + "name": "OpenTherm Boiler" + }, + "gateway_device": { + "name": "OpenTherm Gateway" + }, + "thermostat_device": { + "name": "OpenTherm Thermostat" + } + }, + "entity": { + "binary_sensor": { + "fault_indication": { + "name": "Fault indication" + }, + "central_heating_n": { + "name": "Central heating {circuit_number}" + }, + "cooling": { + "name": "Cooling" + }, + "flame": { + "name": "Flame" + }, + "hot_water": { + "name": "Hot water" + }, + "diagnostic_indication": { + "name": "Diagnostic indication" + }, + "supports_hot_water": { + "name": "Hot water support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "control_type": { + "name": "Control type" + }, + "supports_cooling": { + "name": "Cooling support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": 
"[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "hot_water_config": { + "name": "Hot water system type", + "state": { + "off": "Instantaneous or unspecified", + "on": "Storage tank" + } + }, + "supports_pump_control": { + "name": "Pump control support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_ch_2": { + "name": "Central heating 2 support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "service_required": { + "name": "Service required" + }, + "supports_remote_reset": { + "name": "Remote reset support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "low_water_pressure": { + "name": "Low water pressure" + }, + "gas_fault": { + "name": "Gas fault" + }, + "air_pressure_fault": { + "name": "Air pressure fault" + }, + "water_overtemperature": { + "name": "Water overtemperature" + }, + "supports_central_heating_setpoint_transfer": { + "name": "Central heating setpoint transfer support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_central_heating_setpoint_writing": { + "name": "Central heating setpoint write support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_hot_water_setpoint_transfer": { + "name": "Hot water setpoint transfer support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "supports_hot_water_setpoint_writing": { + "name": "Hot water setpoint write support", + "state": { + "off": "[%key:component::opentherm_gw::common::state_not_supported%]", + "on": "[%key:component::opentherm_gw::common::state_supported%]" + } + }, + "gpio_state_n": { + "name": "GPIO {gpio_id} state" + }, + "ignore_transitions": { + "name": "Ignore transitions" + }, + "override_high_byte": { + "name": "Override high byte" + }, + "outside_temp_correction": { + "name": "Outside temperature correction" + }, + "override_manual_change_prio": { + "name": "Manual change has priority over override" + }, + "override_program_change_prio": { + "name": "Programmed change has priority over override" + } + }, + "sensor": { + "control_setpoint_n": { + "name": "Control setpoint {circuit_number}" + }, + "manufacturer_id": { + "name": "Manufacturer ID" + }, + "oem_fault_code": { + "name": "Manufacturer-specific fault code" + }, + "cooling_control": { + "name": "Cooling control signal" + }, + "max_relative_mod_level": { + "name": "Maximum relative modulation level" + }, + "max_capacity": { + "name": "Maximum capacity" + }, + "min_mod_level": { + "name": "Minimum modulation level" + }, + "relative_mod_level": { + "name": "Relative modulation level" + }, + "central_heating_pressure": { + "name": "Central heating water pressure" + }, + "hot_water_flow_rate": { + "name": "Hot water flow rate" + }, + "central_heating_temperature_n": { + "name": "Central heating {circuit_number} water temperature" + }, + "hot_water_temperature_n": { + "name": "Hot water {circuit_number} temperature" + 
}, + "return_water_temperature": { + "name": "Return water temperature" + }, + "solar_storage_temperature": { + "name": "Solar storage temperature" + }, + "solar_collector_temperature": { + "name": "Solar collector temperature" + }, + "exhaust_temperature": { + "name": "Exhaust temperature" + }, + "max_hot_water_setpoint_upper": { + "name": "Maximum hot water setpoint upper bound" + }, + "max_hot_water_setpoint_lower": { + "name": "Maximum hot water setpoint lower bound" + }, + "max_central_heating_setpoint_upper": { + "name": "Maximum central heating setpoint upper bound" + }, + "max_central_heating_setpoint_lower": { + "name": "Maximum central heating setpoint lower bound" + }, + "hot_water_setpoint": { + "name": "Hot water setpoint" + }, + "max_central_heating_setpoint": { + "name": "Maximum central heating setpoint" + }, + "oem_diagnostic_code": { + "name": "Manufacturer-specific diagnostic code" + }, + "total_burner_starts": { + "name": "Burner start count" + }, + "central_heating_pump_starts": { + "name": "Central heating pump start count" + }, + "hot_water_pump_starts": { + "name": "Hot water pump start count" + }, + "hot_water_burner_starts": { + "name": "Hot water burner start count" + }, + "total_burner_hours": { + "name": "Burner running time" + }, + "central_heating_pump_hours": { + "name": "Central heating pump running time" + }, + "hot_water_pump_hours": { + "name": "Hot water pump running time" + }, + "hot_water_burner_hours": { + "name": "Hot water burner running time" + }, + "opentherm_version": { + "name": "OpenTherm protocol version" + }, + "product_type": { + "name": "Product type" + }, + "product_version": { + "name": "Product version" + }, + "operating_mode": { + "name": "Operating mode" + }, + "hot_water_override_mode": { + "name": "Hot water override mode" + }, + "firmware_version": { + "name": "Firmware version" + }, + "firmware_build": { + "name": "Firmware build" + }, + "clock_speed": { + "name": "Clock speed" + }, + "led_mode_n": { + "name": "LED {led_id} mode" + }, + "gpio_mode_n": { + "name": "GPIO {gpio_id} mode" + }, + "setback_temperature": { + "name": "Setback temperature" + }, + "room_setpoint_override_mode": { + "name": "Room setpoint override mode" + }, + "smart_power_mode": { + "name": "Smart power mode" + }, + "thermostat_detection_mode": { + "name": "Thermostat detection mode" + }, + "reference_voltage": { + "name": "Reference voltage setting" + }, + "room_setpoint_override": { + "name": "Room setpoint override" + }, + "room_setpoint_n": { + "name": "Room setpoint {setpoint_id}" + }, + "room_temperature": { + "name": "Room temperature" + }, + "outside_temperature": { + "name": "Outside temperature" + } + } + }, "options": { "step": { "init": { diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index a466f788f1a..7b9801c8280 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -1,12 +1,15 @@ """Test Opentherm Gateway init.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, MagicMock, patch from pyotgw.vars import OTGW, OTGW_ABOUT import pytest from homeassistant import setup -from homeassistant.components.opentherm_gw.const import DOMAIN +from homeassistant.components.opentherm_gw.const import ( + DOMAIN, + OpenThermDeviceIdentifier, +) from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr @@ -49,7 +52,9 @@ async def 
test_device_registry_insert( await hass.async_block_till_done() - gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) + gw_dev = device_registry.async_get_device( + identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} + ) assert gw_dev.sw_version == VERSION_OLD @@ -63,7 +68,9 @@ async def test_device_registry_update( device_registry.async_get_or_create( config_entry_id=MOCK_CONFIG_ENTRY.entry_id, - identifiers={(DOMAIN, MOCK_GATEWAY_ID)}, + identifiers={ + (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}") + }, name="Mock Gateway", manufacturer="Schelte Bron", model="OpenTherm Gateway", @@ -80,5 +87,70 @@ async def test_device_registry_update( await setup.async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done() - gw_dev = device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) + gw_dev = device_registry.async_get_device( + identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} + ) + assert gw_dev is not None assert gw_dev.sw_version == VERSION_NEW + + +# Device migration test can be removed in 2025.4.0 +async def test_device_migration( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test that the device registry is updated correctly.""" + MOCK_CONFIG_ENTRY.add_to_hass(hass) + + device_registry.async_get_or_create( + config_entry_id=MOCK_CONFIG_ENTRY.entry_id, + identifiers={ + (DOMAIN, MOCK_GATEWAY_ID), + }, + name="Mock Gateway", + manufacturer="Schelte Bron", + model="OpenTherm Gateway", + sw_version=VERSION_OLD, + ) + + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGateway", + return_value=MagicMock( + connect=AsyncMock(return_value=MINIMAL_STATUS_UPD), + set_control_setpoint=AsyncMock(), + set_max_relative_mod=AsyncMock(), + disconnect=AsyncMock(), + ), + ), + ): + await setup.async_setup_component(hass, DOMAIN, {}) + + await hass.async_block_till_done() + + assert ( + device_registry.async_get_device(identifiers={(DOMAIN, MOCK_GATEWAY_ID)}) + is None + ) + + gw_dev = device_registry.async_get_device( + identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} + ) + assert gw_dev is not None + + assert ( + device_registry.async_get_device( + identifiers={ + (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.BOILER}") + } + ) + is not None + ) + + assert ( + device_registry.async_get_device( + identifiers={ + (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.THERMOSTAT}") + } + ) + is not None + ) From f735d12a66f7b3b8251d5045c043539526518b21 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 1 Sep 2024 16:26:14 +0200 Subject: [PATCH 1304/1635] Fix BMW client blocking on load_default_certs (#125015) * Fix BMW client blocking load_default_certs * Use get_default_context --- homeassistant/components/bmw_connected_drive/coordinator.py | 2 ++ homeassistant/components/bmw_connected_drive/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 6e0ed2ab670..992e7dea6b2 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -15,6 +15,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core 
import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.ssl import get_default_context from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS @@ -33,6 +34,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): entry.data[CONF_PASSWORD], get_region_from_name(entry.data[CONF_REGION]), observer_position=GPSPosition(hass.config.latitude, hass.config.longitude), + verify=get_default_context(), ) self.read_only = entry.options[CONF_READ_ONLY] self._entry = entry diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 7ee91388d29..6bc9027ac19 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], "quality_scale": "platinum", - "requirements": ["bimmer-connected[china]==0.16.2"] + "requirements": ["bimmer-connected[china]==0.16.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9c12227e6ab..3b95f22f161 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -559,7 +559,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.2 +bimmer-connected[china]==0.16.3 # homeassistant.components.bizkaibus bizkaibus==0.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c4d7dd1ad44..0f03f95e485 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -493,7 +493,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.2 +bimmer-connected[china]==0.16.3 # homeassistant.components.eq3btsmart # homeassistant.components.esphome From fa21613951b2718606ead069b967a5e216ca65d1 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 1 Sep 2024 17:13:04 +0200 Subject: [PATCH 1305/1635] Fix telegram_bot blocking on load_default_certs (#125014) * Fix telegram_bot blocking on load_default_certs * Use sync variant of create_issue --- homeassistant/components/telegram_bot/__init__.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index 9d1a5398055..2d53c744c22 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -41,6 +41,7 @@ from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration +from homeassistant.util.ssl import get_default_context, get_default_no_verify_context _LOGGER = logging.getLogger(__name__) @@ -378,7 +379,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: for p_config in domain_config: # Each platform config gets its own bot - bot = initialize_bot(hass, p_config) + bot = await hass.async_add_executor_job(initialize_bot, hass, p_config) p_type: str = p_config[CONF_PLATFORM] platform = platforms[p_type] @@ -486,7 +487,7 @@ def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot: # Auth can actually be stuffed into the URL, but 
the docs have previously # indicated to put them here. auth = proxy_params.pop("username"), proxy_params.pop("password") - ir.async_create_issue( + ir.create_issue( hass, DOMAIN, "proxy_params_auth_deprecation", @@ -503,7 +504,7 @@ def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot: learn_more_url="https://github.com/home-assistant/core/pull/112778", ) else: - ir.async_create_issue( + ir.create_issue( hass, DOMAIN, "proxy_params_deprecation", @@ -852,7 +853,11 @@ class TelegramNotificationService: username=kwargs.get(ATTR_USERNAME), password=kwargs.get(ATTR_PASSWORD), authentication=kwargs.get(ATTR_AUTHENTICATION), - verify_ssl=kwargs.get(ATTR_VERIFY_SSL), + verify_ssl=( + get_default_context() + if kwargs.get(ATTR_VERIFY_SSL, False) + else get_default_no_verify_context() + ), ) if file_content: From 56667ec2bc137e44616a82ed420f5afd191c2879 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Sun, 1 Sep 2024 17:22:03 +0200 Subject: [PATCH 1306/1635] Migrate opentherm_gw climate entity unique_id (#125024) * Migrate climate entity unique_id to match the format used by other opentherm_gw entities * Add test to verify migration --- .../components/opentherm_gw/__init__.py | 19 +++++++++- .../components/opentherm_gw/climate.py | 1 - tests/components/opentherm_gw/test_init.py | 35 ++++++++++++++++++- 3 files changed, 52 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index f0f5c709d0c..d8c352f3768 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -9,6 +9,7 @@ import pyotgw.vars as gw_vars from serial import SerialException import voluptuous as vol +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( ATTR_DATE, @@ -27,7 +28,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType @@ -132,6 +137,18 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b }, ) + # Migration can be removed in 2025.4.0 + ent_reg = er.async_get(hass) + if ( + entity_id := ent_reg.async_get_entity_id( + CLIMATE_DOMAIN, DOMAIN, config_entry.data[CONF_ID] + ) + ) is not None: + ent_reg.async_update_entity( + entity_id, + new_unique_id=f"{config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity", + ) + config_entry.add_update_listener(options_updated) try: diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 795a508be12..45f1ca478f5 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -103,7 +103,6 @@ class OpenThermClimate(OpenThermEntity, ClimateEntity): self._attr_precision = options[CONF_READ_PRECISION] self._attr_target_temperature_step = options.get(CONF_SET_PRECISION) self.temporary_ovrd_mode = options.get(CONF_TEMPORARY_OVRD_MODE, True) - self._attr_unique_id = gw_hub.hub_id @callback def update_options(self, entry): diff --git 
a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index 7b9801c8280..ed829cb1986 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -12,7 +12,7 @@ from homeassistant.components.opentherm_gw.const import ( ) from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry @@ -154,3 +154,36 @@ async def test_device_migration( ) is not None ) + + +# Entity migration test can be removed in 2025.4.0 +async def test_climate_entity_migration( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test that the climate entity unique_id gets migrated correctly.""" + MOCK_CONFIG_ENTRY.add_to_hass(hass) + entry = entity_registry.async_get_or_create( + domain="climate", + platform="opentherm_gw", + unique_id=MOCK_CONFIG_ENTRY.data[CONF_ID], + ) + + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGateway", + return_value=MagicMock( + connect=AsyncMock(return_value=MINIMAL_STATUS_UPD), + set_control_setpoint=AsyncMock(), + set_max_relative_mod=AsyncMock(), + disconnect=AsyncMock(), + ), + ), + ): + await setup.async_setup_component(hass, DOMAIN, {}) + + await hass.async_block_till_done() + + assert ( + entity_registry.async_get(entry.entity_id).unique_id + == f"{MOCK_CONFIG_ENTRY.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" + ) From ef8fc3913e8c40989e073c814b2c98adf8a64169 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Sun, 1 Sep 2024 17:35:55 +0200 Subject: [PATCH 1307/1635] Fix ollama blocking on load_default_certs (#125012) * Fix ollama blocking on load_default_certs * Use get_default_context instead of client_context --- homeassistant/components/ollama/__init__.py | 3 ++- homeassistant/components/ollama/config_flow.py | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/ollama/__init__.py b/homeassistant/components/ollama/__init__.py index 2ad389c55c3..3bcba567803 100644 --- a/homeassistant/components/ollama/__init__.py +++ b/homeassistant/components/ollama/__init__.py @@ -13,6 +13,7 @@ from homeassistant.const import CONF_URL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv +from homeassistant.util.ssl import get_default_context from .const import ( CONF_KEEP_ALIVE, @@ -43,7 +44,7 @@ PLATFORMS = (Platform.CONVERSATION,) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Ollama from a config entry.""" settings = {**entry.data, **entry.options} - client = ollama.AsyncClient(host=settings[CONF_URL]) + client = ollama.AsyncClient(host=settings[CONF_URL], verify=get_default_context()) try: async with asyncio.timeout(DEFAULT_TIMEOUT): await client.list() diff --git a/homeassistant/components/ollama/config_flow.py b/homeassistant/components/ollama/config_flow.py index 6b516d67138..65b8efaf525 100644 --- a/homeassistant/components/ollama/config_flow.py +++ b/homeassistant/components/ollama/config_flow.py @@ -33,6 +33,7 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, TextSelectorType, ) +from homeassistant.util.ssl import get_default_context from .const import ( 
CONF_KEEP_ALIVE, @@ -91,7 +92,9 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} try: - self.client = ollama.AsyncClient(host=self.url) + self.client = ollama.AsyncClient( + host=self.url, verify=get_default_context() + ) async with asyncio.timeout(DEFAULT_TIMEOUT): response = await self.client.list() From c6865d0862d01d7b6137680efd9035a11e77ead3 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 1 Sep 2024 17:37:06 +0200 Subject: [PATCH 1308/1635] Bump aiomealie to 0.9.1 (#125017) --- homeassistant/components/mealie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index 4a277cbd09b..d8fe26d97b3 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.9.0"] + "requirements": ["aiomealie==0.9.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3b95f22f161..6d80139df37 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -288,7 +288,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.0 +aiomealie==0.9.1 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0f03f95e485..908adbaf678 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -270,7 +270,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.0 +aiomealie==0.9.1 # homeassistant.components.modern_forms aiomodernforms==0.1.8 From 5f2964d3e85338365ad9e6e4c230c18ed1eee665 Mon Sep 17 00:00:00 2001 From: Malte Franken Date: Mon, 2 Sep 2024 01:38:48 +1000 Subject: [PATCH 1309/1635] Bump aio-georss-gdacs to 0.10 (#125021) bump aio-georss-gdacs to 0.10 --- homeassistant/components/gdacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gdacs/manifest.json b/homeassistant/components/gdacs/manifest.json index d743dd00424..fab47e00904 100644 --- a/homeassistant/components/gdacs/manifest.json +++ b/homeassistant/components/gdacs/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["aio_georss_gdacs", "aio_georss_client"], "quality_scale": "platinum", - "requirements": ["aio-georss-gdacs==0.9"] + "requirements": ["aio-georss-gdacs==0.10"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6d80139df37..13d3c94d902 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -170,7 +170,7 @@ aio-geojson-nsw-rfs-incidents==0.7 aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs -aio-georss-gdacs==0.9 +aio-georss-gdacs==0.10 # homeassistant.components.airq aioairq==0.3.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 908adbaf678..24b0928dad9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -158,7 +158,7 @@ aio-geojson-nsw-rfs-incidents==0.7 aio-geojson-usgs-earthquakes==0.3 # homeassistant.components.gdacs -aio-georss-gdacs==0.9 +aio-georss-gdacs==0.10 # homeassistant.components.airq aioairq==0.3.2 From bd6b5568ebd62f0b5d9cd2e774347001bca5a636 Mon Sep 17 00:00:00 2001 From: Dmitry Krasnoukhov Date: 
Sun, 1 Sep 2024 18:50:53 +0300 Subject: [PATCH 1310/1635] Extend hjjcy device category in Tuya integration (#124854) * Extend hjjcy device category in Tuya integration * Better AQI level names --- homeassistant/components/tuya/const.py | 1 + homeassistant/components/tuya/icons.json | 3 +++ homeassistant/components/tuya/sensor.py | 13 ++++++++++++- homeassistant/components/tuya/strings.json | 11 +++++++++++ 4 files changed, 27 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/tuya/const.py b/homeassistant/components/tuya/const.py index 55af95f0d34..eb56761d26a 100644 --- a/homeassistant/components/tuya/const.py +++ b/homeassistant/components/tuya/const.py @@ -96,6 +96,7 @@ class DPCode(StrEnum): """ AIR_QUALITY = "air_quality" + AIR_QUALITY_INDEX = "air_quality_index" ALARM_SWITCH = "alarm_switch" # Alarm switch ALARM_TIME = "alarm_time" # Alarm time ALARM_VOLUME = "alarm_volume" # Alarm volume diff --git a/homeassistant/components/tuya/icons.json b/homeassistant/components/tuya/icons.json index 48ae61f36fd..e28371f2b3d 100644 --- a/homeassistant/components/tuya/icons.json +++ b/homeassistant/components/tuya/icons.json @@ -236,6 +236,9 @@ }, "air_quality": { "default": "mdi:air-filter" + }, + "air_quality_index": { + "default": "mdi:air-filter" } }, "switch": { diff --git a/homeassistant/components/tuya/sensor.py b/homeassistant/components/tuya/sensor.py index 1ab3ea700d7..4f3c6099377 100644 --- a/homeassistant/components/tuya/sensor.py +++ b/homeassistant/components/tuya/sensor.py @@ -264,8 +264,12 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { ), ), # Air Quality Monitor - # No specification on Tuya portal + # https://developer.tuya.com/en/docs/iot/hjjcy?id=Kbeoad8y1nnlv "hjjcy": ( + TuyaSensorEntityDescription( + key=DPCode.AIR_QUALITY_INDEX, + translation_key="air_quality_index", + ), TuyaSensorEntityDescription( key=DPCode.TEMP_CURRENT, translation_key="temperature", @@ -301,6 +305,13 @@ SENSORS: dict[str, tuple[TuyaSensorEntityDescription, ...]] = { device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), + TuyaSensorEntityDescription( + key=DPCode.PM10, + translation_key="pm10", + device_class=SensorDeviceClass.PM10, + state_class=SensorStateClass.MEASUREMENT, + ), + *BATTERY_SENSORS, ), # Formaldehyde Detector # Note: Not documented diff --git a/homeassistant/components/tuya/strings.json b/homeassistant/components/tuya/strings.json index 6b699c0ffc0..865fbaffbbe 100644 --- a/homeassistant/components/tuya/strings.json +++ b/homeassistant/components/tuya/strings.json @@ -620,6 +620,17 @@ "good": "Good", "severe": "Severe" } + }, + "air_quality_index": { + "name": "Air quality index", + "state": { + "level_1": "Level 1", + "level_2": "Level 2", + "level_3": "Level 3", + "level_4": "Level 4", + "level_5": "Level 5", + "level_6": "Level 6" + } } }, "switch": { From ae1f53775f78817aa230f7e51cbafbed52581a51 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Sun, 1 Sep 2024 17:51:31 +0200 Subject: [PATCH 1311/1635] Bump python-telegram-bot to 21.5 (#125025) --- homeassistant/components/telegram_bot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/telegram_bot/manifest.json b/homeassistant/components/telegram_bot/manifest.json index c176e6c2cdf..b432c88762f 100644 --- a/homeassistant/components/telegram_bot/manifest.json +++ b/homeassistant/components/telegram_bot/manifest.json @@ -6,5 +6,5 @@ "documentation": 
"https://www.home-assistant.io/integrations/telegram_bot", "iot_class": "cloud_push", "loggers": ["telegram"], - "requirements": ["python-telegram-bot[socks]==21.0.1"] + "requirements": ["python-telegram-bot[socks]==21.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index 13d3c94d902..c4909d0de44 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2375,7 +2375,7 @@ python-tado==0.17.6 python-technove==1.3.1 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.0.1 +python-telegram-bot[socks]==21.5 # homeassistant.components.vlc python-vlc==3.0.18122 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 24b0928dad9..cc13bb03fd6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1887,7 +1887,7 @@ python-tado==0.17.6 python-technove==1.3.1 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.0.1 +python-telegram-bot[socks]==21.5 # homeassistant.components.tile pytile==2023.12.0 From 92c1fb77e9ff04c5e779b110428f4dc23a993ec3 Mon Sep 17 00:00:00 2001 From: Etienne Soufflet Date: Sun, 1 Sep 2024 18:33:45 +0200 Subject: [PATCH 1312/1635] Fix Tado fan speed for AC (#122415) * change capabilities * fix tests 2 * improve usability with capabilities * fix swings management * Update homeassistant/components/tado/climate.py Co-authored-by: Erwin Douna * fix after Erwin's review * fix after joostlek's review * use constant * use in instead of get --------- Co-authored-by: Erwin Douna --- homeassistant/components/tado/climate.py | 165 +++++++++++++++-------- homeassistant/components/tado/const.py | 7 + 2 files changed, 115 insertions(+), 57 deletions(-) diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index 314a2315d0a..60096c25301 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -16,6 +16,7 @@ from homeassistant.components.climate import ( SWING_BOTH, SWING_HORIZONTAL, SWING_OFF, + SWING_ON, SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, @@ -47,7 +48,6 @@ from .const import ( HA_TO_TADO_FAN_MODE_MAP, HA_TO_TADO_FAN_MODE_MAP_LEGACY, HA_TO_TADO_HVAC_MODE_MAP, - HA_TO_TADO_SWING_MODE_MAP, ORDERED_KNOWN_TADO_MODES, PRESET_AUTO, SIGNAL_TADO_UPDATE_RECEIVED, @@ -55,17 +55,20 @@ from .const import ( SUPPORT_PRESET_MANUAL, TADO_DEFAULT_MAX_TEMP, TADO_DEFAULT_MIN_TEMP, - TADO_FAN_LEVELS, - TADO_FAN_SPEEDS, + TADO_FANLEVEL_SETTING, + TADO_FANSPEED_SETTING, + TADO_HORIZONTAL_SWING_SETTING, TADO_HVAC_ACTION_TO_HA_HVAC_ACTION, TADO_MODES_WITH_NO_TEMP_SETTING, TADO_SWING_OFF, TADO_SWING_ON, + TADO_SWING_SETTING, TADO_TO_HA_FAN_MODE_MAP, TADO_TO_HA_FAN_MODE_MAP_LEGACY, TADO_TO_HA_HVAC_MODE_MAP, TADO_TO_HA_OFFSET_MAP, TADO_TO_HA_SWING_MODE_MAP, + TADO_VERTICAL_SWING_SETTING, TEMP_OFFSET, TYPE_AIR_CONDITIONING, TYPE_HEATING, @@ -166,29 +169,30 @@ def create_climate_entity( supported_hvac_modes.append(TADO_TO_HA_HVAC_MODE_MAP[mode]) if ( - capabilities[mode].get("swings") - or capabilities[mode].get("verticalSwing") - or capabilities[mode].get("horizontalSwing") + TADO_SWING_SETTING in capabilities[mode] + or TADO_VERTICAL_SWING_SETTING in capabilities[mode] + or TADO_VERTICAL_SWING_SETTING in capabilities[mode] ): support_flags |= ClimateEntityFeature.SWING_MODE supported_swing_modes = [] - if capabilities[mode].get("swings"): + if TADO_SWING_SETTING in capabilities[mode]: supported_swing_modes.append( TADO_TO_HA_SWING_MODE_MAP[TADO_SWING_ON] ) - if capabilities[mode].get("verticalSwing"): + if 
TADO_VERTICAL_SWING_SETTING in capabilities[mode]: supported_swing_modes.append(SWING_VERTICAL) - if capabilities[mode].get("horizontalSwing"): + if TADO_HORIZONTAL_SWING_SETTING in capabilities[mode]: supported_swing_modes.append(SWING_HORIZONTAL) if ( SWING_HORIZONTAL in supported_swing_modes - and SWING_HORIZONTAL in supported_swing_modes + and SWING_VERTICAL in supported_swing_modes ): supported_swing_modes.append(SWING_BOTH) supported_swing_modes.append(TADO_TO_HA_SWING_MODE_MAP[TADO_SWING_OFF]) - if not capabilities[mode].get("fanSpeeds") and not capabilities[mode].get( - "fanLevel" + if ( + TADO_FANSPEED_SETTING not in capabilities[mode] + and TADO_FANLEVEL_SETTING not in capabilities[mode] ): continue @@ -197,14 +201,15 @@ def create_climate_entity( if supported_fan_modes: continue - if capabilities[mode].get("fanSpeeds"): + if TADO_FANSPEED_SETTING in capabilities[mode]: supported_fan_modes = generate_supported_fanmodes( - TADO_TO_HA_FAN_MODE_MAP_LEGACY, capabilities[mode]["fanSpeeds"] + TADO_TO_HA_FAN_MODE_MAP_LEGACY, + capabilities[mode][TADO_FANSPEED_SETTING], ) else: supported_fan_modes = generate_supported_fanmodes( - TADO_TO_HA_FAN_MODE_MAP, capabilities[mode]["fanLevel"] + TADO_TO_HA_FAN_MODE_MAP, capabilities[mode][TADO_FANLEVEL_SETTING] ) cool_temperatures = capabilities[CONST_MODE_COOL]["temperatures"] @@ -316,12 +321,16 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._target_temp: float | None = None self._current_tado_fan_speed = CONST_FAN_OFF + self._current_tado_fan_level = CONST_FAN_OFF self._current_tado_hvac_mode = CONST_MODE_OFF self._current_tado_hvac_action = HVACAction.OFF self._current_tado_swing_mode = TADO_SWING_OFF self._current_tado_vertical_swing = TADO_SWING_OFF self._current_tado_horizontal_swing = TADO_SWING_OFF + capabilities = tado.get_capabilities(zone_id) + self._current_tado_capabilities = capabilities + self._tado_zone_data: PyTado.TadoZone = {} self._tado_geofence_data: dict[str, str] | None = None @@ -382,20 +391,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): def fan_mode(self) -> str | None: """Return the fan setting.""" if self._ac_device: - return TADO_TO_HA_FAN_MODE_MAP.get( - self._current_tado_fan_speed, - TADO_TO_HA_FAN_MODE_MAP_LEGACY.get( + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + return TADO_TO_HA_FAN_MODE_MAP_LEGACY.get( self._current_tado_fan_speed, FAN_AUTO - ), - ) + ) + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + return TADO_TO_HA_FAN_MODE_MAP.get( + self._current_tado_fan_level, FAN_AUTO + ) + return FAN_AUTO return None def set_fan_mode(self, fan_mode: str) -> None: """Turn fan on/off.""" - if self._current_tado_fan_speed in TADO_FAN_LEVELS: - self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) - else: + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP_LEGACY[fan_mode]) + elif self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) @property def preset_mode(self) -> str: @@ -555,24 +567,30 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): swing = None if self._attr_swing_modes is None: return - if ( - SWING_VERTICAL in self._attr_swing_modes - or SWING_HORIZONTAL in self._attr_swing_modes - ): - if swing_mode == SWING_VERTICAL: + if swing_mode == SWING_OFF: + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + swing = TADO_SWING_OFF + if 
self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + horizontal_swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + vertical_swing = TADO_SWING_OFF + if swing_mode == SWING_ON: + swing = TADO_SWING_ON + if swing_mode == SWING_VERTICAL: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): vertical_swing = TADO_SWING_ON - elif swing_mode == SWING_HORIZONTAL: + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + horizontal_swing = TADO_SWING_OFF + if swing_mode == SWING_HORIZONTAL: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + vertical_swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = TADO_SWING_ON - elif swing_mode == SWING_BOTH: + if swing_mode == SWING_BOTH: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): vertical_swing = TADO_SWING_ON + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = TADO_SWING_ON - elif swing_mode == SWING_OFF: - if SWING_VERTICAL in self._attr_swing_modes: - vertical_swing = TADO_SWING_OFF - if SWING_HORIZONTAL in self._attr_swing_modes: - horizontal_swing = TADO_SWING_OFF - else: - swing = HA_TO_TADO_SWING_MODE_MAP[swing_mode] self._control_hvac( swing_mode=swing, @@ -596,21 +614,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._device_id ][TEMP_OFFSET][offset_key] - self._current_tado_fan_speed = ( - self._tado_zone_data.current_fan_level - if self._tado_zone_data.current_fan_level is not None - else self._tado_zone_data.current_fan_speed - ) - self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode self._current_tado_hvac_action = self._tado_zone_data.current_hvac_action - self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode - self._current_tado_vertical_swing = ( - self._tado_zone_data.current_vertical_swing_mode - ) - self._current_tado_horizontal_swing = ( - self._tado_zone_data.current_horizontal_swing_mode - ) + + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = self._tado_zone_data.current_fan_level + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = self._tado_zone_data.current_fan_speed + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + self._current_tado_vertical_swing = ( + self._tado_zone_data.current_vertical_swing_mode + ) + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + self._current_tado_horizontal_swing = ( + self._tado_zone_data.current_horizontal_swing_mode + ) @callback def _async_update_zone_callback(self) -> None: @@ -665,7 +685,10 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._target_temp = target_temp if fan_mode: - self._current_tado_fan_speed = fan_mode + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = fan_mode + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = fan_mode if swing_mode: self._current_tado_swing_mode = swing_mode @@ -735,21 +758,32 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): fan_speed = None fan_level = None if self.supported_features & ClimateEntityFeature.FAN_MODE: - if self._current_tado_fan_speed in TADO_FAN_LEVELS: - fan_level 
= self._current_tado_fan_speed - elif self._current_tado_fan_speed in TADO_FAN_SPEEDS: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_FANSPEED_SETTING, self._current_tado_fan_speed + ): fan_speed = self._current_tado_fan_speed + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_FANLEVEL_SETTING, self._current_tado_fan_level + ): + fan_level = self._current_tado_fan_level + swing = None vertical_swing = None horizontal_swing = None if ( self.supported_features & ClimateEntityFeature.SWING_MODE ) and self._attr_swing_modes is not None: - if SWING_VERTICAL in self._attr_swing_modes: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_VERTICAL_SWING_SETTING, self._current_tado_vertical_swing + ): vertical_swing = self._current_tado_vertical_swing - if SWING_HORIZONTAL in self._attr_swing_modes: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_HORIZONTAL_SWING_SETTING, self._current_tado_horizontal_swing + ): horizontal_swing = self._current_tado_horizontal_swing - if vertical_swing is None and horizontal_swing is None: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_SWING_SETTING, self._current_tado_swing_mode + ): swing = self._current_tado_swing_mode self._tado.set_zone_overlay( @@ -765,3 +799,20 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): vertical_swing=vertical_swing, # api defaults to not sending verticalSwing if swing not None horizontal_swing=horizontal_swing, # api defaults to not sending horizontalSwing if swing not None ) + + def _is_valid_setting_for_hvac_mode(self, setting: str) -> bool: + return ( + self._current_tado_capabilities.get(self._current_tado_hvac_mode, {}).get( + setting + ) + is not None + ) + + def _is_current_setting_supported_by_current_hvac_mode( + self, setting: str, current_state: str | None + ) -> bool: + if self._is_valid_setting_for_hvac_mode(setting): + return current_state in self._current_tado_capabilities[ + self._current_tado_hvac_mode + ].get(setting, []) + return False diff --git a/homeassistant/components/tado/const.py b/homeassistant/components/tado/const.py index 5c6a80c5beb..8033a653325 100644 --- a/homeassistant/components/tado/const.py +++ b/homeassistant/components/tado/const.py @@ -234,3 +234,10 @@ CONF_READING = "reading" ATTR_MESSAGE = "message" WATER_HEATER_FALLBACK_REPAIR = "water_heater_fallback" + +TADO_SWING_SETTING = "swings" +TADO_FANSPEED_SETTING = "fanSpeeds" + +TADO_FANLEVEL_SETTING = "fanLevel" +TADO_VERTICAL_SWING_SETTING = "verticalSwing" +TADO_HORIZONTAL_SWING_SETTING = "horizontalSwing" From 24414369d72163e58db03c66cb3c99a9571f84e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sun, 1 Sep 2024 20:28:13 +0200 Subject: [PATCH 1313/1635] Update aioairzone-cloud to v0.6.5 (#125030) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álvaro Fernández Rojas --- homeassistant/components/airzone_cloud/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone_cloud/manifest.json b/homeassistant/components/airzone_cloud/manifest.json index 47a06c308ad..e0b0695655d 100644 --- a/homeassistant/components/airzone_cloud/manifest.json +++ b/homeassistant/components/airzone_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone_cloud", "iot_class": "cloud_push", "loggers": 
["aioairzone_cloud"], - "requirements": ["aioairzone-cloud==0.6.4"] + "requirements": ["aioairzone-cloud==0.6.5"] } diff --git a/requirements_all.txt b/requirements_all.txt index c4909d0de44..599b557a730 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -176,7 +176,7 @@ aio-georss-gdacs==0.10 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.4 +aioairzone-cloud==0.6.5 # homeassistant.components.airzone aioairzone==0.8.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index cc13bb03fd6..58556b87c90 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -164,7 +164,7 @@ aio-georss-gdacs==0.10 aioairq==0.3.2 # homeassistant.components.airzone_cloud -aioairzone-cloud==0.6.4 +aioairzone-cloud==0.6.5 # homeassistant.components.airzone aioairzone==0.8.2 From 659d135fca5fa1568b10e5c163888935e48eb2d9 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Sun, 1 Sep 2024 21:02:32 +0200 Subject: [PATCH 1314/1635] Add ConductivityConverter in websocket_api.py (#125029) --- homeassistant/components/recorder/websocket_api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index 5e0eef37721..f08f7bdcb97 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -15,6 +15,7 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + ConductivityConverter, DataRateConverter, DistanceConverter, DurationConverter, @@ -48,7 +49,7 @@ from .util import PERIOD_SCHEMA, get_instance, resolve_period UNIT_SCHEMA = vol.Schema( { - vol.Optional("conductivity"): vol.In(DataRateConverter.VALID_UNITS), + vol.Optional("conductivity"): vol.In(ConductivityConverter.VALID_UNITS), vol.Optional("data_rate"): vol.In(DataRateConverter.VALID_UNITS), vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS), vol.Optional("duration"): vol.In(DurationConverter.VALID_UNITS), From 07e251d488b66b644eea48af90606a0851c74e41 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Sun, 1 Sep 2024 22:04:29 +0200 Subject: [PATCH 1315/1635] Add diagnostics platform to modern forms (#125032) --- .../components/modern_forms/diagnostics.py | 36 +++++++++++++ .../snapshots/test_diagnostics.ambr | 50 +++++++++++++++++++ .../modern_forms/test_diagnostics.py | 26 ++++++++++ 3 files changed, 112 insertions(+) create mode 100644 homeassistant/components/modern_forms/diagnostics.py create mode 100644 tests/components/modern_forms/snapshots/test_diagnostics.ambr create mode 100644 tests/components/modern_forms/test_diagnostics.py diff --git a/homeassistant/components/modern_forms/diagnostics.py b/homeassistant/components/modern_forms/diagnostics.py new file mode 100644 index 00000000000..0011a7c3bab --- /dev/null +++ b/homeassistant/components/modern_forms/diagnostics.py @@ -0,0 +1,36 @@ +"""Diagnostics support for Modern Forms.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import TYPE_CHECKING, Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_MAC +from homeassistant.core import HomeAssistant + +from .const 
import DOMAIN +from .coordinator import ModernFormsDataUpdateCoordinator + +REDACT_CONFIG = {CONF_MAC} +REDACT_DEVICE_INFO = {"mac_address", "owner"} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator: ModernFormsDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + if TYPE_CHECKING: + assert coordinator is not None + + return { + "config_entry": async_redact_data(entry.as_dict(), REDACT_CONFIG), + "device": { + "info": async_redact_data( + asdict(coordinator.modern_forms.info), REDACT_DEVICE_INFO + ), + "status": asdict(coordinator.modern_forms.status), + }, + } diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..56e299aa12a --- /dev/null +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '192.168.1.123', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'modern_forms', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'device': dict({ + 'info': dict({ + 'client_id': 'MF_000000000000', + 'device_name': 'ModernFormsFan', + 'fan_motor_type': 'DC125X25', + 'fan_type': '1818-56', + 'federated_identity': 'us-east-1:f3da237b-c19c-4f61-b387-0e6dde2e470b', + 'firmware_url': '', + 'firmware_version': '01.03.0025', + 'light_type': 'F6IN-120V-R1-30', + 'mac_address': '**REDACTED**', + 'main_mcu_firmware_version': '01.03.3008', + 'owner': '**REDACTED**', + 'product_sku': '', + 'production_lot_number': '', + }), + 'status': dict({ + 'adaptive_learning_enabled': False, + 'away_mode_enabled': False, + 'fan_direction': 'forward', + 'fan_on': True, + 'fan_sleep_timer': 0, + 'fan_speed': 3, + 'light_brightness': 50, + 'light_on': True, + 'light_sleep_timer': 0, + }), + }), + }) +# --- diff --git a/tests/components/modern_forms/test_diagnostics.py b/tests/components/modern_forms/test_diagnostics.py new file mode 100644 index 00000000000..9eb2e4efa94 --- /dev/null +++ b/tests/components/modern_forms/test_diagnostics.py @@ -0,0 +1,26 @@ +"""Tests for the Modern Forms diagnostics platform.""" + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation and values of the Modern Forms fans.""" + entry = await init_integration(hass, aioclient_mock) + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert result == snapshot(exclude=props("created_at", "modified_at", "entry_id")) From 77b464f2bd5002fbf86f23669f9d3934fc20eba1 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 1 Sep 2024 10:47:24 -1000 Subject: [PATCH 1316/1635] Bump yarl to 1.9.7 (#125035) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 30edee058bb..414bff657a0 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.6 +yarl==1.9.7 zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index 4376ed63d0d..69d952f4bc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.6", + "yarl==1.9.7", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index a9e01545b83..fd6e8815e90 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.6 +yarl==1.9.7 From 99f43400bf25ecaafe7ea3d14d148e6a68463b69 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Mon, 2 Sep 2024 00:08:19 +0300 Subject: [PATCH 1317/1635] Bump aioshelly to 11.4.2 (#125036) --- homeassistant/components/shelly/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index f9fa2d571d1..5e2522ea456 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.4.1"], + "requirements": ["aioshelly==11.4.2"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 599b557a730..7c15916913f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -359,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.4.1 +aioshelly==11.4.2 # homeassistant.components.skybell aioskybell==22.7.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 58556b87c90..54d9953779c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -341,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.4.1 +aioshelly==11.4.2 # homeassistant.components.skybell aioskybell==22.7.0 From 9fff3a13a57adaf512d48a27d60fc5dee9a07b98 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Sun, 1 Sep 2024 21:49:38 -0700 Subject: [PATCH 1318/1635] Clarify comment in google photos upload service (#125042) --- homeassistant/components/google_photos/services.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/google_photos/services.py b/homeassistant/components/google_photos/services.py index a895f333962..77015d5c700 100644 --- a/homeassistant/components/google_photos/services.py +++ b/homeassistant/components/google_photos/services.py @@ -42,7 +42,7 @@ UPLOAD_SERVICE_SCHEMA = vol.Schema( def _read_file_contents( hass: HomeAssistant, filenames: list[str] ) -> list[tuple[str, bytes]]: - """Read the mime type and contents from each filen.""" + """Return 
the mime types and file contents for each file.""" results = [] for filename in filenames: if not hass.config.is_allowed_path(filename): From 78cf7dc873ab2b95305a71034c2b5a49bd86f868 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Mon, 2 Sep 2024 09:13:10 +0300 Subject: [PATCH 1319/1635] New template merge_response (#114204) * New template merge_response * Extending * Extend comment * Update * Fixes * Fix comments * Mods * snapshots * Fixes from discussion --- homeassistant/helpers/template.py | 58 ++++ tests/helpers/snapshots/test_template.ambr | 337 +++++++++++++++++++++ tests/helpers/test_template.py | 259 ++++++++++++++++ 3 files changed, 654 insertions(+) create mode 100644 tests/helpers/snapshots/test_template.ambr diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 0a980db30b4..6856983aa59 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -51,6 +51,7 @@ from homeassistant.const import ( from homeassistant.core import ( Context, HomeAssistant, + ServiceResponse, State, callback, split_entity_id, @@ -2118,6 +2119,62 @@ def as_timedelta(value: str) -> timedelta | None: return dt_util.parse_duration(value) +def merge_response(value: ServiceResponse) -> list[Any]: + """Merge action responses into single list. + + Checks that the input is a correct service response: + { + "entity_id": {str: dict[str, Any]}, + } + If response is a single list, it will extend the list with the items + and add the entity_id and value_key to each dictionary for reference. + If response is a dictionary or multiple lists, + it will append the dictionary/lists to the list + and add the entity_id to each dictionary for reference. + """ + if not isinstance(value, dict): + raise TypeError("Response is not a dictionary") + if not value: + # Bail out early if response is an empty dictionary + return [] + + is_single_list = False + response_items: list = [] + for entity_id, entity_response in value.items(): # pylint: disable=too-many-nested-blocks + if not isinstance(entity_response, dict): + raise TypeError("Response is not a dictionary") + for value_key, type_response in entity_response.items(): + if len(entity_response) == 1 and isinstance(type_response, list): + # Provides special handling for responses such as calendar events + # and weather forecasts where the response contains a single list with multiple + # dictionaries inside. + is_single_list = True + for dict_in_list in type_response: + if isinstance(dict_in_list, dict): + if ATTR_ENTITY_ID in dict_in_list: + raise ValueError( + f"Response dictionary already contains key '{ATTR_ENTITY_ID}'" + ) + dict_in_list[ATTR_ENTITY_ID] = entity_id + dict_in_list["value_key"] = value_key + response_items.extend(type_response) + else: + # Break the loop if not a single list as the logic is then managed in the outer loop + # which handles both dictionaries and in the case of multiple lists. 
+ break + + if not is_single_list: + _response = entity_response.copy() + if ATTR_ENTITY_ID in _response: + raise ValueError( + f"Response dictionary already contains key '{ATTR_ENTITY_ID}'" + ) + _response[ATTR_ENTITY_ID] = entity_id + response_items.append(_response) + + return response_items + + def strptime(string, fmt, default=_SENTINEL): """Parse a time string to datetime.""" try: @@ -2833,6 +2890,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["as_timedelta"] = as_timedelta self.globals["as_timestamp"] = forgiving_as_timestamp self.globals["timedelta"] = timedelta + self.globals["merge_response"] = merge_response self.globals["strptime"] = strptime self.globals["urlencode"] = urlencode self.globals["average"] = average diff --git a/tests/helpers/snapshots/test_template.ambr b/tests/helpers/snapshots/test_template.ambr new file mode 100644 index 00000000000..af38433f1a4 --- /dev/null +++ b/tests/helpers/snapshots/test_template.ambr @@ -0,0 +1,337 @@ +# serializer version: 1 +# name: test_merge_response[calendar][a_response] + dict({ + 'calendar.local_furry_events': dict({ + 'events': list([ + ]), + }), + 'calendar.sports': dict({ + 'events': list([ + dict({ + 'description': '', + 'end': '2024-02-27T18:00:00-06:00', + 'start': '2024-02-27T17:00:00-06:00', + 'summary': 'Basketball vs. Rockets', + }), + ]), + }), + 'calendar.yap_house_schedules': dict({ + 'events': list([ + dict({ + 'description': '', + 'end': '2024-02-26T09:00:00-06:00', + 'start': '2024-02-26T08:00:00-06:00', + 'summary': 'Dr. Appt', + }), + dict({ + 'description': 'something good', + 'end': '2024-02-28T21:00:00-06:00', + 'start': '2024-02-28T20:00:00-06:00', + 'summary': 'Bake a cake', + }), + ]), + }), + }) +# --- +# name: test_merge_response[calendar][b_rendered] + Wrapper([ + dict({ + 'description': '', + 'end': '2024-02-27T18:00:00-06:00', + 'entity_id': 'calendar.sports', + 'start': '2024-02-27T17:00:00-06:00', + 'summary': 'Basketball vs. Rockets', + 'value_key': 'events', + }), + dict({ + 'description': '', + 'end': '2024-02-26T09:00:00-06:00', + 'entity_id': 'calendar.yap_house_schedules', + 'start': '2024-02-26T08:00:00-06:00', + 'summary': 'Dr. 
Appt', + 'value_key': 'events', + }), + dict({ + 'description': 'something good', + 'end': '2024-02-28T21:00:00-06:00', + 'entity_id': 'calendar.yap_house_schedules', + 'start': '2024-02-28T20:00:00-06:00', + 'summary': 'Bake a cake', + 'value_key': 'events', + }), + ]) +# --- +# name: test_merge_response[vacuum][a_response] + dict({ + 'vacuum.deebot_n8_plus_1': dict({ + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + 'vacuum.deebot_n8_plus_2': dict({ + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + }) +# --- +# name: test_merge_response[vacuum][b_rendered] + Wrapper([ + dict({ + 'entity_id': 'vacuum.deebot_n8_plus_1', + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + dict({ + 'entity_id': 'vacuum.deebot_n8_plus_2', + 'header': dict({ + 'ver': '0.0.1', + }), + 'payloadType': 'j', + 'resp': dict({ + 'body': dict({ + 'msg': 'ok', + }), + }), + }), + ]) +# --- +# name: test_merge_response[weather][a_response] + dict({ + 'weather.forecast_home': dict({ + 'forecast': list([ + dict({ + 'condition': 'cloudy', + 'datetime': '2024-03-31T10:00:00+00:00', + 'humidity': 71, + 'precipitation': 0, + 'precipitation_probability': 6.6, + 'temperature': 10.9, + 'templow': 6.5, + 'wind_bearing': 71.8, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2024-04-01T10:00:00+00:00', + 'humidity': 79, + 'precipitation': 0, + 'precipitation_probability': 8, + 'temperature': 10.2, + 'templow': 3.4, + 'wind_bearing': 350.6, + 'wind_gust_speed': 38.2, + 'wind_speed': 21.6, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2024-04-02T10:00:00+00:00', + 'humidity': 77, + 'precipitation': 2.3, + 'precipitation_probability': 67.4, + 'temperature': 3, + 'templow': 0, + 'wind_bearing': 24.5, + 'wind_gust_speed': 64.8, + 'wind_speed': 37.4, + }), + ]), + }), + 'weather.smhi_home': dict({ + 'forecast': list([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-03-31T16:00:00', + 'humidity': 87, + 'precipitation': 0.2, + 'pressure': 998, + 'temperature': 10, + 'templow': 4, + 'wind_bearing': 79, + 'wind_gust_speed': 21.6, + 'wind_speed': 11.88, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2024-04-01T12:00:00', + 'humidity': 88, + 'precipitation': 2.2, + 'pressure': 999, + 'temperature': 6, + 'templow': 1, + 'wind_bearing': 17, + 'wind_gust_speed': 20.52, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-04-02T12:00:00', + 'humidity': 71, + 'precipitation': 1.3, + 'pressure': 1003, + 'temperature': 0, + 'templow': -3, + 'wind_bearing': 17, + 'wind_gust_speed': 57.24, + 'wind_speed': 30.6, + }), + ]), + }), + }) +# --- +# name: test_merge_response[weather][b_rendered] + Wrapper([ + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-03-31T16:00:00', + 'entity_id': 'weather.smhi_home', + 'humidity': 87, + 'precipitation': 0.2, + 'pressure': 998, + 'temperature': 10, + 'templow': 4, + 'value_key': 'forecast', + 'wind_bearing': 79, + 'wind_gust_speed': 21.6, + 'wind_speed': 11.88, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'rainy', + 'datetime': '2024-04-01T12:00:00', + 'entity_id': 'weather.smhi_home', + 'humidity': 88, + 'precipitation': 2.2, + 'pressure': 999, + 
'temperature': 6, + 'templow': 1, + 'value_key': 'forecast', + 'wind_bearing': 17, + 'wind_gust_speed': 20.52, + 'wind_speed': 8.64, + }), + dict({ + 'cloud_coverage': 100, + 'condition': 'cloudy', + 'datetime': '2024-04-02T12:00:00', + 'entity_id': 'weather.smhi_home', + 'humidity': 71, + 'precipitation': 1.3, + 'pressure': 1003, + 'temperature': 0, + 'templow': -3, + 'value_key': 'forecast', + 'wind_bearing': 17, + 'wind_gust_speed': 57.24, + 'wind_speed': 30.6, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2024-03-31T10:00:00+00:00', + 'entity_id': 'weather.forecast_home', + 'humidity': 71, + 'precipitation': 0, + 'precipitation_probability': 6.6, + 'temperature': 10.9, + 'templow': 6.5, + 'value_key': 'forecast', + 'wind_bearing': 71.8, + 'wind_gust_speed': 24.1, + 'wind_speed': 13.7, + }), + dict({ + 'condition': 'cloudy', + 'datetime': '2024-04-01T10:00:00+00:00', + 'entity_id': 'weather.forecast_home', + 'humidity': 79, + 'precipitation': 0, + 'precipitation_probability': 8, + 'temperature': 10.2, + 'templow': 3.4, + 'value_key': 'forecast', + 'wind_bearing': 350.6, + 'wind_gust_speed': 38.2, + 'wind_speed': 21.6, + }), + dict({ + 'condition': 'snowy', + 'datetime': '2024-04-02T10:00:00+00:00', + 'entity_id': 'weather.forecast_home', + 'humidity': 77, + 'precipitation': 2.3, + 'precipitation_probability': 67.4, + 'temperature': 3, + 'templow': 0, + 'value_key': 'forecast', + 'wind_bearing': 24.5, + 'wind_gust_speed': 64.8, + 'wind_speed': 37.4, + }), + ]) +# --- +# name: test_merge_response[workday][a_response] + dict({ + 'binary_sensor.workday': dict({ + 'workday': True, + }), + 'binary_sensor.workday2': dict({ + 'workday': False, + }), + }) +# --- +# name: test_merge_response[workday][b_rendered] + Wrapper([ + dict({ + 'entity_id': 'binary_sensor.workday', + 'workday': True, + }), + dict({ + 'entity_id': 'binary_sensor.workday2', + 'workday': False, + }), + ]) +# --- +# name: test_merge_response_with_empty_response[a_response] + dict({ + 'calendar.local_furry_events': dict({ + 'events': list([ + ]), + }), + 'calendar.sports': dict({ + 'events': list([ + ]), + }), + 'calendar.yap_house_schedules': dict({ + 'events': list([ + ]), + }), + }) +# --- +# name: test_merge_response_with_empty_response[b_rendered] + Wrapper([ + ]) +# --- diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index f585b5c3260..e4f833b2d1d 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -15,6 +15,7 @@ from unittest.mock import patch from freezegun import freeze_time import orjson import pytest +from syrupy import SnapshotAssertion import voluptuous as vol from homeassistant import config_entries @@ -6288,3 +6289,261 @@ def test_template_output_exceeds_maximum_size(hass: HomeAssistant) -> None: tpl = template.Template("{{ 'a' * 1024 * 257 }}", hass) with pytest.raises(TemplateError): tpl.async_render() + + +@pytest.mark.parametrize( + ("service_response"), + [ + { + "calendar.sports": { + "events": [ + { + "start": "2024-02-27T17:00:00-06:00", + "end": "2024-02-27T18:00:00-06:00", + "summary": "Basketball vs. Rockets", + "description": "", + } + ] + }, + "calendar.local_furry_events": {"events": []}, + "calendar.yap_house_schedules": { + "events": [ + { + "start": "2024-02-26T08:00:00-06:00", + "end": "2024-02-26T09:00:00-06:00", + "summary": "Dr. 
Appt", + "description": "", + }, + { + "start": "2024-02-28T20:00:00-06:00", + "end": "2024-02-28T21:00:00-06:00", + "summary": "Bake a cake", + "description": "something good", + }, + ] + }, + }, + { + "binary_sensor.workday": {"workday": True}, + "binary_sensor.workday2": {"workday": False}, + }, + { + "weather.smhi_home": { + "forecast": [ + { + "datetime": "2024-03-31T16:00:00", + "condition": "cloudy", + "wind_bearing": 79, + "cloud_coverage": 100, + "temperature": 10, + "templow": 4, + "pressure": 998, + "wind_gust_speed": 21.6, + "wind_speed": 11.88, + "precipitation": 0.2, + "humidity": 87, + }, + { + "datetime": "2024-04-01T12:00:00", + "condition": "rainy", + "wind_bearing": 17, + "cloud_coverage": 100, + "temperature": 6, + "templow": 1, + "pressure": 999, + "wind_gust_speed": 20.52, + "wind_speed": 8.64, + "precipitation": 2.2, + "humidity": 88, + }, + { + "datetime": "2024-04-02T12:00:00", + "condition": "cloudy", + "wind_bearing": 17, + "cloud_coverage": 100, + "temperature": 0, + "templow": -3, + "pressure": 1003, + "wind_gust_speed": 57.24, + "wind_speed": 30.6, + "precipitation": 1.3, + "humidity": 71, + }, + ] + }, + "weather.forecast_home": { + "forecast": [ + { + "condition": "cloudy", + "precipitation_probability": 6.6, + "datetime": "2024-03-31T10:00:00+00:00", + "wind_bearing": 71.8, + "temperature": 10.9, + "templow": 6.5, + "wind_gust_speed": 24.1, + "wind_speed": 13.7, + "precipitation": 0, + "humidity": 71, + }, + { + "condition": "cloudy", + "precipitation_probability": 8, + "datetime": "2024-04-01T10:00:00+00:00", + "wind_bearing": 350.6, + "temperature": 10.2, + "templow": 3.4, + "wind_gust_speed": 38.2, + "wind_speed": 21.6, + "precipitation": 0, + "humidity": 79, + }, + { + "condition": "snowy", + "precipitation_probability": 67.4, + "datetime": "2024-04-02T10:00:00+00:00", + "wind_bearing": 24.5, + "temperature": 3, + "templow": 0, + "wind_gust_speed": 64.8, + "wind_speed": 37.4, + "precipitation": 2.3, + "humidity": 77, + }, + ] + }, + }, + { + "vacuum.deebot_n8_plus_1": { + "payloadType": "j", + "resp": { + "body": { + "msg": "ok", + } + }, + "header": { + "ver": "0.0.1", + }, + }, + "vacuum.deebot_n8_plus_2": { + "payloadType": "j", + "resp": { + "body": { + "msg": "ok", + } + }, + "header": { + "ver": "0.0.1", + }, + }, + }, + ], + ids=["calendar", "workday", "weather", "vacuum"], +) +async def test_merge_response( + hass: HomeAssistant, + service_response: dict, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter.""" + + _template = "{{ merge_response(" + str(service_response) + ") }}" + + tpl = template.Template(_template, hass) + assert service_response == snapshot(name="a_response") + assert tpl.async_render() == snapshot(name="b_rendered") + + +async def test_merge_response_with_entity_id_in_response( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with empty lists.""" + + service_response = { + "test.response": {"some_key": True, "entity_id": "test.response"}, + "test.response2": {"some_key": False, "entity_id": "test.response2"}, + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + with pytest.raises( + TemplateError, + match="ValueError: Response dictionary already contains key 'entity_id'", + ): + template.Template(_template, hass).async_render() + + service_response = { + "test.response": { + "happening": [ + { + "start": "2024-02-27T17:00:00-06:00", + "end": "2024-02-27T18:00:00-06:00", + "summary": "Magic day", + 
"entity_id": "test.response", + } + ] + } + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + with pytest.raises( + TemplateError, + match="ValueError: Response dictionary already contains key 'entity_id'", + ): + template.Template(_template, hass).async_render() + + +async def test_merge_response_with_empty_response( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with empty lists.""" + + service_response = { + "calendar.sports": {"events": []}, + "calendar.local_furry_events": {"events": []}, + "calendar.yap_house_schedules": {"events": []}, + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + assert service_response == snapshot(name="a_response") + assert tpl.async_render() == snapshot(name="b_rendered") + + +async def test_response_empty_dict( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with empty dict.""" + + service_response = {} + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + assert tpl.async_render() == [] + + +async def test_response_incorrect_value( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the merge_response function/filter with incorrect response.""" + + service_response = "incorrect" + _template = "{{ merge_response(" + str(service_response) + ") }}" + with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): + template.Template(_template, hass).async_render() + + +async def test_merge_response_with_incorrect_response(hass: HomeAssistant) -> None: + """Test the merge_response function/filter with empty response should raise.""" + + service_response = {"calendar.sports": []} + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): + tpl.async_render() + + service_response = { + "binary_sensor.workday": [], + } + _template = "{{ merge_response(" + str(service_response) + ") }}" + tpl = template.Template(_template, hass) + with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): + tpl.async_render() From 8f679fcbf3fd34eeec8e62c909d2b1898865f91f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 09:51:05 +0200 Subject: [PATCH 1320/1635] Fix motionblinds_ble tests (#125060) --- tests/components/motionblinds_ble/test_entity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py index 1bfd3b185e5..00369ba1e22 100644 --- a/tests/components/motionblinds_ble/test_entity.py +++ b/tests/components/motionblinds_ble/test_entity.py @@ -23,6 +23,7 @@ from . 
import setup_integration from tests.common import MockConfigEntry +@pytest.mark.usefixtures("motionblinds_ble_connect") @pytest.mark.parametrize( ("platform", "entity"), [ From fa14321aa19425c30a867375e5ed5dade93de92a Mon Sep 17 00:00:00 2001 From: tronikos Date: Mon, 2 Sep 2024 01:41:29 -0700 Subject: [PATCH 1321/1635] Bump androidtvremote2 to 0.1.2 to fix blocking event loop when loading ssl certificate chain (#125061) Bump androidtvremote2 to 0.1.2 --- homeassistant/components/androidtv_remote/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index e24fcc5d653..a06152fa570 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -8,6 +8,6 @@ "iot_class": "local_push", "loggers": ["androidtvremote2"], "quality_scale": "platinum", - "requirements": ["androidtvremote2==0.1.1"], + "requirements": ["androidtvremote2==0.1.2"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 7c15916913f..bac5b32ce89 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -440,7 +440,7 @@ amcrest==1.9.8 androidtv[async]==0.0.73 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.1 +androidtvremote2==0.1.2 # homeassistant.components.anel_pwrctrl anel-pwrctrl-homeassistant==0.0.1.dev2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 54d9953779c..4724f5d38c2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -416,7 +416,7 @@ amberelectric==1.1.1 androidtv[async]==0.0.73 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.1 +androidtvremote2==0.1.2 # homeassistant.components.anova anova-wifi==0.17.0 From 72d5146a3edfcfd57f8f60db1afe82829fec0ea8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 2 Sep 2024 10:46:35 +0200 Subject: [PATCH 1322/1635] Improve renault tests (#125064) --- .../renault/snapshots/test_services.ambr | 460 ++++++++++++++++++ tests/components/renault/test_services.py | 9 +- 2 files changed, 465 insertions(+), 4 deletions(-) create mode 100644 tests/components/renault/snapshots/test_services.ambr diff --git a/tests/components/renault/snapshots/test_services.ambr b/tests/components/renault/snapshots/test_services.ambr new file mode 100644 index 00000000000..df4269c7430 --- /dev/null +++ b/tests/components/renault/snapshots/test_services.ambr @@ -0,0 +1,460 @@ +# serializer version: 1 +# name: test_service_set_charge_schedule[zoe_40] + list([ + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'saturday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 
'wednesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + }), + 'saturday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + }), + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'saturday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'sunday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'thursday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + }), + 'saturday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'sunday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'thursday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 3, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 3, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 4, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 4, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 5, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 5, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + ]) +# --- +# name: test_service_set_charge_schedule_multi[zoe_40] + list([ + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'raw_data': dict({ + 'activated': 
True, + 'friday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'id': 1, + 'monday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'saturday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + }), + 'saturday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'sunday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'thursday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'tuesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + 'wednesday': dict({ + 'duration': 450, + 'raw_data': dict({ + 'duration': 450, + 'startTime': 'T00:00Z', + }), + 'startTime': 'T00:00Z', + }), + }), + dict({ + 'activated': True, + 'friday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'raw_data': dict({ + 'activated': True, + 'friday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'id': 2, + 'monday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'saturday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'sunday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'thursday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'wednesday': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + }), + 'saturday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'sunday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'thursday': dict({ + 'duration': 15, + 'raw_data': dict({ + 'duration': 15, + 'startTime': 'T23:30Z', + }), + 'startTime': 'T23:30Z', + }), + 'tuesday': dict({ + 'duration': 30, + 'raw_data': dict({ + 'duration': 30, + 'startTime': 'T12:00Z', + }), + 'startTime': 'T12:00Z', + }), + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 3, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 3, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 4, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 4, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + dict({ + 'activated': False, + 'friday': None, + 'id': 5, + 'monday': None, + 'raw_data': dict({ + 'activated': False, + 'id': 5, + }), + 'saturday': None, + 'sunday': None, + 'thursday': None, + 'tuesday': None, + 'wednesday': None, + }), + ]) +# --- diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index 
4e3460b9afa..831204c59b4 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -8,6 +8,7 @@ import pytest from renault_api.exceptions import RenaultException from renault_api.kamereon import schemas from renault_api.kamereon.models import ChargeSchedule +from syrupy import SnapshotAssertion from homeassistant.components.renault.const import DOMAIN from homeassistant.components.renault.services import ( @@ -143,7 +144,7 @@ async def test_service_set_ac_start_with_date( async def test_service_set_charge_schedule( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test that service invokes renault_api with correct data.""" await hass.config_entries.async_setup(config_entry.entry_id) @@ -176,11 +177,11 @@ async def test_service_set_charge_schedule( ) assert len(mock_action.mock_calls) == 1 mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_action.mock_calls[0][1] == (mock_call_data,) + assert mock_call_data == snapshot async def test_service_set_charge_schedule_multi( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: ConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test that service invokes renault_api with correct data.""" await hass.config_entries.async_setup(config_entry.entry_id) @@ -225,7 +226,7 @@ async def test_service_set_charge_schedule_multi( ) assert len(mock_action.mock_calls) == 1 mock_call_data: list[ChargeSchedule] = mock_action.mock_calls[0][1][0] - assert mock_action.mock_calls[0][1] == (mock_call_data,) + assert mock_call_data == snapshot # Monday updated with new values assert mock_call_data[1].monday.startTime == "T12:00Z" From 077edb08f6a940d7fe6786f2b7cf45ac8908b11c Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 2 Sep 2024 11:27:31 +0200 Subject: [PATCH 1323/1635] Bump fyta_cli to 0.6.6 (#125065) --- homeassistant/components/fyta/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index c07a19a3db0..dbd44ed34dc 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["fyta_cli==0.6.3"] + "requirements": ["fyta_cli==0.6.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index bac5b32ce89..283f096fd0e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -924,7 +924,7 @@ freesms==0.2.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.6.3 +fyta_cli==0.6.6 # homeassistant.components.google_translate gTTS==2.2.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4724f5d38c2..97a49ef04f8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -777,7 +777,7 @@ freebox-api==1.1.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.6.3 +fyta_cli==0.6.6 # homeassistant.components.google_translate gTTS==2.2.4 From 9334099bedebd16dc4552b82f7c5f8fb323c53b4 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 1 Sep 2024 23:28:42 -1000 Subject: [PATCH 1324/1635] Bump habluetooth to 3.4.0 (#125058) changelog: https://github.com/Bluetooth-Devices/habluetooth/compare/v3.3.2...v3.4.0 --- homeassistant/components/bluetooth/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 027e2450bb4..0d17be70e0b 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -20,6 +20,6 @@ "bluetooth-auto-recovery==1.4.2", "bluetooth-data-tools==1.20.0", "dbus-fast==2.24.0", - "habluetooth==3.3.2" + "habluetooth==3.4.0" ] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 414bff657a0..0b91d1e792c 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -27,7 +27,7 @@ dbus-fast==2.24.0 fnv-hash-fast==1.0.2 ha-av==10.1.1 ha-ffmpeg==3.2.0 -habluetooth==3.3.2 +habluetooth==3.4.0 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 diff --git a/requirements_all.txt b/requirements_all.txt index 283f096fd0e..5d26a6dafa0 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1060,7 +1060,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.3.2 +habluetooth==3.4.0 # homeassistant.components.cloud hass-nabucasa==0.81.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 97a49ef04f8..d1aa76a4950 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -895,7 +895,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.3.2 +habluetooth==3.4.0 # homeassistant.components.cloud hass-nabucasa==0.81.1 From 2ce6bd2378c28e0ffd1d0fc36d519fcae1310f76 Mon Sep 17 00:00:00 2001 From: Nidre Date: Mon, 2 Sep 2024 13:28:49 +0300 Subject: [PATCH 1325/1635] Update Matter light transition blocklist to include YNDX LightStrip (#124657) --- homeassistant/components/matter/light.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/matter/light.py b/homeassistant/components/matter/light.py index 58ef8081fa9..bcac945562a 100644 --- a/homeassistant/components/matter/light.py +++ b/homeassistant/components/matter/light.py @@ -66,6 +66,7 @@ TRANSITION_BLOCKLIST = ( (4999, 25057, "1.0", "27.0"), (5009, 514, "1.0", "1.0.0"), (5010, 769, "3.0", "1.0.0"), + (5130, 544, "v0.4", "6.7.196e9d4e08-14"), ) From f4a16c8dc9278a284ab4a65bc488622d5cf10ce2 Mon Sep 17 00:00:00 2001 From: tronikos Date: Mon, 2 Sep 2024 04:07:12 -0700 Subject: [PATCH 1326/1635] Add strict typing in Google Cloud (#125068) --- .strict-typing | 1 + .../components/google_cloud/helpers.py | 8 ++-- homeassistant/components/google_cloud/tts.py | 44 +++++++++++++------ mypy.ini | 10 +++++ 4 files changed, 45 insertions(+), 18 deletions(-) diff --git a/.strict-typing b/.strict-typing index fb35bc5d227..797a1b51293 100644 --- a/.strict-typing +++ b/.strict-typing @@ -210,6 +210,7 @@ homeassistant.components.glances.* homeassistant.components.goalzero.* homeassistant.components.google.* homeassistant.components.google_assistant_sdk.* +homeassistant.components.google_cloud.* homeassistant.components.google_photos.* homeassistant.components.google_sheets.* homeassistant.components.gpsd.* diff --git 
a/homeassistant/components/google_cloud/helpers.py b/homeassistant/components/google_cloud/helpers.py index 8ae6a456a4f..940bae709d8 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -4,7 +4,6 @@ from __future__ import annotations import functools import operator -from types import MappingProxyType from typing import Any from google.cloud import texttospeech @@ -51,8 +50,9 @@ async def async_tts_voices( def tts_options_schema( - config_options: MappingProxyType[str, Any], voices: dict[str, list[str]] -): + config_options: dict[str, Any], + voices: dict[str, list[str]], +) -> vol.Schema: """Return schema for TTS options with default values from config or constants.""" return vol.Schema( { @@ -152,7 +152,7 @@ def tts_options_schema( ) -def tts_platform_schema(): +def tts_platform_schema() -> vol.Schema: """Return schema for TTS platform.""" return vol.Schema( { diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index ee9999fc496..29f7e10a580 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -2,6 +2,7 @@ import logging import os +from typing import Any, cast from google.api_core.exceptions import GoogleAPIError from google.cloud import texttospeech @@ -11,9 +12,11 @@ from homeassistant.components.tts import ( CONF_LANG, PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, Provider, + TtsAudioType, Voice, ) from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import ( CONF_ENCODING, @@ -34,7 +37,11 @@ _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = TTS_PLATFORM_SCHEMA.extend(tts_platform_schema().schema) -async def async_get_engine(hass, config, discovery_info=None): +async def async_get_engine( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, +) -> Provider | None: """Set up Google Cloud TTS component.""" if key_file := config.get(CONF_KEY_FILE): key_file = hass.config.path(key_file) @@ -42,7 +49,7 @@ async def async_get_engine(hass, config, discovery_info=None): _LOGGER.error("File %s doesn't exist", key_file) return None if key_file: - client = texttospeech.TextToSpeechAsyncClient.from_service_account_json( + client = texttospeech.TextToSpeechAsyncClient.from_service_account_file( key_file ) else: @@ -69,8 +76,8 @@ class GoogleCloudTTSProvider(Provider): hass: HomeAssistant, client: texttospeech.TextToSpeechAsyncClient, voices: dict[str, list[str]], - language, - options_schema, + language: str, + options_schema: vol.Schema, ) -> None: """Init Google Cloud TTS service.""" self.hass = hass @@ -81,24 +88,24 @@ class GoogleCloudTTSProvider(Provider): self._options_schema = options_schema @property - def supported_languages(self): - """Return list of supported languages.""" + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" return list(self._voices) @property - def default_language(self): + def default_language(self) -> str: """Return the default language.""" return self._language @property - def supported_options(self): + def supported_options(self) -> list[str]: """Return a list of supported options.""" return [option.schema for option in self._options_schema.schema] @property - def default_options(self): + def default_options(self) -> dict[str, Any]: """Return a dict including default options.""" - return self._options_schema({}) + return 
cast(dict[str, Any], self._options_schema({})) @callback def async_get_supported_voices(self, language: str) -> list[Voice] | None: @@ -107,16 +114,25 @@ class GoogleCloudTTSProvider(Provider): return None return [Voice(voice, voice) for voice in voices] - async def async_get_tts_audio(self, message, language, options): - """Load TTS from google.""" + async def async_get_tts_audio( + self, + message: str, + language: str, + options: dict[str, Any], + ) -> TtsAudioType: + """Load TTS from Google Cloud.""" try: options = self._options_schema(options) except vol.Invalid as err: _LOGGER.error("Error: %s when validating options: %s", err, options) return None, None - encoding = texttospeech.AudioEncoding[options[CONF_ENCODING]] - gender = texttospeech.SsmlVoiceGender[options[CONF_GENDER]] + encoding: texttospeech.AudioEncoding = texttospeech.AudioEncoding[ + options[CONF_ENCODING] + ] # type: ignore[misc] + gender: texttospeech.SsmlVoiceGender | None = texttospeech.SsmlVoiceGender[ + options[CONF_GENDER] + ] # type: ignore[misc] voice = options[CONF_VOICE] if voice: gender = None diff --git a/mypy.ini b/mypy.ini index 7fb8c49c8d9..c29db45cd53 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1856,6 +1856,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.google_cloud.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.google_photos.*] check_untyped_defs = true disallow_incomplete_defs = true From d40e3145fe3f7eca87e2e5fed784de1a8ea4a65f Mon Sep 17 00:00:00 2001 From: tronikos Date: Mon, 2 Sep 2024 04:30:18 -0700 Subject: [PATCH 1327/1635] Setup Google Cloud from the UI (#121502) * Google Cloud can now be setup from the UI * mypy * Add BaseGoogleCloudProvider * Allow clearing options in the UI * Address feedback * Don't translate Google Cloud title * mypy * Revert strict typing changes * Address comments --- CODEOWNERS | 3 +- .../components/google_cloud/__init__.py | 25 +++ .../components/google_cloud/config_flow.py | 169 ++++++++++++++++ .../components/google_cloud/const.py | 4 + .../components/google_cloud/helpers.py | 44 +++-- .../components/google_cloud/manifest.json | 7 +- .../components/google_cloud/strings.json | 32 +++ homeassistant/components/google_cloud/tts.py | 134 +++++++++++-- homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 +- requirements_test_all.txt | 3 + tests/components/google_cloud/__init__.py | 1 + tests/components/google_cloud/conftest.py | 122 ++++++++++++ .../google_cloud/test_config_flow.py | 183 ++++++++++++++++++ 14 files changed, 696 insertions(+), 38 deletions(-) create mode 100644 homeassistant/components/google_cloud/config_flow.py create mode 100644 homeassistant/components/google_cloud/strings.json create mode 100644 tests/components/google_cloud/__init__.py create mode 100644 tests/components/google_cloud/conftest.py create mode 100644 tests/components/google_cloud/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index 7b8b4ec1106..f4c7d972f7c 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -549,7 +549,8 @@ build.json @home-assistant/supervisor /tests/components/google_assistant/ @home-assistant/cloud /homeassistant/components/google_assistant_sdk/ @tronikos /tests/components/google_assistant_sdk/ @tronikos 
-/homeassistant/components/google_cloud/ @lufton +/homeassistant/components/google_cloud/ @lufton @tronikos +/tests/components/google_cloud/ @lufton @tronikos /homeassistant/components/google_generative_ai_conversation/ @tronikos /tests/components/google_generative_ai_conversation/ @tronikos /homeassistant/components/google_mail/ @tkdrob diff --git a/homeassistant/components/google_cloud/__init__.py b/homeassistant/components/google_cloud/__init__.py index 97b669245d2..84848543790 100644 --- a/homeassistant/components/google_cloud/__init__.py +++ b/homeassistant/components/google_cloud/__init__.py @@ -1 +1,26 @@ """The google_cloud component.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +PLATFORMS = [Platform.TTS] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a config entry.""" + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(async_update_options)) + return True + + +async def async_update_options(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/google_cloud/config_flow.py b/homeassistant/components/google_cloud/config_flow.py new file mode 100644 index 00000000000..bf97de67eb1 --- /dev/null +++ b/homeassistant/components/google_cloud/config_flow.py @@ -0,0 +1,169 @@ +"""Config flow for the Google Cloud integration.""" + +from __future__ import annotations + +import json +import logging +from typing import TYPE_CHECKING, Any, cast + +from google.cloud import texttospeech +import voluptuous as vol + +from homeassistant.components.file_upload import process_uploaded_file +from homeassistant.components.tts import CONF_LANG +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlowWithConfigEntry, +) +from homeassistant.core import callback +from homeassistant.helpers.selector import ( + FileSelector, + FileSelectorConfig, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) + +from .const import CONF_KEY_FILE, CONF_SERVICE_ACCOUNT_INFO, DEFAULT_LANG, DOMAIN, TITLE +from .helpers import ( + async_tts_voices, + tts_options_schema, + tts_platform_schema, + validate_service_account_info, +) + +_LOGGER = logging.getLogger(__name__) + +UPLOADED_KEY_FILE = "uploaded_key_file" + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(UPLOADED_KEY_FILE): FileSelector( + FileSelectorConfig(accept=".json,application/json") + ) + } +) + + +class GoogleCloudConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Google Cloud integration.""" + + VERSION = 1 + + _name: str | None = None + entry: ConfigEntry | None = None + abort_reason: str | None = None + + def _parse_uploaded_file(self, uploaded_file_id: str) -> dict[str, Any]: + """Read and parse an uploaded JSON file.""" + with process_uploaded_file(self.hass, uploaded_file_id) as file_path: + contents = file_path.read_text() + return cast(dict[str, Any], json.loads(contents)) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial 
step.""" + errors: dict[str, Any] = {} + if user_input is not None: + try: + service_account_info = await self.hass.async_add_executor_job( + self._parse_uploaded_file, user_input[UPLOADED_KEY_FILE] + ) + validate_service_account_info(service_account_info) + except ValueError: + _LOGGER.exception("Reading uploaded JSON file failed") + errors["base"] = "invalid_file" + else: + data = {CONF_SERVICE_ACCOUNT_INFO: service_account_info} + if self.entry: + if TYPE_CHECKING: + assert self.abort_reason + return self.async_update_reload_and_abort( + self.entry, data=data, reason=self.abort_reason + ) + return self.async_create_entry(title=TITLE, data=data) + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + description_placeholders={ + "url": "https://console.cloud.google.com/apis/credentials/serviceaccountkey" + }, + ) + + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import Google Cloud configuration from YAML.""" + + def _read_key_file() -> dict[str, Any]: + with open( + self.hass.config.path(import_data[CONF_KEY_FILE]), encoding="utf8" + ) as f: + return cast(dict[str, Any], json.load(f)) + + service_account_info = await self.hass.async_add_executor_job(_read_key_file) + try: + validate_service_account_info(service_account_info) + except ValueError: + _LOGGER.exception("Reading credentials JSON file failed") + return self.async_abort(reason="invalid_file") + options = { + k: v for k, v in import_data.items() if k in tts_platform_schema().schema + } + options.pop(CONF_KEY_FILE) + _LOGGER.debug("Creating imported config entry with options: %s", options) + return self.async_create_entry( + title=TITLE, + data={CONF_SERVICE_ACCOUNT_INFO: service_account_info}, + options=options, + ) + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> GoogleCloudOptionsFlowHandler: + """Create the options flow.""" + return GoogleCloudOptionsFlowHandler(config_entry) + + +class GoogleCloudOptionsFlowHandler(OptionsFlowWithConfigEntry): + """Google Cloud options flow.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + if user_input is not None: + return self.async_create_entry(data=user_input) + + service_account_info = self.config_entry.data[CONF_SERVICE_ACCOUNT_INFO] + client: texttospeech.TextToSpeechAsyncClient = ( + texttospeech.TextToSpeechAsyncClient.from_service_account_info( + service_account_info + ) + ) + voices = await async_tts_voices(client) + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Optional( + CONF_LANG, + default=DEFAULT_LANG, + ): SelectSelector( + SelectSelectorConfig( + mode=SelectSelectorMode.DROPDOWN, options=list(voices) + ) + ), + **tts_options_schema( + self.options, voices, from_config_flow=True + ).schema, + } + ), + self.options, + ), + ) diff --git a/homeassistant/components/google_cloud/const.py b/homeassistant/components/google_cloud/const.py index 0fbd5e78274..6a718bf35d3 100644 --- a/homeassistant/components/google_cloud/const.py +++ b/homeassistant/components/google_cloud/const.py @@ -2,6 +2,10 @@ from __future__ import annotations +DOMAIN = "google_cloud" +TITLE = "Google Cloud" + +CONF_SERVICE_ACCOUNT_INFO = "service_account_info" CONF_KEY_FILE = "key_file" DEFAULT_LANG = "en-US" diff --git a/homeassistant/components/google_cloud/helpers.py 
b/homeassistant/components/google_cloud/helpers.py index 940bae709d8..3c614156132 100644 --- a/homeassistant/components/google_cloud/helpers.py +++ b/homeassistant/components/google_cloud/helpers.py @@ -2,11 +2,13 @@ from __future__ import annotations +from collections.abc import Mapping import functools import operator from typing import Any from google.cloud import texttospeech +from google.oauth2.service_account import Credentials import voluptuous as vol from homeassistant.components.tts import CONF_LANG @@ -52,14 +54,18 @@ async def async_tts_voices( def tts_options_schema( config_options: dict[str, Any], voices: dict[str, list[str]], + from_config_flow: bool = False, ) -> vol.Schema: """Return schema for TTS options with default values from config or constants.""" + # If we are called from the config flow we want the defaults to be from constants + # to allow clearing the current value (passed as suggested_value) in the UI. + # If we aren't called from the config flow we want the defaults to be from the config. + defaults = {} if from_config_flow else config_options return vol.Schema( { vol.Optional( CONF_GENDER, - description={"suggested_value": config_options.get(CONF_GENDER)}, - default=config_options.get( + default=defaults.get( CONF_GENDER, texttospeech.SsmlVoiceGender.NEUTRAL.name, # type: ignore[attr-defined] ), @@ -74,8 +80,7 @@ def tts_options_schema( ), vol.Optional( CONF_VOICE, - description={"suggested_value": config_options.get(CONF_VOICE)}, - default=config_options.get(CONF_VOICE, DEFAULT_VOICE), + default=defaults.get(CONF_VOICE, DEFAULT_VOICE), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -84,8 +89,7 @@ def tts_options_schema( ), vol.Optional( CONF_ENCODING, - description={"suggested_value": config_options.get(CONF_ENCODING)}, - default=config_options.get( + default=defaults.get( CONF_ENCODING, texttospeech.AudioEncoding.MP3.name, # type: ignore[attr-defined] ), @@ -100,23 +104,19 @@ def tts_options_schema( ), vol.Optional( CONF_SPEED, - description={"suggested_value": config_options.get(CONF_SPEED)}, - default=config_options.get(CONF_SPEED, 1.0), + default=defaults.get(CONF_SPEED, 1.0), ): NumberSelector(NumberSelectorConfig(min=0.25, max=4.0, step=0.01)), vol.Optional( CONF_PITCH, - description={"suggested_value": config_options.get(CONF_PITCH)}, - default=config_options.get(CONF_PITCH, 0), + default=defaults.get(CONF_PITCH, 0), ): NumberSelector(NumberSelectorConfig(min=-20.0, max=20.0, step=0.1)), vol.Optional( CONF_GAIN, - description={"suggested_value": config_options.get(CONF_GAIN)}, - default=config_options.get(CONF_GAIN, 0), + default=defaults.get(CONF_GAIN, 0), ): NumberSelector(NumberSelectorConfig(min=-96.0, max=16.0, step=0.1)), vol.Optional( CONF_PROFILES, - description={"suggested_value": config_options.get(CONF_PROFILES)}, - default=config_options.get(CONF_PROFILES, []), + default=defaults.get(CONF_PROFILES, []), ): SelectSelector( SelectSelectorConfig( mode=SelectSelectorMode.DROPDOWN, @@ -137,8 +137,7 @@ def tts_options_schema( ), vol.Optional( CONF_TEXT_TYPE, - description={"suggested_value": config_options.get(CONF_TEXT_TYPE)}, - default=config_options.get(CONF_TEXT_TYPE, "text"), + default=defaults.get(CONF_TEXT_TYPE, "text"), ): vol.All( vol.Lower, SelectSelector( @@ -166,3 +165,16 @@ def tts_platform_schema() -> vol.Schema: ), } ) + + +def validate_service_account_info(info: Mapping[str, str]) -> None: + """Validate service account info. + + Args: + info: The service account info in Google format. 
+ + Raises: + ValueError: If the info is not in the expected format. + + """ + Credentials.from_service_account_info(info) # type:ignore[no-untyped-call] diff --git a/homeassistant/components/google_cloud/manifest.json b/homeassistant/components/google_cloud/manifest.json index 052fa79eef4..d0dda80a870 100644 --- a/homeassistant/components/google_cloud/manifest.json +++ b/homeassistant/components/google_cloud/manifest.json @@ -1,8 +1,11 @@ { "domain": "google_cloud", - "name": "Google Cloud Platform", - "codeowners": ["@lufton"], + "name": "Google Cloud", + "codeowners": ["@lufton", "@tronikos"], + "config_flow": true, + "dependencies": ["file_upload"], "documentation": "https://www.home-assistant.io/integrations/google_cloud", + "integration_type": "service", "iot_class": "cloud_push", "requirements": ["google-cloud-texttospeech==2.17.2"] } diff --git a/homeassistant/components/google_cloud/strings.json b/homeassistant/components/google_cloud/strings.json new file mode 100644 index 00000000000..0a0804005de --- /dev/null +++ b/homeassistant/components/google_cloud/strings.json @@ -0,0 +1,32 @@ +{ + "config": { + "step": { + "user": { + "description": "Upload your Google Cloud service account JSON file that you can create at {url}.", + "data": { + "uploaded_key_file": "Upload service account JSON file" + } + } + }, + "error": { + "invalid_file": "Invalid service account JSON file" + } + }, + "options": { + "step": { + "init": { + "data": { + "language": "Default language of the voice", + "gender": "Default gender of the voice", + "voice": "Default voice name (overrides language and gender)", + "encoding": "Default audio encoder", + "speed": "Default rate/speed of the voice", + "pitch": "Default pitch of the voice", + "gain": "Default volume gain (in dB) of the voice", + "profiles": "Default audio profiles", + "text_type": "Default text type" + } + } + } + } +} diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index 29f7e10a580..d65a743c015 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -1,10 +1,12 @@ """Support for the Google Cloud TTS service.""" +from __future__ import annotations + import logging -import os +from pathlib import Path from typing import Any, cast -from google.api_core.exceptions import GoogleAPIError +from google.api_core.exceptions import GoogleAPIError, Unauthenticated from google.cloud import texttospeech import voluptuous as vol @@ -12,10 +14,14 @@ from homeassistant.components.tts import ( CONF_LANG, PLATFORM_SCHEMA as TTS_PLATFORM_SCHEMA, Provider, + TextToSpeechEntity, TtsAudioType, Voice, ) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from .const import ( @@ -25,10 +31,12 @@ from .const import ( CONF_KEY_FILE, CONF_PITCH, CONF_PROFILES, + CONF_SERVICE_ACCOUNT_INFO, CONF_SPEED, CONF_TEXT_TYPE, CONF_VOICE, DEFAULT_LANG, + DOMAIN, ) from .helpers import async_tts_voices, tts_options_schema, tts_platform_schema @@ -45,13 +53,20 @@ async def async_get_engine( """Set up Google Cloud TTS component.""" if key_file := config.get(CONF_KEY_FILE): key_file = hass.config.path(key_file) - if not os.path.isfile(key_file): + if not Path(key_file).is_file(): _LOGGER.error("File %s doesn't 
exist", key_file) return None if key_file: client = texttospeech.TextToSpeechAsyncClient.from_service_account_file( key_file ) + if not hass.config_entries.async_entries(DOMAIN): + _LOGGER.debug("Creating config entry by importing: %s", config) + hass.async_create_task( + hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + ) else: client = texttospeech.TextToSpeechAsyncClient() try: @@ -60,7 +75,6 @@ async def async_get_engine( _LOGGER.error("Error from calling list_voices: %s", err) return None return GoogleCloudTTSProvider( - hass, client, voices, config.get(CONF_LANG, DEFAULT_LANG), @@ -68,20 +82,51 @@ async def async_get_engine( ) -class GoogleCloudTTSProvider(Provider): - """The Google Cloud TTS API provider.""" +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Google Cloud text-to-speech.""" + service_account_info = config_entry.data[CONF_SERVICE_ACCOUNT_INFO] + client: texttospeech.TextToSpeechAsyncClient = ( + texttospeech.TextToSpeechAsyncClient.from_service_account_info( + service_account_info + ) + ) + try: + voices = await async_tts_voices(client) + except GoogleAPIError as err: + _LOGGER.error("Error from calling list_voices: %s", err) + if isinstance(err, Unauthenticated): + config_entry.async_start_reauth(hass) + return + options_schema = tts_options_schema(dict(config_entry.options), voices) + language = config_entry.options.get(CONF_LANG, DEFAULT_LANG) + async_add_entities( + [ + GoogleCloudTTSEntity( + config_entry, + client, + voices, + language, + options_schema, + ) + ] + ) + + +class BaseGoogleCloudProvider: + """The Google Cloud TTS base provider.""" def __init__( self, - hass: HomeAssistant, client: texttospeech.TextToSpeechAsyncClient, voices: dict[str, list[str]], language: str, options_schema: vol.Schema, ) -> None: - """Init Google Cloud TTS service.""" - self.hass = hass - self.name = "Google Cloud TTS" + """Init Google Cloud TTS base provider.""" self._client = client self._voices = voices self._language = language @@ -114,7 +159,7 @@ class GoogleCloudTTSProvider(Provider): return None return [Voice(voice, voice) for voice in voices] - async def async_get_tts_audio( + async def _async_get_tts_audio( self, message: str, language: str, @@ -155,11 +200,7 @@ class GoogleCloudTTSProvider(Provider): ), ) - try: - response = await self._client.synthesize_speech(request, timeout=10) - except GoogleAPIError as err: - _LOGGER.error("Error occurred during Google Cloud TTS call: %s", err) - return None, None + response = await self._client.synthesize_speech(request, timeout=10) if encoding == texttospeech.AudioEncoding.MP3: extension = "mp3" @@ -169,3 +210,64 @@ class GoogleCloudTTSProvider(Provider): extension = "wav" return extension, response.audio_content + + +class GoogleCloudTTSEntity(BaseGoogleCloudProvider, TextToSpeechEntity): + """The Google Cloud TTS entity.""" + + def __init__( + self, + entry: ConfigEntry, + client: texttospeech.TextToSpeechAsyncClient, + voices: dict[str, list[str]], + language: str, + options_schema: vol.Schema, + ) -> None: + """Init Google Cloud TTS entity.""" + super().__init__(client, voices, language, options_schema) + self._attr_unique_id = f"{entry.entry_id}-tts" + self._attr_name = entry.title + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Google", + model="Cloud", + entry_type=dr.DeviceEntryType.SERVICE, + ) + self._entry = entry + + 
async def async_get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load TTS from Google Cloud.""" + try: + return await self._async_get_tts_audio(message, language, options) + except GoogleAPIError as err: + _LOGGER.error("Error occurred during Google Cloud TTS call: %s", err) + if isinstance(err, Unauthenticated): + self._entry.async_start_reauth(self.hass) + return None, None + + +class GoogleCloudTTSProvider(BaseGoogleCloudProvider, Provider): + """The Google Cloud TTS API provider.""" + + def __init__( + self, + client: texttospeech.TextToSpeechAsyncClient, + voices: dict[str, list[str]], + language: str, + options_schema: vol.Schema, + ) -> None: + """Init Google Cloud TTS service.""" + super().__init__(client, voices, language, options_schema) + self.name = "Google Cloud TTS" + + async def async_get_tts_audio( + self, message: str, language: str, options: dict[str, Any] + ) -> TtsAudioType: + """Load TTS from Google Cloud.""" + try: + return await self._async_get_tts_audio(message, language, options) + except GoogleAPIError as err: + _LOGGER.error("Error occurred during Google Cloud TTS call: %s", err) + return None, None diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 912df1aee0f..5f46cb1013e 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -222,6 +222,7 @@ FLOWS = { "goodwe", "google", "google_assistant_sdk", + "google_cloud", "google_generative_ai_conversation", "google_mail", "google_photos", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 38958845782..e379851b37f 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2251,10 +2251,10 @@ "name": "Google Assistant SDK" }, "google_cloud": { - "integration_type": "hub", - "config_flow": false, + "integration_type": "service", + "config_flow": true, "iot_class": "cloud_push", - "name": "Google Cloud Platform" + "name": "Google Cloud" }, "google_domains": { "integration_type": "hub", diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d1aa76a4950..8dc22562398 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -836,6 +836,9 @@ google-api-python-client==2.71.0 # homeassistant.components.google_pubsub google-cloud-pubsub==2.23.0 +# homeassistant.components.google_cloud +google-cloud-texttospeech==2.17.2 + # homeassistant.components.google_generative_ai_conversation google-generativeai==0.7.2 diff --git a/tests/components/google_cloud/__init__.py b/tests/components/google_cloud/__init__.py new file mode 100644 index 00000000000..67e83b58c71 --- /dev/null +++ b/tests/components/google_cloud/__init__.py @@ -0,0 +1 @@ +"""Tests for the Google Cloud integration.""" diff --git a/tests/components/google_cloud/conftest.py b/tests/components/google_cloud/conftest.py new file mode 100644 index 00000000000..acde62144a9 --- /dev/null +++ b/tests/components/google_cloud/conftest.py @@ -0,0 +1,122 @@ +"""Tests helpers.""" + +from collections.abc import Generator +import json +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, patch + +from google.cloud.texttospeech_v1.types import cloud_tts +import pytest + +from homeassistant.components.google_cloud.const import ( + CONF_SERVICE_ACCOUNT_INFO, + DOMAIN, +) + +from tests.common import MockConfigEntry + +VALID_SERVICE_ACCOUNT_INFO = { + "type": "service_account", + 
"project_id": "my project id", + "private_key_id": "my private key if", + "private_key": "-----BEGIN PRIVATE KEY-----\nMIICdwIBADANBgkqhkiG9w0BAQEFAASCAmEwggJdAgEAAoGBAKYscIlwm7soDsHAz6L6YvUkCvkrX19rS6yeYOmovvhoK5WeYGWUsd8V72zmsyHB7XO94YgJVjvxfzn5K8bLePjFzwoSJjZvhBJ/ZQ05d8VmbvgyWUoPdG9oEa4fZ/lCYrXoaFdTot2xcJvrb/ZuiRl4s4eZpNeFYvVK/Am7UeFPAgMBAAECgYAUetOfzLYUudofvPCaKHu7tKZ5kQPfEa0w6BAPnBF1Mfl1JiDBRDMryFtKs6AOIAVwx00dY/Ex0BCbB3+Cr58H7t4NaPTJxCpmR09pK7o17B7xAdQv8+SynFNud9/5vQ5AEXMOLNwKiU7wpXT6Z7ZIibUBOR7ewsWgsHCDpN1iqQJBAOMODPTPSiQMwRAUHIc6GPleFSJnIz2PAoG3JOG9KFAL6RtIc19lob2ZXdbQdzKtjSkWo+O5W20WDNAl1k32h6MCQQC7W4ZCIY67mPbL6CxXfHjpSGF4Dr9VWJ7ZrKHr6XUoOIcEvsn/pHvWonjMdy93rQMSfOE8BKd/I1+GHRmNVgplAkAnSo4paxmsZVyfeKt7Jy2dMY+8tVZe17maUuQaAE7Sk00SgJYegwrbMYgQnWCTL39HBfj0dmYA2Zj8CCAuu6O7AkEAryFiYjaUAO9+4iNoL27+ZrFtypeeadyov7gKs0ZKaQpNyzW8A+Zwi7TbTeSqzic/E+z/bOa82q7p/6b7141xsQJBANCAcIwMcVb6KVCHlQbOtKspo5Eh4ZQi8bGl+IcwbQ6JSxeTx915IfAldgbuU047wOB04dYCFB2yLDiUGVXTifU=\n-----END PRIVATE KEY-----\n", + "client_email": "my client email", + "client_id": "my client id", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/service-account", + "universe_domain": "googleapis.com", +} + + +@pytest.fixture +def create_google_credentials_json(tmp_path: Path) -> str: + """Create googlecredentials.json.""" + file_path = tmp_path / "googlecredentials.json" + with open(file_path, "w", encoding="utf8") as f: + json.dump(VALID_SERVICE_ACCOUNT_INFO, f) + return str(file_path) + + +@pytest.fixture +def create_invalid_google_credentials_json(create_google_credentials_json: str) -> str: + """Create invalid googlecredentials.json.""" + invalid_service_account_info = VALID_SERVICE_ACCOUNT_INFO.copy() + invalid_service_account_info.pop("client_email") + with open(create_google_credentials_json, "w", encoding="utf8") as f: + json.dump(invalid_service_account_info, f) + return create_google_credentials_json + + +@pytest.fixture +def mock_process_uploaded_file( + create_google_credentials_json: str, +) -> Generator[MagicMock]: + """Mock upload certificate files.""" + with patch( + "homeassistant.components.google_cloud.config_flow.process_uploaded_file", + return_value=Path(create_google_credentials_json), + ) as mock_upload: + yield mock_upload + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + title="my Google Cloud title", + domain=DOMAIN, + data={CONF_SERVICE_ACCOUNT_INFO: VALID_SERVICE_ACCOUNT_INFO}, + ) + + +@pytest.fixture +def mock_api_tts() -> AsyncMock: + """Return a mocked TTS client.""" + mock_client = AsyncMock() + mock_client.list_voices.return_value = cloud_tts.ListVoicesResponse( + voices=[ + cloud_tts.Voice(language_codes=["en-US"], name="en-US-Standard-A"), + cloud_tts.Voice(language_codes=["en-US"], name="en-US-Standard-B"), + cloud_tts.Voice(language_codes=["el-GR"], name="el-GR-Standard-A"), + ] + ) + return mock_client + + +@pytest.fixture +def mock_api_tts_from_service_account_info( + mock_api_tts: AsyncMock, +) -> Generator[AsyncMock]: + """Return a mocked TTS client created with from_service_account_info.""" + with ( + patch( + "google.cloud.texttospeech.TextToSpeechAsyncClient.from_service_account_info", + return_value=mock_api_tts, + ), + ): + yield mock_api_tts + + +@pytest.fixture +def 
mock_api_tts_from_service_account_file( + mock_api_tts: AsyncMock, +) -> Generator[AsyncMock]: + """Return a mocked TTS client created with from_service_account_file.""" + with ( + patch( + "google.cloud.texttospeech.TextToSpeechAsyncClient.from_service_account_file", + return_value=mock_api_tts, + ), + ): + yield mock_api_tts + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.google_cloud.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry diff --git a/tests/components/google_cloud/test_config_flow.py b/tests/components/google_cloud/test_config_flow.py new file mode 100644 index 00000000000..a5a51052e66 --- /dev/null +++ b/tests/components/google_cloud/test_config_flow.py @@ -0,0 +1,183 @@ +"""Test the Google Cloud config flow.""" + +from unittest.mock import AsyncMock, MagicMock +from uuid import uuid4 + +from homeassistant import config_entries +from homeassistant.components import tts +from homeassistant.components.google_cloud.config_flow import UPLOADED_KEY_FILE +from homeassistant.components.google_cloud.const import ( + CONF_KEY_FILE, + CONF_SERVICE_ACCOUNT_INFO, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PLATFORM +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.setup import async_setup_component + +from .conftest import VALID_SERVICE_ACCOUNT_INFO + +from tests.common import MockConfigEntry + + +async def test_user_flow_success( + hass: HomeAssistant, + mock_process_uploaded_file: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow creates entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + uploaded_file = str(uuid4()) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {UPLOADED_KEY_FILE: uploaded_file}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "Google Cloud" + assert result["data"] == {CONF_SERVICE_ACCOUNT_INFO: VALID_SERVICE_ACCOUNT_INFO} + mock_process_uploaded_file.assert_called_with(hass, uploaded_file) + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_user_flow_missing_file( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow when uploaded file is missing.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {UPLOADED_KEY_FILE: str(uuid4())}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_file"} + assert len(mock_setup_entry.mock_calls) == 0 + + +async def test_user_flow_invalid_file( + hass: HomeAssistant, + create_invalid_google_credentials_json: str, + mock_process_uploaded_file: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test user flow when uploaded file is invalid.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + uploaded_file = str(uuid4()) + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"], + {UPLOADED_KEY_FILE: uploaded_file}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_file"} + mock_process_uploaded_file.assert_called_with(hass, uploaded_file) + assert len(mock_setup_entry.mock_calls) == 0 + + +async def test_import_flow( + hass: HomeAssistant, + create_google_credentials_json: str, + mock_api_tts_from_service_account_file: AsyncMock, + mock_api_tts_from_service_account_info: AsyncMock, +) -> None: + """Test the import flow.""" + assert not hass.config_entries.async_entries(DOMAIN) + assert await async_setup_component( + hass, + tts.DOMAIN, + { + tts.DOMAIN: {CONF_PLATFORM: DOMAIN} + | {CONF_KEY_FILE: create_google_credentials_json} + }, + ) + await hass.async_block_till_done() + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + config_entry = hass.config_entries.async_entries(DOMAIN)[0] + assert config_entry.state is config_entries.ConfigEntryState.LOADED + + +async def test_import_flow_invalid_file( + hass: HomeAssistant, + create_invalid_google_credentials_json: str, + mock_api_tts_from_service_account_file: AsyncMock, +) -> None: + """Test the import flow when the key file is invalid.""" + assert not hass.config_entries.async_entries(DOMAIN) + assert await async_setup_component( + hass, + tts.DOMAIN, + { + tts.DOMAIN: {CONF_PLATFORM: DOMAIN} + | {CONF_KEY_FILE: create_invalid_google_credentials_json} + }, + ) + await hass.async_block_till_done() + assert not hass.config_entries.async_entries(DOMAIN) + assert mock_api_tts_from_service_account_file.list_voices.call_count == 1 + + +async def test_options_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_api_tts_from_service_account_info: AsyncMock, +) -> None: + """Test options flow.""" + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + assert mock_api_tts_from_service_account_info.list_voices.call_count == 1 + + assert mock_config_entry.options == {} + + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + data_schema = result["data_schema"].schema + assert set(data_schema) == { + "language", + "gender", + "voice", + "encoding", + "speed", + "pitch", + "gain", + "profiles", + "text_type", + } + assert mock_api_tts_from_service_account_info.list_voices.call_count == 2 + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"language": "el-GR"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_config_entry.options == { + "language": "el-GR", + "gender": "NEUTRAL", + "voice": "", + "encoding": "MP3", + "speed": 1.0, + "pitch": 0.0, + "gain": 0.0, + "profiles": [], + "text_type": "text", + } + assert mock_api_tts_from_service_account_info.list_voices.call_count == 3 From fbfd8c48aaeae10a6386e0e5142cab6ee05e8f03 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 13:33:51 +0200 Subject: [PATCH 1328/1635] Remove unused event from recorder (#125067) --- homeassistant/components/recorder/core.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index c57274317e3..96a4f954c71 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -225,7 +225,6 @@ class Recorder(threading.Thread): self.event_session: Session | None = None 
self._get_session: Callable[[], Session] | None = None self._completed_first_database_setup: bool | None = None - self.async_migration_event = asyncio.Event() self.migration_in_progress = False self.migration_is_live = False self.use_legacy_events_index = False @@ -934,11 +933,6 @@ class Recorder(threading.Thread): return False - @callback - def _async_migration_started(self) -> None: - """Set the migration started event.""" - self.async_migration_event.set() - def _migrate_schema_offline( self, schema_status: migration.SchemaValidationStatus ) -> tuple[bool, migration.SchemaValidationStatus]: @@ -963,7 +957,6 @@ class Recorder(threading.Thread): "Database upgrade in progress", "recorder_database_migration", ) - self.hass.add_job(self._async_migration_started) return self._migrate_schema(schema_status, True) def _migrate_schema( From 114e254aa650e5eb8f2ad0db419db8bd1840513a Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 14:20:50 +0200 Subject: [PATCH 1329/1635] Don't raise when registering entity service with invalid schema (#125057) * Don't raise when registering entity service with invalid schema * Update homeassistant/helpers/service.py Co-authored-by: Robert Resch --------- Co-authored-by: Robert Resch --- homeassistant/helpers/service.py | 11 ++++++++++- tests/helpers/test_entity_component.py | 22 ++++++++++++---------- tests/helpers/test_entity_platform.py | 22 ++++++++++++---------- 3 files changed, 34 insertions(+), 21 deletions(-) diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 573073f3809..bb9490b9edd 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -1268,7 +1268,16 @@ def async_register_entity_service( # the check could be extended to require All/Any to have sub schema(s) # with all entity service fields elif not cv.is_entity_service_schema(schema): - raise HomeAssistantError("The schema is not an entity service schema") + # pylint: disable-next=import-outside-toplevel + from .frame import report + + report( + ( + "registers an entity service with a non entity service schema " + "which will stop working in HA Core 2025.9" + ), + error_if_core=False, + ) service_func: str | HassJob[..., Any] service_func = func if isinstance(func, str) else HassJob(func) diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 8f4ece09a17..9723b91eb9a 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -557,21 +557,22 @@ async def test_register_entity_service( async def test_register_entity_service_non_entity_service_schema( - hass: HomeAssistant, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test attempting to register a service with a non entity service schema.""" component = EntityComponent(_LOGGER, DOMAIN, hass) + expected_message = "registers an entity service with a non entity service schema" - for schema in ( - vol.Schema({"some": str}), - vol.All(vol.Schema({"some": str})), - vol.Any(vol.Schema({"some": str})), + for idx, schema in enumerate( + ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + ) ): - with pytest.raises( - HomeAssistantError, - match=("The schema is not an entity service schema"), - ): - component.async_register_entity_service("hello", schema, Mock()) + component.async_register_entity_service(f"hello_{idx}", schema, Mock()) + assert expected_message in caplog.text + caplog.clear() for idx, schema in 
enumerate( ( @@ -581,6 +582,7 @@ async def test_register_entity_service_non_entity_service_schema( ) ): component.async_register_entity_service(f"test_service_{idx}", schema, Mock()) + assert expected_message not in caplog.text async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 2b0598cfe9d..db83819085b 100644 --- a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -1811,23 +1811,24 @@ async def test_register_entity_service_none_schema( async def test_register_entity_service_non_entity_service_schema( - hass: HomeAssistant, + hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test attempting to register a service with a non entity service schema.""" entity_platform = MockEntityPlatform( hass, domain="mock_integration", platform_name="mock_platform", platform=None ) + expected_message = "registers an entity service with a non entity service schema" - for schema in ( - vol.Schema({"some": str}), - vol.All(vol.Schema({"some": str})), - vol.Any(vol.Schema({"some": str})), + for idx, schema in enumerate( + ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + ) ): - with pytest.raises( - HomeAssistantError, - match="The schema is not an entity service schema", - ): - entity_platform.async_register_entity_service("hello", schema, Mock()) + entity_platform.async_register_entity_service(f"hello_{idx}", schema, Mock()) + assert expected_message in caplog.text + caplog.clear() for idx, schema in enumerate( ( @@ -1839,6 +1840,7 @@ async def test_register_entity_service_non_entity_service_schema( entity_platform.async_register_entity_service( f"test_service_{idx}", schema, Mock() ) + assert expected_message not in caplog.text @pytest.mark.parametrize("update_before_add", [True, False]) From b99dceab74c42540a09b6b7fbed362c88e655233 Mon Sep 17 00:00:00 2001 From: LG-ThinQ-Integration Date: Mon, 2 Sep 2024 21:58:06 +0900 Subject: [PATCH 1330/1635] Do not retry LG thinq entry setup when a single coordinator failed (#125052) Do not retry entry setup when a single coordinator failed. Co-authored-by: jangwon.lee --- homeassistant/components/lg_thinq/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/lg_thinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py index 1a23b70d8a7..1e16ac7ec56 100644 --- a/homeassistant/components/lg_thinq/coordinator.py +++ b/homeassistant/components/lg_thinq/coordinator.py @@ -133,7 +133,7 @@ async def async_setup_device_coordinator( coordinator_list: list[DeviceDataUpdateCoordinator] = [] for sub_id in device_sub_ids: coordinator = DeviceDataUpdateCoordinator(hass, device_api, sub_id=sub_id) - await coordinator.async_config_entry_first_refresh() + await coordinator.async_refresh() # Finally add a device coordinator into the result list.
coordinator_list.append(coordinator) From baa876d4d9af99b41deb254bc2e99fba6433e85e Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 15:18:02 +0200 Subject: [PATCH 1331/1635] Remove lying comment from service.async_register_entity_service (#125079) --- homeassistant/helpers/service.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index bb9490b9edd..ac21f1da3fc 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -1264,9 +1264,6 @@ def async_register_entity_service( """ if schema is None or isinstance(schema, dict): schema = cv.make_entity_service_schema(schema) - # Do a sanity check to check this is a valid entity service schema, - # the check could be extended to require All/Any to have sub schema(s) - # with all entity service fields elif not cv.is_entity_service_schema(schema): # pylint: disable-next=import-outside-toplevel from .frame import report From df4bd721b5de5dcf9cc7ff6ce95214eb1848ef41 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 15:33:10 +0200 Subject: [PATCH 1332/1635] Deprecate template.attach (#124843) --- homeassistant/helpers/template.py | 16 +++++++++++++--- tests/components/script/test_blueprint.py | 1 - tests/helpers/test_service.py | 4 ---- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 6856983aa59..1786194b437 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -81,6 +81,7 @@ from . import ( label_registry, location as loc_helper, ) +from .deprecation import deprecated_function from .singleton import singleton from .translation import async_translate_state from .typing import TemplateVarsType @@ -207,15 +208,24 @@ def async_setup(hass: HomeAssistant) -> bool: @bind_hass +@deprecated_function( + "automatic setting of Template.hass introduced by HA Core PR #89242", + breaks_in_ha_version="2025.10", +) def attach(hass: HomeAssistant, obj: Any) -> None: + """Recursively attach hass to all template instances in list and dict.""" + return _attach(hass, obj) + + +def _attach(hass: HomeAssistant, obj: Any) -> None: """Recursively attach hass to all template instances in list and dict.""" if isinstance(obj, list): for child in obj: - attach(hass, child) + _attach(hass, child) elif isinstance(obj, collections.abc.Mapping): for child_key, child_value in obj.items(): - attach(hass, child_key) - attach(hass, child_value) + _attach(hass, child_key) + _attach(hass, child_value) elif isinstance(obj, Template): obj.hass = hass diff --git a/tests/components/script/test_blueprint.py b/tests/components/script/test_blueprint.py index aef22b93bcf..160b330c109 100644 --- a/tests/components/script/test_blueprint.py +++ b/tests/components/script/test_blueprint.py @@ -109,7 +109,6 @@ async def test_confirmable_notification( assert len(mock_call_action.mock_calls) == 1 _hass, config, variables, _context = mock_call_action.mock_calls[0][1] - template.attach(hass, config) rendered_config = template.render_complex(config, variables) assert rendered_config == { diff --git a/tests/helpers/test_service.py b/tests/helpers/test_service.py index 81cc189e1af..efe24fe4b8e 100644 --- a/tests/helpers/test_service.py +++ b/tests/helpers/test_service.py @@ -39,7 +39,6 @@ from homeassistant.helpers import ( device_registry as dr, entity_registry as er, service, - template, ) import homeassistant.helpers.config_validation as cv from 
homeassistant.loader import async_get_integration @@ -565,9 +564,6 @@ async def test_not_mutate_input(hass: HomeAssistant) -> None: config = cv.SERVICE_SCHEMA(config) orig = cv.SERVICE_SCHEMA(orig) - # Only change after call is each template getting hass attached - template.attach(hass, orig) - await service.async_call_from_config(hass, config, validate_config=False) assert orig == config From 1b1c1c2a55173e6e4dba6acff9a30f58bd89728d Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Mon, 2 Sep 2024 17:03:58 +0100 Subject: [PATCH 1333/1635] Call async_write_ha_state after ring update (#125096) Use async_write_ha_state after ring update --- homeassistant/components/ring/camera.py | 4 +++- homeassistant/components/ring/light.py | 2 +- homeassistant/components/ring/switch.py | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index b45803f3618..df71de29089 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -81,6 +81,8 @@ class RingCam(RingEntity[RingDoorBell], Camera): history_data = self._device.last_history if history_data: self._last_event = history_data[0] + # will call async_update to update the attributes and get the + # video url from the api self.async_schedule_update_ha_state(True) else: self._last_event = None @@ -183,7 +185,7 @@ class RingCam(RingEntity[RingDoorBell], Camera): await self._device.async_set_motion_detection(new_state) self._attr_motion_detection_enabled = new_state - self.async_schedule_update_ha_state(False) + self.async_write_ha_state() async def async_enable_motion_detection(self) -> None: """Enable motion detection in the camera.""" diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index f7f7f9b44ae..99c4105f4e9 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -86,7 +86,7 @@ class RingLight(RingEntity[RingStickUpCam], LightEntity): self._attr_is_on = new_state == OnOffState.ON self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.async_schedule_update_ha_state() + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on for 30 seconds.""" diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index 810011d68c8..effb43cedbe 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -87,7 +87,7 @@ class SirenSwitch(BaseRingSwitch): self._attr_is_on = new_state > 0 self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.async_schedule_update_ha_state() + self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: """Turn the siren on for 30 seconds.""" From 9ae59e5ea06d1cb96956e3493f0bb2b855a28587 Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Mon, 2 Sep 2024 17:18:45 +0100 Subject: [PATCH 1334/1635] Bump ring-doorbell to 0.9.3 (#125087) --- homeassistant/components/ring/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index 23e7b882efe..3aced8fd1ea 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -14,5 +14,5 @@ "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], "quality_scale": "silver", - "requirements": ["ring-doorbell[listen]==0.9.0"] + "requirements": ["ring-doorbell[listen]==0.9.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 5d26a6dafa0..e7455f29b75 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2520,7 +2520,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.9.0 +ring-doorbell[listen]==0.9.3 # homeassistant.components.fleetgo ritassist==0.9.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8dc22562398..4d0b9323e59 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2002,7 +2002,7 @@ reolink-aio==0.9.8 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.9.0 +ring-doorbell[listen]==0.9.3 # homeassistant.components.roku rokuecp==0.19.3 From 9f558d13e610075c7d99b8b7cbb98c207bc0dff7 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 19:32:01 +0200 Subject: [PATCH 1335/1635] Correct start version in recorder schema migration tests (#125090) * Correct start version in recorder schema migration tests * Remove default from states.last_updated_ts --- tests/components/recorder/db_schema_30.py | 3 +- .../components/recorder/test_v32_migration.py | 72 +++++++++++-------- 2 files changed, 42 insertions(+), 33 deletions(-) diff --git a/tests/components/recorder/db_schema_30.py b/tests/components/recorder/db_schema_30.py index 2668f610dfd..97c33334111 100644 --- a/tests/components/recorder/db_schema_30.py +++ b/tests/components/recorder/db_schema_30.py @@ -9,7 +9,6 @@ from __future__ import annotations from collections.abc import Callable from datetime import datetime, timedelta import logging -import time from typing import Any, Self, TypedDict, cast, overload import ciso8601 @@ -381,7 +380,7 @@ class States(Base): # type: ignore[misc,valid-type] ) # *** Not originally in v30, only added for recorder to startup ok last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True) last_updated_ts = Column( - TIMESTAMP_TYPE, default=time.time, index=True + TIMESTAMP_TYPE, index=True ) # *** Not originally in v30, only added for recorder to startup ok old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True) attributes_id = Column( diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 1006a03f4ec..8db2b9fa78c 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -1,5 +1,6 @@ """The tests for recorder platform migrating data from v30.""" +from collections.abc import Callable from datetime import timedelta import importlib import sys @@ -25,29 +26,38 @@ from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" 
-SCHEMA_MODULE = "tests.components.recorder.db_schema_32" +SCHEMA_MODULE_30 = "tests.components.recorder.db_schema_30" +SCHEMA_MODULE_32 = "tests.components.recorder.db_schema_32" -def _create_engine_test(*args, **kwargs): +def _create_engine_test(schema_module: str) -> Callable: """Test version of create_engine that initializes with old schema. This simulates an existing db with the old schema. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] - engine = create_engine(*args, **kwargs) - old_db_schema.Base.metadata.create_all(engine) - with Session(engine) as session: - session.add( - recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) - ) - session.add( - recorder.db_schema.SchemaChanges( - schema_version=old_db_schema.SCHEMA_VERSION + + def _create_engine_test(*args, **kwargs): + """Test version of create_engine that initializes with old schema. + + This simulates an existing db with the old schema. + """ + importlib.import_module(schema_module) + old_db_schema = sys.modules[schema_module] + engine = create_engine(*args, **kwargs) + old_db_schema.Base.metadata.create_all(engine) + with Session(engine) as session: + session.add( + recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) ) - ) - session.commit() - return engine + session.add( + recorder.db_schema.SchemaChanges( + schema_version=old_db_schema.SCHEMA_VERSION + ) + ) + session.commit() + return engine + + return _create_engine_test @pytest.mark.parametrize("enable_migrate_context_ids", [True]) @@ -60,8 +70,8 @@ async def test_migrate_times( caplog: pytest.LogCaptureFixture, ) -> None: """Test we can migrate times.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_30) + old_db_schema = sys.modules[SCHEMA_MODULE_30] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) now_timestamp = now.timestamp() @@ -108,7 +118,7 @@ async def test_migrate_times( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" @@ -216,8 +226,8 @@ async def test_migrate_can_resume_entity_id_post_migration( recorder_db_url: str, ) -> None: """Test we resume the entity id post migration after a restart.""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -259,7 +269,7 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" @@ -327,8 +337,8 @@ async def test_migrate_can_resume_ix_states_event_id_removed( This case tests the migration 
still happens if ix_states_event_id is removed from the states table. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -381,7 +391,7 @@ async def test_migrate_can_resume_ix_states_event_id_removed( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" @@ -463,8 +473,8 @@ async def test_out_of_disk_space_while_rebuild_states_table( This case tests the migration still happens if ix_states_event_id is removed from the states table. """ - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -517,7 +527,7 @@ async def test_out_of_disk_space_while_rebuild_states_table( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" @@ -643,8 +653,8 @@ async def test_out_of_disk_space_while_removing_foreign_key( removed when migrating to schema version 46, inspecting the schema in cleanup_legacy_states_event_ids is not likely to fail. 
""" - importlib.import_module(SCHEMA_MODULE) - old_db_schema = sys.modules[SCHEMA_MODULE] + importlib.import_module(SCHEMA_MODULE_32) + old_db_schema = sys.modules[SCHEMA_MODULE_32] now = dt_util.utcnow() one_second_past = now - timedelta(seconds=1) mock_state = State( @@ -697,7 +707,7 @@ async def test_out_of_disk_space_while_removing_foreign_key( patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), - patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), patch( "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" From 5300eddf3336330f955694070441fcaed0fc1650 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 2 Sep 2024 19:50:09 +0200 Subject: [PATCH 1336/1635] Remove roundig in Solarlog and add suggested_display_precision (#125094) * Remove roundig and add suggested_display_precision * Add suggested_unit_of_measurement * Put lamda in parentheses --- homeassistant/components/solarlog/sensor.py | 74 +++++++++++----- .../solarlog/snapshots/test_sensor.ambr | 88 +++++++++++++++++-- 2 files changed, 133 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/solarlog/sensor.py b/homeassistant/components/solarlog/sensor.py index 498429f70cf..91e18da1cb2 100644 --- a/homeassistant/components/solarlog/sensor.py +++ b/homeassistant/components/solarlog/sensor.py @@ -84,38 +84,47 @@ SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] = SolarLogCoordinatorSensorEntityDescription( key="yield_day", translation_key="yield_day", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.yield_day / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.yield_day, ), SolarLogCoordinatorSensorEntityDescription( key="yield_yesterday", translation_key="yield_yesterday", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.yield_yesterday / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.yield_yesterday, ), SolarLogCoordinatorSensorEntityDescription( key="yield_month", translation_key="yield_month", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.yield_month / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.yield_month, ), SolarLogCoordinatorSensorEntityDescription( key="yield_year", translation_key="yield_year", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.yield_year / 1000, 3), + value_fn=lambda data: data.yield_year, ), SolarLogCoordinatorSensorEntityDescription( key="yield_total", translation_key="yield_total", 
- native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value_fn=lambda data: round(data.yield_total / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.yield_total, ), SolarLogCoordinatorSensorEntityDescription( key="consumption_ac", @@ -128,38 +137,48 @@ SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] = SolarLogCoordinatorSensorEntityDescription( key="consumption_day", translation_key="consumption_day", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.consumption_day / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_day, ), SolarLogCoordinatorSensorEntityDescription( key="consumption_yesterday", translation_key="consumption_yesterday", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.consumption_yesterday / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_yesterday, ), SolarLogCoordinatorSensorEntityDescription( key="consumption_month", translation_key="consumption_month", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.consumption_month / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_month, ), SolarLogCoordinatorSensorEntityDescription( key="consumption_year", translation_key="consumption_year", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda data: round(data.consumption_year / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_year, ), SolarLogCoordinatorSensorEntityDescription( key="consumption_total", translation_key="consumption_total", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL, - value_fn=lambda data: round(data.consumption_total / 1000, 3), + suggested_display_precision=3, + value_fn=lambda data: data.consumption_total, ), SolarLogCoordinatorSensorEntityDescription( key="self_consumption_year", @@ -190,6 +209,7 @@ SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] = native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, value_fn=lambda data: data.capacity, ), SolarLogCoordinatorSensorEntityDescription( @@ -198,6 +218,7 @@ SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] 
= native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, value_fn=lambda data: data.efficiency, ), SolarLogCoordinatorSensorEntityDescription( @@ -214,6 +235,7 @@ SOLARLOG_SENSOR_TYPES: tuple[SolarLogCoordinatorSensorEntityDescription, ...] = native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.POWER_FACTOR, state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, value_fn=lambda data: data.usage, ), ) @@ -230,11 +252,15 @@ INVERTER_SENSOR_TYPES: tuple[SolarLogInverterSensorEntityDescription, ...] = ( SolarLogInverterSensorEntityDescription( key="consumption_year", translation_key="consumption_year", - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, - value_fn=lambda inverter: None - if inverter.consumption_year is None - else round(inverter.consumption_year / 1000, 3), + suggested_display_precision=3, + value_fn=( + lambda inverter: None + if inverter.consumption_year is None + else inverter.consumption_year + ), ), ) diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 6fccbd89dba..9f95e04a38f 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -71,6 +71,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -170,6 +176,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -322,6 +334,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -422,6 +437,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -446,7 +467,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.005', + 'state': '0.00531', }) # --- # name: test_all_entities[sensor.solarlog_consumption_month-entry] @@ -470,6 +491,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -520,6 +547,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -569,6 +602,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -617,6 +656,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 
'original_icon': None, @@ -641,7 +686,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.007', + 'state': '0.00734', }) # --- # name: test_all_entities[sensor.solarlog_efficiency-entry] @@ -667,6 +712,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -1017,6 +1065,9 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), }), 'original_device_class': , 'original_icon': None, @@ -1168,6 +1219,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1192,7 +1249,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.004', + 'state': '0.00421', }) # --- # name: test_all_entities[sensor.solarlog_yield_month-entry] @@ -1216,6 +1273,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1266,6 +1329,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1315,6 +1384,9 @@ }), 'name': None, 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1339,7 +1411,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '1.023', + 'state': '1.0230', }) # --- # name: test_all_entities[sensor.solarlog_yield_yesterday-entry] @@ -1363,6 +1435,12 @@ }), 'name': None, 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), }), 'original_device_class': , 'original_icon': None, @@ -1387,6 +1465,6 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': '0.005', + 'state': '0.00521', }) # --- From 633c90485292a2673124637832d16013a2ebdcea Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Mon, 2 Sep 2024 20:04:33 +0200 Subject: [PATCH 1337/1635] Update frontend to 20240902.0 (#125093) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 7e934c887fa..50bcb3b3d97 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240829.0"] + "requirements": ["home-assistant-frontend==20240902.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0b91d1e792c..1729e6e8131 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.4.0 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240829.0 +home-assistant-frontend==20240902.0 
home-assistant-intents==2024.8.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index e7455f29b75..16ff5a9a032 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1103,7 +1103,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240829.0 +home-assistant-frontend==20240902.0 # homeassistant.components.conversation home-assistant-intents==2024.8.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 4d0b9323e59..a453a5948fd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -929,7 +929,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240829.0 +home-assistant-frontend==20240902.0 # homeassistant.components.conversation home-assistant-intents==2024.8.29 From 7c4fd9473cb453d27586c39770196d84d67a0915 Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Mon, 2 Sep 2024 20:08:44 +0200 Subject: [PATCH 1338/1635] Add diagnostics to solarlog (#125072) * Add diagnostics to solarlog * Fix wrong comment --- .../components/solarlog/diagnostics.py | 27 ++++++++ .../solarlog/snapshots/test_diagnostics.ambr | 64 +++++++++++++++++++ tests/components/solarlog/test_diagnostics.py | 32 ++++++++++ 3 files changed, 123 insertions(+) create mode 100644 homeassistant/components/solarlog/diagnostics.py create mode 100644 tests/components/solarlog/snapshots/test_diagnostics.ambr create mode 100644 tests/components/solarlog/test_diagnostics.py diff --git a/homeassistant/components/solarlog/diagnostics.py b/homeassistant/components/solarlog/diagnostics.py new file mode 100644 index 00000000000..02f6c96edc2 --- /dev/null +++ b/homeassistant/components/solarlog/diagnostics.py @@ -0,0 +1,27 @@ +"""Provides diagnostics for Solarlog.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from . 
import SolarlogConfigEntry + +TO_REDACT = [ + CONF_HOST, +] + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: SolarlogConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data.data + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "solarlog_data": data.to_dict(), + } diff --git a/tests/components/solarlog/snapshots/test_diagnostics.ambr b/tests/components/solarlog/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..09ff3a333ee --- /dev/null +++ b/tests/components/solarlog/snapshots/test_diagnostics.ambr @@ -0,0 +1,64 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'extended_data': True, + 'host': '**REDACTED**', + 'name': 'Solarlog test 1 2 3', + }), + 'disabled_by': None, + 'domain': 'solarlog', + 'entry_id': 'ce5f5431554d101905d31797e1232da8', + 'minor_version': 2, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'solarlog', + 'unique_id': None, + 'version': 1, + }), + 'solarlog_data': dict({ + 'alternator_loss': 2.0, + 'capacity': 85.5, + 'consumption_ac': 54.87, + 'consumption_day': 5.31, + 'consumption_month': 758.0, + 'consumption_total': 354687.0, + 'consumption_year': 4587.0, + 'consumption_yesterday': 7.34, + 'efficiency': 98.1, + 'inverter_data': dict({ + '0': dict({ + 'consumption_year': 354687, + 'current_power': 5, + 'enabled': True, + 'name': 'Inverter 1', + }), + '1': dict({ + 'consumption_year': 354, + 'current_power': 6, + 'enabled': True, + 'name': 'Inverter 2', + }), + }), + 'last_updated': '2024-08-01T15:20:45+00:00', + 'power_ac': 100.0, + 'power_available': 45.13, + 'power_dc': 102.0, + 'production_year': None, + 'self_consumption_year': 545.0, + 'total_power': 120.0, + 'usage': 54.8, + 'voltage_ac': 100.0, + 'voltage_dc': 100.0, + 'yield_day': 4.21, + 'yield_month': 515.0, + 'yield_total': 56513.0, + 'yield_year': 1023.0, + 'yield_yesterday': 5.21, + }), + }) +# --- diff --git a/tests/components/solarlog/test_diagnostics.py b/tests/components/solarlog/test_diagnostics.py new file mode 100644 index 00000000000..bc0b020462d --- /dev/null +++ b/tests/components/solarlog/test_diagnostics.py @@ -0,0 +1,32 @@ +"""Test Solarlog diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . 
import setup_platform + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_solarlog_connector: AsyncMock, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_platform(hass, mock_config_entry, [Platform.SENSOR]) + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("created_at", "modified_at")) From 4c27bfbf7fac053f4811b73cb4eec813f88bf24a Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Mon, 2 Sep 2024 20:35:36 +0200 Subject: [PATCH 1339/1635] Cleanup removed options for mqtt climate (#125083) --- .../components/mqtt/abbreviations.py | 5 ---- homeassistant/components/mqtt/climate.py | 28 ------------------- 2 files changed, 33 deletions(-) diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index f4a32bbdf9d..3c1d0abdb66 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -6,9 +6,6 @@ ABBREVIATIONS = { "act_stat_t": "activity_state_topic", "act_val_tpl": "activity_value_template", "atype": "automation_type", - "aux_cmd_t": "aux_command_topic", - "aux_stat_tpl": "aux_state_template", - "aux_stat_t": "aux_state_topic", "av_tones": "available_tones", "avty": "availability", "avty_mode": "availability_mode", @@ -157,8 +154,6 @@ ABBREVIATIONS = { "pos_open": "position_open", "pow_cmd_t": "power_command_topic", "pow_cmd_tpl": "power_command_template", - "pow_stat_t": "power_state_topic", - "pow_stat_tpl": "power_state_template", "pr_mode_cmd_t": "preset_mode_command_topic", "pr_mode_cmd_tpl": "preset_mode_command_template", "pr_mode_stat_t": "preset_mode_state_topic", diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index 426bac8e9ca..ac276c37d71 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -93,13 +93,6 @@ _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "MQTT HVAC" -# Options CONF_AUX_COMMAND_TOPIC, CONF_AUX_STATE_TOPIC -# and CONF_AUX_STATE_TEMPLATE were deprecated in HA Core 2023.9 -# Support was removed in HA Core 2024.3 -CONF_AUX_COMMAND_TOPIC = "aux_command_topic" -CONF_AUX_STATE_TEMPLATE = "aux_state_template" -CONF_AUX_STATE_TOPIC = "aux_state_topic" - CONF_FAN_MODE_COMMAND_TEMPLATE = "fan_mode_command_template" CONF_FAN_MODE_COMMAND_TOPIC = "fan_mode_command_topic" CONF_FAN_MODE_LIST = "fan_modes" @@ -113,10 +106,6 @@ CONF_HUMIDITY_STATE_TOPIC = "target_humidity_state_topic" CONF_HUMIDITY_MAX = "max_humidity" CONF_HUMIDITY_MIN = "min_humidity" -# Support for CONF_POWER_STATE_TOPIC and CONF_POWER_STATE_TEMPLATE -# was removed in HA Core 2023.8 -CONF_POWER_STATE_TEMPLATE = "power_state_template" -CONF_POWER_STATE_TOPIC = "power_state_topic" CONF_PRESET_MODE_STATE_TOPIC = "preset_mode_state_topic" CONF_PRESET_MODE_COMMAND_TOPIC = "preset_mode_command_topic" CONF_PRESET_MODE_VALUE_TEMPLATE = "preset_mode_value_template" @@ -201,7 +190,6 @@ TOPIC_KEYS = ( CONF_MODE_COMMAND_TOPIC, CONF_MODE_STATE_TOPIC, CONF_POWER_COMMAND_TOPIC, - CONF_POWER_STATE_TOPIC, CONF_PRESET_MODE_COMMAND_TOPIC, CONF_PRESET_MODE_STATE_TOPIC, CONF_SWING_MODE_COMMAND_TOPIC, @@ -295,8 +283,6 @@ 
_PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( vol.Optional(CONF_PAYLOAD_OFF, default="OFF"): cv.string, vol.Optional(CONF_POWER_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_POWER_COMMAND_TEMPLATE): cv.template, - vol.Optional(CONF_POWER_STATE_TEMPLATE): cv.template, - vol.Optional(CONF_POWER_STATE_TOPIC): valid_subscribe_topic, vol.Optional(CONF_PRECISION): vol.In( [PRECISION_TENTHS, PRECISION_HALVES, PRECISION_WHOLE] ), @@ -343,16 +329,6 @@ _PLATFORM_SCHEMA_BASE = MQTT_BASE_SCHEMA.extend( ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) PLATFORM_SCHEMA_MODERN = vol.All( - # Support for CONF_POWER_STATE_TOPIC and CONF_POWER_STATE_TEMPLATE - # was removed in HA Core 2023.8 - cv.removed(CONF_POWER_STATE_TEMPLATE), - cv.removed(CONF_POWER_STATE_TOPIC), - # Options CONF_AUX_COMMAND_TOPIC, CONF_AUX_STATE_TOPIC - # and CONF_AUX_STATE_TEMPLATE were deprecated in HA Core 2023.9 - # Support was removed in HA Core 2024.3 - cv.removed(CONF_AUX_COMMAND_TOPIC), - cv.removed(CONF_AUX_STATE_TEMPLATE), - cv.removed(CONF_AUX_STATE_TOPIC), _PLATFORM_SCHEMA_BASE, valid_preset_mode_configuration, valid_humidity_range_configuration, @@ -363,10 +339,6 @@ _DISCOVERY_SCHEMA_BASE = _PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA DISCOVERY_SCHEMA = vol.All( _DISCOVERY_SCHEMA_BASE, - # Support for CONF_POWER_STATE_TOPIC and CONF_POWER_STATE_TEMPLATE - # was removed in HA Core 2023.8 - cv.removed(CONF_POWER_STATE_TEMPLATE), - cv.removed(CONF_POWER_STATE_TOPIC), valid_preset_mode_configuration, valid_humidity_range_configuration, valid_humidity_state_configuration, From 3206979488fcbbb281d35bcbfd5dcb8ab2f6cdb2 Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Mon, 2 Sep 2024 20:46:32 +0200 Subject: [PATCH 1340/1635] Add separate entities for temperature, humidity and pressure in AccuWeather integration (#125041) * Add temperature, humidity and pressure sensors * Make uv index sensor disabled by default * Fix type --- .../components/accuweather/sensor.py | 31 ++++ .../accuweather/snapshots/test_sensor.ambr | 159 ++++++++++++++++++ tests/components/accuweather/test_sensor.py | 1 + 3 files changed, 191 insertions(+) diff --git a/homeassistant/components/accuweather/sensor.py b/homeassistant/components/accuweather/sensor.py index fac3a2a4ba3..2f6b10b296f 100644 --- a/homeassistant/components/accuweather/sensor.py +++ b/homeassistant/components/accuweather/sensor.py @@ -18,6 +18,7 @@ from homeassistant.const import ( UV_INDEX, UnitOfIrradiance, UnitOfLength, + UnitOfPressure, UnitOfSpeed, UnitOfTemperature, UnitOfTime, @@ -279,6 +280,15 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]), translation_key="realfeel_temperature_shade", ), + AccuWeatherSensorDescription( + key="RelativeHumidity", + device_class=SensorDeviceClass.HUMIDITY, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + value_fn=lambda data: cast(int, data), + translation_key="humidity", + ), AccuWeatherSensorDescription( key="Precipitation", device_class=SensorDeviceClass.PRECIPITATION_INTENSITY, @@ -288,6 +298,16 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] 
= ( attr_fn=lambda data: {"type": data["PrecipitationType"]}, translation_key="precipitation", ), + AccuWeatherSensorDescription( + key="Pressure", + device_class=SensorDeviceClass.PRESSURE, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + native_unit_of_measurement=UnitOfPressure.HPA, + value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]), + translation_key="pressure", + ), AccuWeatherSensorDescription( key="PressureTendency", device_class=SensorDeviceClass.ENUM, @@ -295,9 +315,19 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( value_fn=lambda data: cast(str, data["LocalizedText"]).lower(), translation_key="pressure_tendency", ), + AccuWeatherSensorDescription( + key="Temperature", + device_class=SensorDeviceClass.TEMPERATURE, + entity_registry_enabled_default=False, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value_fn=lambda data: cast(float, data[API_METRIC][ATTR_VALUE]), + translation_key="temperature", + ), AccuWeatherSensorDescription( key="UVIndex", state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, native_unit_of_measurement=UV_INDEX, value_fn=lambda data: cast(int, data), attr_fn=lambda data: {ATTR_LEVEL: data["UVIndexText"]}, @@ -324,6 +354,7 @@ SENSOR_TYPES: tuple[AccuWeatherSensorDescription, ...] = ( AccuWeatherSensorDescription( key="Wind", device_class=SensorDeviceClass.WIND_SPEED, + entity_registry_enabled_default=False, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=UnitOfSpeed.KILOMETERS_PER_HOUR, value_fn=lambda data: cast(float, data[ATTR_SPEED][API_METRIC][ATTR_VALUE]), diff --git a/tests/components/accuweather/snapshots/test_sensor.ambr b/tests/components/accuweather/snapshots/test_sensor.ambr index 5e28be5a72b..3468d638bc0 100644 --- a/tests/components/accuweather/snapshots/test_sensor.ambr +++ b/tests/components/accuweather/snapshots/test_sensor.ambr @@ -1969,6 +1969,58 @@ 'state': '9.2', }) # --- +# name: test_sensor[sensor.home_humidity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_humidity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Humidity', + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'humidity', + 'unique_id': '0123456-relativehumidity', + 'unit_of_measurement': '%', + }) +# --- +# name: test_sensor[sensor.home_humidity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by AccuWeather', + 'device_class': 'humidity', + 'friendly_name': 'Home Humidity', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.home_humidity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '67', + }) +# --- # name: test_sensor[sensor.home_mold_pollen_day_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2267,6 +2319,61 @@ 'state': '0.0', }) # --- +# name: test_sensor[sensor.home_pressure-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ 
+ 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_pressure', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 0, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pressure', + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pressure', + 'unique_id': '0123456-pressure', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.home_pressure-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by AccuWeather', + 'device_class': 'pressure', + 'friendly_name': 'Home Pressure', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_pressure', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1012.0', + }) +# --- # name: test_sensor[sensor.home_pressure_tendency-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4145,6 +4252,58 @@ 'state': '276.1', }) # --- +# name: test_sensor[sensor.home_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.home_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'accuweather', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature', + 'unique_id': '0123456-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[sensor.home_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by AccuWeather', + 'device_class': 'temperature', + 'friendly_name': 'Home Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.home_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.6', + }) +# --- # name: test_sensor[sensor.home_thunderstorm_probability_day_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/accuweather/test_sensor.py b/tests/components/accuweather/test_sensor.py index 41c1c0d930a..37ebe260f39 100644 --- a/tests/components/accuweather/test_sensor.py +++ b/tests/components/accuweather/test_sensor.py @@ -148,6 +148,7 @@ async def test_manual_update_entity( assert mock_accuweather_client.async_get_current_conditions.call_count == 2 +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_imperial_units( hass: HomeAssistant, mock_accuweather_client: AsyncMock ) -> None: From 0b14f0a379d0d7053a10a89d2c5cc7242363dbcd Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 21:13:26 +0200 Subject: [PATCH 1341/1635] Add test of statistics timestamp migration (#125100) --- .../recorder/test_migration_from_schema_32.py | 168 +++++++++++++++++- 1 file changed, 167 insertions(+), 1 deletion(-) diff --git a/tests/components/recorder/test_migration_from_schema_32.py 
b/tests/components/recorder/test_migration_from_schema_32.py index b2a83ae8313..bc16eae3410 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -48,6 +48,7 @@ from .common import ( async_wait_recording_done, ) +from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine" @@ -94,7 +95,7 @@ def _create_engine_test(*args, **kwargs): return engine -@pytest.fixture(autouse=True) +@pytest.fixture def db_schema_32(): """Fixture to initialize the db with the old schema.""" importlib.import_module(SCHEMA_MODULE) @@ -118,6 +119,7 @@ def db_schema_32(): @pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_migrate_events_context_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -333,6 +335,7 @@ async def test_migrate_events_context_ids( @pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_migrate_states_context_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -530,6 +533,7 @@ async def test_migrate_states_context_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_migrate_event_type_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -621,6 +625,7 @@ async def test_migrate_event_type_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test we can migrate entity_ids to the StatesMeta table.""" await async_wait_recording_done(hass) @@ -697,6 +702,7 @@ async def test_migrate_entity_ids(hass: HomeAssistant, recorder_mock: Recorder) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_post_migrate_entity_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -750,6 +756,7 @@ async def test_post_migrate_entity_ids( @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_migrate_null_entity_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -833,6 +840,7 @@ async def test_migrate_null_entity_ids( @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +@pytest.mark.usefixtures("db_schema_32") async def test_migrate_null_event_type_ids( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -918,6 +926,7 @@ async def test_migrate_null_event_type_ids( ) +@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_conversion_is_reentrant( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -1070,6 +1079,7 @@ async def test_stats_timestamp_conversion_is_reentrant( ] +@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_with_one_by_one( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -1289,6 +1299,7 @@ async def test_stats_timestamp_with_one_by_one( ] +@pytest.mark.usefixtures("db_schema_32") async def test_stats_timestamp_with_one_by_one_removes_duplicates( hass: HomeAssistant, recorder_mock: Recorder ) -> None: @@ -1483,3 +1494,158 @@ async def test_stats_timestamp_with_one_by_one_removes_duplicates( "sum": None, }, ] + + +@pytest.mark.parametrize("persistent_database", [True]) 
+@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_migrate_times( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test we can migrate times in the statistics tables.""" + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + now = dt_util.utcnow() + now_timestamp = now.timestamp() + + statistics_kwargs = { + "created": now, + "mean": 0, + "metadata_id": 1, + "min": 0, + "max": 0, + "last_reset": now, + "start": now, + "state": 0, + "sum": 0, + } + mock_metadata = old_db_schema.StatisticMetaData( + has_mean=False, + has_sum=False, + name="Test", + source="sensor", + statistic_id="sensor.test", + unit_of_measurement="cats", + ) + number_of_migrations = 5 + + def _get_index_names(table): + with session_scope(hass=hass) as session: + return inspect(session.connection()).get_indexes(table) + + with ( + patch.object(recorder, "db_schema", old_db_schema), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch(CREATE_ENGINE_TARGET, new=_create_engine_test), + ): + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + + def _add_data(): + with session_scope(hass=hass) as session: + session.add(old_db_schema.StatisticsMeta.from_meta(mock_metadata)) + with session_scope(hass=hass) as session: + session.add(old_db_schema.Statistics(**statistics_kwargs)) + session.add(old_db_schema.StatisticsShortTerm(**statistics_kwargs)) + + await instance.async_add_executor_job(_add_data) + await hass.async_block_till_done() + await instance.async_block_till_done() + + statistics_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics" + ) + statistics_short_term_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics_short_term" + ) + statistics_index_names = {index["name"] for index in statistics_indexes} + statistics_short_term_index_names = { + index["name"] for index in statistics_short_term_indexes + } + + await hass.async_stop() + await hass.async_block_till_done() + + assert "ix_statistics_statistic_id_start" in statistics_index_names + assert ( + "ix_statistics_short_term_statistic_id_start" + in statistics_short_term_index_names + ) + + # Test that the times are migrated during migration from schema 32 + async with ( + async_test_home_assistant() as hass, + async_test_recorder(hass) as instance, + ): + await hass.async_block_till_done() + + # We need to wait for all the migration tasks to complete + # before we can check the database. 
+ for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + def _get_test_data_from_db(): + with session_scope(hass=hass) as session: + statistics_result = list( + session.query(recorder.db_schema.Statistics) + .join( + recorder.db_schema.StatisticsMeta, + recorder.db_schema.Statistics.metadata_id + == recorder.db_schema.StatisticsMeta.id, + ) + .where( + recorder.db_schema.StatisticsMeta.statistic_id == "sensor.test" + ) + ) + statistics_short_term_result = list( + session.query(recorder.db_schema.StatisticsShortTerm) + .join( + recorder.db_schema.StatisticsMeta, + recorder.db_schema.StatisticsShortTerm.metadata_id + == recorder.db_schema.StatisticsMeta.id, + ) + .where( + recorder.db_schema.StatisticsMeta.statistic_id == "sensor.test" + ) + ) + session.expunge_all() + return statistics_result, statistics_short_term_result + + ( + statistics_result, + statistics_short_term_result, + ) = await instance.async_add_executor_job(_get_test_data_from_db) + + for results in (statistics_result, statistics_short_term_result): + assert len(results) == 1 + assert results[0].created is None + assert results[0].created_ts == now_timestamp + assert results[0].last_reset is None + assert results[0].last_reset_ts == now_timestamp + assert results[0].start is None + assert results[0].start_ts == now_timestamp + + statistics_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics" + ) + statistics_short_term_indexes = await instance.async_add_executor_job( + _get_index_names, "statistics_short_term" + ) + statistics_index_names = {index["name"] for index in statistics_indexes} + statistics_short_term_index_names = { + index["name"] for index in statistics_short_term_indexes + } + + assert "ix_statistics_statistic_id_start" not in statistics_index_names + assert ( + "ix_statistics_short_term_statistic_id_start" + not in statistics_short_term_index_names + ) + + await hass.async_stop() From 3e350bdc906bbc6faf16c77c2ac0e303354c8780 Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Tue, 3 Sep 2024 05:22:39 +1000 Subject: [PATCH 1342/1635] Bump aiolifx to 1.0.9 and remove unused HomeKit model prefixes (#125055) Co-authored-by: J. 
Nick Koston --- homeassistant/components/lifx/manifest.json | 4 +--- homeassistant/generated/zeroconf.py | 8 -------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 3 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 08540702736..3ef70f16467 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -17,7 +17,6 @@ "models": [ "LIFX A19", "LIFX A21", - "LIFX B10", "LIFX Beam", "LIFX BR30", "LIFX Candle", @@ -41,7 +40,6 @@ "LIFX Round", "LIFX Square", "LIFX String", - "LIFX T10", "LIFX Tile", "LIFX White", "LIFX Z" @@ -50,7 +48,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.8", + "aiolifx==1.0.9", "aiolifx-effects==0.3.2", "aiolifx-themes==0.5.0" ] diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 36b0da4a9f4..2e3ffa23ff5 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -68,10 +68,6 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, - "LIFX B10": { - "always_discover": True, - "domain": "lifx", - }, "LIFX BR30": { "always_discover": True, "domain": "lifx", @@ -164,10 +160,6 @@ HOMEKIT = { "always_discover": True, "domain": "lifx", }, - "LIFX T10": { - "always_discover": True, - "domain": "lifx", - }, "LIFX Tile": { "always_discover": True, "domain": "lifx", diff --git a/requirements_all.txt b/requirements_all.txt index 16ff5a9a032..ba6313f6466 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -276,7 +276,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.8 +aiolifx==1.0.9 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a453a5948fd..98f18156cc0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -258,7 +258,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.8 +aiolifx==1.0.9 # homeassistant.components.livisi aiolivisi==0.0.19 From fb27297df9d0fff953afe824cbbb28eefa2fcf3f Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Mon, 2 Sep 2024 21:23:07 +0200 Subject: [PATCH 1343/1635] Fix area registry indexing when there is a name collision (#125050) --- homeassistant/helpers/area_registry.py | 5 +++-- tests/helpers/test_area_registry.py | 7 ++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index 3e101f185ed..5009ec654cf 100644 --- a/homeassistant/helpers/area_registry.py +++ b/homeassistant/helpers/area_registry.py @@ -153,22 +153,23 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): def _index_entry(self, key: str, entry: AreaEntry) -> None: """Index an entry.""" + super()._index_entry(key, entry) if entry.floor_id is not None: self._floors_index[entry.floor_id][key] = True for label in entry.labels: self._labels_index[label][key] = True - super()._index_entry(key, entry) def _unindex_entry( self, key: str, replacement_entry: AreaEntry | None = None ) -> None: + # always call base class before other indices + super()._unindex_entry(key, replacement_entry) entry = self.data[key] if labels := entry.labels: for label in labels: self._unindex_entry_value(key, label, self._labels_index) if 
floor_id := entry.floor_id: self._unindex_entry_value(key, floor_id, self._floors_index) - return super()._unindex_entry(key, replacement_entry) def get_areas_for_label(self, label: str) -> list[AreaEntry]: """Get areas for label.""" diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index ad571ac50cc..da1947adbc8 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -242,9 +242,12 @@ async def test_update_area_with_same_name_change_case( async def test_update_area_with_name_already_in_use( area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, ) -> None: """Make sure that we can't update an area with a name already in use.""" - area1 = area_registry.async_create("mock1") + floor = floor_registry.async_create("mock") + floor_id = floor.floor_id + area1 = area_registry.async_create("mock1", floor_id=floor_id) area2 = area_registry.async_create("mock2") with pytest.raises(ValueError) as e_info: @@ -255,6 +258,8 @@ async def test_update_area_with_name_already_in_use( assert area2.name == "mock2" assert len(area_registry.areas) == 2 + assert area_registry.areas.get_areas_for_floor(floor_id) == [area1] + async def test_update_area_with_normalized_name_already_in_use( area_registry: ar.AreaRegistry, From 687cd321426d3d567bee0feb19a6c0c35ef2d1d1 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Mon, 2 Sep 2024 21:23:24 +0200 Subject: [PATCH 1344/1635] Handle telegram polling errors (#124327) --- .../components/telegram_bot/polling.py | 16 ++- .../telegram_bot/test_telegram_bot.py | 103 +++++++++++++++++- 2 files changed, 114 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/telegram_bot/polling.py b/homeassistant/components/telegram_bot/polling.py index 45d2ee65b45..bee7f752f6c 100644 --- a/homeassistant/components/telegram_bot/polling.py +++ b/homeassistant/components/telegram_bot/polling.py @@ -25,14 +25,22 @@ async def async_setup_platform(hass, bot, config): async def process_error(update: Update, context: CallbackContext) -> None: """Telegram bot error handler.""" + if context.error: + error_callback(context.error, update) + + +def error_callback(error: Exception, update: Update | None = None) -> None: + """Log the error.""" try: - if context.error: - raise context.error + raise error except (TimedOut, NetworkError, RetryAfter): # Long polling timeout or connection problem. Nothing serious. 
pass except TelegramError: - _LOGGER.error('Update "%s" caused error: "%s"', update, context.error) + if update is not None: + _LOGGER.error('Update "%s" caused error: "%s"', update, error) + else: + _LOGGER.error("%s: %s", error.__class__.__name__, error) class PollBot(BaseTelegramBotEntity): @@ -53,7 +61,7 @@ class PollBot(BaseTelegramBotEntity): """Start the polling task.""" _LOGGER.debug("Starting polling") await self.application.initialize() - await self.application.updater.start_polling() + await self.application.updater.start_polling(error_callback=error_callback) await self.application.start() async def stop_polling(self, event=None): diff --git a/tests/components/telegram_bot/test_telegram_bot.py b/tests/components/telegram_bot/test_telegram_bot.py index aad758827ca..bdf6ba72fcc 100644 --- a/tests/components/telegram_bot/test_telegram_bot.py +++ b/tests/components/telegram_bot/test_telegram_bot.py @@ -1,8 +1,11 @@ """Tests for the telegram_bot component.""" -from unittest.mock import AsyncMock, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch +import pytest from telegram import Update +from telegram.error import NetworkError, RetryAfter, TelegramError, TimedOut from homeassistant.components.telegram_bot import ( ATTR_MESSAGE, @@ -11,6 +14,7 @@ from homeassistant.components.telegram_bot import ( SERVICE_SEND_MESSAGE, ) from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL +from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component @@ -188,6 +192,103 @@ async def test_polling_platform_message_text_update( assert isinstance(events[0].context, Context) +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + TelegramError("Telegram error"), + 'caused error: "Telegram error"', + ), + (NetworkError("Network error"), ""), + (RetryAfter(42), ""), + (TimedOut("TimedOut error"), ""), + ], +) +async def test_polling_platform_add_error_handler( + hass: HomeAssistant, + config_polling: dict[str, Any], + update_message_text: dict[str, Any], + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test polling add error handler.""" + with patch( + "homeassistant.components.telegram_bot.polling.ApplicationBuilder" + ) as application_builder_class: + await async_setup_component( + hass, + DOMAIN, + config_polling, + ) + await hass.async_block_till_done() + + application = ( + application_builder_class.return_value.bot.return_value.build.return_value + ) + application.updater.stop = AsyncMock() + application.stop = AsyncMock() + application.shutdown = AsyncMock() + process_error = application.add_error_handler.call_args[0][0] + application.bot.defaults.tzinfo = None + update = Update.de_json(update_message_text, application.bot) + + await process_error(update, MagicMock(error=error)) + + assert log_message in caplog.text + + +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + TelegramError("Telegram error"), + "TelegramError: Telegram error", + ), + (NetworkError("Network error"), ""), + (RetryAfter(42), ""), + (TimedOut("TimedOut error"), ""), + ], +) +async def test_polling_platform_start_polling_error_callback( + hass: HomeAssistant, + config_polling: dict[str, Any], + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test polling add error handler.""" + with patch( + "homeassistant.components.telegram_bot.polling.ApplicationBuilder" 
+ ) as application_builder_class: + await async_setup_component( + hass, + DOMAIN, + config_polling, + ) + await hass.async_block_till_done() + + application = ( + application_builder_class.return_value.bot.return_value.build.return_value + ) + application.initialize = AsyncMock() + application.updater.start_polling = AsyncMock() + application.start = AsyncMock() + application.updater.stop = AsyncMock() + application.stop = AsyncMock() + application.shutdown = AsyncMock() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_START) + await hass.async_block_till_done() + error_callback = application.updater.start_polling.call_args.kwargs[ + "error_callback" + ] + + error_callback(error) + + assert log_message in caplog.text + + async def test_webhook_endpoint_unauthorized_update_doesnt_generate_telegram_text_event( hass: HomeAssistant, webhook_platform, From f760c13e8f8a6fcf7751ca14c74b845d05383587 Mon Sep 17 00:00:00 2001 From: Richard Kroegel <42204099+rikroe@users.noreply.github.com> Date: Mon, 2 Sep 2024 21:23:38 +0200 Subject: [PATCH 1345/1635] Fix blocking calls for OpenAI conversation (#125010) --- .../components/openai_conversation/__init__.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index 75b5db23094..0fbda9b7f4a 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -19,6 +19,7 @@ from homeassistant.exceptions import ( ServiceValidationError, ) from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER @@ -88,7 +89,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> bool: """Set up OpenAI Conversation from a config entry.""" - client = openai.AsyncOpenAI(api_key=entry.data[CONF_API_KEY]) + client = openai.AsyncOpenAI( + api_key=entry.data[CONF_API_KEY], + http_client=get_async_client(hass), + ) + + # Cache current platform data which gets added to each request (caching done by library) + _ = await hass.async_add_executor_job(client.platform_headers) + try: await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list) except openai.AuthenticationError as err: From cd89db9bb6636c02adc52db7485d1122f2feccd5 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 2 Sep 2024 09:26:02 -1000 Subject: [PATCH 1346/1635] Add coverage for late unifiprotect person detection events (#125103) --- .../unifiprotect/test_binary_sensor.py | 146 ++++++++++++++++++ 1 file changed, 146 insertions(+) diff --git a/tests/components/unifiprotect/test_binary_sensor.py b/tests/components/unifiprotect/test_binary_sensor.py index af8ce015955..31669aa62bb 100644 --- a/tests/components/unifiprotect/test_binary_sensor.py +++ b/tests/components/unifiprotect/test_binary_sensor.py @@ -575,3 +575,149 @@ async def test_binary_sensor_package_detected( ufp.ws_msg(mock_msg) await hass.async_block_till_done() assert len(state_changes) == 2 + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_binary_sensor_person_detected( + hass: HomeAssistant, + ufp: MockUFPFixture, + doorbell: Camera, + unadopted_camera: Camera, + fixed_now: datetime, +) -> None: + """Test binary_sensor person detected detection entity.""" + + await init_entry(hass, ufp, [doorbell, unadopted_camera]) + assert_entity_counts(hass, Platform.BINARY_SENSOR, 15, 15) + + doorbell.smart_detect_settings.object_types.append(SmartDetectObjectType.PERSON) + + _, entity_id = ids_from_device_description( + Platform.BINARY_SENSOR, doorbell, EVENT_SENSORS[3] + ) + + events = async_capture_events(hass, EVENT_STATE_CHANGED) + + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.SMART_DETECT, + start=fixed_now - timedelta(seconds=1), + end=None, + score=50, + smart_detect_types=[], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + new_camera.is_smart_detected = True + + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + event = Event( + model=ModelType.EVENT, + id="test_event_id", + type=EventType.SMART_DETECT, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=65, + smart_detect_types=[SmartDetectObjectType.PERSON], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, + ) + + new_camera = doorbell.copy() + new_camera.is_smart_detected = True + new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id + + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + entity_events = [event for event in events if event.data["entity_id"] == entity_id] + assert len(entity_events) == 3 + assert entity_events[0].data["new_state"].state == STATE_OFF + assert entity_events[1].data["new_state"].state == STATE_ON + assert entity_events[2].data["new_state"].state == STATE_OFF + + # Event is already seen and has end, should now be off + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + # Now send an event that has an end right away + event = Event( + model=ModelType.EVENT, + id="new_event_id", + type=EventType.SMART_DETECT, + start=fixed_now - timedelta(seconds=1), + end=fixed_now + timedelta(seconds=1), + score=80, + smart_detect_types=[SmartDetectObjectType.PERSON], + smart_detect_event_ids=[], + camera_id=doorbell.id, + api=ufp.api, 
+ ) + + new_camera = doorbell.copy() + new_camera.is_smart_detected = True + new_camera.last_smart_detect_event_ids[SmartDetectObjectType.PERSON] = event.id + + ufp.api.bootstrap.cameras = {new_camera.id: new_camera} + ufp.api.bootstrap.events = {event.id: event} + + mock_msg = Mock() + mock_msg.changed_data = {} + mock_msg.new_obj = event + + state_changes: list[HAEvent[EventStateChangedData]] = async_capture_events( + hass, EVENT_STATE_CHANGED + ) + ufp.ws_msg(mock_msg) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state + assert state.state == STATE_OFF + + assert len(state_changes) == 2 + + on_event = state_changes[0] + state = on_event.data["new_state"] + assert state + assert state.state == STATE_ON + assert state.attributes[ATTR_ATTRIBUTION] == DEFAULT_ATTRIBUTION + assert state.attributes[ATTR_EVENT_SCORE] == 80 + + off_event = state_changes[1] + state = off_event.data["new_state"] + assert state + assert state.state == STATE_OFF + assert ATTR_EVENT_SCORE not in state.attributes + + # replay and ensure ignored + ufp.ws_msg(mock_msg) + await hass.async_block_till_done() + assert len(state_changes) == 2 From 606524f9e7c0775ad6e566aa84485046a85da448 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 2 Sep 2024 21:33:35 +0200 Subject: [PATCH 1347/1635] Test string timestamps are wiped after migration to schema version 32 (#125091) Co-authored-by: J. Nick Koston --- tests/components/recorder/test_v32_migration.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 8db2b9fa78c..1e00353d02c 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -192,9 +192,12 @@ async def test_migrate_times( assert len(events_result) == 1 assert events_result[0].time_fired_ts == now_timestamp + assert events_result[0].time_fired is None assert len(states_result) == 1 assert states_result[0].last_changed_ts == one_second_past_timestamp assert states_result[0].last_updated_ts == now_timestamp + assert states_result[0].last_changed is None + assert states_result[0].last_updated is None def _get_events_index_names(): with session_scope(hass=hass) as session: From f93259a2f1e62328c7a2b237308767cbcd7d1399 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Mon, 2 Sep 2024 09:43:34 -1000 Subject: [PATCH 1348/1635] Bump yalexs to 8.6.0 (#125102) --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 5f317a20834..a40c6920136 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -24,5 +24,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.5.5", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.0", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 9bee7df2e00..030df50a482 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.5.5", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.0", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index ba6313f6466..66d3ca23a1e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2986,7 +2986,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.5.5 +yalexs==8.6.0 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 98f18156cc0..88e21eaddf9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2369,7 +2369,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.5.5 +yalexs==8.6.0 # homeassistant.components.yeelight yeelight==0.7.14 From faefe624f62dba3f8fda95dd570bffb7444f3f2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 2 Sep 2024 22:17:24 +0200 Subject: [PATCH 1349/1635] Add Airzone Cloud Aidoo HVAC indoor/outdoor sensors (#125013) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álvaro Fernández Rojas --- .../components/airzone_cloud/sensor.py | 83 +++++++++++++++++++ .../components/airzone_cloud/strings.json | 27 ++++++ tests/components/airzone_cloud/test_sensor.py | 27 ++++++ 3 files changed, 137 insertions(+) diff --git a/homeassistant/components/airzone_cloud/sensor.py b/homeassistant/components/airzone_cloud/sensor.py index 9f0ee01aca2..70d2fd079d4 100644 --- a/homeassistant/components/airzone_cloud/sensor.py +++ b/homeassistant/components/airzone_cloud/sensor.py @@ -12,7 +12,16 @@ from aioairzone_cloud.const import ( AZD_AQ_PM_10, AZD_CPU_USAGE, AZD_HUMIDITY, + AZD_INDOOR_EXCHANGER_TEMP, + AZD_INDOOR_RETURN_TEMP, + AZD_INDOOR_WORK_TEMP, AZD_MEMORY_FREE, + AZD_OUTDOOR_CONDENSER_PRESS, + AZD_OUTDOOR_DISCHARGE_TEMP, + AZD_OUTDOOR_ELECTRIC_CURRENT, + AZD_OUTDOOR_EVAPORATOR_PRESS, + AZD_OUTDOOR_EXCHANGER_TEMP, + AZD_OUTDOOR_TEMP, AZD_TEMP, AZD_THERMOSTAT_BATTERY, AZD_THERMOSTAT_COVERAGE, @@ -32,7 +41,9 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfElectricCurrent, UnitOfInformation, + UnitOfPressure, UnitOfTemperature, ) from 
homeassistant.core import HomeAssistant, callback @@ -48,6 +59,78 @@ from .entity import ( ) AIDOO_SENSOR_TYPES: Final[tuple[SensorEntityDescription, ...]] = ( + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_INDOOR_EXCHANGER_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="indoor_exchanger_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_INDOOR_RETURN_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="indoor_return_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_INDOOR_WORK_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="indoor_work_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_CONDENSER_PRESS, + native_unit_of_measurement=UnitOfPressure.KPA, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_condenser_press", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_DISCHARGE_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_discharge_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_ELECTRIC_CURRENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_electric_current", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_EVAPORATOR_PRESS, + native_unit_of_measurement=UnitOfPressure.KPA, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_evaporator_press", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_EXCHANGER_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_exchanger_temp", + ), + SensorEntityDescription( + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + key=AZD_OUTDOOR_TEMP, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + translation_key="outdoor_temp", + ), SensorEntityDescription( device_class=SensorDeviceClass.TEMPERATURE, key=AZD_TEMP, diff --git a/homeassistant/components/airzone_cloud/strings.json b/homeassistant/components/airzone_cloud/strings.json index eb9529c7ca5..523c43f4955 100644 --- a/homeassistant/components/airzone_cloud/strings.json +++ b/homeassistant/components/airzone_cloud/strings.json @@ -45,6 +45,33 @@ "free_memory": { "name": "Free memory" }, + "indoor_exchanger_temp": { + "name": "Indoor exchanger temperature" + }, + "indoor_return_temp": { + "name": "Indoor return temperature" + }, + "indoor_work_temp": { + "name": "Indoor working temperature" + }, + "outdoor_condenser_press": { + "name": "Outdoor condenser pressure" + }, + "outdoor_discharge_temp": { + "name": "Outdoor discharge 
temperature" + }, + "outdoor_electric_current": { + "name": "Outdoor electric current" + }, + "outdoor_evaporator_press": { + "name": "Outdoor evaporator pressure" + }, + "outdoor_exchanger_temp": { + "name": "Outdoor exchanger temperature" + }, + "outdoor_temp": { + "name": "Outdoor temperature" + }, "thermostat_coverage": { "name": "Signal percentage" } diff --git a/tests/components/airzone_cloud/test_sensor.py b/tests/components/airzone_cloud/test_sensor.py index cf291ec23a6..672e10adedb 100644 --- a/tests/components/airzone_cloud/test_sensor.py +++ b/tests/components/airzone_cloud/test_sensor.py @@ -20,6 +20,33 @@ async def test_airzone_create_sensors(hass: HomeAssistant) -> None: state = hass.states.get("sensor.bron_pro_temperature") assert state.state == "20.0" + state = hass.states.get("sensor.bron_pro_indoor_exchanger_temperature") + assert state.state == "26.0" + + state = hass.states.get("sensor.bron_pro_indoor_return_temperature") + assert state.state == "26.0" + + state = hass.states.get("sensor.bron_pro_indoor_working_temperature") + assert state.state == "25.0" + + state = hass.states.get("sensor.bron_pro_outdoor_condenser_pressure") + assert state.state == "150.0" + + state = hass.states.get("sensor.bron_pro_outdoor_discharge_temperature") + assert state.state == "121.0" + + state = hass.states.get("sensor.bron_pro_outdoor_electric_current") + assert state.state == "3.0" + + state = hass.states.get("sensor.bron_pro_outdoor_evaporator_pressure") + assert state.state == "20.0" + + state = hass.states.get("sensor.bron_pro_outdoor_exchanger_temperature") + assert state.state == "-25.0" + + state = hass.states.get("sensor.bron_pro_outdoor_temperature") + assert state.state == "29.0" + # WebServers state = hass.states.get("sensor.webserver_11_22_33_44_55_66_cpu_usage") assert state.state == "32" From 671aaa7e957ffd9603d0c5beb39b10d00d223da6 Mon Sep 17 00:00:00 2001 From: cnico Date: Mon, 2 Sep 2024 23:51:10 +0200 Subject: [PATCH 1350/1635] Bump flipr api to 1.6.1 (#125106) --- homeassistant/components/flipr/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/flipr/manifest.json b/homeassistant/components/flipr/manifest.json index 1f9b04e3d57..cdd03770bab 100644 --- a/homeassistant/components/flipr/manifest.json +++ b/homeassistant/components/flipr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/flipr", "iot_class": "cloud_polling", "loggers": ["flipr_api"], - "requirements": ["flipr-api==1.6.0"] + "requirements": ["flipr-api==1.6.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 66d3ca23a1e..9bb204db562 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -895,7 +895,7 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.6.0 +flipr-api==1.6.1 # homeassistant.components.flux_led flux-led==1.0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 88e21eaddf9..c675c5a0c6a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -754,7 +754,7 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.6.0 +flipr-api==1.6.1 # homeassistant.components.flux_led flux-led==1.0.4 From d68ee8dceae7acd2fbdc2be0724847ac0827dcde Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Tue, 3 Sep 2024 00:38:09 +0200 Subject: [PATCH 1351/1635] Replace 
_host_in_configuration_exists with async_abort_entries_match in solarlog (#125099) * Add diagnostics to solarlog * Fix wrong comment * Move to async_abort_entries_match * Remove obsolete method solarlog_entries * Update tests/components/solarlog/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update tests/components/solarlog/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update tests/components/solarlog/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update tests/components/solarlog/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Amend import of config_entries.SOURCE_USER * Update tests/components/solarlog/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Ruff --------- Co-authored-by: Joost Lekkerkerker --- .../components/solarlog/config_flow.py | 24 ++------- tests/components/solarlog/test_config_flow.py | 51 +++++++------------ 2 files changed, 23 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 5d68a16eabe..5f047a9c844 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -10,7 +10,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME -from homeassistant.core import HomeAssistant, callback from homeassistant.util import slugify from .const import DEFAULT_HOST, DEFAULT_NAME, DOMAIN @@ -18,14 +17,6 @@ from .const import DEFAULT_HOST, DEFAULT_NAME, DOMAIN _LOGGER = logging.getLogger(__name__) -@callback -def solarlog_entries(hass: HomeAssistant) -> set[str]: - """Return the hosts already configured.""" - return { - entry.data[CONF_HOST] for entry in hass.config_entries.async_entries(DOMAIN) - } - - class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for solarlog.""" @@ -36,12 +27,6 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self._errors: dict = {} - def _host_in_configuration_exists(self, host: str) -> bool: - """Return True if host exists in configuration.""" - if host in solarlog_entries(self.hass): - return True - return False - def _parse_url(self, host: str) -> str: """Return parsed host url.""" url = urlparse(host, "http") @@ -72,12 +57,13 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): """Step when user initializes a integration.""" self._errors = {} if user_input is not None: - user_input[CONF_NAME] = slugify(user_input[CONF_NAME]) user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) - if self._host_in_configuration_exists(user_input[CONF_HOST]): - self._errors[CONF_HOST] = "already_configured" - elif await self._test_connection(user_input[CONF_HOST]): + self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + + user_input[CONF_NAME] = slugify(user_input[CONF_NAME]) + + if await self._test_connection(user_input[CONF_HOST]): return self.async_create_entry( title=user_input[CONF_NAME], data=user_input ) diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index 223ceec3ebb..b7ae6119893 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -5,9 +5,9 @@ from unittest.mock import AsyncMock, patch import pytest from solarlog_cli.solarlog_exceptions import SolarLogConnectionError, SolarLogError -from homeassistant import config_entries from 
homeassistant.components.solarlog import config_flow from homeassistant.components.solarlog.const import DOMAIN +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -21,7 +21,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -60,7 +60,7 @@ async def test_user( ) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -125,40 +125,25 @@ async def test_form_exceptions( async def test_abort_if_already_setup(hass: HomeAssistant, test_connect: None) -> None: """Test we abort if the device is already setup.""" - flow = init_config_flow(hass) - MockConfigEntry( - domain="solarlog", data={CONF_NAME: NAME, CONF_HOST: HOST} - ).add_to_hass(hass) - # Should fail, same HOST different NAME (default) - result = await flow.async_step_user( - {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} + MockConfigEntry(domain=DOMAIN, data={CONF_NAME: NAME, CONF_HOST: HOST}).add_to_hass( + hass ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM - assert result["errors"] == {CONF_HOST: "already_configured"} + assert result["step_id"] == "user" + assert result["errors"] == {} - # Should fail, same HOST and NAME - result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME}) - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {CONF_HOST: "already_configured"} - - # SHOULD pass, diff HOST (without http://), different NAME - result = await flow.async_step_user( - {CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", "extended_data": False}, ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_7_8_9" - assert result["data"][CONF_HOST] == "http://2.2.2.2" - - # SHOULD pass, diff HOST, same NAME - result = await flow.async_step_user( - {CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME, "extended_data": False} - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == "http://2.2.2.2" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" async def test_reconfigure_flow( @@ -178,7 +163,7 @@ async def test_reconfigure_flow( result = await hass.config_entries.flow.async_init( DOMAIN, context={ - "source": config_entries.SOURCE_RECONFIGURE, + "source": SOURCE_RECONFIGURE, "entry_id": entry.entry_id, }, ) From 0c18b2e7ffe069024cbce77fc011b0b588164673 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Tue, 3 Sep 2024 06:57:25 +0200 Subject: [PATCH 1352/1635] Remove `is_on` function from `homeassistant.components` (#125104) * Remove `is_on` 
method from `homeassistant.components` * Cleanup test --- homeassistant/components/__init__.py | 49 --------------------- tests/components/homeassistant/test_init.py | 10 ----- 2 files changed, 59 deletions(-) diff --git a/homeassistant/components/__init__.py b/homeassistant/components/__init__.py index 030e23628d6..d01f51c3951 100644 --- a/homeassistant/components/__init__.py +++ b/homeassistant/components/__init__.py @@ -6,52 +6,3 @@ Component design guidelines: format ".". - Each component should publish services only under its own domain. """ - -from __future__ import annotations - -import logging - -from homeassistant.core import HomeAssistant, split_entity_id -from homeassistant.helpers.frame import report -from homeassistant.helpers.group import expand_entity_ids - -_LOGGER = logging.getLogger(__name__) - - -def is_on(hass: HomeAssistant, entity_id: str | None = None) -> bool: - """Load up the module to call the is_on method. - - If there is no entity id given we will check all. - """ - report( - ( - "uses homeassistant.components.is_on." - " This is deprecated and will stop working in Home Assistant 2024.9, it" - " should be updated to use the function of the platform directly." - ), - error_if_core=True, - ) - - if entity_id: - entity_ids = expand_entity_ids(hass, [entity_id]) - else: - entity_ids = hass.states.entity_ids() - - for ent_id in entity_ids: - domain = split_entity_id(ent_id)[0] - - try: - component = getattr(hass.components, domain) - - except ImportError: - _LOGGER.error("Failed to call %s.is_on: component not found", domain) - continue - - if not hasattr(component, "is_on"): - _LOGGER.warning("Integration %s has no is_on method", domain) - continue - - if component.is_on(ent_id): - return True - - return False diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index a0902fe62df..a66d13e5ffe 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -7,7 +7,6 @@ import voluptuous as vol import yaml from homeassistant import config -import homeassistant.components as comps from homeassistant.components.homeassistant import ( ATTR_ENTRY_ID, ATTR_SAFE_MODE, @@ -46,15 +45,6 @@ from tests.common import ( ) -async def test_is_on(hass: HomeAssistant) -> None: - """Test is_on method.""" - with pytest.raises( - RuntimeError, - match="Detected code that uses homeassistant.components.is_on. This is deprecated and will stop working", - ): - assert comps.is_on(hass, "light.Bowl") - - async def test_turn_on_without_entities(hass: HomeAssistant) -> None: """Test turn_on method without entities.""" await async_setup_component(hass, ha.DOMAIN, {}) From 7c223db1d5df60fb8e2a7fa8818d077b2b751c4f Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 07:51:27 +0200 Subject: [PATCH 1353/1635] Remove recorder PostSchemaMigrationTask (#125076) Co-authored-by: J. 
Nick Koston --- homeassistant/components/recorder/core.py | 4 -- .../components/recorder/migration.py | 63 +++++-------------- homeassistant/components/recorder/tasks.py | 25 -------- 3 files changed, 14 insertions(+), 78 deletions(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 96a4f954c71..c0ac1fc1277 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1283,10 +1283,6 @@ class Recorder(threading.Thread): self.event_session = self.get_session() self.event_session.expire_on_commit = False - def _post_schema_migration(self, old_version: int, new_version: int) -> None: - """Run post schema migration tasks.""" - migration.post_schema_migration(self, old_version, new_version) - def _post_migrate_entity_ids(self) -> bool: """Post migrate entity_ids if needed.""" return migration.post_migrate_entity_ids(self) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 3da0bc9abb1..213462e3731 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -99,14 +99,8 @@ from .queries import ( migrate_single_short_term_statistics_row_to_timestamp, migrate_single_statistics_row_to_timestamp, ) -from .statistics import get_start_time -from .tasks import ( - CommitTask, - EntityIDPostMigrationTask, - PostSchemaMigrationTask, - RecorderTask, - StatisticsTimestampMigrationCleanupTask, -) +from .statistics import cleanup_statistics_timestamp_migration, get_start_time +from .tasks import EntityIDPostMigrationTask, RecorderTask from .util import ( database_job_retry_wrapper, execute_stmt_lambda_element, @@ -350,13 +344,6 @@ def migrate_schema_live( states_correct_db_schema(instance, schema_errors) events_correct_db_schema(instance, schema_errors) - start_version = schema_status.start_version - if start_version != SCHEMA_VERSION: - instance.queue_task(PostSchemaMigrationTask(start_version, SCHEMA_VERSION)) - # Make sure the post schema migration task is committed in case - # the next task does not have commit_before = True - instance.queue_task(CommitTask()) - return schema_status @@ -1414,6 +1401,12 @@ class _SchemaVersion32Migrator(_SchemaVersionMigrator, target_version=32): _drop_index(self.session_maker, "events", "ix_events_event_type_time_fired") _drop_index(self.session_maker, "states", "ix_states_last_updated") _drop_index(self.session_maker, "events", "ix_events_time_fired") + with session_scope(session=self.session_maker()) as session: + # In version 31 we migrated all the time_fired, last_updated, and last_changed + # columns to be timestamps. In version 32 we need to wipe the old columns + # since they are no longer used and take up a significant amount of space. + assert self.instance.engine is not None, "engine should never be None" + _wipe_old_string_time_columns(self.instance, self.instance.engine, session) class _SchemaVersion33Migrator(_SchemaVersionMigrator, target_version=33): @@ -1492,6 +1485,12 @@ class _SchemaVersion35Migrator(_SchemaVersionMigrator, target_version=35): # ix_statistics_start and ix_statistics_statistic_id_start are still used # for the post migration cleanup and can be removed in a future version. + # In version 34 we migrated all the created, start, and last_reset + # columns to be timestamps. In version 35 we need to wipe the old columns + # since they are no longer used and take up a significant amount of space. 
+ while not cleanup_statistics_timestamp_migration(self.instance): + pass + class _SchemaVersion36Migrator(_SchemaVersionMigrator, target_version=36): def _apply_update(self) -> None: @@ -1828,40 +1827,6 @@ def _correct_table_character_set_and_collation( ) -def post_schema_migration( - instance: Recorder, - old_version: int, - new_version: int, -) -> None: - """Post schema migration. - - Run any housekeeping tasks after the schema migration has completed. - - Post schema migration is run after the schema migration has completed - and the queue has been processed to ensure that we reduce the memory - pressure since events are held in memory until the queue is processed - which is blocked from being processed until the schema migration is - complete. - """ - if old_version < 32 <= new_version: - # In version 31 we migrated all the time_fired, last_updated, and last_changed - # columns to be timestamps. In version 32 we need to wipe the old columns - # since they are no longer used and take up a significant amount of space. - assert instance.event_session is not None - assert instance.engine is not None - _wipe_old_string_time_columns(instance, instance.engine, instance.event_session) - if old_version < 35 <= new_version: - # In version 34 we migrated all the created, start, and last_reset - # columns to be timestamps. In version 35 we need to wipe the old columns - # since they are no longer used and take up a significant amount of space. - _wipe_old_string_statistics_columns(instance) - - -def _wipe_old_string_statistics_columns(instance: Recorder) -> None: - """Wipe old string statistics columns to save space.""" - instance.queue_task(StatisticsTimestampMigrationCleanupTask()) - - @database_job_retry_wrapper("Wipe old string time columns", 3) def _wipe_old_string_time_columns( instance: Recorder, engine: Engine, session: Session diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index 46e529d4909..c51ba2b16ca 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -322,31 +322,6 @@ class SynchronizeTask(RecorderTask): instance.hass.loop.call_soon_threadsafe(self.event.set) -@dataclass(slots=True) -class PostSchemaMigrationTask(RecorderTask): - """Post migration task to update schema.""" - - old_version: int - new_version: int - - def run(self, instance: Recorder) -> None: - """Handle the task.""" - instance._post_schema_migration( # noqa: SLF001 - self.old_version, self.new_version - ) - - -@dataclass(slots=True) -class StatisticsTimestampMigrationCleanupTask(RecorderTask): - """An object to insert into the recorder queue to run a statistics migration cleanup task.""" - - def run(self, instance: Recorder) -> None: - """Run statistics timestamp cleanup task.""" - if not statistics.cleanup_statistics_timestamp_migration(instance): - # Schedule a new statistics migration task if this one didn't finish - instance.queue_task(StatisticsTimestampMigrationCleanupTask()) - - @dataclass(slots=True) class AdjustLRUSizeTask(RecorderTask): """An object to insert into the recorder queue to adjust the LRU size.""" From aa8fe9911362a97c53fcc155af9cb9ad9d7b3b7c Mon Sep 17 00:00:00 2001 From: LG-ThinQ-Integration Date: Tue, 3 Sep 2024 16:30:46 +0900 Subject: [PATCH 1354/1635] Add binary_sensor platform to LG Thinq (#125054) * Add binary_sensor entity * Update the document link due to the domain name change * Update casing --------- Co-authored-by: jangwon.lee --- 
homeassistant/components/lg_thinq/__init__.py | 2 +- .../components/lg_thinq/binary_sensor.py | 115 ++++++++++++++++++ homeassistant/components/lg_thinq/icons.json | 17 +++ .../components/lg_thinq/manifest.json | 2 +- .../components/lg_thinq/strings.json | 17 +++ 5 files changed, 151 insertions(+), 2 deletions(-) create mode 100644 homeassistant/components/lg_thinq/binary_sensor.py diff --git a/homeassistant/components/lg_thinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py index 259d494902e..a86afc68171 100644 --- a/homeassistant/components/lg_thinq/__init__.py +++ b/homeassistant/components/lg_thinq/__init__.py @@ -19,7 +19,7 @@ from .coordinator import DeviceDataUpdateCoordinator, async_setup_device_coordin type ThinqConfigEntry = ConfigEntry[dict[str, DeviceDataUpdateCoordinator]] -PLATFORMS = [Platform.SWITCH] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/lg_thinq/binary_sensor.py b/homeassistant/components/lg_thinq/binary_sensor.py new file mode 100644 index 00000000000..fc6564c7652 --- /dev/null +++ b/homeassistant/components/lg_thinq/binary_sensor.py @@ -0,0 +1,115 @@ +"""Support for binary sensor entities.""" + +from __future__ import annotations + +from thinqconnect import PROPERTY_READABLE, DeviceType +from thinqconnect.devices.const import Property as ThinQProperty +from thinqconnect.integration.homeassistant.property import create_properties + +from homeassistant.components.binary_sensor import ( + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import ThinqConfigEntry +from .entity import ThinQEntity + +BINARY_SENSOR_DESC: dict[ThinQProperty, BinarySensorEntityDescription] = { + ThinQProperty.RINSE_REFILL: BinarySensorEntityDescription( + key=ThinQProperty.RINSE_REFILL, + translation_key=ThinQProperty.RINSE_REFILL, + ), + ThinQProperty.ECO_FRIENDLY_MODE: BinarySensorEntityDescription( + key=ThinQProperty.ECO_FRIENDLY_MODE, + translation_key=ThinQProperty.ECO_FRIENDLY_MODE, + ), + ThinQProperty.POWER_SAVE_ENABLED: BinarySensorEntityDescription( + key=ThinQProperty.POWER_SAVE_ENABLED, + translation_key=ThinQProperty.POWER_SAVE_ENABLED, + ), + ThinQProperty.REMOTE_CONTROL_ENABLED: BinarySensorEntityDescription( + key=ThinQProperty.REMOTE_CONTROL_ENABLED, + translation_key=ThinQProperty.REMOTE_CONTROL_ENABLED, + ), + ThinQProperty.SABBATH_MODE: BinarySensorEntityDescription( + key=ThinQProperty.SABBATH_MODE, + translation_key=ThinQProperty.SABBATH_MODE, + ), +} + +DEVICE_TYPE_BINARY_SENSOR_MAP: dict[ + DeviceType, tuple[BinarySensorEntityDescription, ...] 
+] = { + DeviceType.COOKTOP: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.DISH_WASHER: ( + BINARY_SENSOR_DESC[ThinQProperty.RINSE_REFILL], + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.DRYER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.OVEN: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.REFRIGERATOR: ( + BINARY_SENSOR_DESC[ThinQProperty.ECO_FRIENDLY_MODE], + BINARY_SENSOR_DESC[ThinQProperty.POWER_SAVE_ENABLED], + BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE], + ), + DeviceType.STYLER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHCOMBO_MAIN: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHCOMBO_MINI: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHTOWER_DRYER: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WASHTOWER: (BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED],), + DeviceType.WASHTOWER_WASHER: ( + BINARY_SENSOR_DESC[ThinQProperty.REMOTE_CONTROL_ENABLED], + ), + DeviceType.WINE_CELLAR: (BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE],), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ThinqConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up an entry for binary sensor platform.""" + entities: list[ThinQBinarySensorEntity] = [] + for coordinator in entry.runtime_data.values(): + if ( + descriptions := DEVICE_TYPE_BINARY_SENSOR_MAP.get( + coordinator.device_api.device_type + ) + ) is not None: + for description in descriptions: + properties = create_properties( + device_api=coordinator.device_api, + key=description.key, + children_keys=None, + rw_type=PROPERTY_READABLE, + ) + if not properties: + continue + + entities.extend( + ThinQBinarySensorEntity(coordinator, description, prop) + for prop in properties + ) + + if entities: + async_add_entities(entities) + + +class ThinQBinarySensorEntity(ThinQEntity, BinarySensorEntity): + """Represent a thinq binary sensor platform.""" + + def _update_status(self) -> None: + """Update status itself.""" + super()._update_status() + + self._attr_is_on = self.property.get_value_as_bool() diff --git a/homeassistant/components/lg_thinq/icons.json b/homeassistant/components/lg_thinq/icons.json index 6a4ff48494a..550d023d278 100644 --- a/homeassistant/components/lg_thinq/icons.json +++ b/homeassistant/components/lg_thinq/icons.json @@ -4,6 +4,23 @@ "operation_power": { "default": "mdi:power" } + }, + "binary_sensor": { + "eco_friendly_mode": { + "default": "mdi:sprout" + }, + "power_save_enabled": { + "default": "mdi:meter-electric" + }, + "remote_control_enabled": { + "default": "mdi:remote" + }, + "rinse_refill": { + "default": "mdi:tune-vertical-variant" + }, + "sabbath_mode": { + "default": "mdi:food-off-outline" + } } } } diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json index 0fa447a511b..a49b91892f5 100644 --- a/homeassistant/components/lg_thinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -4,7 +4,7 @@ "codeowners": ["@LG-ThinQ-Integration"], "config_flow": true, "dependencies": [], - "documentation": "https://www.home-assistant.io/integrations/lgthinq/", + "documentation": "https://www.home-assistant.io/integrations/lg_thinq/", "iot_class": "cloud_push", "loggers": 
["thinqconnect"], "requirements": ["thinqconnect==0.9.5"] diff --git a/homeassistant/components/lg_thinq/strings.json b/homeassistant/components/lg_thinq/strings.json index 6334fd9a893..472e8b848b7 100644 --- a/homeassistant/components/lg_thinq/strings.json +++ b/homeassistant/components/lg_thinq/strings.json @@ -23,6 +23,23 @@ "operation_power": { "name": "Power" } + }, + "binary_sensor": { + "eco_friendly_mode": { + "name": "Eco friendly" + }, + "power_save_enabled": { + "name": "Power saving mode" + }, + "remote_control_enabled": { + "name": "Remote start" + }, + "rinse_refill": { + "name": "Rinse refill needed" + }, + "sabbath_mode": { + "name": "Sabbath" + } } } } From 22b62393041a37a1c9288fb9dbc912be4046d61d Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Tue, 3 Sep 2024 11:04:35 +0100 Subject: [PATCH 1355/1635] Convert ring integration to use entry.runtime_data (#125127) --- homeassistant/components/ring/__init__.py | 30 ++++++++----------- .../components/ring/binary_sensor.py | 8 ++--- homeassistant/components/ring/button.py | 8 ++--- homeassistant/components/ring/camera.py | 8 ++--- homeassistant/components/ring/diagnostics.py | 8 ++--- homeassistant/components/ring/light.py | 8 ++--- homeassistant/components/ring/sensor.py | 8 ++--- homeassistant/components/ring/siren.py | 8 ++--- homeassistant/components/ring/switch.py | 8 ++--- tests/components/ring/test_init.py | 4 +-- 10 files changed, 39 insertions(+), 59 deletions(-) diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index 14ab435fda6..3714802b63a 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -31,7 +31,10 @@ class RingData: notifications_coordinator: RingNotificationsCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +type RingConfigEntry = ConfigEntry[RingData] + + +async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool: """Set up a config entry.""" def token_updater(token: dict[str, Any]) -> None: @@ -56,7 +59,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await devices_coordinator.async_config_entry_first_refresh() await notifications_coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = RingData( + entry.runtime_data = RingData( api=ring, devices=ring.devices(), devices_coordinator=devices_coordinator, @@ -86,11 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: severity=IssueSeverity.WARNING, translation_key="deprecated_service_ring_update", ) - - for info in hass.data[DOMAIN].values(): - ring_data = cast(RingData, info) - await ring_data.devices_coordinator.async_refresh() - await ring_data.notifications_coordinator.async_refresh() + for loaded_entry in hass.config_entries.async_loaded_entries(DOMAIN): + await loaded_entry.runtime_data.devices_coordinator.async_refresh() + await loaded_entry.runtime_data.notifications_coordinator.async_refresh() # register service hass.services.async_register(DOMAIN, "update", async_refresh_all) @@ -100,18 +101,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload Ring entry.""" - if not await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - return False + unload_ok = await hass.config_entries.async_unload_platforms(entry, 
PLATFORMS) - hass.data[DOMAIN].pop(entry.entry_id) + if len(hass.config_entries.async_loaded_entries(DOMAIN)) == 1: + # This is the last loaded entry, clean up service + hass.services.async_remove(DOMAIN, "update") - if len(hass.data[DOMAIN]) != 0: - return True - - # Last entry unloaded, clean up service - hass.services.async_remove(DOMAIN, "update") - - return True + return unload_ok async def async_remove_config_entry_device( diff --git a/homeassistant/components/ring/binary_sensor.py b/homeassistant/components/ring/binary_sensor.py index 2db04cfd461..2fb557ddde0 100644 --- a/homeassistant/components/ring/binary_sensor.py +++ b/homeassistant/components/ring/binary_sensor.py @@ -14,12 +14,10 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingNotificationsCoordinator from .entity import RingBaseEntity @@ -50,11 +48,11 @@ BINARY_SENSOR_TYPES: tuple[RingBinarySensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Ring binary sensors from a config entry.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data entities = [ RingBinarySensor( diff --git a/homeassistant/components/ring/button.py b/homeassistant/components/ring/button.py index c8d7d902d18..b9d5cceb373 100644 --- a/homeassistant/components/ring/button.py +++ b/homeassistant/components/ring/button.py @@ -5,12 +5,10 @@ from __future__ import annotations from ring_doorbell import RingOther from homeassistant.components.button import ButtonEntity, ButtonEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -21,11 +19,11 @@ BUTTON_DESCRIPTION = ButtonEntityDescription( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the buttons for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( diff --git a/homeassistant/components/ring/camera.py b/homeassistant/components/ring/camera.py index df71de29089..9c66df9d89e 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -12,14 +12,12 @@ from ring_doorbell import RingDoorBell from homeassistant.components import ffmpeg from homeassistant.components.camera import Camera -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from . import RingData -from .const import DOMAIN +from . 
import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -31,11 +29,11 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a Ring Door Bell and StickUp Camera.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) diff --git a/homeassistant/components/ring/diagnostics.py b/homeassistant/components/ring/diagnostics.py index 2e7604d9f50..cecf26a46a7 100644 --- a/homeassistant/components/ring/diagnostics.py +++ b/homeassistant/components/ring/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry TO_REDACT = { "id", @@ -29,10 +27,10 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: RingConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - ring_data: RingData = hass.data[DOMAIN][entry.entry_id] + ring_data = entry.runtime_data devices_data = ring_data.api.devices_data devices_raw = [ devices_data[device_type][device_id] diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index 99c4105f4e9..9e29373a3aa 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -8,13 +8,11 @@ from typing import Any from ring_doorbell import RingStickUpCam from homeassistant.components.light import ColorMode, LightEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -38,11 +36,11 @@ class OnOffState(StrEnum): async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the lights for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( diff --git a/homeassistant/components/ring/sensor.py b/homeassistant/components/ring/sensor.py index b6849e37d96..83d07dbd9b4 100644 --- a/homeassistant/components/ring/sensor.py +++ b/homeassistant/components/ring/sensor.py @@ -21,7 +21,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, @@ -31,19 +30,18 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from . import RingData -from .const import DOMAIN +from . 
import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingDeviceT, RingEntity async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a sensor for a Ring device.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator entities = [ diff --git a/homeassistant/components/ring/siren.py b/homeassistant/components/ring/siren.py index 665de07a5bb..f5730d942b8 100644 --- a/homeassistant/components/ring/siren.py +++ b/homeassistant/components/ring/siren.py @@ -6,12 +6,10 @@ from typing import Any from ring_doorbell import RingChime, RingEventKind from homeassistant.components.siren import ATTR_TONE, SirenEntity, SirenEntityFeature -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import RingData -from .const import DOMAIN +from . import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -20,11 +18,11 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the sirens for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index effb43cedbe..01d321572ac 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -7,13 +7,11 @@ from typing import Any from ring_doorbell import RingStickUpCam from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback import homeassistant.util.dt as dt_util -from . import RingData -from .const import DOMAIN +from . 
import RingConfigEntry from .coordinator import RingDataCoordinator from .entity import RingEntity, exception_wrap @@ -30,11 +28,11 @@ SKIP_UPDATES_DELAY = timedelta(seconds=5) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + entry: RingConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Create the switches for the Ring devices.""" - ring_data: RingData = hass.data[DOMAIN][config_entry.entry_id] + ring_data = entry.runtime_data devices_coordinator = ring_data.devices_coordinator async_add_entities( diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 4ab3e1bd366..97392e0c93b 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -186,7 +186,7 @@ async def test_error_on_global_update( assert log_msg in caplog.text - assert mock_config_entry.entry_id in hass.data[DOMAIN] + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) @pytest.mark.parametrize( @@ -226,7 +226,7 @@ async def test_error_on_device_update( await hass.async_block_till_done(wait_background_tasks=True) assert log_msg in caplog.text - assert mock_config_entry.entry_id in hass.data[DOMAIN] + assert hass.config_entries.async_get_entry(mock_config_entry.entry_id) async def test_issue_deprecated_service_ring_update( From fc24843274ae6086b85662c379774f473068310c Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:43:31 +0200 Subject: [PATCH 1356/1635] Fix Onkyo action select_hdmi_output (#125115) * Fix Onkyo service select_hdmi_output * Move Hasskey directly under Onkyo domain --- .../components/onkyo/media_player.py | 72 +++++++++++-------- 1 file changed, 43 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index acc0459e258..8d8f4d3bfd5 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -11,7 +11,7 @@ import pyeiscp import voluptuous as vol from homeassistant.components.media_player import ( - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -28,9 +28,14 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util.hass_dict import HassKey _LOGGER = logging.getLogger(__name__) +DOMAIN = "onkyo" + +DATA_MP_ENTITIES: HassKey[list[dict[str, OnkyoMediaPlayer]]] = HassKey(DOMAIN) + CONF_SOURCES = "sources" CONF_MAX_VOLUME = "max_volume" CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" @@ -148,6 +153,33 @@ class ReceiverInfo: identifier: str +async def async_register_services(hass: HomeAssistant) -> None: + """Register Onkyo services.""" + + async def async_service_handle(service: ServiceCall) -> None: + """Handle for services.""" + entity_ids = service.data[ATTR_ENTITY_ID] + + targets: list[OnkyoMediaPlayer] = [] + for receiver_entities in hass.data[DATA_MP_ENTITIES]: + targets.extend( + entity + for entity in receiver_entities.values() + if entity.entity_id in entity_ids + ) + + for target in targets: + if service.service == SERVICE_SELECT_HDMI_OUTPUT: + await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) + + hass.services.async_register( + 
MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_HDMI_OUTPUT, + async_service_handle, + schema=ONKYO_SELECT_OUTPUT_SCHEMA, + ) + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -155,29 +187,10 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Onkyo platform.""" + await async_register_services(hass) + receivers: dict[str, pyeiscp.Connection] = {} # indexed by host - entities: dict[str, dict[str, OnkyoMediaPlayer]] = {} # indexed by host and zone - - async def async_service_handle(service: ServiceCall) -> None: - """Handle for services.""" - entity_ids = service.data[ATTR_ENTITY_ID] - targets = [ - entity - for h in entities.values() - for entity in h.values() - if entity.entity_id in entity_ids - ] - - for target in targets: - if service.service == SERVICE_SELECT_HDMI_OUTPUT: - await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) - - hass.services.async_register( - DOMAIN, - SERVICE_SELECT_HDMI_OUTPUT, - async_service_handle, - schema=ONKYO_SELECT_OUTPUT_SCHEMA, - ) + all_entities = hass.data.setdefault(DATA_MP_ENTITIES, []) host = config.get(CONF_HOST) name = config.get(CONF_NAME) @@ -188,6 +201,9 @@ async def async_setup_platform( async def async_setup_receiver( info: ReceiverInfo, discovered: bool, name: str | None ) -> None: + entities: dict[str, OnkyoMediaPlayer] = {} + all_entities.append(entities) + @callback def async_onkyo_update_callback( message: tuple[str, str, Any], origin: str @@ -199,7 +215,7 @@ async def async_setup_platform( ) zone, _, value = message - entity = entities[origin].get(zone) + entity = entities.get(zone) if entity is not None: if entity.enabled: entity.process_update(message) @@ -210,7 +226,7 @@ async def async_setup_platform( zone_entity = OnkyoMediaPlayer( receiver, sources, zone, max_volume, receiver_max_volume ) - entities[origin][zone] = zone_entity + entities[zone] = zone_entity async_add_entities([zone_entity]) @callback @@ -221,7 +237,7 @@ async def async_setup_platform( "Receiver (re)connected: %s (%s)", receiver.name, receiver.host ) - for entity in entities[origin].values(): + for entity in entities.values(): entity.backfill_state() _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) @@ -237,9 +253,7 @@ async def async_setup_platform( receiver.name = name or info.model_name receiver.discovered = discovered - # Store the receiver object and create a dictionary to store its entities. receivers[receiver.host] = receiver - entities[receiver.host] = {} # Discover what zones are available for the receiver by querying the power. # If we get a response for the specific zone, it means it is available. 
@@ -251,7 +265,7 @@ async def async_setup_platform( main_entity = OnkyoMediaPlayer( receiver, sources, "main", max_volume, receiver_max_volume ) - entities[receiver.host]["main"] = main_entity + entities["main"] = main_entity async_add_entities([main_entity]) if host is not None: From f34b449f61a089d45a920c81040834db69ede97f Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Tue, 3 Sep 2024 12:50:05 +0200 Subject: [PATCH 1357/1635] Correct device serial for ViCare integration (#125125) * expose correct serial * adapt inits * adjust _build_entities * adapt inits * add serial data point * update snapshot * apply suggestions * apply suggestions --- .../components/vicare/binary_sensor.py | 78 +++++++----------- homeassistant/components/vicare/button.py | 6 +- homeassistant/components/vicare/climate.py | 2 +- homeassistant/components/vicare/entity.py | 16 ++-- homeassistant/components/vicare/fan.py | 2 +- homeassistant/components/vicare/number.py | 43 +++++----- homeassistant/components/vicare/sensor.py | 79 +++++++------------ .../components/vicare/water_heater.py | 2 +- .../vicare/fixtures/Vitodens300W.json | 17 ++++ .../vicare/snapshots/test_diagnostics.ambr | 18 +++++ 10 files changed, 128 insertions(+), 135 deletions(-) diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 2c114d15b85..7fe248fa266 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -112,61 +112,36 @@ def _build_entities( entities: list[ViCareBinarySensor] = [] for device in device_list: - entities.extend(_build_entities_for_device(device.api, device.config)) + # add device entities entities.extend( - _build_entities_for_component( - get_circuits(device.api), device.config, CIRCUIT_SENSORS + ViCareBinarySensor( + description, + device.config, + device.api, ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device.api) ) - entities.extend( - _build_entities_for_component( - get_burners(device.api), device.config, BURNER_SENSORS + # add component entities + for component_list, entity_description_list in ( + (get_circuits(device.api), CIRCUIT_SENSORS), + (get_burners(device.api), BURNER_SENSORS), + (get_compressors(device.api), COMPRESSOR_SENSORS), + ): + entities.extend( + ViCareBinarySensor( + description, + device.config, + device.api, + component, + ) + for component in component_list + for description in entity_description_list + if is_supported(description.key, description, component) ) - ) - entities.extend( - _build_entities_for_component( - get_compressors(device.api), device.config, COMPRESSOR_SENSORS - ) - ) return entities -def _build_entities_for_device( - device: PyViCareDevice, - device_config: PyViCareDeviceConfig, -) -> list[ViCareBinarySensor]: - """Create device specific ViCare binary sensor entities.""" - - return [ - ViCareBinarySensor( - device_config, - device, - description, - ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device) - ] - - -def _build_entities_for_component( - components: list[PyViCareHeatingDeviceComponent], - device_config: PyViCareDeviceConfig, - entity_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], -) -> list[ViCareBinarySensor]: - """Create component specific ViCare binary sensor entities.""" - - return [ - ViCareBinarySensor( - device_config, - component, - description, - ) - for component in components - for 
description in entity_descriptions - if is_supported(description.key, description, component) - ] - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -190,12 +165,13 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): def __init__( self, - device_config: PyViCareDeviceConfig, - api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareBinarySensorEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device, component) self.entity_description = description @property diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index f880c39ddea..51a763c1fcc 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -54,9 +54,9 @@ def _build_entities( return [ ViCareButton( + description, device.config, device.api, - description, ) for device in device_list for description in BUTTON_DESCRIPTIONS @@ -87,12 +87,12 @@ class ViCareButton(ViCareEntity, ButtonEntity): def __init__( self, + description: ViCareButtonEntityDescription, device_config: PyViCareDeviceConfig, device: PyViCareDevice, - description: ViCareButtonEntityDescription, ) -> None: """Initialize the button.""" - super().__init__(device_config, device, description.key) + super().__init__(description.key, device_config, device) self.entity_description = description def press(self) -> None: diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index df1cde2abca..4968e565d0b 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -148,7 +148,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the climate device.""" - super().__init__(device_config, device, circuit.id) + super().__init__(circuit.id, device_config, device) self._circuit = circuit self._attributes: dict[str, Any] = {} self._attributes["vicare_programs"] = self._circuit.getPrograms() diff --git a/homeassistant/components/vicare/entity.py b/homeassistant/components/vicare/entity.py index 1bb2993cd3a..eef114b4039 100644 --- a/homeassistant/components/vicare/entity.py +++ b/homeassistant/components/vicare/entity.py @@ -2,6 +2,9 @@ from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, +) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity @@ -16,21 +19,24 @@ class ViCareEntity(Entity): def __init__( self, + unique_id_suffix: str, device_config: PyViCareDeviceConfig, device: PyViCareDevice, - unique_id_suffix: str, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the entity.""" - self._api = device + self._api: PyViCareDevice | PyViCareHeatingDeviceComponent = ( + component if component else device + ) self._attr_unique_id = f"{device_config.getConfig().serial}-{unique_id_suffix}" # valid for compressors, circuits, burners (HeatingDeviceWithComponent) - if hasattr(device, "id"): - self._attr_unique_id += f"-{device.id}" + if component: + self._attr_unique_id += f"-{component.id}" 
self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device_config.getConfig().serial)}, - serial_number=device_config.getConfig().serial, + serial_number=device.getSerial(), name=device_config.getModel(), manufacturer="Viessmann", model=device_config.getModel(), diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 5b9dd2787e8..d7dbd037b56 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -129,7 +129,7 @@ class ViCareFan(ViCareEntity, FanEntity): device: PyViCareDevice, ) -> None: """Initialize the fan entity.""" - super().__init__(device_config, device, self._attr_translation_key) + super().__init__(self._attr_translation_key, device_config, device) def update(self) -> None: """Update state of fan.""" diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index a6bb849ce62..ea64fb174e8 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -245,30 +245,30 @@ def _build_entities( ) -> list[ViCareNumber]: """Create ViCare number entities for a device.""" - entities: list[ViCareNumber] = [ - ViCareNumber( - device.config, - device.api, - description, - ) - for device in device_list - for description in DEVICE_ENTITY_DESCRIPTIONS - if is_supported(description.key, description, device.api) - ] - - entities.extend( - [ + entities: list[ViCareNumber] = [] + for device in device_list: + # add device entities + entities.extend( ViCareNumber( - device.config, - circuit, description, + device.config, + device.api, + ) + for description in DEVICE_ENTITY_DESCRIPTIONS + if is_supported(description.key, description, device.api) + ) + # add component entities + entities.extend( + ViCareNumber( + description, + device.config, + device.api, + circuit, ) - for device in device_list for circuit in get_circuits(device.api) for description in CIRCUIT_ENTITY_DESCRIPTIONS if is_supported(description.key, description, circuit) - ] - ) + ) return entities @@ -295,12 +295,13 @@ class ViCareNumber(ViCareEntity, NumberEntity): def __init__( self, - device_config: PyViCareDeviceConfig, - api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareNumberEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the number.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device, component) self.entity_description = description @property diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 4ac3c504d9a..bdcb6dfa3aa 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -747,7 +747,6 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ) - CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ViCareSensorEntityDescription( key="supply_temperature", @@ -865,61 +864,36 @@ def _build_entities( entities: list[ViCareSensor] = [] for device in device_list: - entities.extend(_build_entities_for_device(device.api, device.config)) + # add device entities entities.extend( - _build_entities_for_component( - get_circuits(device.api), device.config, CIRCUIT_SENSORS + ViCareSensor( + description, + device.config, + device.api, ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device.api) ) - entities.extend( - _build_entities_for_component( - get_burners(device.api), device.config, BURNER_SENSORS + # add component entities + for component_list, entity_description_list in ( + (get_circuits(device.api), CIRCUIT_SENSORS), + (get_burners(device.api), BURNER_SENSORS), + (get_compressors(device.api), COMPRESSOR_SENSORS), + ): + entities.extend( + ViCareSensor( + description, + device.config, + device.api, + component, + ) + for component in component_list + for description in entity_description_list + if is_supported(description.key, description, component) ) - ) - entities.extend( - _build_entities_for_component( - get_compressors(device.api), device.config, COMPRESSOR_SENSORS - ) - ) return entities -def _build_entities_for_device( - device: PyViCareDevice, - device_config: PyViCareDeviceConfig, -) -> list[ViCareSensor]: - """Create device specific ViCare sensor entities.""" - - return [ - ViCareSensor( - device_config, - device, - description, - ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device) - ] - - -def _build_entities_for_component( - components: list[PyViCareHeatingDeviceComponent], - device_config: PyViCareDeviceConfig, - entity_descriptions: tuple[ViCareSensorEntityDescription, ...], -) -> list[ViCareSensor]: - """Create component specific ViCare sensor entities.""" - - return [ - ViCareSensor( - device_config, - component, - description, - ) - for component in components - for description in entity_descriptions - if is_supported(description.key, description, component) - ] - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -945,12 +919,13 @@ class ViCareSensor(ViCareEntity, SensorEntity): def __init__( self, - device_config: PyViCareDeviceConfig, - api: PyViCareDevice | PyViCareHeatingDeviceComponent, description: ViCareSensorEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device, component) self.entity_description = description @property diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index c76c6ea81aa..621d2f2a09b 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -113,7 +113,7 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the DHW water_heater device.""" - super().__init__(device_config, device, circuit.id) + super().__init__(circuit.id, device_config, device) self._circuit = circuit self._attributes: dict[str, Any] = {} diff --git a/tests/components/vicare/fixtures/Vitodens300W.json b/tests/components/vicare/fixtures/Vitodens300W.json index 4cf67ebe0f7..bb86bda981b 100644 --- a/tests/components/vicare/fixtures/Vitodens300W.json +++ 
b/tests/components/vicare/fixtures/Vitodens300W.json @@ -1,5 +1,22 @@ { "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "################" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" + }, { "properties": {}, "commands": {}, diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index dfc29d46cc2..430b2de35ad 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4,6 +4,24 @@ 'data': list([ dict({ 'data': list([ + dict({ + 'apiVersion': 1, + 'commands': dict({ + }), + 'deviceId': '0', + 'feature': 'device.serial', + 'gatewayId': '################', + 'isEnabled': True, + 'isReady': True, + 'properties': dict({ + 'value': dict({ + 'type': 'string', + 'value': '################', + }), + }), + 'timestamp': '2024-03-20T01:29:35.549Z', + 'uri': 'https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial', + }), dict({ 'apiVersion': 1, 'commands': dict({ From b9db9eeab22d7ca3b1e456e7b3ba188251d48c82 Mon Sep 17 00:00:00 2001 From: Philip Vanloo <26272906+dukeofphilberg@users.noreply.github.com> Date: Tue, 3 Sep 2024 13:34:47 +0200 Subject: [PATCH 1358/1635] Add Linkplay mTLS/HTTPS and improve logging (#124307) * Work * Implement 0.0.8 changes, fixup tests * Cleanup * Implement new playmodes, close clientsession upon ha close * Implement new playmodes, close clientsession upon ha close * Add test for zeroconf bridge failure * Bump 0.0.9 Address old comments in 113940 * Exact _async_register_default_clientsession_shutdown --- homeassistant/components/linkplay/__init__.py | 24 ++++++---- .../components/linkplay/config_flow.py | 40 ++++++++++++---- homeassistant/components/linkplay/const.py | 1 + .../components/linkplay/manifest.json | 2 +- .../components/linkplay/media_player.py | 11 +++++ homeassistant/components/linkplay/utils.py | 27 +++++++++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/linkplay/conftest.py | 9 +++- tests/components/linkplay/test_config_flow.py | 48 +++++++++++++------ 10 files changed, 128 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/linkplay/__init__.py b/homeassistant/components/linkplay/__init__.py index c0fe711a61b..808f2f93ce2 100644 --- a/homeassistant/components/linkplay/__init__.py +++ b/homeassistant/components/linkplay/__init__.py @@ -1,17 +1,22 @@ """Support for LinkPlay devices.""" +from dataclasses import dataclass + +from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge -from linkplay.discovery import linkplay_factory_bridge +from linkplay.discovery import linkplay_factory_httpapi_bridge +from linkplay.exceptions import LinkPlayRequestException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import PLATFORMS +from .utils import async_get_client_session +@dataclass class LinkPlayData: """Data for LinkPlay.""" @@ 
-24,16 +29,17 @@ type LinkPlayConfigEntry = ConfigEntry[LinkPlayData] async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) -> bool: """Async setup hass config entry. Called when an entry has been setup.""" - session = async_get_clientsession(hass) - if ( - bridge := await linkplay_factory_bridge(entry.data[CONF_HOST], session) - ) is None: + session: ClientSession = await async_get_client_session(hass) + bridge: LinkPlayBridge | None = None + + try: + bridge = await linkplay_factory_httpapi_bridge(entry.data[CONF_HOST], session) + except LinkPlayRequestException as exception: raise ConfigEntryNotReady( f"Failed to connect to LinkPlay device at {entry.data[CONF_HOST]}" - ) + ) from exception - entry.runtime_data = LinkPlayData() - entry.runtime_data.bridge = bridge + entry.runtime_data = LinkPlayData(bridge=bridge) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/linkplay/config_flow.py b/homeassistant/components/linkplay/config_flow.py index 0f9c40d0fd4..7dfdce238ff 100644 --- a/homeassistant/components/linkplay/config_flow.py +++ b/homeassistant/components/linkplay/config_flow.py @@ -1,16 +1,22 @@ """Config flow to configure LinkPlay component.""" +import logging from typing import Any -from linkplay.discovery import linkplay_factory_bridge +from aiohttp import ClientSession +from linkplay.bridge import LinkPlayBridge +from linkplay.discovery import linkplay_factory_httpapi_bridge +from linkplay.exceptions import LinkPlayRequestException import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_MODEL -from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +from .utils import async_get_client_session + +_LOGGER = logging.getLogger(__name__) class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): @@ -25,10 +31,15 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle Zeroconf discovery.""" - session = async_get_clientsession(self.hass) - bridge = await linkplay_factory_bridge(discovery_info.host, session) + session: ClientSession = await async_get_client_session(self.hass) + bridge: LinkPlayBridge | None = None - if bridge is None: + try: + bridge = await linkplay_factory_httpapi_bridge(discovery_info.host, session) + except LinkPlayRequestException: + _LOGGER.exception( + "Failed to connect to LinkPlay device at %s", discovery_info.host + ) return self.async_abort(reason="cannot_connect") self.data[CONF_HOST] = discovery_info.host @@ -66,14 +77,26 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] = {} if user_input: - session = async_get_clientsession(self.hass) - bridge = await linkplay_factory_bridge(user_input[CONF_HOST], session) + session: ClientSession = await async_get_client_session(self.hass) + bridge: LinkPlayBridge | None = None + + try: + bridge = await linkplay_factory_httpapi_bridge( + user_input[CONF_HOST], session + ) + except LinkPlayRequestException: + _LOGGER.exception( + "Failed to connect to LinkPlay device at %s", user_input[CONF_HOST] + ) + errors["base"] = "cannot_connect" if bridge is not None: self.data[CONF_HOST] = user_input[CONF_HOST] self.data[CONF_MODEL] = bridge.device.name - await self.async_set_unique_id(bridge.device.uuid) + await self.async_set_unique_id( + bridge.device.uuid, 
raise_on_progress=False + ) self._abort_if_unique_id_configured( updates={CONF_HOST: self.data[CONF_HOST]} ) @@ -83,7 +106,6 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): data={CONF_HOST: self.data[CONF_HOST]}, ) - errors["base"] = "cannot_connect" return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Required(CONF_HOST): str}), diff --git a/homeassistant/components/linkplay/const.py b/homeassistant/components/linkplay/const.py index 48ae225dd98..91a427d5eb8 100644 --- a/homeassistant/components/linkplay/const.py +++ b/homeassistant/components/linkplay/const.py @@ -4,3 +4,4 @@ from homeassistant.const import Platform DOMAIN = "linkplay" PLATFORMS = [Platform.MEDIA_PLAYER] +CONF_SESSION = "session" diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 5212f3f99b8..66a719c640e 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/linkplay", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["python-linkplay==0.0.8"], + "requirements": ["python-linkplay==0.0.9"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 398add235bd..0b62b4dbcee 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -48,6 +48,17 @@ SOURCE_MAP: dict[PlayingMode, str] = { PlayingMode.XLR: "XLR", PlayingMode.HDMI: "HDMI", PlayingMode.OPTICAL_2: "Optical 2", + PlayingMode.EXTERN_BLUETOOTH: "External Bluetooth", + PlayingMode.PHONO: "Phono", + PlayingMode.ARC: "ARC", + PlayingMode.COAXIAL_2: "Coaxial 2", + PlayingMode.TF_CARD_1: "SD Card 1", + PlayingMode.TF_CARD_2: "SD Card 2", + PlayingMode.CD: "CD", + PlayingMode.DAB: "DAB Radio", + PlayingMode.FM: "FM Radio", + PlayingMode.RCA: "RCA", + PlayingMode.UDISK: "USB", } SOURCE_MAP_INV: dict[str, PlayingMode] = {v: k for k, v in SOURCE_MAP.items()} diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 7532c9b354a..7f15e297145 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -2,6 +2,14 @@ from typing import Final +from aiohttp import ClientSession +from linkplay.utils import async_create_unverified_client_session + +from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import Event, HomeAssistant, callback + +from .const import CONF_SESSION, DOMAIN + MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" MANUFACTURER_ARYLIC: Final[str] = "Arylic" MANUFACTURER_IEAST: Final[str] = "iEAST" @@ -44,3 +52,22 @@ def get_info_from_project(project: str) -> tuple[str, str]: return MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5 case _: return MANUFACTURER_GENERIC, MODELS_GENERIC + + +async def async_get_client_session(hass: HomeAssistant) -> ClientSession: + """Get a ClientSession that can be used with LinkPlay devices.""" + hass.data.setdefault(DOMAIN, {}) + if CONF_SESSION not in hass.data[DOMAIN]: + clientsession: ClientSession = await async_create_unverified_client_session() + + @callback + def _async_close_websession(event: Event) -> None: + """Close websession.""" + clientsession.detach() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_websession) + hass.data[DOMAIN][CONF_SESSION] = clientsession + return 
clientsession + + session: ClientSession = hass.data[DOMAIN][CONF_SESSION] + return session diff --git a/requirements_all.txt b/requirements_all.txt index 9bb204db562..da902149cfd 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2323,7 +2323,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.2 # homeassistant.components.linkplay -python-linkplay==0.0.8 +python-linkplay==0.0.9 # homeassistant.components.lirc # python-lirc==1.2.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c675c5a0c6a..67986dd3a53 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1844,7 +1844,7 @@ python-juicenet==1.1.0 python-kasa[speedups]==0.7.2 # homeassistant.components.linkplay -python-linkplay==0.0.8 +python-linkplay==0.0.9 # homeassistant.components.matter python-matter-server==6.3.0 diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py index b3d65422e08..be83dd2412d 100644 --- a/tests/components/linkplay/conftest.py +++ b/tests/components/linkplay/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge, LinkPlayDevice import pytest @@ -14,11 +15,15 @@ NAME = "Smart Zone 1_54B9" @pytest.fixture def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: - """Mock for linkplay_factory_bridge.""" + """Mock for linkplay_factory_httpapi_bridge.""" with ( patch( - "homeassistant.components.linkplay.config_flow.linkplay_factory_bridge" + "homeassistant.components.linkplay.config_flow.async_get_client_session", + return_value=AsyncMock(spec=ClientSession), + ), + patch( + "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", ) as factory, ): bridge = AsyncMock(spec=LinkPlayBridge) diff --git a/tests/components/linkplay/test_config_flow.py b/tests/components/linkplay/test_config_flow.py index 641f09893c2..3fd1fbea95e 100644 --- a/tests/components/linkplay/test_config_flow.py +++ b/tests/components/linkplay/test_config_flow.py @@ -3,6 +3,9 @@ from ipaddress import ip_address from unittest.mock import AsyncMock +from linkplay.exceptions import LinkPlayRequestException +import pytest + from homeassistant.components.linkplay.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF @@ -47,10 +50,9 @@ ZEROCONF_DISCOVERY_RE_ENTRY = ZeroconfServiceInfo( ) +@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") async def test_user_flow( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test user setup config flow.""" result = await hass.config_entries.flow.async_init( @@ -74,10 +76,9 @@ async def test_user_flow( assert result["result"].unique_id == UUID +@pytest.mark.usefixtures("mock_linkplay_factory_bridge") async def test_user_flow_re_entry( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test user setup config flow when an entry with the same unique id already exists.""" @@ -105,10 +106,9 @@ async def test_user_flow_re_entry( assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") async def test_zeroconf_flow( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test Zeroconf flow.""" result = await 
hass.config_entries.flow.async_init( @@ -133,10 +133,9 @@ async def test_zeroconf_flow( assert result["result"].unique_id == UUID +@pytest.mark.usefixtures("mock_linkplay_factory_bridge") async def test_zeroconf_flow_re_entry( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test Zeroconf flow when an entry with the same unique id already exists.""" @@ -160,16 +159,35 @@ async def test_zeroconf_flow_re_entry( assert result["reason"] == "already_configured" -async def test_flow_errors( +@pytest.mark.usefixtures("mock_setup_entry") +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, +) -> None: + """Test flow when the device discovered through Zeroconf cannot be reached.""" + + # Temporarily make the mock_linkplay_factory_bridge throw an exception + mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_user_flow_errors( hass: HomeAssistant, mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test flow when the device cannot be reached.""" - # Temporarily store bridge in a separate variable and set factory to return None - bridge = mock_linkplay_factory_bridge.return_value - mock_linkplay_factory_bridge.return_value = None + # Temporarily make the mock_linkplay_factory_bridge throw an exception + mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) result = await hass.config_entries.flow.async_init( DOMAIN, @@ -188,8 +206,8 @@ async def test_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} - # Make linkplay_factory_bridge return a mock bridge again - mock_linkplay_factory_bridge.return_value = bridge + # Make mock_linkplay_factory_bridge_exception no longer throw an exception + mock_linkplay_factory_bridge.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], From c07a9e9d593aa57976426ed29e85bc8d00640bfc Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Tue, 3 Sep 2024 04:54:43 -0700 Subject: [PATCH 1359/1635] Add dependency on google-photos-library-api: Change the Google Photos client library to a new external package (#125040) * Change the Google Photos client library to a new external package * Remove mime type guessing * Update tests to mock out the client library and iterators * Update homeassistant/components/google_photos/media_source.py Co-authored-by: Joost Lekkerkerker --------- Co-authored-by: Joost Lekkerkerker --- .../components/google_photos/__init__.py | 13 +- homeassistant/components/google_photos/api.py | 198 ++---------------- .../components/google_photos/config_flow.py | 15 +- .../components/google_photos/exceptions.py | 7 - .../components/google_photos/manifest.json | 4 +- .../components/google_photos/media_source.py | 105 +++++----- .../components/google_photos/services.py | 44 +++- .../components/google_photos/strings.json | 6 + .../components/google_photos/types.py | 7 + requirements_all.txt | 4 +- requirements_test_all.txt | 4 +- tests/components/google_photos/conftest.py | 113 ++++++---- .../fixtures/api_not_enabled_response.json | 17 -- .../google_photos/fixtures/list_albums.json | 1 + 
.../google_photos/fixtures/not_dict.json | 1 - .../google_photos/test_config_flow.py | 45 ++-- .../google_photos/test_media_source.py | 58 ++--- .../components/google_photos/test_services.py | 51 ++--- 18 files changed, 281 insertions(+), 412 deletions(-) delete mode 100644 homeassistant/components/google_photos/exceptions.py create mode 100644 homeassistant/components/google_photos/types.py delete mode 100644 tests/components/google_photos/fixtures/api_not_enabled_response.json delete mode 100644 tests/components/google_photos/fixtures/not_dict.json diff --git a/homeassistant/components/google_photos/__init__.py b/homeassistant/components/google_photos/__init__.py index ee02c695f16..950995e72c0 100644 --- a/homeassistant/components/google_photos/__init__.py +++ b/homeassistant/components/google_photos/__init__.py @@ -3,17 +3,17 @@ from __future__ import annotations from aiohttp import ClientError, ClientResponseError +from google_photos_library_api.api import GooglePhotosLibraryApi -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers.aiohttp_client import async_get_clientsession from . import api from .const import DOMAIN from .services import async_register_services - -type GooglePhotosConfigEntry = ConfigEntry[api.AsyncConfigEntryAuth] +from .types import GooglePhotosConfigEntry __all__ = [ "DOMAIN", @@ -29,8 +29,9 @@ async def async_setup_entry( hass, entry ) ) - session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) - auth = api.AsyncConfigEntryAuth(hass, session) + web_session = async_get_clientsession(hass) + oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + auth = api.AsyncConfigEntryAuth(web_session, oauth_session) try: await auth.async_get_access_token() except ClientResponseError as err: @@ -41,7 +42,7 @@ async def async_setup_entry( raise ConfigEntryNotReady from err except ClientError as err: raise ConfigEntryNotReady from err - entry.runtime_data = auth + entry.runtime_data = GooglePhotosLibraryApi(auth) async_register_services(hass) diff --git a/homeassistant/components/google_photos/api.py b/homeassistant/components/google_photos/api.py index 0bbb2fe162b..35878efd792 100644 --- a/homeassistant/components/google_photos/api.py +++ b/homeassistant/components/google_photos/api.py @@ -1,216 +1,44 @@ """API for Google Photos bound to Home Assistant OAuth.""" -from abc import ABC, abstractmethod -from functools import partial -import logging -from typing import Any, cast +from typing import cast -from aiohttp.client_exceptions import ClientError -from google.oauth2.credentials import Credentials -from googleapiclient.discovery import Resource, build -from googleapiclient.errors import HttpError -from googleapiclient.http import HttpRequest +import aiohttp +from google_photos_library_api import api from homeassistant.const import CONF_ACCESS_TOKEN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow - -from .exceptions import GooglePhotosApiError - -_LOGGER = logging.getLogger(__name__) - -DEFAULT_PAGE_SIZE = 20 - -# Only included necessary fields to limit response sizes -GET_MEDIA_ITEM_FIELDS = ( - "id,baseUrl,mimeType,filename,mediaMetadata(width,height,photo,video)" -) -LIST_MEDIA_ITEM_FIELDS = 
f"nextPageToken,mediaItems({GET_MEDIA_ITEM_FIELDS})" -UPLOAD_API = "https://photoslibrary.googleapis.com/v1/uploads" -LIST_ALBUMS_FIELDS = ( - "nextPageToken,albums(id,title,coverPhotoBaseUrl,coverPhotoMediaItemId)" -) +from homeassistant.helpers import config_entry_oauth2_flow -class AuthBase(ABC): - """Base class for Google Photos authentication library. - - Provides an asyncio interface around the blocking client library. - """ - - def __init__( - self, - hass: HomeAssistant, - ) -> None: - """Initialize Google Photos auth.""" - self._hass = hass - - @abstractmethod - async def async_get_access_token(self) -> str: - """Return a valid access token.""" - - async def get_user_info(self) -> dict[str, Any]: - """Get the user profile info.""" - service = await self._get_profile_service() - cmd: HttpRequest = service.userinfo().get() - return await self._execute(cmd) - - async def get_media_item(self, media_item_id: str) -> dict[str, Any]: - """Get all MediaItem resources.""" - service = await self._get_photos_service() - cmd: HttpRequest = service.mediaItems().get( - mediaItemId=media_item_id, fields=GET_MEDIA_ITEM_FIELDS - ) - return await self._execute(cmd) - - async def list_media_items( - self, - page_size: int | None = None, - page_token: str | None = None, - album_id: str | None = None, - favorites: bool = False, - ) -> dict[str, Any]: - """Get all MediaItem resources.""" - service = await self._get_photos_service() - args: dict[str, Any] = { - "pageSize": (page_size or DEFAULT_PAGE_SIZE), - "pageToken": page_token, - } - cmd: HttpRequest - if album_id is not None or favorites: - if album_id is not None: - args["albumId"] = album_id - if favorites: - args["filters"] = {"featureFilter": {"includedFeatures": "FAVORITES"}} - cmd = service.mediaItems().search(body=args, fields=LIST_MEDIA_ITEM_FIELDS) - else: - cmd = service.mediaItems().list(**args, fields=LIST_MEDIA_ITEM_FIELDS) - return await self._execute(cmd) - - async def list_albums( - self, page_size: int | None = None, page_token: str | None = None - ) -> dict[str, Any]: - """Get all Album resources.""" - service = await self._get_photos_service() - cmd: HttpRequest = service.albums().list( - pageSize=(page_size or DEFAULT_PAGE_SIZE), - pageToken=page_token, - fields=LIST_ALBUMS_FIELDS, - ) - return await self._execute(cmd) - - async def upload_content(self, content: bytes, mime_type: str) -> str: - """Upload media content to the API and return an upload token.""" - token = await self.async_get_access_token() - session = aiohttp_client.async_get_clientsession(self._hass) - try: - result = await session.post( - UPLOAD_API, headers=_upload_headers(token, mime_type), data=content - ) - result.raise_for_status() - return await result.text() - except ClientError as err: - raise GooglePhotosApiError(f"Failed to upload content: {err}") from err - - async def create_media_items(self, upload_tokens: list[str]) -> list[str]: - """Create a batch of media items and return the ids.""" - service = await self._get_photos_service() - cmd: HttpRequest = service.mediaItems().batchCreate( - body={ - "newMediaItems": [ - { - "simpleMediaItem": { - "uploadToken": upload_token, - } - for upload_token in upload_tokens - } - ] - } - ) - result = await self._execute(cmd) - return [ - media_item["mediaItem"]["id"] - for media_item in result["newMediaItemResults"] - ] - - async def _get_photos_service(self) -> Resource: - """Get current photos library API resource.""" - token = await self.async_get_access_token() - return await 
self._hass.async_add_executor_job( - partial( - build, - "photoslibrary", - "v1", - credentials=Credentials(token=token), # type: ignore[no-untyped-call] - static_discovery=False, - ) - ) - - async def _get_profile_service(self) -> Resource: - """Get current profile service API resource.""" - token = await self.async_get_access_token() - return await self._hass.async_add_executor_job( - partial(build, "oauth2", "v2", credentials=Credentials(token=token)) # type: ignore[no-untyped-call] - ) - - async def _execute(self, request: HttpRequest) -> dict[str, Any]: - try: - result = await self._hass.async_add_executor_job(request.execute) - except HttpError as err: - raise GooglePhotosApiError( - f"Google Photos API responded with error ({err.status_code}): {err.reason}" - ) from err - if not isinstance(result, dict): - raise GooglePhotosApiError( - f"Google Photos API replied with unexpected response: {result}" - ) - if error := result.get("error"): - message = error.get("message", "Unknown Error") - raise GooglePhotosApiError(f"Google Photos API response: {message}") - return cast(dict[str, Any], result) - - -class AsyncConfigEntryAuth(AuthBase): +class AsyncConfigEntryAuth(api.AbstractAuth): """Provide Google Photos authentication tied to an OAuth2 based config entry.""" def __init__( self, - hass: HomeAssistant, + websession: aiohttp.ClientSession, oauth_session: config_entry_oauth2_flow.OAuth2Session, ) -> None: """Initialize AsyncConfigEntryAuth.""" - super().__init__(hass) - self._oauth_session = oauth_session + super().__init__(websession) + self._session = oauth_session async def async_get_access_token(self) -> str: """Return a valid access token.""" - if not self._oauth_session.valid_token: - await self._oauth_session.async_ensure_token_valid() - return cast(str, self._oauth_session.token[CONF_ACCESS_TOKEN]) + await self._session.async_ensure_token_valid() + return cast(str, self._session.token[CONF_ACCESS_TOKEN]) -class AsyncConfigFlowAuth(AuthBase): +class AsyncConfigFlowAuth(api.AbstractAuth): """An API client used during the config flow with a fixed token.""" def __init__( self, - hass: HomeAssistant, + websession: aiohttp.ClientSession, token: str, ) -> None: """Initialize ConfigFlowAuth.""" - super().__init__(hass) + super().__init__(websession) self._token = token async def async_get_access_token(self) -> str: """Return a valid access token.""" return self._token - - -def _upload_headers(token: str, mime_type: str) -> dict[str, Any]: - """Create the upload headers.""" - return { - "Authorization": f"Bearer {token}", - "Content-Type": "application/octet-stream", - "X-Goog-Upload-Content-Type": mime_type, - "X-Goog-Upload-Protocol": "raw", - } diff --git a/homeassistant/components/google_photos/config_flow.py b/homeassistant/components/google_photos/config_flow.py index e5378f67ffd..6b025cac6be 100644 --- a/homeassistant/components/google_photos/config_flow.py +++ b/homeassistant/components/google_photos/config_flow.py @@ -4,13 +4,15 @@ from collections.abc import Mapping import logging from typing import Any +from google_photos_library_api.api import GooglePhotosLibraryApi +from google_photos_library_api.exceptions import GooglePhotosApiError + from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN -from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow from . 
import GooglePhotosConfigEntry, api from .const import DOMAIN, OAUTH2_SCOPES -from .exceptions import GooglePhotosApiError class OAuth2FlowHandler( @@ -39,7 +41,10 @@ class OAuth2FlowHandler( async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult: """Create an entry for the flow.""" - client = api.AsyncConfigFlowAuth(self.hass, data[CONF_TOKEN][CONF_ACCESS_TOKEN]) + session = aiohttp_client.async_get_clientsession(self.hass) + auth = api.AsyncConfigFlowAuth(session, data[CONF_TOKEN][CONF_ACCESS_TOKEN]) + client = GooglePhotosLibraryApi(auth) + try: user_resource_info = await client.get_user_info() await client.list_media_items(page_size=1) @@ -51,7 +56,7 @@ class OAuth2FlowHandler( except Exception: self.logger.exception("Unknown error occurred") return self.async_abort(reason="unknown") - user_id = user_resource_info["id"] + user_id = user_resource_info.id if self.reauth_entry: if self.reauth_entry.unique_id == user_id: @@ -62,7 +67,7 @@ class OAuth2FlowHandler( await self.async_set_unique_id(user_id) self._abort_if_unique_id_configured() - return self.async_create_entry(title=user_resource_info["name"], data=data) + return self.async_create_entry(title=user_resource_info.name, data=data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/google_photos/exceptions.py b/homeassistant/components/google_photos/exceptions.py deleted file mode 100644 index b1a40688677..00000000000 --- a/homeassistant/components/google_photos/exceptions.py +++ /dev/null @@ -1,7 +0,0 @@ -"""Exceptions for Google Photos api calls.""" - -from homeassistant.exceptions import HomeAssistantError - - -class GooglePhotosApiError(HomeAssistantError): - """Error talking to the Google Photos API.""" diff --git a/homeassistant/components/google_photos/manifest.json b/homeassistant/components/google_photos/manifest.json index 3fefb6cf610..5ff37135f9a 100644 --- a/homeassistant/components/google_photos/manifest.json +++ b/homeassistant/components/google_photos/manifest.json @@ -6,6 +6,6 @@ "dependencies": ["application_credentials"], "documentation": "https://www.home-assistant.io/integrations/google_photos", "iot_class": "cloud_polling", - "loggers": ["googleapiclient"], - "requirements": ["google-api-python-client==2.71.0"] + "loggers": ["google_photos_library_api"], + "requirements": ["google-photos-library-api==0.8.0"] } diff --git a/homeassistant/components/google_photos/media_source.py b/homeassistant/components/google_photos/media_source.py index a709dd66a0a..63d66d5a82b 100644 --- a/homeassistant/components/google_photos/media_source.py +++ b/homeassistant/components/google_photos/media_source.py @@ -5,6 +5,9 @@ from enum import Enum, StrEnum import logging from typing import Any, Self, cast +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import Album, MediaItem + from homeassistant.components.media_player import MediaClass, MediaType from homeassistant.components.media_source import ( BrowseError, @@ -17,17 +20,12 @@ from homeassistant.core import HomeAssistant from . import GooglePhotosConfigEntry from .const import DOMAIN, READ_SCOPES -from .exceptions import GooglePhotosApiError _LOGGER = logging.getLogger(__name__) -# Media Sources do not support paging, so we only show a subset of recent -# photos when displaying the users library. 
We fetch a minimum of 50 photos -# unless we run out, but in pages of 100 at a time given sometimes responses -# may only contain a handful of items Fetches at least 50 photos. -MAX_RECENT_PHOTOS = 50 -MAX_ALBUMS = 50 -PAGE_SIZE = 100 +MAX_RECENT_PHOTOS = 100 +MEDIA_ITEMS_PAGE_SIZE = 100 +ALBUM_PAGE_SIZE = 50 THUMBNAIL_SIZE = 256 LARGE_IMAGE_SIZE = 2160 @@ -158,14 +156,15 @@ class GooglePhotosMediaSource(MediaSource): entry = self._async_config_entry(identifier.config_entry_id) client = entry.runtime_data media_item = await client.get_media_item(media_item_id=identifier.media_id) - is_video = media_item["mediaMetadata"].get("video") is not None + if not media_item.mime_type: + raise BrowseError("Could not determine mime type of media item") + if media_item.media_metadata and (media_item.media_metadata.video is not None): + url = _video_url(media_item) + else: + url = _media_url(media_item, LARGE_IMAGE_SIZE) return PlayMedia( - url=( - _video_url(media_item) - if is_video - else _media_url(media_item, LARGE_IMAGE_SIZE) - ), - mime_type=media_item["mimeType"], + url=url, + mime_type=media_item.mime_type, ) async def async_browse_media(self, item: MediaSourceItem) -> BrowseMediaSource: @@ -199,7 +198,6 @@ class GooglePhotosMediaSource(MediaSource): source = _build_account(entry, identifier) if identifier.id_type is None: - result = await client.list_albums(page_size=MAX_ALBUMS) source.children = [ _build_album( special_album.value.title, @@ -208,17 +206,27 @@ class GooglePhotosMediaSource(MediaSource): ), ) for special_album in SpecialAlbum - ] + [ + ] + albums: list[Album] = [] + try: + async for album_result in await client.list_albums( + page_size=ALBUM_PAGE_SIZE + ): + albums.extend(album_result.albums) + except GooglePhotosApiError as err: + raise BrowseError(f"Error listing albums: {err}") from err + + source.children.extend( _build_album( - album["title"], + album.title, PhotosIdentifier.album( identifier.config_entry_id, - album["id"], + album.id, ), _cover_photo_url(album, THUMBNAIL_SIZE), ) - for album in result["albums"] - ] + for album in albums + ) return source if ( @@ -233,28 +241,24 @@ class GooglePhotosMediaSource(MediaSource): else: list_args = {"album_id": identifier.media_id} - media_items: list[dict[str, Any]] = [] - page_token: str | None = None - while ( - not special_album - or (max_photos := special_album.value.max_photos) is None - or len(media_items) < max_photos - ): - try: - result = await client.list_media_items( - **list_args, page_size=PAGE_SIZE, page_token=page_token - ) - except GooglePhotosApiError as err: - raise BrowseError(f"Error listing media items: {err}") from err - media_items.extend(result["mediaItems"]) - page_token = result.get("nextPageToken") - if page_token is None: - break + media_items: list[MediaItem] = [] + try: + async for media_item_result in await client.list_media_items( + **list_args, page_size=MEDIA_ITEMS_PAGE_SIZE + ): + media_items.extend(media_item_result.media_items) + if ( + special_album + and (max_photos := special_album.value.max_photos) + and len(media_items) > max_photos + ): + break + except GooglePhotosApiError as err: + raise BrowseError(f"Error listing media items: {err}") from err - # Render the grid of media item results source.children = [ _build_media_item( - PhotosIdentifier.photo(identifier.config_entry_id, media_item["id"]), + PhotosIdentifier.photo(identifier.config_entry_id, media_item.id), media_item, ) for media_item in media_items @@ -315,38 +319,41 @@ def _build_album( def _build_media_item( - identifier: 
PhotosIdentifier, media_item: dict[str, Any] + identifier: PhotosIdentifier, + media_item: MediaItem, ) -> BrowseMediaSource: """Build the node for an individual photo or video.""" - is_video = media_item["mediaMetadata"].get("video") is not None + is_video = media_item.media_metadata and ( + media_item.media_metadata.video is not None + ) return BrowseMediaSource( domain=DOMAIN, identifier=identifier.as_string(), media_class=MediaClass.IMAGE if not is_video else MediaClass.VIDEO, media_content_type=MediaType.IMAGE if not is_video else MediaType.VIDEO, - title=media_item["filename"], + title=media_item.filename, can_play=is_video, can_expand=False, thumbnail=_media_url(media_item, THUMBNAIL_SIZE), ) -def _media_url(media_item: dict[str, Any], max_size: int) -> str: +def _media_url(media_item: MediaItem, max_size: int) -> str: """Return a media item url with the specified max thumbnail size on the longest edge. See https://developers.google.com/photos/library/guides/access-media-items#base-urls """ - return f"{media_item["baseUrl"]}=h{max_size}" + return f"{media_item.base_url}=h{max_size}" -def _video_url(media_item: dict[str, Any]) -> str: +def _video_url(media_item: MediaItem) -> str: """Return a video url for the item. See https://developers.google.com/photos/library/guides/access-media-items#base-urls """ - return f"{media_item["baseUrl"]}=dv" + return f"{media_item.base_url}=dv" -def _cover_photo_url(album: dict[str, Any], max_size: int) -> str: +def _cover_photo_url(album: Album, max_size: int) -> str: """Return a media item url for the cover photo of the album.""" - return f"{album["coverPhotoBaseUrl"]}=h{max_size}" + return f"{album.cover_photo_base_url}=h{max_size}" diff --git a/homeassistant/components/google_photos/services.py b/homeassistant/components/google_photos/services.py index 77015d5c700..66aa61e23a4 100644 --- a/homeassistant/components/google_photos/services.py +++ b/homeassistant/components/google_photos/services.py @@ -6,9 +6,10 @@ import asyncio import mimetypes from pathlib import Path +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import NewMediaItem, SimpleMediaItem import voluptuous as vol -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_FILENAME from homeassistant.core import ( HomeAssistant, @@ -19,14 +20,8 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv -from . 
import api from .const import DOMAIN, UPLOAD_SCOPE - -type GooglePhotosConfigEntry = ConfigEntry[api.AsyncConfigEntryAuth] - -__all__ = [ - "DOMAIN", -] +from .types import GooglePhotosConfigEntry CONF_CONFIG_ENTRY_ID = "config_entry_id" @@ -98,11 +93,38 @@ def async_register_services(hass: HomeAssistant) -> None: ) for mime_type, content in file_results: upload_tasks.append(client_api.upload_content(content, mime_type)) - upload_tokens = await asyncio.gather(*upload_tasks) - media_ids = await client_api.create_media_items(upload_tokens) + try: + upload_results = await asyncio.gather(*upload_tasks) + except GooglePhotosApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="upload_error", + translation_placeholders={"message": str(err)}, + ) from err + try: + upload_result = await client_api.create_media_items( + [ + NewMediaItem( + SimpleMediaItem(upload_token=upload_result.upload_token) + ) + for upload_result in upload_results + ] + ) + except GooglePhotosApiError as err: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="api_error", + translation_placeholders={"message": str(err)}, + ) from err if call.return_response: return { - "media_items": [{"media_item_id": media_id for media_id in media_ids}] + "media_items": [ + { + "media_item_id": item_result.media_item.id + for item_result in upload_result.new_media_item_results + if item_result.media_item and item_result.media_item.id + } + ] } return None diff --git a/homeassistant/components/google_photos/strings.json b/homeassistant/components/google_photos/strings.json index 9e88429124e..bf2809f896f 100644 --- a/homeassistant/components/google_photos/strings.json +++ b/homeassistant/components/google_photos/strings.json @@ -45,6 +45,12 @@ }, "missing_upload_permission": { "message": "Home Assistnt was not granted permission to upload to Google Photos" + }, + "upload_error": { + "message": "Failed to upload content: {message}" + }, + "api_error": { + "message": "Google Photos API responded with error: {message}" } }, "services": { diff --git a/homeassistant/components/google_photos/types.py b/homeassistant/components/google_photos/types.py new file mode 100644 index 00000000000..2fe57fe1d15 --- /dev/null +++ b/homeassistant/components/google_photos/types.py @@ -0,0 +1,7 @@ +"""Google Photos types.""" + +from google_photos_library_api.api import GooglePhotosLibraryApi + +from homeassistant.config_entries import ConfigEntry + +type GooglePhotosConfigEntry = ConfigEntry[GooglePhotosLibraryApi] diff --git a/requirements_all.txt b/requirements_all.txt index da902149cfd..19d99787672 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -979,7 +979,6 @@ goalzero==0.2.2 goodwe==0.3.6 # homeassistant.components.google_mail -# homeassistant.components.google_photos # homeassistant.components.google_tasks google-api-python-client==2.71.0 @@ -995,6 +994,9 @@ google-generativeai==0.7.2 # homeassistant.components.nest google-nest-sdm==5.0.0 +# homeassistant.components.google_photos +google-photos-library-api==0.8.0 + # homeassistant.components.google_travel_time googlemaps==2.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 67986dd3a53..6203d295d78 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -829,7 +829,6 @@ goalzero==0.2.2 goodwe==0.3.6 # homeassistant.components.google_mail -# homeassistant.components.google_photos # homeassistant.components.google_tasks google-api-python-client==2.71.0 @@ -845,6 +844,9 @@ 
google-generativeai==0.7.2 # homeassistant.components.nest google-nest-sdm==5.0.0 +# homeassistant.components.google_photos +google-photos-library-api==0.8.0 + # homeassistant.components.google_travel_time googlemaps==2.5.1 diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py index f7289993258..9dbe85bd25b 100644 --- a/tests/components/google_photos/conftest.py +++ b/tests/components/google_photos/conftest.py @@ -1,10 +1,18 @@ """Test fixtures for Google Photos.""" -from collections.abc import Awaitable, Callable, Generator +from collections.abc import AsyncGenerator, Awaitable, Callable, Generator import time from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch +from google_photos_library_api.api import GooglePhotosLibraryApi +from google_photos_library_api.model import ( + Album, + ListAlbumResult, + ListMediaItemResult, + MediaItem, + UserInfoResult, +) import pytest from homeassistant.components.application_credentials import ( @@ -28,6 +36,12 @@ CLIENT_SECRET = "5678" FAKE_ACCESS_TOKEN = "some-access-token" FAKE_REFRESH_TOKEN = "some-refresh-token" EXPIRES_IN = 3600 +USERINFO_URL = "https://www.googleapis.com/oauth2/v1/userinfo" +PHOTOS_BASE_URL = "https://photoslibrary.googleapis.com" +MEDIA_ITEMS_URL = f"{PHOTOS_BASE_URL}/v1/mediaItems" +ALBUMS_URL = f"{PHOTOS_BASE_URL}/v1/albums" +UPLOADS_URL = f"{PHOTOS_BASE_URL}/v1/uploads" +CREATE_MEDIA_ITEMS_URL = f"{PHOTOS_BASE_URL}/v1/mediaItems:batchCreate" @pytest.fixture(name="expires_at") @@ -100,56 +114,83 @@ def mock_user_identifier() -> str | None: return USER_IDENTIFIER -@pytest.fixture(name="setup_api") -def mock_setup_api( - fixture_name: str, user_identifier: str +@pytest.fixture(name="api_error") +def mock_api_error() -> Exception | None: + """Provide a json fixture file to load for list media item api responses.""" + return None + + +@pytest.fixture(name="mock_api") +def mock_client_api( + fixture_name: str, + user_identifier: str, + api_error: Exception, ) -> Generator[Mock, None, None]: """Set up fake Google Photos API responses from fixtures.""" - with patch("homeassistant.components.google_photos.api.build") as mock: - mock.return_value.userinfo.return_value.get.return_value.execute.return_value = { - "id": user_identifier, - "name": "Test Name", - } + mock_api = AsyncMock(GooglePhotosLibraryApi, autospec=True) + mock_api.get_user_info.return_value = UserInfoResult( + id=user_identifier, + name="Test Name", + email="test.name@gmail.com", + ) - responses = ( - load_json_array_fixture(fixture_name, DOMAIN) if fixture_name else [] - ) + responses = load_json_array_fixture(fixture_name, DOMAIN) if fixture_name else [] - queue = list(responses) + async def list_media_items( + *args: Any, + ) -> AsyncGenerator[ListMediaItemResult, None, None]: + for response in responses: + mock_list_media_items = Mock(ListMediaItemResult) + mock_list_media_items.media_items = [ + MediaItem.from_dict(media_item) for media_item in response["mediaItems"] + ] + yield mock_list_media_items - def list_media_items(**kwargs: Any) -> Mock: - mock = Mock() - mock.execute.return_value = queue.pop(0) - return mock + mock_api.list_media_items.return_value.__aiter__ = list_media_items + mock_api.list_media_items.return_value.__anext__ = list_media_items + mock_api.list_media_items.side_effect = api_error - mock.return_value.mediaItems.return_value.list = list_media_items - mock.return_value.mediaItems.return_value.search = list_media_items + # Mock a point 
lookup by reading contents of the fixture above + async def get_media_item(media_item_id: str, **kwargs: Any) -> Mock: + for response in responses: + for media_item in response["mediaItems"]: + if media_item["id"] == media_item_id: + return MediaItem.from_dict(media_item) + return None - # Mock a point lookup by reading contents of the fixture above - def get_media_item(mediaItemId: str, **kwargs: Any) -> Mock: - for response in responses: - for media_item in response["mediaItems"]: - if media_item["id"] == mediaItemId: - mock = Mock() - mock.execute.return_value = media_item - return mock - return None + mock_api.get_media_item = get_media_item - mock.return_value.mediaItems.return_value.get = get_media_item - mock.return_value.albums.return_value.list.return_value.execute.return_value = ( - load_json_object_fixture("list_albums.json", DOMAIN) - ) + # Emulate an async iterator for returning pages of response objects. We just + # return a single page. - yield mock + async def list_albums( + *args: Any, **kwargs: Any + ) -> AsyncGenerator[ListAlbumResult, None, None]: + mock_list_album_result = Mock(ListAlbumResult) + mock_list_album_result.albums = [ + Album.from_dict(album) + for album in load_json_object_fixture("list_albums.json", DOMAIN)["albums"] + ] + yield mock_list_album_result + + mock_api.list_albums.return_value.__aiter__ = list_albums + mock_api.list_albums.return_value.__anext__ = list_albums + mock_api.list_albums.side_effect = api_error + return mock_api @pytest.fixture(name="setup_integration") async def mock_setup_integration( hass: HomeAssistant, config_entry: MockConfigEntry, + mock_api: Mock, ) -> Callable[[], Awaitable[bool]]: """Fixture to set up the integration.""" config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() + with patch( + "homeassistant.components.google_photos.GooglePhotosLibraryApi", + return_value=mock_api, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/google_photos/fixtures/api_not_enabled_response.json b/tests/components/google_photos/fixtures/api_not_enabled_response.json deleted file mode 100644 index 8933fcdc7bd..00000000000 --- a/tests/components/google_photos/fixtures/api_not_enabled_response.json +++ /dev/null @@ -1,17 +0,0 @@ -[ - { - "error": { - "code": 403, - "message": "Google Photos API has not been used in project 0 before or it is disabled. Enable it by visiting https://console.developers.google.com/apis/library/photoslibrary.googleapis.com/overview?project=0 then retry. If you enabled this API recently, wait a few minutes for the action to propagate to our systems and retry.", - "errors": [ - { - "message": "Google Photos API has not been used in project 0 before or it is disabled. Enable it by visiting https://console.developers.google.com/apis/library/photoslibrary.googleapis.com/overview?project=0 then retry. 
If you enabled this API recently, wait a few minutes for the action to propagate to our systems and retry.", - "domain": "usageLimits", - "reason": "accessNotConfigured", - "extendedHelp": "https://console.developers.google.com" - } - ], - "status": "PERMISSION_DENIED" - } - } -] diff --git a/tests/components/google_photos/fixtures/list_albums.json b/tests/components/google_photos/fixtures/list_albums.json index 57f2873715b..7460e1d36f3 100644 --- a/tests/components/google_photos/fixtures/list_albums.json +++ b/tests/components/google_photos/fixtures/list_albums.json @@ -3,6 +3,7 @@ { "id": "album-media-id-1", "title": "Album title", + "productUrl": "http://photos.google.com/album-media-id-1", "isWriteable": true, "mediaItemsCount": 7, "coverPhotoBaseUrl": "http://img.example.com/id3", diff --git a/tests/components/google_photos/fixtures/not_dict.json b/tests/components/google_photos/fixtures/not_dict.json deleted file mode 100644 index 05e325337d2..00000000000 --- a/tests/components/google_photos/fixtures/not_dict.json +++ /dev/null @@ -1 +0,0 @@ -["not a dictionary"] diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py index 2564a8ed134..be97d7658c6 100644 --- a/tests/components/google_photos/test_config_flow.py +++ b/tests/components/google_photos/test_config_flow.py @@ -4,8 +4,7 @@ from collections.abc import Generator from typing import Any from unittest.mock import Mock, patch -from googleapiclient.errors import HttpError -from httplib2 import Response +from google_photos_library_api.exceptions import GooglePhotosApiError import pytest from homeassistant import config_entries @@ -20,7 +19,7 @@ from homeassistant.helpers import config_entry_oauth2_flow from .conftest import EXPIRES_IN, FAKE_ACCESS_TOKEN, FAKE_REFRESH_TOKEN, USER_IDENTIFIER -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator @@ -37,6 +36,16 @@ def mock_setup_entry() -> Generator[Mock, None, None]: yield mock_setup +@pytest.fixture(autouse=True) +def mock_patch_api(mock_api: Mock) -> Generator[None, None, None]: + """Fixture to patch the config flow api.""" + with patch( + "homeassistant.components.google_photos.config_flow.GooglePhotosLibraryApi", + return_value=mock_api, + ): + yield + + @pytest.fixture(name="updated_token_entry", autouse=True) def mock_updated_token_entry() -> dict[str, Any]: """Fixture to provide any test specific overrides to token data from the oauth token endpoint.""" @@ -60,7 +69,7 @@ def mock_token_request( ) -@pytest.mark.usefixtures("current_request_with_host", "setup_api") +@pytest.mark.usefixtures("current_request_with_host", "mock_api") @pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) async def test_full_flow( hass: HomeAssistant, @@ -126,11 +135,17 @@ async def test_full_flow( @pytest.mark.usefixtures( "current_request_with_host", "setup_credentials", + "mock_api", +) +@pytest.mark.parametrize( + "api_error", + [ + GooglePhotosApiError("some error"), + ], ) async def test_api_not_enabled( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, - setup_api: Mock, ) -> None: """Check flow aborts if api is not enabled.""" result = await hass.config_entries.flow.async_init( @@ -160,24 +175,18 @@ async def test_api_not_enabled( assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" - 
setup_api.return_value.mediaItems.return_value.list = Mock() - setup_api.return_value.mediaItems.return_value.list.return_value.execute.side_effect = HttpError( - Response({"status": "403"}), - bytes(load_fixture("google_photos/api_not_enabled_response.json"), "utf-8"), - ) result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "access_not_configured" - assert result["description_placeholders"]["message"].endswith( - "Google Photos API has not been used in project 0 before or it is disabled. Enable it by visiting https://console.developers.google.com/apis/library/photoslibrary.googleapis.com/overview?project=0 then retry. If you enabled this API recently, wait a few minutes for the action to propagate to our systems and retry." - ) + assert result["description_placeholders"]["message"].endswith("some error") @pytest.mark.usefixtures("current_request_with_host", "setup_credentials") async def test_general_exception( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, + mock_api: Mock, ) -> None: """Check flow aborts if exception happens.""" result = await hass.config_entries.flow.async_init( @@ -206,17 +215,15 @@ async def test_general_exception( assert resp.status == 200 assert resp.headers["content-type"] == "text/html; charset=utf-8" - with patch( - "homeassistant.components.google_photos.api.build", - side_effect=Exception, - ): - result = await hass.config_entries.flow.async_configure(result["flow_id"]) + mock_api.list_media_items.side_effect = Exception + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "unknown" -@pytest.mark.usefixtures("current_request_with_host", "setup_api", "setup_integration") +@pytest.mark.usefixtures("current_request_with_host", "mock_api", "setup_integration") @pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) @pytest.mark.parametrize( "updated_token_entry", diff --git a/tests/components/google_photos/test_media_source.py b/tests/components/google_photos/test_media_source.py index 1028a34aec1..762a4d5ebd1 100644 --- a/tests/components/google_photos/test_media_source.py +++ b/tests/components/google_photos/test_media_source.py @@ -1,10 +1,8 @@ """Test the Google Photos media source.""" -from typing import Any from unittest.mock import Mock -from googleapiclient.errors import HttpError -from httplib2 import Response +from google_photos_library_api.exceptions import GooglePhotosApiError import pytest from homeassistant.components.google_photos.const import DOMAIN, UPLOAD_SCOPE @@ -46,7 +44,7 @@ async def test_no_config_entries( assert not browse.children -@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.usefixtures("setup_integration", "mock_api") @pytest.mark.parametrize( ("scopes"), [ @@ -64,7 +62,7 @@ async def test_no_read_scopes( assert not browse.children -@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.usefixtures("setup_integration", "mock_api") @pytest.mark.parametrize( ("album_path", "expected_album_title"), [ @@ -135,14 +133,14 @@ async def test_browse_albums( ] == expected_medias -@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.usefixtures("setup_integration", "mock_api") async def test_invalid_config_entry(hass: HomeAssistant) -> None: """Test browsing to a config entry that does not exist.""" with pytest.raises(BrowseError, match="Could not find config 
entry"): await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/invalid-config-entry") -@pytest.mark.usefixtures("setup_integration", "setup_api") +@pytest.mark.usefixtures("setup_integration", "mock_api") @pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) async def test_browse_invalid_path(hass: HomeAssistant) -> None: """Test browsing to a photo is not possible.""" @@ -161,8 +159,8 @@ async def test_browse_invalid_path(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_integration") -@pytest.mark.parametrize("fixture_name", ["list_mediaitems.json"]) -async def test_invalid_album_id(hass: HomeAssistant, setup_api: Mock) -> None: +@pytest.mark.parametrize("api_error", [GooglePhotosApiError("some error")]) +async def test_invalid_album_id(hass: HomeAssistant, mock_api: Mock) -> None: """Test browsing to an album id that does not exist.""" browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") assert browse.domain == DOMAIN @@ -172,11 +170,6 @@ async def test_invalid_album_id(hass: HomeAssistant, setup_api: Mock) -> None: (CONFIG_ENTRY_ID, "Account Name") ] - setup_api.return_value.mediaItems.return_value.search = Mock() - setup_api.return_value.mediaItems.return_value.search.return_value.execute.side_effect = HttpError( - Response({"status": "404"}), b"" - ) - with pytest.raises(BrowseError, match="Error listing media items"): await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/invalid-album-id" @@ -201,18 +194,9 @@ async def test_missing_photo_id( await async_resolve_media(hass, f"{URI_SCHEME}{DOMAIN}/{identifier}", None) -@pytest.mark.usefixtures("setup_integration", "setup_api") -@pytest.mark.parametrize( - "side_effect", - [ - HttpError(Response({"status": "403"}), b""), - ], -) -async def test_list_media_items_failure( - hass: HomeAssistant, - setup_api: Any, - side_effect: HttpError | Response, -) -> None: +@pytest.mark.usefixtures("setup_integration", "mock_api") +@pytest.mark.parametrize("api_error", [GooglePhotosApiError("some error")]) +async def test_list_albums_failure(hass: HomeAssistant) -> None: """Test browsing to an album id that does not exist.""" browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") assert browse.domain == DOMAIN @@ -222,24 +206,13 @@ async def test_list_media_items_failure( (CONFIG_ENTRY_ID, "Account Name") ] - setup_api.return_value.mediaItems.return_value.list = Mock() - setup_api.return_value.mediaItems.return_value.list.return_value.execute.side_effect = side_effect - - with pytest.raises(BrowseError, match="Error listing media items"): - await async_browse_media( - hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" - ) + with pytest.raises(BrowseError, match="Error listing albums"): + await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}") -@pytest.mark.usefixtures("setup_integration", "setup_api") -@pytest.mark.parametrize( - "fixture_name", - [ - "api_not_enabled_response.json", - "not_dict.json", - ], -) -async def test_media_items_error_parsing_response(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("setup_integration", "mock_api") +@pytest.mark.parametrize("api_error", [GooglePhotosApiError("some error")]) +async def test_list_media_items_failure(hass: HomeAssistant) -> None: """Test browsing to an album id that does not exist.""" browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}") assert browse.domain == DOMAIN @@ -248,6 +221,7 @@ async def test_media_items_error_parsing_response(hass: HomeAssistant) -> None: assert 
[(child.identifier, child.title) for child in browse.children] == [ (CONFIG_ENTRY_ID, "Account Name") ] + with pytest.raises(BrowseError, match="Error listing media items"): await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{CONFIG_ENTRY_ID}/a/recent" diff --git a/tests/components/google_photos/test_services.py b/tests/components/google_photos/test_services.py index 198de3295a9..10d57e1d178 100644 --- a/tests/components/google_photos/test_services.py +++ b/tests/components/google_photos/test_services.py @@ -1,45 +1,42 @@ """Tests for Google Photos.""" -import http from unittest.mock import Mock, patch -from googleapiclient.errors import HttpError -from httplib2 import Response +from google_photos_library_api.exceptions import GooglePhotosApiError +from google_photos_library_api.model import ( + CreateMediaItemsResult, + MediaItem, + NewMediaItemResult, + Status, +) import pytest -from homeassistant.components.google_photos.api import UPLOAD_API from homeassistant.components.google_photos.const import DOMAIN, READ_SCOPES from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from tests.common import MockConfigEntry -from tests.test_util.aiohttp import AiohttpClientMocker @pytest.mark.usefixtures("setup_integration") async def test_upload_service( hass: HomeAssistant, config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - setup_api: Mock, + mock_api: Mock, ) -> None: """Test service call to upload content.""" assert hass.services.has_service(DOMAIN, "upload") - aioclient_mock.post(UPLOAD_API, text="some-upload-token") - setup_api.return_value.mediaItems.return_value.batchCreate.return_value.execute.return_value = { - "newMediaItemResults": [ - { - "status": { - "code": 200, - }, - "mediaItem": { - "id": "new-media-item-id-1", - }, - } + mock_api.create_media_items.return_value = CreateMediaItemsResult( + new_media_item_results=[ + NewMediaItemResult( + upload_token="some-upload-token", + status=Status(code=200), + media_item=MediaItem(id="new-media-item-id-1"), + ) ] - } + ) with ( patch( @@ -62,6 +59,7 @@ async def test_upload_service( blocking=True, return_response=True, ) + assert response == {"media_items": [{"media_item_id": "new-media-item-id-1"}]} @@ -157,12 +155,11 @@ async def test_filename_does_not_exist( async def test_upload_service_upload_content_failure( hass: HomeAssistant, config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - setup_api: Mock, + mock_api: Mock, ) -> None: """Test service call to upload content.""" - aioclient_mock.post(UPLOAD_API, status=http.HTTPStatus.SERVICE_UNAVAILABLE) + mock_api.upload_content.side_effect = GooglePhotosApiError() with ( patch( @@ -192,15 +189,11 @@ async def test_upload_service_upload_content_failure( async def test_upload_service_fails_create( hass: HomeAssistant, config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, - setup_api: Mock, + mock_api: Mock, ) -> None: """Test service call to upload content.""" - aioclient_mock.post(UPLOAD_API, text="some-upload-token") - setup_api.return_value.mediaItems.return_value.batchCreate.return_value.execute.side_effect = HttpError( - Response({"status": "403"}), b"" - ) + mock_api.create_media_items.side_effect = GooglePhotosApiError() with ( patch( @@ -238,8 +231,6 @@ async def test_upload_service_fails_create( async def test_upload_service_no_scope( hass: HomeAssistant, config_entry: MockConfigEntry, - aioclient_mock: AiohttpClientMocker, 
- setup_api: Mock, ) -> None: """Test service call to upload content but the config entry is read-only.""" From 94f458ff9888615d7e9948c92e24743a340383c2 Mon Sep 17 00:00:00 2001 From: ilan <31193909+iloveicedgreentea@users.noreply.github.com> Date: Tue, 3 Sep 2024 07:56:59 -0400 Subject: [PATCH 1360/1635] Bump py-madvr2 to 1.6.32 (#125049) feat: update lib --- homeassistant/components/madvr/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/madvr/manifest.json b/homeassistant/components/madvr/manifest.json index ce6336acabc..0ac906fdbef 100644 --- a/homeassistant/components/madvr/manifest.json +++ b/homeassistant/components/madvr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/madvr", "integration_type": "device", "iot_class": "local_push", - "requirements": ["py-madvr2==1.6.29"] + "requirements": ["py-madvr2==1.6.32"] } diff --git a/requirements_all.txt b/requirements_all.txt index 19d99787672..b3b60fc000e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1662,7 +1662,7 @@ py-dormakaba-dkey==1.0.5 py-improv-ble-client==1.0.3 # homeassistant.components.madvr -py-madvr2==1.6.29 +py-madvr2==1.6.32 # homeassistant.components.melissa py-melissa-climate==2.1.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6203d295d78..0cfa38b538a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1357,7 +1357,7 @@ py-dormakaba-dkey==1.0.5 py-improv-ble-client==1.0.3 # homeassistant.components.madvr -py-madvr2==1.6.29 +py-madvr2==1.6.32 # homeassistant.components.melissa py-melissa-climate==2.1.4 From 5965d8d503261496a449ce7654807241d02c1ff2 Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Tue, 3 Sep 2024 13:00:30 +0100 Subject: [PATCH 1361/1635] Pass hass clientsession to ring config flow (#125119) Pass hass clientsession to ring config flow --- homeassistant/components/ring/config_flow.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index ee78541dec7..b82b4f22223 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -17,6 +17,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_2FA, DOMAIN @@ -31,7 +32,10 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, Any]: """Validate the user input allows us to connect.""" - auth = Auth(f"{APPLICATION_NAME}/{ha_version}") + auth = Auth( + f"{APPLICATION_NAME}/{ha_version}", + http_client_session=async_get_clientsession(hass), + ) try: token = await auth.async_fetch_token( From d12c6f89d2dbe91c4f32ff29b9692f22c09e3f7e Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 3 Sep 2024 14:13:43 +0200 Subject: [PATCH 1362/1635] Bump hadolint to 2.12.0 and use matrix for all Dockerfiles (#125131) * Bump hadolint to 2.12.0 and use matrix for all Dockerfiles * Fix * Disable fail fast --- .github/workflows/ci.yaml | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 24b204f3f55..5d21c2c7b04 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -429,17 +429,28 @@ jobs: . 
venv/bin/activate pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files + lint-hadolint: + name: Check ${{ matrix.file }} + runs-on: ubuntu-24.04 + needs: + - info + - pre-commit + strategy: + fail-fast: false + matrix: + file: + - Dockerfile + - Dockerfile.dev + steps: + - name: Check out code from GitHub + uses: actions/checkout@v4.1.7 - name: Register hadolint problem matcher run: | echo "::add-matcher::.github/workflows/matchers/hadolint.json" - - name: Check Dockerfile - uses: docker://hadolint/hadolint:v1.18.2 + - name: Check ${{ matrix.file }} + uses: docker://hadolint/hadolint:v2.12.0 with: - args: hadolint Dockerfile - - name: Check Dockerfile.dev - uses: docker://hadolint/hadolint:v1.18.2 - with: - args: hadolint Dockerfile.dev + args: hadolint ${{ matrix.file }} base: name: Prepare dependencies From c71cf272c859f3763dca80c48738a4e64cd4a23a Mon Sep 17 00:00:00 2001 From: Aaron Bach Date: Tue, 3 Sep 2024 06:21:52 -0600 Subject: [PATCH 1363/1635] Fix unhandled exception with missing IQVIA data (#125114) --- homeassistant/components/iqvia/sensor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/iqvia/sensor.py b/homeassistant/components/iqvia/sensor.py index ba3c288b702..af351e0d543 100644 --- a/homeassistant/components/iqvia/sensor.py +++ b/homeassistant/components/iqvia/sensor.py @@ -244,8 +244,8 @@ class IndexSensor(IQVIAEntity, SensorEntity): key = self.entity_description.key.split("_")[-1].title() try: - [period] = [p for p in data["periods"] if p["Type"] == key] # type: ignore[index] - except TypeError: + period = next(p for p in data["periods"] if p["Type"] == key) # type: ignore[index] + except StopIteration: return data = cast(dict[str, Any], data) From e3896d1f60accb8cac3835812a6d97cb8ba18202 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michal=20J=C3=A1l?= Date: Tue, 3 Sep 2024 14:22:39 +0200 Subject: [PATCH 1364/1635] Bump PySwitchbot to 0.48.2 (#125113) --- homeassistant/components/switchbot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 0cbbd70a805..f97162184c6 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.48.1"] + "requirements": ["PySwitchbot==0.48.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index b3b60fc000e..e14ef8af35a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.1 +PySwitchbot==0.48.2 # homeassistant.components.switchmate PySwitchmate==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 0cfa38b538a..b28d39da203 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.1 +PySwitchbot==0.48.2 # homeassistant.components.syncthru PySyncThru==0.7.10 From 851600630c731abdaf50a79a5ce52b00228852df Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 14:28:33 +0200 Subject: [PATCH 1365/1635] Log deprecation warning when `template.Template` is created 
without `hass` (#125142) * Log deprecation warning when template.Template is created without hass * Improve docstring --- homeassistant/helpers/template.py | 18 +++++++++++++++++- tests/helpers/test_template.py | 17 +++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 1786194b437..9f8eb628e63 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -507,10 +507,26 @@ class Template: ) def __init__(self, template: str, hass: HomeAssistant | None = None) -> None: - """Instantiate a template.""" + """Instantiate a template. + + Note: A valid hass instance should always be passed in. The hass parameter + will be non optional in Home Assistant Core 2025.10. + """ + # pylint: disable-next=import-outside-toplevel + from .frame import report + if not isinstance(template, str): raise TypeError("Expected template to be a string") + if not hass: + report( + ( + "creates a template object without passing hass, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + self.template: str = template.strip() self._compiled_code: CodeType | None = None self._compiled: jinja2.Template | None = None diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index e4f833b2d1d..339b372f137 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -6547,3 +6547,20 @@ async def test_merge_response_with_incorrect_response(hass: HomeAssistant) -> No tpl = template.Template(_template, hass) with pytest.raises(TemplateError, match="TypeError: Response is not a dictionary"): tpl.async_render() + + +def test_warn_no_hass(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> None: + """Test deprecation warning when instantiating Template without hass.""" + + message = "Detected code that creates a template object without passing hass" + template.Template("blah") + assert message in caplog.text + caplog.clear() + + template.Template("blah", None) + assert message in caplog.text + caplog.clear() + + template.Template("blah", hass) + assert message not in caplog.text + caplog.clear() From c321bd70e171151a723fccd3507dcf144c77140d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 14:37:21 +0200 Subject: [PATCH 1366/1635] Log deprecation warning when `cv.template` is called from wrong thread (#125141) Log deprecation warning when cv.template is called from wrong thread --- homeassistant/helpers/config_validation.py | 26 ++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index 3d3de40a2c6..d88c388f9c7 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -715,8 +715,19 @@ def template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value is None") if isinstance(value, (list, dict, template_helper.Template)): raise vol.Invalid("template value should be a string") + if not (hass := _async_get_hass_or_none()): + # pylint: disable-next=import-outside-toplevel + from .frame import report - template_value = template_helper.Template(str(value), _async_get_hass_or_none()) + report( + ( + "validates schema outside the event loop, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + + template_value = template_helper.Template(str(value), hass) try: template_value.ensure_valid() @@ 
-733,8 +744,19 @@ def dynamic_template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value should be a string") if not template_helper.is_template_string(str(value)): raise vol.Invalid("template value does not contain a dynamic template") + if not (hass := _async_get_hass_or_none()): + # pylint: disable-next=import-outside-toplevel + from .frame import report - template_value = template_helper.Template(str(value), _async_get_hass_or_none()) + report( + ( + "validates schema outside the event loop, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + + template_value = template_helper.Template(str(value), hass) try: template_value.ensure_valid() From 6ecc5c19a29b101c63ed9342512b871523e0ce75 Mon Sep 17 00:00:00 2001 From: Brett Adams Date: Tue, 3 Sep 2024 22:38:47 +1000 Subject: [PATCH 1367/1635] Add climate platform to Tesla Fleet (#123169) * Add climate * docstring * Add tests * Fix limited scope situation * Add another test * Add icons * Type vehicle data * Replace inline temperatures * Fix handle_vehicle_command type * Fix preset turning HVAC off * Fix cop_mode check * Use constants * Reference docs in command signing error * Move to a read-only check * Remove raise_for * Fixes * Tests * Remove raise_for_signing * Remove unused strings * Fix async_set_temperature * Correct tests * Remove HVAC modes at startup in read-only mode * Fix order of init actions to set hvac_modes correctly * Fix no temp test * Add handle command type * Docstrings * fix matches and fix a bug * Split tests * Fix issues from rebase --- .../components/tesla_fleet/__init__.py | 12 +- .../components/tesla_fleet/climate.py | 330 +++++++++++++ homeassistant/components/tesla_fleet/const.py | 7 + .../components/tesla_fleet/entity.py | 6 + .../components/tesla_fleet/helpers.py | 80 ++++ .../components/tesla_fleet/icons.json | 14 + .../components/tesla_fleet/models.py | 3 + .../components/tesla_fleet/strings.json | 41 +- tests/components/tesla_fleet/conftest.py | 56 ++- .../tesla_fleet/snapshots/test_climate.ambr | 422 ++++++++++++++++ tests/components/tesla_fleet/test_climate.py | 450 ++++++++++++++++++ 11 files changed, 1407 insertions(+), 14 deletions(-) create mode 100644 homeassistant/components/tesla_fleet/climate.py create mode 100644 homeassistant/components/tesla_fleet/helpers.py create mode 100644 tests/components/tesla_fleet/snapshots/test_climate.ambr create mode 100644 tests/components/tesla_fleet/test_climate.py diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 183e7e753b5..3bcb0bf7ef9 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -39,7 +39,12 @@ from .coordinator import ( from .models import TeslaFleetData, TeslaFleetEnergyData, TeslaFleetVehicleData from .oauth import TeslaSystemImplementation -PLATFORMS: Final = [Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.SENSOR] +PLATFORMS: Final = [ + Platform.BINARY_SENSOR, + Platform.CLIMATE, + Platform.DEVICE_TRACKER, + Platform.SENSOR, +] type TeslaFleetConfigEntry = ConfigEntry[TeslaFleetData] @@ -53,8 +58,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - session = async_get_clientsession(hass) token = jwt.decode(access_token, options={"verify_signature": False}) - scopes = token["scp"] - region = token["ou_code"].lower() + scopes: list[Scope] = [Scope(s) for s in token["scp"]] + region: str = token["ou_code"].lower() 
OAuth2FlowHandler.async_register_implementation( hass, @@ -133,6 +138,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - coordinator=coordinator, vin=vin, device=device, + signing=product["command_signing"] == "required", ) ) elif "energy_site_id" in product and hasattr(tesla, "energy"): diff --git a/homeassistant/components/tesla_fleet/climate.py b/homeassistant/components/tesla_fleet/climate.py new file mode 100644 index 00000000000..6199ee112b5 --- /dev/null +++ b/homeassistant/components/tesla_fleet/climate.py @@ -0,0 +1,330 @@ +"""Climate platform for Tesla Fleet integration.""" + +from __future__ import annotations + +from itertools import chain +from typing import Any, cast + +from tesla_fleet_api.const import CabinOverheatProtectionTemp, Scope + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ( + ATTR_TEMPERATURE, + PRECISION_HALVES, + PRECISION_WHOLE, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import TeslaFleetConfigEntry +from .const import DOMAIN, TeslaFleetClimateSide +from .entity import TeslaFleetVehicleEntity +from .helpers import handle_vehicle_command +from .models import TeslaFleetVehicleData + +DEFAULT_MIN_TEMP = 15 +DEFAULT_MAX_TEMP = 28 + +PARALLEL_UPDATES = 0 + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TeslaFleetConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Tesla Fleet Climate platform from a config entry.""" + + async_add_entities( + chain( + ( + TeslaFleetClimateEntity( + vehicle, TeslaFleetClimateSide.DRIVER, entry.runtime_data.scopes + ) + for vehicle in entry.runtime_data.vehicles + ), + ( + TeslaFleetCabinOverheatProtectionEntity( + vehicle, entry.runtime_data.scopes + ) + for vehicle in entry.runtime_data.vehicles + ), + ) + ) + + +class TeslaFleetClimateEntity(TeslaFleetVehicleEntity, ClimateEntity): + """Tesla Fleet vehicle climate entity.""" + + _attr_precision = PRECISION_HALVES + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_hvac_modes = [HVACMode.HEAT_COOL, HVACMode.OFF] + _attr_supported_features = ( + ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + | ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.PRESET_MODE + ) + _attr_preset_modes = ["off", "keep", "dog", "camp"] + _enable_turn_on_off_backwards_compatibility = False + + def __init__( + self, + data: TeslaFleetVehicleData, + side: TeslaFleetClimateSide, + scopes: Scope, + ) -> None: + """Initialize the climate.""" + + self.read_only = Scope.VEHICLE_CMDS not in scopes or data.signing + + if self.read_only: + self._attr_supported_features = ClimateEntityFeature(0) + self._attr_hvac_modes = [] + + super().__init__( + data, + side, + ) + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + value = self.get("climate_state_is_climate_on") + if value is None: + self._attr_hvac_mode = None + elif value: + self._attr_hvac_mode = HVACMode.HEAT_COOL + else: + self._attr_hvac_mode = HVACMode.OFF + + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and self.read_only: + self._attr_hvac_modes = [self._attr_hvac_mode] + + self._attr_current_temperature = self.get("climate_state_inside_temp") + 
self._attr_target_temperature = self.get(f"climate_state_{self.key}_setting") + self._attr_preset_mode = self.get("climate_state_climate_keeper_mode") + self._attr_min_temp = cast( + float, self.get("climate_state_min_avail_temp", DEFAULT_MIN_TEMP) + ) + self._attr_max_temp = cast( + float, self.get("climate_state_max_avail_temp", DEFAULT_MAX_TEMP) + ) + + async def async_turn_on(self) -> None: + """Set the climate state to on.""" + + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_start()) + + self._attr_hvac_mode = HVACMode.HEAT_COOL + self.async_write_ha_state() + + async def async_turn_off(self) -> None: + """Set the climate state to off.""" + + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.auto_conditioning_stop()) + + self._attr_hvac_mode = HVACMode.OFF + self._attr_preset_mode = self._attr_preset_modes[0] + self.async_write_ha_state() + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set the climate temperature.""" + + if ATTR_TEMPERATURE not in kwargs: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_temperature", + ) + + temp = kwargs[ATTR_TEMPERATURE] + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.set_temps( + driver_temp=temp, + passenger_temp=temp, + ) + ) + self._attr_target_temperature = temp + + if mode := kwargs.get(ATTR_HVAC_MODE): + # Set HVAC mode will call write_ha_state + await self.async_set_hvac_mode(mode) + else: + self.async_write_ha_state() + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + if hvac_mode not in self.hvac_modes: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_hvac_mode", + translation_placeholders={"hvac_mode": hvac_mode}, + ) + if hvac_mode == HVACMode.OFF: + await self.async_turn_off() + else: + await self.async_turn_on() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Set the climate preset mode.""" + await self.wake_up_if_asleep() + await handle_vehicle_command( + self.api.set_climate_keeper_mode( + climate_keeper_mode=self._attr_preset_modes.index(preset_mode) + ) + ) + self._attr_preset_mode = preset_mode + if preset_mode != self._attr_preset_modes[0]: + self._attr_hvac_mode = HVACMode.HEAT_COOL + self.async_write_ha_state() + + +COP_MODES = { + "Off": HVACMode.OFF, + "On": HVACMode.COOL, + "FanOnly": HVACMode.FAN_ONLY, +} + +# String to celsius +COP_LEVELS = { + "Low": 30, + "Medium": 35, + "High": 40, +} + +# Celsius to IntEnum +TEMP_LEVELS = { + 30: CabinOverheatProtectionTemp.LOW, + 35: CabinOverheatProtectionTemp.MEDIUM, + 40: CabinOverheatProtectionTemp.HIGH, +} + + +class TeslaFleetCabinOverheatProtectionEntity(TeslaFleetVehicleEntity, ClimateEntity): + """Tesla Fleet vehicle cabin overheat protection entity.""" + + _attr_precision = PRECISION_WHOLE + _attr_target_temperature_step = 5 + _attr_min_temp = COP_LEVELS["Low"] + _attr_max_temp = COP_LEVELS["High"] + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_hvac_modes = list(COP_MODES.values()) + _enable_turn_on_off_backwards_compatibility = False + _attr_entity_registry_enabled_default = False + + def __init__( + self, + data: TeslaFleetVehicleData, + scopes: Scope, + ) -> None: + """Initialize the cabin overheat climate entity.""" + + # Scopes + self.read_only = Scope.VEHICLE_CMDS not in scopes or data.signing + + # Supported Features + if self.read_only: + self._attr_supported_features = 
ClimateEntityFeature(0) + self._attr_hvac_modes = [] + else: + self._attr_supported_features = ( + ClimateEntityFeature.TURN_ON | ClimateEntityFeature.TURN_OFF + ) + + super().__init__(data, "climate_state_cabin_overheat_protection") + + def _async_update_attrs(self) -> None: + """Update the attributes of the entity.""" + + if (state := self.get("climate_state_cabin_overheat_protection")) is None: + self._attr_hvac_mode = None + else: + self._attr_hvac_mode = COP_MODES.get(state) + + # If not scoped, prevent the user from changing the HVAC mode by making it the only option + if self._attr_hvac_mode and self.read_only: + self._attr_hvac_modes = [self._attr_hvac_mode] + + if (level := self.get("climate_state_cop_activation_temperature")) is None: + self._attr_target_temperature = None + else: + self._attr_target_temperature = COP_LEVELS.get(level) + + self._attr_current_temperature = self.get("climate_state_inside_temp") + + @property + def supported_features(self) -> ClimateEntityFeature: + """Return the list of supported features.""" + if not self.read_only and self.get( + "vehicle_config_cop_user_set_temp_supported" + ): + return ( + self._attr_supported_features | ClimateEntityFeature.TARGET_TEMPERATURE + ) + return self._attr_supported_features + + async def async_turn_on(self) -> None: + """Set the climate state to on.""" + await self.async_set_hvac_mode(HVACMode.COOL) + + async def async_turn_off(self) -> None: + """Set the climate state to off.""" + await self.async_set_hvac_mode(HVACMode.OFF) + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set the climate temperature.""" + + if ATTR_TEMPERATURE not in kwargs: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="missing_temperature", + ) + + temp = kwargs[ATTR_TEMPERATURE] + if (cop_mode := TEMP_LEVELS.get(temp)) is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_cop_temp", + ) + + await self.wake_up_if_asleep() + await handle_vehicle_command(self.api.set_cop_temp(cop_mode)) + self._attr_target_temperature = temp + + if mode := kwargs.get(ATTR_HVAC_MODE): + await self._async_set_cop(mode) + + self.async_write_ha_state() + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set the climate mode and state.""" + await self.wake_up_if_asleep() + await self._async_set_cop(hvac_mode) + self.async_write_ha_state() + + async def _async_set_cop(self, hvac_mode: HVACMode) -> None: + if hvac_mode == HVACMode.OFF: + await handle_vehicle_command( + self.api.set_cabin_overheat_protection(on=False, fan_only=False) + ) + elif hvac_mode == HVACMode.COOL: + await handle_vehicle_command( + self.api.set_cabin_overheat_protection(on=True, fan_only=False) + ) + elif hvac_mode == HVACMode.FAN_ONLY: + await handle_vehicle_command( + self.api.set_cabin_overheat_protection(on=True, fan_only=True) + ) + + self._attr_hvac_mode = hvac_mode diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index 081225c296c..53e34092326 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -41,3 +41,10 @@ class TeslaFleetState(StrEnum): ONLINE = "online" ASLEEP = "asleep" OFFLINE = "offline" + + +class TeslaFleetClimateSide(StrEnum): + """Tesla Fleet Climate Keeper Modes.""" + + DRIVER = "driver_temp" + PASSENGER = "passenger_temp" diff --git a/homeassistant/components/tesla_fleet/entity.py b/homeassistant/components/tesla_fleet/entity.py index 
c853bb798b5..103fd216953 100644 --- a/homeassistant/components/tesla_fleet/entity.py +++ b/homeassistant/components/tesla_fleet/entity.py @@ -14,6 +14,7 @@ from .coordinator import ( TeslaFleetEnergySiteLiveCoordinator, TeslaFleetVehicleDataCoordinator, ) +from .helpers import wake_up_vehicle from .models import TeslaFleetEnergyData, TeslaFleetVehicleData @@ -27,6 +28,7 @@ class TeslaFleetEntity( """Parent class for all TeslaFleet entities.""" _attr_has_entity_name = True + read_only: bool def __init__( self, @@ -100,6 +102,10 @@ class TeslaFleetVehicleEntity(TeslaFleetEntity): """Return a specific value from coordinator data.""" return self.coordinator.data.get(self.key) + async def wake_up_if_asleep(self) -> None: + """Wake up the vehicle if its asleep.""" + await wake_up_vehicle(self.vehicle) + class TeslaFleetEnergyLiveEntity(TeslaFleetEntity): """Parent class for TeslaFleet Energy Site Live entities.""" diff --git a/homeassistant/components/tesla_fleet/helpers.py b/homeassistant/components/tesla_fleet/helpers.py new file mode 100644 index 00000000000..d554ccce70c --- /dev/null +++ b/homeassistant/components/tesla_fleet/helpers.py @@ -0,0 +1,80 @@ +"""Tesla Fleet helper functions.""" + +import asyncio +from collections.abc import Awaitable +from typing import Any + +from tesla_fleet_api.exceptions import TeslaFleetError + +from homeassistant.exceptions import HomeAssistantError + +from .const import DOMAIN, LOGGER, TeslaFleetState +from .models import TeslaFleetVehicleData + + +async def wake_up_vehicle(vehicle: TeslaFleetVehicleData) -> None: + """Wake up a vehicle.""" + async with vehicle.wakelock: + times = 0 + while vehicle.coordinator.data["state"] != TeslaFleetState.ONLINE: + try: + if times == 0: + cmd = await vehicle.api.wake_up() + else: + cmd = await vehicle.api.vehicle() + state = cmd["response"]["state"] + except TeslaFleetError as e: + raise HomeAssistantError(str(e)) from e + vehicle.coordinator.data["state"] = state + if state != TeslaFleetState.ONLINE: + times += 1 + if times >= 4: # Give up after 30 seconds total + raise HomeAssistantError("Could not wake up vehicle") + await asyncio.sleep(times * 5) + + +async def handle_command(command: Awaitable) -> dict[str, Any]: + """Handle a command.""" + try: + result = await command + except TeslaFleetError as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_failed", + translation_placeholders={"message": e.message}, + ) from e + LOGGER.debug("Command result: %s", result) + return result + + +async def handle_vehicle_command(command: Awaitable) -> bool: + """Handle a vehicle command.""" + result = await handle_command(command) + if (response := result.get("response")) is None: + if error := result.get("error"): + # No response with error + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_error", + translation_placeholders={"error": error}, + ) + # No response without error (unexpected) + raise HomeAssistantError(f"Unknown response: {response}") + if (result := response.get("result")) is not True: + if reason := response.get("reason"): + if reason in ("already_set", "not_charging", "requested"): + # Reason is acceptable + return result + # Result of false with reason + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_reason", + translation_placeholders={"reason": reason}, + ) + # Result of false without reason (unexpected) + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="command_no_reason", + ) + # 
Response with result of true + return result diff --git a/homeassistant/components/tesla_fleet/icons.json b/homeassistant/components/tesla_fleet/icons.json index 2dbde45ee08..dc40f282037 100644 --- a/homeassistant/components/tesla_fleet/icons.json +++ b/homeassistant/components/tesla_fleet/icons.json @@ -38,6 +38,20 @@ } } }, + "climate": { + "driver_temp": { + "state_attributes": { + "preset_mode": { + "state": { + "off": "mdi:power", + "keep": "mdi:fan", + "dog": "mdi:dog", + "camp": "mdi:tent" + } + } + } + } + }, "device_tracker": { "location": { "default": "mdi:map-marker" diff --git a/homeassistant/components/tesla_fleet/models.py b/homeassistant/components/tesla_fleet/models.py index 1b1f5f083cd..ae945dd96bf 100644 --- a/homeassistant/components/tesla_fleet/models.py +++ b/homeassistant/components/tesla_fleet/models.py @@ -2,6 +2,7 @@ from __future__ import annotations +import asyncio from dataclasses import dataclass from tesla_fleet_api import EnergySpecific, VehicleSpecific @@ -33,6 +34,8 @@ class TeslaFleetVehicleData: coordinator: TeslaFleetVehicleDataCoordinator vin: str device: DeviceInfo + signing: bool + wakelock = asyncio.Lock() @dataclass diff --git a/homeassistant/components/tesla_fleet/strings.json b/homeassistant/components/tesla_fleet/strings.json index d4848836689..5b59d3efc5c 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -107,6 +107,24 @@ "name": "Tire pressure warning rear right" } }, + "climate": { + "climate_state_cabin_overheat_protection": { + "name": "Cabin overheat protection" + }, + "driver_temp": { + "name": "[%key:component::climate::title%]", + "state_attributes": { + "preset_mode": { + "state": { + "off": "Normal", + "keep": "Keep mode", + "dog": "Dog mode", + "camp": "Camp mode" + } + } + } + } + }, "device_tracker": { "location": { "name": "Location" @@ -272,7 +290,28 @@ }, "exceptions": { "update_failed": { - "message": "{endpoint} data request failed. {message}" + "message": "{endpoint} data request failed: {message}" + }, + "command_failed": { + "message": "Command failed: {message}" + }, + "command_error": { + "message": "Command returned an error: {error}" + }, + "command_reason": { + "message": "Command was unsuccessful: {reason}" + }, + "command_no_reason": { + "message": "Command was unsuccessful but did not return a reason why." + }, + "invalid_cop_temp": { + "message": "Cabin overheat protection does not support that temperature." + }, + "invalid_hvac_mode": { + "message": "Climate mode {hvac_mode} is not supported." + }, + "missing_temperature": { + "message": "Temperature is required for this action." 
} } } diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 615c62fe16e..cc580212233 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -9,10 +9,18 @@ from unittest.mock import AsyncMock, patch import jwt import pytest +from tesla_fleet_api.const import Scope from homeassistant.components.tesla_fleet.const import DOMAIN, SCOPES -from .const import LIVE_STATUS, PRODUCTS, SITE_INFO, VEHICLE_DATA, VEHICLE_ONLINE +from .const import ( + COMMAND_OK, + LIVE_STATUS, + PRODUCTS, + SITE_INFO, + VEHICLE_DATA, + VEHICLE_ONLINE, +) from tests.common import MockConfigEntry @@ -25,16 +33,8 @@ def mock_expires_at() -> int: return time.time() + 3600 -@pytest.fixture(name="scopes") -def mock_scopes() -> list[str]: - """Fixture to set the scopes present in the OAuth token.""" - return SCOPES - - -@pytest.fixture -def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: +def create_config_entry(expires_at: int, scopes: list[Scope]) -> MockConfigEntry: """Create Tesla Fleet entry in Home Assistant.""" - access_token = jwt.encode( { "sub": UID, @@ -64,6 +64,32 @@ def normal_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry: ) +@pytest.fixture +def normal_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant.""" + return create_config_entry(expires_at, SCOPES) + + +@pytest.fixture +def noscope_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant without scopes.""" + return create_config_entry(expires_at, [Scope.OPENID, Scope.OFFLINE_ACCESS]) + + +@pytest.fixture +def readonly_config_entry(expires_at: int) -> MockConfigEntry: + """Create Tesla Fleet entry in Home Assistant without scopes.""" + return create_config_entry( + expires_at, + [ + Scope.OPENID, + Scope.OFFLINE_ACCESS, + Scope.VEHICLE_DEVICE_DATA, + Scope.ENERGY_DEVICE_DATA, + ], + ) + + @pytest.fixture(autouse=True) def mock_products() -> Generator[AsyncMock]: """Mock Tesla Fleet Api products method.""" @@ -131,3 +157,13 @@ def mock_find_server() -> Generator[AsyncMock]: "homeassistant.components.tesla_fleet.TeslaFleetApi.find_server", ) as mock_find_server: yield mock_find_server + + +@pytest.fixture +def mock_request(): + """Mock all Tesla Fleet API requests.""" + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi._request", + return_value=COMMAND_OK, + ) as mock_request: + yield mock_request diff --git a/tests/components/tesla_fleet/snapshots/test_climate.ambr b/tests/components/tesla_fleet/snapshots/test_climate.ambr new file mode 100644 index 00000000000..696f8c37f08 --- /dev/null +++ b/tests/components/tesla_fleet/snapshots/test_climate.ambr @@ -0,0 +1,422 @@ +# serializer version: 1 +# name: test_climate[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 
'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[climate.test_cabin_overheat_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + 'temperature': 40, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_climate[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate[climate.test_climate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': 'keep', + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 22.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat_cool', + }) +# --- +# name: test_climate_alt[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_alt[climate.test_cabin_overheat_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + }), + 
'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climate_alt[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_alt[climate.test_climate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 30.0, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': 'off', + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': 22.0, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_climate_offline[climate.test_cabin_overheat_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'target_temp_step': 5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.test_cabin_overheat_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cabin overheat protection', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'climate_state_cabin_overheat_protection', + 'unique_id': 'LRWXF7EK4KC700000-climate_state_cabin_overheat_protection', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_offline[climate.test_cabin_overheat_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Cabin overheat protection', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 40, + 'min_temp': 30, + 'supported_features': , + 'target_temp_step': 5, + }), + 'context': , + 'entity_id': 'climate.test_cabin_overheat_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_climate_offline[climate.test_climate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': 
None, + 'entity_id': 'climate.test_climate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate', + 'platform': 'tesla_fleet', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': , + 'unique_id': 'LRWXF7EK4KC700000-driver_temp', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_offline[climate.test_climate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Test Climate', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 15.0, + 'preset_mode': None, + 'preset_modes': list([ + 'off', + 'keep', + 'dog', + 'camp', + ]), + 'supported_features': , + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.test_climate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/tesla_fleet/test_climate.py b/tests/components/tesla_fleet/test_climate.py new file mode 100644 index 00000000000..902faaba922 --- /dev/null +++ b/tests/components/tesla_fleet/test_climate.py @@ -0,0 +1,450 @@ +"""Test the Tesla Fleet climate platform.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion +from tesla_fleet_api.exceptions import InvalidCommand, VehicleOffline + +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ATTR_PRESET_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + HVACMode, +) +from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from . 
import assert_entities, setup_platform +from .const import ( + COMMAND_ERRORS, + COMMAND_IGNORED_REASON, + VEHICLE_ASLEEP, + VEHICLE_DATA_ALT, + VEHICLE_ONLINE, +) + +from tests.common import MockConfigEntry, async_fire_time_changed + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the climate entities are correct.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_climate_services( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, + mock_request: AsyncMock, +) -> None: + """Tests that the climate services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + # Turn On and Set Temp + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 20, + ATTR_HVAC_MODE: HVACMode.HEAT_COOL, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 20 + assert state.state == HVACMode.HEAT_COOL + + # Set Temp + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 21, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 21 + + # Set Preset + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_PRESET_MODE: "keep"}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "keep" + + # Set Preset + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_PRESET_MODE: "off"}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_PRESET_MODE] == "off" + + # Turn Off + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == HVACMode.OFF + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate_overheat_protection_services( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + normal_config_entry: MockConfigEntry, + mock_request: AsyncMock, +) -> None: + """Tests that the climate overheat protection services work.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_cabin_overheat_protection" + + # Turn On and Set Low + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 30, + ATTR_HVAC_MODE: HVACMode.FAN_ONLY, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 30 + assert state.state == HVACMode.FAN_ONLY + + # Set Temp Medium + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 35, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert 
state.attributes[ATTR_TEMPERATURE] == 35 + + # Set Temp High + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TEMPERATURE: 40, + }, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_TEMPERATURE] == 40 + + # Turn Off + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == HVACMode.OFF + + # Turn On + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.state == HVACMode.COOL + + # Call set temp with invalid temperature + with pytest.raises( + ServiceValidationError, + match="Cabin overheat protection does not support that temperature", + ): + # Invalid Temp + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 34}, + blocking=True, + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate_alt( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the climate entity is correct.""" + + mock_vehicle_data.return_value = VEHICLE_DATA_ALT + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_climate_offline( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_vehicle_data: AsyncMock, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests that the climate entity is correct.""" + + mock_vehicle_data.side_effect = VehicleOffline + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + assert_entities(hass, normal_config_entry.entry_id, entity_registry, snapshot) + + +async def test_invalid_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, +) -> None: + """Tests service error is handled.""" + + await setup_platform(hass, normal_config_entry, platforms=[Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with ( + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + side_effect=InvalidCommand, + ) as mock_on, + pytest.raises( + HomeAssistantError, + match="Command failed: The data request or command is unknown.", + ), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_on.assert_called_once() + + +@pytest.mark.parametrize("response", COMMAND_ERRORS) +async def test_errors( + hass: HomeAssistant, response: str, normal_config_entry: MockConfigEntry +) -> None: + """Tests service reason is handled.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with ( + patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + return_value=response, + ) as mock_on, + pytest.raises(HomeAssistantError), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_on.assert_called_once() + + +async def test_ignored_error( + hass: HomeAssistant, + 
normal_config_entry: MockConfigEntry, +) -> None: + """Tests ignored error is handled.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + with patch( + "homeassistant.components.tesla_fleet.VehicleSpecific.auto_conditioning_start", + return_value=COMMAND_IGNORED_REASON, + ) as mock_on: + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_on.assert_called_once() + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_asleep_or_offline( + hass: HomeAssistant, + mock_vehicle_data: AsyncMock, + mock_wake_up: AsyncMock, + mock_vehicle_state: AsyncMock, + freezer: FrozenDateTimeFactory, + normal_config_entry: MockConfigEntry, + mock_request: AsyncMock, +) -> None: + """Tests asleep is handled.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + mock_vehicle_data.assert_called_once() + + # Put the vehicle alseep + mock_vehicle_data.reset_mock() + mock_vehicle_data.side_effect = VehicleOffline + freezer.tick(VEHICLE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + mock_vehicle_data.assert_called_once() + mock_wake_up.reset_mock() + + # Run a command but fail trying to wake up the vehicle + mock_wake_up.side_effect = InvalidCommand + with pytest.raises( + HomeAssistantError, match="The data request or command is unknown." + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_wake_up.assert_called_once() + + mock_wake_up.side_effect = None + mock_wake_up.reset_mock() + + # Run a command but timeout trying to wake up the vehicle + mock_wake_up.return_value = VEHICLE_ASLEEP + mock_vehicle_state.return_value = VEHICLE_ASLEEP + with ( + patch("homeassistant.components.tesla_fleet.helpers.asyncio.sleep"), + pytest.raises(HomeAssistantError, match="Could not wake up vehicle"), + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: [entity_id]}, + blocking=True, + ) + mock_wake_up.assert_called_once() + mock_vehicle_state.assert_called() + + mock_wake_up.reset_mock() + mock_vehicle_state.reset_mock() + mock_wake_up.return_value = VEHICLE_ONLINE + mock_vehicle_state.return_value = VEHICLE_ONLINE + + # Run a command and wake up the vehicle immediately + await hass.services.async_call( + CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True + ) + await hass.async_block_till_done() + mock_wake_up.assert_called_once() + + +async def test_climate_noscope( + hass: HomeAssistant, + readonly_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Tests with no command scopes.""" + await setup_platform(hass, readonly_config_entry, [Platform.CLIMATE]) + entity_id = "climate.test_climate" + + with pytest.raises( + ServiceValidationError, match="Climate mode off is not supported" + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: [entity_id], ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + with pytest.raises( + HomeAssistantError, + match="Entity climate.test_climate does not support this service.", + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: [entity_id], ATTR_TEMPERATURE: 20}, + blocking=True, + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") 
+@pytest.mark.parametrize( + ("entity_id", "high", "low"), + [ + ("climate.test_climate", 16, 28), + ("climate.test_cabin_overheat_protection", 30, 40), + ], +) +async def test_climate_notemp( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + entity_id: str, + high: int, + low: int, +) -> None: + """Tests that set temp fails without a temp attribute.""" + + await setup_platform(hass, normal_config_entry, [Platform.CLIMATE]) + + with pytest.raises( + ServiceValidationError, match="Temperature is required for this action" + ): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: [entity_id], + ATTR_TARGET_TEMP_HIGH: high, + ATTR_TARGET_TEMP_LOW: low, + }, + blocking=True, + ) From 6cea6be4a7c64d7257f8ff050925ea65f81981a6 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Tue, 3 Sep 2024 14:59:01 +0200 Subject: [PATCH 1368/1635] Improve hassfest docker image (#125133) * Improve hassfest docker image * Use fixed uv version * Use cli params instead env * run hassfest * Exclude pycache --- script/hassfest/docker.py | 13 +++++-------- script/hassfest/docker/Dockerfile | 13 +++++-------- script/hassfest/docker/Dockerfile.dockerignore | 3 +++ 3 files changed, 13 insertions(+), 16 deletions(-) diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index 6e39a5c350b..bce77e1ece0 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -69,7 +69,7 @@ WORKDIR /config _HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. # # To update, run python3 -m script.hassfest -p docker -FROM python:alpine3.20 +FROM python:alpine ENV \ UV_SYSTEM_PYTHON=true \ @@ -79,20 +79,17 @@ SHELL ["/bin/sh", "-o", "pipefail", "-c"] ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] WORKDIR "/github/workspace" -# Install uv -COPY --from=ghcr.io/astral-sh/uv:{uv} /uv /bin/uv - COPY . /usr/src/homeassistant -RUN \ +# Uv is only needed during build +RUN --mount=from=ghcr.io/astral-sh/uv:{uv},source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ - && cd /usr/src/homeassistant \ && uv pip install \ --no-build \ --no-cache \ - -c homeassistant/package_constraints.txt \ - -r requirements.txt \ + -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ + -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree=={pipdeptree} tqdm=={tqdm} ruff=={ruff} \ {required_components_packages} diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 4fc60c0c621..0d99b04c44c 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -1,7 +1,7 @@ # Automatically generated by hassfest. # # To update, run python3 -m script.hassfest -p docker -FROM python:alpine3.20 +FROM python:alpine ENV \ UV_SYSTEM_PYTHON=true \ @@ -11,20 +11,17 @@ SHELL ["/bin/sh", "-o", "pipefail", "-c"] ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] WORKDIR "/github/workspace" -# Install uv -COPY --from=ghcr.io/astral-sh/uv:0.2.27 /uv /bin/uv - COPY . 
/usr/src/homeassistant -RUN \ +# Uv is only needed during build +RUN --mount=from=ghcr.io/astral-sh/uv:0.2.27,source=/uv,target=/bin/uv \ # Required for PyTurboJPEG apk add --no-cache libturbojpeg \ - && cd /usr/src/homeassistant \ && uv pip install \ --no-build \ --no-cache \ - -c homeassistant/package_constraints.txt \ - -r requirements.txt \ + -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ + -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 ruff==0.6.2 \ PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.8.29 mutagen==1.47.0 diff --git a/script/hassfest/docker/Dockerfile.dockerignore b/script/hassfest/docker/Dockerfile.dockerignore index 75ed4f0e5d3..c109421fce1 100644 --- a/script/hassfest/docker/Dockerfile.dockerignore +++ b/script/hassfest/docker/Dockerfile.dockerignore @@ -6,3 +6,6 @@ !script/ script/hassfest/docker/ !script/hassfest/docker/entrypoint.sh + +# Temporary files +**/__pycache__ \ No newline at end of file From eda1656e757ce9d3aa92545fe1d275dc97859e81 Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:22:38 +0100 Subject: [PATCH 1369/1635] Abort ring config_flow if account is already configured (#125120) * Abort ring config_flow if account is already configured * Update tests/components/ring/test_config_flow.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/ring/config_flow.py | 3 ++- homeassistant/components/ring/strings.json | 2 +- tests/components/ring/test_config_flow.py | 22 ++++++++++++++++++++ 3 files changed, 25 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index b82b4f22223..74546567270 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -65,6 +65,8 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors: dict[str, str] = {} if user_input is not None: + await self.async_set_unique_id(user_input[CONF_USERNAME]) + self._abort_if_unique_id_configured() try: token = await validate_input(self.hass, user_input) except Require2FA: @@ -77,7 +79,6 @@ class RingConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") errors["base"] = "unknown" else: - await self.async_set_unique_id(user_input[CONF_USERNAME]) return self.async_create_entry( title=user_input[CONF_USERNAME], data={CONF_USERNAME: user_input[CONF_USERNAME], CONF_TOKEN: token}, diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index ed0319b7a4b..6bd7d194136 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -27,7 +27,7 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py index bbaec2e37c4..d27c4878aea 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -220,3 +220,25 @@ async def test_reauth_error( "token": "new-foobar", } assert len(mock_setup_entry.mock_calls) == 1 + + +async def 
test_account_configured( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_added_config_entry: Mock, +) -> None: + """Test that user cannot configure the same account twice.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"username": "foo@bar.com", "password": "test-password"}, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" From 334359bb0a118a917740682c0104a29d06187ec0 Mon Sep 17 00:00:00 2001 From: tronikos Date: Tue, 3 Sep 2024 06:23:07 -0700 Subject: [PATCH 1370/1635] Add Google Cloud Speech-to-Text (STT) (#120854) * Google Cloud * . * fix * mypy * add tests * Update .coveragerc * Update const.py * upload file, reconfigure and import flow * fixes * default to latest_short * mypy * update * Allow clearing options in the UI * update * update * update --- .../components/google_cloud/__init__.py | 2 +- .../components/google_cloud/config_flow.py | 20 ++- .../components/google_cloud/const.py | 164 ++++++++++++++++++ .../components/google_cloud/manifest.json | 5 +- .../components/google_cloud/strings.json | 3 +- homeassistant/components/google_cloud/stt.py | 147 ++++++++++++++++ requirements_all.txt | 3 + requirements_test_all.txt | 3 + .../google_cloud/test_config_flow.py | 2 + 9 files changed, 345 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/google_cloud/stt.py diff --git a/homeassistant/components/google_cloud/__init__.py b/homeassistant/components/google_cloud/__init__.py index 84848543790..9d1923fd87d 100644 --- a/homeassistant/components/google_cloud/__init__.py +++ b/homeassistant/components/google_cloud/__init__.py @@ -6,7 +6,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -PLATFORMS = [Platform.TTS] +PLATFORMS = [Platform.STT, Platform.TTS] async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/google_cloud/config_flow.py b/homeassistant/components/google_cloud/config_flow.py index bf97de67eb1..dec849de4e6 100644 --- a/homeassistant/components/google_cloud/config_flow.py +++ b/homeassistant/components/google_cloud/config_flow.py @@ -26,7 +26,16 @@ from homeassistant.helpers.selector import ( SelectSelectorMode, ) -from .const import CONF_KEY_FILE, CONF_SERVICE_ACCOUNT_INFO, DEFAULT_LANG, DOMAIN, TITLE +from .const import ( + CONF_KEY_FILE, + CONF_SERVICE_ACCOUNT_INFO, + CONF_STT_MODEL, + DEFAULT_LANG, + DEFAULT_STT_MODEL, + DOMAIN, + SUPPORTED_STT_MODELS, + TITLE, +) from .helpers import ( async_tts_voices, tts_options_schema, @@ -162,6 +171,15 @@ class GoogleCloudOptionsFlowHandler(OptionsFlowWithConfigEntry): **tts_options_schema( self.options, voices, from_config_flow=True ).schema, + vol.Optional( + CONF_STT_MODEL, + default=DEFAULT_STT_MODEL, + ): SelectSelector( + SelectSelectorConfig( + mode=SelectSelectorMode.DROPDOWN, + options=SUPPORTED_STT_MODELS, + ) + ), } ), self.options, diff --git a/homeassistant/components/google_cloud/const.py b/homeassistant/components/google_cloud/const.py index 6a718bf35d3..f416d36483a 100644 --- a/homeassistant/components/google_cloud/const.py +++ b/homeassistant/components/google_cloud/const.py @@ -10,6 +10,7 @@ CONF_KEY_FILE = "key_file" DEFAULT_LANG = "en-US" 
+# TTS constants CONF_GENDER = "gender" CONF_VOICE = "voice" CONF_ENCODING = "encoding" @@ -18,3 +19,166 @@ CONF_PITCH = "pitch" CONF_GAIN = "gain" CONF_PROFILES = "profiles" CONF_TEXT_TYPE = "text_type" + +# STT constants +CONF_STT_MODEL = "stt_model" + +DEFAULT_STT_MODEL = "latest_short" + +# https://cloud.google.com/speech-to-text/docs/transcription-model +SUPPORTED_STT_MODELS = [ + "latest_long", + "latest_short", + "telephony", + "telephony_short", + "medical_dictation", + "medical_conversation", + "command_and_search", + "default", + "phone_call", + "video", +] + +# https://cloud.google.com/speech-to-text/docs/speech-to-text-supported-languages +STT_LANGUAGES = [ + "af-ZA", + "am-ET", + "ar-AE", + "ar-BH", + "ar-DZ", + "ar-EG", + "ar-IL", + "ar-IQ", + "ar-JO", + "ar-KW", + "ar-LB", + "ar-MA", + "ar-MR", + "ar-OM", + "ar-PS", + "ar-QA", + "ar-SA", + "ar-SY", + "ar-TN", + "ar-YE", + "az-AZ", + "bg-BG", + "bn-BD", + "bn-IN", + "bs-BA", + "ca-ES", + "cmn-Hans-CN", + "cmn-Hans-HK", + "cmn-Hant-TW", + "cs-CZ", + "da-DK", + "de-AT", + "de-CH", + "de-DE", + "el-GR", + "en-AU", + "en-CA", + "en-GB", + "en-GH", + "en-HK", + "en-IE", + "en-IN", + "en-KE", + "en-NG", + "en-NZ", + "en-PH", + "en-PK", + "en-SG", + "en-TZ", + "en-US", + "en-ZA", + "es-AR", + "es-BO", + "es-CL", + "es-CO", + "es-CR", + "es-DO", + "es-EC", + "es-ES", + "es-GT", + "es-HN", + "es-MX", + "es-NI", + "es-PA", + "es-PE", + "es-PR", + "es-PY", + "es-SV", + "es-US", + "es-UY", + "es-VE", + "et-EE", + "eu-ES", + "fa-IR", + "fi-FI", + "fil-PH", + "fr-BE", + "fr-CA", + "fr-CH", + "fr-FR", + "gl-ES", + "gu-IN", + "hi-IN", + "hr-HR", + "hu-HU", + "hy-AM", + "id-ID", + "is-IS", + "it-CH", + "it-IT", + "iw-IL", + "ja-JP", + "jv-ID", + "ka-GE", + "kk-KZ", + "km-KH", + "kn-IN", + "ko-KR", + "lo-LA", + "lt-LT", + "lv-LV", + "mk-MK", + "ml-IN", + "mn-MN", + "mr-IN", + "ms-MY", + "my-MM", + "ne-NP", + "nl-BE", + "nl-NL", + "no-NO", + "pa-Guru-IN", + "pl-PL", + "pt-BR", + "pt-PT", + "ro-RO", + "ru-RU", + "si-LK", + "sk-SK", + "sl-SI", + "sq-AL", + "sr-RS", + "su-ID", + "sv-SE", + "sw-KE", + "sw-TZ", + "ta-IN", + "ta-LK", + "ta-MY", + "ta-SG", + "te-IN", + "th-TH", + "tr-TR", + "uk-UA", + "ur-IN", + "ur-PK", + "uz-UZ", + "vi-VN", + "yue-Hant-HK", + "zu-ZA", +] diff --git a/homeassistant/components/google_cloud/manifest.json b/homeassistant/components/google_cloud/manifest.json index d0dda80a870..3e08b6254db 100644 --- a/homeassistant/components/google_cloud/manifest.json +++ b/homeassistant/components/google_cloud/manifest.json @@ -7,5 +7,8 @@ "documentation": "https://www.home-assistant.io/integrations/google_cloud", "integration_type": "service", "iot_class": "cloud_push", - "requirements": ["google-cloud-texttospeech==2.17.2"] + "requirements": [ + "google-cloud-texttospeech==2.17.2", + "google-cloud-speech==2.27.0" + ] } diff --git a/homeassistant/components/google_cloud/strings.json b/homeassistant/components/google_cloud/strings.json index 0a0804005de..3bf9d8c8489 100644 --- a/homeassistant/components/google_cloud/strings.json +++ b/homeassistant/components/google_cloud/strings.json @@ -24,7 +24,8 @@ "pitch": "Default pitch of the voice", "gain": "Default volume gain (in dB) of the voice", "profiles": "Default audio profiles", - "text_type": "Default text type" + "text_type": "Default text type", + "stt_model": "STT model" } } } diff --git a/homeassistant/components/google_cloud/stt.py b/homeassistant/components/google_cloud/stt.py new file mode 100644 index 00000000000..13715ae29f8 --- /dev/null +++ 
b/homeassistant/components/google_cloud/stt.py @@ -0,0 +1,147 @@ +"""Support for the Google Cloud STT service.""" + +from __future__ import annotations + +from collections.abc import AsyncGenerator, AsyncIterable +import logging + +from google.api_core.exceptions import GoogleAPIError, Unauthenticated +from google.cloud import speech_v1 + +from homeassistant.components.stt import ( + AudioBitRates, + AudioChannels, + AudioCodecs, + AudioFormats, + AudioSampleRates, + SpeechMetadata, + SpeechResult, + SpeechResultState, + SpeechToTextEntity, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import ( + CONF_SERVICE_ACCOUNT_INFO, + CONF_STT_MODEL, + DEFAULT_STT_MODEL, + DOMAIN, + STT_LANGUAGES, +) + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Google Cloud speech platform via config entry.""" + service_account_info = config_entry.data[CONF_SERVICE_ACCOUNT_INFO] + client = speech_v1.SpeechAsyncClient.from_service_account_info(service_account_info) + async_add_entities([GoogleCloudSpeechToTextEntity(config_entry, client)]) + + +class GoogleCloudSpeechToTextEntity(SpeechToTextEntity): + """Google Cloud STT entity.""" + + def __init__( + self, + entry: ConfigEntry, + client: speech_v1.SpeechAsyncClient, + ) -> None: + """Init Google Cloud STT entity.""" + self._attr_unique_id = f"{entry.entry_id}-stt" + self._attr_name = entry.title + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Google", + model="Cloud", + entry_type=dr.DeviceEntryType.SERVICE, + ) + self._entry = entry + self._client = client + self._model = entry.options.get(CONF_STT_MODEL, DEFAULT_STT_MODEL) + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return STT_LANGUAGES + + @property + def supported_formats(self) -> list[AudioFormats]: + """Return a list of supported formats.""" + return [AudioFormats.WAV, AudioFormats.OGG] + + @property + def supported_codecs(self) -> list[AudioCodecs]: + """Return a list of supported codecs.""" + return [AudioCodecs.PCM, AudioCodecs.OPUS] + + @property + def supported_bit_rates(self) -> list[AudioBitRates]: + """Return a list of supported bitrates.""" + return [AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[AudioSampleRates]: + """Return a list of supported samplerates.""" + return [AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[AudioChannels]: + """Return a list of supported channels.""" + return [AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] + ) -> SpeechResult: + """Process an audio stream to STT service.""" + streaming_config = speech_v1.StreamingRecognitionConfig( + config=speech_v1.RecognitionConfig( + encoding=( + speech_v1.RecognitionConfig.AudioEncoding.OGG_OPUS + if metadata.codec == AudioCodecs.OPUS + else speech_v1.RecognitionConfig.AudioEncoding.LINEAR16 + ), + sample_rate_hertz=metadata.sample_rate, + language_code=metadata.language, + model=self._model, + ) + ) + + async def request_generator() -> ( + AsyncGenerator[speech_v1.StreamingRecognizeRequest] + ): + # The first request 
must only contain a streaming_config + yield speech_v1.StreamingRecognizeRequest(streaming_config=streaming_config) + # All subsequent requests must only contain audio_content + async for audio_content in stream: + yield speech_v1.StreamingRecognizeRequest(audio_content=audio_content) + + try: + responses = await self._client.streaming_recognize( + requests=request_generator(), + timeout=10, + ) + + transcript = "" + async for response in responses: + _LOGGER.debug("response: %s", response) + if not response.results: + continue + result = response.results[0] + if not result.alternatives: + continue + transcript += response.results[0].alternatives[0].transcript + except GoogleAPIError as err: + _LOGGER.error("Error occurred during Google Cloud STT call: %s", err) + if isinstance(err, Unauthenticated): + self._entry.async_start_reauth(self.hass) + return SpeechResult(None, SpeechResultState.ERROR) + + return SpeechResult(transcript, SpeechResultState.SUCCESS) diff --git a/requirements_all.txt b/requirements_all.txt index e14ef8af35a..ebc621486c9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -985,6 +985,9 @@ google-api-python-client==2.71.0 # homeassistant.components.google_pubsub google-cloud-pubsub==2.23.0 +# homeassistant.components.google_cloud +google-cloud-speech==2.27.0 + # homeassistant.components.google_cloud google-cloud-texttospeech==2.17.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b28d39da203..d151c8e6bc2 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -835,6 +835,9 @@ google-api-python-client==2.71.0 # homeassistant.components.google_pubsub google-cloud-pubsub==2.23.0 +# homeassistant.components.google_cloud +google-cloud-speech==2.27.0 + # homeassistant.components.google_cloud google-cloud-texttospeech==2.17.2 diff --git a/tests/components/google_cloud/test_config_flow.py b/tests/components/google_cloud/test_config_flow.py index a5a51052e66..e4b4631f223 100644 --- a/tests/components/google_cloud/test_config_flow.py +++ b/tests/components/google_cloud/test_config_flow.py @@ -161,6 +161,7 @@ async def test_options_flow( "gain", "profiles", "text_type", + "stt_model", } assert mock_api_tts_from_service_account_info.list_voices.call_count == 2 @@ -179,5 +180,6 @@ async def test_options_flow( "gain": 0.0, "profiles": [], "text_type": "text", + "stt_model": "latest_short", } assert mock_api_tts_from_service_account_info.list_voices.call_count == 3 From fd01e22ca4c7c132ae2137cb055623f3efe31251 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Tue, 3 Sep 2024 15:24:49 +0200 Subject: [PATCH 1371/1635] Fix energy sensor for ThirdReality Matter powerplug (#125140) --- homeassistant/components/matter/sensor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index c3ab18072f0..5d4ad900d8e 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -384,7 +384,7 @@ DISCOVERY_SCHEMAS = [ key="ThirdRealityEnergySensorWattAccumulated", device_class=SensorDeviceClass.ENERGY, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_display_precision=3, state_class=SensorStateClass.TOTAL_INCREASING, measurement_to_ha=lambda x: x / 1000, From cf10549df4b81493636e64c9b2000a7ef65ee140 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 15:25:35 +0200 
Subject: [PATCH 1372/1635] Restore unnecessary assignment of Template.hass in event helper (#125143) --- homeassistant/helpers/event.py | 16 ++++++++++++++ tests/helpers/test_event.py | 40 ++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 38f461d8d7a..97a85fdde89 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -981,6 +981,22 @@ class TrackTemplateResultInfo: self._last_result: dict[Template, bool | str | TemplateError] = {} + for track_template_ in track_templates: + if track_template_.template.hass: + continue + + # pylint: disable-next=import-outside-toplevel + from .frame import report + + report( + ( + "calls async_track_template_result with template without hass, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + track_template_.template.hass = hass + self._rate_limit = KeyedRateLimit(hass) self._info: dict[Template, RenderInfo] = {} self._track_state_changes: _TrackStateChangeFiltered | None = None diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index 6c71f1d8a7c..19f1ef5bb76 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -4938,3 +4938,43 @@ async def test_async_track_state_report_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(tracker_called) == 2 unsub() + + +async def test_async_track_template_no_hass_deprecated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test async_track_template with a template without hass is deprecated.""" + message = ( + "Detected code that calls async_track_template_result with template without " + "hass, which will stop working in HA Core 2025.10. Please report this issue." + ) + + async_track_template(hass, Template("blah"), lambda x, y, z: None) + assert message in caplog.text + caplog.clear() + + async_track_template(hass, Template("blah", hass), lambda x, y, z: None) + assert message not in caplog.text + caplog.clear() + + +async def test_async_track_template_result_no_hass_deprecated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test async_track_template_result with a template without hass is deprecated.""" + message = ( + "Detected code that calls async_track_template_result with template without " + "hass, which will stop working in HA Core 2025.10. Please report this issue." 
+ ) + + async_track_template_result( + hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None + ) + assert message in caplog.text + caplog.clear() + + async_track_template_result( + hass, [TrackTemplate(Template("blah", hass), None)], lambda x, y, z: None + ) + assert message not in caplog.text + caplog.clear() From fdce5248111fa089912662e1cf1e758e9b4fa308 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:27:33 +0200 Subject: [PATCH 1373/1635] Add Onkyo Receiver class to improve typing (#124190) --- .../components/onkyo/media_player.py | 42 ++++++++----------- homeassistant/components/onkyo/receiver.py | 28 +++++++++++++ 2 files changed, 46 insertions(+), 24 deletions(-) create mode 100644 homeassistant/components/onkyo/receiver.py diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 8d8f4d3bfd5..df1f25a196b 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -from dataclasses import dataclass import logging from typing import Any @@ -30,6 +29,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.hass_dict import HassKey +from .receiver import Receiver, ReceiverInfo + _LOGGER = logging.getLogger(__name__) DOMAIN = "onkyo" @@ -143,16 +144,6 @@ ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" -@dataclass -class ReceiverInfo: - """Onkyo Receiver information.""" - - host: str - port: int - model_name: str - identifier: str - - async def async_register_services(hass: HomeAssistant) -> None: """Register Onkyo services.""" @@ -189,7 +180,7 @@ async def async_setup_platform( """Set up the Onkyo platform.""" await async_register_services(hass) - receivers: dict[str, pyeiscp.Connection] = {} # indexed by host + receivers: dict[str, Receiver] = {} # indexed by host all_entities = hass.data.setdefault(DATA_MP_ENTITIES, []) host = config.get(CONF_HOST) @@ -234,31 +225,34 @@ async def async_setup_platform( """Receiver (re)connected.""" receiver = receivers[origin] _LOGGER.debug( - "Receiver (re)connected: %s (%s)", receiver.name, receiver.host + "Receiver (re)connected: %s (%s)", receiver.name, receiver.conn.host ) for entity in entities.values(): entity.backfill_state() _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) - receiver = await pyeiscp.Connection.create( + connection = await pyeiscp.Connection.create( host=info.host, port=info.port, update_callback=async_onkyo_update_callback, connect_callback=async_onkyo_connect_callback, ) - receiver.model_name = info.model_name - receiver.identifier = info.identifier - receiver.name = name or info.model_name - receiver.discovered = discovered + receiver = Receiver( + conn=connection, + model_name=info.model_name, + identifier=info.identifier, + name=name or info.model_name, + discovered=discovered, + ) - receivers[receiver.host] = receiver + receivers[connection.host] = receiver # Discover what zones are available for the receiver by querying the power. # If we get a response for the specific zone, it means it is available. for zone in ZONES: - receiver.query_property(zone, "power") + receiver.conn.query_property(zone, "power") # Add the main zone to entities, since it is always active. 
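# Editor's sketch, not part of the patch: the point of this commit is to wrap the
# untyped pyeiscp.Connection in a small dataclass instead of monkey-patching
# attributes such as name and identifier onto the connection object, so every
# attribute is declared and checkable by mypy. A minimal standalone version of
# that pattern (Any stands in for pyeiscp.Connection, assumed for illustration):
from dataclasses import dataclass
from typing import Any


@dataclass
class ReceiverSketch:
    """Typed wrapper bundling a raw connection with its metadata."""

    conn: Any  # the underlying pyeiscp.Connection
    model_name: str
    identifier: str
    name: str
    discovered: bool


# Callers then go through receiver.conn.query_property(...) for protocol calls and
# plain receiver.name / receiver.identifier for metadata, as the updated
# media_player code in this commit does.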
_LOGGER.debug("Adding Main Zone on %s", receiver.name) @@ -306,7 +300,7 @@ async def async_setup_platform( @callback def close_receiver(_event): for receiver in receivers.values(): - receiver.close() + receiver.conn.close() hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, close_receiver) @@ -323,7 +317,7 @@ class OnkyoMediaPlayer(MediaPlayerEntity): def __init__( self, - receiver: pyeiscp.Connection, + receiver: Receiver, sources: dict[str, str], zone: str, max_volume: int, @@ -369,12 +363,12 @@ class OnkyoMediaPlayer(MediaPlayerEntity): @callback def _update_receiver(self, propname: str, value: Any) -> None: """Update a property in the receiver.""" - self._receiver.update_property(self._zone, propname, value) + self._receiver.conn.update_property(self._zone, propname, value) @callback def _query_receiver(self, propname: str) -> None: """Cause the receiver to send an update about a property.""" - self._receiver.query_property(self._zone, propname) + self._receiver.conn.query_property(self._zone, propname) async def async_turn_on(self) -> None: """Turn the media player on.""" diff --git a/homeassistant/components/onkyo/receiver.py b/homeassistant/components/onkyo/receiver.py new file mode 100644 index 00000000000..eb20f327b69 --- /dev/null +++ b/homeassistant/components/onkyo/receiver.py @@ -0,0 +1,28 @@ +"""Onkyo receiver.""" + +from __future__ import annotations + +from dataclasses import dataclass + +import pyeiscp + + +@dataclass +class Receiver: + """Onkyo receiver.""" + + conn: pyeiscp.Connection + model_name: str + identifier: str + name: str + discovered: bool + + +@dataclass +class ReceiverInfo: + """Onkyo receiver information.""" + + host: str + port: int + model_name: str + identifier: str From 491bde181c9b5c33cfe712edc6da24a6297d1109 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 3 Sep 2024 03:29:02 -1000 Subject: [PATCH 1374/1635] Speed up hassio send_command url check (#125122) * Speed up hassio send_command url check The send_command call checked the resulting path to make sure that the input path was not modified when converting to a URL. Since the host is is pre-set, we only need to check the processed raw_path matches command instead of converting back to a string, and than comparing it against another constructed string. * Speed up hassio send_command url check The send_command call checked the resulting path to make sure that the input path was not modified when converting to a URL. Since the host is is pre-set, we only need to check the processed raw_path matches command instead of converting back to a string, and than comparing it against another constructed string. * adjust --- homeassistant/components/hassio/handler.py | 3 +-- tests/components/hassio/test_handler.py | 7 +++++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index 305b9d4961b..c57e43f73f3 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -568,14 +568,13 @@ class HassIO: This method is a coroutine. """ - url = f"http://{self._ip}{command}" joined_url = self._base_url.join(URL(command)) # This check is to make sure the normalized URL string # is the same as the URL string that was passed in. 
If # they are different, then the passed in command URL # contained characters that were removed by the normalization # such as ../../../../etc/passwd - if url != str(joined_url): + if joined_url.raw_path != command: _LOGGER.error("Invalid request %s", command) raise HassioAPIError diff --git a/tests/components/hassio/test_handler.py b/tests/components/hassio/test_handler.py index c5fa6ff8254..949f96ece38 100644 --- a/tests/components/hassio/test_handler.py +++ b/tests/components/hassio/test_handler.py @@ -468,4 +468,11 @@ async def test_send_command_invalid_command(hass: HomeAssistant) -> None: """Test send command fails when command is invalid.""" hassio: HassIO = hass.data["hassio"] with pytest.raises(HassioAPIError): + # absolute path await hassio.send_command("/test/../bad") + with pytest.raises(HassioAPIError): + # relative path + await hassio.send_command("test/../bad") + with pytest.raises(HassioAPIError): + # relative path with percent encoding + await hassio.send_command("test/%2E%2E/bad") From d6bd4312ab0028e639431bc2a3da29651ca5f1c9 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 15:34:31 +0200 Subject: [PATCH 1375/1635] Add explaining comments in cv.template tests (#125081) --- tests/helpers/test_config_validation.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 57c712e2f10..1608a856de8 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -671,10 +671,12 @@ def test_template(hass: HomeAssistant) -> None: "Hello", "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Filter added as an extension by Home Assistant + # Filter 'expand' added as an extension by Home Assistant "{{ ['group.foo']|expand|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: schema(value) @@ -700,8 +702,11 @@ async def test_template_no_hass(hass: HomeAssistant) -> None: "Hello", "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant, no error + # because non existing functions are not detected by Jinja2 "{{ expand('group.foo')|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: await hass.async_add_executor_job(schema, value) @@ -725,10 +730,12 @@ def test_dynamic_template(hass: HomeAssistant) -> None: options = ( "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant "{{ expand('group.foo')|map(attribute='entity_id')|list }}", - # Filter added as an extension by Home Assistant + # Filter 'expand' added as an extension by Home Assistant "{{ ['group.foo']|expand|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: schema(value) 
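# Editor's sketch, not part of the patch: the comments added above come down to a
# Jinja2 property worth spelling out -- parsing a template never resolves names,
# so an unknown function such as no_such_function() is only caught when the
# template is actually rendered. A minimal demonstration (requires the jinja2
# package; the template text mirrors the test cases above):
from jinja2 import Environment, UndefinedError

env = Environment()
# Parsing succeeds even though no_such_function does not exist anywhere.
env.parse("{{ no_such_function('group.foo') }}")
try:
    # Rendering is where the undefined name finally fails.
    env.from_string("{{ no_such_function('group.foo') }}").render()
except UndefinedError as err:
    print("only rendering fails:", err)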
@@ -754,8 +761,11 @@ async def test_dynamic_template_no_hass(hass: HomeAssistant) -> None: options = ( "{{ beer }}", "{% if 1 == 1 %}Hello{% else %}World{% endif %}", - # Function added as an extension by Home Assistant + # Function 'expand' added as an extension by Home Assistant, no error + # because non existing functions are not detected by Jinja2 "{{ expand('group.foo')|map(attribute='entity_id')|list }}", + # Non existing function 'no_such_function' is not detected by Jinja2 + "{{ no_such_function('group.foo')|map(attribute='entity_id')|list }}", ) for value in options: await hass.async_add_executor_job(schema, value) From 822660732b0b47baf264708eb582e7e7e22133e0 Mon Sep 17 00:00:00 2001 From: Jakob Schlyter Date: Tue, 3 Sep 2024 15:45:37 +0200 Subject: [PATCH 1376/1635] Support setting Amazon Polly engine in service call (#120226) --- homeassistant/components/amazon_polly/tts.py | 23 ++++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/amazon_polly/tts.py b/homeassistant/components/amazon_polly/tts.py index 1fc972fa3a1..62852848a9c 100644 --- a/homeassistant/components/amazon_polly/tts.py +++ b/homeassistant/components/amazon_polly/tts.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections import defaultdict import logging from typing import Any, Final @@ -114,6 +115,8 @@ def get_engine( all_voices: dict[str, dict[str, str]] = {} + all_engines: dict[str, set[str]] = defaultdict(set) + all_voices_req = polly_client.describe_voices() for voice in all_voices_req.get("Voices", []): @@ -124,8 +127,12 @@ def get_engine( language_code: str | None = voice.get("LanguageCode") if language_code is not None and language_code not in supported_languages: supported_languages.append(language_code) + for engine in voice.get("SupportedEngines"): + all_engines[engine].add(voice_id) - return AmazonPollyProvider(polly_client, config, supported_languages, all_voices) + return AmazonPollyProvider( + polly_client, config, supported_languages, all_voices, all_engines + ) class AmazonPollyProvider(Provider): @@ -137,13 +144,16 @@ class AmazonPollyProvider(Provider): config: ConfigType, supported_languages: list[str], all_voices: dict[str, dict[str, str]], + all_engines: dict[str, set[str]], ) -> None: """Initialize Amazon Polly provider for TTS.""" self.client = polly_client self.config = config self.supported_langs = supported_languages self.all_voices = all_voices + self.all_engines = all_engines self.default_voice: str = self.config[CONF_VOICE] + self.default_engine: str = self.config[CONF_ENGINE] self.name = "Amazon Polly" @property @@ -159,12 +169,12 @@ class AmazonPollyProvider(Provider): @property def default_options(self) -> dict[str, str]: """Return dict include default options.""" - return {CONF_VOICE: self.default_voice} + return {CONF_VOICE: self.default_voice, CONF_ENGINE: self.default_engine} @property def supported_options(self) -> list[str]: """Return a list of supported options.""" - return [CONF_VOICE] + return [CONF_VOICE, CONF_ENGINE] def get_tts_audio( self, @@ -179,9 +189,14 @@ class AmazonPollyProvider(Provider): _LOGGER.error("%s does not support the %s language", voice_id, language) return None, None + engine = options.get(CONF_ENGINE, self.default_engine) + if voice_id not in self.all_engines[engine]: + _LOGGER.error("%s does not support the %s engine", voice_id, engine) + return None, None + _LOGGER.debug("Requesting TTS file for text: %s", message) resp = self.client.synthesize_speech( - 
Engine=self.config[CONF_ENGINE], + Engine=engine, OutputFormat=self.config[CONF_OUTPUT_FORMAT], SampleRate=self.config[CONF_SAMPLE_RATE], Text=message, From 733bbf9cd13b2fdb40540b18dd273ab9a9272f6c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Sep 2024 15:46:05 +0200 Subject: [PATCH 1377/1635] Bump actions/upload-artifact from 4.3.6 to 4.4.0 (#125056) Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 4.3.6 to 4.4.0. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v4.3.6...v4.4.0) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Joost Lekkerkerker --- .github/workflows/builder.yml | 2 +- .github/workflows/ci.yaml | 20 ++++++++++---------- .github/workflows/wheels.yml | 6 +++--- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml index 910e179cd8e..ddb204ca42d 100644 --- a/.github/workflows/builder.yml +++ b/.github/workflows/builder.yml @@ -69,7 +69,7 @@ jobs: run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T - - name: Upload translations - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: translations path: translations.tar.gz diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5d21c2c7b04..d35187a3c45 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -634,7 +634,7 @@ jobs: . venv/bin/activate pip-licenses --format=json --output-file=licenses.json - name: Upload licenses - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: licenses path: licenses.json @@ -844,7 +844,7 @@ jobs: . 
venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: pytest_buckets path: pytest_buckets.txt @@ -945,14 +945,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -1071,7 +1071,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1079,7 +1079,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1198,7 +1198,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1206,7 +1206,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1340,14 +1340,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 735163e3b12..98585a97c6b 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -82,14 +82,14 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: requirements_diff path: ./requirements_diff.txt @@ -101,7 +101,7 @@ jobs: python -m 
script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.3.6 + uses: actions/upload-artifact@v4.4.0 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt From 8e3ad2d1f320ecd6670d54f467c629df8a8a9339 Mon Sep 17 00:00:00 2001 From: S <1311577+s0129@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:46:57 +0100 Subject: [PATCH 1378/1635] Extended epson projector integration to include serial connections (#121630) * Extended epson projector integration to include serial connections * Fix review changes * Improve epson types and translations * Fix comment --------- Co-authored-by: Joostlek --- homeassistant/components/epson/__init__.py | 41 +++++++++++++++++-- homeassistant/components/epson/config_flow.py | 20 ++++++++- homeassistant/components/epson/const.py | 2 + homeassistant/components/epson/strings.json | 11 ++++- tests/components/epson/test_config_flow.py | 8 +++- tests/components/epson/test_init.py | 37 +++++++++++++++++ tests/components/epson/test_media_player.py | 4 +- 7 files changed, 112 insertions(+), 11 deletions(-) create mode 100644 tests/components/epson/test_init.py diff --git a/homeassistant/components/epson/__init__.py b/homeassistant/components/epson/__init__.py index 5171865594d..715b55824b4 100644 --- a/homeassistant/components/epson/__init__.py +++ b/homeassistant/components/epson/__init__.py @@ -13,7 +13,7 @@ from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import DOMAIN, HTTP +from .const import CONF_CONNECTION_TYPE, DOMAIN, HTTP from .exceptions import CannotConnect, PoweredOff PLATFORMS = [Platform.MEDIA_PLAYER] @@ -22,13 +22,17 @@ _LOGGER = logging.getLogger(__name__) async def validate_projector( - hass: HomeAssistant, host, check_power=True, check_powered_on=True + hass: HomeAssistant, + host: str, + conn_type: str, + check_power: bool = True, + check_powered_on: bool = True, ): """Validate the given projector host allows us to connect.""" epson_proj = Projector( host=host, websession=async_get_clientsession(hass, verify_ssl=False), - type=HTTP, + type=conn_type, ) if check_power: _power = await epson_proj.get_power() @@ -46,6 +50,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: projector = await validate_projector( hass=hass, host=entry.data[CONF_HOST], + conn_type=entry.data[CONF_CONNECTION_TYPE], check_power=False, check_powered_on=False, ) @@ -60,5 +65,33 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) + projector = hass.data[DOMAIN].pop(entry.entry_id) + projector.close() return unload_ok + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version > 1 or config_entry.minor_version > 1: + # This means the user has downgraded from a future version + return False + + if config_entry.version == 1 and config_entry.minor_version == 1: + new_data = {**config_entry.data} + new_data[CONF_CONNECTION_TYPE] = HTTP + + hass.config_entries.async_update_entry( + config_entry, data=new_data, version=1, minor_version=2 + ) + + _LOGGER.debug( + 
"Migration to configuration version %s successful", config_entry.version + ) + + return True diff --git a/homeassistant/components/epson/config_flow.py b/homeassistant/components/epson/config_flow.py index 1e3b006a984..c54bff2eea9 100644 --- a/homeassistant/components/epson/config_flow.py +++ b/homeassistant/components/epson/config_flow.py @@ -7,13 +7,21 @@ import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig from . import validate_projector -from .const import DOMAIN +from .const import CONF_CONNECTION_TYPE, DOMAIN, HTTP, SERIAL from .exceptions import CannotConnect, PoweredOff +ALLOWED_CONNECTION_TYPE = [HTTP, SERIAL] + DATA_SCHEMA = vol.Schema( { + vol.Required(CONF_CONNECTION_TYPE, default=HTTP): SelectSelector( + SelectSelectorConfig( + options=ALLOWED_CONNECTION_TYPE, translation_key="connection_type" + ) + ), vol.Required(CONF_HOST): str, vol.Required(CONF_NAME, default=DOMAIN): str, } @@ -26,6 +34,7 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for epson.""" VERSION = 1 + MINOR_VERSION = 2 async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -33,12 +42,16 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle the initial step.""" errors = {} if user_input is not None: + # Epson projector doesn't appear to need to be on for serial + check_power = user_input[CONF_CONNECTION_TYPE] != SERIAL + projector = None try: projector = await validate_projector( hass=self.hass, + conn_type=user_input[CONF_CONNECTION_TYPE], host=user_input[CONF_HOST], check_power=True, - check_powered_on=True, + check_powered_on=check_power, ) except CannotConnect: errors["base"] = "cannot_connect" @@ -55,6 +68,9 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry( title=user_input.pop(CONF_NAME), data=user_input ) + finally: + if projector: + projector.close() return self.async_show_form( step_id="user", data_schema=DATA_SCHEMA, errors=errors ) diff --git a/homeassistant/components/epson/const.py b/homeassistant/components/epson/const.py index 06ef9f25e35..5bc5f57cb3f 100644 --- a/homeassistant/components/epson/const.py +++ b/homeassistant/components/epson/const.py @@ -2,6 +2,8 @@ DOMAIN = "epson" SERVICE_SELECT_CMODE = "select_cmode" +CONF_CONNECTION_TYPE = "connection_type" ATTR_CMODE = "cmode" HTTP = "http" +SERIAL = "serial" diff --git a/homeassistant/components/epson/strings.json b/homeassistant/components/epson/strings.json index 94544c32d1d..fb8d7ab5fdd 100644 --- a/homeassistant/components/epson/strings.json +++ b/homeassistant/components/epson/strings.json @@ -3,11 +3,12 @@ "step": { "user": { "data": { + "connection_type": "Connection type", "host": "[%key:common::config_flow::data::host%]", "name": "[%key:common::config_flow::data::name%]" }, "data_description": { - "host": "The hostname or IP address of your Epson projector." + "host": "The hostname, IP address or serial port of your Epson projector." 
} } }, @@ -30,5 +31,13 @@ } } } + }, + "selector": { + "connection_type": { + "options": { + "http": "HTTP", + "serial": "Serial" + } + } } } diff --git a/tests/components/epson/test_config_flow.py b/tests/components/epson/test_config_flow.py index d485a4bfdef..f727185362c 100644 --- a/tests/components/epson/test_config_flow.py +++ b/tests/components/epson/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from epson_projector.const import PWR_OFF_STATE from homeassistant import config_entries -from homeassistant.components.epson.const import DOMAIN +from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN, HTTP from homeassistant.const import CONF_HOST, CONF_NAME, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -33,6 +33,10 @@ async def test_form(hass: HomeAssistant) -> None: patch( "homeassistant.components.epson.async_setup_entry", return_value=True, + ), + patch( + "homeassistant.components.epson.Projector.close", + return_value=True, ) as mock_setup_entry, ): result2 = await hass.config_entries.flow.async_configure( @@ -43,7 +47,7 @@ async def test_form(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "test-epson" - assert result2["data"] == {CONF_HOST: "1.1.1.1"} + assert result2["data"] == {CONF_CONNECTION_TYPE: HTTP, CONF_HOST: "1.1.1.1"} assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/epson/test_init.py b/tests/components/epson/test_init.py new file mode 100644 index 00000000000..964f9e915ab --- /dev/null +++ b/tests/components/epson/test_init.py @@ -0,0 +1,37 @@ +"""Test the epson init.""" + +from unittest.mock import patch + +from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_migrate_entry(hass: HomeAssistant) -> None: + """Test successful migration of entry data from version 1 to 1.2.""" + + mock_entry = MockConfigEntry( + domain=DOMAIN, + title="Epson", + version=1, + minor_version=1, + data={CONF_HOST: "1.1.1.1"}, + entry_id="1cb78c095906279574a0442a1f0003ef", + ) + assert mock_entry.version == 1 + + mock_entry.add_to_hass(hass) + + # Create entity entry to migrate to new unique ID + with patch("homeassistant.components.epson.Projector.get_power"): + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + # Check that is now has connection_type + assert mock_entry + assert mock_entry.version == 1 + assert mock_entry.minor_version == 2 + assert mock_entry.data.get(CONF_CONNECTION_TYPE) == "http" + assert mock_entry.data.get(CONF_HOST) == "1.1.1.1" diff --git a/tests/components/epson/test_media_player.py b/tests/components/epson/test_media_player.py index e529746dcd0..188fdd5b700 100644 --- a/tests/components/epson/test_media_player.py +++ b/tests/components/epson/test_media_player.py @@ -5,7 +5,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.epson.const import DOMAIN +from homeassistant.components.epson.const import CONF_CONNECTION_TYPE, DOMAIN, HTTP from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,7 +22,7 @@ async def test_set_unique_id( entry = MockConfigEntry( domain=DOMAIN, title="Epson", 
- data={CONF_HOST: "1.1.1.1"}, + data={CONF_CONNECTION_TYPE: HTTP, CONF_HOST: "1.1.1.1"}, entry_id="1cb78c095906279574a0442a1f0003ef", ) entry.add_to_hass(hass) From 7c15075231b5d46c56f268909417de3a7216b2a8 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Tue, 3 Sep 2024 15:49:11 +0200 Subject: [PATCH 1379/1635] Clean up Z-wave error log when raising in service handlers (#125138) --- homeassistant/components/zwave_js/entity.py | 5 +++-- homeassistant/components/zwave_js/sensor.py | 7 +++---- tests/components/zwave_js/test_sensor.py | 7 ++++++- tests/components/zwave_js/test_switch.py | 6 +++++- 4 files changed, 17 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/zwave_js/entity.py b/homeassistant/components/zwave_js/entity.py index 4a6f87cc032..d41c8bb01d0 100644 --- a/homeassistant/components/zwave_js/entity.py +++ b/homeassistant/components/zwave_js/entity.py @@ -335,5 +335,6 @@ class ZWaveBaseEntity(Entity): value, new_value, options=options, wait_for_result=wait_for_result ) except BaseZwaveJSServerError as err: - LOGGER.error("Unable to set value %s: %s", value.value_id, err) - raise HomeAssistantError from err + raise HomeAssistantError( + f"Unable to set value {value.value_id}: {err}" + ) from err diff --git a/homeassistant/components/zwave_js/sensor.py b/homeassistant/components/zwave_js/sensor.py index e43c620ff54..428bf504510 100644 --- a/homeassistant/components/zwave_js/sensor.py +++ b/homeassistant/components/zwave_js/sensor.py @@ -750,10 +750,9 @@ class ZWaveMeterSensor(ZWaveNumericSensor): CommandClass.METER, "reset", *args, wait_for_result=False ) except BaseZwaveJSServerError as err: - LOGGER.error( - "Failed to reset meters on node %s endpoint %s: %s", node, endpoint, err - ) - raise HomeAssistantError from err + raise HomeAssistantError( + f"Failed to reset meters on node {node} endpoint {endpoint}: {err}" + ) from err LOGGER.debug( "Meters on node %s endpoint %s reset with the following options: %s", node, diff --git a/tests/components/zwave_js/test_sensor.py b/tests/components/zwave_js/test_sensor.py index 02b3df17e22..19f8aeece36 100644 --- a/tests/components/zwave_js/test_sensor.py +++ b/tests/components/zwave_js/test_sensor.py @@ -522,7 +522,7 @@ async def test_reset_meter( "test", 1, "test" ) - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( DOMAIN, SERVICE_RESET_METER, @@ -530,6 +530,11 @@ async def test_reset_meter( blocking=True, ) + assert str(err.value) == ( + "Failed to reset meters on node Node(node_id=102) endpoint 0: " + "zwave_error: Z-Wave error 1 - test" + ) + async def test_meter_attributes( hass: HomeAssistant, client, aeon_smart_switch_6, integration diff --git a/tests/components/zwave_js/test_switch.py b/tests/components/zwave_js/test_switch.py index c18c0c4359e..810ce38cf99 100644 --- a/tests/components/zwave_js/test_switch.py +++ b/tests/components/zwave_js/test_switch.py @@ -286,7 +286,11 @@ async def test_config_parameter_switch( client.async_send_command.side_effect = FailedZWaveCommand("test", 1, "test") # Test turning off error raises proper exception - with pytest.raises(HomeAssistantError): + with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True ) + + assert str(err.value) == ( + "Unable to set value 32-112-0-20: zwave_error: Z-Wave error 1 - test" + ) From 436ac72b821df40e861040e88a40a5a2ca9a21a9 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 
3 Sep 2024 16:56:00 +0300 Subject: [PATCH 1380/1635] End deprecation setting attributes directly on config entry (#123729) * End deprecation setting attr directly on config entry * Update ollama test * Fix android_tv --- homeassistant/config_entries.py | 22 +++---------------- .../androidtv_remote/test_media_player.py | 10 +++++---- .../androidtv_remote/test_remote.py | 10 ++++----- tests/components/ollama/test_conversation.py | 4 +++- tests/test_config_entries.py | 9 ++------ 5 files changed, 18 insertions(+), 37 deletions(-) diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index f3b0aa03383..e64d2001efa 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -434,26 +434,10 @@ class ConfigEntry(Generic[_DataT]): def __setattr__(self, key: str, value: Any) -> None: """Set an attribute.""" if key in UPDATE_ENTRY_CONFIG_ENTRY_ATTRS: - if key == "unique_id": - # Setting unique_id directly will corrupt internal state - # There is no deprecation period for this key - # as changing them will corrupt internal state - # so we raise an error here - raise AttributeError( - "unique_id cannot be changed directly, use async_update_entry instead" - ) - report( - f'sets "{key}" directly to update a config entry. This is deprecated and will' - " stop working in Home Assistant 2024.9, it should be updated to use" - " async_update_entry instead", - error_if_core=False, + raise AttributeError( + f"{key} cannot be changed directly, use async_update_entry instead" ) - - elif key in FROZEN_CONFIG_ENTRY_ATTRS: - # These attributes are frozen and cannot be changed - # There is no deprecation period for these - # as changing them will corrupt internal state - # so we raise an error here + if key in FROZEN_CONFIG_ENTRY_ATTRS: raise AttributeError(f"{key} cannot be changed") super().__setattr__(key, value) diff --git a/tests/components/androidtv_remote/test_media_player.py b/tests/components/androidtv_remote/test_media_player.py index 46678f18fd3..e292a5b273f 100644 --- a/tests/components/androidtv_remote/test_media_player.py +++ b/tests/components/androidtv_remote/test_media_player.py @@ -20,10 +20,11 @@ async def test_media_player_receives_push_updates( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote media player receives push updates and state is updated.""" - mock_config_entry.options = { - "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} - } mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry( + mock_config_entry, + options={"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}}, + ) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -322,7 +323,7 @@ async def test_browse_media( mock_api: MagicMock, ) -> None: """Test the Android TV Remote media player browse media.""" - mock_config_entry.options = { + new_options = { "apps": { "com.google.android.youtube.tv": { "app_name": "YouTube", @@ -332,6 +333,7 @@ async def test_browse_media( } } mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/androidtv_remote/test_remote.py b/tests/components/androidtv_remote/test_remote.py index 7ca63685747..b3c3ce1c283 100644 --- 
a/tests/components/androidtv_remote/test_remote.py +++ b/tests/components/androidtv_remote/test_remote.py @@ -19,10 +19,9 @@ async def test_remote_receives_push_updates( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote receives push updates and state is updated.""" - mock_config_entry.options = { - "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} - } + new_options = {"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}} mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED @@ -53,10 +52,9 @@ async def test_remote_toggles( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_api: MagicMock ) -> None: """Test the Android TV Remote toggles.""" - mock_config_entry.options = { - "apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}} - } + new_options = {"apps": {"com.google.android.youtube.tv": {"app_name": "YouTube"}}} mock_config_entry.add_to_hass(hass) + hass.config_entries.async_update_entry(mock_config_entry, options=new_options) await hass.config_entries.async_setup(mock_config_entry.entry_id) assert mock_config_entry.state is ConfigEntryState.LOADED diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index f10805e747d..6c34b8e0052 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -482,8 +482,10 @@ async def test_message_history_unlimited( "ollama.AsyncClient.chat", return_value={"message": {"role": "assistant", "content": "test response"}}, ), - patch.object(mock_config_entry, "options", {ollama.CONF_MAX_HISTORY: 0}), ): + hass.config_entries.async_update_entry( + mock_config_entry, options={ollama.CONF_MAX_HISTORY: 0} + ) for i in range(100): result = await conversation.async_converse( hass, diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 3042ccb28d9..d01febd6904 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -5437,13 +5437,8 @@ async def test_report_direct_mutation_of_config_entry( entry = MockConfigEntry(domain="test") entry.add_to_hass(hass) - setattr(entry, field, "new_value") - - assert ( - f'Detected code that sets "{field}" directly to update a config entry. ' - "This is deprecated and will stop working in Home Assistant 2024.9, " - "it should be updated to use async_update_entry instead. Please report this issue." 
- ) in caplog.text + with pytest.raises(AttributeError): + setattr(entry, field, "new_value") async def test_updating_non_added_entry_raises(hass: HomeAssistant) -> None: From d827c53a855075f8dbef51e2e147992f6d8452ef Mon Sep 17 00:00:00 2001 From: mvn23 Date: Tue, 3 Sep 2024 15:59:12 +0200 Subject: [PATCH 1381/1635] Remove opentherm_gw options migration (#125046) --- .../components/opentherm_gw/__init__.py | 13 ----- .../opentherm_gw/test_config_flow.py | 53 ------------------- 2 files changed, 66 deletions(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index d8c352f3768..a57ae7db601 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -46,8 +46,6 @@ from .const import ( CONF_CLIMATE, CONF_FLOOR_TEMP, CONF_PRECISION, - CONF_READ_PRECISION, - CONF_SET_PRECISION, CONNECTION_TIMEOUT, DATA_GATEWAYS, DATA_OPENTHERM_GW, @@ -109,17 +107,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b gateway = OpenThermGatewayHub(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway - if config_entry.options.get(CONF_PRECISION): - migrate_options = dict(config_entry.options) - migrate_options.update( - { - CONF_READ_PRECISION: config_entry.options[CONF_PRECISION], - CONF_SET_PRECISION: config_entry.options[CONF_PRECISION], - } - ) - del migrate_options[CONF_PRECISION] - hass.config_entries.async_update_entry(config_entry, options=migrate_options) - # Migration can be removed in 2025.4.0 dev_reg = dr.async_get(hass) if ( diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index e61a87bb55e..504a97dc953 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -8,7 +8,6 @@ from serial import SerialException from homeassistant import config_entries from homeassistant.components.opentherm_gw.const import ( CONF_FLOOR_TEMP, - CONF_PRECISION, CONF_READ_PRECISION, CONF_SET_PRECISION, CONF_TEMPORARY_OVRD_MODE, @@ -204,58 +203,6 @@ async def test_form_connection_error(hass: HomeAssistant) -> None: assert len(mock_connect.mock_calls) == 1 -async def test_options_migration(hass: HomeAssistant) -> None: - """Test migration of precision option after update.""" - entry = MockConfigEntry( - domain=DOMAIN, - title="Mock Gateway", - data={ - CONF_NAME: "Test Entry 1", - CONF_DEVICE: "/dev/ttyUSB0", - CONF_ID: "test_entry_1", - }, - options={ - CONF_FLOOR_TEMP: True, - CONF_PRECISION: PRECISION_TENTHS, - }, - ) - entry.add_to_hass(hass) - - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayHub.connect_and_subscribe", - return_value=True, - ), - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ), - patch( - "pyotgw.status.StatusManager._process_updates", - return_value=None, - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - result = await hass.config_entries.options.async_init( - entry.entry_id, context={"source": config_entries.SOURCE_USER}, data=None - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={}, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"][CONF_READ_PRECISION] == PRECISION_TENTHS - 
assert result["data"][CONF_SET_PRECISION] == PRECISION_TENTHS - assert result["data"][CONF_FLOOR_TEMP] is True - - async def test_options_form(hass: HomeAssistant) -> None: """Test the options form.""" entry = MockConfigEntry( From 2fa3b9070c77b4853f369be605cf23536b71b139 Mon Sep 17 00:00:00 2001 From: UltimateGG Date: Tue, 3 Sep 2024 09:31:48 -0500 Subject: [PATCH 1382/1635] Fix updating insteon modem configuration while disconnected (#121918) #121917 Fix updating insteon modem configuration while disconnected --- homeassistant/components/insteon/api/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/insteon/api/config.py b/homeassistant/components/insteon/api/config.py index 8a617911d1e..88c062c3271 100644 --- a/homeassistant/components/insteon/api/config.py +++ b/homeassistant/components/insteon/api/config.py @@ -211,7 +211,7 @@ async def websocket_update_modem_config( """Get the schema for the modem configuration.""" config = msg["config"] config_entry = get_insteon_config_entry(hass) - is_connected = devices.modem.connected + is_connected = devices.modem is not None and devices.modem.connected if not await _async_connect(**config): connection.send_error( From 96be3e25053e39970e89d44c0dd7431a4f1eadb7 Mon Sep 17 00:00:00 2001 From: Michael <35783820+mib1185@users.noreply.github.com> Date: Tue, 3 Sep 2024 16:39:06 +0200 Subject: [PATCH 1383/1635] Use SnapshotAssertion in more AVM Fritz!Box Tools tests (#125037) use SnapshotAssertion in more tests --- .../fritz/snapshots/test_button.ambr | 235 ++++++ .../fritz/snapshots/test_diagnostics.ambr | 67 ++ .../fritz/snapshots/test_sensor.ambr | 771 ++++++++++++++++++ .../fritz/snapshots/test_switch.ambr | 424 ++++++++++ .../fritz/snapshots/test_update.ambr | 169 ++++ tests/components/fritz/test_button.py | 27 +- tests/components/fritz/test_diagnostics.py | 67 +- tests/components/fritz/test_sensor.py | 122 +-- tests/components/fritz/test_switch.py | 26 +- tests/components/fritz/test_update.py | 93 +-- 10 files changed, 1771 insertions(+), 230 deletions(-) create mode 100644 tests/components/fritz/snapshots/test_button.ambr create mode 100644 tests/components/fritz/snapshots/test_diagnostics.ambr create mode 100644 tests/components/fritz/snapshots/test_sensor.ambr create mode 100644 tests/components/fritz/snapshots/test_switch.ambr create mode 100644 tests/components/fritz/snapshots/test_update.ambr diff --git a/tests/components/fritz/snapshots/test_button.ambr b/tests/components/fritz/snapshots/test_button.ambr new file mode 100644 index 00000000000..ed0b0e72160 --- /dev/null +++ b/tests/components/fritz/snapshots/test_button.ambr @@ -0,0 +1,235 @@ +# serializer version: 1 +# name: test_button_setup[button.mock_title_cleanup-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_cleanup', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cleanup', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'cleanup', + 'unique_id': '1C:ED:6F:12:34:11-cleanup', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_cleanup-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Cleanup', + }), + 'context': , + 'entity_id': 'button.mock_title_cleanup', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.mock_title_firmware_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_firmware_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware update', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'firmware_update', + 'unique_id': '1C:ED:6F:12:34:11-firmware_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_firmware_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'update', + 'friendly_name': 'Mock Title Firmware update', + }), + 'context': , + 'entity_id': 'button.mock_title_firmware_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.mock_title_reconnect-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_reconnect', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reconnect', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reconnect', + 'unique_id': '1C:ED:6F:12:34:11-reconnect', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_reconnect-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Mock Title Reconnect', + }), + 'context': , + 'entity_id': 'button.mock_title_reconnect', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.mock_title_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_title_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-reboot', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.mock_title_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'Mock Title Restart', + }), + 'context': , + 'entity_id': 'button.mock_title_restart', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_button_setup[button.printer_wake_on_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': None, + 'entity_id': 'button.printer_wake_on_lan', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:lan-pending', + 'original_name': 'printer Wake on LAN', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_wake_on_lan', + 'unit_of_measurement': None, + }) +# --- +# name: test_button_setup[button.printer_wake_on_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Wake on LAN', + 'icon': 'mdi:lan-pending', + }), + 'context': , + 'entity_id': 'button.printer_wake_on_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/fritz/snapshots/test_diagnostics.ambr b/tests/components/fritz/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..4b5b8bdea3b --- /dev/null +++ b/tests/components/fritz/snapshots/test_diagnostics.ambr @@ -0,0 +1,67 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'device_info': dict({ + 'client_devices': list([ + dict({ + 'connected_to': 'fritz.box', + 'connection_type': 'LAN', + 'hostname': 'printer', + 'is_connected': True, + 'wan_access': True, + }), + ]), + 'connection_type': 'WANPPPConnection', + 'current_firmware': '7.29', + 'discovered_services': list([ + 'DeviceInfo1', + 'Hosts1', + 'LANEthernetInterfaceConfig1', + 'Layer3Forwarding1', + 'UserInterface1', + 'WANCommonIFC1', + 'WANCommonInterfaceConfig1', + 'WANDSLInterfaceConfig1', + 'WANIPConn1', + 'WANPPPConnection1', + 'WLANConfiguration1', + 'X_AVM-DE_Homeauto1', + 'X_AVM-DE_HostFilter1', + ]), + 'is_router': True, + 'last_exception': None, + 'last_update success': True, + 'latest_firmware': None, + 'mesh_role': 'master', + 'model': 'FRITZ!Box 7530 AX', + 'unique_id': '1C:ED:XX:XX:34:11', + 'update_available': False, + 'wan_link_properties': dict({ + 'NewLayer1DownstreamMaxBitRate': 318557000, + 'NewLayer1UpstreamMaxBitRate': 51805000, + 'NewPhysicalLinkStatus': 'Up', + 'NewWANAccessType': 'DSL', + }), + }), + 'entry': dict({ + 'data': dict({ + 'host': 'fake_host', + 'password': '**REDACTED**', + 'port': '1234', + 'ssl': False, + 'username': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'fritz', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + }) +# --- diff --git a/tests/components/fritz/snapshots/test_sensor.ambr b/tests/components/fritz/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..50744815aa5 --- /dev/null +++ b/tests/components/fritz/snapshots/test_sensor.ambr @@ -0,0 +1,771 @@ +# serializer version: 1 +# name: test_sensor_setup[sensor.mock_title_connection_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , 
+ 'entity_id': 'sensor.mock_title_connection_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Connection uptime', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_uptime', + 'unique_id': '1C:ED:6F:12:34:11-connection_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_connection_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Connection uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_title_connection_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-09-01T10:11:33+00:00', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_download_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_download_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Download throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'kb_s_received', + 'unique_id': '1C:ED:6F:12:34:11-kb_s_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_download_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Download throughput', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_download_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '67.6', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ip-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_external_ip', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'External IP', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'external_ip', + 'unique_id': '1C:ED:6F:12:34:11-external_ip', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ip-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title External IP', + }), + 'context': , + 'entity_id': 'sensor.mock_title_external_ip', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.2.3.4', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ipv6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': 
None, + 'entity_id': 'sensor.mock_title_external_ipv6', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'External IPv6', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'external_ipv6', + 'unique_id': '1C:ED:6F:12:34:11-external_ipv6', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_external_ipv6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title External IPv6', + }), + 'context': , + 'entity_id': 'sensor.mock_title_external_ipv6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'fec0::1', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_gb_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'GB received', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gb_received', + 'unique_id': '1C:ED:6F:12:34:11-gb_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title GB received', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_gb_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5.2', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_sent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_gb_sent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'GB sent', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gb_sent', + 'unique_id': '1C:ED:6F:12:34:11-gb_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_gb_sent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title GB sent', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_gb_sent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.7', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_last_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_last_restart', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'device_uptime', + 'unique_id': '1C:ED:6F:12:34:11-device_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor_setup[sensor.mock_title_last_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Last restart', + }), + 'context': , + 'entity_id': 'sensor.mock_title_last_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-08-03T16:30:21+00:00', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_noise_margin-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_download_noise_margin', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link download noise margin', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_noise_margin_received', + 'unique_id': '1C:ED:6F:12:34:11-link_noise_margin_received', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_noise_margin-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link download noise margin', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_download_noise_margin', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_power_attenuation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_download_power_attenuation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link download power attenuation', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_attenuation_received', + 'unique_id': '1C:ED:6F:12:34:11-link_attenuation_received', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_power_attenuation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link download power attenuation', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_download_power_attenuation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_download_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Link download throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_kb_s_received', + 'unique_id': '1C:ED:6F:12:34:11-link_kb_s_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_download_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Link download throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_download_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '318557.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_noise_margin-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_upload_noise_margin', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link upload noise margin', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_noise_margin_sent', + 'unique_id': '1C:ED:6F:12:34:11-link_noise_margin_sent', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_noise_margin-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link upload noise margin', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_upload_noise_margin', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_power_attenuation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Link upload power attenuation', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_attenuation_sent', + 'unique_id': '1C:ED:6F:12:34:11-link_attenuation_sent', + 'unit_of_measurement': 'dB', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_power_attenuation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Link upload power attenuation', + 'unit_of_measurement': 'dB', + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_upload_power_attenuation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.0', + }) +# --- +# 
name: test_sensor_setup[sensor.mock_title_link_upload_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_link_upload_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Link upload throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'link_kb_s_sent', + 'unique_id': '1C:ED:6F:12:34:11-link_kb_s_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_link_upload_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Link upload throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_link_upload_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '51805.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_download_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_max_connection_download_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max connection download throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_kb_s_received', + 'unique_id': '1C:ED:6F:12:34:11-max_kb_s_received', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_download_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Max connection download throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_max_connection_download_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10087.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_upload_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Max connection upload throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'max_kb_s_sent', + 'unique_id': '1C:ED:6F:12:34:11-max_kb_s_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_max_connection_upload_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 
'friendly_name': 'Mock Title Max connection upload throughput', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_max_connection_upload_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2105.0', + }) +# --- +# name: test_sensor_setup[sensor.mock_title_upload_throughput-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mock_title_upload_throughput', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Upload throughput', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'kb_s_sent', + 'unique_id': '1C:ED:6F:12:34:11-kb_s_sent', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor_setup[sensor.mock_title_upload_throughput-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Mock Title Upload throughput', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_upload_throughput', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.4', + }) +# --- diff --git a/tests/components/fritz/snapshots/test_switch.ambr b/tests/components/fritz/snapshots/test_switch.ambr new file mode 100644 index 00000000000..048f6e005ec --- /dev/null +++ b/tests/components/fritz/snapshots/test_switch.ambr @@ -0,0 +1,424 @@ +# serializer version: 1 +# name: test_switch_setup[fc_data0-expected_wifi_names0][switch.mock_title_wi_fi_wifi_2_4ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_2_4ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data0-expected_wifi_names0][switch.mock_title_wi_fi_wifi_2_4ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data0-expected_wifi_names0][switch.mock_title_wi_fi_wifi_5ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi (5Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_5ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data0-expected_wifi_names0][switch.mock_title_wi_fi_wifi_5ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi (5Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data0-expected_wifi_names0][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data0-expected_wifi_names0][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data1-expected_wifi_names1][switch.mock_title_wi_fi_wifi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data1-expected_wifi_names1][switch.mock_title_wi_fi_wifi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data1-expected_wifi_names1][switch.mock_title_wi_fi_wifi2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi2', + 'has_entity_name': False, + 'hidden_by': None, + 
'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi2', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi2', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data1-expected_wifi_names1][switch.mock_title_wi_fi_wifi2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi2', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data1-expected_wifi_names1][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data1-expected_wifi_names1][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data2-expected_wifi_names2][switch.mock_title_wi_fi_wifi_2_4ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_2_4ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data2-expected_wifi_names2][switch.mock_title_wi_fi_wifi_2_4ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi (2.4Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_2_4ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data2-expected_wifi_names2][switch.mock_title_wi_fi_wifi_5ghz-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 
'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:wifi', + 'original_name': 'Mock Title Wi-Fi WiFi+ (5Ghz)', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-wi_fi_wifi_5ghz', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data2-expected_wifi_names2][switch.mock_title_wi_fi_wifi_5ghz-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mock Title Wi-Fi WiFi+ (5Ghz)', + 'icon': 'mdi:wifi', + }), + 'context': , + 'entity_id': 'switch.mock_title_wi_fi_wifi_5ghz', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_switch_setup[fc_data2-expected_wifi_names2][switch.printer_internet_access-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.printer_internet_access', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:router-wireless-settings', + 'original_name': 'printer Internet Access', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'AA:BB:CC:00:11:22_internet_access', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[fc_data2-expected_wifi_names2][switch.printer_internet_access-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'printer Internet Access', + 'icon': 'mdi:router-wireless-settings', + }), + 'context': , + 'entity_id': 'switch.printer_internet_access', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/fritz/snapshots/test_update.ambr b/tests/components/fritz/snapshots/test_update.ambr new file mode 100644 index 00000000000..5544c972499 --- /dev/null +++ b/tests/components/fritz/snapshots/test_update.ambr @@ -0,0 +1,169 @@ +# serializer version: 1 +# name: test_available_update_can_be_installed[update.mock_title_fritz_os-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_fritz_os', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FRITZ!OS', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_available_update_can_be_installed[update.mock_title_fritz_os-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', + 'friendly_name': 'Mock Title FRITZ!OS', + 'in_progress': False, + 'installed_version': 
'7.29', + 'latest_version': '7.50', + 'release_summary': None, + 'release_url': 'http://download.avm.de/fritzbox/fritzbox-7530-ax/deutschland/fritz.os/info_de.txt', + 'skipped_version': None, + 'supported_features': , + 'title': 'FRITZ!OS', + }), + 'context': , + 'entity_id': 'update.mock_title_fritz_os', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_available[update.mock_title_fritz_os-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_fritz_os', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FRITZ!OS', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_update_available[update.mock_title_fritz_os-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', + 'friendly_name': 'Mock Title FRITZ!OS', + 'in_progress': False, + 'installed_version': '7.29', + 'latest_version': '7.50', + 'release_summary': None, + 'release_url': 'http://download.avm.de/fritzbox/fritzbox-7530-ax/deutschland/fritz.os/info_de.txt', + 'skipped_version': None, + 'supported_features': , + 'title': 'FRITZ!OS', + }), + 'context': , + 'entity_id': 'update.mock_title_fritz_os', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_update_entities_initialized[update.mock_title_fritz_os-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.mock_title_fritz_os', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'FRITZ!OS', + 'platform': 'fritz', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1C:ED:6F:12:34:11-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_update_entities_initialized[update.mock_title_fritz_os-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'entity_picture': 'https://brands.home-assistant.io/_/fritz/icon.png', + 'friendly_name': 'Mock Title FRITZ!OS', + 'in_progress': False, + 'installed_version': '7.29', + 'latest_version': '7.29', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': 'FRITZ!OS', + }), + 'context': , + 'entity_id': 'update.mock_title_fritz_os', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/fritz/test_button.py b/tests/components/fritz/test_button.py index 79639835003..507331cde0b 100644 --- a/tests/components/fritz/test_button.py +++ b/tests/components/fritz/test_button.py @@ -5,11 +5,12 @@ from datetime import timedelta from unittest.mock import 
patch import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.fritz.const import DOMAIN, MeshRoles from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util.dt import utcnow @@ -21,24 +22,30 @@ from .const import ( MOCK_USER_DATA, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -async def test_button_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None: +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_button_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, + snapshot: SnapshotAssertion, +) -> None: """Test setup of Fritz!Tools buttons.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - buttons = hass.states.async_all(BUTTON_DOMAIN) - assert len(buttons) == 4 + states = hass.states.async_all() + assert len(states) == 5 - for button in buttons: - assert button.state == STATE_UNKNOWN + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) @pytest.mark.parametrize( diff --git a/tests/components/fritz/test_diagnostics.py b/tests/components/fritz/test_diagnostics.py index 55196eb6988..cbcaa57dab4 100644 --- a/tests/components/fritz/test_diagnostics.py +++ b/tests/components/fritz/test_diagnostics.py @@ -2,14 +2,13 @@ from __future__ import annotations -from homeassistant.components.diagnostics import REDACTED +from syrupy import SnapshotAssertion +from syrupy.filters import props + from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.fritz.coordinator import AvmWrapper -from homeassistant.components.fritz.diagnostics import TO_REDACT -from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant -from .const import MOCK_MESH_MASTER_MAC, MOCK_USER_DATA +from .const import MOCK_USER_DATA from tests.common import MockConfigEntry from tests.components.diagnostics import get_diagnostics_for_config_entry @@ -21,64 +20,16 @@ async def test_entry_diagnostics( hass_client: ClientSessionGenerator, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test config entry diagnostics.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - entry_dict = entry.as_dict() - for key in TO_REDACT: - entry_dict["data"][key] = REDACTED result = await get_diagnostics_for_config_entry(hass, hass_client, entry) - avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id] - assert result == { - "entry": entry_dict, - "device_info": { 
- "client_devices": [ - { - "connected_to": device.connected_to, - "connection_type": device.connection_type, - "hostname": device.hostname, - "is_connected": device.is_connected, - "last_activity": device.last_activity.isoformat(), - "wan_access": device.wan_access, - } - for _, device in avm_wrapper.devices.items() - ], - "connection_type": "WANPPPConnection", - "current_firmware": "7.29", - "discovered_services": [ - "DeviceInfo1", - "Hosts1", - "LANEthernetInterfaceConfig1", - "Layer3Forwarding1", - "UserInterface1", - "WANCommonIFC1", - "WANCommonInterfaceConfig1", - "WANDSLInterfaceConfig1", - "WANIPConn1", - "WANPPPConnection1", - "WLANConfiguration1", - "X_AVM-DE_Homeauto1", - "X_AVM-DE_HostFilter1", - ], - "is_router": True, - "last_exception": None, - "last_update success": True, - "latest_firmware": None, - "mesh_role": "master", - "model": "FRITZ!Box 7530 AX", - "unique_id": MOCK_MESH_MASTER_MAC.replace("6F:12", "XX:XX"), - "update_available": False, - "wan_link_properties": { - "NewLayer1DownstreamMaxBitRate": 318557000, - "NewLayer1UpstreamMaxBitRate": 51805000, - "NewPhysicalLinkStatus": "Up", - "NewWANAccessType": "DSL", - }, - }, - } + assert result == snapshot( + exclude=props("created_at", "modified_at", "entry_id", "last_activity") + ) diff --git a/tests/components/fritz/test_sensor.py b/tests/components/fritz/test_sensor.py index f8114238376..fcdb4b63450 100644 --- a/tests/components/fritz/test_sensor.py +++ b/tests/components/fritz/test_sensor.py @@ -2,123 +2,47 @@ from __future__ import annotations -from datetime import timedelta -from typing import Any +from datetime import UTC, datetime, timedelta +from unittest.mock import patch from fritzconnection.core.exceptions import FritzConnectionException +import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.fritz.sensor import SENSOR_TYPES -from homeassistant.components.sensor import ( - ATTR_STATE_CLASS, - DOMAIN as SENSOR_DOMAIN, - SensorDeviceClass, - SensorStateClass, -) -from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_STATE, - ATTR_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, -) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er import homeassistant.util.dt as dt_util from .const import MOCK_USER_DATA -from tests.common import MockConfigEntry, async_fire_time_changed - -SENSOR_STATES: dict[str, dict[str, Any]] = { - "sensor.mock_title_external_ip": { - ATTR_STATE: "1.2.3.4", - }, - "sensor.mock_title_external_ipv6": { - ATTR_STATE: "fec0::1", - }, - "sensor.mock_title_last_restart": { - # ATTR_STATE: "2022-02-05T17:46:04+00:00", - ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP, - }, - "sensor.mock_title_connection_uptime": { - # ATTR_STATE: "2022-03-06T11:27:16+00:00", - ATTR_DEVICE_CLASS: SensorDeviceClass.TIMESTAMP, - }, - "sensor.mock_title_upload_throughput": { - ATTR_STATE: "3.4", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_UNIT_OF_MEASUREMENT: "kB/s", - }, - "sensor.mock_title_download_throughput": { - ATTR_STATE: "67.6", - ATTR_STATE_CLASS: SensorStateClass.MEASUREMENT, - ATTR_UNIT_OF_MEASUREMENT: "kB/s", - }, - "sensor.mock_title_max_connection_upload_throughput": { - ATTR_STATE: "2105.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - 
"sensor.mock_title_max_connection_download_throughput": { - ATTR_STATE: "10087.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_gb_sent": { - ATTR_STATE: "1.7", - ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING, - ATTR_UNIT_OF_MEASUREMENT: "GB", - }, - "sensor.mock_title_gb_received": { - ATTR_STATE: "5.2", - ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING, - ATTR_UNIT_OF_MEASUREMENT: "GB", - }, - "sensor.mock_title_link_upload_throughput": { - ATTR_STATE: "51805.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_link_download_throughput": { - ATTR_STATE: "318557.0", - ATTR_UNIT_OF_MEASUREMENT: "kbit/s", - }, - "sensor.mock_title_link_upload_noise_margin": { - ATTR_STATE: "9.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, - "sensor.mock_title_link_download_noise_margin": { - ATTR_STATE: "8.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, - "sensor.mock_title_link_upload_power_attenuation": { - ATTR_STATE: "7.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, - "sensor.mock_title_link_download_power_attenuation": { - ATTR_STATE: "12.0", - ATTR_UNIT_OF_MEASUREMENT: "dB", - }, -} +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -async def test_sensor_setup(hass: HomeAssistant, fc_class_mock, fh_class_mock) -> None: +@pytest.mark.freeze_time(datetime(2024, 9, 1, 20, tzinfo=UTC)) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + fc_class_mock, + fh_class_mock, + snapshot: SnapshotAssertion, +) -> None: """Test setup of Fritz!Tools sensors.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.SENSOR]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - sensors = hass.states.async_all(SENSOR_DOMAIN) - assert len(sensors) == len(SENSOR_TYPES) + states = hass.states.async_all() + assert len(states) == 16 - for sensor in sensors: - assert SENSOR_STATES.get(sensor.entity_id) is not None - for key, val in SENSOR_STATES[sensor.entity_id].items(): - if key == ATTR_STATE: - assert sensor.state == val - else: - assert sensor.attributes.get(key) == val + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_sensor_update_fail( diff --git a/tests/components/fritz/test_switch.py b/tests/components/fritz/test_switch.py index b82587d42bd..1542645758e 100644 --- a/tests/components/fritz/test_switch.py +++ b/tests/components/fritz/test_switch.py @@ -2,16 +2,19 @@ from __future__ import annotations +from unittest.mock import patch + import pytest +from syrupy.assertion import SnapshotAssertion from homeassistant.components.fritz.const import DOMAIN -from homeassistant.config_entries import ConfigEntryState from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .const import MOCK_FB_SERVICES, MOCK_USER_DATA -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, snapshot_platform MOCK_WLANCONFIGS_SAME_SSID: dict[str, dict] = { "WLANConfiguration1": { @@ -179,23 +182,24 @@ MOCK_WLANCONFIGS_DIFF2_SSID: dict[str, dict] = { ), ], ) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def 
test_switch_setup( hass: HomeAssistant, + entity_registry: er.EntityRegistry, expected_wifi_names: list[str], fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test setup of Fritz!Tools switches.""" - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) - switches = hass.states.async_all(Platform.SWITCH) - assert len(switches) == 3 - assert switches[0].name == f"Mock Title Wi-Fi {expected_wifi_names[0]}" - assert switches[1].name == f"Mock Title Wi-Fi {expected_wifi_names[1]}" - assert switches[2].name == "printer Internet Access" + states = hass.states.async_all() + assert len(states) == 3 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/fritz/test_update.py b/tests/components/fritz/test_update.py index 5d7ef852d4c..cca5decbcc4 100644 --- a/tests/components/fritz/test_update.py +++ b/tests/components/fritz/test_update.py @@ -2,10 +2,13 @@ from unittest.mock import patch +import pytest +from syrupy.assertion import SnapshotAssertion + from homeassistant.components.fritz.const import DOMAIN -from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from .const import ( MOCK_FB_SERVICES, @@ -14,8 +17,7 @@ from .const import ( MOCK_USER_DATA, ) -from tests.common import MockConfigEntry -from tests.typing import ClientSessionGenerator +from tests.common import MockConfigEntry, snapshot_platform AVAILABLE_UPDATE = { "UserInterface1": { @@ -27,30 +29,36 @@ AVAILABLE_UPDATE = { } +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_update_entities_initialized( hass: HomeAssistant, - hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - updates = hass.states.async_all(UPDATE_DOMAIN) - assert len(updates) == 1 + states = hass.states.async_all() + assert len(states) == 1 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_update_available( hass: HomeAssistant, - hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" @@ -59,64 +67,45 @@ async def test_update_available( entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert 
entry.state is ConfigEntryState.LOADED + with patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]): + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - update = hass.states.get("update.mock_title_fritz_os") - assert update is not None - assert update.state == "on" - assert update.attributes.get("installed_version") == "7.29" - assert update.attributes.get("latest_version") == MOCK_FIRMWARE_AVAILABLE - assert update.attributes.get("release_url") == MOCK_FIRMWARE_RELEASE_URL - - -async def test_no_update_available( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - fc_class_mock, - fh_class_mock, -) -> None: - """Test update entities.""" - - entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - - update = hass.states.get("update.mock_title_fritz_os") - assert update is not None - assert update.state == "off" - assert update.attributes.get("installed_version") == "7.29" - assert update.attributes.get("latest_version") == "7.29" + states = hass.states.async_all() + assert len(states) == 1 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_available_update_can_be_installed( hass: HomeAssistant, - hass_client: ClientSessionGenerator, + entity_registry: er.EntityRegistry, fc_class_mock, fh_class_mock, + snapshot: SnapshotAssertion, ) -> None: """Test update entities.""" fc_class_mock().override_services({**MOCK_FB_SERVICES, **AVAILABLE_UPDATE}) - with patch( - "homeassistant.components.fritz.coordinator.FritzBoxTools.async_trigger_firmware_update", - return_value=True, - ) as mocked_update_call: + with ( + patch( + "homeassistant.components.fritz.coordinator.FritzBoxTools.async_trigger_firmware_update", + return_value=True, + ) as mocked_update_call, + patch("homeassistant.components.fritz.PLATFORMS", [Platform.UPDATE]), + ): entry = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) + assert await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert entry.state is ConfigEntryState.LOADED - update = hass.states.get("update.mock_title_fritz_os") - assert update is not None - assert update.state == "on" + states = hass.states.async_all() + assert len(states) == 1 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) await hass.services.async_call( "update", From 42ed7fbb0de3c226acd52e5993034404b988a0f3 Mon Sep 17 00:00:00 2001 From: MJJ Date: Tue, 3 Sep 2024 16:50:30 +0200 Subject: [PATCH 1384/1635] Increase timeout for fetching buienradar weather data (#124597) Increase timeout for fetching weather data --- homeassistant/components/buienradar/const.py | 1 + homeassistant/components/buienradar/util.py | 16 ++++++++-------- homeassistant/components/buienradar/weather.py | 2 +- 3 files changed, 10 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/buienradar/const.py b/homeassistant/components/buienradar/const.py index c82970ed318..fd92afd59b0 100644 --- a/homeassistant/components/buienradar/const.py +++ b/homeassistant/components/buienradar/const.py @@ -2,6 +2,7 @@ DOMAIN = "buienradar" +DEFAULT_TIMEOUT = 60 DEFAULT_TIMEFRAME = 60 DEFAULT_DIMENSION = 700 diff --git 
a/homeassistant/components/buienradar/util.py b/homeassistant/components/buienradar/util.py index b641644cebe..f089fce89b7 100644 --- a/homeassistant/components/buienradar/util.py +++ b/homeassistant/components/buienradar/util.py @@ -1,9 +1,9 @@ """Shared utilities for different supported platforms.""" -from asyncio import timeout from datetime import datetime, timedelta from http import HTTPStatus import logging +from typing import Any import aiohttp from buienradar.buienradar import parse_data @@ -27,12 +27,12 @@ from buienradar.constants import ( from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE -from homeassistant.core import CALLBACK_TYPE, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util -from .const import SCHEDULE_NOK, SCHEDULE_OK +from .const import DEFAULT_TIMEOUT, SCHEDULE_NOK, SCHEDULE_OK __all__ = ["BrData"] _LOGGER = logging.getLogger(__name__) @@ -59,10 +59,10 @@ class BrData: load_error_count: int = WARN_THRESHOLD rain_error_count: int = WARN_THRESHOLD - def __init__(self, hass, coordinates, timeframe, devices): + def __init__(self, hass: HomeAssistant, coordinates, timeframe, devices) -> None: """Initialize the data object.""" self.devices = devices - self.data = {} + self.data: dict[str, Any] | None = {} self.hass = hass self.coordinates = coordinates self.timeframe = timeframe @@ -93,9 +93,9 @@ class BrData: resp = None try: websession = async_get_clientsession(self.hass) - async with timeout(10): - resp = await websession.get(url) - + async with websession.get( + url, timeout=aiohttp.ClientTimeout(total=DEFAULT_TIMEOUT) + ) as resp: result[STATUS_CODE] = resp.status result[CONTENT] = await resp.text() if resp.status == HTTPStatus.OK: diff --git a/homeassistant/components/buienradar/weather.py b/homeassistant/components/buienradar/weather.py index 02e1f444c9c..2af66982fab 100644 --- a/homeassistant/components/buienradar/weather.py +++ b/homeassistant/components/buienradar/weather.py @@ -130,7 +130,7 @@ class BrWeather(WeatherEntity): _attr_should_poll = False _attr_supported_features = WeatherEntityFeature.FORECAST_DAILY - def __init__(self, config, coordinates): + def __init__(self, config, coordinates) -> None: """Initialize the platform with a data instance and station name.""" self._stationname = config.get(CONF_NAME, "Buienradar") self._attr_name = self._stationname or f"BR {'(unknown station)'}" From 78517f75e8b7f2ab529fd670150cc63bf9801c8a Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Tue, 3 Sep 2024 10:50:55 -0400 Subject: [PATCH 1385/1635] Add favorites support to Media Browser for Squeezebox integration (#124732) * Add Favorites support to Media Browser * CI fixes * More CI Fixes * Another CI * Change icons for other library items to use standard LMS icons * Change max favorites to BROWSE_LIMIT * Simplify library_payload to consolidate favorite and non-favorite items * Simplify library_payload to consolidate favorite and non-favorite items * Add support for favorite hierarchy * small fix for icon naming with local albums * Add ability to expand an album from a favorite list * Reformat to fix linting error * and ruff format * Use library calls from pysqueezebox * Folder and playback support * Bump to pysqueezebox 0.8.0 
* Bump pysqueezebox version to 0.8.1 * Add unit tests * Improve unit tests * Refactor tests to use websockets and services.async_call * Apply suggestions from code review --------- Co-authored-by: peteS-UK <64092177+peteS-UK@users.noreply.github.com> --- .../components/squeezebox/browse_media.py | 54 ++++- .../components/squeezebox/media_player.py | 2 +- tests/components/squeezebox/conftest.py | 133 ++++++++++++ .../squeezebox/test_media_browser.py | 205 ++++++++++++++++++ 4 files changed, 382 insertions(+), 12 deletions(-) create mode 100644 tests/components/squeezebox/conftest.py create mode 100644 tests/components/squeezebox/test_media_browser.py diff --git a/homeassistant/components/squeezebox/browse_media.py b/homeassistant/components/squeezebox/browse_media.py index bc63bcb7f2f..f68624f8f06 100644 --- a/homeassistant/components/squeezebox/browse_media.py +++ b/homeassistant/components/squeezebox/browse_media.py @@ -11,9 +11,10 @@ from homeassistant.components.media_player import ( ) from homeassistant.helpers.network import is_internal_request -LIBRARY = ["Artists", "Albums", "Tracks", "Playlists", "Genres"] +LIBRARY = ["Favorites", "Artists", "Albums", "Tracks", "Playlists", "Genres"] MEDIA_TYPE_TO_SQUEEZEBOX = { + "Favorites": "favorites", "Artists": "artists", "Albums": "albums", "Tracks": "titles", @@ -32,9 +33,11 @@ SQUEEZEBOX_ID_BY_TYPE = { MediaType.TRACK: "track_id", MediaType.PLAYLIST: "playlist_id", MediaType.GENRE: "genre_id", + "Favorites": "item_id", } CONTENT_TYPE_MEDIA_CLASS = { + "Favorites": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, "Artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST}, "Albums": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM}, "Tracks": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, @@ -57,6 +60,7 @@ CONTENT_TYPE_TO_CHILD_TYPE = { "Tracks": MediaType.TRACK, "Playlists": MediaType.PLAYLIST, "Genres": MediaType.GENRE, + "Favorites": None, # can only be determined after inspecting the item } BROWSE_LIMIT = 1000 @@ -64,6 +68,7 @@ BROWSE_LIMIT = 1000 async def build_item_response(entity, player, payload): """Create response payload for search described by payload.""" + internal_request = is_internal_request(entity.hass) search_id = payload["search_id"] @@ -71,6 +76,8 @@ async def build_item_response(entity, player, payload): media_class = CONTENT_TYPE_MEDIA_CLASS[search_type] + children = None + if search_id and search_id != search_type: browse_id = (SQUEEZEBOX_ID_BY_TYPE[search_type], search_id) else: @@ -82,16 +89,36 @@ async def build_item_response(entity, player, payload): browse_id=browse_id, ) - children = None - if result is not None and result.get("items"): item_type = CONTENT_TYPE_TO_CHILD_TYPE[search_type] - child_media_class = CONTENT_TYPE_MEDIA_CLASS[item_type] children = [] for item in result["items"]: item_id = str(item["id"]) item_thumbnail = None + if item_type: + child_item_type = item_type + child_media_class = CONTENT_TYPE_MEDIA_CLASS[item_type] + can_expand = child_media_class["children"] is not None + can_play = True + + if search_type == "Favorites": + if "album_id" in item: + item_id = str(item["album_id"]) + child_item_type = MediaType.ALBUM + child_media_class = CONTENT_TYPE_MEDIA_CLASS[MediaType.ALBUM] + can_expand = True + can_play = True + elif item["hasitems"]: + child_item_type = "Favorites" + child_media_class = CONTENT_TYPE_MEDIA_CLASS["Favorites"] + can_expand = True + can_play = False + else: + child_item_type = "Favorites" + child_media_class = 
CONTENT_TYPE_MEDIA_CLASS[MediaType.TRACK] + can_expand = False + can_play = True if artwork_track_id := item.get("artwork_track_id"): if internal_request: @@ -102,15 +129,17 @@ async def build_item_response(entity, player, payload): item_thumbnail = entity.get_browse_image_url( item_type, item_id, artwork_track_id ) + else: + item_thumbnail = item.get("image_url") # will not be proxied by HA children.append( BrowseMedia( title=item["title"], media_class=child_media_class["item"], media_content_id=item_id, - media_content_type=item_type, - can_play=True, - can_expand=child_media_class["children"] is not None, + media_content_type=child_item_type, + can_play=can_play, + can_expand=can_expand, thumbnail=item_thumbnail, ) ) @@ -124,7 +153,7 @@ async def build_item_response(entity, player, payload): children_media_class=media_class["children"], media_content_id=search_id, media_content_type=search_type, - can_play=True, + can_play=search_type != "Favorites", children=children, can_expand=True, ) @@ -144,6 +173,7 @@ async def library_payload(hass, player): for item in LIBRARY: media_class = CONTENT_TYPE_MEDIA_CLASS[item] + result = await player.async_browse( MEDIA_TYPE_TO_SQUEEZEBOX[item], limit=1, @@ -155,7 +185,7 @@ async def library_payload(hass, player): media_class=media_class["children"], media_content_id=item, media_content_type=item, - can_play=True, + can_play=item != "Favorites", can_expand=True, ) ) @@ -184,10 +214,12 @@ async def generate_playlist(player, payload): media_id = payload["search_id"] if media_type not in SQUEEZEBOX_ID_BY_TYPE: - return None + raise BrowseError(f"Media type not supported: {media_type}") browse_id = (SQUEEZEBOX_ID_BY_TYPE[media_type], media_id) result = await player.async_browse( "titles", limit=BROWSE_LIMIT, browse_id=browse_id ) - return result.get("items") + if result and "items" in result: + return result["items"] + raise BrowseError(f"Media not found: {media_type} / {media_id}") diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 552b8ed800c..279e51485f0 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -591,7 +591,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): if media_content_type in [None, "library"]: return await library_payload(self.hass, self._player) - if media_source.is_media_source_id(media_content_id): + if media_content_id and media_source.is_media_source_id(media_content_id): return await media_source.async_browse_media( self.hass, media_content_id, content_filter=media_source_content_filter ) diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py new file mode 100644 index 00000000000..4a4bdc6ae73 --- /dev/null +++ b/tests/components/squeezebox/conftest.py @@ -0,0 +1,133 @@ +"""Setup the squeezebox tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from homeassistant.components.media_player import MediaType +from homeassistant.components.squeezebox import const +from homeassistant.components.squeezebox.browse_media import ( + MEDIA_TYPE_TO_SQUEEZEBOX, + SQUEEZEBOX_ID_BY_TYPE, +) +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import format_mac + +from tests.common import MockConfigEntry + +TEST_HOST = "1.2.3.4" +TEST_PORT = "9000" +TEST_USE_HTTPS = False +SERVER_UUID = 
"12345678-1234-1234-1234-123456789012" +TEST_MAC = "aa:bb:cc:dd:ee:ff" + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.squeezebox.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Add the squeezebox mock config entry to hass.""" + config_entry = MockConfigEntry( + domain=const.DOMAIN, + unique_id=SERVER_UUID, + data={ + CONF_HOST: TEST_HOST, + CONF_PORT: TEST_PORT, + const.CONF_HTTPS: TEST_USE_HTTPS, + }, + ) + config_entry.add_to_hass(hass) + return config_entry + + +async def mock_async_browse( + media_type: MediaType, limit: int, browse_id: tuple | None = None +) -> dict | None: + """Mock the async_browse method of pysqueezebox.Player.""" + child_types = { + "favorites": "favorites", + "albums": "album", + "album": "track", + "genres": "genre", + "genre": "album", + "artists": "artist", + "artist": "album", + "titles": "title", + "title": "title", + "playlists": "playlist", + "playlist": "title", + } + fake_items = [ + { + "title": "Fake Item 1", + "id": "1234", + "hasitems": False, + "item_type": child_types[media_type], + "artwork_track_id": "b35bb9e9", + }, + { + "title": "Fake Item 2", + "id": "12345", + "hasitems": media_type == "favorites", + "item_type": child_types[media_type], + "image_url": "http://lms.internal:9000/html/images/favorites.png", + }, + { + "title": "Fake Item 3", + "id": "123456", + "hasitems": media_type == "favorites", + "album_id": "123456" if media_type == "favorites" else None, + }, + ] + + if browse_id: + search_type, search_id = browse_id + if search_id: + if search_type in SQUEEZEBOX_ID_BY_TYPE.values(): + for item in fake_items: + if item["id"] == search_id: + return { + "title": item["title"], + "items": [item], + } + return None + if search_type in SQUEEZEBOX_ID_BY_TYPE.values(): + return { + "title": search_type, + "items": fake_items, + } + return None + if media_type in MEDIA_TYPE_TO_SQUEEZEBOX.values(): + return { + "title": media_type, + "items": fake_items, + } + return None + + +@pytest.fixture +def lms() -> MagicMock: + """Mock a Lyrion Media Server with one mock player attached.""" + lms = MagicMock() + player = MagicMock() + player.player_id = TEST_MAC + player.name = "Test Player" + player.power = False + player.async_browse = AsyncMock(side_effect=mock_async_browse) + player.async_load_playlist = AsyncMock() + player.async_update = AsyncMock() + player.generate_image_url_from_track_id = MagicMock( + return_value="http://lms.internal:9000/html/images/favorites.png" + ) + lms.async_get_players = AsyncMock(return_value=[player]) + lms.async_query = AsyncMock(return_value={"uuid": format_mac(TEST_MAC)}) + return lms diff --git a/tests/components/squeezebox/test_media_browser.py b/tests/components/squeezebox/test_media_browser.py new file mode 100644 index 00000000000..62d668ca57b --- /dev/null +++ b/tests/components/squeezebox/test_media_browser.py @@ -0,0 +1,205 @@ +"""Test the media browser interface.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from homeassistant.components.media_player import ( + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + BrowseError, + MediaType, +) +from homeassistant.components.squeezebox.browse_media import ( + LIBRARY, + MEDIA_TYPE_TO_SQUEEZEBOX, +) +from homeassistant.const import ATTR_ENTITY_ID +from 
homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.typing import WebSocketGenerator + + +@pytest.fixture(autouse=True) +async def setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, lms: MagicMock +) -> None: + """Fixture for setting up the component.""" + with ( + patch("homeassistant.components.squeezebox.Server", return_value=lms), + patch( + "homeassistant.components.squeezebox.media_player.start_server_discovery" + ), + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done(wait_background_tasks=True) + + +async def test_async_browse_media_root( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test the async_browse_media function at the root level.""" + + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "", + "media_content_type": "library", + } + ) + response = await client.receive_json() + assert response["success"] + result = response["result"] + for idx, item in enumerate(result["children"]): + assert item["title"] == LIBRARY[idx] + + +async def test_async_browse_media_with_subitems( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test each category with subitems.""" + for category in ("Favorites", "Artists", "Albums", "Playlists", "Genres"): + with patch( + "homeassistant.components.squeezebox.browse_media.is_internal_request", + return_value=False, + ): + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "", + "media_content_type": category, + } + ) + response = await client.receive_json() + assert response["success"] + category_level = response["result"] + assert category_level["title"] == MEDIA_TYPE_TO_SQUEEZEBOX[category] + assert category_level["children"][0]["title"] == "Fake Item 1" + + # Look up a subitem + search_type = category_level["children"][0]["media_content_type"] + search_id = category_level["children"][0]["media_content_id"] + await client.send_json( + { + "id": 2, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": search_id, + "media_content_type": search_type, + } + ) + response = await client.receive_json() + assert response["success"] + search = response["result"] + assert search["title"] == "Fake Item 1" + + +async def test_async_browse_tracks( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test tracks (no subitems).""" + with patch( + "homeassistant.components.squeezebox.browse_media.is_internal_request", + return_value=True, + ): + client = await hass_ws_client() + await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "", + "media_content_type": "Tracks", + } + ) + response = await client.receive_json() + assert response["success"] + tracks = response["result"] + assert tracks["title"] == "titles" + assert len(tracks["children"]) == 3 + + +async def test_async_browse_error( + hass: HomeAssistant, + config_entry: MockConfigEntry, + hass_ws_client: WebSocketGenerator, +) -> None: + """Search for a non-existent item and assert error.""" + client = await hass_ws_client() + 
await client.send_json( + { + "id": 1, + "type": "media_player/browse_media", + "entity_id": "media_player.test_player", + "media_content_id": "0", + "media_content_type": MediaType.ALBUM, + } + ) + response = await client.receive_json() + assert not response["success"] + + +async def test_play_browse_item( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test play browse item.""" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "1234", + ATTR_MEDIA_CONTENT_TYPE: "album", + }, + ) + + +async def test_play_browse_item_nonexistent( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: + """Test trying to play an item that doesn't exist.""" + with pytest.raises(BrowseError): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "0", + ATTR_MEDIA_CONTENT_TYPE: "album", + }, + blocking=True, + ) + + +async def test_play_browse_item_bad_category( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test trying to play an item whose category doesn't exist.""" + with pytest.raises(BrowseError): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_player", + ATTR_MEDIA_CONTENT_ID: "1234", + ATTR_MEDIA_CONTENT_TYPE: "bad_category", + }, + blocking=True, + ) From 5d072d1030c7307edc40d0abfc90f574930fc784 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20Kr=C3=B6ner?= Date: Tue, 3 Sep 2024 16:51:13 +0200 Subject: [PATCH 1386/1635] Bump PyMetno to 0.13.0 (#125151) --- homeassistant/components/met/manifest.json | 2 +- homeassistant/components/norway_air/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/met/manifest.json b/homeassistant/components/met/manifest.json index e900c5a012a..1a145589a68 100644 --- a/homeassistant/components/met/manifest.json +++ b/homeassistant/components/met/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/met", "iot_class": "cloud_polling", "loggers": ["metno"], - "requirements": ["PyMetno==0.12.0"] + "requirements": ["PyMetno==0.13.0"] } diff --git a/homeassistant/components/norway_air/manifest.json b/homeassistant/components/norway_air/manifest.json index f787f647db8..0c8f15b9b78 100644 --- a/homeassistant/components/norway_air/manifest.json +++ b/homeassistant/components/norway_air/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/norway_air", "iot_class": "cloud_polling", "loggers": ["metno"], - "requirements": ["PyMetno==0.12.0"] + "requirements": ["PyMetno==0.13.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index ebc621486c9..58cded8ad5b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -64,7 +64,7 @@ PyMetEireann==2021.8.0 # homeassistant.components.met # homeassistant.components.norway_air -PyMetno==0.12.0 +PyMetno==0.13.0 # homeassistant.components.keymitt_ble PyMicroBot==0.0.17 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d151c8e6bc2..fdb65eac9a7 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -61,7 +61,7 @@ PyMetEireann==2021.8.0 # homeassistant.components.met # homeassistant.components.norway_air -PyMetno==0.12.0 +PyMetno==0.13.0 # 
homeassistant.components.keymitt_ble PyMicroBot==0.0.17 From 8759a6a14d82313349733afd41d56214e4b93187 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 17:03:36 +0200 Subject: [PATCH 1387/1635] Make optional arguments to frame.report kwarg only (#125062) * Make optional arguments to frame.report kwarg only * Update homeassistant/helpers/frame.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/media_source/__init__.py | 2 +- homeassistant/helpers/frame.py | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/media_source/__init__.py b/homeassistant/components/media_source/__init__.py index 928e46ab528..732a1d834f0 100644 --- a/homeassistant/components/media_source/__init__.py +++ b/homeassistant/components/media_source/__init__.py @@ -160,7 +160,7 @@ async def async_resolve_media( if target_media_player is UNDEFINED: report( "calls media_source.async_resolve_media without passing an entity_id", - {DOMAIN}, + exclude_integrations={DOMAIN}, ) target_media_player = None diff --git a/homeassistant/helpers/frame.py b/homeassistant/helpers/frame.py index 8a30c26886e..e8df1cea21b 100644 --- a/homeassistant/helpers/frame.py +++ b/homeassistant/helpers/frame.py @@ -129,15 +129,19 @@ class MissingIntegrationFrame(HomeAssistantError): def report( what: str, - exclude_integrations: set | None = None, + *, + exclude_integrations: set[str] | None = None, error_if_core: bool = True, + error_if_integration: bool = False, level: int = logging.WARNING, log_custom_component_only: bool = False, - error_if_integration: bool = False, ) -> None: """Report incorrect usage. - Async friendly. + If error_if_core is True, raise instead of log if an integration is not found + when unwinding the stack frame. + If error_if_integration is True, raise instead of log if an integration is found + when unwinding the stack frame. 
""" try: integration_frame = get_integration_frame( From 1dcae0c0a645623aad045d69703edd16bd38855d Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 17:04:08 +0200 Subject: [PATCH 1388/1635] Improve some comments in recorder tests (#125118) --- tests/components/recorder/db_schema_32.py | 5 ++++- tests/components/recorder/test_v32_migration.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/components/recorder/db_schema_32.py b/tests/components/recorder/db_schema_32.py index 60f4f733ec0..6da0272da87 100644 --- a/tests/components/recorder/db_schema_32.py +++ b/tests/components/recorder/db_schema_32.py @@ -224,7 +224,7 @@ class Events(Base): # type: ignore[misc,valid-type] data_id = Column(Integer, ForeignKey("event_data.data_id"), index=True) context_id_bin = Column( LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH) - ) # *** Not originally in v3v320, only added for recorder to startup ok + ) # *** Not originally in v32, only added for recorder to startup ok context_user_id_bin = Column( LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH) ) # *** Not originally in v32, only added for recorder to startup ok @@ -565,6 +565,7 @@ class StatisticsBase: id = Column(Integer, Identity(), primary_key=True) created = Column(DATETIME_TYPE, default=dt_util.utcnow) + # *** Not originally in v32, only added for recorder to startup ok created_ts = Column(TIMESTAMP_TYPE, default=time.time) metadata_id = Column( Integer, @@ -572,11 +573,13 @@ class StatisticsBase: index=True, ) start = Column(DATETIME_TYPE, index=True) + # *** Not originally in v32, only added for recorder to startup ok start_ts = Column(TIMESTAMP_TYPE, index=True) mean = Column(DOUBLE_TYPE) min = Column(DOUBLE_TYPE) max = Column(DOUBLE_TYPE) last_reset = Column(DATETIME_TYPE) + # *** Not originally in v32, only added for recorder to startup ok last_reset_ts = Column(TIMESTAMP_TYPE) state = Column(DOUBLE_TYPE) sum = Column(DOUBLE_TYPE) diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 1e00353d02c..56aa6705688 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -69,7 +69,7 @@ async def test_migrate_times( async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture, ) -> None: - """Test we can migrate times.""" + """Test we can migrate times in the events and states tables.""" importlib.import_module(SCHEMA_MODULE_30) old_db_schema = sys.modules[SCHEMA_MODULE_30] now = dt_util.utcnow() From 56887747a6d10af198ff46c6bf97e8e3a05cb68c Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Tue, 3 Sep 2024 16:09:26 +0100 Subject: [PATCH 1389/1635] Bump aiomealie to 0.9.2 (#125153) Bump mealie version --- homeassistant/components/mealie/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index d8fe26d97b3..4fabdffadc4 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.9.1"] + "requirements": ["aiomealie==0.9.2"] } diff --git a/requirements_all.txt b/requirements_all.txt index 58cded8ad5b..39b464c0561 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ 
-288,7 +288,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.1 +aiomealie==0.9.2 # homeassistant.components.modern_forms aiomodernforms==0.1.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index fdb65eac9a7..1fc15b0f78f 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -270,7 +270,7 @@ aiolookin==1.0.0 aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.9.1 +aiomealie==0.9.2 # homeassistant.components.modern_forms aiomodernforms==0.1.8 From 470335e27ad8403887e1b2ba3f7004bdd07400ac Mon Sep 17 00:00:00 2001 From: ollo69 <60491700+ollo69@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:11:17 +0200 Subject: [PATCH 1390/1635] Add sensors for AsusWRT using http(s) library (#124337) * Additional sensors for AsusWRT using http(s) library * Remove temperature sensors refactor from PR * Fix test function name * Change translation a suggested * Requested changes --- homeassistant/components/asuswrt/bridge.py | 59 +++++ homeassistant/components/asuswrt/const.py | 13 ++ homeassistant/components/asuswrt/icons.json | 15 ++ homeassistant/components/asuswrt/sensor.py | 73 +++++++ homeassistant/components/asuswrt/strings.json | 21 ++ tests/components/asuswrt/conftest.py | 42 +++- tests/components/asuswrt/test_sensor.py | 206 +++++++++++++++--- 7 files changed, 391 insertions(+), 38 deletions(-) diff --git a/homeassistant/components/asuswrt/bridge.py b/homeassistant/components/asuswrt/bridge.py index 4e928d63666..bc6f0fe6fd2 100644 --- a/homeassistant/components/asuswrt/bridge.py +++ b/homeassistant/components/asuswrt/bridge.py @@ -5,6 +5,7 @@ from __future__ import annotations from abc import ABC, abstractmethod from collections import namedtuple from collections.abc import Awaitable, Callable, Coroutine +from datetime import datetime import functools import logging from typing import Any, cast @@ -40,17 +41,23 @@ from .const import ( PROTOCOL_HTTPS, PROTOCOL_TELNET, SENSORS_BYTES, + SENSORS_CPU, SENSORS_LOAD_AVG, + SENSORS_MEMORY, SENSORS_RATES, SENSORS_TEMPERATURES, SENSORS_TEMPERATURES_LEGACY, + SENSORS_UPTIME, ) SENSORS_TYPE_BYTES = "sensors_bytes" SENSORS_TYPE_COUNT = "sensors_count" +SENSORS_TYPE_CPU = "sensors_cpu" SENSORS_TYPE_LOAD_AVG = "sensors_load_avg" +SENSORS_TYPE_MEMORY = "sensors_memory" SENSORS_TYPE_RATES = "sensors_rates" SENSORS_TYPE_TEMPERATURES = "sensors_temperatures" +SENSORS_TYPE_UPTIME = "sensors_uptime" WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) # noqa: PYI024 @@ -346,6 +353,7 @@ class AsusWrtHttpBridge(AsusWrtBridge): async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]: """Return a dictionary of available sensors for this bridge.""" + sensors_cpu = await self._get_available_cpu_sensors() sensors_temperatures = await self._get_available_temperature_sensors() sensors_loadavg = await self._get_loadavg_sensors_availability() return { @@ -353,20 +361,49 @@ class AsusWrtHttpBridge(AsusWrtBridge): KEY_SENSORS: SENSORS_BYTES, KEY_METHOD: self._get_bytes, }, + SENSORS_TYPE_CPU: { + KEY_SENSORS: sensors_cpu, + KEY_METHOD: self._get_cpu_usage, + }, SENSORS_TYPE_LOAD_AVG: { KEY_SENSORS: sensors_loadavg, KEY_METHOD: self._get_load_avg, }, + SENSORS_TYPE_MEMORY: { + KEY_SENSORS: SENSORS_MEMORY, + KEY_METHOD: self._get_memory_usage, + }, SENSORS_TYPE_RATES: { KEY_SENSORS: SENSORS_RATES, KEY_METHOD: self._get_rates, }, + SENSORS_TYPE_UPTIME: { + KEY_SENSORS: SENSORS_UPTIME, + KEY_METHOD: self._get_uptime, + }, SENSORS_TYPE_TEMPERATURES: { KEY_SENSORS: 
sensors_temperatures, KEY_METHOD: self._get_temperatures, }, } + async def _get_available_cpu_sensors(self) -> list[str]: + """Check which cpu information is available on the router.""" + try: + available_cpu = await self._api.async_get_cpu_usage() + available_sensors = [t for t in SENSORS_CPU if t in available_cpu] + except AsusWrtError as exc: + _LOGGER.warning( + ( + "Failed checking cpu sensor availability for ASUS router" + " %s. Exception: %s" + ), + self.host, + exc, + ) + return [] + return available_sensors + async def _get_available_temperature_sensors(self) -> list[str]: """Check which temperature information is available on the router.""" try: @@ -415,3 +452,25 @@ class AsusWrtHttpBridge(AsusWrtBridge): async def _get_temperatures(self) -> Any: """Fetch temperatures information from the router.""" return await self._api.async_get_temperatures() + + @handle_errors_and_zip(AsusWrtError, None) + async def _get_cpu_usage(self) -> Any: + """Fetch cpu information from the router.""" + return await self._api.async_get_cpu_usage() + + @handle_errors_and_zip(AsusWrtError, None) + async def _get_memory_usage(self) -> Any: + """Fetch memory information from the router.""" + return await self._api.async_get_memory_usage() + + async def _get_uptime(self) -> dict[str, Any]: + """Fetch uptime from the router.""" + try: + uptimes = await self._api.async_get_uptime() + except AsusWrtError as exc: + raise UpdateFailed(exc) from exc + + last_boot = datetime.fromisoformat(uptimes["last_boot"]) + uptime = uptimes["uptime"] + + return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False)) diff --git a/homeassistant/components/asuswrt/const.py b/homeassistant/components/asuswrt/const.py index 5ce37207145..7790750538e 100644 --- a/homeassistant/components/asuswrt/const.py +++ b/homeassistant/components/asuswrt/const.py @@ -27,7 +27,20 @@ PROTOCOL_TELNET = "telnet" # Sensors SENSORS_BYTES = ["sensor_rx_bytes", "sensor_tx_bytes"] SENSORS_CONNECTED_DEVICE = ["sensor_connected_device"] +SENSORS_CPU = [ + "cpu_total_usage", + "cpu1_usage", + "cpu2_usage", + "cpu3_usage", + "cpu4_usage", + "cpu5_usage", + "cpu6_usage", + "cpu7_usage", + "cpu8_usage", +] SENSORS_LOAD_AVG = ["sensor_load_avg1", "sensor_load_avg5", "sensor_load_avg15"] +SENSORS_MEMORY = ["mem_usage_perc", "mem_free", "mem_used"] SENSORS_RATES = ["sensor_rx_rates", "sensor_tx_rates"] SENSORS_TEMPERATURES_LEGACY = ["2.4GHz", "5.0GHz", "CPU"] SENSORS_TEMPERATURES = [*SENSORS_TEMPERATURES_LEGACY, "5.0GHz_2", "6.0GHz"] +SENSORS_UPTIME = ["sensor_last_boot", "sensor_uptime"] diff --git a/homeassistant/components/asuswrt/icons.json b/homeassistant/components/asuswrt/icons.json index a4e44496a2f..b5b2c35f742 100644 --- a/homeassistant/components/asuswrt/icons.json +++ b/homeassistant/components/asuswrt/icons.json @@ -24,6 +24,21 @@ }, "load_avg_15m": { "default": "mdi:cpu-32-bit" + }, + "cpu_usage": { + "default": "mdi:cpu-32-bit" + }, + "cpu_core_usage": { + "default": "mdi:cpu-32-bit" + }, + "memory_usage": { + "default": "mdi:memory" + }, + "memory_free": { + "default": "mdi:memory" + }, + "memory_used": { + "default": "mdi:memory" } } } diff --git a/homeassistant/components/asuswrt/sensor.py b/homeassistant/components/asuswrt/sensor.py index 69470882153..fb43e574379 100644 --- a/homeassistant/components/asuswrt/sensor.py +++ b/homeassistant/components/asuswrt/sensor.py @@ -11,10 +11,12 @@ from homeassistant.components.sensor import ( SensorStateClass, ) from homeassistant.const import ( + PERCENTAGE, EntityCategory, UnitOfDataRate, 
UnitOfInformation, UnitOfTemperature, + UnitOfTime, ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -30,9 +32,12 @@ from .const import ( KEY_SENSORS, SENSORS_BYTES, SENSORS_CONNECTED_DEVICE, + SENSORS_CPU, SENSORS_LOAD_AVG, + SENSORS_MEMORY, SENSORS_RATES, SENSORS_TEMPERATURES, + SENSORS_UPTIME, ) from .router import AsusWrtRouter @@ -46,6 +51,19 @@ class AsusWrtSensorEntityDescription(SensorEntityDescription): UNIT_DEVICES = "Devices" +CPU_CORE_SENSORS: tuple[AsusWrtSensorEntityDescription, ...] = tuple( + AsusWrtSensorEntityDescription( + key=sens_key, + translation_key="cpu_core_usage", + translation_placeholders={"core_id": str(core_id)}, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=1, + ) + for core_id, sens_key in enumerate(SENSORS_CPU[1:], start=1) +) CONNECTION_SENSORS: tuple[AsusWrtSensorEntityDescription, ...] = ( AsusWrtSensorEntityDescription( key=SENSORS_CONNECTED_DEVICE[0], @@ -167,6 +185,61 @@ CONNECTION_SENSORS: tuple[AsusWrtSensorEntityDescription, ...] = ( entity_registry_enabled_default=False, suggested_display_precision=1, ), + AsusWrtSensorEntityDescription( + key=SENSORS_MEMORY[0], + translation_key="memory_usage", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=1, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_MEMORY[1], + translation_key="memory_free", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.MEGABYTES, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=2, + factor=1024, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_MEMORY[2], + translation_key="memory_used", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.MEGABYTES, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=2, + factor=1024, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_UPTIME[0], + translation_key="last_boot", + device_class=SensorDeviceClass.TIMESTAMP, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_UPTIME[1], + translation_key="uptime", + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.DURATION, + native_unit_of_measurement=UnitOfTime.SECONDS, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), + AsusWrtSensorEntityDescription( + key=SENSORS_CPU[0], + translation_key="cpu_usage", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + suggested_display_precision=1, + ), + *CPU_CORE_SENSORS, ) diff --git a/homeassistant/components/asuswrt/strings.json b/homeassistant/components/asuswrt/strings.json index 4c8386dcd00..bab40f281f5 100644 --- a/homeassistant/components/asuswrt/strings.json +++ b/homeassistant/components/asuswrt/strings.json @@ -88,6 +88,27 @@ }, "6ghz_temperature": { "name": "6GHz Temperature" + }, + "cpu_usage": { + "name": "CPU usage" + }, + "cpu_core_usage": { + "name": "CPU core {core_id} usage" + }, + 
"memory_usage": { + "name": "Memory usage" + }, + "memory_free": { + "name": "Memory free" + }, + "memory_used": { + "name": "Memory used" + }, + "last_boot": { + "name": "Last boot" + }, + "uptime": { + "name": "Uptime" } } }, diff --git a/tests/components/asuswrt/conftest.py b/tests/components/asuswrt/conftest.py index 7710e26707c..f850a26b997 100644 --- a/tests/components/asuswrt/conftest.py +++ b/tests/components/asuswrt/conftest.py @@ -16,12 +16,30 @@ ASUSWRT_LEGACY_LIB = f"{ASUSWRT_BASE}.bridge.AsusWrtLegacy" MOCK_BYTES_TOTAL = 60000000000, 50000000000 MOCK_BYTES_TOTAL_HTTP = dict(enumerate(MOCK_BYTES_TOTAL)) +MOCK_CPU_USAGE = { + "cpu1_usage": 0.1, + "cpu2_usage": 0.2, + "cpu3_usage": 0.3, + "cpu4_usage": 0.4, + "cpu5_usage": 0.5, + "cpu6_usage": 0.6, + "cpu7_usage": 0.7, + "cpu8_usage": 0.8, + "cpu_total_usage": 0.9, +} MOCK_CURRENT_TRANSFER_RATES = 20000000, 10000000 MOCK_CURRENT_TRANSFER_RATES_HTTP = dict(enumerate(MOCK_CURRENT_TRANSFER_RATES)) MOCK_LOAD_AVG_HTTP = {"load_avg_1": 1.1, "load_avg_5": 1.2, "load_avg_15": 1.3} MOCK_LOAD_AVG = list(MOCK_LOAD_AVG_HTTP.values()) +MOCK_MEMORY_USAGE = { + "mem_usage_perc": 52.4, + "mem_total": 1048576, + "mem_free": 393216, + "mem_used": 655360, +} MOCK_TEMPERATURES = {"2.4GHz": 40.2, "5.0GHz": 0, "CPU": 71.2} MOCK_TEMPERATURES_HTTP = {**MOCK_TEMPERATURES, "5.0GHz_2": 40.3, "6.0GHz": 40.4} +MOCK_UPTIME = {"last_boot": "2024-08-02T00:47:00+00:00", "uptime": 1625927} @pytest.fixture(name="patch_setup_entry") @@ -121,6 +139,11 @@ def mock_controller_connect_http(mock_devices_http): service_mock.return_value.async_get_temperatures.return_value = { k: v for k, v in MOCK_TEMPERATURES_HTTP.items() if k != "5.0GHz" } + service_mock.return_value.async_get_cpu_usage.return_value = MOCK_CPU_USAGE + service_mock.return_value.async_get_memory_usage.return_value = ( + MOCK_MEMORY_USAGE + ) + service_mock.return_value.async_get_uptime.return_value = MOCK_UPTIME yield service_mock @@ -133,13 +156,22 @@ def mock_controller_connect_http_sens_fail(connect_http): connect_http.return_value.async_get_traffic_rates.side_effect = AsusWrtError connect_http.return_value.async_get_loadavg.side_effect = AsusWrtError connect_http.return_value.async_get_temperatures.side_effect = AsusWrtError + connect_http.return_value.async_get_cpu_usage.side_effect = AsusWrtError + connect_http.return_value.async_get_memory_usage.side_effect = AsusWrtError + connect_http.return_value.async_get_uptime.side_effect = AsusWrtError @pytest.fixture(name="connect_http_sens_detect") def mock_controller_connect_http_sens_detect(): """Mock a successful sensor detection using http library.""" - with patch( - f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", - return_value=[*MOCK_TEMPERATURES_HTTP], - ) as mock_sens_detect: - yield mock_sens_detect + with ( + patch( + f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_temperature_sensors", + return_value=[*MOCK_TEMPERATURES_HTTP], + ) as mock_sens_temp_detect, + patch( + f"{ASUSWRT_BASE}.bridge.AsusWrtHttpBridge._get_available_cpu_sensors", + return_value=[*MOCK_CPU_USAGE], + ) as mock_sens_cpu_detect, + ): + yield mock_sens_temp_detect, mock_sens_cpu_detect diff --git a/tests/components/asuswrt/test_sensor.py b/tests/components/asuswrt/test_sensor.py index 3de830f3f34..0036c40a6f2 100644 --- a/tests/components/asuswrt/test_sensor.py +++ b/tests/components/asuswrt/test_sensor.py @@ -2,6 +2,7 @@ from datetime import timedelta +from freezegun.api import FrozenDateTimeFactory from pyasuswrt.exceptions import 
AsusWrtError, AsusWrtNotAvailableInfoError import pytest @@ -10,10 +11,13 @@ from homeassistant.components.asuswrt.const import ( CONF_INTERFACE, DOMAIN, SENSORS_BYTES, + SENSORS_CPU, SENSORS_LOAD_AVG, + SENSORS_MEMORY, SENSORS_RATES, SENSORS_TEMPERATURES, SENSORS_TEMPERATURES_LEGACY, + SENSORS_UPTIME, ) from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.config_entries import ConfigEntryState @@ -26,7 +30,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import slugify -from homeassistant.util.dt import utcnow from .common import ( CONFIG_DATA_HTTP, @@ -42,7 +45,14 @@ from tests.common import MockConfigEntry, async_fire_time_changed SENSORS_DEFAULT = [*SENSORS_BYTES, *SENSORS_RATES] SENSORS_ALL_LEGACY = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES_LEGACY] -SENSORS_ALL_HTTP = [*SENSORS_DEFAULT, *SENSORS_LOAD_AVG, *SENSORS_TEMPERATURES] +SENSORS_ALL_HTTP = [ + *SENSORS_DEFAULT, + *SENSORS_CPU, + *SENSORS_LOAD_AVG, + *SENSORS_MEMORY, + *SENSORS_TEMPERATURES, + *SENSORS_UPTIME, +] @pytest.fixture(name="create_device_registry_devices") @@ -95,6 +105,7 @@ def _setup_entry(hass: HomeAssistant, config, sensors, unique_id=None): async def _test_sensors( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, mock_devices, config, entry_unique_id, @@ -125,7 +136,8 @@ async def _test_sensors( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() assert hass.states.get(f"{device_tracker.DOMAIN}.test").state == STATE_HOME @@ -139,7 +151,8 @@ async def _test_sensors( # remove first tracked device mock_devices.pop(MOCK_MACS[0]) - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # consider home option set, all devices still home but only 1 device connected @@ -160,7 +173,8 @@ async def _test_sensors( config_entry, options={CONF_CONSIDER_HOME: 0} ) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # consider home option set to 0, device "test" not home @@ -176,13 +190,16 @@ async def _test_sensors( ) async def test_sensors_legacy( hass: HomeAssistant, - connect_legacy, + freezer: FrozenDateTimeFactory, mock_devices_legacy, - create_device_registry_devices, entry_unique_id, + connect_legacy, + create_device_registry_devices, ) -> None: """Test creating AsusWRT default sensors and tracker with legacy protocol.""" - await _test_sensors(hass, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id) + await _test_sensors( + hass, freezer, mock_devices_legacy, CONFIG_DATA_TELNET, entry_unique_id + ) @pytest.mark.parametrize( @@ -191,16 +208,21 @@ async def test_sensors_legacy( ) async def test_sensors_http( hass: HomeAssistant, - connect_http, + freezer: FrozenDateTimeFactory, mock_devices_http, - create_device_registry_devices, entry_unique_id, + connect_http, + create_device_registry_devices, ) -> None: """Test creating AsusWRT default sensors and tracker with http protocol.""" - await _test_sensors(hass, 
mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id) + await _test_sensors( + hass, freezer, mock_devices_http, CONFIG_DATA_HTTP, entry_unique_id + ) -async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: +async def _test_loadavg_sensors( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, config +) -> None: """Test creating an AsusWRT load average sensors.""" config_entry, sensor_prefix = _setup_entry(hass, config, SENSORS_LOAD_AVG) config_entry.add_to_hass(hass) @@ -208,7 +230,8 @@ async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # assert temperature sensor available @@ -217,18 +240,22 @@ async def _test_loadavg_sensors(hass: HomeAssistant, config) -> None: assert hass.states.get(f"{sensor_prefix}_sensor_load_avg15").state == "1.3" -async def test_loadavg_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: +async def test_loadavg_sensors_legacy( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy +) -> None: """Test creating an AsusWRT load average sensors.""" - await _test_loadavg_sensors(hass, CONFIG_DATA_TELNET) + await _test_loadavg_sensors(hass, freezer, CONFIG_DATA_TELNET) -async def test_loadavg_sensors_http(hass: HomeAssistant, connect_http) -> None: +async def test_loadavg_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: """Test creating an AsusWRT load average sensors.""" - await _test_loadavg_sensors(hass, CONFIG_DATA_HTTP) + await _test_loadavg_sensors(hass, freezer, CONFIG_DATA_HTTP) async def test_loadavg_sensors_unaivalable_http( - hass: HomeAssistant, connect_http + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http ) -> None: """Test load average sensors no available using http.""" config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_LOAD_AVG) @@ -241,7 +268,8 @@ async def test_loadavg_sensors_unaivalable_http( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # assert load average sensors not available @@ -271,7 +299,9 @@ async def test_temperature_sensors_http_fail( assert not hass.states.get(f"{sensor_prefix}_6_0ghz") -async def _test_temperature_sensors(hass: HomeAssistant, config, sensors) -> str: +async def _test_temperature_sensors( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, config, sensors +) -> str: """Test creating a AsusWRT temperature sensors.""" config_entry, sensor_prefix = _setup_entry(hass, config, sensors) config_entry.add_to_hass(hass) @@ -279,16 +309,19 @@ async def _test_temperature_sensors(hass: HomeAssistant, config, sensors) -> str # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() return sensor_prefix -async def test_temperature_sensors_legacy(hass: HomeAssistant, connect_legacy) -> None: +async def 
test_temperature_sensors_legacy( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy +) -> None: """Test creating a AsusWRT temperature sensors.""" sensor_prefix = await _test_temperature_sensors( - hass, CONFIG_DATA_TELNET, SENSORS_TEMPERATURES_LEGACY + hass, freezer, CONFIG_DATA_TELNET, SENSORS_TEMPERATURES_LEGACY ) # assert temperature sensor available assert hass.states.get(f"{sensor_prefix}_2_4ghz").state == "40.2" @@ -296,10 +329,12 @@ async def test_temperature_sensors_legacy(hass: HomeAssistant, connect_legacy) - assert not hass.states.get(f"{sensor_prefix}_5_0ghz") -async def test_temperature_sensors_http(hass: HomeAssistant, connect_http) -> None: +async def test_temperature_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: """Test creating a AsusWRT temperature sensors.""" sensor_prefix = await _test_temperature_sensors( - hass, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES + hass, freezer, CONFIG_DATA_HTTP, SENSORS_TEMPERATURES ) # assert temperature sensor available assert hass.states.get(f"{sensor_prefix}_2_4ghz").state == "40.2" @@ -309,6 +344,97 @@ async def test_temperature_sensors_http(hass: HomeAssistant, connect_http) -> No assert not hass.states.get(f"{sensor_prefix}_5_0ghz") +async def test_cpu_sensors_http_fail( + hass: HomeAssistant, connect_http_sens_fail +) -> None: + """Test fail creating AsusWRT cpu sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_CPU) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # assert cpu availability exception is handled correctly + assert not hass.states.get(f"{sensor_prefix}_cpu1_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu2_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu3_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu4_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu5_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu6_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu7_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu8_usage") + assert not hass.states.get(f"{sensor_prefix}_cpu_total_usage") + + +async def test_cpu_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: + """Test creating AsusWRT cpu sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_CPU) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # assert cpu sensors available + assert hass.states.get(f"{sensor_prefix}_cpu1_usage").state == "0.1" + assert hass.states.get(f"{sensor_prefix}_cpu2_usage").state == "0.2" + assert hass.states.get(f"{sensor_prefix}_cpu3_usage").state == "0.3" + assert hass.states.get(f"{sensor_prefix}_cpu4_usage").state == "0.4" + assert hass.states.get(f"{sensor_prefix}_cpu5_usage").state == "0.5" + assert hass.states.get(f"{sensor_prefix}_cpu6_usage").state == "0.6" + assert hass.states.get(f"{sensor_prefix}_cpu7_usage").state == "0.7" + assert hass.states.get(f"{sensor_prefix}_cpu8_usage").state == "0.8" + assert hass.states.get(f"{sensor_prefix}_cpu_total_usage").state == "0.9" + + +async def test_memory_sensors_http( + hass: HomeAssistant, freezer: 
FrozenDateTimeFactory, connect_http +) -> None: + """Test creating AsusWRT memory sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_MEMORY) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # assert memory sensors available + assert hass.states.get(f"{sensor_prefix}_mem_usage_perc").state == "52.4" + assert hass.states.get(f"{sensor_prefix}_mem_free").state == "384.0" + assert hass.states.get(f"{sensor_prefix}_mem_used").state == "640.0" + + +async def test_uptime_sensors_http( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_http +) -> None: + """Test creating AsusWRT uptime sensors.""" + config_entry, sensor_prefix = _setup_entry(hass, CONFIG_DATA_HTTP, SENSORS_UPTIME) + config_entry.add_to_hass(hass) + + # initial devices setup + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # assert uptime sensors available + assert ( + hass.states.get(f"{sensor_prefix}_sensor_last_boot").state + == "2024-08-02T00:47:00+00:00" + ) + assert hass.states.get(f"{sensor_prefix}_sensor_uptime").state == "1625927" + + @pytest.mark.parametrize( "side_effect", [OSError, None], @@ -359,7 +485,9 @@ async def test_connect_fail_http( assert config_entry.state is ConfigEntryState.SETUP_RETRY -async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> None: +async def _test_sensors_polling_fails( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, config, sensors +) -> None: """Test AsusWRT sensors are unavailable when polling fails.""" config_entry, sensor_prefix = _setup_entry(hass, config, sensors) config_entry.add_to_hass(hass) @@ -367,7 +495,8 @@ async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> N # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() for sensor_name in sensors: @@ -380,22 +509,28 @@ async def _test_sensors_polling_fails(hass: HomeAssistant, config, sensors) -> N async def test_sensors_polling_fails_legacy( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, connect_legacy_sens_fail, ) -> None: """Test AsusWRT sensors are unavailable when polling fails.""" - await _test_sensors_polling_fails(hass, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY) + await _test_sensors_polling_fails( + hass, freezer, CONFIG_DATA_TELNET, SENSORS_ALL_LEGACY + ) async def test_sensors_polling_fails_http( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, connect_http_sens_fail, connect_http_sens_detect, ) -> None: """Test AsusWRT sensors are unavailable when polling fails.""" - await _test_sensors_polling_fails(hass, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP) + await _test_sensors_polling_fails(hass, freezer, CONFIG_DATA_HTTP, SENSORS_ALL_HTTP) -async def test_options_reload(hass: HomeAssistant, connect_legacy) -> None: +async def test_options_reload( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, connect_legacy +) -> None: """Test AsusWRT integration is reload changing an options that 
require this.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -408,7 +543,8 @@ async def test_options_reload(hass: HomeAssistant, connect_legacy) -> None: await hass.async_block_till_done() assert connect_legacy.return_value.connection.async_connect.call_count == 1 - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() # change an option that requires integration reload @@ -451,7 +587,10 @@ async def test_unique_id_migration( async def test_decorator_errors( - hass: HomeAssistant, connect_legacy, mock_available_temps + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + connect_legacy, + mock_available_temps, ) -> None: """Test AsusWRT sensors are unavailable on decorator type check error.""" sensors = [*SENSORS_BYTES, *SENSORS_TEMPERATURES_LEGACY] @@ -465,7 +604,8 @@ async def test_decorator_errors( # initial devices setup assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done() for sensor_name in sensors: From 8255728f530df6345b8e043ec291d538a5259553 Mon Sep 17 00:00:00 2001 From: Alexandre CUER Date: Tue, 3 Sep 2024 17:21:13 +0200 Subject: [PATCH 1391/1635] Migrate emoncms to config flow (#121336) * Migrate emoncms to config flow * tests coverage 98% * use runtime_data * Remove pyemoncms bump. * Remove not needed yaml parameters add async_update_data to coordinator * Reduce snapshot size * Remove CONF_UNIT_OF_MEASUREMENT * correct path in emoncms_client mock * Remove init connexion check as done by config_entry_first_refresh since async_update_data catches exceptionand raise UpdateFailed * Remove CONF_EXCLUDE_FEEDID from config flow * Update homeassistant/components/emoncms/__init__.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/emoncms/sensor.py Co-authored-by: Joost Lekkerkerker * Update homeassistant/components/emoncms/strings.json Co-authored-by: Joost Lekkerkerker * Use options in options flow and common strings * Extend the ConfigEntry type * Define the type explicitely * Add data description in strings.json * Update tests/components/emoncms/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Update tests/components/emoncms/test_config_flow.py Co-authored-by: Joost Lekkerkerker * Add test import same yaml conf + corrections * Add test user flow * Use data_description... * Use snapshot_platform in test_sensor * Transfer all fixtures in conftest * Add async_step_choose_feeds to ask flows to user * Test abortion reason in test_flow_import_failure * Add issue when value_template is i yaml conf * make text more expressive in strings.json * Add issue when no feed imported during migration. 
* Update tests/components/emoncms/test_config_flow.py * Update tests/components/emoncms/test_config_flow.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/emoncms/__init__.py | 39 ++++ .../components/emoncms/config_flow.py | 210 ++++++++++++++++++ homeassistant/components/emoncms/const.py | 3 + .../components/emoncms/coordinator.py | 3 +- .../components/emoncms/manifest.json | 1 + homeassistant/components/emoncms/sensor.py | 150 +++++++------ homeassistant/components/emoncms/strings.json | 40 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 2 +- tests/components/emoncms/__init__.py | 11 + tests/components/emoncms/conftest.py | 105 ++++++++- .../emoncms/snapshots/test_sensor.ambr | 41 +++- tests/components/emoncms/test_config_flow.py | 143 ++++++++++++ tests/components/emoncms/test_init.py | 40 ++++ tests/components/emoncms/test_sensor.py | 133 ++++++++--- 15 files changed, 815 insertions(+), 107 deletions(-) create mode 100644 homeassistant/components/emoncms/config_flow.py create mode 100644 homeassistant/components/emoncms/strings.json create mode 100644 tests/components/emoncms/test_config_flow.py create mode 100644 tests/components/emoncms/test_init.py diff --git a/homeassistant/components/emoncms/__init__.py b/homeassistant/components/emoncms/__init__.py index 5e7adbcd6e7..98ed6328578 100644 --- a/homeassistant/components/emoncms/__init__.py +++ b/homeassistant/components/emoncms/__init__.py @@ -1 +1,40 @@ """The emoncms component.""" + +from pyemoncms import EmoncmsClient + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, CONF_URL, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .coordinator import EmoncmsCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool: + """Load a config entry.""" + emoncms_client = EmoncmsClient( + entry.data[CONF_URL], + entry.data[CONF_API_KEY], + session=async_get_clientsession(hass), + ) + coordinator = EmoncmsCoordinator(hass, emoncms_client) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + entry.async_on_unload(entry.add_update_listener(update_listener)) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry): + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/emoncms/config_flow.py b/homeassistant/components/emoncms/config_flow.py new file mode 100644 index 00000000000..fdd5d29788e --- /dev/null +++ b/homeassistant/components/emoncms/config_flow.py @@ -0,0 +1,210 @@ +"""Configflow for the emoncms integration.""" + +from typing import Any + +from pyemoncms import EmoncmsClient +import voluptuous as vol + +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlowWithConfigEntry, +) +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.aiohttp_client 
import async_get_clientsession +from homeassistant.helpers.selector import selector +from homeassistant.helpers.typing import ConfigType + +from .const import ( + CONF_MESSAGE, + CONF_ONLY_INCLUDE_FEEDID, + CONF_SUCCESS, + DOMAIN, + FEED_ID, + FEED_NAME, + FEED_TAG, + LOGGER, +) + + +def get_options(feeds: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Build the selector options with the feed list.""" + return [ + { + "value": feed[FEED_ID], + "label": f"{feed[FEED_ID]}|{feed[FEED_TAG]}|{feed[FEED_NAME]}", + } + for feed in feeds + ] + + +def sensor_name(url: str) -> str: + """Return sensor name.""" + sensorip = url.rsplit("//", maxsplit=1)[-1] + return f"emoncms@{sensorip}" + + +async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]: + """Check connection to emoncms and return feed list if successful.""" + emoncms_client = EmoncmsClient( + url, + api_key, + session=async_get_clientsession(hass), + ) + return await emoncms_client.async_request("/feed/list.json") + + +class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN): + """emoncms integration UI config flow.""" + + url: str + api_key: str + include_only_feeds: list | None = None + dropdown: dict = {} + + @staticmethod + @callback + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlowWithConfigEntry: + """Get the options flow for this handler.""" + return EmoncmsOptionsFlow(config_entry) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Initiate a flow via the UI.""" + errors: dict[str, str] = {} + + if user_input is not None: + self._async_abort_entries_match( + { + CONF_API_KEY: user_input[CONF_API_KEY], + CONF_URL: user_input[CONF_URL], + } + ) + result = await get_feed_list( + self.hass, user_input[CONF_URL], user_input[CONF_API_KEY] + ) + if not result[CONF_SUCCESS]: + errors["base"] = result[CONF_MESSAGE] + else: + self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID) + self.url = user_input[CONF_URL] + self.api_key = user_input[CONF_API_KEY] + options = get_options(result[CONF_MESSAGE]) + self.dropdown = { + "options": options, + "mode": "dropdown", + "multiple": True, + } + return await self.async_step_choose_feeds() + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + vol.Schema( + { + vol.Required(CONF_URL): str, + vol.Required(CONF_API_KEY): str, + } + ), + user_input, + ), + errors=errors, + ) + + async def async_step_choose_feeds( + self, + user_input: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Choose feeds to import.""" + errors: dict[str, str] = {} + include_only_feeds: list = [] + if user_input or self.include_only_feeds is not None: + if self.include_only_feeds is not None: + include_only_feeds = self.include_only_feeds + elif user_input: + include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID] + return self.async_create_entry( + title=sensor_name(self.url), + data={ + CONF_URL: self.url, + CONF_API_KEY: self.api_key, + CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, + }, + ) + return self.async_show_form( + step_id="choose_feeds", + data_schema=vol.Schema( + { + vol.Required( + CONF_ONLY_INCLUDE_FEEDID, + default=include_only_feeds, + ): selector({"select": self.dropdown}), + } + ), + errors=errors, + ) + + async def async_step_import(self, import_info: ConfigType) -> ConfigFlowResult: + """Import config from yaml.""" + url = import_info[CONF_URL] + api_key = import_info[CONF_API_KEY] + include_only_feeds = None + if 
import_info.get(CONF_ONLY_INCLUDE_FEEDID) is not None: + include_only_feeds = list(map(str, import_info[CONF_ONLY_INCLUDE_FEEDID])) + config = { + CONF_API_KEY: api_key, + CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, + CONF_URL: url, + } + LOGGER.debug(config) + result = await self.async_step_user(config) + if errors := result.get("errors"): + return self.async_abort(reason=errors["base"]) + return result + + +class EmoncmsOptionsFlow(OptionsFlowWithConfigEntry): + """Emoncms Options flow handler.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + errors: dict[str, str] = {} + data = self.options if self.options else self._config_entry.data + url = data[CONF_URL] + api_key = data[CONF_API_KEY] + include_only_feeds = data.get(CONF_ONLY_INCLUDE_FEEDID, []) + options: list = include_only_feeds + result = await get_feed_list(self.hass, url, api_key) + if not result[CONF_SUCCESS]: + errors["base"] = result[CONF_MESSAGE] + else: + options = get_options(result[CONF_MESSAGE]) + dropdown = {"options": options, "mode": "dropdown", "multiple": True} + if user_input: + include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID] + return self.async_create_entry( + title=sensor_name(url), + data={ + CONF_URL: url, + CONF_API_KEY: api_key, + CONF_ONLY_INCLUDE_FEEDID: include_only_feeds, + }, + ) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Required( + CONF_ONLY_INCLUDE_FEEDID, default=include_only_feeds + ): selector({"select": dropdown}), + } + ), + errors=errors, + ) diff --git a/homeassistant/components/emoncms/const.py b/homeassistant/components/emoncms/const.py index 96269218316..256db5726bb 100644 --- a/homeassistant/components/emoncms/const.py +++ b/homeassistant/components/emoncms/const.py @@ -7,6 +7,9 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id" CONF_MESSAGE = "message" CONF_SUCCESS = "success" DOMAIN = "emoncms" +FEED_ID = "id" +FEED_NAME = "name" +FEED_TAG = "tag" LOGGER = logging.getLogger(__package__) diff --git a/homeassistant/components/emoncms/coordinator.py b/homeassistant/components/emoncms/coordinator.py index d1f6a2858c7..c6fda5ed7c8 100644 --- a/homeassistant/components/emoncms/coordinator.py +++ b/homeassistant/components/emoncms/coordinator.py @@ -18,14 +18,13 @@ class EmoncmsCoordinator(DataUpdateCoordinator[list[dict[str, Any]] | None]): self, hass: HomeAssistant, emoncms_client: EmoncmsClient, - scan_interval: timedelta, ) -> None: """Initialize the emoncms data coordinator.""" super().__init__( hass, LOGGER, name="emoncms_coordinator", - update_interval=scan_interval, + update_interval=timedelta(seconds=60), ) self.emoncms_client = emoncms_client diff --git a/homeassistant/components/emoncms/manifest.json b/homeassistant/components/emoncms/manifest.json index 09229d0419a..f8f0f2edb95 100644 --- a/homeassistant/components/emoncms/manifest.json +++ b/homeassistant/components/emoncms/manifest.json @@ -2,6 +2,7 @@ "domain": "emoncms", "name": "Emoncms", "codeowners": ["@borpin", "@alexandrecuer"], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/emoncms", "iot_class": "local_polling", "requirements": ["pyemoncms==0.0.7"] diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index 3c448391974..4add7c9625d 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -2,10 +2,8 @@ from __future__ import annotations -from 
datetime import timedelta from typing import Any -from pyemoncms import EmoncmsClient import voluptuous as vol from homeassistant.components.sensor import ( @@ -14,25 +12,33 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) +from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_ID, - CONF_SCAN_INTERVAL, CONF_UNIT_OF_MEASUREMENT, CONF_URL, CONF_VALUE_TEMPLATE, - STATE_UNKNOWN, UnitOfPower, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback +from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import template -from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import CONF_EXCLUDE_FEEDID, CONF_ONLY_INCLUDE_FEEDID +from .config_flow import sensor_name +from .const import ( + CONF_EXCLUDE_FEEDID, + CONF_ONLY_INCLUDE_FEEDID, + DOMAIN, + FEED_ID, + FEED_NAME, + FEED_TAG, +) from .coordinator import EmoncmsCoordinator ATTR_FEEDID = "FeedId" @@ -42,9 +48,7 @@ ATTR_LASTUPDATETIMESTR = "LastUpdatedStr" ATTR_SIZE = "Size" ATTR_TAG = "Tag" ATTR_USERID = "UserId" - CONF_SENSOR_NAMES = "sensor_names" - DECIMALS = 2 DEFAULT_UNIT = UnitOfPower.WATT @@ -76,20 +80,73 @@ async def async_setup_platform( async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: - """Set up the Emoncms sensor.""" - apikey = config[CONF_API_KEY] - url = config[CONF_URL] - sensorid = config[CONF_ID] - value_template = config.get(CONF_VALUE_TEMPLATE) - config_unit = config.get(CONF_UNIT_OF_MEASUREMENT) + """Import config from yaml.""" + if CONF_VALUE_TEMPLATE in config: + async_create_issue( + hass, + DOMAIN, + f"remove_{CONF_VALUE_TEMPLATE}_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.ERROR, + translation_key=f"remove_{CONF_VALUE_TEMPLATE}", + translation_placeholders={ + "domain": DOMAIN, + "parameter": CONF_VALUE_TEMPLATE, + }, + ) + return + if CONF_ONLY_INCLUDE_FEEDID not in config: + async_create_issue( + hass, + DOMAIN, + f"missing_{CONF_ONLY_INCLUDE_FEEDID}_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key=f"missing_{CONF_ONLY_INCLUDE_FEEDID}", + translation_placeholders={ + "domain": DOMAIN, + }, + ) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=config + ) + if ( + result.get("type") == FlowResultType.CREATE_ENTRY + or result.get("reason") == "already_configured" + ): + async_create_issue( + hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + is_fixable=False, + issue_domain=DOMAIN, + breaks_in_ha_version="2025.3.0", + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "emoncms", + }, + ) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the emoncms sensors.""" + config = entry.options if entry.options else entry.data + name = 
sensor_name(config[CONF_URL]) exclude_feeds = config.get(CONF_EXCLUDE_FEEDID) include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID) - sensor_names = config.get(CONF_SENSOR_NAMES) - scan_interval = config.get(CONF_SCAN_INTERVAL, timedelta(seconds=30)) - emoncms_client = EmoncmsClient(url, apikey, session=async_get_clientsession(hass)) - coordinator = EmoncmsCoordinator(hass, emoncms_client, scan_interval) - await coordinator.async_refresh() + if exclude_feeds is None and include_only_feeds is None: + return + + coordinator = entry.runtime_data elems = coordinator.data if not elems: return @@ -97,28 +154,15 @@ async def async_setup_platform( sensors: list[EmonCmsSensor] = [] for idx, elem in enumerate(elems): - if exclude_feeds is not None and int(elem["id"]) in exclude_feeds: + if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds: continue - if include_only_feeds is not None and int(elem["id"]) not in include_only_feeds: - continue - - name = None - if sensor_names is not None: - name = sensor_names.get(int(elem["id"]), None) - - if unit := elem.get("unit"): - unit_of_measurement = unit - else: - unit_of_measurement = config_unit - sensors.append( EmonCmsSensor( coordinator, + entry.entry_id, + elem["unit"], name, - value_template, - unit_of_measurement, - str(sensorid), idx, ) ) @@ -131,10 +175,9 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): def __init__( self, coordinator: EmoncmsCoordinator, - name: str | None, - value_template: template.Template | None, + entry_id: str, unit_of_measurement: str | None, - sensorid: str, + name: str, idx: int, ) -> None: """Initialize the sensor.""" @@ -143,20 +186,9 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): elem = {} if self.coordinator.data: elem = self.coordinator.data[self.idx] - if name is None: - # Suppress ID in sensor name if it's 1, since most people won't - # have more than one EmonCMS source and it's redundant to show the - # ID if there's only one. 
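# Note on the naming change in this hunk: the block being removed here built
# entity names like "EmonCMS parameter 1" (inserting the configured sensor id
# unless it was "1"), while the replacement below uses
# f"{name} {elem[FEED_NAME]}" with name = sensor_name(url), which the updated
# snapshot later in this patch renders as "emoncms@1.1.1.1 parameter 1".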
- id_for_name = "" if str(sensorid) == "1" else sensorid - # Use the feed name assigned in EmonCMS or fall back to the feed ID - feed_name = elem.get("name", f"Feed {elem.get('id')}") - self._attr_name = f"EmonCMS{id_for_name} {feed_name}" - else: - self._attr_name = name - self._value_template = value_template + self._attr_name = f"{name} {elem[FEED_NAME]}" self._attr_native_unit_of_measurement = unit_of_measurement - self._sensorid = sensorid - + self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}" if unit_of_measurement in ("kWh", "Wh"): self._attr_device_class = SensorDeviceClass.ENERGY self._attr_state_class = SensorStateClass.TOTAL_INCREASING @@ -186,9 +218,9 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): def _update_attributes(self, elem: dict[str, Any]) -> None: """Update entity attributes.""" self._attr_extra_state_attributes = { - ATTR_FEEDID: elem["id"], - ATTR_TAG: elem["tag"], - ATTR_FEEDNAME: elem["name"], + ATTR_FEEDID: elem[FEED_ID], + ATTR_TAG: elem[FEED_TAG], + ATTR_FEEDNAME: elem[FEED_NAME], } if elem["value"] is not None: self._attr_extra_state_attributes[ATTR_SIZE] = elem["size"] @@ -199,13 +231,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity): ) self._attr_native_value = None - if self._value_template is not None: - self._attr_native_value = ( - self._value_template.async_render_with_possible_json_value( - elem["value"], STATE_UNKNOWN - ) - ) - elif elem["value"] is not None: + if elem["value"] is not None: self._attr_native_value = round(float(elem["value"]), DECIMALS) @callback diff --git a/homeassistant/components/emoncms/strings.json b/homeassistant/components/emoncms/strings.json new file mode 100644 index 00000000000..4a700cc8981 --- /dev/null +++ b/homeassistant/components/emoncms/strings.json @@ -0,0 +1,40 @@ +{ + "config": { + "step": { + "user": { + "data": { + "url": "[%key:common::config_flow::data::url%]", + "api_key": "[%key:common::config_flow::data::api_key%]" + }, + "data_description": { + "url": "Server url starting with the protocol (http or https)", + "api_key": "Your 32 bits api key" + } + }, + "choose_feeds": { + "data": { + "include_only_feed_id": "Choose feeds to include" + } + } + } + }, + "options": { + "step": { + "init": { + "data": { + "include_only_feed_id": "[%key:component::emoncms::config::step::choose_feeds::data::include_only_feed_id%]" + } + } + } + }, + "issues": { + "remove_value_template": { + "title": "The {domain} integration cannot start", + "description": "Configuring {domain} using YAML is being removed and the `{parameter}` parameter cannot be imported.\n\nPlease remove `{parameter}` from your `{domain}` yaml configuration and restart Home Assistant\n\nAlternatively, you may entirely remove the `{domain}` configuration from your configuration.yaml, restart Home Assistant, and add the {domain} integration manually." + }, + "missing_include_only_feed_id": { + "title": "No feed synchronized with the {domain} sensor", + "description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration." 
+ } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 5f46cb1013e..e78df5ab045 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -157,6 +157,7 @@ FLOWS = { "elkm1", "elmax", "elvia", + "emoncms", "emonitor", "emulated_roku", "energenie_power_sockets", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index e379851b37f..879012ae54b 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -1569,7 +1569,7 @@ "integrations": { "emoncms": { "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_polling", "name": "Emoncms" }, diff --git a/tests/components/emoncms/__init__.py b/tests/components/emoncms/__init__.py index ecf3c54e9ed..59dc4fa08e1 100644 --- a/tests/components/emoncms/__init__.py +++ b/tests/components/emoncms/__init__.py @@ -1 +1,12 @@ """Tests for the emoncms component.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Set up the integration.""" + entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/emoncms/conftest.py b/tests/components/emoncms/conftest.py index 500fff228e9..29e86f3c59d 100644 --- a/tests/components/emoncms/conftest.py +++ b/tests/components/emoncms/conftest.py @@ -1,10 +1,23 @@ """Fixtures for emoncms integration tests.""" -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Generator +import copy from unittest.mock import AsyncMock, patch import pytest +from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.const import ( + CONF_API_KEY, + CONF_ID, + CONF_PLATFORM, + CONF_URL, + CONF_VALUE_TEMPLATE, +) +from homeassistant.helpers.typing import ConfigType + +from tests.common import MockConfigEntry + UNITS = ["kWh", "Wh", "W", "V", "A", "VA", "°C", "°F", "K", "Hz", "hPa", ""] @@ -29,16 +42,102 @@ FEEDS = [get_feed(i + 1, unit=unit) for i, unit in enumerate(UNITS)] EMONCMS_FAILURE = {"success": False, "message": "failure"} +FLOW_RESULT = { + CONF_API_KEY: "my_api_key", + CONF_ONLY_INCLUDE_FEEDID: [str(i + 1) for i in range(len(UNITS))], + CONF_URL: "http://1.1.1.1", +} + +SENSOR_NAME = "emoncms@1.1.1.1" + +YAML_BASE = { + CONF_PLATFORM: "emoncms", + CONF_API_KEY: "my_api_key", + CONF_ID: 1, + CONF_URL: "http://1.1.1.1", +} + +YAML = { + **YAML_BASE, + CONF_ONLY_INCLUDE_FEEDID: [1], +} + + +@pytest.fixture +def emoncms_yaml_config() -> ConfigType: + """Mock emoncms yaml configuration.""" + return {"sensor": YAML} + + +@pytest.fixture +def emoncms_yaml_config_with_template() -> ConfigType: + """Mock emoncms yaml conf with template parameter.""" + return {"sensor": {**YAML, CONF_VALUE_TEMPLATE: "{{ value | float + 1500 }}"}} + + +@pytest.fixture +def emoncms_yaml_config_no_include_only_feed_id() -> ConfigType: + """Mock emoncms yaml configuration without include_only_feed_id parameter.""" + return {"sensor": YAML_BASE} + + +@pytest.fixture +def config_entry() -> MockConfigEntry: + """Mock emoncms config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT, + ) + + +FLOW_RESULT_NO_FEED = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_NO_FEED[CONF_ONLY_INCLUDE_FEEDID] = None + 
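# The YAML fixtures above keep integer feed ids ([1]) while FLOW_RESULT uses
# strings ("1" ... "12"): async_step_import in the new config flow stringifies
# the ids before creating the entry. A minimal illustrative sketch of that
# mapping (the helper name is ours; it mirrors the
# list(map(str, import_info[CONF_ONLY_INCLUDE_FEEDID])) call in config_flow.py):
def _imported_feed_ids(yaml_feed_ids: list[int]) -> list[str]:
    """Sketch only: how YAML feed ids end up stored in the entry data."""
    return [str(feed_id) for feed_id in yaml_feed_ids]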
+ +@pytest.fixture +def config_no_feed() -> MockConfigEntry: + """Mock emoncms config entry with no feed selected.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_NO_FEED, + ) + + +FLOW_RESULT_SINGLE_FEED = copy.deepcopy(FLOW_RESULT) +FLOW_RESULT_SINGLE_FEED[CONF_ONLY_INCLUDE_FEEDID] = ["1"] + + +@pytest.fixture +def config_single_feed() -> MockConfigEntry: + """Mock emoncms config entry with a single feed exposed.""" + return MockConfigEntry( + domain=DOMAIN, + title=SENSOR_NAME, + data=FLOW_RESULT_SINGLE_FEED, + entry_id="XXXXXXXX", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.emoncms.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + @pytest.fixture async def emoncms_client() -> AsyncGenerator[AsyncMock]: """Mock pyemoncms success response.""" with ( patch( - "homeassistant.components.emoncms.sensor.EmoncmsClient", autospec=True + "homeassistant.components.emoncms.EmoncmsClient", autospec=True ) as mock_client, patch( - "homeassistant.components.emoncms.coordinator.EmoncmsClient", + "homeassistant.components.emoncms.config_flow.EmoncmsClient", new=mock_client, ), ): diff --git a/tests/components/emoncms/snapshots/test_sensor.ambr b/tests/components/emoncms/snapshots/test_sensor.ambr index 62c85aaba01..5e718c1d8e8 100644 --- a/tests/components/emoncms/snapshots/test_sensor.ambr +++ b/tests/components/emoncms/snapshots/test_sensor.ambr @@ -1,5 +1,40 @@ # serializer version: 1 -# name: test_coordinator_update[sensor.emoncms_parameter_1] +# name: test_coordinator_update[sensor.emoncms_1_1_1_1_parameter_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.emoncms_1_1_1_1_parameter_1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'emoncms@1.1.1.1 parameter 1', + 'platform': 'emoncms', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'XXXXXXXX-1', + 'unit_of_measurement': , + }) +# --- +# name: test_coordinator_update[sensor.emoncms_1_1_1_1_parameter_1-state] StateSnapshot({ 'attributes': ReadOnlyDict({ 'FeedId': '1', @@ -10,12 +45,12 @@ 'Tag': 'tag', 'UserId': '1', 'device_class': 'temperature', - 'friendly_name': 'EmonCMS parameter 1', + 'friendly_name': 'emoncms@1.1.1.1 parameter 1', 'state_class': , 'unit_of_measurement': , }), 'context': , - 'entity_id': 'sensor.emoncms_parameter_1', + 'entity_id': 'sensor.emoncms_1_1_1_1_parameter_1', 'last_changed': , 'last_reported': , 'last_updated': , diff --git a/tests/components/emoncms/test_config_flow.py b/tests/components/emoncms/test_config_flow.py new file mode 100644 index 00000000000..17ec32a9008 --- /dev/null +++ b/tests/components/emoncms/test_config_flow.py @@ -0,0 +1,143 @@ +"""Test emoncms config flow.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.const import CONF_API_KEY, CONF_URL +from homeassistant.core import HomeAssistant +from 
homeassistant.data_entry_flow import FlowResultType + +from . import setup_integration +from .conftest import EMONCMS_FAILURE, FLOW_RESULT_SINGLE_FEED, SENSOR_NAME, YAML + +from tests.common import MockConfigEntry + + +async def test_flow_import_include_feeds( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, +) -> None: + """YAML import with included feed - success test.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == SENSOR_NAME + assert result["data"] == FLOW_RESULT_SINGLE_FEED + + +async def test_flow_import_failure( + hass: HomeAssistant, + emoncms_client: AsyncMock, +) -> None: + """YAML import - failure test.""" + emoncms_client.async_request.return_value = EMONCMS_FAILURE + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == EMONCMS_FAILURE["message"] + + +async def test_flow_import_already_configured( + hass: HomeAssistant, + config_entry: MockConfigEntry, + emoncms_client: AsyncMock, +) -> None: + """Test we abort import data set when entry is already configured.""" + config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data=YAML, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +USER_INPUT = { + CONF_URL: "http://1.1.1.1", + CONF_API_KEY: "my_api_key", +} + + +async def test_user_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, +) -> None: + """Test we get the user form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + USER_INPUT, + ) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_ONLY_INCLUDE_FEEDID: ["1"]}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == SENSOR_NAME + assert result["data"] == {**USER_INPUT, CONF_ONLY_INCLUDE_FEEDID: ["1"]} + assert len(mock_setup_entry.mock_calls) == 1 + + +USER_OPTIONS = { + CONF_ONLY_INCLUDE_FEEDID: ["1"], +} + +CONFIG_ENTRY = { + CONF_API_KEY: "my_api_key", + CONF_ONLY_INCLUDE_FEEDID: ["1"], + CONF_URL: "http://1.1.1.1", +} + + +async def test_options_flow( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Options flow - success test.""" + await setup_integration(hass, config_entry) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input=USER_OPTIONS, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == CONFIG_ENTRY + assert config_entry.options == CONFIG_ENTRY + + +async def test_options_flow_failure( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + emoncms_client: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Options flow - test failure.""" + emoncms_client.async_request.return_value = EMONCMS_FAILURE + await 
setup_integration(hass, config_entry) + result = await hass.config_entries.options.async_init(config_entry.entry_id) + await hass.async_block_till_done() + assert result["errors"]["base"] == "failure" + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" diff --git a/tests/components/emoncms/test_init.py b/tests/components/emoncms/test_init.py new file mode 100644 index 00000000000..b89b6e65a66 --- /dev/null +++ b/tests/components/emoncms/test_init.py @@ -0,0 +1,40 @@ +"""Test Emoncms component setup process.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import setup_integration +from .conftest import EMONCMS_FAILURE + +from tests.common import MockConfigEntry + + +async def test_load_unload_entry( + hass: HomeAssistant, + config_entry: MockConfigEntry, + emoncms_client: AsyncMock, +) -> None: + """Test load and unload entry.""" + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_failure( + hass: HomeAssistant, + config_entry: MockConfigEntry, + emoncms_client: AsyncMock, +) -> None: + """Test load failure.""" + emoncms_client.async_request.return_value = EMONCMS_FAILURE + config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/emoncms/test_sensor.py b/tests/components/emoncms/test_sensor.py index a039239077e..a7bc8059287 100644 --- a/tests/components/emoncms/test_sensor.py +++ b/tests/components/emoncms/test_sensor.py @@ -1,54 +1,112 @@ """Test emoncms sensor.""" -from typing import Any from unittest.mock import AsyncMock from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.emoncms.const import CONF_ONLY_INCLUDE_FEEDID, DOMAIN +from homeassistant.components.emoncms.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_ID, CONF_PLATFORM, CONF_URL -from homeassistant.core import HomeAssistant +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component -from .conftest import EMONCMS_FAILURE, FEEDS, get_feed +from . 
import setup_integration +from .conftest import EMONCMS_FAILURE, get_feed -from tests.common import async_fire_time_changed - -YAML = { - CONF_PLATFORM: "emoncms", - CONF_API_KEY: "my_api_key", - CONF_ID: 1, - CONF_URL: "http://1.1.1.1", - CONF_ONLY_INCLUDE_FEEDID: [1, 2], - "scan_interval": 30, -} +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform -@pytest.fixture -def emoncms_yaml_config() -> ConfigType: - """Mock emoncms configuration from yaml.""" - return {"sensor": YAML} +async def test_deprecated_yaml( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + emoncms_yaml_config: ConfigType, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when we import from yaml config.""" + + await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=HOMEASSISTANT_DOMAIN, issue_id=f"deprecated_yaml_{DOMAIN}" + ) -def get_entity_ids(feeds: list[dict[str, Any]]) -> list[str]: - """Get emoncms entity ids.""" - return [ - f"{SENSOR_DOMAIN}.{DOMAIN}_{feed["name"].replace(' ', '_')}" for feed in feeds - ] +async def test_yaml_with_template( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + emoncms_yaml_config_with_template: ConfigType, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when we import a yaml config with a value_template parameter.""" + + await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config_with_template) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"remove_value_template_{DOMAIN}" + ) -def get_feeds(nbs: list[int]) -> list[dict[str, Any]]: - """Get feeds.""" - return [feed for feed in FEEDS if feed["id"] in str(nbs)] +async def test_yaml_no_include_only_feed_id( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + emoncms_yaml_config_no_include_only_feed_id: ConfigType, + emoncms_client: AsyncMock, +) -> None: + """Test an issue is created when we import a yaml config without a include_only_feed_id parameter.""" + + await async_setup_component( + hass, SENSOR_DOMAIN, emoncms_yaml_config_no_include_only_feed_id + ) + await hass.async_block_till_done() + + assert issue_registry.async_get_issue( + domain=DOMAIN, issue_id=f"missing_include_only_feed_id_{DOMAIN}" + ) + + +async def test_no_feed_selected( + hass: HomeAssistant, + config_no_feed: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test with no feed selected.""" + await setup_integration(hass, config_no_feed) + + assert config_no_feed.state is ConfigEntryState.LOADED + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_no_feed.entry_id + ) + assert entity_entries == [] + + +async def test_no_feed_broadcast( + hass: HomeAssistant, + config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + emoncms_client: AsyncMock, +) -> None: + """Test with no feed broadcasted.""" + emoncms_client.async_request.return_value = {"success": True, "message": []} + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.LOADED + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_entry.entry_id + ) + assert entity_entries == [] async def test_coordinator_update( hass: HomeAssistant, - emoncms_yaml_config: ConfigType, + config_single_feed: MockConfigEntry, + entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, 
emoncms_client: AsyncMock, caplog: pytest.LogCaptureFixture, @@ -59,12 +117,11 @@ async def test_coordinator_update( "success": True, "message": [get_feed(1, unit="°C")], } - await async_setup_component(hass, SENSOR_DOMAIN, emoncms_yaml_config) - await hass.async_block_till_done() - feeds = get_feeds([1]) - for entity_id in get_entity_ids(feeds): - state = hass.states.get(entity_id) - assert state == snapshot(name=entity_id) + await setup_integration(hass, config_single_feed) + + await snapshot_platform( + hass, entity_registry, snapshot, config_single_feed.entry_id + ) async def skip_time() -> None: freezer.tick(60) @@ -78,8 +135,12 @@ async def test_coordinator_update( await skip_time() - for entity_id in get_entity_ids(feeds): - state = hass.states.get(entity_id) + entity_entries = er.async_entries_for_config_entry( + entity_registry, config_single_feed.entry_id + ) + + for entity_entry in entity_entries: + state = hass.states.get(entity_entry.entity_id) assert state.attributes["LastUpdated"] == 1665509670 assert state.state == "24.04" From 00533bae4ba42be8ad33c50fb8eadcd982af0be5 Mon Sep 17 00:00:00 2001 From: Alex Wijnholds <1023654+Alexwijn@users.noreply.github.com> Date: Tue, 3 Sep 2024 17:44:20 +0200 Subject: [PATCH 1392/1635] Add support for total YouTube views (#123144) * Add support for retrieving the total views of a channel. * Add missing tests * Re-order imports * Another run on code format * Add missing translation * Update YouTube test snapshots --- homeassistant/components/youtube/const.py | 1 + .../components/youtube/coordinator.py | 2 ++ homeassistant/components/youtube/sensor.py | 10 +++++++ homeassistant/components/youtube/strings.json | 3 +- .../youtube/snapshots/test_diagnostics.ambr | 1 + .../youtube/snapshots/test_sensor.ambr | 30 +++++++++++++++++++ tests/components/youtube/test_sensor.py | 15 ++++++++++ 7 files changed, 61 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/youtube/const.py b/homeassistant/components/youtube/const.py index a663c487d0a..da5a554f364 100644 --- a/homeassistant/components/youtube/const.py +++ b/homeassistant/components/youtube/const.py @@ -15,6 +15,7 @@ AUTH = "auth" LOGGER = logging.getLogger(__package__) ATTR_TITLE = "title" +ATTR_TOTAL_VIEWS = "total_views" ATTR_LATEST_VIDEO = "latest_video" ATTR_SUBSCRIBER_COUNT = "subscriber_count" ATTR_DESCRIPTION = "description" diff --git a/homeassistant/components/youtube/coordinator.py b/homeassistant/components/youtube/coordinator.py index 4599342c84d..0da480f1169 100644 --- a/homeassistant/components/youtube/coordinator.py +++ b/homeassistant/components/youtube/coordinator.py @@ -22,6 +22,7 @@ from .const import ( ATTR_SUBSCRIBER_COUNT, ATTR_THUMBNAIL, ATTR_TITLE, + ATTR_TOTAL_VIEWS, ATTR_VIDEO_ID, CONF_CHANNELS, DOMAIN, @@ -68,6 +69,7 @@ class YouTubeDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): ATTR_ICON: channel.snippet.thumbnails.get_highest_quality().url, ATTR_LATEST_VIDEO: latest_video, ATTR_SUBSCRIBER_COUNT: channel.statistics.subscriber_count, + ATTR_TOTAL_VIEWS: channel.statistics.view_count, } except UnauthorizedError as err: raise ConfigEntryAuthFailed from err diff --git a/homeassistant/components/youtube/sensor.py b/homeassistant/components/youtube/sensor.py index bc69f92e8fd..8832382508c 100644 --- a/homeassistant/components/youtube/sensor.py +++ b/homeassistant/components/youtube/sensor.py @@ -20,6 +20,7 @@ from .const import ( ATTR_SUBSCRIBER_COUNT, ATTR_THUMBNAIL, ATTR_TITLE, + ATTR_TOTAL_VIEWS, ATTR_VIDEO_ID, COORDINATOR, DOMAIN, @@ -58,6 
+59,15 @@ SENSOR_TYPES = [ entity_picture_fn=lambda channel: channel[ATTR_ICON], attributes_fn=None, ), + YouTubeSensorEntityDescription( + key="views", + translation_key="views", + native_unit_of_measurement="views", + available_fn=lambda _: True, + value_fn=lambda channel: channel[ATTR_TOTAL_VIEWS], + entity_picture_fn=lambda channel: channel[ATTR_ICON], + attributes_fn=None, + ), ] diff --git a/homeassistant/components/youtube/strings.json b/homeassistant/components/youtube/strings.json index d664e2f15e7..5902d3a4482 100644 --- a/homeassistant/components/youtube/strings.json +++ b/homeassistant/components/youtube/strings.json @@ -46,7 +46,8 @@ "published_at": { "name": "Published at" } } }, - "subscribers": { "name": "Subscribers" } + "subscribers": { "name": "Subscribers" }, + "views": { "name": "Views" } } } } diff --git a/tests/components/youtube/snapshots/test_diagnostics.ambr b/tests/components/youtube/snapshots/test_diagnostics.ambr index a938cb8daad..50dc2757e8c 100644 --- a/tests/components/youtube/snapshots/test_diagnostics.ambr +++ b/tests/components/youtube/snapshots/test_diagnostics.ambr @@ -12,6 +12,7 @@ }), 'subscriber_count': 2290000, 'title': 'Google for Developers', + 'total_views': 214141263, }), }) # --- diff --git a/tests/components/youtube/snapshots/test_sensor.ambr b/tests/components/youtube/snapshots/test_sensor.ambr index cddfa6f6a3d..dce546b4803 100644 --- a/tests/components/youtube/snapshots/test_sensor.ambr +++ b/tests/components/youtube/snapshots/test_sensor.ambr @@ -30,6 +30,21 @@ 'state': '2290000', }) # --- +# name: test_sensor.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://yt3.ggpht.com/fca_HuJ99xUxflWdex0XViC3NfctBFreIl8y4i9z411asnGTWY-Ql3MeH_ybA4kNaOjY7kyA=s800-c-k-c0x00ffffff-no-rj', + 'friendly_name': 'Google for Developers Views', + 'unit_of_measurement': 'views', + }), + 'context': , + 'entity_id': 'sensor.google_for_developers_views', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '214141263', + }) +# --- # name: test_sensor_without_uploaded_video StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -58,3 +73,18 @@ 'state': '2290000', }) # --- +# name: test_sensor_without_uploaded_video.2 + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'entity_picture': 'https://yt3.ggpht.com/fca_HuJ99xUxflWdex0XViC3NfctBFreIl8y4i9z411asnGTWY-Ql3MeH_ybA4kNaOjY7kyA=s800-c-k-c0x00ffffff-no-rj', + 'friendly_name': 'Google for Developers Views', + 'unit_of_measurement': 'views', + }), + 'context': , + 'entity_id': 'sensor.google_for_developers_views', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '214141263', + }) +# --- diff --git a/tests/components/youtube/test_sensor.py b/tests/components/youtube/test_sensor.py index ae0c38306e4..e883347c8db 100644 --- a/tests/components/youtube/test_sensor.py +++ b/tests/components/youtube/test_sensor.py @@ -29,6 +29,9 @@ async def test_sensor( state = hass.states.get("sensor.google_for_developers_subscribers") assert state == snapshot + state = hass.states.get("sensor.google_for_developers_views") + assert state == snapshot + async def test_sensor_without_uploaded_video( hass: HomeAssistant, snapshot: SnapshotAssertion, setup_integration: ComponentSetup @@ -52,6 +55,9 @@ async def test_sensor_without_uploaded_video( state = hass.states.get("sensor.google_for_developers_subscribers") assert state == snapshot + state = hass.states.get("sensor.google_for_developers_views") + assert state == snapshot + async def test_sensor_updating( hass: HomeAssistant, 
setup_integration: ComponentSetup @@ -95,6 +101,9 @@ async def test_sensor_reauth_trigger( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "2290000" + state = hass.states.get("sensor.google_for_developers_views") + assert state.state == "214141263" + mock.set_thrown_exception(UnauthorizedError()) future = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, future) @@ -121,6 +130,9 @@ async def test_sensor_unavailable( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "2290000" + state = hass.states.get("sensor.google_for_developers_views") + assert state.state == "214141263" + mock.set_thrown_exception(YouTubeBackendError()) future = dt_util.utcnow() + timedelta(minutes=15) async_fire_time_changed(hass, future) @@ -131,3 +143,6 @@ async def test_sensor_unavailable( state = hass.states.get("sensor.google_for_developers_subscribers") assert state.state == "unavailable" + + state = hass.states.get("sensor.google_for_developers_views") + assert state.state == "unavailable" From 8f26cff65af78c5bb78c064bb728279a893096ba Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Tue, 3 Sep 2024 13:19:30 -0400 Subject: [PATCH 1393/1635] Enable strict typing for the Squeezebox integration (#125161) * Strict typing for squeezebox * Improve unit tests * Refactor tests to use websockets and services.async_call * Apply suggestions from code review * Fix merge conflict --- .strict-typing | 1 + .../components/squeezebox/browse_media.py | 45 +++++++--- .../components/squeezebox/config_flow.py | 22 +++-- .../components/squeezebox/media_player.py | 88 +++++++++++-------- mypy.ini | 10 +++ 5 files changed, 106 insertions(+), 60 deletions(-) diff --git a/.strict-typing b/.strict-typing index 797a1b51293..1d73b05fdea 100644 --- a/.strict-typing +++ b/.strict-typing @@ -416,6 +416,7 @@ homeassistant.components.solarlog.* homeassistant.components.sonarr.* homeassistant.components.speedtestdotnet.* homeassistant.components.sql.* +homeassistant.components.squeezebox.* homeassistant.components.ssdp.* homeassistant.components.starlink.* homeassistant.components.statistics.* diff --git a/homeassistant/components/squeezebox/browse_media.py b/homeassistant/components/squeezebox/browse_media.py index f68624f8f06..61ae7b7a403 100644 --- a/homeassistant/components/squeezebox/browse_media.py +++ b/homeassistant/components/squeezebox/browse_media.py @@ -1,14 +1,21 @@ """Support for media browsing.""" +from __future__ import annotations + import contextlib +from typing import Any + +from pysqueezebox import Player from homeassistant.components import media_source from homeassistant.components.media_player import ( BrowseError, BrowseMedia, MediaClass, + MediaPlayerEntity, MediaType, ) +from homeassistant.core import HomeAssistant from homeassistant.helpers.network import is_internal_request LIBRARY = ["Favorites", "Artists", "Albums", "Tracks", "Playlists", "Genres"] @@ -36,7 +43,7 @@ SQUEEZEBOX_ID_BY_TYPE = { "Favorites": "item_id", } -CONTENT_TYPE_MEDIA_CLASS = { +CONTENT_TYPE_MEDIA_CLASS: dict[str | MediaType, dict[str, MediaClass | None]] = { "Favorites": {"item": MediaClass.DIRECTORY, "children": MediaClass.TRACK}, "Artists": {"item": MediaClass.DIRECTORY, "children": MediaClass.ARTIST}, "Albums": {"item": MediaClass.DIRECTORY, "children": MediaClass.ALBUM}, @@ -66,14 +73,18 @@ CONTENT_TYPE_TO_CHILD_TYPE = { BROWSE_LIMIT = 1000 -async def build_item_response(entity, player, payload): +async 
def build_item_response( + entity: MediaPlayerEntity, player: Player, payload: dict[str, str | None] +) -> BrowseMedia: """Create response payload for search described by payload.""" internal_request = is_internal_request(entity.hass) search_id = payload["search_id"] search_type = payload["search_type"] - + assert ( + search_type is not None + ) # async_browse_media will not call this function if search_type is None media_class = CONTENT_TYPE_MEDIA_CLASS[search_type] children = None @@ -95,9 +106,9 @@ async def build_item_response(entity, player, payload): children = [] for item in result["items"]: item_id = str(item["id"]) - item_thumbnail = None + item_thumbnail: str | None = None if item_type: - child_item_type = item_type + child_item_type: MediaType | str = item_type child_media_class = CONTENT_TYPE_MEDIA_CLASS[item_type] can_expand = child_media_class["children"] is not None can_play = True @@ -120,7 +131,7 @@ async def build_item_response(entity, player, payload): can_expand = False can_play = True - if artwork_track_id := item.get("artwork_track_id"): + if artwork_track_id := item.get("artwork_track_id") and item_type: if internal_request: item_thumbnail = player.generate_image_url_from_track_id( artwork_track_id @@ -132,6 +143,7 @@ async def build_item_response(entity, player, payload): else: item_thumbnail = item.get("image_url") # will not be proxied by HA + assert child_media_class["item"] is not None children.append( BrowseMedia( title=item["title"], @@ -147,6 +159,9 @@ async def build_item_response(entity, player, payload): if children is None: raise BrowseError(f"Media not found: {search_type} / {search_id}") + assert media_class["item"] is not None + if not search_id: + search_id = search_type return BrowseMedia( title=result.get("title"), media_class=media_class["item"], @@ -159,9 +174,9 @@ async def build_item_response(entity, player, payload): ) -async def library_payload(hass, player): +async def library_payload(hass: HomeAssistant, player: Player) -> BrowseMedia: """Create response payload to describe contents of library.""" - library_info = { + library_info: dict[str, Any] = { "title": "Music Library", "media_class": MediaClass.DIRECTORY, "media_content_id": "library", @@ -179,6 +194,7 @@ async def library_payload(hass, player): limit=1, ) if result is not None and result.get("items") is not None: + assert media_class["children"] is not None library_info["children"].append( BrowseMedia( title=item, @@ -191,14 +207,14 @@ async def library_payload(hass, player): ) with contextlib.suppress(media_source.BrowseError): - item = await media_source.async_browse_media( + browse = await media_source.async_browse_media( hass, None, content_filter=media_source_content_filter ) # If domain is None, it's overview of available sources - if item.domain is None: - library_info["children"].extend(item.children) + if browse.domain is None: + library_info["children"].extend(browse.children) else: - library_info["children"].append(item) + library_info["children"].append(browse) return BrowseMedia(**library_info) @@ -208,7 +224,7 @@ def media_source_content_filter(item: BrowseMedia) -> bool: return item.media_content_type.startswith("audio/") -async def generate_playlist(player, payload): +async def generate_playlist(player: Player, payload: dict[str, str]) -> list | None: """Generate playlist from browsing payload.""" media_type = payload["search_type"] media_id = payload["search_id"] @@ -221,5 +237,6 @@ async def generate_playlist(player, payload): "titles", limit=BROWSE_LIMIT, 
browse_id=browse_id ) if result and "items" in result: - return result["items"] + items: list = result["items"] + return items raise BrowseError(f"Media not found: {media_type} / {media_id}") diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index fe57b12516a..c372c7262d4 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -1,5 +1,7 @@ """Config flow for Squeezebox integration.""" +from __future__ import annotations + import asyncio from http import HTTPStatus import logging @@ -24,9 +26,11 @@ _LOGGER = logging.getLogger(__name__) TIMEOUT = 5 -def _base_schema(discovery_info=None): +def _base_schema( + discovery_info: dict[str, Any] | None = None, +) -> vol.Schema: """Generate base schema.""" - base_schema = {} + base_schema: dict[Any, Any] = {} if discovery_info and CONF_HOST in discovery_info: base_schema.update( { @@ -71,14 +75,14 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize an instance of the squeezebox config flow.""" self.data_schema = _base_schema() - self.discovery_info = None + self.discovery_info: dict[str, Any] | None = None - async def _discover(self, uuid=None): + async def _discover(self, uuid: str | None = None) -> None: """Discover an unconfigured LMS server.""" self.discovery_info = None discovery_event = asyncio.Event() - def _discovery_callback(server): + def _discovery_callback(server: Server) -> None: if server.uuid: # ignore already configured uuids for entry in self._async_current_entries(): @@ -156,7 +160,9 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_edit(self, user_input=None): + async def async_step_edit( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Edit a discovered or manually inputted server.""" errors = {} if user_input: @@ -171,7 +177,9 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): step_id="edit", data_schema=self.data_schema, errors=errors ) - async def async_step_integration_discovery(self, discovery_info): + async def async_step_integration_discovery( + self, discovery_info: dict[str, Any] + ) -> ConfigFlowResult: """Handle discovery of a server.""" _LOGGER.debug("Reached server discovery flow with info: %s", discovery_info) if "uuid" in discovery_info: diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 279e51485f0..5fa132533d1 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -8,12 +8,13 @@ import json import logging from typing import Any -from pysqueezebox import Player, async_discover +from pysqueezebox import Player, Server, async_discover import voluptuous as vol from homeassistant.components import media_source from homeassistant.components.media_player import ( ATTR_MEDIA_ENQUEUE, + BrowseMedia, MediaPlayerEnqueue, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -87,7 +88,7 @@ SQUEEZEBOX_MODE = { async def start_server_discovery(hass: HomeAssistant) -> None: """Start a server discovery task.""" - def _discovered_server(server): + def _discovered_server(server: Server) -> None: discovery_flow.async_create_flow( hass, DOMAIN, @@ -118,10 +119,10 @@ async def async_setup_entry( known_players = hass.data[DOMAIN].setdefault(KNOWN_PLAYERS, []) lms = entry.runtime_data - async def _player_discovery(now=None): + async def 
_player_discovery(now: datetime | None = None) -> None: """Discover squeezebox players by polling server.""" - async def _discovered_player(player): + async def _discovered_player(player: Player) -> None: """Handle a (re)discovered player.""" entity = next( ( @@ -234,7 +235,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): ) @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return device-specific attributes.""" return { attr: getattr(self, attr) @@ -243,12 +244,13 @@ class SqueezeBoxEntity(MediaPlayerEntity): } @callback - def rediscovered(self, unique_id, connected): + def rediscovered(self, unique_id: str, connected: bool) -> None: """Make a player available again.""" if unique_id == self.unique_id and connected: self._attr_available = True _LOGGER.debug("Player %s is available again", self.name) - self._remove_dispatcher() + if self._remove_dispatcher: + self._remove_dispatcher() @property def state(self) -> MediaPlayerState | None: @@ -288,22 +290,22 @@ class SqueezeBoxEntity(MediaPlayerEntity): return None @property - def is_volume_muted(self): + def is_volume_muted(self) -> bool: """Return true if volume is muted.""" - return self._player.muting + return bool(self._player.muting) @property - def media_content_id(self): + def media_content_id(self) -> str | None: """Content ID of current playing media.""" if not self._player.playlist: return None if len(self._player.playlist) > 1: urls = [{"url": track["url"]} for track in self._player.playlist] return json.dumps({"index": self._player.current_index, "urls": urls}) - return self._player.url + return str(self._player.url) @property - def media_content_type(self): + def media_content_type(self) -> MediaType | None: """Content type of current playing media.""" if not self._player.playlist: return None @@ -312,47 +314,47 @@ class SqueezeBoxEntity(MediaPlayerEntity): return MediaType.MUSIC @property - def media_duration(self): + def media_duration(self) -> int | None: """Duration of current playing media in seconds.""" - return self._player.duration + return int(self._player.duration) @property - def media_position(self): + def media_position(self) -> int | None: """Position of current playing media in seconds.""" - return self._player.time + return int(self._player.time) @property - def media_position_updated_at(self): + def media_position_updated_at(self) -> datetime | None: """Last time status was updated.""" return self._last_update @property - def media_image_url(self): + def media_image_url(self) -> str | None: """Image url of current playing media.""" - return self._player.image_url + return str(self._player.image_url) @property - def media_title(self): + def media_title(self) -> str | None: """Title of current playing media.""" - return self._player.title + return str(self._player.title) @property - def media_channel(self): + def media_channel(self) -> str | None: """Channel (e.g. 
webradio name) of current playing media.""" - return self._player.remote_title + return str(self._player.remote_title) @property - def media_artist(self): + def media_artist(self) -> str | None: """Artist of current playing media.""" - return self._player.artist + return str(self._player.artist) @property - def media_album_name(self): + def media_album_name(self) -> str | None: """Album of current playing media.""" - return self._player.album + return str(self._player.album) @property - def repeat(self): + def repeat(self) -> RepeatMode: """Repeat setting.""" if self._player.repeat == "song": return RepeatMode.ONE @@ -361,13 +363,13 @@ class SqueezeBoxEntity(MediaPlayerEntity): return RepeatMode.OFF @property - def shuffle(self): + def shuffle(self) -> bool: """Boolean if shuffle is enabled.""" # Squeezebox has a third shuffle mode (album) not recognized by Home Assistant - return self._player.shuffle == "song" + return bool(self._player.shuffle == "song") @property - def group_members(self): + def group_members(self) -> list[str]: """List players we are synced with.""" player_ids = { p.unique_id: p.entity_id for p in self.hass.data[DOMAIN][KNOWN_PLAYERS] @@ -379,12 +381,12 @@ class SqueezeBoxEntity(MediaPlayerEntity): ] @property - def sync_group(self): + def sync_group(self) -> list[str]: """List players we are synced with. Deprecated.""" return self.group_members @property - def query_result(self): + def query_result(self) -> dict | bool: """Return the result from the call_query service.""" return self._query_result @@ -477,7 +479,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): try: # a saved playlist by number payload = { - "search_id": int(media_id), + "search_id": media_id, "search_type": MediaType.PLAYLIST, } playlist = await generate_playlist(self._player, payload) @@ -519,7 +521,9 @@ class SqueezeBoxEntity(MediaPlayerEntity): """Send the media player the command for clear playlist.""" await self._player.async_clear_playlist() - async def async_call_method(self, command, parameters=None): + async def async_call_method( + self, command: str, parameters: list[str] | None = None + ) -> None: """Call Squeezebox JSON/RPC method. Additional parameters are added to the command to form the list of @@ -530,7 +534,9 @@ class SqueezeBoxEntity(MediaPlayerEntity): all_params.extend(parameters) await self._player.async_query(*all_params) - async def async_call_query(self, command, parameters=None): + async def async_call_query( + self, command: str, parameters: list[str] | None = None + ) -> None: """Call Squeezebox JSON/RPC method where we care about the result. Additional parameters are added to the command to form the list of @@ -560,7 +566,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): "Could not find player_id for %s. Not syncing", other_player ) - async def async_sync(self, other_player): + async def async_sync(self, other_player: str) -> None: """Sync this Squeezebox player to another. Deprecated.""" _LOGGER.warning( "Service squeezebox.sync is deprecated; use media_player.join_players" @@ -572,7 +578,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): """Unsync this Squeezebox player.""" await self._player.async_unsync() - async def async_unsync(self): + async def async_unsync(self) -> None: """Unsync this Squeezebox player. 
Deprecated.""" _LOGGER.warning( "Service squeezebox.unsync is deprecated; use media_player.unjoin_player" @@ -580,7 +586,11 @@ class SqueezeBoxEntity(MediaPlayerEntity): ) await self.async_unjoin_player() - async def async_browse_media(self, media_content_type=None, media_content_id=None): + async def async_browse_media( + self, + media_content_type: MediaType | str | None = None, + media_content_id: str | None = None, + ) -> BrowseMedia: """Implement the websocket media browsing helper.""" _LOGGER.debug( "Reached async_browse_media with content_type %s and content_id %s", diff --git a/mypy.ini b/mypy.ini index c29db45cd53..4ba1f41f4d4 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3916,6 +3916,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.squeezebox.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.ssdp.*] check_untyped_defs = true disallow_incomplete_defs = true From 8e03f3a04525b40c9d0af52343e578ce19a5a43d Mon Sep 17 00:00:00 2001 From: mvn23 Date: Tue, 3 Sep 2024 19:19:43 +0200 Subject: [PATCH 1394/1635] Update opentherm_gw tests to avoid patching internals (#125152) * Update tests to avoid patching internals * * Use fixtures for tests * Update variable names in tests for clarity * Use hass.config_entries.async_setup instead of setup.async_setup_component --- tests/components/opentherm_gw/conftest.py | 41 ++++ .../opentherm_gw/test_config_flow.py | 223 +++++++----------- tests/components/opentherm_gw/test_init.py | 88 +++---- 3 files changed, 157 insertions(+), 195 deletions(-) create mode 100644 tests/components/opentherm_gw/conftest.py diff --git a/tests/components/opentherm_gw/conftest.py b/tests/components/opentherm_gw/conftest.py new file mode 100644 index 00000000000..057f47a169d --- /dev/null +++ b/tests/components/opentherm_gw/conftest.py @@ -0,0 +1,41 @@ +"""Test configuration for opentherm_gw.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from pyotgw.vars import OTGW, OTGW_ABOUT +import pytest + +VERSION_TEST = "4.2.5" +MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_TEST}"}} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.opentherm_gw.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_pyotgw() -> Generator[MagicMock]: + """Mock a pyotgw.OpenThermGateway object.""" + with ( + patch( + "homeassistant.components.opentherm_gw.OpenThermGateway", + return_value=MagicMock( + connect=AsyncMock(return_value=MINIMAL_STATUS), + set_control_setpoint=AsyncMock(), + set_max_relative_mod=AsyncMock(), + disconnect=AsyncMock(), + ), + ) as mock_gateway, + patch( + "homeassistant.components.opentherm_gw.config_flow.pyotgw.OpenThermGateway", + new=mock_gateway, + ), + ): + yield mock_gateway diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index 504a97dc953..4f4a6cfce31 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -1,8 +1,7 @@ """Test the Opentherm Gateway config flow.""" -from unittest.mock import patch +from unittest.mock import 
AsyncMock, MagicMock -from pyotgw.vars import OTGW, OTGW_ABOUT from serial import SerialException from homeassistant import config_entries @@ -25,10 +24,12 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: "OpenTherm Gateway 4.2.5"}} - -async def test_form_user(hass: HomeAssistant) -> None: +async def test_form_user( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -37,27 +38,10 @@ async def test_form_user(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with ( - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS - ) as mock_pyotgw_connect, - patch( - "pyotgw.OpenThermGateway.disconnect", return_value=None - ) as mock_pyotgw_disconnect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) - await hass.async_block_till_done() + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) + await hass.async_block_till_done() assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["title"] == "Test Entry 1" @@ -66,37 +50,21 @@ async def test_form_user(hass: HomeAssistant) -> None: CONF_DEVICE: "/dev/ttyUSB0", CONF_ID: "test_entry_1", } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_pyotgw_connect.mock_calls) == 1 - assert len(mock_pyotgw_disconnect.mock_calls) == 1 + assert mock_pyotgw.return_value.connect.await_count == 1 + assert mock_pyotgw.return_value.disconnect.await_count == 1 -async def test_form_import(hass: HomeAssistant) -> None: +async def test_form_import( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test import from existing config.""" - - with ( - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS - ) as mock_pyotgw_connect, - patch( - "pyotgw.OpenThermGateway.disconnect", return_value=None - ) as mock_pyotgw_disconnect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, - ) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": config_entries.SOURCE_IMPORT}, + data={CONF_ID: "legacy_gateway", CONF_DEVICE: "/dev/ttyUSB1"}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "legacy_gateway" @@ -105,13 +73,15 @@ async def test_form_import(hass: HomeAssistant) -> None: CONF_DEVICE: "/dev/ttyUSB1", CONF_ID: "legacy_gateway", } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) 
== 1 - assert len(mock_pyotgw_connect.mock_calls) == 1 - assert len(mock_pyotgw_disconnect.mock_calls) == 1 + assert mock_pyotgw.return_value.connect.await_count == 1 + assert mock_pyotgw.return_value.disconnect.await_count == 1 -async def test_form_duplicate_entries(hass: HomeAssistant) -> None: +async def test_form_duplicate_entries( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test duplicate device or id errors.""" flow1 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -123,87 +93,76 @@ async def test_form_duplicate_entries(hass: HomeAssistant) -> None: DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "homeassistant.components.opentherm_gw.async_setup", - return_value=True, - ) as mock_setup, - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS - ) as mock_pyotgw_connect, - patch( - "pyotgw.OpenThermGateway.disconnect", return_value=None - ) as mock_pyotgw_disconnect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result1 = await hass.config_entries.flow.async_configure( - flow1["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) - result2 = await hass.config_entries.flow.async_configure( - flow2["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB1"} - ) - result3 = await hass.config_entries.flow.async_configure( - flow3["flow_id"], {CONF_NAME: "Test Entry 2", CONF_DEVICE: "/dev/ttyUSB0"} - ) + result1 = await hass.config_entries.flow.async_configure( + flow1["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) assert result1["type"] is FlowResultType.CREATE_ENTRY + + result2 = await hass.config_entries.flow.async_configure( + flow2["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB1"} + ) assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": "id_exists"} + + result3 = await hass.config_entries.flow.async_configure( + flow3["flow_id"], {CONF_NAME: "Test Entry 2", CONF_DEVICE: "/dev/ttyUSB0"} + ) assert result3["type"] is FlowResultType.FORM assert result3["errors"] == {"base": "already_configured"} - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_pyotgw_connect.mock_calls) == 1 - assert len(mock_pyotgw_disconnect.mock_calls) == 1 + + assert mock_pyotgw.return_value.connect.await_count == 1 + assert mock_pyotgw.return_value.disconnect.await_count == 1 -async def test_form_connection_timeout(hass: HomeAssistant) -> None: +async def test_form_connection_timeout( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test we handle connection timeout.""" - result = await hass.config_entries.flow.async_init( + flow = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "pyotgw.OpenThermGateway.connect", side_effect=(TimeoutError) - ) as mock_connect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_NAME: "Test Entry 1", CONF_DEVICE: "socket://192.0.2.254:1234"}, - ) + mock_pyotgw.return_value.connect.side_effect = TimeoutError - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": 
"timeout_connect"} - assert len(mock_connect.mock_calls) == 1 + result = await hass.config_entries.flow.async_configure( + flow["flow_id"], + {CONF_NAME: "Test Entry 1", CONF_DEVICE: "socket://192.0.2.254:1234"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "timeout_connect"} + + assert mock_pyotgw.return_value.connect.await_count == 1 -async def test_form_connection_error(hass: HomeAssistant) -> None: +async def test_form_connection_error( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test we handle serial connection error.""" - result = await hass.config_entries.flow.async_init( + flow = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - with ( - patch( - "pyotgw.OpenThermGateway.connect", side_effect=(SerialException) - ) as mock_connect, - patch("pyotgw.status.StatusManager._process_updates", return_value=None), - ): - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} - ) + mock_pyotgw.return_value.connect.side_effect = SerialException - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - assert len(mock_connect.mock_calls) == 1 + result = await hass.config_entries.flow.async_configure( + flow["flow_id"], {CONF_NAME: "Test Entry 1", CONF_DEVICE: "/dev/ttyUSB0"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert mock_pyotgw.return_value.connect.await_count == 1 -async def test_options_form(hass: HomeAssistant) -> None: +async def test_options_form( + hass: HomeAssistant, + mock_pyotgw: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: """Test the options form.""" entry = MockConfigEntry( domain=DOMAIN, @@ -217,23 +176,17 @@ async def test_options_form(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - with ( - patch("homeassistant.components.opentherm_gw.async_setup", return_value=True), - patch( - "homeassistant.components.opentherm_gw.async_setup_entry", return_value=True - ), - ): - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() - result = await hass.config_entries.options.async_init( + flow = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "init" + assert flow["type"] is FlowResultType.FORM + assert flow["step_id"] == "init" result = await hass.config_entries.options.async_configure( - result["flow_id"], + flow["flow_id"], user_input={ CONF_FLOOR_TEMP: True, CONF_READ_PRECISION: PRECISION_HALVES, @@ -248,12 +201,12 @@ async def test_options_form(hass: HomeAssistant) -> None: assert result["data"][CONF_TEMPORARY_OVRD_MODE] is True assert result["data"][CONF_FLOOR_TEMP] is True - result = await hass.config_entries.options.async_init( + flow = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) result = await hass.config_entries.options.async_configure( - result["flow_id"], user_input={CONF_READ_PRECISION: 0} + flow["flow_id"], user_input={CONF_READ_PRECISION: 0} ) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -262,12 +215,12 @@ async def test_options_form(hass: HomeAssistant) -> None: assert 
result["data"][CONF_TEMPORARY_OVRD_MODE] is True assert result["data"][CONF_FLOOR_TEMP] is True - result = await hass.config_entries.options.async_init( + flow = await hass.config_entries.options.async_init( entry.entry_id, context={"source": "test"}, data=None ) result = await hass.config_entries.options.async_configure( - result["flow_id"], + flow["flow_id"], user_input={ CONF_FLOOR_TEMP: False, CONF_READ_PRECISION: PRECISION_TENTHS, diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index ed829cb1986..2116967d720 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -1,11 +1,9 @@ """Test Opentherm Gateway init.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import MagicMock from pyotgw.vars import OTGW, OTGW_ABOUT -import pytest -from homeassistant import setup from homeassistant.components.opentherm_gw.const import ( DOMAIN, OpenThermDeviceIdentifier, @@ -14,11 +12,11 @@ from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er +from .conftest import VERSION_TEST + from tests.common import MockConfigEntry -VERSION_OLD = "4.2.5" VERSION_NEW = "4.2.8.1" -MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_OLD}"}} MINIMAL_STATUS_UPD = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_NEW}"}} MOCK_GATEWAY_ID = "mock_gateway" MOCK_CONFIG_ENTRY = MockConfigEntry( @@ -33,35 +31,28 @@ MOCK_CONFIG_ENTRY = MockConfigEntry( ) -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_registry_insert( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is initialized correctly.""" MOCK_CONFIG_ENTRY.add_to_hass(hass) - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", - return_value=None, - ), - patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS), - ): - await setup.async_setup_component(hass, DOMAIN, {}) - + await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) await hass.async_block_till_done() gw_dev = device_registry.async_get_device( identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} ) - assert gw_dev.sw_version == VERSION_OLD + assert gw_dev is not None + assert gw_dev.sw_version == VERSION_TEST -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_device_registry_update( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is updated correctly.""" MOCK_CONFIG_ENTRY.add_to_hass(hass) @@ -74,19 +65,14 @@ async def test_device_registry_update( name="Mock Gateway", manufacturer="Schelte Bron", model="OpenTherm Gateway", - sw_version=VERSION_OLD, + sw_version=VERSION_TEST, ) - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", - return_value=None, - ), - patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS_UPD), - ): - await setup.async_setup_component(hass, DOMAIN, {}) + mock_pyotgw.return_value.connect.return_value = MINIMAL_STATUS_UPD + 
await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) await hass.async_block_till_done() + gw_dev = device_registry.async_get_device( identifiers={(DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}")} ) @@ -96,7 +82,9 @@ async def test_device_registry_update( # Device migration test can be removed in 2025.4.0 async def test_device_migration( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is updated correctly.""" MOCK_CONFIG_ENTRY.add_to_hass(hass) @@ -109,22 +97,10 @@ async def test_device_migration( name="Mock Gateway", manufacturer="Schelte Bron", model="OpenTherm Gateway", - sw_version=VERSION_OLD, + sw_version=VERSION_TEST, ) - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGateway", - return_value=MagicMock( - connect=AsyncMock(return_value=MINIMAL_STATUS_UPD), - set_control_setpoint=AsyncMock(), - set_max_relative_mod=AsyncMock(), - disconnect=AsyncMock(), - ), - ), - ): - await setup.async_setup_component(hass, DOMAIN, {}) - + await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) await hass.async_block_till_done() assert ( @@ -158,7 +134,9 @@ async def test_device_migration( # Entity migration test can be removed in 2025.4.0 async def test_climate_entity_migration( - hass: HomeAssistant, entity_registry: er.EntityRegistry + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_pyotgw: MagicMock, ) -> None: """Test that the climate entity unique_id gets migrated correctly.""" MOCK_CONFIG_ENTRY.add_to_hass(hass) @@ -168,22 +146,12 @@ async def test_climate_entity_migration( unique_id=MOCK_CONFIG_ENTRY.data[CONF_ID], ) - with ( - patch( - "homeassistant.components.opentherm_gw.OpenThermGateway", - return_value=MagicMock( - connect=AsyncMock(return_value=MINIMAL_STATUS_UPD), - set_control_setpoint=AsyncMock(), - set_max_relative_mod=AsyncMock(), - disconnect=AsyncMock(), - ), - ), - ): - await setup.async_setup_component(hass, DOMAIN, {}) - + await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) await hass.async_block_till_done() + updated_entry = entity_registry.async_get(entry.entity_id) + assert updated_entry is not None assert ( - entity_registry.async_get(entry.entity_id).unique_id + updated_entry.unique_id == f"{MOCK_CONFIG_ENTRY.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" ) From 7b35c3036e0b2e918f176473d40908ac7071ebf3 Mon Sep 17 00:00:00 2001 From: Nerdix <70015952+N3rdix@users.noreply.github.com> Date: Tue, 3 Sep 2024 19:47:00 +0200 Subject: [PATCH 1395/1635] Enhance error handling when changing a timer's duration (#121786) * Update remaining before checking duration * fix comment * calculation based on transient field * lint * remove useless brackets --- homeassistant/components/timer/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/timer/__init__.py b/homeassistant/components/timer/__init__.py index c2057551239..19b1de427ef 100644 --- a/homeassistant/components/timer/__init__.py +++ b/homeassistant/components/timer/__init__.py @@ -338,7 +338,9 @@ class Timer(collection.CollectionEntity, RestoreEntity): raise HomeAssistantError( f"Timer {self.entity_id} is not running, only active timers can be changed" ) - if self._remaining and (self._remaining + duration) > self._running_duration: + # Check against new remaining time before checking boundaries + new_remaining = 
(self._end + duration) - dt_util.utcnow().replace(microsecond=0) + if self._remaining and new_remaining > self._running_duration: raise HomeAssistantError( f"Not possible to change timer {self.entity_id} beyond duration" ) @@ -349,7 +351,7 @@ class Timer(collection.CollectionEntity, RestoreEntity): self._listener() self._end += duration - self._remaining = self._end - dt_util.utcnow().replace(microsecond=0) + self._remaining = new_remaining self.async_write_ha_state() self.hass.bus.async_fire(EVENT_TIMER_CHANGED, {ATTR_ENTITY_ID: self.entity_id}) self._listener = async_track_point_in_utc_time( From 3137c27e5604b99ea4c548b41c85ccb18668320a Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Tue, 3 Sep 2024 13:50:44 -0400 Subject: [PATCH 1396/1635] Fix type errors in squeezebox (#125166) --- homeassistant/components/squeezebox/media_player.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 5fa132533d1..8607e72a67c 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -314,14 +314,14 @@ class SqueezeBoxEntity(MediaPlayerEntity): return MediaType.MUSIC @property - def media_duration(self) -> int | None: + def media_duration(self) -> int: """Duration of current playing media in seconds.""" - return int(self._player.duration) + return int(self._player.duration) if self._player.duration else 0 @property - def media_position(self) -> int | None: + def media_position(self) -> int: """Position of current playing media in seconds.""" - return int(self._player.time) + return int(self._player.time) if self._player.time else 0 @property def media_position_updated_at(self) -> datetime | None: From 61a722218a019f4a1ac2879285a0ca0beecc2a01 Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Tue, 3 Sep 2024 19:52:38 +0200 Subject: [PATCH 1397/1635] Update frontend to 20240903.1 (#125160) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 50bcb3b3d97..7b904cba999 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240902.0"] + "requirements": ["home-assistant-frontend==20240903.1"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 1729e6e8131..ddb96da6bff 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.4.0 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240902.0 +home-assistant-frontend==20240903.1 home-assistant-intents==2024.8.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 39b464c0561..7d72f29b74b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1108,7 +1108,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240902.0 +home-assistant-frontend==20240903.1 # 
homeassistant.components.conversation home-assistant-intents==2024.8.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1fc15b0f78f..b75dfa99638 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -934,7 +934,7 @@ hole==0.8.0 holidays==0.55 # homeassistant.components.frontend -home-assistant-frontend==20240902.0 +home-assistant-frontend==20240903.1 # homeassistant.components.conversation home-assistant-intents==2024.8.29 From 27032c1780f53d4fe8f1071b6d6bf9a2fb5df566 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 3 Sep 2024 07:53:10 -1000 Subject: [PATCH 1398/1635] Bump yalexs to 8.6.2 (#125162) changelog: https://github.com/bdraco/yalexs/compare/v8.6.0...v8.6.2 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index a40c6920136..42f97e56fd2 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -24,5 +24,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.6.0", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.2", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 030df50a482..0942dcb5dcb 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.6.0", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.2", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 7d72f29b74b..dd94c6838d9 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2991,7 +2991,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.6.0 +yalexs==8.6.2 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b75dfa99638..001c9390755 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2374,7 +2374,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.6.0 +yalexs==8.6.2 # homeassistant.components.yeelight yeelight==0.7.14 From be8f14167fc6e7e06974ba10a1dd532d31a489fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hans=20Kr=C3=B6ner?= Date: Tue, 3 Sep 2024 21:00:44 +0200 Subject: [PATCH 1399/1635] Expose UV Index in Met.no (#124992) UV Index now also appears in forecasts. 
--- homeassistant/components/met/const.py | 4 +++ homeassistant/components/met/weather.py | 8 ++++++ tests/components/met/conftest.py | 3 ++- tests/components/met/test_weather.py | 33 ++++++++++++++++++++++++- 4 files changed, 46 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/met/const.py b/homeassistant/components/met/const.py index c513e98504e..ccc0662b3c3 100644 --- a/homeassistant/components/met/const.py +++ b/homeassistant/components/met/const.py @@ -21,12 +21,14 @@ from homeassistant.components.weather import ( ATTR_FORECAST_NATIVE_WIND_SPEED, ATTR_FORECAST_PRECIPITATION_PROBABILITY, ATTR_FORECAST_TIME, + ATTR_FORECAST_UV_INDEX, ATTR_FORECAST_WIND_BEARING, ATTR_WEATHER_CLOUD_COVERAGE, ATTR_WEATHER_DEW_POINT, ATTR_WEATHER_HUMIDITY, ATTR_WEATHER_PRESSURE, ATTR_WEATHER_TEMPERATURE, + ATTR_WEATHER_UV_INDEX, ATTR_WEATHER_VISIBILITY, ATTR_WEATHER_WIND_BEARING, ATTR_WEATHER_WIND_GUST_SPEED, @@ -190,6 +192,7 @@ FORECAST_MAP = { ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: "wind_gust", ATTR_FORECAST_CLOUD_COVERAGE: "cloudiness", ATTR_FORECAST_HUMIDITY: "humidity", + ATTR_FORECAST_UV_INDEX: "uv_index", } ATTR_MAP = { @@ -202,4 +205,5 @@ ATTR_MAP = { ATTR_WEATHER_WIND_GUST_SPEED: "wind_gust", ATTR_WEATHER_CLOUD_COVERAGE: "cloudiness", ATTR_WEATHER_DEW_POINT: "dew_point", + ATTR_WEATHER_UV_INDEX: "uv_index", } diff --git a/homeassistant/components/met/weather.py b/homeassistant/components/met/weather.py index 809bb792b2c..7b95567366b 100644 --- a/homeassistant/components/met/weather.py +++ b/homeassistant/components/met/weather.py @@ -13,6 +13,7 @@ from homeassistant.components.weather import ( ATTR_WEATHER_HUMIDITY, ATTR_WEATHER_PRESSURE, ATTR_WEATHER_TEMPERATURE, + ATTR_WEATHER_UV_INDEX, ATTR_WEATHER_WIND_BEARING, ATTR_WEATHER_WIND_GUST_SPEED, ATTR_WEATHER_WIND_SPEED, @@ -208,6 +209,13 @@ class MetWeather(SingleCoordinatorWeatherEntity[MetDataUpdateCoordinator]): ATTR_MAP[ATTR_WEATHER_DEW_POINT] ) + @property + def uv_index(self) -> float | None: + """Return the uv index.""" + return self.coordinator.data.current_weather_data.get( + ATTR_MAP[ATTR_WEATHER_UV_INDEX] + ) + def _forecast(self, hourly: bool) -> list[Forecast] | None: """Return the forecast array.""" if hourly: diff --git a/tests/components/met/conftest.py b/tests/components/met/conftest.py index 699c1c81795..92b81d3d320 100644 --- a/tests/components/met/conftest.py +++ b/tests/components/met/conftest.py @@ -17,8 +17,9 @@ def mock_weather(): "pressure": 100, "humidity": 50, "wind_speed": 10, - "wind_bearing": "NE", + "wind_bearing": 90, "dew_point": 12.1, + "uv_index": 1.1, } mock_data.get_forecast.return_value = {} yield mock_data diff --git a/tests/components/met/test_weather.py b/tests/components/met/test_weather.py index 80820ef0186..ac3904684e3 100644 --- a/tests/components/met/test_weather.py +++ b/tests/components/met/test_weather.py @@ -2,10 +2,22 @@ from homeassistant import config_entries from homeassistant.components.met import DOMAIN -from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN +from homeassistant.components.weather import ( + ATTR_CONDITION_CLOUDY, + ATTR_WEATHER_DEW_POINT, + ATTR_WEATHER_HUMIDITY, + ATTR_WEATHER_PRESSURE, + ATTR_WEATHER_TEMPERATURE, + ATTR_WEATHER_UV_INDEX, + ATTR_WEATHER_WIND_BEARING, + ATTR_WEATHER_WIND_SPEED, + DOMAIN as WEATHER_DOMAIN, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from . 
import init_integration + async def test_new_config_entry( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_weather @@ -36,6 +48,25 @@ async def test_legacy_config_entry( assert len(er.async_entries_for_config_entry(entity_registry, entry.entry_id)) == 1 +async def test_weather(hass: HomeAssistant, mock_weather) -> None: + """Test states of the weather.""" + + await init_integration(hass) + assert len(hass.states.async_entity_ids("weather")) == 1 + entity_id = hass.states.async_entity_ids("weather")[0] + + state = hass.states.get(entity_id) + assert state + assert state.state == ATTR_CONDITION_CLOUDY + assert state.attributes[ATTR_WEATHER_TEMPERATURE] == 15 + assert state.attributes[ATTR_WEATHER_PRESSURE] == 100 + assert state.attributes[ATTR_WEATHER_HUMIDITY] == 50 + assert state.attributes[ATTR_WEATHER_WIND_SPEED] == 10 + assert state.attributes[ATTR_WEATHER_WIND_BEARING] == 90 + assert state.attributes[ATTR_WEATHER_DEW_POINT] == 12.1 + assert state.attributes[ATTR_WEATHER_UV_INDEX] == 1.1 + + async def test_tracking_home(hass: HomeAssistant, mock_weather) -> None: """Test we track home.""" await hass.config_entries.flow.async_init("met", context={"source": "onboarding"}) From 14482ff6da4de11403aa1cd01dc2931521949d7f Mon Sep 17 00:00:00 2001 From: mvn23 Date: Tue, 3 Sep 2024 21:18:38 +0200 Subject: [PATCH 1400/1635] Update opentherm_gw tests to prepare for new platforms (#125172) Move MockConfigEntry to a fixture --- tests/components/opentherm_gw/conftest.py | 21 +++++++++++ tests/components/opentherm_gw/test_init.py | 43 +++++++++------------- 2 files changed, 39 insertions(+), 25 deletions(-) diff --git a/tests/components/opentherm_gw/conftest.py b/tests/components/opentherm_gw/conftest.py index 057f47a169d..9c90c74b04b 100644 --- a/tests/components/opentherm_gw/conftest.py +++ b/tests/components/opentherm_gw/conftest.py @@ -6,8 +6,14 @@ from unittest.mock import AsyncMock, MagicMock, patch from pyotgw.vars import OTGW, OTGW_ABOUT import pytest +from homeassistant.components.opentherm_gw import DOMAIN +from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME + +from tests.common import MockConfigEntry + VERSION_TEST = "4.2.5" MINIMAL_STATUS = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_TEST}"}} +MOCK_GATEWAY_ID = "mock_gateway" @pytest.fixture @@ -39,3 +45,18 @@ def mock_pyotgw() -> Generator[MagicMock]: ), ): yield mock_gateway + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock an OpenTherm Gateway config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Mock Gateway", + data={ + CONF_NAME: "Mock Gateway", + CONF_DEVICE: "/dev/null", + CONF_ID: MOCK_GATEWAY_ID, + }, + options={}, + ) diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index 2116967d720..4085e25c614 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -8,38 +8,28 @@ from homeassistant.components.opentherm_gw.const import ( DOMAIN, OpenThermDeviceIdentifier, ) -from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_NAME +from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import VERSION_TEST +from .conftest import MOCK_GATEWAY_ID, VERSION_TEST from tests.common import MockConfigEntry VERSION_NEW = "4.2.8.1" MINIMAL_STATUS_UPD = {OTGW: {OTGW_ABOUT: f"OpenTherm Gateway {VERSION_NEW}"}} -MOCK_GATEWAY_ID = "mock_gateway" 
-MOCK_CONFIG_ENTRY = MockConfigEntry( - domain=DOMAIN, - title="Mock Gateway", - data={ - CONF_NAME: "Mock Gateway", - CONF_DEVICE: "/dev/null", - CONF_ID: MOCK_GATEWAY_ID, - }, - options={}, -) async def test_device_registry_insert( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is initialized correctly.""" - MOCK_CONFIG_ENTRY.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() gw_dev = device_registry.async_get_device( @@ -52,13 +42,14 @@ async def test_device_registry_insert( async def test_device_registry_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is updated correctly.""" - MOCK_CONFIG_ENTRY.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) device_registry.async_get_or_create( - config_entry_id=MOCK_CONFIG_ENTRY.entry_id, + config_entry_id=mock_config_entry.entry_id, identifiers={ (DOMAIN, f"{MOCK_GATEWAY_ID}-{OpenThermDeviceIdentifier.GATEWAY}") }, @@ -70,7 +61,7 @@ async def test_device_registry_update( mock_pyotgw.return_value.connect.return_value = MINIMAL_STATUS_UPD - await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() gw_dev = device_registry.async_get_device( @@ -84,13 +75,14 @@ async def test_device_registry_update( async def test_device_migration( hass: HomeAssistant, device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, mock_pyotgw: MagicMock, ) -> None: """Test that the device registry is updated correctly.""" - MOCK_CONFIG_ENTRY.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) device_registry.async_get_or_create( - config_entry_id=MOCK_CONFIG_ENTRY.entry_id, + config_entry_id=mock_config_entry.entry_id, identifiers={ (DOMAIN, MOCK_GATEWAY_ID), }, @@ -100,7 +92,7 @@ async def test_device_migration( sw_version=VERSION_TEST, ) - await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() assert ( @@ -136,22 +128,23 @@ async def test_device_migration( async def test_climate_entity_migration( hass: HomeAssistant, entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, mock_pyotgw: MagicMock, ) -> None: """Test that the climate entity unique_id gets migrated correctly.""" - MOCK_CONFIG_ENTRY.add_to_hass(hass) + mock_config_entry.add_to_hass(hass) entry = entity_registry.async_get_or_create( domain="climate", platform="opentherm_gw", - unique_id=MOCK_CONFIG_ENTRY.data[CONF_ID], + unique_id=mock_config_entry.data[CONF_ID], ) - await hass.config_entries.async_setup(MOCK_CONFIG_ENTRY.entry_id) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() updated_entry = entity_registry.async_get(entry.entity_id) assert updated_entry is not None assert ( updated_entry.unique_id - == f"{MOCK_CONFIG_ENTRY.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" + == f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" ) From e4f9f6447f1a8048be1dafd3822b1f7819eedbaa Mon Sep 17 00:00:00 2001 From: Joakim Plate 
Date: Tue, 3 Sep 2024 21:45:43 +0200 Subject: [PATCH 1401/1635] Update gardena_bluetooth dependency to 1.4.3 (#125175) --- homeassistant/components/gardena_bluetooth/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index 4812def7dde..6d7566b3edf 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.2"] + "requirements": ["gardena-bluetooth==1.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index dd94c6838d9..29375b32d07 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -930,7 +930,7 @@ fyta_cli==0.6.6 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.2 +gardena-bluetooth==1.4.3 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 001c9390755..a9e4e868a9b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -783,7 +783,7 @@ fyta_cli==0.6.6 gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.2 +gardena-bluetooth==1.4.3 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 From 3a8039cbc06b3dd93b06676d63ecf70eaa934889 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Tue, 3 Sep 2024 10:18:19 -1000 Subject: [PATCH 1402/1635] Bump yalexs to 8.6.3 (#125176) Fixes the battery state not refreshing due to a refactoring error in the library. 
changelog: https://github.com/bdraco/yalexs/compare/v8.6.2...v8.6.3 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 42f97e56fd2..6635a95f1cf 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -24,5 +24,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.6.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.3", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index 0942dcb5dcb..fc93d259891 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.6.2", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.3", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index 29375b32d07..6ec38c80689 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2991,7 +2991,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.6.2 +yalexs==8.6.3 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index a9e4e868a9b..00802565a35 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2374,7 +2374,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.6.2 +yalexs==8.6.3 # homeassistant.components.yeelight yeelight==0.7.14 From 4aa86a574f7b066ded80664b3caf12de26f79ae4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Joakim=20S=C3=B8rensen?= Date: Tue, 3 Sep 2024 22:23:26 +0200 Subject: [PATCH 1403/1635] Add include-hidden-files to upload env_file artifact (#125179) --- .github/workflows/wheels.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 98585a97c6b..04e4391790a 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -86,6 +86,7 @@ jobs: with: name: env_file path: ./.env_file + include-hidden-files: true overwrite: true - name: Upload requirements_diff From cc3d059783551852fa799a5f47a3db103e389dba Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Tue, 3 Sep 2024 22:37:50 +0200 Subject: [PATCH 1404/1635] Refactor recorder EventIDPostMigration data migrator (#125126) --- .../components/recorder/migration.py | 89 +++++++++---------- .../components/recorder/test_v32_migration.py | 38 +++----- 2 files changed, 53 insertions(+), 74 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 213462e3731..890fc3045b2 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2137,50 +2137,6 @@ def post_migrate_entity_ids(instance: Recorder) -> bool: return is_done -@retryable_database_job("cleanup_legacy_event_ids") -def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: - """Remove old event_id index from states. 
- - We used to link states to events using the event_id column but we no - longer store state changed events in the events table. - - If all old states have been purged and existing states are in the new - format we can drop the index since it can take up ~10MB per 1M rows. - """ - session_maker = instance.get_session - _LOGGER.debug("Cleanup legacy entity_ids") - with session_scope(session=session_maker()) as session: - result = session.execute(has_used_states_event_ids()).scalar() - # In the future we may migrate existing states to the new format - # but in practice very few of these still exist in production and - # removing the index is the likely all that needs to happen. - all_gone = not result - - if all_gone: - # Only drop the index if there are no more event_ids in the states table - # ex all NULL - assert instance.engine is not None, "engine should never be None" - if instance.dialect_name == SupportedDialect.SQLITE: - # SQLite does not support dropping foreign key constraints - # so we have to rebuild the table - fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States) - else: - try: - _drop_foreign_key_constraints( - session_maker, instance.engine, TABLE_STATES, "event_id" - ) - except (InternalError, OperationalError): - fk_remove_ok = False - else: - fk_remove_ok = True - if fk_remove_ok: - _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) - instance.use_legacy_events_index = False - _mark_migration_done(session, EventIDPostMigration) - - return True - - def _initialize_database(session: Session) -> bool: """Initialize a new database. @@ -2635,9 +2591,50 @@ class EventIDPostMigration(BaseRunTimeMigration): migration_version = 2 @staticmethod + @retryable_database_job("cleanup_legacy_event_ids") def migrate_data(instance: Recorder) -> bool: - """Migrate some data, returns True if migration is completed.""" - return cleanup_legacy_states_event_ids(instance) + """Remove old event_id index from states, returns True if completed. + + We used to link states to events using the event_id column but we no + longer store state changed events in the events table. + + If all old states have been purged and existing states are in the new + format we can drop the index since it can take up ~10MB per 1M rows. + """ + session_maker = instance.get_session + _LOGGER.debug("Cleanup legacy entity_ids") + with session_scope(session=session_maker()) as session: + result = session.execute(has_used_states_event_ids()).scalar() + # In the future we may migrate existing states to the new format + # but in practice very few of these still exist in production and + # removing the index is the likely all that needs to happen. 
+ all_gone = not result + + if all_gone: + # Only drop the index if there are no more event_ids in the states table + # ex all NULL + assert instance.engine is not None, "engine should never be None" + if instance.dialect_name == SupportedDialect.SQLITE: + # SQLite does not support dropping foreign key constraints + # so we have to rebuild the table + fk_remove_ok = rebuild_sqlite_table( + session_maker, instance.engine, States + ) + else: + try: + _drop_foreign_key_constraints( + session_maker, instance.engine, TABLE_STATES, "event_id" + ) + except (InternalError, OperationalError): + fk_remove_ok = False + else: + fk_remove_ok = True + if fk_remove_ok: + _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) + instance.use_legacy_events_index = False + _mark_migration_done(session, EventIDPostMigration) + + return True @staticmethod def _legacy_event_id_foreign_key_exists(instance: Recorder) -> bool: diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 56aa6705688..1932860d845 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -113,6 +113,7 @@ async def test_migrate_times( patch.object(migration.StatesContextIDMigration, "migrate_data"), patch.object(migration.EventTypeIDMigration, "migrate_data"), patch.object(migration.EntityIDMigration, "migrate_data"), + patch.object(migration.EventIDPostMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), @@ -120,9 +121,6 @@ async def test_migrate_times( patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), ): async with ( async_test_home_assistant() as hass, @@ -264,9 +262,8 @@ async def test_migrate_can_resume_entity_id_post_migration( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), @@ -274,9 +271,6 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), ): async with ( async_test_home_assistant() as hass, @@ -386,9 +380,8 @@ async def test_migrate_can_resume_ix_states_event_id_removed( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, 
"EventData", old_db_schema.EventData), @@ -396,9 +389,6 @@ async def test_migrate_can_resume_ix_states_event_id_removed( patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), ): async with ( async_test_home_assistant() as hass, @@ -522,9 +512,8 @@ async def test_out_of_disk_space_while_rebuild_states_table( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), @@ -532,9 +521,6 @@ async def test_out_of_disk_space_while_rebuild_states_table( patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), ): async with ( async_test_home_assistant() as hass, @@ -654,7 +640,7 @@ async def test_out_of_disk_space_while_removing_foreign_key( Note that the test is somewhat forced; the states.event_id foreign key constraint is removed when migrating to schema version 46, inspecting the schema in - cleanup_legacy_states_event_ids is not likely to fail. + EventIDPostMigration.migrate_data, is not likely to fail. 
""" importlib.import_module(SCHEMA_MODULE_32) old_db_schema = sys.modules[SCHEMA_MODULE_32] @@ -702,9 +688,8 @@ async def test_out_of_disk_space_while_removing_foreign_key( with ( patch.object(recorder, "db_schema", old_db_schema), - patch.object( - recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION - ), + patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration.EventIDPostMigration, "migrate_data"), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), @@ -712,9 +697,6 @@ async def test_out_of_disk_space_while_removing_foreign_key( patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), - patch( - "homeassistant.components.recorder.migration.cleanup_legacy_states_event_ids" - ), ): async with ( async_test_home_assistant() as hass, From 50c1bf8bb0014de051159c8b30dc1d74e5ff790e Mon Sep 17 00:00:00 2001 From: Maciej Bieniek Date: Tue, 3 Sep 2024 22:38:07 +0200 Subject: [PATCH 1405/1635] Add re-auth flow to NextDNS integration (#125101) --- homeassistant/components/nextdns/__init__.py | 5 +- .../components/nextdns/config_flow.py | 66 +++++++++++++++---- .../components/nextdns/coordinator.py | 4 +- homeassistant/components/nextdns/strings.json | 8 ++- tests/components/nextdns/test_config_flow.py | 56 ++++++++++++++++ tests/components/nextdns/test_coordinator.py | 46 +++++++++++++ tests/components/nextdns/test_init.py | 34 +++++++++- 7 files changed, 202 insertions(+), 17 deletions(-) create mode 100644 tests/components/nextdns/test_coordinator.py diff --git a/homeassistant/components/nextdns/__init__.py b/homeassistant/components/nextdns/__init__.py index 4256126b3c7..7f0729bca1e 100644 --- a/homeassistant/components/nextdns/__init__.py +++ b/homeassistant/components/nextdns/__init__.py @@ -15,6 +15,7 @@ from nextdns import ( AnalyticsStatus, ApiError, ConnectionStatus, + InvalidApiKeyError, NextDns, Settings, ) @@ -23,7 +24,7 @@ from tenacity import RetryError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import ( @@ -88,6 +89,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: NextDnsConfigEntry) -> b nextdns = await NextDns.create(websession, api_key) except (ApiError, ClientConnectorError, RetryError, TimeoutError) as err: raise ConfigEntryNotReady from err + except InvalidApiKeyError as err: + raise ConfigEntryAuthFailed from err tasks = [] coordinators = {} diff --git a/homeassistant/components/nextdns/config_flow.py b/homeassistant/components/nextdns/config_flow.py index bd79112b1f9..80caba6ec7e 100644 --- a/homeassistant/components/nextdns/config_flow.py +++ b/homeassistant/components/nextdns/config_flow.py @@ -2,19 +2,30 @@ from __future__ import annotations -from typing import Any +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any from aiohttp.client_exceptions import ClientConnectorError from nextdns import ApiError, InvalidApiKeyError, NextDns from tenacity import RetryError import voluptuous as vol 
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_PROFILE_NAME +from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_PROFILE_ID, DOMAIN +AUTH_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) + + +async def async_init_nextdns(hass: HomeAssistant, api_key: str) -> NextDns: + """Check if credentials are valid.""" + websession = async_get_clientsession(hass) + + return await NextDns.create(websession, api_key) + class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): """Config flow for NextDNS.""" @@ -23,8 +34,9 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the config flow.""" - self.nextdns: NextDns | None = None - self.api_key: str | None = None + self.nextdns: NextDns + self.api_key: str + self.entry: ConfigEntry | None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -32,14 +44,10 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] = {} - websession = async_get_clientsession(self.hass) - if user_input is not None: self.api_key = user_input[CONF_API_KEY] try: - self.nextdns = await NextDns.create( - websession, user_input[CONF_API_KEY] - ) + self.nextdns = await async_init_nextdns(self.hass, self.api_key) except InvalidApiKeyError: errors["base"] = "invalid_api_key" except (ApiError, ClientConnectorError, RetryError, TimeoutError): @@ -51,7 +59,7 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}), + data_schema=AUTH_SCHEMA, errors=errors, ) @@ -61,8 +69,6 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): """Handle the profiles step.""" errors: dict[str, str] = {} - assert self.nextdns is not None - if user_input is not None: profile_name = user_input[CONF_PROFILE_NAME] profile_id = self.nextdns.get_profile_id(profile_name) @@ -86,3 +92,39 @@ class NextDnsFlowHandler(ConfigFlow, domain=DOMAIN): ), errors=errors, ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + await async_init_nextdns(self.hass, user_input[CONF_API_KEY]) + except InvalidApiKeyError: + errors["base"] = "invalid_api_key" + except (ApiError, ClientConnectorError, RetryError, TimeoutError): + errors["base"] = "cannot_connect" + except Exception: # noqa: BLE001 + errors["base"] = "unknown" + else: + if TYPE_CHECKING: + assert self.entry is not None + + return self.async_update_reload_and_abort( + self.entry, data={**self.entry.data, **user_input} + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=AUTH_SCHEMA, + errors=errors, + ) diff --git a/homeassistant/components/nextdns/coordinator.py b/homeassistant/components/nextdns/coordinator.py index 5210807bd3c..6b35e35a027 100644 --- a/homeassistant/components/nextdns/coordinator.py +++ 
b/homeassistant/components/nextdns/coordinator.py @@ -21,6 +21,7 @@ from nextdns.model import NextDnsData from tenacity import RetryError from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -62,10 +63,11 @@ class NextDnsUpdateCoordinator(DataUpdateCoordinator[CoordinatorDataT]): except ( ApiError, ClientConnectorError, - InvalidApiKeyError, RetryError, ) as err: raise UpdateFailed(err) from err + except InvalidApiKeyError as err: + raise ConfigEntryAuthFailed from err async def _async_update_data_internal(self) -> CoordinatorDataT: """Update data via library.""" diff --git a/homeassistant/components/nextdns/strings.json b/homeassistant/components/nextdns/strings.json index e0a37aad03b..9dbc8061849 100644 --- a/homeassistant/components/nextdns/strings.json +++ b/homeassistant/components/nextdns/strings.json @@ -10,6 +10,11 @@ "data": { "profile": "Profile" } + }, + "reauth_confirm": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } } }, "error": { @@ -18,7 +23,8 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "This NextDNS profile is already configured." + "already_configured": "This NextDNS profile is already configured.", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "system_health": { diff --git a/tests/components/nextdns/test_config_flow.py b/tests/components/nextdns/test_config_flow.py index 7571eef347e..2a51c6821fc 100644 --- a/tests/components/nextdns/test_config_flow.py +++ b/tests/components/nextdns/test_config_flow.py @@ -101,3 +101,59 @@ async def test_form_already_configured(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" + + +async def test_reauth_successful(hass: HomeAssistant) -> None: + """Test starting a reauthentication flow.""" + entry = await init_integration(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with ( + patch( + "homeassistant.components.nextdns.NextDns.get_profiles", + return_value=PROFILES, + ), + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_API_KEY: "new_api_key"}, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + +@pytest.mark.parametrize( + ("exc", "base_error"), + [ + (ApiError("API Error"), "cannot_connect"), + (InvalidApiKeyError, "invalid_api_key"), + (RetryError("Retry Error"), "cannot_connect"), + (TimeoutError, "cannot_connect"), + (ValueError, "unknown"), + ], +) +async def test_reauth_errors( + hass: HomeAssistant, exc: Exception, base_error: str +) -> None: + """Test reauthentication flow with errors.""" + entry = await init_integration(hass) + + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + with patch( + "homeassistant.components.nextdns.NextDns.get_profiles", side_effect=exc + ): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_API_KEY: "new_api_key"}, + ) + await hass.async_block_till_done() + + assert result["errors"] == {"base": 
base_error} diff --git a/tests/components/nextdns/test_coordinator.py b/tests/components/nextdns/test_coordinator.py new file mode 100644 index 00000000000..9613a6b423f --- /dev/null +++ b/tests/components/nextdns/test_coordinator.py @@ -0,0 +1,46 @@ +"""Tests for NextDNS coordinator.""" + +from datetime import timedelta +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from nextdns import InvalidApiKeyError + +from homeassistant.components.nextdns.const import DOMAIN +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.core import HomeAssistant + +from . import init_integration + +from tests.common import async_fire_time_changed + + +async def test_auth_error( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, +) -> None: + """Test authentication error when polling data.""" + entry = await init_integration(hass) + + assert entry.state is ConfigEntryState.LOADED + + freezer.tick(timedelta(minutes=10)) + with patch( + "homeassistant.components.nextdns.NextDns.connection_status", + side_effect=InvalidApiKeyError, + ): + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert entry.state is ConfigEntryState.LOADED + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id diff --git a/tests/components/nextdns/test_init.py b/tests/components/nextdns/test_init.py index 61a487d917c..0a0bf3fc487 100644 --- a/tests/components/nextdns/test_init.py +++ b/tests/components/nextdns/test_init.py @@ -2,12 +2,12 @@ from unittest.mock import patch -from nextdns import ApiError +from nextdns import ApiError, InvalidApiKeyError import pytest from tenacity import RetryError from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import CONF_API_KEY, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -59,3 +59,33 @@ async def test_unload_entry(hass: HomeAssistant) -> None: assert entry.state is ConfigEntryState.NOT_LOADED assert not hass.data.get(DOMAIN) + + +async def test_config_auth_failed(hass: HomeAssistant) -> None: + """Test for setup failure if the auth fails.""" + entry = MockConfigEntry( + domain=DOMAIN, + title="Fake Profile", + unique_id="xyz12", + data={CONF_API_KEY: "fake_api_key", CONF_PROFILE_ID: "xyz12"}, + ) + entry.add_to_hass(hass) + + with patch( + "homeassistant.components.nextdns.NextDns.get_profiles", + side_effect=InvalidApiKeyError, + ): + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow.get("step_id") == "reauth_confirm" + assert flow.get("handler") == DOMAIN + + assert "context" in flow + assert flow["context"].get("source") == SOURCE_REAUTH + assert flow["context"].get("entry_id") == entry.entry_id From cfe0c95c97629a61ee7039be43d80c5f777ff04e Mon Sep 17 00:00:00 2001 From: G Johansson Date: Tue, 3 Sep 2024 22:43:03 +0200 Subject: [PATCH 1406/1635] Bump python-holidays to 0.56 (#125182) --- homeassistant/components/holiday/manifest.json | 2 +- 
homeassistant/components/workday/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 0a3064450d4..0a2d98e71c5 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.55", "babel==2.15.0"] + "requirements": ["holidays==0.56", "babel==2.15.0"] } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index fafa870d00a..297b20b8c0e 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.55"] + "requirements": ["holidays==0.56"] } diff --git a/requirements_all.txt b/requirements_all.txt index 6ec38c80689..41b65c55f1a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1105,7 +1105,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.55 +holidays==0.56 # homeassistant.components.frontend home-assistant-frontend==20240903.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 00802565a35..7ddac14c2d8 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -931,7 +931,7 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.55 +holidays==0.56 # homeassistant.components.frontend home-assistant-frontend==20240903.1 From c4cfff4b3f6b0909756a2202c594d0c78164cc3c Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Tue, 3 Sep 2024 22:50:00 +0200 Subject: [PATCH 1407/1635] Add 100% coverage of Reolink update platform (#124521) * Add 100% update test coverage * Add assertion --- homeassistant/components/reolink/update.py | 6 +- tests/components/reolink/conftest.py | 1 + tests/components/reolink/test_update.py | 131 +++++++++++++++++++++ 3 files changed, 134 insertions(+), 4 deletions(-) create mode 100644 tests/components/reolink/test_update.py diff --git a/homeassistant/components/reolink/update.py b/homeassistant/components/reolink/update.py index 9b710c6576d..3c1e70612a7 100644 --- a/homeassistant/components/reolink/update.py +++ b/homeassistant/components/reolink/update.py @@ -137,8 +137,7 @@ class ReolinkUpdateEntity( async def async_release_notes(self) -> str | None: """Return the release notes.""" new_firmware = self._host.api.firmware_update_available(self._channel) - if not isinstance(new_firmware, NewSoftwareVersion): - return None + assert isinstance(new_firmware, NewSoftwareVersion) return ( "If the install button fails, download this" @@ -229,8 +228,7 @@ class ReolinkHostUpdateEntity( async def async_release_notes(self) -> str | None: """Return the release notes.""" new_firmware = self._host.api.firmware_update_available() - if not isinstance(new_firmware, NewSoftwareVersion): - return None + assert isinstance(new_firmware, NewSoftwareVersion) return ( "If the install button fails, download this" diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index be87aac9291..b0f599c28fb 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py 
@@ -35,6 +35,7 @@ TEST_PORT = 1234
 TEST_NVR_NAME = "test_reolink_name"
 TEST_NVR_NAME2 = "test2_reolink_name"
+TEST_CAM_NAME = "test_reolink_cam"
 TEST_USE_HTTPS = True
 TEST_HOST_MODEL = "RLN8-410"
 TEST_ITEM_NUMBER = "P000"
 
diff --git a/tests/components/reolink/test_update.py b/tests/components/reolink/test_update.py
new file mode 100644
index 00000000000..3ad10a11499
--- /dev/null
+++ b/tests/components/reolink/test_update.py
@@ -0,0 +1,131 @@
+"""Test the Reolink update platform."""
+
+from unittest.mock import MagicMock, patch
+
+from freezegun.api import FrozenDateTimeFactory
+import pytest
+from reolink_aio.exceptions import ReolinkError
+from reolink_aio.software_version import NewSoftwareVersion
+
+from homeassistant.components.reolink.update import POLL_AFTER_INSTALL
+from homeassistant.components.update import DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL
+from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+
+from .conftest import TEST_CAM_NAME, TEST_NVR_NAME
+
+from tests.common import MockConfigEntry, async_fire_time_changed
+from tests.typing import WebSocketGenerator
+
+TEST_DOWNLOAD_URL = "https://reolink.com/test"
+TEST_RELEASE_NOTES = "bugfix 1, bugfix 2"
+
+
+@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME])
+async def test_no_update(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    entity_name: str,
+) -> None:
+    """Test update state when no update available."""
+    reolink_connect.camera_name.return_value = TEST_CAM_NAME
+
+    with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    entity_id = f"{Platform.UPDATE}.{entity_name}_firmware"
+    assert hass.states.get(entity_id).state == STATE_OFF
+
+
+@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME])
+async def test_update_str(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    entity_name: str,
+) -> None:
+    """Test update state when update available with string from API."""
+    reolink_connect.camera_name.return_value = TEST_CAM_NAME
+    reolink_connect.firmware_update_available.return_value = "New firmware available"
+
+    with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    entity_id = f"{Platform.UPDATE}.{entity_name}_firmware"
+    assert hass.states.get(entity_id).state == STATE_ON
+
+
+@pytest.mark.parametrize("entity_name", [TEST_NVR_NAME, TEST_CAM_NAME])
+async def test_update_firm(
+    hass: HomeAssistant,
+    config_entry: MockConfigEntry,
+    reolink_connect: MagicMock,
+    hass_ws_client: WebSocketGenerator,
+    freezer: FrozenDateTimeFactory,
+    entity_name: str,
+) -> None:
+    """Test update state when update available with firmware info from reolink.com."""
+    reolink_connect.camera_name.return_value = TEST_CAM_NAME
+    new_firmware = NewSoftwareVersion(
+        version_string="v3.3.0.226_23031644",
+        download_url=TEST_DOWNLOAD_URL,
+        release_notes=TEST_RELEASE_NOTES,
+    )
+    reolink_connect.firmware_update_available.return_value = new_firmware
+
+    with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]):
+        assert await hass.config_entries.async_setup(config_entry.entry_id)
+        await hass.async_block_till_done()
+    assert config_entry.state is ConfigEntryState.LOADED
+
+    entity_id = f"{Platform.UPDATE}.{entity_name}_firmware"
+    assert hass.states.get(entity_id).state == STATE_ON
+
+    # release notes
+    client = await hass_ws_client(hass)
+    await hass.async_block_till_done()
+
+    await client.send_json(
+        {
+            "id": 1,
+            "type": "update/release_notes",
+            "entity_id": entity_id,
+        }
+    )
+    result = await client.receive_json()
+    assert TEST_DOWNLOAD_URL in result["result"]
+    assert TEST_RELEASE_NOTES in result["result"]
+
+    # test install
+    await hass.services.async_call(
+        UPDATE_DOMAIN,
+        SERVICE_INSTALL,
+        {ATTR_ENTITY_ID: entity_id},
+        blocking=True,
+    )
+    reolink_connect.update_firmware.assert_called()
+
+    reolink_connect.update_firmware.side_effect = ReolinkError("Test error")
+    with pytest.raises(HomeAssistantError):
+        await hass.services.async_call(
+            UPDATE_DOMAIN,
+            SERVICE_INSTALL,
+            {ATTR_ENTITY_ID: entity_id},
+            blocking=True,
+        )
+
+    # test _async_update_future
+    reolink_connect.camera_sw_version.return_value = "v3.3.0.226_23031644"
+    reolink_connect.firmware_update_available.return_value = False
+    freezer.tick(POLL_AFTER_INSTALL)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id).state == STATE_OFF

From d8382c6de26ed7e1f49043116a8e5f2c62967583 Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Tue, 3 Sep 2024 22:56:27 +0200
Subject: [PATCH 1408/1635] Improve recorder tests to check indices are removed (#125164)

---
 .../components/recorder/test_migration_from_schema_32.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py
index bc16eae3410..f1613909722 100644
--- a/tests/components/recorder/test_migration_from_schema_32.py
+++ b/tests/components/recorder/test_migration_from_schema_32.py
@@ -35,6 +35,7 @@ from homeassistant.components.recorder.queries import (
 from homeassistant.components.recorder.tasks import EntityIDPostMigrationTask
 from homeassistant.components.recorder.util import (
     execute_stmt_lambda_element,
+    get_index_by_name,
     session_scope,
 )
 from homeassistant.core import HomeAssistant
@@ -333,6 +334,10 @@ async def test_migrate_events_context_ids(
         == migration.EventsContextIDMigration.migration_version
     )
 
+    # Check the index which will be removed by the migrator no longer exists
+    with session_scope(hass=hass) as session:
+        assert get_index_by_name(session, "events", "ix_events_context_id") is None
+
 
 @pytest.mark.parametrize("enable_migrate_context_ids", [True])
 @pytest.mark.usefixtures("db_schema_32")
@@ -531,6 +536,10 @@ async def test_migrate_states_context_ids(
         == migration.StatesContextIDMigration.migration_version
     )
 
+    # Check the index which will be removed by the migrator no longer exists
+    with session_scope(hass=hass) as session:
+        assert get_index_by_name(session, "states", "ix_states_context_id") is None
+
 
 @pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
 @pytest.mark.usefixtures("db_schema_32")

From d5c2e6ec357c039172e116ebc4dcbbb3ed53ce7e Mon Sep 17 00:00:00 2001
From: Shay Levy
Date: Wed, 4 Sep 2024 00:20:25 +0300
Subject: [PATCH 1409/1635] Add myself as codeowner for BTHome (#125184)

---
 CODEOWNERS                                    | 4 ++--
 homeassistant/components/bthome/manifest.json | 2 +-
 2 files changed, 3
insertions(+), 3 deletions(-) diff --git a/CODEOWNERS b/CODEOWNERS index f4c7d972f7c..596795d4221 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -228,8 +228,8 @@ build.json @home-assistant/supervisor /homeassistant/components/bsblan/ @liudger /tests/components/bsblan/ @liudger /homeassistant/components/bt_smarthub/ @typhoon2099 -/homeassistant/components/bthome/ @Ernst79 -/tests/components/bthome/ @Ernst79 +/homeassistant/components/bthome/ @Ernst79 @thecode +/tests/components/bthome/ @Ernst79 @thecode /homeassistant/components/buienradar/ @mjj4791 @ties @Robbie1221 /tests/components/buienradar/ @mjj4791 @ties @Robbie1221 /homeassistant/components/button/ @home-assistant/core diff --git a/homeassistant/components/bthome/manifest.json b/homeassistant/components/bthome/manifest.json index 42fbe794918..ad06f648d14 100644 --- a/homeassistant/components/bthome/manifest.json +++ b/homeassistant/components/bthome/manifest.json @@ -15,7 +15,7 @@ "service_data_uuid": "0000fcd2-0000-1000-8000-00805f9b34fb" } ], - "codeowners": ["@Ernst79"], + "codeowners": ["@Ernst79", "@thecode"], "config_flow": true, "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/bthome", From af1af6f391cf3efd46646bd42163303ac388b3d4 Mon Sep 17 00:00:00 2001 From: Dian Date: Wed, 4 Sep 2024 14:11:32 +0800 Subject: [PATCH 1410/1635] Bump xiaomi-ble to 0.31.1 to add support for human presence sensor XMOSB01XS (#124751) --- .../components/xiaomi_ble/binary_sensor.py | 4 + .../components/xiaomi_ble/manifest.json | 2 +- homeassistant/components/xiaomi_ble/sensor.py | 18 +++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/xiaomi_ble/test_sensor.py | 110 ++++++++++++++++++ 6 files changed, 135 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/xiaomi_ble/binary_sensor.py b/homeassistant/components/xiaomi_ble/binary_sensor.py index 5336c4d8f7f..b853f83b967 100644 --- a/homeassistant/components/xiaomi_ble/binary_sensor.py +++ b/homeassistant/components/xiaomi_ble/binary_sensor.py @@ -50,6 +50,10 @@ BINARY_SENSOR_DESCRIPTIONS = { key=XiaomiBinarySensorDeviceClass.MOTION, device_class=BinarySensorDeviceClass.MOTION, ), + XiaomiBinarySensorDeviceClass.OCCUPANCY: BinarySensorEntityDescription( + key=XiaomiBinarySensorDeviceClass.OCCUPANCY, + device_class=BinarySensorDeviceClass.OCCUPANCY, + ), XiaomiBinarySensorDeviceClass.OPENING: BinarySensorEntityDescription( key=XiaomiBinarySensorDeviceClass.OPENING, device_class=BinarySensorDeviceClass.OPENING, diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index 21e9bc45bb8..da7169635e9 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.30.2"] + "requirements": ["xiaomi-ble==0.31.1"] } diff --git a/homeassistant/components/xiaomi_ble/sensor.py b/homeassistant/components/xiaomi_ble/sensor.py index 3108c285dbe..891caaf3e68 100644 --- a/homeassistant/components/xiaomi_ble/sensor.py +++ b/homeassistant/components/xiaomi_ble/sensor.py @@ -155,6 +155,24 @@ SENSOR_DESCRIPTIONS = { (ExtendedSensorDeviceClass.LOCK_METHOD, None): SensorEntityDescription( key=str(ExtendedSensorDeviceClass.LOCK_METHOD), icon="mdi:key-variant" ), + # Duration of detected status (in minutes) for Occpancy Sensor + ( 
+ ExtendedSensorDeviceClass.DURATION_DETECTED, + Units.TIME_MINUTES, + ): SensorEntityDescription( + key=str(ExtendedSensorDeviceClass.DURATION_DETECTED), + native_unit_of_measurement=UnitOfTime.MINUTES, + state_class=SensorStateClass.MEASUREMENT, + ), + # Duration of cleared status (in minutes) for Occpancy Sensor + ( + ExtendedSensorDeviceClass.DURATION_CLEARED, + Units.TIME_MINUTES, + ): SensorEntityDescription( + key=str(ExtendedSensorDeviceClass.DURATION_CLEARED), + native_unit_of_measurement=UnitOfTime.MINUTES, + state_class=SensorStateClass.MEASUREMENT, + ), } diff --git a/requirements_all.txt b/requirements_all.txt index 41b65c55f1a..5ca05016bf4 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2963,7 +2963,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.2 +xiaomi-ble==0.31.1 # homeassistant.components.knx xknx==3.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7ddac14c2d8..ae262c5d05a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2349,7 +2349,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.30.2 +xiaomi-ble==0.31.1 # homeassistant.components.knx xknx==3.1.1 diff --git a/tests/components/xiaomi_ble/test_sensor.py b/tests/components/xiaomi_ble/test_sensor.py index 4d9a29e3111..11a20a62d02 100644 --- a/tests/components/xiaomi_ble/test_sensor.py +++ b/tests/components/xiaomi_ble/test_sensor.py @@ -11,6 +11,7 @@ from homeassistant.components.xiaomi_ble.const import CONF_SLEEPY_DEVICE, DOMAIN from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, + STATE_ON, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -465,6 +466,115 @@ async def test_xiaomi_hhccjcy01_only_some_sources_connectable( await hass.async_block_till_done() +async def test_xiaomi_xmosb01xs(hass: HomeAssistant) -> None: + """Test XMOSB01XS multiple advertisements. + + This device has multiple advertisements before all sensors are visible. 
+ """ + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="DC:8E:95:23:07:B7", + data={"bindkey": "272b1c920ef435417c49228b8ab9a563"}, + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == 0 + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + ( + b"\x58\x59\x83\x46\x91\xb7\x07\x23\x95\x8e\xdc\xc7\x17\x61\xc1" + b"\x24\x03\x00\x25\x44\xb0\x65" + ), + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + b"\x10\x59\x83\x46\x90\xb7\x07\x23\x95\x8e\xdc", + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + b"\x48\x59\x83\x46\x9d\x34\x45\xec\xab\xda\x93\xf9\x24\x03\x00\x9e\x01\x6d\x3d", + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + ( + b"\x58\x59\x83\x46\xa9\xb7\x07\x23\x95\x8e\xdc\xc6\x59\xa2\xdc\xc5" + b"\x24\x03\x00\xa0\x4d\x0d\x45" + ), + connectable=False, + ), + ) + inject_bluetooth_service_info_bleak( + hass, + make_advertisement( + "DC:8E:95:23:07:B7", + ( + b"\x58\x59\x83\x46\xa4\xb7\x07\x23\x95\x8e\xdc\x77\x2a\xe2\x5c\x11" + b"\x24\x03\x00\xab\x87\x7b\xd7" + ), + connectable=False, + ), + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 4 + + occupancy_sensor = hass.states.get("binary_sensor.occupancy_sensor_07b7_occupancy") + occupancy_sensor_attribtes = occupancy_sensor.attributes + assert occupancy_sensor.state == STATE_ON + assert ( + occupancy_sensor_attribtes[ATTR_FRIENDLY_NAME] + == "Occupancy Sensor 07B7 Occupancy" + ) + + illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_illuminance") + illum_sensor_attr = illum_sensor.attributes + assert illum_sensor.state == "111.0" + assert illum_sensor_attr[ATTR_FRIENDLY_NAME] == "Occupancy Sensor 07B7 Illuminance" + assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "lx" + assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" + + illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_duration_detected") + illum_sensor_attr = illum_sensor.attributes + assert illum_sensor.state == "2" + assert ( + illum_sensor_attr[ATTR_FRIENDLY_NAME] + == "Occupancy Sensor 07B7 Duration detected" + ) + assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "min" + assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" + + illum_sensor = hass.states.get("sensor.occupancy_sensor_07b7_duration_cleared") + illum_sensor_attr = illum_sensor.attributes + assert illum_sensor.state == "2" + assert ( + illum_sensor_attr[ATTR_FRIENDLY_NAME] + == "Occupancy Sensor 07B7 Duration cleared" + ) + assert illum_sensor_attr[ATTR_UNIT_OF_MEASUREMENT] == "min" + assert illum_sensor_attr[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + assert entry.data[CONF_SLEEPY_DEVICE] is True + + async def test_xiaomi_cgdk2_bind_key(hass: HomeAssistant) -> None: """Test CGDK2 bind key. 
From 7788685340fc8dc5256981bb2f7e890f43fc6bfd Mon Sep 17 00:00:00 2001 From: Raman Gupta <7243222+raman325@users.noreply.github.com> Date: Wed, 4 Sep 2024 02:16:56 -0400 Subject: [PATCH 1411/1635] Get zwave_js statistics data from model (#120281) * Get zwave_js statistics data from model * Add migration logic * Update comment * revert change to forward entry --- homeassistant/components/zwave_js/__init__.py | 2 +- homeassistant/components/zwave_js/migrate.py | 83 +++++++----- homeassistant/components/zwave_js/sensor.py | 126 +++++++++++------- tests/components/zwave_js/test_sensor.py | 54 ++++++++ 4 files changed, 185 insertions(+), 80 deletions(-) diff --git a/homeassistant/components/zwave_js/__init__.py b/homeassistant/components/zwave_js/__init__.py index dedae10400f..4844f707201 100644 --- a/homeassistant/components/zwave_js/__init__.py +++ b/homeassistant/components/zwave_js/__init__.py @@ -353,7 +353,7 @@ class ControllerEvents: self.discovered_value_ids: dict[str, set[str]] = defaultdict(set) self.driver_events = driver_events self.dev_reg = driver_events.dev_reg - self.registered_unique_ids: dict[str, dict[str, set[str]]] = defaultdict( + self.registered_unique_ids: dict[str, dict[Platform, set[str]]] = defaultdict( lambda: defaultdict(set) ) self.node_events = NodeEvents(hass, self) diff --git a/homeassistant/components/zwave_js/migrate.py b/homeassistant/components/zwave_js/migrate.py index bde53137dc1..ac749cb516b 100644 --- a/homeassistant/components/zwave_js/migrate.py +++ b/homeassistant/components/zwave_js/migrate.py @@ -6,20 +6,16 @@ from dataclasses import dataclass import logging from zwave_js_server.model.driver import Driver +from zwave_js_server.model.node import Node from zwave_js_server.model.value import Value as ZwaveValue -from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceEntry -from homeassistant.helpers.entity_registry import ( - EntityRegistry, - RegistryEntry, - async_entries_for_device, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from .const import DOMAIN from .discovery import ZwaveDiscoveryInfo -from .helpers import get_unique_id +from .helpers import get_unique_id, get_valueless_base_unique_id _LOGGER = logging.getLogger(__name__) @@ -62,10 +58,10 @@ class ValueID: @callback def async_migrate_old_entity( hass: HomeAssistant, - ent_reg: EntityRegistry, + ent_reg: er.EntityRegistry, registered_unique_ids: set[str], - platform: str, - device: DeviceEntry, + platform: Platform, + device: dr.DeviceEntry, unique_id: str, ) -> None: """Migrate existing entity if current one can't be found and an old one exists.""" @@ -77,8 +73,8 @@ def async_migrate_old_entity( # Look for existing entities in the registry that could be the same value but on # a different endpoint - existing_entity_entries: list[RegistryEntry] = [] - for entry in async_entries_for_device(ent_reg, device.id): + existing_entity_entries: list[er.RegistryEntry] = [] + for entry in er.async_entries_for_device(ent_reg, device.id): # If entity is not in the domain for this discovery info or entity has already # been processed, skip it if entry.domain != platform or entry.unique_id in registered_unique_ids: @@ -109,35 +105,40 @@ def async_migrate_old_entity( @callback def async_migrate_unique_id( - ent_reg: EntityRegistry, platform: str, old_unique_id: str, new_unique_id: str + ent_reg: 
er.EntityRegistry, + platform: Platform, + old_unique_id: str, + new_unique_id: str, ) -> None: """Check if entity with old unique ID exists, and if so migrate it to new ID.""" - if entity_id := ent_reg.async_get_entity_id(platform, DOMAIN, old_unique_id): + if not (entity_id := ent_reg.async_get_entity_id(platform, DOMAIN, old_unique_id)): + return + + _LOGGER.debug( + "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", + entity_id, + old_unique_id, + new_unique_id, + ) + try: + ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) + except ValueError: _LOGGER.debug( - "Migrating entity %s from old unique ID '%s' to new unique ID '%s'", + ( + "Entity %s can't be migrated because the unique ID is taken; " + "Cleaning it up since it is likely no longer valid" + ), entity_id, - old_unique_id, - new_unique_id, ) - try: - ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id) - except ValueError: - _LOGGER.debug( - ( - "Entity %s can't be migrated because the unique ID is taken; " - "Cleaning it up since it is likely no longer valid" - ), - entity_id, - ) - ent_reg.async_remove(entity_id) + ent_reg.async_remove(entity_id) @callback def async_migrate_discovered_value( hass: HomeAssistant, - ent_reg: EntityRegistry, + ent_reg: er.EntityRegistry, registered_unique_ids: set[str], - device: DeviceEntry, + device: dr.DeviceEntry, driver: Driver, disc_info: ZwaveDiscoveryInfo, ) -> None: @@ -160,7 +161,7 @@ def async_migrate_discovered_value( ] if ( - disc_info.platform == "binary_sensor" + disc_info.platform == Platform.BINARY_SENSOR and disc_info.platform_hint == "notification" ): for state_key in disc_info.primary_value.metadata.states: @@ -211,6 +212,24 @@ def async_migrate_discovered_value( registered_unique_ids.add(new_unique_id) +@callback +def async_migrate_statistics_sensors( + hass: HomeAssistant, driver: Driver, node: Node, key_map: dict[str, str] +) -> None: + """Migrate statistics sensors to new unique IDs. + + - Migrate camel case keys in unique IDs to snake keys. 
+ """ + ent_reg = er.async_get(hass) + base_unique_id = f"{get_valueless_base_unique_id(driver, node)}.statistics" + for new_key, old_key in key_map.items(): + if new_key == old_key: + continue + old_unique_id = f"{base_unique_id}_{old_key}" + new_unique_id = f"{base_unique_id}_{new_key}" + async_migrate_unique_id(ent_reg, Platform.SENSOR, old_unique_id, new_unique_id) + + @callback def get_old_value_ids(value: ZwaveValue) -> list[str]: """Get old value IDs so we can migrate entity unique ID.""" diff --git a/homeassistant/components/zwave_js/sensor.py b/homeassistant/components/zwave_js/sensor.py index 428bf504510..f52801109a1 100644 --- a/homeassistant/components/zwave_js/sensor.py +++ b/homeassistant/components/zwave_js/sensor.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass -from datetime import datetime from typing import Any import voluptuous as vol @@ -16,10 +15,10 @@ from zwave_js_server.const.command_class.meter import ( ) from zwave_js_server.exceptions import BaseZwaveJSServerError from zwave_js_server.model.controller import Controller -from zwave_js_server.model.controller.statistics import ControllerStatisticsDataType +from zwave_js_server.model.controller.statistics import ControllerStatistics from zwave_js_server.model.driver import Driver from zwave_js_server.model.node import Node as ZwaveNode -from zwave_js_server.model.node.statistics import NodeStatisticsDataType +from zwave_js_server.model.node.statistics import NodeStatistics from zwave_js_server.util.command_class.meter import get_meter_type from homeassistant.components.sensor import ( @@ -90,6 +89,7 @@ from .discovery_data_template import ( ) from .entity import ZWaveBaseEntity from .helpers import get_device_info, get_valueless_base_unique_id +from .migrate import async_migrate_statistics_sensors PARALLEL_UPDATES = 0 @@ -328,152 +328,172 @@ ENTITY_DESCRIPTION_KEY_MAP = { } -def convert_dict_of_dicts( - statistics: ControllerStatisticsDataType | NodeStatisticsDataType, key: str +def convert_nested_attr( + statistics: ControllerStatistics | NodeStatistics, key: str ) -> Any: - """Convert a dictionary of dictionaries to a value.""" - keys = key.split(".") - return statistics.get(keys[0], {}).get(keys[1], {}).get(keys[2]) # type: ignore[attr-defined] + """Convert a string that represents a nested attr to a value.""" + data = statistics + for _key in key.split("."): + if data is None: + return None # type: ignore[unreachable] + data = getattr(data, _key) + return data @dataclass(frozen=True, kw_only=True) class ZWaveJSStatisticsSensorEntityDescription(SensorEntityDescription): """Class to represent a Z-Wave JS statistics sensor entity description.""" - convert: Callable[ - [ControllerStatisticsDataType | NodeStatisticsDataType, str], Any - ] = lambda statistics, key: statistics.get(key) + convert: Callable[[ControllerStatistics | NodeStatistics, str], Any] = getattr entity_registry_enabled_default: bool = False # Controller statistics descriptions ENTITY_DESCRIPTION_CONTROLLER_STATISTICS_LIST = [ ZWaveJSStatisticsSensorEntityDescription( - key="messagesTX", + key="messages_tx", translation_key="successful_messages", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messagesRX", + key="messages_rx", translation_key="successful_messages", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), 
ZWaveJSStatisticsSensorEntityDescription( - key="messagesDroppedTX", + key="messages_dropped_tx", translation_key="messages_dropped", translation_placeholders={"direction": "TX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="messagesDroppedRX", + key="messages_dropped_rx", translation_key="messages_dropped", translation_placeholders={"direction": "RX"}, state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="NAK", translation_key="nak", state_class=SensorStateClass.TOTAL + key="nak", translation_key="nak", state_class=SensorStateClass.TOTAL ), ZWaveJSStatisticsSensorEntityDescription( - key="CAN", translation_key="can", state_class=SensorStateClass.TOTAL + key="can", translation_key="can", state_class=SensorStateClass.TOTAL ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutACK", + key="timeout_ack", translation_key="timeout_ack", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutResponse", + key="timeout_response", translation_key="timeout_response", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="timeoutCallback", + key="timeout_callback", translation_key="timeout_callback", state_class=SensorStateClass.TOTAL, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel0.average", + key="background_rssi.channel_0.average", translation_key="average_background_rssi", translation_placeholders={"channel": "0"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel0.current", + key="background_rssi.channel_0.current", translation_key="current_background_rssi", translation_placeholders={"channel": "0"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel1.average", + key="background_rssi.channel_1.average", translation_key="average_background_rssi", translation_placeholders={"channel": "1"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel1.current", + key="background_rssi.channel_1.current", translation_key="current_background_rssi", translation_placeholders={"channel": "1"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, state_class=SensorStateClass.MEASUREMENT, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel2.average", + key="background_rssi.channel_2.average", translation_key="average_background_rssi", translation_placeholders={"channel": "2"}, native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, device_class=SensorDeviceClass.SIGNAL_STRENGTH, - convert=convert_dict_of_dicts, + convert=convert_nested_attr, ), ZWaveJSStatisticsSensorEntityDescription( - key="backgroundRSSI.channel2.current", + key="background_rssi.channel_2.current", translation_key="current_background_rssi", translation_placeholders={"channel": "2"}, 
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
         device_class=SensorDeviceClass.SIGNAL_STRENGTH,
         state_class=SensorStateClass.MEASUREMENT,
-        convert=convert_dict_of_dicts,
+        convert=convert_nested_attr,
     ),
 ]
 
 
+CONTROLLER_STATISTICS_KEY_MAP: dict[str, str] = {
+    "messages_tx": "messagesTX",
+    "messages_rx": "messagesRX",
+    "messages_dropped_tx": "messagesDroppedTX",
+    "messages_dropped_rx": "messagesDroppedRX",
+    "nak": "NAK",
+    "can": "CAN",
+    "timeout_ack": "timeoutACK",
+    "timeout_response": "timeoutResponse",
+    "timeout_callback": "timeoutCallback",
+    "background_rssi.channel_0.average": "backgroundRSSI.channel0.average",
+    "background_rssi.channel_0.current": "backgroundRSSI.channel0.current",
+    "background_rssi.channel_1.average": "backgroundRSSI.channel1.average",
+    "background_rssi.channel_1.current": "backgroundRSSI.channel1.current",
+    "background_rssi.channel_2.average": "backgroundRSSI.channel2.average",
+    "background_rssi.channel_2.current": "backgroundRSSI.channel2.current",
+}
+
 # Node statistics descriptions
 ENTITY_DESCRIPTION_NODE_STATISTICS_LIST = [
     ZWaveJSStatisticsSensorEntityDescription(
-        key="commandsRX",
+        key="commands_rx",
         translation_key="successful_commands",
         translation_placeholders={"direction": "RX"},
         state_class=SensorStateClass.TOTAL,
     ),
     ZWaveJSStatisticsSensorEntityDescription(
-        key="commandsTX",
+        key="commands_tx",
         translation_key="successful_commands",
         translation_placeholders={"direction": "TX"},
         state_class=SensorStateClass.TOTAL,
     ),
     ZWaveJSStatisticsSensorEntityDescription(
-        key="commandsDroppedRX",
+        key="commands_dropped_rx",
         translation_key="commands_dropped",
         translation_placeholders={"direction": "RX"},
         state_class=SensorStateClass.TOTAL,
     ),
     ZWaveJSStatisticsSensorEntityDescription(
-        key="commandsDroppedTX",
+        key="commands_dropped_tx",
        translation_key="commands_dropped",
         translation_placeholders={"direction": "TX"},
         state_class=SensorStateClass.TOTAL,
     ),
     ZWaveJSStatisticsSensorEntityDescription(
-        key="timeoutResponse",
+        key="timeout_response",
         translation_key="timeout_response",
         state_class=SensorStateClass.TOTAL,
     ),
@@ -492,20 +512,24 @@ ENTITY_DESCRIPTION_NODE_STATISTICS_LIST = [
         state_class=SensorStateClass.MEASUREMENT,
     ),
     ZWaveJSStatisticsSensorEntityDescription(
-        key="lastSeen",
+        key="last_seen",
         translation_key="last_seen",
         device_class=SensorDeviceClass.TIMESTAMP,
-        convert=(
-            lambda statistics, key: (
-                datetime.fromisoformat(dt)  # type: ignore[arg-type]
-                if (dt := statistics.get(key))
-                else None
-            )
-        ),
         entity_registry_enabled_default=True,
     ),
 ]
+NODE_STATISTICS_KEY_MAP: dict[str, str] = {
+    "commands_rx": "commandsRX",
+    "commands_tx": "commandsTX",
+    "commands_dropped_rx": "commandsDroppedRX",
+    "commands_dropped_tx": "commandsDroppedTX",
+    "timeout_response": "timeoutResponse",
+    "rtt": "rtt",
+    "rssi": "rssi",
+    "last_seen": "lastSeen",
+}
+
 
 
 def get_entity_description(
     data: NumericSensorDataTemplateData,
@@ -588,6 +612,14 @@ async def async_setup_entry(
     @callback
     def async_add_statistics_sensors(node: ZwaveNode) -> None:
         """Add statistics sensors."""
+        async_migrate_statistics_sensors(
+            hass,
+            driver,
+            node,
+            CONTROLLER_STATISTICS_KEY_MAP
+            if driver.controller.own_node == node
+            else NODE_STATISTICS_KEY_MAP,
+        )
         async_add_entities(
             [
                 ZWaveStatisticsSensor(
@@ -1001,7 +1033,7 @@ class ZWaveStatisticsSensor(SensorEntity):
     def statistics_updated(self, event_data: dict) -> None:
         """Call when statistics updated event is received."""
         self._attr_native_value = self.entity_description.convert(
-
event_data["statistics"], self.entity_description.key + event_data["statistics_updated"], self.entity_description.key ) self.async_write_ha_state() @@ -1027,5 +1059,5 @@ class ZWaveStatisticsSensor(SensorEntity): # Set initial state self._attr_native_value = self.entity_description.convert( - self.statistics_src.statistics.data, self.entity_description.key + self.statistics_src.statistics, self.entity_description.key ) diff --git a/tests/components/zwave_js/test_sensor.py b/tests/components/zwave_js/test_sensor.py index 19f8aeece36..34c50b8d449 100644 --- a/tests/components/zwave_js/test_sensor.py +++ b/tests/components/zwave_js/test_sensor.py @@ -23,6 +23,10 @@ from homeassistant.components.zwave_js.const import ( SERVICE_RESET_METER, ) from homeassistant.components.zwave_js.helpers import get_valueless_base_unique_id +from homeassistant.components.zwave_js.sensor import ( + CONTROLLER_STATISTICS_KEY_MAP, + NODE_STATISTICS_KEY_MAP, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, @@ -55,6 +59,8 @@ from .common import ( VOLTAGE_SENSOR, ) +from tests.common import MockConfigEntry + async def test_numeric_sensor( hass: HomeAssistant, @@ -756,6 +762,54 @@ NODE_STATISTICS_SUFFIXES_UNKNOWN = { } +async def test_statistics_sensors_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + zp3111_state, + client, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test statistics migration sensor.""" + node = Node(client, copy.deepcopy(zp3111_state)) + client.driver.controller.nodes[node.node_id] = node + + entry = MockConfigEntry(domain="zwave_js", data={"url": "ws://test.org"}) + entry.add_to_hass(hass) + + controller_base_unique_id = f"{client.driver.controller.home_id}.1.statistics" + node_base_unique_id = f"{client.driver.controller.home_id}.22.statistics" + + # Create entity registry records for the old statistics keys + for base_unique_id, key_map in ( + (controller_base_unique_id, CONTROLLER_STATISTICS_KEY_MAP), + (node_base_unique_id, NODE_STATISTICS_KEY_MAP), + ): + # old key + for key in key_map.values(): + entity_registry.async_get_or_create( + "sensor", DOMAIN, f"{base_unique_id}_{key}" + ) + + # Set up integration + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + # Validate that entity unique ID's have changed + for base_unique_id, key_map in ( + (controller_base_unique_id, CONTROLLER_STATISTICS_KEY_MAP), + (node_base_unique_id, NODE_STATISTICS_KEY_MAP), + ): + for new_key, old_key in key_map.items(): + # If the key has changed, the old entity should not exist + if new_key != old_key: + assert not entity_registry.async_get_entity_id( + "sensor", DOMAIN, f"{base_unique_id}_{old_key}" + ) + assert entity_registry.async_get_entity_id( + "sensor", DOMAIN, f"{base_unique_id}_{new_key}" + ) + + async def test_statistics_sensors_no_last_seen( hass: HomeAssistant, entity_registry: er.EntityRegistry, From 482bed522fed193a218a3ed7362c50940b244245 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 4 Sep 2024 08:34:51 +0200 Subject: [PATCH 1412/1635] Fix missing patch in nextdns tests (#125195) --- tests/components/nextdns/test_config_flow.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/components/nextdns/test_config_flow.py b/tests/components/nextdns/test_config_flow.py index 2a51c6821fc..27a6cf1e7e0 100644 --- a/tests/components/nextdns/test_config_flow.py +++ b/tests/components/nextdns/test_config_flow.py @@ -12,7 +12,7 @@ from homeassistant.const import CONF_API_KEY, 
CONF_PROFILE_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import PROFILES, init_integration +from . import PROFILES, init_integration, mock_nextdns async def test_form_create_entry(hass: HomeAssistant) -> None: @@ -116,6 +116,7 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: "homeassistant.components.nextdns.NextDns.get_profiles", return_value=PROFILES, ), + mock_nextdns(), ): result = await hass.config_entries.flow.async_configure( result["flow_id"], From 7fc0e36b2f5ceedf5caeb4bd84cb806281b90dbb Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 4 Sep 2024 08:38:46 +0200 Subject: [PATCH 1413/1635] Move recorder EntityIDPostMigrationTask to migration (#125136) * Move recorder EntityIDPostMigrationTask to migration * Update test --- homeassistant/components/recorder/core.py | 4 ---- homeassistant/components/recorder/migration.py | 13 ++++++++++++- homeassistant/components/recorder/tasks.py | 13 ------------- .../recorder/test_migration_from_schema_32.py | 3 +-- tests/components/recorder/test_v32_migration.py | 10 +++++----- 5 files changed, 18 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index c0ac1fc1277..002d8937e3a 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -1283,10 +1283,6 @@ class Recorder(threading.Thread): self.event_session = self.get_session() self.event_session.expire_on_commit = False - def _post_migrate_entity_ids(self) -> bool: - """Post migrate entity_ids if needed.""" - return migration.post_migrate_entity_ids(self) - def _send_keep_alive(self) -> None: """Send a keep alive to keep the db connection open.""" assert self.event_session is not None diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 890fc3045b2..d2d8fff136e 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -100,7 +100,7 @@ from .queries import ( migrate_single_statistics_row_to_timestamp, ) from .statistics import cleanup_statistics_timestamp_migration, get_start_time -from .tasks import EntityIDPostMigrationTask, RecorderTask +from .tasks import RecorderTask from .util import ( database_job_retry_wrapper, execute_stmt_lambda_element, @@ -2667,6 +2667,17 @@ class EventIDPostMigration(BaseRunTimeMigration): return NeedsMigrateResult(needs_migrate=False, migration_done=True) +@dataclass(slots=True) +class EntityIDPostMigrationTask(RecorderTask): + """An object to insert into the recorder queue to cleanup after entity_ids migration.""" + + def run(self, instance: Recorder) -> None: + """Run entity_id post migration task.""" + if not post_migrate_entity_ids(instance): + # Schedule a new migration task if this one didn't finish + instance.queue_task(EntityIDPostMigrationTask()) + + def _mark_migration_done( session: Session, migration: type[BaseRunTimeMigration] ) -> None: diff --git a/homeassistant/components/recorder/tasks.py b/homeassistant/components/recorder/tasks.py index c51ba2b16ca..2529e8012bf 100644 --- a/homeassistant/components/recorder/tasks.py +++ b/homeassistant/components/recorder/tasks.py @@ -333,19 +333,6 @@ class AdjustLRUSizeTask(RecorderTask): instance._adjust_lru_size() # noqa: SLF001 -@dataclass(slots=True) -class EntityIDPostMigrationTask(RecorderTask): - """An object to insert into the recorder queue to cleanup after entity_ids 
migration.""" - - def run(self, instance: Recorder) -> None: - """Run entity_id post migration task.""" - if ( - not instance._post_migrate_entity_ids() # noqa: SLF001 - ): - # Schedule a new migration task if this one didn't finish - instance.queue_task(EntityIDPostMigrationTask()) - - @dataclass(slots=True) class RefreshEventTypesTask(RecorderTask): """An object to insert into the recorder queue to refresh event types.""" diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index f1613909722..40d18ab51fd 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -32,7 +32,6 @@ from homeassistant.components.recorder.queries import ( get_migration_changes, select_event_type_ids, ) -from homeassistant.components.recorder.tasks import EntityIDPostMigrationTask from homeassistant.components.recorder.util import ( execute_stmt_lambda_element, get_index_by_name, @@ -746,7 +745,7 @@ async def test_post_migrate_entity_ids( await _async_wait_migration_done(hass) # This is a threadsafe way to add a task to the recorder - recorder_mock.queue_task(EntityIDPostMigrationTask()) + recorder_mock.queue_task(migration.EntityIDPostMigrationTask()) await _async_wait_migration_done(hass) def _fetch_migrated_states(): diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 1932860d845..58bcabdff51 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -109,6 +109,7 @@ async def test_migrate_times( with ( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(migration.EventsContextIDMigration, "migrate_data"), patch.object(migration.StatesContextIDMigration, "migrate_data"), patch.object(migration.EventTypeIDMigration, "migrate_data"), @@ -120,7 +121,6 @@ async def test_migrate_times( patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_30)), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), ): async with ( async_test_home_assistant() as hass, @@ -264,13 +264,13 @@ async def test_migrate_can_resume_entity_id_post_migration( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), ): async with ( async_test_home_assistant() as hass, @@ -382,13 +382,13 @@ async def test_migrate_can_resume_ix_states_event_id_removed( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + 
patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), ): async with ( async_test_home_assistant() as hass, @@ -514,13 +514,13 @@ async def test_out_of_disk_space_while_rebuild_states_table( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), ): async with ( async_test_home_assistant() as hass, @@ -690,13 +690,13 @@ async def test_out_of_disk_space_while_removing_foreign_key( patch.object(recorder, "db_schema", old_db_schema), patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION), patch.object(migration.EventIDPostMigration, "migrate_data"), + patch.object(migration, "post_migrate_entity_ids", return_value=False), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(core, "EventTypes", old_db_schema.EventTypes), patch.object(core, "EventData", old_db_schema.EventData), patch.object(core, "States", old_db_schema.States), patch.object(core, "Events", old_db_schema.Events), patch(CREATE_ENGINE_TARGET, new=_create_engine_test(SCHEMA_MODULE_32)), - patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"), ): async with ( async_test_home_assistant() as hass, From 8fd691be6902ef97e0681a7c361c321f1f52fced Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Wed, 4 Sep 2024 09:52:41 +0200 Subject: [PATCH 1414/1635] Teach recorder data migrator base class to remove index (#125168) * Teach recorder data migrator base class to remove index * Fix tests --- .../components/recorder/migration.py | 49 +++++++++++++------ .../components/recorder/test_v32_migration.py | 3 ++ 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index d2d8fff136e..242e503611c 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2191,8 +2191,6 @@ class MigrationTask(RecorderTask): if not self.migrator.migrate_data(instance): # Schedule a new migration task if this one didn't finish instance.queue_task(MigrationTask(self.migrator)) - else: - self.migrator.migration_done(instance, None) @dataclass(slots=True) @@ -2213,6 +2211,7 @@ class NeedsMigrateResult: class BaseRunTimeMigration(ABC): """Base class for run time migrations.""" + index_to_drop: tuple[str, str] | None = None required_schema_version = 0 migration_version = 1 migration_id: str @@ -2230,11 +2229,29 @@ class BaseRunTimeMigration(ABC): else: 
self.migration_done(instance, session) + def migrate_data(self, instance: Recorder) -> bool: + """Migrate some data, returns True if migration is completed.""" + if result := self.migrate_data_impl(instance): + if self.index_to_drop is not None: + self._remove_index(instance, self.index_to_drop) + self.migration_done(instance, None) + return result + @staticmethod @abstractmethod - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(instance: Recorder) -> bool: """Migrate some data, returns True if migration is completed.""" + @staticmethod + @database_job_retry_wrapper("remove index") + def _remove_index(instance: Recorder, index_to_drop: tuple[str, str]) -> None: + """Remove indices. + + Called when migration is completed. + """ + table, index = index_to_drop + _drop_index(instance.get_session, table, index) + def migration_done(self, instance: Recorder, session: Session | None) -> None: """Will be called after migrate returns True or if migration is not needed.""" @@ -2260,8 +2277,14 @@ class BaseRunTimeMigration(ABC): # The migration changes table indicates that the migration has been done return False # We do not know if the migration is done from the - # migration changes table so we must check the data + # migration changes table so we must check the index and data # This is the slow path + if ( + self.index_to_drop is not None + and get_index_by_name(session, self.index_to_drop[0], self.index_to_drop[1]) + is not None + ): + return True needs_migrate = self.needs_migrate_impl(instance, session) if needs_migrate.migration_done: _mark_migration_done(session, self.__class__) @@ -2290,10 +2313,11 @@ class StatesContextIDMigration(BaseRunTimeMigrationWithQuery): required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION migration_id = "state_context_id_as_binary" + index_to_drop = ("states", "ix_states_context_id") @staticmethod @retryable_database_job("migrate states context_ids to binary format") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(instance: Recorder) -> bool: """Migrate states context_ids to use binary format, return True if completed.""" _to_bytes = _context_id_to_bytes session_maker = instance.get_session @@ -2323,9 +2347,6 @@ class StatesContextIDMigration(BaseRunTimeMigrationWithQuery): if is_done := not states: _mark_migration_done(session, StatesContextIDMigration) - if is_done: - _drop_index(session_maker, "states", "ix_states_context_id") - _LOGGER.debug("Migrating states context_ids to binary format: done=%s", is_done) return is_done @@ -2339,10 +2360,11 @@ class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): required_schema_version = CONTEXT_ID_AS_BINARY_SCHEMA_VERSION migration_id = "event_context_id_as_binary" + index_to_drop = ("events", "ix_events_context_id") @staticmethod @retryable_database_job("migrate events context_ids to binary format") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(instance: Recorder) -> bool: """Migrate events context_ids to use binary format, return True if completed.""" _to_bytes = _context_id_to_bytes session_maker = instance.get_session @@ -2372,9 +2394,6 @@ class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): if is_done := not events: _mark_migration_done(session, EventsContextIDMigration) - if is_done: - _drop_index(session_maker, "events", "ix_events_context_id") - _LOGGER.debug("Migrating events context_ids to binary format: done=%s", is_done) return is_done @@ -2395,7 +2414,7 @@ class 
EventTypeIDMigration(BaseRunTimeMigrationWithQuery): @staticmethod @retryable_database_job("migrate events event_types to event_type_ids") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(instance: Recorder) -> bool: """Migrate event_type to event_type_ids, return True if completed.""" session_maker = instance.get_session _LOGGER.debug("Migrating event_types") @@ -2478,7 +2497,7 @@ class EntityIDMigration(BaseRunTimeMigrationWithQuery): @staticmethod @retryable_database_job("migrate states entity_ids to states_meta") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(instance: Recorder) -> bool: """Migrate entity_ids to states_meta, return True if completed. We do this in two steps because we need the history queries to work @@ -2592,7 +2611,7 @@ class EventIDPostMigration(BaseRunTimeMigration): @staticmethod @retryable_database_job("cleanup_legacy_event_ids") - def migrate_data(instance: Recorder) -> bool: + def migrate_data_impl(instance: Recorder) -> bool: """Remove old event_id index from states, returns True if completed. We used to link states to events using the event_id column but we no diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 58bcabdff51..60f223aaa91 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -219,6 +219,7 @@ async def test_migrate_times( await hass.async_stop() +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migrate_can_resume_entity_id_post_migration( @@ -321,6 +322,7 @@ async def test_migrate_can_resume_entity_id_post_migration( await hass.async_stop() +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("enable_migrate_event_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage @@ -625,6 +627,7 @@ async def test_out_of_disk_space_while_rebuild_states_table( @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("enable_migrate_event_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage From 9da3f98c233a291ef43829d0c9773fef0750acd9 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Wed, 4 Sep 2024 11:00:02 +0200 Subject: [PATCH 1415/1635] Update knx-frontend to 2024.9.4.64538 (#125196) --- homeassistant/components/knx/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index b7efd14fa2a..181dca6f4b8 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.1.1", "xknxproject==3.7.1", - "knx-frontend==2024.8.9.225351" + "knx-frontend==2024.9.4.64538" ], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 5ca05016bf4..cdbbd294eaa 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1231,7 +1231,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # 
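
Aside on the recorder refactor in patches 1413/1414 above: the base class now owns the finishing steps (dropping index_to_drop and calling migration_done), while each migrator only implements migrate_data_impl. A minimal standalone sketch of that template-method shape, not taken from the patches; FakeRecorder, ExampleContextIDMigration and drop_index are made-up stand-ins for the real recorder types.

    from abc import ABC, abstractmethod


    class BaseRunTimeMigration(ABC):
        """Sketch of the refactored base class: it owns the finishing steps."""

        # (table, index) dropped automatically once the data migration reports
        # completion; None means there is nothing to drop.
        index_to_drop: tuple[str, str] | None = None
        migration_id = "example"

        def migrate_data(self, instance) -> bool:
            """Run one batch; return True once the migration is fully done."""
            if result := self.migrate_data_impl(instance):
                if self.index_to_drop is not None:
                    table, index = self.index_to_drop
                    instance.drop_index(table, index)  # stand-in for _drop_index()
                self.migration_done(instance, None)
            return result

        @abstractmethod
        def migrate_data_impl(self, instance) -> bool:
            """Migrate some rows; subclasses return True when nothing is left."""

        def migration_done(self, instance, session) -> None:
            """Optional hook for subclasses; the default does nothing."""


    class FakeRecorder:
        """Tiny stand-in for the real Recorder, just enough to run the sketch."""

        def drop_index(self, table: str, index: str) -> None:
            print(f"dropping {index} on {table}")


    class ExampleContextIDMigration(BaseRunTimeMigration):
        """A subclass now only declares what to migrate and which index to drop."""

        migration_id = "example_context_id_as_binary"
        index_to_drop = ("states", "ix_states_context_id")

        def migrate_data_impl(self, instance) -> bool:
            # A real migrator converts one batch per call and returns False
            # until its query comes back empty.
            return True


    ExampleContextIDMigration().migrate_data(FakeRecorder())  # drops the index once done

With this shape, StatesContextIDMigration and EventsContextIDMigration in the patch only need index_to_drop plus migrate_data_impl, and the per-migrator _drop_index calls disappear.
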
homeassistant.components.knx -knx-frontend==2024.8.9.225351 +knx-frontend==2024.9.4.64538 # homeassistant.components.konnected konnected==1.2.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ae262c5d05a..c5363b13b40 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1030,7 +1030,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.9.225351 +knx-frontend==2024.9.4.64538 # homeassistant.components.konnected konnected==1.2.0 From daa5268cf21e3d9d0f79b41d0e3f05e0024587c7 Mon Sep 17 00:00:00 2001 From: Bram Kragten Date: Wed, 4 Sep 2024 11:35:14 +0200 Subject: [PATCH 1416/1635] Update frontend to 20240904.0 (#125206) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 7b904cba999..fbdafe6025d 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240903.1"] + "requirements": ["home-assistant-frontend==20240904.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ddb96da6bff..73f3452b259 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ habluetooth==3.4.0 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240903.1 +home-assistant-frontend==20240904.0 home-assistant-intents==2024.8.29 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index cdbbd294eaa..67d53fb1d26 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1108,7 +1108,7 @@ hole==0.8.0 holidays==0.56 # homeassistant.components.frontend -home-assistant-frontend==20240903.1 +home-assistant-frontend==20240904.0 # homeassistant.components.conversation home-assistant-intents==2024.8.29 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c5363b13b40..e21097c1a9d 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -934,7 +934,7 @@ hole==0.8.0 holidays==0.56 # homeassistant.components.frontend -home-assistant-frontend==20240903.1 +home-assistant-frontend==20240904.0 # homeassistant.components.conversation home-assistant-intents==2024.8.29 From b26e4d672f5d3a6eb0f1a1636dfb5ea4147ff7fc Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Tue, 3 Sep 2024 23:44:49 -1000 Subject: [PATCH 1417/1635] Bump yarl to 1.9.8 (#125193) changelog: https://github.com/aio-libs/yarl/compare/v1.9.7...v1.9.8 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 73f3452b259..0eb5d6a78e0 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.7 +yarl==1.9.8 zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index 69d952f4bc0..2c8e0a432f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.7", + "yarl==1.9.8", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index fd6e8815e90..7f28e93cd4f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.7 +yarl==1.9.8 From 38a1c97a51d9b99183334465ace03c65ccde5519 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 4 Sep 2024 11:46:41 +0200 Subject: [PATCH 1418/1635] Bump deebot-client to 8.4.0 (#125207) --- homeassistant/components/ecovacs/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 560ee4d599c..33977b3b0de 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.3.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 67d53fb1d26..99721e57d61 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -715,7 +715,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.3.0 +deebot-client==8.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e21097c1a9d..c358c8e3445 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -605,7 +605,7 @@ dbus-fast==2.24.0 debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.3.0 +deebot-client==8.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns From fb5afff9d5a147f5b20953205df4bf5360e56e4c Mon Sep 17 00:00:00 2001 From: Lenn <78048721+LennP@users.noreply.github.com> Date: Wed, 4 Sep 2024 12:11:11 +0200 Subject: [PATCH 1419/1635] Add Motionblinds Bluetooth diagnostics (#121899) * Add diagnostics platform * Add diagnostics test * Remove comments * Exclude created_at and modified_at from snapshot * Fix entry_id in mock_config_entry * Add repr to excluded props from snapshot * Improve diagnostics * Use function name instead of number for callback diagnostics * Remove info from diagnostics * Reformat --- .../motionblinds_ble/diagnostics.py | 53 +++++++++++++++++++ 
tests/components/motionblinds_ble/conftest.py | 1 + .../snapshots/test_diagnostics.ambr | 34 ++++++++++++ .../motionblinds_ble/test_diagnostics.py | 27 ++++++++++ 4 files changed, 115 insertions(+) create mode 100644 homeassistant/components/motionblinds_ble/diagnostics.py create mode 100644 tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr create mode 100644 tests/components/motionblinds_ble/test_diagnostics.py diff --git a/homeassistant/components/motionblinds_ble/diagnostics.py b/homeassistant/components/motionblinds_ble/diagnostics.py new file mode 100644 index 00000000000..c76bef7c2f8 --- /dev/null +++ b/homeassistant/components/motionblinds_ble/diagnostics.py @@ -0,0 +1,53 @@ +"""Diagnostics support for Motionblinds Bluetooth.""" + +from __future__ import annotations + +from collections.abc import Iterable +from typing import Any + +from motionblindsble.device import MotionDevice + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_UNIQUE_ID +from homeassistant.core import HomeAssistant + +from .const import DOMAIN + +CONF_TITLE = "title" + +TO_REDACT: Iterable[Any] = { + # Config entry title and unique ID may contain sensitive data: + CONF_TITLE, + CONF_UNIQUE_ID, +} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + device: MotionDevice = hass.data[DOMAIN][entry.entry_id] + + return async_redact_data( + { + "entry": entry.as_dict(), + "device": { + "blind_type": device.blind_type.value, + "timezone": device.timezone, + "position": device._position, # noqa: SLF001 + "tilt": device._tilt, # noqa: SLF001 + "calibration_type": device._calibration_type.value # noqa: SLF001 + if device._calibration_type # noqa: SLF001 + else None, + "connection_type": device._connection_type.value, # noqa: SLF001 + "end_position_info": None + if not device._end_position_info # noqa: SLF001 + else { + "end_positions": device._end_position_info.end_positions.value, # noqa: SLF001 + "favorite": device._end_position_info.favorite_position, # noqa: SLF001 + }, + }, + }, + TO_REDACT, + ) diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index f89cf4f305d..ffd3bc5a2ab 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -109,6 +109,7 @@ def mock_config_entry( return MockConfigEntry( title="mock_title", domain=DOMAIN, + entry_id="mock_entry_id", unique_id=address, data={ CONF_ADDRESS: address, diff --git a/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..ccb5b1ed87b --- /dev/null +++ b/tests/components/motionblinds_ble/snapshots/test_diagnostics.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'device': dict({ + 'blind_type': 'Roller blind', + 'calibration_type': None, + 'connection_type': 'disconnected', + 'end_position_info': None, + 'position': None, + 'tilt': None, + 'timezone': None, + }), + 'entry': dict({ + 'data': dict({ + 'address': 'cc:cc:cc:cc:cc:cc', + 'blind_type': 'roller', + 'local_name': 'Motionblind CCCC', + 'mac_code': 'CCCC', + }), + 'disabled_by': None, + 'domain': 'motionblinds_ble', + 'entry_id': 'mock_entry_id', + 'minor_version': 1, + 'options': dict({ + }), + 
'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': '**REDACTED**', + 'unique_id': '**REDACTED**', + 'version': 1, + }), + }) +# --- diff --git a/tests/components/motionblinds_ble/test_diagnostics.py b/tests/components/motionblinds_ble/test_diagnostics.py new file mode 100644 index 00000000000..878d2caa326 --- /dev/null +++ b/tests/components/motionblinds_ble/test_diagnostics.py @@ -0,0 +1,27 @@ +"""Test Motionblinds Bluetooth diagnostics.""" + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test diagnostics.""" + + await setup_integration(hass, mock_config_entry) + + assert await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) == snapshot(exclude=props("created_at", "modified_at", "repr")) From 4b111008df69012f078ff7f87dcb4a3959d70cd6 Mon Sep 17 00:00:00 2001 From: starkillerOG Date: Wed, 4 Sep 2024 12:16:57 +0200 Subject: [PATCH 1420/1635] Add 100% coverage of Reolink button platform (#124380) * Add 100% button coverage * review comments * fix * Use SERVICE_PRESS constant * Use DOMAIN instead of const.DOMAIN * styling * User entity_registry_enabled_by_default fixture * fixes * Split out ptz_move test * use SERVICE_PTZ_MOVE constant --- homeassistant/components/reolink/button.py | 3 +- tests/components/reolink/conftest.py | 6 +- .../components/reolink/test_binary_sensor.py | 5 +- tests/components/reolink/test_button.py | 112 ++++++++++++++++++ tests/components/reolink/test_config_flow.py | 37 +++--- tests/components/reolink/test_init.py | 51 ++++---- tests/components/reolink/test_media_source.py | 7 +- 7 files changed, 164 insertions(+), 57 deletions(-) create mode 100644 tests/components/reolink/test_button.py diff --git a/homeassistant/components/reolink/button.py b/homeassistant/components/reolink/button.py index eba0570a3fb..3340cbad29a 100644 --- a/homeassistant/components/reolink/button.py +++ b/homeassistant/components/reolink/button.py @@ -37,6 +37,7 @@ from .entity import ( ATTR_SPEED = "speed" SUPPORT_PTZ_SPEED = CameraEntityFeature.STREAM +SERVICE_PTZ_MOVE = "ptz_move" @dataclass(frozen=True, kw_only=True) @@ -172,7 +173,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( - "ptz_move", + SERVICE_PTZ_MOVE, {vol.Required(ATTR_SPEED): cv.positive_int}, "async_ptz_move", [SUPPORT_PTZ_SPEED], diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index b0f599c28fb..c14a5ee0c32 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -6,8 +6,8 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest from reolink_aio.api import Chime -from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL +from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -137,14 +137,14 @@ def reolink_platforms() -> Generator[None]: def config_entry(hass: HomeAssistant) -> MockConfigEntry: 
"""Add the reolink mock config entry to hass.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py index 0872c3ab3b2..893e58a9512 100644 --- a/tests/components/reolink/test_binary_sensor.py +++ b/tests/components/reolink/test_binary_sensor.py @@ -4,7 +4,8 @@ from unittest.mock import MagicMock, patch from freezegun.api import FrozenDateTimeFactory -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL +from homeassistant.components.reolink.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant @@ -45,7 +46,7 @@ async def test_motion_sensor( # test webhook callback reolink_connect.motion_detected.return_value = True reolink_connect.ONVIF_event_callback.return_value = [0] - webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + webhook_id = f"{DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" client = await hass_client_no_auth() await client.post(f"/api/webhook/{webhook_id}", data="test_data") diff --git a/tests/components/reolink/test_button.py b/tests/components/reolink/test_button.py new file mode 100644 index 00000000000..7c91051c66e --- /dev/null +++ b/tests/components/reolink/test_button.py @@ -0,0 +1,112 @@ +"""Test the Reolink button platform.""" + +from unittest.mock import MagicMock, patch + +import pytest +from reolink_aio.exceptions import ReolinkError + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.reolink.button import ATTR_SPEED, SERVICE_PTZ_MOVE +from homeassistant.components.reolink.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_button( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test button entity with ptz up.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_ptz_up" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_ptz_command.assert_called_once() + + reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + +async def test_ptz_move_service( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test ptz_move entity service using PTZ button entity.""" + with 
patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_ptz_up" + + await hass.services.async_call( + DOMAIN, + SERVICE_PTZ_MOVE, + {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5}, + blocking=True, + ) + reolink_connect.set_ptz_command.assert_called_with(0, command="Up", speed=5) + + reolink_connect.set_ptz_command.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + DOMAIN, + SERVICE_PTZ_MOVE, + {ATTR_ENTITY_ID: entity_id, ATTR_SPEED: 5}, + blocking=True, + ) + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_host_button( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test host button entity with reboot.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BUTTON]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BUTTON}.{TEST_NVR_NAME}_restart" + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.reboot.assert_called_once() + + reolink_connect.reboot.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) diff --git a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 2d55f62ec74..40695861aaf 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -10,8 +10,9 @@ from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkErr from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL +from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.components.reolink.exceptions import ReolinkWebhookException from homeassistant.components.reolink.host import DEFAULT_TIMEOUT from homeassistant.config_entries import ConfigEntryState @@ -50,7 +51,7 @@ async def test_config_flow_manual_success( ) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -73,7 +74,7 @@ async def test_config_flow_manual_success( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -85,7 +86,7 @@ async def test_config_flow_errors( ) -> None: """Successful flow manually initialized by the user after some errors.""" result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": 
config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -206,7 +207,7 @@ async def test_config_flow_errors( CONF_PASSWORD: TEST_PASSWORD, CONF_HOST: TEST_HOST, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, ) @@ -217,7 +218,7 @@ async def test_config_flow_errors( CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -227,14 +228,14 @@ async def test_config_flow_errors( async def test_options_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test specifying non default settings using options flow.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: "rtsp", @@ -267,14 +268,14 @@ async def test_change_connection_settings( ) -> None: """Test changing connection settings by issuing a second user config flow.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -284,7 +285,7 @@ async def test_change_connection_settings( config_entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM @@ -310,14 +311,14 @@ async def test_change_connection_settings( async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: """Test a reauth flow.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -367,7 +368,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data ) assert result["type"] is FlowResultType.FORM @@ -389,7 +390,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, } assert result["options"] == { CONF_PROTOCOL: DEFAULT_PROTOCOL, @@ -442,14 +443,14 @@ async def test_dhcp_ip_update( ) -> None: """Test dhcp discovery aborts if already configured where the IP is updated if appropriate.""" config_entry = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC), data={ CONF_HOST: TEST_HOST, CONF_USERNAME: TEST_USERNAME, CONF_PASSWORD: TEST_PASSWORD, CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: 
DEFAULT_PROTOCOL, @@ -479,7 +480,7 @@ async def test_dhcp_ip_update( setattr(reolink_connect, attr, value) result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data + DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp_data ) for host in host_call_list: diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index fd54f298966..765b3426249 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -13,8 +13,8 @@ from homeassistant.components.reolink import ( DEVICE_UPDATE_INTERVAL, FIRMWARE_UPDATE_INTERVAL, NUM_CRED_ERRORS, - const, ) +from homeassistant.components.reolink.const import DOMAIN from homeassistant.config import async_process_ha_core_config from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform @@ -140,7 +140,7 @@ async def test_credential_error_three( reolink_connect.get_states.side_effect = CredentialsInvalidError("Test error") - issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" + issue_id = f"config_entry_reauth_{DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues freezer.tick(DEVICE_UPDATE_INTERVAL) @@ -414,14 +414,14 @@ async def test_migrate_entity_ids( reolink_connect.supported = mock_supported dev_entry = device_registry.async_get_or_create( - identifiers={(const.DOMAIN, original_dev_id)}, + identifiers={(DOMAIN, original_dev_id)}, config_entry_id=config_entry.entry_id, disabled_by=None, ) entity_registry.async_get_or_create( domain=domain, - platform=const.DOMAIN, + platform=DOMAIN, unique_id=original_id, config_entry=config_entry, suggested_object_id=original_id, @@ -429,16 +429,13 @@ async def test_migrate_entity_ids( device_id=dev_entry.id, ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, new_id) is None + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) is None - assert device_registry.async_get_device( - identifiers={(const.DOMAIN, original_dev_id)} - ) + assert device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) if new_dev_id != original_dev_id: assert ( - device_registry.async_get_device(identifiers={(const.DOMAIN, new_dev_id)}) - is None + device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) is None ) # setup CH 0 and host entities/device @@ -446,19 +443,15 @@ async def test_migrate_entity_ids( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert ( - entity_registry.async_get_entity_id(domain, const.DOMAIN, original_id) is None - ) - assert entity_registry.async_get_entity_id(domain, const.DOMAIN, new_id) + assert entity_registry.async_get_entity_id(domain, DOMAIN, original_id) is None + assert entity_registry.async_get_entity_id(domain, DOMAIN, new_id) if new_dev_id != original_dev_id: assert ( - device_registry.async_get_device( - identifiers={(const.DOMAIN, original_dev_id)} - ) + device_registry.async_get_device(identifiers={(DOMAIN, original_dev_id)}) is None ) - assert device_registry.async_get_device(identifiers={(const.DOMAIN, new_dev_id)}) + assert device_registry.async_get_device(identifiers={(DOMAIN, new_dev_id)}) async def 
test_no_repair_issue( @@ -472,11 +465,11 @@ async def test_no_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "https_webhook") not in issue_registry.issues - assert (const.DOMAIN, "webhook_url") not in issue_registry.issues - assert (const.DOMAIN, "enable_port") not in issue_registry.issues - assert (const.DOMAIN, "firmware_update") not in issue_registry.issues - assert (const.DOMAIN, "ssl") not in issue_registry.issues + assert (DOMAIN, "https_webhook") not in issue_registry.issues + assert (DOMAIN, "webhook_url") not in issue_registry.issues + assert (DOMAIN, "enable_port") not in issue_registry.issues + assert (DOMAIN, "firmware_update") not in issue_registry.issues + assert (DOMAIN, "ssl") not in issue_registry.issues async def test_https_repair_issue( @@ -503,7 +496,7 @@ async def test_https_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "https_webhook") in issue_registry.issues + assert (DOMAIN, "https_webhook") in issue_registry.issues async def test_ssl_repair_issue( @@ -533,7 +526,7 @@ async def test_ssl_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "ssl") in issue_registry.issues + assert (DOMAIN, "ssl") in issue_registry.issues @pytest.mark.parametrize("protocol", ["rtsp", "rtmp"]) @@ -553,7 +546,7 @@ async def test_port_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "enable_port") in issue_registry.issues + assert (DOMAIN, "enable_port") in issue_registry.issues async def test_webhook_repair_issue( @@ -576,7 +569,7 @@ async def test_webhook_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "webhook_url") in issue_registry.issues + assert (DOMAIN, "webhook_url") in issue_registry.issues async def test_firmware_repair_issue( @@ -590,4 +583,4 @@ async def test_firmware_repair_issue( assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - assert (const.DOMAIN, "firmware_update_host") in issue_registry.issues + assert (DOMAIN, "firmware_update_host") in issue_registry.issues diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 31985bd10f7..6351f683545 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -14,9 +14,8 @@ from homeassistant.components.media_source import ( async_resolve_media, ) from homeassistant.components.media_source.error import Unresolvable -from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL -from homeassistant.components.reolink.const import DOMAIN +from homeassistant.components.reolink.const import CONF_USE_HTTPS, DOMAIN from homeassistant.components.stream import DOMAIN as MEDIA_STREAM_DOMAIN from homeassistant.const import ( CONF_HOST, @@ -321,14 +320,14 @@ async def test_browsing_not_loaded( reolink_connect.get_host_data.side_effect = ReolinkError("Test error") config_entry2 = MockConfigEntry( - domain=const.DOMAIN, + domain=DOMAIN, unique_id=format_mac(TEST_MAC2), data={ CONF_HOST: TEST_HOST2, CONF_USERNAME: TEST_USERNAME2, CONF_PASSWORD: TEST_PASSWORD2, 
CONF_PORT: TEST_PORT, - const.CONF_USE_HTTPS: TEST_USE_HTTPS, + CONF_USE_HTTPS: TEST_USE_HTTPS, }, options={ CONF_PROTOCOL: DEFAULT_PROTOCOL, From b5d7eba4f680ed3fdb4ab74fe3ac7689138ca9c2 Mon Sep 17 00:00:00 2001 From: Hessel Date: Wed, 4 Sep 2024 14:00:38 +0200 Subject: [PATCH 1421/1635] Add new number component for setting the wallbox ICP current (#125209) * Add new number component for setting the wallbox ICP current * feat: Add number component for wallbox ICP current control --- homeassistant/components/wallbox/const.py | 4 + .../components/wallbox/coordinator.py | 29 +++++ homeassistant/components/wallbox/number.py | 11 ++ homeassistant/components/wallbox/sensor.py | 8 ++ homeassistant/components/wallbox/strings.json | 6 + tests/components/wallbox/__init__.py | 8 ++ tests/components/wallbox/const.py | 1 + tests/components/wallbox/test_number.py | 105 +++++++++++++++++- 8 files changed, 171 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/wallbox/const.py b/homeassistant/components/wallbox/const.py index 69633cbda22..c38b8967776 100644 --- a/homeassistant/components/wallbox/const.py +++ b/homeassistant/components/wallbox/const.py @@ -22,11 +22,15 @@ CHARGER_CURRENCY_KEY = "currency" CHARGER_DATA_KEY = "config_data" CHARGER_DEPOT_PRICE_KEY = "depot_price" CHARGER_ENERGY_PRICE_KEY = "energy_price" +CHARGER_FEATURES_KEY = "features" CHARGER_SERIAL_NUMBER_KEY = "serial_number" CHARGER_PART_NUMBER_KEY = "part_number" +CHARGER_PLAN_KEY = "plan" +CHARGER_POWER_BOOST_KEY = "POWER_BOOST" CHARGER_SOFTWARE_KEY = "software" CHARGER_MAX_AVAILABLE_POWER_KEY = "max_available_power" CHARGER_MAX_CHARGING_CURRENT_KEY = "max_charging_current" +CHARGER_MAX_ICP_CURRENT_KEY = "icp_max_current" CHARGER_PAUSE_RESUME_KEY = "paused" CHARGER_LOCKED_UNLOCKED_KEY = "locked" CHARGER_NAME_KEY = "name" diff --git a/homeassistant/components/wallbox/coordinator.py b/homeassistant/components/wallbox/coordinator.py index e24ccd28440..f3679551bc4 100644 --- a/homeassistant/components/wallbox/coordinator.py +++ b/homeassistant/components/wallbox/coordinator.py @@ -19,8 +19,12 @@ from .const import ( CHARGER_CURRENCY_KEY, CHARGER_DATA_KEY, CHARGER_ENERGY_PRICE_KEY, + CHARGER_FEATURES_KEY, CHARGER_LOCKED_UNLOCKED_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, + CHARGER_PLAN_KEY, + CHARGER_POWER_BOOST_KEY, CHARGER_STATUS_DESCRIPTION_KEY, CHARGER_STATUS_ID_KEY, CODE_KEY, @@ -130,6 +134,16 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): data[CHARGER_ENERGY_PRICE_KEY] = data[CHARGER_DATA_KEY][ CHARGER_ENERGY_PRICE_KEY ] + # Only show max_icp_current if power_boost is available in the wallbox unit: + if ( + data[CHARGER_DATA_KEY].get(CHARGER_MAX_ICP_CURRENT_KEY, 0) > 0 + and CHARGER_POWER_BOOST_KEY + in data[CHARGER_DATA_KEY][CHARGER_PLAN_KEY][CHARGER_FEATURES_KEY] + ): + data[CHARGER_MAX_ICP_CURRENT_KEY] = data[CHARGER_DATA_KEY][ + CHARGER_MAX_ICP_CURRENT_KEY + ] + data[CHARGER_CURRENCY_KEY] = ( f"{data[CHARGER_DATA_KEY][CHARGER_CURRENCY_KEY][CODE_KEY]}/kWh" ) @@ -160,6 +174,21 @@ class WallboxCoordinator(DataUpdateCoordinator[dict[str, Any]]): ) await self.async_request_refresh() + @_require_authentication + def _set_icp_current(self, icp_current: float) -> None: + """Set maximum icp current for Wallbox.""" + try: + self._wallbox.setIcpMaxCurrent(self._station, icp_current) + except requests.exceptions.HTTPError as wallbox_connection_error: + if wallbox_connection_error.response.status_code == 403: + raise InvalidAuth from wallbox_connection_error + raise + + async 
def async_set_icp_current(self, icp_current: float) -> None: + """Set maximum icp current for Wallbox.""" + await self.hass.async_add_executor_job(self._set_icp_current, icp_current) + await self.async_request_refresh() + @_require_authentication def _set_energy_cost(self, energy_cost: float) -> None: """Set energy cost for Wallbox.""" diff --git a/homeassistant/components/wallbox/number.py b/homeassistant/components/wallbox/number.py index 8ae4c473299..24cdd16f99d 100644 --- a/homeassistant/components/wallbox/number.py +++ b/homeassistant/components/wallbox/number.py @@ -21,6 +21,7 @@ from .const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_PART_NUMBER_KEY, CHARGER_SERIAL_NUMBER_KEY, DOMAIN, @@ -67,6 +68,16 @@ NUMBER_TYPES: dict[str, WallboxNumberEntityDescription] = { set_value_fn=lambda coordinator: coordinator.async_set_energy_cost, native_step=0.01, ), + CHARGER_MAX_ICP_CURRENT_KEY: WallboxNumberEntityDescription( + key=CHARGER_MAX_ICP_CURRENT_KEY, + translation_key="maximum_icp_current", + max_value_fn=lambda coordinator: cast( + float, coordinator.data[CHARGER_MAX_AVAILABLE_POWER_KEY] + ), + min_value_fn=lambda _: 6, + set_value_fn=lambda coordinator: coordinator.async_set_icp_current, + native_step=1, + ), } diff --git a/homeassistant/components/wallbox/sensor.py b/homeassistant/components/wallbox/sensor.py index eadbc04dca2..18d8afb5612 100644 --- a/homeassistant/components/wallbox/sensor.py +++ b/homeassistant/components/wallbox/sensor.py @@ -38,6 +38,7 @@ from .const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_SERIAL_NUMBER_KEY, CHARGER_STATE_OF_CHARGE_KEY, CHARGER_STATUS_DESCRIPTION_KEY, @@ -145,6 +146,13 @@ SENSOR_TYPES: dict[str, WallboxSensorEntityDescription] = { device_class=SensorDeviceClass.CURRENT, state_class=SensorStateClass.MEASUREMENT, ), + CHARGER_MAX_ICP_CURRENT_KEY: WallboxSensorEntityDescription( + key=CHARGER_MAX_ICP_CURRENT_KEY, + translation_key=CHARGER_MAX_ICP_CURRENT_KEY, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + ), } diff --git a/homeassistant/components/wallbox/strings.json b/homeassistant/components/wallbox/strings.json index dd96cebf605..f4378b328d8 100644 --- a/homeassistant/components/wallbox/strings.json +++ b/homeassistant/components/wallbox/strings.json @@ -38,6 +38,9 @@ }, "energy_price": { "name": "Energy price" + }, + "maximum_icp_current": { + "name": "Maximum ICP current" } }, "sensor": { @@ -79,6 +82,9 @@ }, "max_charging_current": { "name": "Max charging current" + }, + "icp_max_current": { + "name": "Max ICP current" } }, "switch": { diff --git a/tests/components/wallbox/__init__.py b/tests/components/wallbox/__init__.py index f21e895b3a7..f4258ea0d49 100644 --- a/tests/components/wallbox/__init__.py +++ b/tests/components/wallbox/__init__.py @@ -14,11 +14,15 @@ from homeassistant.components.wallbox.const import ( CHARGER_CURRENT_VERSION_KEY, CHARGER_DATA_KEY, CHARGER_ENERGY_PRICE_KEY, + CHARGER_FEATURES_KEY, CHARGER_LOCKED_UNLOCKED_KEY, CHARGER_MAX_AVAILABLE_POWER_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, CHARGER_NAME_KEY, CHARGER_PART_NUMBER_KEY, + CHARGER_PLAN_KEY, + CHARGER_POWER_BOOST_KEY, CHARGER_SERIAL_NUMBER_KEY, CHARGER_SOFTWARE_KEY, CHARGER_STATUS_ID_KEY, @@ -45,6 +49,8 @@ test_response = { 
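
Aside on the wallbox write path added above: the wallbox client is a blocking, requests-based library, so the coordinator runs the setter in an executor job and then requests a refresh so the related entities pick up the new ICP current. A rough, self-contained sketch of that flow; FakeWallbox and FakeCoordinator are stand-ins, and only the setIcpMaxCurrent call name mirrors the patch.

    import asyncio


    class FakeWallbox:
        """Stand-in for the blocking, requests-based wallbox client."""

        def setIcpMaxCurrent(self, station: str, value: float) -> None:
            print(f"setting icp_max_current={value} for charger {station}")


    class FakeCoordinator:
        """Minimal sketch of the write path behind the new number entity."""

        def __init__(self) -> None:
            self._wallbox = FakeWallbox()
            self._station = "12345"

        def _set_icp_current(self, icp_current: float) -> None:
            # Blocking call; the integration additionally maps an HTTP 403
            # response onto InvalidAuth at this point.
            self._wallbox.setIcpMaxCurrent(self._station, icp_current)

        async def async_set_icp_current(self, icp_current: float) -> None:
            # Keep the blocking client off the event loop, then refresh so the
            # ICP current sensor and number entity see the new value.
            loop = asyncio.get_running_loop()
            await loop.run_in_executor(None, self._set_icp_current, icp_current)
            await self.async_request_refresh()

        async def async_request_refresh(self) -> None:
            print("refreshing coordinator data")


    asyncio.run(FakeCoordinator().async_set_icp_current(10))

In the patch the blocking setter also raises InvalidAuth on a 403, which the new test_wallbox_number_class_icp_energy_auth_error test asserts.
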
CHARGER_PART_NUMBER_KEY: "PLP1-0-2-4-9-002-E", CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"}, CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"}, + CHARGER_MAX_ICP_CURRENT_KEY: 20, + CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]}, }, } @@ -64,6 +70,8 @@ test_response_bidir = { CHARGER_PART_NUMBER_KEY: "QSP1-0-2-4-9-002-E", CHARGER_SOFTWARE_KEY: {CHARGER_CURRENT_VERSION_KEY: "5.5.10"}, CHARGER_CURRENCY_KEY: {"code": "EUR/kWh"}, + CHARGER_MAX_ICP_CURRENT_KEY: 20, + CHARGER_PLAN_KEY: {CHARGER_FEATURES_KEY: [CHARGER_POWER_BOOST_KEY]}, }, } diff --git a/tests/components/wallbox/const.py b/tests/components/wallbox/const.py index 452b3af0af8..a86ae9fc3b9 100644 --- a/tests/components/wallbox/const.py +++ b/tests/components/wallbox/const.py @@ -9,6 +9,7 @@ STATUS = "status" MOCK_NUMBER_ENTITY_ID = "number.wallbox_wallboxname_maximum_charging_current" MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID = "number.wallbox_wallboxname_energy_price" +MOCK_NUMBER_ENTITY_ICP_CURRENT_ID = "number.wallbox_wallboxname_maximum_icp_current" MOCK_LOCK_ENTITY_ID = "lock.wallbox_wallboxname_lock" MOCK_SENSOR_CHARGING_SPEED_ID = "sensor.wallbox_wallboxname_charging_speed" MOCK_SENSOR_CHARGING_POWER_ID = "sensor.wallbox_wallboxname_charging_power" diff --git a/tests/components/wallbox/test_number.py b/tests/components/wallbox/test_number.py index 5d782224ce5..0a8b1aa1207 100644 --- a/tests/components/wallbox/test_number.py +++ b/tests/components/wallbox/test_number.py @@ -6,9 +6,12 @@ import pytest import requests_mock from homeassistant.components.input_number import ATTR_VALUE, SERVICE_SET_VALUE +from homeassistant.components.number import DOMAIN as NUMBER_DOMAIN +from homeassistant.components.wallbox import InvalidAuth from homeassistant.components.wallbox.const import ( CHARGER_ENERGY_PRICE_KEY, CHARGER_MAX_CHARGING_CURRENT_KEY, + CHARGER_MAX_ICP_CURRENT_KEY, ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -20,7 +23,11 @@ from . 
import ( setup_integration_bidir, setup_integration_platform_not_ready, ) -from .const import MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID, MOCK_NUMBER_ENTITY_ID +from .const import ( + MOCK_NUMBER_ENTITY_ENERGY_PRICE_ID, + MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + MOCK_NUMBER_ENTITY_ID, +) from tests.common import MockConfigEntry @@ -212,3 +219,99 @@ async def test_wallbox_number_class_platform_not_ready( assert state is None await hass.config_entries.async_unload(entry.entry_id) + + +async def test_wallbox_number_class_icp_energy( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test wallbox sensor class.""" + + await setup_integration(hass, entry) + + with requests_mock.Mocker() as mock_request: + mock_request.get( + "https://user-api.wall-box.com/users/signin", + json=authorisation_response, + status_code=200, + ) + + mock_request.post( + "https://api.wall-box.com/chargers/config/12345", + json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, + status_code=200, + ) + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + ATTR_VALUE: 10, + }, + blocking=True, + ) + await hass.config_entries.async_unload(entry.entry_id) + + +async def test_wallbox_number_class_icp_energy_auth_error( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test wallbox sensor class.""" + + await setup_integration(hass, entry) + + with requests_mock.Mocker() as mock_request: + mock_request.get( + "https://user-api.wall-box.com/users/signin", + json=authorisation_response, + status_code=200, + ) + mock_request.post( + "https://api.wall-box.com/chargers/config/12345", + json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, + status_code=403, + ) + + with pytest.raises(InvalidAuth): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + ATTR_VALUE: 10, + }, + blocking=True, + ) + await hass.config_entries.async_unload(entry.entry_id) + + +async def test_wallbox_number_class_icp_energy_connection_error( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test wallbox sensor class.""" + + await setup_integration(hass, entry) + + with requests_mock.Mocker() as mock_request: + mock_request.get( + "https://user-api.wall-box.com/users/signin", + json=authorisation_response, + status_code=200, + ) + mock_request.post( + "https://api.wall-box.com/chargers/config/12345", + json={CHARGER_MAX_ICP_CURRENT_KEY: 10}, + status_code=404, + ) + + with pytest.raises(ConnectionError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + { + ATTR_ENTITY_ID: MOCK_NUMBER_ENTITY_ICP_CURRENT_ID, + ATTR_VALUE: 10, + }, + blocking=True, + ) + await hass.config_entries.async_unload(entry.entry_id) From a1ecefee2104d2f40b42aa3b8033a55b6a9a420e Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 4 Sep 2024 02:35:52 -1000 Subject: [PATCH 1422/1635] Bump aioesphomeapi to 25.3.2 (#125188) changelog: https://github.com/esphome/aioesphomeapi/compare/v25.3.1...v25.3.2 --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 9d42b7206e3..233015b13ba 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==25.3.1", + "aioesphomeapi==25.3.2", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 99721e57d61..35dd9071f13 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.3.1 +aioesphomeapi==25.3.2 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c358c8e3445..8940fd0649c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.3.1 +aioesphomeapi==25.3.2 # homeassistant.components.flo aioflo==2021.11.0 From 5c35ccb9caf44f7b215db17cc23f161f27475c9c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michal=20J=C3=A1l?= Date: Wed, 4 Sep 2024 15:03:59 +0200 Subject: [PATCH 1423/1635] Allow Switchbot users to force nightlatch (#124326) * Add option to force nightlatch operation mode * Fix format * Make the new option available only for lock pro entry * use senor_type instead of switchbot model + tests * Update homeassistant/components/switchbot/lock.py --------- Co-authored-by: Joost Lekkerkerker --- .../components/switchbot/config_flow.py | 16 ++++- homeassistant/components/switchbot/const.py | 2 + homeassistant/components/switchbot/lock.py | 12 ++-- .../components/switchbot/strings.json | 3 +- .../components/switchbot/test_config_flow.py | 63 +++++++++++++++++++ 5 files changed, 90 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/switchbot/config_flow.py b/homeassistant/components/switchbot/config_flow.py index a1c947fd611..0468db5618a 100644 --- a/homeassistant/components/switchbot/config_flow.py +++ b/homeassistant/components/switchbot/config_flow.py @@ -38,13 +38,16 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import ( CONF_ENCRYPTION_KEY, CONF_KEY_ID, + CONF_LOCK_NIGHTLATCH, CONF_RETRY_COUNT, CONNECTABLE_SUPPORTED_MODEL_TYPES, + DEFAULT_LOCK_NIGHTLATCH, DEFAULT_RETRY_COUNT, DOMAIN, NON_CONNECTABLE_SUPPORTED_MODEL_TYPES, SUPPORTED_LOCK_MODELS, SUPPORTED_MODEL_TYPES, + SupportedModels, ) _LOGGER = logging.getLogger(__name__) @@ -355,7 +358,7 @@ class SwitchbotOptionsFlowHandler(OptionsFlow): # Update common entity options for all other entities. 
return self.async_create_entry(title="", data=user_input) - options = { + options: dict[vol.Optional, Any] = { vol.Optional( CONF_RETRY_COUNT, default=self.config_entry.options.get( @@ -363,5 +366,16 @@ class SwitchbotOptionsFlowHandler(OptionsFlow): ), ): int } + if self.config_entry.data.get(CONF_SENSOR_TYPE) == SupportedModels.LOCK_PRO: + options.update( + { + vol.Optional( + CONF_LOCK_NIGHTLATCH, + default=self.config_entry.options.get( + CONF_LOCK_NIGHTLATCH, DEFAULT_LOCK_NIGHTLATCH + ), + ): bool + } + ) return self.async_show_form(step_id="init", data_schema=vol.Schema(options)) diff --git a/homeassistant/components/switchbot/const.py b/homeassistant/components/switchbot/const.py index 0a1ac01e530..bd727edfea4 100644 --- a/homeassistant/components/switchbot/const.py +++ b/homeassistant/components/switchbot/const.py @@ -64,11 +64,13 @@ HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = { # Config Defaults DEFAULT_RETRY_COUNT = 3 +DEFAULT_LOCK_NIGHTLATCH = False # Config Options CONF_RETRY_COUNT = "retry_count" CONF_KEY_ID = "key_id" CONF_ENCRYPTION_KEY = "encryption_key" +CONF_LOCK_NIGHTLATCH = "lock_force_nightlatch" # Deprecated config Entry Options to be removed in 2023.4 CONF_TIME_BETWEEN_UPDATE_COMMAND = "update_time" diff --git a/homeassistant/components/switchbot/lock.py b/homeassistant/components/switchbot/lock.py index cb41d14cf66..a3bee5661b2 100644 --- a/homeassistant/components/switchbot/lock.py +++ b/homeassistant/components/switchbot/lock.py @@ -9,6 +9,7 @@ from homeassistant.components.lock import LockEntity, LockEntityFeature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from .const import CONF_LOCK_NIGHTLATCH, DEFAULT_LOCK_NIGHTLATCH from .coordinator import SwitchbotConfigEntry, SwitchbotDataUpdateCoordinator from .entity import SwitchbotEntity @@ -19,7 +20,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up Switchbot lock based on a config entry.""" - async_add_entities([(SwitchBotLock(entry.runtime_data))]) + force_nightlatch = entry.options.get(CONF_LOCK_NIGHTLATCH, DEFAULT_LOCK_NIGHTLATCH) + async_add_entities([SwitchBotLock(entry.runtime_data, force_nightlatch)]) # noinspection PyAbstractClass @@ -30,11 +32,13 @@ class SwitchBotLock(SwitchbotEntity, LockEntity): _attr_name = None _device: switchbot.SwitchbotLock - def __init__(self, coordinator: SwitchbotDataUpdateCoordinator) -> None: + def __init__( + self, coordinator: SwitchbotDataUpdateCoordinator, force_nightlatch + ) -> None: """Initialize the entity.""" super().__init__(coordinator) self._async_update_attrs() - if self._device.is_night_latch_enabled(): + if self._device.is_night_latch_enabled() or force_nightlatch: self._attr_supported_features = LockEntityFeature.OPEN def _async_update_attrs(self) -> None: @@ -55,7 +59,7 @@ class SwitchBotLock(SwitchbotEntity, LockEntity): async def async_unlock(self, **kwargs: Any) -> None: """Unlock the lock.""" - if self._device.is_night_latch_enabled(): + if self._attr_supported_features & (LockEntityFeature.OPEN): self._last_run_success = await self._device.unlock_without_unlatch() else: self._last_run_success = await self._device.unlock() diff --git a/homeassistant/components/switchbot/strings.json b/homeassistant/components/switchbot/strings.json index a20b4939f8f..80ca32d4826 100644 --- a/homeassistant/components/switchbot/strings.json +++ b/homeassistant/components/switchbot/strings.json @@ -54,7 +54,8 @@ "step": { "init": { "data": { - "retry_count": "Retry 
count" + "retry_count": "Retry count", + "lock_force_nightlatch": "Force Nightlatch operation mode" } } } diff --git a/tests/components/switchbot/test_config_flow.py b/tests/components/switchbot/test_config_flow.py index 182e9457f22..b0fba2a5f18 100644 --- a/tests/components/switchbot/test_config_flow.py +++ b/tests/components/switchbot/test_config_flow.py @@ -7,6 +7,7 @@ from switchbot import SwitchbotAccountConnectionError, SwitchbotAuthenticationEr from homeassistant.components.switchbot.const import ( CONF_ENCRYPTION_KEY, CONF_KEY_ID, + CONF_LOCK_NIGHTLATCH, CONF_RETRY_COUNT, ) from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER @@ -782,3 +783,65 @@ async def test_options_flow(hass: HomeAssistant) -> None: assert len(mock_setup_entry.mock_calls) == 1 assert entry.options[CONF_RETRY_COUNT] == 6 + + +async def test_options_flow_lock_pro(hass: HomeAssistant) -> None: + """Test updating options.""" + entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_ADDRESS: "aa:bb:cc:dd:ee:ff", + CONF_NAME: "test-name", + CONF_PASSWORD: "test-password", + CONF_SENSOR_TYPE: "lock_pro", + }, + options={CONF_RETRY_COUNT: 10}, + unique_id="aabbccddeeff", + ) + entry.add_to_hass(hass) + + # Test Force night_latch should be disabled by default. + with patch_async_setup_entry() as mock_setup_entry: + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] is None + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_RETRY_COUNT: 3, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_LOCK_NIGHTLATCH] is False + + assert len(mock_setup_entry.mock_calls) == 1 + + # Test Set force night_latch to be enabled. 
+ + with patch_async_setup_entry() as mock_setup_entry: + result = await hass.config_entries.options.async_init(entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + assert result["errors"] is None + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_LOCK_NIGHTLATCH: True, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"][CONF_LOCK_NIGHTLATCH] is True + + assert len(mock_setup_entry.mock_calls) == 0 + + assert entry.options[CONF_LOCK_NIGHTLATCH] is True From 1bc63a61be8057850f68e0ff4e0c94563d5a41c9 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 4 Sep 2024 15:05:28 +0200 Subject: [PATCH 1424/1635] Fix enum lookup (#125220) --- homeassistant/components/google_cloud/tts.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/google_cloud/tts.py b/homeassistant/components/google_cloud/tts.py index d65a743c015..60cdfbee3ab 100644 --- a/homeassistant/components/google_cloud/tts.py +++ b/homeassistant/components/google_cloud/tts.py @@ -172,12 +172,10 @@ class BaseGoogleCloudProvider: _LOGGER.error("Error: %s when validating options: %s", err, options) return None, None - encoding: texttospeech.AudioEncoding = texttospeech.AudioEncoding[ - options[CONF_ENCODING] - ] # type: ignore[misc] - gender: texttospeech.SsmlVoiceGender | None = texttospeech.SsmlVoiceGender[ + encoding = texttospeech.AudioEncoding(options[CONF_ENCODING]) + gender: texttospeech.SsmlVoiceGender | None = texttospeech.SsmlVoiceGender( options[CONF_GENDER] - ] # type: ignore[misc] + ) voice = options[CONF_VOICE] if voice: gender = None From 4d96ed4c686d83411b622119f4f137873c151218 Mon Sep 17 00:00:00 2001 From: Robert Resch Date: Wed, 4 Sep 2024 15:05:51 +0200 Subject: [PATCH 1425/1635] Update modified_at datetime on storage collection changes (#125218) --- homeassistant/helpers/collection.py | 37 +++++++++++-- tests/helpers/test_collection.py | 81 +++++++++++++++++++++++++++++ 2 files changed, 115 insertions(+), 3 deletions(-) diff --git a/homeassistant/helpers/collection.py b/homeassistant/helpers/collection.py index 9151a9dfc6b..86d3450c3a0 100644 --- a/homeassistant/helpers/collection.py +++ b/homeassistant/helpers/collection.py @@ -7,6 +7,7 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from dataclasses import dataclass from functools import partial +from hashlib import md5 from itertools import groupby import logging from operator import attrgetter @@ -25,6 +26,7 @@ from homeassistant.util import slugify from . 
import entity_registry from .entity import Entity from .entity_component import EntityComponent +from .json import json_bytes from .storage import Store from .typing import ConfigType, VolDictType @@ -50,6 +52,7 @@ class CollectionChange: change_type: str item_id: str item: Any + item_hash: str | None = None type ChangeListener = Callable[ @@ -273,7 +276,9 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( await self.notify_changes( [ - CollectionChange(CHANGE_ADDED, item[CONF_ID], item) + CollectionChange( + CHANGE_ADDED, item[CONF_ID], item, self._hash_item(item) + ) for item in raw_storage["items"] ] ) @@ -313,7 +318,16 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( item = self._create_item(item_id, validated_data) self.data[item_id] = item self._async_schedule_save() - await self.notify_changes([CollectionChange(CHANGE_ADDED, item_id, item)]) + await self.notify_changes( + [ + CollectionChange( + CHANGE_ADDED, + item_id, + item, + self._hash_item(self._serialize_item(item_id, item)), + ) + ] + ) return item async def async_update_item(self, item_id: str, updates: dict) -> _ItemT: @@ -331,7 +345,16 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( self.data[item_id] = updated self._async_schedule_save() - await self.notify_changes([CollectionChange(CHANGE_UPDATED, item_id, updated)]) + await self.notify_changes( + [ + CollectionChange( + CHANGE_UPDATED, + item_id, + updated, + self._hash_item(self._serialize_item(item_id, updated)), + ) + ] + ) return self.data[item_id] @@ -365,6 +388,10 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( def _data_to_save(self) -> _StoreT: """Return JSON-compatible date for storing to file.""" + def _hash_item(self, item: dict) -> str: + """Return a hash of the item.""" + return md5(json_bytes(item)).hexdigest() + class DictStorageCollection(StorageCollection[dict, SerializedStorageCollection]): """A specialized StorageCollection where the items are untyped dicts.""" @@ -464,6 +491,10 @@ class _CollectionLifeCycle(Generic[_EntityT]): async def _update_entity(self, change_set: CollectionChange) -> None: if entity := self.entities.get(change_set.item_id): + if change_set.item_hash: + self.ent_reg.async_update_entity_options( + entity.entity_id, "collection", {"hash": change_set.item_hash} + ) await entity.async_update_config(change_set.item) async def _collection_changed(self, change_set: Iterable[CollectionChange]) -> None: diff --git a/tests/helpers/test_collection.py b/tests/helpers/test_collection.py index f0287218d7f..f564f85ec3b 100644 --- a/tests/helpers/test_collection.py +++ b/tests/helpers/test_collection.py @@ -2,8 +2,10 @@ from __future__ import annotations +from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -15,6 +17,7 @@ from homeassistant.helpers import ( storage, ) from homeassistant.helpers.typing import ConfigType +from homeassistant.util.dt import utcnow from tests.common import flush_store from tests.typing import WebSocketGenerator @@ -254,6 +257,84 @@ async def test_storage_collection(hass: HomeAssistant) -> None: } +async def test_storage_collection_update_modifiet_at( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that updating a storage collection will update the modified_at datetime in the entity registry.""" + + entities: dict[str, TestEntity] = {} + + class 
TestEntity(MockEntity): + """Entity that is config based.""" + + def __init__(self, config: ConfigType) -> None: + """Initialize entity.""" + super().__init__(config) + self._state = "initial" + + @classmethod + def from_storage(cls, config: ConfigType) -> TestEntity: + """Create instance from storage.""" + obj = super().from_storage(config) + entities[obj.unique_id] = obj + return obj + + @property + def state(self) -> str: + """Return state of entity.""" + return self._state + + def set_state(self, value: str) -> None: + """Set value.""" + self._state = value + self.async_write_ha_state() + + store = storage.Store(hass, 1, "test-data") + data = {"id": "mock-1", "name": "Mock 1", "data": 1} + await store.async_save( + { + "items": [ + data, + ] + } + ) + id_manager = collection.IDManager() + ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) + await ent_comp.async_setup({}) + coll = MockStorageCollection(store, id_manager) + collection.sync_entity_lifecycle(hass, "test", "test", ent_comp, coll, TestEntity) + changes = track_changes(coll) + + await coll.async_load() + assert id_manager.has_id("mock-1") + assert len(changes) == 1 + assert changes[0] == (collection.CHANGE_ADDED, "mock-1", data) + + modified_1 = entity_registry.async_get("test.mock_1").modified_at + assert modified_1 == utcnow() + + freezer.tick(timedelta(minutes=1)) + + updated_item = await coll.async_update_item("mock-1", {"data": 2}) + assert id_manager.has_id("mock-1") + assert updated_item == {"id": "mock-1", "name": "Mock 1", "data": 2} + assert len(changes) == 2 + assert changes[1] == (collection.CHANGE_UPDATED, "mock-1", updated_item) + + modified_2 = entity_registry.async_get("test.mock_1").modified_at + assert modified_2 > modified_1 + assert modified_2 == utcnow() + + freezer.tick(timedelta(minutes=1)) + + entities["mock-1"].set_state("second") + + modified_3 = entity_registry.async_get("test.mock_1").modified_at + assert modified_3 == modified_2 + + async def test_attach_entity_component_collection(hass: HomeAssistant) -> None: """Test attaching collection to entity component.""" ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) From da0d1b71ced0d15898131038395ec67238ee914d Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Wed, 4 Sep 2024 16:30:28 +0300 Subject: [PATCH 1426/1635] Update Anthropic default model to Haiku (#125225) --- homeassistant/components/anthropic/const.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/anthropic/const.py b/homeassistant/components/anthropic/const.py index 4ccf2c88faa..0dbf9c51ac1 100644 --- a/homeassistant/components/anthropic/const.py +++ b/homeassistant/components/anthropic/const.py @@ -8,7 +8,7 @@ LOGGER = logging.getLogger(__package__) CONF_RECOMMENDED = "recommended" CONF_PROMPT = "prompt" CONF_CHAT_MODEL = "chat_model" -RECOMMENDED_CHAT_MODEL = "claude-3-5-sonnet-20240620" +RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307" CONF_MAX_TOKENS = "max_tokens" RECOMMENDED_MAX_TOKENS = 1024 CONF_TEMPERATURE = "temperature" From b557e9e8265cbe5c8220cdd4302ef13385be5525 Mon Sep 17 00:00:00 2001 From: Iskra kranj <162285659+iskrakranj@users.noreply.github.com> Date: Wed, 4 Sep 2024 15:33:23 +0200 Subject: [PATCH 1427/1635] Add Iskra integration (#121488) * Add iskra integration * iskra non resettable counters naming fix * added iskra config_flow test * fixed iskra integration according to code review * changed iskra config flow test * iskra integration, fixed codeowners * Removed counters code & minor 
fixes * added comment * Update homeassistant/components/iskra/__init__.py Co-authored-by: Joost Lekkerkerker * Updated Iskra integration according to review * Update homeassistant/components/iskra/strings.json Co-authored-by: Joost Lekkerkerker * Updated iskra integration according to review * minor iskra integration change * iskra integration changes according to review * iskra integration changes according to review * Changed iskra integration according to review * added iskra config_flow range validation * Fixed tests for iskra integration * Update homeassistant/components/iskra/coordinator.py * Update homeassistant/components/iskra/config_flow.py Co-authored-by: Joost Lekkerkerker * Fixed iskra integration according to review * Changed voluptuous schema for iskra integration and added data_descriptions * Iskra integration tests lint error fix --------- Co-authored-by: Joost Lekkerkerker --- CODEOWNERS | 2 + homeassistant/components/iskra/__init__.py | 100 ++++++ homeassistant/components/iskra/config_flow.py | 253 +++++++++++++++ homeassistant/components/iskra/const.py | 25 ++ homeassistant/components/iskra/coordinator.py | 57 ++++ homeassistant/components/iskra/entity.py | 38 +++ homeassistant/components/iskra/manifest.json | 11 + homeassistant/components/iskra/sensor.py | 229 +++++++++++++ homeassistant/components/iskra/strings.json | 92 ++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/iskra/__init__.py | 1 + tests/components/iskra/conftest.py | 46 +++ tests/components/iskra/const.py | 10 + tests/components/iskra/test_config_flow.py | 300 ++++++++++++++++++ 17 files changed, 1177 insertions(+) create mode 100644 homeassistant/components/iskra/__init__.py create mode 100644 homeassistant/components/iskra/config_flow.py create mode 100644 homeassistant/components/iskra/const.py create mode 100644 homeassistant/components/iskra/coordinator.py create mode 100644 homeassistant/components/iskra/entity.py create mode 100644 homeassistant/components/iskra/manifest.json create mode 100644 homeassistant/components/iskra/sensor.py create mode 100644 homeassistant/components/iskra/strings.json create mode 100644 tests/components/iskra/__init__.py create mode 100644 tests/components/iskra/conftest.py create mode 100644 tests/components/iskra/const.py create mode 100644 tests/components/iskra/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index 596795d4221..42d96ceb941 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -728,6 +728,8 @@ build.json @home-assistant/supervisor /tests/components/iron_os/ @tr4nt0r /homeassistant/components/isal/ @bdraco /tests/components/isal/ @bdraco +/homeassistant/components/iskra/ @iskramis +/tests/components/iskra/ @iskramis /homeassistant/components/islamic_prayer_times/ @engrbm87 @cpfair /tests/components/islamic_prayer_times/ @engrbm87 @cpfair /homeassistant/components/israel_rail/ @shaiu diff --git a/homeassistant/components/iskra/__init__.py b/homeassistant/components/iskra/__init__.py new file mode 100644 index 00000000000..b841da9df26 --- /dev/null +++ b/homeassistant/components/iskra/__init__.py @@ -0,0 +1,100 @@ +"""The iskra integration.""" + +from __future__ import annotations + +from pyiskra.adapters import Modbus, RestAPI +from pyiskra.devices import Device +from pyiskra.exceptions import DeviceConnectionError, DeviceNotSupported, NotAuthorised + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const 
import ( + CONF_ADDRESS, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN, MANUFACTURER +from .coordinator import IskraDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + + +type IskraConfigEntry = ConfigEntry[list[IskraDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: IskraConfigEntry) -> bool: + """Set up iskra device from a config entry.""" + conf = entry.data + adapter = None + + if conf[CONF_PROTOCOL] == "modbus_tcp": + adapter = Modbus( + ip_address=conf[CONF_HOST], + protocol="tcp", + port=conf[CONF_PORT], + modbus_address=conf[CONF_ADDRESS], + ) + elif conf[CONF_PROTOCOL] == "rest_api": + authentication = None + if (username := conf.get(CONF_USERNAME)) is not None and ( + password := conf.get(CONF_PASSWORD) + ) is not None: + authentication = { + "username": username, + "password": password, + } + adapter = RestAPI(ip_address=conf[CONF_HOST], authentication=authentication) + + # Try connecting to the device and create pyiskra device object + try: + base_device = await Device.create_device(adapter) + except DeviceConnectionError as e: + raise ConfigEntryNotReady("Cannot connect to the device") from e + except NotAuthorised as e: + raise ConfigEntryNotReady("Not authorised to connect to the device") from e + except DeviceNotSupported as e: + raise ConfigEntryNotReady("Device not supported") from e + + # Initialize the device + await base_device.init() + + # if the device is a gateway, add all child devices, otherwise add the device itself. + if base_device.is_gateway: + # Add the gateway device to the device registry + device_registry = dr.async_get(hass) + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, base_device.serial)}, + manufacturer=MANUFACTURER, + name=base_device.model, + model=base_device.model, + sw_version=base_device.fw_version, + ) + + coordinators = [ + IskraDataUpdateCoordinator(hass, child_device) + for child_device in base_device.get_child_devices() + ] + else: + coordinators = [IskraDataUpdateCoordinator(hass, base_device)] + + for coordinator in coordinators: + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinators + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: IskraConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/iskra/config_flow.py b/homeassistant/components/iskra/config_flow.py new file mode 100644 index 00000000000..b67b9ba3839 --- /dev/null +++ b/homeassistant/components/iskra/config_flow.py @@ -0,0 +1,253 @@ +"""Config flow for iskra integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pyiskra.adapters import Modbus, RestAPI +from pyiskra.exceptions import ( + DeviceConnectionError, + DeviceTimeoutError, + InvalidResponseCode, + NotAuthorised, +) +from pyiskra.helper import BasicInfo +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, +) +from 
homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + SelectSelector, + SelectSelectorConfig, + SelectSelectorMode, +) + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + vol.Required(CONF_PROTOCOL, default="rest_api"): SelectSelector( + SelectSelectorConfig( + options=["rest_api", "modbus_tcp"], + mode=SelectSelectorMode.LIST, + translation_key="protocol", + ), + ), + } +) + +STEP_AUTHENTICATION_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + +# CONF_ADDRESS validation is done later in code, as if ranges are set in voluptuous it turns into a slider +STEP_MODBUS_TCP_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_PORT, default=10001): vol.All( + vol.Coerce(int), vol.Range(min=0, max=65535) + ), + vol.Required(CONF_ADDRESS, default=33): NumberSelector( + NumberSelectorConfig(min=1, max=255, mode=NumberSelectorMode.BOX) + ), + } +) + + +async def test_rest_api_connection(host: str, user_input: dict[str, Any]) -> BasicInfo: + """Check if the RestAPI requires authentication.""" + + rest_api = RestAPI(ip_address=host, authentication=user_input) + try: + basic_info = await rest_api.get_basic_info() + except NotAuthorised as e: + raise NotAuthorised from e + except (DeviceConnectionError, DeviceTimeoutError, InvalidResponseCode) as e: + raise CannotConnect from e + except Exception as e: + _LOGGER.error("Unexpected exception: %s", e) + raise UnknownException from e + + return basic_info + + +async def test_modbus_connection(host: str, user_input: dict[str, Any]) -> BasicInfo: + """Test the Modbus connection.""" + modbus_api = Modbus( + ip_address=host, + protocol="tcp", + port=user_input[CONF_PORT], + modbus_address=user_input[CONF_ADDRESS], + ) + + try: + basic_info = await modbus_api.get_basic_info() + except NotAuthorised as e: + raise NotAuthorised from e + except (DeviceConnectionError, DeviceTimeoutError, InvalidResponseCode) as e: + raise CannotConnect from e + except Exception as e: + _LOGGER.error("Unexpected exception: %s", e) + raise UnknownException from e + + return basic_info + + +class IskraConfigFlowFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for iskra.""" + + VERSION = 1 + host: str + protocol: str + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by the user.""" + errors: dict[str, str] = {} + if user_input is not None: + self.host = user_input[CONF_HOST] + self.protocol = user_input[CONF_PROTOCOL] + if self.protocol == "rest_api": + # Check if authentication is required. + try: + device_info = await test_rest_api_connection(self.host, user_input) + except CannotConnect: + errors["base"] = "cannot_connect" + except NotAuthorised: + # Proceed to authentication step. + return await self.async_step_authentication() + except UnknownException: + errors["base"] = "unknown" + # If the connection was not successful, show an error. + + # If the connection was successful, create the device. + if not errors: + return await self._create_entry( + host=self.host, + protocol=self.protocol, + device_info=device_info, + user_input=user_input, + ) + + if self.protocol == "modbus_tcp": + # Proceed to modbus step. 
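The user step above probes the REST API without credentials first and only routes to the authentication form when the device answers NotAuthorised; connection failures re-show the same form with an error. A standalone sketch of that branching (not part of the patch); the probe function and the local exception classes are stand-ins that merely reuse the names pyiskra exposes in this diff:

class CannotConnect(Exception):
    """Stand-in for the flow's cannot-connect error."""


class NotAuthorised(Exception):
    """Stand-in reusing the pyiskra exception name from this diff."""


def next_step(probe, host: str, credentials: dict | None = None) -> str:
    """Return the next config-flow step for one probe attempt."""
    try:
        probe(host, credentials)
    except NotAuthorised:
        return "authentication"  # ask for username/password, then probe again
    except CannotConnect:
        return "user"            # same form again, with a cannot_connect error
    return "create_entry"


def open_device(host: str, credentials: dict | None) -> None:
    # Hypothetical probe: this particular device insists on credentials.
    if credentials is None:
        raise NotAuthorised


assert next_step(open_device, "192.0.2.1") == "authentication"
assert next_step(open_device, "192.0.2.1", {"username": "u", "password": "p"}) == "create_entry"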
+ return await self.async_step_modbus_tcp() + + return self.async_show_form( + step_id="user", + data_schema=STEP_USER_DATA_SCHEMA, + errors=errors, + ) + + async def async_step_authentication( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the authentication step.""" + errors: dict[str, str] = {} + if user_input is not None: + try: + device_info = await test_rest_api_connection(self.host, user_input) + # If the connection failed, abort. + except CannotConnect: + errors["base"] = "cannot_connect" + # If the authentication failed, show an error and authentication form again. + except NotAuthorised: + errors["base"] = "invalid_auth" + except UnknownException: + errors["base"] = "unknown" + + # if the connection was successful, create the device. + if not errors: + return await self._create_entry( + self.host, + self.protocol, + device_info=device_info, + user_input=user_input, + ) + + # If there's no user_input or there was an error, show the authentication form again. + return self.async_show_form( + step_id="authentication", + data_schema=STEP_AUTHENTICATION_DATA_SCHEMA, + errors=errors, + ) + + async def async_step_modbus_tcp( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the Modbus TCP step.""" + errors: dict[str, str] = {} + + # If there's user_input, check the connection. + if user_input is not None: + # convert to integer + user_input[CONF_ADDRESS] = int(user_input[CONF_ADDRESS]) + + try: + device_info = await test_modbus_connection(self.host, user_input) + + # If the connection failed, show an error. + except CannotConnect: + errors["base"] = "cannot_connect" + except UnknownException: + errors["base"] = "unknown" + + # If the connection was successful, create the device. + if not errors: + return await self._create_entry( + host=self.host, + protocol=self.protocol, + device_info=device_info, + user_input=user_input, + ) + + # If there's no user_input or there was an error, show the modbus form again. 
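For the Modbus step above, the port is validated inline with vol.All(vol.Coerce(int), vol.Range(...)), while the address goes through a NumberSelector and is converted with int() afterwards; the patch's own comment explains that putting a range on it in voluptuous would render as a slider. The equivalent validation in bare voluptuous, as a standalone sketch (not part of the patch):

import voluptuous as vol

MODBUS_SCHEMA = vol.Schema(
    {
        vol.Required("port", default=10001): vol.All(
            vol.Coerce(int), vol.Range(min=0, max=65535)
        ),
        vol.Required("address", default=33): vol.All(
            vol.Coerce(int), vol.Range(min=1, max=255)
        ),
    }
)

# Strings and floats are coerced to int before the range check, mirroring the
# int(user_input[CONF_ADDRESS]) conversion in the step above.
print(MODBUS_SCHEMA({"port": "10001", "address": 33.0}))  # {'port': 10001, 'address': 33}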
+ return self.async_show_form( + step_id="modbus_tcp", + data_schema=STEP_MODBUS_TCP_DATA_SCHEMA, + errors=errors, + ) + + async def _create_entry( + self, + host: str, + protocol: str, + device_info: BasicInfo, + user_input: dict[str, Any], + ) -> ConfigFlowResult: + """Create the config entry.""" + + await self.async_set_unique_id(device_info.serial) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=device_info.model, + data={CONF_HOST: host, CONF_PROTOCOL: protocol, **user_input}, + ) + + +class CannotConnect(HomeAssistantError): + """Error to indicate we cannot connect.""" + + +class UnknownException(HomeAssistantError): + """Error to indicate an unknown exception occurred.""" diff --git a/homeassistant/components/iskra/const.py b/homeassistant/components/iskra/const.py new file mode 100644 index 00000000000..5fc3b501962 --- /dev/null +++ b/homeassistant/components/iskra/const.py @@ -0,0 +1,25 @@ +"""Constants for the iskra integration.""" + +DOMAIN = "iskra" +MANUFACTURER = "Iskra d.o.o" + +# POWER +ATTR_TOTAL_APPARENT_POWER = "total_apparent_power" +ATTR_TOTAL_REACTIVE_POWER = "total_reactive_power" +ATTR_TOTAL_ACTIVE_POWER = "total_active_power" +ATTR_PHASE1_POWER = "phase1_power" +ATTR_PHASE2_POWER = "phase2_power" +ATTR_PHASE3_POWER = "phase3_power" + +# Voltage +ATTR_PHASE1_VOLTAGE = "phase1_voltage" +ATTR_PHASE2_VOLTAGE = "phase2_voltage" +ATTR_PHASE3_VOLTAGE = "phase3_voltage" + +# Current +ATTR_PHASE1_CURRENT = "phase1_current" +ATTR_PHASE2_CURRENT = "phase2_current" +ATTR_PHASE3_CURRENT = "phase3_current" + +# Frequency +ATTR_FREQUENCY = "frequency" diff --git a/homeassistant/components/iskra/coordinator.py b/homeassistant/components/iskra/coordinator.py new file mode 100644 index 00000000000..175d8ed4c86 --- /dev/null +++ b/homeassistant/components/iskra/coordinator.py @@ -0,0 +1,57 @@ +"""Coordinator for Iskra integration.""" + +from datetime import timedelta +import logging + +from pyiskra.devices import Device +from pyiskra.exceptions import ( + DeviceConnectionError, + DeviceTimeoutError, + InvalidResponseCode, + NotAuthorised, +) + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class IskraDataUpdateCoordinator(DataUpdateCoordinator[None]): + """Class to manage fetching Iskra data.""" + + def __init__(self, hass: HomeAssistant, device: Device) -> None: + """Initialize.""" + self.device = device + + update_interval = timedelta(seconds=60) + + super().__init__( + hass, + _LOGGER, + name=DOMAIN, + update_interval=update_interval, + ) + + async def _async_update_data(self) -> None: + """Fetch data from Iskra device.""" + try: + await self.device.update_status() + except DeviceTimeoutError as e: + raise UpdateFailed( + f"Timeout error occurred while updating data for device {self.device.serial}" + ) from e + except DeviceConnectionError as e: + raise UpdateFailed( + f"Connection error occurred while updating data for device {self.device.serial}" + ) from e + except NotAuthorised as e: + raise UpdateFailed( + f"Not authorised to fetch data from device {self.device.serial}" + ) from e + except InvalidResponseCode as e: + raise UpdateFailed( + f"Invalid response code from device {self.device.serial}" + ) from e diff --git a/homeassistant/components/iskra/entity.py b/homeassistant/components/iskra/entity.py new file mode 100644 index 00000000000..f1c01d3eaa4 --- /dev/null +++ 
b/homeassistant/components/iskra/entity.py @@ -0,0 +1,38 @@ +"""Base entity for Iskra devices.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import IskraDataUpdateCoordinator + + +class IskraEntity(CoordinatorEntity[IskraDataUpdateCoordinator]): + """Representation a base Iskra device.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: IskraDataUpdateCoordinator) -> None: + """Initialize the Iskra device.""" + super().__init__(coordinator) + self.device = coordinator.device + gateway = self.device.parent_device + + if gateway is not None: + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device.serial)}, + manufacturer=MANUFACTURER, + model=self.device.model, + name=self.device.model, + sw_version=self.device.fw_version, + serial_number=self.device.serial, + via_device=(DOMAIN, gateway.serial), + ) + else: + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, self.device.serial)}, + manufacturer=MANUFACTURER, + model=self.device.model, + sw_version=self.device.fw_version, + serial_number=self.device.serial, + ) diff --git a/homeassistant/components/iskra/manifest.json b/homeassistant/components/iskra/manifest.json new file mode 100644 index 00000000000..7bda12ab615 --- /dev/null +++ b/homeassistant/components/iskra/manifest.json @@ -0,0 +1,11 @@ +{ + "domain": "iskra", + "name": "iskra", + "codeowners": ["@iskramis"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/iskra", + "integration_type": "hub", + "iot_class": "local_polling", + "loggers": ["pyiskra"], + "requirements": ["pyiskra==0.1.8"] +} diff --git a/homeassistant/components/iskra/sensor.py b/homeassistant/components/iskra/sensor.py new file mode 100644 index 00000000000..9e9976749a1 --- /dev/null +++ b/homeassistant/components/iskra/sensor.py @@ -0,0 +1,229 @@ +"""Support for Iskra.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from pyiskra.devices import Device + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + UnitOfApparentPower, + UnitOfElectricCurrent, + UnitOfElectricPotential, + UnitOfFrequency, + UnitOfPower, + UnitOfReactivePower, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import IskraConfigEntry +from .const import ( + ATTR_FREQUENCY, + ATTR_PHASE1_CURRENT, + ATTR_PHASE1_POWER, + ATTR_PHASE1_VOLTAGE, + ATTR_PHASE2_CURRENT, + ATTR_PHASE2_POWER, + ATTR_PHASE2_VOLTAGE, + ATTR_PHASE3_CURRENT, + ATTR_PHASE3_POWER, + ATTR_PHASE3_VOLTAGE, + ATTR_TOTAL_ACTIVE_POWER, + ATTR_TOTAL_APPARENT_POWER, + ATTR_TOTAL_REACTIVE_POWER, +) +from .coordinator import IskraDataUpdateCoordinator +from .entity import IskraEntity + + +@dataclass(frozen=True, kw_only=True) +class IskraSensorEntityDescription(SensorEntityDescription): + """Describes Iskra sensor entity.""" + + value_func: Callable[[Device], float | None] + + +SENSOR_TYPES: tuple[IskraSensorEntityDescription, ...] 
= ( + # Power + IskraSensorEntityDescription( + key=ATTR_TOTAL_ACTIVE_POWER, + translation_key="total_active_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.total.active_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_TOTAL_REACTIVE_POWER, + translation_key="total_reactive_power", + device_class=SensorDeviceClass.REACTIVE_POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, + value_func=lambda device: device.measurements.total.reactive_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_TOTAL_APPARENT_POWER, + translation_key="total_apparent_power", + device_class=SensorDeviceClass.APPARENT_POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfApparentPower.VOLT_AMPERE, + value_func=lambda device: device.measurements.total.apparent_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE1_POWER, + translation_key="phase1_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.phases[0].active_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE2_POWER, + translation_key="phase2_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.phases[1].active_power.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE3_POWER, + translation_key="phase3_power", + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPower.WATT, + value_func=lambda device: device.measurements.phases[2].active_power.value, + ), + # Voltage + IskraSensorEntityDescription( + key=ATTR_PHASE1_VOLTAGE, + translation_key="phase1_voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=lambda device: device.measurements.phases[0].voltage.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE2_VOLTAGE, + translation_key="phase2_voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=lambda device: device.measurements.phases[1].voltage.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE3_VOLTAGE, + translation_key="phase3_voltage", + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value_func=lambda device: device.measurements.phases[2].voltage.value, + ), + # Current + IskraSensorEntityDescription( + key=ATTR_PHASE1_CURRENT, + translation_key="phase1_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=lambda device: device.measurements.phases[0].current.value, + ), + IskraSensorEntityDescription( + key=ATTR_PHASE2_CURRENT, + translation_key="phase2_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=lambda device: device.measurements.phases[1].current.value, + ), + 
IskraSensorEntityDescription( + key=ATTR_PHASE3_CURRENT, + translation_key="phase3_current", + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value_func=lambda device: device.measurements.phases[2].current.value, + ), + # Frequency + IskraSensorEntityDescription( + key=ATTR_FREQUENCY, + translation_key="frequency", + device_class=SensorDeviceClass.FREQUENCY, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfFrequency.HERTZ, + value_func=lambda device: device.measurements.frequency.value, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: IskraConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Iskra sensors based on config_entry.""" + + # Device that uses the config entry. + coordinators = entry.runtime_data + + entities: list[IskraSensor] = [] + + # Add sensors for each device. + for coordinator in coordinators: + device = coordinator.device + sensors = [] + + # Add measurement sensors. + if device.supports_measurements: + sensors.append(ATTR_FREQUENCY) + sensors.append(ATTR_TOTAL_APPARENT_POWER) + sensors.append(ATTR_TOTAL_ACTIVE_POWER) + sensors.append(ATTR_TOTAL_REACTIVE_POWER) + if device.phases >= 1: + sensors.append(ATTR_PHASE1_VOLTAGE) + sensors.append(ATTR_PHASE1_POWER) + sensors.append(ATTR_PHASE1_CURRENT) + if device.phases >= 2: + sensors.append(ATTR_PHASE2_VOLTAGE) + sensors.append(ATTR_PHASE2_POWER) + sensors.append(ATTR_PHASE2_CURRENT) + if device.phases >= 3: + sensors.append(ATTR_PHASE3_VOLTAGE) + sensors.append(ATTR_PHASE3_POWER) + sensors.append(ATTR_PHASE3_CURRENT) + + entities.extend( + IskraSensor(coordinator, description) + for description in SENSOR_TYPES + if description.key in sensors + ) + + async_add_entities(entities) + + +class IskraSensor(IskraEntity, SensorEntity): + """Representation of a Sensor.""" + + entity_description: IskraSensorEntityDescription + + def __init__( + self, + coordinator: IskraDataUpdateCoordinator, + description: IskraSensorEntityDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.device.serial}_{description.key}" + + @property + def native_value(self) -> float | None: + """Return the state of the sensor.""" + return self.entity_description.value_func(self.device) diff --git a/homeassistant/components/iskra/strings.json b/homeassistant/components/iskra/strings.json new file mode 100644 index 00000000000..bd70336f637 --- /dev/null +++ b/homeassistant/components/iskra/strings.json @@ -0,0 +1,92 @@ +{ + "config": { + "step": { + "user": { + "title": "Configure Iskra Device", + "description": "Enter the IP address of your Iskra Device and select protocol.", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "Hostname or IP address of your Iskra device." 
+ } + }, + "authentication": { + "title": "Configure Rest API Credentials", + "description": "Enter username and password", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "modbus_tcp": { + "title": "Configure Modbus TCP", + "description": "Enter Modbus TCP port and device's Modbus address.", + "data": { + "port": "[%key:common::config_flow::data::port%]", + "address": "Modbus address" + }, + "data_description": { + "port": "Port number can be found in the device's settings menu.", + "address": "Modbus address can be found in the device's settings menu." + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "selector": { + "protocol": { + "options": { + "rest_api": "Rest API", + "modbus_tcp": "Modbus TCP" + } + } + }, + "entity": { + "sensor": { + "total_active_power": { + "name": "Total active power" + }, + "total_apparent_power": { + "name": "Total apparent power" + }, + "total_reactive_power": { + "name": "Total reactive power" + }, + "phase1_power": { + "name": "Phase 1 power" + }, + "phase2_power": { + "name": "Phase 2 power" + }, + "phase3_power": { + "name": "Phase 3 power" + }, + "phase1_voltage": { + "name": "Phase 1 voltage" + }, + "phase2_voltage": { + "name": "Phase 2 voltage" + }, + "phase3_voltage": { + "name": "Phase 3 voltage" + }, + "phase1_current": { + "name": "Phase 1 current" + }, + "phase2_current": { + "name": "Phase 2 current" + }, + "phase3_current": { + "name": "Phase 3 current" + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index e78df5ab045..c7c8cd0f9f1 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -285,6 +285,7 @@ FLOWS = { "ipp", "iqvia", "iron_os", + "iskra", "islamic_prayer_times", "israel_rail", "iss", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 879012ae54b..f6854aeb58d 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -2908,6 +2908,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "iskra": { + "name": "iskra", + "integration_type": "hub", + "config_flow": true, + "iot_class": "local_polling" + }, "islamic_prayer_times": { "integration_type": "hub", "config_flow": true, diff --git a/requirements_all.txt b/requirements_all.txt index 35dd9071f13..b603ce3a3ae 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1956,6 +1956,9 @@ pyiqvia==2022.04.0 # homeassistant.components.irish_rail_transport pyirishrail==0.0.2 +# homeassistant.components.iskra +pyiskra==0.1.8 + # homeassistant.components.iss pyiss==1.0.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8940fd0649c..ba0fff1ac4b 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1567,6 +1567,9 @@ pyipp==0.16.0 # homeassistant.components.iqvia pyiqvia==2022.04.0 +# homeassistant.components.iskra +pyiskra==0.1.8 + # homeassistant.components.iss pyiss==1.0.1 diff --git a/tests/components/iskra/__init__.py b/tests/components/iskra/__init__.py new file mode 100644 index 00000000000..ca93572a9e4 --- /dev/null +++ 
b/tests/components/iskra/__init__.py @@ -0,0 +1 @@ +"""Tests for the Iskra component.""" diff --git a/tests/components/iskra/conftest.py b/tests/components/iskra/conftest.py new file mode 100644 index 00000000000..d9cc6808aaa --- /dev/null +++ b/tests/components/iskra/conftest.py @@ -0,0 +1,46 @@ +"""Fixtures for mocking pyiskra's different protocols. + +Fixtures: +- `mock_pyiskra_rest`: Mock pyiskra Rest API protocol. +- `mock_pyiskra_modbus`: Mock pyiskra Modbus protocol. +""" + +from unittest.mock import patch + +import pytest + +from .const import PQ_MODEL, SERIAL, SG_MODEL + + +class MockBasicInfo: + """Mock BasicInfo class.""" + + def __init__(self, model) -> None: + """Initialize the mock class.""" + self.serial = SERIAL + self.model = model + self.description = "Iskra mock device" + self.location = "imagination" + self.sw_ver = "1.0.0" + + +@pytest.fixture +def mock_pyiskra_rest(): + """Mock Iskra API authenticate with Rest API protocol.""" + + with patch( + "pyiskra.adapters.RestAPI.RestAPI.get_basic_info", + return_value=MockBasicInfo(model=SG_MODEL), + ) as basic_info_mock: + yield basic_info_mock + + +@pytest.fixture +def mock_pyiskra_modbus(): + """Mock Iskra API authenticate with Rest API protocol.""" + + with patch( + "pyiskra.adapters.Modbus.Modbus.get_basic_info", + return_value=MockBasicInfo(model=PQ_MODEL), + ) as basic_info_mock: + yield basic_info_mock diff --git a/tests/components/iskra/const.py b/tests/components/iskra/const.py new file mode 100644 index 00000000000..bf38c9a4a79 --- /dev/null +++ b/tests/components/iskra/const.py @@ -0,0 +1,10 @@ +"""Constants used in the Iskra component tests.""" + +SG_MODEL = "SG-W1" +PQ_MODEL = "MC784" +SERIAL = "XXXXXXX" +HOST = "192.1.0.1" +MODBUS_PORT = 10001 +MODBUS_ADDRESS = 33 +USERNAME = "test_username" +PASSWORD = "test_password" diff --git a/tests/components/iskra/test_config_flow.py b/tests/components/iskra/test_config_flow.py new file mode 100644 index 00000000000..0c128be9850 --- /dev/null +++ b/tests/components/iskra/test_config_flow.py @@ -0,0 +1,300 @@ +"""Tests for the Iskra config flow.""" + +from pyiskra.exceptions import ( + DeviceConnectionError, + DeviceTimeoutError, + InvalidResponseCode, + NotAuthorised, +) +import pytest + +from homeassistant.components.iskra import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import ( + CONF_ADDRESS, + CONF_HOST, + CONF_PASSWORD, + CONF_PORT, + CONF_PROTOCOL, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import ( + HOST, + MODBUS_ADDRESS, + MODBUS_PORT, + PASSWORD, + PQ_MODEL, + SERIAL, + SG_MODEL, + USERNAME, +) + +from tests.common import MockConfigEntry + + +# Test step_user with Rest API protocol +async def test_user_rest_no_auth(hass: HomeAssistant, mock_pyiskra_rest) -> None: + """Test the user flow with Rest API protocol.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + # Test if user form is provided + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # Test no authentication required + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + # Test successful Rest API configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == SG_MODEL + assert result["data"] == 
{CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"} + + +async def test_user_rest_auth(hass: HomeAssistant, mock_pyiskra_rest) -> None: + """Test the user flow with Rest API protocol and authentication required.""" + mock_pyiskra_rest.side_effect = NotAuthorised + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + # Test if user form is provided + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + # Test if prompted to enter username and password if not authorised + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "authentication" + + # Test failed authentication + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "invalid_auth"} + assert result["step_id"] == "authentication" + + # Test successful authentication + mock_pyiskra_rest.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD}, + ) + + # Test successful Rest API configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == SG_MODEL + assert result["data"] == { + CONF_HOST: HOST, + CONF_PROTOCOL: "rest_api", + CONF_USERNAME: USERNAME, + CONF_PASSWORD: PASSWORD, + } + + +async def test_user_modbus(hass: HomeAssistant, mock_pyiskra_modbus) -> None: + """Test the user flow with Modbus TCP protocol.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + ) + + # Test if user form is provided + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, + ) + + # Test if propmpted to enter port and address + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + # Test successful Modbus TCP configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == PQ_MODEL + assert result["data"] == { + CONF_HOST: HOST, + CONF_PROTOCOL: "modbus_tcp", + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + } + + +async def test_modbus_abort_if_already_setup( + hass: HomeAssistant, mock_pyiskra_modbus +) -> None: + """Test we abort if Iskra is already setup.""" + + MockConfigEntry(domain=DOMAIN, unique_id=SERIAL).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async 
def test_rest_api_abort_if_already_setup( + hass: HomeAssistant, mock_pyiskra_rest +) -> None: + """Test we abort if Iskra is already setup.""" + + MockConfigEntry(domain=DOMAIN, unique_id=SERIAL).add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("s_effect", "reason"), + [ + (DeviceConnectionError, "cannot_connect"), + (DeviceTimeoutError, "cannot_connect"), + (InvalidResponseCode, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_modbus_device_error( + hass: HomeAssistant, + mock_pyiskra_modbus, + s_effect, + reason, +) -> None: + """Test device error with Modbus TCP protocol.""" + mock_pyiskra_modbus.side_effect = s_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "modbus_tcp"}, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + # Test if error returned + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "modbus_tcp" + assert result["errors"] == {"base": reason} + + # Remove side effect + mock_pyiskra_modbus.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + }, + ) + + # Test successful Modbus TCP configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == PQ_MODEL + assert result["data"] == { + CONF_HOST: HOST, + CONF_PROTOCOL: "modbus_tcp", + CONF_PORT: MODBUS_PORT, + CONF_ADDRESS: MODBUS_ADDRESS, + } + + +@pytest.mark.parametrize( + ("s_effect", "reason"), + [ + (DeviceConnectionError, "cannot_connect"), + (DeviceTimeoutError, "cannot_connect"), + (InvalidResponseCode, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_rest_device_error( + hass: HomeAssistant, + mock_pyiskra_rest, + s_effect, + reason, +) -> None: + """Test device error with Modbus TCP protocol.""" + mock_pyiskra_rest.side_effect = s_effect + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + # Test if error returned + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {"base": reason} + + # Remove side effect + mock_pyiskra_rest.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"}, + ) + + # Test successful Rest API configuration + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == SERIAL + assert result["title"] == SG_MODEL + assert result["data"] == {CONF_HOST: HOST, CONF_PROTOCOL: "rest_api"} From 1e1c3506febee4b839ff00c37aa95bdab753aac7 Mon Sep 17 00:00:00 2001 From: LG-ThinQ-Integration Date: Wed, 4 Sep 2024 22:52:41 +0900 Subject: [PATCH 1428/1635] Bump thinqconnect to 0.9.6 (#125155) * Refactor LG ThinQ integration * Rename ha_bridge_list to bridge_list * Update for reviews * 
Correct spells Do not use mqtt related api * Guarantee update status * Update for reviews * Update reviews --------- Co-authored-by: jangwon.lee --- homeassistant/components/lg_thinq/__init__.py | 22 ++-- .../components/lg_thinq/binary_sensor.py | 29 ++--- homeassistant/components/lg_thinq/const.py | 74 +---------- .../components/lg_thinq/coordinator.py | 123 ++++-------------- homeassistant/components/lg_thinq/entity.py | 69 ++++++---- homeassistant/components/lg_thinq/icons.json | 3 + .../components/lg_thinq/manifest.json | 2 +- .../components/lg_thinq/strings.json | 3 + homeassistant/components/lg_thinq/switch.py | 80 +++++------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 11 files changed, 134 insertions(+), 275 deletions(-) diff --git a/homeassistant/components/lg_thinq/__init__.py b/homeassistant/components/lg_thinq/__init__.py index a86afc68171..625938564a8 100644 --- a/homeassistant/components/lg_thinq/__init__.py +++ b/homeassistant/components/lg_thinq/__init__.py @@ -6,6 +6,7 @@ import asyncio import logging from thinqconnect import ThinQApi, ThinQAPIException +from thinqconnect.integration import async_get_ha_bridge_list from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_COUNTRY, Platform @@ -26,6 +27,8 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry(hass: HomeAssistant, entry: ThinqConfigEntry) -> bool: """Set up an entry.""" + entry.runtime_data = {} + access_token = entry.data[CONF_ACCESS_TOKEN] client_id = entry.data[CONF_CONNECT_CLIENT_ID] country_code = entry.data[CONF_COUNTRY] @@ -55,29 +58,22 @@ async def async_setup_coordinators( thinq_api: ThinQApi, ) -> None: """Set up coordinators and register devices.""" - entry.runtime_data = {} - - # Get a device list from the server. + # Get a list of ha bridge. try: - device_list = await thinq_api.async_get_device_list() + bridge_list = await async_get_ha_bridge_list(thinq_api) except ThinQAPIException as exc: raise ConfigEntryNotReady(exc.message) from exc - if not device_list: + if not bridge_list: return # Setup coordinator per device. 
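The rewritten setup above fans out one coordinator per bridge, creates them concurrently, and stores the results keyed by unique id. A standalone sketch of that shape (not part of the patch); the coordinator class and setup coroutine below are placeholders, not thinqconnect or Home Assistant objects:

import asyncio
from dataclasses import dataclass


@dataclass
class SketchCoordinator:
    """Placeholder for DeviceDataUpdateCoordinator."""

    unique_id: str


async def setup_coordinator(bridge_id: str) -> SketchCoordinator:
    await asyncio.sleep(0)  # stands in for the first refresh of each coordinator
    return SketchCoordinator(unique_id=bridge_id)


async def setup_all(bridge_ids: list[str]) -> dict[str, SketchCoordinator]:
    coordinators = await asyncio.gather(
        *(setup_coordinator(bridge_id) for bridge_id in bridge_ids)
    )
    return {coordinator.unique_id: coordinator for coordinator in coordinators}


print(asyncio.run(setup_all(["TQS123", "TQS123_dryer"])))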
- coordinator_list: list[DeviceDataUpdateCoordinator] = [] task_list = [ - hass.async_create_task(async_setup_device_coordinator(hass, thinq_api, device)) - for device in device_list + hass.async_create_task(async_setup_device_coordinator(hass, bridge)) + for bridge in bridge_list ] task_result = await asyncio.gather(*task_list) - for coordinators in task_result: - if coordinators: - coordinator_list += coordinators - - for coordinator in coordinator_list: + for coordinator in task_result: entry.runtime_data[coordinator.unique_id] = coordinator diff --git a/homeassistant/components/lg_thinq/binary_sensor.py b/homeassistant/components/lg_thinq/binary_sensor.py index fc6564c7652..6f856c3055f 100644 --- a/homeassistant/components/lg_thinq/binary_sensor.py +++ b/homeassistant/components/lg_thinq/binary_sensor.py @@ -2,9 +2,10 @@ from __future__ import annotations -from thinqconnect import PROPERTY_READABLE, DeviceType +import logging + +from thinqconnect import DeviceType from thinqconnect.devices.const import Property as ThinQProperty -from thinqconnect.integration.homeassistant.property import create_properties from homeassistant.components.binary_sensor import ( BinarySensorEntity, @@ -71,6 +72,7 @@ DEVICE_TYPE_BINARY_SENSOR_MAP: dict[ ), DeviceType.WINE_CELLAR: (BINARY_SENSOR_DESC[ThinQProperty.SABBATH_MODE],), } +_LOGGER = logging.getLogger(__name__) async def async_setup_entry( @@ -83,22 +85,13 @@ async def async_setup_entry( for coordinator in entry.runtime_data.values(): if ( descriptions := DEVICE_TYPE_BINARY_SENSOR_MAP.get( - coordinator.device_api.device_type + coordinator.api.device.device_type ) ) is not None: for description in descriptions: - properties = create_properties( - device_api=coordinator.device_api, - key=description.key, - children_keys=None, - rw_type=PROPERTY_READABLE, - ) - if not properties: - continue - entities.extend( - ThinQBinarySensorEntity(coordinator, description, prop) - for prop in properties + ThinQBinarySensorEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) ) if entities: @@ -112,4 +105,10 @@ class ThinQBinarySensorEntity(ThinQEntity, BinarySensorEntity): """Update status itself.""" super()._update_status() - self._attr_is_on = self.property.get_value_as_bool() + _LOGGER.debug( + "[%s:%s] update status: %s", + self.coordinator.device_name, + self.property_id, + self.data.is_on, + ) + self._attr_is_on = self.data.is_on diff --git a/homeassistant/components/lg_thinq/const.py b/homeassistant/components/lg_thinq/const.py index 811b7c50340..09f8c0833df 100644 --- a/homeassistant/components/lg_thinq/const.py +++ b/homeassistant/components/lg_thinq/const.py @@ -1,82 +1,12 @@ """Constants for LG ThinQ.""" -# Base component constants. 
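The binary-sensor platform above pairs a per-device-type tuple of entity descriptions with the bridge's get_active_idx() lookup, so a single description can fan out into one entity per reporting property. A standalone sketch of that fan-out (not part of the patch); only the wine-cellar/sabbath_mode pairing is taken from the diff, and the lookup table and helper are illustrative stand-ins:

from dataclasses import dataclass


@dataclass(frozen=True)
class SketchDescription:
    """Minimal stand-in for a BinarySensorEntityDescription."""

    key: str


DEVICE_TYPE_MAP: dict[str, tuple[SketchDescription, ...]] = {
    "wine_cellar": (SketchDescription(key="sabbath_mode"),),
}

# Stand-in for coordinator.api.get_active_idx(key): the property ids that actually
# report this key on a given device (possibly several, one per location).
ACTIVE_IDX: dict[str, list[str]] = {"sabbath_mode": ["sabbath_mode"]}


def build_entity_ids(device_type: str) -> list[str]:
    return [
        property_id
        for description in DEVICE_TYPE_MAP.get(device_type, ())
        for property_id in ACTIVE_IDX.get(description.key, [])
    ]


print(build_entity_ids("wine_cellar"))  # ['sabbath_mode']
print(build_entity_ids("hood"))         # [] -> no binary sensors created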
from typing import Final -from thinqconnect import ( - AirConditionerDevice, - AirPurifierDevice, - AirPurifierFanDevice, - CeilingFanDevice, - CooktopDevice, - DehumidifierDevice, - DeviceType, - DishWasherDevice, - DryerDevice, - HomeBrewDevice, - HoodDevice, - HumidifierDevice, - KimchiRefrigeratorDevice, - MicrowaveOvenDevice, - OvenDevice, - PlantCultivatorDevice, - RefrigeratorDevice, - RobotCleanerDevice, - StickCleanerDevice, - StylerDevice, - SystemBoilerDevice, - WashcomboMainDevice, - WashcomboMiniDevice, - WasherDevice, - WashtowerDevice, - WashtowerDryerDevice, - WashtowerWasherDevice, - WaterHeaterDevice, - WaterPurifierDevice, - WineCellarDevice, -) - -# Common +# Config flow DOMAIN = "lg_thinq" COMPANY = "LGE" +DEFAULT_COUNTRY: Final = "US" THINQ_DEFAULT_NAME: Final = "LG ThinQ" THINQ_PAT_URL: Final = "https://connect-pat.lgthinq.com" - -# Config Flow CLIENT_PREFIX: Final = "home-assistant" CONF_CONNECT_CLIENT_ID: Final = "connect_client_id" -DEFAULT_COUNTRY: Final = "US" - -THINQ_DEVICE_ADDED: Final = "thinq_device_added" - -DEVICE_TYPE_API_MAP: Final = { - DeviceType.AIR_CONDITIONER: AirConditionerDevice, - DeviceType.AIR_PURIFIER_FAN: AirPurifierFanDevice, - DeviceType.AIR_PURIFIER: AirPurifierDevice, - DeviceType.CEILING_FAN: CeilingFanDevice, - DeviceType.COOKTOP: CooktopDevice, - DeviceType.DEHUMIDIFIER: DehumidifierDevice, - DeviceType.DISH_WASHER: DishWasherDevice, - DeviceType.DRYER: DryerDevice, - DeviceType.HOME_BREW: HomeBrewDevice, - DeviceType.HOOD: HoodDevice, - DeviceType.HUMIDIFIER: HumidifierDevice, - DeviceType.KIMCHI_REFRIGERATOR: KimchiRefrigeratorDevice, - DeviceType.MICROWAVE_OVEN: MicrowaveOvenDevice, - DeviceType.OVEN: OvenDevice, - DeviceType.PLANT_CULTIVATOR: PlantCultivatorDevice, - DeviceType.REFRIGERATOR: RefrigeratorDevice, - DeviceType.ROBOT_CLEANER: RobotCleanerDevice, - DeviceType.STICK_CLEANER: StickCleanerDevice, - DeviceType.STYLER: StylerDevice, - DeviceType.SYSTEM_BOILER: SystemBoilerDevice, - DeviceType.WASHER: WasherDevice, - DeviceType.WASHCOMBO_MAIN: WashcomboMainDevice, - DeviceType.WASHCOMBO_MINI: WashcomboMiniDevice, - DeviceType.WASHTOWER_DRYER: WashtowerDryerDevice, - DeviceType.WASHTOWER: WashtowerDevice, - DeviceType.WASHTOWER_WASHER: WashtowerWasherDevice, - DeviceType.WATER_HEATER: WaterHeaterDevice, - DeviceType.WATER_PURIFIER: WaterPurifierDevice, - DeviceType.WINE_CELLAR: WineCellarDevice, -} diff --git a/homeassistant/components/lg_thinq/coordinator.py b/homeassistant/components/lg_thinq/coordinator.py index 1e16ac7ec56..5ba77c648a8 100644 --- a/homeassistant/components/lg_thinq/coordinator.py +++ b/homeassistant/components/lg_thinq/coordinator.py @@ -5,12 +5,13 @@ from __future__ import annotations import logging from typing import Any -from thinqconnect import ConnectBaseDevice, DeviceType, ThinQApi, ThinQAPIException +from thinqconnect import ThinQAPIException +from thinqconnect.integration import HABridge from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed -from .const import DEVICE_TYPE_API_MAP, DOMAIN +from .const import DOMAIN _LOGGER = logging.getLogger(__name__) @@ -18,125 +19,51 @@ _LOGGER = logging.getLogger(__name__) class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): """LG Device's Data Update Coordinator.""" - def __init__( - self, - hass: HomeAssistant, - device_api: ConnectBaseDevice, - *, - sub_id: str | None = None, - ) -> None: + def __init__(self, hass: HomeAssistant, ha_bridge: HABridge) -> None: 
"""Initialize data coordinator.""" super().__init__( hass, _LOGGER, - name=f"{DOMAIN}_{device_api.device_id}", + name=f"{DOMAIN}_{ha_bridge.device.device_id}", ) - # For washTower's washer or dryer - self.sub_id = sub_id + self.data = {} + self.api = ha_bridge + self.device_id = ha_bridge.device.device_id + self.sub_id = ha_bridge.sub_id + + alias = ha_bridge.device.alias # The device name is usually set to 'alias'. # But, if the sub_id exists, it will be set to 'alias {sub_id}'. # e.g. alias='MyWashTower', sub_id='dryer' then 'MyWashTower dryer'. - self.device_name = ( - f"{device_api.alias} {self.sub_id}" if self.sub_id else device_api.alias - ) + self.device_name = f"{alias} {self.sub_id}" if self.sub_id else alias # The unique id is usually set to 'device_id'. # But, if the sub_id exists, it will be set to 'device_id_{sub_id}'. # e.g. device_id='TQSXXXX', sub_id='dryer' then 'TQSXXXX_dryer'. self.unique_id = ( - f"{device_api.device_id}_{self.sub_id}" - if self.sub_id - else device_api.device_id + f"{self.device_id}_{self.sub_id}" if self.sub_id else self.device_id ) - # Get the api instance. - self.device_api = device_api.get_sub_device(self.sub_id) or device_api - async def _async_update_data(self) -> dict[str, Any]: """Request to the server to update the status from full response data.""" try: - data = await self.device_api.thinq_api.async_get_device_status( - self.device_api.device_id - ) - except ThinQAPIException as exc: - raise UpdateFailed(exc) from exc + return await self.api.fetch_data() + except ThinQAPIException as e: + raise UpdateFailed(e) from e - # Full response data into the device api. - self.device_api.set_status(data) - return data + def refresh_status(self) -> None: + """Refresh current status.""" + self.async_set_updated_data(self.data) async def async_setup_device_coordinator( - hass: HomeAssistant, thinq_api: ThinQApi, device: dict[str, Any] -) -> list[DeviceDataUpdateCoordinator] | None: + hass: HomeAssistant, ha_bridge: HABridge +) -> DeviceDataUpdateCoordinator: """Create DeviceDataUpdateCoordinator and device_api per device.""" - device_id = device["deviceId"] - device_info = device["deviceInfo"] + coordinator = DeviceDataUpdateCoordinator(hass, ha_bridge) + await coordinator.async_refresh() - # Get an appropriate class constructor for the device type. - device_type = device_info.get("deviceType") - constructor = DEVICE_TYPE_API_MAP.get(device_type) - if constructor is None: - _LOGGER.error( - "Failed to setup device(%s): not supported device. type=%s", - device_id, - device_type, - ) - return None - - # Get a device profile from the server. - try: - profile = await thinq_api.async_get_device_profile(device_id) - except ThinQAPIException: - _LOGGER.warning("Failed to setup device(%s): no profile", device_id) - return None - - device_group_id = device_info.get("groupId") - - # Create new device api instance. - device_api: ConnectBaseDevice = ( - constructor( - thinq_api=thinq_api, - device_id=device_id, - device_type=device_type, - model_name=device_info.get("modelName"), - alias=device_info.get("alias"), - group_id=device_group_id, - reportable=device_info.get("reportable"), - profile=profile, - ) - if device_group_id - else constructor( - thinq_api=thinq_api, - device_id=device_id, - device_type=device_type, - model_name=device_info.get("modelName"), - alias=device_info.get("alias"), - reportable=device_info.get("reportable"), - profile=profile, - ) - ) - - # Create a list of sub-devices from the profile. 
- # Note that some devices may have more than two device profiles. - # In this case we should create multiple lg device instance. - # e.g. 'WashTower-Single-Unit' = 'WashTower{dryer}' + 'WashTower{washer}'. - device_sub_ids = ( - list(profile.keys()) - if device_type == DeviceType.WASHTOWER and "property" not in profile - else [None] - ) - - # Create new device coordinator instances. - coordinator_list: list[DeviceDataUpdateCoordinator] = [] - for sub_id in device_sub_ids: - coordinator = DeviceDataUpdateCoordinator(hass, device_api, sub_id=sub_id) - await coordinator.async_refresh() - - # Finally add a device coordinator into the result list. - coordinator_list.append(coordinator) - _LOGGER.debug("Setup device's coordinator: %s", coordinator) - - return coordinator_list + _LOGGER.debug("Setup device's coordinator: %s", coordinator.device_name) + return coordinator diff --git a/homeassistant/components/lg_thinq/entity.py b/homeassistant/components/lg_thinq/entity.py index 151687aabb8..09ff8662efb 100644 --- a/homeassistant/components/lg_thinq/entity.py +++ b/homeassistant/components/lg_thinq/entity.py @@ -2,11 +2,13 @@ from __future__ import annotations +from collections.abc import Coroutine import logging from typing import Any from thinqconnect import ThinQAPIException -from thinqconnect.integration.homeassistant.property import Property as ThinQProperty +from thinqconnect.devices.const import Location +from thinqconnect.integration import PropertyState from homeassistant.core import callback from homeassistant.exceptions import ServiceValidationError @@ -19,6 +21,8 @@ from .coordinator import DeviceDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) +EMPTY_STATE = PropertyState() + class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): """The base implementation of all lg thinq entities.""" @@ -29,43 +33,36 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): self, coordinator: DeviceDataUpdateCoordinator, entity_description: EntityDescription, - property: ThinQProperty, + property_id: str, ) -> None: """Initialize an entity.""" super().__init__(coordinator) self.entity_description = entity_description - self.property = property + self.property_id = property_id + self.location = self.coordinator.api.get_location_for_idx(self.property_id) + self._attr_device_info = dr.DeviceInfo( identifiers={(DOMAIN, coordinator.unique_id)}, manufacturer=COMPANY, - model=coordinator.device_api.model_name, + model=coordinator.api.device.model_name, name=coordinator.device_name, ) + self._attr_unique_id = f"{coordinator.unique_id}_{self.property_id}" + if self.location is not None and self.location not in ( + Location.MAIN, + Location.OVEN, + coordinator.sub_id, + ): + self._attr_translation_placeholders = {"location": self.location} + self._attr_translation_key = ( + f"{entity_description.translation_key}_for_location" + ) - # Set the unique key. If there exist a location, add the prefix location name. - unique_key = ( - f"{entity_description.key}" - if property.location is None - else f"{property.location}_{entity_description.key}" - ) - self._attr_unique_id = f"{coordinator.unique_id}_{unique_key}" - - # Update initial status. 
- self._update_status() - - async def async_post_value(self, value: Any) -> None: - """Post the value of entity to server.""" - try: - await self.property.async_post_value(value) - except ThinQAPIException as exc: - raise ServiceValidationError( - exc.message, - translation_domain=DOMAIN, - translation_key=exc.code, - ) from exc - finally: - await self.coordinator.async_request_refresh() + @property + def data(self) -> PropertyState: + """Return the state data of entity.""" + return self.coordinator.data.get(self.property_id, EMPTY_STATE) def _update_status(self) -> None: """Update status itself. @@ -78,3 +75,21 @@ class ThinQEntity(CoordinatorEntity[DeviceDataUpdateCoordinator]): """Handle updated data from the coordinator.""" self._update_status() self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Call when entity is added to hass.""" + await super().async_added_to_hass() + self._handle_coordinator_update() + + async def async_call_api(self, target: Coroutine[Any, Any, Any]) -> None: + """Call the given api and handle exception.""" + try: + await target + except ThinQAPIException as exc: + raise ServiceValidationError( + exc.message, + translation_domain=DOMAIN, + translation_key=exc.code, + ) from exc + finally: + await self.coordinator.async_request_refresh() diff --git a/homeassistant/components/lg_thinq/icons.json b/homeassistant/components/lg_thinq/icons.json index 550d023d278..3cc4ab784c2 100644 --- a/homeassistant/components/lg_thinq/icons.json +++ b/homeassistant/components/lg_thinq/icons.json @@ -15,6 +15,9 @@ "remote_control_enabled": { "default": "mdi:remote" }, + "remote_control_enabled_for_location": { + "default": "mdi:remote" + }, "rinse_refill": { "default": "mdi:tune-vertical-variant" }, diff --git a/homeassistant/components/lg_thinq/manifest.json b/homeassistant/components/lg_thinq/manifest.json index a49b91892f5..9a594f70f95 100644 --- a/homeassistant/components/lg_thinq/manifest.json +++ b/homeassistant/components/lg_thinq/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/lg_thinq/", "iot_class": "cloud_push", "loggers": ["thinqconnect"], - "requirements": ["thinqconnect==0.9.5"] + "requirements": ["thinqconnect==0.9.6"] } diff --git a/homeassistant/components/lg_thinq/strings.json b/homeassistant/components/lg_thinq/strings.json index 472e8b848b7..6649c6b0c13 100644 --- a/homeassistant/components/lg_thinq/strings.json +++ b/homeassistant/components/lg_thinq/strings.json @@ -34,6 +34,9 @@ "remote_control_enabled": { "name": "Remote start" }, + "remote_control_enabled_for_location": { + "name": "{location} remote start" + }, "rinse_refill": { "name": "Rinse refill needed" }, diff --git a/homeassistant/components/lg_thinq/switch.py b/homeassistant/components/lg_thinq/switch.py index ee7dfdb02d7..ef85c8ad50e 100644 --- a/homeassistant/components/lg_thinq/switch.py +++ b/homeassistant/components/lg_thinq/switch.py @@ -5,9 +5,8 @@ from __future__ import annotations import logging from typing import Any -from thinqconnect import PROPERTY_WRITABLE, DeviceType +from thinqconnect import DeviceType from thinqconnect.devices.const import Property as ThinQProperty -from thinqconnect.integration.homeassistant.property import create_properties from homeassistant.components.switch import ( SwitchDeviceClass, @@ -20,44 +19,34 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ThinqConfigEntry from .entity import ThinQEntity -OPERATION_SWITCH_DESC: dict[ThinQProperty, SwitchEntityDescription] = { - ThinQProperty.AIR_FAN_OPERATION_MODE: SwitchEntityDescription( - key=ThinQProperty.AIR_FAN_OPERATION_MODE, - translation_key="operation_power", - ), - ThinQProperty.AIR_PURIFIER_OPERATION_MODE: SwitchEntityDescription( - key=ThinQProperty.AIR_PURIFIER_OPERATION_MODE, - translation_key="operation_power", - ), - ThinQProperty.BOILER_OPERATION_MODE: SwitchEntityDescription( - key=ThinQProperty.BOILER_OPERATION_MODE, - translation_key="operation_power", - ), - ThinQProperty.DEHUMIDIFIER_OPERATION_MODE: SwitchEntityDescription( - key=ThinQProperty.DEHUMIDIFIER_OPERATION_MODE, - translation_key="operation_power", - ), - ThinQProperty.HUMIDIFIER_OPERATION_MODE: SwitchEntityDescription( - key=ThinQProperty.HUMIDIFIER_OPERATION_MODE, - translation_key="operation_power", - ), -} - -DEVIE_TYPE_SWITCH_MAP: dict[DeviceType, tuple[SwitchEntityDescription, ...]] = { +DEVICE_TYPE_SWITCH_MAP: dict[DeviceType, tuple[SwitchEntityDescription, ...]] = { DeviceType.AIR_PURIFIER_FAN: ( - OPERATION_SWITCH_DESC[ThinQProperty.AIR_FAN_OPERATION_MODE], + SwitchEntityDescription( + key=ThinQProperty.AIR_FAN_OPERATION_MODE, translation_key="operation_power" + ), ), DeviceType.AIR_PURIFIER: ( - OPERATION_SWITCH_DESC[ThinQProperty.AIR_PURIFIER_OPERATION_MODE], + SwitchEntityDescription( + key=ThinQProperty.AIR_PURIFIER_OPERATION_MODE, + translation_key="operation_power", + ), ), DeviceType.DEHUMIDIFIER: ( - OPERATION_SWITCH_DESC[ThinQProperty.DEHUMIDIFIER_OPERATION_MODE], + SwitchEntityDescription( + key=ThinQProperty.DEHUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), ), DeviceType.HUMIDIFIER: ( - OPERATION_SWITCH_DESC[ThinQProperty.HUMIDIFIER_OPERATION_MODE], + SwitchEntityDescription( + key=ThinQProperty.HUMIDIFIER_OPERATION_MODE, + translation_key="operation_power", + ), ), DeviceType.SYSTEM_BOILER: ( - OPERATION_SWITCH_DESC[ThinQProperty.BOILER_OPERATION_MODE], + SwitchEntityDescription( + key=ThinQProperty.BOILER_OPERATION_MODE, translation_key="operation_power" + ), ), } @@ -73,23 +62,14 @@ async def async_setup_entry( entities: list[ThinQSwitchEntity] = [] for coordinator in entry.runtime_data.values(): if ( - descriptions := DEVIE_TYPE_SWITCH_MAP.get( - coordinator.device_api.device_type + descriptions := DEVICE_TYPE_SWITCH_MAP.get( + coordinator.api.device.device_type ) ) is not None: for description in descriptions: - properties = create_properties( - device_api=coordinator.device_api, - key=description.key, - children_keys=None, - rw_type=PROPERTY_WRITABLE, - ) - if not properties: - continue - entities.extend( - ThinQSwitchEntity(coordinator, description, prop) - for prop in properties + ThinQSwitchEntity(coordinator, description, property_id) + for property_id in coordinator.api.get_active_idx(description.key) ) if entities: @@ -105,14 +85,20 @@ class ThinQSwitchEntity(ThinQEntity, SwitchEntity): """Update status itself.""" super()._update_status() - self._attr_is_on = self.property.get_value_as_bool() + _LOGGER.debug( + "[%s:%s] update status: %s", + self.coordinator.device_name, + self.property_id, + self.data.is_on, + ) + self._attr_is_on = self.data.is_on async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the switch.""" _LOGGER.debug("[%s] async_turn_on", self.name) - await self.async_post_value("POWER_ON") + await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id)) async def async_turn_off(self, **kwargs: Any) -> None: 
"""Turn off the switch.""" _LOGGER.debug("[%s] async_turn_off", self.name) - await self.async_post_value("POWER_OFF") + await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id)) diff --git a/requirements_all.txt b/requirements_all.txt index b603ce3a3ae..5c6d0dc0ecf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2798,7 +2798,7 @@ thermoworks-smoke==0.1.8 thingspeak==1.0.0 # homeassistant.components.lg_thinq -thinqconnect==0.9.5 +thinqconnect==0.9.6 # homeassistant.components.tikteck tikteck==0.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index ba0fff1ac4b..5a5f835082c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2211,7 +2211,7 @@ thermobeacon-ble==0.7.0 thermopro-ble==0.10.0 # homeassistant.components.lg_thinq -thinqconnect==0.9.5 +thinqconnect==0.9.6 # homeassistant.components.tilt_ble tilt-ble==0.2.3 From 3a44098ddff1af5d11a8a2e8d27ca9bc86e79022 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Wed, 4 Sep 2024 16:12:57 +0200 Subject: [PATCH 1429/1635] Fix Path.__enter__ DeprecationWarning in tests (#125227) --- tests/components/google_cloud/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/components/google_cloud/conftest.py b/tests/components/google_cloud/conftest.py index acde62144a9..897c352b402 100644 --- a/tests/components/google_cloud/conftest.py +++ b/tests/components/google_cloud/conftest.py @@ -54,9 +54,11 @@ def mock_process_uploaded_file( create_google_credentials_json: str, ) -> Generator[MagicMock]: """Mock upload certificate files.""" + ctx_mock = MagicMock() + ctx_mock.__enter__.return_value = Path(create_google_credentials_json) with patch( "homeassistant.components.google_cloud.config_flow.process_uploaded_file", - return_value=Path(create_google_credentials_json), + return_value=ctx_mock, ) as mock_upload: yield mock_upload From 638434c103879859b2847de09862a093e34631ac Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 4 Sep 2024 09:36:25 -0500 Subject: [PATCH 1430/1635] Bump intents to 2024.9.4 (#125232) --- homeassistant/components/conversation/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index 5a689485b29..837ac9f9b1f 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.29"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.9.4"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0eb5d6a78e0..767bd206266 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -32,7 +32,7 @@ hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 home-assistant-frontend==20240904.0 -home-assistant-intents==2024.8.29 +home-assistant-intents==2024.9.4 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 diff --git a/requirements_all.txt b/requirements_all.txt index 5c6d0dc0ecf..2ea174ebbc3 100644 --- a/requirements_all.txt +++ 
b/requirements_all.txt @@ -1111,7 +1111,7 @@ holidays==0.56 home-assistant-frontend==20240904.0 # homeassistant.components.conversation -home-assistant-intents==2024.8.29 +home-assistant-intents==2024.9.4 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5a5f835082c..c7a11044f50 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -937,7 +937,7 @@ holidays==0.56 home-assistant-frontend==20240904.0 # homeassistant.components.conversation -home-assistant-intents==2024.8.29 +home-assistant-intents==2024.9.4 # homeassistant.components.home_connect homeconnect==0.8.0 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 0d99b04c44c..4dbea0e4c95 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.2.27,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 ruff==0.6.2 \ - PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.8.29 mutagen==1.47.0 + PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.9.4 mutagen==1.47.0 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " From eaee8d5b7857644d806a7e9a957ed47a790bc1bd Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Wed, 4 Sep 2024 18:34:11 +0300 Subject: [PATCH 1431/1635] Fix BTHome validate triggers for device with multiple buttons (#125183) * Fix BTHome validate triggers for device with multiple buttons * Remove None default --- .../components/bthome/device_trigger.py | 56 +++++--- .../components/bthome/test_device_trigger.py | 124 +++++++++++++++++- 2 files changed, 158 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/bthome/device_trigger.py b/homeassistant/components/bthome/device_trigger.py index c49664b1146..c50ffc05900 100644 --- a/homeassistant/components/bthome/device_trigger.py +++ b/homeassistant/components/bthome/device_trigger.py @@ -7,6 +7,9 @@ from typing import Any import voluptuous as vol from homeassistant.components.device_automation import DEVICE_TRIGGER_BASE_SCHEMA +from homeassistant.components.device_automation.exceptions import ( + InvalidDeviceAutomationConfig, +) from homeassistant.components.homeassistant.triggers import event as event_trigger from homeassistant.const import ( CONF_DEVICE_ID, @@ -43,33 +46,46 @@ TRIGGERS_BY_EVENT_CLASS = { EVENT_CLASS_DIMMER: {"rotate_left", "rotate_right"}, } -SCHEMA_BY_EVENT_CLASS = { - EVENT_CLASS_BUTTON: DEVICE_TRIGGER_BASE_SCHEMA.extend( - { - vol.Required(CONF_TYPE): vol.In([EVENT_CLASS_BUTTON]), - vol.Required(CONF_SUBTYPE): vol.In( - TRIGGERS_BY_EVENT_CLASS[EVENT_CLASS_BUTTON] - ), - } - ), - EVENT_CLASS_DIMMER: DEVICE_TRIGGER_BASE_SCHEMA.extend( - { - vol.Required(CONF_TYPE): vol.In([EVENT_CLASS_DIMMER]), - vol.Required(CONF_SUBTYPE): vol.In( - TRIGGERS_BY_EVENT_CLASS[EVENT_CLASS_DIMMER] - ), - } - ), -} +TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( + {vol.Required(CONF_TYPE): str, vol.Required(CONF_SUBTYPE): str} +) async def async_validate_trigger_config( hass: HomeAssistant, config: ConfigType ) -> ConfigType: """Validate trigger config.""" - return SCHEMA_BY_EVENT_CLASS.get(config[CONF_TYPE], DEVICE_TRIGGER_BASE_SCHEMA)( # type: ignore[no-any-return] - config + config = TRIGGER_SCHEMA(config) + event_class = config[CONF_TYPE] + 
event_type = config[CONF_SUBTYPE] + + device_registry = dr.async_get(hass) + device = device_registry.async_get(config[CONF_DEVICE_ID]) + assert device is not None + config_entries = [ + hass.config_entries.async_get_entry(entry_id) + for entry_id in device.config_entries + ] + bthome_config_entry = next( + iter(entry for entry in config_entries if entry and entry.domain == DOMAIN) ) + event_classes: list[str] = bthome_config_entry.data.get( + CONF_DISCOVERED_EVENT_CLASSES, [] + ) + + if event_class not in event_classes: + raise InvalidDeviceAutomationConfig( + f"BTHome trigger {event_class} is not valid for device " + f"{device} ({config[CONF_DEVICE_ID]})" + ) + + if event_type not in TRIGGERS_BY_EVENT_CLASS.get(event_class.split("_")[0], ()): + raise InvalidDeviceAutomationConfig( + f"BTHome trigger {event_type} is not valid for device " + f"{device} ({config[CONF_DEVICE_ID]})" + ) + + return config async def async_get_triggers( diff --git a/tests/components/bthome/test_device_trigger.py b/tests/components/bthome/test_device_trigger.py index 459654826f9..c4c900ef6e1 100644 --- a/tests/components/bthome/test_device_trigger.py +++ b/tests/components/bthome/test_device_trigger.py @@ -1,10 +1,19 @@ """Test BTHome BLE events.""" +import pytest + from homeassistant.components import automation from homeassistant.components.bluetooth import DOMAIN as BLUETOOTH_DOMAIN from homeassistant.components.bthome.const import CONF_SUBTYPE, DOMAIN from homeassistant.components.device_automation import DeviceAutomationType -from homeassistant.const import CONF_DEVICE_ID, CONF_DOMAIN, CONF_PLATFORM, CONF_TYPE +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_DOMAIN, + CONF_PLATFORM, + CONF_TYPE, + STATE_ON, + STATE_UNAVAILABLE, +) from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -121,6 +130,117 @@ async def test_get_triggers_button( await hass.async_block_till_done() +async def test_get_triggers_multiple_buttons( + hass: HomeAssistant, device_registry: dr.DeviceRegistry +) -> None: + """Test that we get the expected triggers for multiple buttons device.""" + mac = "A4:C1:38:8D:18:B2" + entry = await _async_setup_bthome_device(hass, mac) + events = async_capture_events(hass, "bthome_ble_event") + + # Emit button_1 long press and button_2 press events + # so it creates the device in the registry + inject_bluetooth_service_info_bleak( + hass, + make_bthome_v2_adv(mac, b"\x40\x3a\x04\x3a\x01"), + ) + + # wait for the event + await hass.async_block_till_done() + assert len(events) == 2 + + device = device_registry.async_get_device(identifiers={get_device_id(mac)}) + assert device + expected_trigger1 = { + CONF_PLATFORM: "device", + CONF_DOMAIN: DOMAIN, + CONF_DEVICE_ID: device.id, + CONF_TYPE: "button_1", + CONF_SUBTYPE: "long_press", + "metadata": {}, + } + expected_trigger2 = { + CONF_PLATFORM: "device", + CONF_DOMAIN: DOMAIN, + CONF_DEVICE_ID: device.id, + CONF_TYPE: "button_2", + CONF_SUBTYPE: "press", + "metadata": {}, + } + triggers = await async_get_device_automations( + hass, DeviceAutomationType.TRIGGER, device.id + ) + assert expected_trigger1 in triggers + assert expected_trigger2 in triggers + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("event_class", "event_type", "expected"), + [ + ("button_1", "long_press", STATE_ON), + ("button_2", "press", STATE_ON), + ("button_3", 
"long_press", STATE_UNAVAILABLE), + ("button", "long_press", STATE_UNAVAILABLE), + ("button_1", "invalid_press", STATE_UNAVAILABLE), + ], +) +async def test_validate_trigger_config( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + event_class: str, + event_type: str, + expected: str, +) -> None: + """Test unsupported trigger does not return a trigger config.""" + mac = "A4:C1:38:8D:18:B2" + entry = await _async_setup_bthome_device(hass, mac) + + # Emit button_1 long press and button_2 press events + # so it creates the device in the registry + inject_bluetooth_service_info_bleak( + hass, + make_bthome_v2_adv(mac, b"\x40\x3a\x04\x3a\x01"), + ) + + # wait for the event + await hass.async_block_till_done() + + device = device_registry.async_get_device(identifiers={get_device_id(mac)}) + + assert await async_setup_component( + hass, + automation.DOMAIN, + { + automation.DOMAIN: [ + { + "trigger": { + CONF_PLATFORM: "device", + CONF_DOMAIN: DOMAIN, + CONF_DEVICE_ID: device.id, + CONF_TYPE: event_class, + CONF_SUBTYPE: event_type, + }, + "action": { + "service": "test.automation", + "data_template": {"some": "test_trigger_button_long_press"}, + }, + }, + ] + }, + ) + await hass.async_block_till_done() + + automations = hass.states.async_entity_ids(automation.DOMAIN) + assert len(automations) == 1 + assert hass.states.get(automations[0]).state == expected + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + async def test_get_triggers_dimmer( hass: HomeAssistant, device_registry: dr.DeviceRegistry ) -> None: @@ -235,7 +355,7 @@ async def test_if_fires_on_motion_detected( make_bthome_v2_adv(mac, b"\x40\x3a\x03"), ) - # # wait for the event + # wait for the event await hass.async_block_till_done() device = device_registry.async_get_device(identifiers={get_device_id(mac)}) From af51241c0dff53cd8e632d269f8a35f4b2c282a3 Mon Sep 17 00:00:00 2001 From: Martins Sipenko Date: Wed, 4 Sep 2024 19:01:49 +0300 Subject: [PATCH 1432/1635] Reenable Smarty integration (#124148) * Reenable Smarty integration * Updated codeowners to myself * Revert "Updated codeowners to myself" This reverts commit 639fef32b90d22117938f864e6ea3c55b0fc5074. 
* Upgraded pysmarty2 to version 0.10.1 which is not pinned to specific pymodbus version * Update requirements_all.txt --- homeassistant/components/smarty/__init__.py | 2 +- homeassistant/components/smarty/binary_sensor.py | 2 +- homeassistant/components/smarty/fan.py | 2 +- homeassistant/components/smarty/manifest.json | 6 +++--- homeassistant/components/smarty/sensor.py | 2 +- requirements_all.txt | 3 +++ 6 files changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/smarty/__init__.py b/homeassistant/components/smarty/__init__.py index cc2e3850ef9..17c4bd0a26a 100644 --- a/homeassistant/components/smarty/__init__.py +++ b/homeassistant/components/smarty/__init__.py @@ -4,7 +4,7 @@ from datetime import timedelta import ipaddress import logging -from pysmarty import Smarty +from pysmarty2 import Smarty import voluptuous as vol from homeassistant.const import CONF_HOST, CONF_NAME, Platform diff --git a/homeassistant/components/smarty/binary_sensor.py b/homeassistant/components/smarty/binary_sensor.py index cf40dc7b982..b31c51244b8 100644 --- a/homeassistant/components/smarty/binary_sensor.py +++ b/homeassistant/components/smarty/binary_sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging -from pysmarty import Smarty +from pysmarty2 import Smarty from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, diff --git a/homeassistant/components/smarty/fan.py b/homeassistant/components/smarty/fan.py index 37f7c2e493f..a2d72250197 100644 --- a/homeassistant/components/smarty/fan.py +++ b/homeassistant/components/smarty/fan.py @@ -6,7 +6,7 @@ import logging import math from typing import Any -from pysmarty import Smarty +from pysmarty2 import Smarty from homeassistant.components.fan import FanEntity, FanEntityFeature from homeassistant.core import HomeAssistant, callback diff --git a/homeassistant/components/smarty/manifest.json b/homeassistant/components/smarty/manifest.json index 8769aa666a7..b83319b6744 100644 --- a/homeassistant/components/smarty/manifest.json +++ b/homeassistant/components/smarty/manifest.json @@ -2,9 +2,9 @@ "domain": "smarty", "name": "Salda Smarty", "codeowners": ["@z0mbieprocess"], - "disabled": "Dependencies not compatible with the new pip resolver", "documentation": "https://www.home-assistant.io/integrations/smarty", + "integration_type": "hub", "iot_class": "local_polling", - "loggers": ["pymodbus", "pysmarty"], - "requirements": ["pysmarty==0.8"] + "loggers": ["pymodbus", "pysmarty2"], + "requirements": ["pysmarty2==0.10.1"] } diff --git a/homeassistant/components/smarty/sensor.py b/homeassistant/components/smarty/sensor.py index a0c15b3825f..3c6873611b4 100644 --- a/homeassistant/components/smarty/sensor.py +++ b/homeassistant/components/smarty/sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations import datetime as dt import logging -from pysmarty import Smarty +from pysmarty2 import Smarty from homeassistant.components.sensor import SensorDeviceClass, SensorEntity from homeassistant.const import UnitOfTemperature diff --git a/requirements_all.txt b/requirements_all.txt index 2ea174ebbc3..3707b52fc59 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2225,6 +2225,9 @@ pysmartapp==0.3.5 # homeassistant.components.smartthings pysmartthings==0.7.8 +# homeassistant.components.smarty +pysmarty2==0.10.1 + # homeassistant.components.edl21 pysml==0.0.12 From 186c9aa33b083c7ebf8d214e2cbcad19fb854c3b Mon Sep 17 00:00:00 2001 From: Duco Sebel <74970928+DCSBL@users.noreply.github.com> Date: 
Wed, 4 Sep 2024 18:32:57 +0200 Subject: [PATCH 1433/1635] Remove ExternalDevice migration in HomeWizard (#125197) --- homeassistant/components/homewizard/sensor.py | 18 ------- .../homewizard/snapshots/test_sensor.ambr | 33 ------------ tests/components/homewizard/test_sensor.py | 51 +------------------ 3 files changed, 2 insertions(+), 100 deletions(-) diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index c5cf0bc64c7..9bb61a467cb 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -625,26 +625,8 @@ async def async_setup_entry( ) -> None: """Initialize sensors.""" - # Migrate original gas meter sensor to ExternalDevice - # This is sensor that was directly linked to the P1 Meter - # Migration can be removed after 2024.8.0 ent_reg = er.async_get(hass) data = entry.runtime_data.data.data - if ( - entity_id := ent_reg.async_get_entity_id( - Platform.SENSOR, DOMAIN, f"{entry.unique_id}_total_gas_m3" - ) - ) and data.gas_unique_id is not None: - ent_reg.async_update_entity( - entity_id, - new_unique_id=f"{DOMAIN}_gas_meter_{data.gas_unique_id}", - ) - - # Remove old gas_unique_id sensor - if entity_id := ent_reg.async_get_entity_id( - Platform.SENSOR, DOMAIN, f"{entry.unique_id}_gas_unique_id" - ): - ent_reg.async_remove(entity_id) # Initialize default sensors entities: list = [ diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index dd50b098d40..5d5b458dccc 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -1,37 +1,4 @@ # serializer version: 1 -# name: test_gas_meter_migrated[sensor.homewizard_aabbccddeeff_total_gas_m3:entity-registry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.homewizard_aabbccddeeff_total_gas_m3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'homewizard', - 'previous_unique_id': 'aabbccddeeff_total_gas_m3', - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'homewizard_gas_meter_01FFEEDDCCBBAA99887766554433221100', - 'unit_of_measurement': None, - }) -# --- # name: test_sensors[HWE-KWH1-entity_ids7][sensor.device_apparent_power:device-registry] DeviceRegistryEntrySnapshot({ 'area_id': None, diff --git a/tests/components/homewizard/test_sensor.py b/tests/components/homewizard/test_sensor.py index abcd6a879c5..c180c2a4def 100644 --- a/tests/components/homewizard/test_sensor.py +++ b/tests/components/homewizard/test_sensor.py @@ -7,14 +7,13 @@ from homewizard_energy.models import Data import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.homewizard import DOMAIN from homeassistant.components.homewizard.const import UPDATE_INTERVAL -from homeassistant.const import STATE_UNAVAILABLE, Platform +from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er import homeassistant.util.dt as dt_util -from tests.common import MockConfigEntry, 
async_fire_time_changed +from tests.common import async_fire_time_changed pytestmark = [ pytest.mark.usefixtures("init_integration"), @@ -815,49 +814,3 @@ async def test_entities_not_created_for_device( """Ensures entities for a specific device are not created.""" for entity_id in entity_ids: assert not hass.states.get(entity_id) - - -async def test_gas_meter_migrated( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test old gas meter sensor is migrated.""" - entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - "aabbccddeeff_total_gas_m3", - ) - - await hass.config_entries.async_reload(init_integration.entry_id) - await hass.async_block_till_done() - - entity_id = "sensor.homewizard_aabbccddeeff_total_gas_m3" - - assert (entity_entry := entity_registry.async_get(entity_id)) - assert snapshot(name=f"{entity_id}:entity-registry") == entity_entry - - # Make really sure this happens - assert entity_entry.previous_unique_id == "aabbccddeeff_total_gas_m3" - - -async def test_gas_unique_id_removed( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - init_integration: MockConfigEntry, - snapshot: SnapshotAssertion, -) -> None: - """Test old gas meter id sensor is removed.""" - entity_registry.async_get_or_create( - Platform.SENSOR, - DOMAIN, - "aabbccddeeff_gas_unique_id", - ) - - await hass.config_entries.async_reload(init_integration.entry_id) - await hass.async_block_till_done() - - entity_id = "sensor.homewizard_aabbccddeeff_gas_unique_id" - - assert not entity_registry.async_get(entity_id) From 643fd3447823e533c300e770b08c5f679a7f2086 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 4 Sep 2024 18:38:19 +0200 Subject: [PATCH 1434/1635] Improve config flow type hints in starline (#125202) --- .../components/starline/config_flow.py | 67 +++++++++++-------- 1 file changed, 39 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index e27885e6c60..5235bd5230b 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -2,8 +2,6 @@ from __future__ import annotations -from typing import Any - from starline import StarlineAuth import voluptuous as vol @@ -33,6 +31,10 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _app_code: str + _app_token: str + _captcha_image: str + def __init__(self) -> None: """Initialize flow.""" self._app_id: str | None = None @@ -41,59 +43,64 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._password: str | None = None self._mfa_code: str | None = None - self._app_code = None - self._app_token = None self._user_slid = None self._user_id = None self._slnet_token = None self._slnet_token_expires = None - self._captcha_image = None - self._captcha_sid = None - self._captcha_code = None + self._captcha_sid: str | None = None + self._captcha_code: str | None = None self._phone_number = None self._auth = StarlineAuth() async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" return await self.async_step_auth_app(user_input) - async def async_step_auth_app(self, user_input=None, error=None): + async def async_step_auth_app( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: 
"""Authenticate application step.""" if user_input is not None: self._app_id = user_input[CONF_APP_ID] self._app_secret = user_input[CONF_APP_SECRET] - return await self._async_authenticate_app(error) - return self._async_form_auth_app(error) + return await self._async_authenticate_app() + return self._async_form_auth_app() - async def async_step_auth_user(self, user_input=None, error=None): + async def async_step_auth_user( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Authenticate user step.""" if user_input is not None: self._username = user_input[CONF_USERNAME] self._password = user_input[CONF_PASSWORD] - return await self._async_authenticate_user(error) - return self._async_form_auth_user(error) + return await self._async_authenticate_user() + return self._async_form_auth_user() - async def async_step_auth_mfa(self, user_input=None, error=None): + async def async_step_auth_mfa( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Authenticate mfa step.""" if user_input is not None: self._mfa_code = user_input[CONF_MFA_CODE] - return await self._async_authenticate_user(error) - return self._async_form_auth_mfa(error) + return await self._async_authenticate_user() + return self._async_form_auth_mfa() - async def async_step_auth_captcha(self, user_input=None, error=None): + async def async_step_auth_captcha( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Captcha verification step.""" if user_input is not None: self._captcha_code = user_input[CONF_CAPTCHA_CODE] - return await self._async_authenticate_user(error) - return self._async_form_auth_captcha(error) + return await self._async_authenticate_user() + return self._async_form_auth_captcha() @callback - def _async_form_auth_app(self, error=None): + def _async_form_auth_app(self, error: str | None = None) -> ConfigFlowResult: """Authenticate application form.""" - errors = {} + errors: dict[str, str] = {} if error is not None: errors["base"] = error @@ -113,7 +120,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): ) @callback - def _async_form_auth_user(self, error=None): + def _async_form_auth_user(self, error: str | None = None) -> ConfigFlowResult: """Authenticate user form.""" errors = {} if error is not None: @@ -135,7 +142,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): ) @callback - def _async_form_auth_mfa(self, error=None): + def _async_form_auth_mfa(self, error: str | None = None) -> ConfigFlowResult: """Authenticate mfa form.""" errors = {} if error is not None: @@ -155,7 +162,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): ) @callback - def _async_form_auth_captcha(self, error=None): + def _async_form_auth_captcha(self, error: str | None = None) -> ConfigFlowResult: """Captcha verification form.""" errors = {} if error is not None: @@ -176,7 +183,9 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def _async_authenticate_app(self, error=None): + async def _async_authenticate_app( + self, error: str | None = None + ) -> ConfigFlowResult: """Authenticate application.""" try: self._app_code = await self.hass.async_add_executor_job( @@ -190,7 +199,9 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Error auth StarLine: %s", err) return self._async_form_auth_app(ERROR_AUTH_APP) - async def _async_authenticate_user(self, error=None): + async def _async_authenticate_user( + self, error: str | None = None + ) -> ConfigFlowResult: """Authenticate user.""" try: state, data = await 
self.hass.async_add_executor_job( @@ -223,7 +234,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Error auth user: %s", err) return self._async_form_auth_user(ERROR_AUTH_USER) - async def _async_get_entry(self): + async def _async_get_entry(self) -> ConfigFlowResult: """Create entry.""" ( self._slnet_token, From 0fb1fbf0d14e82e371ed9f14784549c325033d53 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 4 Sep 2024 18:38:34 +0200 Subject: [PATCH 1435/1635] Improve config flow type hints (q-s) (#125198) * Improve config flow type hints (q-s) * Revert screenlogic * Revert starline --- .../components/rachio/config_flow.py | 4 ++- .../components/radiotherm/config_flow.py | 4 ++- .../components/reolink/config_flow.py | 2 +- homeassistant/components/roon/config_flow.py | 10 ++++--- homeassistant/components/sense/config_flow.py | 13 ++++----- .../components/smappee/config_flow.py | 14 +++++++--- .../components/smartthings/config_flow.py | 27 ++++++++++++------- .../components/smarttub/config_flow.py | 6 ++++- homeassistant/components/soma/config_flow.py | 2 +- .../components/somfy_mylink/config_flow.py | 14 +++++++--- .../components/songpal/config_flow.py | 20 +++++++------- .../components/soundtouch/config_flow.py | 4 ++- .../components/syncthru/config_flow.py | 4 ++- 13 files changed, 82 insertions(+), 42 deletions(-) diff --git a/homeassistant/components/rachio/config_flow.py b/homeassistant/components/rachio/config_flow.py index bdd2f81536d..66811091820 100644 --- a/homeassistant/components/rachio/config_flow.py +++ b/homeassistant/components/rachio/config_flow.py @@ -118,7 +118,9 @@ class OptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, int] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/radiotherm/config_flow.py b/homeassistant/components/radiotherm/config_flow.py index e9904318ae9..6bcbe11872d 100644 --- a/homeassistant/components/radiotherm/config_flow.py +++ b/homeassistant/components/radiotherm/config_flow.py @@ -60,7 +60,9 @@ class RadioThermConfigFlow(ConfigFlow, domain=DOMAIN): self.discovered_ip = discovery_info.ip return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Attempt to confirm.""" ip_address = self.discovered_ip init_data = self.discovered_init_data diff --git a/homeassistant/components/reolink/config_flow.py b/homeassistant/components/reolink/config_flow.py index 6d0381b025f..067a7e24b8e 100644 --- a/homeassistant/components/reolink/config_flow.py +++ b/homeassistant/components/reolink/config_flow.py @@ -48,7 +48,7 @@ DEFAULT_OPTIONS = {CONF_PROTOCOL: DEFAULT_PROTOCOL} class ReolinkOptionsFlowHandler(OptionsFlow): """Handle Reolink options.""" - def __init__(self, config_entry): + def __init__(self, config_entry: ConfigEntry) -> None: """Initialize ReolinkOptionsFlowHandler.""" self.config_entry = config_entry diff --git a/homeassistant/components/roon/config_flow.py b/homeassistant/components/roon/config_flow.py index de220454852..b896f6775ae 100644 --- a/homeassistant/components/roon/config_flow.py +++ b/homeassistant/components/roon/config_flow.py @@ -142,9 +142,11 
@@ class RoonConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_fallback() - async def async_step_fallback(self, user_input=None): + async def async_step_fallback( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Get host and port details from the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: self._host = user_input["host"] @@ -155,7 +157,9 @@ class RoonConfigFlow(ConfigFlow, domain=DOMAIN): step_id="fallback", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_link(self, user_input=None): + async def async_step_link( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle linking and authenticating with the roon server.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/sense/config_flow.py b/homeassistant/components/sense/config_flow.py index 222c6b30f79..c0df40aec9d 100644 --- a/homeassistant/components/sense/config_flow.py +++ b/homeassistant/components/sense/config_flow.py @@ -3,7 +3,7 @@ from collections.abc import Mapping from functools import partial import logging -from typing import TYPE_CHECKING, Any +from typing import Any from sense_energy import ( ASyncSenseable, @@ -34,9 +34,10 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + _gateway: ASyncSenseable + def __init__(self) -> None: """Init Config .""" - self._gateway: ASyncSenseable | None = None self._auth_data: dict[str, Any] = {} async def validate_input(self, data: Mapping[str, Any]) -> None: @@ -58,14 +59,12 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): client_session=client_session, ) ) - if TYPE_CHECKING: - assert self._gateway self._gateway.rate_limit = ACTIVE_UPDATE_RATE await self._gateway.authenticate( self._auth_data[CONF_EMAIL], self._auth_data[CONF_PASSWORD] ) - async def create_entry_from_data(self): + async def create_entry_from_data(self) -> ConfigFlowResult: """Create the entry from the config data.""" self._auth_data["access_token"] = self._gateway.sense_access_token self._auth_data["user_id"] = self._gateway.sense_user_id @@ -99,7 +98,9 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): return await self.create_entry_from_data() return None - async def async_step_validation(self, user_input=None): + async def async_step_validation( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle validation (2fa) step.""" errors = {} if user_input: diff --git a/homeassistant/components/smappee/config_flow.py b/homeassistant/components/smappee/config_flow.py index d5073bd9c34..f92f8b17662 100644 --- a/homeassistant/components/smappee/config_flow.py +++ b/homeassistant/components/smappee/config_flow.py @@ -69,9 +69,11 @@ class SmappeeFlowHandler( return await self.async_step_zeroconf_confirm() - async def async_step_zeroconf_confirm(self, user_input=None): + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Confirm zeroconf flow.""" - errors = {} + errors: dict[str, str] = {} # Check if already configured (cloud) if self.is_cloud_device_already_added(): @@ -118,7 +120,9 @@ class SmappeeFlowHandler( return await self.async_step_environment() - async def async_step_environment(self, user_input=None): + async def async_step_environment( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Decide environment, cloud or local.""" if user_input is None: return self.async_show_form( @@ -144,7 +148,9 @@ class SmappeeFlowHandler( return 
await self.async_step_pick_implementation() - async def async_step_local(self, user_input=None): + async def async_step_local( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle local flow.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index df5b7a8acfa..081f833787e 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -42,16 +42,17 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 2 + api: SmartThings + app_id: str + location_id: str + def __init__(self) -> None: """Create a new instance of the flow handler.""" - self.access_token = None - self.app_id = None - self.api = None + self.access_token: str | None = None self.oauth_client_secret = None self.oauth_client_id = None self.installed_app_id = None self.refresh_token = None - self.location_id = None self.endpoints_initialized = False async def async_step_import(self, import_data: None) -> ConfigFlowResult: @@ -91,9 +92,11 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): # Show the next screen return await self.async_step_pat() - async def async_step_pat(self, user_input=None): + async def async_step_pat( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Get the Personal Access Token and validate it.""" - errors = {} + errors: dict[str, str] = {} if user_input is None or CONF_ACCESS_TOKEN not in user_input: return self._show_step_pat(errors) @@ -169,7 +172,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_select_location() - async def async_step_select_location(self, user_input=None): + async def async_step_select_location( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Ask user to select the location to setup.""" if user_input is None or CONF_LOCATION_ID not in user_input: # Get available locations @@ -196,7 +201,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(format_unique_id(self.app_id, self.location_id)) return await self.async_step_authorize() - async def async_step_authorize(self, user_input=None): + async def async_step_authorize( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Wait for the user to authorize the app installation.""" user_input = {} if user_input is None else user_input self.installed_app_id = user_input.get(CONF_INSTALLED_APP_ID) @@ -233,7 +240,9 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_install(self, data=None): + async def async_step_install( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Create a config entry at completion of a flow and authorization of the app.""" data = { CONF_ACCESS_TOKEN: self.access_token, diff --git a/homeassistant/components/smarttub/config_flow.py b/homeassistant/components/smarttub/config_flow.py index 827375c907c..5caff953d6d 100644 --- a/homeassistant/components/smarttub/config_flow.py +++ b/homeassistant/components/smarttub/config_flow.py @@ -81,9 +81,13 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): ) return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Dialog that informs the user that reauth is 
required.""" if user_input is None: + if TYPE_CHECKING: + assert self._reauth_input is not None # same as DATA_SCHEMA but with default email data_schema = vol.Schema( { diff --git a/homeassistant/components/soma/config_flow.py b/homeassistant/components/soma/config_flow.py index 586567611f7..caf361d5c3c 100644 --- a/homeassistant/components/soma/config_flow.py +++ b/homeassistant/components/soma/config_flow.py @@ -39,7 +39,7 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_creation(user_input) - async def async_step_creation(self, user_input=None): + async def async_step_creation(self, user_input: dict[str, Any]) -> ConfigFlowResult: """Finish config flow.""" try: api = await self.hass.async_add_executor_job( diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index 231f93b0cb7..705db43362e 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -132,7 +132,7 @@ class OptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry self.options = deepcopy(dict(config_entry.options)) - self._target_id = None + self._target_id: str | None = None @callback def _async_callback_targets(self): @@ -150,7 +150,9 @@ class OptionsFlowHandler(OptionsFlow): return cover["name"] raise KeyError - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle options flow.""" if self.config_entry.state is not ConfigEntryState.LOADED: @@ -173,9 +175,13 @@ class OptionsFlowHandler(OptionsFlow): return self.async_show_form(step_id="init", data_schema=data_schema, errors={}) - async def async_step_target_config(self, user_input=None, target_id=None): + async def async_step_target_config( + self, user_input: dict[str, bool] | None = None, target_id: str | None = None + ) -> ConfigFlowResult: """Handle options flow for target.""" - reversed_target_ids = self.options.setdefault(CONF_REVERSED_TARGET_IDS, {}) + reversed_target_ids: dict[str | None, bool] = self.options.setdefault( + CONF_REVERSED_TARGET_IDS, {} + ) if user_input is not None: if user_input[CONF_REVERSE] != reversed_target_ids.get(self._target_id): diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index 9ccf7a8f19c..7f10d22b8c6 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from songpal import Device, SongpalException @@ -21,7 +21,7 @@ _LOGGER = logging.getLogger(__name__) class SongpalConfig: """Device Configuration.""" - def __init__(self, name, host, endpoint): + def __init__(self, name: str, host: str | None, endpoint: str) -> None: """Initialize Configuration.""" self.name = name self.host = host @@ -33,12 +33,10 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the flow.""" - self.conf: SongpalConfig | None = None + conf: SongpalConfig async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if user_input is None: @@ -75,7 +73,9 @@ class 
SongpalConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_init(user_input) - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" # Check if already configured self._async_abort_entries_match({CONF_ENDPOINT: self.conf.endpoint}) @@ -122,14 +122,16 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): CONF_HOST: parsed_url.hostname, } + if TYPE_CHECKING: + assert isinstance(parsed_url.hostname, str) self.conf = SongpalConfig(friendly_name, parsed_url.hostname, endpoint) return await self.async_step_init() - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, str]) -> ConfigFlowResult: """Import a config entry.""" name = import_data.get(CONF_NAME) - endpoint = import_data.get(CONF_ENDPOINT) + endpoint = import_data[CONF_ENDPOINT] parsed_url = urlparse(endpoint) # Try to connect to test the endpoint diff --git a/homeassistant/components/soundtouch/config_flow.py b/homeassistant/components/soundtouch/config_flow.py index fea63366db9..7c637d71111 100644 --- a/homeassistant/components/soundtouch/config_flow.py +++ b/homeassistant/components/soundtouch/config_flow.py @@ -68,7 +68,9 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {"name": self.name} return await self.async_step_zeroconf_confirm() - async def async_step_zeroconf_confirm(self, user_input=None): + async def async_step_zeroconf_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is not None: return await self._async_create_soundtouch_entry() diff --git a/homeassistant/components/syncthru/config_flow.py b/homeassistant/components/syncthru/config_flow.py index 180ba0d9e34..1fb155a5648 100644 --- a/homeassistant/components/syncthru/config_flow.py +++ b/homeassistant/components/syncthru/config_flow.py @@ -64,7 +64,9 @@ class SyncThruConfigFlow(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {CONF_NAME: self.name} return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle discovery confirmation by user.""" if user_input is not None: return await self._async_check_and_create("confirm", user_input) From 349ea35dc39a0768a6248580df1bf7a22c7d952c Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Wed, 4 Sep 2024 18:41:20 +0200 Subject: [PATCH 1436/1635] Fix device identifier in ViCare integration (#124483) * use correct serial * add migration handler * adjust init call * add missing types * adjust init call * adjust init call * adjust init call * adjust init call * Update types.py * fix loop * fix loop * fix parameter order * align parameter naming * remove comment * correct init * update * Update types.py * correct merge * revert type change * add test case * add helper * add test case * update snapshot * add snapshot * add device.serial data point * fix device unique id * update snapshot * add comments * update nmigration * fix missing parameter * move static parameters * fix circuit access * update device.serial * update snapshots * remove test case * Update binary_sensor.py * convert climate entity * Update entity.py * update snapshot * use snake case * add 
migration test * enhance test case * add test case * Apply suggestions from code review --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/vicare/__init__.py | 76 +++++++++++++- homeassistant/components/vicare/climate.py | 42 ++++---- homeassistant/components/vicare/entity.py | 12 ++- .../components/vicare/fixtures/ViAir300F.json | 2 +- .../vicare/fixtures/Vitodens300W.json | 4 +- .../vicare/snapshots/test_binary_sensor.ambr | 16 +-- .../vicare/snapshots/test_button.ambr | 2 +- .../vicare/snapshots/test_climate.ambr | 4 +- .../vicare/snapshots/test_diagnostics.ambr | 4 +- .../components/vicare/snapshots/test_fan.ambr | 2 +- .../vicare/snapshots/test_number.ambr | 22 ++--- .../vicare/snapshots/test_sensor.ambr | 42 ++++---- .../vicare/snapshots/test_water_heater.ambr | 4 +- tests/components/vicare/test_init.py | 99 +++++++++++++++++++ 14 files changed, 252 insertions(+), 79 deletions(-) create mode 100644 tests/components/vicare/test_init.py diff --git a/homeassistant/components/vicare/__init__.py b/homeassistant/components/vicare/__init__.py index 0c87cd6f4fe..ead210e2816 100644 --- a/homeassistant/components/vicare/__init__.py +++ b/homeassistant/components/vicare/__init__.py @@ -15,10 +15,12 @@ from PyViCare.PyViCareUtils import ( PyViCareInvalidCredentialsError, ) +from homeassistant.components.climate import DOMAIN as DOMAIN_CLIMATE from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.storage import STORAGE_DIR from .const import ( @@ -47,6 +49,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (PyViCareInvalidConfigurationError, PyViCareInvalidCredentialsError) as err: raise ConfigEntryAuthFailed("Authentication failed") from err + for device in hass.data[DOMAIN][entry.entry_id][DEVICE_LIST]: + # Migration can be removed in 2025.4.0 + await async_migrate_devices(hass, entry, device) + # Migration can be removed in 2025.4.0 + await async_migrate_entities(hass, entry, device) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -109,6 +117,72 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok +async def async_migrate_devices( + hass: HomeAssistant, entry: ConfigEntry, device: ViCareDevice +) -> None: + """Migrate old entry.""" + registry = dr.async_get(hass) + + gateway_serial: str = device.config.getConfig().serial + device_serial: str = device.api.getSerial() + + old_identifier = gateway_serial + new_identifier = f"{gateway_serial}_{device_serial}" + + # Migrate devices + for device_entry in dr.async_entries_for_config_entry(registry, entry.entry_id): + if device_entry.identifiers == {(DOMAIN, old_identifier)}: + _LOGGER.debug("Migrating device %s", device_entry.name) + registry.async_update_device( + device_entry.id, + serial_number=device_serial, + new_identifiers={(DOMAIN, new_identifier)}, + ) + + +async def async_migrate_entities( + hass: HomeAssistant, entry: ConfigEntry, device: ViCareDevice +) -> None: + """Migrate old entry.""" + gateway_serial: str = device.config.getConfig().serial + device_serial: str = device.api.getSerial() + new_identifier = f"{gateway_serial}_{device_serial}" + + 
@callback + def _update_unique_id( + entity_entry: er.RegistryEntry, + ) -> dict[str, str] | None: + """Update unique ID of entity entry.""" + if not entity_entry.unique_id.startswith(gateway_serial): + # belongs to other device/gateway + return None + if entity_entry.unique_id.startswith(f"{gateway_serial}_"): + # Already correct, nothing to do + return None + + unique_id_parts = entity_entry.unique_id.split("-") + unique_id_parts[0] = new_identifier + + # convert climate entity unique id from `-` to `-heating-` + if entity_entry.domain == DOMAIN_CLIMATE: + unique_id_parts[len(unique_id_parts) - 1] = ( + f"{entity_entry.translation_key}-{unique_id_parts[len(unique_id_parts)-1]}" + ) + + entity_new_unique_id = "-".join(unique_id_parts) + + _LOGGER.debug( + "Migrating entity %s from %s to new id %s", + entity_entry.entity_id, + entity_entry.unique_id, + entity_new_unique_id, + ) + return {"new_unique_id": entity_new_unique_id} + + # Migrate entities + await er.async_migrate_entries(hass, entry.entry_id, _update_unique_id) + + def get_supported_devices( devices: list[PyViCareDeviceConfig], ) -> list[PyViCareDeviceConfig]: diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 4968e565d0b..410395760ea 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -148,10 +148,10 @@ class ViCareClimate(ViCareEntity, ClimateEntity): circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the climate device.""" - super().__init__(circuit.id, device_config, device) - self._circuit = circuit + super().__init__(self._attr_translation_key, device_config, device, circuit) + self._device = device self._attributes: dict[str, Any] = {} - self._attributes["vicare_programs"] = self._circuit.getPrograms() + self._attributes["vicare_programs"] = self._api.getPrograms() self._attr_preset_modes = [ preset for heating_program in self._attributes["vicare_programs"] @@ -163,11 +163,11 @@ class ViCareClimate(ViCareEntity, ClimateEntity): try: _room_temperature = None with suppress(PyViCareNotSupportedFeatureError): - _room_temperature = self._circuit.getRoomTemperature() + _room_temperature = self._api.getRoomTemperature() _supply_temperature = None with suppress(PyViCareNotSupportedFeatureError): - _supply_temperature = self._circuit.getSupplyTemperature() + _supply_temperature = self._api.getSupplyTemperature() if _room_temperature is not None: self._attr_current_temperature = _room_temperature @@ -177,15 +177,13 @@ class ViCareClimate(ViCareEntity, ClimateEntity): self._attr_current_temperature = None with suppress(PyViCareNotSupportedFeatureError): - self._current_program = self._circuit.getActiveProgram() + self._current_program = self._api.getActiveProgram() with suppress(PyViCareNotSupportedFeatureError): - self._attr_target_temperature = ( - self._circuit.getCurrentDesiredTemperature() - ) + self._attr_target_temperature = self._api.getCurrentDesiredTemperature() with suppress(PyViCareNotSupportedFeatureError): - self._current_mode = self._circuit.getActiveMode() + self._current_mode = self._api.getActiveMode() # Update the generic device attributes self._attributes = { @@ -196,25 +194,25 @@ class ViCareClimate(ViCareEntity, ClimateEntity): with suppress(PyViCareNotSupportedFeatureError): self._attributes["heating_curve_slope"] = ( - self._circuit.getHeatingCurveSlope() + self._api.getHeatingCurveSlope() ) with suppress(PyViCareNotSupportedFeatureError): self._attributes["heating_curve_shift"] = ( - 
self._circuit.getHeatingCurveShift() + self._api.getHeatingCurveShift() ) with suppress(PyViCareNotSupportedFeatureError): - self._attributes["vicare_modes"] = self._circuit.getModes() + self._attributes["vicare_modes"] = self._api.getModes() self._current_action = False # Update the specific device attributes with suppress(PyViCareNotSupportedFeatureError): - for burner in get_burners(self._api): + for burner in get_burners(self._device): self._current_action = self._current_action or burner.getActive() with suppress(PyViCareNotSupportedFeatureError): - for compressor in get_compressors(self._api): + for compressor in get_compressors(self._device): self._current_action = ( self._current_action or compressor.getActive() ) @@ -245,9 +243,9 @@ class ViCareClimate(ViCareEntity, ClimateEntity): raise ValueError(f"Cannot set invalid hvac mode: {hvac_mode}") _LOGGER.debug("Setting hvac mode to %s / %s", hvac_mode, vicare_mode) - self._circuit.setMode(vicare_mode) + self._api.setMode(vicare_mode) - def vicare_mode_from_hvac_mode(self, hvac_mode): + def vicare_mode_from_hvac_mode(self, hvac_mode) -> str | None: """Return the corresponding vicare mode for an hvac_mode.""" if "vicare_modes" not in self._attributes: return None @@ -283,7 +281,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): def set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" if (temp := kwargs.get(ATTR_TEMPERATURE)) is not None: - self._circuit.setProgramTemperature(self._current_program, temp) + self._api.setProgramTemperature(self._current_program, temp) self._attr_target_temperature = temp @property @@ -312,7 +310,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): ): _LOGGER.debug("deactivating %s", self._current_program) try: - self._circuit.deactivateProgram(self._current_program) + self._api.deactivateProgram(self._current_program) except PyViCareCommandError as err: raise ServiceValidationError( translation_domain=DOMAIN, @@ -326,7 +324,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): if target_program in CHANGABLE_HEATING_PROGRAMS: _LOGGER.debug("activating %s", target_program) try: - self._circuit.activateProgram(target_program) + self._api.activateProgram(target_program) except PyViCareCommandError as err: raise ServiceValidationError( translation_domain=DOMAIN, @@ -341,9 +339,9 @@ class ViCareClimate(ViCareEntity, ClimateEntity): """Show Device Attributes.""" return self._attributes - def set_vicare_mode(self, vicare_mode): + def set_vicare_mode(self, vicare_mode) -> None: """Service function to set vicare modes directly.""" if vicare_mode not in self._attributes["vicare_modes"]: raise ValueError(f"Cannot set invalid vicare mode: {vicare_mode}.") - self._circuit.setMode(vicare_mode) + self._api.setMode(vicare_mode) diff --git a/homeassistant/components/vicare/entity.py b/homeassistant/components/vicare/entity.py index eef114b4039..f48243e83e1 100644 --- a/homeassistant/components/vicare/entity.py +++ b/homeassistant/components/vicare/entity.py @@ -25,18 +25,20 @@ class ViCareEntity(Entity): component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the entity.""" + gateway_serial = device_config.getConfig().serial + device_serial = device.getSerial() + identifier = f"{gateway_serial}_{device_serial}" + self._api: PyViCareDevice | PyViCareHeatingDeviceComponent = ( component if component else device ) - - self._attr_unique_id = f"{device_config.getConfig().serial}-{unique_id_suffix}" - # valid for compressors, circuits, burners 
(HeatingDeviceWithComponent) + self._attr_unique_id = f"{identifier}-{unique_id_suffix}" if component: self._attr_unique_id += f"-{component.id}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device_config.getConfig().serial)}, - serial_number=device.getSerial(), + identifiers={(DOMAIN, identifier)}, + serial_number=device_serial, name=device_config.getModel(), manufacturer="Viessmann", model=device_config.getModel(), diff --git a/tests/components/vicare/fixtures/ViAir300F.json b/tests/components/vicare/fixtures/ViAir300F.json index b1ec747e127..090c7a81ddf 100644 --- a/tests/components/vicare/fixtures/ViAir300F.json +++ b/tests/components/vicare/fixtures/ViAir300F.json @@ -50,7 +50,7 @@ "properties": { "value": { "type": "string", - "value": "################" + "value": "deviceSerialViAir300F" } }, "timestamp": "2024-03-20T01:29:35.549Z", diff --git a/tests/components/vicare/fixtures/Vitodens300W.json b/tests/components/vicare/fixtures/Vitodens300W.json index bb86bda981b..d183146e94d 100644 --- a/tests/components/vicare/fixtures/Vitodens300W.json +++ b/tests/components/vicare/fixtures/Vitodens300W.json @@ -11,10 +11,10 @@ "properties": { "value": { "type": "string", - "value": "################" + "value": "deviceSerialVitodens300W" } }, - "timestamp": "2024-03-20T01:29:35.549Z", + "timestamp": "2024-07-30T20:03:40.073Z", "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" }, { diff --git a/tests/components/vicare/snapshots/test_binary_sensor.ambr b/tests/components/vicare/snapshots/test_binary_sensor.ambr index a03a6150c45..f3e4d4e1c84 100644 --- a/tests/components/vicare/snapshots/test_binary_sensor.ambr +++ b/tests/components/vicare/snapshots/test_binary_sensor.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner', - 'unique_id': 'gateway0-burner_active-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_active-0', 'unit_of_measurement': None, }) # --- @@ -75,7 +75,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'circulation_pump', - 'unique_id': 'gateway0-circulationpump_active-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-circulationpump_active-0', 'unit_of_measurement': None, }) # --- @@ -122,7 +122,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'circulation_pump', - 'unique_id': 'gateway0-circulationpump_active-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-circulationpump_active-1', 'unit_of_measurement': None, }) # --- @@ -169,7 +169,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'domestic_hot_water_charging', - 'unique_id': 'gateway0-charging_active', + 'unique_id': 'gateway0_deviceSerialVitodens300W-charging_active', 'unit_of_measurement': None, }) # --- @@ -216,7 +216,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'domestic_hot_water_circulation_pump', - 'unique_id': 'gateway0-dhw_circulationpump_active', + 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_circulationpump_active', 'unit_of_measurement': None, }) # --- @@ -263,7 +263,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'domestic_hot_water_pump', - 'unique_id': 'gateway0-dhw_pump_active', + 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_pump_active', 'unit_of_measurement': None, }) # --- @@ -310,7 +310,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'frost_protection', 
- 'unique_id': 'gateway0-frost_protection_active-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-frost_protection_active-0', 'unit_of_measurement': None, }) # --- @@ -356,7 +356,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'frost_protection', - 'unique_id': 'gateway0-frost_protection_active-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-frost_protection_active-1', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_button.ambr b/tests/components/vicare/snapshots/test_button.ambr index 01120b8b0d6..9fadc6a983f 100644 --- a/tests/components/vicare/snapshots/test_button.ambr +++ b/tests/components/vicare/snapshots/test_button.ambr @@ -28,7 +28,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'activate_onetimecharge', - 'unique_id': 'gateway0-activate_onetimecharge', + 'unique_id': 'gateway0_deviceSerialVitodens300W-activate_onetimecharge', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_climate.ambr b/tests/components/vicare/snapshots/test_climate.ambr index a01d1c43bea..aea0ea879c2 100644 --- a/tests/components/vicare/snapshots/test_climate.ambr +++ b/tests/components/vicare/snapshots/test_climate.ambr @@ -40,7 +40,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'heating', - 'unique_id': 'gateway0-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating-0', 'unit_of_measurement': None, }) # --- @@ -123,7 +123,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'heating', - 'unique_id': 'gateway0-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating-1', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index 430b2de35ad..120bdf7a333 100644 --- a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -16,10 +16,10 @@ 'properties': dict({ 'value': dict({ 'type': 'string', - 'value': '################', + 'value': 'deviceSerialVitodens300W', }), }), - 'timestamp': '2024-03-20T01:29:35.549Z', + 'timestamp': '2024-07-30T20:03:40.073Z', 'uri': 'https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial', }), dict({ diff --git a/tests/components/vicare/snapshots/test_fan.ambr b/tests/components/vicare/snapshots/test_fan.ambr index 48c8d728569..8ec4bc41d8d 100644 --- a/tests/components/vicare/snapshots/test_fan.ambr +++ b/tests/components/vicare/snapshots/test_fan.ambr @@ -35,7 +35,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'ventilation', - 'unique_id': 'gateway0-ventilation', + 'unique_id': 'gateway0_deviceSerialViAir300F-ventilation', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/snapshots/test_number.ambr b/tests/components/vicare/snapshots/test_number.ambr index e6e87ce5dc7..5a030fc0213 100644 --- a/tests/components/vicare/snapshots/test_number.ambr +++ b/tests/components/vicare/snapshots/test_number.ambr @@ -33,7 +33,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'comfort_temperature', - 'unique_id': 'gateway0-comfort_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-comfort_temperature-0', 'unit_of_measurement': , }) # --- @@ -90,7 +90,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'comfort_temperature', 
- 'unique_id': 'gateway0-comfort_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-comfort_temperature-1', 'unit_of_measurement': , }) # --- @@ -147,7 +147,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_shift', - 'unique_id': 'gateway0-heating curve shift-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve shift-0', 'unit_of_measurement': , }) # --- @@ -204,7 +204,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_shift', - 'unique_id': 'gateway0-heating curve shift-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve shift-1', 'unit_of_measurement': , }) # --- @@ -261,7 +261,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_slope', - 'unique_id': 'gateway0-heating curve slope-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve slope-0', 'unit_of_measurement': None, }) # --- @@ -316,7 +316,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'heating_curve_slope', - 'unique_id': 'gateway0-heating curve slope-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-heating curve slope-1', 'unit_of_measurement': None, }) # --- @@ -371,7 +371,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'normal_temperature', - 'unique_id': 'gateway0-normal_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-normal_temperature-0', 'unit_of_measurement': , }) # --- @@ -428,7 +428,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'normal_temperature', - 'unique_id': 'gateway0-normal_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-normal_temperature-1', 'unit_of_measurement': , }) # --- @@ -485,7 +485,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'reduced_temperature', - 'unique_id': 'gateway0-reduced_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-reduced_temperature-0', 'unit_of_measurement': , }) # --- @@ -542,7 +542,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'reduced_temperature', - 'unique_id': 'gateway0-reduced_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-reduced_temperature-1', 'unit_of_measurement': , }) # --- @@ -599,7 +599,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'dhw_temperature', - 'unique_id': 'gateway0-dhw_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-dhw_temperature', 'unit_of_measurement': , }) # --- diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr index 7bbac75bedc..43e5b713293 100644 --- a/tests/components/vicare/snapshots/test_sensor.ambr +++ b/tests/components/vicare/snapshots/test_sensor.ambr @@ -30,7 +30,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'boiler_temperature', - 'unique_id': 'gateway0-boiler_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-boiler_temperature', 'unit_of_measurement': , }) # --- @@ -81,7 +81,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner_hours', - 'unique_id': 'gateway0-burner_hours-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_hours-0', 'unit_of_measurement': , }) # --- @@ -131,7 +131,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner_modulation', - 'unique_id': 'gateway0-burner_modulation-0', + 'unique_id': 
'gateway0_deviceSerialVitodens300W-burner_modulation-0', 'unit_of_measurement': '%', }) # --- @@ -181,7 +181,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'burner_starts', - 'unique_id': 'gateway0-burner_starts-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-burner_starts-0', 'unit_of_measurement': None, }) # --- @@ -230,7 +230,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_heating_this_month', - 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_month', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_month', 'unit_of_measurement': None, }) # --- @@ -279,7 +279,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_heating_this_week', - 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_week', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_week', 'unit_of_measurement': None, }) # --- @@ -328,7 +328,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_heating_this_year', - 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_year', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_heating_this_year', 'unit_of_measurement': None, }) # --- @@ -377,7 +377,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_gas_consumption_today', - 'unique_id': 'gateway0-hotwater_gas_consumption_today', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_gas_consumption_today', 'unit_of_measurement': None, }) # --- @@ -426,7 +426,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_max_temperature', - 'unique_id': 'gateway0-hotwater_max_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_max_temperature', 'unit_of_measurement': , }) # --- @@ -477,7 +477,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'hotwater_min_temperature', - 'unique_id': 'gateway0-hotwater_min_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-hotwater_min_temperature', 'unit_of_measurement': , }) # --- @@ -528,7 +528,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power consumption this month', - 'unique_id': 'gateway0-power consumption this month', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this month', 'unit_of_measurement': , }) # --- @@ -579,7 +579,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_consumption_this_year', - 'unique_id': 'gateway0-power consumption this year', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this year', 'unit_of_measurement': , }) # --- @@ -630,7 +630,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_consumption_today', - 'unique_id': 'gateway0-power consumption today', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption today', 'unit_of_measurement': , }) # --- @@ -681,7 +681,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_this_month', - 'unique_id': 'gateway0-gas_consumption_heating_this_month', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_month', 'unit_of_measurement': None, }) # --- @@ -730,7 +730,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 
'gas_consumption_heating_this_week', - 'unique_id': 'gateway0-gas_consumption_heating_this_week', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_week', 'unit_of_measurement': None, }) # --- @@ -779,7 +779,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_this_year', - 'unique_id': 'gateway0-gas_consumption_heating_this_year', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_this_year', 'unit_of_measurement': None, }) # --- @@ -828,7 +828,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'gas_consumption_heating_today', - 'unique_id': 'gateway0-gas_consumption_heating_today', + 'unique_id': 'gateway0_deviceSerialVitodens300W-gas_consumption_heating_today', 'unit_of_measurement': None, }) # --- @@ -877,7 +877,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'outside_temperature', - 'unique_id': 'gateway0-outside_temperature', + 'unique_id': 'gateway0_deviceSerialVitodens300W-outside_temperature', 'unit_of_measurement': , }) # --- @@ -928,7 +928,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'power_consumption_this_week', - 'unique_id': 'gateway0-power consumption this week', + 'unique_id': 'gateway0_deviceSerialVitodens300W-power consumption this week', 'unit_of_measurement': , }) # --- @@ -979,7 +979,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'supply_temperature', - 'unique_id': 'gateway0-supply_temperature-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-supply_temperature-0', 'unit_of_measurement': , }) # --- @@ -1030,7 +1030,7 @@ 'previous_unique_id': None, 'supported_features': 0, 'translation_key': 'supply_temperature', - 'unique_id': 'gateway0-supply_temperature-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-supply_temperature-1', 'unit_of_measurement': , }) # --- diff --git a/tests/components/vicare/snapshots/test_water_heater.ambr b/tests/components/vicare/snapshots/test_water_heater.ambr index 5ab4fcc78bd..bca04b1bbfa 100644 --- a/tests/components/vicare/snapshots/test_water_heater.ambr +++ b/tests/components/vicare/snapshots/test_water_heater.ambr @@ -31,7 +31,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'domestic_hot_water', - 'unique_id': 'gateway0-0', + 'unique_id': 'gateway0_deviceSerialVitodens300W-0', 'unit_of_measurement': None, }) # --- @@ -87,7 +87,7 @@ 'previous_unique_id': None, 'supported_features': , 'translation_key': 'domestic_hot_water', - 'unique_id': 'gateway0-1', + 'unique_id': 'gateway0_deviceSerialVitodens300W-1', 'unit_of_measurement': None, }) # --- diff --git a/tests/components/vicare/test_init.py b/tests/components/vicare/test_init.py new file mode 100644 index 00000000000..fea7b5985f1 --- /dev/null +++ b/tests/components/vicare/test_init.py @@ -0,0 +1,99 @@ +"""Test ViCare migration.""" + +from unittest.mock import patch + +from homeassistant.components.vicare.const import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from . 
import MODULE +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry + + +# Device migration test can be removed in 2025.4.0 +async def test_device_migration( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the device registry is updated correctly.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), + ): + mock_config_entry.add_to_hass(hass) + + device_registry.async_get_or_create( + config_entry_id=mock_config_entry.entry_id, + identifiers={ + (DOMAIN, "gateway0"), + }, + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.async_block_till_done() + + assert device_registry.async_get_device(identifiers={(DOMAIN, "gateway0")}) is None + + assert ( + device_registry.async_get_device( + identifiers={(DOMAIN, "gateway0_deviceSerialVitodens300W")} + ) + is not None + ) + + +# Entity migration test can be removed in 2025.4.0 +async def test_climate_entity_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the climate entity unique_id gets migrated correctly.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), + ): + mock_config_entry.add_to_hass(hass) + + entry1 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway0-0", + translation_key="heating", + ) + entry2 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway0_deviceSerialVitodens300W-heating-1", + translation_key="heating", + ) + entry3 = entity_registry.async_get_or_create( + domain=Platform.CLIMATE, + platform=DOMAIN, + config_entry=mock_config_entry, + unique_id="gateway1-0", + translation_key="heating", + ) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.async_block_till_done() + + assert ( + entity_registry.async_get(entry1.entity_id).unique_id + == "gateway0_deviceSerialVitodens300W-heating-0" + ) + assert ( + entity_registry.async_get(entry2.entity_id).unique_id + == "gateway0_deviceSerialVitodens300W-heating-1" + ) + assert entity_registry.async_get(entry3.entity_id).unique_id == "gateway1-0" From 416a2de179d2035cb90499deb85964bdce6f7c18 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Wed, 4 Sep 2024 19:09:41 +0200 Subject: [PATCH 1437/1635] Improve config flow type hints in screenlogic (#125199) --- .../components/screenlogic/config_flow.py | 16 +++++++++------- .../components/screenlogic/coordinator.py | 5 ++++- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/screenlogic/config_flow.py b/homeassistant/components/screenlogic/config_flow.py index 74a01fdeaa2..4a46756cf2f 100644 --- a/homeassistant/components/screenlogic/config_flow.py +++ b/homeassistant/components/screenlogic/config_flow.py @@ -32,9 +32,9 @@ GATEWAY_MANUAL_ENTRY = "manual" PENTAIR_OUI = "00-C0-33" -async def async_discover_gateways_by_unique_id(hass): +async def async_discover_gateways_by_unique_id() -> dict[str, dict[str, 
Any]]: """Discover gateways and return a dict of them by unique id.""" - discovered_gateways = {} + discovered_gateways: dict[str, dict[str, Any]] = {} try: hosts = await discovery.async_discover() _LOGGER.debug("Discovered hosts: %s", hosts) @@ -51,16 +51,16 @@ async def async_discover_gateways_by_unique_id(hass): return discovered_gateways -def _extract_mac_from_name(name): +def _extract_mac_from_name(name: str) -> str: return format_mac(f"{PENTAIR_OUI}-{name.split(':')[1].strip()}") -def short_mac(mac): +def short_mac(mac: str) -> str: """Short version of the mac as seen in the app.""" return "-".join(mac.split(":")[3:]).upper() -def name_for_mac(mac): +def name_for_mac(mac: str) -> str: """Derive the gateway name from the mac.""" return f"Pentair: {short_mac(mac)}" @@ -83,9 +83,11 @@ class ScreenlogicConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for ScreenLogic.""" return ScreenLogicOptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" - self.discovered_gateways = await async_discover_gateways_by_unique_id(self.hass) + self.discovered_gateways = await async_discover_gateways_by_unique_id() return await self.async_step_gateway_select() async def async_step_dhcp( diff --git a/homeassistant/components/screenlogic/coordinator.py b/homeassistant/components/screenlogic/coordinator.py index 281bac86e01..a90c9cb2cf4 100644 --- a/homeassistant/components/screenlogic/coordinator.py +++ b/homeassistant/components/screenlogic/coordinator.py @@ -2,6 +2,7 @@ from datetime import timedelta import logging +from typing import TYPE_CHECKING from screenlogicpy import ScreenLogicGateway from screenlogicpy.const.common import ( @@ -33,11 +34,13 @@ async def async_get_connect_info( """Construct connect_info from configuration entry and returns it to caller.""" mac = entry.unique_id # Attempt to rediscover gateway to follow IP changes - discovered_gateways = await async_discover_gateways_by_unique_id(hass) + discovered_gateways = await async_discover_gateways_by_unique_id() if mac in discovered_gateways: return discovered_gateways[mac] _LOGGER.debug("Gateway rediscovery failed for %s", entry.title) + if TYPE_CHECKING: + assert mac is not None # Static connection defined or fallback from discovery return { SL_GATEWAY_NAME: name_for_mac(mac), From 7266a16295774488e55f410644b18e551ced4088 Mon Sep 17 00:00:00 2001 From: TimL Date: Thu, 5 Sep 2024 03:10:59 +1000 Subject: [PATCH 1438/1635] Add Button platform for Smlight integration (#124970) * Add button platform for smlight integration * Add strings required for button platform * Add commands api to smlight mock client * Add tests for smlight button platform * Move entity category to class * Disable by default Zigbee flash mode --- homeassistant/components/smlight/__init__.py | 1 + homeassistant/components/smlight/button.py | 87 +++++++++++++++++++ homeassistant/components/smlight/strings.json | 11 +++ tests/components/smlight/conftest.py | 4 +- tests/components/smlight/test_button.py | 77 ++++++++++++++++ 5 files changed, 179 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/smlight/button.py create mode 100644 tests/components/smlight/test_button.py diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 16eb60b9c87..47dc943423e 100644 --- 
a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -9,6 +9,7 @@ from homeassistant.core import HomeAssistant from .coordinator import SmDataUpdateCoordinator PLATFORMS: list[Platform] = [ + Platform.BUTTON, Platform.SENSOR, ] type SmConfigEntry = ConfigEntry[SmDataUpdateCoordinator] diff --git a/homeassistant/components/smlight/button.py b/homeassistant/components/smlight/button.py new file mode 100644 index 00000000000..b6a0c24c2ed --- /dev/null +++ b/homeassistant/components/smlight/button.py @@ -0,0 +1,87 @@ +"""Support for SLZB-06 buttons.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +import logging +from typing import Final + +from pysmlight.web import CmdWrapper + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmButtonDescription(ButtonEntityDescription): + """Class to describe a Button entity.""" + + press_fn: Callable[[CmdWrapper], Awaitable[None]] + + +BUTTONS: Final = [ + SmButtonDescription( + key="core_restart", + translation_key="core_restart", + device_class=ButtonDeviceClass.RESTART, + press_fn=lambda cmd: cmd.reboot(), + ), + SmButtonDescription( + key="zigbee_restart", + translation_key="zigbee_restart", + device_class=ButtonDeviceClass.RESTART, + press_fn=lambda cmd: cmd.zb_restart(), + ), + SmButtonDescription( + key="zigbee_flash_mode", + translation_key="zigbee_flash_mode", + entity_registry_enabled_default=False, + press_fn=lambda cmd: cmd.zb_bootloader(), + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SMLIGHT buttons based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities(SmButton(coordinator, button) for button in BUTTONS) + + +class SmButton(SmEntity, ButtonEntity): + """Defines a SLZB-06 button.""" + + entity_description: SmButtonDescription + _attr_entity_category = EntityCategory.CONFIG + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmButtonDescription, + ) -> None: + """Initialize SLZB-06 button entity.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + + async def async_press(self) -> None: + """Trigger button press.""" + await self.entity_description.press_fn(self.coordinator.client.cmds) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index 02b9ebcc4e8..f81e977b40c 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -44,6 +44,17 @@ "ram_usage": { "name": "RAM usage" } + }, + "button": { + "core_restart": { + "name": "Core restart" + }, + "zigbee_restart": { + "name": "Zigbee restart" + }, + "zigbee_flash_mode": { + "name": "Zigbee flash mode" + } } } } diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py index 93493daf51d..ad4d749c0d2 100644 --- 
a/tests/components/smlight/conftest.py +++ b/tests/components/smlight/conftest.py @@ -3,7 +3,7 @@ from collections.abc import AsyncGenerator, Generator from unittest.mock import AsyncMock, MagicMock, patch -from pysmlight.web import Info, Sensors +from pysmlight.web import CmdWrapper, Info, Sensors import pytest from homeassistant.components.smlight import PLATFORMS @@ -75,6 +75,8 @@ def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: api.check_auth_needed.return_value = False api.authenticate.return_value = True + api.cmds = AsyncMock(spec_set=CmdWrapper) + yield api diff --git a/tests/components/smlight/test_button.py b/tests/components/smlight/test_button.py new file mode 100644 index 00000000000..487351acdea --- /dev/null +++ b/tests/components/smlight/test_button.py @@ -0,0 +1,77 @@ +"""Tests for SMLIGHT SLZB-06 button entities.""" + +from unittest.mock import MagicMock + +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.fixture +def platforms() -> Platform | list[Platform]: + """Platforms, which should be loaded during the test.""" + return [Platform.BUTTON] + + +@pytest.mark.parametrize( + ("entity_id", "method"), + [ + ("core_restart", "reboot"), + ("zigbee_flash_mode", "zb_bootloader"), + ("zigbee_restart", "zb_restart"), + ], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_buttons( + hass: HomeAssistant, + entity_id: str, + entity_registry: er.EntityRegistry, + method: str, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test creation of button entities.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get(f"button.mock_title_{entity_id}") + assert state is not None + assert state.state == STATE_UNKNOWN + + entry = entity_registry.async_get(f"button.mock_title_{entity_id}") + assert entry is not None + assert entry.unique_id == f"aa:bb:cc:dd:ee:ff-{entity_id}" + + mock_method = getattr(mock_smlight_client.cmds, method) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.mock_title_{entity_id}"}, + blocking=True, + ) + + assert len(mock_method.mock_calls) == 1 + mock_method.assert_called_with() + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_disabled_by_default_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the disabled by default flash mode button.""" + await setup_integration(hass, mock_config_entry) + + assert not hass.states.get("button.mock_title_zigbee_flash_mode") + + assert (entry := entity_registry.async_get("button.mock_title_zigbee_flash_mode")) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION From bad305dcbfb1f672def2a67ae9174cb94d7f8cf1 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Wed, 4 Sep 2024 19:11:34 +0200 Subject: [PATCH 1439/1635] Add Onkyo to strict typing (#124617) --- .strict-typing | 1 + .../components/onkyo/media_player.py | 34 +++++++++++-------- mypy.ini | 10 ++++++ 3 files changed, 31 insertions(+), 14 deletions(-) diff --git a/.strict-typing b/.strict-typing 
index 1d73b05fdea..e93f1589cc8 100644 --- a/.strict-typing +++ b/.strict-typing @@ -341,6 +341,7 @@ homeassistant.components.nut.* homeassistant.components.onboarding.* homeassistant.components.oncue.* homeassistant.components.onewire.* +homeassistant.components.onkyo.* homeassistant.components.open_meteo.* homeassistant.components.openexchangerates.* homeassistant.components.opensky.* diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index df1f25a196b..1718ecb36be 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio import logging -from typing import Any +from typing import Any, Literal import pyeiscp import voluptuous as vol @@ -23,7 +23,7 @@ from homeassistant.const import ( CONF_NAME, EVENT_HOMEASSISTANT_STOP, ) -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import Event, HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -269,7 +269,7 @@ async def async_setup_platform( _LOGGER.debug("Manually creating receiver: %s (%s)", name, host) @callback - async def async_onkyo_interview_callback(conn: pyeiscp.Connection): + async def async_onkyo_interview_callback(conn: pyeiscp.Connection) -> None: """Receiver interviewed, connection not yet active.""" info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) @@ -285,7 +285,7 @@ async def async_setup_platform( _LOGGER.debug("Discovering receivers") @callback - async def async_onkyo_discovery_callback(conn: pyeiscp.Connection): + async def async_onkyo_discovery_callback(conn: pyeiscp.Connection) -> None: """Receiver discovered, connection not yet active.""" info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) @@ -298,7 +298,7 @@ async def async_setup_platform( ) @callback - def close_receiver(_event): + def close_receiver(_event: Event) -> None: for receiver in receivers.values(): receiver.conn.close() @@ -495,19 +495,23 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self.async_write_ha_state() @callback - def _parse_source(self, source): + def _parse_source(self, source_raw: str | int | tuple[str]) -> None: # source is either a tuple of values or a single value, # so we convert to a tuple, when it is a single value. - if not isinstance(source, tuple): - source = (source,) + if isinstance(source_raw, str | int): + source = (str(source_raw),) + else: + source = source_raw for value in source: if value in self._source_mapping: self._attr_source = self._source_mapping[value] - break - self._attr_source = "_".join(source) + return + self._attr_source = "_".join(source) @callback - def _parse_audio_information(self, audio_information): + def _parse_audio_information( + self, audio_information: tuple[str] | Literal["N/A"] + ) -> None: # If audio information is not available, N/A is returned, # so only update the audio information, when it is not N/A. 
if audio_information == "N/A": @@ -523,7 +527,9 @@ class OnkyoMediaPlayer(MediaPlayerEntity): } @callback - def _parse_video_information(self, video_information): + def _parse_video_information( + self, video_information: tuple[str] | Literal["N/A"] + ) -> None: # If video information is not available, N/A is returned, # so only update the video information, when it is not N/A. if video_information == "N/A": @@ -538,11 +544,11 @@ class OnkyoMediaPlayer(MediaPlayerEntity): if len(value) > 0 } - def _query_av_info_delayed(self): + def _query_av_info_delayed(self) -> None: if self._zone == "main" and not self._query_timer: @callback - def _query_av_info(): + def _query_av_info() -> None: if self._supports_audio_info: self._query_receiver("audio-information") if self._supports_video_info: diff --git a/mypy.ini b/mypy.ini index 4ba1f41f4d4..b352d2747be 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3166,6 +3166,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.onkyo.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.open_meteo.*] check_untyped_defs = true disallow_incomplete_defs = true From 892c32c8b7bd125016e5f20c08cb092232ef98c6 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Wed, 4 Sep 2024 19:20:05 +0200 Subject: [PATCH 1440/1635] Add button platform to opentherm_gw (#125185) * Add button platform to opentherm_gw * Add tests for button.py * Update tests/components/opentherm_gw/test_button.py --------- Co-authored-by: Joost Lekkerkerker --- .../components/opentherm_gw/__init__.py | 2 +- .../components/opentherm_gw/button.py | 73 +++++++++++++++++++ tests/components/opentherm_gw/test_button.py | 50 +++++++++++++ 3 files changed, 124 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/opentherm_gw/button.py create mode 100644 tests/components/opentherm_gw/test_button.py diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index a57ae7db601..dfce2206df7 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -90,7 +90,7 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLATFORMS = [Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR] async def options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None: diff --git a/homeassistant/components/opentherm_gw/button.py b/homeassistant/components/opentherm_gw/button.py new file mode 100644 index 00000000000..aa0a3dbcda5 --- /dev/null +++ b/homeassistant/components/opentherm_gw/button.py @@ -0,0 +1,73 @@ +"""Support for OpenTherm Gateway buttons.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass + +import pyotgw.vars as gw_vars + +from homeassistant.components.button import ( + ButtonDeviceClass, + ButtonEntity, + ButtonEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import OpenThermGatewayHub +from .const import ( + DATA_GATEWAYS, + DATA_OPENTHERM_GW, + GATEWAY_DEVICE_DESCRIPTION, + OpenThermDataSource, +) +from .entity import OpenThermEntity, OpenThermEntityDescription + + +@dataclass(frozen=True, kw_only=True) +class OpenThermButtonEntityDescription( + ButtonEntityDescription, OpenThermEntityDescription +): + """Describes an opentherm_gw button entity.""" + + action: Callable[[OpenThermGatewayHub], Awaitable] + + +BUTTON_DESCRIPTIONS: tuple[OpenThermButtonEntityDescription, ...] = ( + OpenThermButtonEntityDescription( + key="restart_button", + device_class=ButtonDeviceClass.RESTART, + device_description=GATEWAY_DEVICE_DESCRIPTION, + action=lambda hub: hub.gateway.set_mode(gw_vars.OTGW_MODE_RESET), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the OpenTherm Gateway buttons.""" + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + + async_add_entities( + OpenThermButton(gw_hub, description) for description in BUTTON_DESCRIPTIONS + ) + + +class OpenThermButton(OpenThermEntity, ButtonEntity): + """Representation of an OpenTherm button.""" + + _attr_entity_category = EntityCategory.CONFIG + entity_description: OpenThermButtonEntityDescription + + @callback + def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: + """Handle status updates from the component.""" + # We don't need any information from the reports here + + async def async_press(self) -> None: + """Perform button action.""" + await self.entity_description.action(self._gateway) diff --git a/tests/components/opentherm_gw/test_button.py b/tests/components/opentherm_gw/test_button.py new file mode 100644 index 00000000000..b02a9d9fef0 --- /dev/null +++ b/tests/components/opentherm_gw/test_button.py @@ -0,0 +1,50 @@ +"""Test opentherm_gw buttons.""" + +from unittest.mock import AsyncMock, MagicMock + +from pyotgw.vars import OTGW_MODE_RESET + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN +from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier +from homeassistant.const import ATTR_ENTITY_ID, CONF_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import MINIMAL_STATUS + +from tests.common import MockConfigEntry + + +async def test_restart_button( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test restart button.""" + + mock_pyotgw.return_value.set_mode = AsyncMock(return_value=MINIMAL_STATUS) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + button_entity_id := entity_registry.async_get_entity_id( + BUTTON_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-restart_button", + ) + ) is not None + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + { + ATTR_ENTITY_ID: button_entity_id, + }, + blocking=True, + ) + + mock_pyotgw.return_value.set_mode.assert_awaited_once_with(OTGW_MODE_RESET) From 4ecc6555bf5aada9f4923ae5d4d0884edffa70f2 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Wed, 4 Sep 2024 12:42:41 -0500 Subject: [PATCH 1441/1635] Add support 
for sample bytes in preferred TTS format (#125235) --- .../components/assist_pipeline/__init__.py | 3 +- .../components/assist_pipeline/pipeline.py | 7 +- homeassistant/components/tts/__init__.py | 26 ++++++ tests/components/assist_pipeline/test_init.py | 84 +++++++++++++++++-- 4 files changed, 112 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index 8ee053162b0..0a03402105a 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import AsyncIterable +from typing import Any import voluptuous as vol @@ -99,7 +100,7 @@ async def async_pipeline_from_audio_stream( wake_word_phrase: str | None = None, pipeline_id: str | None = None, conversation_id: str | None = None, - tts_audio_output: str | None = None, + tts_audio_output: str | dict[str, Any] | None = None, wake_word_settings: WakeWordSettings | None = None, audio_settings: AudioSettings | None = None, device_id: str | None = None, diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 342f811c99b..f6a6bc45b57 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -538,7 +538,7 @@ class PipelineRun: language: str = None # type: ignore[assignment] runner_data: Any | None = None intent_agent: str | None = None - tts_audio_output: str | None = None + tts_audio_output: str | dict[str, Any] | None = None wake_word_settings: WakeWordSettings | None = None audio_settings: AudioSettings = field(default_factory=AudioSettings) @@ -1052,12 +1052,15 @@ class PipelineRun: if self.pipeline.tts_voice is not None: tts_options[tts.ATTR_VOICE] = self.pipeline.tts_voice - if self.tts_audio_output is not None: + if isinstance(self.tts_audio_output, dict): + tts_options.update(self.tts_audio_output) + elif isinstance(self.tts_audio_output, str): tts_options[tts.ATTR_PREFERRED_FORMAT] = self.tts_audio_output if self.tts_audio_output == "wav": # 16 Khz, 16-bit mono tts_options[tts.ATTR_PREFERRED_SAMPLE_RATE] = SAMPLE_RATE tts_options[tts.ATTR_PREFERRED_SAMPLE_CHANNELS] = SAMPLE_CHANNELS + tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH try: options_supported = await tts.async_support_options( diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 70bb2b4c713..9e3d9f65a76 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -77,6 +77,7 @@ __all__ = [ "ATTR_PREFERRED_FORMAT", "ATTR_PREFERRED_SAMPLE_RATE", "ATTR_PREFERRED_SAMPLE_CHANNELS", + "ATTR_PREFERRED_SAMPLE_BYTES", "CONF_LANG", "DEFAULT_CACHE_DIR", "generate_media_source_id", @@ -95,6 +96,7 @@ ATTR_AUDIO_OUTPUT = "audio_output" ATTR_PREFERRED_FORMAT = "preferred_format" ATTR_PREFERRED_SAMPLE_RATE = "preferred_sample_rate" ATTR_PREFERRED_SAMPLE_CHANNELS = "preferred_sample_channels" +ATTR_PREFERRED_SAMPLE_BYTES = "preferred_sample_bytes" ATTR_MEDIA_PLAYER_ENTITY_ID = "media_player_entity_id" ATTR_VOICE = "voice" @@ -103,6 +105,7 @@ _PREFFERED_FORMAT_OPTIONS: Final[set[str]] = { ATTR_PREFERRED_FORMAT, ATTR_PREFERRED_SAMPLE_RATE, ATTR_PREFERRED_SAMPLE_CHANNELS, + ATTR_PREFERRED_SAMPLE_BYTES, } CONF_LANG = "language" @@ -223,6 +226,7 @@ async def async_convert_audio( to_extension: str, to_sample_rate: int | None = None, 
to_sample_channels: int | None = None, + to_sample_bytes: int | None = None, ) -> bytes: """Convert audio to a preferred format using ffmpeg.""" ffmpeg_manager = ffmpeg.get_ffmpeg_manager(hass) @@ -234,6 +238,7 @@ async def async_convert_audio( to_extension, to_sample_rate=to_sample_rate, to_sample_channels=to_sample_channels, + to_sample_bytes=to_sample_bytes, ) ) @@ -245,6 +250,7 @@ def _convert_audio( to_extension: str, to_sample_rate: int | None = None, to_sample_channels: int | None = None, + to_sample_bytes: int | None = None, ) -> bytes: """Convert audio to a preferred format using ffmpeg.""" @@ -277,6 +283,10 @@ def _convert_audio( # Max quality for MP3 command.extend(["-q:a", "0"]) + if to_sample_bytes == 2: + # 16-bit samples + command.extend(["-sample_fmt", "s16"]) + command.append(output_file.name) with subprocess.Popen( @@ -738,11 +748,25 @@ class SpeechManager: else: sample_rate = options.pop(ATTR_PREFERRED_SAMPLE_RATE, None) + if sample_rate is not None: + sample_rate = int(sample_rate) + if ATTR_PREFERRED_SAMPLE_CHANNELS in supported_options: sample_channels = options.get(ATTR_PREFERRED_SAMPLE_CHANNELS) else: sample_channels = options.pop(ATTR_PREFERRED_SAMPLE_CHANNELS, None) + if sample_channels is not None: + sample_channels = int(sample_channels) + + if ATTR_PREFERRED_SAMPLE_BYTES in supported_options: + sample_bytes = options.get(ATTR_PREFERRED_SAMPLE_BYTES) + else: + sample_bytes = options.pop(ATTR_PREFERRED_SAMPLE_BYTES, None) + + if sample_bytes is not None: + sample_bytes = int(sample_bytes) + async def get_tts_data() -> str: """Handle data available.""" if engine_instance.name is None or engine_instance.name is UNDEFINED: @@ -769,6 +793,7 @@ class SpeechManager: (final_extension != extension) or (sample_rate is not None) or (sample_channels is not None) + or (sample_bytes is not None) ) if needs_conversion: @@ -779,6 +804,7 @@ class SpeechManager: to_extension=final_extension, to_sample_rate=sample_rate, to_sample_channels=sample_channels, + to_sample_bytes=sample_bytes, ) # Create file infos diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 31cc1268098..c4696573bad 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -788,13 +788,12 @@ async def test_tts_audio_output( assert len(extra_options) == 0, extra_options -async def test_tts_supports_preferred_format( +async def test_tts_wav_preferred_format( hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_tts_provider: MockTTSProvider, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, - snapshot: SnapshotAssertion, ) -> None: """Test that preferred format options are given to the TTS system if supported.""" client = await hass_client() @@ -829,6 +828,7 @@ async def test_tts_supports_preferred_format( tts.ATTR_PREFERRED_FORMAT, tts.ATTR_PREFERRED_SAMPLE_RATE, tts.ATTR_PREFERRED_SAMPLE_CHANNELS, + tts.ATTR_PREFERRED_SAMPLE_BYTES, ] ) @@ -850,6 +850,80 @@ async def test_tts_supports_preferred_format( options = mock_get_tts_audio.call_args_list[0].kwargs["options"] # We should have received preferred format options in get_tts_audio - assert tts.ATTR_PREFERRED_FORMAT in options - assert tts.ATTR_PREFERRED_SAMPLE_RATE in options - assert tts.ATTR_PREFERRED_SAMPLE_CHANNELS in options + assert options.get(tts.ATTR_PREFERRED_FORMAT) == "wav" + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 16000 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 1 
+ assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 + + +async def test_tts_dict_preferred_format( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_tts_provider: MockTTSProvider, + init_components, + pipeline_data: assist_pipeline.pipeline.PipelineData, +) -> None: + """Test that preferred format options are given to the TTS system if supported.""" + client = await hass_client() + assert await async_setup_component(hass, media_source.DOMAIN, {}) + + events: list[assist_pipeline.PipelineEvent] = [] + + pipeline_store = pipeline_data.pipeline_store + pipeline_id = pipeline_store.async_get_preferred_item() + pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id) + + pipeline_input = assist_pipeline.pipeline.PipelineInput( + tts_input="This is a test.", + conversation_id=None, + device_id=None, + run=assist_pipeline.pipeline.PipelineRun( + hass, + context=Context(), + pipeline=pipeline, + start_stage=assist_pipeline.PipelineStage.TTS, + end_stage=assist_pipeline.PipelineStage.TTS, + event_callback=events.append, + tts_audio_output={ + tts.ATTR_PREFERRED_FORMAT: "flac", + tts.ATTR_PREFERRED_SAMPLE_RATE: 48000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 2, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + }, + ), + ) + await pipeline_input.validate() + + # Make the TTS provider support preferred format options + supported_options = list(mock_tts_provider.supported_options or []) + supported_options.extend( + [ + tts.ATTR_PREFERRED_FORMAT, + tts.ATTR_PREFERRED_SAMPLE_RATE, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS, + tts.ATTR_PREFERRED_SAMPLE_BYTES, + ] + ) + + with ( + patch.object(mock_tts_provider, "_supported_options", supported_options), + patch.object(mock_tts_provider, "get_tts_audio") as mock_get_tts_audio, + ): + await pipeline_input.execute() + + for event in events: + if event.type == assist_pipeline.PipelineEventType.TTS_END: + # We must fetch the media URL to trigger the TTS + assert event.data + media_id = event.data["tts_output"]["media_id"] + resolved = await media_source.async_resolve_media(hass, media_id, None) + await client.get(resolved.url) + + assert mock_get_tts_audio.called + options = mock_get_tts_audio.call_args_list[0].kwargs["options"] + + # We should have received preferred format options in get_tts_audio + assert options.get(tts.ATTR_PREFERRED_FORMAT) == "flac" + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_RATE)) == 48000 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_CHANNELS)) == 2 + assert int(options.get(tts.ATTR_PREFERRED_SAMPLE_BYTES)) == 2 From b4e20409def235bbf482a07ed038a16f3619617c Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Wed, 4 Sep 2024 14:03:26 -0400 Subject: [PATCH 1442/1635] Add Sonos tests and update error handling for unknown media (#124578) * initial commit * simplify tests --- .../components/sonos/media_player.py | 19 ++++++--- homeassistant/components/sonos/strings.json | 6 +++ tests/components/sonos/test_media_player.py | 39 +++++++++++++++++++ 3 files changed, 59 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 75527bdcb72..c4d417b0394 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -672,14 +672,23 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): soco.play_from_queue(0) elif media_type in PLAYABLE_MEDIA_TYPES: item = media_browser.get_media(self.media.library, media_id, 
media_type) - if not item: - _LOGGER.error('Could not find "%s" in the library', media_id) - return - + raise ServiceValidationError( + translation_domain=SONOS_DOMAIN, + translation_key="invalid_media", + translation_placeholders={ + "media_id": media_id, + }, + ) self._play_media_queue(soco, item, enqueue) else: - _LOGGER.error('Sonos does not support a media type of "%s"', media_type) + raise ServiceValidationError( + translation_domain=SONOS_DOMAIN, + translation_key="invalid_content_type", + translation_placeholders={ + "media_type": media_type, + }, + ) def _play_media_queue( self, soco: SoCo, item: MusicServiceItem, enqueue: MediaPlayerEnqueue diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 264420ef758..d3774e85213 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -185,6 +185,12 @@ "invalid_sonos_playlist": { "message": "Could not find Sonos playlist: {name}" }, + "invalid_media": { + "message": "Could not find media in library: {media_id}" + }, + "invalid_content_type": { + "message": "Sonos does not support media content type: {media_type}" + }, "announce_media_error": { "message": "Announcing clip {media_id} failed {response}" } diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index ac877f47904..ae3928c5ff6 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -232,6 +232,45 @@ async def test_play_media_library( ) +@pytest.mark.parametrize( + ("media_content_type", "media_content_id", "message"), + [ + ( + "artist", + "A:ALBUM/UnknowAlbum", + "Could not find media in library: A:ALBUM/UnknowAlbum", + ), + ( + "UnknownContent", + "A:ALBUM/UnknowAlbum", + "Sonos does not support media content type: UnknownContent", + ), + ], +) +async def test_play_media_library_content_error( + hass: HomeAssistant, + async_autosetup_sonos, + media_content_type, + media_content_id, + message, +) -> None: + """Test playing local library errors on content and content type.""" + with pytest.raises( + ServiceValidationError, + match=message, + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: media_content_type, + ATTR_MEDIA_CONTENT_ID: media_content_id, + }, + blocking=True, + ) + + _track_url = "S://192.168.42.100/music/iTunes/The%20Beatles/A%20Hard%20Day%2fs%I%20Should%20Have%20Known%20Better.mp3" From 52320844fcf27b4e30b9ba07aa0062066ec349aa Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 4 Sep 2024 08:05:13 -1000 Subject: [PATCH 1443/1635] Revert "Disable IPv6 in the opower integration to fix AEP utilities" (#125208) Revert "Disable IPv6 in the opower integration to fix AEP utilities (#107203)" This reverts commit 2a9a046fab2ff3cde2ede62a50c253d5454b62de. 
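For readers unfamiliar with the option being reverted: removing the `family=socket.AF_INET` argument means the shared aiohttp client session is no longer pinned to IPv4, so connections may again be made over either IPv6 or IPv4. A minimal sketch of the underlying mechanism, using plain aiohttp outside Home Assistant (the connector-level `family` option is an assumption about what the helper's keyword ultimately maps to, not something shown in this patch):

import asyncio
import socket

import aiohttp


async def fetch_status(url: str) -> int:
    """Fetch a URL while restricting the connection to IPv4."""
    # family=socket.AF_INET limits name resolution and sockets to IPv4;
    # the default (0, i.e. AF_UNSPEC) allows both IPv4 and IPv6.
    connector = aiohttp.TCPConnector(family=socket.AF_INET)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return resp.status


asyncio.run(fetch_status("https://example.com"))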
--- homeassistant/components/opower/config_flow.py | 3 +-- homeassistant/components/opower/coordinator.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/opower/config_flow.py b/homeassistant/components/opower/config_flow.py index 574062aca52..a9162b060a2 100644 --- a/homeassistant/components/opower/config_flow.py +++ b/homeassistant/components/opower/config_flow.py @@ -4,7 +4,6 @@ from __future__ import annotations from collections.abc import Mapping import logging -import socket from typing import Any from opower import ( @@ -40,7 +39,7 @@ async def _validate_login( ) -> dict[str, str]: """Validate login data and return any errors.""" api = Opower( - async_create_clientsession(hass, family=socket.AF_INET), + async_create_clientsession(hass), login_data[CONF_UTILITY], login_data[CONF_USERNAME], login_data[CONF_PASSWORD], diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index 3249cf1a375..690e34a9865 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -2,7 +2,6 @@ from datetime import datetime, timedelta import logging -import socket from types import MappingProxyType from typing import Any, cast @@ -54,7 +53,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): update_interval=timedelta(hours=12), ) self.api = Opower( - aiohttp_client.async_get_clientsession(hass, family=socket.AF_INET), + aiohttp_client.async_get_clientsession(hass), entry_data[CONF_UTILITY], entry_data[CONF_USERNAME], entry_data[CONF_PASSWORD], From c4029300c26bab0fcccd77eeea95005c18f7764c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Sep 2024 20:28:45 +0200 Subject: [PATCH 1444/1635] Remove deprecated aux_heat from honeywell (#125248) --- homeassistant/components/honeywell/climate.py | 61 +------------- .../components/honeywell/strings.json | 19 ----- .../honeywell/snapshots/test_climate.ambr | 3 +- tests/components/honeywell/test_climate.py | 81 ------------------- 4 files changed, 2 insertions(+), 162 deletions(-) diff --git a/homeassistant/components/honeywell/climate.py b/homeassistant/components/honeywell/climate.py index 141cb87f117..934d41b238e 100644 --- a/homeassistant/components/honeywell/climate.py +++ b/homeassistant/components/honeywell/climate.py @@ -35,11 +35,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - device_registry as dr, - entity_registry as er, - issue_registry as ir, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util.unit_conversion import TemperatureConverter @@ -218,9 +214,6 @@ class HoneywellUSThermostat(ClimateEntity): if device._data.get("canControlHumidification"): # noqa: SLF001 self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY - if device.raw_ui_data.get("SwitchEmergencyHeatAllowed"): - self._attr_supported_features |= ClimateEntityFeature.AUX_HEAT - if not device._data.get("hasFan"): # noqa: SLF001 return @@ -337,11 +330,6 @@ class HoneywellUSThermostat(ClimateEntity): return PRESET_NONE - @property - def is_aux_heat(self) -> bool | None: - 
"""Return true if aux heater.""" - return self._device.system_mode == "emheat" - @property def fan_mode(self) -> str | None: """Return the fan setting.""" @@ -538,53 +526,6 @@ class HoneywellUSThermostat(ClimateEntity): else: await self._turn_away_mode_off() - async def async_turn_aux_heat_on(self) -> None: - """Turn auxiliary heater on.""" - ir.async_create_issue( - self.hass, - DOMAIN, - "service_deprecation", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - severity=ir.IssueSeverity.WARNING, - translation_key="service_deprecation", - ) - try: - await self._device.set_system_mode("emheat") - - except SomeComfortError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="set_aux_failed", - ) from err - - async def async_turn_aux_heat_off(self) -> None: - """Turn auxiliary heater off.""" - - ir.async_create_issue( - self.hass, - DOMAIN, - "service_deprecation", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - severity=ir.IssueSeverity.WARNING, - translation_key="service_deprecation", - ) - - try: - if HVACMode.HEAT in self.hvac_modes: - await self.async_set_hvac_mode(HVACMode.HEAT) - else: - await self.async_set_hvac_mode(HVACMode.OFF) - - except HomeAssistantError as err: - raise HomeAssistantError( - translation_domain=DOMAIN, - translation_key="disable_aux_failed", - ) from err - async def async_update(self) -> None: """Get the latest state from the service.""" diff --git a/homeassistant/components/honeywell/strings.json b/homeassistant/components/honeywell/strings.json index d3bc1924e28..aa6e53620a5 100644 --- a/homeassistant/components/honeywell/strings.json +++ b/homeassistant/components/honeywell/strings.json @@ -88,30 +88,11 @@ "stop_hold_failed": { "message": "Honeywell could not stop hold mode" }, - "set_aux_failed": { - "message": "Honeywell could not set system mode to aux heat" - }, - "disable_aux_failed": { - "message": "Honeywell could turn off aux heat mode" - }, "switch_failed_off": { "message": "Honeywell could turn off emergency heat mode." }, "switch_failed_on": { "message": "Honeywell could not set system mode to emergency heat mode." } - }, - "issues": { - "service_deprecation": { - "title": "Honeywell aux heat is being removed", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::honeywell::issues::service_deprecation::title%]", - "description": "Use `switch.{name}_emergency_heat` instead to change mode.\n\nPlease adjust your automations and scripts and select **submit** to fix this issue." 
- } - } - } - } } } diff --git a/tests/components/honeywell/snapshots/test_climate.ambr b/tests/components/honeywell/snapshots/test_climate.ambr index 25bb73851c6..f26064b335a 100644 --- a/tests/components/honeywell/snapshots/test_climate.ambr +++ b/tests/components/honeywell/snapshots/test_climate.ambr @@ -1,7 +1,6 @@ # serializer version: 1 # name: test_static_attributes ReadOnlyDict({ - 'aux_heat': 'off', 'current_humidity': 50, 'current_temperature': 20, 'fan_action': 'idle', @@ -30,7 +29,7 @@ 'away', 'hold', ]), - 'supported_features': , + 'supported_features': , 'target_temp_high': None, 'target_temp_low': None, 'temperature': None, diff --git a/tests/components/honeywell/test_climate.py b/tests/components/honeywell/test_climate.py index 55a55f7d7e7..9485f2f4302 100644 --- a/tests/components/honeywell/test_climate.py +++ b/tests/components/honeywell/test_climate.py @@ -10,7 +10,6 @@ from syrupy.assertion import SnapshotAssertion from syrupy.filters import props from homeassistant.components.climate import ( - ATTR_AUX_HEAT, ATTR_FAN_MODE, ATTR_HVAC_MODE, ATTR_PRESET_MODE, @@ -22,7 +21,6 @@ from homeassistant.components.climate import ( FAN_ON, PRESET_AWAY, PRESET_NONE, - SERVICE_SET_AUX_HEAT, SERVICE_SET_FAN_MODE, SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, @@ -40,7 +38,6 @@ from homeassistant.const import ( ATTR_TEMPERATURE, SERVICE_TURN_OFF, SERVICE_TURN_ON, - STATE_UNAVAILABLE, Platform, ) from homeassistant.core import HomeAssistant @@ -221,53 +218,6 @@ async def test_mode_service_calls( ) -async def test_auxheat_service_calls( - hass: HomeAssistant, device: MagicMock, config_entry: MagicMock -) -> None: - """Test controlling the auxheat through service calls.""" - await init_integration(hass, config_entry) - entity_id = f"climate.{device.name}" - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: True}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("emheat") - - device.set_system_mode.reset_mock() - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("heat") - - device.set_system_mode.reset_mock() - device.set_system_mode.side_effect = aiosomecomfort.SomeComfortError - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: True}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("emheat") - - device.set_system_mode.reset_mock() - device.set_system_mode.side_effect = aiosomecomfort.SomeComfortError - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, - blocking=True, - ) - - async def test_fan_modes_service_calls( hass: HomeAssistant, device: MagicMock, config_entry: MagicMock ) -> None: @@ -1240,37 +1190,6 @@ async def test_async_update_errors( assert state.state == "unavailable" -async def test_aux_heat_off_service_call( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - device: MagicMock, - config_entry: MagicMock, -) -> None: - """Test aux heat off turns of system when no heat configured.""" - device.raw_ui_data["SwitchHeatAllowed"] = False - device.raw_ui_data["SwitchAutoAllowed"] = False - device.raw_ui_data["SwitchEmergencyHeatAllowed"] = True - - await init_integration(hass, 
config_entry) - - entity_id = f"climate.{device.name}" - entry = entity_registry.async_get(entity_id) - assert entry - - state = hass.states.get(entity_id) - assert state is not None - assert state.state != STATE_UNAVAILABLE - assert state.state == HVACMode.OFF - - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: entity_id, ATTR_AUX_HEAT: False}, - blocking=True, - ) - device.set_system_mode.assert_called_once_with("off") - - async def test_unique_id( hass: HomeAssistant, device: MagicMock, From c4c8e74a8aeb790d99060aa7bd6885b9b20ae654 Mon Sep 17 00:00:00 2001 From: Tal Taub Date: Wed, 4 Sep 2024 21:29:06 +0300 Subject: [PATCH 1445/1635] Add Custom Drink Entities Tami4 Edge (#124506) * Add drinks as button entities instead of using actions * Remove button extensions * Add an extension to create new buttons * Use translation key for buttons names * Change translation key wording * Call async_add_entities once * Add icons * Update homeassistant/components/tami4/button.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/tami4/button.py | 59 +++++++++++++++++---- homeassistant/components/tami4/icons.json | 3 ++ homeassistant/components/tami4/strings.json | 3 ++ 3 files changed, 56 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/tami4/button.py b/homeassistant/components/tami4/button.py index 2d8af3fcf89..11377a2dcfb 100644 --- a/homeassistant/components/tami4/button.py +++ b/homeassistant/components/tami4/button.py @@ -5,10 +5,12 @@ from dataclasses import dataclass import logging from Tami4EdgeAPI import Tami4EdgeAPI +from Tami4EdgeAPI.drink import Drink from homeassistant.components.button import ButtonEntity, ButtonEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import API, DOMAIN @@ -24,12 +26,17 @@ class Tami4EdgeButtonEntityDescription(ButtonEntityDescription): press_fn: Callable[[Tami4EdgeAPI], None] -BUTTONS: tuple[Tami4EdgeButtonEntityDescription] = ( - Tami4EdgeButtonEntityDescription( - key="boil_water", - translation_key="boil_water", - press_fn=lambda api: api.boil_water(), - ), +@dataclass(frozen=True, kw_only=True) +class Tami4EdgeDrinkButtonEntityDescription(ButtonEntityDescription): + """A class that describes Tami4Edge Drink button entities.""" + + press_fn: Callable[[Tami4EdgeAPI, Drink], None] + + +BOIL_WATER_BUTTON = Tami4EdgeButtonEntityDescription( + key="boil_water", + translation_key="boil_water", + press_fn=lambda api: api.boil_water(), ) @@ -37,12 +44,29 @@ async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Perform the setup for Tami4Edge.""" - api: Tami4EdgeAPI = hass.data[DOMAIN][entry.entry_id][API] - async_add_entities( - Tami4EdgeButton(api, entity_description) for entity_description in BUTTONS + api: Tami4EdgeAPI = hass.data[DOMAIN][entry.entry_id][API] + buttons: list[Tami4EdgeBaseEntity] = [Tami4EdgeButton(api, BOIL_WATER_BUTTON)] + + device = await hass.async_add_executor_job(api.get_device) + drinks = device.drinks + + buttons.extend( + Tami4EdgeDrinkButton( + api=api, + entity_description=Tami4EdgeDrinkButtonEntityDescription( + key=drink.id, + translation_key="prepare_drink", + translation_placeholders={"drink_name": drink.name}, + press_fn=lambda api, drink: api.prepare_drink(drink), + ), 
+ drink=drink, + ) + for drink in drinks ) + async_add_entities(buttons) + class Tami4EdgeButton(Tami4EdgeBaseEntity, ButtonEntity): """Button entity for Tami4Edge.""" @@ -52,3 +76,20 @@ class Tami4EdgeButton(Tami4EdgeBaseEntity, ButtonEntity): def press(self) -> None: """Handle the button press.""" self.entity_description.press_fn(self._api) + + +class Tami4EdgeDrinkButton(Tami4EdgeBaseEntity, ButtonEntity): + """Drink Button entity for Tami4Edge.""" + + entity_description: Tami4EdgeDrinkButtonEntityDescription + + def __init__( + self, api: Tami4EdgeAPI, entity_description: EntityDescription, drink: Drink + ) -> None: + """Initialize the drink button.""" + super().__init__(api=api, entity_description=entity_description) + self.drink = drink + + def press(self) -> None: + """Handle the button press.""" + self.entity_description.press_fn(self._api, self.drink) diff --git a/homeassistant/components/tami4/icons.json b/homeassistant/components/tami4/icons.json index d623bdc6007..803ed9a5016 100644 --- a/homeassistant/components/tami4/icons.json +++ b/homeassistant/components/tami4/icons.json @@ -3,6 +3,9 @@ "button": { "boil_water": { "default": "mdi:kettle-steam" + }, + "prepare_drink": { + "default": "mdi:beer" } }, "sensor": { diff --git a/homeassistant/components/tami4/strings.json b/homeassistant/components/tami4/strings.json index 406964a3bff..9c33b6607e4 100644 --- a/homeassistant/components/tami4/strings.json +++ b/homeassistant/components/tami4/strings.json @@ -26,6 +26,9 @@ "button": { "boil_water": { "name": "Boil water" + }, + "prepare_drink": { + "name": "Prepare {drink_name}" } } }, From c2b24dd3550c1e18884f2f3727862e802a8a8706 Mon Sep 17 00:00:00 2001 From: tronikos Date: Wed, 4 Sep 2024 11:30:24 -0700 Subject: [PATCH 1446/1635] Add debug logging in get_cost_reads in opower (#124473) Add debug statements in get_cost_reads in opower --- homeassistant/components/opower/coordinator.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index 690e34a9865..1e00243f657 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -236,9 +236,11 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else: start = datetime.fromtimestamp(start_time, tz=tz) - timedelta(days=30) end = dt_util.now(tz) + _LOGGER.debug("Getting monthly cost reads: %s - %s", start, end) cost_reads = await self.api.async_get_cost_reads( account, AggregateType.BILL, start, end ) + _LOGGER.debug("Got %s monthly cost reads", len(cost_reads)) if account.read_resolution == ReadResolution.BILLING: return cost_reads @@ -249,9 +251,11 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): start = cost_reads[0].start_time assert start start = max(start, end - timedelta(days=3 * 365)) + _LOGGER.debug("Getting daily cost reads: %s - %s", start, end) daily_cost_reads = await self.api.async_get_cost_reads( account, AggregateType.DAY, start, end ) + _LOGGER.debug("Got %s daily cost reads", len(daily_cost_reads)) _update_with_finer_cost_reads(cost_reads, daily_cost_reads) if account.read_resolution == ReadResolution.DAY: return cost_reads @@ -261,8 +265,11 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else: assert start start = max(start, end - timedelta(days=2 * 30)) + _LOGGER.debug("Getting hourly cost reads: %s - %s", start, end) hourly_cost_reads = await self.api.async_get_cost_reads( account, 
AggregateType.HOUR, start, end ) + _LOGGER.debug("Got %s hourly cost reads", len(hourly_cost_reads)) _update_with_finer_cost_reads(cost_reads, hourly_cost_reads) + _LOGGER.debug("Got %s cost reads", len(cost_reads)) return cost_reads From f56c38d69b82fe359539e386f9a1ead72470a0de Mon Sep 17 00:00:00 2001 From: TimL Date: Thu, 5 Sep 2024 04:31:56 +1000 Subject: [PATCH 1447/1635] Add uptime sensors for Smlight (#124408) * Add uptime sensor as derived sensor class * Add strings for uptime sensors * Update sensor tests to include uptime sensors * test zigbee uptime when disconnected --- homeassistant/components/smlight/const.py | 1 + homeassistant/components/smlight/sensor.py | 70 ++++++- homeassistant/components/smlight/strings.json | 6 + .../smlight/snapshots/test_sensor.ambr | 188 ++++++++++++++++++ tests/components/smlight/test_sensor.py | 25 ++- 5 files changed, 286 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/smlight/const.py b/homeassistant/components/smlight/const.py index de3270fe3be..791b00c3e93 100644 --- a/homeassistant/components/smlight/const.py +++ b/homeassistant/components/smlight/const.py @@ -9,3 +9,4 @@ ATTR_MANUFACTURER = "SMLIGHT" LOGGER = logging.getLogger(__package__) SCAN_INTERVAL = timedelta(seconds=300) +UPTIME_DEVIATION = timedelta(seconds=5) diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py index d9c03760fb8..f5193522c4c 100644 --- a/homeassistant/components/smlight/sensor.py +++ b/homeassistant/components/smlight/sensor.py @@ -4,6 +4,8 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime, timedelta +from itertools import chain from pysmlight import Sensors @@ -16,8 +18,10 @@ from homeassistant.components.sensor import ( from homeassistant.const import EntityCategory, UnitOfInformation, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util.dt import utcnow from . 
import SmConfigEntry +from .const import UPTIME_DEVIATION from .coordinator import SmDataUpdateCoordinator from .entity import SmEntity @@ -67,6 +71,23 @@ SENSORS = [ ), ] +UPTIME = [ + SmSensorEntityDescription( + key="core_uptime", + translation_key="core_uptime", + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + value_fn=lambda x: x.uptime, + ), + SmSensorEntityDescription( + key="socket_uptime", + translation_key="socket_uptime", + device_class=SensorDeviceClass.TIMESTAMP, + entity_registry_enabled_default=False, + value_fn=lambda x: x.socket_uptime, + ), +] + async def async_setup_entry( hass: HomeAssistant, @@ -77,7 +98,10 @@ async def async_setup_entry( coordinator = entry.runtime_data async_add_entities( - SmSensorEntity(coordinator, description) for description in SENSORS + chain( + (SmSensorEntity(coordinator, description) for description in SENSORS), + (SmUptimeSensorEntity(coordinator, description) for description in UPTIME), + ) ) @@ -98,6 +122,48 @@ class SmSensorEntity(SmEntity, SensorEntity): self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" @property - def native_value(self) -> float | None: + def native_value(self) -> datetime | float | None: """Return the sensor value.""" return self.entity_description.value_fn(self.coordinator.data.sensors) + + +class SmUptimeSensorEntity(SmSensorEntity): + """Representation of a slzb uptime sensor.""" + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmSensorEntityDescription, + ) -> None: + "Initialize uptime sensor instance." + super().__init__(coordinator, description) + self._last_uptime: datetime | None = None + + def get_uptime(self, uptime: float | None) -> datetime | None: + """Return device uptime or zigbee socket uptime. + + Converts uptime from seconds to a datetime value, allow up to 5 + seconds deviation. This avoids unnecessary updates to sensor state, + that may be caused by clock jitter. 
+ """ + if uptime is None: + # reset to unknown state + self._last_uptime = None + return None + + new_uptime = utcnow() - timedelta(seconds=uptime) + + if ( + not self._last_uptime + or abs(new_uptime - self._last_uptime) > UPTIME_DEVIATION + ): + self._last_uptime = new_uptime + + return self._last_uptime + + @property + def native_value(self) -> datetime | None: + """Return the sensor value.""" + value = self.entity_description.value_fn(self.coordinator.data.sensors) + + return self.get_uptime(value) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index f81e977b40c..41f84c49bf9 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -43,6 +43,12 @@ }, "ram_usage": { "name": "RAM usage" + }, + "core_uptime": { + "name": "Core uptime" + }, + "socket_uptime": { + "name": "Zigbee uptime" } }, "button": { diff --git a/tests/components/smlight/snapshots/test_sensor.ambr b/tests/components/smlight/snapshots/test_sensor.ambr index 0ff3d37b735..6895a8473bd 100644 --- a/tests/components/smlight/snapshots/test_sensor.ambr +++ b/tests/components/smlight/snapshots/test_sensor.ambr @@ -53,6 +53,53 @@ 'state': '35.0', }) # --- +# name: test_sensors[sensor.mock_title_core_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_core_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core uptime', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_uptime', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_core_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Core uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_title_core_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-25T02:51:15+00:00', + }) +# --- # name: test_sensors[sensor.mock_title_filesystem_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -149,6 +196,100 @@ 'state': '99', }) # --- +# name: test_sensors[sensor.mock_title_timestamp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_timestamp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_uptime', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_timestamp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 
'sensor.mock_title_timestamp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-25T02:51:15+00:00', + }) +# --- +# name: test_sensors[sensor.mock_title_timestamp_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_timestamp_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timestamp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'socket_uptime', + 'unique_id': 'aa:bb:cc:dd:ee:ff_socket_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_timestamp_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Timestamp', + }), + 'context': , + 'entity_id': 'sensor.mock_title_timestamp_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-30T23:57:53+00:00', + }) +# --- # name: test_sensors[sensor.mock_title_zigbee_chip_temp-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -203,6 +344,53 @@ 'state': '32.7', }) # --- +# name: test_sensors[sensor.mock_title_zigbee_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_zigbee_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee uptime', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'socket_uptime', + 'unique_id': 'aa:bb:cc:dd:ee:ff_socket_uptime', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[sensor.mock_title_zigbee_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Zigbee uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_title_zigbee_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-06-30T23:57:53+00:00', + }) +# --- # name: test_sensors[sensor.slzb_06_core_chip_temp-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py index e1239c99e32..f130d7ccf30 100644 --- a/tests/components/smlight/test_sensor.py +++ b/tests/components/smlight/test_sensor.py @@ -1,9 +1,12 @@ """Tests for the SMLIGHT sensor platform.""" +from unittest.mock import MagicMock + +from pysmlight import Sensors import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.const import Platform +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -25,6 +28,7 @@ def platforms() -> list[Platform]: @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2024-07-01 00:00:00+00:00") async def test_sensors( hass: 
HomeAssistant, device_registry: dr.DeviceRegistry, @@ -46,9 +50,26 @@ async def test_disabled_by_default_sensors( """Test the disabled by default SMLIGHT sensors.""" await setup_integration(hass, mock_config_entry) - for sensor in ("ram_usage", "filesystem_usage"): + for sensor in ("core_uptime", "filesystem_usage", "ram_usage", "zigbee_uptime"): assert not hass.states.get(f"sensor.mock_title_{sensor}") assert (entry := entity_registry.async_get(f"sensor.mock_title_{sensor}")) assert entry.disabled assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_zigbee_uptime_disconnected( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test for uptime when zigbee socket is disconnected. + + In this case zigbee uptime state should be unknown. + """ + mock_smlight_client.get_sensors.return_value = Sensors(socket_uptime=0) + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("sensor.mock_title_zigbee_uptime") + assert state.state == STATE_UNKNOWN From b23297bb7eb0e4625238cbc74111c727258a9fcf Mon Sep 17 00:00:00 2001 From: Christopher Fenner <9592452+CFenner@users.noreply.github.com> Date: Wed, 4 Sep 2024 20:32:40 +0200 Subject: [PATCH 1448/1635] Add hysteresis entity for heat pumps via ViCare (#124294) * add hysteresis entity * update PyViCare-neo dependency * add hysteresis switch on / of entities * Revert "add hysteresis entity" This reverts commit dcb5680d0ca1958640e68de36f6befbf6416ab41. --- homeassistant/components/vicare/manifest.json | 2 +- homeassistant/components/vicare/number.py | 28 +++++++++++++++++++ homeassistant/components/vicare/strings.json | 6 ++++ requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 5 files changed, 37 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 186e9ef6289..7a3089d04c3 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare-neo==0.2.1"] + "requirements": ["PyViCare-neo==0.3.0"] } diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index ea64fb174e8..a7f679f7224 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -75,6 +75,34 @@ DEVICE_ENTITY_DESCRIPTIONS: tuple[ViCareNumberEntityDescription, ...] 
= ( native_max_value=60, native_step=1, ), + ViCareNumberEntityDescription( + key="dhw_hysteresis_switch_on", + translation_key="dhw_hysteresis_switch_on", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.KELVIN, + value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOn(), + value_setter=lambda api, value: api.setDomesticHotWaterHysteresisSwitchOn( + value + ), + min_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnMin(), + max_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnMax(), + stepping_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOnStepping(), + ), + ViCareNumberEntityDescription( + key="dhw_hysteresis_switch_off", + translation_key="dhw_hysteresis_switch_off", + entity_category=EntityCategory.CONFIG, + device_class=NumberDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.KELVIN, + value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOff(), + value_setter=lambda api, value: api.setDomesticHotWaterHysteresisSwitchOff( + value + ), + min_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffMin(), + max_value_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffMax(), + stepping_getter=lambda api: api.getDomesticHotWaterHysteresisSwitchOffStepping(), + ), ) diff --git a/homeassistant/components/vicare/strings.json b/homeassistant/components/vicare/strings.json index 1466baab8f3..752645137df 100644 --- a/homeassistant/components/vicare/strings.json +++ b/homeassistant/components/vicare/strings.json @@ -110,6 +110,12 @@ }, "dhw_secondary_temperature": { "name": "DHW secondary temperature" + }, + "dhw_hysteresis_switch_on": { + "name": "DHW hysteresis switch on" + }, + "dhw_hysteresis_switch_off": { + "name": "DHW hysteresis switch off" } }, "sensor": { diff --git a/requirements_all.txt b/requirements_all.txt index 3707b52fc59..e95011f247b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -100,7 +100,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare-neo==0.2.1 +PyViCare-neo==0.3.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index c7a11044f50..1657969b7e5 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -94,7 +94,7 @@ PyTransportNSW==0.1.1 PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare-neo==0.2.1 +PyViCare-neo==0.3.0 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 From b61678d39c9743cf35897dc5435f0ccc6d4de680 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Sep 2024 21:14:54 +0200 Subject: [PATCH 1449/1635] Fix blocking call in yale_smart_alarm (#125255) --- homeassistant/components/yale_smart_alarm/coordinator.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index 1067b9279a4..b47545ea88b 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -36,8 +36,10 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): async def _async_setup(self) -> None: """Set up connection to Yale.""" try: - self.yale = YaleSmartAlarmClient( - self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD] + self.yale = await self.hass.async_add_executor_job( + 
YaleSmartAlarmClient, + self.entry.data[CONF_USERNAME], + self.entry.data[CONF_PASSWORD], ) except AuthenticationError as error: raise ConfigEntryAuthFailed from error From 1f59bd9f922df12e4299acea7a8955d34cf70fd5 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Wed, 4 Sep 2024 21:49:28 +0200 Subject: [PATCH 1450/1635] Don't show input panel if default code provided in envisalink (#125256) --- homeassistant/components/envisalink/alarm_control_panel.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/envisalink/alarm_control_panel.py b/homeassistant/components/envisalink/alarm_control_panel.py index d4bbe174f20..ea8b6390178 100644 --- a/homeassistant/components/envisalink/alarm_control_panel.py +++ b/homeassistant/components/envisalink/alarm_control_panel.py @@ -119,7 +119,7 @@ class EnvisalinkAlarm(EnvisalinkDevice, AlarmControlPanelEntity): self._partition_number = partition_number self._panic_type = panic_type self._alarm_control_panel_option_default_code = code - self._attr_code_format = CodeFormat.NUMBER + self._attr_code_format = CodeFormat.NUMBER if not code else None _LOGGER.debug("Setting up alarm: %s", alarm_name) super().__init__(alarm_name, info, controller) From adda02b6b18848e56a320ec55963bfbe243a175c Mon Sep 17 00:00:00 2001 From: Shai Ungar Date: Wed, 4 Sep 2024 22:56:11 +0300 Subject: [PATCH 1451/1635] Add service to 17track to archive package (#123493) * Add service archive package * Update homeassistant/components/seventeentrack/icons.json Co-authored-by: Joost Lekkerkerker * CR fix in tests * CR fix in services.py * string references * extract constant keys --------- Co-authored-by: Joost Lekkerkerker --- .../components/seventeentrack/__init__.py | 122 +-------------- .../components/seventeentrack/const.py | 3 + .../components/seventeentrack/icons.json | 3 + .../components/seventeentrack/services.py | 145 ++++++++++++++++++ .../components/seventeentrack/services.yaml | 11 ++ .../components/seventeentrack/strings.json | 14 ++ tests/components/seventeentrack/conftest.py | 5 + .../seventeentrack/test_services.py | 47 +++++- 8 files changed, 229 insertions(+), 121 deletions(-) create mode 100644 homeassistant/components/seventeentrack/services.py diff --git a/homeassistant/components/seventeentrack/__init__.py b/homeassistant/components/seventeentrack/__init__.py index 56d87b1935d..695ca179966 100644 --- a/homeassistant/components/seventeentrack/__init__.py +++ b/homeassistant/components/seventeentrack/__init__.py @@ -1,136 +1,30 @@ """The seventeentrack component.""" -from typing import Final - from pyseventeentrack import Client as SeventeenTrackClient from pyseventeentrack.errors import SeventeenTrackError -from pyseventeentrack.package import PACKAGE_STATUS_MAP -import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigEntryState -from homeassistant.const import ( - ATTR_FRIENDLY_NAME, - ATTR_LOCATION, - CONF_PASSWORD, - CONF_USERNAME, - Platform, -) -from homeassistant.core import ( - HomeAssistant, - ServiceCall, - ServiceResponse, - SupportsResponse, -) -from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError -from homeassistant.helpers import config_validation as cv, selector +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv from 
homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.typing import ConfigType -from homeassistant.util import slugify -from .const import ( - ATTR_CONFIG_ENTRY_ID, - ATTR_DESTINATION_COUNTRY, - ATTR_INFO_TEXT, - ATTR_ORIGIN_COUNTRY, - ATTR_PACKAGE_STATE, - ATTR_PACKAGE_TYPE, - ATTR_STATUS, - ATTR_TIMESTAMP, - ATTR_TRACKING_INFO_LANGUAGE, - ATTR_TRACKING_NUMBER, - DOMAIN, - SERVICE_GET_PACKAGES, -) +from .const import DOMAIN from .coordinator import SeventeenTrackCoordinator +from .services import setup_services PLATFORMS: list[Platform] = [Platform.SENSOR] CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -SERVICE_SCHEMA: Final = vol.Schema( - { - vol.Required(ATTR_CONFIG_ENTRY_ID): selector.ConfigEntrySelector( - { - "integration": DOMAIN, - } - ), - vol.Optional(ATTR_PACKAGE_STATE): selector.SelectSelector( - selector.SelectSelectorConfig( - multiple=True, - options=[ - value.lower().replace(" ", "_") - for value in PACKAGE_STATUS_MAP.values() - ], - mode=selector.SelectSelectorMode.DROPDOWN, - translation_key=ATTR_PACKAGE_STATE, - ) - ), - } -) - async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the 17Track component.""" - async def get_packages(call: ServiceCall) -> ServiceResponse: - """Get packages from 17Track.""" - config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] - package_states = call.data.get(ATTR_PACKAGE_STATE, []) + setup_services(hass) - entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id) - - if not entry: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="invalid_config_entry", - translation_placeholders={ - "config_entry_id": config_entry_id, - }, - ) - if entry.state != ConfigEntryState.LOADED: - raise ServiceValidationError( - translation_domain=DOMAIN, - translation_key="unloaded_config_entry", - translation_placeholders={ - "config_entry_id": entry.title, - }, - ) - - seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ - config_entry_id - ] - live_packages = sorted( - await seventeen_coordinator.client.profile.packages( - show_archived=seventeen_coordinator.show_archived - ) - ) - - return { - "packages": [ - { - ATTR_DESTINATION_COUNTRY: package.destination_country, - ATTR_ORIGIN_COUNTRY: package.origin_country, - ATTR_PACKAGE_TYPE: package.package_type, - ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, - ATTR_TRACKING_NUMBER: package.tracking_number, - ATTR_LOCATION: package.location, - ATTR_STATUS: package.status, - ATTR_TIMESTAMP: package.timestamp, - ATTR_INFO_TEXT: package.info_text, - ATTR_FRIENDLY_NAME: package.friendly_name, - } - for package in live_packages - if slugify(package.status) in package_states or package_states == [] - ] - } - - hass.services.async_register( - DOMAIN, - SERVICE_GET_PACKAGES, - get_packages, - schema=SERVICE_SCHEMA, - supports_response=SupportsResponse.ONLY, - ) return True diff --git a/homeassistant/components/seventeentrack/const.py b/homeassistant/components/seventeentrack/const.py index 584eca507e9..6b888590600 100644 --- a/homeassistant/components/seventeentrack/const.py +++ b/homeassistant/components/seventeentrack/const.py @@ -42,8 +42,11 @@ NOTIFICATION_DELIVERED_MESSAGE = ( VALUE_DELIVERED = "Delivered" SERVICE_GET_PACKAGES = "get_packages" +SERVICE_ARCHIVE_PACKAGE = "archive_package" ATTR_PACKAGE_STATE = "package_state" +ATTR_PACKAGE_TRACKING_NUMBER = "package_tracking_number" ATTR_CONFIG_ENTRY_ID = "config_entry_id" + DEPRECATED_KEY = "deprecated" diff --git 
a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json index 94ca8cd535a..a5cac0a9f84 100644 --- a/homeassistant/components/seventeentrack/icons.json +++ b/homeassistant/components/seventeentrack/icons.json @@ -30,6 +30,9 @@ "services": { "get_packages": { "service": "mdi:package" + }, + "archive_package": { + "service": "mdi:archive" } } } diff --git a/homeassistant/components/seventeentrack/services.py b/homeassistant/components/seventeentrack/services.py new file mode 100644 index 00000000000..9a7a4d2d4b6 --- /dev/null +++ b/homeassistant/components/seventeentrack/services.py @@ -0,0 +1,145 @@ +"""Services for the seventeentrack integration.""" + +from typing import Final + +from pyseventeentrack.package import PACKAGE_STATUS_MAP +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry, ConfigEntryState +from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_LOCATION +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.util import slugify + +from . import SeventeenTrackCoordinator +from .const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_DESTINATION_COUNTRY, + ATTR_INFO_TEXT, + ATTR_ORIGIN_COUNTRY, + ATTR_PACKAGE_STATE, + ATTR_PACKAGE_TRACKING_NUMBER, + ATTR_PACKAGE_TYPE, + ATTR_STATUS, + ATTR_TIMESTAMP, + ATTR_TRACKING_INFO_LANGUAGE, + ATTR_TRACKING_NUMBER, + DOMAIN, + SERVICE_ARCHIVE_PACKAGE, + SERVICE_GET_PACKAGES, +) + +SERVICE_ADD_PACKAGES_SCHEMA: Final = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string, + vol.Optional(ATTR_PACKAGE_STATE): selector.SelectSelector( + selector.SelectSelectorConfig( + multiple=True, + options=[ + value.lower().replace(" ", "_") + for value in PACKAGE_STATUS_MAP.values() + ], + mode=selector.SelectSelectorMode.DROPDOWN, + translation_key=ATTR_PACKAGE_STATE, + ) + ), + } +) + +SERVICE_ARCHIVE_PACKAGE_SCHEMA: Final = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): cv.string, + vol.Required(ATTR_PACKAGE_TRACKING_NUMBER): cv.string, + } +) + + +def setup_services(hass: HomeAssistant) -> None: + """Set up the services for the seventeentrack integration.""" + + async def get_packages(call: ServiceCall) -> ServiceResponse: + """Get packages from 17Track.""" + config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] + package_states = call.data.get(ATTR_PACKAGE_STATE, []) + + await _validate_service(config_entry_id) + + seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ + config_entry_id + ] + live_packages = sorted( + await seventeen_coordinator.client.profile.packages( + show_archived=seventeen_coordinator.show_archived + ) + ) + + return { + "packages": [ + { + ATTR_DESTINATION_COUNTRY: package.destination_country, + ATTR_ORIGIN_COUNTRY: package.origin_country, + ATTR_PACKAGE_TYPE: package.package_type, + ATTR_TRACKING_INFO_LANGUAGE: package.tracking_info_language, + ATTR_TRACKING_NUMBER: package.tracking_number, + ATTR_LOCATION: package.location, + ATTR_STATUS: package.status, + ATTR_TIMESTAMP: package.timestamp, + ATTR_INFO_TEXT: package.info_text, + ATTR_FRIENDLY_NAME: package.friendly_name, + } + for package in live_packages + if slugify(package.status) in package_states or package_states == [] + ] + } + + async def archive_package(call: ServiceCall) -> None: + config_entry_id = call.data[ATTR_CONFIG_ENTRY_ID] + tracking_number = 
call.data[ATTR_PACKAGE_TRACKING_NUMBER] + + await _validate_service(config_entry_id) + + seventeen_coordinator: SeventeenTrackCoordinator = hass.data[DOMAIN][ + config_entry_id + ] + + await seventeen_coordinator.client.profile.archive_package(tracking_number) + + async def _validate_service(config_entry_id): + entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id) + if not entry: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="invalid_config_entry", + translation_placeholders={ + "config_entry_id": config_entry_id, + }, + ) + if entry.state != ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="unloaded_config_entry", + translation_placeholders={ + "config_entry_id": entry.title, + }, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_GET_PACKAGES, + get_packages, + schema=SERVICE_ADD_PACKAGES_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) + + hass.services.async_register( + DOMAIN, + SERVICE_ARCHIVE_PACKAGE, + archive_package, + schema=SERVICE_ARCHIVE_PACKAGE_SCHEMA, + ) diff --git a/homeassistant/components/seventeentrack/services.yaml b/homeassistant/components/seventeentrack/services.yaml index 41cb66ada5f..d4592dc8aab 100644 --- a/homeassistant/components/seventeentrack/services.yaml +++ b/homeassistant/components/seventeentrack/services.yaml @@ -18,3 +18,14 @@ get_packages: selector: config_entry: integration: seventeentrack +archive_package: + fields: + package_tracking_number: + required: true + selector: + text: + config_entry_id: + required: true + selector: + config_entry: + integration: seventeentrack diff --git a/homeassistant/components/seventeentrack/strings.json b/homeassistant/components/seventeentrack/strings.json index 0fbac13736e..fda5575ff95 100644 --- a/homeassistant/components/seventeentrack/strings.json +++ b/homeassistant/components/seventeentrack/strings.json @@ -100,6 +100,20 @@ "description": "The packages will be retrieved for the selected service." } } + }, + "archive_package": { + "name": "Archive package", + "description": "Archive a package", + "fields": { + "package_tracking_number": { + "name": "Package tracking number", + "description": "The package will be archived for the specified tracking number." + }, + "config_entry_id": { + "name": "[%key:component::seventeentrack::services::get_packages::fields::config_entry_id::name%]", + "description": "The package will be archived for the selected service." 
+ } + } } }, "selector": { diff --git a/tests/components/seventeentrack/conftest.py b/tests/components/seventeentrack/conftest.py index e2493319b69..0d02a7ab5f1 100644 --- a/tests/components/seventeentrack/conftest.py +++ b/tests/components/seventeentrack/conftest.py @@ -40,6 +40,11 @@ NEW_SUMMARY_DATA = { "Returned": 1, } +ARCHIVE_PACKAGE_NUMBER = "123" +CONFIG_ENTRY_ID_KEY = "config_entry_id" +PACKAGE_TRACKING_NUMBER_KEY = "package_tracking_number" +PACKAGE_STATE_KEY = "package_state" + VALID_CONFIG = { CONF_USERNAME: "test", CONF_PASSWORD: "test", diff --git a/tests/components/seventeentrack/test_services.py b/tests/components/seventeentrack/test_services.py index 4347189a5c0..54c9349c121 100644 --- a/tests/components/seventeentrack/test_services.py +++ b/tests/components/seventeentrack/test_services.py @@ -5,14 +5,24 @@ from unittest.mock import AsyncMock import pytest from syrupy import SnapshotAssertion -from homeassistant.components.seventeentrack import DOMAIN, SERVICE_GET_PACKAGES +from homeassistant.components.seventeentrack import DOMAIN +from homeassistant.components.seventeentrack.const import ( + SERVICE_ARCHIVE_PACKAGE, + SERVICE_GET_PACKAGES, +) from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr from . import init_integration -from .conftest import get_package +from .conftest import ( + ARCHIVE_PACKAGE_NUMBER, + CONFIG_ENTRY_ID_KEY, + PACKAGE_STATE_KEY, + PACKAGE_TRACKING_NUMBER_KEY, + get_package, +) from tests.common import MockConfigEntry @@ -30,8 +40,8 @@ async def test_get_packages_from_list( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": mock_config_entry.entry_id, - "package_state": ["in_transit", "delivered"], + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + PACKAGE_STATE_KEY: ["in_transit", "delivered"], }, blocking=True, return_response=True, @@ -53,7 +63,7 @@ async def test_get_all_packages( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": mock_config_entry.entry_id, + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, }, blocking=True, return_response=True, @@ -76,7 +86,7 @@ async def test_service_called_with_unloaded_entry( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": mock_config_entry.entry_id, + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, }, blocking=True, return_response=True, @@ -110,13 +120,36 @@ async def test_service_called_with_non_17track_device( DOMAIN, SERVICE_GET_PACKAGES, { - "config_entry_id": device_entry.id, + CONFIG_ENTRY_ID_KEY: device_entry.id, }, blocking=True, return_response=True, ) +async def test_archive_package( + hass: HomeAssistant, + mock_seventeentrack: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Ensure service archives package.""" + await _mock_packages(mock_seventeentrack) + await init_integration(hass, mock_config_entry) + await hass.services.async_call( + DOMAIN, + SERVICE_ARCHIVE_PACKAGE, + { + CONFIG_ENTRY_ID_KEY: mock_config_entry.entry_id, + PACKAGE_TRACKING_NUMBER_KEY: ARCHIVE_PACKAGE_NUMBER, + }, + blocking=True, + ) + mock_seventeentrack.return_value.profile.archive_package.assert_called_once_with( + ARCHIVE_PACKAGE_NUMBER + ) + + async def _mock_packages(mock_seventeentrack): package1 = get_package(status=10) package2 = get_package( From ba5d23290a406ac2430839938239e30e4f5e6360 Mon Sep 17 00:00:00 2001 From: ilan <31193909+iloveicedgreentea@users.noreply.github.com> Date: Wed, 4 Sep 
2024 15:57:37 -0400 Subject: [PATCH 1452/1635] Add madvr diagnostics (#125109) * feat: add basic diagnostics * fix: add mock data * fix: regen snapshots --- homeassistant/components/madvr/diagnostics.py | 25 ++++++++++ tests/components/madvr/conftest.py | 1 + .../madvr/snapshots/test_diagnostics.ambr | 26 ++++++++++ tests/components/madvr/test_diagnostics.py | 48 +++++++++++++++++++ 4 files changed, 100 insertions(+) create mode 100644 homeassistant/components/madvr/diagnostics.py create mode 100644 tests/components/madvr/snapshots/test_diagnostics.ambr create mode 100644 tests/components/madvr/test_diagnostics.py diff --git a/homeassistant/components/madvr/diagnostics.py b/homeassistant/components/madvr/diagnostics.py new file mode 100644 index 00000000000..f6261d27305 --- /dev/null +++ b/homeassistant/components/madvr/diagnostics.py @@ -0,0 +1,25 @@ +"""Provides diagnostics for madVR.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from . import MadVRConfigEntry + +TO_REDACT = [CONF_HOST] + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: MadVRConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = config_entry.runtime_data.data + + return { + "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), + "madvr_data": data, + } diff --git a/tests/components/madvr/conftest.py b/tests/components/madvr/conftest.py index 187786c6964..3136e04b06b 100644 --- a/tests/components/madvr/conftest.py +++ b/tests/components/madvr/conftest.py @@ -57,6 +57,7 @@ def mock_config_entry() -> MockConfigEntry: data=MOCK_CONFIG, unique_id=MOCK_MAC, title=DEFAULT_NAME, + entry_id="3bd2acb0e4f0476d40865546d0d91132", ) diff --git a/tests/components/madvr/snapshots/test_diagnostics.ambr b/tests/components/madvr/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..f8008a651f2 --- /dev/null +++ b/tests/components/madvr/snapshots/test_diagnostics.ambr @@ -0,0 +1,26 @@ +# serializer version: 1 +# name: test_entry_diagnostics[positive_payload0] + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '**REDACTED**', + 'port': 44077, + }), + 'disabled_by': None, + 'domain': 'madvr', + 'entry_id': '3bd2acb0e4f0476d40865546d0d91132', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'envy', + 'unique_id': '00:11:22:33:44:55', + 'version': 1, + }), + 'madvr_data': dict({ + 'is_on': True, + }), + }) +# --- diff --git a/tests/components/madvr/test_diagnostics.py b/tests/components/madvr/test_diagnostics.py new file mode 100644 index 00000000000..453eaba8d94 --- /dev/null +++ b/tests/components/madvr/test_diagnostics.py @@ -0,0 +1,48 @@ +"""Test madVR diagnostics.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . 
import setup_integration +from .conftest import get_update_callback + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.parametrize( + ("positive_payload"), + [ + {"is_on": True}, + ], +) +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_config_entry: MockConfigEntry, + mock_madvr_client: AsyncMock, + snapshot: SnapshotAssertion, + positive_payload: dict, +) -> None: + """Test config entry diagnostics.""" + with patch("homeassistant.components.madvr.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + update_callback = get_update_callback(mock_madvr_client) + + # Add data to test storing diagnostic data + update_callback(positive_payload) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + + assert result == snapshot(exclude=props("created_at", "modified_at")) From baa9473383c72029951e0995eb22f024662bd631 Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Wed, 4 Sep 2024 23:24:52 +0300 Subject: [PATCH 1453/1635] Address BTHome review comment (#125259) * Address BTHome review comment * Review comment Co-authored-by: Ernst Klamer * generator expression Co-authored-by: Martin Hjelmare --------- Co-authored-by: Ernst Klamer Co-authored-by: Martin Hjelmare --- .../components/bthome/device_trigger.py | 85 +++++++++---------- 1 file changed, 41 insertions(+), 44 deletions(-) diff --git a/homeassistant/components/bthome/device_trigger.py b/homeassistant/components/bthome/device_trigger.py index c50ffc05900..4eca110e581 100644 --- a/homeassistant/components/bthome/device_trigger.py +++ b/homeassistant/components/bthome/device_trigger.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any +from typing import TYPE_CHECKING, Any import voluptuous as vol @@ -34,7 +34,7 @@ from .const import ( EVENT_TYPE, ) -TRIGGERS_BY_EVENT_CLASS = { +EVENT_TYPES_BY_EVENT_CLASS = { EVENT_CLASS_BUTTON: { "press", "double_press", @@ -51,6 +51,38 @@ TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( ) +def get_event_classes_by_device_id(hass: HomeAssistant, device_id: str) -> list[str]: + """Get the supported event classes for a device. + + Events for BTHome BLE devices are dynamically discovered + and stored in the device config entry when they are first seen. + """ + device_registry = dr.async_get(hass) + device = device_registry.async_get(device_id) + if TYPE_CHECKING: + assert device is not None + + config_entries = [ + hass.config_entries.async_get_entry(entry_id) + for entry_id in device.config_entries + ] + bthome_config_entry = next( + entry for entry in config_entries if entry and entry.domain == DOMAIN + ) + return bthome_config_entry.data.get(CONF_DISCOVERED_EVENT_CLASSES, []) + + +def get_event_types_by_event_class(event_class: str) -> set[str]: + """Get the supported event types for an event class. + + If the device has multiple buttons they will have + event classes like button_1 button_2, button_3, etc + but if there is only one button then it will be + button without a number postfix. 
+ """ + return EVENT_TYPES_BY_EVENT_CLASS.get(event_class.split("_")[0], set()) + + async def async_validate_trigger_config( hass: HomeAssistant, config: ConfigType ) -> ConfigType: @@ -58,31 +90,17 @@ async def async_validate_trigger_config( config = TRIGGER_SCHEMA(config) event_class = config[CONF_TYPE] event_type = config[CONF_SUBTYPE] - - device_registry = dr.async_get(hass) - device = device_registry.async_get(config[CONF_DEVICE_ID]) - assert device is not None - config_entries = [ - hass.config_entries.async_get_entry(entry_id) - for entry_id in device.config_entries - ] - bthome_config_entry = next( - iter(entry for entry in config_entries if entry and entry.domain == DOMAIN) - ) - event_classes: list[str] = bthome_config_entry.data.get( - CONF_DISCOVERED_EVENT_CLASSES, [] - ) + device_id = config[CONF_DEVICE_ID] + event_classes = get_event_classes_by_device_id(hass, device_id) if event_class not in event_classes: raise InvalidDeviceAutomationConfig( - f"BTHome trigger {event_class} is not valid for device " - f"{device} ({config[CONF_DEVICE_ID]})" + f"BTHome trigger {event_class} is not valid for device_id '{device_id}'" ) - if event_type not in TRIGGERS_BY_EVENT_CLASS.get(event_class.split("_")[0], ()): + if event_type not in get_event_types_by_event_class(event_class): raise InvalidDeviceAutomationConfig( - f"BTHome trigger {event_type} is not valid for device " - f"{device} ({config[CONF_DEVICE_ID]})" + f"BTHome trigger {event_type} is not valid for device_id '{device_id}'" ) return config @@ -92,21 +110,7 @@ async def async_get_triggers( hass: HomeAssistant, device_id: str ) -> list[dict[str, Any]]: """Return a list of triggers for BTHome BLE devices.""" - device_registry = dr.async_get(hass) - device = device_registry.async_get(device_id) - assert device is not None - config_entries = [ - hass.config_entries.async_get_entry(entry_id) - for entry_id in device.config_entries - ] - bthome_config_entry = next( - iter(entry for entry in config_entries if entry and entry.domain == DOMAIN), - None, - ) - assert bthome_config_entry is not None - event_classes: list[str] = bthome_config_entry.data.get( - CONF_DISCOVERED_EVENT_CLASSES, [] - ) + event_classes = get_event_classes_by_device_id(hass, device_id) return [ { # Required fields of TRIGGER_BASE_SCHEMA @@ -118,14 +122,7 @@ async def async_get_triggers( CONF_SUBTYPE: event_type, } for event_class in event_classes - for event_type in TRIGGERS_BY_EVENT_CLASS.get( - event_class.split("_")[0], - # If the device has multiple buttons they will have - # event classes like button_1 button_2, button_3, etc - # but if there is only one button then it will be - # button without a number postfix. 
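To make the new lookup concrete: `get_event_types_by_event_class` takes the part of the event class before the first underscore and consults the table with that, so a multi-button device reporting `button_1`, `button_2` and so on resolves to the same event types as a single `button`. A small standalone illustration of that behavior (the mapping below is abbreviated to the two event types visible in this hunk, not the component's full table):

```python
# Abbreviated stand-in for the component's EVENT_TYPES_BY_EVENT_CLASS table.
EVENT_TYPES_BY_EVENT_CLASS: dict[str, set[str]] = {
    "button": {"press", "double_press"},
}


def get_event_types_by_event_class(event_class: str) -> set[str]:
    """Strip the numeric suffix and look up the supported event types."""
    return EVENT_TYPES_BY_EVENT_CLASS.get(event_class.split("_")[0], set())


# button_1, button_2, ... all resolve to the same set as a plain "button".
assert get_event_types_by_event_class("button_2") == {"press", "double_press"}
assert get_event_types_by_event_class("button") == {"press", "double_press"}
# An unknown class yields an empty set, so any event type fails the membership
# check and trigger validation raises InvalidDeviceAutomationConfig.
assert get_event_types_by_event_class("unknown") == set()
```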
- (), - ) + for event_type in get_event_types_by_event_class(event_class) ] From 505df84783bfc12be73934868d5090f3acbd3131 Mon Sep 17 00:00:00 2001 From: Raj Laud <50647620+rajlaud@users.noreply.github.com> Date: Wed, 4 Sep 2024 17:17:39 -0400 Subject: [PATCH 1454/1635] Squeezebox remove deprecated sync and unsync services (#125271) * Squeezebox remove deprecated sync and unsync * Squeezebox remove sync group attribute --- .../components/squeezebox/media_player.py | 26 ------------------- 1 file changed, 26 deletions(-) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 8607e72a67c..0294c17f50a 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -56,11 +56,8 @@ from .const import DISCOVERY_TASK, DOMAIN, KNOWN_PLAYERS, SQUEEZEBOX_SOURCE_STRI SERVICE_CALL_METHOD = "call_method" SERVICE_CALL_QUERY = "call_query" -SERVICE_SYNC = "sync" -SERVICE_UNSYNC = "unsync" ATTR_QUERY_RESULT = "query_result" -ATTR_SYNC_GROUP = "sync_group" SIGNAL_PLAYER_REDISCOVERED = "squeezebox_player_rediscovered" @@ -75,7 +72,6 @@ ATTR_OTHER_PLAYER = "other_player" ATTR_TO_PROPERTY = [ ATTR_QUERY_RESULT, - ATTR_SYNC_GROUP, ] SQUEEZEBOX_MODE = { @@ -181,12 +177,6 @@ async def async_setup_entry( }, "async_call_query", ) - platform.async_register_entity_service( - SERVICE_SYNC, - {vol.Required(ATTR_OTHER_PLAYER): cv.string}, - "async_sync", - ) - platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # Start server discovery task if not already running entry.async_on_unload(async_at_start(hass, start_server_discovery)) @@ -566,26 +556,10 @@ class SqueezeBoxEntity(MediaPlayerEntity): "Could not find player_id for %s. Not syncing", other_player ) - async def async_sync(self, other_player: str) -> None: - """Sync this Squeezebox player to another. Deprecated.""" - _LOGGER.warning( - "Service squeezebox.sync is deprecated; use media_player.join_players" - " instead" - ) - await self.async_join_players([other_player]) - async def async_unjoin_player(self) -> None: """Unsync this Squeezebox player.""" await self._player.async_unsync() - async def async_unsync(self) -> None: - """Unsync this Squeezebox player. 
Deprecated.""" - _LOGGER.warning( - "Service squeezebox.unsync is deprecated; use media_player.unjoin_player" - " instead" - ) - await self.async_unjoin_player() - async def async_browse_media( self, media_content_type: MediaType | str | None = None, From 199a4b725b63c441ab94ab612ba6da86398c886c Mon Sep 17 00:00:00 2001 From: Jordi Date: Wed, 4 Sep 2024 23:22:31 +0200 Subject: [PATCH 1455/1635] Increase AquaCell timeout and handle timeout exception properly (#125263) * Increase timeout and add handling of timeout exception * Raise update failed instead of config entry error --- homeassistant/components/aquacell/config_flow.py | 2 +- homeassistant/components/aquacell/coordinator.py | 4 ++-- tests/components/aquacell/test_config_flow.py | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index 332cd16e749..1ee89035d93 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -56,7 +56,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): refresh_token = await api.authenticate( user_input[CONF_EMAIL], user_input[CONF_PASSWORD] ) - except ApiException: + except (ApiException, TimeoutError): errors["base"] = "cannot_connect" except AuthenticationFailed: errors["base"] = "invalid_auth" diff --git a/homeassistant/components/aquacell/coordinator.py b/homeassistant/components/aquacell/coordinator.py index dd5dfcd2d0d..ee4afb451b9 100644 --- a/homeassistant/components/aquacell/coordinator.py +++ b/homeassistant/components/aquacell/coordinator.py @@ -56,7 +56,7 @@ class AquacellCoordinator(DataUpdateCoordinator[dict[str, Softener]]): so entities can quickly look up their data. """ - async with asyncio.timeout(10): + async with asyncio.timeout(30): # Check if the refresh token is expired expiry_time = ( self.refresh_token_creation_time @@ -72,7 +72,7 @@ class AquacellCoordinator(DataUpdateCoordinator[dict[str, Softener]]): softeners = await self.aquacell_api.get_all_softeners() except AuthenticationFailed as err: raise ConfigEntryError from err - except AquacellApiException as err: + except (AquacellApiException, TimeoutError) as err: raise UpdateFailed(f"Error communicating with API: {err}") from err return {softener.dsn: softener for softener in softeners} diff --git a/tests/components/aquacell/test_config_flow.py b/tests/components/aquacell/test_config_flow.py index b73852d513f..f677b3f8348 100644 --- a/tests/components/aquacell/test_config_flow.py +++ b/tests/components/aquacell/test_config_flow.py @@ -79,6 +79,7 @@ async def test_full_flow( ("exception", "error"), [ (ApiException, "cannot_connect"), + (TimeoutError, "cannot_connect"), (AuthenticationFailed, "invalid_auth"), (Exception, "unknown"), ], From a0356f587e37359d684e112eee51d8d3d67cb666 Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Wed, 4 Sep 2024 11:32:08 -1000 Subject: [PATCH 1456/1635] Fix yarl binary wheel builds for armv7l and armhf (#125270) --- .github/workflows/wheels.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index 04e4391790a..fcd71cbec32 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -140,7 +140,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "libffi-dev;openssl-dev;yaml-dev;nasm" - skip-binary: aiohttp + skip-binary: aiohttp;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements.txt" @@ -212,7 +212,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_old-cython.txt" @@ -227,7 +227,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" @@ -241,7 +241,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" @@ -255,7 +255,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: 
aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" From fbd3bf7a98ef5ac5a912f76fa3cf00afb9ff90e7 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 4 Sep 2024 11:32:33 -1000 Subject: [PATCH 1457/1635] Bump yarl to 1.9.9 (#125264) --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 767bd206266..e489006867f 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.8 +yarl==1.9.9 zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index 2c8e0a432f0..e2d5e213811 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.8", + "yarl==1.9.9", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index 7f28e93cd4f..1d6b4e74d22 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.8 +yarl==1.9.9 From c8fd48523fd33a23fe51402dc74a18d8f3da424c Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Thu, 5 Sep 2024 06:10:21 +0200 Subject: [PATCH 1458/1635] Use TypeVar defaults for Generator (#125228) --- tests/components/fujitsu_fglair/conftest.py | 2 +- tests/components/google_photos/conftest.py | 10 +++------- tests/components/google_photos/test_config_flow.py | 4 ++-- tests/components/intellifire/conftest.py | 14 +++++++------- tests/components/smlight/conftest.py | 4 ++-- tests/components/yale/test_config_flow.py | 2 +- 6 files changed, 16 insertions(+), 20 deletions(-) diff --git a/tests/components/fujitsu_fglair/conftest.py b/tests/components/fujitsu_fglair/conftest.py index b73007a566b..04042fb0b09 100644 --- a/tests/components/fujitsu_fglair/conftest.py +++ b/tests/components/fujitsu_fglair/conftest.py @@ -30,7 +30,7 @@ TEST_PROPERTY_VALUES = { @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.fujitsu_fglair.async_setup_entry", return_value=True diff --git a/tests/components/google_photos/conftest.py b/tests/components/google_photos/conftest.py index 9dbe85bd25b..3ca64471fa1 100644 --- a/tests/components/google_photos/conftest.py +++ b/tests/components/google_photos/conftest.py @@ -125,7 +125,7 @@ def mock_client_api( fixture_name: str, user_identifier: str, api_error: Exception, -) -> Generator[Mock, None, None]: +) -> Generator[Mock]: """Set up fake Google Photos API responses from fixtures.""" mock_api = AsyncMock(GooglePhotosLibraryApi, autospec=True) mock_api.get_user_info.return_value = UserInfoResult( @@ -136,9 +136,7 @@ def mock_client_api( responses = load_json_array_fixture(fixture_name, DOMAIN) if fixture_name else [] - async def list_media_items( - *args: Any, - ) -> 
AsyncGenerator[ListMediaItemResult, None, None]: + async def list_media_items(*args: Any) -> AsyncGenerator[ListMediaItemResult]: for response in responses: mock_list_media_items = Mock(ListMediaItemResult) mock_list_media_items.media_items = [ @@ -163,9 +161,7 @@ def mock_client_api( # Emulate an async iterator for returning pages of response objects. We just # return a single page. - async def list_albums( - *args: Any, **kwargs: Any - ) -> AsyncGenerator[ListAlbumResult, None, None]: + async def list_albums(*args: Any, **kwargs: Any) -> AsyncGenerator[ListAlbumResult]: mock_list_album_result = Mock(ListAlbumResult) mock_list_album_result.albums = [ Album.from_dict(album) diff --git a/tests/components/google_photos/test_config_flow.py b/tests/components/google_photos/test_config_flow.py index be97d7658c6..48c8723df3c 100644 --- a/tests/components/google_photos/test_config_flow.py +++ b/tests/components/google_photos/test_config_flow.py @@ -28,7 +28,7 @@ CLIENT_SECRET = "5678" @pytest.fixture(name="mock_setup") -def mock_setup_entry() -> Generator[Mock, None, None]: +def mock_setup_entry() -> Generator[Mock]: """Fixture to mock out integration setup.""" with patch( "homeassistant.components.google_photos.async_setup_entry", return_value=True @@ -37,7 +37,7 @@ def mock_setup_entry() -> Generator[Mock, None, None]: @pytest.fixture(autouse=True) -def mock_patch_api(mock_api: Mock) -> Generator[None, None, None]: +def mock_patch_api(mock_api: Mock) -> Generator[None]: """Fixture to patch the config flow api.""" with patch( "homeassistant.components.google_photos.config_flow.GooglePhotosLibraryApi", diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index 251d5bdde48..0bd7073ee47 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -34,7 +34,7 @@ from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.intellifire.async_setup_entry", return_value=True @@ -43,7 +43,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_fireplace_finder_none() -> Generator[None, MagicMock, None]: +def mock_fireplace_finder_none() -> Generator[MagicMock]: """Mock fireplace finder.""" mock_found_fireplaces = Mock() mock_found_fireplaces.ips = [] @@ -110,7 +110,7 @@ def mock_common_data_local() -> IntelliFireCommonFireplaceData: @pytest.fixture def mock_apis_multifp( mock_cloud_interface, mock_local_interface, mock_fp -) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock], None, None]: +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock]]: """Multi fireplace version of mocks.""" return mock_local_interface, mock_cloud_interface, mock_fp @@ -118,7 +118,7 @@ def mock_apis_multifp( @pytest.fixture def mock_apis_single_fp( mock_cloud_interface, mock_local_interface, mock_fp -) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock], None, None]: +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock]]: """Single fire place version of the mocks.""" data_v1 = IntelliFireUserData( **load_json_object_fixture("user_data_1.json", DOMAIN) @@ -131,7 +131,7 @@ def mock_apis_single_fp( @pytest.fixture -def mock_cloud_interface() -> Generator[AsyncMock, None, None]: +def mock_cloud_interface() -> Generator[AsyncMock]: """Mock cloud interface to use for testing.""" user_data = 
IntelliFireUserData( **load_json_object_fixture("user_data_3.json", DOMAIN) @@ -165,7 +165,7 @@ def mock_cloud_interface() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_local_interface() -> Generator[AsyncMock, None, None]: +def mock_local_interface() -> Generator[AsyncMock]: """Mock version of IntelliFireAPILocal.""" poll_data = IntelliFirePollData( **load_json_object_fixture("intellifire/local_poll.json") @@ -181,7 +181,7 @@ def mock_local_interface() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_fp(mock_common_data_local) -> Generator[AsyncMock, None, None]: +def mock_fp(mock_common_data_local) -> Generator[AsyncMock]: """Mock fireplace.""" local_poll_data = IntelliFirePollData( diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py index ad4d749c0d2..c51da5c5ee5 100644 --- a/tests/components/smlight/conftest.py +++ b/tests/components/smlight/conftest.py @@ -39,14 +39,14 @@ def platforms() -> list[Platform]: @pytest.fixture(autouse=True) -async def mock_patch_platforms(platforms: list[str]) -> AsyncGenerator[None, None]: +async def mock_patch_platforms(platforms: list[str]) -> AsyncGenerator[None]: """Fixture to set up platforms for tests.""" with patch(f"homeassistant.components.{DOMAIN}.PLATFORMS", platforms): yield @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.smlight.async_setup_entry", return_value=True diff --git a/tests/components/yale/test_config_flow.py b/tests/components/yale/test_config_flow.py index 163f8240553..004162c0ebf 100644 --- a/tests/components/yale/test_config_flow.py +++ b/tests/components/yale/test_config_flow.py @@ -25,7 +25,7 @@ CLIENT_ID = "1" @pytest.fixture -def mock_setup_entry() -> Generator[Mock, None, None]: +def mock_setup_entry() -> Generator[Mock]: """Patch setup entry.""" with patch( "homeassistant.components.yale.async_setup_entry", return_value=True From 71d35a03e17b4b48c1c33df541377b92b6cfd3b1 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Wed, 4 Sep 2024 20:12:43 -1000 Subject: [PATCH 1459/1635] Switch hassio to use with_path where possible (#125268) * Switch hassio to use with_path where possible Any place we are joining to the root url, we can use with_path as its much faster * revert --- homeassistant/components/hassio/handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/hassio/handler.py b/homeassistant/components/hassio/handler.py index c57e43f73f3..7c8d5c61a22 100644 --- a/homeassistant/components/hassio/handler.py +++ b/homeassistant/components/hassio/handler.py @@ -568,7 +568,7 @@ class HassIO: This method is a coroutine. """ - joined_url = self._base_url.join(URL(command)) + joined_url = self._base_url.with_path(command) # This check is to make sure the normalized URL string # is the same as the URL string that was passed in. 
If # they are different, then the passed in command URL From 4c56cbe8c8d55c4169481555ef556e3aff5c8522 Mon Sep 17 00:00:00 2001 From: Simon Lamon <32477463+silamon@users.noreply.github.com> Date: Thu, 5 Sep 2024 08:50:49 +0200 Subject: [PATCH 1460/1635] Add follower to the PlayingMode enum (#125294) Update media_player.py --- homeassistant/components/linkplay/media_player.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 0b62b4dbcee..8b2fcf5d52f 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -59,6 +59,7 @@ SOURCE_MAP: dict[PlayingMode, str] = { PlayingMode.FM: "FM Radio", PlayingMode.RCA: "RCA", PlayingMode.UDISK: "USB", + PlayingMode.FOLLOWER: "Follower", } SOURCE_MAP_INV: dict[str, PlayingMode] = {v: k for k, v in SOURCE_MAP.items()} From a8f2204f4f61bdc59bdda3b48bb0f012c62f6924 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 5 Sep 2024 08:56:18 +0200 Subject: [PATCH 1461/1635] Teach recorder data migrator base class to update MigrationChanges (#125214) * Teach recorder data migrator base class to update MigrationChanges * Bump migration version * Improve test coverage * Update migration.py * Revert migrator version bump * Remove unneeded change --- .../components/recorder/migration.py | 113 ++++++------------ homeassistant/components/recorder/util.py | 17 +-- 2 files changed, 47 insertions(+), 83 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 242e503611c..324bdd5ea13 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2201,8 +2201,8 @@ class CommitBeforeMigrationTask(MigrationTask): @dataclass(frozen=True, kw_only=True) -class NeedsMigrateResult: - """Container for the return value of BaseRunTimeMigration.needs_migrate_impl.""" +class DataMigrationStatus: + """Container for data migrator status.""" needs_migrate: bool migration_done: bool @@ -2229,36 +2229,30 @@ class BaseRunTimeMigration(ABC): else: self.migration_done(instance, session) + @retryable_database_job("migrate data", method=True) def migrate_data(self, instance: Recorder) -> bool: """Migrate some data, returns True if migration is completed.""" - if result := self.migrate_data_impl(instance): + status = self.migrate_data_impl(instance) + if status.migration_done: if self.index_to_drop is not None: - self._remove_index(instance, self.index_to_drop) - self.migration_done(instance, None) - return result + table, index = self.index_to_drop + _drop_index(instance.get_session, table, index) + with session_scope(session=instance.get_session()) as session: + self.migration_done(instance, session) + _mark_migration_done(session, self.__class__) + return not status.needs_migrate - @staticmethod @abstractmethod - def migrate_data_impl(instance: Recorder) -> bool: - """Migrate some data, returns True if migration is completed.""" + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: + """Migrate some data, return if the migration needs to run and if it is done.""" - @staticmethod - @database_job_retry_wrapper("remove index") - def _remove_index(instance: Recorder, index_to_drop: tuple[str, str]) -> None: - """Remove indices. - - Called when migration is completed. 
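Stepping back from the diff: after this refactor a migrator subclass only implements `migrate_data_impl`, which processes one batch and reports whether more rows remain and whether the migration is finished; the base class then drops any leftover index and records the migration as done. A minimal standalone sketch of that batching contract, in which `FakeMigrator` and its in-memory rows are purely illustrative and not recorder code:

```python
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class DataMigrationStatus:
    """Status reported by one migration batch (same fields as in the patch)."""

    needs_migrate: bool
    migration_done: bool


class FakeMigrator:
    """Illustrative migrator that converts integer rows to strings, two per batch."""

    def __init__(self, rows: list[int]) -> None:
        self.rows = rows
        self.migrated: list[str] = []

    def migrate_data_impl(self) -> DataMigrationStatus:
        batch, self.rows = self.rows[:2], self.rows[2:]
        self.migrated.extend(str(value) for value in batch)
        done = not self.rows
        # In plain batching "done" and "no more work" coincide; a migrator may
        # report them separately (as the event_id cleanup later in this patch does).
        return DataMigrationStatus(needs_migrate=not done, migration_done=done)


migrator = FakeMigrator([1, 2, 3, 4, 5])
# The caller reschedules the job until no work remains, then performs the
# shared completion steps (marking the migration done, dropping indexes).
while migrator.migrate_data_impl().needs_migrate:
    pass
assert migrator.migrated == ["1", "2", "3", "4", "5"]
```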
- """ - table, index = index_to_drop - _drop_index(instance.get_session, table, index) - - def migration_done(self, instance: Recorder, session: Session | None) -> None: + def migration_done(self, instance: Recorder, session: Session) -> None: """Will be called after migrate returns True or if migration is not needed.""" @abstractmethod def needs_migrate_impl( self, instance: Recorder, session: Session - ) -> NeedsMigrateResult: + ) -> DataMigrationStatus: """Return if the migration needs to run and if it is done.""" def needs_migrate(self, instance: Recorder, session: Session) -> bool: @@ -2300,10 +2294,10 @@ class BaseRunTimeMigrationWithQuery(BaseRunTimeMigration): def needs_migrate_impl( self, instance: Recorder, session: Session - ) -> NeedsMigrateResult: + ) -> DataMigrationStatus: """Return if the migration needs to run.""" needs_migrate = execute_stmt_lambda_element(session, self.needs_migrate_query()) - return NeedsMigrateResult( + return DataMigrationStatus( needs_migrate=bool(needs_migrate), migration_done=not needs_migrate ) @@ -2315,9 +2309,7 @@ class StatesContextIDMigration(BaseRunTimeMigrationWithQuery): migration_id = "state_context_id_as_binary" index_to_drop = ("states", "ix_states_context_id") - @staticmethod - @retryable_database_job("migrate states context_ids to binary format") - def migrate_data_impl(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate states context_ids to use binary format, return True if completed.""" _to_bytes = _context_id_to_bytes session_maker = instance.get_session @@ -2342,13 +2334,10 @@ class StatesContextIDMigration(BaseRunTimeMigrationWithQuery): for state_id, last_updated_ts, context_id, context_user_id, context_parent_id in states ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not states: - _mark_migration_done(session, StatesContextIDMigration) + is_done = not states _LOGGER.debug("Migrating states context_ids to binary format: done=%s", is_done) - return is_done + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) def needs_migrate_query(self) -> StatementLambdaElement: """Return the query to check if the migration needs to run.""" @@ -2362,9 +2351,7 @@ class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): migration_id = "event_context_id_as_binary" index_to_drop = ("events", "ix_events_context_id") - @staticmethod - @retryable_database_job("migrate events context_ids to binary format") - def migrate_data_impl(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate events context_ids to use binary format, return True if completed.""" _to_bytes = _context_id_to_bytes session_maker = instance.get_session @@ -2389,13 +2376,10 @@ class EventsContextIDMigration(BaseRunTimeMigrationWithQuery): for event_id, time_fired_ts, context_id, context_user_id, context_parent_id in events ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not events: - _mark_migration_done(session, EventsContextIDMigration) + is_done = not events _LOGGER.debug("Migrating events context_ids to binary format: done=%s", is_done) - return is_done + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) def needs_migrate_query(self) -> StatementLambdaElement: """Return the query to check if the migration needs to run.""" @@ -2412,9 +2396,7 @@ class 
EventTypeIDMigration(BaseRunTimeMigrationWithQuery): # no new pending event_types about to be added to # the db since this happens live - @staticmethod - @retryable_database_job("migrate events event_types to event_type_ids") - def migrate_data_impl(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate event_type to event_type_ids, return True if completed.""" session_maker = instance.get_session _LOGGER.debug("Migrating event_types") @@ -2467,15 +2449,12 @@ class EventTypeIDMigration(BaseRunTimeMigrationWithQuery): ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not events: - _mark_migration_done(session, EventTypeIDMigration) + is_done = not events _LOGGER.debug("Migrating event_types done=%s", is_done) - return is_done + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) - def migration_done(self, instance: Recorder, session: Session | None) -> None: + def migration_done(self, instance: Recorder, session: Session) -> None: """Will be called after migrate returns True.""" _LOGGER.debug("Activating event_types manager as all data is migrated") instance.event_type_manager.active = True @@ -2495,9 +2474,7 @@ class EntityIDMigration(BaseRunTimeMigrationWithQuery): # no new pending states_meta about to be added to # the db since this happens live - @staticmethod - @retryable_database_job("migrate states entity_ids to states_meta") - def migrate_data_impl(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Migrate entity_ids to states_meta, return True if completed. We do this in two steps because we need the history queries to work @@ -2560,15 +2537,12 @@ class EntityIDMigration(BaseRunTimeMigrationWithQuery): ], ) - # If there is more work to do return False - # so that we can be called again - if is_done := not states: - _mark_migration_done(session, EntityIDMigration) + is_done = not states _LOGGER.debug("Migrating entity_ids done=%s", is_done) - return is_done + return DataMigrationStatus(needs_migrate=not is_done, migration_done=is_done) - def migration_done(self, instance: Recorder, _session: Session | None) -> None: + def migration_done(self, instance: Recorder, session: Session) -> None: """Will be called after migrate returns True.""" # The migration has finished, now we start the post migration # to remove the old entity_id data from the states table @@ -2576,15 +2550,7 @@ class EntityIDMigration(BaseRunTimeMigrationWithQuery): # so we set active to True _LOGGER.debug("Activating states_meta manager as all data is migrated") instance.states_meta_manager.active = True - session_generator = ( - contextlib.nullcontext(_session) - if _session - else session_scope(session=instance.get_session()) - ) - with ( - contextlib.suppress(SQLAlchemyError), - session_generator as session, - ): + with contextlib.suppress(SQLAlchemyError): # If ix_states_entity_id_last_updated_ts still exists # on the states table it means the entity id migration # finished by the EntityIDPostMigrationTask did not @@ -2609,9 +2575,7 @@ class EventIDPostMigration(BaseRunTimeMigration): task = MigrationTask migration_version = 2 - @staticmethod - @retryable_database_job("cleanup_legacy_event_ids") - def migrate_data_impl(instance: Recorder) -> bool: + def migrate_data_impl(self, instance: Recorder) -> DataMigrationStatus: """Remove old event_id index from states, returns True if completed. 
We used to link states to events using the event_id column but we no @@ -2651,9 +2615,8 @@ class EventIDPostMigration(BaseRunTimeMigration): if fk_remove_ok: _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) instance.use_legacy_events_index = False - _mark_migration_done(session, EventIDPostMigration) - return True + return DataMigrationStatus(needs_migrate=False, migration_done=fk_remove_ok) @staticmethod def _legacy_event_id_foreign_key_exists(instance: Recorder) -> bool: @@ -2674,16 +2637,16 @@ class EventIDPostMigration(BaseRunTimeMigration): def needs_migrate_impl( self, instance: Recorder, session: Session - ) -> NeedsMigrateResult: + ) -> DataMigrationStatus: """Return if the migration needs to run.""" if self.schema_version <= LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION: - return NeedsMigrateResult(needs_migrate=False, migration_done=False) + return DataMigrationStatus(needs_migrate=False, migration_done=False) if get_index_by_name( session, TABLE_STATES, LEGACY_STATES_EVENT_ID_INDEX ) is not None or self._legacy_event_id_foreign_key_exists(instance): instance.use_legacy_events_index = True - return NeedsMigrateResult(needs_migrate=True, migration_done=False) - return NeedsMigrateResult(needs_migrate=False, migration_done=True) + return DataMigrationStatus(needs_migrate=True, migration_done=False) + return DataMigrationStatus(needs_migrate=False, migration_done=True) @dataclass(slots=True) diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 4d494aed7d5..9f6cdccd79a 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -645,23 +645,24 @@ def _is_retryable_error(instance: Recorder, err: OperationalError) -> bool: type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], _R] +type _FuncOrMethType[**_P, _R] = Callable[_P, _R] -def retryable_database_job[_RecorderT: Recorder, **_P]( - description: str, -) -> Callable[[_FuncType[_RecorderT, _P, bool]], _FuncType[_RecorderT, _P, bool]]: +def retryable_database_job[**_P]( + description: str, method: bool = False +) -> Callable[[_FuncOrMethType[_P, bool]], _FuncOrMethType[_P, bool]]: """Try to execute a database job. The job should return True if it finished, and False if it needs to be rescheduled. 
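The change in this hunk lets the decorator wrap either a plain function, where the `Recorder` instance is the first argument, or a method, where it sits after `self`. A standalone sketch of that argument-position pattern, stripped of the database specifics and using a stand-in exception in place of the recorder's retryable-error check:

```python
import functools
from collections.abc import Callable


def retryable_job(description: str, method: bool = False) -> Callable:
    """Retry-style decorator; with method=True the instance is args[1], not args[0]."""
    instance_pos = 1 if method else 0

    def decorator(job: Callable[..., bool]) -> Callable[..., bool]:
        @functools.wraps(job)
        def wrapper(*args, **kwargs) -> bool:
            instance = args[instance_pos]  # object the failure is reported against
            try:
                return job(*args, **kwargs)
            except RuntimeError:  # stand-in for the retryable OperationalError test
                print(f"{description} failed for {instance!r}; job will be rescheduled")
                return False  # not finished, so the caller schedules another attempt

        return wrapper

    return decorator


@retryable_job("compact table")
def compact(instance: str) -> bool:
    return True


class Migrator:
    @retryable_job("migrate data", method=True)
    def migrate(self, instance: str) -> bool:
        return True


assert compact("recorder") is True  # instance found at position 0
assert Migrator().migrate("recorder") is True  # instance found at position 1
```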
""" + recorder_pos = 1 if method else 0 - def decorator( - job: _FuncType[_RecorderT, _P, bool], - ) -> _FuncType[_RecorderT, _P, bool]: + def decorator(job: _FuncOrMethType[_P, bool]) -> _FuncOrMethType[_P, bool]: @functools.wraps(job) - def wrapper(instance: _RecorderT, *args: _P.args, **kwargs: _P.kwargs) -> bool: + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> bool: + instance: Recorder = args[recorder_pos] # type: ignore[assignment] try: - return job(instance, *args, **kwargs) + return job(*args, **kwargs) except OperationalError as err: if _is_retryable_error(instance, err): assert isinstance(err.orig, BaseException) # noqa: PT017 From f778033bd8c4049d542719e041794148ae463846 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Thu, 5 Sep 2024 09:55:57 +0200 Subject: [PATCH 1462/1635] Improve config flow type hints in ukraine_alarm (#125302) --- .../components/ukraine_alarm/config_flow.py | 28 +++++++++++++------ 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/ukraine_alarm/config_flow.py b/homeassistant/components/ukraine_alarm/config_flow.py index faaa9240df3..12059124fa2 100644 --- a/homeassistant/components/ukraine_alarm/config_flow.py +++ b/homeassistant/components/ukraine_alarm/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import Any +from typing import TYPE_CHECKING, Any import aiohttp from uasiren.client import Client @@ -25,7 +25,7 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize a new UkraineAlarmConfigFlow.""" - self.states = None + self.states: list[dict[str, Any]] | None = None self.selected_region: dict[str, Any] | None = None async def async_step_user( @@ -69,17 +69,25 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): return await self._handle_pick_region("user", "district", user_input) - async def async_step_district(self, user_input=None): + async def async_step_district( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle user-chosen district.""" return await self._handle_pick_region("district", "community", user_input) - async def async_step_community(self, user_input=None): + async def async_step_community( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle user-chosen community.""" return await self._handle_pick_region("community", None, user_input, True) async def _handle_pick_region( - self, step_id: str, next_step: str | None, user_input, last_step=False - ): + self, + step_id: str, + next_step: str | None, + user_input: dict[str, str] | None, + last_step: bool = False, + ) -> ConfigFlowResult: """Handle picking a (sub)region.""" if self.selected_region: source = self.selected_region["regionChildIds"] @@ -121,8 +129,10 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): step_id=step_id, data_schema=schema, last_step=last_step ) - async def _async_finish_flow(self): + async def _async_finish_flow(self) -> ConfigFlowResult: """Finish the setup.""" + if TYPE_CHECKING: + assert self.selected_region is not None await self.async_set_unique_id(self.selected_region["regionId"]) self._abort_if_unique_id_configured() @@ -135,10 +145,10 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): ) -def _find(regions, region_id): +def _find(regions: list[dict[str, Any]], region_id): return next((region for region in regions if region["regionId"] == region_id), None) -def _make_regions_object(regions): +def 
_make_regions_object(regions: list[dict[str, Any]]) -> dict[str, str]: regions = sorted(regions, key=lambda region: region["regionName"].lower()) return {region["regionId"]: region["regionName"] for region in regions} From 984eba809c9977b277991ed60ded3189e9d4c4cc Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 5 Sep 2024 10:16:44 +0200 Subject: [PATCH 1463/1635] Simplify generic decorators in recorder (#125301) * Simplify generic decorators in recorder * Remove additional case --- homeassistant/components/recorder/util.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 9f6cdccd79a..75e403d8204 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -644,7 +644,7 @@ def _is_retryable_error(instance: Recorder, err: OperationalError) -> bool: ) -type _FuncType[_T, **_P, _R] = Callable[Concatenate[_T, _P], _R] +type _FuncType[**P, R] = Callable[Concatenate[Recorder, P], R] type _FuncOrMethType[**_P, _R] = Callable[_P, _R] @@ -683,9 +683,9 @@ def retryable_database_job[**_P]( return decorator -def database_job_retry_wrapper[_RecorderT: Recorder, **_P]( +def database_job_retry_wrapper[**_P]( description: str, attempts: int = 5 -) -> Callable[[_FuncType[_RecorderT, _P, None]], _FuncType[_RecorderT, _P, None]]: +) -> Callable[[_FuncType[_P, None]], _FuncType[_P, None]]: """Try to execute a database job multiple times. This wrapper handles InnoDB deadlocks and lock timeouts. @@ -695,10 +695,10 @@ def database_job_retry_wrapper[_RecorderT: Recorder, **_P]( """ def decorator( - job: _FuncType[_RecorderT, _P, None], - ) -> _FuncType[_RecorderT, _P, None]: + job: _FuncType[_P, None], + ) -> _FuncType[_P, None]: @functools.wraps(job) - def wrapper(instance: _RecorderT, *args: _P.args, **kwargs: _P.kwargs) -> None: + def wrapper(instance: Recorder, *args: _P.args, **kwargs: _P.kwargs) -> None: for attempt in range(attempts): try: job(instance, *args, **kwargs) From b5831344a02f2c1ae72daf42f353e09bb2db973e Mon Sep 17 00:00:00 2001 From: Malte Franken Date: Thu, 5 Sep 2024 18:53:12 +1000 Subject: [PATCH 1464/1635] Add diagnostics to GDACS integration (#125296) * simple diagnostics * add service status information * remove from no diagnostics list * wip * cater for the case where status info is undefined * make test work * code reformatted * add snapshot data * simplify code --- homeassistant/components/gdacs/diagnostics.py | 39 +++++++++++++++++++ script/hassfest/manifest.py | 1 - .../gdacs/snapshots/test_diagnostics.ambr | 21 ++++++++++ tests/components/gdacs/test_diagnostics.py | 33 ++++++++++++++++ 4 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/gdacs/diagnostics.py create mode 100644 tests/components/gdacs/snapshots/test_diagnostics.ambr create mode 100644 tests/components/gdacs/test_diagnostics.py diff --git a/homeassistant/components/gdacs/diagnostics.py b/homeassistant/components/gdacs/diagnostics.py new file mode 100644 index 00000000000..435e28ca1ae --- /dev/null +++ b/homeassistant/components/gdacs/diagnostics.py @@ -0,0 +1,39 @@ +"""Diagnostics support for GDACS integration.""" + +from __future__ import annotations + +from typing import Any + +from aio_georss_client.status_update import StatusUpdate + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LATITUDE, 
CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from . import GdacsFeedEntityManager +from .const import DOMAIN, FEED + +TO_REDACT = {CONF_LATITUDE, CONF_LONGITUDE} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data: dict[str, Any] = { + "info": async_redact_data(config_entry.data, TO_REDACT), + } + + manager: GdacsFeedEntityManager = hass.data[DOMAIN][FEED][config_entry.entry_id] + status_info: StatusUpdate = manager.status_info() + if status_info: + data["service"] = { + "status": status_info.status, + "total": status_info.total, + "last_update": status_info.last_update, + "last_update_successful": status_info.last_update_successful, + "last_timestamp": status_info.last_timestamp, + } + + return data diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 1c01ee7cf58..185b2b178e4 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -117,7 +117,6 @@ NO_IOT_CLASS = [ # https://github.com/home-assistant/developers.home-assistant/pull/1512 NO_DIAGNOSTICS = [ "dlna_dms", - "gdacs", "geonetnz_quakes", "hyperion", "nightscout", diff --git a/tests/components/gdacs/snapshots/test_diagnostics.ambr b/tests/components/gdacs/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..5b6154307f7 --- /dev/null +++ b/tests/components/gdacs/snapshots/test_diagnostics.ambr @@ -0,0 +1,21 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'info': dict({ + 'categories': list([ + ]), + 'latitude': '**REDACTED**', + 'longitude': '**REDACTED**', + 'radius': 25, + 'scan_interval': 300.0, + 'unit_system': 'metric', + }), + 'service': dict({ + 'last_timestamp': None, + 'last_update': '2024-09-05T15:00:00', + 'last_update_successful': '2024-09-05T15:00:00', + 'status': 'OK', + 'total': 0, + }), + }) +# --- diff --git a/tests/components/gdacs/test_diagnostics.py b/tests/components/gdacs/test_diagnostics.py new file mode 100644 index 00000000000..3c6cf4080a6 --- /dev/null +++ b/tests/components/gdacs/test_diagnostics.py @@ -0,0 +1,33 @@ +"""Test GDACS diagnostics.""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +@pytest.mark.freeze_time("2024-09-05 15:00:00") +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, +) -> None: + """Test config entry diagnostics.""" + with patch("aio_georss_client.feed.GeoRssFeed.update") as mock_feed_update: + mock_feed_update.return_value = "OK", [] + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert result == snapshot From 511ecf98d5422ea4171a77c5e9c8c30d54185d86 Mon Sep 17 00:00:00 2001 From: TimL Date: Thu, 5 Sep 2024 19:02:05 +1000 Subject: [PATCH 1465/1635] Add reauth flow for Smlight (#124418) * Add reauth flow for smlight integration * add strings for reauth * trigger reauth flow on authentication errors * Add tests for reauth flow * test for update failed on auth error * restore name 
title placeholder * raise config entry error to trigger reauth * Add test for reauth triggered at startup --------- Co-authored-by: Tim Lunn --- .../components/smlight/config_flow.py | 49 ++++++++ .../components/smlight/coordinator.py | 11 +- homeassistant/components/smlight/strings.json | 13 +- tests/components/smlight/conftest.py | 12 ++ tests/components/smlight/test_config_flow.py | 113 ++++++++++++++++++ tests/components/smlight/test_init.py | 24 +++- 6 files changed, 215 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index 1b8cc4efeb1..98da153ce75 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any from pysmlight import Api2 @@ -14,6 +15,7 @@ from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNA from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac +from . import SmConfigEntry from .const import DOMAIN STEP_USER_DATA_SCHEMA = vol.Schema( @@ -37,6 +39,7 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self.client: Api2 self.host: str | None = None + self._reauth_entry: SmConfigEntry | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -127,6 +130,52 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle reauth when API Authentication failed.""" + + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + host = entry_data[CONF_HOST] + self.context["title_placeholders"] = { + "host": host, + "name": entry_data.get(CONF_USERNAME, "unknown"), + } + self.client = Api2(host, session=async_get_clientsession(self.hass)) + self.host = host + + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle re-authentication of an existing config entry.""" + errors = {} + if user_input is not None: + try: + await self.client.authenticate( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD] + ) + except SmlightAuthError: + errors["base"] = "invalid_auth" + except SmlightConnectionError: + return self.async_abort(reason="cannot_connect") + else: + assert self._reauth_entry is not None + + return self.async_update_reload_and_abort( + self._reauth_entry, data={**user_input, CONF_HOST: self.host} + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=STEP_AUTH_DATA_SCHEMA, + description_placeholders=self.context["title_placeholders"], + errors=errors, + ) + async def _async_check_auth_required(self, user_input: dict[str, Any]) -> bool: """Check if auth required and attempt to authenticate.""" if await self.client.check_auth_needed(): diff --git a/homeassistant/components/smlight/coordinator.py b/homeassistant/components/smlight/coordinator.py index 6a29f14fafd..380644c81d1 100644 --- a/homeassistant/components/smlight/coordinator.py +++ b/homeassistant/components/smlight/coordinator.py @@ -8,7 +8,7 @@ from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError from homeassistant.config_entries import ConfigEntry from homeassistant.const import 
CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -54,8 +54,10 @@ class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): self.config_entry.data[CONF_PASSWORD], ) except SmlightAuthError as err: - LOGGER.error("Failed to authenticate: %s", err) - raise ConfigEntryError from err + raise ConfigEntryAuthFailed from err + else: + # Auth required but no credentials available + raise ConfigEntryAuthFailed info = await self.client.get_info() self.unique_id = format_mac(info.MAC) @@ -67,5 +69,8 @@ class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): sensors=await self.client.get_sensors(), info=await self.client.get_info(), ) + except SmlightAuthError as err: + raise ConfigEntryAuthFailed from err + except SmlightConnectionError as err: raise UpdateFailed(err) from err diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index 41f84c49bf9..f22966df904 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -17,6 +17,14 @@ "password": "[%key:common::config_flow::data::password%]" } }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "Please enter the correct username and password for host: {host}", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, "confirm_discovery": { "description": "Do you want to set up SMLIGHT at {host}?" 
} @@ -27,7 +35,10 @@ "unknown": "[%key:common::config_flow::error::unknown%]" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "reauth_failed": "[%key:common::config_flow::error::invalid_auth%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, "entity": { diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py index c51da5c5ee5..a86c7b4c27a 100644 --- a/tests/components/smlight/conftest.py +++ b/tests/components/smlight/conftest.py @@ -32,6 +32,18 @@ def mock_config_entry() -> MockConfigEntry: ) +@pytest.fixture +def mock_config_entry_host() -> MockConfigEntry: + """Return the default mocked config entry, no credentials.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: MOCK_HOST, + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + @pytest.fixture def platforms() -> list[Platform]: """Platforms, which should be loaded during the test.""" diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py index 9a23a8de753..fb07e29edd4 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -363,3 +363,116 @@ async def test_zeroconf_legacy_mac( assert len(mock_setup_entry.mock_calls) == 1 assert len(mock_smlight_client.get_info.mock_calls) == 2 + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth flow completes successfully.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "reauth_successful" + assert mock_config_entry.data == { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + CONF_HOST: MOCK_HOST, + } + + assert len(mock_smlight_client.authenticate.mock_calls) == 1 + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +async def test_reauth_auth_error( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_config_entry: MockConfigEntry, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth flow with authentication error.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + + mock_config_entry.add_to_hass(hass) + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: "test-bad", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "reauth_confirm" + + mock_smlight_client.authenticate.side_effect = None + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, 
+ }, + ) + + assert result3["type"] is FlowResultType.ABORT + assert result3["reason"] == "reauth_successful" + + assert mock_config_entry.data == { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + CONF_HOST: MOCK_HOST, + } + + assert len(mock_smlight_client.authenticate.mock_calls) == 2 + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + +async def test_reauth_connect_error( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow with error.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightConnectionError + + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "cannot_connect" + assert len(mock_smlight_client.authenticate.mock_calls) == 1 diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py index 682993cb943..1323c93e6bf 100644 --- a/tests/components/smlight/test_init.py +++ b/tests/components/smlight/test_init.py @@ -3,7 +3,7 @@ from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory -from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError, SmlightError import pytest from syrupy.assertion import SnapshotAssertion @@ -55,19 +55,37 @@ async def test_async_setup_auth_failed( assert entry.state is ConfigEntryState.NOT_LOADED +async def test_async_setup_missing_credentials( + hass: HomeAssistant, + mock_config_entry_host: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test we trigger reauth when credentials are missing.""" + mock_smlight_client.check_auth_needed.return_value = True + + await setup_integration(hass, mock_config_entry_host) + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0]["step_id"] == "reauth_confirm" + assert progress[0]["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + + +@pytest.mark.parametrize("error", [SmlightConnectionError, SmlightAuthError]) async def test_update_failed( hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_smlight_client: MagicMock, freezer: FrozenDateTimeFactory, + error: SmlightError, ) -> None: - """Test update failed due to connection error.""" + """Test update failed due to error.""" await setup_integration(hass, mock_config_entry) entity = hass.states.get("sensor.mock_title_core_chip_temp") assert entity.state is not STATE_UNAVAILABLE - mock_smlight_client.get_info.side_effect = SmlightConnectionError + mock_smlight_client.get_info.side_effect = error freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) From ba7f36328dfff5d2ec06988eb38e1ce5172b9ac0 Mon Sep 17 00:00:00 2001 From: Malte Franken Date: Thu, 5 Sep 2024 19:35:36 +1000 Subject: [PATCH 1466/1635] Add diagnostics to GeoNet NZ Quakes integration (#125320) * add diagnostics platform * add tests * add snapshot data * remove from no diagnostics list --- .../components/geonetnz_quakes/diagnostics.py | 39 +++++++++++++++++++ script/hassfest/manifest.py | 1 - .../snapshots/test_diagnostics.ambr | 21 
++++++++++ .../geonetnz_quakes/test_diagnostics.py | 33 ++++++++++++++++ 4 files changed, 93 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/geonetnz_quakes/diagnostics.py create mode 100644 tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr create mode 100644 tests/components/geonetnz_quakes/test_diagnostics.py diff --git a/homeassistant/components/geonetnz_quakes/diagnostics.py b/homeassistant/components/geonetnz_quakes/diagnostics.py new file mode 100644 index 00000000000..fbe9bf511aa --- /dev/null +++ b/homeassistant/components/geonetnz_quakes/diagnostics.py @@ -0,0 +1,39 @@ +"""Diagnostics support for GeoNet NZ Quakes Feeds integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE +from homeassistant.core import HomeAssistant + +from . import GeonetnzQuakesFeedEntityManager +from .const import DOMAIN, FEED + +TO_REDACT = {CONF_LATITUDE, CONF_LONGITUDE} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: ConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data: dict[str, Any] = { + "info": async_redact_data(config_entry.data, TO_REDACT), + } + + manager: GeonetnzQuakesFeedEntityManager = hass.data[DOMAIN][FEED][ + config_entry.entry_id + ] + status_info = manager.status_info() + if status_info: + data["service"] = { + "status": status_info.status, + "total": status_info.total, + "last_update": status_info.last_update, + "last_update_successful": status_info.last_update_successful, + "last_timestamp": status_info.last_timestamp, + } + + return data diff --git a/script/hassfest/manifest.py b/script/hassfest/manifest.py index 185b2b178e4..8643e34725f 100644 --- a/script/hassfest/manifest.py +++ b/script/hassfest/manifest.py @@ -117,7 +117,6 @@ NO_IOT_CLASS = [ # https://github.com/home-assistant/developers.home-assistant/pull/1512 NO_DIAGNOSTICS = [ "dlna_dms", - "geonetnz_quakes", "hyperion", "nightscout", "pvpc_hourly_pricing", diff --git a/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr b/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..481a662ccf9 --- /dev/null +++ b/tests/components/geonetnz_quakes/snapshots/test_diagnostics.ambr @@ -0,0 +1,21 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'info': dict({ + 'latitude': '**REDACTED**', + 'longitude': '**REDACTED**', + 'minimum_magnitude': 0.0, + 'mmi': 4, + 'radius': 25, + 'scan_interval': 300.0, + 'unit_system': 'metric', + }), + 'service': dict({ + 'last_timestamp': None, + 'last_update': '2024-09-05T15:00:00', + 'last_update_successful': '2024-09-05T15:00:00', + 'status': 'OK', + 'total': 0, + }), + }) +# --- diff --git a/tests/components/geonetnz_quakes/test_diagnostics.py b/tests/components/geonetnz_quakes/test_diagnostics.py new file mode 100644 index 00000000000..db5e1300768 --- /dev/null +++ b/tests/components/geonetnz_quakes/test_diagnostics.py @@ -0,0 +1,33 @@ +"""Test GeoNet NZ Quakes diagnostics.""" + +from __future__ import annotations + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import 
ClientSessionGenerator + + +@pytest.mark.freeze_time("2024-09-05 15:00:00") +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + config_entry: MockConfigEntry, +) -> None: + """Test config entry diagnostics.""" + with patch("aio_geojson_client.feed.GeoJsonFeed.update") as mock_feed_update: + mock_feed_update.return_value = "OK", [] + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + result = await get_diagnostics_for_config_entry(hass, hass_client, config_entry) + assert result == snapshot From 70966c2b63a34478576d0cef5899d75b39e7b2f0 Mon Sep 17 00:00:00 2001 From: Adam Pasztor Date: Thu, 5 Sep 2024 12:07:19 +0200 Subject: [PATCH 1467/1635] Add new data types to ADS integration (#125201) * feat: Introduce new data types to ADS integration. * refactor: ADS data unpacking based on PLC data type * refactor: handle BOOL and STRING as special cases. --- homeassistant/components/ads/__init__.py | 99 ++++++++++++++++++------ 1 file changed, 74 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/ads/__init__.py b/homeassistant/components/ads/__init__.py index f5742718b12..32d89b5b597 100644 --- a/homeassistant/components/ads/__init__.py +++ b/homeassistant/components/ads/__init__.py @@ -29,18 +29,40 @@ DATA_ADS = "data_ads" # Supported Types ADSTYPE_BOOL = "bool" ADSTYPE_BYTE = "byte" -ADSTYPE_DINT = "dint" ADSTYPE_INT = "int" -ADSTYPE_UDINT = "udint" ADSTYPE_UINT = "uint" +ADSTYPE_SINT = "sint" +ADSTYPE_USINT = "usint" +ADSTYPE_DINT = "dint" +ADSTYPE_UDINT = "udint" +ADSTYPE_WORD = "word" +ADSTYPE_DWORD = "dword" +ADSTYPE_LREAL = "lreal" +ADSTYPE_REAL = "real" +ADSTYPE_STRING = "string" +ADSTYPE_TIME = "time" +ADSTYPE_DATE = "date" +ADSTYPE_DATE_AND_TIME = "dt" +ADSTYPE_TOD = "tod" ADS_TYPEMAP = { ADSTYPE_BOOL: pyads.PLCTYPE_BOOL, ADSTYPE_BYTE: pyads.PLCTYPE_BYTE, - ADSTYPE_DINT: pyads.PLCTYPE_DINT, ADSTYPE_INT: pyads.PLCTYPE_INT, - ADSTYPE_UDINT: pyads.PLCTYPE_UDINT, ADSTYPE_UINT: pyads.PLCTYPE_UINT, + ADSTYPE_SINT: pyads.PLCTYPE_SINT, + ADSTYPE_USINT: pyads.PLCTYPE_USINT, + ADSTYPE_DINT: pyads.PLCTYPE_DINT, + ADSTYPE_UDINT: pyads.PLCTYPE_UDINT, + ADSTYPE_WORD: pyads.PLCTYPE_WORD, + ADSTYPE_DWORD: pyads.PLCTYPE_DWORD, + ADSTYPE_REAL: pyads.PLCTYPE_REAL, + ADSTYPE_LREAL: pyads.PLCTYPE_LREAL, + ADSTYPE_STRING: pyads.PLCTYPE_STRING, + ADSTYPE_TIME: pyads.PLCTYPE_TIME, + ADSTYPE_DATE: pyads.PLCTYPE_DATE, + ADSTYPE_DATE_AND_TIME: pyads.PLCTYPE_DT, + ADSTYPE_TOD: pyads.PLCTYPE_TOD, } CONF_ADS_FACTOR = "factor" @@ -75,12 +97,23 @@ SCHEMA_SERVICE_WRITE_DATA_BY_NAME = vol.Schema( { vol.Required(CONF_ADS_TYPE): vol.In( [ + ADSTYPE_BOOL, + ADSTYPE_BYTE, ADSTYPE_INT, ADSTYPE_UINT, - ADSTYPE_BYTE, - ADSTYPE_BOOL, + ADSTYPE_SINT, + ADSTYPE_USINT, ADSTYPE_DINT, ADSTYPE_UDINT, + ADSTYPE_WORD, + ADSTYPE_DWORD, + ADSTYPE_REAL, + ADSTYPE_LREAL, + ADSTYPE_STRING, + ADSTYPE_TIME, + ADSTYPE_DATE, + ADSTYPE_DATE_AND_TIME, + ADSTYPE_TOD, ] ), vol.Required(CONF_ADS_VALUE): vol.Coerce(int), @@ -222,37 +255,53 @@ class AdsHub: def _device_notification_callback(self, notification, name): """Handle device notifications.""" contents = notification.contents - hnotify = int(contents.hNotification) _LOGGER.debug("Received notification %d", hnotify) - # get dynamically sized data array + # Get dynamically sized data array data_size = contents.cbSampleSize - data = (ctypes.c_ubyte * data_size).from_address( + data_address = ( ctypes.addressof(contents) 
+ pyads.structs.SAdsNotificationHeader.data.offset ) + data = (ctypes.c_ubyte * data_size).from_address(data_address) - try: - with self._lock: - notification_item = self._notification_items[hnotify] - except KeyError: + # Acquire notification item + with self._lock: + notification_item = self._notification_items.get(hnotify) + + if not notification_item: _LOGGER.error("Unknown device notification handle: %d", hnotify) return - # Parse data to desired datatype - if notification_item.plc_datatype == pyads.PLCTYPE_BOOL: + # Data parsing based on PLC data type + plc_datatype = notification_item.plc_datatype + unpack_formats = { + pyads.PLCTYPE_BYTE: " Date: Thu, 5 Sep 2024 13:03:16 +0200 Subject: [PATCH 1468/1635] Split opentherm_gw entity base class (#125330) Add OpenThermStatusEntity to allow entities that don't need status updates --- .../components/opentherm_gw/binary_sensor.py | 4 ++-- homeassistant/components/opentherm_gw/button.py | 14 ++------------ homeassistant/components/opentherm_gw/climate.py | 4 ++-- homeassistant/components/opentherm_gw/entity.py | 14 +++++++++----- homeassistant/components/opentherm_gw/sensor.py | 4 ++-- 5 files changed, 17 insertions(+), 23 deletions(-) diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index 00885a18088..5d542bedc07 100644 --- a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -22,7 +22,7 @@ from .const import ( THERMOSTAT_DEVICE_DESCRIPTION, OpenThermDataSource, ) -from .entity import OpenThermEntity, OpenThermEntityDescription +from .entity import OpenThermEntityDescription, OpenThermStatusEntity @dataclass(frozen=True, kw_only=True) @@ -404,7 +404,7 @@ async def async_setup_entry( ) -class OpenThermBinarySensor(OpenThermEntity, BinarySensorEntity): +class OpenThermBinarySensor(OpenThermStatusEntity, BinarySensorEntity): """Represent an OpenTherm Gateway binary sensor.""" _attr_entity_category = EntityCategory.DIAGNOSTIC diff --git a/homeassistant/components/opentherm_gw/button.py b/homeassistant/components/opentherm_gw/button.py index aa0a3dbcda5..bac50295199 100644 --- a/homeassistant/components/opentherm_gw/button.py +++ b/homeassistant/components/opentherm_gw/button.py @@ -12,16 +12,11 @@ from homeassistant.components.button import ( ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, EntityCategory -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import OpenThermGatewayHub -from .const import ( - DATA_GATEWAYS, - DATA_OPENTHERM_GW, - GATEWAY_DEVICE_DESCRIPTION, - OpenThermDataSource, -) +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, GATEWAY_DEVICE_DESCRIPTION from .entity import OpenThermEntity, OpenThermEntityDescription @@ -63,11 +58,6 @@ class OpenThermButton(OpenThermEntity, ButtonEntity): _attr_entity_category = EntityCategory.CONFIG entity_description: OpenThermButtonEntityDescription - @callback - def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: - """Handle status updates from the component.""" - # We don't need any information from the reports here - async def async_press(self) -> None: """Perform button action.""" await self.entity_description.action(self._gateway) diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 45f1ca478f5..6edfeb35ec3 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -34,7 +34,7 @@ from .const import ( THERMOSTAT_DEVICE_DESCRIPTION, OpenThermDataSource, ) -from .entity import OpenThermEntity, OpenThermEntityDescription +from .entity import OpenThermEntityDescription, OpenThermStatusEntity _LOGGER = logging.getLogger(__name__) @@ -69,7 +69,7 @@ async def async_setup_entry( async_add_entities(ents) -class OpenThermClimate(OpenThermEntity, ClimateEntity): +class OpenThermClimate(OpenThermStatusEntity, ClimateEntity): """Representation of a climate device.""" _attr_supported_features = ( diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py index b7110fa9e1b..e87a6c182aa 100644 --- a/homeassistant/components/opentherm_gw/entity.py +++ b/homeassistant/components/opentherm_gw/entity.py @@ -52,6 +52,15 @@ class OpenThermEntity(Entity): }, ) + @property + def available(self) -> bool: + """Return connection status of the hub to indicate availability.""" + return self._gateway.connected + + +class OpenThermStatusEntity(OpenThermEntity): + """Represent an OpenTherm entity that receives status updates.""" + async def async_added_to_hass(self) -> None: """Subscribe to updates from the component.""" self.async_on_remove( @@ -60,11 +69,6 @@ class OpenThermEntity(Entity): ) ) - @property - def available(self) -> bool: - """Return connection status of the hub to indicate availability.""" - return self._gateway.connected - @callback def receive_report(self, status: dict[OpenThermDataSource, dict]) -> None: """Handle status updates from the component.""" diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py index eeadd5c4ee1..5ccb4166665 100644 --- a/homeassistant/components/opentherm_gw/sensor.py +++ b/homeassistant/components/opentherm_gw/sensor.py @@ -32,7 +32,7 @@ from .const import ( THERMOSTAT_DEVICE_DESCRIPTION, OpenThermDataSource, ) -from .entity import OpenThermEntity, OpenThermEntityDescription +from .entity import OpenThermEntityDescription, OpenThermStatusEntity SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 @@ -889,7 +889,7 @@ async def async_setup_entry( ) -class OpenThermSensor(OpenThermEntity, SensorEntity): +class OpenThermSensor(OpenThermStatusEntity, SensorEntity): """Representation of an OpenTherm sensor.""" _attr_entity_category = EntityCategory.DIAGNOSTIC From 86ae70780ccbc58e0314e0f9e2ea26eb8a86d1a9 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 5 Sep 2024 13:09:27 +0200 Subject: [PATCH 1469/1635] Refactor 
recorder retryable_database_job decorator (#125306) --- .../components/recorder/migration.py | 3 +- homeassistant/components/recorder/util.py | 72 ++++++++++++------- 2 files changed, 49 insertions(+), 26 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 324bdd5ea13..4d9978c641b 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -106,6 +106,7 @@ from .util import ( execute_stmt_lambda_element, get_index_by_name, retryable_database_job, + retryable_database_job_method, session_scope, ) @@ -2229,7 +2230,7 @@ class BaseRunTimeMigration(ABC): else: self.migration_done(instance, session) - @retryable_database_job("migrate data", method=True) + @retryable_database_job_method("migrate data") def migrate_data(self, instance: Recorder) -> bool: """Migrate some data, returns True if migration is completed.""" status = self.migrate_data_impl(instance) diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index 75e403d8204..d078c32cb88 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -645,44 +645,66 @@ def _is_retryable_error(instance: Recorder, err: OperationalError) -> bool: type _FuncType[**P, R] = Callable[Concatenate[Recorder, P], R] +type _MethType[Self, **P, R] = Callable[Concatenate[Self, Recorder, P], R] type _FuncOrMethType[**_P, _R] = Callable[_P, _R] def retryable_database_job[**_P]( - description: str, method: bool = False -) -> Callable[[_FuncOrMethType[_P, bool]], _FuncOrMethType[_P, bool]]: + description: str, +) -> Callable[[_FuncType[_P, bool]], _FuncType[_P, bool]]: """Try to execute a database job. The job should return True if it finished, and False if it needs to be rescheduled. """ - recorder_pos = 1 if method else 0 - def decorator(job: _FuncOrMethType[_P, bool]) -> _FuncOrMethType[_P, bool]: - @functools.wraps(job) - def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> bool: - instance: Recorder = args[recorder_pos] # type: ignore[assignment] - try: - return job(*args, **kwargs) - except OperationalError as err: - if _is_retryable_error(instance, err): - assert isinstance(err.orig, BaseException) # noqa: PT017 - _LOGGER.info( - "%s; %s not completed, retrying", err.orig.args[1], description - ) - time.sleep(instance.db_retry_wait) - # Failed with retryable error - return False - - _LOGGER.warning("Error executing %s: %s", description, err) - - # Failed with permanent error - return True - - return wrapper + def decorator(job: _FuncType[_P, bool]) -> _FuncType[_P, bool]: + return _wrap_func_or_meth(job, description, False) return decorator +def retryable_database_job_method[_Self, **_P]( + description: str, +) -> Callable[[_MethType[_Self, _P, bool]], _MethType[_Self, _P, bool]]: + """Try to execute a database job. + + The job should return True if it finished, and False if it needs to be rescheduled. 
+ """ + + def decorator(job: _MethType[_Self, _P, bool]) -> _MethType[_Self, _P, bool]: + return _wrap_func_or_meth(job, description, True) + + return decorator + + +def _wrap_func_or_meth[**_P]( + job: _FuncOrMethType[_P, bool], description: str, method: bool +) -> _FuncOrMethType[_P, bool]: + recorder_pos = 1 if method else 0 + + @functools.wraps(job) + def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> bool: + instance: Recorder = args[recorder_pos] # type: ignore[assignment] + try: + return job(*args, **kwargs) + except OperationalError as err: + if _is_retryable_error(instance, err): + assert isinstance(err.orig, BaseException) # noqa: PT017 + _LOGGER.info( + "%s; %s not completed, retrying", err.orig.args[1], description + ) + time.sleep(instance.db_retry_wait) + # Failed with retryable error + return False + + _LOGGER.warning("Error executing %s: %s", description, err) + + # Failed with permanent error + return True + + return wrapper + + def database_job_retry_wrapper[**_P]( description: str, attempts: int = 5 ) -> Callable[[_FuncType[_P, None]], _FuncType[_P, None]]: From 38f3fa021038b2e96028bc5a452c16c14975e62e Mon Sep 17 00:00:00 2001 From: "Phill (pssc)" Date: Thu, 5 Sep 2024 15:49:07 +0100 Subject: [PATCH 1470/1635] Add Squeezebox server service binary sensors (#122473) * squeezebox add binary sensor + coordinator * squeezebox add connected via for media_player * squeezebox add Player type for player * Add more type info * Fix linter errors * squeezebox use our own status entity * squeezebox rework device handling based on freedback * Fix device creation * squeezebox rework coordinator error handling * Fix lint type error * Correct spelling * Correct spelling * remove large comments * insert small comment * add translation support * Simply sensor * clean update function, minimise comments to the useful bits * Fix after testing * Update homeassistant/components/squeezebox/entity.py Co-authored-by: Joost Lekkerkerker * move data prep out of Device assign for clarity * stop being a generic api * Humans need to read the sensors... * ruff format * Humans need to read the sensors... * Revert "ruff format" This reverts commit 8fcb8143e7c4427e75d31f9dd57f6c2027f8df6a. * ruff format * Humans need to read the sensors... 
* errors after testing * infered * drop context * cutdown coordinator for the binary sensors * add tests for binary sensors * Fix import * add some basic media_player tests * Fix spelling and file headers * Fix spelling * remove uuid and use service device cat * use diag device * assert execpted value * ruff format * Update homeassistant/components/squeezebox/__init__.py Co-authored-by: Joost Lekkerkerker * Simplify T/F * Fix file header * remove redudant check * remove player tests from this commit * Fix formatting * remove unused * Fix function Type * Fix Any to bool * Fix browser tests * Patch our squeebox componemt not the server in the lib * ruff --------- Co-authored-by: Joost Lekkerkerker --- .../components/squeezebox/__init__.py | 58 ++++++++++++++-- .../components/squeezebox/binary_sensor.py | 54 +++++++++++++++ homeassistant/components/squeezebox/const.py | 17 +++++ .../components/squeezebox/coordinator.py | 59 ++++++++++++++++ homeassistant/components/squeezebox/entity.py | 31 +++++++++ .../components/squeezebox/media_player.py | 7 +- .../components/squeezebox/strings.json | 10 +++ tests/components/squeezebox/__init__.py | 68 +++++++++++++++++++ tests/components/squeezebox/conftest.py | 1 + .../squeezebox/test_binary_sensor.py | 33 +++++++++ .../squeezebox/test_media_browser.py | 6 +- 11 files changed, 334 insertions(+), 10 deletions(-) create mode 100644 homeassistant/components/squeezebox/binary_sensor.py create mode 100644 homeassistant/components/squeezebox/coordinator.py create mode 100644 homeassistant/components/squeezebox/entity.py create mode 100644 tests/components/squeezebox/test_binary_sensor.py diff --git a/homeassistant/components/squeezebox/__init__.py b/homeassistant/components/squeezebox/__init__.py index b6c7f049311..be8c92b18df 100644 --- a/homeassistant/components/squeezebox/__init__.py +++ b/homeassistant/components/squeezebox/__init__.py @@ -1,6 +1,7 @@ """The Squeezebox integration.""" from asyncio import timeout +from dataclasses import dataclass import logging from pysqueezebox import Server @@ -15,23 +16,42 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceEntryType, + format_mac, +) from .const import ( CONF_HTTPS, DISCOVERY_TASK, DOMAIN, + MANUFACTURER, + SERVER_MODEL, STATUS_API_TIMEOUT, STATUS_QUERY_LIBRARYNAME, + STATUS_QUERY_MAC, STATUS_QUERY_UUID, + STATUS_QUERY_VERSION, ) +from .coordinator import LMSStatusDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) -PLATFORMS = [Platform.MEDIA_PLAYER] +PLATFORMS = [Platform.BINARY_SENSOR, Platform.MEDIA_PLAYER] -type SqueezeboxConfigEntry = ConfigEntry[Server] +@dataclass +class SqueezeboxData: + """SqueezeboxData data class.""" + + coordinator: LMSStatusDataUpdateCoordinator + server: Server + + +type SqueezeboxConfigEntry = ConfigEntry[SqueezeboxData] async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -> bool: @@ -66,25 +86,51 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) - _LOGGER.debug("LMS Status for setup = %s", status) lms.uuid = status[STATUS_QUERY_UUID] + _LOGGER.debug("LMS %s = '%s' with uuid = %s ", lms.name, host, lms.uuid) lms.name = ( (STATUS_QUERY_LIBRARYNAME in status and status[STATUS_QUERY_LIBRARYNAME]) and 
status[STATUS_QUERY_LIBRARYNAME] or host ) - _LOGGER.debug("LMS %s = '%s' with uuid = %s ", lms.name, host, lms.uuid) + version = STATUS_QUERY_VERSION in status and status[STATUS_QUERY_VERSION] or None + # mac can be missing + mac_connect = ( + {(CONNECTION_NETWORK_MAC, format_mac(status[STATUS_QUERY_MAC]))} + if STATUS_QUERY_MAC in status + else None + ) - entry.runtime_data = lms + device_registry = dr.async_get(hass) + device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, lms.uuid)}, + name=lms.name, + manufacturer=MANUFACTURER, + model=SERVER_MODEL, + sw_version=version, + entry_type=DeviceEntryType.SERVICE, + connections=mac_connect, + ) + _LOGGER.debug("LMS Device %s", device) + coordinator = LMSStatusDataUpdateCoordinator(hass, lms) + + entry.runtime_data = SqueezeboxData( + coordinator=coordinator, + server=lms, + ) + + await coordinator.async_config_entry_first_refresh() await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -> bool: """Unload a config entry.""" # Stop player discovery task for this config entry. _LOGGER.debug( "Reached async_unload_entry for LMS=%s(%s)", - entry.runtime_data.name or "Unknown", + entry.runtime_data.server.name or "Unknown", entry.entry_id, ) diff --git a/homeassistant/components/squeezebox/binary_sensor.py b/homeassistant/components/squeezebox/binary_sensor.py new file mode 100644 index 00000000000..ec0bac0fe43 --- /dev/null +++ b/homeassistant/components/squeezebox/binary_sensor.py @@ -0,0 +1,54 @@ +"""Binary sensor platform for Squeezebox integration.""" + +from __future__ import annotations + +import logging + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SqueezeboxConfigEntry +from .const import STATUS_SENSOR_NEEDSRESTART, STATUS_SENSOR_RESCAN +from .entity import LMSStatusEntity + +SENSORS: tuple[BinarySensorEntityDescription, ...] 
= ( + BinarySensorEntityDescription( + key=STATUS_SENSOR_RESCAN, + device_class=BinarySensorDeviceClass.RUNNING, + ), + BinarySensorEntityDescription( + key=STATUS_SENSOR_NEEDSRESTART, + device_class=BinarySensorDeviceClass.UPDATE, + entity_category=EntityCategory.DIAGNOSTIC, + ), +) + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SqueezeboxConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Platform setup using common elements.""" + + async_add_entities( + ServerStatusBinarySensor(entry.runtime_data.coordinator, description) + for description in SENSORS + ) + + +class ServerStatusBinarySensor(LMSStatusEntity, BinarySensorEntity): + """LMS Status based sensor from LMS via cooridnatior.""" + + @property + def is_on(self) -> bool: + """LMS Status directly from coordinator data.""" + return bool(self.coordinator.data[self.entity_description.key]) diff --git a/homeassistant/components/squeezebox/const.py b/homeassistant/components/squeezebox/const.py index a814cf6ecc4..a4824f2091f 100644 --- a/homeassistant/components/squeezebox/const.py +++ b/homeassistant/components/squeezebox/const.py @@ -5,8 +5,25 @@ DISCOVERY_TASK = "discovery_task" DOMAIN = "squeezebox" DEFAULT_PORT = 9000 KNOWN_PLAYERS = "known_players" +MANUFACTURER = "https://lyrion.org/" +PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub" SENSOR_UPDATE_INTERVAL = 60 +SERVER_MODEL = "Lyron Music Server" STATUS_API_TIMEOUT = 10 +STATUS_SENSOR_LASTSCAN = "lastscan" +STATUS_SENSOR_NEEDSRESTART = "needsrestart" +STATUS_SENSOR_NEWVERSION = "newversion" +STATUS_SENSOR_NEWPLUGINS = "newplugins" +STATUS_SENSOR_RESCAN = "rescan" +STATUS_SENSOR_INFO_TOTAL_ALBUMS = "info total albums" +STATUS_SENSOR_INFO_TOTAL_ARTISTS = "info total artists" +STATUS_SENSOR_INFO_TOTAL_DURATION = "info total duration" +STATUS_SENSOR_INFO_TOTAL_GENRES = "info total genres" +STATUS_SENSOR_INFO_TOTAL_SONGS = "info total songs" +STATUS_SENSOR_PLAYER_COUNT = "player count" +STATUS_SENSOR_OTHER_PLAYER_COUNT = "other player count" STATUS_QUERY_LIBRARYNAME = "libraryname" +STATUS_QUERY_MAC = "mac" STATUS_QUERY_UUID = "uuid" +STATUS_QUERY_VERSION = "version" SQUEEZEBOX_SOURCE_STRINGS = ("source:", "wavin:", "spotify:") diff --git a/homeassistant/components/squeezebox/coordinator.py b/homeassistant/components/squeezebox/coordinator.py new file mode 100644 index 00000000000..71c55452004 --- /dev/null +++ b/homeassistant/components/squeezebox/coordinator.py @@ -0,0 +1,59 @@ +"""DataUpdateCoordinator for the Squeezebox integration.""" + +from asyncio import timeout +from datetime import timedelta +import logging + +from pysqueezebox import Server + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + SENSOR_UPDATE_INTERVAL, + STATUS_API_TIMEOUT, + STATUS_SENSOR_NEEDSRESTART, + STATUS_SENSOR_RESCAN, +) + +_LOGGER = logging.getLogger(__name__) + + +class LMSStatusDataUpdateCoordinator(DataUpdateCoordinator): + """LMS Status custom coordinator.""" + + def __init__(self, hass: HomeAssistant, lms: Server) -> None: + """Initialize my coordinator.""" + super().__init__( + hass, + _LOGGER, + name=lms.name, + update_interval=timedelta(seconds=SENSOR_UPDATE_INTERVAL), + always_update=False, + ) + self.lms = lms + + async def _async_update_data(self) -> dict: + """Fetch data fromn LMS status call. 
+ + Then we process only a subset to make then nice for HA + """ + async with timeout(STATUS_API_TIMEOUT): + data = await self.lms.async_status() + + if not data: + raise UpdateFailed("No data from status poll") + _LOGGER.debug("Raw serverstatus %s=%s", self.lms.name, data) + + return self._prepare_status_data(data) + + def _prepare_status_data(self, data: dict) -> dict: + """Sensors that need the data changing for HA presentation.""" + + # rescan bool are we rescanning alter poll not present if false + data[STATUS_SENSOR_RESCAN] = STATUS_SENSOR_RESCAN in data + # needsrestart bool pending lms plugin updates not present if false + data[STATUS_SENSOR_NEEDSRESTART] = STATUS_SENSOR_NEEDSRESTART in data + + _LOGGER.debug("Processed serverstatus %s=%s", self.lms.name, data) + return data diff --git a/homeassistant/components/squeezebox/entity.py b/homeassistant/components/squeezebox/entity.py new file mode 100644 index 00000000000..8ac80265369 --- /dev/null +++ b/homeassistant/components/squeezebox/entity.py @@ -0,0 +1,31 @@ +"""Base class for Squeezebox Sensor entities.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, STATUS_QUERY_UUID +from .coordinator import LMSStatusDataUpdateCoordinator + + +class LMSStatusEntity(CoordinatorEntity[LMSStatusDataUpdateCoordinator]): + """Defines a base status sensor entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: LMSStatusDataUpdateCoordinator, + description: EntityDescription, + ) -> None: + """Initialize status sensor entity.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_translation_key = description.key + self._attr_unique_id = ( + f"{coordinator.data[STATUS_QUERY_UUID]}_{description.key}" + ) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.data[STATUS_QUERY_UUID])}, + ) diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index 0294c17f50a..f7f8df55e2c 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -113,7 +113,7 @@ async def async_setup_entry( """Set up an player discovery from a config entry.""" hass.data.setdefault(DOMAIN, {}) known_players = hass.data[DOMAIN].setdefault(KNOWN_PLAYERS, []) - lms = entry.runtime_data + lms = entry.runtime_data.server async def _player_discovery(now: datetime | None = None) -> None: """Discover squeezebox players by polling server.""" @@ -136,7 +136,7 @@ async def async_setup_entry( if not entity: _LOGGER.debug("Adding new entity: %s", player) - entity = SqueezeBoxEntity(player) + entity = SqueezeBoxEntity(player, lms) known_players.append(entity) async_add_entities([entity]) @@ -212,7 +212,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): _last_update: datetime | None = None _attr_available = True - def __init__(self, player: Player) -> None: + def __init__(self, player: Player, server: Server) -> None: """Initialize the SqueezeBox device.""" self._player = player self._query_result: bool | dict = {} @@ -222,6 +222,7 @@ class SqueezeBoxEntity(MediaPlayerEntity): identifiers={(DOMAIN, self._attr_unique_id)}, name=player.name, connections={(CONNECTION_NETWORK_MAC, self._attr_unique_id)}, + via_device=(DOMAIN, server.uuid), ) @property diff --git a/homeassistant/components/squeezebox/strings.json 
b/homeassistant/components/squeezebox/strings.json index 899d35813aa..89302951146 100644 --- a/homeassistant/components/squeezebox/strings.json +++ b/homeassistant/components/squeezebox/strings.json @@ -75,5 +75,15 @@ "name": "Unsync", "description": "Removes this player from its sync group." } + }, + "entity": { + "binary_sensor": { + "rescan": { + "name": "Library rescan" + }, + "needsrestart": { + "name": "Needs restart" + } + } } } diff --git a/tests/components/squeezebox/__init__.py b/tests/components/squeezebox/__init__.py index 34c0363292d..d5faabba32e 100644 --- a/tests/components/squeezebox/__init__.py +++ b/tests/components/squeezebox/__init__.py @@ -1 +1,69 @@ """Tests for the Logitech Squeezebox integration.""" + +from homeassistant.components.squeezebox.const import ( + DOMAIN, + STATUS_QUERY_LIBRARYNAME, + STATUS_QUERY_MAC, + STATUS_QUERY_UUID, + STATUS_QUERY_VERSION, + STATUS_SENSOR_RESCAN, +) +from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant + +# from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + +FAKE_IP = "42.42.42.42" +FAKE_MAC = "deadbeefdead" +FAKE_UUID = "deadbeefdeadbeefbeefdeafbeef42" +FAKE_PORT = 9000 +FAKE_VERSION = "42.0" + +FAKE_QUERY_RESPONSE = { + STATUS_QUERY_UUID: FAKE_UUID, + STATUS_QUERY_MAC: FAKE_MAC, + STATUS_QUERY_VERSION: FAKE_VERSION, + STATUS_SENSOR_RESCAN: 1, + STATUS_QUERY_LIBRARYNAME: "FakeLib", + "players_loop": [ + { + "isplaying": 0, + "name": "SqueezeLite-HA-Addon", + "seq_no": 0, + "modelname": "SqueezeLite-HA-Addon", + "playerindex": "status", + "model": "squeezelite", + "uuid": FAKE_UUID, + "canpoweroff": 1, + "ip": "192.168.78.86:57700", + "displaytype": "none", + "playerid": "f9:23:cd:37:c5:ff", + "power": 0, + "isplayer": 1, + "connected": 1, + "firmware": "v2.0.0-1488", + } + ], + "count": 1, +} + + +async def setup_mocked_integration(hass: HomeAssistant) -> MockConfigEntry: + """Mock ConfigEntry in Home Assistant.""" + + entry = MockConfigEntry( + domain=DOMAIN, + unique_id=FAKE_UUID, + data={ + CONF_HOST: FAKE_IP, + CONF_PORT: FAKE_PORT, + }, + ) + + entry.add_to_hass(hass) + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return entry diff --git a/tests/components/squeezebox/conftest.py b/tests/components/squeezebox/conftest.py index 4a4bdc6ae73..26cb0726aca 100644 --- a/tests/components/squeezebox/conftest.py +++ b/tests/components/squeezebox/conftest.py @@ -130,4 +130,5 @@ def lms() -> MagicMock: ) lms.async_get_players = AsyncMock(return_value=[player]) lms.async_query = AsyncMock(return_value={"uuid": format_mac(TEST_MAC)}) + lms.async_status = AsyncMock(return_value={"uuid": format_mac(TEST_MAC)}) return lms diff --git a/tests/components/squeezebox/test_binary_sensor.py b/tests/components/squeezebox/test_binary_sensor.py new file mode 100644 index 00000000000..a2de0cbf95e --- /dev/null +++ b/tests/components/squeezebox/test_binary_sensor.py @@ -0,0 +1,33 @@ +"""Test squeezebox binary sensors.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import FAKE_QUERY_RESPONSE, setup_mocked_integration + + +async def test_binary_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor states and attributes.""" + + # Setup component + with ( + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.BINARY_SENSOR], + ), + patch( + "homeassistant.components.squeezebox.Server.async_query", + return_value=FAKE_QUERY_RESPONSE, + ), + ): + await setup_mocked_integration(hass) + state = hass.states.get("binary_sensor.fakelib_library_rescan") + + assert state is not None + assert state.state == "on" diff --git a/tests/components/squeezebox/test_media_browser.py b/tests/components/squeezebox/test_media_browser.py index 62d668ca57b..c3398d24aa3 100644 --- a/tests/components/squeezebox/test_media_browser.py +++ b/tests/components/squeezebox/test_media_browser.py @@ -16,7 +16,7 @@ from homeassistant.components.squeezebox.browse_media import ( LIBRARY, MEDIA_TYPE_TO_SQUEEZEBOX, ) -from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -30,6 +30,10 @@ async def setup_integration( """Fixture for setting up the component.""" with ( patch("homeassistant.components.squeezebox.Server", return_value=lms), + patch( + "homeassistant.components.squeezebox.PLATFORMS", + [Platform.MEDIA_PLAYER], + ), patch( "homeassistant.components.squeezebox.media_player.start_server_discovery" ), From b0bfe71b9bf8ba534ad8d954f1321a2efee4a65d Mon Sep 17 00:00:00 2001 From: peteS-UK <64092177+peteS-UK@users.noreply.github.com> Date: Thu, 5 Sep 2024 17:44:19 +0100 Subject: [PATCH 1471/1635] Fix typo in squeezebox (#125352) Spelling Correction on SERVER_MODEL --- homeassistant/components/squeezebox/const.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/squeezebox/const.py b/homeassistant/components/squeezebox/const.py index a4824f2091f..0bf8c24a5d1 100644 --- a/homeassistant/components/squeezebox/const.py +++ b/homeassistant/components/squeezebox/const.py @@ -8,7 +8,7 @@ KNOWN_PLAYERS = "known_players" MANUFACTURER = "https://lyrion.org/" PLAYER_DISCOVERY_UNSUB = "player_discovery_unsub" SENSOR_UPDATE_INTERVAL = 60 -SERVER_MODEL = "Lyron Music Server" +SERVER_MODEL = "Lyrion Music Server" STATUS_API_TIMEOUT = 10 STATUS_SENSOR_LASTSCAN = "lastscan" STATUS_SENSOR_NEEDSRESTART = "needsrestart" From d2d01b337da3cde1935c15ebd9f457e1c465c2bf Mon Sep 17 00:00:00 2001 From: Bouwe Westerdijk <11290930+bouwew@users.noreply.github.com> Date: Thu, 5 Sep 2024 20:16:11 +0200 Subject: [PATCH 1472/1635] Bump plugwise to v1.0.0 (#125354) --- homeassistant/components/plugwise/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/plugwise/manifest.json b/homeassistant/components/plugwise/manifest.json index 10faf75d0f1..6ac5254b424 100644 --- a/homeassistant/components/plugwise/manifest.json +++ b/homeassistant/components/plugwise/manifest.json @@ -7,6 +7,6 @@ "integration_type": "hub", "iot_class": "local_polling", "loggers": ["plugwise"], - "requirements": ["plugwise==0.38.3"], + "requirements": ["plugwise==1.0.0"], "zeroconf": ["_plugwise._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index e95011f247b..311b6aeb8ae 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1597,7 +1597,7 @@ plexauth==0.0.6 
plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.38.3 +plugwise==1.0.0 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 1657969b7e5..598324ea428 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1304,7 +1304,7 @@ plexauth==0.0.6 plexwebsocket==0.0.14 # homeassistant.components.plugwise -plugwise==0.38.3 +plugwise==1.0.0 # homeassistant.components.plum_lightpad plumlightpad==0.0.11 From d686b877b14134caa8f234551b3746a83fd47e47 Mon Sep 17 00:00:00 2001 From: Robert Contreras Date: Thu, 5 Sep 2024 11:52:12 -0700 Subject: [PATCH 1473/1635] Home Connect add FridgeFreezer switch entities (#122881) * Home Connect add FridgeFreezer switch entities * Fix unrelated test * Implemented requested changes from review * Move exist_fn check code to setup * Assign entity_description during init * Resolve issue with functional testing --- .../components/home_connect/const.py | 7 ++ .../components/home_connect/icons.json | 10 ++ .../components/home_connect/switch.py | 106 +++++++++++++++- .../home_connect/fixtures/settings.json | 30 +++++ tests/components/home_connect/test_init.py | 5 +- tests/components/home_connect/test_switch.py | 118 +++++++++++++++++- 6 files changed, 269 insertions(+), 7 deletions(-) diff --git a/homeassistant/components/home_connect/const.py b/homeassistant/components/home_connect/const.py index b54637bb524..4c21201c37a 100644 --- a/homeassistant/components/home_connect/const.py +++ b/homeassistant/components/home_connect/const.py @@ -23,6 +23,13 @@ BSH_OPERATION_STATE_FINISHED = "BSH.Common.EnumType.OperationState.Finished" COOKING_LIGHTING = "Cooking.Common.Setting.Lighting" COOKING_LIGHTING_BRIGHTNESS = "Cooking.Common.Setting.LightingBrightness" + +REFRIGERATION_SUPERMODEFREEZER = "Refrigeration.FridgeFreezer.Setting.SuperModeFreezer" +REFRIGERATION_SUPERMODEREFRIGERATOR = ( + "Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator" +) +REFRIGERATION_DISPENSER = "Refrigeration.Common.Setting.Dispenser.Enabled" + BSH_AMBIENT_LIGHT_ENABLED = "BSH.Common.Setting.AmbientLightEnabled" BSH_AMBIENT_LIGHT_BRIGHTNESS = "BSH.Common.Setting.AmbientLightBrightness" BSH_AMBIENT_LIGHT_COLOR = "BSH.Common.Setting.AmbientLightColor" diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index 33617f5472e..163c03b297c 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -21,5 +21,15 @@ "change_setting": { "service": "mdi:cog" } + }, + "entity": { + "switch": { + "refrigeration_dispenser": { + "default": "mdi:snowflake", + "state": { + "off": "mdi:snowflake-off" + } + } + } } } diff --git a/homeassistant/components/home_connect/switch.py b/homeassistant/components/home_connect/switch.py index 8c7ef2eb11a..80e8e4b2d39 100644 --- a/homeassistant/components/home_connect/switch.py +++ b/homeassistant/components/home_connect/switch.py @@ -1,16 +1,18 @@ """Provides a switch for Home Connect.""" +from dataclasses import dataclass import logging from typing import Any from homeconnect.api import HomeConnectError -from homeassistant.components.switch import SwitchEntity +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE, CONF_ENTITIES from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform 
import AddEntitiesCallback +from .api import ConfigEntryAuth from .const import ( ATTR_VALUE, BSH_ACTIVE_PROGRAM, @@ -19,12 +21,39 @@ from .const import ( BSH_POWER_ON, BSH_POWER_STATE, DOMAIN, + REFRIGERATION_DISPENSER, + REFRIGERATION_SUPERMODEFREEZER, + REFRIGERATION_SUPERMODEREFRIGERATOR, ) -from .entity import HomeConnectEntity +from .entity import HomeConnectDevice, HomeConnectEntity _LOGGER = logging.getLogger(__name__) +@dataclass(frozen=True, kw_only=True) +class HomeConnectSwitchEntityDescription(SwitchEntityDescription): + """Switch entity description.""" + + on_key: str + + +SWITCHES: tuple[HomeConnectSwitchEntityDescription, ...] = ( + HomeConnectSwitchEntityDescription( + key="Supermode Freezer", + on_key=REFRIGERATION_SUPERMODEFREEZER, + ), + HomeConnectSwitchEntityDescription( + key="Supermode Refrigerator", + on_key=REFRIGERATION_SUPERMODEREFRIGERATOR, + ), + HomeConnectSwitchEntityDescription( + key="Dispenser Enabled", + on_key=REFRIGERATION_DISPENSER, + translation_key="refrigeration_dispenser", + ), +) + + async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -35,18 +64,87 @@ async def async_setup_entry( def get_entities(): """Get a list of entities.""" entities = [] - hc_api = hass.data[DOMAIN][config_entry.entry_id] + hc_api: ConfigEntryAuth = hass.data[DOMAIN][config_entry.entry_id] for device_dict in hc_api.devices: entity_dicts = device_dict.get(CONF_ENTITIES, {}).get("switch", []) entity_list = [HomeConnectProgramSwitch(**d) for d in entity_dicts] entity_list += [HomeConnectPowerSwitch(device_dict[CONF_DEVICE])] entity_list += [HomeConnectChildLockSwitch(device_dict[CONF_DEVICE])] - entities += entity_list + # Auto-discover entities + hc_device: HomeConnectDevice = device_dict[CONF_DEVICE] + entities.extend( + HomeConnectSwitch(device=hc_device, entity_description=description) + for description in SWITCHES + if description.on_key in hc_device.appliance.status + ) + entities.extend(entity_list) + return entities async_add_entities(await hass.async_add_executor_job(get_entities), True) +class HomeConnectSwitch(HomeConnectEntity, SwitchEntity): + """Generic switch class for Home Connect Binary Settings.""" + + entity_description: HomeConnectSwitchEntityDescription + _attr_available: bool = False + + def __init__( + self, + device: HomeConnectDevice, + entity_description: HomeConnectSwitchEntityDescription, + ) -> None: + """Initialize the entity.""" + self.entity_description = entity_description + super().__init__(device=device, desc=entity_description.key) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on setting.""" + + _LOGGER.debug("Turning on %s", self.entity_description.key) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, self.entity_description.on_key, True + ) + except HomeConnectError as err: + _LOGGER.error("Error while trying to turn on: %s", err) + self._attr_available = False + return + + self._attr_available = True + self.async_entity_update() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off setting.""" + + _LOGGER.debug("Turning off %s", self.entity_description.key) + try: + await self.hass.async_add_executor_job( + self.device.appliance.set_setting, self.entity_description.on_key, False + ) + except HomeConnectError as err: + _LOGGER.error("Error while trying to turn off: %s", err) + self._attr_available = False + return + + self._attr_available = True + self.async_entity_update() + + async def async_update(self) -> None: + """Update the 
switch's status.""" + + self._attr_is_on = self.device.appliance.status.get( + self.entity_description.on_key, {} + ).get(ATTR_VALUE) + self._attr_available = True + _LOGGER.debug( + "Updated %s, new state: %s", + self.entity_description.key, + self._attr_is_on, + ) + + class HomeConnectProgramSwitch(HomeConnectEntity, SwitchEntity): """Switch class for Home Connect.""" diff --git a/tests/components/home_connect/fixtures/settings.json b/tests/components/home_connect/fixtures/settings.json index eb6a5f5ff98..29d431419c6 100644 --- a/tests/components/home_connect/fixtures/settings.json +++ b/tests/components/home_connect/fixtures/settings.json @@ -111,5 +111,35 @@ } ] } + }, + "FridgeFreezer": { + "data": { + "settings": [ + { + "key": "Refrigeration.FridgeFreezer.Setting.SuperModeFreezer", + "value": false, + "type": "Boolean", + "constraints": { + "access": "readWrite" + } + }, + { + "key": "Refrigeration.FridgeFreezer.Setting.SuperModeRefrigerator", + "value": false, + "type": "Boolean", + "constraints": { + "access": "readWrite" + } + }, + { + "key": "Refrigeration.Common.Setting.Dispenser.Enabled", + "value": false, + "type": "Boolean", + "constraints": { + "access": "readWrite" + } + } + ] + } } } diff --git a/tests/components/home_connect/test_init.py b/tests/components/home_connect/test_init.py index 02d9bcaa208..adfb4ff7a1d 100644 --- a/tests/components/home_connect/test_init.py +++ b/tests/components/home_connect/test_init.py @@ -9,6 +9,7 @@ import pytest from requests import HTTPError import requests_mock +from homeassistant.components.home_connect import SCAN_INTERVAL from homeassistant.components.home_connect.const import DOMAIN, OAUTH2_TOKEN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant @@ -155,14 +156,14 @@ async def test_update_throttle( # First re-load after 1 minute is not blocked. assert await hass.config_entries.async_unload(config_entry.entry_id) assert config_entry.state == ConfigEntryState.NOT_LOADED - freezer.tick(60) + freezer.tick(SCAN_INTERVAL.seconds + 0.1) assert await hass.config_entries.async_setup(config_entry.entry_id) assert get_appliances.call_count == get_appliances_call_count + 1 # Second re-load is blocked by Throttle. 
assert await hass.config_entries.async_unload(config_entry.entry_id) assert config_entry.state == ConfigEntryState.NOT_LOADED - freezer.tick(59) + freezer.tick(SCAN_INTERVAL.seconds - 0.1) assert await hass.config_entries.async_setup(config_entry.entry_id) assert get_appliances.call_count == get_appliances_call_count + 1 diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index c6a7b384036..3ab550ad0af 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -3,7 +3,7 @@ from collections.abc import Awaitable, Callable, Generator from unittest.mock import MagicMock, Mock -from homeconnect.api import HomeConnectError +from homeconnect.api import HomeConnectAppliance, HomeConnectError import pytest from homeassistant.components.home_connect.const import ( @@ -13,10 +13,12 @@ from homeassistant.components.home_connect.const import ( BSH_POWER_OFF, BSH_POWER_ON, BSH_POWER_STATE, + REFRIGERATION_SUPERMODEFREEZER, ) from homeassistant.components.switch import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( + ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, @@ -214,3 +216,117 @@ async def test_switch_exception_handling( DOMAIN, service, {"entity_id": entity_id}, blocking=True ) assert getattr(problematic_appliance, mock_attr).call_count == 2 + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "state", "appliance"), + [ + ( + "switch.fridgefreezer_supermode_freezer", + {REFRIGERATION_SUPERMODEFREEZER: {"value": True}}, + SERVICE_TURN_ON, + STATE_ON, + "FridgeFreezer", + ), + ( + "switch.fridgefreezer_supermode_freezer", + {REFRIGERATION_SUPERMODEFREEZER: {"value": False}}, + SERVICE_TURN_OFF, + STATE_OFF, + "FridgeFreezer", + ), + ], + indirect=["appliance"], +) +async def test_ent_desc_switch_functionality( + entity_id: str, + status: dict, + service: str, + state: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + config_entry: MockConfigEntry, + integration_setup: Callable[[], Awaitable[bool]], + setup_credentials: None, + appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test switch functionality - entity description setup.""" + appliance.status.update( + HomeConnectAppliance.json2dict( + load_json_object_fixture("home_connect/settings.json") + .get(appliance.name) + .get("data") + .get("settings") + ) + ) + get_appliances.return_value = [appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + appliance.status.update(status) + await hass.services.async_call( + DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert hass.states.is_state(entity_id, state) + + +@pytest.mark.parametrize( + ("entity_id", "status", "service", "mock_attr", "problematic_appliance"), + [ + ( + "switch.fridgefreezer_supermode_freezer", + {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, + SERVICE_TURN_ON, + "set_setting", + "FridgeFreezer", + ), + ( + "switch.fridgefreezer_supermode_freezer", + {REFRIGERATION_SUPERMODEFREEZER: {"value": ""}}, + SERVICE_TURN_OFF, + "set_setting", + "FridgeFreezer", + ), + ], + indirect=["problematic_appliance"], +) +async def test_ent_desc_switch_exception_handling( + entity_id: str, + status: dict, + service: str, + mock_attr: str, + bypass_throttle: Generator[None], + hass: HomeAssistant, + integration_setup: Callable[[], Awaitable[bool]], + 
config_entry: MockConfigEntry, + setup_credentials: None, + problematic_appliance: Mock, + get_appliances: MagicMock, +) -> None: + """Test switch exception handling - entity description setup.""" + problematic_appliance.status.update( + HomeConnectAppliance.json2dict( + load_json_object_fixture("home_connect/settings.json") + .get(problematic_appliance.name) + .get("data") + .get("settings") + ) + ) + get_appliances.return_value = [problematic_appliance] + + assert config_entry.state == ConfigEntryState.NOT_LOADED + assert await integration_setup() + assert config_entry.state == ConfigEntryState.LOADED + + # Assert that an exception is called. + with pytest.raises(HomeConnectError): + getattr(problematic_appliance, mock_attr)() + + problematic_appliance.status.update(status) + await hass.services.async_call( + DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + assert getattr(problematic_appliance, mock_attr).call_count == 2 From 48c9361c01f773e975f42408978e3f1546483b88 Mon Sep 17 00:00:00 2001 From: YogevBokobza Date: Thu, 5 Sep 2024 22:34:11 +0300 Subject: [PATCH 1474/1635] Bump aioswitcher to 4.0.3 (#125355) --- homeassistant/components/switcher_kis/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 75ace60e942..f9956621ca6 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_push", "loggers": ["aioswitcher"], "quality_scale": "platinum", - "requirements": ["aioswitcher==4.0.2"], + "requirements": ["aioswitcher==4.0.3"], "single_config_entry": true } diff --git a/requirements_all.txt b/requirements_all.txt index 311b6aeb8ae..77f7e50674a 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -374,7 +374,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==4.0.2 +aioswitcher==4.0.3 # homeassistant.components.syncthing aiosyncthing==0.5.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 598324ea428..e67ff882b1a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -356,7 +356,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==4.0.2 +aioswitcher==4.0.3 # homeassistant.components.syncthing aiosyncthing==0.5.1 From bbeecb40aeb6d4dfeed96f3a12ab027731921ac0 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Sep 2024 21:35:24 +0200 Subject: [PATCH 1475/1635] Remove deprecated aux_heat from zha (#125247) Remove aux_heat from zha --- homeassistant/components/zha/climate.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/homeassistant/components/zha/climate.py b/homeassistant/components/zha/climate.py index f4fb58c254a..fcf5afb5ac5 100644 --- a/homeassistant/components/zha/climate.py +++ b/homeassistant/components/zha/climate.py @@ -120,8 +120,6 @@ class Thermostat(ZHAEntity, ClimateEntity): features |= ClimateEntityFeature.FAN_MODE if ZHAClimateEntityFeature.SWING_MODE in zha_features: features |= ClimateEntityFeature.SWING_MODE - if ZHAClimateEntityFeature.AUX_HEAT in zha_features: - features |= ClimateEntityFeature.AUX_HEAT if ZHAClimateEntityFeature.TURN_OFF in zha_features: features |= ClimateEntityFeature.TURN_OFF if ZHAClimateEntityFeature.TURN_ON in zha_features: From 9e312f2063bde5a24e1e349f3c9466a4d419a534 Mon Sep 17 
00:00:00 2001 From: Mark Ruys Date: Thu, 5 Sep 2024 21:37:44 +0200 Subject: [PATCH 1476/1635] Add Sensoterra integration (#119642) * Initial version * Baseline release * Refactor based on first PR feedback * Refactoring based on second PR feedback * Initial version * Baseline release * Refactor based on first PR feedback * Refactoring based on second PR feedback * Refactoring based on PR feedback * Refactoring based on PR feedback * Remove extra attribute soil type Soil type isn't really a sensor, but more like a configuration entity. Move soil type to a different PR to keep this PR simpler. * Refactor SensoterraSensor to a named tuple * Implement feedback on PR * Remove .coveragerc * Add async_set_unique_id to config flow * Small fix based on feedback * Add test form unique_id * Fix * Fix --------- Co-authored-by: Joostlek --- .strict-typing | 1 + CODEOWNERS | 2 + .../components/sensoterra/__init__.py | 38 ++++ .../components/sensoterra/config_flow.py | 90 +++++++++ homeassistant/components/sensoterra/const.py | 10 + .../components/sensoterra/coordinator.py | 54 ++++++ .../components/sensoterra/manifest.json | 10 + homeassistant/components/sensoterra/sensor.py | 172 ++++++++++++++++++ .../components/sensoterra/strings.json | 38 ++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + mypy.ini | 10 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/sensoterra/__init__.py | 1 + tests/components/sensoterra/conftest.py | 32 ++++ tests/components/sensoterra/const.py | 7 + .../components/sensoterra/test_config_flow.py | 123 +++++++++++++ 18 files changed, 601 insertions(+) create mode 100644 homeassistant/components/sensoterra/__init__.py create mode 100644 homeassistant/components/sensoterra/config_flow.py create mode 100644 homeassistant/components/sensoterra/const.py create mode 100644 homeassistant/components/sensoterra/coordinator.py create mode 100644 homeassistant/components/sensoterra/manifest.json create mode 100644 homeassistant/components/sensoterra/sensor.py create mode 100644 homeassistant/components/sensoterra/strings.json create mode 100644 tests/components/sensoterra/__init__.py create mode 100644 tests/components/sensoterra/conftest.py create mode 100644 tests/components/sensoterra/const.py create mode 100644 tests/components/sensoterra/test_config_flow.py diff --git a/.strict-typing b/.strict-typing index e93f1589cc8..1a5133efe89 100644 --- a/.strict-typing +++ b/.strict-typing @@ -401,6 +401,7 @@ homeassistant.components.select.* homeassistant.components.sensibo.* homeassistant.components.sensirion_ble.* homeassistant.components.sensor.* +homeassistant.components.sensoterra.* homeassistant.components.senz.* homeassistant.components.sfr_box.* homeassistant.components.shelly.* diff --git a/CODEOWNERS b/CODEOWNERS index 42d96ceb941..edd10858e8d 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1288,6 +1288,8 @@ build.json @home-assistant/supervisor /tests/components/sensorpro/ @bdraco /homeassistant/components/sensorpush/ @bdraco /tests/components/sensorpush/ @bdraco +/homeassistant/components/sensoterra/ @markruys +/tests/components/sensoterra/ @markruys /homeassistant/components/sentry/ @dcramer @frenck /tests/components/sentry/ @dcramer @frenck /homeassistant/components/senz/ @milanmeu diff --git a/homeassistant/components/sensoterra/__init__.py b/homeassistant/components/sensoterra/__init__.py new file mode 100644 index 00000000000..b1428351f09 --- /dev/null +++ b/homeassistant/components/sensoterra/__init__.py 
@@ -0,0 +1,38 @@ +"""The Sensoterra integration.""" + +from __future__ import annotations + +from sensoterra.customerapi import CustomerApi + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_TOKEN, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SensoterraCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type SensoterraConfigEntry = ConfigEntry[SensoterraCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: SensoterraConfigEntry) -> bool: + """Set up Sensoterra platform based on a configuration entry.""" + + # Create a coordinator and add an API instance to it. Store the coordinator + # in the configuration entry. + api = CustomerApi() + api.set_language(hass.config.language) + api.set_token(entry.data[CONF_TOKEN]) + + coordinator = SensoterraCoordinator(hass, api) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SensoterraConfigEntry) -> bool: + """Unload the configuration entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/sensoterra/config_flow.py b/homeassistant/components/sensoterra/config_flow.py new file mode 100644 index 00000000000..c98710dfa7d --- /dev/null +++ b/homeassistant/components/sensoterra/config_flow.py @@ -0,0 +1,90 @@ +"""Config flow for Sensoterra integration.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any + +from jwt import DecodeError, decode +from sensoterra.customerapi import ( + CustomerApi, + InvalidAuth as StInvalidAuth, + Timeout as StTimeout, +) +import voluptuous as vol + +from homeassistant.config_entries import SOURCE_USER, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN +from homeassistant.helpers.selector import ( + TextSelector, + TextSelectorConfig, + TextSelectorType, +) + +from .const import DOMAIN, LOGGER, TOKEN_EXPIRATION_DAYS + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): TextSelector( + TextSelectorConfig(type=TextSelectorType.EMAIL, autocomplete="email") + ), + vol.Required(CONF_PASSWORD): TextSelector( + TextSelectorConfig(type=TextSelectorType.PASSWORD) + ), + } +) + + +class SensoterraConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Sensoterra.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Create hub entry based on config flow.""" + errors: dict[str, str] = {} + + if user_input is not None: + api = CustomerApi(user_input[CONF_EMAIL], user_input[CONF_PASSWORD]) + # We need a unique tag per HA instance + uuid = self.hass.data["core.uuid"] + expiration = datetime.now() + timedelta(TOKEN_EXPIRATION_DAYS) + + try: + token: str = await api.get_token( + f"Home Assistant {uuid}", "READONLY", expiration + ) + decoded_token = decode( + token, algorithms=["HS256"], options={"verify_signature": False} + ) + + except StInvalidAuth as exp: + LOGGER.error( + "Login attempt with %s: %s", user_input[CONF_EMAIL], exp.message + ) + errors["base"] = "invalid_auth" + except StTimeout: + LOGGER.error("Login attempt with %s: time out", user_input[CONF_EMAIL]) + errors["base"] = "cannot_connect" + except DecodeError: + LOGGER.error("Login attempt with %s: bad token", 
user_input[CONF_EMAIL]) + errors["base"] = "invalid_access_token" + else: + device_unique_id = decoded_token["sub"] + await self.async_set_unique_id(device_unique_id) + self._abort_if_unique_id_configured() + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_TOKEN: token, + CONF_EMAIL: user_input[CONF_EMAIL], + }, + ) + + return self.async_show_form( + step_id=SOURCE_USER, + data_schema=self.add_suggested_values_to_schema( + STEP_USER_DATA_SCHEMA, user_input + ), + errors=errors, + ) diff --git a/homeassistant/components/sensoterra/const.py b/homeassistant/components/sensoterra/const.py new file mode 100644 index 00000000000..7c4ccf2944c --- /dev/null +++ b/homeassistant/components/sensoterra/const.py @@ -0,0 +1,10 @@ +"""Constants for the Sensoterra integration.""" + +import logging + +DOMAIN = "sensoterra" +SCAN_INTERVAL_MINUTES = 15 +SENSOR_EXPIRATION_DAYS = 2 +TOKEN_EXPIRATION_DAYS = 10 * 365 +CONFIGURATION_URL = "https://monitor.sensoterra.com" +LOGGER: logging.Logger = logging.getLogger(__package__) diff --git a/homeassistant/components/sensoterra/coordinator.py b/homeassistant/components/sensoterra/coordinator.py new file mode 100644 index 00000000000..2dffdceb443 --- /dev/null +++ b/homeassistant/components/sensoterra/coordinator.py @@ -0,0 +1,54 @@ +"""Polling coordinator for the Sensoterra integration.""" + +from collections.abc import Callable +from datetime import timedelta + +from sensoterra.customerapi import ( + CustomerApi, + InvalidAuth as ApiAuthError, + Timeout as ApiTimeout, +) +from sensoterra.probe import Probe, Sensor + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER, SCAN_INTERVAL_MINUTES + + +class SensoterraCoordinator(DataUpdateCoordinator[list[Probe]]): + """Sensoterra coordinator.""" + + def __init__(self, hass: HomeAssistant, api: CustomerApi) -> None: + """Initialize Sensoterra coordinator.""" + super().__init__( + hass, + LOGGER, + name="Sensoterra probe", + update_interval=timedelta(minutes=SCAN_INTERVAL_MINUTES), + ) + self.api = api + self.add_devices_callback: Callable[[list[Probe]], None] | None = None + + async def _async_update_data(self) -> list[Probe]: + """Fetch data from Sensoterra Customer API endpoint.""" + try: + probes = await self.api.poll() + except ApiAuthError as err: + raise ConfigEntryError(err) from err + except ApiTimeout as err: + raise UpdateFailed("Timeout communicating with Sensotera API") from err + + if self.add_devices_callback is not None: + self.add_devices_callback(probes) + + return probes + + def get_sensor(self, id: str | None) -> Sensor | None: + """Try to find the sensor in the API result.""" + for probe in self.data: + for sensor in probe.sensors(): + if sensor.id == id: + return sensor + return None diff --git a/homeassistant/components/sensoterra/manifest.json b/homeassistant/components/sensoterra/manifest.json new file mode 100644 index 00000000000..942741fdb2f --- /dev/null +++ b/homeassistant/components/sensoterra/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "sensoterra", + "name": "Sensoterra", + "codeowners": ["@markruys"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/sensoterra", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["sensoterra==2.0.1"] +} diff --git a/homeassistant/components/sensoterra/sensor.py 
b/homeassistant/components/sensoterra/sensor.py new file mode 100644 index 00000000000..7e9f4d0840e --- /dev/null +++ b/homeassistant/components/sensoterra/sensor.py @@ -0,0 +1,172 @@ +"""Sensoterra devices.""" + +from __future__ import annotations + +from datetime import UTC, datetime, timedelta +from enum import StrEnum, auto + +from sensoterra.probe import Probe, Sensor + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + PERCENTAGE, + SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + EntityCategory, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import SensoterraConfigEntry +from .const import CONFIGURATION_URL, DOMAIN, SENSOR_EXPIRATION_DAYS +from .coordinator import SensoterraCoordinator + + +class ProbeSensorType(StrEnum): + """Generic sensors within a Sensoterra probe.""" + + MOISTURE = auto() + SI = auto() + TEMPERATURE = auto() + BATTERY = auto() + RSSI = auto() + + +SENSORS: dict[ProbeSensorType, SensorEntityDescription] = { + ProbeSensorType.MOISTURE: SensorEntityDescription( + key=ProbeSensorType.MOISTURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.MOISTURE, + native_unit_of_measurement=PERCENTAGE, + translation_key="soil_moisture_at_cm", + ), + ProbeSensorType.SI: SensorEntityDescription( + key=ProbeSensorType.SI, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + translation_key="si_at_cm", + ), + ProbeSensorType.TEMPERATURE: SensorEntityDescription( + key=ProbeSensorType.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ProbeSensorType.BATTERY: SensorEntityDescription( + key=ProbeSensorType.BATTERY, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.BATTERY, + native_unit_of_measurement=PERCENTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + ), + ProbeSensorType.RSSI: SensorEntityDescription( + key=ProbeSensorType.RSSI, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=0, + device_class=SensorDeviceClass.SIGNAL_STRENGTH, + native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + ), +} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SensoterraConfigEntry, + async_add_devices: AddEntitiesCallback, +) -> None: + """Set up Sensoterra sensor.""" + + coordinator = entry.runtime_data + + @callback + def _async_add_devices(probes: list[Probe]) -> None: + aha = coordinator.async_contexts() + current_sensors = set(aha) + async_add_devices( + SensoterraEntity( + coordinator, + probe, + sensor, + SENSORS[ProbeSensorType[sensor.type]], + ) + for probe in probes + for sensor in probe.sensors() + if sensor.type is not None + and sensor.type.lower() in SENSORS + and sensor.id not in current_sensors + ) + + coordinator.add_devices_callback = _async_add_devices + + _async_add_devices(coordinator.data) + + +class 
SensoterraEntity(CoordinatorEntity[SensoterraCoordinator], SensorEntity): + """Sensoterra sensor like a soil moisture or temperature sensor.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: SensoterraCoordinator, + probe: Probe, + sensor: Sensor, + entity_description: SensorEntityDescription, + ) -> None: + """Initialize entity.""" + super().__init__(coordinator, context=sensor.id) + + self._sensor_id = sensor.id + self._attr_unique_id = self._sensor_id + self._attr_translation_placeholders = { + "depth": "?" if sensor.depth is None else str(sensor.depth) + } + + self.entity_description = entity_description + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, probe.serial)}, + name=probe.name, + model=probe.sku, + manufacturer="Sensoterra", + serial_number=probe.serial, + suggested_area=probe.location, + configuration_url=CONFIGURATION_URL, + ) + + @property + def sensor(self) -> Sensor | None: + """Return the sensor, or None if it doesn't exist.""" + return self.coordinator.get_sensor(self._sensor_id) + + @property + def native_value(self) -> StateType: + """Return the value reported by the sensor.""" + assert self.sensor + return self.sensor.value + + @property + def available(self) -> bool: + """Return True if entity is available.""" + if not super().available or (sensor := self.sensor) is None: + return False + + if sensor.timestamp is None: + return False + + # Expire sensor if no update within the last few days. + expiration = datetime.now(UTC) - timedelta(days=SENSOR_EXPIRATION_DAYS) + return sensor.timestamp >= expiration diff --git a/homeassistant/components/sensoterra/strings.json b/homeassistant/components/sensoterra/strings.json new file mode 100644 index 00000000000..86c4f2c2912 --- /dev/null +++ b/homeassistant/components/sensoterra/strings.json @@ -0,0 +1,38 @@ +{ + "config": { + "step": { + "user": { + "description": "Enter credentials to obtain a token", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "reconfigure": { + "description": "[%key:component::sensoterra::config::step::user::description%]", + "data": { + "email": "[%key:common::config_flow::data::email%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "invalid_access_token": "[%key:common::config_flow::error::invalid_access_token%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]" + } + }, + "entity": { + "sensor": { + "soil_moisture_at_cm": { + "name": "Soil moisture @ {depth} cm" + }, + "si_at_cm": { + "name": "SI @ {depth} cm" + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index c7c8cd0f9f1..9f4b4e42bb0 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -513,6 +513,7 @@ FLOWS = { "sensirion_ble", "sensorpro", "sensorpush", + "sensoterra", "sentry", "senz", "seventeentrack", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index f6854aeb58d..b4c80aa70b4 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5391,6 +5391,12 @@ "config_flow": true, "iot_class": "local_push" }, + "sensoterra": { + "name": "Sensoterra", + 
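The sensor platform above wires a callback into the coordinator so probes reported by later polls gain entities without a reload, while sensor IDs already tracked (looked up via coordinator.async_contexts()) are skipped. A framework-free sketch of that "add only unseen IDs" idea follows; every name in it (PollResult, EntityTracker, new_ids) is invented for illustration and is not part of the integration.

from dataclasses import dataclass, field


@dataclass
class PollResult:
    """One sensor reading returned by a hypothetical poll."""

    sensor_id: str
    value: float


@dataclass
class EntityTracker:
    """Remembers which sensor IDs already have an entity."""

    known_ids: set[str] = field(default_factory=set)

    def new_ids(self, results: list[PollResult]) -> list[str]:
        """Return IDs that still need an entity and mark them as known."""
        fresh = [r.sensor_id for r in results if r.sensor_id not in self.known_ids]
        self.known_ids.update(fresh)
        return fresh


tracker = EntityTracker()
print(tracker.new_ids([PollResult("a", 31.0), PollResult("b", 28.5)]))  # ['a', 'b']
print(tracker.new_ids([PollResult("b", 29.0), PollResult("c", 30.2)]))  # ['c']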
"integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "sentry": { "name": "Sentry", "integration_type": "service", diff --git a/mypy.ini b/mypy.ini index b352d2747be..3854477b94b 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3766,6 +3766,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.sensoterra.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.senz.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/requirements_all.txt b/requirements_all.txt index 77f7e50674a..1a05bcdde77 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2609,6 +2609,9 @@ sensorpro-ble==0.5.3 # homeassistant.components.sensorpush sensorpush-ble==1.6.2 +# homeassistant.components.sensoterra +sensoterra==2.0.1 + # homeassistant.components.sentry sentry-sdk==1.40.3 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index e67ff882b1a..98a26141f1a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2067,6 +2067,9 @@ sensorpro-ble==0.5.3 # homeassistant.components.sensorpush sensorpush-ble==1.6.2 +# homeassistant.components.sensoterra +sensoterra==2.0.1 + # homeassistant.components.sentry sentry-sdk==1.40.3 diff --git a/tests/components/sensoterra/__init__.py b/tests/components/sensoterra/__init__.py new file mode 100644 index 00000000000..f70fede6c09 --- /dev/null +++ b/tests/components/sensoterra/__init__.py @@ -0,0 +1 @@ +"""Tests for the Sensoterra integration.""" diff --git a/tests/components/sensoterra/conftest.py b/tests/components/sensoterra/conftest.py new file mode 100644 index 00000000000..2e19a96543a --- /dev/null +++ b/tests/components/sensoterra/conftest.py @@ -0,0 +1,32 @@ +"""Common fixtures for the Sensoterra tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pytest + +from .const import API_TOKEN + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.sensoterra.async_setup_entry", + return_value=True, + ) as mock_entry: + yield mock_entry + + +@pytest.fixture +def mock_customer_api_client() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with ( + patch( + "homeassistant.components.sensoterra.config_flow.CustomerApi", + autospec=True, + ) as mock_client, + ): + mock = mock_client.return_value + mock.get_token.return_value = API_TOKEN + yield mock diff --git a/tests/components/sensoterra/const.py b/tests/components/sensoterra/const.py new file mode 100644 index 00000000000..c85d675f9d7 --- /dev/null +++ b/tests/components/sensoterra/const.py @@ -0,0 +1,7 @@ +"""Constants for the test Sensoterra integration.""" + +API_TOKEN = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJleHAiOjE4NTYzMDQwMDAsInN1YiI6IjM5In0.yxdXXlc1DqopqDRHfAVzFrMqZJl6nKLpu1dV8alHvVY" +API_EMAIL = "test-email@example.com" +API_PASSWORD = "test-password" +HASS_UUID = "phony-unique-id" +SOURCE_USER = "user" diff --git a/tests/components/sensoterra/test_config_flow.py b/tests/components/sensoterra/test_config_flow.py new file mode 100644 index 00000000000..23c57261741 --- /dev/null +++ b/tests/components/sensoterra/test_config_flow.py @@ -0,0 +1,123 @@ +"""Test the Sensoterra config 
flow.""" + +from unittest.mock import AsyncMock + +from jwt import DecodeError +import pytest +from sensoterra.customerapi import InvalidAuth as StInvalidAuth, Timeout as StTimeout + +from homeassistant.components.sensoterra.const import DOMAIN +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .const import API_EMAIL, API_PASSWORD, API_TOKEN, HASS_UUID, SOURCE_USER + +from tests.common import MockConfigEntry + + +async def test_full_flow( + hass: HomeAssistant, + mock_customer_api_client: AsyncMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test we can finish a config flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + hass.data["core.uuid"] = HASS_UUID + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == API_EMAIL + assert result["data"] == { + CONF_TOKEN: API_TOKEN, + CONF_EMAIL: API_EMAIL, + } + + assert len(mock_customer_api_client.mock_calls) == 1 + + +async def test_form_unique_id( + hass: HomeAssistant, mock_customer_api_client: AsyncMock +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + hass.data["core.uuid"] = HASS_UUID + + entry = MockConfigEntry(unique_id="39", domain=DOMAIN) + entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert len(mock_customer_api_client.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error"), + [ + (StTimeout, "cannot_connect"), + (StInvalidAuth("Invalid credentials"), "invalid_auth"), + (DecodeError("Bad API token"), "invalid_access_token"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_customer_api_client: AsyncMock, + exception: Exception, + error: str, +) -> None: + """Test we handle config form exceptions.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + hass.data["core.uuid"] = HASS_UUID + + mock_customer_api_client.get_token.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + assert result["errors"] == {"base": error} + assert result["type"] is FlowResultType.FORM + + mock_customer_api_client.get_token.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: API_EMAIL, + CONF_PASSWORD: API_PASSWORD, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == API_EMAIL + assert result["data"] == { + CONF_TOKEN: API_TOKEN, + CONF_EMAIL: API_EMAIL, + } + assert len(mock_customer_api_client.mock_calls) == 2 From 2c0c0b9e2151392763ddf1f925c6391de3c4172b Mon Sep 17 00:00:00 2001 From: G Johansson Date: Thu, 5 Sep 2024 22:34:35 +0200 Subject: [PATCH 1477/1635] Extend deprecation of aux_heat in ClimateEntity (#125360) --- 
homeassistant/components/climate/__init__.py | 4 ++-- tests/components/climate/test_init.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index 6097e4f1346..f752a3dcc7a 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -429,7 +429,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): ( "%s::%s implements the `is_aux_heat` property or uses the auxiliary " "heater methods in a subclass of ClimateEntity which is " - "deprecated and will be unsupported from Home Assistant 2024.10." + "deprecated and will be unsupported from Home Assistant 2025.4." " Please %s" ), self.platform.platform_name, @@ -451,7 +451,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): self.hass, DOMAIN, f"deprecated_climate_aux_{self.platform.platform_name}", - breaks_in_ha_version="2024.10.0", + breaks_in_ha_version="2025.4.0", is_fixable=False, is_persistent=False, issue_domain=self.platform.platform_name, diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 256ecf92b1d..64c94ccfc6f 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -826,7 +826,7 @@ async def test_issue_aux_property_deprecated( assert ( "test::MockClimateEntityWithAux implements the `is_aux_heat` property or uses " "the auxiliary heater methods in a subclass of ClimateEntity which is deprecated " - f"and will be unsupported from Home Assistant 2024.10. Please {report}" + f"and will be unsupported from Home Assistant 2025.4. Please {report}" ) in caplog.text # Assert we only log warning once From 56b4ddc6b457eea7203987e776cce5086d63fce5 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 5 Sep 2024 16:52:17 -0400 Subject: [PATCH 1478/1635] Add model ID to Sonos (#125364) --- homeassistant/components/sonos/entity.py | 1 + tests/components/sonos/test_media_player.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/sonos/entity.py b/homeassistant/components/sonos/entity.py index bd7256493e8..98dc8b8b752 100644 --- a/homeassistant/components/sonos/entity.py +++ b/homeassistant/components/sonos/entity.py @@ -85,6 +85,7 @@ class SonosEntity(Entity): identifiers={(DOMAIN, self.soco.uid)}, name=self.speaker.zone_name, model=self.speaker.model_name.replace("Sonos ", ""), + model_id=self.speaker.model_number, sw_version=self.speaker.version, connections={ (dr.CONNECTION_NETWORK_MAC, self.speaker.mac_address), diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index ae3928c5ff6..9887601a0a3 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -72,6 +72,7 @@ async def test_device_registry( ) assert reg_device is not None assert reg_device.model == "Model Name" + assert reg_device.model_id == "S12" assert reg_device.sw_version == "13.1" assert reg_device.connections == { (CONNECTION_NETWORK_MAC, "00:11:22:33:44:55"), From 006b2da14ea7b3c573d13e25697583e16ebb48e8 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 5 Sep 2024 16:52:45 -0400 Subject: [PATCH 1479/1635] Add model ID to roborock (#125366) --- homeassistant/components/roborock/coordinator.py | 1 + tests/components/roborock/test_vacuum.py | 11 ++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git 
a/homeassistant/components/roborock/coordinator.py b/homeassistant/components/roborock/coordinator.py index 615d18c3019..6b520ba10d6 100644 --- a/homeassistant/components/roborock/coordinator.py +++ b/homeassistant/components/roborock/coordinator.py @@ -63,6 +63,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]): identifiers={(DOMAIN, self.roborock_device_info.device.duid)}, manufacturer="Roborock", model=self.roborock_device_info.product.model, + model_id=self.roborock_device_info.product.model, sw_version=self.roborock_device_info.device.fv, ) self.current_map: int | None = None diff --git a/tests/components/roborock/test_vacuum.py b/tests/components/roborock/test_vacuum.py index 15a64cbecf3..5080711d0f9 100644 --- a/tests/components/roborock/test_vacuum.py +++ b/tests/components/roborock/test_vacuum.py @@ -24,7 +24,7 @@ from homeassistant.components.vacuum import ( from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .mock_data import PROP @@ -38,12 +38,17 @@ DEVICE_ID = "abc123" async def test_registry_entries( hass: HomeAssistant, entity_registry: er.EntityRegistry, + device_registry: dr.DeviceRegistry, bypass_api_fixture, setup_entry: MockConfigEntry, ) -> None: """Tests devices are registered in the entity registry.""" - entry = entity_registry.async_get(ENTITY_ID) - assert entry.unique_id == DEVICE_ID + entity_entry = entity_registry.async_get(ENTITY_ID) + assert entity_entry.unique_id == DEVICE_ID + + device_entry = device_registry.async_get(entity_entry.device_id) + assert device_entry is not None + assert device_entry.model_id == "roborock.vacuum.a27" @pytest.mark.parametrize( From 97ffbf5aad41e7309c31f9170d1ed80d4c9b4892 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 5 Sep 2024 17:03:37 -0400 Subject: [PATCH 1480/1635] Add model ID to samsungtv (#125369) --- homeassistant/components/samsungtv/entity.py | 1 + tests/components/samsungtv/snapshots/test_init.ambr | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/samsungtv/entity.py b/homeassistant/components/samsungtv/entity.py index 030eaf98d9b..1af7495d78e 100644 --- a/homeassistant/components/samsungtv/entity.py +++ b/homeassistant/components/samsungtv/entity.py @@ -42,6 +42,7 @@ class SamsungTVEntity(CoordinatorEntity[SamsungTVDataUpdateCoordinator], Entity) name=config_entry.data.get(CONF_NAME), manufacturer=config_entry.data.get(CONF_MANUFACTURER), model=config_entry.data.get(CONF_MODEL), + model_id=config_entry.data.get(CONF_MODEL), ) if self.unique_id: self._attr_device_info[ATTR_IDENTIFIERS] = {(DOMAIN, self.unique_id)} diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 061b5bc1836..017a2bc3e60 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -72,7 +72,7 @@ }), 'manufacturer': None, 'model': '82GXARRS', - 'model_id': None, + 'model_id': '82GXARRS', 'name': 'fake', 'name_by_user': None, 'primary_config_entry': , From 0677a256ecfe922943b3ce7d2cfd98a10e1549e0 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 5 Sep 2024 17:03:50 -0400 Subject: [PATCH 1481/1635] Add model ID to Wemo (#125368) --- 
homeassistant/components/wemo/coordinator.py | 1 + tests/components/wemo/conftest.py | 1 + tests/components/wemo/test_coordinator.py | 1 + tests/components/wemo/test_init.py | 1 + 4 files changed, 4 insertions(+) diff --git a/homeassistant/components/wemo/coordinator.py b/homeassistant/components/wemo/coordinator.py index a186b666470..1f25c12f7ca 100644 --- a/homeassistant/components/wemo/coordinator.py +++ b/homeassistant/components/wemo/coordinator.py @@ -275,6 +275,7 @@ def _device_info(wemo: WeMoDevice) -> DeviceInfo: identifiers={(DOMAIN, wemo.serial_number)}, manufacturer="Belkin", model=wemo.model_name, + model_id=wemo.model, name=wemo.name, sw_version=wemo.firmware_version, ) diff --git a/tests/components/wemo/conftest.py b/tests/components/wemo/conftest.py index 64bd89f4793..fee981484ef 100644 --- a/tests/components/wemo/conftest.py +++ b/tests/components/wemo/conftest.py @@ -65,6 +65,7 @@ def create_pywemo_device( device.name = MOCK_NAME device.serial_number = MOCK_SERIAL_NUMBER device.model_name = pywemo_model.replace("LongPress", "") + device.model = device.model_name device.udn = f"uuid:{device.model_name}-1_0-{device.serial_number}" device.firmware_version = MOCK_FIRMWARE_VERSION device.get_state.return_value = 0 # Default to Off diff --git a/tests/components/wemo/test_coordinator.py b/tests/components/wemo/test_coordinator.py index f524633e701..17061aea2f6 100644 --- a/tests/components/wemo/test_coordinator.py +++ b/tests/components/wemo/test_coordinator.py @@ -178,6 +178,7 @@ async def test_device_info( } assert device_entries[0].manufacturer == "Belkin" assert device_entries[0].model == "LightSwitch" + assert device_entries[0].model_id == "LightSwitch" assert device_entries[0].sw_version == MOCK_FIRMWARE_VERSION diff --git a/tests/components/wemo/test_init.py b/tests/components/wemo/test_init.py index 48d8f8eac03..4a38775d331 100644 --- a/tests/components/wemo/test_init.py +++ b/tests/components/wemo/test_init.py @@ -201,6 +201,7 @@ async def test_discovery( device.name = f"{MOCK_NAME}_{counter}" device.serial_number = f"{MOCK_SERIAL_NUMBER}_{counter}" device.model_name = "Motion" + device.model = "Motion" device.udn = f"uuid:{device.model_name}-1_0-{device.serial_number}" device.firmware_version = MOCK_FIRMWARE_VERSION device.get_state.return_value = 0 # Default to Off From aa619c5594efd8ce7a3bec181f3a91a36666479b Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 5 Sep 2024 19:42:50 -0400 Subject: [PATCH 1482/1635] Add model ID to awair (#125373) * Add model ID to awair * less diff --- homeassistant/components/awair/sensor.py | 1 + tests/components/awair/test_sensor.py | 29 +++++++++++++++++------- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/homeassistant/components/awair/sensor.py b/homeassistant/components/awair/sensor.py index b9a226e9c2c..a62a15368be 100644 --- a/homeassistant/components/awair/sensor.py +++ b/homeassistant/components/awair/sensor.py @@ -293,6 +293,7 @@ class AwairSensor(CoordinatorEntity[AwairDataUpdateCoordinator], SensorEntity): identifiers={(DOMAIN, self._device.uuid)}, manufacturer="Awair", model=self._device.model, + model_id=self._device.device_type, name=( self._device.name or cast(ConfigEntry, self.coordinator.config_entry).title diff --git a/tests/components/awair/test_sensor.py b/tests/components/awair/test_sensor.py index 8af1fdd9c7c..8c9cd6e3a24 100644 --- a/tests/components/awair/test_sensor.py +++ b/tests/components/awair/test_sensor.py @@ -29,7 +29,7 @@ from homeassistant.const import ( UnitOfTemperature, ) 
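# The model-ID patches above (Sonos, Roborock, Samsung TV, WeMo, Awair) follow one
# DeviceInfo convention: `model` is meant for the human-readable model name and
# `model_id` for the identifier the device itself reports; some patches simply reuse
# the same value when only one is available. A hedged, minimal sketch of the pattern;
# the domain string and the attributes on `device` are placeholders, not taken from
# any particular integration:
from typing import Any

from homeassistant.helpers.device_registry import DeviceInfo


def build_device_info(device: Any) -> DeviceInfo:
    """Return DeviceInfo exposing both the marketing name and the model identifier."""
    return DeviceInfo(
        identifiers={("example_domain", device.serial_number)},
        manufacturer="Example Vendor",
        model=device.marketing_name,  # e.g. "Awair Element"
        model_id=device.model_number,  # e.g. "awair-element"
    )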
from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from . import setup_awair @@ -48,16 +48,24 @@ SENSOR_TYPES_MAP = { def assert_expected_properties( hass: HomeAssistant, - registry: er.RegistryEntry, - name, - unique_id, - state_value, + entity_registry: er.RegistryEntry, + name: str, + unique_id: str, + state_value: str, attributes: dict, + model="Awair", + model_id="awair", ): """Assert expected properties from a dict.""" + entity_entry = entity_registry.async_get(name) + assert entity_entry.unique_id == unique_id + + device_registry = dr.async_get(hass) + device_entry = device_registry.async_get(entity_entry.device_id) + assert device_entry is not None + assert device_entry.model == model + assert device_entry.model_id == model_id - entry = registry.async_get(name) - assert entry.unique_id == unique_id state = hass.states.get(name) assert state assert state.state == state_value @@ -201,7 +209,10 @@ async def test_awair_gen2_sensors( async def test_local_awair_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, local_devices, local_data + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + local_devices, + local_data, ) -> None: """Test expected sensors on a local Awair.""" @@ -215,6 +226,8 @@ async def test_local_awair_sensors( f"{local_devices['device_uuid']}_{SENSOR_TYPES_MAP[API_SCORE].unique_id_tag}", "94", {}, + model="Awair Element", + model_id="awair-element", ) From c3921f2112d7c8841b0e323f71f7ac0a800f1550 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Thu, 5 Sep 2024 19:44:28 -0400 Subject: [PATCH 1483/1635] Add model ID to unifiprotect (#125376) --- homeassistant/components/unifiprotect/entity.py | 3 ++- tests/components/unifiprotect/test_camera.py | 10 +++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/unifiprotect/entity.py b/homeassistant/components/unifiprotect/entity.py index 17b9f7c4fe9..34b4ec085af 100644 --- a/homeassistant/components/unifiprotect/entity.py +++ b/homeassistant/components/unifiprotect/entity.py @@ -278,7 +278,8 @@ class ProtectDeviceEntity(BaseProtectEntity): self._attr_device_info = DeviceInfo( name=self.device.display_name, manufacturer=DEFAULT_BRAND, - model=self.device.type, + model=self.device.market_name or self.device.type, + model_id=self.device.type, via_device=(DOMAIN, self.data.api.bootstrap.nvr.mac), sw_version=self.device.firmware_version, connections={(dr.CONNECTION_NETWORK_MAC, self.device.mac)}, diff --git a/tests/components/unifiprotect/test_camera.py b/tests/components/unifiprotect/test_camera.py index 9fedb67fea4..ea7a7ae942d 100644 --- a/tests/components/unifiprotect/test_camera.py +++ b/tests/components/unifiprotect/test_camera.py @@ -32,7 +32,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import async_setup_component from .utils import ( @@ -66,6 +66,14 @@ def validate_default_camera_entity( assert entity.disabled is False assert entity.unique_id == unique_id + device_registry = dr.async_get(hass) + device = device_registry.async_get(entity.device_id) + assert device + assert device.manufacturer == "Ubiquiti" + assert device.name == 
camera_obj.name + assert device.model == camera_obj.market_name or camera_obj.type + assert device.model_id == camera_obj.type + return entity_id From 60b0f0dc5388a426e78c5c19fd0e2239c30dddfd Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Thu, 5 Sep 2024 20:16:30 -0500 Subject: [PATCH 1484/1635] Add assist satellite entity component (#125351) * Add assist_satellite * Update homeassistant/components/assist_satellite/manifest.json Co-authored-by: Paulus Schoutsen * Update homeassistant/components/assist_satellite/manifest.json Co-authored-by: Paulus Schoutsen * Add platform constant * Update Dockerfile * Apply suggestions from code review Co-authored-by: Martin Hjelmare * Address comments * Update docstring async_internal_announce * Update CODEOWNERS --------- Co-authored-by: Paulus Schoutsen Co-authored-by: Martin Hjelmare --- .core_files.yaml | 1 + .strict-typing | 1 + CODEOWNERS | 2 + .../components/assist_pipeline/__init__.py | 2 + .../components/assist_pipeline/const.py | 2 + .../components/assist_pipeline/select.py | 4 +- .../components/assist_satellite/__init__.py | 65 ++++ .../components/assist_satellite/const.py | 12 + .../components/assist_satellite/entity.py | 332 ++++++++++++++++++ .../components/assist_satellite/errors.py | 11 + .../components/assist_satellite/icons.json | 12 + .../components/assist_satellite/manifest.json | 9 + .../components/assist_satellite/services.yaml | 16 + .../components/assist_satellite/strings.json | 30 ++ .../assist_satellite/websocket_api.py | 46 +++ homeassistant/const.py | 1 + mypy.ini | 10 + script/hassfest/docker/Dockerfile | 2 +- tests/components/assist_satellite/__init__.py | 3 + tests/components/assist_satellite/conftest.py | 107 ++++++ .../assist_satellite/test_entity.py | 332 ++++++++++++++++++ .../assist_satellite/test_websocket_api.py | 192 ++++++++++ 22 files changed, 1188 insertions(+), 4 deletions(-) create mode 100644 homeassistant/components/assist_satellite/__init__.py create mode 100644 homeassistant/components/assist_satellite/const.py create mode 100644 homeassistant/components/assist_satellite/entity.py create mode 100644 homeassistant/components/assist_satellite/errors.py create mode 100644 homeassistant/components/assist_satellite/icons.json create mode 100644 homeassistant/components/assist_satellite/manifest.json create mode 100644 homeassistant/components/assist_satellite/services.yaml create mode 100644 homeassistant/components/assist_satellite/strings.json create mode 100644 homeassistant/components/assist_satellite/websocket_api.py create mode 100644 tests/components/assist_satellite/__init__.py create mode 100644 tests/components/assist_satellite/conftest.py create mode 100644 tests/components/assist_satellite/test_entity.py create mode 100644 tests/components/assist_satellite/test_websocket_api.py diff --git a/.core_files.yaml b/.core_files.yaml index 4a11d5da27c..e852a567601 100644 --- a/.core_files.yaml +++ b/.core_files.yaml @@ -14,6 +14,7 @@ core: &core base_platforms: &base_platforms - homeassistant/components/air_quality/** - homeassistant/components/alarm_control_panel/** + - homeassistant/components/assist_satellite/** - homeassistant/components/binary_sensor/** - homeassistant/components/button/** - homeassistant/components/calendar/** diff --git a/.strict-typing b/.strict-typing index 1a5133efe89..84c22d1cfca 100644 --- a/.strict-typing +++ b/.strict-typing @@ -95,6 +95,7 @@ homeassistant.components.aruba.* homeassistant.components.arwn.* homeassistant.components.aseko_pool_live.* 
homeassistant.components.assist_pipeline.* +homeassistant.components.assist_satellite.* homeassistant.components.asuswrt.* homeassistant.components.autarco.* homeassistant.components.auth.* diff --git a/CODEOWNERS b/CODEOWNERS index edd10858e8d..d2a60cbb246 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -143,6 +143,8 @@ build.json @home-assistant/supervisor /tests/components/aseko_pool_live/ @milanmeu /homeassistant/components/assist_pipeline/ @balloob @synesthesiam /tests/components/assist_pipeline/ @balloob @synesthesiam +/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam +/tests/components/assist_satellite/ @home-assistant/core @synesthesiam /homeassistant/components/asuswrt/ @kennedyshead @ollo69 /tests/components/asuswrt/ @kennedyshead @ollo69 /homeassistant/components/atag/ @MatsNL diff --git a/homeassistant/components/assist_pipeline/__init__.py b/homeassistant/components/assist_pipeline/__init__.py index 0a03402105a..ec6d8a646b6 100644 --- a/homeassistant/components/assist_pipeline/__init__.py +++ b/homeassistant/components/assist_pipeline/__init__.py @@ -17,6 +17,7 @@ from .const import ( DATA_LAST_WAKE_UP, DOMAIN, EVENT_RECORDING, + OPTION_PREFERRED, SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH, @@ -58,6 +59,7 @@ __all__ = ( "PipelineNotFound", "WakeWordSettings", "EVENT_RECORDING", + "OPTION_PREFERRED", "SAMPLES_PER_CHUNK", "SAMPLE_RATE", "SAMPLE_WIDTH", diff --git a/homeassistant/components/assist_pipeline/const.py b/homeassistant/components/assist_pipeline/const.py index f7306b89a54..300cb5aad2a 100644 --- a/homeassistant/components/assist_pipeline/const.py +++ b/homeassistant/components/assist_pipeline/const.py @@ -22,3 +22,5 @@ SAMPLE_CHANNELS = 1 # mono MS_PER_CHUNK = 10 SAMPLES_PER_CHUNK = SAMPLE_RATE // (1000 // MS_PER_CHUNK) # 10 ms @ 16Khz BYTES_PER_CHUNK = SAMPLES_PER_CHUNK * SAMPLE_WIDTH * SAMPLE_CHANNELS # 16-bit + +OPTION_PREFERRED = "preferred" diff --git a/homeassistant/components/assist_pipeline/select.py b/homeassistant/components/assist_pipeline/select.py index 5d011424e6e..c7e4846aad7 100644 --- a/homeassistant/components/assist_pipeline/select.py +++ b/homeassistant/components/assist_pipeline/select.py @@ -9,12 +9,10 @@ from homeassistant.const import EntityCategory, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import collection, entity_registry as er, restore_state -from .const import DOMAIN +from .const import DOMAIN, OPTION_PREFERRED from .pipeline import AssistDevice, PipelineData, PipelineStorageCollection from .vad import VadSensitivity -OPTION_PREFERRED = "preferred" - @callback def get_chosen_pipeline( diff --git a/homeassistant/components/assist_satellite/__init__.py b/homeassistant/components/assist_satellite/__init__.py new file mode 100644 index 00000000000..3d6e04bcc75 --- /dev/null +++ b/homeassistant/components/assist_satellite/__init__.py @@ -0,0 +1,65 @@ +"""Base class for assist satellite entities.""" + +import logging + +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent +from homeassistant.helpers.typing import ConfigType + +from .const import DOMAIN, AssistSatelliteEntityFeature +from .entity import AssistSatelliteEntity, AssistSatelliteEntityDescription +from .errors import SatelliteBusyError +from .websocket_api import async_register_websocket_api + +__all__ = [ + 
"DOMAIN", + "AssistSatelliteEntity", + "AssistSatelliteEntityDescription", + "AssistSatelliteEntityFeature", + "SatelliteBusyError", +] + +_LOGGER = logging.getLogger(__name__) + +PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE + + +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + component = hass.data[DOMAIN] = EntityComponent[AssistSatelliteEntity]( + _LOGGER, DOMAIN, hass + ) + await component.async_setup(config) + + component.async_register_entity_service( + "announce", + vol.All( + cv.make_entity_service_schema( + { + vol.Optional("message"): str, + vol.Optional("media_id"): str, + } + ), + cv.has_at_least_one_key("message", "media_id"), + ), + "async_internal_announce", + [AssistSatelliteEntityFeature.ANNOUNCE], + ) + async_register_websocket_api(hass) + + return True + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up a config entry.""" + component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN] + return await component.async_setup_entry(entry) + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload a config entry.""" + component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN] + return await component.async_unload_entry(entry) diff --git a/homeassistant/components/assist_satellite/const.py b/homeassistant/components/assist_satellite/const.py new file mode 100644 index 00000000000..3a9ce896fb2 --- /dev/null +++ b/homeassistant/components/assist_satellite/const.py @@ -0,0 +1,12 @@ +"""Constants for assist satellite.""" + +from enum import IntFlag + +DOMAIN = "assist_satellite" + + +class AssistSatelliteEntityFeature(IntFlag): + """Supported features of Assist satellite entity.""" + + ANNOUNCE = 1 + """Device supports remotely triggered announcements.""" diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py new file mode 100644 index 00000000000..8364a81b1fb --- /dev/null +++ b/homeassistant/components/assist_satellite/entity.py @@ -0,0 +1,332 @@ +"""Assist satellite entity.""" + +from abc import abstractmethod +import asyncio +from collections.abc import AsyncIterable +from enum import StrEnum +import logging +import time +from typing import Any, Final, final + +from homeassistant.components import media_source, stt, tts +from homeassistant.components.assist_pipeline import ( + OPTION_PREFERRED, + AudioSettings, + PipelineEvent, + PipelineEventType, + PipelineStage, + async_get_pipeline, + async_get_pipelines, + async_pipeline_from_audio_stream, + vad, +) +from homeassistant.components.media_player import async_process_play_media_url +from homeassistant.components.tts.media_source import ( + generate_media_source_id as tts_generate_media_source_id, +) +from homeassistant.core import Context, callback +from homeassistant.helpers import entity +from homeassistant.helpers.entity import EntityDescription +from homeassistant.util import ulid + +from .const import AssistSatelliteEntityFeature +from .errors import AssistSatelliteError, SatelliteBusyError + +_CONVERSATION_TIMEOUT_SEC: Final = 5 * 60 # 5 minutes + +_LOGGER = logging.getLogger(__name__) + + +class AssistSatelliteState(StrEnum): + """Valid states of an Assist satellite entity.""" + + LISTENING_WAKE_WORD = "listening_wake_word" + """Device is streaming audio for wake word detection to Home Assistant.""" + + LISTENING_COMMAND = "listening_command" + """Device is streaming audio with the voice command to Home Assistant.""" + + PROCESSING = 
"processing" + """Home Assistant is processing the voice command.""" + + RESPONDING = "responding" + """Device is speaking the response.""" + + +class AssistSatelliteEntityDescription(EntityDescription, frozen_or_thawed=True): + """A class that describes Assist satellite entities.""" + + +class AssistSatelliteEntity(entity.Entity): + """Entity encapsulating the state and functionality of an Assist satellite.""" + + entity_description: AssistSatelliteEntityDescription + _attr_should_poll = False + _attr_supported_features = AssistSatelliteEntityFeature(0) + _attr_pipeline_entity_id: str | None = None + _attr_vad_sensitivity_entity_id: str | None = None + + _conversation_id: str | None = None + _conversation_id_time: float | None = None + + _run_has_tts: bool = False + _is_announcing = False + _wake_word_intercept_future: asyncio.Future[str | None] | None = None + + __assist_satellite_state: AssistSatelliteState | None = None + + @final + @property + def state(self) -> str | None: + """Return state of the entity.""" + return self.__assist_satellite_state + + @property + def pipeline_entity_id(self) -> str | None: + """Entity ID of the pipeline to use for the next conversation.""" + return self._attr_pipeline_entity_id + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Entity ID of the VAD sensitivity to use for the next conversation.""" + return self._attr_vad_sensitivity_entity_id + + async def async_intercept_wake_word(self) -> str | None: + """Intercept the next wake word from the satellite. + + Returns the detected wake word phrase or None. + """ + if self._wake_word_intercept_future is not None: + raise SatelliteBusyError("Wake word interception already in progress") + + # Will cause next wake word to be intercepted in + # async_accept_pipeline_from_satellite + self._wake_word_intercept_future = asyncio.Future() + + _LOGGER.debug("Next wake word will be intercepted: %s", self.entity_id) + + try: + return await self._wake_word_intercept_future + finally: + self._wake_word_intercept_future = None + + async def async_internal_announce( + self, + message: str | None = None, + media_id: str | None = None, + ) -> None: + """Play and show an announcement on the satellite. + + If media_id is not provided, message is synthesized to + audio with the selected pipeline. + + If media_id is provided, it is played directly. It is possible + to omit the message and the satellite will not show any text. + + Calls async_announce with message and media id. 
+ """ + if message is None: + message = "" + + if not media_id: + # Synthesize audio and get URL + pipeline_id = self._resolve_pipeline() + pipeline = async_get_pipeline(self.hass, pipeline_id) + + tts_options: dict[str, Any] = {} + if pipeline.tts_voice is not None: + tts_options[tts.ATTR_VOICE] = pipeline.tts_voice + + media_id = tts_generate_media_source_id( + self.hass, + message, + engine=pipeline.tts_engine, + language=pipeline.tts_language, + options=tts_options, + ) + + if media_source.is_media_source_id(media_id): + media = await media_source.async_resolve_media( + self.hass, + media_id, + None, + ) + media_id = media.url + + # Resolve to full URL + media_id = async_process_play_media_url(self.hass, media_id) + + if self._is_announcing: + raise SatelliteBusyError + + self._is_announcing = True + + try: + # Block until announcement is finished + await self.async_announce(message, media_id) + finally: + self._is_announcing = False + + async def async_announce(self, message: str, media_id: str) -> None: + """Announce media on the satellite. + + Should block until the announcement is done playing. + """ + raise NotImplementedError + + async def async_accept_pipeline_from_satellite( + self, + audio_stream: AsyncIterable[bytes], + start_stage: PipelineStage = PipelineStage.STT, + end_stage: PipelineStage = PipelineStage.TTS, + wake_word_phrase: str | None = None, + ) -> None: + """Triggers an Assist pipeline in Home Assistant from a satellite.""" + if self._wake_word_intercept_future and start_stage in ( + PipelineStage.WAKE_WORD, + PipelineStage.STT, + ): + if start_stage == PipelineStage.WAKE_WORD: + self._wake_word_intercept_future.set_exception( + AssistSatelliteError( + "Only on-device wake words currently supported" + ) + ) + return + + # Intercepting wake word and immediately end pipeline + _LOGGER.debug( + "Intercepted wake word: %s (entity_id=%s)", + wake_word_phrase, + self.entity_id, + ) + + if wake_word_phrase is None: + self._wake_word_intercept_future.set_exception( + AssistSatelliteError("No wake word phrase provided") + ) + else: + self._wake_word_intercept_future.set_result(wake_word_phrase) + self._internal_on_pipeline_event(PipelineEvent(PipelineEventType.RUN_END)) + return + + device_id = self.registry_entry.device_id if self.registry_entry else None + + # Refresh context if necessary + if ( + (self._context is None) + or (self._context_set is None) + or ((time.time() - self._context_set) > entity.CONTEXT_RECENT_TIME_SECONDS) + ): + self.async_set_context(Context()) + + assert self._context is not None + + # Reset conversation id if necessary + if (self._conversation_id_time is None) or ( + (time.monotonic() - self._conversation_id_time) > _CONVERSATION_TIMEOUT_SEC + ): + self._conversation_id = None + + if self._conversation_id is None: + self._conversation_id = ulid.ulid() + + # Update timeout + self._conversation_id_time = time.monotonic() + + # Set entity state based on pipeline events + self._run_has_tts = False + + await async_pipeline_from_audio_stream( + self.hass, + context=self._context, + event_callback=self._internal_on_pipeline_event, + stt_metadata=stt.SpeechMetadata( + language="", # set in async_pipeline_from_audio_stream + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ), + stt_stream=audio_stream, + pipeline_id=self._resolve_pipeline(), + conversation_id=self._conversation_id, + device_id=device_id, + 
tts_audio_output="wav", + wake_word_phrase=wake_word_phrase, + audio_settings=AudioSettings( + silence_seconds=self._resolve_vad_sensitivity() + ), + start_stage=start_stage, + end_stage=end_stage, + ) + + @abstractmethod + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Handle pipeline events.""" + + @callback + def _internal_on_pipeline_event(self, event: PipelineEvent) -> None: + """Set state based on pipeline stage.""" + if event.type is PipelineEventType.WAKE_WORD_START: + self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD) + elif event.type is PipelineEventType.STT_START: + self._set_state(AssistSatelliteState.LISTENING_COMMAND) + elif event.type is PipelineEventType.INTENT_START: + self._set_state(AssistSatelliteState.PROCESSING) + elif event.type is PipelineEventType.TTS_START: + # Wait until tts_response_finished is called to return to waiting state + self._run_has_tts = True + self._set_state(AssistSatelliteState.RESPONDING) + elif event.type is PipelineEventType.RUN_END: + if not self._run_has_tts: + self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD) + + self.on_pipeline_event(event) + + @callback + def _set_state(self, state: AssistSatelliteState) -> None: + """Set the entity's state.""" + self.__assist_satellite_state = state + self.async_write_ha_state() + + @callback + def tts_response_finished(self) -> None: + """Tell entity that the text-to-speech response has finished playing.""" + self._set_state(AssistSatelliteState.LISTENING_WAKE_WORD) + + @callback + def _resolve_pipeline(self) -> str | None: + """Resolve pipeline from select entity to id. + + Return None to make async_get_pipeline look up the preferred pipeline. + """ + if not (pipeline_entity_id := self.pipeline_entity_id): + return None + + if (pipeline_entity_state := self.hass.states.get(pipeline_entity_id)) is None: + raise RuntimeError("Pipeline entity not found") + + if pipeline_entity_state.state != OPTION_PREFERRED: + # Resolve pipeline by name + for pipeline in async_get_pipelines(self.hass): + if pipeline.name == pipeline_entity_state.state: + return pipeline.id + + return None + + @callback + def _resolve_vad_sensitivity(self) -> float: + """Resolve VAD sensitivity from select entity to enum.""" + vad_sensitivity = vad.VadSensitivity.DEFAULT + + if vad_sensitivity_entity_id := self.vad_sensitivity_entity_id: + if ( + vad_sensitivity_state := self.hass.states.get(vad_sensitivity_entity_id) + ) is None: + raise RuntimeError("VAD sensitivity entity not found") + + vad_sensitivity = vad.VadSensitivity(vad_sensitivity_state.state) + + return vad.VadSensitivity.to_seconds(vad_sensitivity) diff --git a/homeassistant/components/assist_satellite/errors.py b/homeassistant/components/assist_satellite/errors.py new file mode 100644 index 00000000000..cd05f374521 --- /dev/null +++ b/homeassistant/components/assist_satellite/errors.py @@ -0,0 +1,11 @@ +"""Errors for assist satellite.""" + +from homeassistant.exceptions import HomeAssistantError + + +class AssistSatelliteError(HomeAssistantError): + """Base class for assist satellite errors.""" + + +class SatelliteBusyError(AssistSatelliteError): + """Satellite is busy and cannot handle the request.""" diff --git a/homeassistant/components/assist_satellite/icons.json b/homeassistant/components/assist_satellite/icons.json new file mode 100644 index 00000000000..a98c3aefc5b --- /dev/null +++ b/homeassistant/components/assist_satellite/icons.json @@ -0,0 +1,12 @@ +{ + "entity_component": { + "_": { + "default": "mdi:account-voice" + } + }, + 
"services": { + "announce": { + "service": "mdi:bullhorn" + } + } +} diff --git a/homeassistant/components/assist_satellite/manifest.json b/homeassistant/components/assist_satellite/manifest.json new file mode 100644 index 00000000000..b4f89456351 --- /dev/null +++ b/homeassistant/components/assist_satellite/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "assist_satellite", + "name": "Assist Satellite", + "codeowners": ["@home-assistant/core", "@synesthesiam"], + "dependencies": ["assist_pipeline", "stt", "tts"], + "documentation": "https://www.home-assistant.io/integrations/assist_satellite", + "integration_type": "entity", + "quality_scale": "internal" +} diff --git a/homeassistant/components/assist_satellite/services.yaml b/homeassistant/components/assist_satellite/services.yaml new file mode 100644 index 00000000000..e7fefc4705f --- /dev/null +++ b/homeassistant/components/assist_satellite/services.yaml @@ -0,0 +1,16 @@ +announce: + target: + entity: + domain: assist_satellite + supported_features: + - assist_satellite.AssistSatelliteEntityFeature.ANNOUNCE + fields: + message: + required: false + example: "Time to wake up!" + selector: + text: + media_id: + required: false + selector: + text: diff --git a/homeassistant/components/assist_satellite/strings.json b/homeassistant/components/assist_satellite/strings.json new file mode 100644 index 00000000000..1d07882daae --- /dev/null +++ b/homeassistant/components/assist_satellite/strings.json @@ -0,0 +1,30 @@ +{ + "title": "Assist satellite", + "entity_component": { + "_": { + "name": "Assist satellite", + "state": { + "listening_wake_word": "Wake word", + "listening_command": "Voice command", + "responding": "Responding", + "processing": "Processing" + } + } + }, + "services": { + "announce": { + "name": "Announce", + "description": "Let the satellite announce a message.", + "fields": { + "message": { + "name": "Message", + "description": "The message to announce." + }, + "media_id": { + "name": "Media ID", + "description": "The media ID to announce instead of using text-to-speech." 
+ } + } + } + } +} diff --git a/homeassistant/components/assist_satellite/websocket_api.py b/homeassistant/components/assist_satellite/websocket_api.py new file mode 100644 index 00000000000..10687f4210e --- /dev/null +++ b/homeassistant/components/assist_satellite/websocket_api.py @@ -0,0 +1,46 @@ +"""Assist satellite Websocket API.""" + +from typing import Any + +import voluptuous as vol + +from homeassistant.components import websocket_api +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.entity_component import EntityComponent + +from .const import DOMAIN +from .entity import AssistSatelliteEntity + + +@callback +def async_register_websocket_api(hass: HomeAssistant) -> None: + """Register the websocket API.""" + websocket_api.async_register_command(hass, websocket_intercept_wake_word) + + +@callback +@websocket_api.websocket_command( + { + vol.Required("type"): "assist_satellite/intercept_wake_word", + vol.Required("entity_id"): cv.entity_domain(DOMAIN), + } +) +@websocket_api.require_admin +@websocket_api.async_response +async def websocket_intercept_wake_word( + hass: HomeAssistant, + connection: websocket_api.connection.ActiveConnection, + msg: dict[str, Any], +) -> None: + """Intercept the next wake word from a satellite.""" + component: EntityComponent[AssistSatelliteEntity] = hass.data[DOMAIN] + satellite = component.get_entity(msg["entity_id"]) + if satellite is None: + connection.send_error( + msg["id"], websocket_api.ERR_NOT_FOUND, "Entity not found" + ) + return + + wake_word_phrase = await satellite.async_intercept_wake_word() + connection.send_result(msg["id"], {"wake_word_phrase": wake_word_phrase}) diff --git a/homeassistant/const.py b/homeassistant/const.py index 1ee73408f98..ee90ebfc28b 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -41,6 +41,7 @@ class Platform(StrEnum): AIR_QUALITY = "air_quality" ALARM_CONTROL_PANEL = "alarm_control_panel" + ASSIST_SATELLITE = "assist_satellite" BINARY_SENSOR = "binary_sensor" BUTTON = "button" CALENDAR = "calendar" diff --git a/mypy.ini b/mypy.ini index 3854477b94b..2686fbe3062 100644 --- a/mypy.ini +++ b/mypy.ini @@ -705,6 +705,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.assist_satellite.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.asuswrt.*] check_untyped_defs = true disallow_incomplete_defs = true diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index 4dbea0e4c95..a37fa9c57fc 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.2.27,source=/uv,target=/bin/uv \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 ruff==0.6.2 \ - PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.9.4 mutagen==1.47.0 + PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.9.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" LABEL "maintainer"="Home Assistant " diff --git a/tests/components/assist_satellite/__init__.py 
b/tests/components/assist_satellite/__init__.py new file mode 100644 index 00000000000..7e06ea3a4b9 --- /dev/null +++ b/tests/components/assist_satellite/__init__.py @@ -0,0 +1,3 @@ +"""Tests for Assist Satellite.""" + +ENTITY_ID = "assist_satellite.test_entity" diff --git a/tests/components/assist_satellite/conftest.py b/tests/components/assist_satellite/conftest.py new file mode 100644 index 00000000000..a14e9e9452b --- /dev/null +++ b/tests/components/assist_satellite/conftest.py @@ -0,0 +1,107 @@ +"""Test helpers for Assist Satellite.""" + +import pathlib +from unittest.mock import Mock + +import pytest + +from homeassistant.components.assist_pipeline import PipelineEvent +from homeassistant.components.assist_satellite import ( + DOMAIN as AS_DOMAIN, + AssistSatelliteEntity, + AssistSatelliteEntityFeature, +) +from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import ( + MockConfigEntry, + MockModule, + mock_config_flow, + mock_integration, + mock_platform, + setup_test_component_platform, +) + +TEST_DOMAIN = "test" + + +@pytest.fixture(autouse=True) +def mock_tts(mock_tts_cache_dir: pathlib.Path) -> None: + """Mock TTS cache dir fixture.""" + + +class MockAssistSatellite(AssistSatelliteEntity): + """Mock Assist Satellite Entity.""" + + _attr_name = "Test Entity" + _attr_supported_features = AssistSatelliteEntityFeature.ANNOUNCE + + def __init__(self) -> None: + """Initialize the mock entity.""" + self.events = [] + self.announcements = [] + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Handle pipeline events.""" + self.events.append(event) + + async def async_announce(self, message: str, media_id: str) -> None: + """Announce media on a device.""" + self.announcements.append((message, media_id)) + + +@pytest.fixture +def entity() -> MockAssistSatellite: + """Mock Assist Satellite Entity.""" + return MockAssistSatellite() + + +@pytest.fixture +def config_entry(hass: HomeAssistant) -> ConfigEntry: + """Mock config entry.""" + entry = MockConfigEntry(domain=TEST_DOMAIN) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +async def init_components( + hass: HomeAssistant, + config_entry: ConfigEntry, + entity: MockAssistSatellite, +) -> None: + """Initialize components.""" + assert await async_setup_component(hass, "homeassistant", {}) + + async def async_setup_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Set up test config entry.""" + await hass.config_entries.async_forward_entry_setups(config_entry, [AS_DOMAIN]) + return True + + async def async_unload_entry_init( + hass: HomeAssistant, config_entry: ConfigEntry + ) -> bool: + """Unload test config entry.""" + await hass.config_entries.async_forward_entry_unload(config_entry, AS_DOMAIN) + return True + + mock_integration( + hass, + MockModule( + TEST_DOMAIN, + async_setup_entry=async_setup_entry_init, + async_unload_entry=async_unload_entry_init, + ), + ) + setup_test_component_platform(hass, AS_DOMAIN, [entity], from_config_entry=True) + mock_platform(hass, f"{TEST_DOMAIN}.config_flow", Mock()) + + with mock_config_flow(TEST_DOMAIN, ConfigFlow): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py new file mode 100644 index 00000000000..f957a826828 --- 
/dev/null +++ b/tests/components/assist_satellite/test_entity.py @@ -0,0 +1,332 @@ +"""Test the Assist Satellite entity.""" + +import asyncio +from unittest.mock import patch + +import pytest + +from homeassistant.components import stt +from homeassistant.components.assist_pipeline import ( + OPTION_PREFERRED, + AudioSettings, + Pipeline, + PipelineEvent, + PipelineEventType, + PipelineStage, + async_get_pipeline, + async_update_pipeline, + vad, +) +from homeassistant.components.assist_satellite import SatelliteBusyError +from homeassistant.components.assist_satellite.entity import AssistSatelliteState +from homeassistant.components.media_source import PlayMedia +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_UNKNOWN +from homeassistant.core import Context, HomeAssistant + +from . import ENTITY_ID +from .conftest import MockAssistSatellite + + +async def test_entity_state( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test entity state represent events.""" + + state = hass.states.get(ENTITY_ID) + assert state is not None + assert state.state == STATE_UNKNOWN + + context = Context() + audio_stream = object() + + entity.async_set_context(context) + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream" + ) as mock_start_pipeline: + await entity.async_accept_pipeline_from_satellite(audio_stream) + + assert mock_start_pipeline.called + kwargs = mock_start_pipeline.call_args[1] + assert kwargs["context"] is context + assert kwargs["event_callback"] == entity._internal_on_pipeline_event + assert kwargs["stt_metadata"] == stt.SpeechMetadata( + language="", + format=stt.AudioFormats.WAV, + codec=stt.AudioCodecs.PCM, + bit_rate=stt.AudioBitRates.BITRATE_16, + sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, + channel=stt.AudioChannels.CHANNEL_MONO, + ) + assert kwargs["stt_stream"] is audio_stream + assert kwargs["pipeline_id"] is None + assert kwargs["device_id"] is None + assert kwargs["tts_audio_output"] == "wav" + assert kwargs["wake_word_phrase"] is None + assert kwargs["audio_settings"] == AudioSettings( + silence_seconds=vad.VadSensitivity.to_seconds(vad.VadSensitivity.DEFAULT) + ) + assert kwargs["start_stage"] == PipelineStage.STT + assert kwargs["end_stage"] == PipelineStage.TTS + + for event_type, expected_state in ( + (PipelineEventType.RUN_START, STATE_UNKNOWN), + (PipelineEventType.RUN_END, AssistSatelliteState.LISTENING_WAKE_WORD), + (PipelineEventType.WAKE_WORD_START, AssistSatelliteState.LISTENING_WAKE_WORD), + (PipelineEventType.WAKE_WORD_END, AssistSatelliteState.LISTENING_WAKE_WORD), + (PipelineEventType.STT_START, AssistSatelliteState.LISTENING_COMMAND), + (PipelineEventType.STT_VAD_START, AssistSatelliteState.LISTENING_COMMAND), + (PipelineEventType.STT_VAD_END, AssistSatelliteState.LISTENING_COMMAND), + (PipelineEventType.STT_END, AssistSatelliteState.LISTENING_COMMAND), + (PipelineEventType.INTENT_START, AssistSatelliteState.PROCESSING), + (PipelineEventType.INTENT_END, AssistSatelliteState.PROCESSING), + (PipelineEventType.TTS_START, AssistSatelliteState.RESPONDING), + (PipelineEventType.TTS_END, AssistSatelliteState.RESPONDING), + (PipelineEventType.ERROR, AssistSatelliteState.RESPONDING), + ): + kwargs["event_callback"](PipelineEvent(event_type, {})) + state = hass.states.get(ENTITY_ID) + assert state.state == expected_state, event_type + + entity.tts_response_finished() + state = hass.states.get(ENTITY_ID) + assert state.state == 
AssistSatelliteState.LISTENING_WAKE_WORD + + +@pytest.mark.parametrize( + ("service_data", "expected_params"), + [ + ( + {"message": "Hello"}, + ("Hello", "https://www.home-assistant.io/resolved.mp3"), + ), + ( + { + "message": "Hello", + "media_id": "http://example.com/bla.mp3", + }, + ("Hello", "http://example.com/bla.mp3"), + ), + ( + {"media_id": "http://example.com/bla.mp3"}, + ("", "http://example.com/bla.mp3"), + ), + ], +) +async def test_announce( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + service_data: dict, + expected_params: tuple[str, str], +) -> None: + """Test announcing on a device.""" + await async_update_pipeline( + hass, + async_get_pipeline(hass), + tts_engine="tts.mock_entity", + tts_language="en", + tts_voice="test-voice", + ) + + with ( + patch( + "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.media_source.async_resolve_media", + return_value=PlayMedia( + url="https://www.home-assistant.io/resolved.mp3", + mime_type="audio/mp3", + ), + ), + ): + await hass.services.async_call( + "assist_satellite", + "announce", + service_data, + target={"entity_id": "assist_satellite.test_entity"}, + blocking=True, + ) + + assert entity.announcements[0] == expected_params + + +async def test_announce_busy( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, +) -> None: + """Test that announcing while an announcement is in progress raises an error.""" + media_id = "https://www.home-assistant.io/resolved.mp3" + announce_started = asyncio.Event() + got_error = asyncio.Event() + + async def async_announce(message, media_id): + announce_started.set() + + # Block so we can do another announcement + await got_error.wait() + + with patch.object(entity, "async_announce", new=async_announce): + announce_task = asyncio.create_task( + entity.async_internal_announce(media_id=media_id) + ) + async with asyncio.timeout(1): + await announce_started.wait() + + # Try to do a second announcement + with pytest.raises(SatelliteBusyError): + await entity.async_internal_announce(media_id=media_id) + + # Avoid lingering task + got_error.set() + await announce_task + + +async def test_context_refresh( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test that the context will be automatically refreshed.""" + audio_stream = object() + + # Remove context + entity._context = None + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream" + ): + await entity.async_accept_pipeline_from_satellite(audio_stream) + + # Context should have been refreshed + assert entity._context is not None + + +async def test_pipeline_entity( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test getting pipeline from an entity.""" + audio_stream = object() + pipeline = Pipeline( + conversation_engine="test", + conversation_language="en", + language="en", + name="test-pipeline", + stt_engine=None, + stt_language=None, + tts_engine=None, + tts_language=None, + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + pipeline_entity_id = "select.pipeline" + hass.states.async_set(pipeline_entity_id, pipeline.name) + entity._attr_pipeline_entity_id = pipeline_entity_id + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, pipeline_id: str, **kwargs): + assert pipeline_id 
== pipeline.id + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.assist_satellite.entity.async_get_pipelines", + return_value=[pipeline], + ), + ): + async with asyncio.timeout(1): + await entity.async_accept_pipeline_from_satellite(audio_stream) + await done.wait() + + +async def test_pipeline_entity_preferred( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test getting pipeline from an entity with a preferred state.""" + audio_stream = object() + + pipeline_entity_id = "select.pipeline" + hass.states.async_set(pipeline_entity_id, OPTION_PREFERRED) + entity._attr_pipeline_entity_id = pipeline_entity_id + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, pipeline_id: str, **kwargs): + # Preferred pipeline + assert pipeline_id is None + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + ): + async with asyncio.timeout(1): + await entity.async_accept_pipeline_from_satellite(audio_stream) + await done.wait() + + +async def test_vad_sensitivity_entity( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test getting vad sensitivity from an entity.""" + audio_stream = object() + + vad_sensitivity_entity_id = "select.vad_sensitivity" + hass.states.async_set(vad_sensitivity_entity_id, vad.VadSensitivity.AGGRESSIVE) + entity._attr_vad_sensitivity_entity_id = vad_sensitivity_entity_id + + done = asyncio.Event() + + async def async_pipeline_from_audio_stream( + *args, audio_settings: AudioSettings, **kwargs + ): + # Verify vad sensitivity + assert audio_settings.silence_seconds == vad.VadSensitivity.to_seconds( + vad.VadSensitivity.AGGRESSIVE + ) + done.set() + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ): + async with asyncio.timeout(1): + await entity.async_accept_pipeline_from_satellite(audio_stream) + await done.wait() + + +async def test_pipeline_entity_not_found( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test that setting the pipeline entity id to a non-existent entity raises an error.""" + audio_stream = object() + + # Set to an entity that doesn't exist + entity._attr_pipeline_entity_id = "select.pipeline" + + with pytest.raises(RuntimeError): + await entity.async_accept_pipeline_from_satellite(audio_stream) + + +async def test_vad_sensitivity_entity_not_found( + hass: HomeAssistant, init_components: ConfigEntry, entity: MockAssistSatellite +) -> None: + """Test that setting the vad sensitivity entity id to a non-existent entity raises an error.""" + audio_stream = object() + + # Set to an entity that doesn't exist + entity._attr_vad_sensitivity_entity_id = "select.vad_sensitivity" + + with pytest.raises(RuntimeError): + await entity.async_accept_pipeline_from_satellite(audio_stream) diff --git a/tests/components/assist_satellite/test_websocket_api.py b/tests/components/assist_satellite/test_websocket_api.py new file mode 100644 index 00000000000..af49334e629 --- /dev/null +++ b/tests/components/assist_satellite/test_websocket_api.py @@ -0,0 +1,192 @@ +"""Test WebSocket API.""" + +import asyncio + +from homeassistant.components.assist_pipeline import 
PipelineStage +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant + +from . import ENTITY_ID +from .conftest import MockAssistSatellite + +from tests.common import MockUser +from tests.typing import WebSocketGenerator + + +async def test_intercept_wake_word( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + for _ in range(3): + await asyncio.sleep(0) + + await entity.async_accept_pipeline_from_satellite( + object(), + start_stage=PipelineStage.STT, + wake_word_phrase="ok, nabu", + ) + + response = await ws_client.receive_json() + + assert response["success"] + assert response["result"] == {"wake_word_phrase": "ok, nabu"} + + +async def test_intercept_wake_word_requires_on_device_wake_word( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word fails if detection happens in HA.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + for _ in range(3): + await asyncio.sleep(0) + + await entity.async_accept_pipeline_from_satellite( + object(), + # Emulate wake word processing in Home Assistant + start_stage=PipelineStage.WAKE_WORD, + ) + + response = await ws_client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Only on-device wake words currently supported", + } + + +async def test_intercept_wake_word_requires_wake_word_phrase( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word fails if detection happens in HA.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + for _ in range(3): + await asyncio.sleep(0) + + await entity.async_accept_pipeline_from_satellite( + object(), + start_stage=PipelineStage.STT, + # We are not passing wake word phrase + ) + + response = await ws_client.receive_json() + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "No wake word phrase provided", + } + + +async def test_intercept_wake_word_require_admin( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, + hass_admin_user: MockUser, +) -> None: + """Test intercepting a wake word requires admin access.""" + # Remove admin permission and verify we're not allowed + hass_admin_user.groups = [] + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "unauthorized", + "message": "Unauthorized", + } + + +async def test_intercept_wake_word_invalid_satellite( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: 
WebSocketGenerator, +) -> None: + """Test intercepting a wake word requires admin access.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": "assist_satellite.invalid", + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "not_found", + "message": "Entity not found", + } + + +async def test_intercept_wake_word_twice( + hass: HomeAssistant, + init_components: ConfigEntry, + entity: MockAssistSatellite, + hass_ws_client: WebSocketGenerator, +) -> None: + """Test intercepting a wake word requires admin access.""" + ws_client = await hass_ws_client(hass) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + + await ws_client.send_json_auto_id( + { + "type": "assist_satellite/intercept_wake_word", + "entity_id": ENTITY_ID, + } + ) + response = await ws_client.receive_json() + + assert not response["success"] + assert response["error"] == { + "code": "home_assistant_error", + "message": "Wake word interception already in progress", + } From 0ca0836e832bff6a160e9faddc4a5fafc298ac6c Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 6 Sep 2024 07:21:41 +0200 Subject: [PATCH 1485/1635] Correct check for removed index in recorder test (#125323) --- tests/components/recorder/test_migration_from_schema_32.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 40d18ab51fd..cdbbd7ec4e4 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -335,7 +335,7 @@ async def test_migrate_events_context_ids( # Check the index which will be removed by the migrator no longer exists with session_scope(hass=hass) as session: - assert get_index_by_name(session, "states", "ix_states_context_id") is None + assert get_index_by_name(session, "events", "ix_events_context_id") is None @pytest.mark.parametrize("enable_migrate_context_ids", [True]) From cf049a07c22e554a8c541e1c04c765f9d03bab04 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 6 Sep 2024 07:59:22 +0200 Subject: [PATCH 1486/1635] Don't allow templating min, max, step in config entry template number (#125342) --- homeassistant/components/template/__init__.py | 20 ++++++-- .../components/template/config_flow.py | 18 +++---- homeassistant/components/template/const.py | 9 ++-- homeassistant/components/template/number.py | 5 +- tests/components/template/test_config_flow.py | 48 +++++++++--------- tests/components/template/test_init.py | 49 ++++++++++++++++--- tests/components/template/test_number.py | 12 ++--- 7 files changed, 106 insertions(+), 55 deletions(-) diff --git a/homeassistant/components/template/__init__.py b/homeassistant/components/template/__init__.py index efa99342699..d3cfda2d4eb 100644 --- a/homeassistant/components/template/__init__.py +++ b/homeassistant/components/template/__init__.py @@ -7,9 +7,14 @@ import logging from homeassistant import config as conf_util from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_DEVICE_ID, CONF_UNIQUE_ID, SERVICE_RELOAD +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_UNIQUE_ID, + SERVICE_RELOAD, +) from homeassistant.core import Event, HomeAssistant, ServiceCall 
-from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ConfigEntryError, HomeAssistantError from homeassistant.helpers import discovery from homeassistant.helpers.device import ( async_remove_stale_devices_links_keep_current_device, @@ -19,7 +24,7 @@ from homeassistant.helpers.service import async_register_admin_service from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_integration -from .const import CONF_TRIGGER, DOMAIN, PLATFORMS +from .const import CONF_MAX, CONF_MIN, CONF_STEP, CONF_TRIGGER, DOMAIN, PLATFORMS from .coordinator import TriggerUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -67,6 +72,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.options.get(CONF_DEVICE_ID), ) + for key in (CONF_MAX, CONF_MIN, CONF_STEP): + if key not in entry.options: + continue + if isinstance(entry.options[key], str): + raise ConfigEntryError( + f"The '{entry.options.get(CONF_NAME) or ""}' number template needs to " + f"be reconfigured, {key} must be a number, got '{entry.options[key]}'" + ) + await hass.config_entries.async_forward_entry_setups( entry, (entry.options["template_type"],) ) diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index 2c12a0d03e9..ba4f4a78f53 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -107,15 +107,15 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: if domain == Platform.NUMBER: schema |= { vol.Required(CONF_STATE): selector.TemplateSelector(), - vol.Required( - CONF_MIN, default=f"{{{{{DEFAULT_MIN_VALUE}}}}}" - ): selector.TemplateSelector(), - vol.Required( - CONF_MAX, default=f"{{{{{DEFAULT_MAX_VALUE}}}}}" - ): selector.TemplateSelector(), - vol.Required( - CONF_STEP, default=f"{{{{{DEFAULT_STEP}}}}}" - ): selector.TemplateSelector(), + vol.Required(CONF_MIN, default=DEFAULT_MIN_VALUE): selector.NumberSelector( + selector.NumberSelectorConfig(mode=selector.NumberSelectorMode.BOX), + ), + vol.Required(CONF_MAX, default=DEFAULT_MAX_VALUE): selector.NumberSelector( + selector.NumberSelectorConfig(mode=selector.NumberSelectorMode.BOX), + ), + vol.Required(CONF_STEP, default=DEFAULT_STEP): selector.NumberSelector( + selector.NumberSelectorConfig(mode=selector.NumberSelectorMode.BOX), + ), vol.Optional(CONF_SET_VALUE): selector.ActionSelector(), } diff --git a/homeassistant/components/template/const.py b/homeassistant/components/template/const.py index 8b4e46ba383..89df87b4031 100644 --- a/homeassistant/components/template/const.py +++ b/homeassistant/components/template/const.py @@ -28,11 +28,14 @@ PLATFORMS = [ Platform.WEATHER, ] -CONF_AVAILABILITY = "availability" -CONF_ATTRIBUTES = "attributes" CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" +CONF_ATTRIBUTES = "attributes" +CONF_AVAILABILITY = "availability" +CONF_MAX = "max" +CONF_MIN = "min" +CONF_OBJECT_ID = "object_id" CONF_PICTURE = "picture" CONF_PRESS = "press" -CONF_OBJECT_ID = "object_id" +CONF_STEP = "step" CONF_TURN_OFF = "turn_off" CONF_TURN_ON = "turn_on" diff --git a/homeassistant/components/template/number.py b/homeassistant/components/template/number.py index 499ddc192cc..e051f124149 100644 --- a/homeassistant/components/template/number.py +++ b/homeassistant/components/template/number.py @@ -31,7 +31,7 @@ from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, 
DiscoveryInfoType from . import TriggerUpdateCoordinator -from .const import DOMAIN +from .const import CONF_MAX, CONF_MIN, CONF_STEP, DOMAIN from .template_entity import ( TEMPLATE_ENTITY_AVAILABILITY_SCHEMA, TEMPLATE_ENTITY_ICON_SCHEMA, @@ -42,9 +42,6 @@ from .trigger_entity import TriggerEntity _LOGGER = logging.getLogger(__name__) CONF_SET_VALUE = "set_value" -CONF_MIN = "min" -CONF_MAX = "max" -CONF_STEP = "step" DEFAULT_NAME = "Template Number" DEFAULT_OPTIMISTIC = False diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index f8ab190e664..ee748ce41f5 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -98,9 +98,9 @@ from tests.typing import WebSocketGenerator {"one": "30.0", "two": "20.0"}, {}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": "0", + "max": "100", + "step": "0.1", "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -108,9 +108,9 @@ from tests.typing import WebSocketGenerator }, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -258,14 +258,14 @@ async def test_config_flow( "number", {"state": "{{ states('number.one') }}"}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": "0", + "max": "100", + "step": "0.1", }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, }, ), ( @@ -451,9 +451,9 @@ def get_suggested(schema, key): ["30.0", "20.0"], {"one": "30.0", "two": "20.0"}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -461,9 +461,9 @@ def get_suggested(schema, key): }, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -1230,14 +1230,14 @@ async def test_option_flow_sensor_preview_config_entry_removed( "number", {"state": "{{ states('number.one') }}"}, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, }, { - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, }, ), ( diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index 3b4db4bf668..0de57062984 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -319,9 +319,9 @@ async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: "template_type": "number", "name": "My template", "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -330,9 +330,9 @@ async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: }, { "state": "{{ 11 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -454,3 +454,40 @@ async def 
test_change_device( ) == [] ) + + +async def test_fail_non_numerical_number_settings( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test that non numerical number options causes config entry setup to fail. + + Support for non numerical max, min and step was added in HA Core 2024.9.0 and + removed in HA Core 2024.9.1. + """ + + options = { + "template_type": "number", + "name": "My template", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + } + # Setup the config entry + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options=options, + title="Template", + ) + template_config_entry.add_to_hass(hass) + assert not await hass.config_entries.async_setup(template_config_entry.entry_id) + assert ( + "The 'My template' number template needs to be reconfigured, " + "max must be a number, got '{{ 100 }}'" in caplog.text + ) diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index fdca94d9fa4..43decf848ff 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -58,9 +58,9 @@ async def test_setup_config_entry( "name": "My template", "template_type": "number", "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, @@ -524,9 +524,9 @@ async def test_device_id( "name": "My template", "template_type": "number", "state": "{{ 10 }}", - "min": "{{ 0 }}", - "max": "{{ 100 }}", - "step": "{{ 0.1 }}", + "min": 0, + "max": 100, + "step": 0.1, "set_value": { "action": "input_number.set_value", "target": {"entity_id": "input_number.test"}, From f80acdada04b797696ebe38b5c947bf3974d61b1 Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 6 Sep 2024 08:08:40 +0200 Subject: [PATCH 1487/1635] Bump ruff to 0.6.4 (#125385) * Bump ruff to 0.6.4 * fix Dockerfile --- .pre-commit-config.yaml | 2 +- requirements_test_pre_commit.txt | 2 +- script/hassfest/docker/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ab5e59139cf..d87ccf93aa7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.2 + rev: v0.6.4 hooks: - id: ruff args: diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index 0c8d2b3796b..1407fda02b5 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.6.2 +ruff==0.6.4 yamllint==1.35.1 diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile index a37fa9c57fc..571ae6a7181 100644 --- a/script/hassfest/docker/Dockerfile +++ b/script/hassfest/docker/Dockerfile @@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.2.27,source=/uv,target=/bin/uv \ --no-cache \ -c /usr/src/homeassistant/homeassistant/package_constraints.txt \ -r /usr/src/homeassistant/requirements.txt \ - stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 ruff==0.6.2 \ + stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 
ruff==0.6.4 \ PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.9.4 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2 LABEL "name"="hassfest" From a341bfd8cafecedbf382432a629e7da246f91db9 Mon Sep 17 00:00:00 2001 From: TimL Date: Fri, 6 Sep 2024 16:11:50 +1000 Subject: [PATCH 1488/1635] Add binary_sensor platform for Smlight integration (#125284) * Support binary_sensors for SMLight integration * Add strings for binary sensors * Add tests for binary_sensor platform * Update binary sensor docstring Co-authored-by: Shay Levy * Regenerate snapshot --------- Co-authored-by: Shay Levy Co-authored-by: Tim Lunn --- homeassistant/components/smlight/__init__.py | 1 + .../components/smlight/binary_sensor.py | 80 ++++++++++++++++ homeassistant/components/smlight/strings.json | 8 ++ .../smlight/snapshots/test_binary_sensor.ambr | 95 +++++++++++++++++++ .../components/smlight/test_binary_sensor.py | 52 ++++++++++ 5 files changed, 236 insertions(+) create mode 100644 homeassistant/components/smlight/binary_sensor.py create mode 100644 tests/components/smlight/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/smlight/test_binary_sensor.py diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 47dc943423e..4f0f2c0fb02 100644 --- a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -9,6 +9,7 @@ from homeassistant.core import HomeAssistant from .coordinator import SmDataUpdateCoordinator PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR, ] diff --git a/homeassistant/components/smlight/binary_sensor.py b/homeassistant/components/smlight/binary_sensor.py new file mode 100644 index 00000000000..b010c3f7cbd --- /dev/null +++ b/homeassistant/components/smlight/binary_sensor.py @@ -0,0 +1,80 @@ +"""Support for SLZB-06 binary sensors.""" + +from __future__ import annotations + +from _collections_abc import Callable +from dataclasses import dataclass + +from pysmlight import Sensors + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + + +@dataclass(frozen=True, kw_only=True) +class SmBinarySensorEntityDescription(BinarySensorEntityDescription): + """Class describing SMLIGHT binary sensor entities.""" + + value_fn: Callable[[Sensors], bool] + + +SENSORS = [ + SmBinarySensorEntityDescription( + key="ethernet", + translation_key="ethernet", + value_fn=lambda x: x.ethernet, + ), + SmBinarySensorEntityDescription( + key="wifi", + translation_key="wifi", + entity_registry_enabled_default=False, + value_fn=lambda x: x.wifi_connected, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SMLIGHT sensor based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + SmBinarySensorEntity(coordinator, description) for description in SENSORS + ) + + +class SmBinarySensorEntity(SmEntity, BinarySensorEntity): + """Representation of a slzb binary sensor.""" + + entity_description: SmBinarySensorEntityDescription + 
_attr_device_class = BinarySensorDeviceClass.CONNECTIVITY + _attr_entity_category = EntityCategory.DIAGNOSTIC + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmBinarySensorEntityDescription, + ) -> None: + """Initialize slzb binary sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def is_on(self) -> bool: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data.sensors) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index f22966df904..7e17a53a38a 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -42,6 +42,14 @@ } }, "entity": { + "binary_sensor": { + "ethernet": { + "name": "Ethernet" + }, + "wifi": { + "name": "Wi-Fi" + } + }, "sensor": { "zigbee_temperature": { "name": "Zigbee chip temp" diff --git a/tests/components/smlight/snapshots/test_binary_sensor.ambr b/tests/components/smlight/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..5ea936f9647 --- /dev/null +++ b/tests/components/smlight/snapshots/test_binary_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_all_binary_sensors[binary_sensor.mock_title_ethernet-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_ethernet', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Ethernet', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ethernet', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ethernet', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_ethernet-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Mock Title Ethernet', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_ethernet', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_wi_fi-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_wi_fi', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Wi-Fi', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'wifi', + 'unique_id': 'aa:bb:cc:dd:ee:ff_wifi', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensors[binary_sensor.mock_title_wi_fi-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'connectivity', + 'friendly_name': 'Mock Title Wi-Fi', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_wi_fi', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/smlight/test_binary_sensor.py b/tests/components/smlight/test_binary_sensor.py new file mode 100644 index 00000000000..ddf9b01bf16 --- /dev/null +++ b/tests/components/smlight/test_binary_sensor.py @@ -0,0 +1,52 @@ +"""Tests for the SMLIGHT binary sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +@pytest.fixture +def platforms() -> list[Platform]: + """Platforms, which should be loaded during the test.""" + return [Platform.BINARY_SENSOR] + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_binary_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the SMLIGHT binary sensors.""" + entry = await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_disabled_by_default_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test wifi sensor is disabled by default .""" + await setup_integration(hass, mock_config_entry) + + assert not hass.states.get("binary_sensor.mock_title_wi_fi") + + assert (entry := entity_registry.async_get("binary_sensor.mock_title_wi_fi")) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION From 0e515b2e1f57d70a50a656ec9b43b4ad645e0de3 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Fri, 6 Sep 2024 08:29:49 +0200 Subject: [PATCH 1489/1635] Bump pypck to 0.7.22 (#125389) --- homeassistant/components/lcn/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index f8b7d02b103..9023941277f 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -8,5 +8,5 @@ "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.21", "lcn-frontend==0.1.6"] + "requirements": ["pypck==0.7.22", "lcn-frontend==0.1.6"] } diff --git a/requirements_all.txt b/requirements_all.txt index 1a05bcdde77..42fed6f5b5e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2124,7 +2124,7 @@ pyownet==0.10.0.post1 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.21 +pypck==0.7.22 # homeassistant.components.pjlink pypjlink2==1.2.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 98a26141f1a..d0f2b31e1eb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1705,7 +1705,7 @@ pyoverkiz==1.13.14 pyownet==0.10.0.post1 # homeassistant.components.lcn -pypck==0.7.21 +pypck==0.7.22 # homeassistant.components.pjlink pypjlink2==1.2.1 From b025942a14bf484feb81a60685bdb1e773150154 Mon Sep 17 00:00:00 2001 From: Dave Leaver Date: Fri, 6 Sep 2024 19:06:33 +1200 Subject: [PATCH 1490/1635] Fix controlling AC temperature in airtouch5 (#125394) Fix 
controlling AC temperature --- homeassistant/components/airtouch5/climate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/airtouch5/climate.py b/homeassistant/components/airtouch5/climate.py index 2d5740b1837..dfc34c1beaf 100644 --- a/homeassistant/components/airtouch5/climate.py +++ b/homeassistant/components/airtouch5/climate.py @@ -262,7 +262,7 @@ class Airtouch5AC(Airtouch5ClimateEntity): _LOGGER.debug("Argument `temperature` is missing in set_temperature") return - await self._control(temp=temp) + await self._control(setpoint=SetpointControl.CHANGE_SETPOINT, temp=temp) class Airtouch5Zone(Airtouch5ClimateEntity): From 187a38c91fea8f8bda798e15cae53bee2ee48066 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Fri, 6 Sep 2024 09:51:11 +0200 Subject: [PATCH 1491/1635] Add tests for LCN actions / services (#125391) * Add tests for services/actions * Add snapshots for services/actions * Use constants for service names and parameters * Remove snapshot names --- homeassistant/components/lcn/services.py | 46 +- .../lcn/snapshots/test_services.ambr | 203 +++++++++ tests/components/lcn/test_services.py | 425 ++++++++++++++++++ 3 files changed, 661 insertions(+), 13 deletions(-) create mode 100644 tests/components/lcn/snapshots/test_services.ambr create mode 100644 tests/components/lcn/test_services.py diff --git a/homeassistant/components/lcn/services.py b/homeassistant/components/lcn/services.py index 49b54fc0c8d..611a7353bcd 100644 --- a/homeassistant/components/lcn/services.py +++ b/homeassistant/components/lcn/services.py @@ -1,5 +1,7 @@ """Service calls related dependencies for LCN component.""" +from enum import StrEnum, auto + import pypck import voluptuous as vol @@ -394,18 +396,36 @@ class Pck(LcnServiceCall): await device_connection.pck(pck) +class LcnService(StrEnum): + """LCN service names.""" + + OUTPUT_ABS = auto() + OUTPUT_REL = auto() + OUTPUT_TOGGLE = auto() + RELAYS = auto() + VAR_ABS = auto() + VAR_RESET = auto() + VAR_REL = auto() + LOCK_REGULATOR = auto() + LED = auto() + SEND_KEYS = auto() + LOCK_KEYS = auto() + DYN_TEXT = auto() + PCK = auto() + + SERVICES = ( - ("output_abs", OutputAbs), - ("output_rel", OutputRel), - ("output_toggle", OutputToggle), - ("relays", Relays), - ("var_abs", VarAbs), - ("var_reset", VarReset), - ("var_rel", VarRel), - ("lock_regulator", LockRegulator), - ("led", Led), - ("send_keys", SendKeys), - ("lock_keys", LockKeys), - ("dyn_text", DynText), - ("pck", Pck), + (LcnService.OUTPUT_ABS, OutputAbs), + (LcnService.OUTPUT_REL, OutputRel), + (LcnService.OUTPUT_TOGGLE, OutputToggle), + (LcnService.RELAYS, Relays), + (LcnService.VAR_ABS, VarAbs), + (LcnService.VAR_RESET, VarReset), + (LcnService.VAR_REL, VarRel), + (LcnService.LOCK_REGULATOR, LockRegulator), + (LcnService.LED, Led), + (LcnService.SEND_KEYS, SendKeys), + (LcnService.LOCK_KEYS, LockKeys), + (LcnService.DYN_TEXT, DynText), + (LcnService.PCK, Pck), ) diff --git a/tests/components/lcn/snapshots/test_services.ambr b/tests/components/lcn/snapshots/test_services.ambr new file mode 100644 index 00000000000..29e8da72fd7 --- /dev/null +++ b/tests/components/lcn/snapshots/test_services.ambr @@ -0,0 +1,203 @@ +# serializer version: 1 +# name: test_service_dyn_text + tuple( + 0, + 'text in row 1', + ) +# --- +# name: test_service_led + tuple( + , + , + ) +# --- +# name: test_service_lock_keys + tuple( + 0, + list([ + , + , + , + , + , + , + , + , + ]), + ) +# --- +# name: test_service_lock_keys_tab_a_temporary + tuple( + 10, + , + list([ + , + , + , 
+ , + , + , + , + , + ]), + ) +# --- +# name: test_service_lock_regulator + tuple( + 0, + True, + ) +# --- +# name: test_service_output_abs + tuple( + 0, + 100, + 9, + ) +# --- +# name: test_service_output_rel + tuple( + 0, + 25, + ) +# --- +# name: test_service_output_toggle + tuple( + 0, + 9, + ) +# --- +# name: test_service_pck + tuple( + 'PIN4', + ) +# --- +# name: test_service_relays + tuple( + list([ + , + , + , + , + , + , + , + , + ]), + ) +# --- +# name: test_service_send_keys + tuple( + list([ + list([ + True, + False, + False, + False, + True, + False, + False, + False, + ]), + list([ + False, + False, + False, + False, + False, + False, + False, + False, + ]), + list([ + False, + False, + False, + False, + False, + False, + False, + False, + ]), + list([ + False, + False, + False, + False, + False, + False, + False, + True, + ]), + ]), + , + ) +# --- +# name: test_service_send_keys_hit_deferred + tuple( + list([ + list([ + True, + False, + False, + False, + True, + False, + False, + False, + ]), + list([ + False, + False, + False, + False, + False, + False, + False, + False, + ]), + list([ + False, + False, + False, + False, + False, + False, + False, + False, + ]), + list([ + False, + False, + False, + False, + False, + False, + False, + True, + ]), + ]), + 5, + , + ) +# --- +# name: test_service_var_abs + tuple( + , + 75.0, + , + ) +# --- +# name: test_service_var_rel + tuple( + , + 10.0, + , + , + ) +# --- +# name: test_service_var_reset + tuple( + , + ) +# --- diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py new file mode 100644 index 00000000000..9cb53289065 --- /dev/null +++ b/tests/components/lcn/test_services.py @@ -0,0 +1,425 @@ +"""Test for the LCN services.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.components.lcn import DOMAIN +from homeassistant.components.lcn.const import ( + CONF_KEYS, + CONF_LED, + CONF_OUTPUT, + CONF_PCK, + CONF_RELVARREF, + CONF_ROW, + CONF_SETPOINT, + CONF_TABLE, + CONF_TEXT, + CONF_TIME, + CONF_TIME_UNIT, + CONF_TRANSITION, + CONF_VALUE, + CONF_VARIABLE, +) +from homeassistant.components.lcn.services import LcnService +from homeassistant.const import ( + CONF_ADDRESS, + CONF_BRIGHTNESS, + CONF_STATE, + CONF_UNIT_OF_MEASUREMENT, +) +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .conftest import MockModuleConnection, MockPchkConnectionManager, setup_component + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_output_abs( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test output_abs service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + await hass.services.async_call( + DOMAIN, + LcnService.OUTPUT_ABS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_OUTPUT: "output1", + CONF_BRIGHTNESS: 100, + CONF_TRANSITION: 5, + }, + blocking=True, + ) + + assert dim_output.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_output_rel( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test output_rel service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "rel_output") as rel_output: + await 
hass.services.async_call( + DOMAIN, + LcnService.OUTPUT_REL, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_OUTPUT: "output1", + CONF_BRIGHTNESS: 25, + }, + blocking=True, + ) + + assert rel_output.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_output_toggle( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test output_toggle service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "toggle_output") as toggle_output: + await hass.services.async_call( + DOMAIN, + LcnService.OUTPUT_TOGGLE, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_OUTPUT: "output1", + CONF_TRANSITION: 5, + }, + blocking=True, + ) + + assert toggle_output.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_relays(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test relays service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "control_relays") as control_relays: + await hass.services.async_call( + DOMAIN, + LcnService.RELAYS, + {CONF_ADDRESS: "pchk.s0.m7", CONF_STATE: "0011TT--"}, + blocking=True, + ) + + assert control_relays.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_led(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test led service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "control_led") as control_led: + await hass.services.async_call( + DOMAIN, + LcnService.LED, + {CONF_ADDRESS: "pchk.s0.m7", CONF_LED: "led6", CONF_STATE: "blink"}, + blocking=True, + ) + + assert control_led.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_var_abs( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test var_abs service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "var_abs") as var_abs: + await hass.services.async_call( + DOMAIN, + LcnService.VAR_ABS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_VARIABLE: "var1", + CONF_VALUE: 75, + CONF_UNIT_OF_MEASUREMENT: "%", + }, + blocking=True, + ) + + assert var_abs.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_var_rel( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test var_rel service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "var_rel") as var_rel: + await hass.services.async_call( + DOMAIN, + LcnService.VAR_REL, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_VARIABLE: "var1", + CONF_VALUE: 10, + CONF_UNIT_OF_MEASUREMENT: "%", + CONF_RELVARREF: "current", + }, + blocking=True, + ) + + assert var_rel.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_var_reset( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test var_reset service.""" + await async_setup_component(hass, "persistent_notification", {}) + 
await setup_component(hass) + + with patch.object(MockModuleConnection, "var_reset") as var_reset: + await hass.services.async_call( + DOMAIN, + LcnService.VAR_RESET, + {CONF_ADDRESS: "pchk.s0.m7", CONF_VARIABLE: "var1"}, + blocking=True, + ) + + assert var_reset.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_lock_regulator( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test lock_regulator service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_REGULATOR, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_SETPOINT: "r1varsetpoint", + CONF_STATE: True, + }, + blocking=True, + ) + + assert lock_regulator.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_send_keys( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test send_keys service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "send_keys") as send_keys: + await hass.services.async_call( + DOMAIN, + LcnService.SEND_KEYS, + {CONF_ADDRESS: "pchk.s0.m7", CONF_KEYS: "a1a5d8", CONF_STATE: "hit"}, + blocking=True, + ) + + keys = [[False] * 8 for i in range(4)] + keys[0][0] = True + keys[0][4] = True + keys[3][7] = True + + assert send_keys.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_send_keys_hit_deferred( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test send_keys (hit_deferred) service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + keys = [[False] * 8 for i in range(4)] + keys[0][0] = True + keys[0][4] = True + keys[3][7] = True + + # success + with patch.object( + MockModuleConnection, "send_keys_hit_deferred" + ) as send_keys_hit_deferred: + await hass.services.async_call( + DOMAIN, + LcnService.SEND_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_KEYS: "a1a5d8", + CONF_TIME: 5, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + assert send_keys_hit_deferred.await_args.args == snapshot() + + # wrong key action + with ( + patch.object( + MockModuleConnection, "send_keys_hit_deferred" + ) as send_keys_hit_deferred, + pytest.raises(ValueError), + ): + await hass.services.async_call( + DOMAIN, + LcnService.SEND_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_KEYS: "a1a5d8", + CONF_STATE: "make", + CONF_TIME: 5, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_lock_keys( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test lock_keys service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "lock_keys") as lock_keys: + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_KEYS, + {CONF_ADDRESS: "pchk.s0.m7", CONF_TABLE: "a", CONF_STATE: "0011TT--"}, + blocking=True, + ) + + assert lock_keys.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_lock_keys_tab_a_temporary( + hass: 
HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test lock_keys (tab_a_temporary) service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + # success + with patch.object( + MockModuleConnection, "lock_keys_tab_a_temporary" + ) as lock_keys_tab_a_temporary: + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_STATE: "0011TT--", + CONF_TIME: 10, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + assert lock_keys_tab_a_temporary.await_args.args == snapshot() + + # wrong table + with ( + patch.object( + MockModuleConnection, "lock_keys_tab_a_temporary" + ) as lock_keys_tab_a_temporary, + pytest.raises(ValueError), + ): + await hass.services.async_call( + DOMAIN, + LcnService.LOCK_KEYS, + { + CONF_ADDRESS: "pchk.s0.m7", + CONF_TABLE: "b", + CONF_STATE: "0011TT--", + CONF_TIME: 10, + CONF_TIME_UNIT: "s", + }, + blocking=True, + ) + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_dyn_text( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test dyn_text service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "dyn_text") as dyn_text: + await hass.services.async_call( + DOMAIN, + LcnService.DYN_TEXT, + {CONF_ADDRESS: "pchk.s0.m7", CONF_ROW: 1, CONF_TEXT: "text in row 1"}, + blocking=True, + ) + + assert dyn_text.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_pck(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: + """Test pck service.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "pck") as pck: + await hass.services.async_call( + DOMAIN, + LcnService.PCK, + {CONF_ADDRESS: "pchk.s0.m7", CONF_PCK: "PIN4"}, + blocking=True, + ) + + assert pck.await_args.args == snapshot() + + +@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_called_with_invalid_host_id(hass: HomeAssistant) -> None: + """Test service was called with non existing host id.""" + await async_setup_component(hass, "persistent_notification", {}) + await setup_component(hass) + + with patch.object(MockModuleConnection, "pck") as pck, pytest.raises(ValueError): + await hass.services.async_call( + DOMAIN, + LcnService.PCK, + {CONF_ADDRESS: "foobar.s0.m7", CONF_PCK: "PIN4"}, + blocking=True, + ) + + pck.assert_not_awaited() From 0092796fd25536ebdcf55167c3bdc776419709da Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 6 Sep 2024 03:51:53 -0400 Subject: [PATCH 1492/1635] Add model ID to linkplay (#125370) --- homeassistant/components/linkplay/media_player.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 8b2fcf5d52f..b1fa0e2a5c5 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -28,7 +28,7 @@ from homeassistant.util.dt import utcnow from . 
import LinkPlayConfigEntry from .const import DOMAIN -from .utils import get_info_from_project +from .utils import MANUFACTURER_GENERIC, get_info_from_project _LOGGER = logging.getLogger(__name__) STATE_MAP: dict[PlayingStatus, MediaPlayerState] = { @@ -153,6 +153,9 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): ] manufacturer, model = get_info_from_project(bridge.device.properties["project"]) + if model != MANUFACTURER_GENERIC: + model_id = bridge.device.properties["project"] + self._attr_device_info = dr.DeviceInfo( configuration_url=bridge.endpoint, connections={(dr.CONNECTION_NETWORK_MAC, bridge.device.properties["MAC"])}, @@ -160,6 +163,7 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): identifiers={(DOMAIN, bridge.device.uuid)}, manufacturer=manufacturer, model=model, + model_id=model_id, name=bridge.device.name, sw_version=bridge.device.properties["firmware"], ) From 54c15e7e0a62fb1e5517a00f51df4d29ace1fdd8 Mon Sep 17 00:00:00 2001 From: TimL Date: Fri, 6 Sep 2024 18:20:12 +1000 Subject: [PATCH 1493/1635] Bump pysmlight to 0.0.14 (#125387) Bump pysmlight 0.0.14 for smlight --- homeassistant/components/smlight/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 72d915666e5..1a91b29234c 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pysmlight==0.0.13"], + "requirements": ["pysmlight==0.0.14"], "zeroconf": [ { "type": "_slzb-06._tcp.local." 
diff --git a/requirements_all.txt b/requirements_all.txt index 42fed6f5b5e..82cabd124d8 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2232,7 +2232,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.13 +pysmlight==0.0.14 # homeassistant.components.snmp pysnmp==6.2.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index d0f2b31e1eb..f0ac4294ada 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1786,7 +1786,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.13 +pysmlight==0.0.14 # homeassistant.components.snmp pysnmp==6.2.5 From 7752789c3abda110b864849c658a925c899bf7ee Mon Sep 17 00:00:00 2001 From: dontinelli <73341522+dontinelli@users.noreply.github.com> Date: Fri, 6 Sep 2024 10:23:30 +0200 Subject: [PATCH 1494/1635] Increase coordinator update_interval for fyta (#125393) * Increase update_interval * Update homeassistant/components/fyta/coordinator.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/fyta/coordinator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/homeassistant/components/fyta/coordinator.py b/homeassistant/components/fyta/coordinator.py index c92a96eed63..df607de76b0 100644 --- a/homeassistant/components/fyta/coordinator.py +++ b/homeassistant/components/fyta/coordinator.py @@ -39,7 +39,7 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): hass, _LOGGER, name="FYTA Coordinator", - update_interval=timedelta(seconds=60), + update_interval=timedelta(minutes=4), ) self.fyta = fyta From 1db68327f971166e3abdb96b78b9ac50c785f57a Mon Sep 17 00:00:00 2001 From: Sid <27780930+autinerd@users.noreply.github.com> Date: Fri, 6 Sep 2024 11:33:01 +0200 Subject: [PATCH 1495/1635] Enable Ruff PTH for the script directory (#124441) * Enable Ruff PTH for the script directory * Address review comments * Fix translations script * Update script/hassfest/config_flow.py Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare --- pyproject.toml | 5 ++++ script/gen_requirements_all.py | 12 ++++----- script/hassfest/__main__.py | 17 +++++------- script/hassfest/bluetooth.py | 18 +++++-------- script/hassfest/codeowners.py | 17 +++++------- script/hassfest/config_flow.py | 38 +++++++++++--------------- script/hassfest/dhcp.py | 18 +++++-------- script/hassfest/docker.py | 8 +++--- script/hassfest/metadata.py | 3 +-- script/hassfest/mqtt.py | 16 +++++------ script/hassfest/ssdp.py | 16 +++++------ script/hassfest/usb.py | 18 +++++-------- script/hassfest/zeroconf.py | 18 +++++-------- script/inspect_schemas.py | 4 +-- script/lint_and_test.py | 11 ++++---- script/split_tests.py | 2 +- script/translations/download.py | 48 +++++++++++++++------------------ script/version_bump.py | 19 +++++-------- 18 files changed, 125 insertions(+), 163 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e2d5e213811..787813bd64a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -734,6 +734,7 @@ select = [ "PIE", # flake8-pie "PL", # pylint "PT", # flake8-pytest-style + "PTH", # flake8-pathlib "PYI", # flake8-pyi "RET", # flake8-return "RSE", # flake8-raise @@ -905,5 +906,9 @@ split-on-trailing-comma = false "homeassistant/scripts/*" = ["T201"] "script/*" = ["T20"] +# Temporary +"homeassistant/**" = ["PTH"] +"tests/**" = ["PTH"] + [tool.ruff.lint.mccabe] max-complexity = 25 diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index b2165289ad8..47a6412bcfd 100755 --- 
a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -6,7 +6,6 @@ from __future__ import annotations import difflib import importlib from operator import itemgetter -import os from pathlib import Path import pkgutil import re @@ -82,8 +81,8 @@ URL_PIN = ( ) -CONSTRAINT_PATH = os.path.join( - os.path.dirname(__file__), "../homeassistant/package_constraints.txt" +CONSTRAINT_PATH = ( + Path(__file__).parent.parent / "homeassistant" / "package_constraints.txt" ) CONSTRAINT_BASE = """ # Constrain pycryptodome to avoid vulnerability @@ -256,8 +255,7 @@ def explore_module(package: str, explore_children: bool) -> list[str]: def core_requirements() -> list[str]: """Gather core requirements out of pyproject.toml.""" - with open("pyproject.toml", "rb") as fp: - data = tomllib.load(fp) + data = tomllib.loads(Path("pyproject.toml").read_text()) dependencies: list[str] = data["project"]["dependencies"] return dependencies @@ -528,7 +526,7 @@ def diff_file(filename: str, content: str) -> list[str]: def main(validate: bool, ci: bool) -> int: """Run the script.""" - if not os.path.isfile("requirements_all.txt"): + if not Path("requirements_all.txt").is_file(): print("Run this from HA root dir") return 1 @@ -590,7 +588,7 @@ def main(validate: bool, ci: bool) -> int: def _get_hassfest_config() -> Config: """Get hassfest config.""" return Config( - root=Path(".").absolute(), + root=Path().absolute(), specific_integrations=None, action="validate", requirements=True, diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index b48871b4651..f0b9ad25dd0 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -4,7 +4,7 @@ from __future__ import annotations import argparse from operator import attrgetter -import pathlib +from pathlib import Path import sys from time import monotonic @@ -63,9 +63,9 @@ ALL_PLUGIN_NAMES = [ ] -def valid_integration_path(integration_path: pathlib.Path | str) -> pathlib.Path: +def valid_integration_path(integration_path: Path | str) -> Path: """Test if it's a valid integration.""" - path = pathlib.Path(integration_path) + path = Path(integration_path) if not path.is_dir(): raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.") @@ -109,8 +109,8 @@ def get_config() -> Config: ) parser.add_argument( "--core-integrations-path", - type=pathlib.Path, - default=pathlib.Path("homeassistant/components"), + type=Path, + default=Path("homeassistant/components"), help="Path to core integrations", ) parsed = parser.parse_args() @@ -123,14 +123,11 @@ def get_config() -> Config: "Generate is not allowed when limiting to specific integrations" ) - if ( - not parsed.integration_path - and not pathlib.Path("requirements_all.txt").is_file() - ): + if not parsed.integration_path and not Path("requirements_all.txt").is_file(): raise RuntimeError("Run from Home Assistant root") return Config( - root=pathlib.Path(".").absolute(), + root=Path().absolute(), specific_integrations=parsed.integration_path, action=parsed.action, requirements=parsed.requirements, diff --git a/script/hassfest/bluetooth.py b/script/hassfest/bluetooth.py index 49480d1ed02..94f25588632 100644 --- a/script/hassfest/bluetooth.py +++ b/script/hassfest/bluetooth.py @@ -34,19 +34,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(bluetooth_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "bluetooth", - "File bluetooth.py is not up to date. 
Run python3 -m script.hassfest", - fixable=True, - ) - return + if bluetooth_path.read_text() != content: + config.add_error( + "bluetooth", + "File bluetooth.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate bluetooth file.""" bluetooth_path = config.root / "homeassistant/generated/bluetooth.py" - with open(str(bluetooth_path), "w") as fp: - fp.write(f"{config.cache['bluetooth']}") + bluetooth_path.write_text(f"{config.cache['bluetooth']}") diff --git a/script/hassfest/codeowners.py b/script/hassfest/codeowners.py index 04150836dd5..73ea8d02520 100644 --- a/script/hassfest/codeowners.py +++ b/script/hassfest/codeowners.py @@ -98,18 +98,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(codeowners_path)) as fp: - if fp.read().strip() != content: - config.add_error( - "codeowners", - "File CODEOWNERS is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if codeowners_path.read_text() != content + "\n": + config.add_error( + "codeowners", + "File CODEOWNERS is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate CODEOWNERS.""" codeowners_path = config.root / "CODEOWNERS" - with open(str(codeowners_path), "w") as fp: - fp.write(f"{config.cache['codeowners']}\n") + codeowners_path.write_text(f"{config.cache['codeowners']}\n") diff --git a/script/hassfest/config_flow.py b/script/hassfest/config_flow.py index 382e77bde74..83d406a0036 100644 --- a/script/hassfest/config_flow.py +++ b/script/hassfest/config_flow.py @@ -3,7 +3,6 @@ from __future__ import annotations import json -import pathlib from typing import Any from .brand import validate as validate_brands @@ -216,36 +215,31 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - brands = Brand.load_dir(pathlib.Path(config.root / "homeassistant/brands"), config) + brands = Brand.load_dir(config.root / "homeassistant/brands", config) validate_brands(brands, integrations, config) - with open(str(config_flow_path)) as fp: - if fp.read() != content: - config.add_error( - "config_flow", - "File config_flows.py is not up to date. " - "Run python3 -m script.hassfest", - fixable=True, - ) + if config_flow_path.read_text() != content: + config.add_error( + "config_flow", + "File config_flows.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) config.cache["integrations"] = content = _generate_integrations( brands, integrations, config ) - with open(str(integrations_path)) as fp: - if fp.read() != content + "\n": - config.add_error( - "config_flow", - "File integrations.json is not up to date. " - "Run python3 -m script.hassfest", - fixable=True, - ) + if integrations_path.read_text() != content + "\n": + config.add_error( + "config_flow", + "File integrations.json is not up to date. 
" + "Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate config flow file.""" config_flow_path = config.root / "homeassistant/generated/config_flows.py" integrations_path = config.root / "homeassistant/generated/integrations.json" - with open(str(config_flow_path), "w") as fp: - fp.write(f"{config.cache['config_flow']}") - with open(str(integrations_path), "w") as fp: - fp.write(f"{config.cache['integrations']}\n") + config_flow_path.write_text(f"{config.cache['config_flow']}") + integrations_path.write_text(f"{config.cache['integrations']}\n") diff --git a/script/hassfest/dhcp.py b/script/hassfest/dhcp.py index d1fd0474430..8a8f344f6cb 100644 --- a/script/hassfest/dhcp.py +++ b/script/hassfest/dhcp.py @@ -32,19 +32,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(dhcp_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "dhcp", - "File dhcp.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if dhcp_path.read_text() != content: + config.add_error( + "dhcp", + "File dhcp.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dhcp file.""" dhcp_path = config.root / "homeassistant/generated/dhcp.py" - with open(str(dhcp_path), "w") as fp: - fp.write(f"{config.cache['dhcp']}") + dhcp_path.write_text(f"{config.cache['dhcp']}") diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index bce77e1ece0..5809ea4afa0 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -103,9 +103,9 @@ LABEL "com.github.actions.color"="gray-dark" """ -def _get_package_versions(file: str, packages: set[str]) -> dict[str, str]: +def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]: package_versions: dict[str, str] = {} - with open(file, encoding="UTF-8") as fp: + with file.open(encoding="UTF-8") as fp: for _, line in enumerate(fp): if package_versions.keys() == packages: return package_versions @@ -173,10 +173,10 @@ def _generate_files(config: Config) -> list[File]: ) * 1000 package_versions = _get_package_versions( - "requirements_test.txt", {"pipdeptree", "tqdm", "uv"} + Path("requirements_test.txt"), {"pipdeptree", "tqdm", "uv"} ) package_versions |= _get_package_versions( - "requirements_test_pre_commit.txt", {"ruff"} + Path("requirements_test_pre_commit.txt"), {"ruff"} ) return [ diff --git a/script/hassfest/metadata.py b/script/hassfest/metadata.py index bd3ac4514e7..0768e875016 100644 --- a/script/hassfest/metadata.py +++ b/script/hassfest/metadata.py @@ -10,8 +10,7 @@ from .model import Config, Integration def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate project metadata keys.""" metadata_path = config.root / "pyproject.toml" - with open(metadata_path, "rb") as fp: - data = tomllib.load(fp) + data = tomllib.loads(metadata_path.read_text()) try: if data["project"]["version"] != __version__: diff --git a/script/hassfest/mqtt.py b/script/hassfest/mqtt.py index b2112d9bb6a..54ee65aaa35 100644 --- a/script/hassfest/mqtt.py +++ b/script/hassfest/mqtt.py @@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(mqtt_path)) as fp: - if fp.read() != content: - config.add_error( - "mqtt", - 
"File mqtt.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + if mqtt_path.read_text() != content: + config.add_error( + "mqtt", + "File mqtt.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate MQTT file.""" mqtt_path = config.root / "homeassistant/generated/mqtt.py" - with open(str(mqtt_path), "w") as fp: - fp.write(f"{config.cache['mqtt']}") + mqtt_path.write_text(f"{config.cache['mqtt']}") diff --git a/script/hassfest/ssdp.py b/script/hassfest/ssdp.py index 0a61284eb46..989b614e43d 100644 --- a/script/hassfest/ssdp.py +++ b/script/hassfest/ssdp.py @@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(ssdp_path)) as fp: - if fp.read() != content: - config.add_error( - "ssdp", - "File ssdp.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) + if ssdp_path.read_text() != content: + config.add_error( + "ssdp", + "File ssdp.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate ssdp file.""" ssdp_path = config.root / "homeassistant/generated/ssdp.py" - with open(str(ssdp_path), "w") as fp: - fp.write(f"{config.cache['ssdp']}") + ssdp_path.write_text(f"{config.cache['ssdp']}") diff --git a/script/hassfest/usb.py b/script/hassfest/usb.py index 84cafc973ad..c34f4fd1b62 100644 --- a/script/hassfest/usb.py +++ b/script/hassfest/usb.py @@ -35,19 +35,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(usb_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "usb", - "File usb.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if usb_path.read_text() != content: + config.add_error( + "usb", + "File usb.py is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate usb file.""" usb_path = config.root / "homeassistant/generated/usb.py" - with open(str(usb_path), "w") as fp: - fp.write(f"{config.cache['usb']}") + usb_path.write_text(f"{config.cache['usb']}") diff --git a/script/hassfest/zeroconf.py b/script/hassfest/zeroconf.py index 63f10fcf294..48fcc0a4589 100644 --- a/script/hassfest/zeroconf.py +++ b/script/hassfest/zeroconf.py @@ -90,19 +90,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None: if config.specific_integrations: return - with open(str(zeroconf_path)) as fp: - current = fp.read() - if current != content: - config.add_error( - "zeroconf", - "File zeroconf.py is not up to date. Run python3 -m script.hassfest", - fixable=True, - ) - return + if zeroconf_path.read_text() != content: + config.add_error( + "zeroconf", + "File zeroconf.py is not up to date. 
Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate zeroconf file.""" zeroconf_path = config.root / "homeassistant/generated/zeroconf.py" - with open(str(zeroconf_path), "w") as fp: - fp.write(f"{config.cache['zeroconf']}") + zeroconf_path.write_text(f"{config.cache['zeroconf']}") diff --git a/script/inspect_schemas.py b/script/inspect_schemas.py index a8ffe0afb60..fa6707e93b2 100755 --- a/script/inspect_schemas.py +++ b/script/inspect_schemas.py @@ -2,7 +2,7 @@ """Inspect all component SCHEMAS.""" import importlib -import os +from pathlib import Path import pkgutil from homeassistant.config import _identify_config_schema @@ -20,7 +20,7 @@ def explore_module(package): def main(): """Run the script.""" - if not os.path.isfile("requirements_all.txt"): + if not Path("requirements_all.txt").is_file(): print("Run this from HA root dir") return diff --git a/script/lint_and_test.py b/script/lint_and_test.py index ff3db8aa1ed..fb350c113b9 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -9,6 +9,7 @@ from collections import namedtuple from contextlib import suppress import itertools import os +from pathlib import Path import re import shlex import sys @@ -63,7 +64,7 @@ async def async_exec(*args, display=False): """Execute, return code & log.""" argsp = [] for arg in args: - if os.path.isfile(arg): + if Path(arg).is_file(): argsp.append(f"\\\n {shlex.quote(arg)}") else: argsp.append(shlex.quote(arg)) @@ -132,7 +133,7 @@ async def ruff(files): async def lint(files): """Perform lint.""" - files = [file for file in files if os.path.isfile(file)] + files = [file for file in files if Path(file).is_file()] res = sorted( itertools.chain( *await asyncio.gather( @@ -164,7 +165,7 @@ async def lint(files): async def main(): """Run the main loop.""" # Ensure we are in the homeassistant root - os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))) + os.chdir(Path(__file__).parent.parent) files = await git() if not files: @@ -194,7 +195,7 @@ async def main(): gen_req = True # requirements script for components # Find test files... 
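# (A rough sketch of the mapping performed just below, using a hypothetical
#  path for illustration: a changed integration file such as
#  "homeassistant/components/demo/light.py" is turned into the name of its
#  test module by prefixing the filename with "test_",
#      parts = fname.split("/")
#      parts[-1] = f"test_{parts[-1]}"
#      fname = "/".join(parts)   # -> ".../demo/test_light.py"
#  and the result is added to test_files only if Path(fname).is_file()
#  confirms that the test module actually exists on disk.)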
if fname.startswith("tests/"): - if "/test_" in fname and os.path.isfile(fname): + if "/test_" in fname and Path(fname).is_file(): # All test helpers should be excluded test_files.add(fname) else: @@ -207,7 +208,7 @@ async def main(): else: parts[-1] = f"test_{parts[-1]}" fname = "/".join(parts) - if os.path.isfile(fname): + if Path(fname).is_file(): test_files.add(fname) if gen_req: diff --git a/script/split_tests.py b/script/split_tests.py index 8da03bd749b..e124f722552 100755 --- a/script/split_tests.py +++ b/script/split_tests.py @@ -66,7 +66,7 @@ class BucketHolder: def create_ouput_file(self) -> None: """Create output file.""" - with open("pytest_buckets.txt", "w") as file: + with Path("pytest_buckets.txt").open("w") as file: for idx, bucket in enumerate(self._buckets): print(f"Bucket {idx+1} has {bucket.total_tests} tests") file.write(bucket.get_paths_line()) diff --git a/script/translations/download.py b/script/translations/download.py index 8f7327c07ec..756de46fb61 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -4,8 +4,7 @@ from __future__ import annotations import json -import os -import pathlib +from pathlib import Path import re import subprocess @@ -14,7 +13,7 @@ from .error import ExitApp from .util import get_lokalise_token, load_json_from_path FILENAME_FORMAT = re.compile(r"strings\.(?P\w+)\.json") -DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute() +DOWNLOAD_DIR = Path("build/translations-download").absolute() def run_download_docker(): @@ -56,35 +55,32 @@ def run_download_docker(): raise ExitApp("Failed to download translations") -def save_json(filename: str, data: list | dict): - """Save JSON data to a file. - - Returns True on success. - """ - data = json.dumps(data, sort_keys=True, indent=4) - with open(filename, "w", encoding="utf-8") as fdesc: - fdesc.write(data) - return True - return False +def save_json(filename: Path, data: list | dict) -> None: + """Save JSON data to a file.""" + filename.write_text(json.dumps(data, sort_keys=True, indent=4), encoding="utf-8") -def get_component_path(lang, component): +def get_component_path(lang, component) -> Path | None: """Get the component translation path.""" - if os.path.isdir(os.path.join("homeassistant", "components", component)): - return os.path.join( - "homeassistant", "components", component, "translations", f"{lang}.json" + if (Path("homeassistant") / "components" / component).is_dir(): + return ( + Path("homeassistant") + / "components" + / component + / "translations" + / f"{lang}.json" ) return None -def get_platform_path(lang, component, platform): +def get_platform_path(lang, component, platform) -> Path: """Get the platform translation path.""" - return os.path.join( - "homeassistant", - "components", - component, - "translations", - f"{platform}.{lang}.json", + return ( + Path("homeassistant") + / "components" + / component + / "translations" + / f"{platform}.{lang}.json" ) @@ -107,7 +103,7 @@ def save_language_translations(lang, translations): f"Skipping {lang} for {component}, as the integration doesn't seem to exist." 
) continue - os.makedirs(os.path.dirname(path), exist_ok=True) + path.parent.mkdir(parents=True, exist_ok=True) save_json(path, base_translations) if "platform" not in component_translations: @@ -117,7 +113,7 @@ def save_language_translations(lang, translations): "platform" ].items(): path = get_platform_path(lang, component, platform) - os.makedirs(os.path.dirname(path), exist_ok=True) + path.parent.mkdir(parents=True, exist_ok=True) save_json(path, platform_translations) diff --git a/script/version_bump.py b/script/version_bump.py index fb4fe2f7868..ff94c01a5a2 100755 --- a/script/version_bump.py +++ b/script/version_bump.py @@ -2,6 +2,7 @@ """Helper script to bump the current version.""" import argparse +from pathlib import Path import re import subprocess @@ -110,8 +111,7 @@ def bump_version( def write_version(version): """Update Home Assistant constant file with new version.""" - with open("homeassistant/const.py") as fil: - content = fil.read() + content = Path("homeassistant/const.py").read_text() major, minor, patch = str(version).split(".", 2) @@ -125,25 +125,21 @@ def write_version(version): "PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content ) - with open("homeassistant/const.py", "w") as fil: - fil.write(content) + Path("homeassistant/const.py").write_text(content) def write_version_metadata(version: Version) -> None: """Update pyproject.toml file with new version.""" - with open("pyproject.toml", encoding="utf8") as fp: - content = fp.read() + content = Path("pyproject.toml").read_text(encoding="utf8") content = re.sub(r"(version\W+=\W).+\n", f'\\g<1>"{version}"\n', content, count=1) - with open("pyproject.toml", "w", encoding="utf8") as fp: - fp.write(content) + Path("pyproject.toml").write_text(content, encoding="utf8") def write_ci_workflow(version: Version) -> None: """Update ci workflow with new version.""" - with open(".github/workflows/ci.yaml") as fp: - content = fp.read() + content = Path(".github/workflows/ci.yaml").read_text() short_version = ".".join(str(version).split(".", maxsplit=2)[:2]) content = re.sub( @@ -153,8 +149,7 @@ def write_ci_workflow(version: Version) -> None: count=1, ) - with open(".github/workflows/ci.yaml", "w") as fp: - fp.write(content) + Path(".github/workflows/ci.yaml").write_text(content) def main() -> None: From 84dcfb6ddc79e353e620ef1925a5258ab5ec9d88 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 11:45:13 +0200 Subject: [PATCH 1496/1635] Replace SW version by model ID in renault device info (#125399) * Replace SW_VERSION by MODEL_ID in renault device info * Simplify PR * Fix tests --- .../components/renault/renault_hub.py | 4 +-- .../components/renault/renault_vehicle.py | 2 +- tests/components/renault/__init__.py | 4 +-- tests/components/renault/const.py | 10 +++--- .../renault/snapshots/test_binary_sensor.ambr | 32 +++++++++---------- .../renault/snapshots/test_button.ambr | 32 +++++++++---------- .../snapshots/test_device_tracker.ambr | 32 +++++++++---------- .../renault/snapshots/test_select.ambr | 32 +++++++++---------- .../renault/snapshots/test_sensor.ambr | 32 +++++++++---------- tests/components/renault/test_services.py | 4 +-- 10 files changed, 92 insertions(+), 92 deletions(-) diff --git a/homeassistant/components/renault/renault_hub.py b/homeassistant/components/renault/renault_hub.py index 97a9d080b86..76b197b2aaf 100644 --- a/homeassistant/components/renault/renault_hub.py +++ b/homeassistant/components/renault/renault_hub.py @@ -16,8 +16,8 @@ 
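# (Context for the hunks below: the vehicle's Renault model code, e.g. "X101VE"
#  for the Zoe fixture, was previously registered as the device's sw_version;
#  this patch moves it to the dedicated model_id field of the device registry
#  entry, which is why the snapshot fixtures further down swap 'sw_version'
#  for 'model_id'.)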
from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, - ATTR_SW_VERSION, ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady @@ -106,7 +106,7 @@ class RenaultHub: manufacturer=vehicle.device_info[ATTR_MANUFACTURER], name=vehicle.device_info[ATTR_NAME], model=vehicle.device_info[ATTR_MODEL], - sw_version=vehicle.device_info[ATTR_SW_VERSION], + model_id=vehicle.device_info[ATTR_MODEL_ID], ) self._vehicles[vehicle_link.vin] = vehicle diff --git a/homeassistant/components/renault/renault_vehicle.py b/homeassistant/components/renault/renault_vehicle.py index d5c4f78126c..b77442c8331 100644 --- a/homeassistant/components/renault/renault_vehicle.py +++ b/homeassistant/components/renault/renault_vehicle.py @@ -76,8 +76,8 @@ class RenaultVehicleProxy: identifiers={(DOMAIN, cast(str, details.vin))}, manufacturer=(details.get_brand_label() or "").capitalize(), model=(details.get_model_label() or "").capitalize(), + model_id=(details.get_model_code() or ""), name=details.registrationNumber or "", - sw_version=details.get_model_code() or "", ) self.coordinators: dict[str, RenaultDataUpdateCoordinator] = {} self.hvac_target_temperature = 21 diff --git a/tests/components/renault/__init__.py b/tests/components/renault/__init__.py index 86fddfd5bac..a7c6b314ccb 100644 --- a/tests/components/renault/__init__.py +++ b/tests/components/renault/__init__.py @@ -10,9 +10,9 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, ATTR_STATE, - ATTR_SW_VERSION, STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant @@ -46,7 +46,7 @@ def check_device_registry( assert registry_entry.manufacturer == expected_device[ATTR_MANUFACTURER] assert registry_entry.name == expected_device[ATTR_NAME] assert registry_entry.model == expected_device[ATTR_MODEL] - assert registry_entry.sw_version == expected_device[ATTR_SW_VERSION] + assert registry_entry.model_id == expected_device[ATTR_MODEL_ID] def check_entities( diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index 19c40f6ec20..2d0263e40de 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -19,9 +19,9 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, ATTR_STATE, - ATTR_SW_VERSION, ATTR_UNIT_OF_MEASUREMENT, CONF_PASSWORD, CONF_USERNAME, @@ -74,7 +74,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Zoe", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "X101VE", + ATTR_MODEL_ID: "X101VE", }, "endpoints": { "battery_status": "battery_status_charging.json", @@ -269,7 +269,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Zoe", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "X102VE", + ATTR_MODEL_ID: "X102VE", }, "endpoints": { "battery_status": "battery_status_not_charging.json", @@ -517,7 +517,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Captur ii", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "XJB1SU", + ATTR_MODEL_ID: "XJB1SU", }, "endpoints": { "battery_status": "battery_status_charging.json", @@ -755,7 +755,7 @@ MOCK_VEHICLES = { ATTR_MANUFACTURER: "Renault", ATTR_MODEL: "Captur ii", ATTR_NAME: "REG-NUMBER", - ATTR_SW_VERSION: "XJB1SU", + ATTR_MODEL_ID: "XJB1SU", }, "endpoints": { "cockpit": "cockpit_fuel.json", diff --git a/tests/components/renault/snapshots/test_binary_sensor.ambr 
b/tests/components/renault/snapshots/test_binary_sensor.ambr index 9dac0c323ce..7142608b977 100644 --- a/tests/components/renault/snapshots/test_binary_sensor.ambr +++ b/tests/components/renault/snapshots/test_binary_sensor.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -322,13 +322,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -708,13 +708,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -878,13 +878,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1306,13 +1306,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1606,13 +1606,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1992,13 +1992,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -2162,13 +2162,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_button.ambr b/tests/components/renault/snapshots/test_button.ambr index c4732ad1458..e61255372c1 100644 --- a/tests/components/renault/snapshots/test_button.ambr +++ b/tests/components/renault/snapshots/test_button.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -106,13 +106,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 
'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -274,13 +274,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -442,13 +442,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -610,13 +610,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -694,13 +694,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -862,13 +862,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1030,13 +1030,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_device_tracker.ambr b/tests/components/renault/snapshots/test_device_tracker.ambr index 5e7813316a2..f90cb92cc63 100644 --- a/tests/components/renault/snapshots/test_device_tracker.ambr +++ b/tests/components/renault/snapshots/test_device_tracker.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -107,13 +107,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -192,13 +192,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -234,13 +234,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -319,13 +319,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur 
ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -407,13 +407,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -495,13 +495,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -537,13 +537,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_select.ambr b/tests/components/renault/snapshots/test_select.ambr index ccdc76f0130..9974e21be75 100644 --- a/tests/components/renault/snapshots/test_select.ambr +++ b/tests/components/renault/snapshots/test_select.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -64,13 +64,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -161,13 +161,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -258,13 +258,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -355,13 +355,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -397,13 +397,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -494,13 +494,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 
'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -591,13 +591,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/snapshots/test_sensor.ambr b/tests/components/renault/snapshots/test_sensor.ambr index e4bb2d74297..80e73347b07 100644 --- a/tests/components/renault/snapshots/test_sensor.ambr +++ b/tests/components/renault/snapshots/test_sensor.ambr @@ -22,13 +22,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -332,13 +332,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1087,13 +1087,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -1838,13 +1838,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -2632,13 +2632,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -2942,13 +2942,13 @@ }), 'manufacturer': 'Renault', 'model': 'Captur ii', - 'model_id': None, + 'model_id': 'XJB1SU', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'XJB1SU', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -3697,13 +3697,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X101VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X101VE', + 'sw_version': None, 'via_device_id': None, }), ]) @@ -4448,13 +4448,13 @@ }), 'manufacturer': 'Renault', 'model': 'Zoe', - 'model_id': None, + 'model_id': 'X102VE', 'name': 'REG-NUMBER', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'X102VE', + 'sw_version': None, 'via_device_id': None, }), ]) diff --git a/tests/components/renault/test_services.py b/tests/components/renault/test_services.py index 831204c59b4..aadeec60ebf 100644 --- a/tests/components/renault/test_services.py +++ b/tests/components/renault/test_services.py @@ -25,8 +25,8 @@ from homeassistant.const import ( ATTR_IDENTIFIERS, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, - ATTR_SW_VERSION, ) from 
homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -268,7 +268,7 @@ async def test_service_invalid_device_id2( manufacturer=extra_vehicle[ATTR_MANUFACTURER], name=extra_vehicle[ATTR_NAME], model=extra_vehicle[ATTR_MODEL], - sw_version=extra_vehicle[ATTR_SW_VERSION], + model_id=extra_vehicle[ATTR_MODEL_ID], ) device_id = device_registry.async_get_device( identifiers=extra_vehicle[ATTR_IDENTIFIERS] From ccbc300b6819496aca7274aebc7eacd2b43d6f02 Mon Sep 17 00:00:00 2001 From: Ryan Mattson Date: Fri, 6 Sep 2024 04:45:39 -0500 Subject: [PATCH 1497/1635] Lyric: fixed missed snake case conversions (#125382) fixed missed snake case conversions --- homeassistant/components/lyric/climate.py | 26 +++++++++++------------ 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/lyric/climate.py b/homeassistant/components/lyric/climate.py index 1c459c2c66a..bd9cf4997eb 100644 --- a/homeassistant/components/lyric/climate.py +++ b/homeassistant/components/lyric/climate.py @@ -358,8 +358,8 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): await self._update_thermostat( self.location, device, - coolSetpoint=target_temp_high, - heatSetpoint=target_temp_low, + cool_setpoint=target_temp_high, + heat_setpoint=target_temp_low, mode=mode, ) except LYRIC_EXCEPTIONS as exception: @@ -371,11 +371,11 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): try: if self.hvac_mode == HVACMode.COOL: await self._update_thermostat( - self.location, device, coolSetpoint=temp + self.location, device, cool_setpoint=temp ) else: await self._update_thermostat( - self.location, device, heatSetpoint=temp + self.location, device, heat_setpoint=temp ) except LYRIC_EXCEPTIONS as exception: _LOGGER.error(exception) @@ -410,7 +410,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=HVAC_MODES[LYRIC_HVAC_MODE_HEAT], - autoChangeoverActive=False, + auto_changeover_active=False, ) # Sleep 3 seconds before proceeding await asyncio.sleep(3) @@ -422,7 +422,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=HVAC_MODES[LYRIC_HVAC_MODE_HEAT], - autoChangeoverActive=True, + auto_changeover_active=True, ) else: _LOGGER.debug( @@ -430,7 +430,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): HVAC_MODES[self.device.changeable_values.mode], ) await self._update_thermostat( - self.location, self.device, autoChangeoverActive=True + self.location, self.device, auto_changeover_active=True ) else: _LOGGER.debug("HVAC mode passed to lyric: %s", LYRIC_HVAC_MODES[hvac_mode]) @@ -438,13 +438,13 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=LYRIC_HVAC_MODES[hvac_mode], - autoChangeoverActive=False, + auto_changeover_active=False, ) async def _async_set_hvac_mode_lcc(self, hvac_mode: HVACMode) -> None: """Set hvac mode for LCC devices (e.g., T5,6).""" _LOGGER.debug("HVAC mode passed to lyric: %s", LYRIC_HVAC_MODES[hvac_mode]) - # Set autoChangeoverActive to True if the mode being passed is Auto + # Set auto_changeover_active to True if the mode being passed is Auto # otherwise leave unchanged. 
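# (For reference, the camelCase -> snake_case keyword renames completed by this
#  patch, all visible in the surrounding hunks: coolSetpoint -> cool_setpoint,
#  heatSetpoint -> heat_setpoint, autoChangeoverActive -> auto_changeover_active,
#  thermostatSetpointStatus -> thermostat_setpoint_status and
#  nextPeriodTime -> next_period_time.)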
if ( LYRIC_HVAC_MODES[hvac_mode] == LYRIC_HVAC_MODE_HEAT_COOL @@ -458,7 +458,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self.location, self.device, mode=LYRIC_HVAC_MODES[hvac_mode], - autoChangeoverActive=auto_changeover, + auto_changeover_active=auto_changeover, ) async def async_set_preset_mode(self, preset_mode: str) -> None: @@ -466,7 +466,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): _LOGGER.debug("Set preset mode: %s", preset_mode) try: await self._update_thermostat( - self.location, self.device, thermostatSetpointStatus=preset_mode + self.location, self.device, thermostat_setpoint_status=preset_mode ) except LYRIC_EXCEPTIONS as exception: _LOGGER.error(exception) @@ -479,8 +479,8 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): await self._update_thermostat( self.location, self.device, - thermostatSetpointStatus=PRESET_HOLD_UNTIL, - nextPeriodTime=time_period, + thermostat_setpoint_status=PRESET_HOLD_UNTIL, + next_period_time=time_period, ) except LYRIC_EXCEPTIONS as exception: _LOGGER.error(exception) From ff20131af1d4e7c329c91dd28c1fad56257fb05a Mon Sep 17 00:00:00 2001 From: Shay Levy Date: Fri, 6 Sep 2024 12:49:10 +0300 Subject: [PATCH 1498/1635] Use smlight discovery hostname as device name (#125359) * Use smlight discovery hostname as device name * Update reauth flow name * Drop host from description --- homeassistant/components/smlight/config_flow.py | 10 ++++------ homeassistant/components/smlight/strings.json | 2 +- tests/components/smlight/test_config_flow.py | 6 +++--- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py index 98da153ce75..e8984300ff1 100644 --- a/homeassistant/components/smlight/config_flow.py +++ b/homeassistant/components/smlight/config_flow.py @@ -139,10 +139,6 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): self.context["entry_id"] ) host = entry_data[CONF_HOST] - self.context["title_placeholders"] = { - "host": host, - "name": entry_data.get(CONF_USERNAME, "unknown"), - } self.client = Api2(host, session=async_get_clientsession(self.hass)) self.host = host @@ -166,7 +162,8 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): assert self._reauth_entry is not None return self.async_update_reload_and_abort( - self._reauth_entry, data={**user_input, CONF_HOST: self.host} + self._reauth_entry, + data={**self._reauth_entry.data, **user_input}, ) return self.async_show_form( @@ -197,4 +194,5 @@ class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): user_input[CONF_HOST] = self.host assert info.model is not None - return self.async_create_entry(title=info.model, data=user_input) + title = self.context.get("title_placeholders", {}).get(CONF_NAME) or info.model + return self.async_create_entry(title=title, data=user_input) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index 7e17a53a38a..bca42f642b7 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -19,7 +19,7 @@ }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "Please enter the correct username and password for host: {host}", + "description": "Please enter the correct username and password", "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]" diff --git a/tests/components/smlight/test_config_flow.py 
b/tests/components/smlight/test_config_flow.py index fb07e29edd4..dae727c7a29 100644 --- a/tests/components/smlight/test_config_flow.py +++ b/tests/components/smlight/test_config_flow.py @@ -91,7 +91,7 @@ async def test_zeroconf_flow( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["context"]["source"] == "zeroconf" assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result2["title"] == "SLZB-06p7" + assert result2["title"] == "slzb-06" assert result2["data"] == { CONF_HOST: MOCK_HOST, } @@ -143,7 +143,7 @@ async def test_zeroconf_flow_auth( assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["context"]["source"] == "zeroconf" assert result3["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result3["title"] == "SLZB-06p7" + assert result3["title"] == "slzb-06" assert result3["data"] == { CONF_USERNAME: MOCK_USERNAME, CONF_PASSWORD: MOCK_PASSWORD, @@ -356,7 +356,7 @@ async def test_zeroconf_legacy_mac( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["context"]["source"] == "zeroconf" assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" - assert result2["title"] == "SLZB-06p7" + assert result2["title"] == "slzb-06" assert result2["data"] == { CONF_HOST: MOCK_HOST, } From dfcfe7873208fcea3d03f083e332a793d686e151 Mon Sep 17 00:00:00 2001 From: jesperraemaekers <146726232+jesperraemaekers@users.noreply.github.com> Date: Fri, 6 Sep 2024 11:58:01 +0200 Subject: [PATCH 1499/1635] Add weheat core integration (#123057) * Add empty weheat integration * Add first sensor to weheat integration * Add weheat entity to provide device information * Fixed automatic selection for a single heat pump * Replaced integration specific package and removed status sensor * Update const.py * Add reauthentication support for weheat integration * Add test cases for the config flow of the weheat integration * Changed API and OAuth URL to weheat production environment * Add empty weheat integration * Add first sensor to weheat integration * Add weheat entity to provide device information * Fixed automatic selection for a single heat pump * Replaced integration specific package and removed status sensor * Add reauthentication support for weheat integration * Update const.py * Add test cases for the config flow of the weheat integration * Changed API and OAuth URL to weheat production environment * Resolved merge conflict after adding weheat package * Apply suggestions from code review Co-authored-by: Joost Lekkerkerker * Added translation keys, more type info and version bump the weheat package * Adding native property value for weheat sensor * Removed reauth, added weheat sensor description and changed discovery of heat pumps * Added unique ID of user to entity * Replaced string by constants, added test case for duplicate unique id * Removed duplicate constant * Added offline scope * Removed re-auth related code * Simplified OAuth implementation * Cleanup tests for weheat integration * Added OAuth scope to tests --------- Co-authored-by: kjell-van-straaten Co-authored-by: Joost Lekkerkerker --- CODEOWNERS | 2 + homeassistant/components/weheat/__init__.py | 49 +++++++ homeassistant/components/weheat/api.py | 29 ++++ .../weheat/application_credentials.py | 11 ++ .../components/weheat/config_flow.py | 40 +++++ homeassistant/components/weheat/const.py | 25 ++++ .../components/weheat/coordinator.py | 84 +++++++++++ homeassistant/components/weheat/entity.py | 27 ++++ homeassistant/components/weheat/icons.json | 15 ++
homeassistant/components/weheat/manifest.json | 10 ++ homeassistant/components/weheat/sensor.py | 95 ++++++++++++ homeassistant/components/weheat/strings.json | 46 ++++++ .../generated/application_credentials.py | 1 + homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 6 + requirements_all.txt | 3 + requirements_test_all.txt | 3 + tests/components/weheat/__init__.py | 1 + tests/components/weheat/conftest.py | 36 +++++ tests/components/weheat/const.py | 11 ++ tests/components/weheat/test_config_flow.py | 137 ++++++++++++++++++ 21 files changed, 632 insertions(+) create mode 100644 homeassistant/components/weheat/__init__.py create mode 100644 homeassistant/components/weheat/api.py create mode 100644 homeassistant/components/weheat/application_credentials.py create mode 100644 homeassistant/components/weheat/config_flow.py create mode 100644 homeassistant/components/weheat/const.py create mode 100644 homeassistant/components/weheat/coordinator.py create mode 100644 homeassistant/components/weheat/entity.py create mode 100644 homeassistant/components/weheat/icons.json create mode 100644 homeassistant/components/weheat/manifest.json create mode 100644 homeassistant/components/weheat/sensor.py create mode 100644 homeassistant/components/weheat/strings.json create mode 100644 tests/components/weheat/__init__.py create mode 100644 tests/components/weheat/conftest.py create mode 100644 tests/components/weheat/const.py create mode 100644 tests/components/weheat/test_config_flow.py diff --git a/CODEOWNERS b/CODEOWNERS index d2a60cbb246..92beb8946ba 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1640,6 +1640,8 @@ build.json @home-assistant/supervisor /tests/components/webostv/ @thecode /homeassistant/components/websocket_api/ @home-assistant/core /tests/components/websocket_api/ @home-assistant/core +/homeassistant/components/weheat/ @jesperraemaekers +/tests/components/weheat/ @jesperraemaekers /homeassistant/components/wemo/ @esev /tests/components/wemo/ @esev /homeassistant/components/whirlpool/ @abmantis @mkmer diff --git a/homeassistant/components/weheat/__init__.py b/homeassistant/components/weheat/__init__.py new file mode 100644 index 00000000000..4800046926d --- /dev/null +++ b/homeassistant/components/weheat/__init__.py @@ -0,0 +1,49 @@ +"""The Weheat integration.""" + +from __future__ import annotations + +from weheat.abstractions.discovery import HeatPumpDiscovery + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ACCESS_TOKEN, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers.config_entry_oauth2_flow import ( + OAuth2Session, + async_get_config_entry_implementation, +) + +from .const import API_URL, LOGGER +from .coordinator import WeheatDataUpdateCoordinator + +PLATFORMS: list[Platform] = [Platform.SENSOR] + +type WeheatConfigEntry = ConfigEntry[list[WeheatDataUpdateCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: WeheatConfigEntry) -> bool: + """Set up Weheat from a config entry.""" + implementation = await async_get_config_entry_implementation(hass, entry) + + session = OAuth2Session(hass, entry, implementation) + + token = session.token[CONF_ACCESS_TOKEN] + entry.runtime_data = [] + + # fetch a list of the heat pumps the entry can access + for pump_info in await HeatPumpDiscovery.discover_active(API_URL, token): + LOGGER.debug("Adding %s", pump_info) + # for each pump, add a coordinator + new_coordinator = WeheatDataUpdateCoordinator(hass, session, 
pump_info) + + await new_coordinator.async_config_entry_first_refresh() + + entry.runtime_data.append(new_coordinator) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: WeheatConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/weheat/api.py b/homeassistant/components/weheat/api.py new file mode 100644 index 00000000000..1d0828aa41b --- /dev/null +++ b/homeassistant/components/weheat/api.py @@ -0,0 +1,29 @@ +"""API for Weheat bound to Home Assistant OAuth.""" + +from aiohttp import ClientSession +from weheat.abstractions import AbstractAuth + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session + +from .const import API_URL + + +class AsyncConfigEntryAuth(AbstractAuth): + """Provide Weheat authentication tied to an OAuth2 based config entry.""" + + def __init__( + self, + websession: ClientSession, + oauth_session: OAuth2Session, + ) -> None: + """Initialize Weheat auth.""" + super().__init__(websession, host=API_URL) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Return a valid access token.""" + if not self._oauth_session.valid_token: + await self._oauth_session.async_ensure_token_valid() + + return self._oauth_session.token[CONF_ACCESS_TOKEN] diff --git a/homeassistant/components/weheat/application_credentials.py b/homeassistant/components/weheat/application_credentials.py new file mode 100644 index 00000000000..3f85d4b0558 --- /dev/null +++ b/homeassistant/components/weheat/application_credentials.py @@ -0,0 +1,11 @@ +"""application_credentials platform the Weheat integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + """Return authorization server.""" + return AuthorizationServer(authorize_url=OAUTH2_AUTHORIZE, token_url=OAUTH2_TOKEN) diff --git a/homeassistant/components/weheat/config_flow.py b/homeassistant/components/weheat/config_flow.py new file mode 100644 index 00000000000..707c2f6bc97 --- /dev/null +++ b/homeassistant/components/weheat/config_flow.py @@ -0,0 +1,40 @@ +"""Config flow for Weheat.""" + +import logging + +from weheat.abstractions.user import get_user_id_from_token + +from homeassistant.config_entries import ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN +from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler + +from .const import API_URL, DOMAIN, ENTRY_TITLE, OAUTH2_SCOPES + + +class OAuth2FlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): + """Config flow to handle Weheat OAuth2 authentication.""" + + DOMAIN = DOMAIN + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return logging.getLogger(__name__) + + @property + def extra_authorize_data(self) -> dict[str, str]: + """Extra data that needs to be appended to the authorize url.""" + return { + "scope": " ".join(OAUTH2_SCOPES), + } + + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: + """Override the create entry method to change to the step to find the heat pumps.""" + # get the user id and use that as unique id for this entry + 
user_id = await get_user_id_from_token( + API_URL, data[CONF_TOKEN][CONF_ACCESS_TOKEN] + ) + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_configured() + + return self.async_create_entry(title=ENTRY_TITLE, data=data) diff --git a/homeassistant/components/weheat/const.py b/homeassistant/components/weheat/const.py new file mode 100644 index 00000000000..fa1b17f8c07 --- /dev/null +++ b/homeassistant/components/weheat/const.py @@ -0,0 +1,25 @@ +"""Constants for the Weheat integration.""" + +from logging import Logger, getLogger + +DOMAIN = "weheat" +MANUFACTURER = "Weheat" +ENTRY_TITLE = "Weheat cloud" +ERROR_DESCRIPTION = "error_description" + +OAUTH2_AUTHORIZE = ( + "https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/auth/" +) +OAUTH2_TOKEN = ( + "https://auth.weheat.nl/auth/realms/Weheat/protocol/openid-connect/token/" +) +API_URL = "https://api.weheat.nl" +OAUTH2_SCOPES = ["openid", "offline_access"] + + +UPDATE_INTERVAL = 30 + +LOGGER: Logger = getLogger(__package__) + +DISPLAY_PRECISION_WATTS = 0 +DISPLAY_PRECISION_COP = 1 diff --git a/homeassistant/components/weheat/coordinator.py b/homeassistant/components/weheat/coordinator.py new file mode 100644 index 00000000000..92c12990371 --- /dev/null +++ b/homeassistant/components/weheat/coordinator.py @@ -0,0 +1,84 @@ +"""Define a custom coordinator for the Weheat heatpump integration.""" + +from datetime import timedelta + +from weheat.abstractions.discovery import HeatPumpDiscovery +from weheat.abstractions.heat_pump import HeatPump +from weheat.exceptions import ( + ApiException, + BadRequestException, + ForbiddenException, + NotFoundException, + ServiceException, + UnauthorizedException, +) + +from homeassistant.const import CONF_ACCESS_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import API_URL, DOMAIN, LOGGER, UPDATE_INTERVAL + +EXCEPTIONS = ( + ServiceException, + NotFoundException, + ForbiddenException, + UnauthorizedException, + BadRequestException, + ApiException, +) + + +class WeheatDataUpdateCoordinator(DataUpdateCoordinator[HeatPump]): + """A custom coordinator for the Weheat heatpump integration.""" + + def __init__( + self, + hass: HomeAssistant, + session: OAuth2Session, + heat_pump: HeatPumpDiscovery.HeatPumpInfo, + ) -> None: + """Initialize the data coordinator.""" + super().__init__( + hass, + logger=LOGGER, + name=DOMAIN, + update_interval=timedelta(seconds=UPDATE_INTERVAL), + ) + self._heat_pump_info = heat_pump + self._heat_pump_data = HeatPump(API_URL, self._heat_pump_info.uuid) + + self.session = session + + @property + def heatpump_id(self) -> str: + """Return the heat pump id.""" + return self._heat_pump_info.uuid + + @property + def readable_name(self) -> str | None: + """Return the readable name of the heat pump.""" + if self._heat_pump_info.name: + return self._heat_pump_info.name + return self._heat_pump_info.model + + @property + def model(self) -> str: + """Return the model of the heat pump.""" + return self._heat_pump_info.model + + def fetch_data(self) -> HeatPump: + """Get the data from the API.""" + try: + self._heat_pump_data.get_status(self.session.token[CONF_ACCESS_TOKEN]) + except EXCEPTIONS as error: + raise UpdateFailed(error) from error + + return self._heat_pump_data + + async def _async_update_data(self) -> HeatPump: + """Fetch data from the API.""" + await 
self.session.async_ensure_token_valid() + + return await self.hass.async_add_executor_job(self.fetch_data) diff --git a/homeassistant/components/weheat/entity.py b/homeassistant/components/weheat/entity.py new file mode 100644 index 00000000000..079db596e19 --- /dev/null +++ b/homeassistant/components/weheat/entity.py @@ -0,0 +1,27 @@ +"""Base entity for Weheat.""" + +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN, MANUFACTURER +from .coordinator import WeheatDataUpdateCoordinator + + +class WeheatEntity(CoordinatorEntity[WeheatDataUpdateCoordinator]): + """Defines a base Weheat entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: WeheatDataUpdateCoordinator, + ) -> None: + """Initialize the Weheat entity.""" + super().__init__(coordinator) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, coordinator.heatpump_id)}, + name=coordinator.readable_name, + manufacturer=MANUFACTURER, + model=coordinator.model, + ) diff --git a/homeassistant/components/weheat/icons.json b/homeassistant/components/weheat/icons.json new file mode 100644 index 00000000000..b1eaf481bfa --- /dev/null +++ b/homeassistant/components/weheat/icons.json @@ -0,0 +1,15 @@ +{ + "entity": { + "sensor": { + "power_output": { + "default": "mdi:heat-wave" + }, + "power_input": { + "default": "mdi:lightning-bolt" + }, + "cop": { + "default": "mdi:speedometer" + } + } + } +} diff --git a/homeassistant/components/weheat/manifest.json b/homeassistant/components/weheat/manifest.json new file mode 100644 index 00000000000..2dfceacb635 --- /dev/null +++ b/homeassistant/components/weheat/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "weheat", + "name": "Weheat", + "codeowners": ["@jesperraemaekers"], + "config_flow": true, + "dependencies": ["application_credentials"], + "documentation": "https://www.home-assistant.io/integrations/weheat", + "iot_class": "cloud_polling", + "requirements": ["weheat==2024.09.05"] +} diff --git a/homeassistant/components/weheat/sensor.py b/homeassistant/components/weheat/sensor.py new file mode 100644 index 00000000000..a5bbc66001c --- /dev/null +++ b/homeassistant/components/weheat/sensor.py @@ -0,0 +1,95 @@ +"""Platform for sensor integration.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from weheat.abstractions.heat_pump import HeatPump + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import UnitOfPower +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . 
import WeheatConfigEntry +from .const import DISPLAY_PRECISION_COP, DISPLAY_PRECISION_WATTS +from .coordinator import WeheatDataUpdateCoordinator +from .entity import WeheatEntity + + +@dataclass(frozen=True, kw_only=True) +class WeHeatSensorEntityDescription(SensorEntityDescription): + """Describes Weheat sensor entity.""" + + value_fn: Callable[[HeatPump], StateType] + + +SENSORS = [ + WeHeatSensorEntityDescription( + translation_key="power_output", + key="power_output", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATTS, + value_fn=lambda status: status.power_output, + ), + WeHeatSensorEntityDescription( + translation_key="power_input", + key="power_input", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_WATTS, + value_fn=lambda status: status.power_input, + ), + WeHeatSensorEntityDescription( + translation_key="cop", + key="cop", + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=DISPLAY_PRECISION_COP, + value_fn=lambda status: status.cop, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: WeheatConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensors for weheat heat pump.""" + async_add_entities( + WeheatHeatPumpSensor(coordinator, entity_description) + for entity_description in SENSORS + for coordinator in entry.runtime_data + ) + + +class WeheatHeatPumpSensor(WeheatEntity, SensorEntity): + """Defines a Weheat heat pump sensor.""" + + coordinator: WeheatDataUpdateCoordinator + entity_description: WeHeatSensorEntityDescription + + def __init__( + self, + coordinator: WeheatDataUpdateCoordinator, + entity_description: WeHeatSensorEntityDescription, + ) -> None: + """Pass coordinator to CoordinatorEntity.""" + super().__init__(coordinator) + + self.entity_description = entity_description + + self._attr_unique_id = f"{coordinator.heatpump_id}_{entity_description.key}" + + @property + def native_value(self) -> StateType: + """Return the state of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/weheat/strings.json b/homeassistant/components/weheat/strings.json new file mode 100644 index 00000000000..63871b065b6 --- /dev/null +++ b/homeassistant/components/weheat/strings.json @@ -0,0 +1,46 @@ +{ + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + }, + "find_devices": { + "title": "Select your heat pump" + }, + "reauth_confirm": { + "title": "Re-authenticate with WeHeat", + "description": "You need to re-authenticate with WeHeat to continue" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": 
"[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "no_devices_found": "Could not find any heat pumps on this account" + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + }, + "entity": { + "sensor": { + "power_output": { + "name": "Output power" + }, + "power_input": { + "name": "Input power" + }, + "cop": { + "name": "COP" + } + } + } +} diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index efb6f426d36..359ef656290 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -28,6 +28,7 @@ APPLICATION_CREDENTIALS = [ "spotify", "tesla_fleet", "twitch", + "weheat", "withings", "xbox", "yale", diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 9f4b4e42bb0..f03c980a2d4 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -656,6 +656,7 @@ FLOWS = { "weatherkit", "webmin", "webostv", + "weheat", "wemo", "whirlpool", "whois", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index b4c80aa70b4..eab7bf224d2 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -6854,6 +6854,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "weheat": { + "name": "Weheat", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling" + }, "wemo": { "name": "Belkin WeMo", "integration_type": "hub", diff --git a/requirements_all.txt b/requirements_all.txt index 82cabd124d8..6796a83c9c6 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2947,6 +2947,9 @@ weatherflow4py==0.2.23 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 +# homeassistant.components.weheat +weheat==2024.09.05 + # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index f0ac4294ada..df33037cf4c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2333,6 +2333,9 @@ weatherflow4py==0.2.23 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 +# homeassistant.components.weheat +weheat==2024.09.05 + # homeassistant.components.whirlpool whirlpool-sixth-sense==0.18.8 diff --git a/tests/components/weheat/__init__.py b/tests/components/weheat/__init__.py new file mode 100644 index 00000000000..c077280ccb5 --- /dev/null +++ b/tests/components/weheat/__init__.py @@ -0,0 +1 @@ +"""Tests for the Weheat integration.""" diff --git a/tests/components/weheat/conftest.py b/tests/components/weheat/conftest.py new file mode 100644 index 00000000000..831d4d460ac --- /dev/null +++ b/tests/components/weheat/conftest.py @@ -0,0 +1,36 @@ +"""Fixtures for Weheat tests.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.application_credentials import ( + DOMAIN as APPLICATION_CREDENTIALS, + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.weheat.const import DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from .const import 
CLIENT_ID, CLIENT_SECRET + + +@pytest.fixture(autouse=True) +async def setup_credentials(hass: HomeAssistant) -> None: + """Fixture to setup credentials.""" + assert await async_setup_component(hass, APPLICATION_CREDENTIALS, {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential(CLIENT_ID, CLIENT_SECRET), + ) + + +@pytest.fixture +def mock_setup_entry(): + """Mock a successful setup.""" + with patch( + "homeassistant.components.weheat.async_setup_entry", return_value=True + ) as mock_setup: + yield mock_setup diff --git a/tests/components/weheat/const.py b/tests/components/weheat/const.py new file mode 100644 index 00000000000..01733de1c91 --- /dev/null +++ b/tests/components/weheat/const.py @@ -0,0 +1,11 @@ +"""Constants for weheat tests.""" + +CLIENT_ID = "1234" +CLIENT_SECRET = "5678" + +USER_UUID_1 = "0000-1111-2222-3333" + +CONF_REFRESH_TOKEN = "refresh_token" +CONF_AUTH_IMPLEMENTATION = "auth_implementation" +MOCK_REFRESH_TOKEN = "mock_refresh_token" +MOCK_ACCESS_TOKEN = "mock_access_token" diff --git a/tests/components/weheat/test_config_flow.py b/tests/components/weheat/test_config_flow.py new file mode 100644 index 00000000000..c065d011e42 --- /dev/null +++ b/tests/components/weheat/test_config_flow.py @@ -0,0 +1,137 @@ +"""Test the Weheat config flow.""" + +from unittest.mock import patch + +import pytest + +from homeassistant.components.weheat.const import ( + DOMAIN, + ENTRY_TITLE, + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.config_entries import SOURCE_USER, ConfigFlowResult +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_SOURCE, CONF_TOKEN +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import ( + CLIENT_ID, + CONF_AUTH_IMPLEMENTATION, + CONF_REFRESH_TOKEN, + MOCK_ACCESS_TOKEN, + MOCK_REFRESH_TOKEN, + USER_UUID_1, +) + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_setup_entry, +) -> None: + """Check full of adding a single heat pump.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER} + ) + + await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result) + + with ( + patch( + "homeassistant.components.weheat.config_flow.get_user_id_from_token", + return_value=USER_UUID_1, + ) as mock_weheat, + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_weheat.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].unique_id == USER_UUID_1 + assert result["result"].title == ENTRY_TITLE + assert result["data"][CONF_TOKEN][CONF_REFRESH_TOKEN] == MOCK_REFRESH_TOKEN + assert result["data"][CONF_TOKEN][CONF_ACCESS_TOKEN] == MOCK_ACCESS_TOKEN + assert result["data"][CONF_AUTH_IMPLEMENTATION] == DOMAIN + + +@pytest.mark.usefixtures("current_request_with_host") +async def test_duplicate_unique_id( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_setup_entry, +) -> None: + """Check that the config 
flow is aborted when an entry with the same ID exists.""" + first_entry = MockConfigEntry( + domain=DOMAIN, + data={}, + unique_id=USER_UUID_1, + ) + + first_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={CONF_SOURCE: SOURCE_USER} + ) + + await handle_oauth(hass, hass_client_no_auth, aioclient_mock, result) + + with ( + patch( + "homeassistant.components.weheat.config_flow.get_user_id_from_token", + return_value=USER_UUID_1, + ), + ): + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + # only care that the config flow is aborted + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def handle_oauth( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + result: ConfigFlowResult, +) -> None: + """Handle the Oauth2 part of the flow.""" + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + "&scope=openid+offline_access" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "refresh_token": MOCK_REFRESH_TOKEN, + "access_token": MOCK_ACCESS_TOKEN, + "type": "Bearer", + "expires_in": 60, + }, + ) From ff3cabbf3a4c65b526b5d9bddb8ed2e18e2abd90 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 6 Sep 2024 07:36:02 -0400 Subject: [PATCH 1500/1635] Small Assist Satellite fixes (#125384) --- homeassistant/components/assist_pipeline/pipeline.py | 4 +++- homeassistant/components/assist_satellite/entity.py | 2 +- tests/components/assist_satellite/test_entity.py | 5 ++--- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index f6a6bc45b57..8a5fec83565 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -504,7 +504,7 @@ class AudioSettings: is_vad_enabled: bool = True """True if VAD is used to determine the end of the voice command.""" - silence_seconds: float = 0.5 + silence_seconds: float = 0.7 """Seconds of silence after voice command has ended.""" def __post_init__(self) -> None: @@ -906,6 +906,8 @@ class PipelineRun: metadata, self._speech_to_text_stream(audio_stream=stream, stt_vad=stt_vad), ) + except (asyncio.CancelledError, TimeoutError): + raise # expected except Exception as src_error: _LOGGER.exception("Unexpected error during speech-to-text") raise SpeechToTextError( diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py index 8364a81b1fb..6ec40ae24f7 100644 --- a/homeassistant/components/assist_satellite/entity.py +++ b/homeassistant/components/assist_satellite/entity.py @@ -73,7 +73,7 @@ class AssistSatelliteEntity(entity.Entity): _is_announcing = False _wake_word_intercept_future: asyncio.Future[str | None] | None = None - __assist_satellite_state: AssistSatelliteState | None = None + __assist_satellite_state = 
AssistSatelliteState.LISTENING_WAKE_WORD @final @property diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py index f957a826828..2e4caca030b 100644 --- a/tests/components/assist_satellite/test_entity.py +++ b/tests/components/assist_satellite/test_entity.py @@ -21,7 +21,6 @@ from homeassistant.components.assist_satellite import SatelliteBusyError from homeassistant.components.assist_satellite.entity import AssistSatelliteState from homeassistant.components.media_source import PlayMedia from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant from . import ENTITY_ID @@ -35,7 +34,7 @@ async def test_entity_state( state = hass.states.get(ENTITY_ID) assert state is not None - assert state.state == STATE_UNKNOWN + assert state.state == AssistSatelliteState.LISTENING_WAKE_WORD context = Context() audio_stream = object() @@ -71,7 +70,7 @@ async def test_entity_state( assert kwargs["end_stage"] == PipelineStage.TTS for event_type, expected_state in ( - (PipelineEventType.RUN_START, STATE_UNKNOWN), + (PipelineEventType.RUN_START, AssistSatelliteState.LISTENING_WAKE_WORD), (PipelineEventType.RUN_END, AssistSatelliteState.LISTENING_WAKE_WORD), (PipelineEventType.WAKE_WORD_START, AssistSatelliteState.LISTENING_WAKE_WORD), (PipelineEventType.WAKE_WORD_END, AssistSatelliteState.LISTENING_WAKE_WORD), From 8f38b7191a9e6e22b21c55112e929f69ec112d58 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Fri, 6 Sep 2024 14:06:46 +0200 Subject: [PATCH 1501/1635] Fix for Hue sending effect None at turn_on command while no effect is active (#125377) * Fix for Hue sending effect None at turn_on command while no effect is active * typo * update tests --- homeassistant/components/hue/v2/light.py | 6 ++- tests/components/hue/test_light_v2.py | 54 +++++++++++++++++++----- 2 files changed, 49 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/hue/v2/light.py b/homeassistant/components/hue/v2/light.py index b908ec83877..6fd0eea7a0b 100644 --- a/homeassistant/components/hue/v2/light.py +++ b/homeassistant/components/hue/v2/light.py @@ -226,7 +226,11 @@ class HueLight(HueBaseEntity, LightEntity): flash = kwargs.get(ATTR_FLASH) effect = effect_str = kwargs.get(ATTR_EFFECT) if effect_str in (EFFECT_NONE, EFFECT_NONE.lower()): - effect = EffectStatus.NO_EFFECT + # ignore effect if set to "None" and we have no effect active + # the special effect "None" is only used to stop an active effect + # but sending it while no effect is active can actually result in issues + # https://github.com/home-assistant/core/issues/122165 + effect = None if self.effect == EFFECT_NONE else EffectStatus.NO_EFFECT elif effect_str is not None: # work out if we got a regular effect or timed effect effect = EffectStatus(effect_str) diff --git a/tests/components/hue/test_light_v2.py b/tests/components/hue/test_light_v2.py index 417670a3769..2b978ffc33f 100644 --- a/tests/components/hue/test_light_v2.py +++ b/tests/components/hue/test_light_v2.py @@ -175,7 +175,7 @@ async def test_light_turn_on_service( assert len(mock_bridge_v2.mock_requests) == 6 assert mock_bridge_v2.mock_requests[5]["json"]["color_temperature"]["mirek"] == 500 - # test enable effect + # test enable an effect await hass.services.async_call( "light", "turn_on", @@ -184,8 +184,20 @@ async def test_light_turn_on_service( ) assert len(mock_bridge_v2.mock_requests) == 7 assert 
mock_bridge_v2.mock_requests[6]["json"]["effects"]["effect"] == "candle" + # fire event to update effect in HA state + event = { + "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", + "type": "light", + "effects": {"status": "candle"}, + } + mock_bridge_v2.api.emit_event("update", event) + await hass.async_block_till_done() + test_light = hass.states.get(test_light_id) + assert test_light is not None + assert test_light.attributes["effect"] == "candle" # test disable effect + # it should send a request with effect set to "no_effect" await hass.services.async_call( "light", "turn_on", @@ -194,6 +206,28 @@ async def test_light_turn_on_service( ) assert len(mock_bridge_v2.mock_requests) == 8 assert mock_bridge_v2.mock_requests[7]["json"]["effects"]["effect"] == "no_effect" + # fire event to update effect in HA state + event = { + "id": "3a6710fa-4474-4eba-b533-5e6e72968feb", + "type": "light", + "effects": {"status": "no_effect"}, + } + mock_bridge_v2.api.emit_event("update", event) + await hass.async_block_till_done() + test_light = hass.states.get(test_light_id) + assert test_light is not None + assert test_light.attributes["effect"] == "None" + + # test turn on with useless effect + # it should not send an effect in the request if the device has no effect active + await hass.services.async_call( + "light", + "turn_on", + {"entity_id": test_light_id, "effect": "None"}, + blocking=True, + ) + assert len(mock_bridge_v2.mock_requests) == 9 + assert "effects" not in mock_bridge_v2.mock_requests[8]["json"] # test timed effect await hass.services.async_call( @@ -202,11 +236,11 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "sunrise", "transition": 6}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 9 + assert len(mock_bridge_v2.mock_requests) == 10 assert ( - mock_bridge_v2.mock_requests[8]["json"]["timed_effects"]["effect"] == "sunrise" + mock_bridge_v2.mock_requests[9]["json"]["timed_effects"]["effect"] == "sunrise" ) - assert mock_bridge_v2.mock_requests[8]["json"]["timed_effects"]["duration"] == 6000 + assert mock_bridge_v2.mock_requests[9]["json"]["timed_effects"]["duration"] == 6000 # test enabling effect should ignore color temperature await hass.services.async_call( @@ -215,9 +249,9 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "candle", "color_temp": 500}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 10 - assert mock_bridge_v2.mock_requests[9]["json"]["effects"]["effect"] == "candle" - assert "color_temperature" not in mock_bridge_v2.mock_requests[9]["json"] + assert len(mock_bridge_v2.mock_requests) == 11 + assert mock_bridge_v2.mock_requests[10]["json"]["effects"]["effect"] == "candle" + assert "color_temperature" not in mock_bridge_v2.mock_requests[10]["json"] # test enabling effect should ignore xy color await hass.services.async_call( @@ -226,9 +260,9 @@ async def test_light_turn_on_service( {"entity_id": test_light_id, "effect": "candle", "xy_color": [0.123, 0.123]}, blocking=True, ) - assert len(mock_bridge_v2.mock_requests) == 11 - assert mock_bridge_v2.mock_requests[10]["json"]["effects"]["effect"] == "candle" - assert "xy_color" not in mock_bridge_v2.mock_requests[9]["json"] + assert len(mock_bridge_v2.mock_requests) == 12 + assert mock_bridge_v2.mock_requests[11]["json"]["effects"]["effect"] == "candle" + assert "xy_color" not in mock_bridge_v2.mock_requests[11]["json"] async def test_light_turn_off_service( From 0eda451c24221a460223cd572e97512c2857e5f7 Mon Sep 17 00:00:00 2001 From: TimL
Date: Fri, 6 Sep 2024 22:25:55 +1000 Subject: [PATCH 1502/1635] Add Switch platform to Smlight integration (#125292) * Add switch platform to Smlight * Add strings for switch platform * Add tests for Smlight switch platform * Regenerate snapshot * Address review comments * Use is_on property for updating switch state * Address review comments --------- Co-authored-by: Tim Lunn --- homeassistant/components/smlight/__init__.py | 1 + homeassistant/components/smlight/strings.json | 11 ++ homeassistant/components/smlight/switch.py | 110 ++++++++++++++ tests/components/smlight/conftest.py | 1 + .../components/smlight/fixtures/sensors.json | 2 +- .../smlight/snapshots/test_switch.ambr | 142 ++++++++++++++++++ tests/components/smlight/test_switch.py | 110 ++++++++++++++ 7 files changed, 376 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/smlight/switch.py create mode 100644 tests/components/smlight/snapshots/test_switch.ambr create mode 100644 tests/components/smlight/test_switch.py diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py index 4f0f2c0fb02..58d5b7d343f 100644 --- a/homeassistant/components/smlight/__init__.py +++ b/homeassistant/components/smlight/__init__.py @@ -12,6 +12,7 @@ PLATFORMS: list[Platform] = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SENSOR, + Platform.SWITCH, ] type SmConfigEntry = ConfigEntry[SmDataUpdateCoordinator] diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index bca42f642b7..e3e8fee0d4d 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -80,6 +80,17 @@ "zigbee_flash_mode": { "name": "Zigbee flash mode" } + }, + "switch": { + "auto_zigbee_update": { + "name": "Auto Zigbee update" + }, + "disable_led": { + "name": "Disable LEDs" + }, + "night_mode": { + "name": "LED night mode" + } } } } diff --git a/homeassistant/components/smlight/switch.py b/homeassistant/components/smlight/switch.py new file mode 100644 index 00000000000..2e7b7e4df7e --- /dev/null +++ b/homeassistant/components/smlight/switch.py @@ -0,0 +1,110 @@ +"""Support for SLZB-06 switches.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +import logging +from typing import Any + +from pysmlight import Sensors +from pysmlight.const import Settings + +from homeassistant.components.switch import ( + SwitchDeviceClass, + SwitchEntity, + SwitchEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SmConfigEntry +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(frozen=True, kw_only=True) +class SmSwitchEntityDescription(SwitchEntityDescription): + """Class to describe a Switch entity.""" + + setting: Settings + state_fn: Callable[[Sensors], bool | None] + + +SWITCHES: list[SmSwitchEntityDescription] = [ + SmSwitchEntityDescription( + key="disable_led", + translation_key="disable_led", + setting=Settings.DISABLE_LEDS, + state_fn=lambda x: x.disable_leds, + ), + SmSwitchEntityDescription( + key="night_mode", + translation_key="night_mode", + setting=Settings.NIGHT_MODE, + state_fn=lambda x: x.night_mode, + ), + SmSwitchEntityDescription( + key="auto_zigbee_update", + translation_key="auto_zigbee_update", + entity_category=EntityCategory.CONFIG, + setting=Settings.ZB_AUTOUPDATE, + state_fn=lambda x: x.auto_zigbee, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize switches for SLZB-06 device.""" + coordinator = entry.runtime_data + + async_add_entities(SmSwitch(coordinator, switch) for switch in SWITCHES) + + +class SmSwitch(SmEntity, SwitchEntity): + """Representation of a SLZB-06 switch.""" + + entity_description: SmSwitchEntityDescription + _attr_device_class = SwitchDeviceClass.SWITCH + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmSwitchEntityDescription, + ) -> None: + """Initialize the switch.""" + super().__init__(coordinator) + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}-{description.key}" + + self._page, self._toggle = description.setting.value + + async def set_smlight(self, state: bool) -> None: + """Set the state on SLZB device.""" + await self.coordinator.client.set_toggle(self._page, self._toggle, state) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + self._attr_is_on = True + self.async_write_ha_state() + + await self.set_smlight(True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + self._attr_is_on = False + self.async_write_ha_state() + + await self.set_smlight(False) + + @property + def is_on(self) -> bool | None: + """Return the state of the switch.""" + return self.entity_description.state_fn(self.coordinator.data.sensors) diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py index a86c7b4c27a..b78ec7aa630 100644 --- a/tests/components/smlight/conftest.py +++ b/tests/components/smlight/conftest.py @@ -88,6 +88,7 @@ def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: api.authenticate.return_value = True api.cmds = AsyncMock(spec_set=CmdWrapper) + api.set_toggle = AsyncMock() yield api diff --git a/tests/components/smlight/fixtures/sensors.json b/tests/components/smlight/fixtures/sensors.json index 0b2f9055e01..89ec5615f34 100644 --- a/tests/components/smlight/fixtures/sensors.json +++ b/tests/components/smlight/fixtures/sensors.json @@ -9,6 +9,6 @@ "wifi_connected": false, "wifi_status": 255, "disable_leds": false, - "night_mode": false, + "night_mode": true, "auto_zigbee": false } diff --git a/tests/components/smlight/snapshots/test_switch.ambr b/tests/components/smlight/snapshots/test_switch.ambr new file mode 100644 index 00000000000..b8e1c8357ac --- /dev/null +++ b/tests/components/smlight/snapshots/test_switch.ambr @@ -0,0 +1,142 @@ +# 
serializer version: 1 +# name: test_switch_setup[switch.mock_title_auto_zigbee_update-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_title_auto_zigbee_update', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Auto Zigbee update', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'auto_zigbee_update', + 'unique_id': 'aa:bb:cc:dd:ee:ff-auto_zigbee_update', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_auto_zigbee_update-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Auto Zigbee update', + }), + 'context': , + 'entity_id': 'switch.mock_title_auto_zigbee_update', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_setup[switch.mock_title_disable_leds-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_disable_leds', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Disable LEDs', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disable_led', + 'unique_id': 'aa:bb:cc:dd:ee:ff-disable_led', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_disable_leds-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title Disable LEDs', + }), + 'context': , + 'entity_id': 'switch.mock_title_disable_leds', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switch_setup[switch.mock_title_led_night_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.mock_title_led_night_mode', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'LED night mode', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'night_mode', + 'unique_id': 'aa:bb:cc:dd:ee:ff-night_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_switch_setup[switch.mock_title_led_night_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Mock Title LED night mode', + }), + 'context': , + 'entity_id': 'switch.mock_title_led_night_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/smlight/test_switch.py 
b/tests/components/smlight/test_switch.py new file mode 100644 index 00000000000..165024eaa83 --- /dev/null +++ b/tests/components/smlight/test_switch.py @@ -0,0 +1,110 @@ +"""Tests for the SMLIGHT switch platform.""" + +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from pysmlight import Sensors +from pysmlight.const import Settings +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.smlight.const import SCAN_INTERVAL +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +@pytest.fixture +def platforms() -> list[Platform]: + """Platforms, which should be loaded during the test.""" + return [Platform.SWITCH] + + +async def test_switch_setup( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test setup of SMLIGHT switches.""" + entry = await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +@pytest.mark.parametrize( + ("entity", "setting", "field"), + [ + ("disable_leds", Settings.DISABLE_LEDS, "disable_leds"), + ("led_night_mode", Settings.NIGHT_MODE, "night_mode"), + ("auto_zigbee_update", Settings.ZB_AUTOUPDATE, "auto_zigbee"), + ], +) +async def test_switches( + hass: HomeAssistant, + entity: str, + field: str, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + setting: Settings, +) -> None: + """Test the SMLIGHT switches.""" + await setup_integration(hass, mock_config_entry) + + _page, _toggle = setting.value + + entity_id = f"switch.mock_title_{entity}" + state = hass.states.get(entity_id) + assert state is not None + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mock_smlight_client.set_toggle.mock_calls) == 1 + mock_smlight_client.set_toggle.assert_called_once_with(_page, _toggle, True) + mock_smlight_client.get_sensors.return_value = Sensors(**{field: True}) + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_ON + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert len(mock_smlight_client.set_toggle.mock_calls) == 2 + mock_smlight_client.set_toggle.assert_called_with(_page, _toggle, False) + mock_smlight_client.get_sensors.return_value = Sensors(**{field: False}) + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == STATE_OFF From aba23eb5139caa7023f710dc29329532ca3ea80d Mon Sep 17 00:00:00 2001 From: Matrix Date: Fri, 6 Sep 2024 20:47:31 +0800 Subject: [PATCH 1503/1635] Add YoLink temperature sensor YS8008 support (#125408) Add YS8008 support --- homeassistant/components/yolink/const.py | 2 ++ 
homeassistant/components/yolink/sensor.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 217dd66d063..eb6169eccad 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -19,6 +19,8 @@ DEV_MODEL_WATER_METER_YS5007 = "YS5007" DEV_MODEL_MULTI_OUTLET_YS6801 = "YS6801" DEV_MODEL_TH_SENSOR_YS8004_UC = "YS8004-UC" DEV_MODEL_TH_SENSOR_YS8004_EC = "YS8004-EC" +DEV_MODEL_TH_SENSOR_YS8008_UC = "YS8008-UC" +DEV_MODEL_TH_SENSOR_YS8008_EC = "YS8008-EC" DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index b8f2a77516c..537393d0315 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -57,6 +57,8 @@ from .const import ( DEV_MODEL_PLUG_YS6803_UC, DEV_MODEL_TH_SENSOR_YS8004_EC, DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8008_EC, + DEV_MODEL_TH_SENSOR_YS8008_UC, DEV_MODEL_TH_SENSOR_YS8014_EC, DEV_MODEL_TH_SENSOR_YS8014_UC, DEV_MODEL_TH_SENSOR_YS8017_EC, @@ -125,6 +127,8 @@ MCU_DEV_TEMPERATURE_SENSOR = [ NONE_HUMIDITY_SENSOR_MODELS = [ DEV_MODEL_TH_SENSOR_YS8004_EC, DEV_MODEL_TH_SENSOR_YS8004_UC, + DEV_MODEL_TH_SENSOR_YS8008_EC, + DEV_MODEL_TH_SENSOR_YS8008_UC, DEV_MODEL_TH_SENSOR_YS8014_EC, DEV_MODEL_TH_SENSOR_YS8014_UC, DEV_MODEL_TH_SENSOR_YS8017_UC, From 9777ed2e624f2d0b9a252881ed2080c51f3c86a0 Mon Sep 17 00:00:00 2001 From: Tony <29752086+ms264556@users.noreply.github.com> Date: Sat, 7 Sep 2024 00:48:16 +1200 Subject: [PATCH 1504/1635] Rename "Ruckus Unleashed" integration to "Ruckus" (#125392) --- .../components/ruckus_unleashed/__init__.py | 8 +++---- .../ruckus_unleashed/config_flow.py | 6 ++--- .../components/ruckus_unleashed/const.py | 2 +- .../ruckus_unleashed/coordinator.py | 10 ++++---- .../ruckus_unleashed/device_tracker.py | 24 ++++++++----------- .../components/ruckus_unleashed/manifest.json | 2 +- homeassistant/generated/integrations.json | 2 +- tests/components/ruckus_unleashed/__init__.py | 6 ++--- .../ruckus_unleashed/test_config_flow.py | 2 +- .../ruckus_unleashed/test_device_tracker.py | 2 +- .../components/ruckus_unleashed/test_init.py | 2 +- 11 files changed, 31 insertions(+), 35 deletions(-) diff --git a/homeassistant/components/ruckus_unleashed/__init__.py b/homeassistant/components/ruckus_unleashed/__init__.py index c2c46fcc125..4ee870e8322 100644 --- a/homeassistant/components/ruckus_unleashed/__init__.py +++ b/homeassistant/components/ruckus_unleashed/__init__.py @@ -1,4 +1,4 @@ -"""The Ruckus Unleashed integration.""" +"""The Ruckus integration.""" import logging @@ -24,13 +24,13 @@ from .const import ( PLATFORMS, UNDO_UPDATE_LISTENERS, ) -from .coordinator import RuckusUnleashedDataUpdateCoordinator +from .coordinator import RuckusDataUpdateCoordinator _LOGGER = logging.getLogger(__package__) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Ruckus Unleashed from a config entry.""" + """Set up Ruckus from a config entry.""" ruckus = AjaxSession.async_create( entry.data[CONF_HOST], @@ -46,7 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await ruckus.close() raise ConfigEntryAuthFailed from autherr - coordinator = RuckusUnleashedDataUpdateCoordinator(hass, ruckus=ruckus) + coordinator = 
RuckusDataUpdateCoordinator(hass, ruckus=ruckus) await coordinator.async_config_entry_first_refresh() diff --git a/homeassistant/components/ruckus_unleashed/config_flow.py b/homeassistant/components/ruckus_unleashed/config_flow.py index d2f27e4ef05..fdfacfc73a7 100644 --- a/homeassistant/components/ruckus_unleashed/config_flow.py +++ b/homeassistant/components/ruckus_unleashed/config_flow.py @@ -1,4 +1,4 @@ -"""Config flow for Ruckus Unleashed integration.""" +"""Config flow for Ruckus integration.""" from collections.abc import Mapping import logging @@ -59,8 +59,8 @@ async def validate_input(hass: HomeAssistant, data): } -class RuckusUnleashedConfigFlow(ConfigFlow, domain=DOMAIN): - """Handle a config flow for Ruckus Unleashed.""" +class RuckusConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Ruckus.""" VERSION = 1 diff --git a/homeassistant/components/ruckus_unleashed/const.py b/homeassistant/components/ruckus_unleashed/const.py index 9076437b8c7..1aae3041e73 100644 --- a/homeassistant/components/ruckus_unleashed/const.py +++ b/homeassistant/components/ruckus_unleashed/const.py @@ -1,4 +1,4 @@ -"""Constants for the Ruckus Unleashed integration.""" +"""Constants for the Ruckus integration.""" from homeassistant.const import Platform diff --git a/homeassistant/components/ruckus_unleashed/coordinator.py b/homeassistant/components/ruckus_unleashed/coordinator.py index 989748af86e..d9f20883559 100644 --- a/homeassistant/components/ruckus_unleashed/coordinator.py +++ b/homeassistant/components/ruckus_unleashed/coordinator.py @@ -1,4 +1,4 @@ -"""Ruckus Unleashed DataUpdateCoordinator.""" +"""Ruckus DataUpdateCoordinator.""" from datetime import timedelta import logging @@ -15,11 +15,11 @@ from .const import API_CLIENT_MAC, DOMAIN, KEY_SYS_CLIENTS, SCAN_INTERVAL _LOGGER = logging.getLogger(__package__) -class RuckusUnleashedDataUpdateCoordinator(DataUpdateCoordinator): - """Coordinator to manage data from Ruckus Unleashed client.""" +class RuckusDataUpdateCoordinator(DataUpdateCoordinator): + """Coordinator to manage data from Ruckus client.""" def __init__(self, hass: HomeAssistant, *, ruckus: AjaxSession) -> None: - """Initialize global Ruckus Unleashed data updater.""" + """Initialize global Ruckus data updater.""" self.ruckus = ruckus update_interval = timedelta(seconds=SCAN_INTERVAL) @@ -38,7 +38,7 @@ class RuckusUnleashedDataUpdateCoordinator(DataUpdateCoordinator): return {client[API_CLIENT_MAC]: client for client in clients} async def _async_update_data(self) -> dict: - """Fetch Ruckus Unleashed data.""" + """Fetch Ruckus data.""" try: return {KEY_SYS_CLIENTS: await self._fetch_clients()} except AuthenticationError as autherror: diff --git a/homeassistant/components/ruckus_unleashed/device_tracker.py b/homeassistant/components/ruckus_unleashed/device_tracker.py index 233e5cd4945..704272bf4c9 100644 --- a/homeassistant/components/ruckus_unleashed/device_tracker.py +++ b/homeassistant/components/ruckus_unleashed/device_tracker.py @@ -1,4 +1,4 @@ -"""Support for Ruckus Unleashed devices.""" +"""Support for Ruckus devices.""" from __future__ import annotations @@ -19,7 +19,7 @@ from .const import ( KEY_SYS_CLIENTS, UNDO_UPDATE_LISTENERS, ) -from .coordinator import RuckusUnleashedDataUpdateCoordinator +from .coordinator import RuckusDataUpdateCoordinator _LOGGER = logging.getLogger(__package__) @@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__package__) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: 
- """Set up device tracker for Ruckus Unleashed component.""" + """Set up device tracker for Ruckus component.""" coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR] tracked: set[str] = set() @@ -58,9 +58,7 @@ def add_new_entities(coordinator, async_add_entities, tracked): device = coordinator.data[KEY_SYS_CLIENTS][mac] _LOGGER.debug("adding new device: [%s] %s", mac, device[API_CLIENT_HOSTNAME]) - new_tracked.append( - RuckusUnleashedDevice(coordinator, mac, device[API_CLIENT_HOSTNAME]) - ) + new_tracked.append(RuckusDevice(coordinator, mac, device[API_CLIENT_HOSTNAME])) tracked.add(mac) async_add_entities(new_tracked) @@ -69,13 +67,13 @@ def add_new_entities(coordinator, async_add_entities, tracked): @callback def restore_entities( registry: er.EntityRegistry, - coordinator: RuckusUnleashedDataUpdateCoordinator, + coordinator: RuckusDataUpdateCoordinator, entry: ConfigEntry, async_add_entities: AddEntitiesCallback, tracked: set[str], ) -> None: """Restore clients that are not a part of active clients list.""" - missing: list[RuckusUnleashedDevice] = [] + missing: list[RuckusDevice] = [] for entity in registry.entities.get_entries_for_config_entry_id(entry.entry_id): if ( @@ -83,9 +81,7 @@ def restore_entities( and entity.unique_id not in coordinator.data[KEY_SYS_CLIENTS] ): missing.append( - RuckusUnleashedDevice( - coordinator, entity.unique_id, entity.original_name - ) + RuckusDevice(coordinator, entity.unique_id, entity.original_name) ) tracked.add(entity.unique_id) @@ -93,11 +89,11 @@ def restore_entities( async_add_entities(missing) -class RuckusUnleashedDevice(CoordinatorEntity, ScannerEntity): - """Representation of a Ruckus Unleashed client.""" +class RuckusDevice(CoordinatorEntity, ScannerEntity): + """Representation of a Ruckus client.""" def __init__(self, coordinator, mac, name) -> None: - """Initialize a Ruckus Unleashed client.""" + """Initialize a Ruckus client.""" super().__init__(coordinator) self._mac = mac self._name = name diff --git a/homeassistant/components/ruckus_unleashed/manifest.json b/homeassistant/components/ruckus_unleashed/manifest.json index 039840efc14..2066b65221e 100644 --- a/homeassistant/components/ruckus_unleashed/manifest.json +++ b/homeassistant/components/ruckus_unleashed/manifest.json @@ -1,6 +1,6 @@ { "domain": "ruckus_unleashed", - "name": "Ruckus Unleashed", + "name": "Ruckus", "codeowners": ["@lanrat", "@ms264556", "@gabe565"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ruckus_unleashed", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index eab7bf224d2..a01e20909b6 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -5208,7 +5208,7 @@ "iot_class": "local_push" }, "ruckus_unleashed": { - "name": "Ruckus Unleashed", + "name": "Ruckus", "integration_type": "hub", "config_flow": true, "iot_class": "local_polling" diff --git a/tests/components/ruckus_unleashed/__init__.py b/tests/components/ruckus_unleashed/__init__.py index ccbf404cce0..b6c9c86953a 100644 --- a/tests/components/ruckus_unleashed/__init__.py +++ b/tests/components/ruckus_unleashed/__init__.py @@ -1,4 +1,4 @@ -"""Tests for the Ruckus Unleashed integration.""" +"""Tests for the Ruckus integration.""" from __future__ import annotations @@ -78,7 +78,7 @@ DEFAULT_UNIQUEID = DEFAULT_SYSTEM_INFO[API_SYS_SYSINFO][API_SYS_SYSINFO_SERIAL] def mock_config_entry() -> MockConfigEntry: - """Return a Ruckus Unleashed mock config entry.""" + 
"""Return a Ruckus mock config entry.""" return MockConfigEntry( domain=DOMAIN, title=DEFAULT_TITLE, @@ -89,7 +89,7 @@ def mock_config_entry() -> MockConfigEntry: async def init_integration(hass: HomeAssistant) -> MockConfigEntry: - """Set up the Ruckus Unleashed integration in Home Assistant.""" + """Set up the Ruckus integration in Home Assistant.""" entry = mock_config_entry() entry.add_to_hass(hass) # Make device tied to other integration so device tracker entities get enabled diff --git a/tests/components/ruckus_unleashed/test_config_flow.py b/tests/components/ruckus_unleashed/test_config_flow.py index 89bd72d99e4..61f689f3030 100644 --- a/tests/components/ruckus_unleashed/test_config_flow.py +++ b/tests/components/ruckus_unleashed/test_config_flow.py @@ -1,4 +1,4 @@ -"""Test the Ruckus Unleashed config flow.""" +"""Test the config flow.""" from copy import deepcopy from datetime import timedelta diff --git a/tests/components/ruckus_unleashed/test_device_tracker.py b/tests/components/ruckus_unleashed/test_device_tracker.py index 79d7c2dfda4..460c64c9651 100644 --- a/tests/components/ruckus_unleashed/test_device_tracker.py +++ b/tests/components/ruckus_unleashed/test_device_tracker.py @@ -1,4 +1,4 @@ -"""The sensor tests for the Ruckus Unleashed platform.""" +"""The sensor tests for the Ruckus platform.""" from datetime import timedelta from unittest.mock import AsyncMock diff --git a/tests/components/ruckus_unleashed/test_init.py b/tests/components/ruckus_unleashed/test_init.py index 8147f040bde..a7514677f20 100644 --- a/tests/components/ruckus_unleashed/test_init.py +++ b/tests/components/ruckus_unleashed/test_init.py @@ -1,4 +1,4 @@ -"""Test the Ruckus Unleashed config flow.""" +"""Test the Ruckus config flow.""" from unittest.mock import AsyncMock From f8c94fd83f3ddba592933afce00d09e9774057af Mon Sep 17 00:00:00 2001 From: steffenrapp <88974099+steffenrapp@users.noreply.github.com> Date: Fri, 6 Sep 2024 14:49:05 +0200 Subject: [PATCH 1505/1635] Remove attributes from Nuki entities (#125348) * Remove attributes from Nuki entities * remove tests --- homeassistant/components/nuki/binary_sensor.py | 18 +----------------- homeassistant/components/nuki/lock.py | 18 +----------------- homeassistant/components/nuki/sensor.py | 8 +------- .../nuki/snapshots/test_binary_sensor.ambr | 2 -- tests/components/nuki/snapshots/test_lock.ambr | 4 ---- .../components/nuki/snapshots/test_sensor.ambr | 1 - 6 files changed, 3 insertions(+), 48 deletions(-) diff --git a/homeassistant/components/nuki/binary_sensor.py b/homeassistant/components/nuki/binary_sensor.py index 9b4772ee108..731b94e6551 100644 --- a/homeassistant/components/nuki/binary_sensor.py +++ b/homeassistant/components/nuki/binary_sensor.py @@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import NukiEntity, NukiEntryData -from .const import ATTR_NUKI_ID, DOMAIN as NUKI_DOMAIN +from .const import DOMAIN as NUKI_DOMAIN async def async_setup_entry( @@ -51,14 +51,6 @@ class NukiDoorsensorEntity(NukiEntity[NukiDevice], BinarySensorEntity): """Return a unique ID.""" return f"{self._nuki_device.nuki_id}_doorsensor" - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self): - """Return the device specific state attributes.""" - return { - ATTR_NUKI_ID: self._nuki_device.nuki_id, - } - @property def available(self) -> bool: """Return true if door sensor is present and activated.""" @@ -91,14 +83,6 @@ class NukiRingactionEntity(NukiEntity[NukiDevice], BinarySensorEntity): """Return a unique ID.""" return f"{self._nuki_device.nuki_id}_ringaction" - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self): - """Return the device specific state attributes.""" - return { - ATTR_NUKI_ID: self._nuki_device.nuki_id, - } - @property def is_on(self) -> bool: """Return the value of the ring action state.""" diff --git a/homeassistant/components/nuki/lock.py b/homeassistant/components/nuki/lock.py index 5a8734d5df7..6e1c98bc69c 100644 --- a/homeassistant/components/nuki/lock.py +++ b/homeassistant/components/nuki/lock.py @@ -18,14 +18,7 @@ from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import NukiEntity, NukiEntryData -from .const import ( - ATTR_BATTERY_CRITICAL, - ATTR_ENABLE, - ATTR_NUKI_ID, - ATTR_UNLATCH, - DOMAIN as NUKI_DOMAIN, - ERROR_STATES, -) +from .const import ATTR_ENABLE, ATTR_UNLATCH, DOMAIN as NUKI_DOMAIN, ERROR_STATES from .helpers import CannotConnect @@ -75,15 +68,6 @@ class NukiDeviceEntity[_NukiDeviceT: NukiDevice](NukiEntity[_NukiDeviceT], LockE """Return a unique ID.""" return self._nuki_device.nuki_id - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self) -> dict[str, Any]: - """Return the device specific state attributes.""" - return { - ATTR_BATTERY_CRITICAL: self._nuki_device.battery_critical, - ATTR_NUKI_ID: self._nuki_device.nuki_id, - } - @property def available(self) -> bool: """Return True if entity is available.""" diff --git a/homeassistant/components/nuki/sensor.py b/homeassistant/components/nuki/sensor.py index 6647eff5c83..628783062d3 100644 --- a/homeassistant/components/nuki/sensor.py +++ b/homeassistant/components/nuki/sensor.py @@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import NukiEntity, NukiEntryData -from .const import ATTR_NUKI_ID, DOMAIN as NUKI_DOMAIN +from .const import DOMAIN as NUKI_DOMAIN async def async_setup_entry( @@ -38,12 +38,6 @@ class NukiBatterySensor(NukiEntity[NukiDevice], SensorEntity): """Return a unique ID.""" return f"{self._nuki_device.nuki_id}_battery_level" - # Deprecated, can be removed in 2024.10 - @property - def extra_state_attributes(self): - """Return the device specific state attributes.""" - return {ATTR_NUKI_ID: self._nuki_device.nuki_id} - @property def native_value(self) -> float: """Return the state of the sensor.""" diff --git a/tests/components/nuki/snapshots/test_binary_sensor.ambr b/tests/components/nuki/snapshots/test_binary_sensor.ambr index 4a122fa78f2..55976bcb433 100644 --- a/tests/components/nuki/snapshots/test_binary_sensor.ambr +++ b/tests/components/nuki/snapshots/test_binary_sensor.ambr @@ -83,7 +83,6 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Community door Ring Action', - 'nuki_id': 2, }), 'context': , 'entity_id': 'binary_sensor.community_door_ring_action', @@ -131,7 +130,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Home', - 'nuki_id': 1, }), 'context': , 'entity_id': 'binary_sensor.home', diff --git a/tests/components/nuki/snapshots/test_lock.ambr b/tests/components/nuki/snapshots/test_lock.ambr index a0013fc37c1..24c80e7b487 100644 --- a/tests/components/nuki/snapshots/test_lock.ambr +++ b/tests/components/nuki/snapshots/test_lock.ambr @@ -35,9 +35,7 @@ # name: test_locks[lock.community_door-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'battery_critical': False, 'friendly_name': 'Community door', - 'nuki_id': 2, 'supported_features': , }), 'context': , @@ -84,9 +82,7 @@ # name: test_locks[lock.home-state] StateSnapshot({ 'attributes': ReadOnlyDict({ - 'battery_critical': False, 'friendly_name': 'Home', - 'nuki_id': 1, 'supported_features': , }), 'context': , diff --git a/tests/components/nuki/snapshots/test_sensor.ambr b/tests/components/nuki/snapshots/test_sensor.ambr index 3c1159aecba..a319104fbc3 100644 --- a/tests/components/nuki/snapshots/test_sensor.ambr +++ b/tests/components/nuki/snapshots/test_sensor.ambr @@ -37,7 +37,6 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Home Battery', - 'nuki_id': 1, 'unit_of_measurement': '%', }), 'context': , From ba81a68982757d3dd3f64ddc9a0ec82e362e85fd Mon Sep 17 00:00:00 2001 From: Paul Bottein Date: Fri, 6 Sep 2024 14:49:58 +0200 Subject: [PATCH 1506/1635] Update frontend to 20240906.0 (#125409) --- homeassistant/components/frontend/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index fbdafe6025d..e40832e4733 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240904.0"] + "requirements": ["home-assistant-frontend==20240906.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e489006867f..ac7b74429bd 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -31,7 +31,7 @@ 
habluetooth==3.4.0 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240904.0 +home-assistant-frontend==20240906.0 home-assistant-intents==2024.9.4 httpx==0.27.0 ifaddr==0.2.0 diff --git a/requirements_all.txt b/requirements_all.txt index 6796a83c9c6..c6a8eba7b6f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1108,7 +1108,7 @@ hole==0.8.0 holidays==0.56 # homeassistant.components.frontend -home-assistant-frontend==20240904.0 +home-assistant-frontend==20240906.0 # homeassistant.components.conversation home-assistant-intents==2024.9.4 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index df33037cf4c..da62c029875 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -934,7 +934,7 @@ hole==0.8.0 holidays==0.56 # homeassistant.components.frontend -home-assistant-frontend==20240904.0 +home-assistant-frontend==20240906.0 # homeassistant.components.conversation home-assistant-intents==2024.9.4 From 053e38db38f2852fd0497c1fc7f48eeb2b177e19 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 14:57:04 +0200 Subject: [PATCH 1507/1635] Improve config flow type hints in volumio (#125318) --- .../components/volumio/config_flow.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/volumio/config_flow.py b/homeassistant/components/volumio/config_flow.py index 4c7a48f36c7..7cc58556f3e 100644 --- a/homeassistant/components/volumio/config_flow.py +++ b/homeassistant/components/volumio/config_flow.py @@ -11,7 +11,7 @@ import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_ID, CONF_NAME, CONF_PORT -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -25,7 +25,7 @@ DATA_SCHEMA = vol.Schema( ) -async def validate_input(hass, host, port): +async def validate_input(hass: HomeAssistant, host: str, port: int) -> dict[str, Any]: """Validate the user input allows us to connect.""" volumio = Volumio(host, port, async_get_clientsession(hass)) @@ -40,15 +40,13 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize flow.""" - self._host: str | None = None - self._port: int | None = None - self._name: str | None = None - self._uuid: str | None = None + _host: str + _port: int + _name: str + _uuid: str | None @callback - def _async_get_entry(self): + def _async_get_entry(self) -> ConfigFlowResult: return self.async_create_entry( title=self._name, data={ @@ -103,7 +101,7 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle zeroconf discovery.""" self._host = discovery_info.host - self._port = discovery_info.port + self._port = discovery_info.port or 3000 self._name = discovery_info.properties["volumioName"] self._uuid = discovery_info.properties["UUID"] @@ -111,7 +109,9 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_discovery_confirm() - async def async_step_discovery_confirm(self, user_input=None): + async def async_step_discovery_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered node.""" if user_input is 
not None: try: From f3e2c5177435a0fb7dd97f20fe358243cfc8d399 Mon Sep 17 00:00:00 2001 From: Jan Rieger Date: Fri, 6 Sep 2024 14:59:02 +0200 Subject: [PATCH 1508/1635] Add translations to Xiaomi Miio (#123822) * Add translations to Xiaomi Miio * Deduplicate translations --- .../components/xiaomi_miio/binary_sensor.py | 16 +- .../components/xiaomi_miio/button.py | 12 +- .../components/xiaomi_miio/number.py | 18 +- .../components/xiaomi_miio/sensor.py | 79 +++---- .../components/xiaomi_miio/strings.json | 217 ++++++++++++++++++ .../components/xiaomi_miio/switch.py | 22 +- 6 files changed, 287 insertions(+), 77 deletions(-) diff --git a/homeassistant/components/xiaomi_miio/binary_sensor.py b/homeassistant/components/xiaomi_miio/binary_sensor.py index 6d1a81007dc..5d4b2042429 100644 --- a/homeassistant/components/xiaomi_miio/binary_sensor.py +++ b/homeassistant/components/xiaomi_miio/binary_sensor.py @@ -56,13 +56,13 @@ class XiaomiMiioBinarySensorDescription(BinarySensorEntityDescription): BINARY_SENSOR_TYPES = ( XiaomiMiioBinarySensorDescription( key=ATTR_NO_WATER, - name="Water tank empty", + translation_key=ATTR_NO_WATER, icon="mdi:water-off-outline", entity_category=EntityCategory.DIAGNOSTIC, ), XiaomiMiioBinarySensorDescription( key=ATTR_WATER_TANK_DETACHED, - name="Water tank", + translation_key=ATTR_WATER_TANK_DETACHED, icon="mdi:car-coolant-level", device_class=BinarySensorDeviceClass.CONNECTIVITY, value=lambda value: not value, @@ -70,13 +70,13 @@ BINARY_SENSOR_TYPES = ( ), XiaomiMiioBinarySensorDescription( key=ATTR_PTC_STATUS, - name="Auxiliary heat status", + translation_key=ATTR_PTC_STATUS, device_class=BinarySensorDeviceClass.POWER, entity_category=EntityCategory.DIAGNOSTIC, ), XiaomiMiioBinarySensorDescription( key=ATTR_POWERSUPPLY_ATTACHED, - name="Power supply", + translation_key=ATTR_POWERSUPPLY_ATTACHED, device_class=BinarySensorDeviceClass.PLUG, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -88,7 +88,7 @@ FAN_ZA5_BINARY_SENSORS = (ATTR_POWERSUPPLY_ATTACHED,) VACUUM_SENSORS = { ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_BOX_ATTACHED, - name="Mop attached", + translation_key=ATTR_WATER_BOX_ATTACHED, icon="mdi:square-rounded", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -97,7 +97,7 @@ VACUUM_SENSORS = { ), ATTR_WATER_BOX_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_BOX_ATTACHED, - name="Water box attached", + translation_key=ATTR_WATER_BOX_ATTACHED, icon="mdi:water", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -106,7 +106,7 @@ VACUUM_SENSORS = { ), ATTR_WATER_SHORTAGE: XiaomiMiioBinarySensorDescription( key=ATTR_WATER_SHORTAGE, - name="Water shortage", + translation_key=ATTR_WATER_SHORTAGE, icon="mdi:water", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, @@ -119,7 +119,7 @@ VACUUM_SENSORS_SEPARATE_MOP = { **VACUUM_SENSORS, ATTR_MOP_ATTACHED: XiaomiMiioBinarySensorDescription( key=ATTR_MOP_ATTACHED, - name="Mop attached", + translation_key=ATTR_MOP_ATTACHED, icon="mdi:square-rounded", parent_key=VacuumCoordinatorDataAttributes.status, entity_registry_enabled_default=True, diff --git a/homeassistant/components/xiaomi_miio/button.py b/homeassistant/components/xiaomi_miio/button.py index 38e6afa5ffb..7496f765fe3 100644 --- a/homeassistant/components/xiaomi_miio/button.py +++ b/homeassistant/components/xiaomi_miio/button.py @@ -51,7 +51,7 @@ BUTTON_TYPES = ( # Fans XiaomiMiioButtonDescription( 
key=ATTR_RESET_DUST_FILTER, - name="Reset dust filter", + translation_key=ATTR_RESET_DUST_FILTER, icon="mdi:air-filter", method_press="reset_dust_filter", method_press_error_message="Resetting the dust filter lifetime failed", @@ -59,7 +59,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_UPPER_FILTER, - name="Reset upper filter", + translation_key=ATTR_RESET_UPPER_FILTER, icon="mdi:air-filter", method_press="reset_upper_filter", method_press_error_message="Resetting the upper filter lifetime failed.", @@ -68,7 +68,7 @@ BUTTON_TYPES = ( # Vacuums XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_MAIN_BRUSH, - name="Reset main brush", + translation_key=ATTR_RESET_VACUUM_MAIN_BRUSH, icon="mdi:brush", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.MainBrush, @@ -77,7 +77,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_SIDE_BRUSH, - name="Reset side brush", + translation_key=ATTR_RESET_VACUUM_SIDE_BRUSH, icon="mdi:brush", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.SideBrush, @@ -86,7 +86,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_FILTER, - name="Reset filter", + translation_key=ATTR_RESET_VACUUM_FILTER, icon="mdi:air-filter", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.Filter, @@ -95,7 +95,7 @@ BUTTON_TYPES = ( ), XiaomiMiioButtonDescription( key=ATTR_RESET_VACUUM_SENSOR_DIRTY, - name="Reset sensor dirty", + translation_key=ATTR_RESET_VACUUM_SENSOR_DIRTY, icon="mdi:eye-outline", method_press=METHOD_VACUUM_RESET_CONSUMABLE, method_press_params=Consumable.SensorDirty, diff --git a/homeassistant/components/xiaomi_miio/number.py b/homeassistant/components/xiaomi_miio/number.py index a0ae0ea5078..107debb7a60 100644 --- a/homeassistant/components/xiaomi_miio/number.py +++ b/homeassistant/components/xiaomi_miio/number.py @@ -139,7 +139,7 @@ class FavoriteLevelValues: NUMBER_TYPES = { FEATURE_SET_MOTOR_SPEED: XiaomiMiioNumberDescription( key=ATTR_MOTOR_SPEED, - name="Motor speed", + translation_key=ATTR_MOTOR_SPEED, icon="mdi:fast-forward-outline", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, native_min_value=200, @@ -151,7 +151,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAVORITE_LEVEL: XiaomiMiioNumberDescription( key=ATTR_FAVORITE_LEVEL, - name="Favorite level", + translation_key=ATTR_FAVORITE_LEVEL, icon="mdi:star-cog", native_min_value=0, native_max_value=17, @@ -161,7 +161,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAN_LEVEL: XiaomiMiioNumberDescription( key=ATTR_FAN_LEVEL, - name="Fan level", + translation_key=ATTR_FAN_LEVEL, icon="mdi:fan", native_min_value=1, native_max_value=3, @@ -171,7 +171,7 @@ NUMBER_TYPES = { ), FEATURE_SET_VOLUME: XiaomiMiioNumberDescription( key=ATTR_VOLUME, - name="Volume", + translation_key=ATTR_VOLUME, icon="mdi:volume-high", native_min_value=0, native_max_value=100, @@ -181,7 +181,7 @@ NUMBER_TYPES = { ), FEATURE_SET_OSCILLATION_ANGLE: XiaomiMiioNumberDescription( key=ATTR_OSCILLATION_ANGLE, - name="Oscillation angle", + translation_key=ATTR_OSCILLATION_ANGLE, icon="mdi:angle-acute", native_unit_of_measurement=DEGREE, native_min_value=1, @@ -192,7 +192,7 @@ NUMBER_TYPES = { ), FEATURE_SET_DELAY_OFF_COUNTDOWN: XiaomiMiioNumberDescription( key=ATTR_DELAY_OFF_COUNTDOWN, - name="Delay off countdown", + translation_key=ATTR_DELAY_OFF_COUNTDOWN, icon="mdi:fan-off", native_unit_of_measurement=UnitOfTime.MINUTES, native_min_value=0, @@ -203,7 +203,7 @@ NUMBER_TYPES = { ), FEATURE_SET_LED_BRIGHTNESS: 
XiaomiMiioNumberDescription( key=ATTR_LED_BRIGHTNESS, - name="LED brightness", + translation_key=ATTR_LED_BRIGHTNESS, icon="mdi:brightness-6", native_min_value=0, native_max_value=100, @@ -213,7 +213,7 @@ NUMBER_TYPES = { ), FEATURE_SET_LED_BRIGHTNESS_LEVEL: XiaomiMiioNumberDescription( key=ATTR_LED_BRIGHTNESS_LEVEL, - name="LED brightness", + translation_key=ATTR_LED_BRIGHTNESS_LEVEL, icon="mdi:brightness-6", native_min_value=0, native_max_value=8, @@ -223,7 +223,7 @@ NUMBER_TYPES = { ), FEATURE_SET_FAVORITE_RPM: XiaomiMiioNumberDescription( key=ATTR_FAVORITE_RPM, - name="Favorite motor speed", + translation_key=ATTR_FAVORITE_RPM, icon="mdi:star-cog", native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, native_min_value=300, diff --git a/homeassistant/components/xiaomi_miio/sensor.py b/homeassistant/components/xiaomi_miio/sensor.py index ab992a8fe96..9b23e89903f 100644 --- a/homeassistant/components/xiaomi_miio/sensor.py +++ b/homeassistant/components/xiaomi_miio/sensor.py @@ -162,34 +162,31 @@ class XiaomiMiioSensorDescription(SensorEntityDescription): SENSOR_TYPES = { ATTR_TEMPERATURE: XiaomiMiioSensorDescription( key=ATTR_TEMPERATURE, - name="Temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_HUMIDITY: XiaomiMiioSensorDescription( key=ATTR_HUMIDITY, - name="Humidity", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PRESSURE: XiaomiMiioSensorDescription( key=ATTR_PRESSURE, - name="Pressure", native_unit_of_measurement=UnitOfPressure.HPA, device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_LOAD_POWER: XiaomiMiioSensorDescription( key=ATTR_LOAD_POWER, - name="Load power", + translation_key=ATTR_LOAD_POWER, native_unit_of_measurement=UnitOfPower.WATT, device_class=SensorDeviceClass.POWER, ), ATTR_WATER_LEVEL: XiaomiMiioSensorDescription( key=ATTR_WATER_LEVEL, - name="Water level", + translation_key=ATTR_WATER_LEVEL, native_unit_of_measurement=PERCENTAGE, icon="mdi:water-check", state_class=SensorStateClass.MEASUREMENT, @@ -197,7 +194,7 @@ SENSOR_TYPES = { ), ATTR_ACTUAL_SPEED: XiaomiMiioSensorDescription( key=ATTR_ACTUAL_SPEED, - name="Actual speed", + translation_key=ATTR_ACTUAL_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -205,7 +202,7 @@ SENSOR_TYPES = { ), ATTR_CONTROL_SPEED: XiaomiMiioSensorDescription( key=ATTR_CONTROL_SPEED, - name="Control speed", + translation_key=ATTR_CONTROL_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -213,7 +210,7 @@ SENSOR_TYPES = { ), ATTR_FAVORITE_SPEED: XiaomiMiioSensorDescription( key=ATTR_FAVORITE_SPEED, - name="Favorite speed", + translation_key=ATTR_FAVORITE_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -221,7 +218,7 @@ SENSOR_TYPES = { ), ATTR_MOTOR_SPEED: XiaomiMiioSensorDescription( key=ATTR_MOTOR_SPEED, - name="Motor speed", + translation_key=ATTR_MOTOR_SPEED, native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -229,7 +226,7 @@ SENSOR_TYPES = { ), ATTR_MOTOR2_SPEED: XiaomiMiioSensorDescription( key=ATTR_MOTOR2_SPEED, - name="Second motor speed", + translation_key=ATTR_MOTOR2_SPEED, 
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE, icon="mdi:fast-forward", state_class=SensorStateClass.MEASUREMENT, @@ -237,7 +234,7 @@ SENSOR_TYPES = { ), ATTR_USE_TIME: XiaomiMiioSensorDescription( key=ATTR_USE_TIME, - name="Use time", + translation_key=ATTR_USE_TIME, native_unit_of_measurement=UnitOfTime.SECONDS, icon="mdi:progress-clock", device_class=SensorDeviceClass.DURATION, @@ -247,54 +244,52 @@ SENSOR_TYPES = { ), ATTR_ILLUMINANCE: XiaomiMiioSensorDescription( key=ATTR_ILLUMINANCE, - name="Illuminance", + translation_key=ATTR_ILLUMINANCE, native_unit_of_measurement=UNIT_LUMEN, state_class=SensorStateClass.MEASUREMENT, ), ATTR_ILLUMINANCE_LUX: XiaomiMiioSensorDescription( key=ATTR_ILLUMINANCE, - name="Illuminance", native_unit_of_measurement=LIGHT_LUX, device_class=SensorDeviceClass.ILLUMINANCE, state_class=SensorStateClass.MEASUREMENT, ), ATTR_AIR_QUALITY: XiaomiMiioSensorDescription( key=ATTR_AIR_QUALITY, + translation_key=ATTR_AIR_QUALITY, native_unit_of_measurement="AQI", icon="mdi:cloud", state_class=SensorStateClass.MEASUREMENT, ), ATTR_TVOC: XiaomiMiioSensorDescription( key=ATTR_TVOC, - name="TVOC", + translation_key=ATTR_TVOC, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS, ), ATTR_PM10: XiaomiMiioSensorDescription( key=ATTR_PM10, - name="PM10", native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM10, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PM25: XiaomiMiioSensorDescription( key=ATTR_AQI, - name="PM2.5", + translation_key=ATTR_AQI, native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PM25_2: XiaomiMiioSensorDescription( key=ATTR_PM25, - name="PM2.5", native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, device_class=SensorDeviceClass.PM25, state_class=SensorStateClass.MEASUREMENT, ), ATTR_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_FILTER_LIFE_REMAINING, - name="Filter lifetime remaining", + translation_key=ATTR_FILTER_LIFE_REMAINING, native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -303,7 +298,7 @@ SENSOR_TYPES = { ), ATTR_FILTER_USE: XiaomiMiioSensorDescription( key=ATTR_FILTER_HOURS_USED, - name="Filter use", + translation_key=ATTR_FILTER_HOURS_USED, native_unit_of_measurement=UnitOfTime.HOURS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -312,7 +307,7 @@ SENSOR_TYPES = { ), ATTR_FILTER_LEFT_TIME: XiaomiMiioSensorDescription( key=ATTR_FILTER_LEFT_TIME, - name="Filter lifetime left", + translation_key=ATTR_FILTER_LEFT_TIME, native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -321,7 +316,7 @@ SENSOR_TYPES = { ), ATTR_DUST_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_DUST_FILTER_LIFE_REMAINING, - name="Dust filter lifetime remaining", + translation_key=ATTR_DUST_FILTER_LIFE_REMAINING, native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -330,7 +325,7 @@ SENSOR_TYPES = { ), ATTR_DUST_FILTER_LIFE_REMAINING_DAYS: XiaomiMiioSensorDescription( key=ATTR_DUST_FILTER_LIFE_REMAINING_DAYS, - name="Dust filter lifetime remaining days", + translation_key=ATTR_DUST_FILTER_LIFE_REMAINING_DAYS, native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", 
device_class=SensorDeviceClass.DURATION, @@ -339,7 +334,7 @@ SENSOR_TYPES = { ), ATTR_UPPER_FILTER_LIFE_REMAINING: XiaomiMiioSensorDescription( key=ATTR_UPPER_FILTER_LIFE_REMAINING, - name="Upper filter lifetime remaining", + translation_key=ATTR_UPPER_FILTER_LIFE_REMAINING, native_unit_of_measurement=PERCENTAGE, icon="mdi:air-filter", state_class=SensorStateClass.MEASUREMENT, @@ -348,7 +343,7 @@ SENSOR_TYPES = { ), ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS: XiaomiMiioSensorDescription( key=ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS, - name="Upper filter lifetime remaining days", + translation_key=ATTR_UPPER_FILTER_LIFE_REMAINING_DAYS, native_unit_of_measurement=UnitOfTime.DAYS, icon="mdi:clock-outline", device_class=SensorDeviceClass.DURATION, @@ -357,14 +352,13 @@ SENSOR_TYPES = { ), ATTR_CARBON_DIOXIDE: XiaomiMiioSensorDescription( key=ATTR_CARBON_DIOXIDE, - name="Carbon dioxide", native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION, device_class=SensorDeviceClass.CO2, state_class=SensorStateClass.MEASUREMENT, ), ATTR_PURIFY_VOLUME: XiaomiMiioSensorDescription( key=ATTR_PURIFY_VOLUME, - name="Purify volume", + translation_key=ATTR_PURIFY_VOLUME, native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, device_class=SensorDeviceClass.VOLUME, state_class=SensorStateClass.TOTAL_INCREASING, @@ -373,7 +367,6 @@ SENSOR_TYPES = { ), ATTR_BATTERY: XiaomiMiioSensorDescription( key=ATTR_BATTERY, - name="Battery", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.BATTERY, state_class=SensorStateClass.MEASUREMENT, @@ -587,7 +580,7 @@ VACUUM_SENSORS = { f"dnd_{ATTR_DND_START}": XiaomiMiioSensorDescription( key=ATTR_DND_START, icon="mdi:minus-circle-off", - name="DnD start", + translation_key="dnd_start", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.dnd_status, entity_registry_enabled_default=False, @@ -596,7 +589,7 @@ VACUUM_SENSORS = { f"dnd_{ATTR_DND_END}": XiaomiMiioSensorDescription( key=ATTR_DND_END, icon="mdi:minus-circle-off", - name="DnD end", + translation_key="dnd_end", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.dnd_status, entity_registry_enabled_default=False, @@ -605,7 +598,7 @@ VACUUM_SENSORS = { f"last_clean_{ATTR_LAST_CLEAN_START}": XiaomiMiioSensorDescription( key=ATTR_LAST_CLEAN_START, icon="mdi:clock-time-twelve", - name="Last clean start", + translation_key="last_clean_start", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, entity_category=EntityCategory.DIAGNOSTIC, @@ -615,7 +608,7 @@ VACUUM_SENSORS = { icon="mdi:clock-time-twelve", device_class=SensorDeviceClass.TIMESTAMP, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - name="Last clean end", + translation_key="last_clean_end", entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_TIME}": XiaomiMiioSensorDescription( @@ -624,7 +617,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_LAST_CLEAN_TIME, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - name="Last clean duration", + translation_key=ATTR_LAST_CLEAN_TIME, entity_category=EntityCategory.DIAGNOSTIC, ), f"last_clean_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( @@ -632,7 +625,7 @@ VACUUM_SENSORS = { icon="mdi:texture-box", key=ATTR_LAST_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.last_clean_details, - name="Last clean area", + translation_key=ATTR_LAST_CLEAN_AREA, entity_category=EntityCategory.DIAGNOSTIC, ), 
f"current_{ATTR_STATUS_CLEAN_TIME}": XiaomiMiioSensorDescription( @@ -641,7 +634,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_STATUS_CLEAN_TIME, parent_key=VacuumCoordinatorDataAttributes.status, - name="Current clean duration", + translation_key=ATTR_STATUS_CLEAN_TIME, entity_category=EntityCategory.DIAGNOSTIC, ), f"current_{ATTR_LAST_CLEAN_AREA}": XiaomiMiioSensorDescription( @@ -650,7 +643,7 @@ VACUUM_SENSORS = { key=ATTR_STATUS_CLEAN_AREA, parent_key=VacuumCoordinatorDataAttributes.status, entity_category=EntityCategory.DIAGNOSTIC, - name="Current clean area", + translation_key=ATTR_STATUS_CLEAN_AREA, ), f"clean_history_{ATTR_CLEAN_HISTORY_TOTAL_DURATION}": XiaomiMiioSensorDescription( native_unit_of_measurement=UnitOfTime.SECONDS, @@ -658,7 +651,7 @@ VACUUM_SENSORS = { icon="mdi:timer-sand", key=ATTR_CLEAN_HISTORY_TOTAL_DURATION, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total duration", + translation_key=ATTR_CLEAN_HISTORY_TOTAL_DURATION, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -667,7 +660,7 @@ VACUUM_SENSORS = { icon="mdi:texture-box", key=ATTR_CLEAN_HISTORY_TOTAL_AREA, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total clean area", + translation_key=ATTR_CLEAN_HISTORY_TOTAL_AREA, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -677,7 +670,7 @@ VACUUM_SENSORS = { state_class=SensorStateClass.TOTAL_INCREASING, key=ATTR_CLEAN_HISTORY_COUNT, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total clean count", + translation_key=ATTR_CLEAN_HISTORY_COUNT, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -687,7 +680,7 @@ VACUUM_SENSORS = { state_class=SensorStateClass.TOTAL_INCREASING, key=ATTR_CLEAN_HISTORY_DUST_COLLECTION_COUNT, parent_key=VacuumCoordinatorDataAttributes.clean_history_status, - name="Total dust collection count", + translation_key=ATTR_CLEAN_HISTORY_DUST_COLLECTION_COUNT, entity_registry_enabled_default=False, entity_category=EntityCategory.DIAGNOSTIC, ), @@ -697,7 +690,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_MAIN_BRUSH_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Main brush left", + translation_key=ATTR_CONSUMABLE_STATUS_MAIN_BRUSH_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT}": XiaomiMiioSensorDescription( @@ -706,7 +699,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Side brush left", + translation_key=ATTR_CONSUMABLE_STATUS_SIDE_BRUSH_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_FILTER_LEFT}": XiaomiMiioSensorDescription( @@ -715,7 +708,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_FILTER_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Filter left", + translation_key=ATTR_CONSUMABLE_STATUS_FILTER_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), f"consumable_{ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT}": XiaomiMiioSensorDescription( @@ -724,7 +717,7 @@ VACUUM_SENSORS = { device_class=SensorDeviceClass.DURATION, key=ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT, parent_key=VacuumCoordinatorDataAttributes.consumable_status, - name="Sensor dirty left", + 
translation_key=ATTR_CONSUMABLE_STATUS_SENSOR_DIRTY_LEFT, entity_category=EntityCategory.DIAGNOSTIC, ), } diff --git a/homeassistant/components/xiaomi_miio/strings.json b/homeassistant/components/xiaomi_miio/strings.json index bbdc3f5737d..6419c9056a5 100644 --- a/homeassistant/components/xiaomi_miio/strings.json +++ b/homeassistant/components/xiaomi_miio/strings.json @@ -105,6 +105,223 @@ } } } + }, + "binary_sensor": { + "no_water": { + "name": "Water tank empty" + }, + "water_tank_detached": { + "name": "Water tank" + }, + "ptc_status": { + "name": "Auxiliary heat status" + }, + "powersupply_attached": { + "name": "Power supply" + }, + "is_water_box_attached": { + "name": "Mop attached" + }, + "is_water_shortage": { + "name": "Water shortage" + }, + "is_water_box_carriage_attached": { + "name": "[%key:component::xiaomi_miio::entity::binary_sensor::is_water_box_attached::name%]" + } + }, + "button": { + "reset_dust_filter": { + "name": "Reset dust filter" + }, + "reset_upper_filter": { + "name": "Reset upper filter" + }, + "reset_vacuum_main_brush": { + "name": "Reset main brush" + }, + "reset_vacuum_side_brush": { + "name": "Reset side brush" + }, + "reset_vacuum_filter": { + "name": "Reset filter" + }, + "reset_vacuum_sensor_dirty": { + "name": "Reset sensor dirty" + } + }, + "number": { + "motor_speed": { + "name": "Motor speed" + }, + "favorite_level": { + "name": "Favorite level" + }, + "fan_level": { + "name": "Fan level" + }, + "volume": { + "name": "Volume" + }, + "angle": { + "name": "Oscillation angle" + }, + "delay_off_countdown": { + "name": "Delay off countdown" + }, + "led_brightness": { + "name": "LED brightness" + }, + "led_brightness_level": { + "name": "LED brightness" + }, + "favorite_rpm": { + "name": "Favorite motor speed" + } + }, + "sensor": { + "load_power": { + "name": "Load power" + }, + "water_level": { + "name": "Water level" + }, + "actual_speed": { + "name": "Actual speed" + }, + "control_speed": { + "name": "Control speed" + }, + "favorite_speed": { + "name": "Favorite speed" + }, + "motor_speed": { + "name": "[%key:component::xiaomi_miio::entity::number::motor_speed::name%]" + }, + "motor2_speed": { + "name": "Second motor speed" + }, + "use_time": { + "name": "Use time" + }, + "illuminance": { + "name": "[%key:component::sensor::entity_component::illuminance::name%]" + }, + "air_quality": { + "name": "Air quality" + }, + "tvoc": { + "name": "TVOC" + }, + "air_quality_index": { + "name": "Air quality index" + }, + "filter_life_remaining": { + "name": "Filter lifetime remaining" + }, + "filter_hours_used": { + "name": "Filter use" + }, + "filter_left_time": { + "name": "Filter lifetime left" + }, + "dust_filter_life_remaining": { + "name": "Dust filter lifetime remaining" + }, + "dust_filter_life_remaining_days": { + "name": "Dust filter lifetime remaining days" + }, + "upper_filter_life_remaining": { + "name": "Upper filter lifetime remaining" + }, + "upper_filter_life_remaining_days": { + "name": "Upper filter lifetime remaining days" + }, + "purify_volume": { + "name": "Purify volume" + }, + "dnd_start": { + "name": "DnD start" + }, + "dnd_end": { + "name": "DnD end" + }, + "last_clean_start": { + "name": "Last clean start" + }, + "last_clean_end": { + "name": "Last clean end" + }, + "duration": { + "name": "Last clean duration" + }, + "area": { + "name": "Last clean area" + }, + "clean_time": { + "name": "Current clean duration" + }, + "clean_area": { + "name": "Current clean area" + }, + "total_duration": { + "name": "Total duration" + }, + 
"total_area": { + "name": "Total clean area" + }, + "count": { + "name": "Total clean count" + }, + "dust_collection_count": { + "name": "Total dust collection count" + }, + "main_brush_left": { + "name": "Main brush left" + }, + "side_brush_left": { + "name": "Side brush left" + }, + "filter_left": { + "name": "Filter left" + }, + "sensor_dirty_left": { + "name": "Sensor dirty left" + } + }, + "switch": { + "buzzer": { + "name": "Buzzer" + }, + "child_lock": { + "name": "Child lock" + }, + "display": { + "name": "Display" + }, + "dry": { + "name": "Dry mode" + }, + "clean_mode": { + "name": "Clean mode" + }, + "led": { + "name": "LED" + }, + "learn_mode": { + "name": "Learn mode" + }, + "auto_detect": { + "name": "Auto detect" + }, + "ionizer": { + "name": "Ionizer" + }, + "anion": { + "name": "[%key:component::xiaomi_miio::entity::switch::ionizer::name%]" + }, + "ptc": { + "name": "Auxiliary heat" + } } }, "services": { diff --git a/homeassistant/components/xiaomi_miio/switch.py b/homeassistant/components/xiaomi_miio/switch.py index 797a98d9fa1..42eb6cc0838 100644 --- a/homeassistant/components/xiaomi_miio/switch.py +++ b/homeassistant/components/xiaomi_miio/switch.py @@ -236,7 +236,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_BUZZER, feature=FEATURE_SET_BUZZER, - name="Buzzer", + translation_key=ATTR_BUZZER, icon="mdi:volume-high", method_on="async_set_buzzer_on", method_off="async_set_buzzer_off", @@ -245,7 +245,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_CHILD_LOCK, feature=FEATURE_SET_CHILD_LOCK, - name="Child lock", + translation_key=ATTR_CHILD_LOCK, icon="mdi:lock", method_on="async_set_child_lock_on", method_off="async_set_child_lock_off", @@ -254,7 +254,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_DISPLAY, feature=FEATURE_SET_DISPLAY, - name="Display", + translation_key=ATTR_DISPLAY, icon="mdi:led-outline", method_on="async_set_display_on", method_off="async_set_display_off", @@ -263,7 +263,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_DRY, feature=FEATURE_SET_DRY, - name="Dry mode", + translation_key=ATTR_DRY, icon="mdi:hair-dryer", method_on="async_set_dry_on", method_off="async_set_dry_off", @@ -272,7 +272,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_CLEAN, feature=FEATURE_SET_CLEAN, - name="Clean mode", + translation_key=ATTR_CLEAN, icon="mdi:shimmer", method_on="async_set_clean_on", method_off="async_set_clean_off", @@ -282,7 +282,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_LED, feature=FEATURE_SET_LED, - name="LED", + translation_key=ATTR_LED, icon="mdi:led-outline", method_on="async_set_led_on", method_off="async_set_led_off", @@ -291,7 +291,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_LEARN_MODE, feature=FEATURE_SET_LEARN_MODE, - name="Learn mode", + translation_key=ATTR_LEARN_MODE, icon="mdi:school-outline", method_on="async_set_learn_mode_on", method_off="async_set_learn_mode_off", @@ -300,7 +300,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_AUTO_DETECT, feature=FEATURE_SET_AUTO_DETECT, - name="Auto detect", + translation_key=ATTR_AUTO_DETECT, method_on="async_set_auto_detect_on", method_off="async_set_auto_detect_off", entity_category=EntityCategory.CONFIG, @@ -308,7 +308,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_IONIZER, feature=FEATURE_SET_IONIZER, - name="Ionizer", + translation_key=ATTR_IONIZER, icon="mdi:shimmer", method_on="async_set_ionizer_on", method_off="async_set_ionizer_off", @@ -317,7 +317,7 @@ SWITCH_TYPES = ( 
XiaomiMiioSwitchDescription( key=ATTR_ANION, feature=FEATURE_SET_ANION, - name="Ionizer", + translation_key=ATTR_ANION, icon="mdi:shimmer", method_on="async_set_anion_on", method_off="async_set_anion_off", @@ -326,7 +326,7 @@ SWITCH_TYPES = ( XiaomiMiioSwitchDescription( key=ATTR_PTC, feature=FEATURE_SET_PTC, - name="Auxiliary heat", + translation_key=ATTR_PTC, icon="mdi:radiator", method_on="async_set_ptc_on", method_off="async_set_ptc_off", From af0a6d2820cdd0424aff742e4794351783da00ea Mon Sep 17 00:00:00 2001 From: Simon Lamon <32477463+silamon@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:02:38 +0200 Subject: [PATCH 1509/1635] Improve play media support in LinkPlay (#125205) Improve play media support in linkplay --- homeassistant/components/linkplay/media_player.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index b1fa0e2a5c5..20b0f63f6a3 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -20,6 +20,9 @@ from homeassistant.components.media_player import ( MediaType, RepeatMode, ) +from homeassistant.components.media_player.browse_media import ( + async_process_play_media_url, +) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr @@ -238,10 +241,14 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): self, media_type: MediaType | str, media_id: str, **kwargs: Any ) -> None: """Play a piece of media.""" - media = await media_source.async_resolve_media( - self.hass, media_id, self.entity_id - ) - await self._bridge.player.play(media.url) + if media_source.is_media_source_id(media_id): + play_item = await media_source.async_resolve_media( + self.hass, media_id, self.entity_id + ) + media_id = play_item.url + + url = async_process_play_media_url(self.hass, media_id) + await self._bridge.player.play(url) def _update_properties(self) -> None: """Update the properties of the media player.""" From 58056c49f7e782548075109df6b9628175c65cf8 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:08:13 +0200 Subject: [PATCH 1510/1635] Improve config flow type hints (t-z) (#125315) --- homeassistant/components/wiffi/config_flow.py | 4 +++- homeassistant/components/wilight/config_flow.py | 5 ++++- homeassistant/components/ws66i/config_flow.py | 10 +++++++--- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 6e4872ea400..3fcbef395e6 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -83,7 +83,9 @@ class OptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, int] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/wilight/config_flow.py b/homeassistant/components/wilight/config_flow.py index babc011fc35..8795da19091 100644 --- a/homeassistant/components/wilight/config_flow.py +++ b/homeassistant/components/wilight/config_flow.py @@ -1,5 +1,6 @@ """Config flow to configure 
WiLight.""" +from typing import Any from urllib.parse import urlparse import pywilight @@ -89,7 +90,9 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {"name": self._title} return await self.async_step_confirm() - async def async_step_confirm(self, user_input=None): + async def async_step_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user-confirmation of discovered WiLight.""" if user_input is not None: return self._get_entry() diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index 330e9963f95..9f6f4ca59c2 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -49,7 +49,7 @@ FIRST_ZONE = 11 @callback -def _sources_from_config(data): +def _sources_from_config(data: dict[str, str]) -> dict[str, str]: sources_config = { str(idx + 1): data.get(source) for idx, source in enumerate(SOURCES) } @@ -134,7 +134,9 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): @callback -def _key_for_source(index, source, previous_sources): +def _key_for_source( + index: int, source: str, previous_sources: dict[str, str] +) -> vol.Required: return vol.Required( source, description={"suggested_value": previous_sources[str(index)]} ) @@ -147,7 +149,9 @@ class Ws66iOptionsFlowHandler(OptionsFlow): """Initialize.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry( From f5f8c44ca6cb3da4726e52da712158d29db25421 Mon Sep 17 00:00:00 2001 From: Eric Shtivelberg <295836+shedokan@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:08:30 +0300 Subject: [PATCH 1511/1635] Add Habitica up/down attributes for tasks (#125356) add: up/down --- homeassistant/components/habitica/sensor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 8762345b597..fed1375c893 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -140,6 +140,8 @@ TASKS_MAP = { "frequency": "frequency", "every_x": "everyX", "streak": "streak", + "up": "up", + "down": "down", "counter_up": "counterUp", "counter_down": "counterDown", "next_due": "nextDue", From 66c6cd2a109d35ca2efb834d30b860b906d31a32 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:16:32 +0200 Subject: [PATCH 1512/1635] Improve config flow type hints in xiaomi_aqara (#125316) --- .../components/xiaomi_aqara/config_flow.py | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/xiaomi_aqara/config_flow.py b/homeassistant/components/xiaomi_aqara/config_flow.py index a89bb8447a3..6252e6849d0 100644 --- a/homeassistant/components/xiaomi_aqara/config_flow.py +++ b/homeassistant/components/xiaomi_aqara/config_flow.py @@ -2,7 +2,7 @@ import logging from socket import gaierror -from typing import TYPE_CHECKING, Any +from typing import Any import voluptuous as vol from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery @@ -50,13 +50,14 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + selected_gateway: XiaomiGateway + gateways: dict[str, XiaomiGateway] + def __init__(self) 
-> None: """Initialize.""" self.host: str | None = None self.interface = DEFAULT_INTERFACE self.sid: str | None = None - self.gateways: dict[str, XiaomiGateway] | None = None - self.selected_gateway: XiaomiGateway | None = None @callback def async_show_form_step_user(self, errors): @@ -99,8 +100,6 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): None, ) - if TYPE_CHECKING: - assert self.selected_gateway if self.selected_gateway.connection_error: errors[CONF_HOST] = "invalid_host" if self.selected_gateway.mac_error: @@ -120,8 +119,6 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): self.gateways = xiaomi.gateways - if TYPE_CHECKING: - assert self.gateways is not None if len(self.gateways) == 1: self.selected_gateway = list(self.gateways.values())[0] self.sid = self.selected_gateway.sid @@ -132,9 +129,11 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "discovery_error" return self.async_show_form_step_user(errors) - async def async_step_select(self, user_input=None): + async def async_step_select( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle multiple aqara gateways found.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: ip_adress = user_input["select_ip"] self.selected_gateway = self.gateways[ip_adress] @@ -192,7 +191,9 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_settings(self, user_input=None): + async def async_step_settings( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Specify settings and connect aqara gateway.""" errors = {} if user_input is not None: From b68c90d59a93953baf9dc43c8a3c55bc1d0488f3 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:16:47 +0200 Subject: [PATCH 1513/1635] Improve config flow type hints in vulcan (#125308) * Improve config flow type hints in vulcan * Adjust tests --- .../components/vulcan/config_flow.py | 56 +++++++++++++------ homeassistant/components/vulcan/register.py | 4 +- tests/components/vulcan/test_config_flow.py | 4 +- 3 files changed, 43 insertions(+), 21 deletions(-) diff --git a/homeassistant/components/vulcan/config_flow.py b/homeassistant/components/vulcan/config_flow.py index 5938e4ce690..f02adba9f75 100644 --- a/homeassistant/components/vulcan/config_flow.py +++ b/homeassistant/components/vulcan/config_flow.py @@ -2,7 +2,7 @@ from collections.abc import Mapping import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp import ClientConnectionError import voluptuous as vol @@ -16,6 +16,7 @@ from vulcan import ( UnauthorizedCertificateException, Vulcan, ) +from vulcan.model import Student from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PIN, CONF_REGION, CONF_TOKEN @@ -38,11 +39,12 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + account: Account + keystore: Keystore + def __init__(self) -> None: """Initialize config flow.""" - self.account = None - self.keystore = None - self.students = None + self.students: list[Student] | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -53,13 +55,16 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_auth() - async def async_step_auth(self, user_input=None, errors=None): + async def async_step_auth( + self, + user_input: dict[str, str] | None = None, + 
errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Authorize integration.""" if user_input is not None: try: credentials = await register( - self.hass, user_input[CONF_TOKEN], user_input[CONF_REGION], user_input[CONF_PIN], @@ -107,16 +112,20 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_select_student(self, user_input=None): + async def async_step_select_student( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Allow user to select student.""" - errors = {} - students = {} + errors: dict[str, str] = {} + students: dict[str, str] = {} if self.students is not None: for student in self.students: students[str(student.pupil.id)] = ( f"{student.pupil.first_name} {student.pupil.last_name}" ) if user_input is not None: + if TYPE_CHECKING: + assert self.keystore is not None student_id = user_input["student"] await self.async_set_unique_id(str(student_id)) self._abort_if_unique_id_configured() @@ -135,17 +144,25 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_select_saved_credentials(self, user_input=None, errors=None): + async def async_step_select_saved_credentials( + self, + user_input: dict[str, str] | None = None, + errors: dict[str, str] | None = None, + ) -> ConfigFlowResult: """Allow user to select saved credentials.""" - credentials = {} + credentials: dict[str, Any] = {} for entry in self.hass.config_entries.async_entries(DOMAIN): credentials[entry.entry_id] = entry.data["account"]["UserName"] if user_input is not None: - entry = self.hass.config_entries.async_get_entry(user_input["credentials"]) - keystore = Keystore.load(entry.data["keystore"]) - account = Account.load(entry.data["account"]) + existing_entry = self.hass.config_entries.async_get_entry( + user_input["credentials"] + ) + if TYPE_CHECKING: + assert existing_entry is not None + keystore = Keystore.load(existing_entry.data["keystore"]) + account = Account.load(existing_entry.data["account"]) client = Vulcan(keystore, account, async_get_clientsession(self.hass)) try: students = await client.get_students() @@ -189,12 +206,14 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_add_next_config_entry(self, user_input=None): + async def async_step_add_next_config_entry( + self, user_input: dict[str, bool] | None = None + ) -> ConfigFlowResult: """Flow initialized when user is adding next entry of that integration.""" existing_entries = self.hass.config_entries.async_entries(DOMAIN) - errors = {} + errors: dict[str, str] = {} if user_input is not None: if not user_input["use_saved_credentials"]: @@ -248,13 +267,14 @@ class VulcanFlowHandler(ConfigFlow, domain=DOMAIN): """Perform reauth upon an API authentication error.""" return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Reauthorize integration.""" errors = {} if user_input is not None: try: credentials = await register( - self.hass, user_input[CONF_TOKEN], user_input[CONF_REGION], user_input[CONF_PIN], diff --git a/homeassistant/components/vulcan/register.py b/homeassistant/components/vulcan/register.py index 67cceb8d7b8..a3dec97f622 100644 --- a/homeassistant/components/vulcan/register.py +++ b/homeassistant/components/vulcan/register.py @@ -1,9 +1,11 @@ """Support for register Vulcan account.""" +from typing import Any + from 
vulcan import Account, Keystore -async def register(hass, token, symbol, pin): +async def register(token: str, symbol: str, pin: str) -> dict[str, Any]: """Register integration and save credentials.""" keystore = await Keystore.create(device_model="Home Assistant") account = await Account.register(keystore, token, symbol, pin) diff --git a/tests/components/vulcan/test_config_flow.py b/tests/components/vulcan/test_config_flow.py index a72e77b32e8..a51d9727126 100644 --- a/tests/components/vulcan/test_config_flow.py +++ b/tests/components/vulcan/test_config_flow.py @@ -310,7 +310,7 @@ async def test_multiple_config_entries( unique_id="123456", data=json.loads(load_fixture("fake_config_entry_data.json", "vulcan")), ).add_to_hass(hass) - await register.register(hass, "token", "region", "000000") + await register.register("token", "region", "000000") result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} ) @@ -703,7 +703,7 @@ async def test_student_already_exists( | {"student_id": "0"}, ).add_to_hass(hass) - await register.register(hass, "token", "region", "000000") + await register.register("token", "region", "000000") result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} From 543f9869550a1a2645825248914f3904e0b38d3a Mon Sep 17 00:00:00 2001 From: GeoffAtHome Date: Fri, 6 Sep 2024 14:17:50 +0100 Subject: [PATCH 1514/1635] Improve geniushub test coverage (#124157) * Add tests for local connection * Test cloud setup * Improve tests. * Simplied coverage test to cloud setup. * Mock out library and add snapshots * Mock out library and add snapshots * Update tests/components/geniushub/conftest.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Attempt to make it nice * Fix --------- Co-authored-by: Joostlek Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- tests/components/geniushub/__init__.py | 12 + tests/components/geniushub/conftest.py | 39 +- .../fixtures/devices_cloud_test_data.json | 151 +++ .../fixtures/zones_cloud_test_data.json | 1069 +++++++++++++++++ .../snapshots/test_binary_sensor.ambr | 50 + .../geniushub/snapshots/test_climate.ambr | 569 +++++++++ .../geniushub/snapshots/test_sensor.ambr | 954 +++++++++++++++ .../geniushub/snapshots/test_switch.ambr | 166 +++ .../geniushub/test_binary_sensor.py | 32 + tests/components/geniushub/test_climate.py | 30 + tests/components/geniushub/test_sensor.py | 30 + tests/components/geniushub/test_switch.py | 30 + 12 files changed, 3130 insertions(+), 2 deletions(-) create mode 100644 tests/components/geniushub/fixtures/devices_cloud_test_data.json create mode 100644 tests/components/geniushub/fixtures/zones_cloud_test_data.json create mode 100644 tests/components/geniushub/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/geniushub/snapshots/test_climate.ambr create mode 100644 tests/components/geniushub/snapshots/test_sensor.ambr create mode 100644 tests/components/geniushub/snapshots/test_switch.ambr create mode 100644 tests/components/geniushub/test_binary_sensor.py create mode 100644 tests/components/geniushub/test_climate.py create mode 100644 tests/components/geniushub/test_sensor.py create mode 100644 tests/components/geniushub/test_switch.py diff --git a/tests/components/geniushub/__init__.py b/tests/components/geniushub/__init__.py index 15886486e38..ed06642d339 100644 --- a/tests/components/geniushub/__init__.py +++ b/tests/components/geniushub/__init__.py @@ -1 +1,13 
@@ """Tests for the geniushub integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/geniushub/conftest.py b/tests/components/geniushub/conftest.py index 125f1cfa80c..15938eabc62 100644 --- a/tests/components/geniushub/conftest.py +++ b/tests/components/geniushub/conftest.py @@ -1,14 +1,16 @@ """GeniusHub tests configuration.""" from collections.abc import Generator -from unittest.mock import patch +from typing import Any +from unittest.mock import MagicMock, patch +from geniushubclient import GeniusDevice, GeniusZone import pytest from homeassistant.components.geniushub.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_array_fixture from tests.components.smhi.common import AsyncMock @@ -38,6 +40,38 @@ def mock_geniushub_client() -> Generator[AsyncMock]: yield client +@pytest.fixture(scope="session") +def zones() -> list[dict[str, Any]]: + """Return a list of zones.""" + return load_json_array_fixture("zones_cloud_test_data.json", DOMAIN) + + +@pytest.fixture(scope="session") +def devices() -> list[dict[str, Any]]: + """Return a list of devices.""" + return load_json_array_fixture("devices_cloud_test_data.json", DOMAIN) + + +@pytest.fixture +def mock_geniushub_cloud( + zones: list[dict[str, Any]], devices: list[dict[str, Any]] +) -> Generator[MagicMock]: + """Mock a GeniusHub.""" + with patch( + "homeassistant.components.geniushub.GeniusHub", + autospec=True, + ) as mock_client: + client = mock_client.return_value + genius_zones = [GeniusZone(z["id"], z, client) for z in zones] + client.zone_objs = genius_zones + client._zones = genius_zones + genius_devices = [GeniusDevice(d["id"], d, client) for d in devices] + client.device_objs = genius_devices + client._devices = genius_devices + client.api_version = 1 + yield client + + @pytest.fixture def mock_local_config_entry() -> MockConfigEntry: """Mock a local config entry.""" @@ -62,4 +96,5 @@ def mock_cloud_config_entry() -> MockConfigEntry: data={ CONF_TOKEN: "abcdef", }, + entry_id="01J71MQF0EC62D620DGYNG2R8H", ) diff --git a/tests/components/geniushub/fixtures/devices_cloud_test_data.json b/tests/components/geniushub/fixtures/devices_cloud_test_data.json new file mode 100644 index 00000000000..92fd2c33811 --- /dev/null +++ b/tests/components/geniushub/fixtures/devices_cloud_test_data.json @@ -0,0 +1,151 @@ +[ + { + "id": "4", + "type": "Smart Plug", + "assignedZones": [{ "name": "Bedroom Socket" }], + "state": { "outputOnOff": "True" } + }, + { + "id": "6", + "type": "Smart Plug", + "assignedZones": [{ "name": "Kitchen Socket" }], + "state": { "outputOnOff": "True" } + }, + { + "id": "11", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Lounge" }], + "state": { "batteryLevel": 43, "setTemperature": 4 } + }, + { + "id": "16", + "type": "Room Sensor", + "assignedZones": [{ "name": "Guest room" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21, + "luminance": 29, + "occupancyTrigger": 255 + } + }, + { + "id": "17", + "type": "Room Sensor", + "assignedZones": [{ "name": "Ensuite" }], + "state": { + "batteryLevel": 
100, + "measuredTemperature": 21, + "luminance": 32, + "occupancyTrigger": 0 + } + }, + { + "id": "18", + "type": "Room Sensor", + "assignedZones": [{ "name": "Bedroom" }], + "state": { + "batteryLevel": 36, + "measuredTemperature": 21.5, + "luminance": 1, + "occupancyTrigger": 0 + } + }, + { + "id": "20", + "type": "Room Sensor", + "assignedZones": [{ "name": "Kitchen" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21.5, + "luminance": 1, + "occupancyTrigger": 0 + } + }, + { + "id": "21", + "type": "Room Sensor", + "assignedZones": [{ "name": "Hall" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 21, + "luminance": 33, + "occupancyTrigger": 0 + } + }, + { + "id": "22", + "type": "Single Channel Receiver", + "assignedZones": [{ "name": "East Berlin" }], + "state": { "outputOnOff": "False" } + }, + { + "id": "50", + "type": "Room Sensor", + "assignedZones": [{ "name": "Study" }], + "state": { + "batteryLevel": 100, + "measuredTemperature": 22, + "luminance": 34, + "occupancyTrigger": 0 + } + }, + { + "id": "53", + "type": "Room Sensor", + "assignedZones": [{ "name": "Lounge" }], + "state": { + "batteryLevel": 28, + "measuredTemperature": 0, + "luminance": 0, + "occupancyTrigger": 0 + } + }, + { + "id": "56", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Kitchen" }], + "state": { "batteryLevel": 55, "setTemperature": 4 } + }, + { + "id": "68", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Hall" }], + "state": { "batteryLevel": 92, "setTemperature": 4 } + }, + { + "id": "78", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Bedroom" }], + "state": { "batteryLevel": 42, "setTemperature": 4 } + }, + { + "id": "85", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Study" }], + "state": { "batteryLevel": 61, "setTemperature": 4 } + }, + { + "id": "86", + "type": "Smart Plug", + "assignedZones": [{ "name": "Study Socket" }], + "state": { "outputOnOff": "False" } + }, + { + "id": "88", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Ensuite" }], + "state": { "batteryLevel": 49, "setTemperature": 4 } + }, + { + "id": "89", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Kitchen" }], + "state": { "batteryLevel": 48, "setTemperature": 4 } + }, + { + "id": "90", + "type": "Radiator Valve", + "assignedZones": [{ "name": "Guest room" }], + "state": { "batteryLevel": 92, "setTemperature": 4 } + } +] diff --git a/tests/components/geniushub/fixtures/zones_cloud_test_data.json b/tests/components/geniushub/fixtures/zones_cloud_test_data.json new file mode 100644 index 00000000000..00d3109cf6e --- /dev/null +++ b/tests/components/geniushub/fixtures/zones_cloud_test_data.json @@ -0,0 +1,1069 @@ +[ + { + "id": 0, + "name": "West Berlin", + "output": 0, + "type": "manager", + "mode": "off", + "schedule": { "timer": {}, "footprint": {} } + }, + { + "id": 1, + "name": "Lounge", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 20, + "setpoint": 4, + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "tuesday": 
{ + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 68400, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 68400, "setpoint": 20 }, + { "end": 81000, "start": 75600, "setpoint": 18 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "monday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "tuesday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "wednesday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "thursday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "friday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + }, + "saturday": { + "defaultSetpoint": 17, + "heatingPeriods": [ + { "end": 61200, "start": 0, "setpoint": 4 }, + { "end": 86400, "start": 80100, "setpoint": 4 } + ] + } + } + } + } + }, + { + "id": 2, + "name": "Hall", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 
14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 73800, "start": 68400, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 37800, "start": 32400, "setpoint": 20 }, + { "end": 75600, "start": 56700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 43500, "start": 31800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 34200, "start": 27300, "setpoint": 20 }, + { "end": 75600, "start": 60900, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 48300, "start": 28800, "setpoint": 20 }, + { "end": 75600, "start": 75300, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 42000, "start": 28500, "setpoint": 20 }, + { "end": 70800, "start": 53700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 64500, "start": 28500, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 63900, "start": 53100, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 3, + "name": "Kitchen", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21.5, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, 
"setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 70200, "start": 61200, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 61200, "start": 29700, "setpoint": 6 }, + { "end": 73800, "start": 68400, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 38100, "start": 29100, "setpoint": 20 }, + { "end": 75600, "start": 56700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 51600, "start": 32400, "setpoint": 20 }, + { "end": 74400, "start": 60600, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 33300, "start": 27300, "setpoint": 20 }, + { "end": 75600, "start": 58800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 48600, "start": 28800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 71400, "start": 56400, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 74400, "start": 40800, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 63300, "start": 29700, "setpoint": 20 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 5, + "name": "Ensuite", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 28 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, 
"setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 16 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 12, + "heatingPeriods": [ + { "end": 28800, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 81000, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 7, + "name": "Guest room", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21, + "setpoint": 4, + "occupied": "True", + "override": { "duration": 0, "setpoint": 20 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 
75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 27, + "name": "Bedroom Socket", + "output": 1, + "type": "on / off", + "mode": "timer", + "setpoint": "True", + "override": { "duration": 0, "setpoint": "True" }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "monday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "tuesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "wednesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "thursday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "friday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "saturday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + } + } + }, + "footprint": {} + } + }, + { + "id": 28, + "name": "Kitchen Socket", + "output": 1, + "type": "on / off", + "mode": "timer", + "setpoint": "True", + "override": { "duration": 0, "setpoint": "True" }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "monday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "tuesday": { + "defaultSetpoint": "False", + "heatingPeriods": 
[ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "wednesday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "thursday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "friday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + }, + "saturday": { + "defaultSetpoint": "False", + "heatingPeriods": [ + { "end": 82800, "start": 27000, "setpoint": "True" } + ] + } + } + }, + "footprint": {} + } + }, + { + "id": 29, + "name": "Bedroom", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 21.5, + "setpoint": 4, + "override": { "duration": 0, "setpoint": 23.5 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 73800, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 19.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 
14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, + { + "id": 30, + "name": "Study", + "output": 0, + "type": "radiator", + "mode": "off", + "temperature": 22, + "setpoint": 4, + "occupied": "False", + "override": { "duration": 0, "setpoint": 28 }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "monday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "tuesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 75600, "start": 29700, "setpoint": 6 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "wednesday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "thursday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "friday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + }, + "saturday": { + "defaultSetpoint": 14.5, + "heatingPeriods": [ + { "end": 29700, "start": 27000, "setpoint": 18 }, + { "end": 73800, "start": 29700, "setpoint": 6 }, + { "end": 75600, "start": 73800, "setpoint": 14 }, + { "end": 81000, "start": 75600, "setpoint": 18.5 } + ] + } + } + }, + "footprint": { + "weekly": { + "sunday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "monday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "tuesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "wednesday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "thursday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "friday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + }, + "saturday": { + "defaultSetpoint": 14, + "heatingPeriods": [ + { "end": 23400, "start": 0, "setpoint": 16 }, + { "end": 86400, "start": 75600, "setpoint": 16 } + ] + } + } + } + } + }, 
+ { + "id": 32, + "name": "Study Socket", + "output": 0, + "type": "on / off", + "mode": "off", + "setpoint": "False", + "override": { "duration": 0, "setpoint": "True" }, + "schedule": { + "timer": { + "weekly": { + "sunday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "monday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "tuesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "wednesday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "thursday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "friday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + }, + "saturday": { + "defaultSetpoint": "False", + "heatingPeriods": [{ "end": 86400, "start": 0, "setpoint": "True" }] + } + } + }, + "footprint": {} + } + } +] diff --git a/tests/components/geniushub/snapshots/test_binary_sensor.ambr b/tests/components/geniushub/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..fcc256b5232 --- /dev/null +++ b/tests/components/geniushub/snapshots/test_binary_sensor.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[binary_sensor.single_channel_receiver_22-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.single_channel_receiver_22', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Single Channel Receiver 22', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_22', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[binary_sensor.single_channel_receiver_22-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'East Berlin', + 'friendly_name': 'Single Channel Receiver 22', + 'state': dict({ + }), + }), + 'context': , + 'entity_id': 'binary_sensor.single_channel_receiver_22', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/geniushub/snapshots/test_climate.ambr b/tests/components/geniushub/snapshots/test_climate.ambr new file mode 100644 index 00000000000..eb372de784e --- /dev/null +++ b/tests/components/geniushub/snapshots/test_climate.ambr @@ -0,0 +1,569 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[climate.bedroom-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bedroom', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Bedroom', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_29', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.bedroom-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'Bedroom', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'override': dict({ + 'duration': 0, + 'setpoint': 23.5, + }), + 'temperature': 21.5, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.bedroom', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.ensuite-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.ensuite', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Ensuite', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_5', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.ensuite-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21, + 'friendly_name': 'Ensuite', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 28, + }), + 'temperature': 21, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.ensuite', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.guest_room-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.guest_room', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Guest room', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_7', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_cloud_all_sensors[climate.guest_room-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21, + 'friendly_name': 'Guest room', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'True', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 21, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.guest_room', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.hall-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.hall', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Hall', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.hall-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21, + 'friendly_name': 'Hall', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 21, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.hall', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.kitchen-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.kitchen', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Kitchen', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_3', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.kitchen-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 21.5, + 'friendly_name': 'Kitchen', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + 
]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 21.5, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.kitchen', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.lounge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.lounge', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Lounge', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.lounge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 20, + 'friendly_name': 'Lounge', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'override': dict({ + 'duration': 0, + 'setpoint': 20, + }), + 'temperature': 20, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.lounge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_cloud_all_sensors[climate.study-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.study', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:radiator', + 'original_name': 'Study', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_30', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[climate.study-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22, + 'friendly_name': 'Study', + 'hvac_modes': list([ + , + , + ]), + 'icon': 'mdi:radiator', + 'max_temp': 28.0, + 'min_temp': 4.0, + 'preset_mode': None, + 'preset_modes': list([ + 'activity', + 'boost', + ]), + 'status': dict({ + 'mode': 'off', + 'occupied': 'False', + 'override': dict({ + 'duration': 0, + 'setpoint': 28, + }), + 'temperature': 22, + 'type': 'radiator', + }), + 'supported_features': , + 'temperature': 4, + }), + 'context': , + 'entity_id': 'climate.study', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git 
a/tests/components/geniushub/snapshots/test_sensor.ambr b/tests/components/geniushub/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..874f24cff95 --- /dev/null +++ b/tests/components/geniushub/snapshots/test_sensor.ambr @@ -0,0 +1,954 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[sensor.geniushub_errors-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.geniushub_errors', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'GeniusHub Errors', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Errors', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_errors-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'error_list': list([ + ]), + 'friendly_name': 'GeniusHub Errors', + }), + 'context': , + 'entity_id': 'sensor.geniushub_errors', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_information-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.geniushub_information', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'GeniusHub Information', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Information', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_information-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GeniusHub Information', + 'information_list': list([ + ]), + }), + 'context': , + 'entity_id': 'sensor.geniushub_information', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_warnings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.geniushub_warnings', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'GeniusHub Warnings', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_Warnings', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[sensor.geniushub_warnings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'GeniusHub Warnings', + 'warning_list': list([ + ]), + }), + 'context': 
, + 'entity_id': 'sensor.geniushub_warnings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_11-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_11', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-40', + 'original_name': 'Radiator Valve 11', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_11', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_11-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Lounge', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 11', + 'icon': 'mdi:battery-40', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_11', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '43', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_56-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_56', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-50', + 'original_name': 'Radiator Valve 56', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_56', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_56-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Kitchen', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 56', + 'icon': 'mdi:battery-50', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_56', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_68-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_68', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-90', + 'original_name': 'Radiator Valve 68', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_68', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_68-state] + StateSnapshot({ 
+ 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Hall', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 68', + 'icon': 'mdi:battery-90', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_68', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_78-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_78', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-40', + 'original_name': 'Radiator Valve 78', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_78', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_78-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Bedroom', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 78', + 'icon': 'mdi:battery-40', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_78', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '42', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_85-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_85', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-60', + 'original_name': 'Radiator Valve 85', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_85', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_85-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Study', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 85', + 'icon': 'mdi:battery-60', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_85', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_88-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_88', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-50', + 'original_name': 'Radiator Valve 88', + 'platform': 'geniushub', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_88', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_88-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Ensuite', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 88', + 'icon': 'mdi:battery-50', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_88', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '49', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_89-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_89', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-50', + 'original_name': 'Radiator Valve 89', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_89', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_89-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Kitchen', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 89', + 'icon': 'mdi:battery-50', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_89', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '48', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_90-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.radiator_valve_90', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-90', + 'original_name': 'Radiator Valve 90', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_90', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.radiator_valve_90-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Guest room', + 'device_class': 'battery', + 'friendly_name': 'Radiator Valve 90', + 'icon': 'mdi:battery-90', + 'state': dict({ + 'set_temperature': 4, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.radiator_valve_90', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '92', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_16-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_16', + 'has_entity_name': 
False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 16', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_16', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_16-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Guest room', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 16', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 29, + 'measured_temperature': 21, + 'occupancy_trigger': 255, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_16', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_17-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_17', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 17', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_17', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_17-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Ensuite', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 17', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 32, + 'measured_temperature': 21, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_17', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_18-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_18', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Room Sensor 18', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_18', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_18-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Bedroom', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 18', + 'icon': 'mdi:battery-alert', + 'state': dict({ + 'luminance': 1, + 'measured_temperature': 21.5, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_18', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '36', + }) +# --- 
+# name: test_cloud_all_sensors[sensor.room_sensor_20-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_20', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 20', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_20', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_20-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Kitchen', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 20', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 1, + 'measured_temperature': 21.5, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_20', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_21-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_21', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 21', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_21', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_21-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Hall', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 21', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 33, + 'measured_temperature': 21, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_21', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_50-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_50', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery', + 'original_name': 'Room Sensor 50', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_50', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_50-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Study', + 'device_class': 'battery', + 
'friendly_name': 'Room Sensor 50', + 'icon': 'mdi:battery', + 'state': dict({ + 'luminance': 34, + 'measured_temperature': 22, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_50', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_53-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.room_sensor_53', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:battery-alert', + 'original_name': 'Room Sensor 53', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_device_53', + 'unit_of_measurement': '%', + }) +# --- +# name: test_cloud_all_sensors[sensor.room_sensor_53-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assigned_zone': 'Lounge', + 'device_class': 'battery', + 'friendly_name': 'Room Sensor 53', + 'icon': 'mdi:battery-alert', + 'state': dict({ + 'luminance': 0, + 'measured_temperature': 0, + 'occupancy_trigger': 0, + }), + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.room_sensor_53', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28', + }) +# --- diff --git a/tests/components/geniushub/snapshots/test_switch.ambr b/tests/components/geniushub/snapshots/test_switch.ambr new file mode 100644 index 00000000000..6c3c95af477 --- /dev/null +++ b/tests/components/geniushub/snapshots/test_switch.ambr @@ -0,0 +1,166 @@ +# serializer version: 1 +# name: test_cloud_all_sensors[switch.bedroom_socket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.bedroom_socket', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Bedroom Socket', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_27', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[switch.bedroom_socket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Bedroom Socket', + 'status': dict({ + 'mode': 'timer', + 'override': dict({ + 'duration': 0, + 'setpoint': 'True', + }), + 'type': 'on / off', + }), + }), + 'context': , + 'entity_id': 'switch.bedroom_socket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_cloud_all_sensors[switch.kitchen_socket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.kitchen_socket', + 'has_entity_name': False, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Kitchen Socket', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_28', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[switch.kitchen_socket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Kitchen Socket', + 'status': dict({ + 'mode': 'timer', + 'override': dict({ + 'duration': 0, + 'setpoint': 'True', + }), + 'type': 'on / off', + }), + }), + 'context': , + 'entity_id': 'switch.kitchen_socket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_cloud_all_sensors[switch.study_socket-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.study_socket', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Study Socket', + 'platform': 'geniushub', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '01J71MQF0EC62D620DGYNG2R8H_zone_32', + 'unit_of_measurement': None, + }) +# --- +# name: test_cloud_all_sensors[switch.study_socket-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Study Socket', + 'status': dict({ + 'mode': 'off', + 'override': dict({ + 'duration': 0, + 'setpoint': 'True', + }), + 'type': 'on / off', + }), + }), + 'context': , + 'entity_id': 'switch.study_socket', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/geniushub/test_binary_sensor.py b/tests/components/geniushub/test_binary_sensor.py new file mode 100644 index 00000000000..682929eb696 --- /dev/null +++ b/tests/components/geniushub/test_binary_sensor.py @@ -0,0 +1,32 @@ +"""Tests for the Geniushub binary sensor platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub binary sensors.""" + with patch( + "homeassistant.components.geniushub.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geniushub/test_climate.py b/tests/components/geniushub/test_climate.py new file mode 100644 index 00000000000..d14e57b9552 --- /dev/null +++ b/tests/components/geniushub/test_climate.py @@ -0,0 +1,30 @@ +"""Tests for the Geniushub climate platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub climate entities.""" + with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.CLIMATE]): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geniushub/test_sensor.py b/tests/components/geniushub/test_sensor.py new file mode 100644 index 00000000000..a75329ca7fc --- /dev/null +++ b/tests/components/geniushub/test_sensor.py @@ -0,0 +1,30 @@ +"""Tests for the Geniushub sensor platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub sensors.""" + with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) diff --git a/tests/components/geniushub/test_switch.py b/tests/components/geniushub/test_switch.py new file mode 100644 index 00000000000..0e88562e381 --- /dev/null +++ b/tests/components/geniushub/test_switch.py @@ -0,0 +1,30 @@ +"""Tests for the Geniushub switch platform.""" + +from unittest.mock import patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_geniushub_cloud") +async def test_cloud_all_sensors( + hass: HomeAssistant, + mock_cloud_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test the creation of the Genius Hub switch entities.""" + with patch("homeassistant.components.geniushub.PLATFORMS", [Platform.SWITCH]): + await setup_integration(hass, mock_cloud_config_entry) + + await snapshot_platform( + hass, entity_registry, snapshot, mock_cloud_config_entry.entry_id + ) From 2c99f060f060c2994512b189dbb8e7f9de777668 Mon Sep 17 00:00:00 2001 From: Stefano Sonzogni Date: Fri, 6 Sep 2024 15:18:40 +0200 Subject: [PATCH 1515/1635] Add binary sensors for motion detection Comelit simple home (#125200) * Add binary sensors for motion detection * sort platforms * use _attr_device_class property and optimizations * use static _attr_device_class property --- homeassistant/components/comelit/__init__.py | 1 + .../components/comelit/binary_sensor.py | 62 +++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 homeassistant/components/comelit/binary_sensor.py diff --git a/homeassistant/components/comelit/__init__.py b/homeassistant/components/comelit/__init__.py index 478be85c1d4..12f28ef206d 100644 --- a/homeassistant/components/comelit/__init__.py +++ b/homeassistant/components/comelit/__init__.py @@ -19,6 +19,7 @@ BRIDGE_PLATFORMS = [ ] VEDO_PLATFORMS = [ Platform.ALARM_CONTROL_PANEL, + Platform.BINARY_SENSOR, Platform.SENSOR, ] diff --git a/homeassistant/components/comelit/binary_sensor.py b/homeassistant/components/comelit/binary_sensor.py new file mode 100644 index 00000000000..30b642584f8 --- /dev/null +++ b/homeassistant/components/comelit/binary_sensor.py @@ -0,0 +1,62 @@ +"""Support for sensors.""" + +from __future__ import annotations + +from aiocomelit import ComelitVedoZoneObject +from aiocomelit.const import ALARM_ZONES + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import ComelitVedoSystem + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Comelit VEDO presence sensors.""" + + coordinator: ComelitVedoSystem = hass.data[DOMAIN][config_entry.entry_id] + + async_add_entities( + ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id) + for device in coordinator.data[ALARM_ZONES].values() + ) + + +class ComelitVedoBinarySensorEntity( + CoordinatorEntity[ComelitVedoSystem], BinarySensorEntity +): + """Sensor device.""" + + _attr_has_entity_name = True + _attr_device_class = BinarySensorDeviceClass.MOTION + + def __init__( + self, + coordinator: ComelitVedoSystem, + zone: ComelitVedoZoneObject, + config_entry_entry_id: str, + ) -> None: + """Init sensor entity.""" + self._api = coordinator.api + self._zone = zone + super().__init__(coordinator) + # Use config_entry.entry_id as base for unique_id + # because no serial number or mac is available + self._attr_unique_id = f"{config_entry_entry_id}-presence-{zone.index}" + self._attr_device_info = 
coordinator.platform_device_info(zone, "zone") + + @property + def is_on(self) -> bool: + """Presence detected.""" + return self.coordinator.data[ALARM_ZONES][self._zone.index].status_api == "0001" From f9928a584383bcebc6f4cc57c52bb87c2701c019 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:19:54 +0200 Subject: [PATCH 1516/1635] Fix location_id datatype in totalconnect tests (#125298) Adjust location_id type in totalconnect tests --- tests/components/totalconnect/common.py | 2 +- .../snapshots/test_alarm_control_panel.ambr | 4 +- .../snapshots/test_binary_sensor.ambr | 50 +++++++++---------- 3 files changed, 28 insertions(+), 28 deletions(-) diff --git a/tests/components/totalconnect/common.py b/tests/components/totalconnect/common.py index 6e9bb28a9b6..4cfbabb2d7d 100644 --- a/tests/components/totalconnect/common.py +++ b/tests/components/totalconnect/common.py @@ -11,7 +11,7 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -LOCATION_ID = "123456" +LOCATION_ID = 123456 DEVICE_INFO_BASIC_1 = { "DeviceID": "987654", diff --git a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr index 0b8b8bb79ac..ef7cb386b33 100644 --- a/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr +++ b/tests/components/totalconnect/snapshots/test_alarm_control_panel.ambr @@ -41,7 +41,7 @@ 'code_format': None, 'cover_tampered': False, 'friendly_name': 'test', - 'location_id': '123456', + 'location_id': 123456, 'location_name': 'test', 'low_battery': False, 'partition': 1, @@ -99,7 +99,7 @@ 'code_format': None, 'cover_tampered': False, 'friendly_name': 'test Partition 2', - 'location_id': '123456', + 'location_id': 123456, 'location_name': 'test partition 2', 'low_battery': False, 'partition': 2, diff --git a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr index 81cfecbc530..1eccff1dfc3 100644 --- a/tests/components/totalconnect/snapshots/test_binary_sensor.ambr +++ b/tests/components/totalconnect/snapshots/test_binary_sensor.ambr @@ -37,7 +37,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'smoke', 'friendly_name': 'Fire', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '2', }), @@ -87,7 +87,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Fire Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '2', }), @@ -137,7 +137,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Fire Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '2', }), @@ -187,7 +187,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'gas', 'friendly_name': 'Gas', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '3', }), @@ -237,7 +237,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Gas Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '3', }), @@ -287,7 +287,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Gas Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '3', }), @@ -337,7 +337,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'safety', 'friendly_name': 'Medical', - 'location_id': '123456', + 'location_id': 123456, 
'partition': '1', 'zone_id': '5', }), @@ -387,7 +387,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'motion', 'friendly_name': 'Motion', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '4', }), @@ -437,7 +437,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Motion Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '4', }), @@ -487,7 +487,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Motion Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '4', }), @@ -537,7 +537,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Security', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '1', }), @@ -587,7 +587,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Security Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '1', }), @@ -637,7 +637,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Security Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '1', }), @@ -687,7 +687,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'problem', 'friendly_name': 'Temperature', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': 7, }), @@ -737,7 +737,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Temperature Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': 7, }), @@ -787,7 +787,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Temperature Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': 7, }), @@ -837,7 +837,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'test Battery', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_battery', @@ -885,7 +885,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'carbon_monoxide', 'friendly_name': 'test Carbon monoxide', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_carbon_monoxide', @@ -932,7 +932,7 @@ StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'test Police emergency', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_police_emergency', @@ -980,7 +980,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'power', 'friendly_name': 'test Power', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_power', @@ -1028,7 +1028,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'smoke', 'friendly_name': 'test Smoke', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_smoke', @@ -1076,7 +1076,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'test Tamper', - 'location_id': '123456', + 'location_id': 123456, }), 'context': , 'entity_id': 'binary_sensor.test_tamper', @@ -1124,7 +1124,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'door', 'friendly_name': 'Unknown', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '6', }), @@ -1174,7 +1174,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'battery', 'friendly_name': 'Unknown Battery', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 
'zone_id': '6', }), @@ -1224,7 +1224,7 @@ 'attributes': ReadOnlyDict({ 'device_class': 'tamper', 'friendly_name': 'Unknown Tamper', - 'location_id': '123456', + 'location_id': 123456, 'partition': '1', 'zone_id': '6', }), From 86ef7bab28ecac460f554fbc2f5133288b1d6f85 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:20:11 +0200 Subject: [PATCH 1517/1635] Improve config flow type hints in totalconnect (#125300) --- .../components/totalconnect/config_flow.py | 38 ++++++++++++------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/totalconnect/config_flow.py b/homeassistant/components/totalconnect/config_flow.py index 63973fd44e9..2a4c4d421a1 100644 --- a/homeassistant/components/totalconnect/config_flow.py +++ b/homeassistant/components/totalconnect/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from total_connect_client.client import TotalConnectClient from total_connect_client.exceptions import AuthenticationError @@ -17,6 +17,7 @@ from homeassistant.config_entries import ( ) from homeassistant.const import CONF_LOCATION, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback +from homeassistant.helpers.typing import VolDictType from .const import AUTO_BYPASS, CONF_USERCODES, DOMAIN @@ -28,15 +29,16 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + client: TotalConnectClient + def __init__(self) -> None: """Initialize the config flow.""" - self.username = None - self.password = None - self.usercodes: dict[str, Any] = {} - self.client = None + self.username: str | None = None + self.password: str | None = None + self.usercodes: dict[int, str | None] = {} async def async_step_user( - self, user_input: dict[str, Any] | None = None + self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} @@ -70,18 +72,20 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors ) - async def async_step_locations(self, user_entry=None): + async def async_step_locations( + self, user_input: dict[str, str] | None = None + ) -> ConfigFlowResult: """Handle the user locations and associated usercodes.""" errors = {} - if user_entry is not None: + if user_input is not None: for location_id in self.usercodes: if self.usercodes[location_id] is None: valid = await self.hass.async_add_executor_job( self.client.locations[location_id].set_usercode, - user_entry[CONF_USERCODES], + user_input[CONF_USERCODES], ) if valid: - self.usercodes[location_id] = user_entry[CONF_USERCODES] + self.usercodes[location_id] = user_input[CONF_USERCODES] else: errors[CONF_LOCATION] = "usercode" break @@ -111,11 +115,11 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): self.usercodes[location_id] = None # show the next location that needs a usercode - location_codes = {} + location_codes: VolDictType = {} location_for_user = "" for location_id in self.usercodes: if self.usercodes[location_id] is None: - location_for_user = location_id + location_for_user = str(location_id) location_codes[ vol.Required( CONF_USERCODES, @@ -141,7 +145,9 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_reauth_confirm() - async def async_step_reauth_confirm(self, user_input=None): + async def async_step_reauth_confirm( + self, user_input: 
dict[str, str] | None = None + ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" errors = {} if user_input is None: @@ -166,6 +172,8 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): ) existing_entry = await self.async_set_unique_id(self.username) + if TYPE_CHECKING: + assert existing_entry is not None new_entry = { CONF_USERNAME: self.username, CONF_PASSWORD: user_input[CONF_PASSWORD], @@ -195,7 +203,9 @@ class TotalConnectOptionsFlowHandler(OptionsFlow): """Initialize options flow.""" self.config_entry = config_entry - async def async_step_init(self, user_input=None): + async def async_step_init( + self, user_input: dict[str, bool] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) From 3a5309e9a0a1e6fd23085740426ebf07b8f48c13 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 15:20:39 +0200 Subject: [PATCH 1518/1635] Improve config flow type hints in tellduslive (#125299) --- .../components/tellduslive/config_flow.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/tellduslive/config_flow.py b/homeassistant/components/tellduslive/config_flow.py index 6b5e7150d67..3bbb34912f9 100644 --- a/homeassistant/components/tellduslive/config_flow.py +++ b/homeassistant/components/tellduslive/config_flow.py @@ -35,14 +35,15 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 + _session: Session + def __init__(self) -> None: """Init config flow.""" self._hosts = [CLOUD_NAME] self._host = None - self._session = None self._scan_interval = SCAN_INTERVAL - def _get_auth_url(self): + def _get_auth_url(self) -> str | None: self._session = Session( public_key=PUBLIC_KEY, private_key=NOT_SO_PRIVATE_KEY, @@ -70,7 +71,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_auth(self, user_input=None): + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the submitted configuration.""" errors = {} if user_input is not None: @@ -114,7 +117,10 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_discovery(self, discovery_info): + async def async_step_discovery( + self, + discovery_info: list[str], # type: ignore[override] + ) -> ConfigFlowResult: """Run when a Tellstick is discovered.""" await self._async_handle_discovery_without_unique_id() From 8d239d368b57b888b9b815ca1bf61be3ab36b75c Mon Sep 17 00:00:00 2001 From: Noah Husby <32528627+noahhusby@users.noreply.github.com> Date: Fri, 6 Sep 2024 09:22:39 -0400 Subject: [PATCH 1519/1635] Bump aiorussound to 3.0.4 (#125285) feat: bump aiorussound to 3.0.4 --- .../components/russound_rio/__init__.py | 10 ++++----- .../components/russound_rio/config_flow.py | 9 ++++---- .../components/russound_rio/const.py | 4 ++-- .../components/russound_rio/entity.py | 15 +++++++++---- .../components/russound_rio/manifest.json | 2 +- .../components/russound_rio/media_player.py | 22 ++++++++++--------- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/russound_rio/conftest.py | 4 ++-- 9 files changed, 39 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 8627c636ef2..823d0736037 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py 
@@ -3,7 +3,7 @@ import asyncio import logging -from aiorussound import Russound +from aiorussound import RussoundClient, RussoundTcpConnectionHandler from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform @@ -16,7 +16,7 @@ PLATFORMS = [Platform.MEDIA_PLAYER] _LOGGER = logging.getLogger(__name__) -type RussoundConfigEntry = ConfigEntry[Russound] +type RussoundConfigEntry = ConfigEntry[RussoundClient] async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: @@ -24,7 +24,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> host = entry.data[CONF_HOST] port = entry.data[CONF_PORT] - russ = Russound(hass.loop, host, port) + russ = RussoundClient(RussoundTcpConnectionHandler(hass.loop, host, port)) @callback def is_connected_updated(connected: bool) -> None: @@ -37,14 +37,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> port, ) - russ.add_connection_callback(is_connected_updated) - + russ.connection_handler.add_connection_callback(is_connected_updated) try: async with asyncio.timeout(CONNECT_TIMEOUT): await russ.connect() except RUSSOUND_RIO_EXCEPTIONS as err: raise ConfigEntryNotReady(f"Error while connecting to {host}:{port}") from err - entry.runtime_data = russ await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index df173d29f61..03e32f39c08 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -6,7 +6,7 @@ import asyncio import logging from typing import Any -from aiorussound import Controller, Russound +from aiorussound import Controller, RussoundClient, RussoundTcpConnectionHandler import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -54,8 +54,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): host = user_input[CONF_HOST] port = user_input[CONF_PORT] - controllers = None - russ = Russound(self.hass.loop, host, port) + russ = RussoundClient( + RussoundTcpConnectionHandler(self.hass.loop, host, port) + ) try: async with asyncio.timeout(CONNECT_TIMEOUT): await russ.connect() @@ -87,7 +88,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): port = import_data.get(CONF_PORT, 9621) # Connection logic is repeated here since this method will be removed in future releases - russ = Russound(self.hass.loop, host, port) + russ = RussoundClient(RussoundTcpConnectionHandler(self.hass.loop, host, port)) try: async with asyncio.timeout(CONNECT_TIMEOUT): await russ.connect() diff --git a/homeassistant/components/russound_rio/const.py b/homeassistant/components/russound_rio/const.py index d1f4e1c4c0e..42a1db5f2ad 100644 --- a/homeassistant/components/russound_rio/const.py +++ b/homeassistant/components/russound_rio/const.py @@ -2,7 +2,7 @@ import asyncio -from aiorussound import CommandException +from aiorussound import CommandError from aiorussound.const import FeatureFlag from homeassistant.components.media_player import MediaPlayerEntityFeature @@ -10,7 +10,7 @@ from homeassistant.components.media_player import MediaPlayerEntityFeature DOMAIN = "russound_rio" RUSSOUND_RIO_EXCEPTIONS = ( - CommandException, + CommandError, ConnectionRefusedError, TimeoutError, asyncio.CancelledError, diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py 
index 0e4d5cf7dde..4d458118939 100644 --- a/homeassistant/components/russound_rio/entity.py +++ b/homeassistant/components/russound_rio/entity.py @@ -4,7 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine from functools import wraps from typing import Any, Concatenate -from aiorussound import Controller +from aiorussound import Controller, RussoundTcpConnectionHandler from homeassistant.core import callback from homeassistant.exceptions import HomeAssistantError @@ -53,7 +53,6 @@ class RussoundBaseEntity(Entity): or f"{self._primary_mac_address}-{self._controller.controller_id}" ) self._attr_device_info = DeviceInfo( - configuration_url=f"http://{self._instance.host}", # Use MAC address of Russound device as identifier identifiers={(DOMAIN, self._device_identifier)}, manufacturer="Russound", @@ -61,6 +60,10 @@ class RussoundBaseEntity(Entity): model=controller.controller_type, sw_version=controller.firmware_version, ) + if isinstance(self._instance.connection_handler, RussoundTcpConnectionHandler): + self._attr_device_info["configuration_url"] = ( + f"http://{self._instance.connection_handler.host}" + ) if controller.parent_controller: self._attr_device_info["via_device"] = ( DOMAIN, @@ -79,8 +82,12 @@ class RussoundBaseEntity(Entity): async def async_added_to_hass(self) -> None: """Register callbacks.""" - self._instance.add_connection_callback(self._is_connected_updated) + self._instance.connection_handler.add_connection_callback( + self._is_connected_updated + ) async def async_will_remove_from_hass(self) -> None: """Remove callbacks.""" - self._instance.remove_connection_callback(self._is_connected_updated) + self._instance.connection_handler.remove_connection_callback( + self._is_connected_updated + ) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 6c473d94874..19273de92ee 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_push", "loggers": ["aiorussound"], "quality_scale": "silver", - "requirements": ["aiorussound==2.3.2"] + "requirements": ["aiorussound==3.0.4"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index 20aaf0f3c08..a5bb392a028 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -84,14 +84,16 @@ async def async_setup_entry( """Set up the Russound RIO platform.""" russ = entry.runtime_data + await russ.init_sources() + sources = russ.sources + for source in sources.values(): + await source.watch() + # Discover controllers controllers = await russ.enumerate_controllers() entities = [] for controller in controllers.values(): - sources = controller.sources - for source in sources.values(): - await source.watch() for zone in controller.zones.values(): await zone.watch() mp = RussoundZoneDevice(zone, sources) @@ -154,7 +156,7 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @property def state(self) -> MediaPlayerState | None: """Return the state of the device.""" - status = self._zone.status + status = self._zone.properties.status if status == "ON": return MediaPlayerState.ON if status == "OFF": @@ -174,22 +176,22 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): @property def media_title(self): """Title of current playing media.""" - return self._current_source().song_name + return 
self._current_source().properties.song_name @property def media_artist(self): """Artist of current playing media, music track only.""" - return self._current_source().artist_name + return self._current_source().properties.artist_name @property def media_album_name(self): """Album name of current playing media, music track only.""" - return self._current_source().album_name + return self._current_source().properties.album_name @property def media_image_url(self): """Image url of current playing media.""" - return self._current_source().cover_art_url + return self._current_source().properties.cover_art_url @property def volume_level(self): @@ -198,7 +200,7 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): Value is returned based on a range (0..50). Therefore float divide by 50 to get to the required range. """ - return float(self._zone.volume or "0") / 50.0 + return float(self._zone.properties.volume or "0") / 50.0 @command async def async_turn_off(self) -> None: @@ -214,7 +216,7 @@ class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): async def async_set_volume_level(self, volume: float) -> None: """Set the volume level.""" rvol = int(volume * 50.0) - await self._zone.set_volume(rvol) + await self._zone.set_volume(str(rvol)) @command async def async_select_source(self, source: str) -> None: diff --git a/requirements_all.txt b/requirements_all.txt index c6a8eba7b6f..6a215b2989c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -350,7 +350,7 @@ aioridwell==2024.01.0 aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==2.3.2 +aiorussound==3.0.4 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index da62c029875..74d792acb15 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -332,7 +332,7 @@ aioridwell==2024.01.0 aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==2.3.2 +aiorussound==3.0.4 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 diff --git a/tests/components/russound_rio/conftest.py b/tests/components/russound_rio/conftest.py index a87d0a74fa8..344c743d0b3 100644 --- a/tests/components/russound_rio/conftest.py +++ b/tests/components/russound_rio/conftest.py @@ -37,10 +37,10 @@ def mock_russound() -> Generator[AsyncMock]: """Mock the Russound RIO client.""" with ( patch( - "homeassistant.components.russound_rio.Russound", autospec=True + "homeassistant.components.russound_rio.RussoundClient", autospec=True ) as mock_client, patch( - "homeassistant.components.russound_rio.config_flow.Russound", + "homeassistant.components.russound_rio.config_flow.RussoundClient", return_value=mock_client, ), ): From ff449e77415331edf2df6d727cf9016bef2d26bd Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 6 Sep 2024 08:23:22 -0500 Subject: [PATCH 1520/1635] Bump yarl to 1.9.11 (#125287) * Bump yarl to 1.9.10 changelog: https://github.com/aio-libs/yarl/compare/v1.9.9...v1.9.10 * 11 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index ac7b74429bd..c8fc265cee8 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.9 +yarl==1.9.11 zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index 787813bd64a..a8c43ada99f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.9", + "yarl==1.9.11", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index 1d6b4e74d22..8d5c01b5c27 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.9 +yarl==1.9.11 From 051a28b55a33a930a3f3325da8ef19eba6364bdf Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Fri, 6 Sep 2024 08:34:52 -0500 Subject: [PATCH 1521/1635] Remove unneeded wrapping of URL in URL in network helper (#125265) * Remove unneeded wrapping of URL in URL in network helper * fix mocks --- homeassistant/helpers/network.py | 2 +- tests/helpers/test_network.py | 15 +++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/homeassistant/helpers/network.py b/homeassistant/helpers/network.py index d5891973e40..36c9feb83c4 100644 --- a/homeassistant/helpers/network.py +++ b/homeassistant/helpers/network.py @@ -216,7 +216,7 @@ def _get_request_host() -> str | None: """Get the host address of the current request.""" if (request := http.current_request.get()) is None: raise NoURLAvailableError - return yarl.URL(request.url).host + return request.url.host @bind_hass diff --git a/tests/helpers/test_network.py b/tests/helpers/test_network.py index 3c9594bca38..5a847e6a29c 100644 --- a/tests/helpers/test_network.py +++ b/tests/helpers/test_network.py @@ -3,6 +3,7 @@ from unittest.mock import Mock, patch import pytest +from yarl import URL from homeassistant.components import cloud from homeassistant.config import async_process_ha_core_config @@ -591,7 +592,7 @@ async def test_get_request_host(hass: HomeAssistant) -> None: with patch("homeassistant.components.http.current_request") as mock_request_context: mock_request = Mock() - mock_request.url = "http://example.com:8123/test/request" + mock_request.url = URL("http://example.com:8123/test/request") mock_request_context.get = Mock(return_value=mock_request) assert _get_request_host() == "example.com" @@ -682,10 +683,12 @@ async def test_is_internal_request(hass: HomeAssistant, mock_current_request) -> mock_current_request.return_value = None assert not is_internal_request(hass) - mock_current_request.return_value = Mock(url="http://example.local:8123") + mock_current_request.return_value = Mock(url=URL("http://example.local:8123")) assert is_internal_request(hass) - mock_current_request.return_value = Mock(url="http://no_match.example.local:8123") + mock_current_request.return_value = Mock( + 
url=URL("http://no_match.example.local:8123") + ) assert not is_internal_request(hass) # Test with internal URL: http://192.168.0.1:8123 @@ -697,18 +700,18 @@ async def test_is_internal_request(hass: HomeAssistant, mock_current_request) -> assert hass.config.internal_url == "http://192.168.0.1:8123" assert not is_internal_request(hass) - mock_current_request.return_value = Mock(url="http://192.168.0.1:8123") + mock_current_request.return_value = Mock(url=URL("http://192.168.0.1:8123")) assert is_internal_request(hass) # Test for matching against local IP hass.config.api = Mock(use_ssl=False, local_ip="192.168.123.123", port=8123) for allowed in ("127.0.0.1", "192.168.123.123"): - mock_current_request.return_value = Mock(url=f"http://{allowed}:8123") + mock_current_request.return_value = Mock(url=URL(f"http://{allowed}:8123")) assert is_internal_request(hass), mock_current_request.return_value.url # Test for matching against HassOS hostname for allowed in ("hellohost", "hellohost.local"): - mock_current_request.return_value = Mock(url=f"http://{allowed}:8123") + mock_current_request.return_value = Mock(url=URL(f"http://{allowed}:8123")) assert is_internal_request(hass), mock_current_request.return_value.url From 9f469c08d14d04b6ce364822f821706dfd05d130 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Sep 2024 15:35:38 +0200 Subject: [PATCH 1522/1635] Code quality improvement on local_file (#125165) * Code quality improvement on local_file * Fix * No translation * Review comments --- homeassistant/components/local_file/camera.py | 86 +++++++----------- .../components/local_file/services.yaml | 9 +- .../components/local_file/strings.json | 9 +- tests/components/local_file/test_camera.py | 91 ++++++++++++++++--- 4 files changed, 121 insertions(+), 74 deletions(-) diff --git a/homeassistant/components/local_file/camera.py b/homeassistant/components/local_file/camera.py index 1306751f1a9..74d887b613f 100644 --- a/homeassistant/components/local_file/camera.py +++ b/homeassistant/components/local_file/camera.py @@ -12,13 +12,14 @@ from homeassistant.components.camera import ( PLATFORM_SCHEMA as CAMERA_PLATFORM_SCHEMA, Camera, ) -from homeassistant.const import ATTR_ENTITY_ID, CONF_FILE_PATH, CONF_NAME -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv +from homeassistant.const import CONF_FILE_PATH, CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import PlatformNotReady, ServiceValidationError +from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from .const import DATA_LOCAL_FILE, DEFAULT_NAME, DOMAIN, SERVICE_UPDATE_FILE_PATH +from .const import DEFAULT_NAME, SERVICE_UPDATE_FILE_PATH _LOGGER = logging.getLogger(__name__) @@ -29,57 +30,45 @@ PLATFORM_SCHEMA = CAMERA_PLATFORM_SCHEMA.extend( } ) -CAMERA_SERVICE_UPDATE_FILE_PATH = vol.Schema( - { - vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(CONF_FILE_PATH): cv.string, - } -) + +def check_file_path_access(file_path: str) -> bool: + """Check that filepath given is readable.""" + if not os.access(file_path, os.R_OK): + return False + return True -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = 
None, ) -> None: """Set up the Camera that works with local files.""" - if DATA_LOCAL_FILE not in hass.data: - hass.data[DATA_LOCAL_FILE] = [] + file_path: str = config[CONF_FILE_PATH] - file_path = config[CONF_FILE_PATH] - camera = LocalFile(config[CONF_NAME], file_path) - hass.data[DATA_LOCAL_FILE].append(camera) - - def update_file_path_service(call: ServiceCall) -> None: - """Update the file path.""" - file_path = call.data[CONF_FILE_PATH] - entity_ids = call.data[ATTR_ENTITY_ID] - cameras = hass.data[DATA_LOCAL_FILE] - - for camera in cameras: - if camera.entity_id in entity_ids: - camera.update_file_path(file_path) - - hass.services.register( - DOMAIN, + platform = entity_platform.async_get_current_platform() + platform.async_register_entity_service( SERVICE_UPDATE_FILE_PATH, - update_file_path_service, - schema=CAMERA_SERVICE_UPDATE_FILE_PATH, + { + vol.Required(CONF_FILE_PATH): cv.string, + }, + "update_file_path", ) - add_entities([camera]) + if not await hass.async_add_executor_job(check_file_path_access, file_path): + raise PlatformNotReady(f"File path {file_path} is not readable") + + async_add_entities([LocalFile(config[CONF_NAME], file_path)]) class LocalFile(Camera): """Representation of a local file camera.""" - def __init__(self, name, file_path): + def __init__(self, name: str, file_path: str) -> None: """Initialize Local File Camera component.""" super().__init__() - - self._name = name - self.check_file_path_access(file_path) + self._attr_name = name self._file_path = file_path # Set content type of local file content, _ = mimetypes.guess_type(file_path) @@ -96,30 +85,21 @@ class LocalFile(Camera): except FileNotFoundError: _LOGGER.warning( "Could not read camera %s image from file: %s", - self._name, + self.name, self._file_path, ) return None - def check_file_path_access(self, file_path): - """Check that filepath given is readable.""" - if not os.access(file_path, os.R_OK): - _LOGGER.warning( - "Could not read camera %s image from file: %s", self._name, file_path - ) - - def update_file_path(self, file_path): + async def update_file_path(self, file_path: str) -> None: """Update the file_path.""" - self.check_file_path_access(file_path) + if not await self.hass.async_add_executor_job( + check_file_path_access, file_path + ): + raise ServiceValidationError(f"Path {file_path} is not accessible") self._file_path = file_path self.schedule_update_ha_state() @property - def name(self): - """Return the name of this camera.""" - return self._name - - @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, str]: """Return the camera state attributes.""" return {"file_path": self._file_path} diff --git a/homeassistant/components/local_file/services.yaml b/homeassistant/components/local_file/services.yaml index 5fc0b11f4c2..1b3000e663e 100644 --- a/homeassistant/components/local_file/services.yaml +++ b/homeassistant/components/local_file/services.yaml @@ -1,10 +1,9 @@ update_file_path: + target: + entity: + integration: local_file + domain: camera fields: - entity_id: - required: true - selector: - entity: - domain: camera file_path: required: true example: "/config/www/images/image.jpg" diff --git a/homeassistant/components/local_file/strings.json b/homeassistant/components/local_file/strings.json index 0db5d709c69..801d85ce1e0 100644 --- a/homeassistant/components/local_file/strings.json +++ b/homeassistant/components/local_file/strings.json @@ -4,15 +4,16 @@ "name": "Updates file path", "description": "Use this action to change the file 
displayed by the camera.", "fields": { - "entity_id": { - "name": "Entity", - "description": "Name of the entity_id of the camera to update." - }, "file_path": { "name": "File path", "description": "The full path to the new image file to be displayed." } } } + }, + "exceptions": { + "file_path_not_accessible": { + "message": "Path {file_path} is not accessible" + } } } diff --git a/tests/components/local_file/test_camera.py b/tests/components/local_file/test_camera.py index 4455d47469c..132212df0ec 100644 --- a/tests/components/local_file/test_camera.py +++ b/tests/components/local_file/test_camera.py @@ -6,7 +6,9 @@ from unittest import mock import pytest from homeassistant.components.local_file.const import DOMAIN, SERVICE_UPDATE_FILE_PATH +from homeassistant.const import ATTR_ENTITY_ID, CONF_FILE_PATH from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.setup import async_setup_component from tests.typing import ClientSessionGenerator @@ -71,9 +73,45 @@ async def test_file_not_readable( ) await hass.async_block_till_done() - assert "Could not read" in caplog.text - assert "config_test" in caplog.text - assert "mock.file" in caplog.text + assert "File path mock.file is not readable;" in caplog.text + + +async def test_file_not_readable_after_setup( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test a warning is shown setup when file is not readable.""" + with ( + mock.patch("os.path.isfile", mock.Mock(return_value=True)), + mock.patch("os.access", mock.Mock(return_value=True)), + mock.patch( + "homeassistant.components.local_file.camera.mimetypes.guess_type", + mock.Mock(return_value=(None, None)), + ), + ): + await async_setup_component( + hass, + "camera", + { + "camera": { + "name": "config_test", + "platform": "local_file", + "file_path": "mock.file", + } + }, + ) + await hass.async_block_till_done() + + client = await hass_client() + + with mock.patch( + "homeassistant.components.local_file.camera.open", side_effect=FileNotFoundError + ): + resp = await client.get("/api/camera_proxy/camera.config_test") + + assert resp.status == HTTPStatus.INTERNAL_SERVER_ERROR + assert "Could not read camera config_test image from file: mock.file" in caplog.text async def test_camera_content_type( @@ -100,13 +138,23 @@ async def test_camera_content_type( "platform": "local_file", "file_path": "/path/to/image", } - - await async_setup_component( - hass, - "camera", - {"camera": [cam_config_jpg, cam_config_png, cam_config_svg, cam_config_noext]}, - ) - await hass.async_block_till_done() + with ( + mock.patch("os.path.isfile", mock.Mock(return_value=True)), + mock.patch("os.access", mock.Mock(return_value=True)), + ): + await async_setup_component( + hass, + "camera", + { + "camera": [ + cam_config_jpg, + cam_config_png, + cam_config_svg, + cam_config_noext, + ] + }, + ) + await hass.async_block_till_done() client = await hass_client() @@ -169,8 +217,12 @@ async def test_update_file_path(hass: HomeAssistant) -> None: service_data = {"entity_id": "camera.local_file", "file_path": "new/path.jpg"} - await hass.services.async_call(DOMAIN, SERVICE_UPDATE_FILE_PATH, service_data) - await hass.async_block_till_done() + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_FILE_PATH, + service_data, + blocking=True, + ) state = hass.states.get("camera.local_file") assert state.attributes.get("file_path") == "new/path.jpg" @@ -178,3 +230,18 @@ async def 
test_update_file_path(hass: HomeAssistant) -> None: # Check that local_file_camera_2 file_path is still as configured state = hass.states.get("camera.local_file_camera_2") assert state.attributes.get("file_path") == "mock/path_2.jpg" + + # Assert it fails if file is not readable + service_data = { + ATTR_ENTITY_ID: "camera.local_file", + CONF_FILE_PATH: "new/path2.jpg", + } + with pytest.raises( + ServiceValidationError, match="Path new/path2.jpg is not accessible" + ): + await hass.services.async_call( + DOMAIN, + SERVICE_UPDATE_FILE_PATH, + service_data, + blocking=True, + ) From 73f04e3ede7362ffc2054035803e8dc9c1195529 Mon Sep 17 00:00:00 2001 From: Robert Svensson Date: Fri, 6 Sep 2024 15:41:49 +0200 Subject: [PATCH 1523/1635] Add filter run time for deCONZ air purifiers (#123306) * Add filter run time for deCONZ air purifiers * Add duration and second * Fix review comments * Update tests/components/deconz/snapshots/test_sensor.ambr --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/deconz/sensor.py | 16 ++++++ .../deconz/snapshots/test_sensor.ambr | 54 +++++++++++++++++++ tests/components/deconz/test_sensor.py | 35 ++++++++++++ 3 files changed, 105 insertions(+) diff --git a/homeassistant/components/deconz/sensor.py b/homeassistant/components/deconz/sensor.py index e67c0129147..8b2b4896cdf 100644 --- a/homeassistant/components/deconz/sensor.py +++ b/homeassistant/components/deconz/sensor.py @@ -10,6 +10,7 @@ from typing import Generic, TypeVar from pydeconz.interfaces.sensors import SensorResources from pydeconz.models.event import EventType from pydeconz.models.sensor import SensorBase as PydeconzSensorBase +from pydeconz.models.sensor.air_purifier import AirPurifier from pydeconz.models.sensor.air_quality import AirQuality from pydeconz.models.sensor.carbon_dioxide import CarbonDioxide from pydeconz.models.sensor.consumption import Consumption @@ -47,6 +48,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfPressure, UnitOfTemperature, + UnitOfTime, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -77,6 +79,7 @@ ATTR_EVENT_ID = "event_id" T = TypeVar( "T", + AirPurifier, AirQuality, CarbonDioxide, Consumption, @@ -108,6 +111,19 @@ class DeconzSensorDescription(Generic[T], SensorEntityDescription): ENTITY_DESCRIPTIONS: tuple[DeconzSensorDescription, ...] 
= ( + DeconzSensorDescription[AirPurifier]( + key="air_purifier_filter_run_time", + supported_fn=lambda device: True, + update_key="filterruntime", + name_suffix="Filter time", + value_fn=lambda device: device.filter_run_time, + instance_check=AirPurifier, + device_class=SensorDeviceClass.DURATION, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfTime.SECONDS, + suggested_unit_of_measurement=UnitOfTime.DAYS, + suggested_display_precision=1, + ), DeconzSensorDescription[AirQuality]( key="air_quality", supported_fn=lambda device: device.supports_air_quality, diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr index dd097ea1c9a..0b76366b5d1 100644 --- a/tests/components/deconz/snapshots/test_sensor.ambr +++ b/tests/components/deconz/snapshots/test_sensor.ambr @@ -1537,6 +1537,60 @@ 'state': '90', }) # --- +# name: test_sensors[config_entry_options0-sensor_payload21-expected21][sensor.ikea_starkvind_filter_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ikea_starkvind_filter_time', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'IKEA Starkvind Filter time', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '0c:43:14:ff:fe:6c:20:12-01-fc7d-air_purifier_filter_run_time', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[config_entry_options0-sensor_payload21-expected21][sensor.ikea_starkvind_filter_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'IKEA Starkvind Filter time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.ikea_starkvind_filter_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.849594907407407', + }) +# --- # name: test_sensors[config_entry_options0-sensor_payload3-expected3][sensor.airquality_1_ch2o-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/test_sensor.py b/tests/components/deconz/test_sensor.py index e6ae85df615..958cb3b793a 100644 --- a/tests/components/deconz/test_sensor.py +++ b/tests/components/deconz/test_sensor.py @@ -602,6 +602,41 @@ TEST_DATA = [ "next_state": "80", }, ), + ( # Air purifier filter time sensor + { + "config": { + "filterlifetime": 259200, + "ledindication": True, + "locked": False, + "mode": "speed_1", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "de26d19d9e91b2db3ded6ee7ab6b6a4b", + "lastannounced": None, + "lastseen": "2024-08-07T18:27Z", + "manufacturername": "IKEA of Sweden", + "modelid": "STARKVIND Air purifier", + "name": "IKEA Starkvind", + "productid": "E2007", + "state": { + "deviceruntime": 73405, + "filterruntime": 73405, + "lastupdated": "2024-08-07T18:27:52.543", + "replacefilter": False, + "speed": 20, + }, + "swversion": "1.1.001", + "type": "ZHAAirPurifier", + "uniqueid": "0c:43:14:ff:fe:6c:20:12-01-fc7d", + }, + { + "entity_id": "sensor.ikea_starkvind_filter_time", + 
"websocket_event": {"state": {"filterruntime": 100000}}, + "next_state": "1.15740740740741", + }, + ), ] From 1e6b6fef7e898119e66a4b7ee060469180f9a621 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Fri, 6 Sep 2024 15:42:56 +0200 Subject: [PATCH 1524/1635] Revert #122676 Yamaha discovery (#125216) Revert Yamaha discovery --- .../components/yamaha/media_player.py | 30 +------------------ 1 file changed, 1 insertion(+), 29 deletions(-) diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index 58f501b99be..bccb7b437f8 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -2,7 +2,6 @@ from __future__ import annotations -import contextlib import logging from typing import Any @@ -130,34 +129,7 @@ def _discovery(config_info): zones.extend(recv.zone_controllers()) else: _LOGGER.debug("Config Zones") - zones = None - - # Fix for upstream issues in rxv.find() with some hardware. - with contextlib.suppress(AttributeError, ValueError): - for recv in rxv.find(DISCOVER_TIMEOUT): - _LOGGER.debug( - "Found Serial %s %s %s", - recv.serial_number, - recv.ctrl_url, - recv.zone, - ) - if recv.ctrl_url == config_info.ctrl_url: - _LOGGER.debug( - "Config Zones Matched Serial %s: %s", - recv.ctrl_url, - recv.serial_number, - ) - zones = rxv.RXV( - config_info.ctrl_url, - friendly_name=config_info.name, - serial_number=recv.serial_number, - model_name=recv.model_name, - ).zone_controllers() - break - - if not zones: - _LOGGER.debug("Config Zones Fallback") - zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() + zones = rxv.RXV(config_info.ctrl_url, config_info.name).zone_controllers() _LOGGER.debug("Returned _discover zones: %s", zones) return zones From 8168b8fce4044fef3af55f73775c8b6dd3e8235a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pierre=20St=C3=A5hl?= Date: Fri, 6 Sep 2024 15:43:16 +0200 Subject: [PATCH 1525/1635] Bump pyatv to 0.15.1 (#125412) --- homeassistant/components/apple_tv/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/apple_tv/manifest.json b/homeassistant/components/apple_tv/manifest.json index 9a053829516..b4e1b354878 100644 --- a/homeassistant/components/apple_tv/manifest.json +++ b/homeassistant/components/apple_tv/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/apple_tv", "iot_class": "local_push", "loggers": ["pyatv", "srptools"], - "requirements": ["pyatv==0.15.0"], + "requirements": ["pyatv==0.15.1"], "zeroconf": [ "_mediaremotetv._tcp.local.", "_companion-link._tcp.local.", diff --git a/requirements_all.txt b/requirements_all.txt index 6a215b2989c..8a5c6a34a07 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1753,7 +1753,7 @@ pyatag==0.3.5.3 pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.0 +pyatv==0.15.1 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 74d792acb15..614fb06b132 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1424,7 +1424,7 @@ pyatag==0.3.5.3 pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.15.0 +pyatv==0.15.1 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 From 6b1fc00910e776dba53f6c5fd297a892cbd9057b Mon Sep 17 00:00:00 2001 From: TimL Date: Fri, 6 Sep 2024 23:46:08 +1000 Subject: [PATCH 
1526/1635] Improve handling of old firmware versions (#125406) * Update Info fixture with new fields from pysmlight 0.0.14 * Create repair if device is running unsupported firmware * Add test for legacy firmware info * Add strings for repair issue --- .../components/smlight/coordinator.py | 22 +++++++++++- homeassistant/components/smlight/strings.json | 6 ++++ tests/components/smlight/fixtures/info.json | 4 ++- .../smlight/snapshots/test_init.ambr | 2 +- tests/components/smlight/test_init.py | 34 ++++++++++++++++++- 5 files changed, 64 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/smlight/coordinator.py b/homeassistant/components/smlight/coordinator.py index 380644c81d1..094c6ec9cdb 100644 --- a/homeassistant/components/smlight/coordinator.py +++ b/homeassistant/components/smlight/coordinator.py @@ -9,8 +9,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.issue_registry import IssueSeverity from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN, LOGGER, SCAN_INTERVAL @@ -40,6 +42,7 @@ class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): self.unique_id: str | None = None self.client = Api2(host=host, session=async_get_clientsession(hass)) + self.legacy_api: int = 0 async def _async_setup(self) -> None: """Authenticate if needed during initial setup.""" @@ -62,11 +65,28 @@ class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): info = await self.client.get_info() self.unique_id = format_mac(info.MAC) + if info.legacy_api: + self.legacy_api = info.legacy_api + ir.async_create_issue( + self.hass, + DOMAIN, + "unsupported_firmware", + is_fixable=False, + is_persistent=False, + learn_more_url="https://smlight.tech/flasher/#SLZB-06", + severity=IssueSeverity.ERROR, + translation_key="unsupported_firmware", + ) + async def _async_update_data(self) -> SmData: """Fetch data from the SMLIGHT device.""" try: + sensors = Sensors() + if not self.legacy_api: + sensors = await self.client.get_sensors() + return SmData( - sensors=await self.client.get_sensors(), + sensors=sensors, info=await self.client.get_info(), ) except SmlightAuthError as err: diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json index e3e8fee0d4d..8628a49a13c 100644 --- a/homeassistant/components/smlight/strings.json +++ b/homeassistant/components/smlight/strings.json @@ -92,5 +92,11 @@ "name": "LED night mode" } } + }, + "issues": { + "unsupported_firmware": { + "title": "SLZB core firmware update required", + "description": "Your SMLIGHT SLZB-06x device is running an unsupported core firmware version. Please update it to the latest version to enjoy all the features of this integration." 
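The coordinator change above raises the "unsupported_firmware" repair issue whenever the device reports a legacy API, and this strings.json entry supplies the issue's title and description. A minimal sketch of the other half of that lifecycle, retracting the repair after a firmware upgrade is detected, using the standard issue-registry helper; the cleanup step itself is an assumption added for illustration and is not part of this commit:

from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir


def clear_unsupported_firmware_issue(hass: HomeAssistant, legacy_api: int) -> None:
    """Drop the repair issue once the device reports a supported API again."""
    # legacy_api is the same field the coordinator inspects; zero means the
    # current firmware exposes the required API.
    if not legacy_api:
        ir.async_delete_issue(hass, "smlight", "unsupported_firmware")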
+ } } } diff --git a/tests/components/smlight/fixtures/info.json b/tests/components/smlight/fixtures/info.json index 72bb7c1ed9b..070232512f3 100644 --- a/tests/components/smlight/fixtures/info.json +++ b/tests/components/smlight/fixtures/info.json @@ -3,10 +3,12 @@ "device_ip": "192.168.1.161", "fs_total": 3456, "fw_channel": "dev", + "legacy_api": 0, + "hostname": "SLZB-06p7", "MAC": "AA:BB:CC:DD:EE:FF", "model": "SLZB-06p7", "ram_total": 296, - "sw_version": "v2.3.1.dev", + "sw_version": "v2.3.6", "wifi_mode": 0, "zb_flash_size": 704, "zb_hw": "CC2652P7", diff --git a/tests/components/smlight/snapshots/test_init.ambr b/tests/components/smlight/snapshots/test_init.ambr index 528a7b7b340..bb6a6c50f9b 100644 --- a/tests/components/smlight/snapshots/test_init.ambr +++ b/tests/components/smlight/snapshots/test_init.ambr @@ -27,7 +27,7 @@ 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, - 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'sw_version': 'core: v2.3.6 / zigbee: -1', 'via_device_id': None, }) # --- diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py index 1323c93e6bf..eb7b6396d26 100644 --- a/tests/components/smlight/test_init.py +++ b/tests/components/smlight/test_init.py @@ -3,15 +3,17 @@ from unittest.mock import MagicMock from freezegun.api import FrozenDateTimeFactory +from pysmlight import Info from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError, SmlightError import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.smlight.const import SCAN_INTERVAL +from homeassistant.components.smlight.const import DOMAIN, SCAN_INTERVAL from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.issue_registry import IssueRegistry from .conftest import setup_integration @@ -110,3 +112,33 @@ async def test_device_info( ) assert device_entry is not None assert device_entry == snapshot + + +async def test_device_legacy_firmware( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + device_registry: dr.DeviceRegistry, + issue_registry: IssueRegistry, +) -> None: + """Test device setup for old firmware version that dont support required API.""" + LEGACY_VERSION = "v2.3.1" + mock_smlight_client.get_sensors.side_effect = SmlightError + mock_smlight_client.get_info.return_value = Info( + legacy_api=1, sw_version=LEGACY_VERSION, MAC="AA:BB:CC:DD:EE:FF" + ) + entry = await setup_integration(hass, mock_config_entry) + + assert entry.unique_id == "aa:bb:cc:dd:ee:ff" + + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} + ) + assert LEGACY_VERSION in device_entry.sw_version + + issue = issue_registry.async_get_issue( + domain=DOMAIN, issue_id="unsupported_firmware" + ) + assert issue is not None + assert issue.domain == DOMAIN + assert issue.issue_id == "unsupported_firmware" From 6976a66758af656be2ab7cc6df0554ac42ab5477 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 6 Sep 2024 10:11:51 -0400 Subject: [PATCH 1527/1635] Migrate VoIP to use assist satellite (#125381) * Migrate VoIP to assist satellite * Fix flaky test --- homeassistant/components/voip/__init__.py | 1 + .../components/voip/assist_satellite.py | 310 +++++++++ .../components/voip/binary_sensor.py | 6 +- 
homeassistant/components/voip/devices.py | 15 +- homeassistant/components/voip/entity.py | 8 +- homeassistant/components/voip/manifest.json | 2 +- homeassistant/components/voip/strings.json | 10 + homeassistant/components/voip/util.py | 28 + homeassistant/components/voip/voip.py | 421 +----------- tests/components/voip/conftest.py | 3 + .../components/voip/snapshots/test_voip.ambr | 10 + tests/components/voip/test_util.py | 47 ++ tests/components/voip/test_voip.py | 642 ++++++++++++------ 13 files changed, 863 insertions(+), 640 deletions(-) create mode 100644 homeassistant/components/voip/assist_satellite.py create mode 100644 homeassistant/components/voip/util.py create mode 100644 tests/components/voip/snapshots/test_voip.ambr create mode 100644 tests/components/voip/test_util.py diff --git a/homeassistant/components/voip/__init__.py b/homeassistant/components/voip/__init__.py index 9ab6a8bf0e8..cee0cbb0766 100644 --- a/homeassistant/components/voip/__init__.py +++ b/homeassistant/components/voip/__init__.py @@ -20,6 +20,7 @@ from .devices import VoIPDevices from .voip import HassVoipDatagramProtocol PLATFORMS = ( + Platform.ASSIST_SATELLITE, Platform.BINARY_SENSOR, Platform.SELECT, Platform.SWITCH, diff --git a/homeassistant/components/voip/assist_satellite.py b/homeassistant/components/voip/assist_satellite.py new file mode 100644 index 00000000000..9f117fc9878 --- /dev/null +++ b/homeassistant/components/voip/assist_satellite.py @@ -0,0 +1,310 @@ +"""Assist satellite entity for VoIP integration.""" + +from __future__ import annotations + +import asyncio +from enum import IntFlag +from functools import partial +import io +import logging +from pathlib import Path +from typing import TYPE_CHECKING, Final +import wave + +from voip_utils import RtpDatagramProtocol + +from homeassistant.components import tts +from homeassistant.components.assist_pipeline import ( + PipelineEvent, + PipelineEventType, + PipelineNotFound, +) +from homeassistant.components.assist_satellite import ( + AssistSatelliteEntity, + AssistSatelliteEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import Context, HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH +from .devices import VoIPDevice +from .entity import VoIPEntity +from .util import queue_to_iterable + +if TYPE_CHECKING: + from . 
import DomainData + +_LOGGER = logging.getLogger(__name__) + +_PIPELINE_TIMEOUT_SEC: Final = 30 + + +class Tones(IntFlag): + """Feedback tones for specific events.""" + + LISTENING = 1 + PROCESSING = 2 + ERROR = 4 + + +_TONE_FILENAMES: dict[Tones, str] = { + Tones.LISTENING: "tone.pcm", + Tones.PROCESSING: "processing.pcm", + Tones.ERROR: "error.pcm", +} + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up VoIP Assist satellite entity.""" + domain_data: DomainData = hass.data[DOMAIN] + + @callback + def async_add_device(device: VoIPDevice) -> None: + """Add device.""" + async_add_entities([VoipAssistSatellite(hass, device, config_entry)]) + + domain_data.devices.async_add_new_device_listener(async_add_device) + + entities: list[VoIPEntity] = [ + VoipAssistSatellite(hass, device, config_entry) + for device in domain_data.devices + ] + + async_add_entities(entities) + + +class VoipAssistSatellite(VoIPEntity, AssistSatelliteEntity, RtpDatagramProtocol): + """Assist satellite for VoIP devices.""" + + entity_description = AssistSatelliteEntityDescription(key="assist_satellite") + _attr_translation_key = "assist_satellite" + _attr_has_entity_name = True + _attr_name = None + + def __init__( + self, + hass: HomeAssistant, + voip_device: VoIPDevice, + config_entry: ConfigEntry, + tones=Tones.LISTENING | Tones.PROCESSING | Tones.ERROR, + ) -> None: + """Initialize an Assist satellite.""" + VoIPEntity.__init__(self, voip_device) + AssistSatelliteEntity.__init__(self) + RtpDatagramProtocol.__init__(self) + + self.config_entry = config_entry + + self._audio_queue: asyncio.Queue[bytes] = asyncio.Queue() + self._audio_chunk_timeout: float = 2.0 + self._pipeline_task: asyncio.Task | None = None + self._pipeline_had_error: bool = False + self._tts_done = asyncio.Event() + self._tts_extra_timeout: float = 1.0 + self._tone_bytes: dict[Tones, bytes] = {} + self._tones = tones + self._processing_tone_done = asyncio.Event() + + @property + def pipeline_entity_id(self) -> str | None: + """Return the entity ID of the pipeline to use for the next conversation.""" + return self.voip_device.get_pipeline_entity_id(self.hass) + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Return the entity ID of the VAD sensitivity to use for the next conversation.""" + return self.voip_device.get_vad_sensitivity_entity_id(self.hass) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + self.voip_device.protocol = self + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + assert self.voip_device.protocol == self + self.voip_device.protocol = None + + # ------------------------------------------------------------------------- + # VoIP + # ------------------------------------------------------------------------- + + def on_chunk(self, audio_bytes: bytes) -> None: + """Handle raw audio chunk.""" + if self._pipeline_task is None: + self._clear_audio_queue() + + # Run pipeline until voice command finishes, then start over + self._pipeline_task = self.config_entry.async_create_background_task( + self.hass, + self._run_pipeline(), + "voip_pipeline_run", + ) + + self._audio_queue.put_nowait(audio_bytes) + + async def _run_pipeline( + self, + ) -> None: + """Forward audio to pipeline STT and handle TTS.""" + 
self.async_set_context(Context(user_id=self.config_entry.data["user"])) + self.voip_device.set_is_active(True) + + # Play listening tone at the start of each cycle + await self._play_tone(Tones.LISTENING, silence_before=0.2) + + try: + self._tts_done.clear() + + # Run pipeline with a timeout + _LOGGER.debug("Starting pipeline") + async with asyncio.timeout(_PIPELINE_TIMEOUT_SEC): + await self.async_accept_pipeline_from_satellite( + audio_stream=queue_to_iterable( + self._audio_queue, timeout=self._audio_chunk_timeout + ), + ) + + if self._pipeline_had_error: + self._pipeline_had_error = False + await self._play_tone(Tones.ERROR) + else: + # Block until TTS is done speaking. + # + # This is set in _send_tts and has a timeout that's based on the + # length of the TTS audio. + await self._tts_done.wait() + + _LOGGER.debug("Pipeline finished") + except PipelineNotFound: + _LOGGER.warning("Pipeline not found") + except (asyncio.CancelledError, TimeoutError): + # Expected after caller hangs up + _LOGGER.debug("Pipeline cancelled or timed out") + self.disconnect() + self._clear_audio_queue() + finally: + self.voip_device.set_is_active(False) + + # Allow pipeline to run again + self._pipeline_task = None + + def _clear_audio_queue(self) -> None: + """Ensure audio queue is empty.""" + while not self._audio_queue.empty(): + self._audio_queue.get_nowait() + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Set state based on pipeline stage.""" + if event.type == PipelineEventType.STT_END: + if (self._tones & Tones.PROCESSING) == Tones.PROCESSING: + self._processing_tone_done.clear() + self.config_entry.async_create_background_task( + self.hass, self._play_tone(Tones.PROCESSING), "voip_process_tone" + ) + elif event.type == PipelineEventType.TTS_END: + # Send TTS audio to caller over RTP + if event.data and (tts_output := event.data["tts_output"]): + media_id = tts_output["media_id"] + self.config_entry.async_create_background_task( + self.hass, + self._send_tts(media_id), + "voip_pipeline_tts", + ) + else: + # Empty TTS response + self._tts_done.set() + elif event.type == PipelineEventType.ERROR: + # Play error tone instead of wait for TTS when pipeline is finished. 
+ self._pipeline_had_error = True + + async def _send_tts(self, media_id: str) -> None: + """Send TTS audio to caller via RTP.""" + try: + if self.transport is None: + return # not connected + + extension, data = await tts.async_get_media_source_audio( + self.hass, + media_id, + ) + + if extension != "wav": + raise ValueError(f"Only WAV audio can be streamed, got {extension}") + + if (self._tones & Tones.PROCESSING) == Tones.PROCESSING: + # Don't overlap TTS and processing beep + await self._processing_tone_done.wait() + + with io.BytesIO(data) as wav_io: + with wave.open(wav_io, "rb") as wav_file: + sample_rate = wav_file.getframerate() + sample_width = wav_file.getsampwidth() + sample_channels = wav_file.getnchannels() + + if ( + (sample_rate != RATE) + or (sample_width != WIDTH) + or (sample_channels != CHANNELS) + ): + raise ValueError( + f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," + f" got {sample_rate}/{sample_width}/{sample_channels}" + ) + + audio_bytes = wav_file.readframes(wav_file.getnframes()) + + _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) + + # Time out 1 second after TTS audio should be finished + tts_samples = len(audio_bytes) / (WIDTH * CHANNELS) + tts_seconds = tts_samples / RATE + + async with asyncio.timeout(tts_seconds + self._tts_extra_timeout): + # TTS audio is 16Khz 16-bit mono + await self._async_send_audio(audio_bytes) + except TimeoutError: + _LOGGER.warning("TTS timeout") + raise + finally: + # Signal pipeline to restart + self._tts_done.set() + + # Update satellite state + self.tts_response_finished() + + async def _async_send_audio(self, audio_bytes: bytes, **kwargs): + """Send audio in executor.""" + await self.hass.async_add_executor_job( + partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs) + ) + + async def _play_tone(self, tone: Tones, silence_before: float = 0.0) -> None: + """Play a tone as feedback to the user if it's enabled.""" + if (self._tones & tone) != tone: + return # not enabled + + if tone not in self._tone_bytes: + # Do I/O in executor + self._tone_bytes[tone] = await self.hass.async_add_executor_job( + self._load_pcm, + _TONE_FILENAMES[tone], + ) + + await self._async_send_audio( + self._tone_bytes[tone], + silence_before=silence_before, + ) + + if tone == Tones.PROCESSING: + self._processing_tone_done.set() + + def _load_pcm(self, file_name: str) -> bytes: + """Load raw audio (16Khz, 16-bit mono).""" + return (Path(__file__).parent / file_name).read_bytes() diff --git a/homeassistant/components/voip/binary_sensor.py b/homeassistant/components/voip/binary_sensor.py index 8eeefbd5d94..121de507d7b 100644 --- a/homeassistant/components/voip/binary_sensor.py +++ b/homeassistant/components/voip/binary_sensor.py @@ -51,10 +51,12 @@ class VoIPCallInProgress(VoIPEntity, BinarySensorEntity): """Call when entity about to be added to hass.""" await super().async_added_to_hass() - self.async_on_remove(self._device.async_listen_update(self._is_active_changed)) + self.async_on_remove( + self.voip_device.async_listen_update(self._is_active_changed) + ) @callback def _is_active_changed(self, device: VoIPDevice) -> None: """Call when active state changed.""" - self._attr_is_on = self._device.is_active + self._attr_is_on = self.voip_device.is_active self.async_write_ha_state() diff --git a/homeassistant/components/voip/devices.py b/homeassistant/components/voip/devices.py index 4e2dca15308..613d05fc614 100644 --- a/homeassistant/components/voip/devices.py +++ b/homeassistant/components/voip/devices.py @@ 
-5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Iterator from dataclasses import dataclass, field -from voip_utils import CallInfo +from voip_utils import CallInfo, VoipDatagramProtocol from homeassistant.config_entries import ConfigEntry from homeassistant.core import Event, HomeAssistant, callback @@ -22,6 +22,7 @@ class VoIPDevice: device_id: str is_active: bool = False update_listeners: list[Callable[[VoIPDevice], None]] = field(default_factory=list) + protocol: VoipDatagramProtocol | None = None @callback def set_is_active(self, active: bool) -> None: @@ -56,6 +57,18 @@ class VoIPDevice: return False + def get_pipeline_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id for pipeline select.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id("select", DOMAIN, f"{self.voip_id}-pipeline") + + def get_vad_sensitivity_entity_id(self, hass: HomeAssistant) -> str | None: + """Return entity id for VAD sensitivity.""" + ent_reg = er.async_get(hass) + return ent_reg.async_get_entity_id( + "select", DOMAIN, f"{self.voip_id}-vad_sensitivity" + ) + class VoIPDevices: """Class to store devices.""" diff --git a/homeassistant/components/voip/entity.py b/homeassistant/components/voip/entity.py index 9e1e067b195..e96784bc218 100644 --- a/homeassistant/components/voip/entity.py +++ b/homeassistant/components/voip/entity.py @@ -15,10 +15,10 @@ class VoIPEntity(entity.Entity): _attr_has_entity_name = True _attr_should_poll = False - def __init__(self, device: VoIPDevice) -> None: + def __init__(self, voip_device: VoIPDevice) -> None: """Initialize VoIP entity.""" - self._device = device - self._attr_unique_id = f"{device.voip_id}-{self.entity_description.key}" + self.voip_device = voip_device + self._attr_unique_id = f"{voip_device.voip_id}-{self.entity_description.key}" self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, device.voip_id)}, + identifiers={(DOMAIN, voip_device.voip_id)}, ) diff --git a/homeassistant/components/voip/manifest.json b/homeassistant/components/voip/manifest.json index 594abc69c13..964193fca53 100644 --- a/homeassistant/components/voip/manifest.json +++ b/homeassistant/components/voip/manifest.json @@ -3,7 +3,7 @@ "name": "Voice over IP", "codeowners": ["@balloob", "@synesthesiam"], "config_flow": true, - "dependencies": ["assist_pipeline"], + "dependencies": ["assist_pipeline", "assist_satellite"], "documentation": "https://www.home-assistant.io/integrations/voip", "iot_class": "local_push", "quality_scale": "internal", diff --git a/homeassistant/components/voip/strings.json b/homeassistant/components/voip/strings.json index 8bcbb06d4e2..750f526ba1b 100644 --- a/homeassistant/components/voip/strings.json +++ b/homeassistant/components/voip/strings.json @@ -10,6 +10,16 @@ } }, "entity": { + "assist_satellite": { + "assist_satellite": { + "state": { + "listening_wake_word": "[%key:component::assist_satellite::entity_component::_::state::listening_wake_word%]", + "listening_command": "[%key:component::assist_satellite::entity_component::_::state::listening_command%]", + "responding": "[%key:component::assist_satellite::entity_component::_::state::responding%]", + "processing": "[%key:component::assist_satellite::entity_component::_::state::processing%]" + } + } + }, "binary_sensor": { "call_in_progress": { "name": "Call in progress" diff --git a/homeassistant/components/voip/util.py b/homeassistant/components/voip/util.py new file mode 100644 index 00000000000..bfda96ba810 --- /dev/null +++ 
b/homeassistant/components/voip/util.py @@ -0,0 +1,28 @@ +"""Voip util functions.""" + +from __future__ import annotations + +from asyncio import Queue, timeout as async_timeout +from collections.abc import AsyncIterable +from typing import Any + +from typing_extensions import TypeVar + +_DataT = TypeVar("_DataT", default=Any) + + +async def queue_to_iterable( + queue: Queue[_DataT], timeout: float | None = None +) -> AsyncIterable[_DataT]: + """Stream items from a queue until None with an optional timeout per item.""" + if timeout is None: + while (item := await queue.get()) is not None: + yield item + else: + async with async_timeout(timeout): + item = await queue.get() + + while item is not None: + yield item + async with async_timeout(timeout): + item = await queue.get() diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index be1e58b6eec..6f6cf989d3b 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -3,15 +3,11 @@ from __future__ import annotations import asyncio -from collections import deque -from collections.abc import AsyncIterable, MutableSequence, Sequence from functools import partial -import io import logging from pathlib import Path import time from typing import TYPE_CHECKING -import wave from voip_utils import ( CallInfo, @@ -21,33 +17,19 @@ from voip_utils import ( VoipDatagramProtocol, ) -from homeassistant.components import assist_pipeline, stt, tts from homeassistant.components.assist_pipeline import ( Pipeline, - PipelineEvent, - PipelineEventType, PipelineNotFound, async_get_pipeline, - async_pipeline_from_audio_stream, select as pipeline_select, ) -from homeassistant.components.assist_pipeline.audio_enhancer import ( - AudioEnhancer, - MicroVadSpeexEnhancer, -) -from homeassistant.components.assist_pipeline.vad import ( - AudioBuffer, - VadSensitivity, - VoiceCommandSegmenter, -) from homeassistant.const import __version__ -from homeassistant.core import Context, HomeAssistant -from homeassistant.util.ulid import ulid_now +from homeassistant.core import HomeAssistant from .const import CHANNELS, DOMAIN, RATE, RTP_AUDIO_SETTINGS, WIDTH if TYPE_CHECKING: - from .devices import VoIPDevice, VoIPDevices + from .devices import VoIPDevices _LOGGER = logging.getLogger(__name__) @@ -60,11 +42,8 @@ def make_protocol( ) -> VoipDatagramProtocol: """Plays a pre-recorded message if pipeline is misconfigured.""" voip_device = devices.async_get_or_create(call_info) - pipeline_id = pipeline_select.get_chosen_pipeline( - hass, - DOMAIN, - voip_device.voip_id, - ) + + pipeline_id = pipeline_select.get_chosen_pipeline(hass, DOMAIN, voip_device.voip_id) try: pipeline: Pipeline | None = async_get_pipeline(hass, pipeline_id) except PipelineNotFound: @@ -83,22 +62,18 @@ def make_protocol( rtcp_state=rtcp_state, ) - vad_sensitivity = pipeline_select.get_vad_sensitivity( - hass, - DOMAIN, - voip_device.voip_id, - ) + if (protocol := voip_device.protocol) is None: + raise ValueError("VoIP satellite not found") - # Pipeline is properly configured - return PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(user_id=devices.config_entry.data["user"]), - opus_payload_type=call_info.opus_payload_type, - silence_seconds=VadSensitivity.to_seconds(vad_sensitivity), - rtcp_state=rtcp_state, - ) + protocol._rtp_input.opus_payload_type = call_info.opus_payload_type # noqa: SLF001 + protocol._rtp_output.opus_payload_type = call_info.opus_payload_type # noqa: SLF001 + + 
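The new util.py helper streams queue items until a None sentinel and optionally bounds the wait for each item. A small self-contained usage sketch, mirroring how the satellite feeds caller audio into the pipeline; the chunk values and the 2.0 second timeout are made up for the example:

import asyncio

from homeassistant.components.voip.util import queue_to_iterable


async def demo() -> None:
    # Producer side: audio chunks go into the queue, None ends the stream.
    audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue()
    for chunk in (b"chunk-1", b"chunk-2"):
        audio_queue.put_nowait(chunk)
    audio_queue.put_nowait(None)

    # Consumer side: iterate until the sentinel; a chunk arriving later than
    # the timeout raises TimeoutError, which the satellite code above treats
    # as the caller having hung up.
    async for chunk in queue_to_iterable(audio_queue, timeout=2.0):
        print(f"got {len(chunk)} byte(s)")


asyncio.run(demo())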
protocol.rtcp_state = rtcp_state + if protocol.rtcp_state is not None: + # Automatically disconnect when BYE is received over RTCP + protocol.rtcp_state.bye_callback = protocol.disconnect + + return protocol class HassVoipDatagramProtocol(VoipDatagramProtocol): @@ -143,372 +118,6 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol): await self._closed_event.wait() -class PipelineRtpDatagramProtocol(RtpDatagramProtocol): - """Run a voice assistant pipeline in a loop for a VoIP call.""" - - def __init__( - self, - hass: HomeAssistant, - language: str, - voip_device: VoIPDevice, - context: Context, - opus_payload_type: int, - pipeline_timeout: float = 30.0, - audio_timeout: float = 2.0, - buffered_chunks_before_speech: int = 100, - listening_tone_enabled: bool = True, - processing_tone_enabled: bool = True, - error_tone_enabled: bool = True, - tone_delay: float = 0.2, - tts_extra_timeout: float = 1.0, - silence_seconds: float = 1.0, - rtcp_state: RtcpState | None = None, - ) -> None: - """Set up pipeline RTP server.""" - super().__init__( - rate=RATE, - width=WIDTH, - channels=CHANNELS, - opus_payload_type=opus_payload_type, - rtcp_state=rtcp_state, - ) - - self.hass = hass - self.language = language - self.voip_device = voip_device - self.pipeline: Pipeline | None = None - self.pipeline_timeout = pipeline_timeout - self.audio_timeout = audio_timeout - self.buffered_chunks_before_speech = buffered_chunks_before_speech - self.listening_tone_enabled = listening_tone_enabled - self.processing_tone_enabled = processing_tone_enabled - self.error_tone_enabled = error_tone_enabled - self.tone_delay = tone_delay - self.tts_extra_timeout = tts_extra_timeout - self.silence_seconds = silence_seconds - - self._audio_queue: asyncio.Queue[bytes] = asyncio.Queue() - self._context = context - self._conversation_id: str | None = None - self._pipeline_task: asyncio.Task | None = None - self._tts_done = asyncio.Event() - self._session_id: str | None = None - self._tone_bytes: bytes | None = None - self._processing_bytes: bytes | None = None - self._error_bytes: bytes | None = None - self._pipeline_error: bool = False - - def connection_made(self, transport): - """Server is ready.""" - super().connection_made(transport) - self.voip_device.set_is_active(True) - - def connection_lost(self, exc): - """Handle connection is lost or closed.""" - super().connection_lost(exc) - self.voip_device.set_is_active(False) - - def on_chunk(self, audio_bytes: bytes) -> None: - """Handle raw audio chunk.""" - if self._pipeline_task is None: - self._clear_audio_queue() - - # Run pipeline until voice command finishes, then start over - self._pipeline_task = self.hass.async_create_background_task( - self._run_pipeline(), - "voip_pipeline_run", - ) - - self._audio_queue.put_nowait(audio_bytes) - - async def _run_pipeline( - self, - ) -> None: - """Forward audio to pipeline STT and handle TTS.""" - if self._session_id is None: - self._session_id = ulid_now() - - # Play listening tone at the start of each cycle - if self.listening_tone_enabled: - await self._play_listening_tone() - - try: - # Wait for speech before starting pipeline - segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - audio_enhancer = MicroVadSpeexEnhancer(0, 0, True) - chunk_buffer: deque[bytes] = deque( - maxlen=self.buffered_chunks_before_speech, - ) - speech_detected = await self._wait_for_speech( - segmenter, - audio_enhancer, - chunk_buffer, - ) - if not speech_detected: - _LOGGER.debug("No speech detected") - return - - 
_LOGGER.debug("Starting pipeline") - self._tts_done.clear() - - async def stt_stream(): - try: - async for chunk in self._segment_audio( - segmenter, - audio_enhancer, - chunk_buffer, - ): - yield chunk - - if self.processing_tone_enabled: - await self._play_processing_tone() - except TimeoutError: - # Expected after caller hangs up - _LOGGER.debug("Audio timeout") - self._session_id = None - self.disconnect() - finally: - self._clear_audio_queue() - - # Run pipeline with a timeout - async with asyncio.timeout(self.pipeline_timeout): - await async_pipeline_from_audio_stream( - self.hass, - context=self._context, - event_callback=self._event_callback, - stt_metadata=stt.SpeechMetadata( - language="", # set in async_pipeline_from_audio_stream - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=stt_stream(), - pipeline_id=pipeline_select.get_chosen_pipeline( - self.hass, DOMAIN, self.voip_device.voip_id - ), - conversation_id=self._conversation_id, - device_id=self.voip_device.device_id, - tts_audio_output="wav", - ) - - if self._pipeline_error: - self._pipeline_error = False - if self.error_tone_enabled: - await self._play_error_tone() - else: - # Block until TTS is done speaking. - # - # This is set in _send_tts and has a timeout that's based on the - # length of the TTS audio. - await self._tts_done.wait() - - _LOGGER.debug("Pipeline finished") - except PipelineNotFound: - _LOGGER.warning("Pipeline not found") - except TimeoutError: - # Expected after caller hangs up - _LOGGER.debug("Pipeline timeout") - self._session_id = None - self.disconnect() - finally: - # Allow pipeline to run again - self._pipeline_task = None - - async def _wait_for_speech( - self, - segmenter: VoiceCommandSegmenter, - audio_enhancer: AudioEnhancer, - chunk_buffer: MutableSequence[bytes], - ): - """Buffer audio chunks until speech is detected. - - Returns True if speech was detected, False otherwise. - """ - # Timeout if no audio comes in for a while. - # This means the caller hung up. - async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) - - while chunk: - chunk_buffer.append(chunk) - - segmenter.process_with_vad( - chunk, - assist_pipeline.SAMPLES_PER_CHUNK, - lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, - vad_buffer, - ) - if segmenter.in_command: - # Buffer until command starts - if len(vad_buffer) > 0: - chunk_buffer.append(vad_buffer.bytes()) - - return True - - async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - return False - - async def _segment_audio( - self, - segmenter: VoiceCommandSegmenter, - audio_enhancer: AudioEnhancer, - chunk_buffer: Sequence[bytes], - ) -> AsyncIterable[bytes]: - """Yield audio chunks until voice command has finished.""" - # Buffered chunks first - for buffered_chunk in chunk_buffer: - yield buffered_chunk - - # Timeout if no audio comes in for a while. - # This means the caller hung up. 
- async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - vad_buffer = AudioBuffer(assist_pipeline.SAMPLES_PER_CHUNK * WIDTH) - - while chunk: - if not segmenter.process_with_vad( - chunk, - assist_pipeline.SAMPLES_PER_CHUNK, - lambda x: audio_enhancer.enhance_chunk(x, 0).is_speech is True, - vad_buffer, - ): - # Voice command is finished - break - - yield chunk - - async with asyncio.timeout(self.audio_timeout): - chunk = await self._audio_queue.get() - - def _clear_audio_queue(self) -> None: - while not self._audio_queue.empty(): - self._audio_queue.get_nowait() - - def _event_callback(self, event: PipelineEvent): - if not event.data: - return - - if event.type == PipelineEventType.INTENT_END: - # Capture conversation id - self._conversation_id = event.data["intent_output"]["conversation_id"] - elif event.type == PipelineEventType.TTS_END: - # Send TTS audio to caller over RTP - tts_output = event.data["tts_output"] - if tts_output: - media_id = tts_output["media_id"] - self.hass.async_create_background_task( - self._send_tts(media_id), - "voip_pipeline_tts", - ) - else: - # Empty TTS response - self._tts_done.set() - elif event.type == PipelineEventType.ERROR: - # Play error tone instead of wait for TTS - self._pipeline_error = True - - async def _send_tts(self, media_id: str) -> None: - """Send TTS audio to caller via RTP.""" - try: - if self.transport is None: - return - - extension, data = await tts.async_get_media_source_audio( - self.hass, - media_id, - ) - - if extension != "wav": - raise ValueError(f"Only WAV audio can be streamed, got {extension}") - - with io.BytesIO(data) as wav_io: - with wave.open(wav_io, "rb") as wav_file: - sample_rate = wav_file.getframerate() - sample_width = wav_file.getsampwidth() - sample_channels = wav_file.getnchannels() - - if ( - (sample_rate != RATE) - or (sample_width != WIDTH) - or (sample_channels != CHANNELS) - ): - raise ValueError( - f"Expected rate/width/channels as {RATE}/{WIDTH}/{CHANNELS}," - f" got {sample_rate}/{sample_width}/{sample_channels}" - ) - - audio_bytes = wav_file.readframes(wav_file.getnframes()) - - _LOGGER.debug("Sending %s byte(s) of audio", len(audio_bytes)) - - # Time out 1 second after TTS audio should be finished - tts_samples = len(audio_bytes) / (WIDTH * CHANNELS) - tts_seconds = tts_samples / RATE - - async with asyncio.timeout(tts_seconds + self.tts_extra_timeout): - # TTS audio is 16Khz 16-bit mono - await self._async_send_audio(audio_bytes) - except TimeoutError: - _LOGGER.warning("TTS timeout") - raise - finally: - # Signal pipeline to restart - self._tts_done.set() - - async def _async_send_audio(self, audio_bytes: bytes, **kwargs): - """Send audio in executor.""" - await self.hass.async_add_executor_job( - partial(self.send_audio, audio_bytes, **RTP_AUDIO_SETTINGS, **kwargs) - ) - - async def _play_listening_tone(self) -> None: - """Play a tone to indicate that Home Assistant is listening.""" - if self._tone_bytes is None: - # Do I/O in executor - self._tone_bytes = await self.hass.async_add_executor_job( - self._load_pcm, - "tone.pcm", - ) - - await self._async_send_audio( - self._tone_bytes, - silence_before=self.tone_delay, - ) - - async def _play_processing_tone(self) -> None: - """Play a tone to indicate that Home Assistant is processing the voice command.""" - if self._processing_bytes is None: - # Do I/O in executor - self._processing_bytes = await self.hass.async_add_executor_job( - self._load_pcm, - "processing.pcm", - ) - - await 
self._async_send_audio(self._processing_bytes) - - async def _play_error_tone(self) -> None: - """Play a tone to indicate a pipeline error occurred.""" - if self._error_bytes is None: - # Do I/O in executor - self._error_bytes = await self.hass.async_add_executor_job( - self._load_pcm, - "error.pcm", - ) - - await self._async_send_audio(self._error_bytes) - - def _load_pcm(self, file_name: str) -> bytes: - """Load raw audio (16Khz, 16-bit mono).""" - return (Path(__file__).parent / file_name).read_bytes() - - class PreRecordMessageProtocol(RtpDatagramProtocol): """Plays a pre-recorded message on a loop.""" diff --git a/tests/components/voip/conftest.py b/tests/components/voip/conftest.py index b039a49e0f0..cbca8997797 100644 --- a/tests/components/voip/conftest.py +++ b/tests/components/voip/conftest.py @@ -14,6 +14,9 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry +from tests.components.tts.conftest import ( + mock_tts_cache_dir_fixture_autouse, # noqa: F401 +) @pytest.fixture(autouse=True) diff --git a/tests/components/voip/snapshots/test_voip.ambr b/tests/components/voip/snapshots/test_voip.ambr new file mode 100644 index 00000000000..935dbba51b8 --- /dev/null +++ b/tests/components/voip/snapshots/test_voip.ambr @@ -0,0 +1,10 @@ +# serializer version: 1 +# name: test_calls_not_allowed + b'\xfe\xff\x04\x00\x05\x00\x03\x00\x04\x00\x03\x00\x02\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x03\x00\x02\x00\x03\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\xfe\xff\xfc\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xfd\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xff\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x0
0\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfd\xff\xfe\xff\xfc\xff\xfc\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xfe\xff\xfe\xff\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x02\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xfe\xff\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfd\xff\xff\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\xfc\xff\xfa\xff\xfb\xff\xfb\xff\xfb\xff\xff\xff\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfa\xff\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\xff\xff\x00\x00\xfd\xff\xfa\xff\xfc\xff\xfc\xff\xfa\xff\xfe\xff\xfd\xff\xf8\xff\xf7\xff\xfa\xff\xfe\xff\xfa\xff\xf8\xff\xf9\xff\xfa\xff\xfd\xff\x00\x00\x00\x00\x00\x00\xfb\xff\xfb\xff\xfa\xff\xfd\xff\xff\xff\xff\xff\x01\x00\xfc\xff\xff\xff\xf8\xff\xff\xff\x00\x00\xf3\xff\xfd\xff\xf3\xff\xfb\xff\x01\x00\xff\xff\xfa\xff\x02\x00\xf4\xff\xeb\xff\xfc\xff\xf7\xff\xe8\xff\xfb\xff\xf8\xff\xf7\xff\r\x00\xfe\xff\x02\x00\xfe\xff\xf9\xff\xfa\xff\xf8\xff\x00\x00\xf6\xff\xfe\xff\x02\x00\x05\x00\x04\x00\xfa\xff\xf4\xff\xe8\xff\xf3\xff\x06\x00\xf9\xff\x06\x00\n\x00\xf8\xff\xfa\xff\x01\x00\xf4\xff\xfd\xff\xf7\xff\xf4\xff\x01\x00\x05\x00\x02\x00\x04\x00\xfc\xff\xef\xff\x03\x00\xf3\xff\xfc\xff\x08\x00\x04\x00\xfd\xff\x08\x00\x04\x00\x00\x00\x00\x00\x06\x00\x03\x00\xfd\xff\x04\x00\x15\x00\x06\x00\x12\x00\x15\x00\x05\x00\x04\x00\x05\x00\x05\x00\x02\x00\x07\x00\x05\x00\xfc\xff\xfd\xff\x06\x00\xff\xff\xf8\xff\x01\x00\xf2\xff\xe6\xff\xf4\xff\xef\xff\xfb\xff\xfc\xff\xf2\xff\xec\xff\xe4\xff\x
e6\xff\xf9\xff\xfa\xff\xee\xff\xea\xff\xe9\xff\xf8\xff\x06\x00\x0b\x00\xe9\xff\x03\x00\xea\xff\xfc\xff\x0f\x00\x00\x00\x13\x00\xe6\xff\xfe\xff\x10\x00\x12\x00\xfd\xff\x03\x00\xf1\xff\xfb\xff\x18\x00\x1f\x00\x08\x00\xfa\xff\xf9\xff\xf6\xff\r\x00\x17\x00\x03\x00\xfb\xff\xfc\xff\xf3\xff,\x00\x1c\x00\xf8\xff\xed\xff\x05\x00\x10\x00$\x00@\x00\x19\x00\x00\x00\x19\x004\x00G\x00]\x001\x00\x07\x005\x00J\x00X\x00\\\x00\x03\x00\xf6\xff\x13\x007\x00]\x008\x00\xef\xff\xeb\xff\x00\x00#\x00\x85\x00S\x00\xb6\xff\xcf\xff\x1a\x00\xc3\xff\xb6\x00\x8a\x00^\xff\xe0\xff\xfc\xff\xba\xff4\x00n\x00\xc5\xff5\xff\xf4\xffR\x00\xe8\xff-\x00\x11\x00z\xff\xb0\xff\x92\x00\xeb\xff\xca\xff\t\x00\xa0\xff\xcb\xff6\x00L\x00\x02\x00\x91\xff\xdb\xff\xd3\xff\xed\xff\xc0\xff\x8b\xff\x97\x00\xe2\xff\x16\x00B\x00\xbc\xff\xfb\xff1\x00\xe4\xff\xed\xff\x95\x00\xcc\x00H\x00>\x00\x03\x00g\xff\x18\x01\x8c\x01\xa8\xff?\xff\xc6\xfeO\xff\xaa\x00\x00\x01Q\xff\xaf\xfe\xce\xfe\xd8\xfe\x7f\xff\xce\xfe\x93\xfd\xb6\xfc\x9c\xfd\xb1\xff\xf7\x00H\x00D\xfe\x8d\xfc\xc2\xfco\xffG\x01r\x00\x94\xffG\x007\x01,\x02\xc0\x02\x18\x01\xaa\xff\xf0\xffS\x00\xbf\x029\x03\xa0\x01p\x00/\x00\xc4\xff\xb3\xff\xd4\xffU\xfdB\xfd\x8b\xfe\xfb\xfe\x86\xfe\x0e\xfd\xba\xfd\xb7\xfd\x8e\xfc\xf0\xfc\x88\xfd"\xfe\'\xfe]\xfe\xfb\xfe\x13\x00\x08\x01\xe1\x00&\xff\xf0\xfe\x05\x015\x01E\x02:\x02G\x02*\x02E\x02\xcf\x02\x1f\x03\xcc\x03\x15\x03N\x03\xdf\x03\x82\x04X\x05P\x05f\x04}\x04Q\x06\xe3\x06\x9a\x06\x8e\x06\xc7\x05a\x05\xe6\x05-\x06g\x066\x06\x9e\x05\xf4\x03\x9b\x03\x14\x03e\x02\x99\x01\xdf\xff\xa1\xfe{\xfe%\xfe2\xfd/\xfc\xc3\xfa-\xf9\xe2\xf8\xa2\xf8\x8d\xf8\xa0\xf9B\xf9\x15\xf9\xf3\xf8<\xf9y\xfa\xe1\xfa\xce\xfa#\xfb\xa1\xfc\xf3\xfd\xec\xfeE\xff\xc5\xfe\x9f\xfe8\xff\x19\xff\xff\xfe5\xff\xd8\xfe\x90\xfe\x87\xfd\xb5\xfcR\xfc\x18\xfc\xae\xfaI\xf9/\xf9\x14\xf9>\xf9\xb6\xf8d\xf8o\xf8E\xf8\x18\xf8c\xf8g\xfaA\xfb\xe2\xfak\xfb\xda\xfbM\xfd\xa0\xfe\x1c\xfft\xfe\xee\xfe\xf9\xff\x0e\x00y\x00*\x00P\xff\xfa\xfe\x84\xfe\xef\xfd\xd4\xfe\xb3\xfdf\xfd\xfa\xfbq\xfb\xfa\xfb \xfd{\xfd\xe4\xfc\xb3\xfc\xe5\xfa\x97\xfd\xee\xffP\x00o\x01o\x00\xfc\x01\x13\x04S\x05R\x08\x13\x07\xda\x08\xa6\t`\x0cX\x11\x1c\x0f\x88\x0b\xb5\x04\x17\x08\x8f\x17\x8f)\x9f4G+\xa0\x1c\x9f\x12\xe9\x13\x88#\xac+++\x94"\x8f\x1bM\x1f\xa0\x1e\x05\x17\xf1\x04\x17\xf4V\xec\x13\xf0\x0e\xfaJ\xfe&\xf9\xcb\xe7\x96\xd7\xa6\xcf\xab\xd2\xd2\xd9\x95\xdbT\xd9J\xdb\x84\xe2\x98\xe8\x06\xeb8\xe8J\xe5\x93\xe5\xfa\xea\xa2\xf8:\t\xe9\x11\xd3\x10c\n\x97\x05\x1a\x08\xbb\r\x94\r\x15\x0e\xef\rz\x0eU\x10\xc1\r+\x08\xbd\xfd(\xf24\xeaj\xec\x1f\xf3H\xf7\x0e\xf5\x07\xed\xcb\xe6\x9f\xe2\xe1\xe2\xdc\xe5&\xe87\xed\xcb\xf1\x13\xf8\xf9\xfe*\x039\x04\xda\x00h\xff\xe3\x03\xdf\x0eY\x18\xf4\x1d\xa7\x1d8\x1a=\x16\xad\x12\xa8\x118\x11\xa7\x10\xaa\x0e\xfb\r`\x0c}\n\xd2\x06|\xfe@\xf5\x11\xf1U\xf2K\xf6\x9c\xf9\xf3\xf8\xf8\xf5\xc6\xf2\xb5\xf0\x1a\xf2\xb6\xf4\xa5\xf6\x87\xf7~\xf9\xa3\xfd1\x01{\x03\xff\x01\xfc\xfc\x1c\xfb\xa7\xfb\xf7\xfd\x85\x00\xb1\x00\xaf\xfe\x01\xfc\xf1\xf9x\xf7\xab\xf6c\xf4M\xf2f\xf2?\xf2\x1f\xf7 \xf8\xf7\xf7!\xf6W\xf1@\xf34\xf5\xce\xf8\xdb\xfdA\x00\x9f\x02[\x03\x18\x04\x0b\x01\xb0\x02\x0c\x06>\x08 \x0b\xfa\n\xc6\x0e\xaa\x12K\x13>\x12y\x11o\x11C\x17\xe6\':7w9 /\x0e$\'$\xcb)\x04,\xc8+\xc3+\x9a)t"`\x18\xf0\x0f\x88\x07s\xfc\xe7\xef\xf7\xe7\x9a\xe9\x0c\xed3\xec\xcf\xe4*\xda\x11\xd1\xab\xcc\xf6\xcc\x00\xd1^\xd8\x93\xdf\xb2\xe4\x08\xe75\xea\xad\xefZ\xf3`\xf4\x0c\xf6\x05\xfd/\tU\x13\xaa\x17\x06\x16,\x13-\x10\xdb\r\x18\x0c"\n\xe7\t\x8b\x08\xef\x04\x19\x00P\xfb\xd8\xf5\xbe\xed{\xe4C\xe0\x04\xe1\xff\xe2\xe4\xe2\xaf\xe2\x89\xe2N\xe2\x9e\xe2 
\xe4{\xe8\x1e\xef\xf5\xf5\x1a\xfc\xf2\x01\x9e\x08\xba\x0fh\x12\xf5\x14\xcc\x17\xb1\x1cb \xb8 9!\xa6 |\x1f\xbe\x1bu\x17\xef\x13~\x0fo\nt\x05\xef\x00;\xfd\xe3\xf9L\xf6\xab\xf1&\xef\xc7\xed\xf0\xec\x1a\xed\xf4\xec;\xee/\xf0\xa9\xf22\xf68\xf9\xf3\xfax\xfb\xd2\xfd(\xff\x00\x01\x9d\x02\xad\x02T\x03R\x02\xf6\x00$\xff\\\xfd\x9e\xfa\xef\xf6\x91\xf4\x1d\xf3K\xf3\x80\xf2\x88\xf0X\xed#\xec\x8f\xebb\xeaK\xeb\xdc\xec;\xf0\xf2\xf3\x15\xf5\xfe\xf7\xb2\xfa\xf3\xfb(\xff\xc9\x00`\x03]\t\x0b\x0cW\x10O\x14\xbf\x14\x9c\x14\xb6\x11\x81\x11X\x14y\x17f\x1b\xce&c9vB\x96:,&\xd5\x1b\xbc%\xf20H4.3\x983\xaa0P%B\x15\xa8\n/\x03i\xf8#\xf0\xcf\xf03\xf9"\xfc\x8a\xf1\xb0\xde{\xd1\xf9\xcc\xa9\xcc>\xcfw\xd67\xe1\xb8\xe7F\xe7\x06\xe6\xf1\xe7\x0c\xe9C\xe8\x0c\xea\xa2\xf2\x83\x00k\r\x91\x13\xb1\x13X\x0f\xa4\n\x13\x07(\x05\xe9\x06L\n\x00\x0b\xbb\x08\xe9\x04\x01\xffN\xf7\x97\xee\'\xe6\x9c\xe0\x95\xdeu\xdfX\xe3\x14\xe5\xba\xe4\xee\xe1\xad\xddE\xdc+\xe0\x10\xe8\xab\xf0\xbc\xf7\x06\xfet\x05\xdf\n\xc3\x0fH\x13)\x16W\x18\x7f\x1c\xc3"\x14)\xaf+<)k$0\x1e8\x19V\x15\r\x13\xd7\x0f3\x0c\x0e\x08\xb8\x01l\xfa\xe3\xf3\x05\xef\x17\xec_\xeah\xea\xa6\xeb\xfd\xec`\xeex\xef\x82\xf0\xcf\xf0U\xf2\x9d\xf5S\xfa\x89\xff\x1f\x02[\x03>\x039\x02L\x01\x0b\x00\x1e\x00\x11\x00\xb5\xfe~\xfcv\xf9!\xf7\xb5\xf3\xde\xf0Z\xed\xac\xeat\xe9\xc3\xe8\xdd\xe9\xf5\xe9\xe5\xe9\xce\xe8+\xe9_\xeb\x0f\xee\xa8\xf3x\xf6\xc7\xf9\x8d\xfc`\xfe\xd3\x03R\tA\x0f\x15\x12C\x12\xdb\x12\xfd\x14%\x1bK\x1eN\x1f\xd3\x1f\x08#\xa5.\xe7<\xb7D\x99?\x101*&\xc8&O.N3\xac2w/\x03(\xf0\x1c\xbe\x0e\xd9\x03\x90\xfd\x9f\xf5*\xefz\xeb\x04\xed\x81\xee\xac\xe9\x04\xe0\xcd\xd4`\xcdL\xcc7\xd2x\xdb0\xe4?\xea\x03\xeb\xe6\xe9\x1d\xeaH\xed\xd2\xf2\xfe\xf6\xc3\xfc\x11\x04\xa6\x0cc\x12H\x14\'\x12\x06\r!\x08h\x04\x1c\x03\x0c\x03\x10\x03{\x02\x18\xff?\xf8\xe8\xef\x1a\xe7\x00\xe1\x1e\xddb\xdb\xcf\xdby\xdd)\xe0=\xe2T\xe2\xf5\xe1\xac\xe2\xcf\xe4\xd4\xe8\xbc\xef?\xfa\xf3\x03k\x0cF\x12A\x16R\x18F\x19U\x1c\xca w%\x9a(\x99)])S&(!\xa5\x1aD\x14\x12\x0fP\x0b\x12\x08g\x04q\x00\xa2\xfb\xf8\xf5x\xef1\xea^\xe7\x82\xe7\x04\xe9p\xeb\xc0\xed\xf6\xef\x94\xf1\x94\xf2#\xf3v\xf4k\xf7j\xfb\xbf\xff\xcc\x03N\x07\x1f\x08\xbc\x06\x03\x04\xe8\x01\xe7\x00\r\x00\xf5\xfeo\xfdE\xfbl\xf8\x8e\xf5|\xf1;\xed\'\xea\n\xe8,\xe7\xe3\xe7,\xe9d\xea`\xea#\xe9\xf5\xe8\x88\xea\x11\xede\xf2\xcb\xf7`\xfc\xda\xff7\x00l\x01H\x04g\tX\x0f\x93\x13\x1c\x162\x18S\x1c\xa5!\xa5,&=\xe5J\x93L\xaf>[1\x0e1\x819x?j>\x0b;\x8c6\xc3,\\\x1de\x0e\xb3\x04X\xfc8\xf35\xeb_\xe87\xebR\xeb\xf2\xe2\x8e\xd4\x8c\xc9\xad\xc6\xb1\xc9\xe4\xce\xc2\xd5c\xde\x8d\xe5x\xe8\xc8\xe8\xd7\xe9\xc7\xed\x9a\xf2\x15\xf7\xa0\xfc\x87\x04\xc1\x0fd\x19V\x1d\x97\x19a\x12\x93\x0b\xee\x06\xf7\x04E\x05\xe8\x06\x05\x07\xbb\x02\xb6\xf9\xbd\xee|\xe5P\xe0\xa5\xdc\x98\xd9*\xd8\x86\xd9;\xdc\x05\xde>\xde]\xdeg\xdf\xd7\xe1\xea\xe6\r\xeeE\xf7`\x01 \nM\x0fI\x12C\x15T\x1a\x1a \x91$q\'\xa6)\x89*H)\xb4%\xf8 6\x1d\x80\x19\xe1\x14d\x0f|\n\x8f\x06\x83\x02[\xfc\xd1\xf5\'\xf0T\xec\x99\xea4\xea\xad\xeaP\xeb1\xec\xfa\xec\x9a\xed\xb2\xeeo\xf0|\xf3]\xf7!\xfb8\xff\xe3\x02\xed\x05\xd4\x07\xc2\x07b\x06\x9a\x04\xc7\x03\x97\x03\xcc\x03\xad\x02\xe1\xff\xe6\xfb0\xf7X\xf3G\xf0t\xed\xcb\xea\x80\xe8\x08\xe7b\xe6\xab\xe6J\xe7\xef\xe75\xe9\xc1\xea\xde\xec\xcd\xef\xd2\xf2\x93\xf5\xdc\xf7#\xfa\xce\xfd\xc7\x02:\x07Y\x0b\xc3\x0e\xc8\x10\x82\x11]\x11\xb9\x12\x98\x16 
\x1c"%b3+C\xd8JVD\x845\xd2*\x89)I-\xca3X;\xc0?\xc3;\xfd,\r\x19\x8f\x08r\xfd\t\xf8c\xf5^\xf2\x06\xf0W\xedR\xea\x0c\xe5\xdb\xdc\xe9\xd2\xee\xca\x87\xc6\xf4\xc5\xe7\xcaP\xd5\xb0\xe1\x8c\xeaM\xec\xdb\xe8\xc5\xe5\x99\xe5U\xe9G\xf1\xd1\xfc\x8b\t\xff\x12+\x16\x90\x15\xc9\x13\xe8\x10J\x0cQ\x07\x99\x04U\x05\x85\x07\x06\t\xb2\x08\xd9\x04\xa1\xfck\xf1)\xe6\xe3\xddA\xda:\xdc\xe1\xe1\xb4\xe7\x93\xe9\x90\xe7\x0f\xe5K\xe3b\xe3\x14\xe6\xc6\xeb=\xf4"\xfdI\x05\xaa\x0c\xd7\x12\xaf\x17}\x1a\xe9\x1a\x8e\x1a\x07\x1b\x97\x1e\xc6#\xc3(M*\'(\xe4"I\x1b\xd4\x13\x9c\r\\\n\xdc\x08\xcd\x07\xaa\x04b\xffH\xf9\xba\xf3f\xef\x0b\xec\xb7\xe9\x89\xe8=\xe8\x8a\xe8\xc4\xe9\x18\xec\xf7\xeeK\xf1\x00\xf3\x02\xf4\x11\xf5\x87\xf6\xd9\xf9\x0b\xfe\xdf\x01b\x04\xee\x04\xd1\x04o\x03\xfc\x00\xd8\xfe\x9f\xfd\x06\xfdd\xfc,\xfa\xa9\xf7A\xf5M\xf3\x85\xf1\xab\xef\xac\xed\xa8\xec\xd0\xecp\xed\xae\xee\xda\xefn\xf1\x8a\xf3"\xf5\xc6\xf6\x19\xf9\x0f\xfc_\xff$\x02\x0e\x05d\x08_\x0c\x01\x10&\x12\x17\x13Q\x13\x14\x142\x15\x0b\x18b\x1fK,\xd89\x86@\x8e<\x991\xf4\'t$\xb5&\xcf+\xfa1\xa16\x036\x7f-s\x1e.\x0e\x1b\x02\x97\xfb\x81\xf93\xf8\x1d\xf55\xf0k\xeat\xe5\x7f\xe0\xaa\xda\x00\xd4\xfb\xcd\xa8\xc9r\xc8i\xcb\x1d\xd2T\xda\xf2\xe0g\xe4\'\xe5]\xe4\xb4\xe3e\xe5\x07\xeb\xf4\xf4\xcd\xffL\x08\x12\rD\x0f\xfc\x0f\xa4\x0f\xea\r\x13\x0cE\x0b\xca\x0b.\rj\x0e\xa3\x0e\xe6\x0c\xf7\x08\xf9\x02\xd0\xfb\x84\xf4\xd9\xee]\xec\xcd\xec\xca\xee \xf0\x16\xf0:\xef\xb4\xed\xfe\xeb\xe5\xea\xdc\xeb\x94\xefW\xf5\xf7\xfb\xe6\x01\x90\x06\x11\n\xf5\x0c?\x0f\xd3\x10\xb1\x11\x0c\x13c\x15\x8a\x18\xef\x1a\xfb\x1bK\x1b+\x19\xa0\x15\xed\x10\xec\x0b/\x07\xe3\x03\xdd\x01\x94\x00\xfc\xfe\xba\xfc\x8d\xf9\xe5\xf5\xef\xf1g\xee\xe3\xeb\xf6\xea\xb1\xebx\xed\xac\xef\xa0\xf1\xea\xf2k\xf3y\xf3q\xf3\xf6\xf3\x0e\xf5\xd4\xf6\xfe\xf8\x7f\xfb\xd0\xfd\xa0\xffX\x00\x1c\x00G\xff&\xfeV\xfd\xbe\xfc\xcb\xfc2\xfd\xe5\xfd\x94\xfe\xce\xfe^\xfep\xfdf\xfc\xb9\xfb\xa3\xfb\x05\xfc\x18\xfdo\xfe\xce\xff\xf0\x00\x88\x01\x8a\x01\x0b\x01B\x00\xea\xff\xa1\x00A\x02|\x04y\x06\xc5\x07V\x08\\\x08\x08\x089\x08\xff\x08\xe5\nj\x0ej\x14\x14\x1ds&\xde,\xc6-\xd4)\x19$\x17 \x8c\x1f\xa9"\xf1\'?-0/\xba+\x88"\xa1\x15\x97\x08\xc7\xfe\x01\xfa\xed\xf8x\xf8\xf3\xf5\x84\xf0$\xe9c\xe1\x19\xdaC\xd4Z\xd0\x97\xce:\xcel\xce\xf4\xceF\xd08\xd3\xe0\xd7\x9d\xdd5\xe3e\xe7\xe5\xe9\xfc\xebr\xef^\xf5\x06\xfd\x90\x05\x86\r\xf0\x13l\x17\x9a\x17\x95\x15P\x13\x88\x12\x99\x13\xdc\x15\xc4\x17$\x18\xed\x15\x94\x11\xd0\x0b\xd4\x05\xb0\x00\xf9\xfc\xcb\xfa^\xf9\xfc\xf7\x05\xf6\xca\xf3\xbd\xf1t\xf0\xab\xef\xfb\xeeA\xee\xcb\xed\'\xeeW\xef\xa4\xf1\x13\xf5Q\xf9\xa3\xfd\xe0\x00\xa2\x02\xd4\x02F\x02\x90\x02[\x04\xb5\x07\xa1\x0b\xd9\x0e\x0f\x11\xc5\x11\xd9\x10\xc1\x0e\x02\x0c\xdf\t\x95\x08E\x08N\x08\x08\x08\x05\x07&\x05\xe8\x02\x0c\x00\x81\xfc\xa2\xf8o\xf5\xc7\xf3\xaf\xf3\x81\xf4\xb5\xf5\xd4\xf6_\xf7\x1a\xf7\xdb\xf5\x07\xf4c\xf2\xc9\xf1\xff\xf2\xd9\xf5\x93\xf9\xcf\xfc\xf4\xfe\x06\x00d\x00M\x00\xd8\xff\xd5\xff\xb2\x00v\x02O\x04\xaa\x05Q\x06;\x06|\x05,\x04\xb0\x02n\x01\xbf\x00n\x00z\x00`\x00\xe1\xff\xdc\xfe\x7f\xfd?\xfca\xfb\xea\xfa\xdf\xfa\xf8\xfa\x13\xfb,\xfbH\xfb\x96\xfb\xce\xfb\x18\xfc\xd4\xfcx\xfe\xc5\x00z\x03\xea\x06$\x0cG\x13\xd5\x1ao \xec"\xea"\xb2!\x7f \xe7\x1f\xfc 
\x00$=(\xba+\xfd+q\'\xb1\x1e\xe7\x13\xd3\t\\\x02<\xfe\x9c\xfc\xde\xfbO\xfa\\\xf6\x96\xef\xb6\xe6\xa9\xddf\xd6\x93\xd2&\xd2:\xd4:\xd7\xfa\xd9\xed\xdb4\xdd<\xde\x85\xdf\xe4\xe1\x8a\xe5\x88\xea/\xf0.\xf63\xfcv\x02o\x08\xaa\rX\x11:\x13\x80\x13\xc9\x12\xee\x11\x85\x117\x12\xbe\x13\x87\x15\xe7\x15\xdb\x13\xf0\x0e0\x08\x1a\x01\x08\xfb\x16\xf7>\xf5#\xf5;\xf5`\xf4\xcd\xf1\xed\xed\x15\xea\x85\xe7\xa2\xe6;\xe7\xc7\xe8+\xeb"\xeeS\xf1\xb5\xf4\xff\xf7B\xfbA\xfe\xb6\x00\xaf\x02Z\x04\x84\x06\xb6\t\xff\r\x8f\x12R\x16C\x188\x18\xc6\x16\xa0\x14\xb2\x12O\x11\xd0\x10\xdc\x10\x8d\x10\xfb\x0e\xe7\x0b\xd2\x07\x98\x03\xe3\xff\xa1\xfc\xb8\xf9\x18\xf7\x02\xf5\xb3\xf3\x16\xf3\xf5\xf2\x10\xf3#\xf3\xf9\xf2X\xf2h\xf1\xba\xf0A\xf1Q\xf3\xa8\xf62\xfa\xf9\xfcV\xfe~\xfe<\xfe8\xfe\xf7\xfe;\x00\xf0\x01\x80\x03\x81\x04\x84\x04\x8e\x03\x06\x02\x91\x00\xb7\xffK\xff\x0e\xffe\xfe\x8b\xfdg\xfcX\xfb`\xfa\x8a\xf9\x1e\xf9\x11\xf9;\xf9!\xf9o\xf8\x8e\xf7\xf6\xf6f\xf7\x07\xf9m\xfb\xea\xfd6\xff[\xff\xe7\xfe\xf5\xfeS\x00\x83\x03+\t\xe2\x11:\x1c\t%\xfd(\xb5\'\xf1#0!\x82!\xdf$K*\xe3/\xb93\xdc3m/|&\xe7\x1a\xec\x0fb\x08\xd5\x04\xa0\x035\x02.\xff\x08\xfa\x96\xf2Q\xe9o\xdf\x1c\xd70\xd2\'\xd1\xbe\xd2f\xd5\n\xd8H\xda\x0e\xdcd\xdd:\xde.\xdf\x93\xe1(\xe6\n\xed\xd4\xf4V\xfc\xb6\x02\xfc\x07\xdd\x0b9\x0e\xf0\x0e\xb1\x0eI\x0e[\x0e\xfe\x0e\xd1\x0f\xaf\x10\x00\x11\x18\x10\xc5\x0c\xda\x067\xff\xce\xf7b\xf2p\xef\xe7\xee\xa8\xef\x80\xf0\x07\xf0\xbc\xed7\xea\xe2\xe6a\xe5\x92\xe68\xea\x00\xef\xce\xf3\x03\xf8\xab\xfb\xc6\xfe~\x01i\x04\x89\x07Z\x0b\x19\x0f\x9e\x12x\x15\x94\x17\x8d\x19\xea\x1a\x8e\x1b\xe7\x1at\x19\xaf\x17\x00\x16]\x14\x87\x12p\x10\xe7\r\xfc\n\xb4\x076\x04v\x00\xc4\xfcF\xf9\xf0\xf6z\xf5w\xf4J\xf3\xdf\xf1\xce\xf0\xf4\xefj\xef\'\xef\xa8\xef\r\xf1\xd7\xf2\\\xf4U\xf5=\xf6O\xf7\xb8\xf8\x18\xfa \xfb\xa2\xfb\xee\xfbl\xfc\x08\xfd\xb0\xfd\xdb\xfd\xc1\xfd[\xfd\xd5\xfc\xf4\xfb\xd8\xfa\x00\xfa\xcb\xf9/\xfaa\xfa\x0c\xfa\x02\xf9\xdf\xf7\xf5\xf6\xb3\xf6\xe2\xf6_\xf7.\xf8\xf0\xf8m\xf9U\xf9\xc9\xf8~\xf8\xef\xf8c\xfa\xa9\xfcp\xff\x81\x02\xfe\x04\x80\x06E\x07T\x08\xa3\n\xe8\x0e\xec\x15\xf8 E.\x9e8\x88:Z4\x0f,\x91(5,=4\x87<\xe9@v?j7u*z\x1br\x0ey\x06\xef\x03\x94\x03\xd2\x00s\xf9\t\xef\xe5\xe4%\xdcE\xd4\xc1\xcc\xc5\xc6\xa8\xc3I\xc4\xe9\xc7I\xcd\xac\xd2.\xd6\x82\xd7\x9f\xd7\n\xd85\xda\x07\xe0\x92\xea\x99\xf8L\x05\xf8\x0c|\x0f\xb0\x0fK\x0f\x9e\x0e\xd6\r}\x0e\x8c\x11\x9b\x15G\x18\xdc\x17D\x14\xaf\r\xe0\x04J\xfb \xf3\xec\xed\\\xec<\xeeN\xf1\x84\xf2\xdd\xef$\xea\x9a\xe4\xcb\xe1\\\xe2b\xe5\x8d\xeaZ\xf1\x9b\xf8\xe3\xfe\xfb\x02n\x05:\x07\xcb\t\xa2\x0cD\x10\x9e\x14\x11\x1a\xc1\x1f\xfa#\x18%\x8a"\x90\x1e\xee\x1a\x98\x18k\x16\xb7\x14\x80\x14\xea\x14\x8c\x13\x1f\x0e\xbd\x05j\xfd\xa3\xf7\xee\xf4Y\xf4\xd5\xf43\xf5W\xf4E\xf2,\xefg\xeb\x19\xe8K\xe7\xb0\xe9\xde\xed\xe8\xf1 \xf5\x95\xf7\x88\xf8\xa0\xf7\x9b\xf5N\xf4\xe8\xf4)\xf7#\xfa\xfc\xfc\xe9\xfen\xff\x9a\xfe\x8e\xfc\x92\xf9\xd3\xf6y\xf5A\xf6z\xf8\xc1\xfa\xa2\xfb\xdb\xfa\x97\xf9K\xf8\x84\xf7\xe6\xf6\xdc\xf6\x0c\xf8\xaf\xf9\x99\xfb\xe1\xfc\'\xfd\x81\xfdc\xfe\xe5\xff-\x017\x01\xcf\x00W\x01\xf3\x02\x1e\x05\xbd\x06\x7f\x07O\x08g\t\x1d\x0b6\r\xd3\x0f\xfd\x12\xef\x17\xe3\x1e\x16\'\x11/ 
414\x84/\xb7)\x1e(\x1e-\x125\xad9\xfc6\xef.\t%\xd0\x1ab\x10?\x07\x8b\x01\xa7\xfe\xba\xfb\xd3\xf5S\xed\xb1\xe3\x96\xda\xd4\xd2\x04\xcd8\xc9\x0e\xc7\xe3\xc6\xc5\xc9\xb4\xce\x9b\xd2I\xd3M\xd2\xe7\xd2\x0e\xd7u\xde\x11\xe8\x8a\xf29\xfc\xf5\x03\xcd\x07j\x086\x08\xba\nO\x105\x16&\x193\x19#\x18\x92\x16\x8d\x13\xe3\x0e\x90\t\xb3\x04\xc0\x00|\xfd\x88\xfa\xad\xf7\xbe\xf4\x03\xf2\xd0\xee\xba\xea\xec\xe6\xaa\xe5\x0f\xe8\x01\xec\xfa\xeeV\xf0D\xf2\xfc\xf5W\xfaN\xfe\xa1\x01\xee\x057\nV\x0ex\x11\xe6\x14\x11\x18\x02\x1b<\x1c\xc0\x1b\x8d\x1a\xee\x18\x80\x18n\x17\x9b\x152\x12\n\x0f\x1b\r\x92\x0bd\x08\xba\x02\x8f\xfc\x14\xf8\t\xf6^\xf5\xe0\xf4\xfb\xf3\xa3\xf2\xb6\xf0\xd1\xee\xb5\xec,\xeb\x96\xea\n\xecr\xefM\xf2\xf7\xf3\xbe\xf46\xf5\xff\xf4\xac\xf3\xc7\xf2/\xf4\x84\xf7\xbd\xfa\xbc\xfc4\xfd\xbc\xfc\x85\xfb\xcf\xf9\x85\xf8P\xf8\x9c\xf9i\xfcP\xffu\x006\xff\xea\xfc\x1e\xfba\xfa\xd4\xf9\xde\xf9\xa2\xfb\x1f\xfey\xff#\x00\x8f\x00T\x00\x10\xfd~\xf8\xe3\xf8\xcf\xfe\xc9\x05B\tW\x07\xfc\x03V\x01P\x01}\x03\xb6\x05=\x08J\n\x04\r\xd1\x0f;\x17S$j/a.\x99!V\x1a<"p0\xb47\xab7N7S6\xe4/\x80%\x86\x1d\xcb\x183\x13z\r\x14\n\xde\x08\xcf\x04\x9d\xfb\xeb\xee^\xe0E\xd5\x90\xd0\x1c\xd2Y\xd5\xde\xd4\xf7\xd1\x14\xcf\xa0\xcc\xdb\xc9\x07\xc8\x8e\xca\xd8\xd1m\xdb\xa0\xe4\x8a\xec\xb9\xf3\xd1\xf7K\xf8\x9b\xf8\'\xfd\x9f\x05\xfd\rr\x13\xa7\x17\\\x1a\t\x1bi\x18\xf2\x12\x86\r\xb0\n\xa8\x0b/\r\xbb\x0bY\x07\xbc\x01\x88\xfb\xb8\xf4r\xee\xf5\xea\x9e\xea\x04\xeb\xf7\xeb-\xedk\xee\xdd\xeda\xec\x1e\xec\x0f\xeeL\xf2\xf7\xf8x\x00\x11\x07\xcf\t\xc3\n\xc7\x0ba\x0e,\x110\x13\x14\x15\xfb\x18\xdc\x1di \xe9\x1e\x1c\x19S\x131\x0f\x11\x0f\xbd\x10\xd7\x10W\r8\t7\x05d\x00\xc4\xfa\xfe\xf5\xda\xf3\x8a\xf2\x0e\xf2\n\xf3Q\xf3r\xf1\x8c\xed\x03\xea\xbb\xe8\x1e\xea\n\xed\xa8\xf0!\xf3\xbb\xf3\xad\xf2O\xf2\x96\xf2\xec\xf2\x98\xf3\xf1\xf5!\xf9\xaf\xfb\x16\xfd\xa3\xfd\xaf\xfc\x95\xfbs\xfb\xfb\xfc\xd8\xff\xfa\x01\xa4\x02b\x01\xe7\xffQ\x00\xa7\x00\xc9\xff\x9d\xfeD\xfdS\xfe7\x00\xd9\xff?\xff\xb6\xfd&\xfd\xdd\xfd\x99\xfe\xdc\x00\x80\x02n\x02\xe9\x02\xc3\x03\xea\x03F\x04\xe7\x04\xe1\x062\t\x01\x0b\x83\r\x8f\x0fH\x15t\x1fJ);\'&\x1e\xc6\x1cU& 0\x0f0p.\x8e1\xc22\xe1,\x06%Z \x9f\x1b\xd7\x13\xe7\rk\x0e\x89\x0e\xe4\x06\xeb\xf9f\xeef\xe7\x0b\xe2A\xdc\xd5\xd8e\xd7\t\xd6\x82\xd3\xbb\xd1\xee\xd0d\xcf?\xcd\xf8\xcc\x14\xd1\xec\xd7\x1b\xdfS\xe5\x06\xea:\xed\x08\xf0\x1b\xf4"\xf9@\xfeO\x02\xb3\x06(\x0c\x8d\x10\xc7\x12\xf6\x12\xa1\x11K\x0f\xa9\x0c\xef\x0be\x0c\x0e\x0c\x06\n\x98\x07\xbb\x04\x08\x00w\xfa\xe9\xf6\xa1\xf4q\xf26\xf1m\xf2\xcc\xf5\xd2\xf5c\xf3.\xf1L\xf1)\xf2m\xf4\x95\xf8~\xfe\x0b\x03/\x060\t\xf0\t\xa6\x08\xe1\x07\x1a\n\xe3\x0e[\x15\xf7\x1a\xfd\x1b\x96\x17\xef\x11\xb7\x0f\x05\x10\x08\x0eM\x0c\xd0\x0be\x0c 
\x0cK\t\xdc\x03\xa2\xfc\x90\xf5\xc5\xf1\xfa\xf2\xec\xf4r\xf5\xe8\xf3\xdf\xef\xe3\xeb\xf7\xe8p\xe7\xdb\xe6\xd3\xe6\xbc\xe7a\xeb`\xef\xde\xf1\xc6\xf1\x86\xf0M\xef\xf1\xf0D\xf4\xb0\xf7Y\xfb\n\xfe\xa0\x00#\x03\\\x03z\x02\x94\x01P\x01`\x02\xf2\x03/\x08\x92\x0bh\t\xa8\x06\xf5\x03\xcc\x03$\x05\x16\x03\x01\x03\xaf\x05\xea\x06\x85\tW\n\x89\x07}\x01%\xfe\xd2\x01o\x04\xee\x05\x11\tN\n\xb9\x07H\x06v\x06\xd5\x06\xf8\x05\x8d\x05]\x07g\x0b\xee\x12b\x1a\xb1\x1b\x05\x17\\\x14?\x16\n\x19H\x1b\xe8\x1d\x8e!\xf7"l!\xf4\x1f\xbd\x1e\xe0\x19\xfb\x112\r\xa6\r\xf4\r\xec\n%\x07c\x03\x94\xfc\xff\xf3Z\xeeV\xeb{\xe7\xa2\xe2\xd3\xe0Y\xe1w\xe0Q\xdd8\xda\xf4\xd7O\xd6;\xd5\x8b\xd7\xb7\xdc{\xe1+\xe4\xef\xe5-\xe91\xec\xa8\xedc\xf0\xfb\xf4\xb9\xf8-\xfd]\x02\xac\x06\x0f\t\x97\t>\tf\t4\nW\x0b9\r\x02\x0eF\x0e\xb7\rM\x0c\x90\n\x9d\x07\xcc\x04L\x03\xc0\x024\x03$\x04]\x03\xfa\x01\x8e\xff\xe1\xfd\xc4\xfd\xdd\xfd\x8d\xfd\xdc\xfe,\x01J\x02B\x03\xdd\x041\x05J\x03\xfb\x01\x87\x03K\x06R\x07\x15\x08\xd1\x07V\x08m\x06\x84\x03\xdd\x02\x87\x008\xffT\x00\r\x002\xff\xe9\xfd\xcf\xfbJ\xf8e\xf6\xe0\xf60\xf5\xbc\xf2\xf5\xf4\x14\xf8\x12\xf6\xd9\xf0%\xf5\x06\xf8\x96\xefi\xf0\xe3\xf7 \xf9\xd0\xf5\xf1\xf6x\xfe\xed\xf9\xae\xf6z\xfc\xf1\xfb\xf1\xf7\x86\xfbc\x02\xae\xfd\x85\xfb+\x06\xdf\x01\x10\xf7V\x00r\x08\xcb\xfe`\xfa\x85\t\x97\x0f\xce\xfcy\x01\x17\x16\x84\x07\xa1\xfc\xe7\ny\x11\x02\t\xa3\x08\x1e\x12\x93\x0e?\x05\x8b\rQ\x11\x87\n(\x08\xe4\x0b[\x0c\xae\t)\x0c\x8c\x0c\x07\tZ\x03a\x05\x01\x07\x9c\x04\xeb\x03\xf9\x02\x8a\x04\xe4\x04\xdf\x00\xc9\x01e\x05\xf9\xfe\x03\xfe!\x06@\x06S\x01\xac\x05V\x08\xba\x05]\x03\xfd\x04\x8d\x07\xb2\x05\xc0\x01\xba\x04\xd8\x07F\x05\xcb\x02\xd8\x02i\x00\xcd\xfb$\xfb>\xfc\x15\xfb\x02\xf9\xe4\xf6\x04\xf5\x9e\xf4\xdc\xf3\xbd\xf1\xa0\xef\x8c\xefD\xee\xe4\xed\xd3\xf0c\xf2\x82\xef\x1b\xf0q\xf2&\xf4\xf6\xf2F\xf7B\xf9\xe0\xf5j\xfb\xae\xff\xe3\xfe\xa3\xfd\xbd\xff#\x03\xa9\x02<\x02\xab\x04F\x07\x1b\x04\xcb\x02\xb5\x07\xc2\x06\xa6\x03\xc2\x05\xa5\x04\xc9\x04\x96\x05\x13\x04\x10\x05\xcc\x04\xa3\x02r\x03\xb7\x04\xcf\x04\xd3\x01\x83\x040\x02\xb2\x01j\x04:\x01\x1b\xff#\x02\xb1\x01\xd3\xfb\x08\xfc\xc7\x00%\xfd\xec\xf4\xa3\xfb"\xfc\x08\xf7\xd4\xf7?\xfb\xb1\xf4|\xf2\x16\xf9\xd9\xf8Z\xf2\xf4\xf4V\x01\'\xf3\x15\xf4)\x00\x1a\xfdY\xef\xaf\xfb\x9c\xffd\x01\xc5\xf8\xcb\xf8\x95\x0c\xbc\xfea\xf6\xa7\t\xd0\x05%\xfaC\x047\x06C\x04\x06\xff5\t\xe7\x06\x07\xf8\xec\x07}\x0c\xa3\xfa\xbf\xf8"\x08\x81\x08\x03\xf7\xeb\xfb\xcb\x0e\xd2\xff\xf3\xf8\xb8\x05\x88\x03U\xfd\xad\x019\t~\x00\xc4\xff\x98\t\xbb\x04*\xfe\xa1\x0bk\x0c\x97\xfdI\x08M\n\xb9\x04\xe7\x04\xb6\n\xc3\tV\x05\xc3\x08\xed\x05\xf3\x03\xe7\x0cY\x07\xdd\xfbk\x05D\x10\xf5\xfa\x1a\xfd\xee\x10E\x01C\xf7\xf7\x04\x88\x05\xf4\xf4G\x04b\x00\xc3\xf8\xea\x00b\xfb\xc9\xfb\x8d\xfcG\xff\x18\xf7j\xf3\xda\x03,\x03\xd3\xf3\xb8\xf8\xc7\x06\x1a\xfa\x8d\xfb\x08\xfe\xdc\x03\x80\x00\xdc\xfa\xbe\x08\xa8\x02:\x00\xd2\x05\x9d\x05\x1f\x00[\x03\xbd\x06\x91\x03\xf4\x03\xf8\x03P\x04\x18\x02f\x01\xc6\x01\xfd\x01\x8a\x01\xa3\xfd@\xfd\x11\x05#\xfdG\xfa\xd2\x01g\xfdg\xf7\xe8\xfd\xb7\x02\x94\xf7\xf8\xf5q\x02\xf7\xfd\xda\xf3\xa8\xfd:\x01\x86\xf7D\xf5\xc7\x03\xa8\xfd\xc8\xfbt\xfc@\x00 
\xfe\x04\xfee\x060\xfa\xf7\xf8\xcc\x06\xdb\x00\\\xfa\x11\x00\x07\x03\xa3\xfb|\xfc\xc7\x04\xef\xf9\x17\xf8\xe5\xfc\xd7\x05\xb0\xef\x81\xfa\x1f\x08\xdd\xf8Q\xf1\xe7\xfd\x88\x01\x89\xf5Q\xf3\x00\x00\x08\x04\xd6\xed\xf1\x01\x8f\x00?\xf4O\x01\xcf\xfa\x89\xff\xfe\x01\xa0\xf8y\x06T\x01\x1c\xfbD\t[\x05\x85\xfa}\t\xf5\x07n\xfd\x0f\n\t\x04\xa1\x02\xa4\t\xff\x01c\x01V\x0bv\x04\x7f\x00\xf9\x08R\xff&\x04]\x07\xfa\xfc\n\x01P\n\xcb\xfc*\xff\xff\x05,\xfdb\xff\xe1\xffH\x06\x1e\xfbS\xf1\xbd\x07@\x10n\xeb\x1c\x01\xcf\x0b0\xf5\x9e\xf2(\x0c\xb1\x06\xb4\xef"\x01\xaf\t\xd3\xfeF\xf7`\x05Y\xffy\xff\x10\xfae\x05\x83\x04\x80\xfc\x97\xfd\x0f\x07I\x02\xfb\xf4\x1e\xfd\xd9\x0b\xa2\xf8\x98\xf6\xd7\x06p\x01\xd6\xfc\x11\xfdB\xff\xc1\xf3\xb6\x04\x98\xfa\xbd\xfb\x95\xffe\x04\xc0\xfe\xc4\xf6\x13\xff9\x06\x18\xff\xc4\xf4e\x0b\xee\x069\xf7g\x01R\x11\xd4\xfd \xfd\x89\t\xc2\nE\xf9D\x06\x07\x0fb\xff\x94\xfd\x9e\x08l\x0b\xc7\xfa\xd5\x03a\x0c\xaa\xf8\x8b\xfb\x9b\x0b%\x02v\xfc,\x04\xf6\x00\'\xff\xfd\x01\xc3\xfd\x94\x03\xfa\xfa\x14\xff,\x03\xe6\x01\xfb\xfc*\xfe+\x01[\xfa^\xff\xbe\xfdQ\x00\xf8\xfd&\xfd\x1e\xfb\x1c\x06\xbf\xfe\x00\xf2\x8e\x0b\xd0\xf8\x96\xf8\xe9\x02\xcc\xfd\xe2\xfeH\xf8\xf8\x04\xc0\xf9\xc0\xfd\x1b\xffh\xfdm\xfc\x0c\x01]\xf5\x90\x00y\x03\xbb\xf5C\x08{\xf6\xe4\xfc\xa2\x00\xca\x00\x8f\xf9\x95\x02\xb6\xf6\xa2\x04&\x00\x85\xf9\xde\x02\'\xffn\x00\x13\xff\xaf\x02\xf8\xf5~\x0b+\xfa6\x02f\x05f\xfd\xe5\xfe6\x07\xff\x02B\xfc\x1e\x00\xc6\x07\xf4\x01\xa9\xfbp\x0bk\xfb\xce\x03\x0b\x05\x96\x01\xb9\xfa8\x05Z\x00\xf7\x02\xcf\x00;\xfc\xf6\x00\xec\x02\xed\xff\xbb\xfe\x85\xfc\xa5\x02\x11\x02\xbd\xf4\xac\x08\x9f\x06\x00\xf1\x17\x01n\x10u\xec\x88\x05x\r\x96\xf3\xdb\xffJ\x0eV\xfa\xa8\xfc\x0c\x07\xe4\xfd\x1b\x06l\xfb\xff\x05\x95\x01[\xfd\xfc\xfeJ\x055\x02\x17\xf6_\x07\xb9\xf7\x15\x04\xa6\x019\xf4\x0e\x06\xb7\xfc\xb7\xf9\xd5\xffV\xfd&\xfbz\x02\xbf\xee\x82\x0f\\\x02x\xea\x17\x08E\x00\xff\xfdo\xf5\x9d\x0f\xc5\xff\xcc\xf9\x8c\xff~\x00\x03\x073\xf49\r\x19\x01\xb7\xfd\xa8\x01!\x02\xf4\x01\xe1\xffh\xfeo\xfcX\n\xa5\xfdi\x03r\x04\x0e\xfd\xed\xffb\x06\xdb\xf9\x02\x00\xba\rL\xf4\xc0\x02I\x08\x92\x038\xf8\xc8\x03T\x03\xd6\xf8\x9d\x08p\x05\x1f\xff\xd8\xf5\x96\x14\xae\xfdH\xf0/\x10\xcf\x07\xfa\xed\xa7\xfd\xff\x15\xa7\xf6\x9f\xf9\xcb\x08\x80\x02\x9f\xf7\t\xf2|\x0c\xca\x00\xd5\xf1I\x00\xe7\x06\xae\xf9s\xef\xe9\x0b@\xf7\x8f\xf2I\x06\xe6\xf4\xe4\xfc&\x05\xaf\xf3`\x02]\xf9\xb7\xfc\xc1\x003\xf9\x1f\x03O\xf9\xeb\x04\xa7\xfa\x1c\x07\x1b\xfb\xd0\xfe5\x05\xbc\xfe\xda\x02\xda\xfdE\x05\x1a\xffH\x08\xfb\xfd\x1e\x05\xbb\x08\x0e\xf4\xa5\x04E\x08(\x02\xbc\xfd\x89\xff\xf9\x0fg\xf3\t\xfe\x90\x12\xa9\xfb\xd6\xef\x18\x06\xd9\x11\x95\xf0B\xfb:\x14\x9a\xfe\xda\xf1\xff\xfc\xb5\x0e\xde\xfd\xe0\xf5]\x0cK\xfd\xc6\xf2\xaa\x14r\x01h\xe9X\x04t\x0b\xaa\xf8\x9f\xf5\xb3\x0b5\x07\x86\xf2\xe0\xfa\x0c\x0f\xce\xf2H\x01\x9d\x00i\xfb\x1c\x05\x10\x00\x7f\xfcs\xfb\x93\x05\x0c\xfa\x90\x00\x1f\xf8O\x05\x17\xff\xb1\xff\xbb\xf2j\x05\xec\x04\'\xec\x99\x10x\xff\xf8\xec\xc3\x06v\x0b\xcc\xe5\x82\x04\xe7\x19`\xe6\x8f\xfa\xe6\x1a\x8a\xf2\x1b\xf7\x88\x05G\x08{\xfbd\xf7\xc7\x16\xc7\xfa\xe0\xf4\xfb\x0c\xd2\x04\xdc\xf9[\x07\xe6\xff"\xfa$\x0f\'\x01P\xfa9\x08\x9e\x04\xe8\xff\x83\xffq\xff\xac\x06R\xff\x80\x01W\x03\x89\x02}\x00\xb0\xfcX\x07x\x00\xed\xf7p\n\x08\xfa\xd0\x00%\x08\xb4\xf6\\\x05o\x02[\xfd&\xf9\x8b\x08\xee\xf9\x89\xfcq\tN\xf8\x80\xf9#\n\x04\xff\x9c\xf2\x9e\r\xcf\xf5\xc1\x005\x05\xe1\xf7\xda\xf9\x91\t\x1b\xfb(\xf4\x1d\x13h\xed\xe2\xfb\xe1\r3\xf5}\xf8\x06\xfe\xf4\x07\x88\xf4\x87\xfa\x02\x05,\xffu\xfa\xd2\xf7\xf4\x04!\x00\x17\xfcQ\xf6\xb9\x0b\xf6\xfc\xb3\xf5\xbc\x06D\x04\xc2\xfbj\xff\\\x06\x8f\xfa\x9c\x01X\
x03}\x02K\x00w\x10r\xf5\xeb\xffK\r`\xfbc\xfd\x82\x0c\xb3\x02\x88\xfb*\x00-\ns\xfc\x86\xf9\x01\t*\xfal\x018\xf5\x82\x11#\xfc\xe2\xef\x9b\n^\x06\xae\xf2\x88\x02@\x0c\x1d\xfa\xae\xf6\xb4\x13\x99\x01\x84\xeer\r\x00\x0c\xf1\xf7\x8e\xf1\x0e\x18\x97\xfc\x89\xef#\x0c\xde\x08\xa1\xefk\xff^\x0c\xde\xf2~\xfa\xfd\x08\x84\xf9\x81\xf3\x89\x06\xd0\x06\xf3\xec\x17\xfci\x0e\xe6\xf2\x99\xf3\x8b\x05_\x06\xb1\xf6\x85\xf3\xf5\x14\xa8\xfeH\xe2I\x12\xa8\x0f\xda\xe80\x01\xbd\x14\x8b\xf5\xd6\xf8#\r\x93\x046\xf3\x0e\x06~\x0b\x0b\xfd\x17\xfe\x08\x08\xfc\x06)\xf5p\x07\x0b\x08\xee\xfc\x8b\xf9\xd4\tf\x03\x0e\xfb\xc7\x02\x8c\x02F\x000\xfcQ\x00\x05\x01\x10\xfec\xfb\x03\x08$\x00\x84\xf9\x87\td\xff\xa6\xeeD\x03\xe4\x07L\xfd\x83\xfe\xb7\x02\xff\xfb]\xfd}\x05\x91\xf9\x97\xfer\xff\xad\xfa\x0e\x00\x14\x02d\x01_\xfcJ\xfb\xa8\xfc\x85\xfd\xd6\xfe\xb7\x02\x9a\xf6\x1a\xfd\xe8\x03\xd3\xfb\xb0\xfb\x9b\xfc6\x02\x94\xf5x\xfdn\x07\x81\xf9\x97\xff\x97\x03\xf2\xf6\xa1\xf6\xfa\x10\xe3\x01y\xf1q\x04\xd3\x04\xc7\xfe\xe9\xfb\xb8\x0c\xc5\xffi\xf7E\x03P\nD\tG\xf4\x96\x08\xe5\x08\xb0\xf6(\x07\x06\x0b\xad\x02W\x03\xec\x01\xa4\x012\x052\x02!\x05#\x01\n\x00\xcd\x03r\x02\xb8\x02E\xff\x19\xffL\xfe\xf1\xfd\xeb\x01A\xff\xec\xffz\xf8\n\xfal\x06\xa0\xf7\x90\xfd\xa2\xfe\xcd\xf9\xfb\xfe\x86\x001\xf8\xfa\xfb\xc2\x08\xac\xf3i\xf6\xa7\x07A\xff\xae\xf7\x9f\x00\x85\x01\xf7\xfa\xc9\xfb\xda\x02\xc6\xfb\xa3\xf8m\x03\xce\xfe\x87\xff\xb7\x00n\xff[\xfcd\xfe\x10\xff\xd1\xfdh\xfe \xfc7\xff\x90\x03\xae\x013\xfe\x1e\x01o\x01\xf4\xfdn\x03Y\x07\x17\x03\xe3\x01K\t;\x0b\xf8\x04\x80\nM\x0e\x8a\x04t\x05\xcd\x0f\xe8\x0c\xdf\x08<\x0cc\x08e\x08v\n\x05\n\xaa\x05\x8f\x01\x07\x05V\x02G\xfe\x8f\x02\xad\x01\x9e\xfa\x9b\xfa\xc0\xfa\xcf\xf8\xd9\xfa\xde\xf8I\xf5\x0e\xf8\x05\xf9]\xf7\x13\xfa\xa1\xfa\xaf\xf8e\xf8\x90\xfa\xee\xfd\x1e\xfe\x0f\xff\t\xfe\xa5\xfd\xc9\x00t\x01I\xff&\x00e\x03\xca\xfc\x95\xfa\x13\x02\xd8\x02\xcb\xfb\xa6\xfa\xa9\xfb\x0e\xf9\x0c\xf9\x04\xfat\xf9\xe3\xf4/\xf6\xb0\xf5\xf3\xf5\x80\xf8\xbe\xf5\x14\xf8\xca\xf2\x07\xf3\x0b\xfa\x84\xfc\xb1\xf6V\xf8\xe1\xfc[\xf7\xfb\xfb;\x00\x03\xff\xad\xf9r\xfb)\xffN\xfc\xbb\x01T\x01\xd9\xf9\x9f\xfd\xb9\x00d\xfd\x11\xfd\x0e\x00\xcc\xff\x8a\xf9,\xfc\xb1\x03\x87\x05\n\x02\x9c\xfdx\xffj\x03\x16\x04E\x05v\x05%\x07\x8d\t\'\x0bu\r\xa2\x0e\x90\rB\x0fM\x11=\x13\xcd\x19\xef\x1dH 
\xa7"k#\xd2"\xcc\x1e$\x1e\xd8\x1f\xef\x1f\xe6\x1b`\x18|\x18>\x17}\x11\xfc\n\x92\x03\'\xfdV\xf7u\xf2\x93\xf1\xbc\xef~\xeb\xb2\xe7\xd2\xe6\xc0\xe5[\xe3E\xe2#\xe0\xca\xde\x93\xdf\x0f\xe3\x14\xe8\xb7\xec\x14\xf0\x1f\xf2\x99\xf3=\xf7\x9c\xfb\xac\xfd\xc5\xfeb\x01\xbb\x02u\x05j\n\xba\x0b\n\x0c\xa5\x0b+\x07\xec\x035\x03=\x02*\xff\xb7\xfa\x08\xf8E\xf7\xb9\xf6\xcf\xf5\x8f\xf5&\xf1\xbc\xec\xa2\xebV\xecq\xee\x9f\xf0\xfd\xf1\xfa\xf2\xdb\xf6\xa1\xfc\x07\x01\x04\x03\x08\x05G\x04q\x06\x05\n\xd5\rJ\x11\x9f\x12R\x12\xc3\x11\xa1\x12\xef\x12\x1c\x10s\x0b\xb0\x07\r\x05u\x03\xb4\x02\x84\x01\x93\xfe\xbf\xfa"\xf7^\xf5\xfc\xf3\x10\xf22\xefG\xed\xa8\xed(\xf0\xd8\xf2\xf4\xf2\x19\xf3\xd4\xf3\xbd\xf3\x10\xf4\xa0\xf5\xca\xf6\xd1\xf7n\xf9\x18\xfbt\xfc\xce\xfdO\xfd\xf3\xfb\xe9\xfa#\xfa\xb0\xfa\x9d\xf9\x9d\xf8\xcc\xf8\xf4\xf9`\xfc\'\xfc\x9a\xfb\xa9\xfb\x87\xfc\x03\xff~\xffs\xfd\xce\x03U\x16O%?*\xc9\'\x0b*\xb52r5\xff2\x872F5T5e2\x9d2\xf14\xc10\x15#\x99\x13\xa4\t\x03\x04\x94\xfco\xf3\xc7\xeb5\xe7\xb2\xe3\xf5\xdf\xfa\xdc\xd7\xd9\xf3\xd5\x89\xd0\x89\xcb#\xcd\x84\xd5\xdf\xdc\x1b\xe1\x94\xe6\x86\xee#\xf6\x80\xfc|\x01D\x06+\x08\xe6\t\xa9\x0e\xb4\x13b\x19\xd5\x1d[\x1cL\x19\xb7\x16\x9f\x13\xe2\x0eR\t)\x03\xb1\xfbl\xf4\xac\xef\x8d\xef\x86\xed^\xe9\x06\xe4;\xde\xc0\xdaU\xd9P\xda\xb9\xdc\x17\xdes\xe1\x1d\xe8\xa2\xee\xa9\xf5\xe0\xfa\xab\xfe\x93\x01y\x05\xfe\t\xbf\x0f\x85\x15\x06\x1a\xc1\x1d\xcb\x1e\xd2\x1d\x16\x1e=\x1d\xfb\x19\xcd\x15v\x11[\x0e\x0c\x0b\xe8\x07\xe4\x04T\x01\xd7\xfc\xa4\xf8?\xf5\xcf\xf2%\xf2~\xf1\x99\xf0\xcc\xef{\xf0\xa0\xf2`\xf4A\xf5m\xf6R\xf7\xf8\xf8H\xfa\xc6\xfa\xe9\xfb4\xfc\xd6\xfb\x12\xfa\xa0\xf8A\xf8\xa8\xf8N\xf6\xc6\xf3\x8b\xf2l\xf1\xd4\xf1q\xf1\xa6\xf0\xd2\xf0\xd3\xf0f\xf2\xae\xf3\xcf\xf3\x9c\xf5\x8c\xf9\x9c\xfcX\x00\n\x05\xcb\x07\x8e\t[\x0b3\x0f\xad\x16\xff n+\xfa2_6\xd86\xc87\xd48\xf96\\2^,\x98(k\'\xa8$\xd1\x1f\xaf\x19\x9d\x11Z\x076\xfc\xa7\xf2\x90\xeb\xc4\xe5\x1d\xdf\xf5\xd8\x97\xd6\x8a\xd7D\xda\x89\xdc\xf8\xdc\x11\xdc\xa2\xdc&\xdf\x86\xe2X\xe7k\xed\xf6\xf3\xe9\xf8N\xfe\x99\x05f\ry\x12T\x14\xcc\x13w\x13\xbb\x13\xf8\x13E\x13\x88\x11\x03\x0f\xd7\x0bg\x08y\x04\x1f\x01\xf0\xfc\xf5\xf6\x9c\xefM\xe9\x85\xe5\xab\xe3\xf8\xe1o\xe0\xd3\xdf6\xe0\x83\xe1\x94\xe3\xb9\xe5~\xe8-\xeb\x9a\xedO\xf18\xf6x\xfcr\x02V\x07"\x0b\xf2\x0e\x18\x12m\x14\xc7\x15\x85\x16\xb2\x16+\x16\xa4\x14\xb0\x13\x12\x13\xcc\x11i\x0f\xaf\x0b\xdd\x07i\x05\xa6\x02T\xff\x05\xfd\xee\xfa\x18\xf9\xd7\xf7\xfd\xf6\x89\xf6\xed\xf6\x86\xf7_\xf7p\xf6\xd7\xf5\xf7\xf6G\xf8\x7f\xf8\xd1\xf8\x0e\xfa\'\xfb\x9a\xfbo\xfc\xa8\xfc\xcb\xfc\xce\xfd\x0b\xfe\x8a\xfdh\xfd:\xfe\xf3\xfe\x88\xffp\x00\xd4\x01\xd0\x02\x8a\x03d\x04\xdc\x04\xde\x05\x8b\x06\xdc\x06t\x07\xe2\x07\x97\x08s\tN\tz\x08z\x083\x08\x9c\x06\xf7\x04\xa1\x044\x05.\x05,\x04\r\x02\x8b\x00\x80\x00\xac\x00\xd5\x00o\x00\xf6\xff\x03\x00D\xff\x99\xfeE\xffr\x00\xc8\x01\xd8\x02\x19\x03.\x031\x04\x8b\x05k\x06i\x07\xf1\x08E\n\xa3\n\x8e\n\xd4\ns\x0b\xb3\x0b\xfa\n\xad\t>\x08\xdf\x06j\x05\xad\x03R\x02(\x01\xa4\xff\xc2\xfd\xb7\xfbT\xfa\x94\xf9g\xf8\xa9\xf6$\xf5c\xf4+\xf4\xda\xf3H\xf3/\xf3@\xf3D\xf3\xbf\xf3\x86\xf4\xbd\xf5\x1f\xf7\x9f\xf7z\xf8\xac\xf9{\xfa\x89\xfb\'\xfc\xbf\xfc\x85\xfd\xd2\xfdR\xfeq\xfey\xfe\xe5\xfe\xdb\xfe\xb3\xfe\x7f\xfe\x1c\xfe\xbf\xfd\xc9\xfd\xd7\xfd\xd7\xfd\x9a\xfd\xc1\xfdp\xfe\x8a\xfe\xf6\xfe\xcf\xff^\x00\x07\x01\x85\x01\xdf\x01\x99\x02i\x03\xf5\x03f\x04\x8a\x04\xa1\x04\xf5\x04]\x05]\x05\x1c\x05\xcb\x04_\x04\xc5\x03\x17\x03\x15\x03\xc9\x02\x1d\x02\xaf\x01\xde\x00j\x006\x00\xc9\xffW\xff;\xff=\xff\xcf\xfe\x95\xfe\xc9\xfe\xd6\xfe\x9f\xfe\xa2\xfe\xf0\xfe\xa3\xfe\xbb\xfe\x08\xff\xbd\xfe\xb4\xfe\xd5\xfe\x97\xfe4\x
fe\t\xfe\xcf\xfd:\xfdy\xfc\x0e\xfc\xbf\xfb\x04\xfbl\xfa5\xfa\xca\xf9R\xf92\xf9*\xf9\x19\xf9\\\xf9\x02\xfa\xbc\xfa3\xfb\xfd\xfbA\xfdU\xfee\xff|\x00\xc3\x01\xda\x02?\x04h\x05e\x064\x07h\x08\x01\t\x18\t\x88\t\xb2\t\xc5\t\xb9\t\x9d\t"\t\xc7\x08W\x08\xda\x07\x1c\x07\x8f\x06\x11\x06\x82\x05\xa1\x04\xdc\x03s\x03[\x03\r\x03\x86\x02\x85\x02\x93\x02l\x02:\x02u\x02z\x02w\x02C\x02(\x02\xed\x01\xd7\x01\xac\x01<\x01\xcf\x00N\x00\xd4\xff?\xff\x86\xfe\xac\xfd\x1d\xfd\xb5\xfc\x13\xfc0\xfb3\xfa\xa3\xf9(\xf9v\xf8\xdb\xf7g\xf7\x0c\xf7\x01\xf7B\xf7a\xf7\xa4\xf7\x12\xf8u\xf8\xa6\xf8\x02\xf9\x0b\xfa\xb8\xfa\x0b\xfb\xdb\xfb\xf4\xfc\x01\xfe\xe4\xfe\xdc\xffL\x00\xa5\x00M\x01\xd3\x01\x02\x02v\x02\xb4\x02\xbf\x020\x031\x03\x0c\x035\x03\x1b\x03\xa5\x02\x9c\x02u\x02,\x02\xf8\x01\x00\x02\xdb\x01~\x01~\x01\x98\x01I\x01\x0f\x01/\x01#\x01\xde\x00\xb9\x00\xdf\x00\xe1\x00\xb5\x00\xa3\x00\xad\x00_\x00\xfd\xff\xcf\xffz\xff\x02\xff\xa1\xfe\x0e\xfel\xfd\xe5\xfc\xa0\xfc8\xfc\xbc\xfb,\xfb\xbe\xfa\xb0\xfa\xab\xfa\xa0\xfa\xd5\xfaW\xfb\xbd\xfb)\xfc\xf9\xfc\t\xfe\xa2\xfe0\xff\x04\x00E\x01\x0c\x02\xc1\x02\x8f\x03J\x04\xd5\x04b\x05\xef\x05\x01\x06\xee\x05\xf0\x05\xbd\x05I\x05\x1c\x05\xa8\x04\x15\x04\x97\x03;\x03}\x02\xe9\x01\xb8\x01-\x01\xb0\x00q\x00o\x00;\x00a\x00\x93\x00\x94\x00\xc2\x00_\x01\xc8\x01\xff\x01]\x02\xd4\x02$\x03t\x03\xcb\x03\xe0\x03\x0c\x04\x19\x04\x10\x04\xda\x03\x9e\x03M\x03\x97\x02\xfa\x01[\x01=\x00X\xff\x97\xfe\x88\xfd\x99\xfc\xc2\xfb\xe9\xfa0\xfa\xbf\xf9Y\xf9\xf6\xf8\xae\xf8\xcd\xf8\xbb\xf8\xfe\xf8\x85\xf9\x15\xfa\xf3\xfa\xa9\xfb\xa0\xfc\x99\xfd\x7f\xfe;\xff\x0f\x00\xdb\x00\x82\x01\xf7\x01r\x02\xf8\x02B\x03f\x03\x81\x03f\x03\x14\x03\x1a\x03\xe4\x029\x02\xb2\x01\x8c\x01&\x01\x98\x00\x0f\x00\xf4\xff\xb3\xff\n\xff\xe3\xfe\x01\xff\xef\xfe\xe5\xfe\xf1\xfe\xfa\xfe6\xffv\xff\xa5\xff\xd8\xff\x00\x00=\x00Z\x00\x80\x00\xca\x00\xe8\x00\xed\x00\xb5\x00\xa2\x00\x9e\x00\x7f\x004\x00\xe4\xff\x7f\xff\x1f\xff\xba\xfel\xfe]\xfe\xfc\xfd\xbc\xfd\x87\xfdN\xfd$\xfd2\xfd\'\xfd1\xfdu\xfd\xbc\xfd0\xfe\x90\xfe\xe7\xfeG\xff\xb0\xff/\x00\x80\x00\xe3\x00+\x01&\x01:\x01\x84\x01\xa4\x01w\x01m\x01n\x013\x01\xd9\x00\x97\x00W\x00\x07\x00\xdf\xff\x9c\xff\x8e\xff\x95\xffy\xff`\xff\x98\xff\xb4\xff\xba\xff\xf1\xff9\x00\x96\x00\x0f\x01k\x01\xc4\x015\x02\x96\x02\xc9\x02\t\x03+\x034\x039\x039\x03+\x03\xef\x02\xc5\x02{\x02\x1e\x02\xb5\x014\x01\xb1\x008\x00\xaf\xff\x08\xffX\xfe\xc8\xfd\x9a\xfd\x1d\xfd\x94\xfcB\xfc\x13\xfc\x06\xfc\xfc\xfb\x10\xfc?\xfcb\xfc\xa9\xfc\x06\xfd\\\xfd\xd1\xfdA\xfe\x8f\xfe\xec\xfeU\xff\xa2\xff\xf7\xff2\x00k\x00\xa3\x00\xd4\x00\xd3\x00\xc9\x00\xe1\x00\xdd\x00\xc8\x00\xa8\x00\xab\x00\x9b\x00a\x00S\x00[\x008\x00\x16\x00\x00\x00\xfa\xff\xfa\xff\x1b\x007\x00:\x00K\x00o\x00\x80\x00\xac\x00\xf2\x00\x06\x01\x13\x01?\x01|\x01\xa1\x01\xdb\x01\xd9\x01\xcb\x01\xdd\x01\xbe\x01\xbd\x01\xb8\x01\x95\x01e\x01]\x01"\x01\xd0\x00\xa9\x00\x8e\x00G\x00\xeb\xff\xb1\xfff\xff8\xff)\xff\x04\xff\xd6\xfe\xc8\xfe\xcb\xfe\xbc\xfe\xbc\xfe\xc3\xfe\xdc\xfe\xd1\xfe\xba\xfe\xcb\xfe\xe2\xfe\xf4\xfe\x06\xff\x07\xff\x0b\xff\x1c\xff\x03\xff\xfd\xfe%\xff\x14\xff\x0c\xff\x04\xff\r\xff7\xffa\xffj\xff\x80\xff\x9f\xff\xcf\xff\t\x00<\x00r\x00\xa7\x00\xba\x00\xeb\x00/\x01n\x01\xb0\x01\xde\x01\x11\x02L\x02z\x02\xa4\x02\xe0\x02\xf6\x02\xf8\x02\xea\x02\xd5\x02\xb4\x02\x97\x02\x85\x02;\x02\xeb\x01\x92\x01;\x01\xc7\x00W\x00\xf1\xff\x8d\xff7\xff\xea\xfe\xbb\xfe\xa0\xfe{\xfeZ\xfe8\xfe*\xfe1\xfe0\xfe>\xfeg\xfe\xa1\xfe\xd2\xfe\xf8\xfe 
\xff=\xffm\xff\x83\xffx\xfff\xffb\xff^\xffZ\xffK\xff1\xff\x0c\xff\xd7\xfe\x9e\xfen\xfe7\xfe\xfc\xfd\xd4\xfd\xbb\xfd\xad\xfd\xae\xfd\xbc\xfd\xe4\xfd\x13\xfeQ\xfe\x9c\xfe\xee\xfeI\xff\xb5\xff\x12\x00f\x00\xbf\x00-\x01\x92\x01\xe0\x01/\x02w\x02\xad\x02\xd9\x02\x04\x03\x18\x03\x19\x03\x18\x03\x07\x03\xed\x02\xca\x02\xa0\x02d\x02\x1b\x02\xcd\x01}\x01+\x01\xde\x00\x85\x001\x00\xe4\xff\x8f\xff>\xff\xf5\xfe\xb0\xfev\xfe;\xfe\x06\xfe\xd7\xfd\xba\xfd\xa7\xfd\x93\xfd\x89\xfd\x80\xfdt\xfdg\xfda\xfdd\xfdm\xfdz\xfd\x97\xfd\xc7\xfd\xe5\xfd\xfb\xfd*\xfeg\xfe\x93\xfe\xc1\xfe\xfc\xfe9\xffo\xff\xb9\xff\x03\x00G\x00\x87\x00\xcf\x00\r\x01E\x01\x8b\x01\xc0\x01\xe2\x01\x05\x02\x18\x02&\x02+\x02%\x02\x1b\x02\xfd\x01\xd9\x01\xbb\x01\xa8\x01\x96\x01u\x01H\x01\x12\x01\xec\x00\xc1\x00\x9c\x00|\x00`\x00C\x007\x006\x00<\x007\x00\x1a\x00\x0b\x00\x03\x00\xf8\xff\xed\xff\xe8\xff\xf3\xff\xef\xff\xed\xff\xe9\xff\xe2\xff\xde\xff\xd3\xff\xb3\xff\x89\xfff\xffD\xff\x1d\xff\xfc\xfe\xd7\xfe\xb2\xfe\x8b\xfeh\xfeP\xfe;\xfe\x1f\xfe\x03\xfe\xf4\xfd\xf2\xfd\xf0\xfd\xfb\xfd\t\xfe\x12\xfe9\xfeo\xfe\x9c\xfe\xd1\xfe\r\xffP\xff\x85\xff\xbc\xff\xfb\xffA\x00\x82\x00\xc0\x00\x08\x01E\x01\x81\x01\xb4\x01\xd7\x01\xf4\x01\x10\x025\x02J\x02S\x02V\x02Y\x02]\x02]\x02W\x02E\x02%\x02\x07\x02\xe5\x01\xc0\x01\x92\x01^\x01 \x01\xde\x00\xa6\x00w\x00>\x00\xfc\xff\xbd\xff\x7f\xffG\xff\x11\xff\xd4\xfe\x96\xfe[\xfe4\xfe\x19\xfe\x05\xfe\xf9\xfd\xf5\xfd\xf4\xfd\xf1\xfd\xfd\xfd\r\xfe\x16\xfe\'\xfe>\xfe^\xfe\x82\xfe\xb5\xfe\xe6\xfe\x14\xffE\xffj\xff\x87\xff\xa9\xff\xcd\xff\xf6\xff\x17\x00:\x00]\x00\x80\x00\xa3\x00\xbf\x00\xdb\x00\xe8\x00\xef\x00\xff\x00\x13\x01 \x01$\x01\'\x01!\x01&\x01\'\x01\x16\x01\x04\x01\xf2\x00\xdb\x00\xd5\x00\xc6\x00\xbd\x00\xad\x00\xa1\x00\x9e\x00\xa2\x00\xaa\x00\xa8\x00\xa5\x00\xa4\x00\xac\x00\xbc\x00\xca\x00\xc8\x00\xc9\x00\xc4\x00\xc3\x00\xc8\x00\xb5\x00\x9c\x00\x81\x00f\x00Z\x00J\x00.\x00\x03\x00\xe4\xff\xd0\xff\xb1\xff\x8d\xffd\xff9\xff\x11\xff\x05\xff\xf6\xfe\xe6\xfe\xc6\xfe\xa4\xfe\x8d\xfe\x91\xfe\x90\xfeh\xfeX\xfeQ\xfeM\xfe^\xfez\xfe\x8d\xfe\x9b\xfe\xbb\xfe\xd7\xfe\xff\xfe\'\xffT\xffy\xff\xa5\xff\xd1\xff\t\x007\x00]\x00\x8c\x00\xa7\x00\xbe\x00\xd5\x00\xeb\x00\xfd\x00\x02\x01\xfe\x00\x00\x01\x05\x01\x04\x01\xfb\x00\xed\x00\xe5\x00\xd4\x00\xbc\x00\xa8\x00\x93\x00~\x00k\x00U\x00?\x00%\x00"\x00\x15\x00\x05\x00\xfc\xff\xf2\xff\xee\xff\xe6\xff\xde\xff\xd8\xff\xcf\xff\xbe\xff\xb8\xff\xad\xff\xab\xff\xac\xff\xa7\xff\x9f\xff\x97\xff\x94\xff\x84\xffu\xffc\xff\\\xffS\xff@\xff2\xff$\xff&\xff#\xff\x1d\xff+\xff3\xff>\xffG\xffT\xffb\xffv\xff\x8c\xff\x9c\xff\xbb\xff\xdc\xff\xf4\xff\x07\x00\x1c\x00,\x000\x00F\x00c\x00a\x00k\x00{\x00\x82\x00\x8b\x00\x90\x00\xa2\x00\x9b\x00\x84\x00\x83\x00}\x00s\x00k\x00j\x00`\x00S\x00M\x00N\x00^\x00U\x00P\x00Q\x00`\x00v\x00z\x00y\x00{\x00\x87\x00\x98\x00\xa3\x00\x9b\x00\x91\x00\x89\x00q\x00_\x00T\x008\x00(\x00\n\x00\xf4\xff\xd8\xff\xc3\xff\xaa\xff\x8d\xff\xa3\xffg\xffj\xff\x7f\xffH\xffe\xffH\xff\n\xffk\xff\x0e\xffU\xff\x1d\xff\x19\xff)\xff\xf5\xfe{\xff\x0b\xff_\xff^\xff,\xffi\xffj\xffp\xff\xad\xff\x94\xff\xcb\xff\xdb\xff\xba\xff\xe1\xff\x1f\x00\x00\x00\xf4\xff\xf6\xff\xfb\xff0\x00)\x00~\x007\x00\xc2\x00l\x00t\x00\xcb\x00\xa3\x00 \x01\xda\x00\xf2\x00\xf7\x00\x12\x01\xe9\x00\x9d\x01\x9c\x00\x1a\x02~\xfe\x1a\xffM\x0e\x90\tk\x02o\xfe\x02\xfd\xc5\xff\xff\xfc\xf2\xfc\x93\xfcQ\xfd\x93\xfc\xda\x00\x82\x03N\x00 
\xfd\x00\xfe\x07\x03\xf8\x02x\xfc[\xfc\xae\x01\xc7\n:\x08;\xfe\xa1\xfa\x88\xf8\xfb\xfa\xb6\xff\xef\xfda\xfd#\xfe\x88\x03\x11\nk\xfc\xbc\x02\xb8\x07\xad\x03\n\x06z\x05b\x04[\x03\xb0\x07\xce\xfe\xd9\xf8x\xfc+\x01$\x03C\x03M\xfd\x18\xf5\xb1\xef-\xec\xa1\xeb\x8e\xf2\xc1\xf9\xf0\xfb\n\xff\xf6\xfaH\xf7B\xfb \xff\xc9\x00\xf2\x02\xba\x05\xc2\x07b\x01(\x03\xfc\x04\xd3\x08_\x0e`\x08\xc0\x05\xd5\x00-\x04\x99\x07\x88\x07\xee\x03!\x02\xf0\xffn\xfe\xe2\xfd\xa6\xf9D\xfb|\xfd=\xffX\xfe2\xfc\xa0\xfc\xec\xfbQ\xf8\\\xfba\xfb4\xfc\x14\xff\x90\xfd\x15\x00\x06\x01\x1e\x01\x11\x06\x11\x08!\x08"\x04\xf3\x01}\x02\xc5\xff\xc0\x01\x0c\x02\x10\x05\x1a\x02\xb6\x04\x91\x02\xb3\xffi\x01\x90\xfd \xfb\xbb\xf8\x8e\xfd\xdd\xfcw\xff_\x01\xcc\xfd{\xfa\x07\xf7\x04\xfa]\xfe\x07\x01\xa9\x01\xa5\x02\xd5\x00\x98\x01\xfd\x00\xf7\x00k\x01\xca\x01\x98\x01.\x07\xf0\x12Q\x0f\xcb\x08\xce\xffA\xfep\xfb\xdb\xf6\xa5\xf9\xe1\xf7F\xf8\xec\xfd\x8b\xff\x80\xff\t\x01\xc6\xfe\xee\xff8\xfcu\xfa\x01\xfc5\xfa\xab\xfcE\x003\x02]\x02Q\x02\x90\x03\xba\x03\\\x02\xf5\x02\xab\x02\x08\x01~\xffQ\xfe\x86\xfc{\xfa\x1d\xfe\xc4\xfd\xe8\x00\xe1\x01\xbc\x01`\x011\xfd=\xfa@\xf6D\xf94\xfe\xd1\x00|\x02\xc7\xfe\xd8\xfd\x86\xff\xf5\xff\xc9\x01\xcf\x05Z\x08\xab\x07\xba\x048\x01 \xfdJ\xfb\x03\xfb\xe8\xfa\xe0\x01\xde\x06\x11\x06\xb7\x05t\x02\x14\xfd\xa7\xfd\xf9\xf9Y\xfaW\xff\x7f\xfe\xb6\xff\xd2\x007\x00\x8a\xfeE\x00\xc2\x01p\x04\xe9\x04\xe0\x05\xf1\x05&\x01\x13\xfe)\xfb\xce\xfc@\x03_\x04\xe7\x04C\x043\x00\x97\xfc\x82\xfd\xf6\x00\xec\x01\x90\x00\x01\xff6\xfd\xf6\xfa\x1d\xfa\xf2\xf9#\xfd\xcd\x01\xca\x04\xa6\x06\xf2\x03\x95\xff\x9d\xfd:\xfa\xab\xf8\xe7\xfa\xec\xfd\xb5\x01\xf5\x034\x03L\x01\xac\xfc\xd2\xfc=\x00\x12\x01\x84\x01u\x01"\x03\x0b\x02\xc8\xfeL\xff\xbd\xff\xe8\xff\xf5\x00\xe4\x00\x81\xff\x90\xfco\xfd9\xff\xf7\xfeQ\x04\xb2\x05\x11\x03\xf2\xfe/\xfcA\xffG\xff\xdc\x01\x97\x03\xe8\x00\x9e\x00\x1b\x01\xb5\x00e\x01\xca\xfe\xd8\xfe&\x00\'\x01\xde\x00y\xff\xe6\xfc\xe6\xfc\x8a\x00\n\x01\xcf\x02h\x03\x0c\x04-\x03\xfe\x00<\xff?\xfdb\xfa\n\xfa\xd2\xfc?\x01\xc1\x04\xd9\x04\x9e\x04Y\x02\xd9\xfe\xde\xfb\xe0\xfa\xe7\xfa\xa3\xfe\x08\x03\xf7\x04D\x03#\xfe\xd5\xf8B\xfc\x1b\xff=\xff|\x03\x82\x04\x1d\x01\xd0\xfdN\xfeg\xfdN\xfc\xfc\xfb\xe8\xfcy\x00\xdc\x02V\x00\xa6\xff\xc4\xff\xb8\xfd\xb3\xfcr\xfb;\xfe$\x02\xd3\x05\xaa\x06^\x01\x08\xfe\xe7\xfd\xfe\xfdu\xffb\x00\xbc\x01\xf8\x03\xf6\x04\xed\x03[\x01y\xfeZ\xfe\xe1\xffw\xff>\x03\x03\x04(\x01\x08\xfe\xd5\xfb\x1e\xfd\xce\xff\x9d\x02X\x015\x01\x87\x00\xfc\xfe\xf0\xfd\'\xf8\x9e\xf5\xc8\xf9\xac\xfei\x03%\x05\x05\x03\x8e\x00\\\xfeG\xff[\x00\xec\xfe\xd3\xffI\x03 
\x07\xcd\x04\xd9\x01\xae\xff\xec\xfb\xd8\xfb\xad\xfa\xa2\xfd\xdc\x03s\x07\xab\x05L\x00\xa9\xfcy\xf9\x99\xf9\xf9\xfc\x0c\xff\xfd\xff\xe9\x01\x82\x02\xac\x01\x85\xff+\xfc(\xfa<\xf9N\xfau\xfd\xb4\x00\xef\x03Z\x05\x11\x05j\x03\xdd\x00\xd5\xff\xd6\xff\xd5\x00\xc8\x01w\x01\xbf\x00\'\x01\xb8\x01y\x00\x07\xff@\xff\xcb\x01\xb8\x01B\x03n\x04*\x01\x85\xfeK\xfb\x83\xfbN\xfdq\xfd_\xfe\xb1\x00U\x04\x8b\x03\xbf\x01\x18\x00(\xfdH\xfd\x9f\xfdU\xff\xa9\x00&\x00\x97\xff\x9a\xff\x8f\x00\x1e\x00\x81\x00J\x01%\x01\xda\x00+\xff\xb8\xfd=\xfe\x9f\xfe\xdd\xfe\'\xff\xa7\xff-\x00\xf1\xff`\xfe\x80\xfdM\xfe^\xfe\xb0\xff5\x01\xda\x01\x9a\x02\x87\x01\x06\x01\x13\x01\xc7\xff\x83\x01\xfa\x02\x97\x03|\x04\xd6\x02\xd2\x00\xbf\xfe\xc0\xfb\xc6\xfb~\xfeD\xfe\x05\x00\xb1\x02\xfe\x01\xfc\xff\x07\xfe\xfa\xfc\xaa\xfd\xd7\xff\xf0\x01\xda\x02\x8d\x024\x02\xc2\x011\x01\xb1\x00\x19\x00L\xff\xf1\xff\xac\x00|\x00*\xff9\xfe\xa8\xfd\x80\xfc\xf5\xfd\x94\x00\x86\x03^\x047\x043\x03K\x01\xe6\x00\xf0\xff5\xff;\xff4\xfd\xdd\xfc\xb1\xfex\xfe\xd8\xfe\n\xfe\xdb\xfcg\xfd\x8f\xfdC\xfds\xfd\xc2\xfeL\x00\x9d\x01\xc4\x02p\x04\n\x03}\x01\x8d\x00\x01\xff\xc7\xfe!\xff\x97\xff\x1f\xfe(\xff\xe4\xff\x9a\xfd\xc6\xfc\x8f\xfc\xd9\xfd\xe4\xff\x82\x01w\x03+\x06\xd8\x07\x10\x06T\x03\xbb\x00[\xff(\xfe\x1e\xfd\xfd\xfc\x88\xfd%\x00\xba\x01\xe9\x01\x13\x01\xa8\xffj\xff"\xfe\x93\xfd\xc1\xfd;\xfe\x98\xffr\x00\xed\xff\\\xff\x86\xfe\x81\xfeF\xffq\xff\x17\x00\xfb\xffI\xff\xda\x00\xf1\x02\xb0\x02{\x02\xc3\x02\xaa\x02\x1c\x03U\x03\x10\x02k\x01k\xffh\x00\xab\x00\xcd\xff\xf9\xff:\xfe\xaa\xfe\x10\xffE\xffB\xff\x1e\xffu\xff>\x00\xe3\x00\xb5\x00\xd9\xff\xba\xff\xc3\xff\xb5\x00`\x02\xe6\x02 \x04\x8d\x04\xbd\x04>\x04\x99\x03\xc9\x02\x1b\x01\x96\xff]\xff\xfa\xff\xd3\x00\x1c\x01K\x00\x13\xff\x0e\xfeq\xfd\xed\xfcl\xfc\xa3\xfb\x07\xfcb\xfdN\xfd\x05\xfd\xd3\xfc\x88\xfcr\xfc|\xfc\x89\xfc\x8a\xfc\t\xfdy\xfdH\xfe\xfd\xfeh\xfe\x0c\xfe\x8a\xfdM\xfd\xd8\xfc\\\xfb\x85\xfa\x0b\xfa \xfa\x00\xfa\xc1\xf9\xff\xf9B\xf9\xde\xf8\xe7\xf8\xc8\xf8\xa7\xf8\r\xf8\xa9\xf8-\xfak\xfbw\xfc\x85\xfd\xbc\xfe\xd7\xfe\xd7\xfe\xbb\xfe\n\xfeo\xff0\x00G\x01\xc9\x02U\x02\xed\x02#\x03]\x03%\x03d\x01f\x01+\x01>\x02\x9c\x03\n\x04\xb1\x05 \x05r\x05\xf9\x04\xdb\x039\x03>\x02\x85\x02t\x04U\x07\x9c\tW\x0b\xc4\x0c\xeb\r\xd7\x10\x99\x13t\x15t\x18\xda\x1b/ 
h#\xc6#\x16"Q\x1d\xe4\x17\xe4\x11<\x0b\xd6\x04\xc6\xffZ\xfc\x16\xfa\xcb\xf8\xc0\xf6\xc5\xf3\xcd\xf0\x1d\xee2\xecb\xeb;\xeb\x04\xec\xcd\xed\x01\xf0\xe6\xf1\xf7\xf2n\xf3\x92\xf3\xcc\xf4\x8e\xf6\xf6\xf8\x06\xfc\n\xff\xe6\x01\xf4\x03f\x04c\x03=\x01\x86\xfe\x04\xfc\x00\xfaG\xf8B\xf6c\xf4A\xf2\xd9\xefv\xed\xd4\xea7\xe9\xd0\xe8\x86\xe9\x8d\xeb=\xee\xf6\xf05\xf3F\xf4\xab\xf4\x1d\xf5\x83\xf5H\xf6M\xf8\xfd\xfa\xcd\xfd\xe4\x00\x14\x03\xe9\x03\x84\x03\xf6\x01\xb4\x00\x92\xff\x00\xff\xf0\xfe\xca\xfe\xda\xfe2\xfe\x98\xfdi\xfc\x95\xfa\xf8\xf9\x03\xfa\xb2\xfb\x88\xfeY\x01\x81\x03\x1f\x05\xe3\x05\xd7\x06\xed\x07C\x08\xbc\t\xd9\nF\x0c\xc8\x0eD\x0f\xca\x0eY\x0c\x17\x08\t\x06\xf8\x06L\x0c1\x15%\x1f<)\xe21K7\x837\xf03\xdd-}(\x86%u#G!\x15\x1de\x16\x04\r\xce\x01\x08\xf6\xbf\xebv\xe6\xed\xe55\xe8\xd8\xeb\xe5\xed\xb1\xed\xec\xeb\xaa\xe8\x00\xe6\xc8\xe4O\xe5\xc2\xe8:\xeeW\xf4"\xf9d\xfb\xa8\xfb\xcb\xfb\x8f\xfd\xd1\x00\xab\x05s\ni\x0e\xc0\x10\xdc\x0f\x86\x0bU\x04P\xfcx\xf5\xb0\xf0m\xee:\xed\xbe\xec\xfc\xeb;\xea"\xe8\xed\xe4F\xe2\xe9\xe1\xc0\xe3\xdd\xe7K\xecg\xf0\x8f\xf3\xf8\xf4\x81\xf5\xe5\xf5\xab\xf6\x9b\xf8\x8f\xfb\xf9\xfe\xbc\x01;\x03D\x03z\x01:\xff\x8c\xfd\xcb\xfc7\xfd\xb2\xfdG\xfe\xaf\xfdw\xfcx\xfak\xf7\xbe\xf5j\xf4%\xf5\xc8\xf7\x8f\xf9\x8b\xfc\xe4\xfdD\xfe|\xff\xde\xfe\xc9\xff\xc4\x004\x02\x03\x06\x81\t\xda\r\xd3\x10\x8d\x12>\x12<\x0f\x01\x0b\xe4\x04Y\x00\xf6\xfe\xb5\x01a\n\xa6\x16\xf5$\x822\x11<\x86@\x02A\xe7>\x02\xfa+\xf8\x04\xf6p\xf5\x8a\xf4\xcd\xf3v\xf3S\xf2\x00\xf2\xea\xf1w\xf2\xca\xf4\xbc\xf7\xbd\xfb0\xff\x01\x01\xee\x02\x15\x03\xb5\x03\x11\x05<\x06\xb4\x08\xca\t3\n`\n`\x08\x98\x06V\x03\xca\x00\xa7\x00,\x02z\x07z\x0e\xdf\x17r#\xa9.\xdf9\xdaB!IXL\x08KEF\x91<\xfc/\xb3!\x8e\x11,\x03F\xf5\xe2\xe8\x15\xdf\x83\xd7%\xd4Z\xd4\'\xd8\xc6\xde*\xe6\xf8\xec\x99\xf1\xa3\xf4_\xf6\xce\xf7\x17\xfa\xa8\xfdy\x02\xad\x07\x05\x0c\x19\x0f\xe6\x10\x98\x11\xb0\x11\xc1\x10\xa2\x0e\xd1\n\xce\x04K\xfd\xbe\xf4M\xec|\xe55\xe0\xb3\xdd\x98\xdd\xef\xde\xc9\xe1\xf1\xe4\x05\xe96\xeeA\xf3\xc0\xf7I\xfb-\xfd\x97\xfe\x8f\xff\xf2\xffZ\x00-\x00\xd5\xffv\xffp\xfe\xce\xfc\x10\xfb\x0b\xf9\x07\xf8\xd8\xf7\xbc\xf7{\xf7\xa8\xf6s\xf5\xb8\xf4\xd2\xf4\xaa\xf5\xe8\xf6*\xf8\xaa\xf8\xee\xf7K\xf6k\xf4\x16\xf3\x11\xf3\xda\xf3\xcc\xf5\xbf\xf89\xfbK\xfd\xbd\xfe\xaf\x00\xa6\x03\xd3\x06b\x08|\x08\x80\x06\xd0\x03\xc3\x01.\xff`\xfd\xee\xfbH\xfb\x00\xfb\xe2\xf8,\xf5*\xf2\xb6\xf5\xac\x02a\x17\x0e/\xc5CnS\x8b]\xdcbzc\xe0^8V\xe6J3;\x83&\xad\x0c1\xf1 \xda\xb7\xca\xe3\xc3\x1d\xc4\xb6\xc7{\xcc\x9e\xd2\x80\xd9v\xe2\x01\xec^\xf5R\xfd\x01\x03\xdd\x06\xa3\x07\x8e\x06\xed\x04\xd9\x04\x8b\x08\xf2\x0e\xc8\x151\x1a\x99\x18\xc7\x12\xfa\t\x9b\x01F\xfa\xd4\xf2\x86\xebv\xe2\xbc\xd9\xc3\xd2\x13\xcf?\xd1\xdf\xd6:\xdf\xb0\xe8\xe2\xf0\xba\xf8\xbf\xfe\x80\x03j\x07\xd7\t\xd3\n\xc4\t\xa7\x06\xd0\x02^\xff\xd7\xfc\xc6\xfb\x0f\xfb\xe3\xf9\xf7\xf7\xac\xf5\x8a\xf4}\xf4\xd0\xf5\x90\xf8\xb9\xfa"\xfcn\xfb\xbc\xf8\x17\xf6\x19\xf6\xe5\xf8q\xfc\xfe\xfd 
\xfbL\xf6%\xf2\xb2\xef"\xf0\xd2\xf1\x9e\xf4\x04\xf8\xa8\xfa\x85\xfdC\x00\x1c\x03\xdb\x06\xba\n\x07\rH\x0c\xc7\x08S\x04\xa1\xff\xde\xfb4\xf8\xfb\xf3c\xf0\x1b\xec\xc4\xe7\xaf\xe4\xe1\xe6\xe0\xf4G\x0e\x06.=IkY\xc9b\'hwm2p\xfdh\xdfX\x11B\xaf&6\t\xdd\xe9\xb6\xce\xc7\xbc\xff\xb5\x08\xb7[\xb8\x83\xbam\xc1\xf4\xd0\xd8\xe7\xd8\xfd;\x0c\x7f\x12\xf0\x14\xc2\x160\x18\xbe\x183\x18u\x17\x03\x176\x16\xa6\x13\xe6\x0e\x8e\n\n\x08L\x06~\x002\xf5\xb2\xe6F\xd9.\xd0O\xcb\x96\xc8I\xc7.\xc8\xb5\xcd#\xd8\xbc\xe6\x04\xf7p\x06N\x13\x1b\x1c?!\xe2!2\x1f\x05\x1b\xa9\x14\xf6\x0b\xa0\x00\x07\xf5\xfa\xeb\x96\xe6t\xe4}\xe4D\xe5y\xe6\xf0\xe8f\xed.\xf3\xe5\xf9o\x009\x05\xfe\x07A\tp\tS\tn\x08k\x06\xde\x02\xe0\xfdl\xf8\xf3\xf2 \xef\xc2\xed\x0b\xee\x9f\xef\x99\xf12\xf4\xdb\xf7\xe0\xfb\xd4\x00\xca\x04\xaf\x06-\x07x\x05\xe7\x01\xaf\xfd\x12\xfa\xdf\xf6\xe2\xf1|\xe9\x08\xe0\xd7\xda\x8a\xde3\xebR\xfbi\x0b\xea\x1e&6?P"h\x90v\xa4zwwJo\x02bAO\xe46\xb0\x17b\xf7\xe1\xdb\x04\xc6j\xb7\xad\xafD\xaeb\xb3\x18\xbeQ\xcdh\xdf\xb0\xf2\xa1\x03~\x0f\xf5\x15\xff\x17,\x18\xe6\x187\x19w\x17\xaa\x13\xbf\x0e|\n\x0e\x08%\x07\x0c\x06\xaf\x01\xbb\xfa\x85\xf2V\xea\x08\xe4\xc6\xde_\xd9\xf8\xd40\xd2\x1a\xd4:\xda)\xe3C\xee\xce\xf9\xcb\x04[\x0fM\x17\xd3\x1b\xb1\x1cd\x19\x1e\x13\xae\n\x80\x01\x94\xf7\xab\xecp\xe3\xa9\xdd\xa7\xdbL\xdeQ\xe3\x1f\xe8\'\xeek\xf5\x7f\xfdz\x05V\x0bY\x0e\xac\x0f\xe7\x0ed\x0cs\x08\x16\x04G\x00v\xfcA\xf7V\xf1\x0b\xeci\xe9\xeb\xe9\x87\xeba\xed\x9f\xef\xf2\xf2\xbe\xf7\xbf\xfc\x90\x01(\x06\x19\n\xbc\x0c\xf0\x0c\xa2\n%\x06w\x01\xc7\xfc\xdf\xf5\xbd\xef\xf5\xe9`\xe4\xdb\xdf\x7f\xda\x9b\xd9\xc3\xe3s\xfar\x18\x9d1fC\xb1P9_%r\xac\x7f\xff\x7f#rSYR?\x06(x\x0e\xfc\xf1"\xd5R\xbc\xc8\xac5\xa8D\xab\x8b\xb3\xf4\xc0\xe1\xcf\'\xe0W\xf2.\x03B\x12Z\x1fb%&$) p\x1c\xd1\x196\x18J\x14/\x0c\x9d\x02[\xfb\x9b\xf7\x99\xf6\x05\xf5\x16\xf0\x1a\xe7\xbc\xde1\xdb\x96\xdb9\xdd\x82\xde^\xdeb\xe0t\xe7&\xf2=\xfdS\x06w\r\xa9\x11\x9b\x11"\x11 \x11\xc1\r\xd8\x07\x91\xff\xed\xf4\xfb\xec\xd9\xe9p\xe8{\xe6\xdc\xe5\xea\xe7\xa7\xec\xbe\xf3\xdb\xfb\xca\x012\x06W\n\x0f\r\xc6\r\xb3\r\xa9\x0b\x81\x06\'\x009\xfa\xde\xf4c\xf0\x17\xedq\xeaV\xe9S\xeb\x8a\xefe\xf4)\xf9\x06\xfe\x98\x02q\x06\x9c\t\xb9\x0bS\x0cR\x0b:\x08}\x03\xef\xfdR\xf8p\xf3\xf0\xee\xcf\xeb=\xeaG\xe7\x94\xe2\xa0\xe0\xe3\xe5\xc9\xf4\xfe\n\xef \xe50o>\xd5P\x18e-t\x0fygr&d\x1fT\x10B\xb8)\xec\x0c\xa7\xf0\x87\xd5+\xc0i\xb3\xaf\xad!\xaf\x9c\xb5s\xbe\xf5\xc9\x81\xdat\xef(\x03\xcd\x10a\x17\xa1\x19\x05\x1c>\x1f\x12 
&\x1c\x18\x14\xce\x0bi\x06c\x04\x83\x02\xdc\xfe\xeb\xf9[\xf5\xd1\xf1\x9e\xefS\xed\x1a\xea\xf8\xe5\xfb\xe1\x14\xdf\xb1\xde\\\xe1\x83\xe66\xec\x9f\xf0\xf6\xf4\xaf\xfb\xc5\x04+\r,\x11\xa9\x0f\xfc\x0b\x87\t\xe8\x07L\x04\xda\xfd\xdb\xf5\xe4\xee\xa8\xebm\xeb\x9d\xec\x9e\xee\xa2\xf0\x1e\xf3\xd5\xf7\xc9\xfd\xcc\x03\x0e\x08\xf4\x08T\x07\x84\x05\x82\x03N\x00\xa2\xfb\'\xf6\x08\xf1@\xedj\xeb\x83\xebQ\xed\xb7\xf0\x8e\xf4\x10\xf8?\xfc\xba\x01}\x07\x92\x0b\x8a\x0c#\x0b\xe4\x08\xbb\x06\x95\x04Q\x01V\xfc\xa2\xf55\xf0\xce\xec\xb3\xea\x8b\xe9\xa5\xe5\r\xe0A\xe1\x94\xedP\x02\xd9\x17\xab&\x981N@LVMl\xdbw1v#l``\xf4S\xc9A\xac(\x9d\x0b\x94\xef\x15\xd8`\xc4\x15\xb6\xc4\xae\x05\xae\xa8\xb1\x14\xb8\x8a\xc2\xa2\xd1\'\xe5^\xf8\x06\x04\xd4\tB\x0fL\x15\xf5\x1bm\x1fA\x1c[\x15\xaf\x10\xdf\x0ec\r\x04\x0b~\x07\xc5\x02\xd0\xfe[\xfb\xd4\xf6O\xf1\xb8\xec0\xe8\x93\xe2\x05\xdd\xbc\xd9\xec\xd9\xea\xdd\xb6\xe3\x92\xe7\x86\xea\x81\xf0&\xfa\x98\x03(\t5\nz\t\xa5\t9\x0b\xd0\n+\x06\x19\x00\x1c\xfb\xbf\xf7\xef\xf5\xb7\xf5\xae\xf5\xdc\xf5\xb4\xf6m\xf8\xd1\xfa\xc4\xfd\xd4\x00J\x02*\x01w\xff\x8e\xfd\n\xfb\x1e\xf9d\xf6\x98\xf2\xc1\xef\xb0\xee\x98\xef\x17\xf2C\xf5\x81\xf8\xf7\xfb\xb5\xffR\x04\xb6\x08\xb0\x0b-\r\xdc\x0ck\x0b\x03\t\xaa\x05\x98\x01\xc7\xfc\xaf\xf7\xc4\xf2\x0f\xee\\\xeb\xb1\xe9\x02\xe7R\xe3\xdc\xe0\xaf\xe4I\xf1g\x03\xd5\x14\x1e"\x1b.\x00?\xeaS\xe1e2n\xfck\xacd\x8a][T\x96D/.\xf7\x14g\xfd\xd5\xe9\xeb\xd9\x9e\xccX\xc2\xf9\xbb~\xb9X\xbb6\xc2\x96\xcc\x9e\xd7\xdd\xe0@\xe8\x90\xefA\xf8p\x01f\x08\xa3\x0b-\x0c\x07\r\x14\x10\xbb\x14\x0b\x18\xaf\x18\xa5\x17\x9e\x16\xe7\x15\x99\x14>\x11\x85\x0b\x00\x04\x1d\xfc\x1f\xf4\x96\xec\xcc\xe5\xf6\xdf"\xdb>\xd7o\xd5\xd5\xd6\xd7\xda\xc9\xdf\x1d\xe4\xc9\xe7\x94\xec\x1e\xf3$\xfaO\xff\xe7\x01\x1b\x03\x80\x04\xb8\x06\x9a\x08p\t\xf3\x08\xc2\x07\x85\x06\xf9\x05\x05\x06\xf5\x05\x05\x050\x03\xd9\x00\xe7\xfeg\xfd\xc1\xfb.\xf9\x17\xf6G\xf3/\xf1[\xf0\x94\xf0h\xf1\xa1\xf2I\xf4\xd5\xf6\x91\xfa^\xffg\x04\xca\x08\x00\x0c[\x0e\n\x10%\x11v\x11e\x10\xf0\r\xa6\n<\x07\xdb\x03~\x00,\xfd\x05\xfa \xf7[\xf4J\xf1|\xee\xac\xed;\xf0\xfa\xf5\xd5\xfcO\x03\x9b\t_\x113\x1bq%l-I2\xb14\xf15\x056\xd83\xb5.F\',\x1f%\x17\x19\x0f\xd8\x06\xaf\xfe\xc4\xf7m\xf2<\xee\xef\xeai\xe8\x07\xe7\xda\xe6\x10\xe7\n\xe7\xf0\xe69\xe7+\xe8\x7f\xe9\xa2\xeat\xeb^\xec\xef\xedA\xf0%\xf3\xdf\xf5|\xf8?\xfb<\xfec\x01)\x04W\x06-\x08\xae\t\x9c\n\xa8\n\xc4\tU\x08\xcb\x06\xed\x04\xa7\x02\xdd\xff\x06\xfd\xb6\xfa\xd9\xf8J\xf7\xba\xf5N\xf4b\xf3\xe7\xf2\xea\xf20\xf3\x8d\xf3=\xf4.\xf5.\xf6=\xf7@\xf8z\xf9\xc0\xfa\x0f\xfc7\xfd;\xfe<\xffB\x00R\x01L\x02\xf8\x02@\x03y\x03\xcf\x03^\x04\xf6\x04K\x05t\x05y\x05r\x05T\x05\x13\x05\xcb\x04{\x04\x11\x04\xa5\x03\x0b\x03|\x02\xbe\x01\xea\x008\x00\x81\xff\xb0\xfe\xc8\xfd\xfa\xfc\x80\xfc:\xfc\xf8\xfb\xa9\xfb2\xfb\xea\xfa\xcb\xfa\xc7\xfa\xee\xfa\x1d\xfbh\xfb\xb5\xfb\n\xfc[\xfc\xa9\xfc\x02\xfdH\xfd\x8c\xfd\xb3\xfd\xb5\xfd\xe2\xfd]\xfe\x1f\xff1\x00\x91\x01A\x03#\x05\xf7\x06\xa9\x08I\n\xf5\x0bu\r\xca\x0e\xd4\x0fx\x10\xf4\x10l\x11\xf5\x11a\x12\x90\x12{\x12\x08\x12f\x11\x99\x10\x80\x0f 
\x0et\x0c\x95\n\x9d\x08s\x06\x05\x04f\x01\xe4\xfe\xae\xfc\xe4\xfa]\xf9\x0c\xf8\x1f\xf7\xcb\xf6\xbc\xf6\xf9\xf6e\xf7\xf5\xf7\xb8\xf8O\xf9\x8e\xf9U\xf9\xc4\xf8\x16\xf8U\xf7\xa1\xf6\xec\xf5u\xf5B\xf5k\xf5\xdc\xf5\xa2\xf6\x96\xf7\xb6\xf8\xbd\xf9\x87\xfa\xfa\xfa\x1e\xfb\x12\xfb\xc1\xfa.\xfak\xf9\x97\xf8\x02\xf8\xd5\xf7\x19\xf8\xc1\xf8\x9d\xf9\xd2\xfa<\xfc\xba\xfd:\xff\x95\x00\xd6\x01\xdd\x02\xaa\x03H\x04\xa0\x04\xaf\x04\xa8\x04k\x04\x0b\x04\x99\x03\x1d\x03m\x02\xa4\x01\xc8\x00\xf3\xff\x1a\xff]\xfe\x9f\xfd\xde\xfc=\xfc\xdd\xfb\xc9\xfb\x1e\xfc\xc0\xfc\x9f\xfd\x94\xfe\x86\xff\x87\x00\x85\x01\x84\x02\x80\x035\x04\xa4\x04\xe3\x04\xf4\x04\xdf\x04\xa0\x04\x17\x04l\x03\xa4\x02\xc9\x01\xec\x00\xe2\xff\xdf\xfe\xf1\xfd\x06\xfd8\xfc|\xfb\xe6\xfa\x87\xfaQ\xfam\xfa\xc6\xfa\x83\xfb\x8d\xfc\xc9\xfda\xff\r\x01\xdf\x02\xc3\x04\xb8\x06\xb5\x08\x92\n8\x0c\x9b\r\xb1\x0ev\x0f\xef\x0f\x1e\x10\xcf\x0f\x0f\x0f\xf7\r\xc7\x0c\x9a\x0be\n\x07\t\x9e\x07q\x06\x99\x05\xe8\x04B\x04\x95\x03\x05\x03\x97\x02+\x02\xab\x01\x08\x01V\x00\x9a\xff\xb9\xfe\xa8\xfd\x84\xfcw\xfb\x86\xfa\xaf\xf9\xbc\xf8\xbf\xf7\xff\xf6\x95\xf6s\xf6]\xf6B\xf6C\xf6y\xf6\xe4\xf6T\xf7\xb8\xf7\xf0\xf7:\xf8\x98\xf8\xf7\xf84\xf94\xf9\x14\xf9\xfc\xf8\xea\xf8\xf8\xf8\x00\xf9!\xf9s\xf9\xfc\xf9\xb3\xfav\xfb^\xfcY\xfdC\xfe\x11\xff\xb0\xffA\x00\xa7\x00\xd5\x00\xbc\x00k\x00\xf8\xff\x8e\xff\x10\xff\x8c\xfe\x1f\xfe\xe0\xfd\xb7\xfd\xbe\xfd\xf4\xfda\xfe\x08\xff\xc8\xfft\x00\x06\x01\x8e\x012\x02\xe4\x02\x98\x03\x15\x04K\x04]\x04z\x04\x9a\x04\xa8\x04\x88\x04-\x04\xb8\x03"\x03\xa3\x02\x0c\x02s\x01\xd5\x00\x16\x00V\xff\xae\xfe^\xfeg\xfe\x92\xfe\xc0\xfe\xe4\xfe\x1c\xff\x8c\xff\xf6\xffE\x00J\x00\x08\x00\x92\xff\xf5\xfeT\xfe\xa5\xfd\xd7\xfc\x0c\xfcg\xfb\x19\xfb.\xfb\xa2\xfbg\xfcG\xfde\xfe\xbe\xff9\x01\xc8\x02)\x04P\x05K\x06)\x07\xf2\x07\x9d\x08\x05\t%\t5\tM\tm\tp\t7\t\xd9\x08\x80\x08!\x08\xcd\x07o\x07\xde\x06>\x06\x87\x05\xce\x04$\x04\x8b\x03\xf8\x02S\x02\xaf\x01"\x01\xa4\x002\x00\xcd\xffR\xff\xc9\xfeI\xfe\xd5\xfdv\xfd\x1a\xfd\xbf\xfcV\xfc\x16\xfc\xd4\xfb\xa4\xfb\xa9\xfb\xb6\xfb\xce\xfb\xde\xfb\xc4\xfb\x92\xfbW\xfb\x0b\xfb\xa6\xfa"\xfa\x97\xf9\n\xf9\x85\xf8\x0f\xf8\xc7\xf7\xba\xf7\xd3\xf7\x08\xf8@\xf8\xbf\xf8w\xf9n\xfam\xfb4\xfc\xe0\xfc\xa8\xfdy\xfe"\xff\x98\xff\xbd\xff\xc8\xff\xe9\xff\x10\x00\x1e\x00\x05\x00\xcb\xff\x9f\xff\x83\xffm\xffd\xff^\xffy\xff\xa9\xff\xf8\xffV\x00\xe2\x00\x8a\x01M\x02&\x03\xec\x03\x9c\x04\x1e\x05o\x05\x8f\x05o\x05 
\x05\x95\x04\xde\x03\'\x03i\x02\xad\x01\xfa\x00\\\x00\xf6\xff\xac\xff\x98\xff\x80\xffq\xffe\xff`\xffa\xffc\xffC\xff\x0e\xff\xd4\xfe\xb2\xfe\xbc\xfe\xdb\xfe\x12\xffB\xff\x8b\xff\xdf\xff5\x00t\x00\x8d\x00|\x00G\x00\xf1\xff\x81\xff\x02\xff\x83\xfe"\xfe\xd1\xfd\xa1\xfd\x88\xfd\xa0\xfd\xef\xfd`\xfe\xcf\xfe7\xff\xae\xff.\x00\xbc\x00O\x01\xe2\x01|\x02*\x03\xfd\x03\xf0\x04\xf7\x05\x01\x07\r\x08\x12\t\x0c\n\xde\ni\x0b\xb4\x0b\xaa\x0b\\\x0b\xc8\n\xe0\t\xc7\x08\x8f\x079\x06\xd2\x04q\x03\x1d\x02\xe9\x00\xd8\xff\xef\xfe*\xfe\x90\xfd#\xfd\xe2\xfc\xbf\xfc\xa7\xfc\x91\xfc\x9f\xfc\xac\xfc\xaf\xfc\x8d\xfcE\xfc\xf3\xfb\x89\xfb\x02\xfb_\xfa\x9c\xf9\xdc\xf8+\xf8\x87\xf7\xf4\xf6|\xf6A\xf60\xf6E\xf6t\xf6\xbd\xf6*\xf7\xad\xf7K\xf8\xf2\xf8\x8f\xf9E\xfa\x02\xfb\xbf\xfbt\xfc\x14\xfd\xbc\xfdR\xfe\xd9\xfeG\xff\xa1\xff\xe2\xff\x1d\x00I\x00Y\x00c\x00`\x00`\x00p\x00\x83\x00\xa6\x00\xd1\x00\x0f\x01f\x01\xda\x01[\x02\xde\x02X\x03\xc8\x036\x04\x96\x04\xd5\x04\xfa\x04\xf3\x04\xc5\x04y\x04\t\x04\x8f\x03\r\x03\x86\x02\x06\x02\xa1\x01U\x01,\x01\x1e\x011\x01P\x01w\x01\x9d\x01\xb2\x01\xa0\x01c\x01\xfc\x00v\x00\xc5\xff\xfa\xfe-\xfeh\xfd\xc4\xfc9\xfc\xe3\xfb\xb3\xfb\xa8\xfb\xca\xfb\r\xfc_\xfc\xaa\xfc\xe5\xfc\x07\xfd\x1b\xfd%\xfd\x14\xfd\x01\xfd\xe9\xfc\xd5\xfc\xde\xfc\xf6\xfc-\xfd\x85\xfd\xee\xfde\xfe\xe2\xfek\xff\xf9\xff\x96\x003\x01\xd3\x01~\x028\x03\x08\x04\xf1\x04\xed\x05\xf3\x06\xf6\x07\xee\x08\xe2\t\xbf\nt\x0b\xf6\x0bA\x0cX\x0c@\x0c\xf4\x0by\x0b\xd5\n\t\n\x1e\t6\x08K\x07P\x06k\x05\x8b\x04\xb4\x03\xf1\x022\x02\x85\x01\xde\x002\x00\x88\xff\xdb\xfe5\xfe\x97\xfd\xf3\xfcW\xfc\xd8\xfbl\xfb\x13\xfb\xd2\xfa\x94\xfa\\\xfa7\xfa&\xfa\x15\xfa\xf3\xf9\xc4\xf9\x97\xf9k\xf9.\xf9\xe8\xf8\xa0\xf8j\xf8=\xf8\x17\xf8\xff\xf7\xf9\xf7\t\xf8)\xf8S\xf8\x9a\xf8\xea\xf8M\xf9\xc6\xf99\xfa\xb4\xfa/\xfb\xa7\xfb#\xfc\x9c\xfc\x1b\xfd\xa8\xfd;\xfe\xd3\xfek\xff\x11\x00\xc0\x00u\x010\x02\xde\x02s\x03\xed\x03\\\x04\xa8\x04\xd7\x04\xe8\x04\xc3\x04\x9c\x04\\\x04\x10\x04\xb9\x03Y\x03\xfd\x02\xa3\x02^\x02\x1c\x02\xef\x01\xd0\x01\xa9\x01\x9b\x01\xa6\x01\xb8\x01\xc5\x01\xcf\x01\xd1\x01\xc0\x01\xaf\x01\x8a\x01;\x01\xe7\x00\x8a\x00\x1e\x00\xb0\xff@\xff\xcf\xfeg\xfe\x05\xfe\xb4\xfds\xfdE\xfd\x1e\xfd\x0b\xfd\xfb\xfc\xfa\xfc\x0c\xfd\x1c\xfd1\xfdE\xfdu\xfd\xa4\xfd\xbe\xfd\xe7\xfd\x1f\xfeX\xfe\x93\xfe\xcd\xfe\x03\xff=\xff}\xff\xbf\xff\xfb\xff+\x00P\x00\x84\x00\xb7\x00\xdd\x00\xf8\x00!\x01K\x01\x7f\x01\xc3\x01\xf5\x01\'\x02P\x02\x81\x02\xb5\x02\xcc\x02\xf3\x02\x01\x03\n\x03,\x03\x18\x031\x037\x03=\x03f\x03\x80\x03\xc6\x03\xef\x03(\x04_\x04\x7f\x04\x9f\x04\xad\x04\xa4\x04\x94\x04q\x04?\x04\x02\x04\xaa\x03a\x03\x10\x03\xb0\x02l\x02\x18\x02\xd4\x01\x8b\x019\x01\xfb\x00\xa7\x00W\x00\x03\x00\xbb\xff[\xff\xf2\xfe\x86\xfe\x1b\xfe\xb4\xfd9\xfd\xc1\xfcB\xfc\xd1\xfbT\xfb\xfb\xfa\x9a\xfaR\xfa\x10\xfa\xd9\xf9\xc2\xf9\xaa\xf9\xa2\xf9\xbc\xf9\xd6\xf9\x00\xfa,\xfak\xfa\xca\xfa\x1e\xfbs\xfb\xd5\xfb?\xfc\xa9\xfc\r\xfd~\xfd\xe5\xfd4\xfe~\xfe\xcf\xfe,\xff\x80\xff\xc1\xff\x17\x00g\x00\x9d\x00\xaf\x00\xdb\x00\x0b\x01\x15\x01\x1b\x01(\x01L\x01P\x01.\x01$\x01\xfb\x00\n\x01\x13\x01!\x01_\x01_\x01]\x01c\x01\r\x01\xf3\x00\xcf\x00\x94\x00\x10\x01\x1f\x01\x06\x01\x1b\x01\xcc\x00\xa1\x00\x91\x00u\x00\x85\x00\xe1\x00\xe1\x00\xa4\x00f\x00J\x00\x16\x00\x14\x00\x98\x00\xdf\x00\xe6\x00D\x01P\x01P\x01%\x01%\x01\xcf\x00\xd5\x00T\x01\xb1\x01^\x01\xe9\x00\x01\x01\x10\x01;\x00\xf3\xfe\xe0\x00\xc4\x06\xc3\t\x91\x08j\x01\x94\xf8&\xf58\xf6\xb7\xf7\x8e\xfb\xb8\x00*\x04\x0b\x05\xbf\xfb\xcd\xf9A\xf9\xda\xf7\xb8\xfc\xb4\x01#\x04\xd7\x02e\x02\x7f\x00\xdd\xfe\xce\xff\xd9\x02\xfb\x04\x7f\x06\xab\x07\x14\x08\xe8\x05r\x02\x9
e\xff#\xff{\x00:\x02&\x03\xc6\x01\xee\xff\x10\xfe\xc8\xfc\x9f\xfc\xe9\xfc;\xfd\x8a\xfd\x89\xfd\xe2\xfde\xfc\xf1\xfc\xd8\xfc\xed\xfa\xb1\xfbn\xfe\xdc\xffM\x00\xed\x00\x8e\x00\xfe\xff\x05\xff\xf4\x00\x11\x03\xe7\x03\xe0\x03\xa8\x037\x03\x92\x01b\x00\xb1\x01\x8f\x03\xcc\x03\xa7\x04^\x044\x03\xc0\x01i\x00\xd3\xff\x92\x02\x94\x03E\x06\x1c\x06|\x03P\x03L\x00m\x00\x1c\x03i\x06\x04\x06s\x05W\x02l\x00\xce\x01\xdd\x02H\x03\x95\x01\x1a\xffA\xfd\xdd\xfe\xe4\x00\xf9\x00\xa0\xff\xce\xfc\xd1\xfb\xba\xfb+\xfb\x9c\xfbe\xfc\xe5\xfc\xf5\xfc\\\xfc\x04\xfa\xa8\xf7\x1c\xf7\xe3\xf8\xf9\xfb\xdf\xfd\xe1\xfd\xa2\xfb\xa4\xf7\x05\xf7\xe1\xf8\xc7\xfaL\xfd\x92\xfez\xfe\xb8\xfch\xfa\xc7\xf9\xa5\xf92\xfb\xf7\xfdh\x00V\x01\xae\xfe\xd6\xfb\xc0\xfa4\xfc:\x00\x95\x02^\x03G\x02D\xff\xef\xfd_\xfd\xaa\xff\xd2\x01I\x02\x8b\x04\x9d\x03o\x00\x0b\xfe4\xfd\x10\xfe1\x01\x90\x04;\x06\xfb\x03\xe5\xfe\xba\xfb8\xfbi\xfe\x0f\x04\xfd\x06\xc6\x04\xcb\x00N\xfe\xae\xfd\xa4\xff\x02\x01\xbc\x01O\x02\xb7\x01\n\x01+\xffL\xfes\x00\xc8\x02\x81\x00\x19\xfe\xc6\xfd_\xfe\x92\x01\x8e\x02Z\x00\x9b\xfdB\xfb\xa5\xfb\xa3\xfd\xd2\xfe\xf0\xfe~\xfd\xfd\xfa\xb7\xf9\x80\xf9\xcd\xf9\x8a\xfa\xa9\xfbq\xfb\xac\xfaZ\xfa\t\xfbO\xfcP\xfdP\xfe\x91\x01e\x06\xb0\n\x16\x0e\xdd\x10R\x12\xa3\x13g\x15 \x17\xdf\x18&\x1as\x1a<\x19R\x17\x8c\x15q\x14\xf1\x13\x92\x13\xd0\x11\xa7\x0e@\x0b\xee\x07\xf2\x04\x06\x02\x16\xff\xa7\xfb\xbe\xf8A\xf6\x90\xf3\x10\xf1\x12\xef\xe2\xed_\xedm\xed\xa6\xed\x99\xed*\xed\xfa\xec)\xed\xd1\xed\x04\xef\x11\xf0\xf7\xf0\xc8\xf1\xbd\xf2!\xf4\xe6\xf5\xd6\xf7\xe0\xf9{\xfb\x81\xfc4\xfd\xc8\xfdB\xfe\xaa\xfe\xd2\xfe\x95\xfey\xfeb\xfes\xfe\xcf\xfe\xe6\xfe\x18\xffx\xff\xc6\xff/\x00\x81\x00.\x00\xe4\xff\xa2\xff|\xff\xec\xff\xdd\xff\x89\xff\xa6\xff\x17\x00 \x00\xd0\x00{\x01[\x01\x83\x01\xc6\x01\x14\x02\x97\x02"\x03J\x03\x9a\x03\x13\x04q\x04%\x05F\x06\x10\x07\xd6\x07\xbd\x08\x1a\t\xe3\t5\n\x9a\n\xfa\n\xbe\n\xa9\n\x87\n\x1e\nt\t\xb8\x08\xba\x07P\x06\xa3\x05\xd0\x04\xa5\x03V\x02h\x00I\xfe\xce\xfc\xcb\xfb<\xfa\x03\xf9v\xf7\x1a\xf6\xfb\xf4\xfc\xf46\xf5\x14\xf5\\\xf5\xbe\xf5\x07\xf6z\xf6\x1a\xf7\x19\xf7\xb1\xf7\x9f\xf8q\xf9S\xfa\x98\xfaa\xfa\xd4\xfaz\xfc\xb1\xfd\xac\xfe\xae\xfeH\xfe\x84\xfe\x91\xff\xee\x00/\x01(\x00\r\xff\x8b\xfe\x0f\xff\xd4\x00)\x01\x99\xff_\xfeD\xffI\x00\xc9\x00U\x00\xca\xfe[\xfe\xa7\x00\xc9\x02\x89\x03]\x02*\x00K\x01\xc8\x02\xc6\x02\xe7\x01`\x01\x85\x01\x10\x02\xf6\x01y\x000\xff\xee\xfe\xe8\xffo\x00P\x00\xde\xff\x04\xff\xe8\xfe\xa0\xff\x86\x00R\x02\x1a\x05\xbf\x07\x19\n\xcc\x0bs\r\xe6\x0fl\x12%\x14\x8e\x14\x15\x14\n\x13v\x12\xcd\x12\x14\x13\xd3\x126\x11\x01\x0f\x18\x0e\xef\r\x18\r\x15\x0bC\x08O\x059\x02\r\xff\xcf\xfb\xac\xf8r\xf5=\xf2\xe0\xef\x06\xee&\xec1\xea\xfc\xe8s\xe9\x01\xeb\xbc\xebQ\xeb"\xebB\xec\xb0\xeeT\xf1\'\xf3)\xf4\x06\xf5S\xf6\xbf\xf8\xed\xfbw\xfe\xd1\xff\xa1\x00\xff\x01\xce\x03\x07\x05\x1e\x05\xde\x04"\x05=\x05\x89\x04$\x03\xa3\x01\x9b\x00\xfa\xfft\xff\xb5\xfe\xbd\xfd\x99\xfc\xd1\xfb\xab\xfb\xbd\xfb\x8d\xfb\xec\xfa\\\xfa\\\xfa\x81\xfa^\xfam\xfa\xab\xfa\x05\xfb\xfb\xfbm\xfc\xae\xfc\xaf\xfd\xc0\xfe\xc5\xff\'\x01\xdd\x01\xca\x01\x82\x02m\x03\xc2\x04\xb9\x053\x06\xb9\x06\xae\x07\xc0\x08\x9b\tE\n.\n\xb3\n`\x0b2\x0cO\x0c\xaa\x0b\xfa\tG\t6\t\xb4\x08\xb9\x07^\x05\xab\x03\x86\x03_\x03\x01\x02\x1f\xff|\xfco\xfb\x86\xfb\x80\xfbU\xf9\xa3\xf6\xb7\xf4\xfe\xf4\x9c\xf5\xe6\xf44\xf4\x82\xf4\x17\xf5\x81\xf5\x1d\xf6V\xf6m\xf7*\xf9e\xfa\x06\xfb\xc2\xfb\xc3\xfb\xbc\xfc\xcb\xfeB\x01\\\x02\xcd\x00n\xffj\x00,\x04\xfb\x05\x9b\x04\xe9\x01%\x00\x8f\x00\xfc\x01U\x03\xb3\x02\xcf\x00?\xfe+\xfc(\xfc\xf7\xfd\xf5\xfe\xf3\xfd\xd5\xfb`\xfa\xfb\xf9\x96\xfa^\xfbs\xfbN\xfbJ\
[Binary data omitted: this span is an unreadable raw byte stream (likely an embedded binary test fixture rather than patch text) and carries no recoverable content.]
\x0c\x96\x0c\xa3\n8\x04g\xfe\xca\xfb.\xfdR\x01\xeb\x01\xd8\xfe5\xf9A\xf3-\xef^\xec\x00\xea\xc4\xe8U\xe8\xad\xe8\x86\xeb\xba\xefE\xf3\x07\xf5\xfd\xf2/\xf0\xe1\xefE\xf3f\xfa\xc1\xff\x1f\x04\xa3\x06n\t\x83\x0b\n\x0cR\x0b;\t\x8f\x07\x0e\x07\xb9\x08\xc1\x0b\xf3\r\xb2\rj\n\xfc\x04\x8f\x01\xe1\xfft\xffq\xfe\xc1\xfdQ\xfe\xdb\xff}\x01\xc1\x00\xb7\xfe;\xfcn\xfa\xee\xf9F\xfaS\xfb\x06\xfdx\xfd\xd2\xfck\xfb\xe1\xf9\xc8\xf8\x14\xf7Z\xf5[\xf3\xe2\xf2\xd4\xf3\xac\xf4\x8a\xf4i\xf3T\xf2\xcd\xf2\xde\xf2\x7f\xf3\xdf\xf4\x03\xf6\x1a\xf9\xf8\xf9\xe1\xfa\x18\xfd\x99\xfe\x9c\x01\x00\x02\x18\x02\xe2\x03\x94\x03\xbb\x02\x10\x03\x86\t[\x19\x96%\xe9\'\xc6#j!\x00&u*\xcd)\x9f(\x8e+80a2W/%)\x12!Q\x16\xeb\t3\x01\xbf\xfe\xec\xffH\xfe\xc9\xf7?\xf1\x13\xed\x8b\xe9\x01\xe3g\xdbi\xd7\xb2\xd8\xfd\xdd#\xe4\xdf\xe9B\xef\xa6\xf1\xe3\xf0\'\xeea\xeeC\xf4\x80\xfbE\x02\x1b\x06/\nK\x0f\xda\x11\x02\x11\x1b\r\xa1\x08\x0c\x06H\x04N\x04[\x05\x86\x06\xe2\x05\xc3\x00=\xfa\n\xf4|\xf0\x84\xedH\xea\xac\xe7O\xe7I\xe9-\xec(\xeec\xee\x17\xee\x1b\xed6\xedh\xee\xad\xf2$\xf9Q\xffA\x03[\x06\x14\t\x92\x0b\xf9\x0bM\n\x90\t\x88\n\xd2\x0c[\x0f\x02\x11\xe2\x10.\x0fj\x0bz\x07\x00\x05\xa0\x03\xd9\x02&\x02\xcf\x00\xbb\x00\xe5\x00k\x00\xe2\xfe\x16\xfb2\xf8I\xf7(\xf8T\xfa\x90\xfb\xed\xfb%\xfb"\xf9 \xf7\xbc\xf5\xec\xf4\r\xf5\x80\xf5\xd1\xf6\x9b\xf7m\xf7\x90\xf69\xf5\x0f\xf4\x1f\xf3N\xf3\x82\xf5\xd7\xf7s\xf8\x95\xf8\xa9\xf8\x1c\xfbM\xfd\xcf\xfd\x8d\xfd\xa4\xfc\x02\xff\xcb\x01\x05\x04i\x05\xca\x04\xf7\x05\xad\ni\x13\x12\x1f\xa9%v$\x9d\x1f\x93\x1e\xb9${+\xed,\x1b+O+M-\xcc,E&\x0b\x1d\xb1\x14;\rR\x064\x01D\x00\x0c\x00\x9b\xfb4\xf3\x9b\xeaD\xe5\x82\xe2o\xdf\x00\xdd^\xdc+\xdfU\xe4l\xe9\xd5\xec\xe4\xed\xb7\xed\x0c\xed\xc3\xee{\xf3\x0e\xfb\xbe\x02\x1d\x08\xf6\tk\n\x88\x0b\x11\x0cu\x0b\xd7\x08\xd2\x06\xad\x07#\tI\t)\x07$\x03\x98\xfe\xca\xf8R\xf3;\xefE\xed\x0b\xed:\xec\x00\xebW\xea\xa3\xea}\xeb\xd7\xeaS\xe9\xa3\xe9E\xec\xb7\xf0+\xf5p\xf9\xb3\xfd\xc8\x00\xe6\x01\xe5\x02G\x057\x08\xa8\t\x17\nm\n\xb0\x0c\xf0\x0et\x0f\xa1\x0e\xac\x0b\xbd\tK\x083\x07\xd0\x06\xf4\x05{\x05V\x04~\x02\x1d\x01\xa1\xff\xc5\xfed\xfda\xfb@\xfa*\xfa\x06\xfb\xfe\xfa\x9f\xf9C\xf8\xac\xf6I\xf6\x9b\xf69\xf6K\xf6s\xf5\xeb\xf4\x92\xf5\xd4\xf4:\xf4\xc4\xf39\xf3\xe3\xf4\xeb\xf5/\xf7n\xf8\xf1\xf7\x9b\xf8~\xf9\xd8\xfaB\xfd\xea\xfe>\x01\x11\x03\t\x02\x15\x00r\xff\xed\x02~\t\x95\x10m\x15%\x19\x07\x1ds\x1fg \x11\x1f_\x1e1!\xb8&\x83++,\xd4(&$\xe9\x1f;\x1b\xa5\x14\xd7\r\xc6\x08\xe4\x066\x06\xac\x03\xa7\xff\xfc\xf9i\xf3\xb0\xec$\xe7\xd5\xe4\t\xe55\xe6E\xe7^\xe8M\xea\xe2\xeb_\xec\xdb\xeb\xbf\xeb\xb3\xedJ\xf1\x9d\xf6\xf8\xfc\xa6\x02\xc3\x05Y\x05\xac\x03\xad\x030\x05?\x06\xd2\x05#\x05\xc5\x05(\x07)\x07\xe5\x04\xbd\x00\x82\xfcp\xf9\xa5\xf7\xb6\xf6w\xf5P\xf4\xe1\xf3\xe1\xf3\x8c\xf3G\xf2|\xf0c\xef\xfd\xee\xd2\xefQ\xf2\xca\xf5\xe5\xf9\x17\xfc\xa2\xfc0\xfcG\xfc\xc3\xfd\xcb\xff\xb3\x01\x88\x03\r\x05\xcf\x06\x0b\x08 \x08X\x077\x06\x95\x05\xa9\x05\xc0\x06\xfc\x07\xf5\x08\xb4\x08\x8c\x07;\x06\x95\x05\x10\x05G\x04\x93\x03\x9a\x03x\x04\xff\x04\x82\x04\x84\x02[\x00\xe2\xfd\xac\xfby\xfaa\xf9\xd3\xf8M\xf8p\xf7#\xf7?\xf6\x87\xf4\xc4\xf2x\xf1\xdd\xf1v\xf3\r\xf5a\xf6[\xf7\x19\xf8\x1f\xf9\xd4\xfaf\xfcu\xfdV\xfes\xff\xd9\x01\xa5\x047\x06\xf2\x06\x1e\x06\xb6\x04\xe0\x03\x8f\x04\xf3\x07\xdf\x0b\xdf\rK\r(\x0cn\r\x84\x0f\xbb\x10\x87\x10\xc7\x0f\x00\x110\x13\x1a\x15\x00\x161\x15\xd2\x13\x0f\x12\x0f\x10\xc8\x0e\xa3\rn\x0c\xb7\n\xb8\x08\xf8\x07 
\x07\x11\x05*\x01\xe0\xfc\x96\xfa\xbf\xf9>\xf9\t\xf9\xc7\xf8\xa1\xf8r\xf7\xcd\xf54\xf5\xfc\xf4\x81\xf4.\xf3\x80\xf2\x84\xf3I\xf5\x80\xf6\xa8\xf6\x92\xf6w\xf6\x0b\xf6\x92\xf5\r\xf6\x9b\xf7\x1e\xf9\xb0\xf9\xb3\xf9?\xfa\x1e\xfb\x8e\xfb\x7f\xfb\xac\xfb\x1a\xfcx\xfc\x01\xfd\xcb\xfd\x8d\xfe[\xfe[\xfd\xd7\xfc\x01\xfd0\xfd\xe8\xfca\xfc\x9f\xfc\xca\xfcf\xfc\x06\xfcc\xfcp\xfd#\xfe1\xfeC\xfe\x93\xfe\x05\xff?\xff\xd1\xff\xe3\x00\xb4\x01@\x02b\x02\xf4\x02\x12\x04\xc1\x04\xf9\x04:\x05\xb0\x05\x18\x06J\x06J\x06n\x06a\x06\xde\x05(\x05\x98\x04\xf7\x03\xed\x02\x97\x01N\x00!\xff\xd9\xfdu\xfc\x1f\xfb\xf0\xf9\xce\xf8\x92\xf7M\xf6e\xf5\x9e\xf4\xe8\xf3q\xf3R\xf3\xaa\xf3\x17\xf4s\xf4\xfb\xf4\xda\xf5\xe3\xf6\xf3\xf7\xdd\xf8\xcf\xf9\xba\xfa\xa4\xfb\x9b\xfc\xb5\xfd\xc7\xfe\x9b\xff\xd1\xff\x96\xff=\xffO\xff$\x00L\x01\xa0\x02\xe3\x039\x05\xe2\x06\x96\x08[\n~\x0c\x0f\x0f\xc8\x11\x89\x14\x1b\x17\xba\x19!\x1c\x82\x1d#\x1e\xc9\x1eC\x1f\xcc\x1e)\x1d^\x1b\x05\x1au\x18\xe1\x15\xa7\x12\xf8\x0f|\rb\n\xeb\x06(\x045\x02\xcf\xff\xec\xfc:\xfar\xf8\xae\xf6E\xf4<\xf2\xf9\xf0\x1b\xf0\xcc\xeet\xed\x19\xed_\xedk\xed\x19\xed.\xed\x08\xee\xe8\xeea\xef\xea\xef\xd1\xf0\x07\xf2\x17\xf3\xc6\xf3\xa4\xf4\xba\xf5\xdf\xf6\xff\xf7\x05\xf9\xef\xf9z\xfa\xa8\xfa\xab\xfa\xf6\xfa\x98\xfb\x14\xfcz\xfc\x97\xfc\x81\xfc\x8d\xfc\xaf\xfc1\xfd\xfe\xfd\x9e\xfe\x11\xffz\xff0\x00p\x01\x89\x02\r\x03*\x03p\x03\xff\x03\x9c\x04\x18\x05{\x05\xaf\x05\x89\x05$\x05\xdd\x04\xd0\x04\xa0\x04;\x04\xe8\x03\xbd\x03\x91\x03%\x03\x9e\x02d\x02V\x02\xfb\x01m\x01\x10\x01\xd9\x00\x91\x00\xf9\xffm\xff\x19\xff\x95\xfe\xcf\xfd&\xfd\x9e\xfc\xfd\xfb\x1e\xfbL\xfa\xdb\xf9x\xf9\xdd\xf8E\xf8\x16\xf8\x13\xf8\xf8\xf7\xa9\xf7c\xf7\x95\xf7\xee\xf7\x0f\xf8\x1e\xf8D\xf8r\xf8\xac\xf8\x01\xf9R\xf9\x96\xf9\xbd\xf9\xcf\xf9\xa0\xfaL\xfc\x0b\xfe\xe5\xff\xa3\x01\x87\x03#\x06\x15\tm\x0c\x1a\x10d\x13\x1a\x16K\x18l\x1a\xc2\x1c!\x1f\x92 \xc1 . 
^\x1f\x84\x1e8\x1d\x1a\x1b\x9a\x18\xdc\x15\xe4\x12\xd9\x0f\xd4\x0c\xf8\t\xc4\x06/\x03\x03\x00\xc5\xfd\x11\xfc\xfc\xf9\xcc\xf7\x08\xf6\xc3\xf4\x8c\xf3\x19\xf2\xfc\xf0H\xf0\xa3\xef"\xefH\xef\xbf\xef\xfb\xef\xd1\xef\x98\xef\x08\xf0\xa6\xf0$\xf1\xa9\xf1\x18\xf2\x89\xf2\x02\xf3\xc5\xf3\xd8\xf4\xc0\xf59\xf6u\xf6\xda\xf6[\xf7\xe2\xf7T\xf8\xac\xf8\x0c\xf9x\xf9\x02\xfa\x9d\xfa\x1c\xfb\x81\xfb\xd5\xfbK\xfc\xf4\xfc\xe8\xfd\xf9\xfe\xe4\xff\x9a\x00:\x01\xfb\x01\xcd\x02\x97\x03I\x04\xc8\x042\x05\x82\x05\xb0\x05\xdc\x05\xd8\x05\xb2\x05p\x05/\x05\x00\x05\xa1\x04\x18\x04\xa2\x03N\x03\r\x03\xc5\x02\x88\x02P\x02\x02\x02\x95\x011\x01\xdb\x00[\x00\xc1\xff\'\xff\xab\xfe \xfek\xfd\xa4\xfc\xfd\xfb^\xfb\xac\xfa\xfc\xf9l\xf9\xf8\xf8\x87\xf8U\xf8D\xf8D\xf8X\xf8^\xf8s\xf8\xba\xf8\xd9\xf8\x03\xf9U\xf9x\xf9\xb5\xf9\x11\xfal\xfa\xe9\xfaU\xfb\x94\xfb\x08\xfc\x95\xfcT\xfdz\xfe\xf5\xff\x05\x02c\x04\xc6\x06`\t\xd7\x0b\x1c\x0eq\x10\xf4\x12\xce\x15\x95\x18\x84\x1a\xdd\x1b\xe6\x1c\xa5\x1d\n\x1e\xde\x1d^\x1dt\x1c\xb6\x1a\x99\x18\x89\x16\xbc\x14\xaa\x12\xdd\x0f\xd2\x0c\xce\t\x1a\x07\x9d\x046\x02?\x00;\xfe2\xfcB\xfa\xa4\xf8u\xf7\x1f\xf6\xcf\xf4z\xf3p\xf2\xa0\xf1\xba\xf0#\xf0\xb4\xefU\xef\x1d\xef\xe4\xee\xe7\xee\xf3\xee\xfb\xee0\xef\x9b\xef\x1b\xf0\xa4\xf0_\xf1S\xf2)\xf3\x02\xf4\xe3\xf4\xbb\xf5q\xf6\xe5\xf6\x96\xf7x\xf84\xf9\xc6\xf9k\xfa5\xfb\xcc\xfb3\xfc\xc2\xfcr\xfd\x0e\xfe\xa8\xfe\x82\xff\xbc\x00\xac\x01>\x02\xc5\x02T\x03\xf5\x03U\x04\xc7\x04d\x05\xbd\x05\xd6\x05\xca\x05\xf5\x05\x10\x06\xd1\x05r\x05D\x050\x05\xd8\x04X\x04\x1a\x04\xf7\x03\xac\x03=\x03\xea\x02\xac\x025\x02\xad\x018\x01\xe9\x00y\x00\xde\xff@\xff\xaa\xfe/\xfe\x93\xfd\xf9\xfcg\xfc\xd9\xfbY\xfb\xea\xfa\x8d\xfa\x1d\xfa\xbf\xf9t\xf99\xf9"\xf9.\xf9R\xf9i\xf9O\xf9=\xf9f\xf9\x91\xf9\x88\xf9\x9c\xf9\xe7\xf96\xfaM\xfaB\xfa\x7f\xfa)\xfb\xcb\xfb\x04\xfc\xa5\xfc\xe1\xfd(\xff\x8e\x00/\x02A\x04\xc2\x06\xf6\x080\x0b\xe5\r\\\x10\\\x12!\x14\x1c\x16\x0c\x18\x99\x19\xa0\x1aS\x1b\xd0\x1b\x95\x1b\xd6\x1a\x13\x1a\x01\x19\xae\x17\xc6\x15\xb8\x13\xe6\x11\xbf\x0f|\r\x05\x0b\xa7\x08w\x06\xfd\x03\x9c\x01j\xffp\xfd\x82\xfbz\xf9\xbd\xf7A\xf6\xc9\xf4W\xf3\xf5\xf1\xde\xf0\n\xf03\xef\x88\xee\x08\xee\x8f\xedC\xed,\xed]\xed\xbb\xed\xd4\xed\xf6\xedb\xee\x0c\xef\xe1\xef\xa6\xf0o\xf1F\xf2#\xf3\r\xf4\x18\xf5&\xf6\t\xf7\xcb\xf7\x89\xf8u\xf9\xa3\xfa\xa2\xfb_\xfc\xfc\xfc\xa5\xfdj\xfe\x1c\xff\xd8\xff\x94\x008\x01\xb4\x01&\x02\xc7\x02h\x03\xd3\x03\x19\x04X\x04\xb3\x04\x00\x05(\x057\x05C\x05M\x05]\x05`\x05R\x058\x05\r\x05\xd6\x04\xd0\x04\xde\x04\xcc\x04\x93\x04I\x04\x1f\x04\xdf\x03z\x03?\x03 
\x03\xd2\x02H\x02\xea\x01\xb6\x012\x01X\x00s\xff\xe4\xfe_\xfe\xb0\xfd\xf4\xfca\xfc\xc5\xfb\x0c\xfbz\xfa\x11\xfa\xbd\xf96\xf9\x92\xf81\xf8+\xf8=\xf8G\xf8\\\xf8\x88\xf8\xc7\xf8\x0b\xf9C\xf9\xb4\xf9S\xfa\xdb\xfae\xfb\xf6\xfb\xb8\xfc\x8c\xfdA\xfe\x11\xff\x0e\x00"\x01\x16\x02\xfd\x02\x1a\x04H\x05r\x06\xd8\x07\x80\t\x17\x0bs\x0c\xb3\r\x1d\x0f\x85\x10\xac\x11\xc1\x12\xe1\x13\xbd\x14:\x15\x88\x15\xe1\x15!\x16\xe0\x15#\x15M\x14o\x13Z\x12\xe6\x106\x0f\x8d\r\xd3\x0b\xf6\t\x17\x08?\x06G\x04\x1d\x02\t\x00:\xfe\x9d\xfc\xe1\xfa"\xf9\xac\xf7j\xf6$\xf5\xf5\xf3\xf1\xf2\x1d\xf2L\xf1o\xf0\xe7\xef\x9e\xeff\xef2\xef*\xefw\xef\xca\xef\t\xf0_\xf0\xe4\xf0\x91\xf1"\xf2\xc1\xf2\x8e\xf3^\xf4)\xf5\xe1\xf5\xba\xf6\x9c\xf7R\xf8\xf7\xf8\xa8\xf9p\xfa\'\xfb\xc8\xfbk\xfc\x0f\xfd\xa0\xfd&\xfe\xb5\xfea\xff\xf9\xff\x80\x00\x0e\x01\xc2\x01\x8b\x02.\x03\xb9\x03@\x04\xc0\x041\x05\xa0\x05\'\x06\x96\x06\xca\x06\xe7\x06\x1a\x07Z\x07^\x07%\x07\xe5\x06\xa4\x06T\x06\xf2\x05y\x05\xff\x04h\x04\xcc\x03:\x03\xab\x02\n\x02>\x01]\x00\x96\xff\xf0\xfeU\xfe\xba\xfd\x1d\xfd\x82\xfc\xf7\xfb\x8c\xfb\x1f\xfb\x95\xfa$\xfa\xbb\xf9S\xf9\x10\xf9\r\xf99\xf9?\xf9\x14\xf9\x04\xf9H\xf9\xb8\xf9\r\xfaW\xfa\xc7\xfa[\xfb\xc6\xfb/\xfc\xea\xfc\xd9\xfd\xa8\xfe\x16\xff\xae\xff\xa3\x00\x96\x01g\x02,\x03\xfc\x03\xe0\x04\xa0\x05J\x06,\x07\xf0\x07k\x08\xd9\x08L\t\xc8\tM\n\xb7\n\x15\x0b[\x0b\x88\x0b\xdb\x0b@\x0ch\x0cX\x0cC\x0c=\x0c\x1b\x0c\xc9\x0b\\\x0b\xf2\n\x87\n\xeb\t5\t|\x08\xcd\x07\xf6\x06\xf6\x05\xe8\x04\xe8\x03\xf4\x02\xd8\x01\xaf\x00\x9b\xff\x99\xfe\x9b\xfd\x84\xfc\x85\xfb\x91\xfam\xf9M\xf8U\xf7}\xf6\xac\xf5\xb6\xf4\xf7\xf3o\xf3\xeb\xf2z\xf2#\xf2\xea\xf1\xc7\xf1\xa8\xf1\xb9\xf1\xec\xf18\xf2\xa5\xf21\xf3\xef\xf3\xaa\xf4d\xf5<\xf6K\xf7e\xf8j\xf9r\xfaz\xfb~\xfcn\xfdS\xfeL\xff=\x00\x16\x01\xe2\x01\xb7\x02\x87\x031\x04\xb2\x043\x05\xb1\x05\x19\x06h\x06\x9a\x06\xbc\x06\xde\x06\xe0\x06\xce\x06\xbd\x06\xb7\x06\x9d\x06T\x06\x0e\x06\xc8\x05m\x05\xf2\x04x\x04\x00\x04u\x03\xd9\x02P\x02\xe4\x01b\x01\xb1\x00\xef\xffE\xff\xaf\xfe\x15\xfeO\xfd\x90\xfc\xee\xfbg\xfb\xe2\xfa\x7f\xfaM\xfa(\xfa\xd4\xf9x\xf9l\xf9\x91\xf9\xaa\xf9\xbb\xf9\r\xfa{\xfa\xc3\xfa\xf0\xfaa\xfb\x15\xfc\x9b\xfc\x11\xfd\x98\xfd3\xfe\xc8\xfec\xff\x19\x00\xce\x00^\x01\xca\x01R\x02\x0b\x03\x9a\x03\xef\x03\\\x04\xfc\x04|\x05\xcd\x05\x13\x06t\x06\xc1\x06\xdf\x06\t\x07R\x07\xa8\x07\xec\x07\x14\x08;\x08W\x08:\x08\xfa\x07\xc4\x07\x9e\x07\x85\x07g\x07\x18\x07\xc7\x06\x87\x06\x1f\x06\xad\x05A\x05\xf1\x04\x90\x04\x13\x04\x8b\x03\xf9\x02y\x02\xfa\x01\x8b\x01A\x01\xf9\x00\x93\x00\x1c\x00\x95\xff\x0c\xff\x81\xfe\xf4\xfdq\xfd\x01\xfd\x9a\xfc\x10\xfc{\xfb\xe2\xfaJ\xfa\xc1\xf9^\xf90\xf9\x14\xf9\xf5\xf8\xbd\xf8\x7f\xf8]\xf8C\xf8Y\xf8\x8c\xf8\xc8\xf8\x08\xf9K\xf9\xb8\xf9/\xfa\xa7\xfa\x1d\xfb\xa1\xfb>\xfc\xdf\xfcs\xfd\n\xfe\xa4\xfe\'\xff\xaf\xff:\x00\xc0\x00P\x01\xc9\x014\x02\x98\x02\xf6\x02b\x03\xb1\x03\xe6\x03\xf6\x03\xe4\x03\xe6\x03\xf1\x03\xf7\x03\xfa\x03\xe4\x03\xb7\x03U\x03\xea\x02\x89\x02\x1c\x02\xc1\x01`\x01\xf4\x00\x8e\x00\x1a\x00\xa6\xff>\xff\xc9\xfed\xfe\xfb\xfd\xa1\xfd/\xfd\xb7\xfci\xfc\x1b\xfc\xdc\xfb\xbb\xfbd\xfb-\xfb\x00\xfb\xc3\xfa\xcd\xfa\xa4\xfa\x98\xfa\xbc\xfa\xd9\xfa\xfe\xfaD\xfb\x8e\xfb\xdf\xfbN\xfc\xa1\xfc 
\xfd\x94\xfd3\xfe\xaa\xfe"\xff\xb5\xffn\x00\x13\x01\xdd\x01U\x02\xf8\x02\x95\x03\xec\x03`\x04\x87\x04\xd3\x04\xed\x04\xef\x04\xfd\x04"\x05$\x05~\x05\x7f\x05\xca\x05\xb4\x05`\x05q\x05\x11\x053\x05\x86\x04\xed\x04|\x04\xe1\x03u\x04\xb3\x03o\x030\x03\x19\x03\xb7\x02Y\x02c\x02\x8f\x02\x9d\x02\xf4\x01\xe9\x01\xd1\x01\'\x01\xcd\x00\r\x00\xe7\xffj\xff\xf3\xfe\xc5\xfe"\xfey\xfbH\xfa\x8d\x01\x9c\x0fV\x15\x8a\x03\x89\xee\x1c\xe8\xd7\xee\\\xf7\x91\x03|\x05\x86\xfd\n\xf3\xe9\xe7\xc7\xeee\xf3=\xf8\xde\xf8\x1e\xf6\x00\xf6\x80\xf6\xe9\xf8\xbd\xfe\n\x04\x1f\x02W\xfb\x17\xf8B\xfcI\x028\r+\r\xcf\x04u\xff\x7f\x00\x93\x06\xa5\x0c\xaf\n\xa1\x02\xbb\x005\x05\n\x0b5\x0b\xe4\x06"\x02\x90\xff\xbf\x04\xb3\x06\x0c\xfff\x04\xde\x02\xa8\xff\x01\x01 \x03\x17\x07\xd3\xff\xed\xfd\xc0\xfe\x97\xfc\xd3\xfe\xbf\x03a\x04\xed\x01\x92\xfc\x02\xfbt\xfe\xfd\x00\xd3\xfe\x80\xfdO\xffj\xfeE\xfc^\xfe\xab\xfcH\xfe\xf4\xfd\x13\xf8\x97\xf7\xa8\xf9\xea\x034\xfcN\xfa\x88\xfbQ\xf6c\xf8S\xfc+\x02\x1d\xf9\xac\xfc3\xfeZ\xfau\xfb\xe8\xfeP\x02\xe3\x00\xd3\xff\xa1\xfc(\xfdf\xfd\xe7\x01t\x07\xe8\x04?\x02\xda\x01\xc2\x00\x8e\x01\xac\x05-\x05\x00\x03\x1c\x04\xae\x07\xb2\x04 \x03\x87\x04f\x04\xce\x06\xa1\x06F\x03\xc3\x01\xb5\x02\x93\x03\xba\x05\xbc\x04\xeb\x04\xb9\x01\xd1\xfe`\x00\xa4\x04\x00\x03\xc0\xffx\xff\x9a\x01\x00\x00w\x00\xa4\x02u\x00\xcd\xfd"\xfd\xda\xfdZ\xfeA\x02\\\x001\xfes\xfb\xfa\xfa\xe1\xff0\x01\x9b\xfd\xdb\xfc_\xfdB\xfc<\xfc\x9e\xfd\xad\xfeG\xfd\xf5\xfb\xb4\xfa\xcb\xfb\x0e\xfdd\xfc\x0c\xfd\xdd\xfd\x8f\xfco\xfb-\xfb\xae\xfc\xc3\xfe\xe5\xfeg\xff\xdc\xfe\x0e\xfe\x80\xfc\x11\x00\xf1\x03\xce\x01\x0f\x03=\x02\xc1\xfe\xbc\xff\xb6\x04\x00\x05\xbd\x025\x03a\x01\xfe\x00\xd1\x05+\x05\xca\x01s\x01\x06\x05\xb6\x03$\x02\xad\x019\x02\x9e\x02\xf0\x01\x84\x01C\xff\xab\xfe\xa2\xfe,\x02z\xff\x0c\xfd\x99\xfb\x85\xfc\xc5\xfe\xb4\xfd\x81\xfc\xd9\xfcn\xfe_\xfd\x88\xfb\x8f\xfc2\xfe\xfc\xfd\xd2\xfb\xbd\xfa\x9c\xfe\xc0\xfb\xa0\xfcC\xfe\xa0\xfc\xfc\xfb?\xfc\xc8\xfe?\xfdw\xfc|\xfd\x8d\xfd!\xff\xe9\xff8\x00V\xfe\xce\xfdO\xff\x0e\xff\x15\x00\xb3\xff\'\x02\xd6\x01\xc4\x00\xc5\x01_\x03F\x01\x94\x00h\x02\xbc\x02\xe1\x04W\x04\x03\x04\x87\x02R\x04v\x04\x0e\x04U\x07n\x05q\x02\x18\x03\xe4\x03\x14\x03\xeb\x03\x8b\x06^\x04\xa4\x02\t\x02F\x02\x92\x00|\x02\'\x03\xc7\xff\x16\x017\x01m\x01\xeb\xff\xc2\xffr\xfd\xf6\xfd\x0c\x00h\xfe$\xff\x92\xffw\xfc\xe0\xfa\x16\xfe+\xfc2\xfbC\xfd\x7f\xfbw\xfb\xa0\xfc?\xfc\xd0\xfb/\xfbY\xfb\xdc\xf9\x05\xfc>\xfdk\xfd\\\xfd\x8a\xfc\x18\xfe\xa8\xfd"\xfe\x06\xff\x19\x00!\xfd\xfa\xfe\xce\x00\xae\x00\x82\x01S\xff\xfa\xff,\x01;\x02X\x03\xf2\x03\xf0\x01\xc2\x01\xf9\x01E\x02\xdf\x03e\x05\xb2\x04\x81\x02\xc6\x03\xdf\x02\xab\x03^\x03\xc2\x03\x19\x02\xa6\x02"\x04\x1c\x02\x84\x02\x95\x01n\x02\x96\x00X\x00:\x00\xf3\x00\xb7\xff\xbe\xfe\xac\xff\xa1\xfe\x90\xfe3\xfe:\xff|\xfe\xe3\xfe4\xfe\x15\xfe\xa5\xfe%\xfe\x82\xfc\x07\xfd\x00\xfe\xbb\xfe\xbf\xfd0\xfc\x1d\xfe\xf9\xfd\xb4\xfd\x8c\xfc\x91\xfd\xe6\xfb\x02\xfc\x86\xfd\n\xfd\xea\xfeb\xfd\xa8\xfd\xd0\xfc\x8f\xfe\x04\x00-\x00r\xfdG\xfd\x15\x01v\xfe{\x00\xff\x00s\x01\xb2\x00W\x01\x7f\x025\x02\x85\x019\x00b\x04\xd1\x03T\x03\xe7\x02\x9c\x02 
\x04\xc4\x03\r\x04\xcf\x04j\x04$\x02e\x02\xed\x04\xa9\x03Y\x03\xfc\x04\xe7\x03{\x02\xeb\x01\x9d\x02\xc9\x00c\x02R\x02\x9e\x02\xbd\xffr\x01\xc4\x01\x9f\x00\xda\xfe\xeb\xfcu\xff9\xfc\xca\xfe5\xfeG\xff\xdf\xfd\xe8\xfb\xb0\xfb}\xfbI\xfb\x8b\xfd\xd5\xfa\xad\xfa\x11\xfe\x94\xfb\x0c\xfd\x99\xfdG\xfd\x98\xfa\xec\xfa\x00\xfd\x86\xfe\xc3\xfb\x1e\xff0\xfeX\xfeZ\xfe\x95\xfd\xf4\xfe\x9f\xfe{\x01\xfc\xfe\x0b\x01-\x00\xe3\x00\xf5\x00e\x01b\x02"\x02\x9a\x01\xd7\x01\x82\x01I\x04l\x04.\x01\x91\x03e\x03\xb0\x02\xc9\x03\x9a\x03I\x02\xda\x02;\x01\xc6\x03\xb7\x02\x08\x01\xf1\x02\x8f\x01\xfb\xff\xc4\x00\xe2\x01\xb6\xff"\x00(\x00\x01\x01d\x00c\x00\xba\xffW\xfe\xcc\xfe\xb3\xfe\xaf\x00O\x00\xa8\xfe]\xfe\xb4\xff\xed\xfe$\xfe[\xfe\x93\xfe\xe5\xfe\xdf\xfeF\xff\xe0\xfd\x96\xfdN\xfe\xf3\xfdQ\xfc\xd0\xfdN\xfeQ\xfe\xd8\xfe\xbf\xfb\x17\xfb\x03\xfd\xef\xfc\x81\xfc\xae\xfdM\xfch\xfdd\xfee\xfd\x9f\xfb<\xfd^\xfe\xf9\xfe\xd3\xff%\xfe[\xff\xce\xfe\xb2\x00J\x01\xa6\x00s\x00\xc4\x01\xae\x01\xad\x00H\x03\x8a\x03\x9c\x02w\x02\xbf\x02\x81\x02\xfd\x05:\x033\x01\xcf\x03\x8e\x03\xee\x03\xba\x02\x1e\x03F\x02<\x02p\x03\xc4\x02m\x01\x15\x01\xb7\x01<\x01~\x00_\x00}\x00(\xff\xc2\x00d\xff\xeb\xfd4\x00\\\xfe:\xfd,\xfe\x0e\xfe\x0b\xfe\xc9\xfdX\xfe\xf5\xfc%\xfd\xa9\xfd\xb5\xfc\x1b\xfe=\xfe*\xfd\x85\xfdG\xfet\xfd\x8e\xfe4\xfe\x0b\xfe;\xfd\xb2\xff\xfb\xff[\xff\xe6\x00\xd1\xfe\x05\xff\xa7\xff\xd9\x00\'\x01\xe4\x00P\x00\xf1\x01p\x01t\x01\xbc\x01\x10\x01\xd9\x011\x01;\x02\x9c\x02\x1e\x022\x02s\x01\x86\x01\xab\x013\x01N\x02\xf6\x01\xc1\x00\x94\x01\xe2\x01\x84\x01\xf9\x01#\x01\xf1\x00H\x01\x06\x01\x86\x00\xac\x01F\x02\x01\x01I\x00\x18\x01\xb0\xff5\x00\xd1\x01\xe8\xffH\x01F\xff\xb5\xff\xd7\xffq\xff\xf2\xfe0\xffR\xffD\xfe\x08\xffS\xfd\x0e\xfe\xdd\xfd\xa0\xfdY\xfeD\xfd\xdf\xfc\xae\xfd\xa2\xfd\xaf\xfd>\xfd\xea\xfc\x95\xfc\x95\xfd\xc6\xfd\x8b\xfe\x92\xfe\xc2\xfdb\xfeD\xfd\xb5\xfe\xe0\xfd\x1b\x00\xe6\xfe\x89\xff\xa3\xff~\xff\xf9\x01\x16\x00\'\x01\x0e\x01\x84\x01$\x00J\x02\xed\x01>\x03\xcf\x03\xc8\x02\xfa\x020\x028\x03\xfb\x02\xb2\x02\xc4\x02P\x04j\x03\xa3\x03w\x02\xb6\x03\x85\x01N\x02\xa2\x02\x0c\x03_\x017\x00\xf3\x00t\x00L\x02\xaa\xff\xce\x00\x94\xfe\xc0\xfe]\xfe&\xff\xd8\xfe\x11\xfe\xb2\xfd\'\xfd\xea\xfdr\xfc\xea\xfc\xe3\xfc\x95\xfc\xc8\xfd\x14\xfcT\xfc\x87\xfc\x92\xfc\x9d\xfc\xc7\xfdS\xfe\xdd\xfc<\xffR\xfd\r\xfeJ\xfe2\xffo\xfeG\xff\xc0\xffN\xff\xf2\x007\xff~\x01\x03\x00F\x00\xa5\x00\x7f\x01\x85\x01\xfa\x00\x82\x02\xe2\x00A\x02~\x02\xa2\x01\x8b\x00$\x01J\x02\xe0\x017\x03;\x01\x8a\x00\xc0\x00\x11\x01V\x01c\x02\xff\x01\x05\x00\xd1\x00\xf8\x00Z\x00\xa1\x01_\x01\x06\x00\xb8\x00\x16\x00c\x00\x81\x00{\x00\xbe\xffP\x01V\xff\x95\xff\xf7\xfe\x97\x00\xd8\xfe\xc2\xfe\\\x01<\xfe\xa3\xfeS\xfd\'\xff\xdb\xfe\x0f\xffO\xfdv\xfd\xc8\xfd\xb7\xfc\xd0\xfd\xad\xfd\x84\xfd\xef\xfct\xfd\xe8\xfcX\xfe\x05\xfd\x03\xfd\xbd\xfe+\xfd\xf3\xfe\xd7\xfd\x9f\xffc\xfe\x0c\xff\x15\x011\xfe\xbd\x00\x95\xff~\x01^\x00F\x01\xc2\x00\xf4\x01l\x02[\x00\t\x03\xb2\x01\x06\x03\t\x02\xb8\x02\xdc\x01{\x02\x95\x02|\x02Y\x02<\x02i\x02\t\x02B\x02.\x01e\x02v\x01B\x01\x9a\x01=\x00\xbd\x01\x9d\x00\xcc\x00\x0f\x00\xcd\xfev\x01\xf8\xfe\x8b\xff\xa9\xff"\xff\xb5\xfe\xad\xff\x1d\xfe\xf2\xfdT\xff\n\xfe8\xff>\xfe\xed\xfd\x8f\xfd\x12\xfe[\xfe\xd1\xff\xd7\xfe\xd1\xff\xfc\xfd\x00\xfe2\xfen\xfe\xa5\xffR\xffc\x00Q\xfe\xf5\x00\xad\xfd\x95\x00\x9e\x00\xfa\xff\x17\x00\xaf\xfe(\x01\xf8\x00\x87\x02\x1d\x00w\x02\xcb\x00\x1d\x02\xe6\x02I\x01\x05\x03\x99\x01\xb3\x02\xcf\x01\xe3\x02"\x02?\x02\xe6\x01=\x01\xfb\x01\x7f\x00O\x01u\x00:\x00n\x00\x08\x01T\xff\x98\xff\x16\xff\xbc\xffm\xff\x89\xffV\xff\xd9\xfd\xbb\xff9\xff0\xfe\xc2\xffO\xf
e\xa9\xfe\xc5\x00_\xfd\xa7\xff\x02\xfe\xdb\xff<\xfe\x93\xff\x1c\xff\x1b\xff\xdd\xfer\xfe\x8d\xff/\xfe\x1e\x00U\xfd[\xfff\xfd\xa1\xff\x15\xff\x8c\xfe\xb0\xfe\x13\xfew\xfe\x87\xfeW\xff\x07\xff\x15\x00\xc3\xfe\xa5\xff\xfd\xfe\x1f\x00\xae\xff\xa3\xff\xc2\x00Q\x00g\xff\xed\x00\xcf\xff\xac\x01b\x01\xa1\xff<\x01\xd2\x00\xd2\x01S\x00\xee\x01\x91\x00\x0b\x02\xa8\x02\x96\x01\xcf\x01\x88\xff\xe1\x00y\x01G\x01g\x03\xa5\x01c\x00\x19\x01\xeb\x01\xc6\x00E\x02\xcc\x00\x08\x01a\x01\x89\x00\x1c\x01\xad\xffd\x02\x8b\xff\x81\xff\xc3\xff\x86\xfe\xd9\xffy\xff!\xff\x13\xff\x12\xfer\xfe\x19\xff@\xfe\xa6\xfe&\xfd\xe0\xfd9\xffp\xfe=\xfe\xce\xfd\xde\xff\x86\xfe;\xfe\x85\xfft\xfd?\xffq\xff\xa6\x00\x18\xff\xce\xff\xa6\xff~\xfe9\x01u\x00\x83\xff\xbc\x00\xce\x00\xda\xff\x01\x00\x9e\x00\xa1\x00I\x00\x96\x015\x00\x96\x018\xff\x15\x02\x03\x00\xc8\x00\xcf\x01E\xff\xfe\x01\x95\x00\xb7\x01\xfe\xffO\x01\'\x01\xe4\x00\xec\xfe\x15\x01\xed\xff\x9c\xff\xc8\x00\xc8\xff*\x01\xde\xfe*\x00\xc6\xfe\xd0\xffl\xffP\xfe/\x01\xbb\xfe\xaf\xff\xa3\xfe\xf6\xfe2\xff?\xff_\x00\xc4\xfd\xd2\xff\xeb\xfe\xea\xff\x7f\xfe\'\xff\n\xff\xb0\xfe\xc0\xffm\xfe\xf7\xffr\xff\x89\xff\xd9\xfe\xdc\xfe\xab\xfe\x1c\xff\xe8\xff\xf7\xfe7\xfe\xdc\xff8\xff;\x00\xbe\xff\x94\xfe\x94\xff\xc5\xff{\xff\xc6\xff*\x00\xc4\xff\xb8\xff\x8c\x01\xea\xff#\xff\xe6\x00K\x01\xe3\x00Z\x00\xd0\xff\xf8\x01,\x00\xb9\x011\x02b\x00q\x02\x1b\xff\x08\x03$\x00\xb4\x01&\x01\x8c\x00\xee\x01a\x00\x9c\x03_\x01\xf5\xff]\xff\xdc\xff\xc0\xffN\x01\xd5\x00\x93\x00\x10\x00t\xff\x95\xff\xae\x00S\xff\x1c\xff\xa6\xfe\xa9\xfeR\x00g\x00u\xff\x08\xffQ\x00\xaa\xfeD\xff`\xfea\x01\x88\xff\xd6\xffU\xff\xf2\xfek\x00/\xff\xcd\xff\xc4\xffq\x00\x9b\xff\x93\x00\xac\xff\x00\xffR\xff`\x00\xf4\x00N\x00\xef\xff\xbd\x00\r\x00w\xff\x91\x00\xc1\xff\xe5\xff#\x01\x15\x00\xc6\x00\x08\x00\t\x00\xd0\x00F\xffd\x00\x83\xff\xde\x01\x8f\xff\xd4\x00\xa2\x00\x7f\x00\xea\x00\x08\x01\xe2\x01\xe0\xff\xc8\x01\xce\xfe\xa7\x01\xdc\x00/\x01\x1c\x01-\x00Z\x01\xa2\xff4\x00\xcb\xff\xb0\xff\xfe\xfe\xa2\x01F\xff\x81\xff\x9a\xff\xf2\xfe~\xff5\xff\x1b\x00\xf5\xfe\x13\xff\x96\xfe\x14\xfe\xa0\xff\xe2\xffx\xfe\x17\xfe\x94\xfd\xf6\xff\xf6\xff\xba\x00:\xff6\xff\xaa\xff.\xfey\x00\xe8\xfd\x04\xff9\x00\x86\xff.\x02\xba\xfe6\x00(\xff`\x00t\xfe\xe7\xff2\x01*\xff\xe3\x01\x0e\x00\x19\x01\xc9\xfe\x9d\x00\xeb\xfe\x00\xff\xce\x00W\x01\x9d\x01\xdc\xfea\x00\xdd\xff\xd3\xfe\xa6\xff\xe1\x00\xe7\x00\xbe\xff|\x01\x11\x00\\\x00@\x01\xc1\xfe\x94\xff\xb6\xff\x07\x01\x81\xff+\x01\x05\x00\xa1\x00v\xff\x90\xff&\xff\xf7\xff}\x01i\xfe\xe9\xffn\x00`\x00\xde\xfe\xdb\x00Y\xfeh\x00\xac\xff\xd0\xff\xda\xffR\xfe]\xff\xb0\x00\xc9\x00\x90\xff.\xff\xcf\xfe\xbf\x00\xd3\xff\xcc\xff>\xfe3\x00\xde\xffJ\x00\x8e\x00\x8f\x006\xff\xd2\xff\x98\xfe\xea\xfe\x95\x00i\xff7\x01\xc9\xfe:\x02\xbc\xff\x19\xff\xba\xfe$\xff\xb3\x00\n\x00\xd8\x01S\x00K\x00\xbe\x00\x12\xff\xa8\x00\xc8\xff\x91\xfe\xd1\x01L\xff\x9d\x00\xc2\x00\xd2\x01\xb1\x00\n\x00\xf7\xff\xdc\x00\x10\x01\xe6\xfd\xd3\x00W\x00\xa7\x00{\x02\x08\x01}\xfe|\xff\xef\xffI\xff\xb1\xff_\x00\xab\xfe\xdd\xff\x8c\x00\xde\xfe\x8f\xffh\xffI\xff|\xff\xea\xfe\\\x00C\xffG\x00~\x00\xb0\xff\x04\xffJ\xffR\x00\xee\xff\xca\xff\xc0\xfd\xe9\x00\xe3\xffN\xff\xb6\xfew\xff\xf7\xffd\x01\xab\x00t\x00<\x00\xbb\xfe\xaa\xff\x7f\xfeK\xff\x14\x01L\x01m\x01\x10\x01a\xff~\xff.\xffT\x00\x96\x00\xcf\x00G\x00m\x00\x88\xff2\x00R\x00e\x00\x83\x01\xe7\xff\xef\x00}\x00"\x02\xce\xff]\xfe\x10\x01\r\x00\x8a\xff\x0e\x00w\x00\x8b\x00\x07\x02\xd3\x00\xd0\xfe\x8b\xfe\x8a\xff\x80\x00q\x00\xf9\x00\xc9\xff\xcb\xfe\xcf\xfe.\x00\xbe\x01T\xff\xbb\xfc\x80\x00\xca\x00\xa5\xff\'\x00\xc5\xff\x8a\xfd_\
xfc\xd7\xff"\x022\x02Q\x00\x95\xff\xf7\xff\x8b\x00\x80\xfe\xbd\xfe\xaa\xff\xbb\xff\xd9\xfe\xbb\xff\xf1\x01<\xff\x03\x01%\x01\xd3\xff\xe5\xff\x85\xff\x88\x01`\xff\x15\xff\x06\x00\x13\x00<\xffV\x01\xe0\x02\xa4\x01\xca\xff\xd4\xff\x03\x000\xfe%\xff\x12\xff\xea\x01\xca\xff\x94\x00\xb2\x00\xd6\xff\xab\x01\x0e\xff\x16\x00x\xff\xa2\x00X\x029\x00\x81\x00*\x01W\x00\x81\xff\xcc\x008\xfe{\xfd\xae\xff\xcd\x02<\x04n\x00\x12\xff*\xfe\xff\xfd\xe5\xffz\x02\x9a\xff\xa4\xfdW\xff\x9c\x01\xc6\x01J\x00\x0b\x00J\xff2\xfd\x00\xfe\x18\x00`\xffk\xff\xaf\xff\x8a\x00\xf5\xff#\x01\xfe\xff\xac\xfe\x99\xff\xe3\xfe\r\x00H\xffn\x01\xeb\x01G\x00\xe2\xff\xc9\xfe\xf2\xfe\x00\xff\x91\x00\xb5\x01\xda\x01\x9f\x01\xd5\x01\xb3\xffN\xfe\xec\xfe\xc1\xffA\x01i\x00\xf7\x00\x00\x01h\x00\x88\x00\xd4\xfei\xfe\x8b\xfe\x86\xff\xf6\x00\xc4\x00\xa9\xff\xf4\x00\xd0\x00a\xfe\xcb\xfe\x1d\x00v\xfe\xe5\xfd\x06\x00C\x00D\x01\xfc\xff\x8c\xfe\x14\xfe\xe3\xfd\\\xfeX\xfdB\xfe(\xff\xb3\xfeh\xff\x97\xff\x83\xfe?\xfd\xed\xfc\x8d\xffO\xfe\x06\xfe\x9c\xfd\x82\xfd\xfd\xff\xec\xffB\xff\xb3\xfd\xb3\xfdy\xfem\xfe\xc1\xff\xee\xfe8\xff\x14\xff\x80\xff1\xff7\xfe\xe5\xfd$\xfc\x1b\xff\xa5\x004\xff\x08\xfe\xce\xfd\x81\xfec\xfe\xce\xfeC\xfe\xc2\xffo\xff\xe7\xff\xb5\x00H\x00G\x00\xb5\xff\xc0\xfe\xe6\xfe\x89\xff7\x00\xb0\xff\xb0\xff\xc5\xff\x83\xfe\x9a\xfeQ\xff\xbb\xfeX\xff\x1a\x00\xf7\x00o\x02\x1c\x04s\x05\x86\x07\xbd\t\xac\x0bH\x0e\xa9\x10\xef\x12U\x14O\x15\xd5\x15\xaf\x15\x1f\x15\x0b\x14h\x12\x14\x10\x10\r8\nG\x07z\x03\xe9\xff\x01\xfdt\xfa\xa5\xf7\x9b\xf5\xdb\xf3\xf8\xf1B\xf1\xc6\xf0$\xf0\x05\xf0,\xf0g\xf0\xd0\xf0\xbf\xf1\\\xf2\x91\xf2\xa7\xf3\t\xf5\x01\xf6\xe2\xf7\x1e\xfa\x92\xfb%\xfd!\xff\xec\x003\x02_\x03\xbe\x03\xc9\x031\x04\x13\x04\xaa\x03\x1c\x03\x06\x02\xd3\x00\xa7\xff\xc0\xfe\xeb\xfd\xa0\xfc\xfe\xfb\xa3\xfb]\xfb7\xfb{\xfb&\xfc9\xfc}\xfcD\xfd\r\xfe\x17\xff\xc6\xff+\x00O\x01t\x02&\x037\x03`\x03\xb9\x034\x04\x01\x04\xb3\x03\x92\x03T\x03\xb7\x02\xc7\x01k\x00\xf0\xfff\xff\xff\xfe\x15\xff\x91\xfe.\xfe6\xfd\xdc\xfb4\xfb\x0c\xfd\xd4\xfeI\xff\x9c\xfd\xcc\xfc\xc8\xfd%\xff\x81\xff8\xfeb\xfd\x9c\xfe\x87\xfff\xfe\x03\xfeO\xfe4\xfe\xb9\xfc\xf8\xfb\xcc\xfbv\xfbT\xfb\xd4\xf9\xe8\xf8Z\xf8\xdc\xf8@\xf8\xe2\xf7\x95\xf7\xe9\xf7\xf1\xf7#\xf8\xd4\xf8\x0f\xf9\x10\xfa\xde\xfa\n\xfcf\xfd$\xff\x16\x01L\x04\xb9\x07\r\x0bR\x0f\xa3\x13\xa1\x18@\x1e"#$\'0*\xb7,I.\x9f.\x82-\x0f+d\'\xb1!R\x1b\xb1\x14\xae\r\xb6\x06^\xff\x05\xf8\xf9\xf1\x02\xed\xdd\xe8r\xe5\x99\xe2\x9a\xe0\xb8\xdf\x9c\xdf\xd8\xdf\x96\xe0\xff\xe1t\xe3\xc3\xe4\x87\xe6\xfb\xe8\x05\xec\xe1\xeej\xf1_\xf47\xf8h\xfcg\x00:\x04\xc2\x07\x00\x0b\xcb\r\xc4\x0f\xdf\x10"\x11{\x10\xd5\x0e;\x0c\xe9\x08|\x05\xa4\x01`\xfd\xf9\xf8\x18\xf5\xef\xf1S\xefW\xed\xec\xebw\xeb\xc9\xeb\xb5\xec\xe3\xedy\xef\xd1\xf17\xf4\'\xf6/\xf8\x9e\xfa6\xfd\t\x00N\x02\x11\x04|\x06)\tD\x0b\xa4\r\xa3\x0f 
\x11c\x12Y\x13\xd3\x13\xac\x13\x1b\x13\xbe\x11\x91\x0f\x14\r\x8e\n\xcb\x07\xc1\x04\xfd\x01F\xff\x83\xfc\xac\xfa=\xf9\xb8\xf7\xc8\xf6_\xf6!\xf6\xd1\xf5\xde\xf5\xf1\xf5\xe0\xf5)\xf6Z\xf6I\xf64\xf6V\xf6M\xf6\x11\xf6&\xf6Z\xf6\x8f\xf6\xb4\xf6\x89\xf6\xa6\xf6\xe4\xf6N\xf7\xef\xf74\xf8\xa5\xf84\xf9\xe4\xf9]\xfa\xe9\xfa\xf4\xfb\x97\xfc\xc3\xfc\t\xfd^\xfd\xd7\xfd\xfb\xfd"\xfe\x04\xfe\xab\xfd\xfa\xfd\x0f\xfe\x9a\xfe$\x002\x02\xd6\x04{\x085\r\x83\x12\x7f\x18u\x1f2&\x19,\xad1V6)::\x11\xc6\x14[\x17\xbc\x18\xf2\x18\n\x18\xbf\x15\x00\x12)\r\xc7\x07\xd9\x01Q\xfbW\xf4\xcc\xedJ\xe8\xc9\xe3M\xe0\xde\xdd\xbb\xdc\x11\xdd\xdc\xde\xe1\xe1\xca\xe5k\xea\x93\xef\n\xf5X\xfa\x97\xff\xb4\x04p\t\x8a\r\r\x11\xe1\x13]\x16\xa2\x18b\x1a\\\x1b\xd7\x1b\x16\x1c\xd3\x1b\xfa\x1a\xb7\x19\xe0\x17n\x15Z\x12\x93\x0ea\n!\x06\xb1\x01V\xfd#\xf9"\xf5\xc3\xf1E\xef\xa6\xed\xfe\xec\xdc\xecR\xedm\xee\x16\xf0\x1c\xf2V\xf4\xb5\xf6\xcf\xf8|\xfa\xe3\xfb\xf5\xfc\xc5\xfdl\xfe\x86\xfe\'\xfe\xa8\xfd\x07\xfdi\xfc\xb0\xfb\xc7\xfa\xe8\xf9\xf3\xf8\xf6\xf7-\xf7\x8b\xf6\xe2\xf5N\xf5\xcc\xf4E\xf4M\xf4\xaa\xf4E\xf5\xf6\xf5\xe0\xf6\xec\xf7\x0e\xf9\x88\xfa\x05\xfc`\xfd\xd6\xfe\xd3\xff\xbb\x00\xa3\x01\x8a\x02 \x04_\x05Z\x067\x08~\x0b\xf3\x10i\x17\x15\x1d\x87"a(\x02/\xe45\xea:\x96=\xaa>N>\xe9; 7O0\x12(\xa4\x1e\xb0\x13\xbf\x07\x96\xfc\xe4\xf2E\xea\xe9\xe1 \xda\xac\xd4\x15\xd2[\xd1m\xd1\x15\xd2\xfe\xd3\x03\xd7c\xda\xef\xdd\xa4\xe1\x97\xe5^\xe9u\xec`\xef\x12\xf3\xc4\xf7\xb7\xfc\xd3\x00>\x04X\x08C\r\x01\x12\x84\x15\xae\x17\xbe\x18\xb2\x18\xff\x16\x9b\x13\xd8\x0e/\ts\x02\xb2\xfaV\xf2\xb1\xeam\xe4\x1b\xdf\xa3\xdaI\xd7\xd4\xd5\xa9\xd6\x8a\xd9\xb5\xdd\xc1\xe2\xab\xe8J\xef`\xf6V\xfd\xe1\x03\xfd\tH\x0f~\x13\xaa\x16\xf8\x18\xfd\x1aX\x1c\xd8\x1cy\x1c\xad\x1b\xd4\x1a\xf2\x19\x8f\x18\xb8\x16\x89\x14\xf7\x11\x08\x0f\xb5\x0bT\x08\xc0\x04\xcd\x00\xad\xfc\xb8\xf84\xf5l\xf2y\xf0,\xef\x9e\xee\xcd\xee\xde\xef\xbe\xf1.\xf4\xc5\xf6\x93\xf9Z\xfc\xf0\xfe(\x01\xcb\x02\xce\x038\x04\xf5\x03\x12\x03\xb2\x01\x12\x00;\xfe\x10\xfc\xc0\xf9\xb5\xf7\x02\xf6q\xf4\xe6\xf2\x94\xf1\xb9\xf01\xf0\xea\xef\xee\xef3\xf0\xd2\xf0\x98\xf1U\xf2n\xf3 \xf59\xf7\'\xf9\xdf\xfa\xea\xfcZ\xff\xa0\x01\xf2\x02\xb6\x03\\\x04\xb3\x04a\x04\xf4\x02Q\x01V\x00\x1a\xff\xcd\xfd\x1a\xfd\xee\xfe\xa4\x03 \t\xbf\x0eq\x15\xf6\x1e\xb7*!5\xd4<\xa0B\x0fH\x11L\xf7K\xc7G\x00A\x868\xc2-* \xad\x11t\x04Q\xf8C\xec\xa4\xe0p\xd7\r\xd2y\xcf\x0b\xceB\xcd\x12\xce\xd2\xd0\x8c\xd4\x03\xd8\t\xdbo\xde\x16\xe27\xe5\xc0\xe7\xab\xea\xf0\xee$\xf42\xf9\x12\xfe\xfa\x036\x0b\x94\x12\xb5\x18?\x1d\x9a \xb9"\xca"K 
%\x1b3\x14\xc0\x0b:\x02%\xf8D\xeeR\xe5\x83\xdd\x1b\xd7\xbc\xd2\xc5\xd0Y\xd1\xf7\xd3\xfd\xd7!\xddL\xe3p\xea\n\xf2\x85\xf9K\x00\xeb\x05\xb8\n\x0b\x0f\x1d\x13\xae\x16\\\x19\x02\x1b\x1d\x1c\xe6\x1c\xcc\x1dn\x1e{\x1e\x81\x1d\\\x1bQ\x18\xcc\x14\x0e\x11\xa2\x0c\'\x07\xf8\x00\xd7\xfaJ\xf5\x9d\xf0$\xed\xe9\xea\xbf\xe9\x87\xe9\x99\xeaN\xedp\xf15\xf6\xea\xfa\t\xff\xba\x02\x16\x06+\t\x85\x0b\x83\x0c\x02\x0cr\nj\x08g\x06P\x04\x01\x025\xffA\xfc\x8c\xf9m\xf7\xe7\xf5\x9b\xf4\x1f\xf3I\xf1]\xef\xee\xed\x1b\xed\xb7\xec\x8d\xec\xb3\xec\x04\xed\xf0\xed\xc2\xef^\xf2\xea\xf5\x80\xf9\x9c\xfc\x87\xff&\x02\xde\x04\x1e\x07\xfc\x07\x08\x08\xab\x07s\x06\xc5\x04\xa2\x02~\x00\x03\xff4\xfdq\xfa\x8e\xf7`\xf5V\xf4\xc2\xf3\x85\xf2Y\xf1\x9c\xf1\xa8\xf3X\xf8\n\x01\xb7\r\x0b\x1c\xac(\xf52\'>=K\x89V\xd6[\x1d[\rW\xe5P\xe6F\xf08\\)\xda\x19\xa4\t\x7f\xf8\xe8\xe8\xaf\xddo\xd6\xf4\xd0\x92\xcbx\xc7\xd8\xc5,\xc6]\xc7\xa3\xc8\xc0\xc9\x00\xcb\x88\xcc7\xcf&\xd4\x9d\xdb1\xe5\x82\xef3\xfa\x9c\x05\t\x12\xb5\x1e\xea)t2\x017\x957\xbc4\xfa.\xbe&k\x1c@\x10m\x03q\xf6R\xeaK\xe0\xed\xd8/\xd4\x10\xd1\xf7\xcei\xce\xa2\xcfu\xd2\xef\xd5\x98\xd9\xa2\xdd=\xe2\x88\xe7\x82\xed\xa1\xf4\r\xfd\xeb\x05\x85\x0e\xef\x16*\x1f\x98&\xe5,H1;3{2D/\x14*\x18#]\x1aa\x10\xad\x05\x90\xfb\xe8\xf2\xdc\xebm\xe6\xdc\xe2:\xe1G\xe1\xd0\xe2\xa1\xe5n\xe9\xbd\xed\xa8\xf1.\xf5\xe9\xf8\xca\xfda\x03j\x08T\x0c&\x10\xa5\x146\x19\xa8\x1c\xac\x1em\x1f\xd5\x1eM\x1c\xb6\x17\x04\x12\xcc\x0b\xe2\x04D\xfdB\xf5\xe3\xed\xfb\xe7\xdc\xe3\xf8\xe05\xdf\xc1\xde\xf9\xdf]\xe2\x8d\xe5o\xe9\xb4\xed\x0e\xf2\xd6\xf5_\xf9o\xfd\x9e\x01\x7f\x05\xfd\x07n\tt\x0b\xbb\rR\x0fP\x0f\xdd\r\x07\x0c\xb5\t\t\x06.\x01\x95\xfc0\xf8a\xf3\xec\xed\x07\xe9\xa3\xe6|\xe6\x17\xe6j\xe5\xbe\xe5&\xe8"\xec\x8c\xef\x9f\xf2\xc5\xf6$\xfbA\xff\x96\x05U\x12\x97%\x898\xabDdK\xefR,\\\x7fax^WUmK\x8c?X/b\x1d\n\x0f\\\x04\xa4\xf8\xef\xe9\xb2\xdc\xc6\xd5\xf9\xd2\xc8\xceh\xc8h\xc3\x10\xc11\xc0\xf7\xbf\xad\xc1\x9a\xc7J\xd01\xd9g\xe2\x0b\xee\xd1\xfdP\x0e \x1b\xd3#<*\xe7.\x061\xab0O-\xd4&\x1d\x1e\xd4\x14\xb8\x0b\x07\x03\xaf\xfa\x90\xf3\xb3\xec\x8e\xe5h\xde\xa5\xd8\x9b\xd4\xa8\xd0F\xcc$\xc8]\xc6\xd3\xc7\n\xcc^\xd2x\xdb\xca\xe6[\xf2=\xfdt\x08#\x14*\x1e\xae$\xfb\'\x87)\x9f)\x8c(\x17&\x9e"T\x1e\x97\x19\xcd\x14\xcc\x0fr\n\x88\x04\xc7\xfd\xed\xf6\x99\xf0\xb9\xeaY\xe5\x1b\xe1\xfd\xde\xc2\xde\xe8\xdfv\xe2X\xe7\t\xee\xfd\xf3\xd5\xf8\x02\x00<\r\x1a\x1c\xb5$\xb4%\xdb%\xe8)\xab-p+\xc7$\xf3\x1eY\x1an\x13=\nl\x03\x1c\x01\x97\xfeh\xf7\xaa\xed\x9e\xe62\xe4h\xe2\x01\xde_\xd9g\xd8\xbb\xdb1\xdf\xff\xe1\xc0\xe7\xaa\xf1\xd9\xfb\x14\x01&\x03\xf1\x07=\x0fO\x14^\x13\x07\x10\xf9\x0e\xf5\x0el\x0c\xef\x07\x12\x05-\x04\x9e\x01\x0b\xfb\xdb\xf3\xbb\xefS\xed}\xe8\x89\xe0\x91\xd9\xfd\xd6A\xd8\xcb\xd98\xdc0\xe1L\xe8\xe0\xef\xf1\xf5\xad\xfby\x01\xd7\x05\xef\x08\x18\n\xa8\n\xb8\x0b\xb6\x0eI\x14\x93\x1aO%\x116\x08H\x90S\xbeV\xd6WVX\x86R\xf6C(4\xa2(\xb6\x1d)\x0f>\x01D\xfbx\xfa(\xf6\x1e\xed\x90\xe3l\xda\xac\xd1\x84\xc8\x97\xc1\xa1\xbf\xa1\xc1\xf9\xc6\xe0\xce\xda\xd8\x14\xe6i\xf5n\x02\xdb\n\x9c\x0f$\x12\xe7\x132\x15\x98\x15\x06\x16\xe9\x16\xef\x18\x1e\x1b\xc3\x1aM\x18K\x15+\x10N\x06-\xf8\x16\xea\xe8\xde\xcd\xd5\x17\xce\n\xcak\xcb\x92\xcf\xa9\xd3\xea\xd6W\xdb\xb9\xe0\x11\xe51\xe8\x9e\xeb\xac\xf0\xe7\xf6\xcf\xfe\x9a\x08\xcb\x13\x0f\x1e\xe2%\xc1*\xc7,w+\xa0\'\x94"\xdb\x1c\xa8\x16U\x10\x95\nd\x06\xa8\x03K\x01\xb0\xfd\x9d\xf8-\xf3\x9a\xeeE\xea\x9a\xe6\x03\xe4\x92\xe3\x17\xe5)\xe8s\xedz\xf5P\xfeP\x06\xa7\x0b\xf5\x0e4\x11_\x13\t\x16_\x17U\x19\x02\x1e/%\xb5*\xc7+5) 
$\xb0\x1b/\x10\r\x04/\xf9\xc2\xef]\xe7\xc2\xe0\xa8\xdc\x99\xdb\xa1\xdb\xac\xdcK\xddu\xdd\x0b\xde\xb9\xde\xb4\xe1\xd3\xe6\x0e\xef"\xf9\xb9\x02M\x0bB\x12\x8f\x17[\x1a\x90\x1a\x1b\x19\x1e\x16\xda\x11\xa2\x0c~\x08\t\x06\xf8\x03\xc1\x00\xbf\xfb\x84\xf5\x87\xee\xd9\xe7k\xe2R\xdf\x1d\xde\xd8\xdd\xf9\xdeA\xe1\x96\xe5m\xeb^\xf0l\xf4r\xf7\x03\xfb\x87\xfe\x03\x02\x94\x06\xa2\n\xc2\rZ\x0f\x0b\x10=\x11\xd5\x12\xbe\x13~\x13\x11\x10I\x0c\x02\n\x8e\x08Q\rf\x1e\xd55\xf9B\xc9;,+_$\x07&\x0c#d\x1b\xd8\x17\xe1\x19\x8a\x18\xd6\x0e\x11\t|\r\xef\r\xc7\xff\xb0\xe9g\xda\'\xd6\xa9\xd6\x83\xd8#\xdf\x9f\xe6\x90\xe8\x04\xe5\x88\xe1\x92\xe3\xfb\xe9\x8a\xed\x0e\xec\x9f\xeaX\xee\x97\xf7\xfa\x02\x04\x0eg\x163\x18\x06\x13\x9b\x0c\r\t\xca\x07\x86\x07\xa2\x06\xc4\x03\xa5\xfe#\xf9\xe3\xf7l\xf9\xc4\xf8\x9b\xf3\xc5\xeb:\xe4\x15\xdfR\xde/\xe2\xe2\xe89\xefX\xf3L\xf5k\xf7z\xfb\x93\x00\x98\x04\x07\x07\xd8\x08\xd5\n\xd0\rS\x12\xe6\x17\x89\x1b\xaf\x1a\x1e\x15\xdf\r}\x07\xde\x025\xffU\xfc#\xfa\x1c\xf8\x8b\xf5\xf0\xf3\x0b\xf4@\xf4)\xf2I\xee\xe5\xeb\x1a\xec\xbb\xee\xc3\xf3\xe7\xfa\x08\x03\x88\x08\xb2\n\xc3\n\xa4\x0b\x18\x0e\xf8\x10\xe4\x12\xe1\x13\xf0\x15\xfb\x18\x1d\x1au\x19\xd9\x17\x0b\x16\xe1\x10\xe0\x07\x85\xff\'\xfaI\xf7\xcf\xf4\xc9\xf2^\xf1\xa7\xef\xd1\xec\xc2\xe9\x1c\xe7\xd7\xe6\\\xe8\x05\xebd\xee\xc9\xf3\xd2\xfaU\x01c\x05\xaf\x07\xce\t6\x0bL\x0b\x8e\n\x80\nh\x0bG\x0c\xfb\x0b\xa0\nW\x08\xf3\x04C\x00\xcd\xfa\xa9\xf6\xb3\xf4=\xf4j\xf4\xa4\xf3_\xf3\xd7\xf3T\xf4\xc3\xf4\xc3\xf3K\xf3\x10\xf4\x91\xf6\xb1\xf9\x9d\xfc!\xff\x8d\xff\x18\xff \xfdQ\xfc\x10\xfd\xd4\xfcn\xfc8\xfb`\xfb\xba\xfbD\xfae\xf7\x98\xf5\xb2\xf5\xf7\xf5V\xf5\xd1\xf5w\xf88\xfc\xb1\x03y\x15D.\x01=\x9c7\xc6\'\x7f#(+\x821\x0f3\xe17uB\xe0@s.\xb7\x1bR\x167\x13x\x03\x02\xf0\x83\xe8\'\xecX\xec\x12\xe6\x9e\xe2\xa5\xde\xb9\xd4:\xc6\xfe\xbeo\xc6-\xd6,\xe4t\xebJ\xf0v\xf5=\xf9G\xfb\x16\xff0\x08\xbc\x10.\x16\xe2\x1b\x81"?\'\x9b&3!\x1f\x18:\r\xc1\x047\x02*\x03o\x02\xac\xfdP\xf5!\xeb\x9d\xe0\xd1\xd8k\xd5\xd4\xd5\xb2\xd7\x97\xd9F\xdc\x92\xe0\xc9\xe5\x05\xe9k\xea\xc8\xec\xe6\xf1,\xf8g\xff\x1c\x08\x1d\x11\x1b\x16/\x16h\x14;\x13\x8e\x12F\x12\x8f\x12)\x13}\x12I\x0f"\n\x88\x04\x16\x00\xca\xfc~\xf9R\xf6\xd6\xf4O\xf5\x82\xf5{\xf5\xbb\xf5\x04\xf7\x9c\xf7\xaf\xf7\x18\xf9\xa1\xfc\xcb\x00y\x04\xf6\x06e\t\'\x0b\xc4\x0c\xfd\r\xda\r\xb9\x0c\x8c\x0c}\x10\xc2\x15\x80\x18d\x16\x92\x11\xcc\x0b\xb9\x05\xc0\x01_\x01\xce\x02\x13\x02\x94\xfd\x0b\xf8\xda\xf3\xfb\xf0\x18\xf0w\xf0\x19\xf1C\xf1D\xf1n\xf2L\xf4P\xf6j\xf8$\xfa\x04\xfb\'\xfc\x88\xfe\xc7\x01\xe8\x03\x10\x05\x9f\x05:\x05\xe9\x03]\x02#\x02\xb4\x02\x9a\x02\x0b\x01\xa2\xfe@\xfc\x17\xfan\xf8(\xf7\r\xf6R\xf4\xe6\xf1:\xf0\t\xf0\xce\xf0(\xf1p\xf1\xea\xf18\xf2\xb3\xf1\x9f\xf1\xb3\xf3c\xf7P\xfa\xe8\xfb0\xfd\xda\xfe&\x00\xeb\xff\x0b\x00\xdd\x00u\x03]\x06\x12\x08\x9b\x08\xef\x06\xfb\x057\x058\x04\xd7\x07\xf7\x16\x801e?\'3\x0b\x1a\x1e\x10,\x1a\x92$\xad+R7MA\x1d5\x8d\x16\xeb\x04I\t\x9a\x0e\xcf\x05-\xfdi\xff\x89\x01\xbb\xf6\xda\xe7^\xe2\xae\xe2z\xdc\xa8\xd2S\xd2\x0b\xe0\x08\xee-\xee\x15\xe6\xd8\xe1z\xe3S\xe5;\xead\xf8X\x086\rq\t\\\x08\xee\x0cn\x0e\xdd\x0c<\x0f\xaf\x14q\x16\xd2\x11\xca\x0e\xb1\x0e\xe2\x0b\xae\x01@\xf6x\xf1\x80\xf1)\xf1U\xefT\xee+\xec?\xe4\xab\xda\xf6\xd7O\xdd]\xe4\xc4\xe8\xa5\xecq\xf0\xb7\xf1\x92\xf0\xa2\xf1y\xf7;\xff\xef\x05S\x0b\xfb\x0fY\x13\x81\x13x\x12\xa1\x12?\x14\xb7\x15@\x16\x0f\x17\xf5\x16\xc8\x13#\x0e>\t]\x06\x9f\x03(\x01:\x00,\x00\xaf\xfdH\xf9\xfa\xf5"\xf5\x88\xf4\x8c\xf4\x81\xf6\xfb\xf9\xc4\xfb\xa9\xfb\xb9\xfc\x05\xff\xa2\xfe6\xfe\x8a\x01Z\nZ\x0f\xa8\r\xff\ni\t\xd5\x07\xa3\x04\x1b\x08r\x0fw\x11J\x0b\xac\x03\xcd\xff~\xfc\xbc\xf9c\xfcM\x01\xff
\x00\x87\xfb \xf7\x9f\xf6\xe7\xf5\xd6\xf4\xa7\xf6$\xfaN\xfb\xb2\xfa\xf9\xfb\xe3\xfd\xc2\xfd\xdb\xfb\xd0\xfbk\xfd\xe5\xfe\xa9\x00\xed\x02\x0b\x04\xd8\x017\xfe>\xfc\xc4\xfc\xb5\xfew\x00Z\x00T\xfe\xcb\xfa\x89\xf8\xee\xf7\xd0\xf8\xf0\xf9\xd8\xf9|\xf8}\xf6"\xf62\xf7\xab\xf8]\xf9R\xf9K\xf9A\xf9f\xfa\xf2\xfc%\xff\x94\xff\x9c\xfem\xfd\xef\xfc\x84\xfc\x97\xfdJ\xff\x08\x00\x8b\xfe\x0c\xfc\xab\xfb\\\xfbM\xfb\x9b\xfa\xff\xf9Y\xf7\x94\xf2\x1c\xf4\x81\xff\xd0\x11N\x1f\x0f d\x14\xa0\x03\xf4\x02\xc9\x17\xa15\x88Aa8\xc0+\x9c!\x02\x1a\xd9\x15b!\xcc2F19\x1b=\x07\xe7\x06\x94\x07\x0e\xfd6\xf2\x9c\xf4$\xf82\xee\xf2\xe2n\xe4\x11\xe7\xd9\xda\xb8\xcb\x88\xd0\xd9\xe3A\xee\xc0\xe9S\xe5z\xe6\xa7\xe3\xcb\xe1n\xec\xf0\x01\xd7\r\xf2\x07J\x00w\x01\xfd\x05R\x06R\t?\x13\xad\x1a\x13\x15i\t\x93\x03\x90\x03@\x02\xf5\xfe\xd6\xff\x95\x02[\xff\xb0\xf5\xc0\xed^\xeb2\xea-\xe8\x81\xe9\xd1\xee\xeb\xf1\xac\xed\xe3\xe7?\xe7\xac\xeam\xee\xd0\xf2\x0f\xfal\x00\xb1\x00p\xfd\xf9\xfc\x11\x01\xab\x05\xa3\t\x80\x0e\xec\x12]\x13\xa9\x0f\x96\x0cQ\x0cm\r\x9a\x0ek\x10\x94\x12\x19\x12\xb1\rf\x07<\x03\xa3\x02Q\x04\xcf\x05e\x06U\x05t\x02I\xfd\xf2\xf8O\xf8\xeb\xfa8\xfd.\xfeg\xfeI\xfe`\xfbP\xf8\xb3\xf7\x84\xfa\xe0\xfd\xf8\xff\x0e\x03\x8d\x04\x95\x03\xe6\xfe\xb6\xfc\x9d\xff\xe4\x05W\x0c\xaf\x0f\x9b\x0e8\x08X\x01\x1e\xff\x9d\x02\x97\x08 \x0cx\x0b\x7f\x05\xf4\xfd\x0f\xf9\xaa\xf8h\xfbs\xfd\x95\xfey\xfd\x15\xfa\xa5\xf6\xc7\xf4\x00\xf6X\xf7m\xf8\xb9\xf9\xf6\xfa*\xfb\x15\xfa4\xf9*\xf9\x88\xf9\x93\xfa\xd7\xfcF\xff\xad\xffu\xfd\xe0\xfa\xb0\xf9\xb4\xfa\x9e\xfc\xb7\xfe\xcf\xffJ\xfe\x9e\xfbg\xf8l\xf77\xf8\xad\xfa\xe5\xfc\x9d\xfc\x8a\xfb\xc6\xf8\xab\xf7@\xf6\x86\xf6m\xf8F\xfaz\xfc\x94\xfc\xd2\xfc%\xfc*\xfa\xb0\xf8.\xf9\x98\xfc\x19\xfe\x0b\xfe!\x06\xaf\x15\x02\x1d\x13\x0fn\xff\x9c\x05u\x19\xc3\'&//6\xf3/X\x17\x82\n\xcd\x1c\xa55A7F-x(B\x1d\x07\x07b\xfe_\x0f\xfd\x1b\x06\x0f\xe8\xfd"\xfb\x13\xf7~\xe7P\xdf\xb3\xe7\xbb\xed\xf0\xe3\xf3\xdc\xc4\xe2\xb2\xe5\xeb\xdb\xd2\xd3\xec\xda\x81\xe6\x9d\xec\x8b\xef\xeb\xf3\\\xf4\xea\xedG\xebe\xf4W\x03\xff\r\xae\x0eF\x08$\x02w\x00\x05\x04$\x08\x9a\x0b\x0b\x0eY\x0c\x90\x04\x83\xfc\x01\xfb\xa3\xfcE\xfal\xf5\x85\xf5\x89\xf8\xc7\xf6/\xf03\xec\xcd\xeb\x11\xea\x9f\xe8\x16\xed1\xf5\xae\xf7\x08\xf3\x8f\xef,\xf1\xad\xf4>\xf8q\xfe\x00\x06\xbf\x08\x97\x05\x17\x03\xb1\x05\x1e\nu\x0c\xa6\x0es\x12"\x14\xe4\x10\xcb\x0c\xdb\x0c?\x0f;\x0f\xef\r\x91\x0e\x05\x0f\x9d\x0b\x83\x06\x12\x05(\x06\xba\x04\xba\x02\x03\x03\xe4\x04f\x01y\xfbM\xf9\xeb\xfa\x8f\xfaL\xf9\xdb\xfc\xc9\x02^\x00\x93\xf7P\xf4y\xf9\xd0\xfee\xff\xd0\x02X\x07\x07\x05m\xfd\x86\xfc#\x03\xc5\x07+\x06[\x06\x0f\t\xb3\x06\x12\x01\x00\xff\xf9\x02?\x04\xdc\x01r\x01}\x02&\x00\x9e\xf9\x10\xf7\x1c\xf9\xc4\xfa\xf2\xf9\x08\xf9\xd3\xf8\x8b\xf5f\xf1-\xf0\xc2\xf2\n\xf5(\xf5\x0c\xf5S\xf4\xff\xf26\xf2\xa7\xf30\xf7\x0f\xf9Q\xfa\xbd\xfaQ\xfb\xfd\xfb\xc4\xfc\x05\xff\x9f\x00\x94\x02\x89\x03%\x04\xcb\x03\x10\x03(\x03\xb7\x03\x99\x04W\x05\x12\x06\x86\x05,\x04r\x02c\x02\xa4\x02L\x03!\x04\x8c\x04\x03\x045\x02o\x02\xa5\x03V\x04C\x04\x1d\x04\xdc\x04[\x03\xc8\x03;\x05\xe6\x06\xa4\x07\xbc\x07\xcf\n\xfe\n\\\x0c\x84\x119\x17\x8f\x16\xce\x0f\xef\r\x9d\x12\xe5\x15\x99\x16m\x19q\x1b\xa0\x16\xe0\x0bk\t1\rI\x0e\xe9\n\x0c\x08\x9c\x08\xc8\x02\xe5\xfa\xd0\xf7u\xf9\x9c\xf8\x07\xf3\xf5\xf0%\xf2\xf2\xf0,\xec\x81\xe8\xf8\xe8\xda\xe9E\xe9\xb9\xe9*\xed\x80\xef\xfb\xec\xba\xe9\xcf\xeaQ\xf0\x86\xf3\xd3\xf4V\xf7\x90\xf9P\xf94\xf7\xbe\xf9J\xfe\x12\x00\xa0\xff\xe4\x00\xe7\x03\xa8\x032\x01\xe1\x00\xb8\x02\xd4\x02\xf6\x00\x88\x01\xfe\x03\xc6\x035\x00\xc5\xfdt\xfe\xa2\xfe\x03\xfe`\xfe\x80\x00\x98\xffz\xfc\x11\xfb\xcf\xfcf\xfe\x0f\xfe\r\xffg\x00\
x1d\x00L\xfeH\xfe\xfb\x006\x02\xe7\x01\xe7\x01S\x03\x02\x04A\x03\xed\x03a\x05\xf4\x05\xcf\x04\xe4\x04"\x070\x08\xf1\x06\x98\x06\x9d\x07_\x076\x06\r\x06\xa0\x08\xdd\x08\x80\x06\xfe\x04\xc5\x04\xdf\x03E\x02\xf6\x01\xde\x02\xb6\x01\x94\xffX\xfe\xbf\xfd\x97\xfcd\xfb\xf6\xfbE\xfcd\xfc\xc9\xfb\x0c\xfc\x81\xfb;\xfa\xe2\xf9I\xfa\xa3\xfbX\xfcc\xfd\x9b\xfd0\xfd\x12\xfcP\xfb\xe9\xfb\xc7\xfc4\xfe \xff0\xff\x92\xfe7\xfd\xa9\xfc\xfe\xfcA\xfd\xcd\xfd\xc9\xfd\xa6\xfd\xbc\xfc;\xfbj\xfa\x9d\xf9\xfd\xf9[\xfag\xfb]\xfc\r\xfcP\xfa\x96\xfa\\\xfa*\xfbA\xfd\x01\xff}\x01s\x00\xf9\xff\x07\x00\xe4\x00j\x01\xa3\x01\xa1\x02L\x04\x81\x04\xdb\x03*\x05\x05\x05\xb1\x03N\x04\xc3\x05:\x06\xeb\x04\x89\x05\xc9\x07F\x07+\x07\x0e\x07i\x06?\x05\x81\x06B\x08\xbf\x08\xd6\x06\xcd\x030\x04l\x06m\x06\xbc\x052\x07\xb5\x06\xb5\x01o\x00\xff\x03X\x01\xba\xffO\x03\xb0\x06t\x02]\xfb\x9d\xfd\xf7\xfe\xe5\xfd\xf3\xfc\xa5\xfe\xfe\xfe\xb2\xfc\xf0\xfb\xeb\xfe\xa2\x02\x88\xff\xa7\xfb{\xfd\xe1\x00\r\x01Z\xff\xb9\x01\x0c\x04B\x02\xdc\xff/\x02\xee\x03p\x00\xcb\xfep\x01\xed\x02\x1a\x00x\xfd&\xfeG\xfe~\xfbF\xfb/\xfc\xc8\xfa)\xf9\xec\xf8\xcb\xf9\x1b\xfa\xdf\xf9\x84\xf9\xf0\xfaz\xfa \xf9W\xfa4\xfc \xfd6\xfe\xd3\xfe\xfd\xfe\xa4\xfe\xbf\xfc?\xfe\xc3\x00n\x01\x0c\x01l\xff\xb4\x00Q\x02"\x01\x7f\x00~\x00\xde\x00\x1b\x00&\xff\x9b\x019\x02\xd1\xfe\xd4\xfd\xba\xfe&\xfe<\xffr\xffK\xff\'\xfe]\xfd\x10\xfeK\xfeZ\xff\xb5\xfe\xb2\x00\x1a\xff-\xfd\x1f\xff\x92\xffS\xfeg\xff]\x01M\xfd\xd3\xfb\xce\xff,\x02\x12\xfe\xb6\xfe\xd3\x00%\xfeo\xff\x9e\xff\xa0\xffN\xfe\x9d\x02\xfa\x00\x00\x01\xf3\x03/\x00\x92\x00\xaa\x05\xa8\x03\xc8\xff\x98\x01\xce\x04\xbf\x05/\x00U\x06\xdd\x06\x12\x00\xb4\xffq\x02\xee\xff\xb9\x00\xfe\x03\xc5\xffN\x00\xa3\xff6\xff\xc3\xfb?\xfeX\x00\x16\xfd\xde\xfeg\x02\x9a\x00\xce\xfb\xc6\xfc\xdc\xf9W\xfd\xf5\x01b\x00\x00\x03I\xfe\x17\x01\xcb\xfe\x80\xfb\x93\xff\x8f\x002\x08!\x01s\xfd\x0e\x05\xcc\x04\x04\x00\xa3\xfd\'\x07~\x04\xaf\xfe\xa2\x01\xfa\x04X\x04\xf0\xfe_\x02\x1c\x05\xcc\x01\xd3\xfb\xee\x07\x10\x05\xa3\xf8B\xfe\xbb\x04\x90\x03\x9b\xfbW\x06\xc9\x03\xa3\xf5V\xff9\nG\xf9?\xfc\\\n\x90\xfc%\xfc\xd4\x028\x05\x0f\xfco\xfa\x8b\x04\x93\xfe\x1a\xfd%\x04\xba\x04/\xf9\x98\xfeJ\x00p\xf9\x86\xfe\xc0\x00\x04\xfe3\xfd\xcc\x01{\xfd\n\xfa\xba\xfe\x87\xfe\x90\xfa\xcb\xff;\x02\xb6\x00R\xfdx\x01\xd2\xfd\xb7\xfd$\xff\x98\xfd\xb4\x04\x9b\xfen\xfd\xed\xffK\x00\x1a\xfe=\xfe\xe7\xfb\x98\xfeI\x01\x87\xfdj\xff\xec\x03\xf1\xfd~\xfa\xea\x02\x00\xff\x10\xfa\xf5\x00\x1a\x01\x00\xffL\x01\x92\xfe\xc9\xff.\x04\x04\xfc\x8f\xfd\x96\x02I\xfe\x97\x06\x99\x00x\x02\x93\x01\x1d\xfe\xc9\x00F\xff\xca\x04\xf7\x04y\xfe\xef\xff]\x03\xe0\xfa\xd0\x01\xbe\x00&\xfd\x01\xff\xe3\xfc\xaa\x02r\x00S\xfc\xd5\xf8G\xfb\xf9\x00\xbd\xfb\xde\x01\xd9\x03\x88\xf9J\xfbj\xff\xd2\xfb\xc2\xfa|\x03\x15\x04\xcb\xf9=\x00|\x03W\xff\xea\xfb\xcb\x00z\x01\xa7\xfbU\x04\xaa\x07\xed\xfa\x96\x01\xfd\t\xca\xfb#\xf7\x1b\x06\xd6\x03\xe8\xfd\x1f\n\x80\x00\xf5\xf6\xef\t\xf5\x04\x8f\xf99\x02\xb4\x02g\xff,\xfa\xb6\x0bX\x0c\xbd\xf8a\xff[\x04\x16\xf3\x1e\x00\x95\x0e\x83\xf9\x87\x06P\x06\x07\xfc\x1c\xf9\x04\x01\x93\xfe\xd4\xfc\xfa\x06\xfd\xf9>\x02\xc8\x05\x17\x01E\xfa\xa9\xef\xa7\t\x9e\xfd,\xf8t\x0e\xfb\xfa\x1a\xfa\x93\x02\x0e\x01\x1a\xf4\xce\x03\xfa\x021\xf7\xfe\x00\xcb\r~\x01\xc8\xf6\xa5\t}\xfb\xd4\xf0\xa3\x0c\xf0\x07\xc5\xf7\x04\x14R\x02B\xf7\xf3\xfc\xf1\x05\x83\xfb)\xfcJ\n\xea\xfeT\x02v\x01N\x05[\xf8n\xf2\xdf\x01\xdd\x040\xf9\xc2\t9\n\xc8\xf0-\x01\x17\x08\xa9\xf6C\xfeF\x05k\x02\x99\xff\n\x03A\x06\x9b\xfb\x80\xffv\xf7\x07\x02\xf5\x00\x96\xfb\x89\x0c\x06\xfc\xd2\xf6\xec\x02H\x01$\xf7\x0b\t\xf5\xfc\x10\xfcB\x00\xa9\xfc\x0c\x07\x10\xfb\xa3\xf
eO\x01\xc5\x04\xaa\xfa\x8b\x02\x8b\x07\xa8\xf8w\xfd\xf1\x03\x07\xff7\x04\x95\n\xa4\xfbs\xfa\x91\x02\x8a\x02\xc2\xff\x95\xfd@\xffx\x06v\xffh\xfc\xcc\xfcM\x04\xb1\xfb\x16\x00\x9a\xfe\x0c\xfbx\xfar\x03\x1e\x06}\xf8Z\x00:\xfd\x1d\xf9\xa3\x00c\x05\xe6\xf3@\x06?\x08\xf4\xf4^\xfe\xd0\x03\xb7\x04\x19\xf9\x8b\xfd\xdf\x05\x00\xfc\xe5\xfe\xd7\n\x85\xfe\xc3\xfc\t\xfe\x18\x00\xfc\x02\xfd\xffz\xfb.\x0c\x7f\x03\xeb\xf3\xf4\x02\x06\r\xfc\xf7D\xf0\x1a\x15\xaf\xfc\xad\xf9\xd3\x06\x0f\x07[\xff\x1b\xf9o\xff\x87\xff\xc1\x01\xac\xfb$\x0b.\xff\x7f\xfe\x8b\x00\x17\x04\xb5\xf7@\xf6\xcf\x0f\xe6\xfe\x14\xf5b\x05e\x13\xf8\xee\xbf\xf1\xbe\x18\xbd\xf9\x15\xe7\x85\x13\x04\x0b\x81\xed\xa7\x05\x00\x0b\xbc\xfc\x8c\xf2l\x06\xbb\x00~\xf3H\t\xa2\t\x14\x00\x8a\xf8D\x05)\xff\xb0\xe8\xa4\n\xee\t\xb3\xf1\\\x0c~\x08\xd2\xef\x0f\xfd}\x03P\xfb\x91\xf7\xce\xff\x17\x0b?\x01g\xf7\x9c\x08V\xfb\x01\xf4\xff\x08K\xf8\x18\x03\x81\x08\xce\xf77\x02o\t\xa1\xf5q\x03\x13\xfd\xee\xf6\'\x0c_\xfeh\x03\xe8\x06\x14\xfa\xf4\xff\xb5\x04\x1b\xf6\xd9\x07\x18\x03A\xf8%\x039\x087\x03o\xf9\xf6\x07\x8f\xfd\xd1\xf8\xb8\x01-\x03\xe2\x00d\x03\xb6\x07\xe0\xf7\xb8\x01\x18\xff\xb5\x00\xa1\xfb3\xfd\xa2\t?\xf9j\x02\x8d\xfd\xc4\x06\xeb\xfci\xf4\xf6\x02\x88\x03\xe7\xfd~\xfe&\x01\x11\xff\xc1\x03[\xfd\x16\xf8\x99\x03\x9b\x03^\xf8H\x06\x8f\xfe\x9f\xfc\xa8\x073\x01\xb0\xf2?\x0b0\x07\xee\xe9\xfa\t{\x06\xa5\xffT\xfe\xa8\xffd\x05\xfe\xfb\xfc\xf9\xcc\x04M\x000\xfcW\x06\x8b\xfb\x9b\x03\x13\x00\xcc\xfbF\x010\x01L\xf2Z\n\xa3\x04~\xf7Z\x07\xd0\xff\xcb\xfc\xb5\xfd(\xfd\xe1\xfc&\x04/\xfd\xe5\x06b\xfc\xa5\xfcR\x08\xa8\xf7f\xf6\\\x0b\xf2\xfeM\xf9s\x07\xfd\x07\xfe\xf9\x08\xf7\xf3\n#\xf9\xaf\xff\xfd\xfe\x8f\r\x7f\xf8q\xfa\xe5\x11\xb9\xf7\xf9\xf3\x1a\x02\xa3\r\xe3\xf3o\x04\xbb\r\t\xf7\x7f\xf8\xcd\x05\x00\x009\xf7\xe8\x05\xc1\x06\xc5\xfd\x9b\xfb\x8f\n\x11\xfdC\xed\xf8\x0c\xf5\x02\xc1\xf3\x14\x06\xb0\n\x1e\xf8\xfa\xf7\xe1\n\xdb\xfe\xbb\xf1\x9f\x01\x11\x0e\xd0\xf2\xff\x00\xc6\x0f\xcc\xf7:\xf7\xc3\x0co\x00\xe4\xefL\x058\x11d\xf3\x07\xfb\xe5\x1f\xd3\xe8\x04\xf5\xfc\x1ah\xf1\x08\xf8\xaf\x0c\x80\x03\x08\xfa6\xfb\x0b\x0b\xb2\xfc`\xf6\t\x08\x95\xfc\xdc\xf3\x90\x07\x1c\t\x9f\xed\xaa\t\xde\x02\xa3\xf4\xef\x008\x04}\xfb\xd2\xfeF\x070\xfb@\x02\xf9\xfe\xf3\x04\x8b\xf7c\x07k\x01Z\xf9O\x03\x0e\t\xc6\xfa\x91\xfcr\n\xc6\xfa\xbd\xfeX\xfc%\x07\xfa\xfcC\xfc!\t\x18\xfdX\xf8V\x0b\x1e\xf7e\x01\xc2\xf8#\x08g\xfe\x05\xf8y\x0c\x95\xf7\x1a\x07\xeb\xf2\x92\x04\xec\x00\xf8\xfd\xcf\x01\x9b\x03\x00\x01a\xfb\xc3\x01Z\x04\xaf\xfa<\xfe\x11\x08=\xfd\x8c\x00&\xfbQ\x08\x9f\x02\x0c\xfa\x02\xfc\xb5\x06\xee\xfd\xfa\xf3g\x0e\x91\xfe\xdb\xf7\xbd\x07\xda\x04j\xf3\xf2\xff\x16\r\x1e\xf35\xf8,\x12\x00\x02!\xf4\r\x07\xba\x07\x03\xf2\x0b\xfa\xe6\n\xda\xffS\xffC\xfb-\x0b\xb1\xffO\xf6:\x03\xcc\xfe\x8e\xfe\xb7\xfd\x82\x04\xed\x00s\x08\xf5\xf3\x14\x02\x81\x07d\xf1/\x03\xee\x05d\xfe\'\xf9\x91\x06\x05\x06\'\xf7\xfc\x00<\x01\xa6\xf8-\xfd\x03\x0b\xbe\xfc)\xfd\x80\x051\xf9\'\x01`\xfdt\x04\x87\xfb\x00\xf9c\x0c\x00\x04&\xf0\xe4\t:\x05M\xef`\n\xf6\x00\xbb\xf8A\x01\xf1\x07Y\xfd\xe5\xf9\x15\x07\x9a\x022\xf2\xbc\x076\t\xd6\xef\xac\x03\xf9\x11U\xf3\xa5\xf7\xab\x0f3\xfe+\xf1\x13\x07\xdf\r\x0b\xf5}\xfa\x16\r\x07\xfe\x8e\xf1\xbd\t\xdb\x07\xe9\xf8\x7f\xfa\x95\r\xd1\xf9B\xf6\xd5\r8\xfd\xf7\xf5\xc2\x05\xad\x07\x82\xf6~\x03d\n\xa3\xf2\xa8\xfel\x00\x85\x07j\xfa\xee\xfb\xaf\x0f\xd1\xf4n\xfc\xbf\x06\x86\xfe\x1d\xfa\xeb\x035\xff\xda\xfd\x93\x02\xd6\x00\xe3\xfeZ\xfb\xfa\x01\xa7\x03m\xfeO\xf3\xa5\x10\x96\xfe\xa5\xf2s\t\x9f\x04\xc8\xf7\xd7\x02\xfb\x03\x0e\xfby\x02\r\xfa\xf8\nx\xfa\xba\xff\x85\x0b3\xf6\x1f\xff+\x04X\x00\xc1\xfa\xd7\x05\xb0\x01P\xfdp\x03\x1f\xfdr\x03
\xf5\xcd\xf4\xe1\xf5\x9a\xf6p\xf6\xbc\xf6\xca\xf7\x88\xf8y\xf9j\xfbA\xfe\xb4\x00R\x03@\x060\t#\x0b\x1e\x0cE\re\x0f\xd2\x0f\xec\x0f\x7f\x10\xf7\x103\x11X\x0fQ\x0c!\nQ\x08\xfc\x05\xc0\x031\x01k\xff\xc4\xfd\x16\xfc\xfd\xfa\x88\xf9\xe3\xf6:\xf5\xbc\xf5}\xf6\x11\xf7\x80\xf7\xcf\xf7\\\xf8\xde\xf7\x04\xf8j\xf8\x9c\xf7x\xf7\xd5\xf7\x07\xf9u\xf9\xe2\xf8\xe0\xf7\xe4\xf5\x15\xf5\xbb\xf4-\xf5\xcb\xf4\xb2\xf5\x89\xf5\xd9\xf5u\xf7\xbe\xf7<\xf8\xa6\xf7\xb6\xf8\x05\xfd\x00\x01\x97\x00\x15\x01\xc4\x03\x15\x05\xf2\x04\xb9\x04Q\t\x0c\n\xfd\x08\xce\n\xac\x0b-\n\xc3\t-\n-\x08\xfc\x08\x19\x0b\xbd\n\x89\x08\x18\x08\x0c\x08\xb7\x07f\x07\x8d\x08\xb5\x07\xa1\x04\xef\x05\x91\x08<\x08@\x06.\x07\xf3\x07p\x05I\x04\xc9\x04\xab\x04\xd6\x042\x04\xbf\x02d\x04|\x07?\x08&\x07\xeb\x06i\n%\x0c\xb8\x0b\xed\x0bw\x0c2\x0c\x04\x0c\xa3\x0c\xd9\x0b\xb4\t\x95\x06@\x04\xda\x01\xc7\xff\x8e\xfe\xb6\xfcw\xfa\xab\xf7\x1b\xf6U\xf5\xfa\xf3\xff\xf16\xf0k\xf0\xfa\xf1\xa2\xf1\x80\xf1>\xf2>\xf2\xc1\xf1\xa3\xf21\xf5D\xf6S\xf6\xc1\xf7\x1b\xfa\xba\xfb\xe4\xfc\xf1\xfd\xf3\xfe.\xff2\x00(\x02\xbd\x03\xb9\x038\x03\xaa\x02M\x02\x13\x03\x90\x02@\x01O\x00\x14\x00\x84\xff\x1e\xfe\xc2\xfd\xa7\xfcy\xfa\xba\xf9\x8a\xfa\x1b\xfb\x02\xfa\xcb\xf9\xc1\xfa\x8a\xfbr\xfb\xf7\xfbI\xfd#\xff-\xff\x9d\xff\x1f\x02\x04\x04q\x03\x8f\x03"\x04\xae\x03\xb5\x04_\x03\xcb\x01\x9a\x03;\x03\x92\x01E\xfek\xfev\xfe\x82\xfa\xdf\xfb\x93\xfc\xdb\xf9\xee\xf9\xe6\xfa\xf4\xf8\xaa\xfc\x10\xfdP\xf9h\xfe\xc1\xff\x9f\xfd\xd2\x00\r\x03\xc8\x01\xc5\x02L\x03\x18\x07\x8e\x07=\x03\x82\xfeb\x05_\x05\xe4\xff(\x08\xf6\x01\x17\xfe\x8d\x02\xe6\x02\xc1\x03\xd5\x00D\xfc\x83\x00\xca\x03\x84\x01\x9e\x05\x97\x02r\xfd\x89\x02L\x04a\x02y\x03\xc3\x049\x001\x01\xd4\x04\\\x04\xda\x04\x1c\x01\x1d\x00\t\x00\xf4\x00\xa6\x02\xac\x02\xd0\xfeY\xfa\x95\x00\x9a\xff\xfa\xfa\xd8\x01\x05\x00#\xf8\x9b\xf8>\xff\xb2\x01\xd5\xfd\xca\xfc\xde\xfd\x80\xfe6\x00)\x01\xb5\x01\xf9\x00:\xfe"\x02\xc6\x05\xce\x01\xc6\x00\xbb\x01\x89\x02\x04\x01\x81\x03\xdd\x04G\x01\x9e\x00\xb0\x00\xff\x00\xa7\x03\xba\x040\xfeF\xff\xa9\x03\xe5\x02+\x00T\x00\xc7\x02j\xfd]\x01\xfe\x05_\x01f\xffH\x02\xec\x034\x00\xa4\x01\x10\x03\xac\x003\xff\x0e\x02<\x01\xcc\xfd[\x00\x12\xff\x1e\xfd\x87\xfb\xe4\xfbv\xff0\xfe\x86\xfa\x8e\xfb\x8e\xfb\x16\xf9\xa4\xfc$\xfdU\xfc}\xfc\xdb\xfb\xc1\xfc\xb0\xfe^\x00\xe7\xfcE\xfe\xf6\xfeH\xffo\x01Q\x01\xcc\xffW\xff\xfe\xfeN\x00\x8c\xffi\x01\x0e\x01\x7f\xfb>\xfd\xe9\x01\x1e\x01\xf9\xfcz\xfe\x16\xff\x8f\xfe\x1e\xff\xb8\xffP\x00\x93\xfe\xd7\xfc%\xff\x16\x01\xc2\xfe\x8a\xff0\xfe\x89\xff\xe6\xfd\xe1\xff?\x01W\x00Q\x02\xca\xfe\xfd\xfew\x02]\x04f\xff\xa4\xfd\x0b\x00\x9c\x01\xa2\x03%\x00\x9d\xfe\xbf\x00\xba\x03\xee\xfc\xea\xfd\xfa\x01\x89\xfdL\x01\x0c\xfe\x10\xff\x07\x01\xf8\xfd\x07\xfd\x89\xfcO\xff\xef\xff\x01\x008\xfat\xfe\x0e\x01\x9f\xfc\xf6\xfba\xfe\xf3\xff\x98\xf8T\xffR\x02i\xfe\xac\xfc8\xf8\xe0\x03\xad\x01\xf0\xfa>\x00\xc0\x032\xfe\x9a\xff\x87\x04\xc7\x02\xfd\xfc\x1c\x00\x1b\x08\xb5\x01\x94\x00\xce\x07\x0e\x04\xbb\xfe\xb2\x03\xae\x08)\x03D\xfc\xee\x06\xdf\t\xe4\xfb\xef\x03\x98\x08\xb0\xfd\xdf\xff\xc0\x07\x11\x03\xd7\xfd\xd9\x01\xaa\x05\xe5\x03\xfc\xfe\x8b\x05J\x02\xd1\xfd\xbb\x01\xa6\x01\xc2\x05\x97\xfe\xd2\xfd\x9a\x01 
\x03\xbc\xffD\xf8|\xfeV\x03\xce\xfc\x83\xfa\x1f\xfe\xde\xfd\xd1\xfcJ\xfa3\xfe\xcf\xfcL\xf9\x0f\xfe4\xfc\xaa\xfc\x08\xfc\x90\xff\x90\xfc\xab\xfd\xe4\xfd\xfb\xfb\xdf\xfd\xa8\xfc\xc8\x02\x10\xfe\xab\xffg\x01\x02\xfe\x00\xfbY\x02\xfd\x02\x12\xff\xd7\xff\xc9\x03\x83\xff\x8b\xfc\x19\x05\x8e\x05v\xff\xbf\xfec\x071\x00\xa9\xff\x1e\x039\x06S\xff\xf7\xfd@\x036\x067\x05~\xfe\xe8\x02}\x01b\x02\xe6\x02\x9e\t\xd4\x03\x89\xff\xb1\x06\xf5\x05\x11\x00&\x03\x84\x06\x00\x00\xe1\xfe\x92\x01\x9e\x05f\xffV\xfb\x0c\xfb\x96\x00\x05\xfd\x1b\xfbn\x02\x82\xfe|\xf3\xf1\xff\x86\x02\xef\xf62\x00?\xfb\x12\xf7W\x00\xaa\xfb\xd1\x00\x89\xfd\xc6\xf6\xb6\xfe\xcd\xff;\xfb!\xfc\x81\x03\x14\xfa\x94\xfb\x08\xff\xe6\xfcQ\x01\xc0\x019\xfbH\xfb(\x03 \xfd\x9e\xffl\xfe\xf8\x01\xf4\xfe\xd4\xfdZ\xfe\xb2\x00\xfb\x03\xc7\xfb\x1c\x01\x18\x03-\xfe\x92\xff\x83\x03\xbb\x03\x00\x01\x04\xfe\x02\x01\xc2\x08!\x04\x86\xfd\x10\x01\x8b\x08\xe7\x02H\xff\xd5\x00d\tU\x04\xfe\xfb[\x05f\x02\x8f\x00\x16\x03\xbd\x00\x06\x02\xa8\x01\xbd\xf8\x86\x05\xd2\x03\xd2\xf8g\xfe\x1f\x06\x9f\xf9\xeb\xf9:\x07O\x00\xed\xf1\x9b\xfb`\ny\xf9\xd3\xfan\x00q\xff\x1e\xfb7\xfcG\xff\xcc\x00W\xf38\xfe\x1f\x08\x03\xfaD\xfb\xdb\xfd\x0b\x00\xaf\xfc\xa0\xfc\x83\x00\x16\xfd\xa1\xfa\x04\x05(\xffD\xfc\xd1\xff\x9d\x00\xe9\xfc\x93\xf8\xf9\x05\xf0\x03\x06\xf9\xc4\x00Q\x00\xea\x04\xf4\xfft\xfcb\x03\n\x00\xc7\xffX\x04\x9b\n\xc3\x01v\xfe\xfe\x01Y\x06G\x01 \x02\xa5\td\x05\xbd\xfe6\xff\'\x0c\x87\x00\xa3\xfaZ\x04Y\x03\xdf\xff\r\x02\xd8\x03\xa7\x00W\x01\xdb\xff\xdd\xfc\x86\xfe\xb8\x05I\xff\xaf\xfd\x03\xfd[\x03\xb2\x02\xb9\xfd\xa7\xfa\xf0\xf9\x05\x06\xcc\x02\xec\xf8\xb1\xfb=\x08\xc2\xfb\xf0\xf3\x0e\x03\x95\x05f\xf6Y\xf7N\x04\xbe\xfe\xe6\xfc\xd0\xfc\xb1\xff\x93\xfe\xb9\xf7\x10\xfd\x90\x01\x88\xff\x9b\xff\x03\xff\xd9\xfc\x08\xfc\xcf\x01:\x03T\xfeX\xfc}\xfdz\x06;\x03O\xfb\xf8\x01\xdb\x04T\xff\x8d\xff\x83\x06\r\x02\xa5\xff\xdc\xfeJ\x08\x1e\x03\xbe\xfc\x14\x055\x00\xbc\x00\xab\x02K\x05\xea\xfd\x82\x00G\x04%\x00\x11\x01\xeb\xff\xd8\x03\xee\xfdD\xff\\\x05\x8c\x00\xcb\xfd\xcc\x02\\\x00\x19\xfa\x98\x07\x86\x006\xff\x0c\xfe\xb4\xf7]\x0bS\x03\r\xf4\x9c\x00\x99\r]\xf6)\xf3\x18\x08J\x08U\xf6\x96\xf7\xf5\x01!\x00\xf8\x01\xb2\xf8\xe9\xfb\xf8\xfax\xfa\x86\x04\xb4\xff\x14\xf9\xd4\xfc 
\x02\xc9\xfb0\xfe\xae\xfe\x7f\x01L\xfe\x85\xff\xb6\x01\x91\x00\x12\xfde\xfc\x0e\x07g\xfe2\xf8\xb5\x03\xa2\t\xfc\xfb\xe2\xf7\xfc\x04_\x02K\xf7\xad\x04\xe2\n}\xfc\xd7\xf8W\x07\x06\x05\xab\xfb\x9a\xfe\xc6\x07&\x05\xb8\xfd:\x05\xf2\x03\xd1\xff\x07\x00k\x05Q\xfe\xe5\x04\xfd\x061\xf9\x07\xf9\xe7\x07Y\x0c_\xf3\x82\xfa\xef\x08\xb6\xfa\xba\xf8\xa8\x03\xf9\x07\x1a\xf8\xc8\xf5\xa5\x06d\x03v\xf8\x95\xfdC\x02\xcd\xfc\x97\x02\xdf\xfd\xce\xfc\x06\x05\x94\xfc\xf4\xfd\x1c\xfe\xdd\xff\n\x01\xde\x00\xa4\xfb(\xfe\xe3\xfc\xea\x00R\x04}\xf4\x03\xf9{\x01?\t\x95\xf62\xf5.\n\x1d\x03{\xf2\xac\xfc\xff\r\xa6\x02\x05\xf5k\x01\x9f\x0e\xfe\xfa{\xf9x\x08\xf5\x08N\xfd\xb6\xfc*\x08e\x07G\xfd+\x03\x19\xff\xe0\xfeG\x05\xf0\x02\x86\xfei\x04\xac\x03\xbc\xf6l\x01\x81\x07`\xfe\xa9\xfa_\x05\x8c\x00\xfa\xf6~\xfe\xe3\x07\x9b\xfdl\xfbl\x03\xa8\xff\xdb\xf9Y\x02\xff\x01\x80\x00\x06\x00\xeb\xfc\x19\x08\xae\xff)\xfb"\x01\x8f\x04m\xfc\x99\x02\x01\xfd\x8c\xfc\xb9\x03\xb6\xf9\x0c\xfc\xc6\x029\xfd@\xf6\xda\xfe\xa4\xfc\xbb\xf9C\x00\xaa\x00o\xfa\x1a\xfcJ\xff\xd4\xfch\xfb@\x08\xb8\xff\x8e\xf3\'\x03\x18\x074\x00z\xf9\xfc\xff]\xff\xb1\x02A\x03\x9d\x01\xcc\x034\xfd\xc7\xff\xa5\x04\x84\t\xef\xfe3\xfb\xe6\x06)\x07\x83\x05c\xfa\xc4\x05\xc7\x05|\xf8\x98\x04o\t\x1a\xff\x9d\xff\xd6\x01(\x02\xda\xfe[\xfd\x8a\x05>\xfe\xf9\xf9\x8f\x03"\x05\xe7\xf7T\xfc\xd6\xfe\xa0\xfb\xb0\xfc\r\x08\xe9\x00\xbf\xf4\xb9\xfe\xd2\xfeY\x01\xcf\xfd\xc8\x03\n\x02-\xf3F\x02\xc4\x0e\xa5\xfb\xc2\xf53\x02G\x07\xbb\xfa\x90\xfe\x9f\x0e\x05\x04\x91\xf2\xe2\xf7\x8b\x04P\x11`\x01V\xf3\x14\xf8\x86\x06:\x07=\xf7\xb5\x03;\x00\xb1\xf7\\\xf6\x95\xfd<\r\xde\x06\xc6\xf2\xb2\xf5\x83\x03\x91\x04\xa1\xfa\xe4\x01\xe1\x02U\xf8(\x03\x9b\x08\x16\x02\x0e\xfd\xf3\xff\xa6\xfaK\x04\xdf\x07X\xfe\xf0\xfeG\x05\xde\x00\x85\xfd\x03\x06^\xfd\x0f\xfd\x87\x03\x12\x02\xc7\x03\xb8\x02\xa0\xfcn\xfa\x80\x04k\x03\xc0\xfbQ\xfdP\x08\xd7\x00l\xf6\xc7\x00M\x01r\xfc|\x01]\xfd-\xfdV\x03\xd7\xfbL\xfcl\xf9\x0b\x02\x81\x0cm\xf3B\xf2 \x0b\x93\x07\xac\xfdj\xf2\xe6\x00\xa6\n\x1a\xfa\xf8\xf9d\x05(\x0b\xf8\xfc\xb3\xf0p\x00\x15\x0eo\xfd\xc2\xf2S\x02\xc1\x0e\x0c\xfb\x84\xf4D\x06Y\x08L\xfe\x01\xf5\xa7\xffh\x0fn\x080\xfa\xda\xf6u\x00n\x0c@\x03h\xfa\xf5\x01|\x06\x01\x00\xdf\xfb\xec\x03L\x05\xcb\xfd\x1b\xf7\x94\xfd\xbc\x0eA\x04\xf9\xf4\x8d\xfb/\xfdY\xff-\x03}\xfd}\x00\x03\xfe\x0f\xfc\xbf\x02\x8e\x02\x9b\xf9[\xfc+\x02g\x01\xd0\x073\x00\xc9\xfd\xa2\xf7\xc0\xf9+\tg\x04\x16\x02\xbc\xffw\xf8P\xfea\x07\xdc\xfe\x8e\xfcz\xfc"\x00\xbc\x02\xb1\xffI\x05U\x00\x8b\xf5m\xf9-\x07\n\x05\xe0\x01\xf9\xfa\xdc\xfa\x81\xfb6\xff\xd5\x05\x1e\x028\x02\xdd\xf8\x90\xfbU\x02[\x03\xbd\x00W\x01`\x00\x1e\x00%\x02d\x04a\x04m\xfcC\xfc*\xfe\xad\x00+\x08\x8f\x04\xc5\xfb\xb7\xfc\xd3\xfd\xb9\x00@\x00d\xf94\xfe5\x05\x9a\xff]\xff\x1e\x04\xca\x02\xed\xf8\xff\xf4\x11\x03`\r\x0b\x04\xeb\xfc\xbb\xfb\x15\xfd\xac\x00\xf2\xfe:\x04\xb2\x00\xa1\xf9\xb1\xfe~\x011\x04[\x04\x96\xfc%\xf3<\xfb\x8a\x0b\x10\x07\xd6\xffV\xfd\xa9\xf9\x96\xfby\x00\x16\x06s\x04\x86\xfd\x87\xfb\xa1\xfe\xae\x01\xa2\x04D\x01o\xfb\xed\xfd\x9c\x03\xa8\x02"\x05\xdd\x01\xdc\xf9\x82\xfcD\x01\xed\x04\x15\x01?\x00\xfc\xffl\x00\xa9\x01O\xff\x07\x00\xf2\xff\xa8\xfe\xdf\x00\x93\x00\xd3\x01\x87\x03k\xfc\xb8\xfa)\x00\xc3\x01\x18\x02_\x01\xd4\xfd\xd7\xfc.\xfd<\xff\xb8\x00\xff\x05 
\xfd\xe5\xf7\xb2\xff\xf3\x04)\x03\xa7\xf9\x1d\xfdL\x00\xe7\x01\xb9\x01?\x03\xb7\xfdD\xfc\x94\xffj\x00:\x05:\x04\xd4\x00\xc2\xf85\xfd;\x031\x03\xbf\x03[\x00B\xfd|\xfd\x8c\x00\xd1\x01\xcf\x00\xe2\xfd\x8b\xfdV\xffB\x03*\x06{\xfeg\xfa)\x00\x9f\x01\xd3\xfee\x01\x92\x03\xb6\x02\xf8\x00\xcb\xfdp\xfe7\x00\xd4\xff2\xff"\x04\xdf\x025\xfd\xe6\xfd\xe9\x00J\x01\x8f\xfb{\xfd!\x02$\x03\xb5\x02\xc1\xfd_\xfb\n\xfe\xfb\xfe6\x00g\x02\x81\x02\x8f\xfe\x94\xfcx\xfe\n\x00K\xfe<\xfd\x19\x00+\x03w\x02\xd7\xfe\xeb\xfb\x8d\xfc\x83\xfe*\xff\xd8\x00V\x02\xdc\x02}\xfdO\xfa\xba\xfe\xbf\x01(\x00\x9b\x00\xbb\x01\xb0\x00\x03\x00\x91\xfe4\xfd\xd8\xff\xf7\x01R\x00 \x02\xd2\x02v\x00(\xfd\xa5\xfcf\x00\x19\x02\xad\x01\x94\x00\x00\x01\xa1\x01r\xfe\x17\xfe\xfe\xfe\xb2\xff\xb3\x01\x12\x01h\x02<\x03\x19\x00P\xfc\xc6\xfb\xad\x01\x8a\x05q\x03\xb2\xff\x86\x00\xb8\xff\xe1\xfd5\xff\xe2\x01\x16\x02g\xff\xef\xfe]\x016\x02\xdf\xfdZ\xfcE\xfe\x19\x00\x95\x01\xd7\x00e\xff\xc0\xfd\xc2\xfb\xbf\xfe\xed\x00&\x00\x03\x01\x8d\xfe/\xfd\xbd\xfd\x9f\x00\xf3\xff\xb7\xff\x03\x02/\xffq\xfc\xa8\xfd\x97\xfe\xbf\xfd\xe3\xfew\xff\xf2\xffM\xfc\x98\xfb\x85\xfd\x02\xfd3\xffr\xff\xcc\x01\xd1\x02T\x03\xd5\x01L\x02\xf5\x04\x85\x07{\n^\n\x0c\x0c\xc0\n3\n,\x0b%\x0b\xd8\x0bI\x0b.\n\x02\x0b\xfe\x07}\x05\xeb\x03\x9e\x00\\\x00\x9d\xff\x0c\xfe@\xfb\xe5\xf9\x83\xf6d\xf4T\xf4\xa7\xf4Z\xf4\xa0\xf3G\xf4\xba\xf2\xee\xf2w\xf4\xa9\xf6\xdf\xf8\x85\xf9<\xfb.\xfc-\xfe\xb8\xff!\x00S\x03\xe4\x04\x0c\x05\xc1\x051\x07\xf4\x06U\x05\xf9\x04p\x04"\x050\x04X\x02\x12\x01\xc8\xfd5\xfcw\xfb\t\xfa\xf5\xf8\xcf\xf7\xf3\xf6\xc0\xf4\x0f\xf5V\xf4\xbe\xf4C\xf4\n\xf5\x95\xf6\x89\xf64\xf8r\xf7\x1f\xf9\xf9\xfae\xfcs\xfe\xef\xfe\x96\x00\xb2\x00\x9b\x00B\x01\xdd\x02\xff\x04\xe6\x03a\x03\xc4\x02\xbe\x02\x1e\x03\xbd\x02\x1e\x03\xfe\x01z\x01\xa3\x01=\x00\xca\xff\xaa\xff\x11\x01\xac\x01\xd3\xff\x1a\xff\xef\xfd\xfa\xfeg\xfd\xfc\xff\xd4\xff\xbd\xfb\x9c\xfd\x9d\xfdh\x00\x11\x01\xc0\xff\x84\xffk\xfcO\xfd\xd6\x08B\x14m\x16~\x0f\t\t\xd1\x0e|\x182\x1f\x0f \xaf\x1f\x98!\x8a\x1f\xf3\x1e}\x1e\x92\x1a\x8c\x16\xf6\x11v\x16\xb1\x18\x1b\x11>\x06\xd1\xfb\x05\xfa\xba\xf9\x9d\xf8\xa9\xf7\xe5\xf1\x9b\xea\xdc\xe4\xbb\xe4\xaa\xe5\xd2\xe5B\xe4\x83\xe5\x90\xe7\xd6\xe8\x97\xea\xa8\xe9+\xeb\x80\xeeL\xf4<\xfaO\xfd\x84\xfe\xc5\xfb\x9e\xfb\xd6\xffw\x04\xa0\x07\x85\x07t\x06\xba\x04<\x03\x12\x03A\x02X\x01\xff\xff\x0f\x004\xff\x97\xfdE\xfa\xba\xf6\xe3\xf4|\xf5\xa4\xf7\xef\xf8\xe0\xf7>\xf52\xf3{\xf3{\xf6\xbc\xf8\xdf\xfa\t\xfcD\xfc\x96\xfd\xae\xfe\xb4\xff\x00\x01\xc3\x02\x98\x05k\x07\xa2\x08i\x08\x86\x07!\x07\xc0\x07}\t\xb1\n\xbf\nl\t\x18\x07\xa3\x05\x90\x05\xbf\x05[\x05\r\x04\x84\x04\xf1\x03\xa2\x01T\x00\t\xff\xa7\xfe\x0c\x01\xa0\x02\x92\x02A\x005\xfd+\xfe\xef\xfe\xac\x01g\x03\xea\x03\xf2\x00\x11\xffK\x02n\x01[\x02\xf7\x00\xa3\x01P\x03\xf0\x00\xda\x04\x90\x03~\xfe\xf5\xfb^\xfc\xf9\x02\xa9\x01\t\xff\x19\xfd\xea\xf9\xdd\xf8\xdb\xf7\x14\xfb\x13\xfc!\xf8\x08\xf5#\xf5\x14\xf70\xf6&\xf5\xbc\xf58\xf6\xef\xf5\x91\xf67\xf8\x99\xf8\x11\xf7D\xf7W\xfa\xd3\xfc]\xfc6\xfb\xdd\xfb\x1d\xfc\x9a\xfc\xb3\xfe\x9f\x00X\x00\xbc\xfe\xae\xfd*\xff@\xff\xd4\xfe\x9f\xfe\xca\xfd\xcd\xfd\xdf\xfd\xf5\xfc\x1a\xfc~\xfb\xf3\xfbh\xfc0\xfb 
\xfc\x95\xfb\x9d\xfey\x08\x19\x10s\r.\x04\xd9\x03"\x13\x8c\x1f\x96%M&\xe2!*\x1d\xf0\x19\xfe#\x99.\x04/\x07&\xa1\x1d\'\x1b\x07\x18\x8c\x15\xc5\x12\xb8\x0e]\x08\xf6\x02\xf5\xfe.\xf8\xfc\xef\xa6\xean\xe9\x85\xebo\xeb\x9d\xe8\xdf\xe1\x1d\xdd6\xdeL\xe4\xe7\xea\xb1\xee\x89\xef\x11\xedZ\xec\x81\xf0\xcf\xf8\xd0\xff\x1f\x02\xb3\x02\xee\x02\xb9\x04\xd2\x04J\x06\xf0\x08\xa0\t4\tD\x07*\x06r\x03\xcc\xfd\x91\xfb3\xfc\xdc\xfc\xdf\xf9^\xf5\xc5\xf1o\xee\x16\xec\xac\xec\x9a\xef*\xf0\x94\xedq\xeb\xd4\xec\xd8\xee\xd6\xf0 \xf4\x88\xf7\x17\xfa\x87\xfa\xd2\xfc\x00\x007\x02(\x04>\x07E\x0b\x9a\x0cF\x0c\xee\x0b\xc2\x0c\xae\r\x18\x0ed\x0f\xb0\x0f5\x0eO\x0bh\t%\t\x97\x08*\x08\xa2\x06\x07\x06\xf0\x03U\x01\x94\xff<\xff\xa3\xffW\xff\xff\xfd\xc1\xfd\xf2\xfc\xa8\xfb\x84\xfb\x93\xfbU\xfd\x93\xfd\x9c\xfd\x12\xfd(\xfd\xc8\xfc\xce\xfdw\xff\xd9\xff\xd4\x00\xa6\xff\x83\x00\xfd\x00\x91\x01\x06\x02|\x03\x16\x04;\x03\xcc\x02\xc4\x02\xee\x03\xae\x03B\x04\x01\x04\xba\x02\x9c\x01O\x00\x05\x01\x02\x01[\x00b\x00\x08\xfe\xee\xfc\xf5\xfa\xf9\xfcm\xfe\xf9\xfe\x17\xffo\xfcM\xfc\xb9\xfa\xe2\xfd\xab\xff\xc2\x00\xde\x00\x84\xfeF\xfe\xd4\xfc>\xfd\t\xfeR\xffq\xff\xc1\xfd\xd0\xfa!\xf9\'\xf9\xca\xf9{\xfaa\xf9\x01\xf9\x03\xf8\\\xf6C\xf6y\xf6\xeb\xf7\xf4\xf7\x07\xf9`\xfa\xd3\xf9\xcf\xf8\xbc\xf8Q\xfd\x0c\xffP\x01\x08\x03\xd4\x02\x80\x02\x03\x01\x01\x07\xe2\nN\x0b\x99\x0c\xf0\r\x06\x0e\xc9\x07\x8b\x06E\x0c\x91\x12\x86\x14\xfe\x10\x92\x0c\xa9\x04\xc6\x01V\x08V\x11\xbe\x12u\t\xee\x00\xe7\xfej\x01R\x06i\tC\x08\xee\x03\xdd\xfe\xa0\xffY\x02\xc2\x03)\x04\x03\x03:\x05\xf1\x05\xa5\x05V\x04\xd4\x01Y\x02\xf7\x04]\x07T\tt\x05\x15\x00\xd9\xfb\xbc\xfa\xf0\xffX\x00d\xfd\xf6\xf6\xbc\xf1s\xf1\xf1\xef\x9a\xf1d\xf2\xad\xef\x1e\xec\x96\xe9\xf9\xeb\n\xefc\xee\x06\xef\xae\xf1\t\xf3\xab\xf2\xb0\xf3\x86\xf7\x03\xfbT\xfb\xe4\xfby\xff\x87\x00\t\x00\x1e\x00\xb0\x02\xc4\x04\x08\x03\x84\x02n\x03\x8c\x03\x82\x01\xc4\x00\xd2\x01B\x02j\x00\x95\xfe\xc1\x00\xf8\xfeG\xfd\xac\xfe,\x02-\x03\xee\xfe\xa1\xfdS\x020\x05\x13\x05\x0b\x05\xf2\x05\x9f\x06\x1d\x04c\x06\r\t]\x08\xa2\x05\'\x03\xe3\x04\xcc\x04\xbb\x02\xe0\xff<\xff\xbc\xfeI\xfd`\xfc\t\xfd\xe9\xfb\xb7\xf9J\xf9\xe0\xfb\x85\xfce\xfb!\xfe\x1e\x01\xfc\x01\xb0\xfe\xe5\xff\xe1\x05O\x08\x9b\x07\xbf\x06\xef\x07\xef\x06\xed\x05)\tZ\x0b\xda\x06\xad\xff\xc3\xfe_\x03\x1f\x04y\x00_\xfb\xd1\xf7\xb3\xf5\xc0\xf5\x03\xf9E\xf9!\xf5\xd6\xf0_\xf2\xdc\xf5U\xf6+\xf6\x03\xf7\x19\xf9\xbd\xfac\xfe\xde\x01\xae\x00l\xff\x18\x02\xff\t/\x0ey\x0e\xb2\x0e\xf5\x0b9\t\xdc\t\xba\x10R\x16\x14\x12\x98\x0b\xf1\x06\x8d\x04\xd0\x04-\x06O\t\xc8\x05\xfb\xfc\xe4\xf6\x01\xf8\xcc\xfc\xe6\xfd\x14\xfc\xe9\xf9K\xf8\x1f\xf7\x10\xfa?\xff\x0b\x02(\xff7\xfd\x94\xff\xc4\x02W\x04\xfd\x04\xd6\x05u\x03\xdc\x00d\x02\x04\x06&\x06\xce\x02\xdd\x00E\x00\xd9\xff\x84\x00\x1b\x01,\x00T\xfd\x87\xfc\x93\xfe^\xfee\xfd\xba\xfcS\xfc.\xfc\x9c\xfbH\xfe$\xff\x98\xfcL\xfa\xfc\xfaR\xfd\x87\xfd\xa2\xfd\xe8\xfd8\xfdV\xfa`\xfa`\xfdK\xfe\xd5\xfc\x9c\xfa\x11\xfb&\xfb\xce\xfa\x01\xfb\xdd\xfb=\xfc\xb5\xfa\xf2\xfaH\xfc\x1a\xfd\xad\xfc\xe8\xfc\xd8\xfe\x9a\xff\x7f\x00\x8d\x00\xbc\x01\x9d\x02\x84\x02\xcd\x03:\x05\x13\x06\xbf\x05\x14\x05v\x05\xba\x05\xc4\x05\r\x06\xee\x05H\x04\x7f\x02o\x012\x02%\x026\x00f\xfe\xeb\xfc\x06\xfd~\xfcU\xfc\x93\xfc\xc4\xfbI\xfb\x87\xfb\x10\xfee\xfeJ\xfe\xcd\xff\x9a\x02\x99\x03\xb5\x02\x7f\x03)\x06\xfb\x07\xe4\x07\xd2\x08\x92\x08\x91\x07\xc2\x05F\x06\x94\x07h\x06\x85\x03P\x01\xdb\xffK\xfe\x8b\xfd\x0c\xfcC\xfbI\xf9;\xf7\x9d\xf6K\xf6f\xf6\x83\xf5\x99\xf5\xf6\xf5\xf4\xf57\xf6\x8e\xf6\xb2\xf7{\xf8\x10\xf9\x01\xfa\xe0\xfa\xc1\xfb0\xfcx\xfdy\xfe\x81\xff 
\x00\xa8\x00O\x01N\x01\x03\x02\xda\x02E\x035\x03\xe5\x02\xd5\x02\xd3\x02+\x03\xf4\x02\xe4\x02\x84\x02R\x01\x82\x01\x98\x01\x13\x020\x01e\x00\xf6\xff\x08\x00\x00\x00\x04\x01\x14\x01"\x00\xcd\xff`\xffK\x01\x8b\x01\x9f\x02\x12\x02\x83\x01\x02\x01\x00\x014\x02\xa4\x02\x9e\x02y\x01\x85\x00\xa5\xff\xb1\xff\xf6\xfd\x94\xfd\x05\xfd\xe7\xfc\x92\xfc,\xfc\xcb\xfd\x03\xfdo\xfd]\xff\xf8\x02\x17\x05\x88\x06N\tx\x0c\x14\r2\x0e\xc3\x12&\x17\xdc\x17\xa2\x15\xdf\x15w\x15\x08\x148\x13\x7f\x14e\x13F\x0c\xee\x05\x87\x02_\x01i\xfeD\xfc\x04\xfa\xbc\xf4L\xed\xf6\xe9\xd4\xeb\x89\xed\xfa\xecf\xeb\xaf\xea_\xe9V\xe9\x02\xedd\xf2\xdf\xf5:\xf6\xd6\xf6m\xf89\xfb\x82\xfe[\x02\xdf\x04\xca\x04\xd0\x03\xfe\x03\x7f\x052\x06R\x06\xa1\x05\x9c\x03\x90\x01\x08\x00\xea\xff&\xff`\xfd\x97\xfb?\xfa\x1c\xf9\x88\xf8\xd2\xf8\xd3\xf8\xe5\xf7\xe9\xf6s\xf7\x03\xf9\'\xfa\x1d\xfb\xee\xfb%\xfc\x8b\xfc\xda\xfd\x1a\x00\xef\x01\xa3\x02\xc3\x02\xe3\x02\xff\x02\xea\x03o\x05/\x06\r\x06.\x05\x80\x04/\x04O\x04\xc2\x04\xba\x04\xc8\x03\xc0\x02%\x02\xba\x01\x85\x01\x96\x01\x8d\x01(\x01\xaa\x00\xa0\x00\xfb\x00\xb6\x00\xd1\x007\x01\xd8\x01*\x02"\x02\x1a\x02\x04\x02\xf6\x01#\x02b\x02\x84\x02\xe7\x01\x0e\x01+\x00\xe6\xff\xb0\xff1\xff\xfe\xfe=\xfe\x81\xfdI\xfdQ\xfd4\xfd\xc2\xfds\xfe\xab\xfe\x18\xff\xc2\xff3\x01\xd0\x01e\x02\x86\x03H\x04\x04\x05\x00\x05<\x05\xae\x05;\x05\xc8\x04]\x04h\x03K\x025\x01\x9e\x00\xc6\xffn\xfe\r\xfd\xc3\xfb\r\xfb\x98\xfa9\xfa\'\xfaZ\xf9\xba\xf8\x9b\xf8\x18\xf9\xf6\xf9S\xfaV\xfa\x88\xfa\xe4\xfa\xc6\xfb\xcf\xfc\x87\xfd=\xfe\x97\xfe\x04\xff\xe2\xffZ\x00/\x01\x89\x01\xd8\x01\xf8\x01\xd0\x01\x1e\x02=\x02\xeb\x01\xb1\x01f\x01\xfd\x00j\x00\xee\xff\xa8\xff\xa8\xff-\xffu\xfeK\xfe\xe0\xfd\xb7\xfd\x05\xfe{\xfe\x7f\xfe"\xfe\x0e\xfe\xad\xfe7\xffF\xff9\x00\xb6\x00\xa5\x00\x7f\x00\xf9\x00\xe3\x01\x0e\x02<\x02\xd1\x02\x13\x03\xc2\x02\xac\x02\n\x03\xef\x02\xd0\x02\xae\x02\x1c\x02A\x01\x9f\x00\xb0\x00\xf7\xff"\xff\xdb\xfe\xa1\xfe\x91\xfd\t\xfd(\xfe\xaa\xff\x83\x00\xcc\x01\xaa\x03~\x04\x86\x04\xc8\x05.\nS\r\x96\x0e\xbf\x0e\xd7\x0ev\x0e\x0f\x0e\xb1\x0fU\x11\'\x10n\x0c\x00\t\xba\x060\x05\xd7\x03\xa2\x02\xa1\xff6\xfb\x98\xf7\xb9\xf5K\xf5\xc1\xf4\xdd\xf3\xb7\xf22\xf1H\xf0\xa2\xf0\xec\xf1\xbf\xf3\x91\xf4\xee\xf4\x81\xf5\x9f\xf6~\xf8\x1d\xfa\x85\xfb\x8a\xfc\xe2\xfc\x9a\xfd\x9e\xfe\xb3\xff\x9b\x00\x8b\x007\x00\x04\x00\x1a\x00\xac\x00|\x00\n\x00i\xff\xbc\xfe^\xfe7\xfeE\xfe\xc5\xfd\xe2\xfcB\xfc*\xfcD\xfcV\xfcl\xfcz\xfcW\xfcx\xfc\x01\xfd\xd5\xfd\x81\xfe\xe3\xfeS\xff\x06\x00\xa8\x00)\x01\xbc\x01g\x02\xc5\x02\xdb\x02#\x03}\x03\xa0\x03\xa6\x03\xd1\x03\xf8\x03\xfd\x03\xd1\x03\xc0\x03\xb5\x03\xbc\x03\xe2\x03\xf8\x03\xf0\x03\xbd\x03\x8e\x03l\x03\x7f\x03\x8e\x03v\x03-\x03\xce\x02p\x02A\x02\x0f\x02\xcc\x01k\x01\xf4\x00\x8e\x00\x1d\x00\xd8\xff\x9d\xff;\xff\xe8\xfe\xb8\xfe\x86\xfeV\xfe=\xfe*\xfe\x1c\xfe\x1a\xfe3\xfe\\\xfeo\xfex\xfee\xfe]\xfe\x93\xfe\xc6\xfe\x06\xff(\xffM\xff~\xff\x94\xff\xc4\xffG\x00\xea\x001\x01\'\x01n\x01\xdc\x01v\x02\xef\x02\x89\x03\xa0\x03\x03\x03\x81\x02\x94\x02\xef\x02\xa1\x02\xe9\x01R\x01\xba\x00\xcd\xff\x1e\xff\n\xff\xc7\xfe\xfc\xfd1\xfd\x0c\xfd$\xfd\xd2\xfc\xae\xfc\r\xfdA\xfd\x02\xfd\n\xfd\x87\xfd\xdf\xfd\xe1\xfd\xda\xfd=\xfe\x85\xfe\\\xfel\xfe\xbf\xfe\xd2\xfe\xa6\xfe\xb2\xfe\x14\xffM\xffK\xff9\xff\xa4\xff\xc4\xffo\xff\x81\xff\xca\xff\xe1\xff\x8d\xffa\xfft\xff:\xff\xdf\xfe\xc1\xfe\xd4\xfe\xdb\xfe\xa5\xfe\x81\xfe\x8e\xfe\xd6\xfe\x1d\xffZ\xff\xad\xff\xed\xff\x1f\x00R\x00\x8f\x00\xd6\x00\xf8\x00\x12\x01\x1d\x01\xfd\x00\t\x01\xe1\x00\xc4\x00\xb1\x00\x86\x00\x89\x00l\x00W\x00\\\x00d\x00\x7f\x00k\x00k\x00}\x00F\x005\x006\x002\x00\xfd\xff\xa1\xff\x8b\xf
fp\xff\xd8\xfez\xfe\xc2\xfen\xff:\x003\x01t\x02T\x03!\x04^\x05c\x07\x8e\tJ\x0b\x91\x0cH\r\x96\r\xe2\r\x1f\x0eV\x0e\x06\x0e\xb0\x0c\xa8\nh\x08\x8a\x06\xc1\x04\x95\x021\x00\xa8\xfd\x1f\xfb\x9b\xf8\xd0\xf6\xfb\xf55\xf5\xfa\xf3\xec\xf2z\xf2\xc4\xf23\xf3\xdf\xf3\x05\xf5\xf6\xf5\x8c\xf6W\xf7\xc5\xf8d\xfaS\xfb\xfc\xfb\x0b\xfd\t\xfe\x97\xfe\x0e\xff\xbc\xffI\x00)\x00\xf7\xff]\x00\xbc\x00x\x00\xfe\xff\xdc\xff\xdc\xffu\xff\x15\xff\x11\xff\xef\xfe\x1f\xfeb\xfdp\xfd\x9e\xfd^\xfd\x0c\xfd\x10\xfd8\xfd\x1a\xfd6\xfd\xe3\xfd\x81\xfe\xa5\xfe\xbf\xfeS\xff\xf7\xffY\x00\xdd\x00k\x01\xb3\x01\xc9\x01\n\x02k\x02\xbe\x02\xcf\x02\xd7\x02\xf1\x02\xeb\x02\xde\x02\xe0\x02\xf8\x02\x08\x03\xf5\x02\xe7\x02\xf4\x02\xe1\x02\xca\x02\xe0\x02\xfa\x02\xed\x02\xcb\x02\xcc\x02\xaf\x02g\x02%\x02\xf2\x01\x9f\x010\x01\xc2\x00Y\x00\xda\xffM\xff\xd3\xfey\xfe\x13\xfe\xac\xfdg\xfd;\xfd\x1c\xfd\x06\xfd"\xfdu\xfd\xb9\xfd\x04\xfek\xfe\xeb\xfeg\xff\xe0\xffe\x00\xe7\x00W\x01\xad\x01\xfe\x01J\x02l\x02i\x02\\\x02Q\x02+\x02\xeb\x01\x96\x013\x01\xc7\x00o\x00\x1e\x00\xcb\xffn\xff@\xff+\xff\xe6\xfe\xd5\xfe(\xff\x98\xff\xa6\xff\xa3\xff\t\x00r\x00\xbb\x00D\x01o\x02)\x03\xd4\x02\x99\x02\n\x03P\x03\xf9\x02\xc9\x02\xf7\x02z\x028\x01e\x00m\x00\xf9\xff\xd7\xfe\x0f\xfe\xd1\xfdC\xfd]\xfc\x12\xfct\xfcC\xfc\xa0\xfb\x97\xfb\x1c\xfcU\xfcD\xfc\x8f\xfc8\xfdt\xfdU\xfd\xbd\xfdq\xfe\x7f\xfeA\xfe\x82\xfe\x02\xff-\xff\'\xffb\xff\xd3\xff\xe3\xff\xd4\xff=\x00\xd5\x00\x16\x01\x16\x01<\x01\x85\x01\xa3\x01\xa9\x01\xc5\x01\xc4\x01\x8e\x015\x01\xfe\x00\xdc\x00\xb1\x00u\x00*\x00\xed\xff\xb7\xff\xa6\xff\xac\xff\xb7\xff\xcd\xff\xd5\xff\x00\x00<\x00v\x00\xaf\x00\xd0\x00\xe8\x00\xee\x00\xfe\x00\x06\x01\xd8\x00\x83\x005\x00\xe3\xff\x84\xff\x13\xff\xaa\xfeE\xfe\xb6\xfd%\xfd\xc6\xfc\x93\xfcS\xfc\xfb\xfb\xcf\xfb\xc6\xfb\xb8\xfb\xca\xfbI\xfcO\xfdj\xfew\xff\xb8\x003\x02\xbb\x03E\x05=\x07q\t<\x0bc\x0cF\r\x1b\x0e\xc4\x0e\xed\x0e\x08\x0f\xd8\x0e\xe4\r.\x0c?\n\xaa\x08\xf1\x06\xcf\x04\x7f\x02/\x00\xd7\xfdi\xfb\x86\xf9]\xf8U\xf7\x13\xf6\xff\xf4|\xf4w\xf4\x97\xf4\n\xf5\xd4\xf5\x8f\xf6\x18\xf7\xd7\xf7\xfb\xf8A\xfa.\xfb\xf8\xfb\xef\xfc\xb6\xfdD\xfe\xcb\xfeW\xff\xb7\xff\xaa\xff\xa3\xff\xdc\xff\xfa\xff\xc3\xff\x82\xffl\xffW\xff\x08\xff\xc9\xfe\xc9\xfe\xa9\xfe4\xfe\xda\xfd\xcf\xfd\xc0\xfd\x9a\xfd\x87\xfd\x91\xfd\xa4\xfd\x9e\xfd\xba\xfd\x18\xfev\xfe\xbf\xfe\x07\xff[\xff\xb3\xff\xf2\xffJ\x00\xa0\x00\xd1\x00\xfd\x00+\x01M\x01g\x01v\x01\x90\x01\xa3\x01\xa1\x01\xa5\x01\xbb\x01\xd0\x01\xe4\x01\xfe\x01(\x02R\x02e\x02\x86\x02\xc7\x02\xe9\x02\xfb\x02\x19\x035\x03&\x03\xf5\x02\xcf\x02\xb9\x02m\x02\x03\x02\xac\x01P\x01\xd0\x00C\x00\xce\xff]\xff\xd6\xfeU\xfe\xff\xfd\xba\xfdw\xfd;\xfd*\xfd9\xfd?\xfd]\xfd\x9e\xfd\xed\xfd3\xfe|\xfe\xe1\xfeM\xff\x98\xff\xea\xffW\x00\xc4\x00\xff\x006\x01\x85\x01\xca\x01\xe4\x01\xf8\x01\x17\x02 
\x02\xfe\x01\xf0\x01\xff\x01\xf3\x01\xb6\x01|\x01a\x01A\x01\t\x01\xd5\x00\xb4\x00\x87\x00/\x00\xef\xff\xd8\xff\xc2\xff\x94\xfff\xff^\xffW\xffB\xff1\xffH\xffk\xffi\xffm\xff\x94\xff\xc7\xff\xe3\xff\x05\x002\x00R\x00Y\x00d\x00\x7f\x00\x8c\x00|\x00t\x00m\x00X\x00A\x008\x00\'\x00\xff\xff\xcc\xff\xc5\xff\xb8\xff\x8f\xff\x80\xff\x8f\xff\x82\xffS\xffO\xffm\xffp\xff^\xffh\xff\x95\xff\x96\xff\x89\xff\xa5\xff\xdd\xff\xf1\xff\xe8\xff\xfd\xff5\x00Q\x00Y\x00q\x00\xa7\x00\xc8\x00\xc6\x00\xe8\x00%\x01M\x01V\x01S\x01x\x01\x91\x01\xa1\x01\xc0\x01\xfc\x01\xf5\x01\xb2\x01\x92\x01\x8f\x01h\x01#\x01\xe4\x00\xb2\x00.\x00\xa1\xffD\xff\r\xff\xa7\xfe)\xfe\xd2\xfd\x93\xfd5\xfd\xd7\xfc\xbb\xfc\xca\xfc\xad\xfc|\xfc\x85\xfc\xbb\xfc\xcb\xfc\xc5\xfc\xfe\xfco\xfd\xb5\xfd\xe1\xfdU\xfe\n\xfff\xff\x81\xff\xe1\xffe\x00\xa8\x00\xb8\x00\x03\x01j\x01b\x01\x1f\x01\xfc\x00\xf9\x00\xce\x00\x94\x00u\x00X\x00\t\x00\xa6\xffk\xffd\xff[\xffC\xff\x1f\xff\xf4\xfe\xd4\xfe\xd4\xfe\xe7\xfe\t\xff$\xff.\xff#\xff4\xffd\xff\xa0\xff\xdb\xff\xf4\xff\r\x00,\x00B\x00}\x00\xbd\x00\xfa\x00\x1c\x011\x01M\x01~\x01\xad\x01\xc6\x01\xdb\x01\xf3\x01\xf7\x01\xdb\x01\xdb\x01\xf9\x01\xef\x01\xb5\x01u\x01Q\x01\x07\x01\xaa\x00\x80\x00f\x00\xf9\xffk\xff.\xff"\xff\xf8\xfe\xd2\xfe\x0f\xff]\xffJ\xffN\xff\xce\xfft\x00\xe7\x00y\x01A\x02\xe6\x020\x03\xb3\x03\x8a\x04\x1b\x05J\x05\x8e\x05\xee\x05\n\x06\xe3\x05\xc7\x05\x96\x05\x0b\x054\x04y\x03\xd0\x02\xf8\x01\xdb\x00\xbd\xff\x9c\xfek\xfd`\xfc\x93\xfb\x00\xfbb\xfa\xa9\xf9%\xf9\xdd\xf8\xda\xf8\x10\xf9r\xf9\xd7\xf9.\xfa\xa0\xfa6\xfb\xf7\xfb\xc7\xfc\x8b\xfd=\xfe\xb1\xfe\x1a\xff\x97\xff\x15\x00}\x00\xc9\x00\xfb\x00\x03\x01\xe9\x00\xe7\x00\xf2\x00\xe9\x00\xbd\x00\x83\x00N\x00\x0e\x00\xde\xff\xc7\xff\xb2\xff\x8e\xffW\xffD\xff;\xff7\xff@\xffR\xffR\xffE\xffM\xffY\xffS\xffJ\xffB\xff@\xff7\xff/\xffA\xffd\xff]\xffL\xffT\xffx\xff\x94\xff\xa7\xff\xcd\xff\xf6\xff\xff\xff\xfb\xff#\x00p\x00\x98\x00\x9b\x00\xaa\x00\xc4\x00\xbf\x00\xc2\x00\xdd\x00\x06\x01\x0c\x01\xfc\x00\xfd\x00\x18\x01)\x016\x01M\x01V\x01Z\x01c\x01x\x01\x83\x01\x83\x01\x84\x01n\x01N\x01<\x01&\x01\x02\x01\xd5\x00\xa1\x00i\x00,\x00\xf9\xff\xd9\xff\xbe\xff\x9a\xffx\xff_\xffX\xffW\xff_\xff{\xff\x96\xff\xaa\xff\xbe\xff\xde\xff\x08\x000\x00P\x00q\x00\x88\x00\x94\x00\x9d\x00\xa0\x00\xad\x00\xa3\x00~\x00Y\x009\x00\x1c\x00\x00\x00\xde\xff\xb9\xff\x93\xffy\xffk\xff[\xffT\xffL\xff:\xff3\xff+\xff4\xff@\xffH\xffR\xffQ\xffU\xfff\xff{\xff\x95\xff\xab\xff\xbd\xff\xcf\xff\xde\xff\xf4\xff\x19\x004\x00A\x00R\x00n\x00\x89\x00\x9d\x00\xb2\x00\xce\x00\xde\x00\xe2\x00\xef\x00\x04\x01\x0f\x01\x19\x01\x15\x01\x12\x01\n\x01\xfe\x00\xed\x00\xdd\x00\xc3\x00\x9d\x00v\x00Q\x000\x00\xfe\xff\xca\xff\x98\xffl\xffM\xff#\xff\x01\xff\xdc\xfe\xbe\xfe\xa3\xfe\x9b\xfe\x9d\xfe\x9d\xfe\xa6\xfe\xb3\xfe\xc8\xfe\xe3\xfe\x05\xff&\xffC\xffj\xff\x8e\xff\xa7\xff\xc0\xff\xd4\xff\xed\xff\x00\x00\x01\x00\x1b\x00\x1e\x00,\x004\x00)\x00G\x00i\x00\x8b\x00\xa1\x00\x8c\x00\x9c\x00\x92\x00\xd3\x00P\x01\xe8\x01A\x027\x02[\x02j\x02f\x026\x02/\x021\x02\xb7\x01\'\x01\xac\x00G\x00\xd6\xff=\xff\xb6\xfe0\xfe\x99\xfd\x0e\xfd\x8a\xfcF\xfc\x13\xfc\xe3\xfb\xb2\xfb\x9a\xfb\xc4\xfb\xfe\xfb4\xfco\xfc\xd5\xfc9\xfdz\xfd\xdf\xfdF\xfe\xa7\xfe\xee\xfe\x0c\xffU\xff\x9b\xff\xc7\xff\xeb\xff\xfa\xff\x06\x00\xe5\xff\xbe\xff\xb8\xff\x9e\xff}\xff8\xff\xf4\xfe\xdc\xfe\xb6\xfe\x95\xfe\x88\xfe\x95\xfe\x9c\xfe\x91\xfe\xa9\xfe\xde\xfe\x1c\xffD\xff\x8d\xff\xfa\xff>\x00u\x00\xc1\x009\x01\x91\x01\xc9\x01\x1d\x02{\x02\x90\x02\x90\x02\x9e\x02\xb9\x02\xa6\x02|\x02\x8a\x02\x99\x02\x8f\x02j\x02p\x02\x8b\x02\xa8\x02\xde\x02t\x03g\x04G\x05\xfb\x05\xb4\x06\xaa\x07^\x08\
xd0\x08o\t\x0c\n9\n\x0f\n\xd3\t\xa5\t\x0c\t:\x08H\x07.\x06\xd4\x047\x03\xb4\x010\x00\xa8\xfe/\xfd\xc3\xfb\x83\xfa\x8c\xf9\xb4\xf8\x18\xf8\xa0\xf7_\xf7T\xf7L\xf7y\xf7\xce\xf7;\xf8\xbc\xf8O\xf9\x0e\xfa\xca\xfa\x98\xfbg\xfc\xfa\xfc\x84\xfd\xf1\xfdF\xfe\x89\xfe\xa9\xfe\xc3\xfe\xd1\xfe\xbf\xfe\xa3\xfey\xfeH\xfe\x18\xfe\xcb\xfd\x83\xfdA\xfd\x0f\xfd\xd8\xfc\xb7\xfc\xc3\xfc\xd3\xfc\xf8\xfc0\xfdv\xfd\xc6\xfd-\xfe\x9e\xfe\x06\xffk\xff\xce\xff(\x00\x93\x00\xfb\x00J\x01\x9d\x01\xef\x01\x1d\x026\x02E\x02M\x02B\x020\x02\x17\x02\xf9\x01\xda\x01\xc9\x01\xac\x01\x96\x01{\x01[\x01S\x01M\x01F\x01:\x01;\x01B\x01G\x01T\x01[\x01`\x01X\x01P\x01N\x01:\x01\x14\x01\xeb\x00\xbf\x00\x8b\x00N\x00\x0e\x00\xd3\xff\x98\xffQ\xff\x0e\xff\xd2\xfe\xa5\xfer\xfeO\xfe6\xfe&\xfe\x1d\xfe"\xfe5\xfeM\xfel\xfe\x95\xfe\xc1\xfe\xf3\xfe-\xffe\xff\x98\xff\xd3\xff\t\x00I\x00z\x00\xa2\x00\xbb\x00\xcf\x00\xe8\x00\xf1\x00\xf9\x00\x03\x01\xfa\x00\xf4\x00\xe8\x00\xe2\x00\xd6\x00\xbf\x00\xa5\x00\x8b\x00t\x00a\x00J\x00;\x00(\x00\x19\x00\x16\x00\x17\x00 \x00%\x00)\x00<\x00F\x00Y\x00m\x00t\x00\x87\x00\xa0\x00\xa5\x00\xbb\x00\xb0\x00\xb6\x00\xb8\x00\xb0\x00\xb2\x00\x97\x00\x93\x00\x95\x00\x87\x00\x84\x00f\x00?\x00\x1c\x00\xd9\xff\xab\xff\x88\xffg\xffw\xff6\xff\xfd\xfe\x03\xff\xfc\xfe\xef\xfe$\xffk\xff\xa1\xff\x9c\xff\xb8\xff6\x00\xc2\x00\xe8\x01K\x04\xa4\x05\xf7\x05\xd9\x05`\x05\xae\x04\x83\x02v\x01\xe1\x00o\xffH\xfeU\xfd\xe9\xfcH\xfc\xf0\xfa\xcf\xf9d\xf8\xc0\xf6\x15\xf6\x87\xf5\xb6\xf5y\xf6\xf5\xf7\x8f\xf9)\xfa\xb7\xfb\xad\xfd\xa4\xfe\x7f\xfe\xf5\xfe-\x00y\x00\x19\x01\x10\x02\x07\x03q\x03\x15\x03\xab\x03\x14\x04\xa8\x03I\x03\xe3\x01\x11\x01\xa7\x00\xef\xff\xb1\xff\x82\xff\xbe\xffA\xff\xfc\xfe_\xff}\xff\xfa\xfe\x8d\xfe\x88\xfe\xa2\xfe\xf7\xfe\x07\x00\x00\x01\x9f\x01#\x02\xa4\x02\xe0\x03u\x04n\x04m\x04\xd2\x04C\x05\x19\x05}\x05\xcf\x06\xb6\x06\x08\x06\xb8\x05\xa3\x05\xfe\x05\x98\x05\xbe\x05\xd2\x05\xc8\x05\x9a\x05T\x05u\x05#\x05R\x04n\x03\xd6\x02\x96\x020\x02\xb6\x01;\x01\xc2\x00/\x00A\xffi\xfe\xac\xfd\r\xfdJ\xfc\x9b\xfbH\xfbo\xfb\x9e\xfb\x8f\xfbt\xfbZ\xfb3\xfb\x0e\xfb\xe2\xfa\xbf\xfa\xda\xfa\x02\xfbf\xfb\xe8\xfb\xc1\xfcj\xfd\xa4\xfd\xb3\xfd\xb5\xfd\xcc\xfd\xaa\xfd\x9a\xfd\xbd\xfd\x08\xfeP\xfe\x95\xfe\xdf\xfe \xff\x04\xff\x9b\xfe\'\xfe\xcb\xfd\x90\xfdD\xfdT\xfd\xb5\xfd\x19\xfeg\xfe\xc1\xfe8\xffb\xffh\xff\x83\xff\xac\xff\xec\xff0\x00\xb6\x000\x01\x84\x01\xdc\x01 \x02^\x02@\x02\xf9\x01\xb2\x01x\x01J\x01A\x01A\x01\\\x01Q\x016\x01:\x01\xfd\x00\xd3\x00\x8f\x00L\x00\x15\x00\xe8\xff\x00\x00,\x00K\x00g\x00`\x00S\x00M\x00>\x00\x0c\x00\xe5\xff\xcf\xff\xbc\xff\xc4\xff\xdf\xff\x12\x00/\x00,\x00;\x00B\x006\x00=\x00+\x00\x1f\x00\x04\x00\x11\x00E\x00M\x00|\x00\xa0\x00\x8a\x00\x99\x00t\x00U\x00,\x00\xf7\xff\xe9\xff\xb5\xff\xb9\xff\xb0\xff\xb3\xff\xc0\xff\xae\xff\x8e\xffX\xff*\xff\x08\xff\xed\xfe\xb6\xfe\x9d\xfe\xa8\xfe\xae\xfe\x06\xffT\xffw\xff\xae\xff\x9a\xff\xc2\xff\xfc\xff$\x00Y\x00g\x00\x91\x00\xbe\x00 
\x01\x8a\x01\x9f\x01\xab\x01G\x01\xd2\x00\xa8\x00S\x00A\x00\x03\x00\xa5\xff\xaa\xff|\xffe\xffy\xffa\xffA\xff\x19\xff\x01\xff!\xfft\xffm\xff\xbc\xffT\x00\xca\x007\x04\x8a\x06p\x07\xfd\x07\xa2\x06\xf5\x05\xbd\x03\xe5\x02\xc5\x02\x03\x02b\x02\xe3\x01\xb7\x01\x8a\x01Q\x00\xdb\xfd\xeb\xfa\r\xf9\x85\xf7\xf9\xf6\xbd\xf7\xa6\xf8\x1e\xfa\x14\xfb\xda\xfb4\xfc\xf0\xfc\xa4\xfci\xfaV\xfb\x1a\xfc4\xfc\xbd\xfe\x00\x00\x19\x01\xfe\x01H\x02\x8c\x02\xa3\x01\xe3\x00\x95\x00\xa3\xff\x8d\xff\xb0\x00,\x01\xb7\x01d\x02\xd1\x02"\x02\xaa\x00\x94\xff\xed\xfeX\xfd\x1a\xfde\xfd\xc5\xfd\xf6\xfd\x15\xff\xc2\xffg\xff\xeb\xff\x8d\xff\x18\xff\x84\xfeg\xff\xe7\xfe\x82\x00Y\x03\xa1\x01\x8e\x03I\x05\xb3\x03\xe8\x02%\x03~\x03\r\x01\x8a\x00\xd0\x03C\x01\x94\xff&\x04l\x02\x90\xff\xa4\x01\xbd\x01\xe4\xfeq\xff\x81\x01\x89\xff\xd3\xff\xc0\x01\xaf\x02\xc2\x01@\x02\xf5\x02\x18\x01\xb4\x00\x84\x02\xf4\x02\x06\x003\x01\xf9\x02\x1e\x005\x00e\x02\xbf\x00\xe2\xfe\xc1\xff\xfe\xff\x8b\xfe9\xff\x9e\x00\x9a\xfe\xb7\xff\x18\x01\x92\xfe\xd8\xff\xce\x01\xa3\xff\xc9\xfd\x83\x00\xd3\x00\x05\xff\xfd\x00\x04\x02*\x00g\x00\xe1\x01\xe9\xff\x1f\xff\x05\x01\xad\xff8\xfdY\x01\x82\x01\x0c\xfd\x9f\xff\xd5\x01\x98\xfe^\xfc\xcf\xff\x83\xfe\xdd\xfaV\xffI\x00\x95\xfb\xfe\xfc]\x00k\xfea\xfd\xbc\xff9\xfe\xb0\xfcZ\xff\xb9\xff\xde\xfd\x92\xff\xa0\x00\x13\xff\xf9\xfe\x1e\x00,\x00\xb0\xfeZ\xffM\xff+\xfe\x17\xff\xcd\xfe\xb0\xff\x11\x00\x03\xffm\xffb\xff\xf1\xfey\xfe\xb8\xfe2\x00\xe2\x00G\xff\x89\x00\xa9\x01\xb8\xff1\x00\xcc\x00j\x00\x9a\x00q\x01\x8d\x00\xfd\x00\x85\x02\x7f\x01\xdd\xff\xab\x00\x00\x00\xbb\xfe\xe0\x00l\x00m\xffD\x00\xe6\x00a\xff4\xfd\xe4\xfeu\xff\x0f\xfc\xc7\xfd\xd9\x02\x96\xfe:\xfdM\x03\xf1\x00\x8b\xfb>\x007\x02)\xfdI\xff\x1c\x05N\x01y\xfeU\x03\xbd\x03\x86\x01\x08\xff\xe8\x02\xff\xfc2\xff\xde\x00\xe7\xff\xe6\xfd\xfe\xfe\x91\x02C\xfc\r\xfeV\x00\xca\xfd?\xfa\x00\xffn\x00\xde\xfb)\xff\xdc\x01\x0b\xfe\xb1\x01e\x01\xb5\xfd\xe0\x00\xaf\x02\xa1\xfd\x9d\xfd\x02\x04\xfb\xffq\xfc\t\x01\xab\x060\xfd(\xff\xaf\x06\x15\xffT\xfc\xbb\x04,\x02\xe9\xfa\n\x03\xb8\x03F\xfd\x9c\xff\xe4\x05\xc4\x00\x08\xff(\x01\x91\x00u\xfft\xfbz\x033\x03\xf7\xfb\xbe\x02\n\x04\x96\x01\xf2\xfe\xe1\x00\x7f\x03\xc8\xfd\x15\xfe(\x01\x96\x020\x01\x19\x02\xe4\x00_\x01\x1e\x01\xd9\xfcS\xfe\xbc\x03\xd6\xfd\xfb\xfc\x9b\x012\x00<\x00}\xfc\xe0\x02\xea\xff\xf2\xfbE\xfe0\x01\xb8\x00C\xfcy\x01\xd0\x00~\xfe\xdc\x01\xc0\x00\r\x01`\xfe_\xfes\x03\x08\xfbT\x01\xc6\x04x\xfb\xc1\xfe\x1c\x05\xbc\xfc=\xfe\xb5\x00\xa1\xfd\xd4\xfd[\xfd\x7f\x01\x85\xfc\x08\x01\xe9\x00\x98\xfc\x05\x00\xdc\x02\x03\xfbR\xfc;\x05\x1e\xff\xa2\xfcF\x02\x17\x03y\xfe]\x01\xbc\x01$\x00\xeb\xfc\x18\x01r\xfd\xcb\x00\xa9\x04\xa4\xfe.\xfeb\x03f\x034\xf9o\x04\xf0\x00\xe8\xfa\x14\xff\x14\x02\xb8\x01\xf9\xfeM\xfd\xde\x03\xf6\x00\xd2\xfa%\x02\x8a\x01\xe0\xfc\xf3\xfc\x82\x03E\x03\x0c\xfen\x02V\x03y\xfd\xcf\xff\x9c\x03\xf2\xfet\x00\xbe\x03\xe6\xfdW\x02\x81\x03\x98\xfa\xe3\x00\xb4\x027\xfe\xfa\xfd\xbf\x03K\x01\x9a\xfa6\x03y\x00\xbc\xfc\x9c\x00p\x01)\xff\xeb\xfe\x07\x03\xa4\xfe?\xfed\x02\xd8\xfa`\x00\xcc\x05\x00\xfb\x9e\x00\x16\x07T\xf9\xf4\xfc\x05\x08\x0e\xfc\xac\xf9\xbe\x07O\xfe\xff\xfb\xfc\x04\xf2\x01\xee\xfc\xe1\xfe\xd9\x03\xdb\xfa\xa1\xfa\xbc\x04"\x07w\xf8\xc3\x01\xe6\x07\x1a\xf8\xec\xfd\xaf\x04o\xfdw\xfb@\x04\x86\x02A\xfc\xf0\x00\xe1\x04\xf0\xfbP\xfc\x96\x01\'\x02\x95\xfb\xb9\x00]\x03\x85\xfe\xa3\x00\x86\xfe\x91\xfd^\x00p\x02\xe5\xfa\xea\x01\xd3\x01:\xfd\x16\x03\xb9\xfe\xda\xfb\x8e\x02z\x00\xc2\xfb\xbf\xfe\x1f\x07w\x01e\xf8\x89\x03E\x05\x96\xf8\xbb\xfe\x04\x08]\xfaz\xfd\xd0\x07W\xfeG\xfe\x18\x05\xa8\xfc\x11\x00\x15\x00\xd7\xfeL\x00\xce\x00{\x01\x
a0\xff\x88\x002\x01_\x01\x85\xf9\x98\xff\xd9\x00\xed\xfb|\x01\x9d\x04\xef\xfe\x93\xfc0\x03g\xfe\x96\xfcG\x00\xf7\xff\xe0\xff1\x006\x05\xb2\x00\x1d\xfb\xa1\x02F\x00\x07\xfc_\xff\xc1\x01d\x02+\xff\xff\xffc\x02\x03\xffP\xfd.\xff5\x04l\xfdU\xfa\xb4\x05\xd1\x01X\xfcn\x01\xd8\x03\xac\xfd\x96\xfa|\x06\xe6\xfd\xbf\xf7t\x05\xdd\x01\x90\xfd\xaa\x02l\x03\xdf\xfa\xaa\xfe\xc0\x01\xd1\xfc%\xff\x01\x01\xf7\xfe\x08\x01=\x01\xbd\x01\xa5\xff\x96\xfcQ\x03\xbc\xfe\xeb\xfc6\x00f\x01\xb3\x03N\xfe%\x00\xfc\x06\x83\xfc\x15\xf9[\x07\x11\x01n\xf7\xd7\x03\xe7\x07\xca\xfa\xa5\xfc\x17\x07\xbc\x01\xaf\xf6\xaa\x04\xa7\x02a\xf5(\x05\x99\x04[\xf9\xaf\x03\xb8\x00\xe7\xfb\xab\x02\x82\x02\x11\xfc\xc7\xfd\xd3\x05\x16\xfd)\xffu\x00\xcf\x00\xe4\x03\xd4\xfc\xce\xfex\x02&\x00h\xfe1\xff\xc5\x01c\xff^\xfek\x032\xff\xcf\xfa\xa4\x06;\x01\xbc\xf6\x85\x04\x08\x02\xab\xfc%\xfe\x07\x04\xec\xff\xe4\xfa\xf5\x03s\x00\x1a\xfd\x87\x00\xcd\x01\x94\xfcF\xff\x1d\x02\x12\x011\xfe\x89\x02%\x00\x9f\x00\xc4\xfc\xe5\xfen\x06\xce\xfc)\xfe\xac\x07\xa2\xfe6\xf9\x02\x07q\x00u\xfa%\x01\xa9\x05\xee\xf9\x98\x00\x88\tp\xf7\xde\xfb:\x08\xeb\xfe&\xf7\x9e\x06^\x05d\xf9\x11\xfe\xcb\x07\xa4\xff\xe3\xf7q\x06I\x00v\xf9\xa2\x02j\x04w\xfc\xcc\xfek\x04\x1e\xfe:\xfb\xa5\x04>\x01\x83\xf9B\x02P\x02c\x01_\xfcA\x03\x8f\x02 \xf9\xf7\xff|\x03/\xfd\xca\xfd(\x07G\xfb\xba\xff\x97\x04e\x00d\xf9\x85\xfe\x86\x06^\xfb\xef\xff\xd1\x05\xd1\x00\xb1\xfa\x81\xffy\x04\x06\xfa\x83\xfd\x97\x04\xae\xfe\x99\x01\xd2\x02;\xfb\x13\x02T\x00\xf8\xf9L\x04\xdb\xfe$\xfc\xbc\x05\xdd\x03\xa7\xf6\xf4\x05\xc7\x05d\xf3\xfa\x01\x0e\x06\xf9\xf9\xa1\xfe\x98\x07:\x02\xb0\xfb_\x04\xb7\xfek\xf86\x032\x03\xac\xfei\xfeH\x05a\xfd\x10\xfeB\x05}\xf8\xe6\x01X\x025\xf7\xd3\x06v\x02\x9c\xfbc\xff\xab\x05/\xf9\xaf\xfa~\x08s\x03\xa5\xf5\xcc\x01\xc9\n\xbe\xf4\xf6\xfdM\t.\xfc\x1d\xf8\xb2\x07\xda\x01\xee\xf7L\x03\xc2\x04\xd0\xfcl\xfa\xea\x04\xff\x04q\xf5\xf1\x05\xd8\x03\x04\xf8\xba\x01\x97\x08s\xf8\xec\xfa\x03\x11\xa1\xf5\xa9\xf8Y\rc\x00\x81\xf1\xf0\x08\xd1\x07\xa2\xf6\xd4\xff`\x06?\x02>\xf5Q\x03~\x04\xd0\xfc\x8d\xfd%\x03\xfd\x05j\xf8\xcb\x02\xb2\x02\xbd\xf8\xd6\x01\x1d\x02\x15\x01@\xfc\xe5\x03~\x01D\xfa\xe2\x02}\x04%\xf6n\xff_\n\xb3\xf7\x86\xfc\xd3\x0b(\xfb\xbe\xf7r\n\x84\x01y\xf3+\x05\xf7\x07v\xf1\xb4\x01\xf8\x0fH\xf6\xa5\xf7\x0c\x0e\xca\xfe/\xf46\x06R\x01;\xfb\x8d\xff\xc2\x05\xb2\x03\xe8\xf9D\xff\x91\x06u\xf9b\xf9c\x0c\x97\xfc\xdf\xf5P\n\xce\x06s\xf6\x17\xfd\xe6\n\xe9\xfc\xf5\xf6\x98\x05\x96\x05\x90\xf6\xbc\x00\xb7\x0eU\xf5O\xfd\xbf\x0b\xf2\xf8\x0e\xfd\xd3\x034\x00B\xffM\xffT\x05\xa7\xfe\r\xfb\x8b\x05>\x03\x01\xf7|\xfe\x99\t+\xfb\x81\xfbc\t\xe5\xfd\x97\xf9S\x04x\x03Q\xfa\xd3\xfc\x12\t-\xfd\x9d\xf8\xd4\x03\xff\x07\xa3\xf9`\xfb\xf6\x07-\x01\xda\xf4\x9d\x05\x15\x06q\xf7\\\x02\xea\x05)\xfc(\xf7\x1a\x07\xc7\x05\x1c\xf6\xdd\xff\xf8\n\xb8\xf4\'\xfen\x08\x90\xfc\xb1\xf8{\x01\xf8\t\xca\xf7\x7f\x00r\x07E\xfb\xea\xf8\xc7\x04\xe0\x06!\xf9\x05\x03\x17\x06:\xfb\xb0\xfa\xc8\x06\xe9\x01\xcc\xf8\xd7\x02\xe6\x05\xd5\xfa\x13\x00\xe0\x05{\xf9J\xff\x9a\x04\x0e\xff\xf2\xf8\xf1\x06\xd4\x04\xdc\xf7\x87\x01\xce\x04\x9b\xfa^\xfdS\x05 
\xfc\xe8\xfcd\x04\xe2\x04\x8b\xfb\x16\xfb\x84\x06\'\xfc\x97\xfc\xf4\x05\xc8\xfe\xb0\xfc\xc5\x00\xdd\x05z\xfb\xd7\xfcj\x03\xf7\x01\xbf\xfaR\x00{\x04\xa4\xfd\xf9\xfe7\x03\x96\x01\xc1\xf8\xee\x02\xb0\xfe\x83\x01Y\x01\xba\xfa\x0e\x05\x86\xff\xb8\xfa\xee\x06\xdb\xff\xf9\xf2\x80\n\xed\x05G\xf2\xc3\x02+\x0b\xfa\xfa\xe1\xf5\x0b\t\xb9\x038\xf6\x01\x01:\x08g\xfc\xa8\xf7\xac\t\x81\x03l\xf5\xa8\x02\xe5\x08W\xf6\xbe\xfbd\x0bg\xff*\xf6\xd5\x04y\x06\x02\xfbe\xfe\xea\x03\x19\x02G\xf6\xa0\xffK\r$\xf8\x12\xf9\xf3\x0e\x98\xfe\x19\xef\x14\n\xc8\x08w\xf2X\xffs\x08h\x00\x9f\xf8;\x01\xae\x08\xe5\xfb\x93\xf4\xf3\n\xef\x05\x97\xedH\n\x9d\x08\x97\xf4!\xfe\xcd\x071\xff=\xfa\xf7\x04\x80\x00B\xfb\xda\xffi\x05W\xffA\xfc|\x03\xf4\x01?\xfa\x03\x00\x01\x06W\xfd\xb7\xfb\xf2\x05\x9c\x01n\xf9\xee\x02(\x06^\xf5\xb6\xff\n\n}\xfa\xb1\xfc\t\x04A\x04-\xfb{\xfb\x16\x08t\xfc\xab\xf9M\t\xd0\xfd\xd1\xfb\x95\x02:\x07_\xfb\x01\xf9\xb4\x08\xfb\xfa\x1a\xff5\x01\xc3\x00\xde\x02U\xff\xa0\xfd\xf6\x02X\xfe\xfd\xf9\xe2\x05\xb7\x022\xf8\xd3\x03\x9e\x03)\xfb\x9d\x03>\x02\x12\xfb/\xfdI\x04\xd4\xff\x94\xfc\xa9\x01-\x06\xe0\xf9K\xfcF\x0b\xc7\xfd\xff\xf4e\x04\xba\x05\xb5\xf8\x9e\xff\x0c\tE\xfd\xa8\xfb\x86\x03r\x01-\xfa\xd1\xff\xc1\x04a\xff\x14\xfd0\x04\xeb\x03$\xf8\x1b\x04E\xfc*\x00Q\x04r\xfc\xd1\x00X\x02\xcf\x00\x9e\xfc\x03\x01k\x00d\xff~\xff\xe2\xff\xeb\x03c\xff\xe0\xfb-\x04\x9a\xff\xe1\xfc\xa0\xff\xe3\x05&\xfdA\xfb$\x08\x12\xff\'\xfa\x8a\x00x\x07\x9d\xf8\x92\xfe\xd0\x07\x9d\xfb\x8c\xff\xcc\x03\x91\xfd\xb5\xf9\xf9\x06\x08\xfd\xa5\xfa}\x059\x03\x95\xfb\x15\xfe>\x03\x90\x00\xc7\xfct\xfe\xaa\x06\xf3\xfc_\xfe\x94\x04\x7f\x00\xd5\xf9;\x04\xda\x03:\xf9\x95\xfe[\x08\xf1\xfb\x05\xfb\x05\x03\xcb\x03\xad\xfd\xce\xf93\x04\x0e\x02/\xfe\'\xfdx\x06F\xfb$\x00\xb1\x01!\xf8\x95\x04V\x06\x95\xfb\xb4\xfa\xca\x05\x94\x00\x97\xfb\xf0\x00\x84\x04\'\xfd\xdc\xfb\xe9\x05[\x03\xf2\xf8\xd6\xfe\x9a\x07G\xfdJ\xfb\xb5\x04e\xfev\x01\x80\x00\xc9\xfc\xdb\x015\x04\xbd\xfc%\xf9\xbb\x06\xda\x00\x85\xff\x17\xfc\xa0\x03g\xfd\xe6\x01\xea\x04\x15\xf5\xb3\x02\xa9\x03}\x00\x7f\xfaR\x03\xe4\x05\x9c\xf8\x14\xfe\xed\x07z\xfa\x07\xfb\xd5\x07\xd5\x00\x08\xfc\x07\xfc/\x05\xc8\x04\xf6\xf9R\xfa\xb6\x08\x90\xfe6\xf9\x01\x02\xeb\x08\x1c\xfc\xb3\xf3\xdc\x0bi\x05?\xf5~\xff~\tn\xfai\xf7\xb4\nJ\x02\xa4\xf9\x97\x03\x02\x010\xfc\xd8\xfdJ\x03y\x04i\xf9\xe8\xfcV\x0c,\xfb\xab\xf6\r\x0c\xe3\x01M\xf2\'\x05D\x06\x8f\xfap\x00,\x02\x03\x04u\xf8\xc0\x00\xbd\x03\xf1\xfc\x1a\xfef\x01\xd7\x04\xa6\xfa\xc5\x02\xc6\x001\xfd\xd4\xfd\xec\x03M\xfe\xee\xfcx\x04\xb7\x03q\xf8\xb8\xfer\x0bi\xf5\xc8\xfd\x0b\x07[\x01\xb7\xf7V\xff\xe3\x0c\xfb\xf9\xec\xfa~\x02\x19\x03\x94\xfa\xab\xfei\x08-\xfbj\xfc\xb4\x02\x04\x06\xcd\xfb\xaa\xf9k\x08\x88\xfe\x87\xf6W\x05p\x08\x8a\xf8`\xfd\xd9\x07J\xfd\xa1\xfc\x1f\x01\x98\x00\xf9\xfe\xcc\xfe\xde\x00s\x04\xff\xfd\xc5\xfeg\x03#\xfb\xf5\xfe\xe8\x03\xb5\xfc9\x01#\x01\xf7\xff\x99\x03\x94\xfa\xf4\xff]\x05\x12\x00\xbd\xf6a\x03\xe6\x07\xc8\xf9\x87\x00\xfc\x01\xfb\x02\xa6\xfc\xa9\xfb\x9c\x07\xc3\xfd\x94\xf9\xb0\x05\x7f\x05\x15\xf9q\xfc\x8e\t\xaa\xfd\xd2\xf7\x9d\x06\xa0\x02\x17\xf7\xb8\x01~\x08\xf1\xfb8\xfa|\x03R\x05\xa2\xfaQ\xfc9\x06\x8e\x00\x15\xfa\x87\x01{\x02<\x00\x19\xff~\x01\x0c\xfdJ\x03\xf4\xfc\x9b\xff\xb3\x04H\xfc\xa3\x03\x8b\x00$\xfe\xca\xfbm\x01!\x06\xda\xfb\xc3\xfb\x89\x05\x00\x00\xb0\xfb\xd4\xff\xf2\x03d\xfb\xd3\xfb\xdb\x08\x84\xffd\xfa\xcd\x03K\x054\xf7F\xfd\x97\tx\x01e\xfa\xd1\xff\x12\x06l\xfd\xc4\xfc\xaf\x02\x87\x01Y\xfc\xad\x00t\x02\xe3\xff\x88\xffv\xfeL\x01\xf5\xfe\xe9\xfe\xe3\xffG\x03\xa4\xfc\x05\x00I\x03u\xfe\xb2\xfd\x91\xffr\x02\xc1\xfcB\xfd\xde\x04H\x03\xc8\xfaL\x00\xf2\x03-\xf9\xd6\x0
1\xe6\x06\xbd\xf9Z\xfd\x08\x05\xe1\x02\x9d\xf8\xfd\x02\x03\x05\xf3\xf9;\xfc\xca\x08\x1d\x00\xb5\xf6_\x06\x85\x06O\xf8\x00\xfb\xfe\t\xee\xfd\x9b\xf9\xe4\x03\xc6\x04\xc0\xf9[\xffC\x06\xa6\xfd\xd8\xfa\x81\x02F\x04\xed\xfa\xec\x00\xd9\x02!\xfe\x02\x00\xc7\xfeF\x00^\x017\xfdK\x02\x98\x00p\xfc9\x00\x14\x04\xa4\xff\xf7\xfbX\xff\xb5\x04\x0b\x00\r\xf8a\x05\xb2\x04\xea\xf8n\xfd\xb0\x07\xc3\x00\xd3\xf8i\x02\x9c\x04"\xfb\xcd\xfc\x98\x06q\x00*\xf9w\x03\xa9\x05\x9e\xf8z\xfe\x85\x05J\x00\xa5\xfa\xa0\x00\xd4\x03\xf0\xfd\x93\xfe\xf8\x01l\x00\x8f\xfcg\x00~\x03\x8a\xfdC\xfd\x08\x05\xdc\xfen\xfb\x17\x02\x8b\x05P\xfb3\xfc^\x07{\xfe\xb0\xfa\x90\x02\xb5\x04\x01\xfdX\xfcs\x04\xa7\x01\n\xfcL\x00`\x01\x06\x01\xc6\xfdD\xff\xd6\x04\x02\xfd\xd0\xfeO\x01\x80\x00\r\xff\x9e\xff\x9e\x00\xe8\xff\xd5\x00\xfb\xfd\t\x020\x00\x8c\xfd\xc0\xfe\x0f\x04\xbd\xfe\x19\xfc\xfa\x02\xfe\x03\x02\xfd\xf3\xfa\x9a\x06\xdf\xff\x1e\xfa\xb7\x00.\x05>\xfeb\xfe8\x01K\x00\xa9\xff\x8a\xfeX\x00\xbb\x00}\xffB\x00:\xff\xad\xff\xfe\x02D\x00#\xfcM\x00Q\x03\x15\xfcX\x00\xbb\x03N\xfd\x84\xfd}\x03\xfd\x03H\xfbe\xfc&\x05\xa5\xff\xeb\xfb\x95\x01v\x03\xc5\xfe\x8c\xff\x02\x01\xaf\xfe\x03\x00\x0e\x01\xa5\xff\x98\xfd\xbd\x01\xd0\x03/\xfe\x1f\xfe\xb2\x02\x08\xfe\xce\xffS\x00\x96\x00c\xffY\x00\x80\x01\x16\xff\xe6\xffg\x00\xb2\x00\x17\xfeh\x00\xfb\x01:\xff\xaa\xfd\x0e\x03)\x01\xea\xfc\xec\xff\xfe\x03\xe3\xfc\x82\xfe\x92\x05\x84\xfd\xce\xfb\x0e\x03l\x05u\xfa\x85\xfd\xe2\x05\x8f\xff\xf8\xfb\x0f\x01\xb0\x02\x02\xfc\xe1\xfe\x08\x02\x8f\xff~\xfeI\x01!\x01\x9e\xfd\'\xfe\r\x03\xa9\x01\xaf\xfc\x82\xff\x10\x04\xa1\x00\xf4\xfc\xf4\x01A\x01\xd3\xfeY\xff \x01\x1b\x00\x9c\xfe\xc1\xff\x00\x01\xd0\xfe\xa8\xfe"\x03\xfd\xfdP\xfd`\x01\xa3\x02\xbe\xfe3\xfd\xda\x011\x01\xda\xfc\xdc\xfe\xaf\x02\xbd\x01\x0c\xfe\xb3\xfdQ\x01n\x03.\xfdc\xfe\xb4\x02\x7f\x00\xa6\xfe\xda\x00\x8f\x02~\xfd\x16\xfe#\x02\x98\x02`\xfdh\xfe\x8c\x02r\x01\x11\xfc\x18\x01%\x03\x80\xfd\xa6\xfeN\x00\xef\x01e\xfe.\x01\x18\x00\xfc\xfde\x00\x8c\x01\xc6\xff\x9c\xfe\xdb\x00\xd1\xff\x89\xff\x01\x01\x0b\x01\x81\xff\xfc\xff\x93\xfe1\x00\'\x01\x03\xffA\x01\x1d\x00\xc2\xfe\x03\xff\x03\x01\x9e\x01-\xfe}\xfe\x90\x00\xe9\xfe\x8e\x00\xee\x01\x8e\xff\xd3\xfd\x89\xffg\x01\xec\xffE\x00\xea\xff(\xff,\xff\xa4\x006\x01\xa9\xff\xb3\xff\xcc\x01\xc8\xfd7\xfe\xed\x02+\x00\xc0\xfe\x7f\x00\xf3\xff@\xff\xc5\x01\xd5\xff\xe0\xff\xba\xff)\xff2\x00>\x00\\\x01c\xff\xfb\xffA\x00Y\xfe\x9e\x00\x8f\x01\xba\xfe\xa7\xfe\xe1\x00\xf7\x00\x9d\xffX\xff\xe7\x00\x8a\xff\xd6\xfe\x12\x00\xaf\x00f\x00g\x00f\xff,\xfe\xdf\x00\x89\x01:\xff\x9f\xfe\xde\xffF\x01A\xff\x02\x00\xdb\x00\x94\xff\xad\xfem\xff\x14\x01\xc8\xff\xc0\xff\xd3\x00\x81\xff\xbd\xfe`\x00-\x01s\xff\x8d\xfe\xe5\xff\x91\x00/\x00\x16\x00C\x00-\x00\xe2\xfen\xff\xb7\x00\xb8\xff\xb4\xff\x82\x00\x98\xff\x08\x00U\x00\xcb\xff\x11\x00I\x00V\xff\xfc\xfe\xc1\xffe\x01{\x00\x95\xff\xe7\xff\xdb\xff1\x00\x0e\x00\xd1\x00\xc3\xffb\xff\xde\x00r\x00B\xff\x80\x00\xd1\x01T\xff\xa0\xfe\xae\x00\xf9\x00p\xff\x03\x00\xba\x00\xd4\xfe\xef\xffz\x01T\xff\xb8\xfe\xfb\x00"\x00\xf1\xfeA\x00\x12\x00\xde\xff+\x00Y\x00i\xff*\xff\x0b\x01\x95\x00\xab\xfe\x91\xff\x0c\x01W\x00\x0e\xfe\x8d\x00T\x021\xff\xfb\xfe\x9b\x00S\x00l\xff\xfe\xff\xd4\x00s\x00\xfc\xff\x01\x00\xbe\xff\xec\xff\x87\xff\xd9\xff<\x00i\xff\x00\x00\x7f\xff\x93\x00\xf2\xff\xad\xfe}\x00Y\x00\xd2\xfe\x96\x00E\x01d\xff\xe1\xff\x0e\x01\x1d\x00\x92\xff\x85\x00\x86\x00\xcd\xff\x98\xff\\\x00\xbb\x00Y\xff\x87\xffB\x01J\x00\x9a\xfe\xbd\xff\x19\x01\n\x00\xc0\xfe\xa5\xff\x96\x01?\x00\xa6\xfeU\x00\x95\x00\x06\xff\xca\xff\xad\x00\x91\xff&\x00a\x00\x00\x00 
\x00y\x00\x83\x00\x96\xffb\xff\x89\x00^\x00}\x00W\x00\xe8\xfe\x11\x00\xaf\x00\xb0\xff\xb5\xff%\x00\x01\x00\xcb\xffp\x00#\x00L\xff\x10\x00\x9b\x00\xb9\xffv\xff\xf3\xff\xc1\x00\x07\x00\xeb\xff1\x00\xa1\xff\xd7\xff\xd3\x00\xb6\xffL\xff\xa7\x00\xd1\x00\xdd\xff\xb2\xfe\xd4\x00\xd4\x00x\xfe/\xff\xb5\x00\xd1\x00(\xffQ\xff\xc1\x00\x00\x00n\xfe2\x00\xff\x00~\xff\x84\xfe\xb8\x00\x82\x01\x19\xff\xf0\xfe[\x00\xdd\x00\x86\xff(\xff\xfc\x00\x95\x00\xe6\xfe\n\x00\xfe\x00\x14\x00e\xfe\xfd\xff\xcc\x01\xa4\xff}\xfe\x91\x00A\x01\x80\xff\xba\xfej\x00\xe3\x00\xf3\xfe\xd7\xff_\x00\xc3\xff\xfb\xff\x00\x00\xe7\xff\xc3\xff\xae\xff\xff\xff\x08\x00\xf6\xff\x1f\x00\x89\xff\xf2\xff\x9e\x00\x19\x00A\xff\x84\xff\xbc\x00_\x00\xac\xff\x8d\xffM\x00m\x00\xf8\xff\xab\xff\xca\xffX\x00\xc7\xff\xbe\xff\x99\x00\xce\xff\\\xff\x84\x00_\x00<\xff\xa4\xffX\x00\xb6\xffS\xffA\x00;\x00`\xff\xbf\xffq\x00\xec\xffZ\xff\r\x00o\x00\x86\xff\xb4\xff{\x00a\x00\xc7\xff\xff\xff`\x00\xaa\xff\x08\x00\x8a\x00q\xff\x8c\xff\xa5\x00M\x00P\xff\xb3\xff\x95\x00\xed\xff2\xff\x02\x00\xe1\xff\x91\xff\xbb\xff\x99\x00\xf6\xffA\xffj\x00p\x00^\xff\xa4\xff\xdc\x00s\x00k\xff\xce\xff\xc3\x00o\x00\x7f\xff\r\x00\xaa\x003\x00\xb3\xff8\x00h\x00\xfe\xff\xd8\xff\xf7\xff\xf1\xff\xd4\xff\x13\x00-\x00\xf9\xff|\xff\xd5\xffg\x00\xf1\xff\x9b\xff\xbe\xff2\x00\xfb\xff\xd6\xffT\x00B\x00\xb2\xff\x87\xff\xf1\xffa\x00.\x00\xf5\xff\x02\x00\xfe\xff\x14\x00\x1d\x00\x19\x00\x14\x00\xff\xff\xd5\xff\xc1\xff=\x00G\x00\x16\x00\xc5\xff\x9f\xff\xe1\xff\'\x00\x0f\x00\xc2\xff\xba\xff\xdd\xff\x06\x00\x19\x00\t\x00\xd2\xff\x0b\x00\xec\xff\x01\x00-\x00(\x00N\x00\x0b\x00\x0e\x00\x0c\x00^\x00V\x00\x00\x00\xe0\xff\x16\x00N\x006\x00\x02\x00\xff\xff\x13\x00\xf9\xff\xda\xff#\x00;\x00\xfe\xff\xc2\xff\x06\x00 \x00\t\x00*\x00\xe8\xff\x00\x00=\x00\xf4\xff\xdc\xff>\x00M\x00\xf5\xff\r\x00h\x00R\x00\xcd\xff\'\x00\\\x00\xd5\xff\xed\xffB\x00&\x00\xd6\xff\xfe\xff+\x00\xe0\xff\xb2\xff\x10\x00(\x00\xc7\xff\xeb\xff\x0b\x00\xfc\xff\xef\xff\t\x00\x11\x00\xdd\xff\xd3\xff\x16\x000\x00\xcf\xff\xeb\xff5\x00\xea\xff\xf6\xff\x08\x00\xde\xff\xf4\xff\x17\x00\r\x00\xe0\xff\x06\x00!\x00\xe5\xff\xd8\xff\xff\xff\xd1\xff\xbf\xff\xf1\xff\xff\xff\xee\xff\xe8\xff\n\x00\xe1\xff\xf1\xff\xf7\xff\xf6\xff\xda\xff\x12\x00 \x00\xee\xff\xf6\xff:\x00!\x00\xb7\xff\x12\x00\x0f\x00\xf6\xff\x16\x00\r\x00\x1a\x00\x08\x00\x01\x00\x06\x00\x0e\x00\x17\x00\xdd\xff\xdf\xff\'\x00\x05\x00\xf2\xff\xf2\xff\xf9\xff\xea\xff\xe6\xff\x0e\x00\xe2\xff\xcb\xff\x00\x00\x1e\x00\xdc\xff\xdd\xff\x0b\x00\xfa\xff\xee\xff\xdd\xff\xff\xff\xeb\xff\xe9\xff\x05\x00\xdc\xff\xef\xff\x02\x00\xe5\xff\xdf\xff\xf9\xff\x13\x00\xe7\xff\xe1\xff\xd8\xff\t\x00\x11\x00\xcb\xff\xe7\xff\x04\x00\xde\xff\xc4\xff\xf6\xff\x04\x00\xe0\xff\xdf\xff\x1e\x00\xf2\xff\xcb\xff\x1d\x00A\x00\xba\xff\xbe\xff*\x00\x04\x00\xee\xff\xff\xff\x01\x00\xf5\xff\xf1\xff\xfc\xff\x0e\x00\x1b\x00\xf0\xff\xcf\xff\t\x00(\x00\x1a\x00\xf8\xff\xf8\xff\xfa\xff\x05\x00\x0e\x00\xf3\xff\xe0\xff\x00\x00\xf5\xff\xd8\xff\x00\x00\x16\x00\x03\x00\xe7\xff\xd8\xff\xf6\xff\x03\x00\x08\x00\x0f\x00\x00\x00\xf7\xff\x1e\x00\t\x00\x1b\x00\r\x00\xe9\xff\xf7\xff\x0c\x00\x13\x00\x05\x00\x17\x00\xdb\xff\xc5\xff\xe5\xff\x11\x00\xfc\xff\xbb\xff\xeb\xff\t\x00\xe3\xff\xc2\xff\x1a\x00\x1f\x00\xb3\xff\xb9\xff\x04\x00\x16\x00\xdd\xff\xe9\xff\xef\xff\xe1\xff\xf3\xff\xf8\xff\xf6\xff\xfc\xff\xf6\xff\xf0\xff\xf3\xff\x00\x00 \x00\x11\x00\xf5\xff\xf8\xff\x11\x00\r\x00\x16\x00\x0f\x00\x14\x00\xec\xff\x1e\x00)\x00\xf3\xff\x05\x00\xfd\xff\x00\x00\xed\xff\xfb\xff\x13\x00\t\x00\x03\x00\x08\x00 \x00 
\x00\xf2\xff\x03\x00$\x00\xf4\xff\x07\x00!\x00\t\x00\xe6\xff!\x006\x00\xec\xff\xf9\xff#\x00\x17\x00\x01\x00\x12\x003\x00"\x00\xf7\xff\x1a\x00<\x00\'\x00\n\x00\n\x00+\x00+\x00\x14\x00\x18\x00=\x00\x12\x00\xf3\xff-\x004\x00\xfc\xff\x10\x00\x1f\x00\xeb\xff\xf4\xff\x14\x00\x16\x00\xe9\xff\xe7\xff\x00\x00\xe5\xff\xf3\xff\x08\x00\xf0\xff\xdb\xff\x01\x00\x19\x00\xf2\xff\xef\xff\x0f\x00\x08\x00\xed\xff\x12\x00\x0e\x00\xf2\xff\x18\x00"\x00\xf4\xff\xef\xff:\x00\x11\x00\xf5\xff\x0f\x00\x08\x00\x15\x00\x04\x00\x07\x00\x02\x00\x07\x00\x11\x00\xff\xff\xf7\xff\xff\xff\x05\x00\xf9\xff\xe2\xff\xfa\xff+\x00\x1a\x00\xf3\xff\xf3\xff/\x00\x1a\x00\xde\xff\xed\xff0\x00\x10\x00\xf5\xff\xf5\xff\x15\x00\x00\x00\xc5\xff\n\x00\xf6\xff\xe4\xff\xf7\xff\xeb\xff\xed\xff\xf3\xff\xf8\xff\xe4\xff\xec\xff\xf8\xff\xe5\xff\xda\xff\x05\x00\x00\x00\xf3\xff\xea\xff\xf7\xff\xfc\xff\xec\xff\x01\x00\xf6\xff\xe6\xff\xe9\xff\xfe\xff\x10\x00\xfa\xff\xef\xff\xf8\xff\x01\x00\xf0\xff\xe1\xff\xff\xff\xf9\xff\xe6\xff\xf3\xff\xf3\xff\xe2\xff\xe3\xff\xf6\xff\xf5\xff\xe4\xff\xea\xff\xf6\xff\xf0\xff\x00\x00\xf8\xff\xf7\xff\x02\x00\xff\xff\xf9\xff\xff\xff\x10\x00\x00\x00\xee\xff\xea\xff\x14\x00\x0e\x00\xe4\xff\r\x00\x13\x00\xd1\xff\xd9\xff\x15\x00\xf6\xff\xd3\xff\xf0\xff\x01\x00\xee\xff\xea\xff\xf6\xff\xf5\xff\xf1\xff\xde\xff\xe7\xff\n\x00\x00\x00\x02\x00\xf0\xff\xf0\xff\x05\x00\x12\x00\xfe\xff\xe4\xff\xfa\xff\x11\x00\xf8\xff\xe9\xff\xfc\xff\r\x00\xe9\xff\xe4\xff\xec\xff\xea\xff\xe0\xff\xf4\xff\x02\x00\xd1\xff\xcf\xff\x00\x00\xff\xff\xd1\xff\xdf\xff\xf6\xff\xe5\xff\xdd\xff\xf1\xff\x0b\x00\x06\x00\xe1\xff\xe3\xff\xff\xff\x18\x00\x08\x00\xf4\xff\r\x00\x1e\x00\n\x00\xfa\xff\x19\x00\x18\x00\x00\x00\xf9\xff\x0c\x00\x19\x00\x08\x00\x01\x00\x01\x00\xfc\xff\xf5\xff\xfd\xff\t\x00\xfa\xff\xeb\xff\xeb\xff\xf5\xff\xf3\xff\xf0\xff\xf0\xff\xe4\xff\xdc\xff\xf2\xff\xf3\xff\xee\xff\x06\x00\x00\x00\xe1\xff\xf5\xff\x12\x00\xf7\xff\xed\xff\x01\x00\x0b\x00\xef\xff\xf7\xff\x17\x00\x05\x00\xf8\xff\xf9\xff\x10\x00\x0c\x00\xfd\xff\xff\xff\x0f\x00\x0e\x00\xf8\xff\x06\x00\x0e\x00\xff\xff\x0f\x00\x15\x00\x0b\x00\r\x00\x15\x00\n\x00\t\x00\x16\x00\x16\x00\x0e\x00\x06\x00\x13\x00\x18\x00\x11\x00\x0f\x00\x0f\x00\x12\x00\x16\x00\x16\x00\x1a\x00\x1b\x00\r\x00\x1d\x00"\x00\x17\x00\x15\x00$\x00\x17\x00\x05\x00\x1b\x00\x19\x00\x13\x00\x17\x00\x12\x00\x10\x00\x10\x00\x1b\x00\x1d\x00\x02\x00\xfc\xff\x1c\x00\x18\x00\x04\x00\x0c\x00\x0e\x00\xfe\xff\n\x00&\x00\xff\xff\xf3\xff\x1a\x00\x0c\x00\xe5\xff\x07\x00\x16\x00\xe5\xff\xf5\xff\r\x00\xee\xff\xf1\xff\x10\x00\xf9\xff\xf2\xff\t\x00\x11\x00\xf5\xff\xfa\xff\x0b\x00\xf7\xff\xf8\xff\xfc\xff\x03\x00\x05\x00\x03\x00\xfc\xff\x00\x00\x13\x00\x01\x00\xf4\xff\xf7\xff\x03\x00\xfc\xff\xff\xff\xfc\xff\xff\xff\xf0\xff\xff\xff\x11\x00\xf4\xff\xee\xff\x05\x00\xfe\xff\xee\xff\x04\x00\x01\x00\xf0\xff\xf6\xff\x07\x00\xf8\xff\xed\xff\x00\x00\xf7\xff\xed\xff\xfe\xff\xff\xff\xf2\xff\xf9\xff\t\x00\x06\x00\xed\xff\xfd\xff\x06\x00\xf5\xff\xf8\xff\x00\x00\xf7\xff\xf8\xff\xf3\xff\xf3\xff\x00\x00\xf6\xff\xe9\xff\xfe\xff\xfb\xff\xe4\xff\xe9\xff\x00\x00\xf7\xff\xe8\xff\xf0\xff\x04\x00\xf8\xff\xe9\xff\xfd\xff\xf7\xff\xf3\xff\xf2\xff\x04\x00\xfa\xff\xf9\xff\xff\xff\xfb\xff\xf6\xff\x03\x00\x02\x00\xf1\xff\xf9\xff\xfc\xff\xed\xff\xe8\xff\xf4\xff\xe1\xff\xee\xff\xef\xff\xe3\xff\xe7\xff\xef\xff\xe1\xff\xe3\xff\xf5\xff\xea\xff\xe4\xff\xf1\xff\xf3\xff\xe8\xff\xe9\xff\xf2\xff\xee\xff\xf0\xff\xf9\xff\xee\xff\xec\xff\xfe\xff\xf0\xff\xee\xff\x01\x00\xfc\xff\xe8\xff\xf4\xff\x05\x00\xf0\xff\xe6\xff\x04\x00\x05\x00\xf6\xff\xf5\xff\n\x00\xfc\xff\xf1\xff\x04\x00\xfd\xff\xeb\xff\xf9\x
ff\x02\x00\xf3\xff\xf9\xff\x00\x00\xeb\xff\xed\xff\t\x00\xfc\xff\xe4\xff\xff\xff\x10\x00\xea\xff\xeb\xff\x11\x00\xfc\xff\xe2\xff\x02\x00\x12\x00\xf3\xff\xf7\xff\x0b\x00\x00\x00\xf7\xff\x07\x00\x01\x00\xfe\xff\x05\x00\x04\x00\x00\x00\xfe\xff\x04\x00\x07\x00\xfb\xff\x02\x00\x08\x00\x00\x00\x01\x00\x01\x00\x04\x00\xfd\xff\xfc\xff\x04\x00\xff\xff\xfb\xff\x04\x00\x04\x00\xfc\xff\xfc\xff\x07\x00\xf9\xff\xfc\xff\x11\x00\x01\x00\x00\x00\r\x00\x0c\x00\xfc\xff\xfa\xff\x07\x00\x06\x00\xfd\xff\x00\x00\x04\x00\x04\x00\xf7\xff\x02\x00\x05\x00\xf5\xff\xf7\xff\x07\x00\x04\x00\xf6\xff\x00\x00\x10\x00\x0c\x00\x00\x00\n\x00\x11\x00\x00\x00\xff\xff\x1a\x00\x0e\x00\x04\x00\x1a\x00\x0c\x00\x08\x00\x12\x00\x10\x00\t\x00\x0b\x00\x0e\x00\x0f\x00\n\x00\x0c\x00\x0f\x00\x0c\x00\x0c\x00\x13\x00\n\x00\t\x00\x17\x00\x12\x00\x0c\x00\x13\x00\x15\x00\x08\x00\r\x00\x17\x00\x05\x00\x04\x00\x14\x00\x07\x00\xff\xff\x10\x00\x11\x00\x02\x00\r\x00\x1b\x00\x01\x00\xfd\xff\x17\x00\x13\x00\xfe\xff\x07\x00\x11\x00\x04\x00\x01\x00\x0b\x00\x06\x00\xfa\xff\x04\x00\x05\x00\xfe\xff\xf8\xff\x05\x00\x0b\x00\x02\x00\xf8\xff\xfc\xff\x13\x00\x02\x00\xf7\xff\x02\x00\x03\x00\xf7\xff\xff\xff\x00\x00\xf6\xff\xf7\xff\x02\x00\xff\xff\xf6\xff\xfb\xff\xfe\xff\xf7\xff\xf4\xff\x00\x00\xfb\xff\xf2\xff\x00\x00\x03\x00\xfc\xff\xff\xff\x03\x00\xfc\xff\xf9\xff\x04\x00\x00\x00\xfc\xff\x01\x00\x06\x00\x05\x00\xfc\xff\x00\x00\x05\x00\xfd\xff\xff\xff\x08\x00\xfd\xff\xf9\xff\x00\x00\x03\x00\xf7\xff\xf0\xff\xfb\xff\xfd\xff\xf4\xff\xf0\xff\xf2\xff\xf4\xff\xf1\xff\xf0\xff\xf7\xff\xf2\xff\xf1\xff\xfb\xff\xfd\xff\xf4\xff\xf9\xff\x00\x00\x01\x00\xf7\xff\xfc\xff\x07\x00\xf7\xff\xf0\xff\x05\x00\x03\x00\xef\xff\xfc\xff\xff\xff\xef\xff\xf2\xff\xfa\xff\xf1\xff\xf0\xff\xf6\xff\xfb\xff\xf5\xff\xf2\xff\xf7\xff\xf6\xff\xf3\xff\xf6\xff\xf6\xff\xf7\xff\xf9\xff\xf5\xff\xed\xff\xf3\xff\xf3\xff\xf6\xff\xf4\xff\xf4\xff\xf5\xff\xf2\xff\xf3\xff\xf9\xff\xf7\xff\xef\xff\xf6\xff\xfa\xff\xee\xff\xef\xff\xfb\xff\xf9\xff\xf2\xff\xfa\xff\xfd\xff\xf5\xff\xf3\xff\xf7\xff\xf8\xff\xef\xff\xf8\xff\x00\x00\xf9\xff\xf2\xff\xf7\xff\xfd\xff\xf5\xff\xf8\xff\xfc\xff\xfa\xff\xf4\xff\xfd\xff\x01\x00\xf1\xff\xf5\xff\x01\x00\xfb\xff\xf2\xff\xfd\xff\xff\xff\xf3\xff\xee\xff\xfe\xff\xfd\xff\xf1\xff\xfd\xff\xff\xff\xf6\xff\xf8\xff\x00\x00\xf5\xff\xf4\xff\x00\x00\x04\x00\xf9\xff\xfb\xff\x07\x00\x00\x00\xf9\xff\x02\x00\n\x00\xfc\xff\xff\xff\x0b\x00\t\x00\xfc\xff\x07\x00\x0f\x00\xfc\xff\xf9\xff\x08\x00\t\x00\xfe\xff\xff\xff\x02\x00\x01\x00\xfd\xff\x07\x00\x02\x00\xfb\xff\x05\x00\n\x00\x01\x00\xfd\xff\x03\x00\x0b\x00\t\x00\x04\x00\n\x00\n\x00\x03\x00\x02\x00\x03\x00\x02\x00\x02\x00\x03\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfd\xff\xfb\xff\xff\xff\xff\xff\xfc\xff\xfa\xff\xff\xff\x06\x00\x03\x00\x00\x00\x00\x00\xfc\xff\xfa\xff\xfb\xff\xfd\xff\xf5\xff\xf8\xff\x01\x00\xff\xff\xff\xff\x00\x00\x03\x00\x02\x00\x00\x00\x02\x00\x06\x00\x04\x00\x06\x00\n\x00\x0b\x00\x0b\x00\r\x00\x10\x00\x0c\x00\x13\x00\x17\x00\x14\x00\x0e\x00\x17\x00\x13\x00\n\x00\x11\x00\x0e\x00\x06\x00\x06\x00\x11\x00\x0c\x00\x0b\x00\x13\x00\x12\x00\x08\x00\n\x00\x12\x00\x0c\x00\x06\x00\x05\x00\t\x00\x07\x00\n\x00\x06\x00\x00\x00\t\x00\x05\x00\x00\x00\x00\x00\xfe\xff\xfa\xff\xfe\xff\x02\x00\x00\x00\x00\x00\x02\x00\x03\x00\x01\x00\xff\xff\xfd\xff\xfb\xff\xf8\xff\xfd\xff\xfb\xff\xf9\xff\xfc\xff\xfc\xff\xf4\xff\xf1\xff\xfb\xff\xfd\xff\xf7\xff\xfc\xff\x00\x00\xf7\xff\xfb\xff\xff\xff\xfb\xff\xf9\xff\xff\xff\xfe\xff\xfb\xff\xfd\xff\xfe\xff\xf9\xff\xf9\xff\xfc\xff\xff\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\xfd\xff\xfa\xff\xfa\xff\xfb\xff\xfb\xff\xfb\xff\xfd\xff\xfd\xf
f\xf9\xff\xf8\xff\xfb\xff\xfa\xff\xfc\xff\xff\xff\x01\x00\xfc\xff\xf7\xff\x05\x00\x01\x00\xfd\xff\x03\x00\x03\x00\xfd\xff\xfd\xff\x00\x00\xfb\xff\xf8\xff\xfd\xff\xfe\xff\xfc\xff\xfd\xff\xfb\xff\xfa\xff\xf5\xff\xf2\xff\xf5\xff\xf9\xff\xf4\xff\xf5\xff\xf6\xff\xf3\xff\xfa\xff\xf3\xff\xf5\xff\xf7\xff\xf3\xff\xf2\xff\xf5\xff\xf2\xff\xef\xff\xf1\xff\xf4\xff\xf5\xff\xf1\xff\xf6\xff\xf6\xff\xf0\xff\xf4\xff\xf7\xff\xf5\xff\xf3\xff\xfa\xff\xfb\xff\xf4\xff\xf7\xff\xf8\xff\xf8\xff\xf7\xff\xf8\xff\xfd\xff\xf7\xff\xf5\xff\xf5\xff\xf5\xff\xf6\xff\xf2\xff\xf3\xff\xf5\xff\xf7\xff\xf9\xff\xf6\xff\xf6\xff\xfa\xff\xfd\xff\xf9\xff\xfa\xff\xfe\xff\xfe\xff\xfc\xff\xff\xff\xfe\xff\xfa\xff\xfc\xff\xfb\xff\xfb\xff\xf9\xff\xfc\xff\xfa\xff\xf6\xff\xfc\xff\xfb\xff\xfd\xff\x00\x00\xfe\xff\xfa\xff\xf9\xff\xfc\xff\xfd\xff\xfe\xff\xfa\xff\xfb\xff\xfe\xff\xfc\xff\xf9\xff\xfb\xff\xfa\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x03\x00\x01\x00\x02\x00\x04\x00\x04\x00\x04\x00\x02\x00\x01\x00\x00\x00\x01\x00\x03\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x04\x00\x04\x00\x03\x00\x06\x00\x05\x00\x06\x00\x04\x00\x07\x00\x07\x00\x03\x00\x04\x00\x06\x00\x06\x00\x04\x00\x06\x00\x03\x00\x00\x00\x01\x00\x04\x00\x04\x00\x04\x00\x08\x00\x07\x00\x06\x00\x07\x00\x04\x00\x03\x00\x06\x00\x06\x00\x04\x00\x04\x00\x06\x00\x08\x00\t\x00\x0b\x00\n\x00\x0c\x00\r\x00\x0c\x00\x0b\x00\x0b\x00\x0c\x00\x07\x00\x05\x00\x07\x00\x06\x00\x03\x00\x04\x00\x04\x00\x02\x00\x03\x00\x03\x00\x06\x00\x03\x00\x00\x00\x01\x00\x07\x00\x07\x00\x05\x00\x06\x00\x06\x00\x06\x00\x06\x00\x05\x00\x04\x00\x01\x00\x03\x00\x04\x00\x02\x00\x00\x00\x00\x00\x02\x00\x01\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\xfc\xff\xfb\xff\xfe\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\xfd\xff\xff\xff\xfe\xff\x01\x00\x00\x00\xff\xff\xfd\xff\xfa\xff\xfb\xff\xfb\xff\xfe\xff\xfd\xff\xfc\xff\xfd\xff\xfb\xff\xfa\xff\xfb\xff\xfa\xff\xf9\xff\xfa\xff\xfa\xff\xfc\xff\xff\xff\x00\x00\xff\xff\xfc\xff\xfd\xff\xfd\xff\xfa\xff\xf5\xff\xf3\xff\xf8\xff\xf7\xff\xf6\xff\xf6\xff\xf5\xff\xf4\xff\xf6\xff\xf6\xff\xf4\xff\xf4\xff\xf7\xff\xf8\xff\xf7\xff\xfa\xff\xf6\xff\xf3\xff\xf3\xff\xf5\xff\xf6\xff\xf7\xff\xf8\xff\xf7\xff\xf9\xff\xf8\xff\xf5\xff\xf6\xff\xf9\xff\xf9\xff\xf9\xff\xfa\xff\xf8\xff\xf9\xff\xfb\xff\xff\xff\xfe\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfb\xff\xf8\xff\xfa\xff\xfb\xff\xfa\xff\xf6\xff\xf4\xff\xf3\xff\xf6\xff\xf6\xff\xf8\xff\xf8\xff\xf5\xff\xf7\xff\xfb\xff\xf9\xff\xf8\xff\xfa\xff\xfd\xff\xfe\xff\xf9\xff\xfc\xff\xfa\xff\xf9\xff\xf8\xff\xf5\xff\xf4\xff\xf4\xff\xf1\xff\xf2\xff\xf4\xff\xf4\xff\xf5\xff\xf3\xff\xf5\xff\xf3\xff\xf6\xff\xf8\xff\xf8\xff\xfa\xff\xf9\xff\xfb\xff\xfc\xff\xfe\xff\xfc\xff\xfb\xff\x00\x00\x00\x00\xfb\xff\xfd\xff\xfe\xff\xf9\xff\xfe\xff\xfc\xff\xfa\xff\xfe\xff\xfd\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x01\x00\x03\x00\x05\x00\x05\x00\x04\x00\x02\x00\x03\x00\x02\x00\x03\x00\x02\x00\x01\x00\x03\x00\x03\x00\x02\x00\x03\x00\x03\x00\x03\x00\x02\x00\x03\x00\x03\x00\x02\x00\x04\x00\x07\x00\x03\x00\x04\x00\x06\x00\x06\x00\x07\x00\x03\x00\x03\x00\x02\x00\x04\x00\x07\x00\x05\x00\x02\x00\x03\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x05\x00\x03\x00\x05\x00\x04\x00\x06\x00\x08\x00\x08\x00\t\x00\x08\x00\x06\x00\x05\x00\x05\x00\x03\x00\x05\x00\x07\x00\x
08\x00\x06\x00\t\x00\x07\x00\x05\x00\x06\x00\x04\x00\x04\x00\x04\x00\x02\x00\x02\x00\x00\x00\xff\xff\x01\x00\x01\x00\x04\x00\x05\x00\x04\x00\x03\x00\x05\x00\x05\x00\x05\x00\x06\x00\x06\x00\x06\x00\x05\x00\x04\x00\x04\x00\x07\x00\x05\x00\x04\x00\x05\x00\x04\x00\x02\x00\x03\x00\x05\x00\x05\x00\x03\x00\x03\x00\x02\x00\x00\x00\xfe\xff\xf8\xff\xfa\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x02\x00\x03\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x03\x00\x05\x00\x05\x00\x04\x00\x02\x00\x01\x00\x00\x00\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xfd\xff\xff\xff\xfd\xff\xf9\xff\xfc\xff\xfd\xff\xfb\xff\xfb\xff\xf9\xff\xf9\xff\xfb\xff\xfa\xff\xf8\xff\xf8\xff\xf9\xff\xf9\xff\xfa\xff\xf7\xff\xf8\xff\xf9\xff\xf7\xff\xf8\xff\xf7\xff\xf7\xff\xf8\xff\xfa\xff\xfa\xff\xf8\xff\xf6\xff\xf6\xff\xf5\xff\xf8\xff\xf7\xff\xf4\xff\xf7\xff\xf5\xff\xf8\xff\xfb\xff\xf9\xff\xfb\xff\xfc\xff\xfa\xff\xf7\xff\xf9\xff\xfa\xff\xfb\xff\xfc\xff\xfc\xff\xf9\xff\xf7\xff\xf6\xff\xf6\xff\xf3\xff\xf0\xff\xf1\xff\xf1\xff\xf1\xff\xf3\xff\xef\xff\xee\xff\xf1\xff\xf3\xff\xf3\xff\xf1\xff\xf2\xff\xf3\xff\xf3\xff\xf4\xff\xf6\xff\xf4\xff\xf7\xff\xf5\xff\xf6\xff\xf6\xff\xf7\xff\xf7\xff\xf8\xff\xf6\xff\xf6\xff\xf6\xff\xf5\xff\xf3\xff\xf8\xff\xfc\xff\xfc\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc9\x00\xb5\x00\x9d\x00/\x00\x16\x01F\x01\xcb\x00#\x01\xc3\x00p\x00\xe7\x00\x9f\x01\x87\x01\t\x01\xd3\x00\xa0\x01`\x00\xc2\xfe9\x00\xae\x01\x9f\x00\xc5\xff\xfe\xff\x85\xff\xea\xfe\xf3\x00;\x00o\xfdQ\xfd\xd3\x00U\x01\xdb\xfeh\x00:\x02\x9d\xffb\xfe\t\x00<\x00\xbb\xffk\x02\r\x01\xd9\xfe5\x00\x7f\x01\x9f\x00\xa5\xfe`\xfe\xcc\xfd\x91\xfe\x9e\xff6\x00\x83\xfed\xfe\x13\xfe\x9c\xfd\xa4\xfe}\xff\xf6\xfe\xb4\xfd\xe9\xff\xff\x00\xd6\xff\xc9\x00L\x01#\x00\xf0\xff\xa4\x00\x16\x02\xfd\x01N\x02\xba\x02\xf1\x01\x9c\x01\xa7\x01,\x02\x9c\x01\x86\x01w\x01\x9f\x01\xca\x01\\\x01\xe4\x00\x9f\x00\xcc\xffh\xff\xa8\xff\xc0\xff\xc3\xffE\xff\xae\xfeE\xfe$\xfe \xfeT\xfe|\xfe!\xfe\x99\xfe\x83\xfe\x16\xff\xbe\xfe\xaf\xfd+\xff\xa9\xff\r\xff\x81\xff\xde\xff\xf9\xfe4\xff\x06\x00a\xffF\xff\x85\xff@\x00J\x00u\x00\xae\xff\xb0\xfe\x9e\xff\x9d\xffP\xff\x82\x00\x1c\x00\xbc\x00 
\x01P\xff\x88\xfe\xeb\xfd\xc4\xff\xe3\xffB\x00\xd7\x01\x92\xffr\xfe\x08\x00$\xff&\xfe"\xffe\xffT\xfe\xc5\x00\xbd\x02\x13\x00\xb1\xff7\xff\xcf\xfe\x17\xff\xd9\xff~\x020\x026\xffS\x01\xd0\x01\xaf\xffE\xff\xa9\x00\x8d\x02\xaf\x00#\x00\xe3\xffa\x02n\x02\xdc\x01\xe4\x01\xbf\xff\xc1\xfe\xcc\xfew\xffM\x00#\x02\x82\x02.\xff}\xfc\x13\xff\x03\x01\xbc\xfe\x0b\xfcq\xfb\xc3\xff\xa8\x02\xb2\xfd\xa9\x00\xc1\x00\xd8\xfb\x0e\xfc\xad\xff\x0e\xff\x9b\xfb\xcb\x00\xcc\x01\x82\x00u\x00\xc2\x00F\xff9\xfb\xa2\xfd\xe0\x01\xca\x01\xb8\x00\x8f\x02\xb7\x04\xa7\xff+\xfe\xe0\x01\xfc\x00\xbf\xfc\x80\x00R\x04G\x03|\x01\xa6\x01u\x02\x0f\xfe\xc6\xfe\x1f\x01\x08\xfe\xe4\x00>\x04\x18\x03\x93\xff\xc2\xfeA\x01R\xfd\xdd\xfd=\x003\xff!\x00k\x026\x00\xc1\xff\xe3\x01\x1e\xfco\xfc\xe2\x00"\x01u\x02s\x03*\x01\x17\x00u\xfc\xe9\xfb\xbe\x02\xbb\x02\\\xfe\xfd\x00l\x029\xfe\xd9\xfcl\x00x\xffs\xfe\xb8\x02\xa2\xff\x93\xfd\xb9\x04\xaa\x03\x9c\xfcc\xfe_\xfe2\xfe\x93\x02\xc2\x01\xdf\x01\x0f\xfek\xfb\xf5\xfe\x13\x01\xc3\x00\xf9\xfb\xff\xfe\x8e\x00\x06\xfe\x12\x03\x8a\x02s\x00\xcb\x01\xc8\x00a\xfe>\xff\x86\x02=\x08\xee\x04r\xfd\x9f\xfb\xc0\xfb\xe4\xfd\xde\x01\x16\x05\xa9\x00\x92\xfc\xc4\xfa;\xfc\xda\x01\t\x01`\x01\x16\xfd<\xfd\xbc\x024\x01\xf5\xfe\x00\x02\xad\xfe\x84\xf8C\xfd\x80\x03;\x03%\x02\xbd\x03\x06\xfch\xf9\x00\x03U\x05\xe5\x00\x1a\x02P\x03E\x01C\x02\x1b\xff\x8f\xff\x8b\xfd\x08\xfd\xe1\xff/\xfcg\xffA\x03\xf2\xff\x9a\xfa\xc2\xfa\xee\xfa\x1f\xfbR\x02\xb3\x06D\xfe\x11\xfc\xec\x00\x03\x01H\x04R\x01i\xfd\x8c\xfdl\x03\xdd\x03/\x04!\xff\x9a\xfe\x83\x03\xd3\x01\xce\x04@\x01\x15\xfa\xae\xfb\xef\x01\x12\x03\xc2\x02\x9f\x02\xb0\x02\xe7\xfbg\xfb\x15\xff\x1e\xffu\xff`\x005\x01_\xfec\x01\x92\x03X\x04\xc9\xfd\x8b\xfdN\xff\x04\xfc\x05\x02\xfe\x04\xf6\xff\x7f\x00B\xfe\x05\xfe\xa1\xff\x0f\xfa\xb3\xfd&\xff6\xfc\x02\x02Y\x04J\x02\x9c\xfe\x1c\xfdf\xff~\x01\xbf\x00\xff\xff\x98\x00\xc6\x03d\x06\xce\x00\x89\xff\x1d\xfe$\xfc\xa0\x01\n\x07\xc2\x01\xf5\xf8\xf7\xfd\x03\xfe{\xfd\xaa\x02\xc2\x01\x84\xf8\xaa\xf4\xca\xfb\xba\xfc\xac\xfe\xca\x02\x00\xff\x9b\xfb\x9e\xfd\xcc\x00\x92\x01\xaa\xfd\x14\x00\xf8\x04\xb1\x03\xe7\x02_\x05\xe2\x007\xfb\x10\x04l\x03?\xfd\xb1\xfe\xd6\x03C\x05\xb0\x02\xc3\xfc\xeb\xfaf\xfe(\xfeU\x02\x86\x03\x86\xfd\xc6\xf7V\xfcV\xfe`\x03\x11\n\x1b\xfc\xdd\xf28\xf9\xe2\x01$\x02\xe4\x05\xc5\x01$\xfaA\xfa4\x06\xba\x06)\xfe\xf4\xfb8\xfd\x1b\x04\xda\x04^\x05\xd3\x03I\x03t\x03`\x03M\x04Y\r\x00\x03\xa8\xf4\xdd\xfft\x00:\xfd\xdf\x00\xa1\x01`\x05f\xfc@\xf3{\xf0\xc2\xf3\x9f\x00\x99\x03\x15\x05\xfd\xfb{\xf4\xfe\xf9\xbf\x03\xc7\n-\x07\xe8\x01#\xf8\xe9\xff\x88\x0e\xfa\x053\x03\n\x03\x93\x05\x7f\x03\x8b\xfc\x03\x02\x89\x01m\xfdF\x00-\xfd\xed\xfbX\xfd\xe2\xff\xad\x05f\x008\xf2\xa5\xed\x15\xfa#\x08\'\tm\xfd\xd4\xf9\xbd\xfe1\xff\x7f\x046\x05\xf3\xf9\x15\xf7]\x01\xe0\x0bT\n\xc4\x03\x9d\x011\xfe\x89\xfa\x96\x02u\n\xea\x01\xe1\xfac\xfa"\x05\x90\x04\xa9\x00K\xfe\x98\xf4\xa6\xf4\xb2\xfa\xde\x01f\x02\xed\xff)\xff\x06\xfe4\xff:\xff(\xff^\xfe@\x01x\x06\t\xffB\x01R\x04\xb8\x03\xde\x03J\xfc\xd4\xfb\xf2\x00\x05\x06\xc8\x02d\x06\x10\x03\x9d\xfa,\x00\x89\x06M\x00\xbd\xf6\x1f\xfd\xdc\x02G\x02\xc5\xfbI\xf6\\\xfd*\x03\x95\xfe^\xfa\x0b\xf9)\xfcl\x07\x82\x08v\x03\x87\xfeq\xff\x88\xff\xf3\xfdh\t\x87\x0bz\x00d\xfd\xe4\xff\x16\x02\xfe\x03M\x03\x7f\x01\xff\xfa\x85\xf4\xdf\xfc\x93\x05\xd8\x00"\xfb\x10\xfb\xa4\xfd\xda\xfd\xd4\xfeF\xfa\x00\xf8M\xff=\x04\xda\x05\x8c\x01\xe8\xfd\xed\xfb\xf9\x01^\x05{\x04S\x00\xfc\xfb\x15\x02\x9e\x03\xde\x01g\x03A\xfe\x9c\xfb\xb4\x00,\x00\xa6\xffp\xfe\xc7\xfb\xe7\xfd\xd3\xfe\xdc\x00\xba\x00[\xfd\xde\xfeG\xffF\xfc\xe8\x00\xb8\x02\xa0\x01L\x02n\xff\xf9\xfd\xd4\x00#
\x02\xdf\x05\x80\x01\x8e\xfd\x17\x00Q\xff\xc1\x03\xe7\x03\xa5\xfe\x04\xfdo\xfec\xfc\x1a\x00R\x00\xab\xfe\x9b\xfc\xb1\xf8\xe5\xfdh\x01K\x02\x92\xfe\xb5\xfa\xff\xfce\xfe\x8c\x02n\x0by\x04\xd7\xfc0\x01\xa3\xff,\x003\t\xa6\x05\x14\xffU\x00J\xfd\n\x00y\x04\x04\xfe\xc5\xfb,\xfa+\xfe\\\x03\xe8\x00\xf1\xfbh\xfa5\xfdb\xfdU\x00d\x01\x1d\x00\x91\xfa\x8b\xfcG\x02\x16\x02\xf1\xffT\xfc*\x02\xec\x03m\x00\xb9\xfe\xe8\xfd"\x01\xc5\x08\xdc\x05\xdf\xf8\x11\xfb\xfb\x03s\x05\xcb\x04\xe0\xfdy\xf8\xd3\xfa5\x07\x86\x08m\x00H\xfc\xa6\xf9\xbb\xffB\x06\x00\x04d\xfe=\xfe\xc5\xfb\xb0\xfe&\x06\x81\x04\x94\xfe\xbb\xfb"\xfc\xdc\xff\xdf\x00\xba\xff\x82\x00\x0b\xfd\xdf\xfc\xc2\xfe\x91\x00\x9e\x01\x99\xff\x89\xfe\x90\xfcL\xffh\x03\xad\x03\x9f\x02b\xfd\xc4\xfdi\x02\\\x02\xd3\x00O\x01\xce\xfe\xb3\xfc\xd5\x01c\x02a\xffh\xfe\x86\xfc\xa5\xfd+\xffC\x05e\x02\x85\xfa,\xfbE\xff\xb2\x01%\x04E\x03\xb1\xfd\xe8\xfb\xf2\xff\xf5\x04*\x03\xb0\xfe\x94\xf9\xab\xfe\x86\x05\x9d\x01\xec\xff\xea\xfc\xe4\xfdN\x03r\x02\x8b\x00\x0b\xfe8\x00\x98\x02\x92\x01\xf3\xffX\xfdn\xfff\x01\x90\x01)\xfc\xd9\xfc\x91\x02\x0e\xff\xdc\xfc4\xff@\x00\x06\xff~\xfe\xdd\xff\xf3\xff1\x03$\x02\xcc\xfd\xe5\xfcJ\x03\xcd\x04\xac\xff\xcc\xfd\xde\xffh\x02D\x00\xb2\x03\xdb\x029\xfc\x18\xfbV\x02i\x05\xe8\xfdQ\xfc\xa7\xfeM\xff\xe4\xff\x91\xff\x18\xffr\xfer\xfe\xc6\x00_\x00\xc9\xff&\x01\x1c\x01>\x00t\xff\xc2\xfeP\x02s\x02@\x01\xf0\xff\x01\xfe\xa6\x01\x04\x03\x07\x01\xf2\xfc\x8a\xfcG\x00\x1c\x04\x1f\x02\xda\xfdg\xfct\xfeF\x04\xde\x03\\\xfbo\xf9\xab\xfe\x1b\x01\xfa\x03\xf1\x01\xd1\xfdq\xfb\xe8\xfc\xd1\x01\r\x03\xa1\xffz\xff\x1b\x00H\xff\x8c\x03\x83\x05\x87\x01\n\x00\xe2\xfe\xea\xfcB\x03O\x05R\x01;\xfe\x9f\xfc\x88\xfeC\x01f\xfe\xbe\xfd\x8f\xfd\xe1\xfc \xffO\x00\x93\xff\xf3\xfd2\xfe\xd2\xfe\xc6\x01\x99\xff\x81\xfe!\x00\xf3\x02\x12\x02\x01\x017\x01\x19\xfeV\x01\x9c\x02R\x02\x06\x01\xaf\xfd\xa4\xfe\xe7\x00f\x02\xf2\xfe\x11\xfd\x1c\xfck\xfd\xcb\x04\xac\xfe^\xfa\xa7\xfdk\xfc\x85\x02B\x05\x83\xfe\x98\xfbw\xfd\x03\x01w\x05\xbb\x05Z\x00\xef\xfc\x9e\xfdM\x01\x17\x07k\x03\x12\xfc\x9a\xf96\x00\x15\x07\xff\x05\x86\xfe\xa7\xf7%\xfa|\xff+\x05t\x05u\xfd\x9e\xf7\xb4\xf9f\x03\xe0\x07D\xfe<\xf8\x1a\xf9\x16\x01\x99\x04k\x02\xf8\xfd\x80\xf8\xd5\x00\xb2\x03n\x01\x1a\x00\xe7\xff\xf4\xff\x87\x03\x11\x03\xcc\xff\x86\x02\x1e\x031\x03\xdd\x00&\x00Y\x01\xb2\x02y\x01$\xfc.\xfe\'\x01Y\x00_\xfe\x16\xfd\x9f\xfe\x04\xfd%\xfb}\xfb\xd8\xff\x99\xff5\xfb"\xfd\x96\xfd\xd7\xfd\xd6\xff\xc8\x03\xdb\xfd\xc6\xfa\xb1\x03\xdb\x03\x01\x08\x17\x05\x06\xfc\x98\x02\xf1\x03\xd4\x05a\x08<\x027\xffx\xffS\x02A\x03\x80\x00 
\xfa\xf9\xf8\x05\xfc\xeb\x00\x81\xff\xe6\xfa%\xf9\xbd\xf8\xe3\xfb\xd5\xfe\x0f\x00\x82\xfc\xbf\xfeH\x01s\x03\x0f\x05\x1b\x03\xd1\x00\x10\x03q\x02\xe4\x03\xba\x08\x9c\x02\xa8\xff\xbb\xff?\x01\xa6\x05\x05\x05;\xfa\xb7\xf9\xd3\x00\x94\x00\x9f\xff\x97\x00\xe8\xfbT\xf9\x87\xff\x89\x00\x87\xff\x1e\xfce\xfc\x86\xfeT\xfe\x9b\x01\x15\x02o\xff\x11\xfc3\xfc*\x03G\x06P\x04#\x01\x93\xff\xb0\xff\t\x04s\tt\x06D\xfb\xcf\xfb\xf5\x05\x89\x04%\x02q\xfft\xfa\xa2\xfa\xd5\x00\x9d\xffa\xfdu\xfa\xc6\xfa6\xff0\xfe\xb8\x00\xdf\xfd\x1f\xf99\xfd\xf6\x06\x13\x06\xfa\xfc\x91\xfcm\xfe\xb7\x01\x1b\x08G\x03\xcb\xfa\x1d\xfe\xcb\x02\xef\x05\xa7\x03\xef\xfde\xfde\xfe\xda\x00\xd3\x02s\x02\xf5\x00\xba\xfc\x13\xfe\xc5\x00\xe5\xff\xf1\xffd\xfe\\\xfc\xab\xfe-\x00#\x01J\x01\xba\xfc\xcc\xfc>\xff\x1c\x01H\x00\xba\xfe\x87\xfe\xa8\x01x\x01\x88\x00\xfd\x04\xac\x01\x1b\xfd/\x00\x8b\x02t\x02\xdf\x01\xbc\x00\xc6\x01\x15\x02Y\x01\xcb\xff\\\x00\x05\xff\x91\xfc\xeb\xfck\x02\xdf\x03\x85\x01\xe5\xfd\xf3\xfb\x96\xfd\xe4\xff(\x03\x02\xff\x7f\xfc\xdb\xff\xcf\x03\xe6\x01\xef\xff\xe8\xfd\xa0\xfb\x1d\xfe\xb1\xfd&\x03\x8e\x02\xf8\xfd\x95\xfb\xed\xfb\x90\x02x\x03\xfe\xfc\x87\xf7v\xfc\xe8\x02\xae\x05`\x00\xa8\xfaC\xfd\xc7\xfe\xd6\xff\x12\xfd\xca\xfd\xcd\xfe\x7f\xff\xc5\x00\x01\x00\xe3\x00\xb2\xfe\x9b\xfbA\xfb\xfc\x02\x19\x04\xe1\x01\xe5\x01`\xffK\x03*\x04\x92\xff\x1d\xfd\xce\xff\xa8\x01\x18\x02t\x02/\xfft\xff\xe4\x009\xfe\\\xffY\xfeH\xfe\xc7\x00\xe7\x00L\x00\xd9\xfe\xc6\x00\xe5\xfe{\x00\xd3\xff\xa8\xfd5\x01\xdc\x00\xe8\x00\x98\x02\x93\xff\xea\xfc\xd3\x02\xc2\x02\xc6\x00\xea\xff\xdb\xff\x13\x01\x00\x01\x15\x03y\x01T\xffc\x00e\x04\xbb\x03\xd7\x00\x88\xffB\xfe\xfb\x00\x98\x02\x18\x04\x10\x02z\xfe\xb3\x00\xa9\x01W\xffe\xfe\xf4\xff\x9c\xfe\xa2\xfe\xb9\x01\xa4\x00\x1b\x01\x1d\x00\x02\xfd\x92\xfb\xb8\xfb\xb5\xff\x19\x01\xc2\xffh\x00\x92\xff\x89\xff\xe1\xfe.\xfd\x93\xfbj\xfbu\xfe\xc4\x00b\x02\x80\x02.\xff\x82\xfcd\xfbP\xfb\xe4\xfc\xf8\xfd\xe3\x01x\x01\xfb\xfe\x1f\x00K\xff\xa9\xfc\x81\xfai\xfa\x10\xfc\x0c\x01!\x02\xb2\x01\xd0\xfe \xfb]\xfb\x8f\xfc\xf4\xfdG\xfc8\xfa\xe3\xfb\x0e\x01\\\x02\xbe\xfe\xaa\xfa{\xf6\xbc\xf8{\xfc\r\xffB\xfe.\xfb^\xfbx\xfc"\xfe\x08\xfd\x0f\xfbz\xf9\x1a\xfbz\xff\x1f\x01\xa2\x027\x016\xfd\xda\xfd\xe2\x02P\x04\x00\x02F\x04\x9d\x06\xca\x05\x08\t&\x08\xe3\x06\x1e\x06\x14\x05t\t\xfd\x0b\x82\t\x12\t\xd6\x07c\x06e\nz\x0by\x08G\x07\xfa\n\xae\x0c\xa4\x0cM\x0e\xa9\x10\xd1\x11"\x11&\x12\xdc\x14L\x17\x95\x15\x8d\x15\x85\x14\x05\x15\n\x17\r\x14"\x10\x12\r\x16\n.\x07\xf7\x04\x95\x02\x1c\xfe\xe7\xf9\xfe\xf6\n\xf4\x82\xf0\xf0\xec\xed\xe8B\xe7Y\xe7v\xe6\x07\xe5 
\xe5\xa1\xe4\xb2\xe3\xa1\xe3\x08\xe5\xa6\xe6\xbb\xe8y\xea3\xecq\xef\\\xf0\xbb\xef.\xf1\xf1\xf2r\xf5\xbf\xf6U\xf6\xa5\xf8\xe7\xf9\xc9\xf7*\xf6l\xf7\xe3\xf6\x92\xf4_\xf4\xd6\xf4b\xf4\xe8\xf2}\xf2\xa3\xefQ\xee\x96\xee\xb1\xf2\xe5\xf3<\xf0\xff\xee\xa5\xf0\x11\xf7\x1c\xfd\xe5\xfb\x1f\xf9\xb3\xfc\xc1\x01\xcf\x05C\x08\xda\x08o\t\x18\n\xa8\x0e+\x12\x8d\x12;\x11\xe5\x0e\xe3\x10\x8a\x14\xd4\x16\x93\x15\xb6\x11\x88\x0f\xaa\x12B\x1e\xdc&\xac$C\x1dF\x1c\xc5!\x00)z/\xa21\x920H.n0\x894\x891\x16&\xe6\x1c\xcf\x1d~"\xf7!X\x19\xb4\x0b\x99\xfeK\xf6q\xf3\xee\xf2\x81\xed\n\xe3\xe5\xdae\xd9?\xd8\xb4\xd2O\xcd\x0c\xca\x1c\xcaY\xceA\xd4\xf8\xd7\xa4\xd7\t\xd7\x8a\xda\xd7\xe1;\xea\x1e\xf15\xf3\xf0\xf6U\xff\xa5\x04\xff\x06\x93\tr\n\x02\r=\x10\xbc\x13\x80\x15_\x11\x88\n\xa2\x07\x8c\x08\n\x08/\x03y\xfc\x08\xf8\xf6\xf4\x8d\xf1\xb3\xee\xc7\xea\x11\xe6%\xe2\xe5\xe1\xe8\xe4#\xe5\x05\xe2\x90\xe0\xf4\xe3r\xe9\x94\xeb\xbf\xec\xc1\xefC\xf2/\xf5\x93\xfa4\xff!\x01Z\xff\xd3\xff\xf4\x04w\x08\x1b\t8\x07g\x05\x97\x06\x14\x07\x81\x07\xa6\x07\xfa\x041\x03\x9d\x01\x84\x01\x10\x03\xfe\x00\x10\x01\x90\xffk\xff\t\xff\xee\xffv\x05\xf1\x03\x00\x02<\x01,\x022\t\x9e\x12\xc2\x1dz\x1f\x89\x18\x07\x19\xd4#O-\xd91\xf84\xf57f:2:H:\xfa7O0\x18*\x97*\xca-\xa1,\xf7 \x16\x11\t\x07\xe5\x00s\xfeg\xfc\xdc\xf5\xd1\xecP\xe43\xdd\xe6\xda#\xd9;\xd4\xce\xd0\xb6\xd1\xd6\xd5\xce\xd6\xcb\xd4"\xd4n\xd5\\\xda\xd8\xe1\xde\xe8\x90\xecE\xef\'\xf1\x19\xf3y\xfa\xae\x00%\x03\'\x07\x11\x0b\x03\r5\x0b=\n&\x0b\x1c\x0c\xd5\x0bv\x0c\x1b\x0b\x97\x05\xa7\xfe\x80\xfa\x9d\xf9R\xf8K\xf5\x13\xf29\xef\xfd\xea\xae\xe6\xf6\xe4\x01\xe6o\xe6;\xe6\xe6\xe6\xd6\xe8`\xe93\xe8\xb3\xe8T\xec\xf2\xf0m\xf3V\xf5\xc6\xf6\xb3\xf7(\xf8\xbb\xf9N\xfb\xe9\xfd\xfe\xffJ\x03\xbf\x04T\x04\xd3\x00\xf8\xfe\x97\x03d\x06\xd0\x08l\t\xbb\x07,\x07\xd8\x02\xfe\x02*\x08\xfa\t\xd5\x08J\x081\x0c\t\rh\x0b=\x0c\xfd\x13\x04\x1f\xda"\xd0"\xa9!\n$\xf6)\xa71 8\x0f<\x8c:b4\xfa1>2C1\xb9-X\'S$* 
\x1b\x17\xce\r\x12\x03\xaa\xf9\x8d\xf4\xaa\xf1s\xef\x12\xe8\xcb\xdc\x9e\xd3\xf2\xcf\x8a\xd1\xcb\xd2\xc0\xd3\xe1\xd15\xcfr\xcf\xc0\xd1\x81\xd6~\xdb\xe2\xe1\xda\xe6\xc1\xe9t\xee\x93\xf3\xf4\xf4\xc5\xf8\x91\x00i\x07\xe5\x0br\x0c5\x0bj\n?\tf\x0c8\x11|\x11g\r\x03\x07^\x02\xc8\x00\x91\xfee\xfd\xdb\xfb\xfe\xf6#\xf2\'\xef\x86\xec\xb9\xe9\xaa\xe6\xfd\xe6\xa5\xea\x0c\xeb\xa8\xe8\xc1\xe5s\xe6c\xe9L\xed\x02\xf1\xc4\xf1\x13\xf2<\xf2\xe8\xf3\xf5\xf6K\xf9\xad\xf9Q\xfbY\xfd\x9f\xfd\xef\xfdG\xff\xbb\x00\n\x01X\x01.\x03\xcc\x03T\x04q\x04\xd7\x03\x06\x04\x93\x03\\\x07)\x08\x9f\x03\x9e\x01\xa1\x02\xad\x06.\x0b\xe2\rM\x0fe\x0b\xb3\t\xbc\x12\xc5\x1d\xb8$\'$|#\x97\'D,\xc90\xaf5\xa07\x9a6\x884\xb13\xe33A/\xc6)i&\x15#\x91\x1e\x90\x16\x9f\x0cy\x04\xb9\xfe\x8c\xfa\xb2\xf6\xbf\xef\xa9\xe5\x85\xde\xfe\xda\xea\xd9\xc5\xd8y\xd5w\xd2F\xcf)\xcf\xce\xd2U\xd6\x1a\xd8\x8d\xd9\x16\xdc\xa8\xdf\x1b\xe4\xb4\xe8\xef\xecg\xf2\x8b\xf7\xfe\xfa\xdf\xfd\x00\x00\x16\x03\x93\x07y\n\xbb\x0c6\r\x12\x0c\xc3\ng\tu\t\x14\n\xaa\x08\x1c\x05\x07\x01\xb6\xfc\xc1\xf9\xa8\xf9n\xf9\x93\xf6s\xf2\x80\xee|\xee\xc7\xee\xeb\xedP\xee\xd2\xed-\xed<\xed\xeb\xed\x83\xf0\x85\xf1T\xf1w\xf2y\xf3f\xf4\x94\xf5\xdc\xf6$\xf8\xfa\xf9J\xfa\'\xfaI\xfbw\xfd\xb9\xfe\x9c\xff\x17\x01Q\x02\x99\x01\xe1\x01X\x04?\x06\x02\x07\x9a\x05\xbd\x07D\t\x89\t\xfb\x0b|\r\xaf\r\xdc\x0el\x15\x9c\x1c/\x1d\x9a\x1a\xa2\x1c\x8a#\xec((-\xc40=0\xa1.\xbe,\xf7.\xf41k0\xc0,)\'\x02#K\x1e\x8c\x18\x8f\x14v\x0f\xb3\x08Y\x01\xe1\xfa\xff\xf4\x8e\xef\xc9\xea\xce\xe6\xbf\xe2\xc4\xde}\xdb\xd4\xd8&\xd7\x99\xd6\x9f\xd8V\xda\x1d\xdb\xea\xdb5\xdd\xc1\xe0@\xe5\x9d\xe9\xa7\xec\x90\xef[\xf3|\xf6\xbb\xf9P\xfdb\x00\xd8\x02x\x03\x7f\x04\x93\x06\x90\x07\xfe\x06%\x06\xbe\x059\x05\xa5\x03\xc8\x015\x00_\xfd\x85\xfa\xca\xf9(\xf9\x87\xf6\xa2\xf2\x00\xf0\xb5\xf0\x8a\xf1u\xf0M\xef\xbd\xed\xd7\xec>\xee)\xf1a\xf3\x98\xf1\x17\xf0\xe0\xf1_\xf4\x94\xf5\xdd\xf5=\xf7\x92\xf7}\xf7n\xf8\xcd\xf9i\xf9B\xfb\xfd\xfe\x1b\xff\xbb\xfc\xfb\xfa=\xfe\x14\x03\xfd\x04\xf2\x02\x14\x00\xde\xff\xeb\x02\x98\x07q\n~\tT\x06t\x05[\t \x11\xd9\x17\xfd\x19}\x18\x8c\x15\xfb\x17\xab!@-L3\xdf,>(\xe0)20[6-7?3\x0b,h&\xa6$\xd0%M#M\x1b\x1a\x12[\x0c\x9f\x06\xa8\xff\\\xfa\x0f\xf7\xf1\xf1d\xea\xf6\xe3\x1b\xe0\x87\xdc\x92\xd9\xb3\xd9\x19\xda\xe1\xd7-\xd3\x1f\xd2\n\xd6\xe6\xdas\xde\xa0\xdfQ\xdf+\xe0\xf9\xe3A\xeb\xb7\xf1\xb6\xf3\x06\xf4\xf2\xf5\xa5\xf8\xe4\xfc2\x02\x8a\x05m\x04\x94\x02v\x04\x8d\x07\xea\x07\xd3\x06c\x06\x8c\x05\xd7\x03\x0e\x027\x01}\xff\xc0\xfc\x93\xfb\xb5\xfb\xa1\xf9b\xf5S\xf3\x17\xf4\xe3\xf4\xb2\xf3\xc2\xf1\x88\xf1w\xf0\xf2\xef\x11\xf2k\xf3\xb3\xf2B\xf1\xf9\xf1l\xf4W\xf5\xde\xf4\xef\xf5&\xf7\xee\xf8\x98\xfa\xbc\xfah\xfbF\xfc0\xfe\xbb\x00\xb6\x01\xd2\x01q\x01\xe8\x01R\x05\'\x07X\x07\xc6\x06<\x05\x88\x06\xb9\x0b\x80\x127\x13T\x0e\x02\r^\x15\xcc\x1f\xb3#\x02"p G#1)\x910\x9b3\x050\x0b+\xfc*\xa0/O1\x91-\x9b%\xd6\x1f\xb9\x1cL\x1a\xce\x16\xaf\x10\xac\x08\xa0\x01\x10\xfd\x0c\xf9\x00\xf4\xa2\xedr\xe95\xe6\xe7\xe2\xd1\xdfL\xdd\xc6\xda3\xd9\xa9\xda\x12\xddw\xdcF\xda\xa1\xdbj\xdf\xb2\xe3\xaf\xe6\xa3\xe8\xfc\xe8\xfb\xea\xe8\xef\x8f\xf5\xed\xf8^\xf9\x0b\xfam\xfc\x90\xff\x0e\x03\xc2\x04\xe3\x03\xdd\x02\t\x04\xcb\x052\x05T\x03V\x02\xdb\x01\x08\x00\x83\xfe#\xfe\x85\xfcG\xf9{\xf7\x1c\xf8\x9c\xf7\xa6\xf4W\xf28\xf2\x99\xf2M\xf2\xae\xf1\x9e\xf1\x86\xf0\xe5\xef\x8b\xf1|\xf3&\xf3&\xf2\x9e\xf3\x9e\xf5"\xf6\xff\xf6\xac\xf8\x84\xfa\xbe\xfa\x1b\xfc\x1b\xff\x1c\x00d\x00\xc0\x026\x05b\x05z\x05\x88\x06\x95\x08S\nS\x0c\x10\rB\x0b\xac\nX\rP\x13\x9c\x18\x83\x19\xe5\x16\xeb\x15\x1c\x1a\xa6"\x1a)b*\xe3\']%y&@+\x120d0s*\xca#\x8c \xc7 ^ 
\x08\x1c\xbf\x15\xeb\r\x16\x07\x1a\x03\x9d\x00d\xfcD\xf5\x80\xee-\xeb\xbc\xe7\xde\xe3\xcc\xe0\xbd\xdep\xdd\x18\xdbo\xda;\xdbA\xdb]\xdb\x1e\xdd\xc4\xe0\x83\xe2t\xe2l\xe5\x9f\xe90\xed{\xef\xfc\xf1f\xf4g\xf6\xee\xf8\xcc\xfc\xa9\xffs\x00h\x00H\x01\x90\x02\n\x04/\x05 \x05A\x03\xde\x00\xb5\x00\xbe\x017\x01\x9a\xfe\x0e\xfc\x9e\xfa\xca\xf9\x16\xf9F\xf8\xa5\xf6\x18\xf4r\xf3~\xf4\xf6\xf3\x8d\xf1\xad\xf0\xe7\xf19\xf2f\xf1\x9a\xf1$\xf2\xd7\xf1\xc9\xf1\x84\xf3.\xf5\xb0\xf5<\xf6I\xf7\xf0\xf7\x8d\xf8\x1a\xfb4\xff\x12\x00\x0c\xfe\xe0\xfdt\x01\xfc\x05\xaa\x078\x07\xc5\x05\xb8\x04\xbe\x08\xae\x11\x9e\x15\x9d\x12-\x0e\x90\x11\x99\x1bp!\xc0%_&|$[$=*P3\x856\x9c2\x1c.=-3.Z/M.\xc0)\xaf!\x13\x1a\x16\x17\xc1\x14\xca\x0f\x02\x08\x9c\xff_\xf9\xd4\xf3\xac\xefY\xebZ\xe6#\xe1\xe7\xdc\xc2\xda\x10\xd9\x0e\xd8\xbf\xd6k\xd6\x88\xd7\xd2\xd8\xbb\xd9}\xdb\x81\xde\xf1\xe1\xcf\xe4\xc5\xe7~\xea\x86\xedz\xf0>\xf4!\xf8\xd8\xfa\x06\xfc?\xfd\xe9\xfe\xb6\x01\xee\x04\xb0\x06\x7f\x05,\x03M\x03\xb2\x05g\x07\xbf\x06\x05\x04\xe0\x00(\xffW\x00\xf4\x01\x15\x00\xfc\xfb\xda\xf8\x07\xf9\x07\xf9x\xf8\xf8\xf7L\xf6\xb8\xf3l\xf26\xf4\xb1\xf4\x7f\xf2\x17\xf1\x9f\xf2\x10\xf3\x1d\xf2\xc8\xf1\x08\xf3?\xf3\xac\xf3&\xf6}\xf7\x1e\xf6:\xf5o\xf8M\xfcS\xfe\x12\xff\x8c\xfe\\\xfef\xfe\xa0\x02\xb5\t\xea\x0b\xc9\x087\x05@\x05\x0c\x0c`\x17U\x1c\x1d\x18\xab\x10*\x13\x96\x1f\x03*5-\x0b)3&\xd4&\xcf-X5\x837\xc61{*y)\x14+W+\x1a\'\xa9 \x96\x19h\x13H\x0f9\x0b\xcb\x05g\xff\x12\xf97\xf3\xe7\xed\xc8\xe9\x92\xe5k\xe2d\xe0\x9f\xddu\xdaV\xd7\x93\xd7\x95\xd9\xc7\xdb\'\xdc8\xdbM\xdb \xdeE\xe3I\xe8\xf5\xea\x1d\xeb\xbb\xebd\xef\xa8\xf5F\xfby\xfc\xb2\xfb\xff\xfb\xe8\xff\xa3\x04\x9d\x06\xd2\x05\x94\x04{\x04;\x05F\x06\x87\x06\t\x05\xfe\x01\x17\x00:\x00e\xff\x94\xfc2\xfa\xf6\xf8\xc2\xf7!\xf6J\xf4\x9c\xf3C\xf2\x81\xf0\n\xf1\xa7\xf1\xf2\xef\xac\xedl\xee,\xf1\x10\xf1\x98\xef\xaf\xef$\xf1\xbc\xf1\x80\xf3\x10\xf6\xfa\xf5\x15\xf5\xb3\xf7\xb0\xfb\xca\xfd\x8a\xfd5\xfe\xb0\x00\xa0\x01F\x04\x1b\x06\x0c\x07\x9a\x08)\x08D\x06\xe5\x05\xac\x0bb\x144\x15B\x0e\xc2\tr\x0e\xa3\x1b\x15&\x8f&\xb5\x1e\x83\x19\xd8 \xbb/\x8f8;5]+]&-+c3\xaa5\xd3.0#X\x1b\xef\x1a\x8a\x1c>\x19\x96\x0f+\x06\x19\xff&\xfa6\xf7\xea\xf3\xcd\xee\x89\xe7R\xe2,\xe0\xed\xdd\xd8\xdb\x98\xda\xdc\xda<\xda$\xd8*\xd8B\xda\xe2\xddg\xe0\xb9\xe2\xc3\xe3\x19\xe4\xce\xe6\t\xed)\xf3\xf6\xf4\xe1\xf3[\xf4\x11\xf8<\xfe\xad\x02.\x03\x0f\x00=\xfe\x0f\x01T\x06V\x086\x05\x84\x00T\xfe\x1e\x00\x90\x02\x0c\x02\xe1\xfd\'\xf9\xc0\xf7\x1c\xf9\xaa\xf9\\\xf7\x9f\xf4\xef\xf2\xbe\xf1\xa6\xf1V\xf21\xf2\x15\xf0\xb7\xee\x08\xf0\xc3\xf0O\xf0\xf8\xef"\xf17\xf2\xf8\xf2\x1b\xf4\xa7\xf5\xae\xf6\xcb\xf7\xe1\xfa\x0f\xfd\xfa\xfe,\x01\xf4\x02\xb6\x04\x1f\x06\t\x08:\n\xfd\x0c\xe5\x0e\x7f\x0f\x99\x0f!\x10\x95\x11\xb6\x14<\x1a6\x1e\x85\x1d+\x19\xbe\x1a\x03#h+f.\x8d*(\'\x99&\x91+\xaa2\x064\xe1,\x7f#\x98\x1f\xb0"=$\x1f \x01\x17N\x0c\xea\x05F\x03\xdd\x03:\x00P\xf7\x83\xed\xc4\xe7\x85\xe6\x1a\xe6\x10\xe5l\xe1\x87\xdc2\xd8\xd5\xd8(\xdc\xcd\xdd"\xdd\x06\xdd-\xde\x10\xdf\xf5\xe0\x0e\xe6N\xeaE\xec\xce\xec9\xee\xf5\xf0\x1b\xf5\x8e\xfa:\xfeC\xfe\xff\xfc8\xfe_\x02D\x06<\x08\xde\x06\xd4\x03,\x02\x1a\x04\xb0\x06\xf8\x05|\x02 \xff\xa4\xfd\x96\xfcN\xfc6\xfcx\xfa\xb2\xf6\xd5\xf3k\xf4\xe9\xf4\xa5\xf3+\xf2\xb9\xf1\xc2\xf0C\xefN\xefd\xf1t\xf1-\xf0\x82\xf0o\xf1n\xf1\xf1\xf1\xc8\xf4\x16\xf7\xa0\xf6#\xf7\xa1\xf9\x1e\xfc\xde\xfcQ\xff\xe7\x02\x9e\x03h\x03\x93\x05l\t\xce\x0b\xe8\x0b\x18\x0cf\r\x81\x10:\x16\xac\x17s\x15X\x15\x84\x19\x98 
|$\xd4%\x81#\x7f"L&\x7f-M1d-\x9b(\x84&Z(u*\x99(U#\xe9\x1aA\x15\x00\x14\xa2\x12\x07\x0e\xde\x05v\xfe\xda\xf7!\xf4\x1f\xf3\x9c\xf0\x10\xead\xe2\xc5\xdfP\xe0Y\xe0\x84\xdey\xdc\x9c\xda\x97\xda\xba\xdc\xff\xdf\xaf\xe1\xad\xe1t\xe2\x8b\xe5\xff\xe9[\xed\xb7\xee\x90\xf0q\xf3\xfe\xf6\xd8\xf9\x9f\xfb\x8e\xfca\xfe\x16\x01\xac\x02\xd1\x01\x14\x01\x02\x02\xe1\x02}\x02m\x01p\xff\x9d\xfc]\xfbs\xfck\xfc\xee\xf8\x03\xf5\xf1\xf3k\xf4:\xf4\x90\xf3~\xf2U\xf0j\xeeB\xef\xd4\xf1\xff\xf1A\xf0`\xefS\xf0\xf9\xf0<\xf2\x9b\xf4\x9b\xf5\x82\xf4.\xf5\xeb\xf7\xec\xfa\xf1\xfb\x14\xfd}\xff\xcb\xff\xfc\x00\xcf\x03\xc0\x06\xd1\x08\xc0\x06\x82\x06$\t\xc5\x0c@\x11\xa1\x0f=\x0c\xd8\x0b\xd0\x10\x93\x19\xd4\x1d\x11\x1b\xfc\x16v\x18}!\xe1+6.\xb3*\x07&\x88\'Y.75M5\xad-\xfd%\xfc#\xbb&$\'\xa1!\xf3\x17\x1b\x10\xa9\ne\x07\xcb\x04\xc6\xff\x99\xf7.\xef\x8a\xea\xa6\xe8\xe3\xe5\x83\xe23\xdf$\xdc\x16\xdae\xd8/\xd9\xb9\xda&\xdcc\xdc\xaa\xdc\x0f\xde\r\xe08\xe3;\xe8\xfa\xecV\xed\xcc\xecG\xef\xa3\xf4\x88\xfaI\xfd\xad\xfd\xc3\xfb\x83\xfc\xd2\x00[\x051\x06\xbc\x03#\x01g\x00(\x02F\x04^\x03[\xff\x18\xfc\xf6\xfb\x94\xfcu\xfb\x01\xf9\'\xf7\xfc\xf5\x01\xf5R\xf4\xfb\xf3\xa2\xf2\xe3\xf0\xec\xf0\xf2\xf1i\xf1\xb4\xef\x87\xef\x0f\xf1\xa0\xf1\xd7\xf1p\xf2w\xf3\xe6\xf3c\xf5\xfe\xf7\xa4\xf9N\xfa\x1f\xfc\x16\xff`\x01\x1b\x03\x12\x04\xc4\x05f\x07d\n\xd4\r\x99\x0e)\x0e\xcd\x0c3\x0f\x15\x16B\x1b\x95\x1b\x84\x177\x16\xc3\x1b\x8b$\xa3*\xc2)\xaf$K"|\'\x9a/~3\xeb.\xaf\'V#\xfc$\xb5(\xf7\'5!\x0b\x17\x1a\x10\xf7\r<\x0eP\x0b\xb6\x03\x06\xfa\xd4\xf2\x0e\xf05\xef\xbf\xedG\xe9\x17\xe3\xd0\xdd\xa2\xdc\x83\xde\x89\xdf\x8a\xde\n\xddC\xdc\xa5\xdc\x84\xdet\xe2\x88\xe5*\xe7\xe6\xe7@\xe9\x90\xeb\x15\xef\xce\xf3\xf8\xf7\x8e\xf9<\xf9\x89\xf9\x8f\xfc\x06\x01\xbf\x04\xdf\x045\x02\x10\x00E\x01\x1d\x04/\x05"\x03r\xffh\xfc\xcd\xfaq\xfbo\xfc\x98\xfa\x06\xf65\xf2\x93\xf1[\xf2\x7f\xf2P\xf1z\xeff\xed\xa4\xec\x83\xed\x1f\xef\x85\xef\x07\xef\xaf\xeeF\xefh\xf0g\xf2\x9a\xf4H\xf5o\xf5Y\xf7\xb2\xf9\xb8\xfb\xea\xfc\xc2\xfe)\x01\xce\x01\x94\x03a\x07:\tR\t\xb0\t\xe9\n\x8d\rb\x11\xbc\x16?\x17v\x13\x92\x13\xa1\x19y"\xb5&\xb6%\x86"\xe4!\xbf&h/\x8b3\xa9/\xf6(\xdc%H)\xc6,\xc9+\x83%\xb5\x1c\xec\x16\xe6\x14\xc1\x14\x10\x11g\t\xe6\x00\xe7\xf9r\xf6\xdf\xf4W\xf2\\\xec\xde\xe5$\xe2\xa9\xe0\xde\xdf\xf3\xdeS\xdeT\xdcp\xdb\xe8\xdb\x0e\xdev\xe0\x13\xe2\xd1\xe3\x9c\xe5M\xe8@\xeb\xaf\xedB\xf1\xbb\xf4Y\xf7\x83\xf8\x0c\xfa\x92\xfc\\\xffD\x01i\x02\x00\x02+\x01\x13\x01\r\x02\x89\x02N\x01\xbc\xfe\x7f\xfc\xad\xfbE\xfb8\xfa0\xf8\xfe\xf5\n\xf4J\xf3E\xf3\xdc\xf2\xc4\xf1\x0f\xf0\x92\xef3\xf0X\xf1B\xf12\xf0\xdd\xef\xff\xf0\x8d\xf2\xb8\xf3/\xf4.\xf43\xf4\xa8\xf5!\xf8\xbe\xfaU\xfbd\xfbF\xfc\xae\xfd\x19\x00\x03\x03\xa7\x05\xf7\x06\xe3\x05\x90\x04W\x07\xfb\x0c6\x126\x13*\x10F\x0e<\x11Q\x19\xfb!\x1e$\x96 \x89\x1c\x88 \xb2)\xd30\xb82\x80-\xf9)o)9.;2\x1f0\x88)B"g\x1f\xbe\x1e\x82\x1c\x80\x176\x11.\nr\x03\'\xff\xfe\xfb\xe0\xf7\xe4\xf1j\xec\xb0\xe8\xa5\xe4\x86\xe1\x8b\xdf\xa7\xde\xd2\xdd`\xdb\'\xdac\xda\x01\xdc\x0e\xde\xfd\xdf\xb5\xe1\x8f\xe2\xc1\xe3\xcf\xe7\x8f\xec0\xf0s\xf1\x95\xf2\x0f\xf5\xaa\xf8a\xfcD\xff\xce\xff~\xff\xd2\xff\xb5\x01\x9e\x03=\x04\xf7\x02\xdc\x00\xb0\xffM\xff\xe7\xfe\x97\xfd\xa0\xfb|\xf9[\xf7\xf1\xf5\xd0\xf4|\xf3)\xf28\xf1\xfc\xef/\xefw\xee"\xee\xc4\xee\xed\xee\x01\xef\xd1\xee\x1f\xef\x0e\xf1\xae\xf2\xb5\xf3R\xf4\x9f\xf5\xc6\xf6\xc1\xf8Z\xfb\xe7\xfd\x7f\xfe \xffL\x01U\x04\xa2\x06\xd1\x06\xc9\x08\x15\x0b\xdf\r\x1f\x0fp\x0f\xdf\x11c\x13\xe0\x18\xf1\x1d\x14\x1e\x1a\x1c\xc5\x1c\xcd#w+\x9a-\\+\xd1(O(=,\xd11\x1d3a-\xb1$\x18";$\n%\x84 
5(Y.\xb03\xc97:939\xd37\xf84i0\xf4*\x93$\x92\x1c\xff\x13\x8c\x0b)\x03\x83\xfbW\xf5r\xf02\xec\xad\xe8/\xe77\xe7\xf9\xe7\xb1\xe9G\xec2\xef\xde\xf1\x14\xf5\xe8\xf8\xab\xfc\xbe\xff\x92\x02\x87\x04\x00\x06[\x06R\x06U\x05(\x03\x90\x00\xe6\xfc\xab\xf8\xfa\xf3\xc1\xef\xf7\xeb\x0e\xe8^\xe4\xd5\xe1\xbf\xdfN\xdd\xe8\xdb\xd4\xdb=\xdc\xfb\xdb\x10\xdc\xaa\xddl\xdf,\xe1\xe9\xe3\xce\xe7\x9d\xec!\xf0\xf0\xf3\x0c\xf9\x08\xfek\x022\x06}\x0c\xd3\x14\xe3\x1c\xbc%m2LBRP\xe8ZRd7m\x9ar\x80sxp\xaajca\xfaS\x19C\xc31o"\xc7\x13\xd3\x03*\xf4\x96\xe7\x87\xdd\xb3\xd3F\xcb\xfa\xc5?\xc2f\xbe*\xbb\x1a\xba\xc9\xbb\r\xc0A\xc6K\xcd\xe1\xd5\x01\xe1f\xed~\xf9\xee\x05\xd2\x12\xb4\x1d\xdd$b)2,\x83,\xad)d$\x11\x1d\xee\x13"\n\x9f\x00\xbd\xf6\xda\xec\xad\xe3\xca\xda\xc6\xd1\xb3\xc9\x8f\xc3\x8f\xbe\xdb\xbay\xb9\xd4\xb9Q\xbb\x91\xbf\x80\xc7\xba\xd0\xc8\xdaU\xe7T\xf5\xd9\x01\x90\r.\x1aM%\xed,?3k8\x16:\xf98}8n7\xb42\xbb,Q(p"\x06\x1a\\\x12{\r\xfb\x064\xfe\x0f\xf7C\xf2p\xedH\xe8\x16\xe6)\xe6\x80\xe6\x84\xe7\x94\xea<\xef\x96\xf4\xa9\xfay\x00\xe2\x04\xe8\x07m\n\x9d\x0b\x01\x0b)\t\xd1\x06h\x03e\xfei\xf9\x93\xf5\x16\xf2:\xeeb\xea8\xe7\x07\xe4\xc0\xe0\xe5\xdd^\xdb\xb8\xd8z\xd65\xd5|\xd4D\xd4\xa3\xd5R\xd9\x91\xdd\xc0\xe1\xe8\xe6.\xee\xb6\xf5V\xfb\x8b\x00o\x06\x98\x0b\x88\x0e\t\x12\xf2\x18\xb8!\\)\xc71\x04=SI\xf9S8]\x00fDk?k\x9dg\x02a\\WJJq;K+K\x1ap\n\x01\xfd\xa9\xf1\xe4\xe7\x17\xe0g\xda\xa9\xd5\xe0\xd1_\xcf\xae\xcd\x90\xcc\xfa\xcb\xe2\xcbK\xcd\xc8\xd0;\xd6P\xdd\xb6\xe5\xa4\xef\x1e\xfa@\x04\xe6\r\x84\x16V\x1d\xae!\t#\x1b!\xdf\x1c&\x17\x1e\x10.\x07\xbe\xfd\x94\xf5\x16\xee\xd0\xe5F\xde\x13\xd9\xa5\xd4Y\xcf\x80\xca\xc1\xc7\x9c\xc5\xf8\xc2\xe5\xc1\xb8\xc3\xe3\xc6\x05\xcb\xa2\xd1"\xdb\xd3\xe5,\xf1\x1d\xfe\x8d\x0b9\x17\xd4 \xef(\xd0.W2\x813\xc22Z0\xe5,.(\xfa"T\x1e\xe7\x19\x14\x15\x9b\x0f\x01\x0bG\x07"\x03\xc7\xfe\xc2\xfb\xa4\xf9"\xf7\xe3\xf4%\xf4\xc3\xf4\xf9\xf5\x17\xf8\xe2\xfa\x15\xfe\xc5\x00e\x03\xad\x050\x07\xce\x07G\x07i\x05\xa0\x02\x80\xff\x89\xfc\x15\xf9\x0b\xf5+\xf1e\xedi\xe9\xb8\xe5Y\xe3\x89\xe1\x0e\xdfM\xdc\xa9\xda\xd1\xd9\xe6\xd8\xd7\xd8\x10\xda{\xdc\x90\xde\xa6\xe1\x8c\xe7h\xee\x9f\xf4\x0e\xfa\xe4\xff\x88\x05\x82\x08\xbb\n;\x0eS\x11\x86\x12\xc6\x13%\x18X\x1e\xfb$\x17.\x849\x97C\xdcJ\x85Q\x9eW\x83Y\x9eV\xc4QtJ\xaf?H2\xfa%\xc4\x1aU\x0f\x9a\x04\xda\xfb3\xf5\xf1\xef\x00\xecA\xe9\xd8\xe65\xe4<\xe1~\xde\x91\xdc^\xdbq\xda\x92\xda\x85\xdcL\xe0j\xe5\x0e\xecQ\xf4v\xfc\x8d\x03\x94\t\xc7\x0eo\x12\\\x13J\x12d\x0f\x02\x0bF\x05\t\xff\xa9\xf9\xd8\xf4\xd2\xef\xd3\xea\xa3\xe6\x8f\xe3\xb5\xe0\xb0\xdd5\xdb\x17\xd99\xd63\xd3\xa6\xd1\xea\xd1:\xd2q\xd3u\xd7w\xdd#\xe4\xce\xeb\xaa\xf5\xe2\xff\x12\x08\x19\x0fW\x16\xcc\x1b\xa2\x1e\xb1 u"p"\xb7 
l\x1fk\x1e2\x1c[\x19j\x17G\x15\xe4\x11\x7f\x0e(\x0c~\t\xe8\x05\xa6\x02\x91\x00\x00\xff\xa6\xfdX\xfd\xf0\xfd\xa2\xfep\xff!\x00\r\x01\xf3\x01Q\x02\x85\x01\xed\xffk\xfe\xea\xfc\xe7\xfa\xda\xf89\xf7\x92\xf5/\xf3\xb7\xf0%\xef\xf8\xed\x8b\xec\r\xeb}\xe90\xe8\xe1\xe6\x0b\xe6\x89\xe5L\xe5\xcd\xe5\xa4\xe6\xe3\xe7\xd0\xe9\xf4\xec\x1c\xf1#\xf5Z\xf9\x82\xfdo\x01\xec\x04\xcd\x07u\nP\x0c\x8c\ra\x0e\xbd\x0e\xb9\x0f\xd9\x11\xee\x14\x8b\x18\xdf\x1c\x8d!\xb2%\x8f)\xc6-~1\xf42Q2\x1d1\xec.++\x8a&\xae"\x98\x1e\x8a\x19z\x14\xac\x10\xb7\rB\n~\x06\xfa\x02q\xffV\xfb\xe1\xf6\xec\xf2\xc0\xef\xe0\xec\xd4\xe9\xba\xe7\x03\xe7(\xe7\x98\xe7\xc0\xe8R\xeb\x1c\xee)\xf0\x04\xf2\x81\xf4\xe8\xf6\xce\xf7\xd4\xf76\xf8j\xf8q\xf7\t\xf6}\xf5\x84\xf5\x84\xf4\xfa\xf2)\xf2\xb4\xf1N\xf0\x96\xee\x9f\xed\xc1\xec\xdf\xea\xb6\xe8\xfc\xe7\xf9\xe7C\xe7.\xe7\xee\xe8\xab\xeb\x18\xee\xfa\xf0\x84\xf5\t\xfa[\xfd7\x00r\x03\x0c\x06I\x07y\x080\n\x96\x0bK\x0c}\rX\x0f\xea\x10\xe9\x11\xec\x12\xb9\x13\x86\x13\x91\x12s\x11\xfb\x0f\xa1\r\xb7\n"\x08\xc6\x05\x9b\x03\xeb\x01\xba\x00\xfd\xffz\xffp\xff\xb6\xff\xf4\xff!\x00\xf3\xff^\xffy\xfek\xfd\x93\xfc\xcd\xfb\xdd\xfa\x14\xfa\x96\xf9L\xf9\xd1\xf8i\xf8Y\xf8\xf8\xf7\x9f\xf6\xd6\xf4\x98\xf3Z\xf2w\xf0\xd8\xee_\xee\xae\xee\x18\xefZ\xf0\xeb\xf2\xd7\xf5\x01\xf8\xba\xf9\x91\xfb&\xfd\xdf\xfd<\xfe\xe5\xfe\x00\x00\xfd\x00G\x02\x01\x04s\x06B\t\x87\x0b\xb8\r\x00\x10*\x12\xbf\x13~\x14C\x158\x16\xf7\x16Q\x17,\x18\xd5\x194\x1b\xcb\x1bS\x1c"\x1d8\x1d\x10\x1cv\x1a\xd0\x18Z\x16\x1c\x13u\x10}\x0ex\x0cq\n\x01\t\xeb\x07z\x06\xe2\x04\xa3\x03#\x02\xfb\xff\xc1\xfd\xfe\xfb\'\xfa\x0f\xf8@\xf6\x98\xf4\x0c\xf3\xa5\xf1\x88\xf0\xf2\xef\x89\xefA\xef\x14\xef\x11\xef\xfd\xee\xb6\xeeD\xee\xb9\xed\x0b\xedh\xec\xd0\xebO\xeb\xe5\xea\xb9\xea\xc5\xea\xd6\xea\xf7\xeav\xebX\xec4\xed\x16\xeed\xef3\xf1\x02\xf3\xba\xf4\r\xf7\xa3\xf9\xfe\xfb\xf1\xfd\xc9\xff\xe0\x01\xa7\x03\xfe\x044\x06s\x07n\x08\x14\t\xbe\t{\n"\x0bx\x0b\xcb\x0bM\x0c\xa1\x0cg\x0c\n\x0c\xea\x0b\x93\x0b~\nk\t\xcd\x08U\x08u\x07\xae\x06\x9d\x06\x8d\x06\x1d\x06\x94\x05\x8c\x05n\x05\xa8\x04\xb0\x03\xef\x02.\x02\xd6\x00\x98\xff\xc3\xfe\x08\xfe\x05\xfd\xf9\xfbV\xfb\xb3\xfa\xb1\xf9\x89\xf8\x90\xf7\x98\xf6:\xf5\x07\xf4;\xf3\x9d\xf2\x14\xf2\xbb\xf1\x01\xf2z\xf2\xd8\xf2]\xf3\x19\xf4\xda\xf4z\xf5S\xf6u\xf7\x8a\xf8\x8b\xf9\xcd\xfaE\xfc\xb1\xfd\xff\xfeL\x00\x86\x01\xb0\x02\xcc\x03,\x05\x8b\x06\xc4\x07\xd5\x08\xe6\t\xdf\n\x9e\x0bF\x0c\xeb\x0cH\rL\rX\r\x94\r\xc5\r\xdd\r\x03\x0e1\x0e,\x0e\xff\r\xe4\r\xaa\r;\r\xa5\x0c\x0f\x0cy\x0b\xb8\n\xf1\t\\\t\xae\x08\xef\x07#\x07N\x06c\x05E\x04T\x03h\x02b\x01[\x00b\xff\x83\xfe\x94\xfd\x9d\xfc\xce\xfb\xed\xfa\xc6\xf9f\xf8\xfd\xf6\xa9\xf5Z\xf4\x08\xf3\xea\xf1\x11\xf1{\xf0\x0e\xf0\xf2\xef2\xf0\xa7\xf0\x14\xf1~\xf1\x02\xf2\xa1\xf2<\xf3\xd4\xf3\x9b\xf4\x89\xf5\x98\xf6\xc4\xf7\x1d\xf9\x9f\xfa\x08\xfcU\xfd\x95\xfe\xd4\xff\xec\x00\xd4\x01\x8f\x024\x03\xf2\x03\xc3\x04p\x05\x17\x06\xcd\x06\xa0\x07O\x08\xc4\x08.\t\x88\t\xa8\t\x7f\tR\t/\t\x07\t\xad\x08k\x08:\x08\xde\x07q\x07\x11\x07\xb4\x06\x13\x064\x05a\x04{\x03m\x02K\x01c\x00\x93\xff{\xfe<\xfd+\xfc2\xfb%\xfa\x00\xf9\n\xf87\xf7P\xf6\x98\xf5d\xf5\x82\xf5\x9e\xf5\xdb\xf5|\xf6,\xf7\x89\xf7\xd0\xf7.\xf8\xa4\xf8\xb3\xf8\xbe\xf8U\xf9\x1b\xfa\xd3\xfa\xaf\xfb\xf5\xfcE\xfe\x1e\xff\xb5\xffP\x00\xc0\x00\xc7\x00\xa9\x00\xb7\x00\x03\x01+\x01m\x01+\x02A\x03=\x04 
\x05?\x06J\x07\x01\x08\x92\x08\x0e\tz\t\xba\t\xe1\t@\n\xb5\n\x04\x0bK\x0b\x8c\x0b\x8b\x0bS\x0b\x02\x0b\x8e\n\xf4\tC\t\x91\x08\xfa\x07;\x07q\x06\xc7\x05"\x05N\x04K\x03>\x02\x14\x01\xd5\xff\x92\xfe`\xfdF\xfc.\xfb)\xfah\xf9\xb8\xf8\x0f\xf8x\xf7\xf8\xf6o\xf6\xe3\xf5j\xf5\r\xf5\xc7\xf4\x9d\xf4\xa9\xf4\xfe\xf4i\xf5\xea\xf5\x85\xf60\xf7\xdf\xf7\x86\xf8.\xf9\xc7\xf9e\xfa\x0c\xfb\xbb\xfbr\xfc\x18\xfd\xcb\xfd\x8c\xfeX\xff\x0c\x00\xb3\x00m\x01&\x02\xea\x02\xbb\x03\x88\x04O\x05\xef\x05_\x06\xb8\x06\x02\x07"\x07 \x07\x19\x07\x13\x07\x1b\x07\x0e\x07\x15\x078\x07I\x07D\x075\x07!\x07\xf9\x06\x9e\x063\x06\xcc\x059\x05\x93\x04\xdf\x03$\x03Z\x02c\x01x\x00\x88\xff\x93\xfe\x89\xfdw\xfc\x7f\xfb\x86\xfa\xa1\xf9\xce\xf8\x1c\xf8\x98\xf7D\xf7$\xf7\'\xf7L\xf7\x9a\xf7\r\xf8\xa0\xf8*\xf9\xb0\xf9&\xfa\x8b\xfa\xdd\xfa8\xfb\x93\xfb\xef\xfbH\xfc\x90\xfc\xd8\xfc\x13\xfdN\xfd\x8c\xfd\xcc\xfd\n\xfeQ\xfe\xa0\xfe\xfd\xfe^\xff\xdd\xff\x94\x00P\x01\x02\x02\xba\x02{\x03,\x04\xd0\x04x\x05\x1d\x06\xa5\x06\xfa\x06D\x07\x86\x07\xa3\x07\xbb\x07\xcb\x07\xc9\x07\xb4\x07z\x07P\x073\x07\xfd\x06\xa3\x06O\x06\xe5\x05^\x05\xd8\x04S\x04\xba\x03\x1f\x03o\x02\xa8\x01\xea\x00\x15\x00S\xff\x97\xfe\xe0\xfd6\xfd\x9a\xfc\r\xfc\x8e\xfb\x1a\xfb\xb9\xfal\xfa\x1f\xfa\xd7\xf9\x97\xf9Z\xf9\x1c\xf9\xe0\xf8\xc1\xf8\xa2\xf8\x8b\xf8~\xf8\x91\xf8\xb8\xf8\xdd\xf8&\xf9}\xf9\xd8\xf9:\xfa\xa6\xfa\x1c\xfb\x99\xfb\x16\xfc\xa9\xfcP\xfd\xfe\xfd\xcb\xfe\xb3\xff\xa5\x00\xa0\x01\x93\x02p\x03=\x04\xed\x04|\x05\xe2\x05(\x06`\x06\x87\x06\xa8\x06\xc1\x06\xd5\x06\xd7\x06\xde\x06\xf4\x06\xec\x06\xcd\x06\xa0\x06m\x06+\x06\xd1\x05\x87\x05=\x05\xdf\x04f\x04\xed\x03\x8c\x03\x16\x03z\x02\xdc\x01,\x01t\x00\xa2\xff\xca\xfe\xfc\xfd/\xfdc\xfc\xae\xfb\x11\xfb\x83\xfa\r\xfa\xae\xf9d\xf9&\xf9\x05\xf9\xf4\xf8\xf8\xf8\x05\xf9\x10\xf97\xf9r\xf9\xb8\xf9\x04\xfaI\xfa\xa9\xfa\x11\xfb\x81\xfb\xfd\xfbu\xfc\x01\xfd\x8a\xfd\x17\xfe\x99\xfe\x15\xff\x98\xff\x0c\x00{\x00\xe9\x00a\x01\xd1\x010\x02\x94\x02\x07\x03l\x03\xb9\x03\x0e\x04z\x04\xda\x04/\x05\x8d\x05\xec\x05D\x06\x7f\x06\xb3\x06\xe4\x06\xfe\x06\xf7\x06\xe4\x06\xc7\x06\xa4\x06|\x06U\x06"\x06\xf4\x05\xc1\x05\x86\x057\x05\xd4\x04V\x04\xcd\x033\x03u\x02\xb3\x01\xe3\x00\r\x00.\xffU\xfe\x84\xfd\xbb\xfc\xeb\xfb&\xfb\x86\xfa\xe9\xf9X\xf9\xd7\xf8[\xf8\xfb\xf7\x98\xf7V\xf7,\xf7\x00\xf7\xf2\xf6\x10\xf7=\xf7\x88\xf7\xea\xf7u\xf8)\xf9\xdb\xf9\x8b\xfaD\xfb\xfe\xfb\xba\xfc`\xfd\xf5\xfd\x9c\xfeR\xff\x03\x00\x96\x00c\x01U\x02)\x03\xf8\x03\xc3\x04e\x05\xc1\x05\x01\x06\x1a\x06\'\x06#\x06\x03\x06\xe8\x05\xf8\x05\xf8\x05\xe2\x05\xd9\x05\xc4\x05\xab\x05q\x054\x05\xe7\x04~\x04\xe7\x03e\x03\xf4\x02y\x02\x04\x02\xa6\x01F\x01\xe5\x00\x81\x00\x12\x00\xb3\xff/\xff\x9b\xfe\x01\xfee\xfd\xda\xfcS\xfc\xdb\xfb\x80\xfb.\xfb\xfc\xfa\xe3\xfa\xc5\xfa\xa8\xfa\x94\xfa\x87\xfax\xfa_\xfaT\xfaU\xfaU\xfar\xfa\xbc\xfa\x1a\xfb\x8c\xfb\xfc\xfb\x82\xfc\xfa\xfcl\xfd\xdb\xfdD\xfe\xac\xfe\xff\xfe]\xff\xc4\xff+\x00\x9b\x00\n\x01~\x01\xe2\x01I\x02\x8d\x02\xba\x02\xed\x02\x18\x03T\x03\x86\x03\xbe\x03\xea\x03\x10\x049\x04\\\x04m\x04a\x04e\x04_\x04S\x04i\x04\x8e\x04\xd0\x04\xf9\x04/\x05n\x05\x84\x05\x83\x05g\x054\x05\xe6\x04k\x04\xdf\x03O\x03\xad\x02\xf9\x015\x01~\x00\xaf\xff\xca\xfe\xea\xfd\x15\xfdG\xfc\x84\xfb\xdd\xfa@\xfa\xc5\xf9a\xf9\x1d\xf9\xf0\xf8\xd4\xf8\xe2\xf8\x03\xf9C\xf9\x92\xf9\xe1\xf9d\xfa\xe4\xfaj\xfb\xee\xfbf\xfc\xe1\xfc6\xfd\x97\xfd\xf3\xfdQ\xfe\xc0\xfe9\xff\xb6\xff>\x00\xe9\x00\x8f\x017\x02\xd0\x02L\x03\xbe\x03\xf7\x03 
\x04A\x045\x049\x041\x04&\x04\x1c\x04\x04\x04\xde\x03\xc7\x03\x95\x03I\x03\xfd\x02\xa7\x02\\\x02\xf6\x01\xa3\x01y\x01J\x01\x0e\x01\xd8\x00\xd3\x00\x01\x01\xe9\x00\xc0\x00\xb8\x00\xb3\x00\x93\x00M\x00&\x00$\x00\xf0\xff\x9b\xffW\xff\x10\xff\xac\xfe\x14\xfe\x9f\xfdh\xfd\x18\xfd\xac\xfcd\xfcD\xfc\x17\xfc\xe7\xfb\xb9\xfb\x8d\xfb\x99\xfb\xb3\xfb\xc0\xfb\xfd\xfb\x17\xfc\xea\xfb\xc3\xfb\x99\xfb\xbd\xfb%\xfc\xaa\xfc\x16\xfd\x19\xfdh\xfd\x0e\xfe\xa2\xfec\xff\xc9\xff~\x00\x02\x01\x02\x02\xc3\x02\xb7\x02"\x02\x1b\x02\t\x038\x05"\rs\x1a\xb6\x1f\xd3\x13\xf8\x03\xf8\xfd\xba\xfb~\xf5a\xf50\x00c\x0c\xda\x0c\xec\x06\x02\x04\xcd\x01\xcb\xfa6\xf1\x99\xf03\xf8V\xff\xe0\x01H\x06g\x0b\xf0\t\xe8\x01b\xfbU\xf9\xd6\xf86\xf9\xc8\xfc\x03\x02|\x05\x9f\x06\xf4\x03l\xff\xbd\xf9\x05\xf7\xba\xf4\x14\xf7]\xfc\x97\x02\x19\x04\x01\x01&\xff\xbd\xfb\xc0\xf8A\xf6[\xf9\xaf\xfd\x83\x01b\x02\xe3\x01\x80\x00&\xfd\x83\xfa\x9b\xf96\xfb\x8b\xfd\xa6\x01\x1b\x06\xd9\x06\xdc\x02\x80\xfe\x1a\xfdM\xfc\xe5\xfb3\xff\xf0\x04>\x08b\x06\x9d\x03\xd0\x02\xfd\x00\xb1\xfd\'\xfc\x85\x00\xec\x05t\x07C\x06\x87\x078\tJ\x04\xb7\xfd\xcf\xfd\xd7\x02\xeb\x04\x88\x03\xba\x03\x84\x05\xad\x01\x84\xfc.\xfa\x1b\xfc\xd1\xfef\xfe\xed\xfew\xfe\x9c\xff9\x01B\x01\xd3\xfd\xc5\xf9\x96\xfb]\xfd=\xfd\x17\xfc&\xfe\xbb\xfe\x86\xfb\xa5\xfb\x89\x00\xf5\x02\xbe\xfe\xa2\xfcX\x00\x81\x02\x13\xfe\xae\xfa\xd5\xfe]\x05\xa6\x04S\xfd\xa1\xf8.\xf9\xee\xfb\xd7\xfbg\xfc\x0e\x01F\x02\x96\xff\xf1\xfa_\xfb\x17\xfeX\xff\x1f\x00;\x00c\x04\x97\x02\x8e\xff\x8b\xfc\x1e\x00\x87\x01\xd5\xfa\xe4\xf7\xa4\xfdQ\t3\t\xad\x04S\x02\xf0\x03\xed\x00\x99\xfc6\xfe{\x04\x8a\t\xf1\t\xb9\x07>\x06N\x05\xac\x04\xea\x03\x8d\x01H\x01\x19\x03\x86\x04x\x05\xfc\x07\x1b\x07\xc4\x02Y\xfcq\xfa\x00\xfd\'\x02Q\x05Q\x02,\xff{\xfa\xd8\xf5G\xf5\xcf\xfb&\xff\x98\xf7\x18\xf1s\xf8R\x039\x01\x98\xfa\xf0\xfd7\x01H\xfa\xc2\xf3\xa7\xfb6\x06\n\x03o\x00\x90\x046\x05\xb7\xf9\xf5\xf5\x9c\x02\x91\n\xce\x06,\x01\x1e\x042\x01E\xfb\x8f\xfe:\x07:\x07d\xf9\xe4\xf6Q\xfc\xc7\xffo\xfdE\xfau\xfc\x9d\xf8N\xf5\xd5\xf9*\x00\x80\xffW\xfb\x0b\xfd\xe0\x00F\xfe\xb2\xfcN\x00F\x07T\x0b\x92\rt\x10\xc5\x0e\xfc\x08\xa1\x06\xe2\x0b\xac\x0c\x92\t\xb7\t\x1c\tg\x05\xed\x03\xe2\x00\xd9\xf9\xc0\xf4\xff\xf1\x05\xf5\x92\xf5\xc7\xf4x\xf7\x9a\xf5\x80\xf1u\xeez\xf0\xb3\xf2\'\xf5\x05\xfcV\x03^\x02\x00\xfc\xb8\xfdk\x02\x9c\x03\x07\x05a\nc\r\xd9\x0bD\t\xeb\tV\nu\x08\x94\x08\x8c\x08A\x04U\x00L\x01d\x01\x88\xfd\x87\xf8\xd1\xf9\xc9\xf8i\xf5\x94\xf4;\xf7;\xf9\xe0\xf5\x17\xf5\xce\xf8\x91\xfd6\xfe\xe8\x00^\x05A\x05\xc4\x01\x9b\x01T\x07/\x0b\xd2\x08\x14\t\x88\r,\r\x1a\x07\xec\x04\n\x08\xac\x04\x03\x00m\x03O\tr\x05\xbc\xfc,\xfd\xf8\xfc\x8a\xf9S\xfa\xbe\xfe\x8e\xff|\xfba\xf9\xa9\xf7?\xf6\x00\xf7\xcb\xf9>\xfd\xc5\xfc\xea\xf9\x19\xfb\x17\xfc\x9c\xfd\xa7\xfc\n\xfc*\xff\xe6\x006\x02\xa4\x04\xdf\x05\xe3\x00\x85\xfc\xcb\x00Z\x04\xa4\x03\xd6\x03\x95\x06s\x04\xeb\xfc#\xfd\x8a\x02.\x00\x10\xfe\x00\x01D\x03\xcd\x01U\xfe\xf8\xff\xd2\xfe\x81\xfc]\xfe7\x02\xbd\xfe(\xfa,\x00\xe8\x04(\x04\xd1\x00(\x00M\x02\xf2\xfe(\xfb\xac\xfe\x17\x02U\x00|\xff\x1d\x05\xe9\x05\x17\xff;\xfb\xf0\xfd\xc6\x019\x01\xa2\x02"\x05~\x02\x88\xfe\xe3\xfc\xba\x00\x08\x02\x90\x01\xea\x01\xaa\x00)\xff=\xffR\x003\xfe\x12\xfd\xc2\xfe#\x01\x0f\x00|\xff\xe7\x00J\xff\xcc\xfa\x9c\xfa\xcf\xff\xd7\xffM\xfc\x90\xfd\xf9\x00@\xfe\xf2\xf9\xbb\xfb\xd8\xfe\xd0\xfeJ\xfc\x89\xfeP\x02(\xffk\xfd\xf7\xfe/\x03\x1b\x02y\xfe\xbe\x008\x04W\x03&\xff\xfa\xff\xb1\x00;\xff\xae\x00f\x05\x01\x06\xdc\x02\x16\x02\xa6\xffA\xfb\x9a\xfd\xab\x06\xad\x08\xca\x02-\xff\xb4\xfeF\xfc\xd7\xfa\xc3\xff\xf3\x04\xa5\x02e\xfd4\xfe\x0e\x01\x80\xff\x90\xfd6\xfd\xbb\xfc\x
0b\xfdF\x01g\x04\xda\x00\x08\xfcA\xfd\xfb\xff[\xfeO\x00\x19\x05b\x03\x89\xfb_\xfb\xc6\x02\xb8\x00\xd2\xfc\xb6\xfdL\x04;\x03\xc2\xfcE\xff\xf7\x00\xaa\xff\x05\xff\xe6\x01W\x04\xc6\x00\x81\x01a\x01\xd2\xfe\xf9\xff\xda\x02$\x03d\xfe\x10\xff@\x016\x00\xbb\xfdr\xfc\xad\xff8\x00\x82\x00X\x00\xef\xff\xa6\xfd?\xfd\x8c\x00\xd6\x00x\x01\xfd\x00\xd8\x02\xc3\x02\xe9\xfez\x01]\x03\x81\x01\x0b\x00R\x03l\x06D\x01\x14\xfe\xce\xffV\x00\x19\x00\xd7\x00I\x04\xa2\x01Y\xfc\xcb\xfa\xf1\xfdQ\x02\x85\x00b\xffy\xfe\x07\xff\xa8\xff%\xff\xfb\xfdG\xfdr\xfdU\xfd\xbb\xfe0\x00^\x00L\xfd\xd6\xf9Z\xf9\xf1\xfb|\xfeF\x00n\xff\xa7\xfe\xe5\xfa%\xf8\xeb\xf9\xd1\xfd\x80\x00x\xff\xf0\xfe\x8a\xfd\xa1\xfcS\xf9\x9b\xf8\xcb\xfc\x00\x00\xba\xffr\xfd^\xfdO\xfd\xba\xfb\xe0\xfa\xf4\xfcu\xff\xc9\xfe\x1a\xfdM\xffk\x02\xf7\x00\x1d\xff#\xfe\xa6\xfe\xcf\xfe\xd7\xff2\x04*\x06O\x03\xb9\xffn\x00\xd3\x03\xb1\x06\xdb\x05\xd2\x05\xc5\x054\x06[\x07\xc6\x08\x89\t\xe9\x07"\x06\x8c\x05-\x07t\x07\n\tR\x05\xe4\x00\x08\x01\xc5\x03\x94\x07\xbc\x05w\x05\xb2\x05\x8a\x04\xbb\x04v\x08\xa0\x0f\xf1\x10%\r\x0c\r\xdf\x0f\xf6\x11\x1d\x11H\x11\x9e\x12\x84\x10\xd8\x0c\x17\x0cv\x0e)\x0c\xb0\x06t\x02)\x01\'\x00\x03\xfd\xd7\xfb\xfc\xfa\x1d\xf7L\xf2\xe5\xef\x99\xef\x8b\xee0\xec\xa0\xebP\xeb-\xea\xac\xe9\xf2\xe9\x99\xea\x0f\xea\xf6\xe9H\xebe\xed\xb1\xef\xbf\xf1\xe7\xf2P\xf3]\xf3\x85\xf4\x03\xf6q\xf7\x0f\xf9\xdc\xf9w\xf9\xb7\xf8C\xf8\xa5\xf7\xda\xf6k\xf6\xbd\xf5B\xf5\xe5\xf4u\xf3\xa4\xf2X\xf1\x1a\xf0\x9d\xef\x8c\xef(\xf0\xdf\xf0]\xf1u\xf11\xf2\xd3\xf2\x00\xf4\\\xf6\xf4\xf8"\xfa\x9a\xfb\xda\xfc\xd6\xff5\x02\xa4\x04\xe3\t\x0c\n\x98\t\x10\x0b.\x0e\x14\x12\x8b\x12\x87\x14\x9f\x14D\x12\xc2\x10\xdb\x10\x08\x16\xdd\x1c\xa0%f&\x8e\x1e^\x1b\x1c#U/\x020e)\x89\'\xc4)\xd8-)1(4\xd1/\xa9#\xa7\x19k\x17~\x1aZ\x1a4\x16}\r\xd7\x03\x94\xfc\xce\xf8\xfb\xf5Z\xf1*\xec\xc9\xe6\x18\xe3\xc5\xe2\x05\xe4X\xe3\xb7\xdf\xe0\xdc\r\xdeT\xe0{\xe3\xd7\xe5\x85\xe7\xe8\xe7\x08\xe8\xff\xea,\xf1&\xf6\xd2\xf6}\xf3\x0f\xf1\xc5\xf5L\xfe=\x01\xdd\xffw\xfc]\xf9O\xf9\x93\xf9\xe0\xfd\x97\x009\xfd\x9f\xf5\xdb\xf2)\xf5\x0f\xf73\xf7?\xf4\x19\xf2m\xef\x13\xefO\xf2|\xf5>\xf5\xdd\xf1b\xef\xbb\xef\xb8\xf3%\xf8\x8c\xfa\xeb\xf9\xac\xf6\x12\xf5v\xf50\xf7\xec\xf9\x07\xfb}\xfa\x04\xf9\x94\xf8Z\xf8\xf3\xf8\xbb\xf8Z\xf8d\xf81\xf9\xa1\xfb+\xfdL\xfd\x06\xfcp\xfa\xa2\xfa(\xfd\x88\x00\x99\x033\x037\x02y\x01\xec\x02\\\x06\xa1\x08\x13\n\xc6\x0b1\r\xf4\r\xbf\x0e\xe6\x0e\xe4\rB\x0f\xf0\x16\xc5!>\'\xdb#\xf0\x1f\xec\x1eU"\x9f\'\xfe+R/\xb8.f,\x13+\x06*\n(i#q\x1d\xe7\x18\x15\x16b\x16\xd4\x15\x18\x11l\x08\x8c\xff\xbb\xf9\xc5\xf6\xc3\xf44\xf2\x17\xf0c\xec\x10\xe8b\xe6\xa9\xe6\xb4\xe7\n\xe6\xe9\xe2\xaa\xe1\xef\xe2h\xe6K\xea\xfa\xec\xd6\xed%\xed\xf8\xec\xc3\xefK\xf3\x7f\xf4T\xf44\xf4\x8a\xf6S\xf9\x8c\xfaU\xfa\xc9\xf8\x10\xf7\x87\xf5*\xf6\xdf\xf8\r\xfa\xbc\xf9;\xf8d\xf7\xed\xf6\x01\xf6\x8d\xf53\xf5\xf2\xf5\x08\xf7\xab\xf7\xcf\xf7\x80\xf7s\xf7\x9f\xf7\xea\xf7\r\xf8\xfd\xf8.\xfa;\xfb-\xfc\x87\xfc\xe4\xfcd\xfd\x12\xfe.\xff;\x00\xe7\x00d\x01\xde\x01\xf8\x01q\x02\xe9\x02\x18\x03-\x03\x9e\x02j\x02\xef\x02\xee\x02@\x02\xae\x01\xe2\x01h\x025\x02\xb4\x01\xc3\x01\x16\x02\xce\x01\xf8\x00"\x01\xa5\x01\xdd\x01i\x01\x84\x00\xe0\xff\xbb\xff\xb1\xffR\xff\xad\xff\xea\xff\x91\xff\x85\xff\x15\x00\x88\x01>\x03\xe5\x04O\x06\xae\x07P\t\x01\x0bt\r\xbc\x10}\x13\xd3\x15\xc3\x16\xab\x17\xeb\x18\xec\x19A\x1b\xb8\x1b\xe7\x1bX\x1b\'\x19\xb5\x17u\x16g\x14P\x126\x0f7\x0c\xa5\t\xab\x06+\x04\xa8\x01:\xff=\xfc\xc6\xf9\xcd\xf7\x18\xf6\xe2\xf4B\xf3\xf9\xf1\xdf\xf0\x8f\xef\xd4\xee\x99\xee\xee\xee1\xef\x8d\xeef\xee\xed\xee\x9f\xef~\xf0B\xf1B\xf2\x1d\xf3\'\xf3Q\xf3q\xf4\xb0\xf5\xe2\xf6\
xe2\xf7\x9f\xf8Q\xf9\x13\xfa\xbb\xfa\xbb\xfb5\xfc8\xfc\xe8\xfc\xad\xfd\'\xfez\xfeD\xfe\xe7\xfdv\xfd\xcf\xfc\xd6\xfc\x05\xfd\x92\xfc/\xfc\xde\xfb\x1a\xfb\xc4\xfa\xd1\xfa\x84\xfar\xfa\x06\xfa\x8d\xf9\xb1\xf9\x91\xf9M\xf9\x83\xf9g\xf9,\xf9H\xf9G\xf96\xf9F\xf9\xe2\xf8\xd0\xf8+\xf9\x89\xf9\xfe\xf9`\xfa\xa0\xfa\xee\xfa\xfe\xfa\xfa\xfa\x9e\xfb\x94\xfc\x00\xfd\x0e\xfd;\xfd`\xfd\x8e\xfd\x01\xfeG\xfe\xbc\xfe<\xff\x08\x00r\x01\xf2\x02\x01\x05E\x08\x07\x0c\xdd\x0e\x80\x11\xdc\x14\xbc\x18y\x1cL\x1f\n"N%\xc6\'p(\x80(\xe0(\x93(\x80&\xd5#\xec!\xb9\x1f\xb8\x1b"\x17\x8e\x13$\x10\xa3\x0b\xc3\x061\x03\x08\x01\x1a\xfes\xfa\xd9\xf7=\xf6q\xf4l\xf2.\xf1+\xf1\xa9\xf0$\xefm\xee!\xef\xa1\xef\x8a\xef]\xef\x8d\xef\xc7\xefm\xef"\xef\xba\xef\'\xf0\x08\xf0\xd5\xef=\xf0\r\xf1`\xf1\xd3\xf1z\xf2$\xf3\xc6\xf3\x1a\xf4\xf2\xf4\x10\xf6\x00\xf7\xcc\xf7n\xf8\xd3\xf8Q\xf9\xef\xf9\x81\xfa\xfd\xfa5\xfb_\xfb\xb6\xfb\xf3\xfb\xe2\xfb\xce\xfb\xd2\xfb\x8b\xfb\x8a\xfb\xcf\xfb8\xfc\xa2\xfc\xbe\xfc\xe4\xfc\x1c\xfdV\xfd\x80\xfd\xb6\xfd\x14\xfe[\xfe\x80\xfek\xfeR\xfej\xfet\xfey\xfe\x81\xfep\xfe^\xfe,\xfe\x0f\xfeG\xfe\xa2\xfe\xbc\xfe\xcc\xfe\xaf\xfe\x93\xfe\x9d\xfeR\xfe8\xfee\xfe~\xfe\xc9\xfe\x14\xff&\xff\x1b\xff\xe4\xfe\x99\xfe\xfd\xfep\x00A\x03\x83\x06v\x08\x13\n\x87\x0c;\x0f\xe3\x11\x93\x14i\x18W\x1c\x0f\x1e\xbe\x1e< \x0b"\t#\xed!\xaf v |\x1e\x0e\x1bY\x18G\x16\xf6\x13\x83\x0f\xb2\nt\x08\x90\x06\xc8\x02\x11\xff\xd1\xfc\xbb\xfb\xaa\xf9W\xf7\xd8\xf6\x1e\xf7\xd0\xf5H\xf3\x97\xf2s\xf3\xc8\xf3_\xf3A\xf3\x07\xf4\xbf\xf3j\xf2\x1a\xf2i\xf2]\xf2\x97\xf11\xf1\xf7\xf1,\xf2\xd4\xf1\xc0\xf1\x1c\xf2\'\xf2\x0c\xf2\x97\xf2\xa5\xf3G\xf4q\xf4\xd0\xf4\x7f\xf5%\xf6\xb9\xf6\x08\xf7C\xf7}\xf7\x96\xf7\xe5\xf7M\xf8\xa4\xf8\xfa\xf8\xe9\xf8\xb8\xf8\xd9\xf8\'\xf9Z\xf9\x8f\xf9\x16\xfa\xb2\xfa\x13\xfb\x9d\xfb\x87\xfc[\xfd\xb1\xfd\xed\xfd\x87\xfei\xff\x05\x00R\x00\xbd\x00\x12\x01 
\x01+\x01\x80\x01\xc1\x01u\x01\xe8\x00l\x00\xc2\x00\x14\x01\x06\x01\x01\x01\xa9\x00\x0b\x00\xa0\xff\xb7\xff\x04\x00\x1d\x00\xb0\xffA\xffA\xff\xea\xfe\xc6\xfe\xe2\xff\xe5\x00\xcc\x00\x8a\x00%\x01\x04\x03\x8a\x05/\x08S\x0b\x01\x0e\x0c\x0fG\x10\x0b\x13|\x16"\x19\xf7\x1a\xaf\x1c\x19\x1e\xe4\x1d\x1e\x1d\x80\x1d\x1f\x1e\xee\x1c\xfb\x19:\x17I\x15\xb3\x12\x86\x0f\xb1\x0c\n\n\x84\x06S\x028\xff\xba\xfd>\xfc\xd8\xf9I\xf7\xd4\xf5\xf4\xf4-\xf4\x99\xf3\xa3\xf3\xc7\xf3\xfc\xf2\x11\xf2k\xf2\xb9\xf3\xdb\xf4\x15\xf5,\xf5\xf2\xf56\xf6\xbb\xf5\x82\xf5\xe5\xf5j\xf6\x02\xf6V\xf5\xce\xf5Q\xf6\xc4\xf5\xaa\xf4J\xf4\xcc\xf4\xdb\xf4r\xf4\xac\xf4\x85\xf5\x7f\xf5\xb6\xf4\xa4\xf4q\xf5"\xf66\xf6\x85\xf6Q\xf7\xf1\xf7F\xf8\xb5\xf8\x90\xf9O\xfa\xb5\xfa\xe5\xfar\xfb1\xfc\xae\xfc4\xfd\xdb\xfd\x9c\xfe.\xffY\xff\x8d\xff\xc4\xff\x06\x00J\x00\x92\x00\x13\x01\x8a\x01\xa2\x01\x93\x01\x80\x01\xa1\x01\xa9\x01p\x01i\x01\x95\x01\xac\x01\xbe\x01\xd0\x01\xfd\x01\xda\x01c\x01\xf9\x00\xde\x00\xf1\x00\xee\x00\xf4\x00\xc9\x00V\x00\xf0\xff\xc5\xff\xc2\xff\xf4\xff\x0b\x00&\x00s\x00p\x00\x9e\x00\xd5\x01\xc3\x03\xc8\x05\xca\x07y\t\x11\x0b1\r\xc7\x0f\xa1\x12\xeb\x14\xf5\x15Y\x17\x0f\x19>\x1a<\x1b\x98\x1b}\x1bU\x1a\t\x18\x95\x16\xba\x156\x14\xa1\x11\x88\x0e\xbe\x0b\'\tt\x06N\x04\x81\x02I\x00|\xfdB\xfbL\xfa\xed\xf97\xf9\x10\xf8\x18\xf7\x81\xf6\xe2\xf5\x9e\xf5\xf3\xf5n\xf6S\xf6\x91\xf5l\xf5\x18\xf6q\xf6\x18\xf6\x86\xf5c\xf5<\xf5\xbd\xf4\x96\xf4\xe3\xf4\xbf\xf4\xec\xf3?\xf3U\xf3\xc6\xf3\xbc\xf3s\xf3u\xf3\x98\xf3\xdd\xf3L\xf4\xf8\xf4O\xf5V\xf5i\xf5\xc7\xf5\xa3\xf6\x81\xf7G\xf8\xd2\xf8\t\xf9Z\xf9\xd8\xf9{\xfa\xfb\xfa\x94\xfb\n\xfc=\xfc\x8f\xfc\t\xfd\x96\xfd\xcc\xfd\xb2\xfd\xcf\xfdI\xfe\xd5\xfec\xff\xe7\xff\x0e\x00<\x00j\x00\xd8\x00\x87\x01\x0e\x02J\x02n\x02\xa1\x02\xf6\x029\x03;\x03L\x03L\x03\xf9\x02\x8f\x02\x90\x02\xc2\x02\x8b\x02\xf8\x01p\x01s\x01M\x01\xa3\x007\x00\xf3\xff\xc8\xff\xc4\xff\xb4\xff\xf0\xff\xd8\xff^\xffq\xff\x05\x00\n\x01Q\x02\x1b\x045\x06i\x07\x15\x08\x9b\t(\x0c\x99\x0e\xc5\x10\xec\x12\xda\x14\xdb\x15)\x16\xdf\x16\x1a\x18\xc5\x18\x7f\x18\xaf\x17\xa2\x16\x18\x15F\x13}\x11\x00\x10U\x0e\xf4\x0b\n\tU\x06E\x04X\x02%\x00.\xfe\xae\xfcX\xfb}\xf9\xda\xf7,\xf7\xac\xf6\x87\xf53\xf4\x94\xf3e\xf3\xbf\xf2\n\xf2 
\xf2H\xf2\xb0\xf1\xf1\xf0\xd5\xf09\xf1\x1c\xf1\xc0\xf0#\xf1\xba\xf1\xbe\xf1\xac\xf1@\xf2[\xf3\xeb\xf3\x15\xf4\xcf\xf4\xd5\xf5\\\xf6\xb3\xf6\x95\xf7\xbe\xf8_\xf9\x92\xf9\xf5\xf9\xb4\xfa\x14\xfb2\xfb\xac\xfbO\xfc\x81\xfcq\xfc\x8a\xfc\xe2\xfc\xf6\xfc\xbf\xfc\xd7\xfc6\xfdt\xfd\x87\xfd\xb5\xfd\xf3\xfd0\xfeh\xfe\xdb\xfe\x7f\xff\x08\x00\x8e\x00\r\x01\xb7\x01R\x02\x08\x03\xbe\x03E\x04\xbd\x048\x05\x98\x05\xe4\x05V\x06\xd6\x06\xe7\x06t\x069\x06T\x06\r\x06N\x05\xdc\x04\xb9\x04j\x04\x9b\x03\xae\x02%\x02\x15\x02\xe3\x01+\x01\x00\x00/\xff"\xff\xc0\xfew\xfe:\xfe\x18\xfem\xfe\xa1\xfed\xfe\xb8\xfe\xb8\xff\xb1\x00\xf8\x00!\x01\x8a\x02Y\x04a\x05\xc3\x06\xe6\x08\x93\n\xfa\na\x0b\xf8\x0c\xd2\x0e\xb2\x0f\x01\x10\xcd\x10\x9e\x11Y\x11\xb2\x10\x9e\x10\xc3\x10\x11\x10\x80\x0eM\r\x82\x0cD\x0b\xa4\t\xfc\x07\x8d\x06\xc5\x04\xc2\x02\x00\x01\xb5\xffz\xfe\xe2\xfc4\xfb\xca\xf9\x9b\xf8\x94\xf7\x90\xf6\xc6\xf5!\xf5T\xf4\xc8\xf3|\xf35\xf3&\xf3#\xf38\xf3A\xf3O\xf3\xb2\xf3+\xf4w\xf4\xc8\xf4H\xf5\xec\xf5p\xf6\xbb\xf6\x19\xf7\x99\xf7\x04\xf8j\xf8\x0c\xf9\xc6\xf90\xfaa\xfa\x90\xfa\xdd\xfa!\xfbI\xfb\x82\xfb\xd4\xfb\x16\xfc=\xfcU\xfc\x83\xfc\xa6\xfc\xae\xfc\x9f\xfc\xb6\xfc1\xfd\xb5\xfd-\xfe\x9f\xfe\xde\xfe\xf5\xfe!\xff\x9a\xffr\x007\x01\xc6\x01&\x02s\x02\n\x03\x94\x03\t\x04r\x04\xda\x04\x1f\x052\x05y\x05\xff\x050\x06\xd2\x05a\x05V\x05w\x05!\x05\xb6\x04`\x04\xa2\x03\xd5\x029\x02+\x02\xfa\x01\x16\x01x\x00Q\x00\xe3\xff>\xff\x0b\xffr\xff\x88\xff\x05\xff\xb1\xfe\r\xff{\xff\xb6\xff\x1f\x00L\x00\x9e\x00\xd8\x00\x1b\x01\x14\x02\xae\x02\xec\x02)\x03\x87\x03\xac\x03\x08\x04\xa8\x04`\x05\xe9\x05\xef\x05\xec\x05,\x06\x18\x06\x12\x06\x01\x06\xc2\x05\xc2\x05M\x05\xa4\x04\x06\x04v\x03\x0c\x03t\x02\x9a\x01\x01\x01\xa2\x00\xcc\xffY\xfe\xa0\xfd\xba\xfd\xaa\xfd\xde\xfc\x1c\xfc\r\xfc[\xfbB\xfa\xeb\xf9\x98\xfa#\xfb\xe7\xfa\xcb\xfa\xee\xfa\xe0\xfa\xcb\xfa9\xfb\xe7\xfb\x83\xfc\x88\xfc\xc0\xfc\xaf\xfd\x8a\xfe\xf2\xfe/\xffo\xff\x86\xff\x9c\xffU\xff\xb5\xffK\x00]\x00e\x00\x1a\x00\xf4\xff\xe6\xff\x99\xff\x99\xffy\xff*\xff!\xff\x11\xff%\xff\xe5\xfe\xb6\xfe\x9b\xfef\xfeD\xfeL\xfe\x98\xfeq\xfe\xcf\xfe\xab\xfe\x8e\xfe\xcf\xfe\xdf\xfe\xb7\xfeV\xff\x9c\xffJ\xff\xa3\xffi\xff\x88\xffV\xff4\xff\x95\xff\xa0\xff\x96\xff\xe9\xfe\xf4\xfe\xc3\xfeV\xfe_\xfe_\xfe8\xff\x80\x00\xca\x02\xb8\x012\xfe\x82\xfc\x89\xfd!\xfei\xffr\x00\xc4\x00l\x004\xfe\xa2\xfe[\xff\x17\xff\x81\xffH\x01\x12\x02Z\x02\x84\x02M\x03\xf5\x03\x1c\x04B\x04Z\x04r\x05\xe9\x05\xdf\x05U\x06\xa3\x06*\x06\xf9\x05\x07\x05.\x05\x02\x05\xed\x03\x7f\x03\xef\x02\xfa\x01G\x01\x08\x01(\x00W\x00\x01\xff\xd9\xfd\x92\xfc^\xfc\xe6\xfc<\xfc+\xfc:\xfcV\xfcw\xfb\x13\xfc\x7f\xfc\x80\xfc\xed\xfc\x87\xfd\xb0\xfd\xca\xfd\x86\xfer\xfe\xb3\xfe\xb6\xfe\x0c\xff 
\xff\xaf\xff\x92\xff\x18\x00Q\x00\xa0\xff\xc0\xff\r\x00\x92\x00\xd8\x00\x05\x01\\\x01\xc9\x01H\x02\xcd\x02\x98\x02\x13\x03S\x03\x08\x04\xc7\x03N\x044\x04t\x03\xd7\x03%\x03\xd6\x02\x13\x03\xfd\x02E\x02\xbb\x018\x01&\x01%\x00l\xffl\xff\x90\xfe\x1b\xffH\xfe\xd8\xffu\xfe\n\xfdd\xfd\x90\xfc\x05\xfd\xfa\xfb\x0c\xfd\x1c\xfe\xc7\xfdG\xfd\x08\xfdn\xfc\\\xfcI\xfb3\xfd\xae\xfd\x8e\xfe\xcd\xfe\x1f\xfe\x86\xfe\x1b\xfdZ\xfe\xf6\xfd\x05\xff\xc0\xfe\x1f\xff\xc7\xff\x81\xff\xb9\xff#\x00\xee\xff\xba\xff\x9b\xfe\xe5\xfe\x02\x00\xd2\xff1\x00\xd9\x00,\x01\xa8\xff\x02\x00Z\x00\xac\x00u\x00\xc1\x00\x8e\x01V\x01\xdf\x00\x03\x02d\x02,\x01\x9d\x01\xd1\x01/\x02^\x01\xee\x01V\x02\xe5\x01\xec\x01\xb4\x00\xa9\x00\xc3\xffM\x00{\x00\xe1\xff\xb5\xff\xba\xfeh\xfe\x0c\xfe\xd9\xfd1\xfe+\xff\xcc\xfd^\xfd\xb4\xfd,\xfe\xac\xfdB\xfeY\xfe\x8f\xfe,\xfe\xe5\xfc\xe2\xfc\x99\xfd\x05\xfe\xa9\xfd\xf4\xfe\x08\xff0\xfe\xb6\xfc\x9f\xfd\xa5\xfd\x91\xfe\x14\xffg\xff\xcd\xff\x83\x00p\x01\xd5\x00\xe2\xff\x90\x00\x94\x03\xc1\x02$\x03\xbf\x03\xba\x04c\x03\x95\x02\x8e\x03h\x05\x0f\x06\xbf\x046\x03\xa4\x04M\x03,\x02b\x01\x85\x03R\x02\x03\x00\xad\x00\x81\x00\x95\xff\x8e\xfd\xd3\xffs\xff\xcd\xfe\xc5\xfd\x1b\xff\xa9\xfdS\xfem\xfdE\xfeK\xfe2\xfd\r\x00E\xfdy\xff(\xfe@\xfe&\xfdO\xfc.\xfeh\xfe\xe8\xfd\x92\xfe:\xff\xad\xfdK\xfd#\xfe\x83\xfe\x10\xff\x94\x00L\xff\x04\xff\xc8\xff!\xff?\x00\xe7\x00\xe0\x02\xac\x01\x7f\x00\xf5\xff\x00\x01\x9b\x01>\x01\x8d\x02\xdb\x01\xff\x01?\x01p\x01\xb8\x00\xc1\x01\xe4\xff_\x00\xa2\x00\xe8\x01\xfb\x01V\x00/\x02\xe0\xff\t\x006\xff\x83\xff\xd6\xff\xa3\x00\xa4\x00u\x01\x83\x00\xf5\xfe\xc9\xff\x9f\xfe\xe7\xff\x1f\xffW\x00`\x01\xf5\xff\x06\xff\xdc\xfeW\xffh\x00\xf0\x00\x83\xff\xe5\xff^\xfe\x96\xffG\x00\xee\xfe9\xff0\xfee\xffV\xfec\xfe\x9f\xfe|\xfe\xa0\xfe[\xfd\x1b\xfd<\xfeO\xfd\xea\xfe\'\xff\x97\xff\xd3\xffX\xfeO\x01/\x01\xf4\x00\xe6\xff\x9e\x02\xb1\x02\xda\x02\x0b\x05\xc4\x03\x91\x04\xa8\x02G\x02\xc1\x03\x16\x04\xf1\x01a\x03\xff\x01\xdc\x01_\x01\xa2\x01G\xff\xdc\xff\xe8\x00\xca\xfd\xeb\xff\x98\xfd\xf0\xfe\xfd\xfc\x1a\x01\xb0\xfc\x8f\xfd<\xfd\x00\xfdv\xff\x81\xfe\xfd\x00\xe1\xf9\xaf\x008\xff\x89\x00J\x00#\xff\xac\xff\xe0\xfe\xfe\xfe\xba\x01y\x01\xec\xfe\x19\x00\xc5\xff\xc0\x02\xf5\x01\x18\x00\x0c\xfe\xd5\xff\xb4\xfds\x01#\x02\xc4\x00\xd5\xff(\xfe\xf0\xfe2\xff\xf2\xfe\x8f\x005\x00 \xfe\x9a\x01.\xffr\x00\xc1\x00\x93\xff\x89\xff"\x00\x17\x02R\x00\x94\xff\xc9\x01\xa7\xff\xd5\xfe(\x00\xa5\xfd\xdb\xffe\x00\x12\xff\xe8\x01\x9a\xff\x84\xfe}\xff4\xfen\xfd\xed\xfcY\x00#\x03\x1a\x00\x9d\xfe$\x00I\xfc%\xff\n\x00\x7f\x00\xca\xfe\xfa\xff\xd3\x01\xbb\xfd\xbc\xfeO\xff\xe0\x00$\xfe\xd2\xff\xb0\x01\xf6\xfb\n\x01U\x00e\x00\xd3\xfdn\xfdX\x01\x0f\xff\xdf\x00\x06\xff\xad\x02\xaa\xfc\x86\x01`\x01\xba\x01\xdd\xfe\xec\x00\n\x05\xbd\x00\xfd\x01\xb3\xfe\x96\x03`\x02?\x01+\x00\xbe\x02\xbb\x00\x12\xff\xeb\xfec\x02\xf6\x025\xfc8\xfc\x19\x01\xee\xfda\x00L\xfd\x0b\x00Z\xfe1\xfbO\x01\xe8\xfbq\xff\x9f\xfcr\xfe\xb0\xfe\xd7\x00\x7f\x00\xa9\xffJ\xfe\x96\xfdt\xfd\xf3\xfd\x9e\x02)\x01\xc1\xff$\x01\x1d\x01\xbb\xff\x12\xff\xa9\x00G\x03~\x01\xda\xff\xcc\xfc\xcb\x02\x91\x02\x86\x06\x98\x01\xdc\x00\xf5\x00\x89\xfc\x7f\x03\xd7\x00\x08\x03\xdf\x01\x85\x00$\x04F\x00\xba\xfc\x87\x01t\x02B\xff\t\xfff\xfe9\x00\xc0\xfeH\x00:\x02\x01\x02\x91\xfeR\xf9\x99\xff\r\xfe\xee\x00\x17\x00\xc1\xfe\x9b\x00^\xfd\xef\x02\x87\x00\x08\xfe\xb2\x00\xd2\xfd\xba\xfe\xb8\xfe\x94\xff\x04\x02\xf8\xfe\xab\xffA\xff\xac\x03\xb5\xfe\x1d\xfd\n\xfb\xf1\xfb@\x02\xeb\x003\xff\x0b\xff@\x00\xce\xfe\xc6\xfe\x94\xfe\xa3\x04\xe5\xfeT\xfd\x1d\x02e\xfb1\x00\xc4\x04\xbc\x02|\x02 
\xff2\x01\xb3\x00)\xff\xb1\xfc\x9d\x00\x94\x04\xf3\x04\\\t\x81\xfb\xb1\xfc\x8e\x01\x1f\x00\xe1\x02\xf6\xfe\x13\x04R\x00\xcb\x00M\x02\xeb\x01h\xfc\x86\xfc\xbf\xff\xae\xfbb\x03>\x01\xa8\xff\x12\xfe\x88\xfc[\xfc\x13\x01\x01\xff\x02\xfb\xb3\x03\x83\x00C\xff\r\x00R\xffn\xfe\xd5\xff\x0b\xfe\x92\xfe\xb3\xfe|\xfd\x9d\xffi\x02s\x00\x17\x02\x04\xfd\x1e\xf8\xed\xfbV\xff\xe1\x03\x96\x04\x95\x04\n\x01\xb8\xfb\xdb\xfc:\xfe@\x00<\x04?\x04r\x01\x85\x00\x8d\x03\x9b\xfd\xd3\x02\xdc\x05"\x00\xf8\x00\x1c\xfe\xc2\x00\xeb\x02\xed\x01\xbe\x02k\x03e\xff\xc1\xfa\x07\xffi\x01h\x00\xc1\xff\xfc\xff~\xfe]\x00\n\x00\xd2\x04\x87\xfbt\xf8\xf6\x02\xc5\xfe!\xfc*\x03\x84\t\xe1\xfe\xc3\xfa*\xfa)\xfc\xb4\xfdw\x02\x14\x05z\x02\xb1\xfe\xfd\x02\x91\xfe\xb8\xf7\xb4\xfe\xb4\xfbg\xfe\xf0\xfe\x1d\x04V\x04\x0e\x02\x1c\x02l\x00\xd1\xf1o\xf0w\xfd\xc3\x07\xd1\x0cI\x08q\x02\xb4\xf7\xb4\xf8\x1c\xf6Z\x01\xd3\x04\xba\x03\xf5\x02h\x03\xed\x00y\xfb\xee\x03I\xfe\x9e\x01\x0e\xff/\xf9z\xff\xef\x04\xb8\n.\x05\x8b\xfd\xb4\xfc\x06\xfb\xfe\xf6(\x00\xa4\t\xce\tn\x02\xb3\xf5c\xfau\xfb\x18\x04\xf6\tA\x03\x1c\xfc\xfc\xf6;\xf9}\xfbS\x06G\t\x15\x03\xd2\xf7i\xf87\x01\xc1\x03\x08\xfd\x14\x00\xbf\xfe\xca\xfa\xd6\xfe\xd4\xf8\xf6\x04\x17\x0f\x88\x02\xbd\xf7\xd1\xfb\x92\xfb\xd3\xfd"\x05\x11\x03\xad\x03\x8b\x02\xde\xff\xab\xfb\xe0\xfem\x05n\x04W\x00\xcc\xfd\x1b\xff/\xff\x7f\xfd\xcb\x03&\x06H\x02\xeb\x00\x14\xfe+\x02\x7f\xff\x8b\xfaJ\x01\xbc\xffr\xfeO\x04\xab\x04\xec\x05.\x00\xbf\xf9@\x00\xd0\xf5g\xf8{\x06/\x0c\x9a\x04\xe9\xf8\xdc\xf9\x91\xfc"\xff \xfc\xf9\x064\x05W\xfb\xfe\xfd \x01<\x03\x11\xfd\xd6\xfc\xaa\xff\xe9\xfcW\x05\x87\x04Y\xff\xe3\xff\xe9\xfdl\xfb\x15\xfb\x8d\x00!\xffK\x01\xe0\x01\x84\x03v\x03\xd6\xfaX\xfb\xdb\x003\x06\xe9\xfd\xf5\xf62\x06,\x05\xa7\xff\xf8\x05{\x06\x1b\xfa\xf7\xf4\x16\xfd\x9e\x03\xa9\x05@\x01\xb3\x04\xfd\x03w\xf9\x9e\xfc\x8c\x01:\xfd&\x00y\x06\xc3\x00\xc5\xf5:\xff\x82\x054\x00\xee\x06\xb9\x05\xb2\xfd,\xf5\xdf\xf65\x01m\x02\xfd\x02\xbe\x06\xce\x06\x1e\x03\xc4\xff\x1b\xfcz\xf8\xb3\xf7[\xff\xc1\x05\xa1\x05o\x08\xc6\t\xa0\xfd-\xf3\x01\xf5\x13\xfa\xd0\xf9\xab\x01\xf2\x0cc\x0cs\tq\xfa&\xf3\xe3\xf3\xbf\xfan\x00_\x02\xf6\t\xa1\n\xf9\x050\xfc\xae\xfc\xce\xf9~\xf8\xfa\xfa\xa7\xfc|\x05w\x0b\x14\r\xaf\x06\xc0\xfd\x89\xf6H\xee\xec\xf5^\x03c\x08\x12\x10\x11\rE\xf9\x00\xec\xfd\xf1S\x00\r\x07\x8d\x06-\x07\x12\x00\xc8\xfa\xf6\xfb\xa9\xfd\xfa\xfc\xca\xff\xe1\xfe\x0e\x01\xfb\t\xc7\x06m\x02\xf1\xfb\x14\xf2\xd7\xf5\xc1\x00$\x07\xd9\x0b\x8d\n\x9b\x01{\xf8\xe2\xf7\x12\xf8\xb5\xfa\xd0\x039\x0c\x05\x0b\xff\xfbk\xf9\xd4\xfb\x12\xfb\xc4\xfd\r\x07\x1c\x05\x86\xfc\x05\xfe\x16\x00\xe5\x00\xcf\x02~\x00\xd5\xfam\xfc\xee\xfd\x87\x05\x14\t\xe6\x014\xfc\xe3\xf7\xc7\xf9\xb7\xff7\x06\xa3\nz\x03\xf4\xfa\xfd\xf9n\xfbN\xfd\x8b\x02\xca\x06\xc9\x06\xdf\x00\x87\xf8v\xf8"\xfe\xf9\x04\xd8\x02X\xffy\xfe\xe5\xffo\xff\x86\xfd\xd4\xfe\x13\x00\xf4\xfe\xb4\xfb\x14\xfe\xc6\x02\xb3\x04\x1d\x02*\xfd\'\xfcR\xfd\x8f\xfb\xb3\xfeL\x02\xd0\x02\xa4\x04\xd9\x01\xf5\xfdU\xfc\x05\xfc\x8c\xfdu\xffu\x03n\x03\xa1\x01\x0c\x01\x83\x00\xd1\xff\x1d\xfe\x96\xfek\xfd\xd9\xfe+\x02{\x04y\x02+\xff\xcf\xfd\x01\xfe\xd6\xfes\xffD\x01\xb7\xff\xc2\xfc\'\xfb\xbf\xfb\xb6\xfe\xda\x02\x99\x01\x11\xfd\xd9\xfa\x90\xfb\xa9\xfa\xe1\xfb.\x00\xfd\xff\x11\x00\x03\xff\xf5\xfe\xa1\xfe)\xfd\x84\xfd\xbd\xfdk\x00\xb1\x03\xd0\x04I\x04\xd0\x03\xe0\x03\xe2\x02\xab\x01\xe4\x04[\n\x02\n\x0f\t\x8a\x08~\x06\xfd\x06u\x08\xa6\t\xd0\x0b\x18\rQ\tB\x06?\x04\xf8\x04\xc0\tE\x0c#\n\t\x05\xee\x02!\x01^\x00\xad\x01\x1b\x03\xf7\x03\xb3\x01Q\xfe~\xfc\xc4\xfbB\xf9}\xf8\x9c\xf8D\xf9|\xfa\x87\xf9\xf3\xf7\x12\xf6K\xf3`\xf0\xbc\xef\xcf\xf2\xab\xf6q\xf7\x1
6\xf6\x99\xf3\xfb\xf0\x95\xedJ\xec\xa1\xf0\xe0\xf6.\xf9\xbb\xf6\xec\xf2\x03\xf0\xf0\xeeT\xf0\xc9\xf2\x8b\xf4\x99\xf6\x14\xf6C\xf3@\xf2o\xf2+\xf3\xb2\xf2\xdf\xf0\xc0\xf3l\xf7-\xf74\xf5\xa7\xf4\xd3\xf5\xf1\xf4\xb2\xf6\x06\xfa#\xfb\xe5\xfc\x04\xfd\x8c\xfdn\x00s\x03\xc4\x06\n\x08b\x07Q\t(\x11\xc0\x1f\xb6-\x9c2Q*\xac"\x84&\x19/6<\xe7H\x93P\xe6Lc>02\x111\xe85\xa15\xc22i/U)8\x1fn\x12\xb3\x08r\x00p\xf63\xee\x97\xebY\xedI\xeb;\xe2\xb9\xd3\x93\xc8l\xc56\xc6l\xcb\xb8\xd2\xe6\xd7\xea\xd5\x81\xcf@\xcdt\xd2\xbc\xda\xa4\xe01\xe8\x83\xee\x90\xf4\x95\xf8\x18\xf9\x85\xfe\xdc\x03\x11\x05\xa2\x04@\t\xed\x10\xbd\x15\xe2\x153\x13\xe3\x0f[\t\xf1\x05~\x07B\n\xf3\n\xcf\x05\xfa\xfd\xca\xf6\x08\xf1\xf6\xee5\xee\xc6\xecY\xeb\xde\xe71\xe4\xd9\xe1\xe3\xe1\x9a\xe3\x18\xe3\xa0\xe2\xb0\xe3y\xe5<\xe8\t\xeaK\xed\x15\xee\xa7\xed\xa2\xefR\xf3\xd8\xf8$\xfb\x99\xfc+\xfe&\xfe\xe0\xfd/\xfe;\x02\x1e\x06\x04\x06\x86\x04\'\x02\xd1\x04\xaa\x04\xf0\x02{\x01%\x00\xa2\x00\x9c\xfd\x93\x02\xd5\x07w\n\x86\x03\x87\x01\xad\x10\xcf\x1e\x82#"!l(|/o.\xe1,\x8b3\xa7C\xafG\x9bA\xa2?\xc7A\xb9?\xe83P-\x95-5-j\'\xaf\x1eh\x1a\xf8\x11\xbc\x04\xee\xf7&\xef\xc7\xec\x8c\xe9\xcf\xe6\x16\xe4\xf6\xdd\xe3\xd3B\xc9X\xc7\xcf\xcc\x94\xd3\\\xd6\xfd\xd5g\xd6\x0e\xd6%\xd5.\xd9\xca\xe1\x11\xe9\x11\xee#\xf1\xf1\xf3\xf5\xf7\xde\xf9\xcc\xfa\xaf\xfd\xb4\x01\xf5\x07\xa9\x0b\xad\x0c[\x0ba\x07\xbc\x02\x03\x01:\x05\x03\x0b\x08\x0c/\x06\xa9\xff\xbe\xf9N\xf5\xdf\xf4{\xf8\x8a\xfb\x1d\xf9\x90\xf3T\xf0\xdf\xedN\xec_\xedA\xf0+\xf3\xbd\xf2\xfe\xf1\x11\xf2x\xf2\x94\xf1 \xf1\xf2\xf3\x84\xf8\xe5\xfb\x05\xfd*\xfc\xca\xfa\xe1\xf8\xe1\xf7>\xfb\xd6\x00\xb8\x04@\x04K\xff\xfd\xfb\xcf\xfb\xa6\xfd\xbe\xff\x1a\x02\xbc\x01\x13\xffU\xfb\xf5\xf8\x89\xfa@\xf9\xb3\xf7$\xf8\xb7\xf9\xc5\xf94\xf7j\xf4\xcc\xf0\xf9\xef\xf2\xfaK\x12,$\x1b\x1d\xff\x0e\xc6\x0b%\x16d\'\xcf6NJ\x16R\xf2G!5\x13/y;AHDL\x98HfB76\x80%\x94\x1c\x18\x1c\xfc\x1ay\x13\xc0\x08X\x03\x9a\xfc\xd8\xee9\xe0d\xd8[\xd6g\xd5\x9a\xd4a\xd5\n\xd4\xd4\xca(\xc0\x91\xbej\xc7\xea\xd2Y\xd9\xa5\xdb\x05\xdd\x8c\xdb \xda\x9e\xdf\x0c\xeb\x05\xf8\x10\xfe\xf1\xfc{\xfc\x84\xfeK\x01\x0f\x04%\n\xc3\x0f\xff\x10\x87\x0ca\t#\x0b\x82\x0b\xed\x08\x14\x08\x8e\t\x15\t\xb6\x05z\x02\xc9\x00#\xfd\x9a\xf8Q\xf7\xc4\xfa\x11\xfcM\xfa\xfe\xf5\xe3\xf1I\xee\xc8\xedZ\xf2\xce\xf6j\xf9d\xf6,\xf2\xbf\xf0\xce\xf2\xb1\xf7B\xfc_\xff\x7f\xff(\xfec\xfe>\xff0\x02C\x04,\x05\xba\x050\x05\x10\x05\x8d\x04]\x04\x0f\x03\x1d\x02S\x01\xec\x00\x16\x00U\xfc\x14\xfa\x8a\xf8)\xf8\xa5\xf7\'\xf6\xe5\xf4\xa7\xf2\xa9\xf0\xff\xec|\xeb\xa9\xeb\xf0\xec\x13\xf3p\xf8\x9e\xfcP\xfai\xf4m\xf7(\x03!\x13i\x1f\xdc#\n!\xd5\x1a\x81\x1a\x0c&\xf79\x02E\xe1D\xe7<\xff2\x17/81[9\xe4=19l-\xc2"\xf4\x1bu\x17W\x16b\x13\x90\x0b\x97\x00E\xf6\x11\xf2`\xef\x9f\xeb\xfa\xe5\x14\xdf\x85\xd9\xa7\xd6:\xd7\xed\xd7D\xd8\xe7\xd55\xd3\x1d\xd33\xd6\x94\xdc\xb9\xe0\xc2\xe15\xe2\xcf\xe2T\xe5\x01\xe9\xa9\xef\x0b\xf6\x07\xf8\n\xf6\x14\xf5\xf7\xf8W\xfd\xb6\xff|\x02]\x04\xd8\x03-\x01\x8e\x01n\x05\xe3\x06\xbb\x05\xe6\x04H\x05\xd7\x03\x0f\x022\x02\xea\x03A\x021\xff]\xfeS\xff%\xff\\\xfdN\xfc\xdd\xfb\xc6\xf9o\xf8\x8c\xfav\xfc\xe4\xfc\xff\xf9\x80\xf8u\xf8\x1d\xfa\x7f\xfcM\xff\x1a\x00(\xfen\xfc^\xfca\xfe\\\x01K\x03~\x033\x02r\xff\xbd\xfe\xeb\xfe\xfd\xffq\x00\x9a\xffJ\xfe\x90\xfc\xd8\xfar\xf9\xd4\xf8\xe2\xf7\xd9\xf6\xf9\xf4\xdc\xf4\xc4\xf4(\xf3\xc0\xf1k\xf1Z\xf3\xe0\xf4\xbd\xf6t\xf8.\xf7\xef\xf3\xc1\xf6\x0b\x02m\x0e\xc3\x13\x99\x11\xd4\x0e\x1a\r\x8c\x10\x9b\x1d\xe3.\xb48\xa23o)_$R\'~0\xe09\xa3?q9c+& 
+\x1e\x7f#c&\xae$\xbb\x1c\xba\x10\x9d\x05\xec\xff\xe2\x00K\x01\xdf\xfd\x9a\xf7F\xf1\xa9\xeb\x87\xe6E\xe4<\xe5\x0e\xe6-\xe4"\xe1\xab\xdf\x82\xdf/\xdeG\xddZ\xden\xe1d\xe3a\xe3h\xe4\t\xe5\xd0\xe4J\xe4r\xe6X\xeb2\xee\x15\xef\xfe\xee\x91\xef\x86\xf0B\xf1E\xf4e\xf7\xd1\xf9r\xfa\x93\xfa\x0c\xfco\xfd\xbd\xfen\x00x\x02D\x047\x04\x9d\x03\xdc\x04\xbb\x06\xbd\x08\x87\x08J\x081\x08\xbd\x07\xf0\x07P\x08\x11\n]\n\x00\t\xfd\x06\x93\x06\xa5\x07\x8a\x08\xdd\x07\xfa\x06d\x06\x19\x06\x8f\x05\xed\x05\xf5\x06:\x06\t\x04\xab\x02\n\x038\x04/\x04s\x03s\x02g\xffu\xfc\xcf\xfb0\xfd\xcd\xfd\xf5\xfbr\xf96\xf73\xf5\xce\xf3\x1f\xf4\xe4\xf4\xeb\xf3\xcf\xf1]\xf0u\xf0\xca\xf0\x07\xf1o\xf1\xfb\xf1\xf1\xf1\x15\xf2\xd5\xf3!\xf6c\xf7|\xf7J\xf8\xaa\xfa\xfb\xfd4\x01\xb2\x03)\x05\xd3\x05P\x07\xd1\n\xaa\x0f\xda\x13d\x16k\x17\x0e\x18l\x19X\x1c9 =#\x95#}"\xf9 \xa9 }!N"\x1f"\xd7\x1f1\x1c>\x18\xa6\x15F\x14\xde\x12k\x10\x8a\x0c\x04\x08\x91\x03I\x00P\xfe\xa6\xfc*\xfa\xdb\xf6l\xf3\xc6\xf0*\xef\x08\xee\xfe\xec\xaf\xeb\x1e\xea\xf0\xe8\\\xe8\x84\xe8\xb1\xe8\x8b\xe8\xfc\xe7\xca\xe7\x12\xe8\xce\xe8\xe0\xe9\x90\xea\xd1\xea"\xeb\xbe\xeb\xcc\xec\x16\xee_\xef\xa4\xf0\xa5\xf1\x90\xf2\xf3\xf3\xa2\xf5O\xf7\xad\xf8\xc9\xf9\xef\xfaL\xfc\xc2\xfdN\xff\xa8\x00\xab\x01c\x02\x15\x03\xff\x03\x08\x05\xe6\x05o\x06\xc0\x06\xf7\x06/\x07|\x07\xed\x079\x08.\x08\xd9\x07\x91\x07\x87\x07\x87\x07\x9b\x07\x97\x07Z\x07\xc9\x06\x08\x06\x9a\x05o\x053\x05\xb4\x04\xe6\x03\xfd\x02\x07\x02\x0f\x01Z\x00\xb3\xff\xcb\xfe\xb7\xfd\x7f\xfcr\xfbs\xfa}\xf9\xa5\xf8\xeb\xf7)\xf7K\xf6\xae\xf5j\xf5J\xf55\xf5\x00\xf5\xe2\xf4\xe6\xf4$\xf5\xde\xf5\xf3\xf6\x00\xf8\xe4\xf8\x91\xf9F\xfa/\xfb\x84\xfc$\xfe\xb0\xff(\x01i\x02p\x03e\x04\x8a\x05\xe9\x06\x1f\x08\xff\x08\xac\tN\n\xc1\nY\x0bG\x0cF\r\xe1\r\xf4\r\xc1\r\xe2\rh\x0e\x14\x0f\xd2\x0f,\x10\x11\x10\xa5\x0fM\x0fb\x0f\x88\x0fa\x0f\xff\x0eL\x0eJ\r[\x0c\x8b\x0b\x03\x0bD\n\x02\t\x8b\x07^\x06U\x05Z\x04m\x03c\x02&\x01\xec\xff\x0b\xff\x7f\xfe\xf4\xfd&\xfdY\xfc\x94\xfb\xf3\xfa\x83\xfai\xfa;\xfa\xae\xf9\xe3\xf8\x1d\xf8\xdb\xf7\xa8\xf7\x81\xf71\xf7\xa5\xf6\xce\xf5\t\xf5\xae\xf4\xa5\xf4\xb6\xf4\x9c\xf42\xf4\x9c\xf3\x1f\xf3\xf2\xf22\xf3\xa3\xf3\xf4\xf3\x19\xf4\x0f\xf4\x13\xf4g\xf4\x01\xf5\xe0\xf5\xb4\xf6V\xf7\xf8\xf7\x93\xf8d\xf9N\xfaX\xfbd\xfc]\xfd5\xfe\x0c\xff\xf1\xff\xd7\x00\xaf\x01r\x026\x03\xe7\x03\x80\x04\xe0\x04/\x05}\x05\xb6\x05\xef\x05\x05\x06\xfc\x05\xd1\x05\x8a\x05=\x05\xf1\x04\xac\x04[\x04\xf1\x03}\x03\xff\x02\x87\x02 
\x02\xbc\x01h\x01#\x01\xd3\x00j\x00\x00\x00\xbf\xff\x99\xffj\xffC\xff\x17\xff\xcf\xfej\xfe\x15\xfe\xf4\xfd\xd9\xfd\xb9\xfd\x88\xfdF\xfd\xdd\xfcr\xfc5\xfc7\xfc8\xfc\x12\xfc\xbb\xfb<\xfb\xd9\xfa\x9b\xfa\x9c\xfa\xae\xfa\xae\xfa\x94\xfa}\xfa\x87\xfa\xb9\xfa\x1e\xfb\x94\xfb\x1a\xfc\xa7\xfcN\xfd1\xfeS\xff\xb2\x00"\x02\x91\x03\xf0\x047\x06\xa7\x07_\tM\x0b\x1c\r\xb5\x0e!\x10j\x11\x8b\x12\x8a\x13\xb1\x14\xdf\x15\xaa\x16\r\x17\x07\x17\xc0\x16G\x16\xd2\x15T\x15\xa3\x14\x8f\x133\x12\x8b\x10\xbc\x0e\x0f\rZ\x0b\x83\t\x8e\x07r\x05O\x039\x016\xff9\xfd8\xfbA\xf9c\xf7\xb0\xf5(\xf4\xa2\xf2.\xf1\xcb\xef\x97\xee\x99\xed\xc2\xec$\xec\xa0\xeb\'\xeb\xbe\xea\x85\xea\x7f\xea\xa2\xea\xe1\xea:\xeb\xb8\xeb>\xec\xc7\xec\x94\xed\x98\xee\x81\xefY\xf08\xf1U\xf2\x8e\xf3\xcc\xf4\x13\xf6R\xf7\x85\xf8\xbb\xf9)\xfb\xbd\xfcC\xfe\xa0\xff\xd1\x00\x0c\x02[\x03\xae\x04\x02\x062\x07/\x08\xf3\x08\xa9\tr\nH\x0b\xf3\x0bG\x0ck\x0cy\x0cv\x0c_\x0c.\x0c\xd3\x0bC\x0b\x80\n\x9e\t\xcf\x08\x00\x08\x11\x07\x0c\x06\xde\x04\xb3\x03\x80\x02H\x01+\x00\x13\xff\xfa\xfd\xdf\xfc\xce\xfb\xd2\xfa\xe8\xf9\x10\xf9J\xf8\xa3\xf7\r\xf7\x84\xf6\n\xf6\xa9\xf5h\xf5:\xf5:\xf5`\xf5\x97\xf5\xdc\xf5,\xf6\x8d\xf60\xf7\xed\xf7\xde\xf8\xc7\xf9\x94\xfak\xfbQ\xfcr\xfd\xb4\xfe\x03\x007\x01F\x02)\x03\r\x04\x1c\x05^\x06\x91\x07l\x08\x12\t\xaa\t8\n\xee\n\xc6\x0b\x80\x0c\x07\ra\r\xb3\r\xff\r*\x0eD\x0e\x82\x0e\xcd\x0e\xfe\x0e%\x0f\x14\x0f\xdd\x0e\x95\x0e{\x0em\x0eX\x0e\x1f\x0e\xa6\r\xda\x0c\xff\x0bi\x0b\xe6\nQ\nz\t^\x08\x06\x07\xa5\x05m\x04f\x03e\x02\x14\x01\x91\xff\t\xfe\x91\xfc5\xfb\x03\xfa\xff\xf8\xf2\xf7\xbe\xf6t\xf5b\xf4\x8a\xf3\xdd\xf2Y\xf2\xde\xf1e\xf1\xf2\xf0\x9f\xf0\x87\xf0\xa3\xf0\xc5\xf0\xee\xf0\x1b\xf1i\xf1\xe3\xf1c\xf2\xef\xf2\x88\xf3(\xf4\xd0\xf4\x94\xf5f\xf65\xf7\x0f\xf8\xe1\xf8\xb3\xf9\x92\xfau\xfbg\xfcN\xfd$\xfe\xec\xfe\xb4\xff\x87\x00V\x01\t\x02\x88\x02\xeb\x02H\x03\xbd\x03&\x04\x84\x04\xb8\x04\xbc\x04\xa2\x04y\x04x\x04\x87\x04\x7f\x04Q\x04\xfb\x03\x9a\x03:\x03\xf5\x02\xc5\x02\xa0\x02S\x02\xd1\x01U\x01\xff\x00\xd1\x00\x9e\x00u\x00/\x00\xd2\xffi\xff1\xff6\xff2\xff\x18\xff\xcd\xfe\x8e\xfeu\xfep\xfe\x97\xfe\xb6\xfe\x9a\xfeo\xfeP\xfec\xfe\x89\xfe\x94\xfe\x8b\xfem\xfeL\xfeM\xfeg\xfe\x7f\xfes\xfe8\xfe\x15\xfe\t\xfe\x02\xfe\xf5\xfd\xef\xfd\xd2\xfd\xb8\xfd\x8b\xfd\x94\xfd\xd6\xfd\xf3\xfd\xf9\xfd\xe1\xfd\xe9\xfd1\xfe\xad\xfe"\xffr\xff\xc1\xff\'\x00\xbc\x00\x8a\x01\x84\x02u\x03B\x04\xf1\x04\xd0\x05\xf3\x06&\x088\th\n\x98\x0b\x99\x0cO\r\xee\r\xc5\x0e\x96\x0f&\x10m\x10\x8f\x10O\x10\xcb\x0fb\x0f\x1b\x0f\xb0\x0e\xc1\r1\x0c\x8b\n!\t\xfb\x07\xdf\x06h\x05\xa5\x03\x90\x01s\xff\xa2\xfdN\xfc,\xfb\xf2\xf99\xf85\xf6\x88\xf4\xc1\xf3{\xf3\xe7\xf2\xe0\xf1\xc8\xf04\xf0=\xf0\x9a\xf0\t\xf1%\xf1\xff\xf0\xfa\xf0s\xf1{\xf2\xc8\xf3\xa5\xf4\xd8\xf4\x17\xf5\xe3\xf5W\xf7q\xf8#\xf9\x90\xf93\xfa\xe6\xfa\x89\xfb\xb1\xfc\xd1\xfd3\xfe\xd6\xfd\xf1\xfd>\xff\x84\x00\xb5\x00\x7f\x00\x8c\x00\xea\x00B\x01\x12\x02z\x02\xed\x02\xb3\x03\x86\x03\x88\x02\xc1\x02\x12\x04w\x02\x88\x01\x17\x08\xa4\x0f\x9f\np\xfc\\\xfa\xe4\x05l\x0el\r\x15\t\x95\x030\xfb\x1b\xfa1\x07\x9c\x0fO\x08\x03\xfe!\xfa\x98\xfb\xf6\xfe\xa9\x02\xb7\x01\xa0\xfb\xb1\xf6\x91\xf7\x13\xf9\xe8\xf7\x9e\xf8\x05\xfcA\xf9\xa9\xf0?\xee<\xf6\x90\xfd.\xfc\xf8\xf6e\xf42\xf5\xfa\xf6V\xfbJ\x01\xe7\x01\x7f\xfcu\xf8\x0c\xfb\xb0\x01\xc2\x05m\x05\xeb\x02m\x00b\x01\x11\x05#\x08\xa2\t\x8e\tt\x06\xdc\x03\xb1\x064\x0c\xb1\r*\n\x85\x08\xfb\n\x7f\x0c\x1c\x0cV\x0c\x0f\r\xa5\x0c\xb3\x0b\x9e\x0c\x02\x0e\xa8\rf\x0c=\x0b\xdb\n[\nm\nb\x0b:\x0cM\n|\x06\x1d\x05\x19\x07\x9f\t\xb2\x07\xc1\x04\x02\x04\x8f\x03\xbd\x01\xb6\x01\xb8\x03\x96\x01\xc8\xfb\xea\xf9\xd4\xfd\x02\x00\x9b\xfc
?\xf8\xce\xf6\x1a\xf6\'\xf5\xeb\xf6\xac\xf9\xda\xf8\x87\xf3\x84\xee\x14\xefk\xf3\xc2\xf6\x02\xf7\xd5\xf4\\\xf1\xe8\xeeW\xf0\xf5\xf5\x82\xfa\xeb\xf9S\xf70\xf6B\xf6\x1b\xf8\xf7\xfc\x9f\x01\x87\x01\xbb\xfd2\xfd\xd8\x00t\x03\x91\x04#\x06e\x07!\x06!\x05\x9b\x07\xa0\t\x16\x08\xd0\x068\x07\x02\x08S\x07\xdc\x06\xd1\x05\x91\x01\xab\xfd\x0c\xff\xf8\x01\xb6\xff\x85\xfb\xf3\xf8D\xf5\xc6\xf0|\xf1\xaa\xf6\x03\xf7\x83\xf0\x02\xebk\xe9O\xeb"\xeew\xf0\x89\xf0\xf3\xed\xcd\xeb\xfc\xeb\xdb\xed\'\xf2\'\xf6c\xf6\x1d\xf4\xf9\xf2\x05\xf5g\xf8\x19\xfcT\xfe\xcc\xfel\xfd\t\xfc\x9c\xfe\xf2\x01\xb3\x03]\x05\xe7\x07\xd2\x08i\x05J\x01@\x02\xc2\x08\xa7\x0e\xb8\x0fh\x0b\x13\x06>\x04^\x06\x7f\t\x8a\x0f*\x14\x9c\x112\x0bk\t\x87\r\xe6\x0f\x98\x14\x12 \xd3,\xe6(\xa4\x18\x7f\x0f?\x19\x9c,\xe26.8\xd42}(\xce\x1b\x9e\x18o%K2\x890f!\x8d\x13q\x0cc\x08\xed\x08W\n\xae\x07\x1e\xfe\xee\xf1\xdb\xe8\xb8\xe3\xd1\xe1\xa8\xdfo\xdcT\xd8\xe8\xd5=\xd4\xa2\xce\x06\xc9D\xc8\xbc\xcd\x93\xd3\xd4\xd6\x03\xd8C\xd7\xcf\xd4\x87\xd4\xfa\xdb+\xe6F\xee\xc0\xf1N\xf1\xe5\xf0C\xf1t\xf5\xb9\xfe\xe3\x06\x8a\n{\t\x1a\x07\xf0\x06\x80\t/\rC\x11?\x13\x9f\x12\xea\x10\xd4\x0e\x9f\rx\r"\x0e\xb7\r\x06\r9\x0b%\t\xf6\x06\x8a\x03\xf1\x00\xf3\xff\x89\xff:\xff\xb1\xfd\xb7\xfb\xeb\xf8 \xf5-\xf4\x19\xf5\x91\xf7\xee\xf7\xba\xf6N\xf5B\xf3\x0f\xf2j\xf3\xfb\xf6r\xfa\'\xfb\x8a\xf9\xb3\xf7A\xf7N\xf8\xc8\xfa\xbb\xfd\x83\xff\xf4\xfe:\xfc\x1e\xfa\xa0\xfa\xaf\xfc|\xfe\xc1\xfe\x9f\xfd\xd8\xfbC\xf9\xce\xf7\x9a\xf7R\xf9\x9a\xf9\xdf\xf7c\xf5\x83\xf3\x10\xf3|\xf2\x12\xf3\x87\xf5d\xf8 \xfa\xed\xf9Z\xf9\x92\xfa\xbd\xfd\xe7\x04;\x11C#\x93+\x03"\xec\x12R\x16}1\xc4I\xc4PQLKG\x8a@F8\x87<\x99M\x9eY\xd1O\t;E.\xe8)\xe2\'\x91"\x9c\x1dC\x17v\n\xce\xfa9\xeeH\xe7N\xe2\x8f\xdb;\xd3\xbe\xcd\xc6\xcb\xc1\xc6\xe3\xbd<\xb6\xa0\xb57\xbb\x01\xc1\x8e\xc4\x0f\xc7\xe9\xc6;\xc5\x92\xc6*\xce\xf6\xda\xf7\xe5f\xec~\xef\xbe\xf0[\xf2y\xf8\xbd\x01@\n\x0c\x0f\x97\x11\t\x13\xa4\x12\xff\x11\xaa\x14\xa9\x18\xdb\x17\xa1\x13L\x12R\x13\x1e\x11\xc3\n\xcc\x05\'\x05p\x03\xea\xff&\xfd\xca\xfbe\xf9\xe3\xf3_\xef:\xef1\xf1\xe7\xf2)\xf2\xc8\xef\x9c\xee\xe2\xee&\xf1\xf6\xf3\x7f\xf7\x9a\xfa\x81\xfc\x88\xfc\xfb\xfd\xd2\x01g\x05-\x07{\x08\xb1\nC\r\x17\rE\x0b\x04\x0b\xf2\x0b2\x0c\x1b\x0b\x18\t\xc1\x06\xda\x03\n\x00H\xfe1\xfdE\xfb\xbb\xf6\xdc\xf1\xb9\xed]\xeb\x07\xea3\xe9S\xe7\xb9\xe3\xdf\xe1\xae\xdfO\xde0\xdd\xf0\xdf\xc1\xe6\x0b\xe9\xde\xe7(\xed$\xf63\xf7:\xf1\xae\xfd\xc0#_>\xae1^\x18\xf6\x1b\x847\xbdJ\xe7O\xeb\\zl\x04ffJ\xfd:3IO[nZrL&Ex?\xc1+\xbd\x12\x05\t0\r\x80\n\xc2\xf9\x15\xea\x06\xe5.\xde_\xcb\xa1\xb9\x95\xb4\x89\xb9\x93\xbev\xbd \xbc\xe2\xb8l\xb2\xb2\xae\x95\xb3[\xc0V\xcf\xb7\xdb\x83\xe1h\xe2$\xe2?\xe6G\xf0\x8f\xfb\xb9\x08\x87\x15Z\x1ak\x17\x91\x12\\\x14<\x1a\xfb\x1d\x9b\x1f\xc1!f!_\x1b\xa7\x12\x1c\r\xa4\n\xc4\x06\xd0\x00<\xfcB\xfbj\xf9\xec\xf3\xf6\xea\x14\xe2\x1d\xdeT\xde\xc5\xe0\x93\xe42\xe7\xe2\xe6\x7f\xe2_\xdd\xbd\xde\xcb\xe6\x92\xf1\xca\xf8\xc6\xfb\xb1\xfc{\xfd`\xff(\x04\xe8\n\x17\x12\x8d\x15n\x155\x15<\x16\x94\x178\x17\xb1\x14\x08\x13\xed\x12\xc9\x12\xc0\x10h\x0c\xce\x06p\x01\x8d\xfc\x8e\xf9\x95\xf8\xa3\xf7\xce\xf5\x8a\xef@\xe94\xe4\xe1\xe1\xf6\xe0\xba\xdfS\xe1F\xe10\xe0\x12\xdf\xa2\xe0\xbd\xe4\xa3\xe5\xa9\xe6\x04\xe8\xa6\xe8\xde\xeb\xcd\xfav\x1d96\xdf/\xc7\x14\xfe\x08\xa2\x1e\xe3>}Z\x9amJs_bqC\xf56\xc9GHb*k|a\tQ\x80=\t,w\x1e\x8e\x16&\x13:\x0c\xee\xff4\xf1r\xe4Y\xdbG\xd1\x08\xbf3\xacR\xa5z\xab\xa3\xb7]\xbb\xa3\xb6y\xad\x9d\xa5\x80\xa4\xbd\xaeA\xc3O\xd9\xc6\xe7\xe7\xe8\xcf\xe5\xf7\xe5\x05\xedH\xfb5\x0e\xce V+o(\xdf 
\x8c\x1dJ#t+\x9d0\x990\xab,\x01&\xfa\x1c\xb2\x15M\x11=\r-\x06V\xfc\xaf\xf5\x0f\xf3\x83\xf0\x02\xeb\x96\xe2\x80\xdaF\xd4\xcf\xd1\x90\xd4I\xdaV\xdf|\xe03\xde\x93\xdbj\xdc$\xe2\xa8\xec\xd6\xf6\xf5\xfdj\x01s\x03\xbf\x04t\x07E\x0c\x9b\x12\xb0\x17\x15\x1a\xc2\x1a\xe5\x1b\xd4\x1b\x86\x19\xc4\x15\xcf\x12I\x11/\x10]\x0eb\x0b\x14\x07\x0f\x00\x19\xf8\xe5\xf0\x02\xed\xb2\xebS\xeb\x94\xe9\xb9\xe51\xe10\xdd\xab\xda\xfc\xd8\xe1\xda\xbb\xddL\xe1\xa0\xe3\xa0\xe4H\xe7\xe6\xea\x19\xef\xd2\xf3\x15\xf8N\xfb\xa0\xfaU\xf8\x12\xfd\x9b\x13\xa08\x95S\x80M\x99,2\x16\x96&tM.l\x92w\\w\xb5n\xdcW\x97>\xfe8iIXYMU\x8fA\xf3,3\x1fP\x11l\x00G\xefJ\xe3P\xdce\xd6\xe8\xce\x95\xc8r\xc1\xa1\xb2\x8a\x9e?\x8f\xc6\x90y\xa0\x18\xb2?\xbc\xbc\xbc4\xb7\xd6\xb1\x9f\xb3r\xc0\xb0\xd6\xb4\xee\x8d\x00\xe0\x07\xe4\x07\x83\x07\x96\x0cN\x17\xf5$X/\xca3\xe23\xc83\x022\x9f-s(?%\xe7 k\x19\x99\x12u\x10\x8f\x0e\xd7\x06\r\xfa\xd1\xed\x1e\xe6\x96\xe1\x1d\xe1^\xe3\x83\xe4\x11\xe1)\xdb\t\xd7k\xd6\xd9\xd8\xc7\xdf\x85\xe8\\\xeeQ\xf0\x98\xf2-\xf7\xc1\xfbe\xff\'\x03\x12\nY\x0f[\x12m\x14\x8b\x17s\x1a\x95\x1a\xf3\x17\xcf\x15\xb4\x14\xcf\x14#\x15\n\x144\x108\n\xf2\x02(\xfc(\xf7\xae\xf3\x8e\xf2\x82\xf0k\xec}\xe5`\xdeg\xda{\xd9@\xda\x8d\xda\xdb\xdbv\xdcn\xdc\xd8\xdc\xed\xde\x1d\xe3J\xe7v\xe9X\xeb\x9e\xeeh\xf4\x90\xfc~\x04\xba\x08\xc1\x08\xc8\x06\xbb\x05M\x06\xa4\r\xcd&\x80M\x95f|^\xed@\x11/]7vK\x8d^|q\xff\x7f\xbc{\x88b\x88D_5\xcc5\x9a7\xba2\xdd\'\xb3\x1bZ\x10)\x04\xaf\xf4V\xe2\x90\xcf)\xbe\x8c\xaf\xc5\xa5\xa1\xa6\xa4\xb1\xc9\xbb.\xb9O\xa9\xbf\x98\xa6\x91\x05\x97\xbd\xa5\xeb\xba\xc8\xd2\x05\xe5\x13\xeb\xb4\xe6\xe7\xe5\xc1\xf0@\x01\x08\r\xed\x14z\x1e\xc2(\xbe.p1\x0e4@6z2\x10\'\xee\x19D\x11\x9e\x10\xe9\x15\xd6\x1a%\x18,\x0c\x89\xfc\xdb\xeeV\xe3m\xdbI\xdbt\xe2\x91\xe9\xe5\xe8\xba\xe4\xff\xe3\xcd\xe5l\xe5E\xe2\xbb\xe1\xf2\xe6\x8c\xee\x06\xf7\xb7\x016\x0c(\x12i\x11T\x0c3\x08\xef\x08\xab\x0e\x0f\x18i\x1f\xb4!\x8e\x1f\xf0\x1aI\x15a\x10\x12\r\xc5\n\x05\x08\x0e\x04\xc2\x00\x1a\xff\x07\xfd\xb8\xf80\xf2\xe0\xe9A\xe1\xb8\xda7\xd9\x9f\xdc;\xe1\xd8\xe2\xe7\xdf\x93\xdb\xf9\xd6\xa7\xd4\x91\xd7\xa2\xdd\x90\xe4\x01\xea?\xed\xb2\xefS\xf1\x8a\xf21\xf6\xf5\xfb\xc1\x01\x9c\x07!\x0c\xbd\x0f$\x11c\x10\xaf\x0f\x89\x13v \x036OJ\xdcQoK\x9eA\\>\xe0A>G\xdcMgW\x04_\xb0\\`O|@\x816\xb0/\x8f%Q\x18\x9a\x0c\xdd\x03t\xfc\xf1\xf4\xb3\xed\xe5\xe4A\xd9\xd8\xca\xce\xbc\x9e\xb1_\xab\x10\xac\x00\xb34\xbb\x8e\xbf\x9d\xbe\xab\xbb\x85\xba\x02\xbd~\xc4Z\xd0I\xde\xb7\xea\xef\xf4\xeb\xfcQ\x04\xa9\x0bN\x13\x82\x19v\x1d\xab\x1ew\x1e\xc6\x1e) 
\xa3"0%\xf5%}"\x86\x1a\x95\x0f\xd8\x04\xaf\xfc\x16\xf8\xcf\xf7\xde\xf8\x00\xf8]\xf3\x17\xecf\xe5\xd9\xe0}\xde"\xde\xab\xde\xa8\xe0H\xe3\xc8\xe6E\xeb\x82\xf0\xfc\xf5\x07\xfa\xbc\xfb>\xfc"\xfd\xdb\x00\x17\x07\x83\x0eC\x15+\x19p\x1a?\x19\x14\x18\xb8\x16\xe1\x15=\x16\xb5\x16\x02\x16i\x13\x01\x10?\r\x93\n\x16\x07z\x02\xd6\xfcL\xf6\xd2\xef?\xeb1\xe9\xf0\xe8;\xe8\xc9\xe5&\xe2\xac\xdd\x19\xda\x92\xd8p\xd9*\xdci\xdf\x88\xe2$\xe5\xf8\xe6#\xe8\xcb\xe9;\xed$\xf3\xd0\xf9F\x00\n\x05\x98\x07\xaa\t\x99\x0cP\x11\x93\x16\\\x1a\xd9\x1a\xf9\x19\xdf\x19(\x1e\xb7\'\x9c4\x16@\xa7E\x8fD\n?\xb78e4-4w7\x86;\x9e<\x109\xd31\xb2(\xbd\x1f\xd6\x17+\x10\x0b\x07\xf6\xfb3\xf1!\xe9\x02\xe4X\xe1\xb2\xdf\xb3\xdd4\xd9\xb5\xd1\xa9\xc9\xf5\xc34\xc2\x9d\xc4\x18\xca\xef\xd0\x9e\xd6\xa3\xda\x97\xde\x03\xe4=\xeb\x1f\xf2\xa7\xf7g\xfb\\\xfeq\x01\xa5\x05,\x0b&\x11\x0f\x16\xbf\x18\x86\x18\x88\x15\xd5\x10k\x0c.\t\x0e\x07\x04\x05M\x02*\xff\xe5\xfb\xfe\xf8_\xf6\xca\xf3\t\xf1&\xeem\xeb\xfd\xe8\x0f\xe7\xa2\xe6`\xe8\xbc\xeb\xed\xee\xcb\xf0\xe8\xf1J\xf3\xbd\xf5\xf4\xf8\xee\xfc\xf7\x00\x82\x04D\x07\xc1\t\x95\x0cZ\x0f\xed\x11\x8d\x14\x9a\x16&\x17\xff\x15\xa1\x14B\x14W\x14\xad\x13=\x12\xdd\x10\xed\x0e\xdf\x0b\xe5\x07\x91\x03\x9b\xff\xb6\xfb\xdb\xf7h\xf4%\xf1\xe9\xed6\xeb\x12\xe9_\xe7\xf2\xe5\xf1\xe4\xd8\xe4\x13\xe5l\xe4\xc2\xe2\xac\xe2]\xe5\xc0\xe8Q\xeb\t\xed\xe6\xef\\\xf3\xbb\xf6\\\xf9A\xfc\x1f\x00q\x03Y\x05&\x06\xdd\x07/\x0b\xb5\x0f\xf5\x12G\x14X\x14\xd9\x14\'\x17\xe4\x1b\xf0!\n(\xce,\xa6.\xc1-"+\x1d(\x11&\'%\xff$\x05%\xb3#\x7f \xcc\x1b\\\x16\x89\x11Y\r\xd8\x08\xc1\x03/\xfeo\xf9?\xf6\x04\xf4W\xf2N\xf1\x93\xf0\xdf\xee\xa6\xeb\xff\xe7X\xe5\x9a\xe4\x84\xe5\x1f\xe8\xa8\xeb\xd4\xee\x7f\xf0\x0f\xf1\xcf\xf1\xc5\xf3f\xf6\x15\xf9`\xfb\xe7\xfc\\\xfd\xe8\xfc\x14\xfcO\xfb\x13\xfb0\xfb\x95\xfbM\xfb\xd5\xf9y\xf7\x02\xf5\xce\xf2\n\xf1e\xf0\x9b\xf0V\xf1\xd0\xf1!\xf2N\xf2\xb5\xf2f\xf3\xac\xf4\\\xf6\x06\xf8\x8a\xf9\xda\xfa\x1d\xfcz\xfd\x15\xff,\x01}\x03^\x05]\x06\xf5\x06b\x07\x17\x08\x90\x08J\tq\n\xac\x0bV\x0c\x1d\x0c\xa6\x0b\x19\x0b2\n_\t\x08\t!\t\xd5\x08z\x07\t\x06\xbf\x04\xa1\x03i\x02Q\x01C\x00\x17\xff\xca\xfd\x91\xfcK\xfbO\xf9\xa2\xf65\xf4\xad\xf2q\xf1\'\xf0}\xefg\xf0\xaf\xf1\xc2\xf1Z\xf0\xc3\xee[\xedO\xec\xb8\xecD\xee\xb8\xf0\xfb\xf3\x8a\xf7>\xfa\xa0\xfbq\xfc\x01\xfe\xc5\x00\x18\x04\xb0\x06c\x08}\n\xd9\r\xfd\x11{\x16\xf9\x1a\xd9\x1e\xf5 \x11!\x0c 
\x8f\x1e\x1d\x1d\xa7\x1b\x93\x1a;\x1a^\x19\xeb\x16!\x13n\x0f\x0f\r4\x0b\x86\t\x1f\x08\x05\x07\x86\x05\n\x03/\x00\x83\xfd\x88\xfb8\xfa\x0c\xfa\x15\xfbu\xfc\xe6\xfc\xc5\xfb\xc1\xf9\xe9\xf7\xde\xf6\xd9\xf6\xde\xf7\xf1\xf9\xd2\xfc\xa5\xff3\x01\xd1\x00\xdb\xfe\xfd\xfb5\xf9T\xf7\x0c\xf7\t\xf8:\xf9\xa9\xf9\xcc\xf8\xc8\xf6\xf8\xf3\x03\xf1\x17\xef\xc3\xee\x1d\xf0\x0f\xf2\x10\xf4\xb1\xf5\x88\xf6R\xf6C\xf5V\xf4\x0b\xf4\xa4\xf44\xf6@\xf8/\xfa\x93\xfb\x1b\xfcO\xfc\xc3\xfc\x85\xfd\x08\xff\xf8\x00\xe3\x02\x14\x04$\x04\xfb\x026\x01\xe7\xff\xd8\xff\xf0\x00Q\x02\x07\x03\xb3\x02\x8e\x01\xe0\xffl\xfe\xc4\xfd-\xfe\x94\xffY\x01\x11\x03\xb0\x045\x05|\x04\xee\x021\x01@\x00U\x00\r\x01g\x02h\x03-\x03*\x02\x03\x01/\x00s\xff\xc8\xfe\x8f\xfe\xf5\xfe\x88\xff]\x00\xdf\x01!\x04\x06\x07\xe3\t\xa6\x0c\xef\x0ev\x10a\x117\x12\xfe\x120\x13\xd9\x12U\x12F\x12\xa8\x12\x13\x13\xa6\x12\xf0\x10$\x0e\xa4\n\xe2\x06\xf0\x02\xfe\xfem\xfbM\xf8\xb4\xf5\x8d\xf3\xd4\xf0\xc3\xed$\xebN\xe9U\xe8\x02\xe8\x17\xe8\xf8\xe8_\xea^\xec\x80\xee\xb4\xf0\x00\xf3\xda\xf5\x94\xf9\x7f\xfd\x0f\x01\x9d\x030\x05w\x06\x0f\x08\x1e\nv\x0c\xd8\x0ea\x11\xe7\x13\xd3\x15,\x16\x9c\x14,\x11\x9f\x0c\xe8\x07\xb9\x03|\x00>\xfe\xf3\xfc%\xfc\x08\xfb\x18\xf9\xf0\xf5\xe9\xf1\x01\xee\xf7\xeaF\xe91\xe9\x8a\xea\xcd\xecx\xef\x84\xf1\x02\xf3-\xf4\x18\xf57\xf6\x96\xf7c\xf9\xa5\xfb\xe1\xfd\xf4\xffh\x01K\x02\xcc\x02%\x03\x91\x03\xfb\x03\xfb\x03:\x03\xa5\x01a\xff\xfe\xfc\xc0\xfa\x13\xf9_\xf8q\xf8\xfa\xf81\xf9\xaf\xf8,\xf7)\xf5\x10\xf3}\xf1\'\xf1k\xf2\n\xf5Q\xf8v\xfb\xca\xfd\xf4\xfeX\xff-\x00\xcf\x02\x0c\x08\x9d\x0f\xb4\x18,"\xe4*\xab1t5\xe45f4\x802T1\x061\x041\xc80S/\x15,\xb3&\x97\x1fe\x17\xcc\x0e\xbd\x06\x83\xff\xf0\xf8\xbf\xf2\xee\xec\xf3\xe7\xe6\xe3\xe1\xe0\x7f\xdeP\xdc\x06\xda!\xd8\xcd\xd6T\xd6\xbc\xd6K\xd8\xaa\xdb\xc9\xe0\xe3\xe6\xe6\xec\xec\xf1\xa8\xf5g\xf8\x8c\xfaB\xfc\xfe\xfd\x12\x00\xd3\x02W\x06\xcd\t\x82\x0c\xbf\r\x80\r\xb6\x0b\xd3\x08G\x05\xa3\x01\xc9\xfe\x08\xfdA\xfc\xee\xfbA\xfb\x9d\xf9\xd8\xf6P\xf3\x83\xefl\xec\xe0\xea\x1d\xeb$\xed\xb5\xef\xa2\xf1"\xf2\xc9\xf1\xa8\xf1%\xf3\xb8\xf6\xc5\xfb\x01\x01"\x05\x98\x07q\x08\xb9\x08B\t2\x0bG\x0e\xc5\x11\xd9\x14$\x16s\x15\xf7\x12?\x0f\xbb\x0b:\t\x92\x07\x92\x06-\x05@\x03\x04\x01\xfd\xfd@\xfaA\xf6?\xf2\xfa\xee\x81\xec\x00\xeb\xfe\xea\xf4\xeb:\xed\xbf\xed\xfe\xecG\xebs\xe9@\xe9\xcb\xeb0\xf1.\xf7o\xfb\x1f\xfd&\xfc\xd1\xfan\xfai\xfc\xc9\x00\xe7\x05p\n\xbf\x0c\xc0\x0c\xac\x0bK\x0c1\x12Q\x1e7.\x91\xf5Q\xf0+\xec\x14\xe9\xd6\xe6c\xe6\x95\xe8\xd0\xec\t\xf2\x91\xf64\xfa\xa8\xfbj\xfaM\xf7\xb3\xf4\xc5\xf4o\xf8\xaa\xfd"\x02b\x02\x16\xfe\xb3\xf6P\xee\xf8\xe7\xc9\xe4w\xe5\x88\xe8%\xebc\xeb(\xe9\xf9\xe4\x9a\xe0\x10\xde\xda\xdf\xf9\xe5\xf7\xed\xdd\xf4\x05\xf9\xef\xfa\x82\xfbT\xfd\x18\x04\x07\x15\xb8/\x89M\xafc\xa2j\xbacrW\xf1P\x05U2a2o*xLu\xbdd\xddH%)\x84\x0e`\xfdQ\xf4\xc9\xef\x9d\xebp\xe4,\xd9\x19\xcaJ\xb9*\xaaB\xa0\x19\x9d\xb6\xa1\x19\xad\xd2\xbd\xb4\xd07\xdfX\xe5Z\xe4\xbe\xe1Q\xe6\xc8\xf4\xe7\x0b\x06&\xe9:5E\xcaB:7\r)\xe3\x1f@\x1ew"\xea%|"\x0f\x17\x1d\x06#\xf4\xe1\xe2\xe4\xd3\xd9\xc80\xc3a\xc1_\xc06\xbf\xb2\xbe\x7f\xbf\xdd\xc0r\xc1\xa6\xc1\xda\xc5\xbe\xd0\x91\xe2\xe0\xf5\xb6\x04-\x0e4\x12G\x14a\x18,\x1f\x88)@4\xda:\xac:\xa23\xc5*Q#\xa3\x1e}\x1bU\x17 
\x12\xd7\n\x9d\x01\x8b\xf9\xe6\xf2+\xefr\xed\xa3\xeb\x14\xe9\xa9\xe6\xb1\xe6\xb6\xea\x8e\xf0K\xf5\x8c\xf8d\xfa\x8b\xfb\xc5\xfc\\\xfeg\x02<\x066\x08\xbe\x06i\x02k\xfd\xa5\xf9\x9e\xf7\xb7\xf5\xcf\xf2\x7f\xee\xac\xe8?\xe3\xb3\xdf\xe6\xdd\x88\xde\x92\xdf\xe0\xdf\x18\xe0\xf8\xdf\x08\xe0\xec\xe2\x08\xe7\xb8\xed\xe2\xf4J\xf9\x8d\xfdT\x01\xaa\x06\xa5\x0bu\x0f\xae\x14\xa1\x1b\x89%\xb04JJ\xa8`\x9cg\x15\\\x01I\x8a>}D1Q\r^pd_\\\x8eDP#@\x07V\xfa\x9c\xf8\x9b\xfa\xb5\xf8?\xef[\xe1\xbc\xd2*\xc6z\xbc\xbe\xb8o\xba\x94\xc0\x85\xc6\xc6\xcc\x85\xd6,\xe2\xbd\xe9\xe3\xe9\xee\xe8\x00\xeeM\xfc\x1a\x0e\x98\x1d\x81(L.7-\x85%\xb2\x1c|\x19>\x1d\xe5 \x97\x1f\xf5\x16W\n\xfd\xfb\x13\xee>\xe3\x7f\xdb\x81\xd5w\xcf\x9c\xc8\xc4\xc3\x08\xc2\xd5\xc2\xa3\xc3<\xc3\xaf\xc3\x16\xc5\xb7\xcaa\xd6\x01\xe6\x99\xf3\x12\xfb\xe9\xfe\xf3\x01\xed\x06\xe4\x0f\x01\x1cM(\xdc.\x1c/\n,\xe0(\xb8&#&\x00&\xb2#\xc1\x1e\xdc\x184\x13\x9e\r\x04\x07q\xff\xa5\xf9]\xf4\x15\xf2`\xf2\x04\xf4\x07\xf4&\xf1\x85\xec\xcf\xeau\xed\x87\xf1\xc3\xf65\xfa\xea\xfb2\xfc\x99\xfa\xd2\xf9L\xfa\xf0\xfa\x1d\xfd\xf1\xfd\x11\xfeP\xfc\xd5\xf8\xb3\xf5\xff\xf0O\xed\xd6\xea3\xeb\xd0\xec\xc1\xedg\xec8\xe9\xba\xe6\xeb\xe4\xe2\xe5\xbc\xe9 \xed\x8f\xf3\x95\xf7\xe6\xf8\\\xf9Q\xf4\x8c\xf3\xd2\xf7\xc3\xff\xf6\x0b.\x12\x9d\x11\x9e\x0c\x9f\x0e\xeb \xc9=\xaeP\x9bM:A\xf79\x89=IH\xfaS\xd6_5c\xd8RK:I(b#<&:!\xb2\x15c\x08e\xfd}\xf4\xa8\xeb\xc9\xe08\xd7\x17\xd1\x89\xccW\xccO\xcf"\xd5I\xd9_\xd4\xe6\xcb\xa4\xcaX\xd6>\xe9J\xf7S\xfd\xab\xfe8\xfe\x83\xfd\x89\x015\x0br\x17`\x1e(\x1d\r\x17)\x10$\x0b\x95\x08"\x07\xe2\x03\xb7\xfd\x16\xf5\xcd\xed\x16\xe7\x1c\xe1\x03\xd9\x94\xd1\x95\xcc\x86\xcb\xfc\xcc\x9a\xcf1\xd2\xfd\xd2\xc2\xd0\xac\xcf\x91\xd5\x89\xe0,\xef\xa7\xfa}\xff\x10\x00\x8c\x02\xad\n\xa0\x16s\x1fp& +\x9c)\x06&{%d)\\,G)\xec"\x99\x1d\xff\x18s\x15N\x11F\x0c\xaa\x05\x04\x00U\xfb\x0f\xf9\x90\xf7\x84\xf5;\xf3\xd5\xef\x00\xee-\xee\x08\xef\xb0\xefr\xefL\xf0\x98\xf1\xfe\xf2\x90\xf3\xba\xf33\xf5H\xf5\x00\xf5\x17\xf4)\xf4\xb0\xf5\x02\xf5V\xf2\x01\xef\xbc\xed\xa9\xed\x13\xee&\xedA\xea\x9a\xe7_\xe4\xb5\xe5|\xea\x9f\xed\x12\xf0\x00\xed\xbe\xeb\x0e\xecp\xed\xea\xf0\xfa\xf1t\xf3\x97\xf3\xd7\xf9\xc8\n\x13!\xcc0\xda+\x08!<"z4\xf4L\\]\xc3f\x04g6[kJ\x08D[MaV\xa5R\'Ce1i"4\x13\x1c\x07/\xffG\xf8\x0c\xf0D\xe4\x91\xd9X\xd1c\xca\x97\xc2\xd2\xbb\xb2\xba_\xc0\xff\xc9\xf8\xcf\x92\xd0j\xcf\xe1\xd1z\xda+\xe7S\xf4\x1e\x01?\n\xf1\rF\x0e\xf9\x0e\xf2\x13=\x1c\x05"\x06#\xd1\x1f\x9c\x191\x12)\t6\x03\xc5\xfe\xf3\xfa>\xf5\x0f\xed-\xe2\xaa\xd6\xe3\xcdD\xca\xf9\xca\xf0\xcc\x7f\xcf\x99\xce\xe7\xc9\xf6\xc6\x9d\xcaD\xd5\x10\xe1\xaf\xeb\xc8\xf5N\xfb*\xff\xa3\x02p\x0b\xdd\x15\x03\x1f\xa6\'X-\x840\x11/a-I-\xba,\xd3,(,$*^&Z\x1e\xf1\x15\xc8\r\xa7\x08\xb4\x04.\x01h\xfdU\xf8=\xf2Y\xebl\xe7I\xe6\x15\xe7\xd7\xe7\xdc\xe6\x00\xe7\x8a\xe6+\xe5K\xe5\xf0\xe5\xc6\xe9?\xed\x10\xee&\xee\xc4\xee\x85\xf0R\xf1\xa1\xf0\xbf\xf0$\xf2M\xf3Q\xf3/\xf3\x85\xf3b\xf2Y\xef\xca\xef3\xf1\x1a\xf3\xa5\xf2\xe6\xec\xc8\xeb\xaa\xec0\xf2l\xf6z\xf7]\xfb\xc2\x02\xfa\x0b\x84\x12\\\x1b,+\x84<_DR@^=\x87D\xd6P&[sa;c\x8c]!O)>]6\x076\xd85\xfc/\xac"\x8c\x13\x03\x03\xa7\xf2\x05\xe6\xfd\xdel\xdb\x0f\xd7\xca\xd0\xef\xc8\xfd\xc2\xd1\xbe\x03\xbb\xb5\xba\xd2\xbe\xd5\xc7\x98\xd3\xfe\xd9\xc9\xdc\x1c\xde\x0f\xe2W\xeb\x1f\xf7`\x05T\x11y\x18\xfa\x17\xa9\x14\xd1\x13\xd4\x16\xaf\x1dZ"\xc3"\x9d\x1d\xe3\x12\xb0\x07c\xfe\xeb\xf8\xd2\xf6\xc6\xf4u\xef\xc5\xe6\xa8\xddy\xd4\x19\xcd\xa8\xc8(\xc9(\xce\xae\xd1\xef\xd3\xb6\xd5z\xd6!\xd7\x06\xda\xf9\xe3\r\xf3=\x02~\t\t\n\xdd\x08\xea\x0c\'\x18\xd6#K,\xd4/\x8c.\xca*\x81\'\xc9\'a+\xc5+\x0b*d%\xb0\x1fO\x17\xc1\x0e\x0f\x08\xa4\x04\xb9\x02\xe4\xffV\xfbs\xf3\xbc\xea\
xbe\xe2\xe1\xdf\xbd\xe0\x0e\xe4.\xe6\xea\xe4\x89\xe0\xd5\xdb\x0b\xdbn\xdf\xda\xe5s\xeb\x9b\xed\x12\xed\xb2\xeb\\\xeau\xed\r\xf1\x0b\xf7\x8a\xfbY\xfc\xbb\xfay\xf8\r\xf6M\xf6\x1c\xfa\x81\xfe=\x04\xfc\xff\xd4\xf7\x0e\xf2e\xf1\x7f\xf8u\xfd\xe5\xfe\x02\xfc\x0b\xfdC\t/\x1a\x81#\xc9\x1d\xa7\x18\xc8\x1f\x1d/\xd4A:M\xbcQVM[A\xe8<\xffB!M6P2H[9\xdc,\x86#x\x1c\xcc\x17q\x10\\\x07\xfc\xfa8\xed\xf1\xe2\xa2\xdb\xa2\xd6\\\xd1\xe5\xca\xa0\xc63\xc5/\xc6!\xc5\x95\xc3\xf3\xc4\xe5\xca\x93\xd4\x7f\xdc\x03\xe3\xa7\xe7@\xeb\xda\xef\xd2\xf7\xde\x02u\r\x04\x13M\x13%\x12\x15\x12\x10\x15_\x18b\x19p\x17K\x13U\r\x11\x07\xb1\x004\xfdE\xfa^\xf5\xdd\xee\xc2\xe8\xa7\xe3\xad\xdd\xb0\xd9j\xd8}\xd9\xe6\xd9\xd9\xd8\x1b\xd9&\xdb\xec\xdc\xfb\xe0L\xe6\x95\xeeA\xf6s\xfa#\xfeM\x02\x98\x08\xa1\x0f\xe3\x15K\x1b|\x1f\xd0!/#\x9e#\x92$\xb7%\xf7%\x1e%\xcb"\x80\x1f{\x1b\xcf\x16Q\x13A\x0f*\x0b&\x05\x95\xfe]\xfa\xdd\xf5;\xf24\xedY\xe9\xb2\xe6G\xe4\xe5\xe3$\xe3=\xe3N\xe1\xb9\xe1\x83\xe3\x07\xe6\xcc\xe8J\xe9o\xea\xb0\xeb\xd7\xee\xfd\xf2?\xf6F\xf7\x8c\xf8\xf3\xf9\xe3\xfa\xed\xfc\xa6\xffj\x018\x01\xa7\xfe\xde\xff\x11\x01c\x01\x8d\x01_\x01\xcc\x02l\xffT\xff\x05\x01\xee\x08[\x14\x92\x1b\xc2\x1d \x17\x8f\x14Y\x1bs*\xf98\x17=\xf8:\x813I/H0p67>!=\xee5\xf3)| \xd6\x1b\xd9\x19\xe5\x19\x97\x14\xd1\x0b\xa6\x00a\xf6@\xef\xd5\xe9n\xe8\'\xe7\xa3\xe4B\xdfQ\xd9.\xd5\xe4\xd3\xbc\xd5\xdf\xd9\x1a\xdf\xac\xe2U\xe3\x9c\xe2\x98\xe2\x13\xe6G\xec\x00\xf4\x1e\xfaK\xfc\x0e\xfc%\xfa\x15\xfa9\xfc\x16\x01\xaa\x05[\x06J\x02\x8d\xfd\xeb\xfa$\xfa\xd7\xfa1\xfb\xb5\xfa\x8d\xf6\x9a\xf1U\xf0\x7f\xf0\xd6\xef\xeb\xec\x88\xebt\xeeM\xf0!\xf1\x9f\xef@\xee$\xee\x1a\xf1c\xf7\xee\xfc\xbe\xfe\x0e\xfd\r\xfc\xd6\xfdQ\x02\xc9\x08^\x0e\x8f\x10\xa6\x0e\x1c\x0c\xe4\x0ct\x10\xbb\x14\xb1\x16\xed\x16\x04\x15\xa1\x11\xc3\x0e\xb4\r\x19\x0e\x05\x0e\xe9\x0b\xd0\x08`\x04\x1c\x00\xe3\xfc\xd6\xfa\x86\xf9\xa3\xf7\xdb\xf5\x85\xf3\xd6\xef(\xedE\xecl\xec\xe2\xed\'\xed\x12\xed\xf0\xeb\xf6\xea\x17\xeb|\xebN\xed$\xef\xae\xf0c\xf2\xe3\xf2b\xf2\xd2\xf1f\xf1b\xf6\xe9\xfb\x82\xff\x91\xfe\xaa\xf9\xc4\xf9\xe5\xfc\x0b\x04\xaf\x08\xff\x06\xd6\x02/\x01e\x07 \x10.\x15\xfd\x13\xc2\x117\x13y\x1b\xb0%\xcf*N)\xda%\x90\'N-[3\x8b675\x851\x0c-\x9c*\xa7*\x0e*\r([#\xbb\x1c\xaa\x15\x01\x10\xb2\r\xd9\n\xee\x05\x08\xffr\xf8+\xf4\xe5\xef\x0c\xed\xe2\xe9\x0b\xe7T\xe3\xdc\xdf\x15\xdf\x8a\xdf\xa2\xe0\x94\xdf\xd4\xde&\xdf\xf5\xe0\xbf\xe3\x81\xe6\xe6\xe8\xdd\xe9\xd0\xe9\x01\xea\xcd\xeb2\xefj\xf2\x02\xf4c\xf3\x0b\xf2\xe3\xf1\x0f\xf2V\xf3\xa1\xf4\xb0\xf4\x0e\xf4\xe5\xf2\xaa\xf2\xdf\xf2\x0f\xf2S\xf1\r\xf2\xab\xf3+\xf51\xf5P\xf5\x0e\xf6\xd2\xf6\xfd\xf8\t\xfb\x90\xfd\x9b\xff\xdb\x00i\x02*\x04\x8e\x05\x00\x08,\nl\x0c\'\x0e\xc0\x0e\x82\x0f\xf6\x0f\x8b\x10\xe5\x10.\x12\x86\x12m\x12Q\x11u\x0fU\x0eU\ra\x0c\x93\x0b\xe1\t\x82\x06\x88\x04h\x03X\x02\xf7\x00\xa0\xfe\xb8\xfb\xa9\xf9\x9f\xf8\xe5\xf8\x8f\xf7\xd4\xf5\xd6\xf5\xc6\xf5W\xf5\xf2\xf3B\xf0\t\xef\xc7\xf1u\xf5\xfc\xf6\x06\xf57\xf2\x00\xf18\xf1T\xf3\xf7\xf5R\xf6c\xf4\x08\xf4\x1c\xf8\x92\xfd\xb6\xfe\xe4\xf91\xf7\xb4\xf7\xba\xfa#\x00K\x04#\x06S\x05\xf6\x05\xd6\x06x\x07E\x07 \x06\xb7\n\x1b\x14\xbc\x17\xa0\x132\x0b~\t\x83\x11\xc7\x1a3 s\x1b\x08\x11\xc5\x0bx\x10\xbc\x1eO*\r(\xf7\x1b\xa0\x0f\x96\r,\x16\xf3\x1f\x16%\x8d"9\x1a\xb5\x11\x0f\x0e\x8a\x0f\xc8\x11\xdc\x12\x02\x11\x88\x0bK\x06d\x01y\xff_\xff\xcc\xfd\x0e\xfa\xc9\xf5\n\xf3 
\xf1\xbf\xee\xef\xea\x92\xe8X\xe7\xcc\xe6\xdb\xe60\xe4\xb8\xe0\xab\xde\xc9\xde\xe1\xe0\x90\xe2c\xe3Z\xe3\x8b\xe2\x88\xe0\x95\xdf\xcf\xe2O\xe8-\xec\xff\xec\x96\xed\x12\xed\\\xee8\xefE\xf0\x05\xf6]\xfc\xa2\xff\xe8\xfe\x8e\xfc\x92\xfd\xe4\x00\x90\x013\x03\xb9\x07\xec\x0bJ\x0c*\nS\x07\xcc\x06\xfa\nm\x0f\xef\x11\xf2\x0e\xc6\n_\n\xa0\x0b\xa4\x0eH\x0f\x82\r\xf7\x07\xb2\x08\xf9\n\x1b\x0b\xeb\x06&\x04\x01\x05Q\x05\xc9\x03\xde\x02h\x01\xbb\x00@\x01_\xfe<\xfb\x89\xf5\xb0\xf9H\x00\xab\x01\x11\xf9\x16\xf2u\xf1 \xf3\x0e\xfb\xf5\xfe\xa5\xfbD\xf0N\xe78\xf0\x7f\xfa\x19\xfc|\xf9\xce\xf8\x9a\xf3^\xec&\xf1R\xfaU\xf5\x83\xf8\xc3\x01\xbd\xfe\xe1\xf6\n\xef\xc8\xf5\xa9\xfe\xdf\x08\xad\t8\x01B\xf7\xf7\xf9\x0e\x0bC\x0e\xd0\x05\x82\x01f\x03\x1c\x0f\xa8\x1e\x9f\x13s\xfc\x1a\xf7\xe6\x05\x9e\x1c\\#I\x1d1\x0eC\xfa\x06\x00y\r\x7f\x12\xc9\x15\x98\x1aW\x11y\x0b\xeb\n\xb5\x01R\x03{\t\r\x16y\x19\x18\x0f\x1c\x08>\x00\x08\x01\xe2\n\x1a\n\xa3\nr\rC\r\xa4\x07\x11\xfd\x0b\xf9\xc5\xff9\x01a\x05\xea\x06\x89\xfb\x95\xf8\xf5\xf6\x9d\xf4L\xf1\x13\xef\xc1\xf1N\xf5d\xfb\xb0\xf3\xb1\xea\xe8\xddJ\xe3\xd5\xf1|\xf5F\xf8\x9a\xef\xaf\xe7\xfa\xe33\xe8;\xf3\xf1\xf8&\xfe\xf7\x00\xad\xf2\xd1\xe9X\xed:\xfbE\t&\n\xc4\x06\x8f\xfel\xfd\xa3\x00\xc4\xfd?\n\x8b\x15\xcb\r\x8b\x06\xe2\x04=\x0f\x15\x11_\x01g\x02\xe0\x0e/\x14\x8d\x17\x10\x10B\xfe\xd7\xf7\x8e\x04\x1b\x15&\x10\x07\nW\x06m\xff<\xfc9\xff\xfd\x03\xe2\x02\xb1\xfcA\xfb\x0c\xf6u\xfa\xe9\x03\xd4\xfd\x12\xef8\xe5#\xec\x82\xf7\xa9\x02c\xf9\x8c\xf0Z\xe4U\xe1\x12\xfb\xeb\x06\x17\xfdH\xf2q\xe8\x8f\xe1\xef\xf2\xda\x05m\x08\xa2\x07\xa2\x06\x12\xef\x10\xdd@\xf3*\x0f\xce\x0f\x98\x0b\xdb\x02\xd6\xf6z\xf6\x85\xfe\xe5\x10F\x0ek\x04"\x02\x10\x05-\x04r\x03\xe6\r\x0f\x13}\x07@\xfa\xeb\xf93\t7\x16\xde\x15\xf1\x07\xf6\xfeU\xf7\x1f\xf4[\x13\xb3\x1c\xae\x15\xa7\x00G\xf1\xa8\xf8\xe1\x04\xe5\x183\x19L\t\xeb\xfa\xaf\xf4\x9f\x01`\x07%\n\x99\x0fO\t\xbb\x082\x042\xf4Z\xf3y\x082\x12Q\x0b\xf7\xfa\xd2\xf6\x87\xfb&\xfc\xe8\x05"\x07\xb3\xec\xae\xf2M\x03z\xf8\xf3\xfa\r\xf5\xc7\xf1m\xf8m\xf9\x9d\xf3\x7f\xedR\xeeq\xf0w\x02\x92\xfd\xec\xf0l\xeb,\xef[\x00\x8e\xf9\x1f\xf4~\xf8\xe0\xf1)\xfc$\x10n\x0b\x90\xf3v\xef\'\xfeS\t\xb6\x11\xc9\x06\x8e\xfc\xf9\x072\x11\xb0\x18H\x04\x07\xf3\x80\x00\x8e\x12\xc9#\x92\x14\xa3\xf3\x87\xfb\xd3\x14\x83\x0cp\x0c\xe7\t\xa3\xfc\xbd\x00\xb5\x05\xaf\x0e\xad\x03\x17\xfbJ\xf8\xb7\x08;\x0f\xd9\x005\xe8\xe0\xe1\xf7\x04\xa7\x12\xe4\xff\xfe\xfaW\xe8\'\xe5\xc9\x03\xbb\xf9/\xf6\x8b\xf5\xa2\xf2\xcb\xf3\xaf\xf7\x0b\xfez\xf9\xa1\xf5\x0f\xf6\x10\xf0|\xe2J\xff\x1b\x17h\x16\xbc\xf5\xc5\xd0\xa8\xdc\xde\x07\xf4$\x15\x12\x8b\t%\xf4?\xdb\xd1\xe6\xa6\x06\xd7/\xe4#\xa3\xff\x89\xdb\xf8\xd36\x11\xae;\n W\xf0\x18\xdb\x16\xf3k\x1dr*\x93\x18E\xfb\xff\xe1L\xeb\x7f\x1a\xd7&P\x12\t\x02\x87\xf2\x05\xfb)\x05\\\r\x9c\x1b3\x06\x0c\xf2K\xf9\xec\xfcd\x15\xe3\x12n\x01\x06\xfc\xc3\xf1c\xfb\x12\x03N\x07\x1b\x06\x1b\x04\xdf\x05\xf1\xf8\x8b\xf5\x99\xe1\xef\xe9\'%\xd8\'\xaf\xf6\xc7\xe0\x93\xe3\xcf\xef\xdc\x07\x9e\x04}\x01}\x04\x04\xebA\xf6_\xf8\xc3\xef3\xf0\x00\xfe\x8c\t\x10\x046\xf2}\xe1\xe4\xef\xb1\x08\xa0\x07\xf7\xfc \xfb\xe4\xee@\x06h\nc\x03\'\xf0\xd6\xf1p\x0f>\x11y\x10\xd9\x08_\x00c\xfd\xcb\x02\xe4\x07H\x07g\x08\x93!\xd7\x1d\xcd\x02L\xeb\x16\xedm\n\xfa\x1eb&\xe1\r\xc0\xfbO\xf5u\xf1k\xfa\xce\x08\xbf\x16\x1d\tx\x02\xbb\x03\xf5\x00\xc7\xf6\x0b\xe7\x7f\xec\xa2\xfaC\x08\x16\x19d\x0b\xc4\xf5\xe7\xe7\x93\xd1\xf6\xe9 
\x0b\xbb\tG\x05\xfe\xf3\t\xfb\x10\xfd\x9b\xf5z\xe4x\xda\xd6\xfc4\x0f@\x1a\xb9\x07\xd7\xe4$\xe3/\xed\x0e\x08\xab\x061\xf6\x08\x07\xe4\x0cI\xf9\x1b\xf3\xca\xf9\x17\xfb\xf5\t\x0b\x0c\xb2\n\x05\x104\xfd4\xf8\xab\xfb\xf4\xfb\xac\x0c>\x19S\rv\x17\r\x0b\xae\xf2\xbb\xff<\xf8\x0b\x05\xe1\x16\xf7\x1d\xdb\x18\x9b\x03\x8f\xfc\xa5\xf29\xf4\xde\x0b\x96\x12\x02\x13\xdd\rr\x08\xfb\xf8\xf4\xe69\xfe\xd1\x05\x04\x00\xda\x0f#\t\xa6\xf4\xb5\xf2\xc3\xf8]\x04\x81\xee\xd5\xf2i\r\xb8\xf8\xff\xf9\xb8\x00J\nF\xdd\xcb\xcf\x10\x00X\x17v\x1b\x0b\xf0\x9c\xe5\x96\xe1Q\xea\x15\xfe\'\x0f\xc8\x16\xc1\x05o\xd5w\xd3\xb0\xfa\xb6\x0e\xc5\x1c\xe2\x11\x12\xf3\x1a\xd9\xa4\xee\x0b\x02\x14\x0c\x99\x1c5\x10\xef\xe8\xf8\xdb;\t\x0f$v\x16\xb6\xfb*\xf0\xfc\xf8\xe0\t\x19\x1c\xfb\x11~\xf6v\xfd2\x0b\xff\t\xf5\x0f\x03\xffW\xff\'\x0e\xdb\x10\xd4\x02\x00\xf6\x99\xfe\'\x08K\x0f-\x06\x89\x10\x81\x05\xa9\xe7\x86\xf9\xe4\xfee\xfe;\x17|\x11|\x03!\xee\xbb\xd7g\xf6\xd7\x154\x15*\x01?\xe8-\xec%\xfa\x97\x05\xfa\xf5\xaf\xf4\x95\xf9Z\xff\x9b\xf6$\xf7\xa5\x03}\xf0\x1e\xf8G\xf4\xe7\xefH\xfan\x0e?\nq\xf2f\xf4\x15\xe0K\xf1[$g\n8\xf7s\xffi\xef\x01\xf3\x94\nk\x12\x00\x00W\xfb\xae\xfcp\x03q\x11\xfc\x0c"\xfb\xa9\xf0}\xf04\x14\xa7)Q\x1c\x1d\xf4K\xebQ\xee\xcb\xefw"\x83@\xcf\x16^\xddF\xed\x07\xff>\x04\xb3\x16\x12\x1dV\x02\xb7\xea\xfb\x0b!\x10{\xfaH\t1\x06\xc0\xdc\x7f\xf5s \xc2!\xd8\xf9\xc8\xe7\x03\xf2\xc5\xe2\xcb\n@\x07\xea\xf8J\x10\x0e\x00\xb1\xf54\xd0\x02\xe4\x81\x0c9\x18R\x13\xc7\xebN\xdf\xf2\xe07\xf6\xb1\x079\x1b\xe8\xff\x98\xf1O\xf4\xe9\xe3a\xeeB\x0b\xa5\x130\x12\xb3\xfc\xf9\xda\xbf\xf3\xd2\x03\xf9\r\xe3\x0e=\x14\x1e\xf5t\xec~\xef\xd6\xff\xbd5T\x19\x90\xfb|\xdff\xd9\x17\r\x18*\xf07\x83\x14P\xd8\xbc\xc8\xcc\xef\xaf/\x1d"\x85\x12\xf1\x17\xd6\xe8l\xe8\r\xe8\xcd\xfd\xf80[\x19\xad\x081\xf4]\xdf\xb0\xf2g\x0c1!d\x0b\n\xed\xa6\xf1\xaf\xfb\xd7\xff\xbc\x13t\xf8d\xe2\x90\xfd\xd2\x12V\x04\x96\xe1\x01\xfaN\x17h\xf0\xb3\xe8\xdd\xf9U\xfe\xf9\xfdR\xf1!\x17P%\xcf\xdf\xca\xb7B\xee\xf4\x0f\xcd)\x01-`\xf1\xe9\xd3u\xd98\xf7\x89\t\x93\x10\n\x1b8\x14f\xe6\xcf\xe3d\xed\x06\x04p\'\x14\x13\x1c\xf7H\xdb_\xeb\xf2!\xd6+J\x1e_\xe9\xba\xc6H\xf2\x04\x11\xb8;\xb95\xdb\xffa\xc7\x9c\xd5\xdb\x13\xdb-\xb3#n\xfaa\xf6\xa8\xfc\x0f\x07\x84\xfaV\x00\xea\x15\xde\x02\x8f\xf4\xf8\xffY\xf8l\x0f\xbc\x0f\xb3\xf8\xba\xe6\x9e\xed*&\x14\xfc.\xe4\xcf\x0c\x14\x02\xc8\xf1\xdb\xf5\x84\xe8y\x13e\x0b\x8b\xe5\x89\x11\x05\xf9\xd9\xe9\xbe\xf5\xeb\xf2\xfa\xfd3\x19\xe7\n\xec\xf4x\xec\x00\xf4\xeb\x00a\xf2\xd4\xf3\xb0\x14\xc5\x1e\xff\x07\xb2\xe3V\xdb\xa9\xf1[\x13l*\x97\xf2`\xec@\xf8\x1d\x06\xf97\xe5\x01\x00\xc5\xea\xd4\xe4\t\xc0=\x08A\x1f\xf09\xcb\r\xf0\xbc\xff\xf2\x04\x18\x1f\xd2\r@\x08H\t\xad\xe9\xeb\xf3u\x0b\x1c\x0f;\x01\x98\x0e&\xfb\x80\xffh\xf5\xff\xff\x89!\xe3\xfd\x99\xf8\x04\xf2\x07\xe5B\x0b\x1c!\xf0\x1c\'\xf87\xbf\xd0\xe5\x06\r\xac#\x91\x1c\x10\xee\x07\xecA\xe7\x1f\xec\xa7\x12\xcd\x07\xac\xfa+\x06\xd0\xe2\xa2\xe9f#\xdd\n\xcd\xfaW\x05\xba\xcb\xb5\xdc\xb8 m# \x10\xf4\xfd\'\xe2\x08\xf4E\t\xb3\xf6\x9e\x05\x9d\x05\xc0\x02\x14\nI\x03!\x06\xa3\x00\x03\xe3\xa7\xf8)\x19Z\x16\xca\xf92\xf0\xfe\x02\x9f\tb\x06\xd2\xff.\xf8\xbe\xfb\x91\xfao\x0f\x17\x0eD\x03(\x07M\xea\xd3\x08p\xf0\xd1\xef+%\r\x01\xd4\x00\xbe\x19n\xfec\xe0\x87\xe2\xe3\xff\xa1\x1b\xb2!_\x065\xef 
\xf0[\xe6\xbf\xf1r\x1b#\x1b\xb8\xff\x80\xfdC\xe2\xa5\xe8\xed\x04H\x12A\x10\xa6\xf6\xb5\xf3t\xf5v\xf4\xec\x05[\x03e\t\xa8\x14\xa2\xd8\x1f\xdd\xbb\x1ct\x0fZ\x00*\x0f#\xea\x19\xe5w\xf8\xc2\x10\xa0!\xbd\x02#\xfa\x16\xed8\xe2\xdc\x03\x84$p\x06f\xff8\x04\'\xf5\x1c\xff\x82\xf1\xd5\xf7^\x14\xaf\x19\xd7\x19j\xe4\xe4\xd7\x18\xfb\x8d#\xd7\x13\x91\t\xc3\xee\x96\xdfI\x0c\xf3\x17z\x10\xa8\xf3y\xe7\xec\xf2\xa3\x1c\\\x02\x85\xfb\x9b\x17\x82\xf7H\xeb\x94\xfa\x90\xff\x00\xf6\xef\xfb\xbc"\xa6!\xe4\xe0f\xe0\x1b\xee\xc5\x03\xf5\x11.\nJ\x01\x10\xfe>\xfa\x85\xfb-\xfc\x00\xe2w\x00\xa8%\x8c\n\xe1\xe2\x84\xf4r\x0bR\n\xb1\x08\xc4\xeac\xd8\xe9\x02\xb2&)\'p\x07\xa6\xc7J\xd0\x83\x18\xc4&\r\x01:\xfe\x0c\xfay\x03\xbd\xfe"\xf6\xc8\x04\x9d\xf2\x85\x00\x0b\x1e\xfb\xf9h\xf4w\x1e3\t\xdf\xd4\x10\xe7\xf5\t\xda\x1ae,\x9c\x04Y\xe0\xce\xd7\xfa\xfdZ3(\x1a\x0b\xe1\xee\xe7\x94\x03A\x12\x0f\x10}\x00\xbe\x04\x90\xe9\xc2\xf4o\x0c|\xf9\xe4\xfb\x8c\x14\xfa\x06\x85\xfe\x01\xef(\xf2\n\xff\xe1\xfd\xdf\x15\x0c\xf9\xd1\xef\xd1\xee\xe1\n\x051\x90\xe3P\xd0\xea\x0c\x9f\x17\x9a\x05$\xf65\xef\xed\xf2E\x12\xe6\x1a\xac\xf3\x9b\xf0\xa4\xf9\x89\x0b\xdc\x08B\xdf\x81\x04%#"\xfb7\xf6@\xfd\xf7\xeel\x07\xd9\x04\xef\t\xc1\x1aL\xe8\xba\xdb2\x04\xb7\x18\x98\x16\x87\xff}\xe3%\xec\x18\t\x16\x1cz\xff4\xf4\xbe\x04\xcb\xf7!\xfaw\x0b\x1e\x02r\xff\x8a\xf7v\xff?\r\x7f\xf7\x01\xfc\xc2\r\xdf\t)\xfc"\xe6\xfb\xee\xc1\x193\x1a\x9c\xffA\xfc\xc9\xebJ\xde\x8d\x16\x8b4^\xfd\xc9\xde\xf9\xe2^\xfdU\x1e\xbe\x13-\x05\x0e\t\x9f\xdf\xc3\xc7\x0f\x0f\xe5/P\x17\xb6\x01\xee\xdb\xdb\xe8g\xfb\x1d\x02\xdb\x15:\x1c\x14\xf1!\xdd\xc5\xf6\xb9\x11\xff\x1e\xae\xefo\xe2\x1a\xf8\xbf\x12\x81\n9\xf6~\x0f\xa7\x05\xf1\xfa\x92\xdd\x95\xe4\x0c e&\xa4\x0e\t\xf0\xc8\xea\x91\xea\x8f\xf9\x90\x1aQ \x81\x04\xc9\xe6\xd9\xea\xf1\x01v\nD\x14d\x17\xf4\xee\xed\xdf\x18\xec<\r 2?\x0e\xcf\xf1\x00\xea\'\xdf\x04\xf5\xf0\x1d\\/\xee\x0b\xa6\xdf\x89\xd5L\xfe\x1c\x18k\x0b+\x0b-\xf2"\xfb\xcc\xfd \xef\xdb\x13\xe2\x03 
\xf4&\xff\x98\x04\x19\xfas\xec\x98\x0e\x8c#\x01\xf0\x1b\xd4B\xff\x05\x14\xaa\nQ\x05\x7f\xf9\xa4\xfe\xb1\xec\xcf\xfa0\x13\xaa\xf4K\xfdd\x12\xb0\x12\xf2\xfa\x88\xd9\xfc\xe4\xf5\x0f\xc5*\x9a\x15\xc1\xfd\x0c\xe0g\xd9{\xfd\xa3\x1d~*\xee\xfdE\xea\xf2\xf0\x13\xec\xb1\n\x94\x1e\x92\t\x1c\xf9.\xfa\x1a\xdey\xf7\xb3"\xf4\x1c!\n3\xe6q\xdar\xf3M\x13\xf7\x1a\xf2%\xfa\x00<\xcc^\xe8\xf8\xfdU\x10\x19&\x91\r\xc4\xfd\xda\xe3\xac\xe1\xf2\xfa\x8b\t<"\xe4\x16\t\x00~\xd8\xa0\xe3\xfd\x06\x83\x17\xfd\x1b\xc0\xf1\xc2\xdb\xa2\xfb\xe5\x172\x14\xee\xfc\x7f\xf4X\xea(\xf2\x07\x07>\x04\x9e\x0b\xb3\x18\x1d\x00\xeb\xe4n\xf3#\xf3\x97\x02\xd9\x0b\xe2\r\xd8\x11Y\xf9\xbd\xf1\xe2\xf2\xf4\xf7\xfe\x08\x9a\x10\xf5\x02\xb6\x03i\xf7W\xf0\xab\xfc\x1c\to\x13\xd4\rD\xf5d\xe4+\xfb\xec\x07\x9f\x06\x82\r\x13\x08\x06\xf7\x03\xeb\xc6\xf5\x89\x19m\x1d\x7f\xfc\x99\xdd\x11\xeb\xa4\xfe\xa1\x1e\'\x1d\x8d\x07\xd9\xf6e\xde+\xef\xda\x04\xd6\x0b\xa5\x11\xc4\x12c\xf1\xab\xedN\xef\xcf\xf8\xd6\x167\x14(\xfaA\xe6\x88\xf4\xc5\x0e\xc0\x02\x10\xf9\x05\x13\xb3\xfd\xad\xe2Z\xef\x8d\t\xb8\x11\x94\x12X\n\xb3\xef\xb9\xd8e\xf2]\x1a\xfe\x13^\x04K\x01z\xf6\x8a\xed\xa5\x00\xa8\x06k\nD\x0b\x15\xf6`\xed\x98\x07J\x06+\rY\xff\x8e\xeb\x11\x0c\xd2\xfe\xf6\xf6"\x10\x9a\x06j\xefa\xfd\x8c\t\xe2\x05\xc0\x04v\xfc,\xf9^\xfaT\xfb\x8a\x108\n\x91\xf7v\x06\x87\xf9E\xf9\x98\x03b\xfb\x81\x02\xa0\x0b\x0c\x00\xea\xf2n\x01\xc3\x0f\x01\x04\xae\xe8\xc7\xea\xc8\x05>\x19\xe9\x0e\x18\x00\xa8\xea;\xe3\xdd\x04\xe2\x10\x1e\x0b\x9e\x02c\xef%\xf0\xd3\x08\xb4\n\xad\x03\x16\xf6\xb8\xfe\x14\xf4\xc5\xec\xad\x15\x10\x1a\xca\x08L\xee\xdf\xde\xc1\xf3i\n#\x17*\x1d\xb7\x00\x8e\xdb?\xee\xda\x04\x88\r$\x0f\xe7\x06`\xf3O\xf2s\xfd)\r\xb3\x19\xa3\xf9\xcf\xe24\xf2\x17\x05\xc6\x16\x17\x14(\x08\xbb\xf4\x82\xe2\xe9\xf2\xe6\x04\xc3\x13\x8f\x11\xcd\x08~\xf9\xd7\xea6\xf6\x98\x06\xc3\x0b\xc4\x0eD\xfd\xf1\xee\xc0\xfb[\x07Q\x06\xfb\x08\xa1\x06\x9c\xf2\xfb\xefu\xff\xd9\x05<\x08\x1f\x0b\xfd\x04=\xf9\xed\xe8i\xf5\xc7\x0c\xbf\x0c\x13\x05\x7f\xfb\x93\xf7<\xf6+\x00\x0c\x04\xed\xfe\x07\xfe\xfa\xf95\x00\x9c\x01\x04\xfe\x10\x06.\xfd\xeb\xf6\xa3\xf9\x94\xf6\x17\x03\xb4\x0f/\x0bn\xfc\x0b\xf1\xeb\xf1\x80\x05x\x11\xbe\x01v\xf8\x1c\xfe\x16\x02\xe5\x04\xb0\x04`\x05\xa3\xfd\x01\xf4D\xf7"\x0bF\t@\x08n\x07g\xf9\xb1\xf7\x7f\xf3+\xfc+\x08\x8b\x0e\xd5\x10\x98\xf8\x03\xf4\xcf\xfe\x1e\xf6\xf2\xff\x07\x13\xa9\x03\x1a\xf8\xc3\xf6\xac\xfe\x92\r\xb2\x02\xdf\x02&\x01G\xe86\xef\xe4\x0cE\x17B\x0b\xf5\xfc\xfc\xf0~\xf2\xe4\xfa)\x02\xbe\x0f\xb6\x02\xba\xf7\x8d\xfb*\x004\x07,\xfdx\xf2,\xfd\x8c\x04\xb8\x00\xc1\xfef\xfes\xff4\xff\xbe\xfcq\x00,\x01\xb8\x013\xfd.\xfb\r\x02\xc1\x02\xe6\x00\xbb\x00\x8f\x01"\xfe\xbb\xfd\x82\xfbG\x01\x91\x06P\x02\xec\xfc\x13\xfb\x0c\x05\x9a\x03}\x00\x10\x00\xc7\xffa\xfbB\x00\xdb\t>\x03A\xff\xf6\xffH\xff4\xfd\x11\xff\x91\x03\xc5\x06\xd7\x02\xce\xfd\xf0\xfcj\xfc\xf2\x01D\t#\x04w\xf9\x13\xf9\xe7\x03H\x04\xe9\xfb\x08\xfe\x18\x02\xbd\xff8\xfa\x97\x003\x01\xe9\xffe\x01\xf3\xfc\xb9\xfc\xd5\xf9p\xf7\'\x05o\x0e\xb4\x02\xd5\xf6\x13\xf5e\xfc`\x00R\x06\x05\t/\xff\xfb\xf7,\xf8\x82\xfe\xb9\nU\x05\xec\xfbx\xfcx\xfc2\xfc\xa2\x01\xf0\tT\x03M\xfd\x96\xf7\xbf\xfc\x9b\x04\xdf\x02\xfe\x03\xaa\x00\x06\xffr\x01\xf4\xfd\x93\xfb\x8f\x022\x04\xe9\x00\xcc\x01\x97\x02\xe9\xfc1\xfc\xce\x02\xd9\x03k\xff\xe4\xfb6\x01\xa7\x00~\x00H\x02e\xfe{\x02\xb9\xff\xc0\xf9m\x00\xf5\x02\xcf\x00\xf0\xff\xdb\xfe\x81\xfeA\x019\x01\x00\x00\x13\xfe\xc8\xfa\x8c\x00\xab\x02\xd7\xfd\xe9\x00*\x02\xad\xfe\x87\xfdU\xfc\xfe\xfd/\x01\x04\xff\xf0\xfd\xc2\x02\xaa\x00\x9e\xfda\xffy\xff\x15\xfe,\xfe6\x01\xbc\xff\xd1\xfd\x9c\x01Z\x03w\xff\xc3\x00e\xff\xf1\xfa\x08\x017\x02\xa9\xff+\x00,\xfe
 \xffk\x03\x00\x03\xcd\xff\xd6\xfa\xc7\xf8Q\x00\x03\x05\t\x04&\x00\xac\xfd\xaf\xfbP\xfdw\x03&\x02\xb5\xfe\xf1\xfb\x97\xfe\xd1\x03\x99\x05\x89\x01\x81\xfb\xdd\xfc\x95\xff\xb6\x00T\x03$\x02\xb6\x02[\x04\x0e\xfd\xc3\xf9\xab\xfdz\x02\xd1\x06\x82\x04F\xfd\xad\xf8\xcf\xfb\xed\x01\xdc\x06\xf1\x04\x1a\xfd&\xf7_\xf7\xe7\xff\xcc\x07\xaf\x06\x8c\xff-\xf8\x87\xf6`\xfc\xb0\x05r\x06Q\x00\x8c\xf8;\xf4\xfa\xfas\x02\xa3\x05 \x003\xf9\x1c\xf7\xb5\xf8\xd9\xff\x7f\x07\xd9\x05\xfe\xfd\xdb\xfbQ\x01B\x08\x16\x0e\xd3\r\xa6\t_\x07r\x08\xd3\x0e\xeb\x15\x01\x15\xcb\x108\x0c\xf4\n\xc1\x0e\xaf\x11r\x0f?\x07\x0b\x03$\x04\xbf\x04^\x04\xa5\xffV\xf9\xc0\xf5l\xf3\xc6\xf3\xb9\xf5\xf3\xf3\xa4\xf0\xa2\xeeV\xee\xa9\xee\xc0\xef\xaf\xf1\xaf\xf3s\xf3\xc5\xf3\x8d\xf6^\xf9h\xfd\xc2\xfe\xb6\xfd+\xfe\xf0\xfe\x8a\x02\xdb\x08\x87\n\x96\x05\xf2\x00\x87\x01\xc3\x06\xde\x08#\x07d\x04\x9a\xfd\xd5\xf9\x8c\xfd\x7f\x05v\x06\xc7\xfb~\xf1\x87\xef\xcc\xf6i\xfe\x08\xff\x06\xf9\xd6\xf0`\xf0\xec\xf4\xbd\xf9\x83\xfe\xf8\xfa\x84\xf6\x93\xf6\xa3\xfcb\x00\xc5\xfd\xab\xfd\xd0\xfdy\xfe\xc2\xff\xc8\xff\x92\x00\xfb\x02\xa6\x02z\x04\xa1\xffy\xfa\xc4\xfb=\xff\xe7\x05\x02\x06\xe0\x00\xda\xfc*\xfc]\xfcp\xff\xf4\xff\xf5\x00_\x01\x00\x00/\xff\xca\xff\xd8\x00\xc3\x00\xde\xffd\xfd\xc7\xfc(\x00\n\x046\x04K\x03U\x01u\xff\xba\x01\xf9\x02\x87\x02f\x05\x8a\x0b\xd7\x11\xc1\x14\xae\x11\xa0\r\xf5\x0e_\x14Q\x1a\x11\x1e\xf0\x1dP\x1c\x1c\x1c\x86\x1c\xca\x1b\xaf\x17\x95\x12M\x10@\x0f\x1f\rA\n\xe5\x06\xbc\x02\x80\xfe7\xf9b\xf3/\xefm\xee\x82\xeeX\xed\xe0\xeb\xd9\xea\x97\xea\xe1\xea/\xec\x07\xed\x8a\xebY\xea\t\xed\xab\xf3\xf0\xf8j\xfaK\xf8\x1e\xf6a\xf7\x1a\xfa\x06\xfdh\xfeM\xfd\x0f\xfb\x1e\xfbl\xfd\x91\xfe\x07\xfd\xfd\xf9"\xf8>\xf7\x90\xf7\xb9\xf8$\xf9\xab\xf8\n\xf7w\xf6\xbd\xf7\xd9\xf8\x01\xf9\xac\xf8\xfb\xf81\xfa\xa6\xfb\x90\xfd\xa2\xff\xb6\xff\xd7\xfe\xc7\xff\xe6\x00\xf0\x01\x17\x03\xe9\x03\x81\x04S\x045\x04\'\x05\xf2\x05\xcf\x05\xef\x04T\x04>\x05-\x06X\x06r\x06\x1c\x06L\x05\x80\x058\x06\xc1\x06_\x07\xb8\x06<\x06l\x07\x0e\x08\xd8\x07\x91\x07\xcf\x06B\x06%\x06\x1c\x06\x11\x06\x15\x06h\x04.\x03\xf3\x02\xbd\x014\x01\x7f\x00\x0b\xff\xed\xfd\x1c\xfe\xd8\xfd\xb9\xfd\xc0\xfd\xbf\xfd\x8d\xfc[\xfc\x9f\xfcF\xfd\x03\xfe\xe6\xfd\x1f\xff\x06\x00\xdf\xffV\xff\xa2\xff\xe6\xff8\xff#\xff\xd4\xff\xcf\xff\\\xff\xbb\xfeO\xfem\xfd\xbd\xfc\x7f\xfc\xb5\xfb\x8e\xfb:\xfb\xe1\xfa\r\xfb\x88\xfa-\xfa\xb1\xf9\xfc\xf8]\xf9\x89\xf9\xc6\xf9\x97\xf9\xa3\xf9>\xfaE\xfa_\xfa\x04\xfas\xfa\x05\xfb\x90\xfb!\xfc/\xfc\xd3\xfc*\xfd\xd3\xfcA\xfd4\xfe\x82\xfe]\xfe\xb2\xfe\x0b\xff9\xff\x9b\xff\xa8\xff,\x00E\x00G\x00\xda\xff\xc0\xff\xe1\xff\x0c\x00\xcb\xff\xb8\xff\xa4\xff\xf6\xfeh\xff\xcf\xff\x92\x00\xf5\xff\xb0\xff:\x01\'\x04\x8b\x07<\n\x85\x0b}\x0c4\x0f\xc0\x12\xf9\x16<\x1bj\x1f\x03 \xaf\x1e\xdd\x1e\xce!4$\xbb"\x15 \xa9\x1dk\x1bt\x18\xe4\x13\x88\x10\xd9\r\x82\t\x07\x04\\\xfe&\xfb\xc3\xf9Q\xf6\xe6\xf1\x87\xee\xb9\xec\xd3\xea\x80\xe9Q\xe9\xc3\xe9\x1d\xea\xe6\xe8\xc1\xe7\x97\xe8L\xea\x85\xeb 
\xec\xb3\xec\xbd\xee6\xf0F\xf0\xeb\xf0\xec\xf1`\xf3<\xf4\xd1\xf4;\xf6\xc4\xf7\xbb\xf7|\xf7^\xf9\x07\xfby\xfb&\xfb\x84\xfb\x84\xfc\x9b\xfci\xfc\xfe\xfd\xf4\xfeB\xfe\x89\xfdM\xfd\xdb\xfdF\xfe\xc2\xfd\xb5\xfd\x88\xfe\x80\xfe\xf7\xfd\xff\xfd\xe0\xfep\xffK\xff\x14\xff\x19\x00\x05\x01m\x012\x02+\x03\xd1\x03\x07\x04h\x04]\x05\xb6\x06\xbc\x07a\x08\xd8\x08W\t\xee\t\xc1\nb\x0b\xaf\x0b\xdc\x0b\x98\x0b\xb4\x0b\xd4\x0bW\x0b\xde\n\n\n\xb5\x08\xa4\x07\xc5\x06\x8b\x058\x04n\x02N\x00\x10\xffI\xfe+\xfd\x0b\xfcV\xfa\xa9\xf8)\xf8\x9d\xf7Z\xf7\xb5\xf7\xb5\xf7i\xf7\x9a\xf7>\xf8\xf3\xf8\xdd\xf9\xa4\xfa[\xfb!\xfd\x97\xfe\xd3\xff\x1c\x01j\x028\x03\xa9\x03\xab\x04\xdc\x05\xc3\x06\xb6\x06&\x06$\x06M\x06\x8b\x05.\x04q\x03\xc7\x02\x1e\x01I\xff\xfe\xfe\xdc\xfe\x97\xfd\x9b\xfb\x98\xfa0\xfal\xf9\xb6\xf8\xd2\xf8k\xf9u\xf9\xb2\xf8H\xf8&\xf9\x81\xf9y\xf9\xaf\xf9 \xfa\xd7\xfay\xfbw\xfb\xbc\xfb\xa1\xfc\x84\xfc1\xfcl\xfc\x9c\xfd4\xfe\xe6\xfd\xe7\xfd\x07\xfe\x16\xfe\xe8\xfd\x87\xfdT\xfd\xa4\xfd\x7f\xfd\x85\xfd\x03\xfd\xab\xfc\x85\xfc\xef\xfb\x07\xfc\x81\xfc\xcb\xfc\xe3\xfc$\xfd\r\xfd\x85\xfdN\xfd\xf5\xfdA\xff\x17\x00\x08\x01\xff\x01\x8c\x02\x99\x03/\x05\xe4\x05\xbc\x06\x04\t\xde\x0c\x81\x10z\x12.\x14\x83\x17L\x1aY\x1b_\x1cu\x1f\xb0#\x1b%W#\t"\x87"A!\x18\x1d\xde\x19\x90\x19\xff\x16\x99\x0f\x9b\x08\x90\x06Z\x05i\x00\x8f\xf9\x80\xf5\x88\xf3\xaf\xef|\xea\x98\xe8\x0e\xeac\xea \xe7v\xe4F\xe5&\xe7\x1f\xe7c\xe6\xcf\xe7\'\xea\xd6\xea\r\xeb\xac\xecD\xefH\xf0\xc3\xef\xf4\xf0\x8e\xf3\x02\xf5\x87\xf5k\xf6\xdf\xf7\xca\xf8O\xf9\xc8\xfaj\xfc\xa2\xfc\xf9\xfb\xa3\xfc\xef\xfd\x7f\xfeP\xfe\xcc\xfeF\xffL\xfe\xf8\xfd\'\xff\xab\xff\xcc\xfe\xcd\xfd$\xfe6\xff\x82\xfe\x8f\xfe\xff\xffS\x00g\xffE\xffn\x00\xe9\x01\xdd\x01\xfb\x01d\x03\x16\x04z\x044\x05\x81\x06\xcb\x07\x04\x08\xf2\x07\x00\t)\n\x8b\n\xb0\n\xfb\n[\x0b?\x0b\xde\n\xd2\n\xe3\nG\nH\t\x8b\x08\x0e\x08\xa1\x07z\x06[\x05d\x04H\x03\xbd\x01\x91\x00\xf3\xff\xf5\xfeY\xfd\x18\xfc\x82\xfb\xb4\xfal\xf9\xaf\xf8[\xf8\x99\xf7p\xf7e\xf7\xcc\xf6\xa7\xf65\xf7\xab\xf7K\xf8j\xf9\xef\xf9\x82\xfa\x88\xfb\x83\xfc\xe6\xfdI\xffm\x00\x89\x01\x95\x02=\x03\x9d\x03M\x04\x1a\x05\x12\x05\xeb\x04\xb4\x04\xa0\x04a\x04s\x03\x8d\x02\xf1\x01c\x01X\x006\xff\x06\xff\x8c\xfe\x8e\xfd\xd2\xfc\xc9\xfc\xb5\xfcJ\xfc\xf1\xfb\xf7\xfb"\xfcZ\xfc\x88\xfc\xfc\xfcr\xfdu\xfd9\xfd\x8f\xfd6\xfe\x82\xfe\xdb\xfe.\xff,\xff\x0f\xff\x0f\xff1\xff\x9b\xff\x08\x00w\xff"\xffW\xff?\xff\xd3\xfe\xaa\xfe+\xfe\xca\xfdD\xfd\x8c\xfc0\xfc\x02\xfcx\xfb\xc7\xfae\xfa{\xfaK\xfaM\xf96\xf9\xdf\xf9o\xfab\xfaj\xfa\xcb\xfa\xef\xfa\x0b\xfbh\xfb\xf2\xfb\x8c\xfc/\xfd\x95\xfdI\xfe\x1a\xff\x1c\x00\xb1\x00\x8c\x01\x99\x03@\x06\xee\x07\x02\nf\x0e6\x13\x9c\x15Z\x16i\x18Y\x1dL!\xf4!V"\xb2$Y&\x16$\xef \xb4 \xa5 \xff\x1b6\x15\x13\x12\x04\x11\xb6\x0c\x14\x05_\xffj\xfd3\xfa\xb3\xf3\xdf\xee?\xeeh\xed\xfd\xe8\xed\xe4)\xe5j\xe7a\xe6\xab\xe3~\xe4\xe6\xe7\xf6\xe8\x16\xe8A\xe9\xfc\xec2\xef\xd2\xee\xc0\xef\xb6\xf2\xbd\xf4\xd3\xf4C\xf5M\xf7\xbe\xf8\xe6\xf8e\xf9\xb7\xfah\xfbK\xfbw\xfb 
\xfc\xb3\xfc\x0e\xfdo\xfdy\xfd\x1a\xfd\x1e\xfd\xde\xfd4\xfe\x98\xfdS\xfd\xe1\xfd\xf6\xfd\x17\xfdB\xfd\xab\xfe1\xffN\xfeJ\xfe\xf2\xff\x0b\x01\xc8\x00J\x01\x18\x03?\x048\x04\xf6\x04\xff\x06{\x08u\x08\xdc\x08\x83\n\xb0\x0b\xd4\x0b\xff\x0b\x07\r\xbe\r^\r#\r\xa0\r\xd4\r\x0e\r\x01\x0c\x83\x0b[\x0bh\n\xe3\x08\xc0\x07\xcb\x06;\x05D\x03\xae\x01w\x00\xfa\xfe\xf9\xfcU\xfb8\xfa\x08\xf9\x89\xf7T\xf6\xcd\xf5[\xf5\x80\xf4%\xf4\x81\xf4\xc5\xf4\xcf\xf46\xf5\t\xf6\xe1\xf6\xd0\xf7\xa1\xf8~\xf9\x9e\xfa\xc2\xfb\xf1\xfc>\xfe\xd4\xff\xc4\x00\x84\x01\xba\x02\xa0\x03\xa7\x04\x8a\x05e\x06H\x07\x97\x07\x85\x07\x8a\x07\x03\x08\xe4\x07<\x07\xbf\x062\x06\xbf\x05\xd1\x04\xc2\x03\xd2\x02\xfc\x01\x1f\x01+\x00p\xff\t\xff4\xfej\xfd\x08\xfd\x00\xfd\xd3\xfcn\xfc<\xfcV\xfcx\xfcz\xfc\xb7\xfc-\xfdv\xfdw\xfdm\xfd\xd0\xfd_\xfer\xfe\x81\xfe\xc7\xfe\xe7\xfe\xe7\xfe\xa4\xfe\x89\xfe\xa2\xfet\xfe\xa5\xfd\xf0\xfc\xb9\xfco\xfc\xa8\xfb\xdb\xfa4\xfa\x9e\xf9\xe1\xf8(\xf8\xe5\xf7\xc3\xf7o\xf7.\xf7K\xf7\xb1\xf7\xe7\xf7\x16\xf8\x9d\xf8\x81\xf9s\xfa\x1d\xfb\xeb\xfb\xd0\xfco\xfd=\xfe,\xff\xe2\xff\xb9\x00{\x01\xeb\x01Y\x02\x94\x02\xf6\x02\xab\x03\x1e\x04x\x04"\x05\xf9\x05,\x07\x99\x08\x1c\n\xf2\x0b{\r\x0b\x0f\xda\x11S\x15\x99\x17\x81\x18\x15\x1a\xaf\x1c\x0f\x1e\xbf\x1d6\x1e\xc3\x1f\x15\x1fp\x1b\xa1\x18\xfa\x17#\x16\x04\x11\x02\x0c\x8d\t\x86\x06\x80\x00\xa0\xfaV\xf8\xb5\xf6\xec\xf1X\xec\x10\xea\x02\xea\xf7\xe7\xe9\xe4d\xe4\x1f\xe6G\xe6\xbd\xe4\x04\xe5\xbb\xe7\xdd\xe9\x02\xea\x82\xea\xfc\xec\xa4\xef\xdf\xf0k\xf1\x19\xf3u\xf5\xd6\xf6B\xf7O\xf8|\xfaO\xfcX\xfc6\xfc\xca\xfd\xf8\xff\x84\x00\xe0\xffy\x00\x08\x02)\x02\x0b\x01M\x01\t\x035\x03s\x01\xa8\x00\xe5\x01\x87\x029\x01:\x00\xfe\x00\x92\x01\x83\x00\xbf\xff\xcc\x00\xfd\x01g\x01{\x00.\x01\x89\x02\xc3\x02F\x02\x02\x03\x8a\x04\xf3\x04\xb9\x04n\x05\xd3\x06o\x07\x14\x07T\x07h\x08\xe3\x08\x8b\x08p\x08\xef\x08\x17\tl\x08\xf3\x07\xe2\x07m\x075\x06\x05\x05r\x04\xd9\x03\xa1\x02S\x01J\x001\xff\xc7\xfd\x85\xfc\xbb\xfb\r\xfb\x15\xfa\x1a\xf9\xa3\xf8T\xf8\xf1\xf7\xbe\xf7\xf0\xf7.\xf81\xf8\x86\xf8?\xf9\xfe\xf9\xac\xfar\xfbT\xfc!\xfd\xd8\xfd\xa8\xfe|\xff1\x00\xac\x00H\x01\xde\x016\x02^\x02\x91\x02\x07\x03[\x03g\x03\x87\x03\x9d\x03\xaa\x03n\x03n\x03@\x04N\x05\\\x05\xd7\x040\x05+\x06f\x06\xef\x05T\x06U\x07-\x07\xcf\x05I\x05\x06\x06\xad\x05\xb4\x03\\\x02p\x02\x06\x02"\x00\xa9\xfe\xa0\xfe0\xfex\xfc.\xfb[\xfb\x83\xfb\x91\xfa~\xf9\x95\xf9\xdf\xf9\x84\xf9\x1e\xf9W\xf9\x8f\xf9)\xf9\xc8\xf8\xfe\xf8\x81\xf9\xb1\xf9\x91\xf9\xa9\xf9\xe3\xf9:\xfa\xa4\xfa\x08\xfbl\xfb\xe0\xfbT\xfc\xb9\xfc/\xfd\xe8\xfd\x85\xfe\xc2\xfe\xf2\xfeN\xff\xc8\xff\x05\x00#\x00T\x00|\x00\x89\x00\xa1\x00\xd1\x00\xec\x00\xf4\x00\xd9\x00\xcf\x00\xff\x00#\x01%\x01&\x01\xf1\x00\xc7\x00\xbf\x00\x8f\x00i\x00N\x00(\x00\xf8\xff\xe1\xff!\x00Y\x00E\x000\x00\xc3\x00\xe7\x01*\x03\x99\x04c\x06Y\x08\x0c\nM\x0b\xea\x0cZ\x0f\xcd\x11>\x13$\x14C\x154\x16\x13\x16S\x15\xeb\x14h\x14\x81\x12\xa4\x0fF\rR\x0bl\x08\x93\x04&\x01r\xfeh\xfb\xe4\xf7\x17\xf53\xf3\'\xf1\xdb\xeeD\xed\xe0\xec\x9a\xec\x00\xec\xeb\xeb\xa3\xec~\xed-\xeeR\xef\xff\xf0p\xf2\x87\xf3\xb7\xf4R\xf6\xfd\xf7<\xf9^\xfa\x92\xfb|\xfc_\xfdY\xfe~\xffK\x00\x95\x00\xe8\x00[\x01\xb8\x01\xc9\x01\xe8\x014\x02\x1e\x02\xa9\x01L\x01/\x01\xde\x00\x08\x00G\xff\xf1\xfe\x9b\xfe\x0f\xfer\xfd\x10\xfd\xa7\xfc\x00\xfc\xb4\xfb\xe3\xfb\x10\xfc\xf5\xfb\xd4\xfb\x1f\xfc\x81\xfc\xb2\xfc\x08\xfd\xaa\xfdX\xfe\xdb\xfeT\xff\'\x00\x13\x01\xbb\x01\\\x02&\x03\x0b\x04\xec\x04\xb3\x05w\x061\x07\xb9\x07,\x08\x90\x08\xe5\x08\r\t\x03\t\xe4\x08\x9e\x08&\x08y\x07\xa3\x06\xa8\x05\xa3\x04\x9f\x03\x9b\x02}\x01T\x00+\xff\x12\xfe\x13\xfd;\xfc\x83\xfb\xe8\xfaa\xfa\x15
\xfa\x04\xfa\x10\xfa:\xfa\xa0\xfa0\xfb\xce\xfbh\xfc\x01\xfd\xcb\xfd\x97\xfec\xffW\x007\x01\xea\x01^\x02\xa7\x02\t\x03t\x03\xb8\x03\xd8\x03\xdb\x03\xc9\x03\x8e\x03/\x03\xc3\x02H\x02\xa2\x01\xf8\x00f\x00\xee\xffz\xff\xe1\xfe(\xfe\x7f\xfd\xf7\xfc\xa3\xfcg\xfcC\xfcN\xfcL\xfcH\xfc\\\xfc\x9f\xfc\x13\xfd|\xfd\xdd\xfdv\xfe3\xff\xc7\xffH\x00\xe6\x00\x97\x01#\x02\x96\x02(\x03\xc0\x03*\x04a\x04\x90\x04\xd5\x04\xf8\x04\xd3\x04\xa9\x04\xa0\x04\x87\x047\x04\xf0\x03\xa3\x03/\x03\x9b\x02\x1c\x02\xbc\x01[\x01\r\x01\xa2\x00\x0f\x00\xa5\xffb\xff\x1a\xff\xb6\xfeg\xfeT\xfeh\xfeN\xfe\x10\xfe\xd2\xfd\xb3\xfd\xc1\xfd\xc9\xfd\xc6\xfd\xb6\xfd\xb5\xfd\xb1\xfd\xc5\xfd\xe2\xfd\xf2\xfd\xd6\xfd\xa9\xfd\xde\xfdU\xfe\xca\xfe\xf0\xfe\x1f\xff\x9d\xff\'\x00\x87\x00J\x01\xe6\x01\xbe\x00C\xff\x97\x01<\x07\x14\t\xbc\x03~\xfei\xffn\x03\xf0\x04\x80\x04\xed\x03\xe0\x00H\xfbJ\xf9V\xfd)\x00#\xfc\x88\xf6#\xf6\xdc\xf8 \xf9\x9e\xf6\xed\xf4\xc5\xf4\xb8\xf5\xb3\xf7\x0b\xfa\xc8\xfa\xdf\xf8*\xf7\xae\xf8\x08\xfd\x01\x01\x9c\x01\xd3\xff\xe3\xfeW\x006\x03\x01\x05X\x05\x0e\x05q\x04\x16\x04B\x04\xb0\x04\x14\x04\xe8\x01!\x00\x14\x00\xf6\x00\x7f\x00t\xfe\x0b\xfc\x9a\xfa\xc7\xf9\x10\xf9i\xf9V\xfca\x00\x0e\x01\xa1\xfd<\xfb\xe4\xfc\xd1\x01:\x08\t\x0f$\x13c\x0f^\x08\r\x08=\x10\xe0\x17M\x18\xda\x15\x90\x14\x97\x11A\x0c\x08\nk\x0cr\r\xe6\t\xed\x06\xaf\x05\x90\x01-\xf9\xed\xf3\x92\xf6\xfa\xfad\xfb\xe8\xf7X\xf3r\xee\xfd\xea\xf3\xech\xf3\xec\xf7\\\xf7j\xf4\x84\xf2j\xf2\x1e\xf4\xa4\xf8\x0c\xfe\xe0\x00\x88\x00e\xff+\xff\xc8\xff\xba\x01,\x05l\x08$\t\xce\x07\xc7\x05\x81\x03\x0e\x02\xd5\x02\xa9\x05\x05\x07\xda\x04O\x00\x10\xfc\xb9\xf9\xc7\xf9\xfe\xfbG\xfdc\xfb\x89\xf7\xb9\xf4/\xf4\x8d\xf4u\xf5\xee\xf6\x03\xf8\xdb\xf7\xe2\xf6\xa8\xf6\x85\xf7,\xf9\x84\xfb"\xfe\xcb\xff\xde\xff\x8d\xff9\x00^\x02\xb5\x04^\x06P\x07\xa4\x07y\x07\x18\x072\x07\xc2\x07\x8b\x08\xcf\x08j\x08Z\x07\xaf\x051\x04v\x03\x93\x03\xb6\x03\x0f\x03\xa8\x01\x17\x00\xe0\xfe?\xfe\x1a\xfes\xfe\xcd\xfe\xa0\xfe\xf1\xfd\\\xfdi\xfd\xc0\xfdC\xfe\x0c\xff\x06\x00\x93\x00z\x00<\x00d\x00\xdf\x00\x96\x01X\x02\xf0\x02\x02\x03]\x02l\x01\x02\x01M\x01\x02\x02O\x02\xdc\x01\xd7\x00\x94\xff\x08\xffn\xff\x0e\x00h\xff<\xfe\xab\xfe\x15\x00y\xff\xbf\xfcG\xfbB\xfd\x1a\x00\xca\x00\x1f\xff\xa9\xfc<\xfb1\xfc\t\xff\xa4\x00n\xffn\xfd\xf8\xfc\x84\xfd\xb6\xfd \xfe 
\xff\x92\xff-\xff\xd8\xfe\xd7\xfe\xb2\xfe\xb2\xfe\xd7\xffN\x01\xa4\x01\x16\x01\xb0\x00\xc9\x00\n\x01\x9c\x01\xac\x02\x92\x03\x89\x03\xfb\x02\x8d\x02{\x02\xf2\x02\xd7\x03\x8e\x04F\x04l\x03\xc0\x02\x9c\x02\xac\x02\xf3\x02&\x03\xb5\x02\xae\x01\xce\x00\x93\x00\x8e\x00,\x00\xbe\xffe\xff\xf9\xfei\xfe\x00\xfe\xcb\xfdz\xfdI\xfd~\xfd\xc2\xfdX\xfd\xaf\xfc\xe9\xfc\xb1\xfdv\xfes\xfe\x1a\xfe\x05\xfek\xfeb\xff*\x00;\x00\xe8\xff\x05\x00d\x00\xa5\x00\xb8\x00\x15\x01Q\x01\x01\x01\xc1\x00\xa7\x00\x86\x00*\x00\xf4\xff+\x00;\x00\xc5\xff\xfe\xfei\xfe3\xfe7\xfeZ\xfeh\xfe\x13\xfe}\xfdH\xfdq\xfd\xc8\xfd\n\xfe]\xfe\xc3\xfe\xfb\xfe\x03\xff,\xff\x90\xff#\x00\xb4\x00\x11\x01/\x01\x0e\x01%\x01s\x01\xc7\x01\x08\x02\x16\x02\x02\x02\xaa\x01U\x01K\x01T\x01Q\x01-\x01\xec\x00\x85\x00\xf8\xff\xb9\xff\xeb\xff\x0f\x00\xe5\xff\xa2\xff}\xffc\xff5\xffK\xff\x85\xff\xbc\xff\xcc\xff\xbf\xff\xb3\xff\x9a\xff\xa0\xff\xca\xff\x0f\x003\x00$\x00\xf6\xff\xc6\xff\xba\xff\xc2\xff\xcb\xff\xb3\xff\x97\xffz\xffP\xff.\xff\x15\xff\x13\xff&\xff%\xff:\xff;\xffL\xff_\xff\x97\xff\xe6\xff\x1c\x004\x00F\x00o\x00\xcc\x00\x17\x01)\x01\x19\x01\x0f\x01\x19\x014\x01U\x01r\x01J\x01\xec\x00\xb5\x00\xbd\x00\xcf\x00\x9d\x00U\x00\x01\x00\xb7\xffy\xffh\xffk\xff<\xff\xec\xfe\xaf\xfe\x9a\xfe\x9b\xfe\xab\xfe\xaf\xfe\xc9\xfe\xd2\xfe\xf5\xfe-\xff\\\xff\x83\xff\xbf\xff,\x00\x90\x00\xcb\x00\xe8\x00\r\x01I\x01\x8a\x01\xba\x01\xc8\x01\xa5\x01m\x01j\x01q\x01E\x01\xe2\x00|\x005\x00\xe0\xff\xac\xff\xa5\xffh\xff\xf2\xfeK\xfe\x0f\xfe&\xfe?\xfe\x1a\xfe\x01\xfe5\xfe\xb0\xfen\xfe!\xfeN\xfe&\xfe3\xfe\xc3\xff@\x04\x12\x05\xf6\xff\x86\xfbt\xfe\xae\x05\xb9\x07\xd4\x05\xa6\x03\x88\x01\xb8\xfe\x17\x00\x82\x06\x12\t\xb5\x03$\xfe\x16\xff\xa1\x01\xcc\x00W\xff>\x00S\x00\xd5\xfd\xc6\xfc\x04\xfe\xd4\xfdh\xfb\x17\xfb*\xfe\xbc\xff\x87\xfd\xe9\xfa%\xfb\xf2\xfc0\xfeN\xff&\x00\x8b\xff\x84\xfd/\xfdh\xff\xba\x019\x02\x84\x01\x14\x01\xb1\x00H\x00\xf1\x00I\x02\xbf\x02\xa0\x01\x86\x00\x89\x00o\x00\xbd\xff)\xff\'\xff>\xff\xc5\xfe+\xfeQ\xfds\xfc\x0e\xfc\x94\xfc`\xfdC\xfdz\xfc\x8e\xfb\x07\xfbP\xfb\\\xfcP\xfd,\xfd?\xfc\xbc\xfb\x0c\xfc\x93\xfc\xc4\xfc\x1c\xfd\xa6\xfe\xa4\x00w\x01O\x00W\xff\xc6\x00\xb2\x04\x83\x08\xcc\t\xe9\x08\x9a\x075\x08\xca\n\xd0\r[\x0f\x9a\x0e\xf4\x0c\xa2\x0b0\x0b*\x0b\xa8\n\x97\t\x06\x08\xf5\x05n\x03\x98\x00d\xfe\\\xfd\xdf\xfc\xfe\xfb\xee\xf9:\xf7F\xf5\xda\xf4\xbd\xf5\xe9\xf6y\xf7$\xf7g\xf6j\xf6\xf6\xf7g\xfav\xfc\xb8\xfd_\xfe\xdf\xfe*\xff\xfb\xff\xa0\x01/\x03\xc1\x031\x03m\x02\xba\x01)\x01\x0c\x01\xfe\x00\xa0\x00p\xff\xd6\xfde\xfcU\xfb\xf1\xfa\xfd\xfa\xe9\xfaO\xfas\xf9\xcb\xf8\xac\xf8\r\xf9\x1e\xfa6\xfb\xc6\xfb\xd9\xfb\xe5\xfb\xa3\xfc\xff\xfd\xa8\xff\x0c\x01\x94\x01\x99\x01\xde\x01\xbf\x02\xb0\x03]\x04\xdf\x044\x05\x08\x05\xae\x04\x84\x04\xab\x04\xc9\x04\xa5\x04\x85\x04\x17\x04t\x03\xe1\x02\x88\x02p\x02_\x024\x02\xc8\x01\x1a\x01\x93\x00l\x00\x81\x00\x85\x00]\x00\r\x00\xb1\xffq\xff[\xffO\xff\x13\xff\xec\xfe\xf7\xfe\xe9\xfe\x99\xfe=\xfe\x10\xfe\x0e\xfe\x1a\xfe6\xfeM\xfe=\xfe\x1e\xfe-\xfe\x80\xfe\xe2\xfe\x15\xff6\xffm\xff\xcb\xff"\x00]\x00|\x00\xa7\x00\xce\x00\xf6\x00\x1d\x015\x01/\x01\x0b\x01\xe4\x00\xda\x00\xdc\x00\xac\x00\\\x00\x1a\x00\x02\x00\xea\xff\xc5\xff\x93\xffi\xff0\xff$\xff9\xff^\xff`\xff5\xffB\xff}\xff\xad\xff\xcf\xff\x02\x00.\x00\\\x00\x80\x00\xca\x00\xfb\x00\xf3\x00\xf1\x00\x14\x01c\x01s\x01a\x01S\x01>\x011\x01)\x013\x01#\x01\xd3\x00\x8f\x00\x99\x00\xa1\x00{\x00)\x00\x07\x00\xff\xff\xd6\xff\xb5\xff\xbb\xff\xab\xff\x89\xff6\xff\x0c\xff1\xffW\xff\x05\xff\xdb\xfe`\xff\x11\x00\x97\xff\x04\xff\xb3\xffT\xff<\xfe\x9e\xff\xd0\x04\xd9\x05B\xff\x9e\xfa\xaf\xfe\x81\x05\xe8\x05\x80\x03O
\x02\x08\x00c\xfco\xfe\xcf\x05,\x074\x00J\xfb-\xfeM\x01\x0c\x00\xd8\xfe\xea\xffo\xff\x03\xfdU\xfd\x84\xff\xfc\xfeR\xfc\xa6\xfc\x87\xff9\x00\xef\xfd_\xfc \xfd%\xfe\xfb\xfe\x07\x00H\x00\xa3\xfe\xfd\xfc\xcd\xfd\xdb\xff\xb0\x00\\\x00/\x00\xc5\xff\xdb\xfe\xc6\xfeB\x00B\x01\xd4\x00\xec\xff\xdc\xff\n\x00\xd2\xff\xde\xffQ\x00P\x00\xf4\xff\xef\xff(\x00\xc4\xff\xea\xfe\xb7\xfeq\xff=\x00\x0f\x00J\xff\x8d\xfe\'\xfe\xab\xfe\xf0\xff\xab\x00\xdd\xffx\xfeG\xfer\xff.\x00\x0f\x00\x83\xff\x07\xff\xb8\xfe\xf9\xfe\xb3\xff\xb1\xff|\xfe\x9e\xfd\x12\xfe\xdb\xfe#\xff\x15\xff\x86\xff\x89\xff\x18\xff\x02\x00W\x02\xdc\x04X\x05\xf9\x04M\x054\x06C\x08\xab\n\x8f\x0c\xf6\x0bs\t\x8b\x08\xf5\t\x8f\x0b\xfb\n\x94\x08\x00\x06\x1f\x04\xc7\x02\x02\x02\xa3\x00:\xfe\x91\xfb\xfc\xf9|\xf9V\xf8_\xf6\xf5\xf4\xef\xf4\xcf\xf5`\xf6p\xf6T\xf6n\xf6w\xf7\xe7\xf9z\xfc\xcf\xfd\xdf\xfd-\xfe\xac\xff\x9f\x014\x03\x08\x04\x01\x04V\x03\xc7\x02\x17\x03~\x03\xd8\x02\x10\x01\xae\xff\x16\xffD\xfe\xbd\xfc\\\xfb\x8c\xfa\x92\xf9\x9c\xf8[\xf8\x96\xf8&\xf8P\xf7\x88\xf7\xa5\xf8\xc2\xf9\x8e\xfa\x9c\xfb\xa2\xfcc\xfd~\xfeX\x00\x14\x02\xd5\x02g\x03\x8d\x04\xae\x055\x06i\x06\xf3\x067\x07\x14\x07#\x07\\\x07\xf4\x06\n\x06V\x05\x14\x05\x98\x04\x03\x04\x94\x03\x00\x03\x03\x02\x11\x01\x9f\x00\x8d\x00g\x00\t\x00\x92\xff\x08\xff\xc0\xfe\xdf\xfe+\xffn\xffP\xff\x03\xff\xd5\xfe\x04\xffW\xffk\xffy\xff\x92\xff|\xffK\xff\x16\xff\x02\xff\xe2\xfe\xc5\xfe\xc1\xfe\xbf\xfe\x90\xfe2\xfe\xcf\xfd\xab\xfd\xc4\xfd\xdc\xfd\xc9\xfd\xf5\xfd\x82\xfe\x98\xfe\xe0\xfd\x92\xfd\xae\xfe\x14\x00p\x00n\x00\x96\x00L\x00\x01\x00\x8d\x01\xdc\x03\xe3\x03\x03\x02\xbd\x01e\x03\xb7\x03\x89\x02\xaa\x02\x03\x04\xb3\x03\xec\x01R\x01\xc2\x010\x01$\x00\x93\x00w\x01\x83\x00U\xfe\xc6\xfd\xe9\xfec\xff\xbb\xfe\x8c\xfe\x03\xff\xb8\xfe\xdc\xfd-\xfea\xff\xa7\xff\xf8\xfe \xff\xc5\xff\x95\xff\xf0\xfeG\xff#\x00\x00\x00i\xff\x98\xff\xbe\xff\x02\xff\x85\xfe7\xff\x92\xff\x8c\xfe\xd6\xfd\x7f\xfe\xa5\xfe\x97\xfd\xf7\xfc\xaf\xfd\xe4\xfdW\xfd~\xfd\xd6\xfd#\xfdY\xfc8\xfdc\xfe\xe9\xfd\x1c\xfdf\xfd\xd0\xfdU\xfd\x9c\xfd^\xfeO\xfeN\xfdl\xfdU\xfeY\xfem\xfd\xe5\xfcT\xfdY\xfd\xf1\xfc\x90\xfcc\xfc\x07\xfc\xcb\xfb\xcb\xfb@\xfcn\xfc*\xfdB\xfe\xdf\xffj\x02\x0f\x04\x13\x05\xb4\x053\t\xa1\x0e\xaf\x12,\x14\xd3\x13K\x14n\x16\xe4\x19\x86\x1c\xe8\x1b\'\x19\xaf\x161\x15\x01\x14\x12\x12\xed\x0ec\n\xc7\x05\x9b\x02\x8a\xffd\xfb\xb6\xf6<\xf3\r\xf1\xf9\xee\x01\xed\xd7\xea\xec\xe8\x1b\xe8\x01\xe9\x9a\xea\xb6\xeb\xbb\xec,\xee\x02\xf0{\xf2\xd1\xf5\x0b\xf9\x07\xfb\xdb\xfc\xc2\xff\xff\x02_\x04H\x04M\x05\x8f\x07\xd0\x08\x12\x08,\x07\xaf\x060\x05\x04\x03{\x02\x1c\x03V\x01\xeb\xfc&\xfa\x8e\xfa\xa0\xfa\x81\xf8\xa3\xf6\x84\xf6\x1b\xf6\x85\xf4\xc7\xf4"\xf7\xe1\xf7\x84\xf6R\xf6\xd7\xf9\xba\xfc\x9a\xfcF\xfd\xeb\xff\x1d\x02\xf6\x01L\x03G\x07\xb0\x08\x14\x08\xb3\x08y\n\x11\n\xcc\x08/\x0b\xc8\x0c\x15\n\x13\x07D\x07\xf1\x07\xf1\x05Q\x043\x04\x01\x028\xff\x9e\xfe?\xff\xb3\xfd\x0f\xfbQ\xfax\xfa\xa7\xf94\xf9\x80\xf9!\xf9\xd6\xf7\xab\xf7C\xf9e\xfa\x0c\xfa\xba\xf9w\xfaW\xfb\xb1\xfbT\xfc\x95\xfd<\xfe\xbf\xfd\xee\xfd3\xff\xfc\xff)\xffo\xfe\xea\xfeb\xffB\xff\xe5\xfe.\xfe\x0e\xfd6\xfc\xa5\xfc-\xfd\xae\xfc\x96\xfbG\xfa?\xf9I\xf9w\xfbN\xfc9\xfa\xe9\xf7C\xf8\x13\xfa3\xfa\x04\xfb\x08\xfc\xd4\xfb`\xfa\x14\xfc\x1b\x01\xfd\x02\xbc\x01*\x02-\x07\xd4\x0b\x9a\x0e\xeb\x12<\x17\xab\x17u\x15\xf6\x18\xc7"\xa3(\xe5%\xb6 \x82 \xf5"\xa6"f \xf3\x1d\x1f\x1a\xd6\x13\xfa\x0e\xfb\x0c.\ta\x01\x9d\xf9\x85\xf6\xd0\xf5\x89\xf23\xed\xd8\xe7\xac\xe4\xcc\xe3\xa6\xe4\t\xe6 
\xe6\xd1\xe5a\xe6\xfa\xe7\x8d\xeb\xde\xef\xe7\xf2\xc2\xf3Q\xf6\xad\xfb\xa0\xff\xd9\xff\xf0\xff=\x03#\x05\xf4\x04&\x06\xa8\x08\x1b\x07\x1c\x01\xb8\xff\x9e\x02\xe4\x01\x08\xfc\xa9\xf9\xee\xfb\xb0\xfa\xb4\xf4M\xf2\x9f\xf4\x8e\xf4\xb3\xf1\xae\xf2\xac\xf6\xc6\xf6\xa2\xf3^\xf4\n\xf9\xb9\xfb\xa8\xfc\x16\xff\xe9\x02\x8f\x03\x04\x03\xa5\x040\x08\xc7\t\xb0\t\x02\x0b\xa7\x0c\xee\x0c\xf2\nS\t\xdd\x08\xba\x08\xa2\x08\x91\x08\xb1\x07!\x05u\x01o\xff2\xff\xf4\xfe\xe0\xfd0\xfd\x8b\xfc&\xfb\xe8\xf8\t\xf87\xf8a\xf8\xd5\xf8Z\xfa8\xfbn\xfa!\xf9Z\xf9\x8d\xfa\xa0\xfb\x1d\xfd(\xfeM\xfeU\xfd\x1c\xfdR\xfd{\xfd\xe6\xfdu\xfe\x8b\xfeE\xfd\xdc\xfb\xa6\xfa>\xfat\xfa\x87\xfa\x9e\xf9\xd9\xf7X\xf6\xe6\xf5\x8f\xf5\x81\xf6\x03\xf7\x1c\xf6\\\xf4>\xf5\x0f\xf9B\xfa\x99\xf8\x93\xf7\xd9\xfa8\x00\x06\x04\x82\x06\x07\x07q\x06\x0c\x08&\x0e\xda\x16\xb5\x1b\x07\x1c\xe0\x1cm \'$\x11%\x8b&i)\x89+\x85*5(y&\n#J\x1d\x96\x17\xc1\x14\xb6\x12N\x0e(\x07\xd7\xff\xc6\xf9\xb8\xf4\xea\xf0\xbb\xedP\xeb\xe3\xe8\xda\xe6`\xe5^\xe4\xfc\xe30\xe4\xf0\xe5T\xe9\xc2\xec\x11\xefS\xf05\xf2\xa1\xf4K\xf7k\xfay\xfd=\xff\x0b\xff*\xff\xad\x00\xc5\x01t\x00\x9c\xfd\xaa\xfc\x84\xfd4\xfde\xfa\x00\xf7\x1b\xf5\xcb\xf3_\xf2\xa1\xf1\x8d\xf1~\xf0\xe9\xee/\xefP\xf1\x18\xf3\x06\xf3\x10\xf4\xa5\xf6O\xf9\xcc\xfb\xe2\xfe\x91\x02\xe4\x03\x80\x04\x94\x06E\n\x99\x0c\xee\x0cc\r\xe6\rr\x0ey\x0e\x8c\x0e\xf8\x0c\xbe\nw\t\xac\t\xe4\x08B\x06\xd1\x03\x1f\x02\x05\x01\xd8\xff\xe5\xfe\xff\xfdC\xfc\xf0\xfa\xc4\xfaO\xfb\xfb\xfb\xc1\xfbd\xfb\x03\xfbF\xfbd\xfct\xfd-\xfe\x14\xfe\xbc\xfd\x12\xfd\r\xfd;\xfd-\xfd\xcd\xfb\x12\xfa\xcd\xf8d\xf8Q\xf8\xf0\xf6n\xf4\xaa\xf1\x98\xf0<\xf2\xa6\xf3\xf0\xf2T\xf0\xab\xee\\\xf0\xa9\xf3O\xf6\xb9\xf6<\xf59\xf5f\xf8g\xfdK\x00\xce\xff\xe1\xfe\x0c\x01\xdd\x05\x8f\tm\nB\n)\x0b%\x0e\x9f\x11Y\x14\x7f\x15\xa5\x15|\x16G\x18\xe5\x1a\xf7\x1cF\x1e\xba\x1e%\x1e\x1d\x1eP\x1f\xf7 \x07!\xa9\x1eH\x1c\x9e\x1b\x84\x1b\x13\x1a\xf6\x16\xa2\x13\x8c\x10|\r\xb3\n\x81\x08\xd3\x05\xa0\x01\x0c\xfd\x1c\xfa\xd9\xf8\xea\xf6\x9a\xf3\x01\xf1\xaf\xefJ\xeea\xec@\xebn\xeb\xc7\xea_\xe9*\xe9g\xea\x93\xebA\xeb\x1e\xeb\xd5\xeb\xe2\xec\xfe\xed\xe0\xee\xe7\xef\x9b\xf0\xc7\xf0P\xf1`\xf2\xbe\xf3Z\xf4\xf9\xf3,\xf4@\xf5\x8f\xf6\xf8\xf6\xc8\xf6\x80\xf7\x95\xf8\x8c\xf9)\xfa!\xfb\xb0\xfcR\xfd\xb6\xfd\xd3\xfe\x87\x00\xe7\x01b\x029\x03\x7f\x04\x80\x05\x1c\x06\x0c\x07H\x08\x80\x08"\x08|\x08\xd4\t\\\n\x85\t\xb6\x08\x9b\x08\xb4\x08\x06\x08\xc5\x07\xbe\x07A\x07\x0b\x06$\x05\x06\x05\xed\x04{\x04\xb7\x03\xf7\x02@\x02\xd8\x01\xb2\x01;\x01+\x00\x1e\xffZ\xfe\xe0\xfda\xfd\xb3\xfc\xaf\xfb=\xfa\xdf\xf8!\xf8\xa1\xf7\xd4\xf6\xa8\xf5f\xf4m\xf3\xb9\xf2E\xf2\xf7\xf1\xa9\xf1(\xf1\xa5\xf0\xa6\xf0 \xf1\x91\xf1\xd8\xf1J\xf2\x05\xf3\xe7\xf3\xee\xf4\x1b\xf6A\xf7L\xf8U\xf9\xb1\xfau\xfc4\xfeq\xffi\x00\xd2\x01\xb5\x03y\x05\xae\x06\x04\x08\xcc\t\x18\x0b\xef\x0b2\r\xc1\x0eb\x10\x8f\x11\x96\x12\xe3\x13\xcd\x14\x9b\x15b\x16\x92\x17\xa4\x187\x19\x1e\x1a1\x1b/\x1c^\x1c\x90\x1cu\x1d\xd8\x1d\x97\x1d$\x1d\x1d\x1d\xc0\x1c\xf1\x1a\xe2\x182\x17o\x15\xed\x12j\x0fC\x0c\xf9\x08\x19\x05I\x01\xf0\xfd\xca\xfa\x0e\xf7\r\xf3\xde\xef\x1f\xed\x97\xeap\xe8\xaf\xe6\xfc\xe4A\xe3.\xe2\x0e\xe2\x1f\xe2\x06\xe2>\xe2\xcc\xe2\x90\xe3~\xe4\xc7\xe5?\xe7\x97\xe8\xb2\xe9\x05\xeb\xb6\xec\x81\xee 
\xf0b\xf1\xb9\xf2B\xf4\xdb\xf55\xf7q\xf8\xb6\xf9\xef\xfa\xfe\xfb\x12\xfd\x85\xfe\x16\x00=\x01+\x02\x87\x03\xfa\x04\xec\x05j\x066\x07\x8d\x08\x04\n\xf5\n9\x0b*\x0b\xdb\n\x98\n\xba\n\x03\x0b\xdb\n\x17\n%\tI\x08~\x07\xc8\x065\x06d\x05:\x04e\x03\x1b\x03\xce\x02\x19\x02C\x01\xca\x00d\x00\xda\xff\x9d\xff\x9f\xffd\xff\xa6\xfe\x00\xfe\xe8\xfd\xce\xfd;\xfdO\xfcs\xfb\xbd\xfa\xeb\xf9/\xf9`\xf8E\xf7\xf0\xf5\xa8\xf4\xa6\xf3\x03\xf3\x7f\xf2\xf2\xf1`\xf1\x06\xf1%\xf1\x8a\xf1\x05\xf2Q\xf2\xf0\xf2\xe0\xf3\t\xf5U\xf6\xa7\xf7\xf7\xf82\xfaS\xfb\xa2\xfc.\xfe\xc5\xff)\x01j\x02\xd4\x03(\x05\x84\x06\xb2\x07\xf6\x08\x87\n+\x0c\xbe\r\xe1\x0e\'\x10\x18\x11\xb8\x11\xc9\x12\n\x14]\x15\x16\x16\x0b\x16\x14\x16U\x16\xa8\x16j\x17`\x18\xdf\x18\xe4\x18\x11\x19\xbb\x19!\x1a~\x1a:\x1b\xce\x1b\xed\x1a\x1d\x19\x89\x18l\x18>\x17\x8b\x14\xf0\x11\xe3\x0f\xad\x0c~\x08\x1c\x05\x90\x02d\xff\x99\xfa\x8d\xf6?\xf4\xd7\xf1x\xee\'\xebL\xe9\x1d\xe8\xfd\xe5\x06\xe4s\xe3\xa5\xe3=\xe3=\xe2b\xe2\xbf\xe3o\xe4F\xe4\xd0\xe4\xca\xe6\x0c\xe9\xec\xe9\xba\xea\xcf\xec&\xef\xab\xf0\xb2\xf1\xaa\xf3U\xf6\x11\xf8\xae\xf8\xfa\xf9d\xfcs\xfe5\xff\xbc\xffK\x01\xe0\x02-\x03H\x03G\x04\xc8\x05.\x06\xa9\x05\r\x06\xf9\x06\x1a\x07\xff\x05\x93\x05\x83\x06\xd5\x06\t\x06F\x05\x97\x05\xb3\x05q\x04J\x03B\x03\xa5\x03\xff\x02\xe4\x01\xd3\x01$\x02\x92\x01P\x00\xb1\xff\xe6\xff\xa1\xff\xa2\xfe=\xfe\xa0\xfe\x84\xfe\x8c\xfd\xca\xfc\xbf\xfc\x9d\xfc\xb9\xfb\x10\xfb5\xfb?\xfb\x82\xfa\x9a\xf9D\xf9\xfd\xf8;\xf8=\xf7\xc0\xf6\x98\xf6\xfd\xf5N\xf5\'\xf5!\xf5\xd6\xf4\x80\xf4\xab\xf4R\xf5\xe4\xf5|\xf6\x96\xf7\xc2\xf8\xb4\xf9\xb5\xfa>\xfc\xd4\xfd\xfd\xfe*\x00\x96\x01\x17\x03\x1b\x04\xe7\x04b\x06\xb8\x07Z\x08\x06\t\xf2\tL\x0b\xea\x0b\x18\x0c9\ro\x0e\x0b\x0f;\x0f\x8d\x0f\xba\x10\x1d\x11t\x10\xf7\x10`\x11-\x11i\x10\xd3\x0fo\x10"\x10h\x0e\xce\r@\x0e8\x0ea\r1\r\xbb\x0e-\x0f\xf8\r\x86\r$\x0f\xa6\x10\xbb\x0f\xbe\x0eK\x0f\xeb\x0f\xbc\x0e2\x0c\x00\x0ca\x0c\x1b\nC\x06\x98\x03C\x03\x96\x01\xfd\xfc\xb3\xf9\xc1\xf8D\xf7"\xf3b\xef#\xef\xe1\xee\xac\xebx\xe8\xf7\xe8q\xea\x8b\xe8\xe4\xe5\x07\xe7\xc6\xe9\xab\xe9a\xe8\xe7\xe9?\xed\x18\xee\x13\xed/\xef\xf3\xf2j\xf46\xf4\xa1\xf5\xcb\xf8\x1c\xfa\xd7\xf9\x82\xfb\x16\xfe\xb1\xfe\xc3\xfd\x83\xfe\xc5\x00\x98\x01\xf2\x00\x0c\x01Q\x02\xa2\x02<\x02i\x02V\x03\x9e\x03\xee\x02\xdf\x02\x90\x03\xdc\x03_\x03\xc3\x02\xe2\x02\x14\x03\xd1\x02\x07\x02\xba\x01\x9f\x01?\x01x\x00\xd1\xff\xf7\xff\xcf\xff\x1e\xff<\xfe\x07\xfe\xe8\xfdd\xfd\xc3\xfc\xd9\xfc\x07\xfdw\xfc\xb5\xfb\x84\xfb\'\xfcA\xfc\x8d\xfb\x8f\xfb\xea\xfb3\xfcd\xfc\x8a\xfcL\xfd\xa4\xfd#\xfd\x03\xfd\xa4\xfd\x9d\xfe\xbc\xfe\xe9\xfd\xf2\xfb\xdd\xfa\xb3\xfc\x7f\x00\xd4\x03\x9a\x00\x93\xf9\x16\xf6v\xf8O\xfe\xff\x01\t\x01\xc4\xfe\xac\xfd*\xfd\x11\xfd6\xfd{\xfe\x10\x01\xb4\x02 
\x03O\x05\x7f\x07{\x08q\x07!\x07<\t3\n\xe5\n#\x0c!\x0f\x03\x12\x16\x11\xf7\x0f\xbf\x0e\x82\x0c\x81\x0b&\x0b\xd5\x0c\xed\r\xfd\x0b\xa8\n\x14\n\x08\t\x13\x06i\x029\x00\xc3\xff\xaa\x00=\x02\xb4\x03\x94\x04\xc0\x02\xf8\xffP\xfeV\x00(\x05\xca\x08>\n\t\x0b\x05\x0eE\x11\xdb\x11#\x0f\xa3\x0cl\r\xf0\x11T\x16B\x18\x9d\x14\x1e\r\xc2\x07\xde\x06\x1e\t|\x07\x8c\x01\x8b\xfc[\xfbn\xfc\x1d\xfaD\xf3\xe9\xea\x0c\xe5\'\xe5\xdb\xe8~\xec\x02\xebP\xe5\xd9\xe1\x95\xe3\xf7\xe7e\xe8X\xe5l\xe6\xf5\xed$\xf6|\xf9\x07\xf7\xd1\xf4\x9f\xf4\x15\xf7O\xfbI\xff0\x02\x03\x03r\x04\xe2\x04G\x03s\xff\xb0\xfc:\xfeb\x01\xa4\x03_\x03V\x01\x8d\xfep\xfa\x93\xf7\x8a\xf6\xea\xf7Y\xf9\xcc\xf9\x18\xfal\xf9\xbb\xf8x\xf6\xf8\xf4\xed\xf3*\xf5z\xf8;\xfc\xe2\xfec\xfe!\xfc\x07\xfa\x98\xf9\n\xfbm\xfc\x12\xfe\xb7\xffL\x01\xeb\x01C\x00\xd7\xfdr\xfb\x84\xfb\xfc\xfc\x19\x00q\x02\x00\x03z\x02\xef\x00\xe1\xff\xa4\xfe\x80\xfe9\x00\xa3\x02\xdb\x044\x05u\x05\xc2\x04^\x02\xb7\xff\xd5\xfe\x98\x01\xe1\x04A\x06\xea\x04\xd2\x02\x04\x01\\\x00\xc6\x02\x07\x03\x86\x01\xb3\xfe\x1a\xff\xbb\x04\xcd\x061\x03\x80\xfe\xbf\xfc\xac\x00\x14\x03p\x02\xcb\x011\x01\x1d\x04\xc5\x06c\x08\x07\x07\xec\x03>\x04\\\x08\xb4\x0cB\x0e\xc2\nR\x083\t\xeb\n\xe2\r\xbe\x0b\xf6\x08m\x07v\x07\\\t\x14\t\xe9\x06b\x04W\x02\x9a\x02\xe4\x03\x89\x04\xd9\x04\r\x03\xa9\x00\xb0\xfe\x1d\xfd\x8d\xfc\x1f\xfd$\x02!\n\x1f\x0f\x16\x0f\xe6\x08\x05\x05\xc5\x05\x01\r\xa5\x15 \x19\xe9\x17\xcf\x13\xc4\x14s\x14\xda\x10\xe4\x08\x99\x02\xfb\x04V\n\xd1\r\x9f\n\xc3\x00\x02\xf7\x0c\xf0\x8b\xef\xd8\xf1\xd1\xf2\xbd\xf3}\xf2>\xf1d\xec\x18\xe71\xe5%\xe6\x1a\xebB\xef\x83\xf2\xf4\xf3\xc9\xf2\xb8\xf2\xeb\xf1\xfb\xf0\x8c\xf1\xff\xf4\xb6\xfc\xe3\x02\xec\x04\xc7\x00$\xfa\xf9\xf6P\xf8\xda\xfd\xa4\x01\xda\x02\xbd\x01(\xfe\xad\xfb%\xf9\x07\xf9u\xf8\xdf\xf6\xff\xf6\x00\xf9O\xfcD\xfcl\xf8\x01\xf3m\xf1\n\xf4\xdf\xf9(\xfe_\xfd*\xfb\x84\xf8p\xfa\x02\xfd\xc1\xfdb\xfd\x14\xfd"\x00$\x03b\x03$\x00\xda\xfb\x1d\xf9\x1c\xfa\xb6\xfd\xb4\x005\x01,\xfe\x11\xfb\xad\xf9k\xfa}\xfbu\xfb+\xfb\xa8\xfd\x91\x01\x9e\x04\x90\x03F\xfeh\xfbj\xfcK\x00\xa2\x03\x98\x02\\\x01\x0c\x02\x16\x04\xc2\x05?\x02\xe2\xfc\xbf\xfee\x03=\to\n\xc5\x07/\x08Y\x06k\x02\xe7\x01\xe6\x01o\x07\x08\n\x8c\x07\xbc\x06\x1d\x03\xc3\x01\x00\x01\xbf\x00\xe5\x033\x07\xa4\nP\x0et\x0b4\x07\xc2\x019\x02g\x08\xb4\x0c\x97\x11\x90\x0f\xbc\x0c\xc0\x08S\x06\xf5\x06\x1e\x05H\x01}\x02\xad\x06=\x0c\xfb\x0cl\x07O\x05"\x01\x10\xff\x9d\xff!\x01\xf0\x05\xb7\x08\x84\t\xd9\t\xe0\x04%\xff\x11\xfb$\xfb\xa3\x01\x15\x08\xdb\x0c\xa8\n\x00\x03~\xfa\x9a\xf7\xb1\xfb\xf5\x00-\x03\x9c\x01\x88\x00\x8d\xffS\xfd\xce\xfa\xa6\xf80\xfb\x90\xfd\x93\x01\xfe\x01\x95\xff\x01\xfd*\xf7\t\xf6>\xf4\x8c\xf7\xa3\xfa\xdc\xf9m\xf8\xc4\xf3\x92\xf3\xcd\xf4\x91\xf7\xb8\xf9y\xf9\xcc\xfa)\xfb\xb0\xfdF\xfe\xeb\xfe(\x00q\xffC\x00X\xfc\x86\xf9\xaa\xf8\x06\xf9\x88\xfbo\xfb2\xf90\xf8^\xf6>\xf6\x89\xf5q\xf4c\xf8\xec\xfc\xca\x01\x84\x00\'\xfbC\xf8k\xf9\xe6\xfdr\x00-\x00%\x00\xdc\x00\xd9\x02\x06\x03!\x00v\xfd\xb6\xfc\xbc\x01\xa6\x08\xf4\t\xea\x04i\xfe8\xfde\x00\x04\x02\xd1\x02G\x01\xbb\x00\xe3\xff\xf4\xfe\x9a\xfc\xec\xf9\xe6\xf8y\xf9u\xfbl\xfa\xc8\xfa@\xfb,\xfc\x90\xfcf\xf9\xe7\xf9\xf4\xfc\xd7\x01\x04\x06L\x05\x0f\x05\xb9\x04x\x05\x84\x07\x93\x08z\x08\xd9\x03\xf2\x01\xf3\x02O\x07\xef\x08m\x06w\x04u\x01>\x02\xe2\x02>\x03\x1e\x03o\xff\xe9\xfd\xd8\xfef\x02#\x07-\x077\x03\x9d\xfd5\xf9\xe0\xf9\x9e\xff\xf0\x05\xc9\t\x9c\x08\xb8\x04\t\x03:\x02\xe8\x03\xe6\x024\x01E\x02\x89\x06\x81\x0b\xb7\t\xed\x03:\xfcr\xf8\xff\xfa\xf7\x00\xed\x06\xc8\x06\x1c\x04\x1a\x00]\xfc\x85\xf9\x9f\xf8\xd9\xfb\x88\x00\x8f\x02\x06\x01\xa4\xfd\xb0\xfbt\xfa\xe9\xf9\xca\xfa,\xfc\
xca\xfe\xd3\x01c\x05\x1a\x067\x04j\x01\xad\x00\xd9\x01\xbb\x01\xe9\x01V\x01\x16\x02;\x01\xcb\xfe3\xfd\x8d\xfcR\xfcE\xfc\xe3\xfc\'\xff\xba\x01\xdb\x03}\x05\x1c\x04\xc9\x00\x91\xfc\xeb\xfb\x06\xff\xd2\x03\x19\x07`\x06s\x02\x93\xfd\x90\xfav\xfa\x12\xfd\xf2\xffW\x03\xf7\x06\xe5\n6\x0cI\t\xee\x01=\xfbV\xf9F\xff\xec\x06}\x08\xfc\x02}\xf9\xd1\xf4\x90\xf4\x90\xf7)\xfb\x05\xfc\x80\xfd \xff\xb0\x001\x01\x1c\xfc\xd6\xf6\xd4\xf2\xe1\xf4\xaa\xfb\xba\x00\x82\x03\xa8\x00\xb8\xfb\xcf\xf5\x81\xf3$\xf7K\xfc\xd5\xfe\xb4\xfe\x80\xfe\xa2\xffQ\x01\x17\x00G\xfe\x8b\xfbU\xfb\x04\xff\xc5\x01\xe6\x02\xae\xff\x07\xfd\xff\xfd\x9e\x01\xdc\x04y\x03\xb5\xff\xb0\xfd\xcb\xfdy\xff\x8f\x01\xd7\x02\xad\x01\xa7\xff\x91\xff\x15\x005\x00"\xff\xdc\xff\x80\x00,\x02\x81\x03\x9b\x04\x08\x06\x0f\x04W\x02\x06\x00\xa7\x00j\x03\x90\x04\x99\x04K\x02k\x00\x9b\x00\x7f\x00@\x01\xbc\x00\x17\x00\xf1\x01\xe7\x02b\x03\xc0\x00\xc5\xfd\xc6\xfb?\xfb\xe7\xff\xb9\x04\xc0\x04O\xfe\x1d\xf7i\xf6D\xfcp\x01\x17\x02\xc9\xfe\xcf\xfc\xa0\xfe\xc5\xffZ\xfe"\xfa7\xf7 \xf8\xe5\xfc/\x02\xf4\x04\xad\x06\xbc\x05T\x02\x01\xfd\xb0\xf8D\xfb%\x03i\x0b\xf1\x0c\xc8\x06\x8a\xfe\xce\xf7\xf7\xf4\xfa\xf5v\xfa\xeb\xfd\xa8\xff\x1e\x03*\x05r\x04\x01\x01l\xfdY\xfe\\\x00\xc5\x04\x00\nR\x0c#\x0b\x82\x05\xab\xfe\xf4\xf9J\xfa\xb7\xfdw\x01u\x03Z\x03\x8b\x00_\xfdn\xfdF\x00\xd2\x04p\x08\x83\x06\x10\x02\xdd\xff\x18\x00\xaa\xffa\xfd\x93\xfd\x86\x00\x84\x05\xf3\x07`\x07\x11\x033\xfe\xfa\xfb\x98\xf8\xae\xfa\xfb\xff\x0e\x06q\t\xfe\x04\x05\xfe\xe9\xf6?\xf6\x12\xfc\xbb\x00\xa1\x01j\xff\x92\xff\x1c\x02\xae\x03\x99\x01\x97\xfeM\xfc\xbe\xfb\x11\xfc\xc1\xfe0\x03\x1c\x03\x08\xff\xdc\xf9\xb1\xf8z\xfcq\x00\x7f\x02\xf5\x01\xf0\xff\xf7\xfe\r\xfez\xffr\x01\x8c\x01\xba\x00\xb1\xfe\xca\xff\x81\x00\x1b\xff\x9c\xfbP\xf7\x92\xf8\xa1\xfc\x85\x01\xf2\x04\xd8\x04\xe2\x02\xbe\xfek\xfe6\xff\xcc\xff?\x01L\x04\x88\x07\xed\x04e\x01\x01\xff\x9d\xfeQ\xffj\x01\xe8\x03\x12\x06\xba\x06\x87\x04o\x00\xfc\xfc\xb5\xfc\x1c\xff\xb3\x03\xa5\x057\x01\x87\xf9\x1e\xf7\x99\xfa\x15\xff_\x01\\\x01y\x003\xfdv\xfbQ\xfc\x81\xfe\x87\xff\xfd\xfe`\x01\xdb\x04\xa4\x03k\xff\x1a\xfd\xe9\xfd\t\xfe\xfd\xfc\xb6\xfd\x04\x02\xf4\x04\xf4\x01\x01\xfb\xea\xf3\x1e\xf3\xa3\xf6\x90\xfdP\x04\xe0\x03\xbb\x00\n\xfeb\xfd\xb3\xfe\xdc\xfd\xef\xfd\x93\xfe\xb1\x01 \x06K\x064\x02o\xfa\xfa\xf5\x11\xf5h\xf8^\xfe\xee\x04\x12\t\xb5\x07\xee\x05\xff\x04\x97\x02\xca\xfdB\xfd\x1c\x016\x06U\nq\t\xef\x03\x82\xf9\xdb\xf0\xa8\xf1B\xfc\x89\x08\x9b\x0c`\x08\xed\x00\xf9\xf8\x06\xf6D\xfa\xce\x01\xf0\x06{\t\x86\x0c2\n\xaf\x01\xfa\xf8\x93\xf5{\xf7\x04\xfd\xeb\x04:\x0b\xb5\x0c\x93\x07\xed\xfd\xf3\xf3\xf1\xf1\xc8\xf9\xb8\x02J\ne\x0c\x9f\x07N\xff\x1b\xf7&\xf4\xf7\xf4\xd8\xf9\x8c\x01\x9f\x08o\ro\x0b\x01\x04)\xfa\xb2\xf1l\xf1M\xf7\xc8\x00 \tC\n1\x07*\x02v\xfc.\xf9\xb2\xf6\xa9\xf7B\xfdh\x03\xc1\x08\x82\x06\x12\xff\xf5\xf8\xa6\xf6V\xf9\x0c\xfey\x04k\x086\x05h\xff\x99\xf99\xf8\x99\xfd\xe1\x04\xb4\t\xf0\x08\xc1\x04\x00\x00 \xfc\x7f\xfb\x82\xfc:\xfd\xb3\xfdW\x00\x04\x05j\x08O\x08u\x04\x88\xff\xab\xfa\xe9\xf9\xd5\xfd_\x04\x82\t\xec\x08\r\x07\xdf\x03\xcb\xff\x0b\xfc\xa1\xf7\xcb\xf7\x17\xfb\xc2\xfd\xd3\x01\x16\x06:\x08%\x06n\x00\x95\xfa\xd1\xf4\xee\xf4\xfb\xfa\x17\xff\xac\x01"\x03L\x04\x80\x03n\xfe\xaf\xfa\xa9\xf8\xfe\xf86\xfc\x92\xfe\x16\x03:\x05\x9c\x04\x9b\x02}\xfd\xda\xf9\xfa\xf5\xc0\xf5"\xfd\xfe\x03U\x07>\x03\x9d\xfc\x08\xfb9\xfc4\xfe\x9d\xfe\xb9\xfei\x00\x95\x02\x16\x05\x10\x07[\x03`\xfd\xc4\xfaS\xfc\xa0\xfe\x90\x01\x9b\x05F\x08\xbb\x06\x82\x02j\xffS\xfc\x86\xfa\x0c\xfc\x9f\xff\xa1\x03\xc5\x06\xce\x06\xcf\x04\xba\x00 
\xfb*\xf6(\xf5\x81\xfa\xa4\x02\x83\x08\x97\n1\x07>\x01c\xfdn\xfd\xa0\xfd{\xfdA\x01V\x05:\x08\xeb\x07\xd1\x05$\x01@\xfc\x12\xf9M\xf8\x05\xfd\x9a\x04\xa6\x08\xa0\x06\xde\x01\x00\xff1\xfd\x03\xfbq\xf9\x96\xfa\x97\xffe\x03\xad\x05\xec\x04U\x00\xfd\xfa\x8f\xf6\xde\xf6\x96\xfaD\x01\xc7\t\xe4\x0ct\x08v\x013\xfc\x84\xf8I\xf6\x08\xf8\x8b\xfd\xe2\x05\x96\x0c<\r\xf3\x07\xaa\xfe\xb8\xf3E\xec\xf2\xed\xc3\xf5\x88\x01\xaa\x0c\xa7\x10b\x0e\xa8\x06\x8b\xfd\xda\xf6X\xf1\xfd\xf2\x93\xfa\xb7\x041\x0e\x96\x11\xe0\x0c\xf0\xff\xd2\xf3s\xed\x98\xf2\x04\xff\xf5\x06U\tz\x07\xa4\x07\xec\x07\x0c\x05\x16\xfe\x98\xf6\xeb\xf4T\xf9\xcf\x00\xb5\x05\xfa\x06\x9f\x03\x03\xfc\x00\xf7\xe8\xfa\xd1\x01\xfd\x04{\x04j\x02o\xff\t\xfe|\x01\xb8\x04=\x02\x11\xfd\x87\xfb\xc6\xfa\xd2\xfb\x1a\x00F\x03\xa6\xff\xa2\xfa9\xfaA\xfd\x05\x02\x9c\x04i\x04\xfd\x00T\xfe\xe8\xfd\xe9\xfcR\xfdc\xfe\xc4\x00\t\x02q\xfem\xfa<\xf7\x07\xf6\xc6\xf9\x84\xff\x8e\x05\xc7\x07\xc8\x06h\x02\x8c\xf9\x13\xf5\x18\xf6\x08\xfb\x82\x02\xb2\t\xc4\x0f\x1c\x0f\xaa\x07L\xff\xf8\xf6\x1f\xf3\xcc\xf74\xff\xae\x08\xee\x0cj\x0b\x11\x05\x82\xf9\xb9\xf0I\xee>\xf8c\x07=\x11\xcb\x13N\x0f\xcb\x07\xf8\xff\xa9\xf8>\xf4\xe1\xf5/\xfd\xb8\x04G\x08\xe8\x07\xc3\x02\xf3\xf7\xfe\xec\x92\xecj\xf6\xae\x03b\x0e\x03\x14\x9e\x11\xa8\x07T\xfb\xf3\xf1\xdd\xf2\xe7\xfbc\x04\'\n\xd6\n<\x08\x01\x02\xe2\xf6+\xf1\xf0\xf2\xf9\xf6\x8d\xfd-\x07\xbe\x0f\xde\x0f;\x08\x8b\xfc\xed\xf0x\xeeR\xf3|\xf8\xa3\xfe:\x05\x99\x0b\xd6\x0b\xee\x03t\xfd\xf6\xf8\xd7\xf5e\xf6\xd7\xf9\x1a\x03Y\x0b\x8a\r\xaf\tv\xffl\xf8\x84\xf4@\xf3A\xfb\xdc\x03Q\t\xd6\x07V\x03\x00\x02\x97\x01;\x00\x96\xfc\xad\xfa6\xfc \xff\xb4\x019\x04\xec\x02C\xff\xaf\xfd\x00\xfe\x89\xfe]\xff\xaa\x00f\x01\xa2\xff\xa1\x00\xca\x04\x84\x06\x81\x03\xa2\xfe^\xfe*\xff`\x00\xfe\x00x\xff\x16\xfeF\xfb\xea\xf8\x9c\xf7\x1d\xf8\xd2\xfe\xfb\x05\x94\n\xc6\t9\x05V\x01\x8b\xfdE\xfa\xc1\xf9\xe6\xfd\xe9\x01\xf1\x04\x1f\x07\xb3\x07\xda\x04\xd5\xff\x0f\xfb\xfa\xf6\xce\xf8\xd7\xfe\xc3\x03\xe5\x06\xb2\x08\xc2\x08\xd8\x03g\xfc\x98\xf8\x02\xf8\x87\xfaG\xff`\x04\x7f\x07n\x05\xa8\x00\xf1\xfc\xb7\xfaT\xfa\x07\xfb\x91\xfd\xfe\x01\x8b\x06\xfd\x07|\x05\xef\xff\x07\xfb\xad\xf9$\xfb\xd6\xff\x80\x05\xcd\ta\n.\x07\x1b\x01N\xfb*\xf8\xca\xf7%\xfb\xf2\xff\xbb\x03^\x05!\x03\xa5\xfe\xd0\xfb\xfb\xf8h\xf8i\xf9\xcb\xfc\xf3\x01\xc5\x05\x8e\x08^\x08\xa1\x05\x02\x00u\xfb\x06\xfb\xff\xfbU\xfd;\xfe-\x01\xad\x04!\x06\xcc\x02\xd4\xfb\x97\xf9\xfd\xfd\xf1\x00\xb4\x00\x99\x012\x03n\x00A\xfdM\x00\x9a\x02\xa5\x01\x81\x00T\xff\xa1\xfb\x19\xf8\xba\xf8Y\xfb\x03\xfe\r\x01g\x04\xf6\x02\x7f\x00h\xff\xa2\xffF\xfe\xfe\xfcH\xfe\xdb\xfeD\xff\xff\xff\xef\x01b\x02\x80\x00\xbd\xfe\t\xff\x95\x00\x95\x00\t\x01\x02\x03\x1e\x027\xff\xd0\xfez\xff\xaf\xfe\xa4\xfc\xff\xfb@\xfd\xe0\xff\xae\x02t\x02\x17\xffQ\xfb*\xf9\xaf\xfa\xd8\xfd|\x01d\x03\xc5\x04\xc3\x04,\x01\x0b\xfd\x80\xfb\xa8\xfbb\xfd\xf7\xffg\x03D\x05\x80\x03\x1f\x01\xbc\xfe\xdd\xff\x90\x03<\x04\x87\x01\x81\xfe>\xfe\x99\x01\x80\x03&\x02\xe9\x00+\xff6\xfc<\xfa#\xfb\x83\xfb;\xfbM\xffH\x06\\\x06\xf7\x00\xb0\xfd\xd8\xfd\x84\xffJ\xff\x86\xfd\xb1\xfe\xb5\x04-\nH\ta\x02\x00\xfb!\xf8\x9d\xf8M\xfd\xaa\x04Q\x08\x8e\x07\x17\x04"\x01\x9c\xfdj\xfa\xdb\xfb+\xff\xab\x01e\x03\x93\x02\xa9\x01\r\x02\xe3\xff\xe7\xfd\xfe\xfcD\xfc\xbd\xff\x94\x04\xa9\x07\x82\x07\x01\x03\x9d\xfet\xfbY\xfa\xb0\xfe\xf9\x01\x92\x01\xf5\x00f\x00\x05\x02\t\x00\xbe\xfd\xaa\xfc\xfc\xf9\x17\xfb\xbd\xffP\x04x\x08\xa8\x07/\x02\xcd\xfb\xb6\xf8\xc7\xfc\xf5\x02\x11\x07,\x07g\x04\x81\x03\x0f\x04\xb8\x02_\xfe\xf0\xf8\xbe\xf7\xc9\xf9\xd9\xfe\x9a\x04}\x05C\x03\xa4\xfd\x9a\xf9\'\xf7\xe7\xf5\xb5\xf8\xa9\xfb\xaf\xffW\x03%\x04\xc8\x03
\xc0\x02u\x028\x01\x03\x00\xb0\x01\xe1\x02\x9a\x02\xc8\x03+\x04%\x02\xfb\xffp\xff\xf9\xff\x95\xff\xba\xff\xc6\x00\x90\xff\xbb\xfe\x94\xfd\xd5\xfc\x19\xfe:\xff\x91\xfe\xf3\xfb\xb1\xfc#\xff\x87\xff%\xff\x84\xfe\x1d\xfd/\xfd\xe4\xfe\x87\x00\n\x01\xe7\x00\x1b\x019\xff6\xfc\x8a\xfb\xd4\xfa\x82\xf9\x87\xfb\xb0\xfc%\xfe\\\xff\x9d\xff\x1d\xff\x18\xfc!\xfb\xe8\xfa\x11\xfc\xfc\xfei\x02\xdf\x04I\x04\xd8\x02\xee\x00Q\xffu\xfe^\xfc\xd0\xf9\xfb\xf9\xea\xfeW\x04\xbe\x05\xb1\x04\x97\x00\xc8\xfa\n\xf9\x15\xfd\xea\x02\xf9\x05\xe0\x06S\x06-\x06\x93\x05\xdf\x01W\xfe\x89\xf9\xd9\xf6\x13\xf9\xfa\xfb\xbf\x01?\t\x01\x0c\x03\x06\xe3\xfc\x99\xf94\xf9.\xfa\xb5\x00P\t\\\x0e\x95\x0f\x82\re\x07\xe6\xfd\xff\xf5\xcb\xf3\x18\xf7\x86\x00\x13\nu\r\xa0\x0ce\x06>\xff\xd4\xfa\xe2\xf8C\xfb\xfb\xfc.\x00\xe5\x04B\x08\xd9\t\xc8\x04\x12\xfc1\xf8\x06\xf7\x85\xf7E\xfc\xaa\x020\x075\x04"\x01\x9d\x01\xce\xfer\xfb\x02\xf8g\xf5l\xf8\x1b\xfb\xab\xfc\xc6\xfcH\x02\xcd\r\x13\x12K\x11D\x0f\xa2\x10\x06\x13\x19\x13=\x15B\x17G\x18\xe8\x16y\x13a\x10P\x0b\xb8\x04\xf4\xfd\x1e\xf9\xe0\xf7\xfb\xf8\xcb\xfa,\xfa\x13\xf8\xbf\xf5\x12\xf5\xb8\xf5\xde\xf5\xb3\xf4$\xf3\x97\xf4|\xf5\x1a\xf4\x10\xf6e\xf8^\xf9\xea\xf7\xa6\xf6p\xf7W\xf8\x98\xfaH\xfcl\xfd\x08\xfe~\xff\xed\xffL\xfe\xf0\xfd_\xfc\\\xf8\r\xf5k\xf4o\xf4\x18\xf4\x1c\xf4C\xf4R\xf3\'\xf2\x05\xf2\xae\xf1\xde\xf1z\xf3M\xf5\x05\xf6\x11\xf7\x8b\xf8K\xf8\xeb\xf6\xd8\xf5\xa5\xf5\xd3\xf59\xf7$\xf9\xee\xf9n\xfa\x10\xfb\x97\xfbj\xfbf\xfb\x0e\xfc\xec\xfc\x9b\xfd@\xfda\xfd\xc0\xfd?\xff\x05\x01\xf8\x01\x9b\x01\x19\x02p\x05\x82\x08\x82\x08@\x07\xcb\x06\xab\x06\xb0\x08\xf7\x0b\x07\r \x0b\x97\nz\x0b\xaa\tr\x03\xbc\x00\xce\x04\x1d\x08\xfa\x08\xdd\nI\x0e\xee\x10\xa9\x0f\x1a\x0b$\x08\x9c\t\x86\x0bz\n\x91\tc\x0bV\r&\x0c\x89\te\x05&\x02\xee\x00?\x00\xac\x00\xc9\xfd.\xf9\xd5\xf9(\x06\xb4\x1d\x9e3\x97:\x994\xe4,i(\xc3#e f!\xc4$A%\xd6!G\x1f\x7f\x1aE\x11r\x01\xe3\xef;\xe3\xc9\xdb\x1b\xdb\x1e\xde@\xe2h\xe4N\xe3\x12\xe2\x96\xe1\xe0\xdf\x10\xdc\x83\xd8~\xd8\x1c\xdd\x10\xe5r\xf0\x00\xfd\x16\x03^\x00\x96\xfb\xd3\xf8\xf4\xf8\xbe\xf88\xfa\xed\xfd\xf2\xff\x11\x02W\x03f\x03R\x01\x14\xfbJ\xf4\x87\xee\xd8\xeb\xbf\xeb\x0c\xebg\xecR\xf0\x0e\xf4Y\xf4\xf0\xf3\xf0\xf4u\xf4e\xf3\xa0\xf4\xad\xfae\x01\xa5\x07\xeb\x0e\xec\x12\xc3\x12\xfe\x12\xf7\x13\xae\x14\xa9\x11c\x0e\xa4\r\xdb\ng\x08\xf6\x05&\x02r\xfd\xf8\xf7\xeb\xf41\xf4\xc3\xf3\x12\xf4\x16\xf3A\xf1\x04\xf1\xf1\xf1\xa7\xf3\xfc\xf4\x82\xf5Q\xf6Y\xf6k\xf6\x14\xf8C\xf9\xd5\xf9R\xf9h\xf9`\xfb\x11\xfd.\xfe\x06\xfed\xfc\xe0\xfa\xce\xf9\xe1\xf8\xef\xf8\xe8\xf8c\xf8\xa7\xf5\x1d\xf1\xf3\xedn\xed\x97\xedO\xed*\xeeA\xf1,\xf5\x07\xf9T\xfc5\xffb\x00\xe9\xff\x82\x01&\x05\x7f\t\xee\x0e\xd8\x11\xfd\x13\x07\x14\xf0\x11\xa6\x0c>\tE\x14\xca.DJ9WJW\xb6S@P\xbbI\xb5A?=\xad;X8\x982\x19.<+\xe7")\x12\xe1\xfaM\xe5\xd0\xd6&\xce\xda\xcbU\xcdC\xd2)\xd8"\xdb\xc5\xdb\xb3\xdb\x8d\xdb+\xda\xda\xd7\x12\xda1\xe4R\xf3\xe5\x01\xa2\x0b6\x10]\x0fz\x0c\xc0\x07\x9b\x03r\x00\x88\xfeL\xfd\xa1\xfb\x03\xfc:\xfcE\xf8\xed\xf0\xd3\xe7\x90\xdfA\xd8\xa4\xd3\xee\xd3X\xd5\xe3\xd7\x8a\xdb>\xe0d\xe4\x15\xe7\x08\xea\xef\xec\xc2\xef\xa6\xf3\xb4\xf9%\x01b\t\\\x11\x05\x17\xf0\x19B\x1c\xaf\x1d\xcd\x1d\xf1\x1b\xb2\x19\xdb\x17g\x16\x96\x17x\x1a\xde\x1a\x0f\x18^\x12\x13\x0b\x97\x02(\xfbQ\xf7Z\xf5\x9c\xf3i\xf3X\xf59\xf8\xeb\xfa\x91\xfc\xdc\xfd\x05\xfe\xc9\xfd\xff\xfe\x00\x02\xd6\x05\x07\t)\x0b\xd2\x0bi\n=\x06h\x00\xf2\xf8\xdc\xf0\x10\xea(\xe5\x83\xe2R\xe1\xf7\xe0\xd8\xe0\xaa\xdfH\xdda\xda\x02\xd8V\xd7\x1a\xd9\x0c\xdd\xfd\xe1-\xe7p\xeb\x8b\xee=\xf1\x85\xf34\xf6\xe9\xf9\x98\xfd\xb7\xffD\x02\x9a\x06\xda\x0c\xb2\x13\xac\x18\xed\x1cF\x1f\xe0\x1fO 
>#\xa2)\xd30A8\xc5A\xdbK&R\xdaPLJeD\x8b=\xc04H*\xce"\xbc\x1f\xac\x1b\xa3\x14\x12\x0b\xd0\x02\xbb\xfb\xb3\xf2\xdd\xe8\xaa\xe0(\xdc\xe6\xdbB\xdd!\xe0\xe2\xe45\xebn\xf0~\xf2\x91\xf3\x88\xf6g\xfa\x0f\xfc\xf2\xfc2\xff\xd0\x02\xb1\x05\xa9\x07\xc4\x08\xe8\x07\x97\x04\xfc\xff\xeb\xf9\xe7\xf3W\xef\x91\xeb\xd4\xe6I\xe1=\xde\xdc\xdca\xdbj\xd9X\xd8\x83\xd7\x0b\xd6\x83\xd6\t\xdaU\xdf\x87\xe5\xb5\xeb\x07\xf1\xfd\xf5\xf4\xfb\x0b\x03\x05\x083\n\xf2\x0b^\x0e#\x10\xcc\x11\xc7\x13\xd3\x14\x9f\x13\x19\x11\xf7\x0e\x0f\r\xb2\x0b\x00\x0b\xe4\x08-\x068\x04\xd3\x04\xf1\x05\xf3\x05\xb5\x05p\x05]\x05\x92\x05\xe1\x06\xc6\x08x\n\xed\n\x8e\np\n9\x0b\xf9\x0c \x0e\xa4\r\xd6\x0b\xd7\t\xff\x07\xdd\x05-\x03\xc8\xff~\xfb\xaf\xf6G\xf2$\xef\x9e\xec\xce\xe9;\xe6\xae\xe2\x05\xe0\xf9\xde0\xdf\x80\xdfG\xe0\xc7\xe1\xed\xe3Y\xe6T\xe9>\xed\x12\xf1\xb5\xf2+\xf37\xf4\xbf\xf6\xb4\xf8\r\xf8w\xf6\x16\xf6\x9b\xf6\xf8\xf5Y\xf4\xb3\xf3\xdf\xf3\xab\xf3;\xf26\xf1\xb1\xf2\x16\xf6\xce\xf9\xd4\xfd\xe8\x02\xc6\n\x9a\x13\x07\x1d9*\xe7=\xaeR;]\xf5\\\x1a[(_\xd8aJ[\xc6O\xdbH\xc1E\xc5=@0\xed#\x18\x1bT\x0f\n\xfcN\xe7\xeb\xda6\xd6\x9a\xd1!\xca\xd1\xc6\xcb\xca\xf3\xd04\xd4#\xd79\xddZ\xe3\xb0\xe6A\xe9\xfd\xee\xf7\xf7\xa9\x01\xac\x08\x11\r\x9c\x10\x8d\x14\\\x16\x14\x14s\x0f?\n\x97\x02\x92\xf9\xcb\xf2@\xee6\xe9\xc2\xe2\xb1\xdd\xae\xd9\xd7\xd4\x07\xd1\xcf\xcf\x15\xcf\x05\xce\x00\xce\x01\xd1\xf9\xd6\n\xdf]\xe8\x91\xf0k\xf7^\xfe\xae\x05\xb9\x0b}\x10\xf7\x14/\x19\xf8\x1aU\x1b&\x1d7 \xfa!\x94 z\x1c\xa6\x18\xe8\x14\xe3\x11<\r\x82\x08]\x04Z\x01\x1e\xff\xd6\xfcH\xfd`\xffb\x00t\xffY\xff\x04\x03\x11\x07\xaf\x08g\t\x80\n\xf9\x0b\x08\x0cN\x0cu\r\xbd\r\xdf\x0cZ\n\xea\x07\xba\x05\xf9\x03\xfc\x00\xa5\xfbX\xf6*\xf2\x1b\xef\x18\xec\xbe\xe96\xe8~\xe6\xba\xe4.\xe3\xa8\xe2c\xe2\xfc\xe1\xf3\xe1&\xe2\x8b\xe3\x9c\xe6\xbf\xeak\xee\xed\xf0\x04\xf3\xc5\xf4\x95\xf5\x8c\xf5\xae\xf5\xe8\xf5\xbf\xf4\x9a\xf2\xd2\xf0*\xf1\xbf\xf2i\xf4\r\xf5\xf7\xf4\x05\xf5y\xf6u\xf9K\xfc#\xff\xc5\x02q\x06@\n\xba\x104\x1b\x91\'\xf93\x04C#U\xa0a5c{`\xafa\x15d\xcc]GQ\x85G\xedB\xb5;\xa1.B"Q\x18 \x0c+\xfa&\xe7\xad\xd9\xcd\xd1\xd6\xc9\xb3\xc0\xb5\xba\x9f\xbc\x86\xc3j\xc9\x11\xcdl\xd2\xe0\xd9\x89\xdfZ\xe3\xf0\xe8\xe4\xf1\x10\xfap\xff\t\x04y\n\xf1\x11)\x17\\\x17\x01\x14\x97\x0f\x96\n\xc3\x02\x0f\xf9l\xf1B\xec\xa6\xe6\x93\xdf\xe3\xda\xe3\xd9v\xd9\x14\xd7\x0f\xd4"\xd2\xc0\xd1\x11\xd3\x01\xd6\x8e\xda\xd7\xe0\xb9\xe8\xd0\xf0\x1d\xf9l\x02\xd7\x0b\x07\x13a\x17\x03\x1a\x9d\x1c\xa4\x1ej \xc0!N"\xd2!\xb8 \xaf\x1e\x90\x1c,\x1a#\x16K\x0fA\x07@\x01\x92\xfdV\xfa>\xf8=\xf8\x17\xf9\xe3\xf8\x1e\xf9<\xfc\xa8\xff\xca\xff_\xfd5\xfd\xf0\xff\x9a\x02\xf9\x04\x14\x08\xd7\x0b\xe3\r\xac\x0eR\x0f-\x0f\x0f\ru\x08\xae\x02\xeb\xfc\xcf\xf8\x92\xf5\'\xf2O\xee\xc4\xeav\xe8e\xe65\xe4!\xe2a\xe0\xd2\xde9\xdde\xddz\xe0$\xe5\xc6\xe9\xb3\xed\xf6\xf1\x03\xf7\x85\xfb\x04\xff#\x01\'\x02\x93\x02\x9b\x02\xca\x021\x03(\x04\x95\x04r\x03:\x01\xfe\xfe\xf3\xfc[\xfa\xe4\xf6\xa4\xf3\xb4\xf1%\xf1\xed\xf1\xad\xf3\xf4\xf6\xae\xfa\xb7\xfd?\x00\x99\x02<\x06\xa6\x0b\xa3\x13\x86 \x9f1\xf2AJL\x13R>YKaxc\xe6\\\xdfS\xd7M\xe0H\x1b@\xea5\xcf-\xab%_\x19\x8e\t\xd4\xfb\x90\xf0\x99\xe4\x87\xd5\xc4\xc6\xa6\xbc\xd4\xb7\xd9\xb5\x98\xb5a\xb7=\xbc\xb7\xc2;\xc9"\xd0\t\xd8\xf6\xdf\xbc\xe5Y\xea\x85\xf07\xf9\x1b\x03\xeb\x0b\xb8\x12\xbd\x17\xa9\x1b\x9a\x1e&\x1f\xeb\x1cd\x17\xe7\x0f\xda\x06\xc9\xfd^\xf6o\xf0k\xebg\xe66\xe2N\xdf\xa9\xdd\xee\xdc\xab\xdc\xf6\xdb\x88\xdb\x8e\xdcg\xdfN\xe4\xfe\xea\xdf\xf2\xf4\xfa\xb6\x02\xa3\n\xb8\x12\xbc\x19$\x1f\xab!a"v!S +\x1f\xb2\x1d\xca\x1a-\x17\xf5\x12\x8d\x0fx\x0c\x9c\x08v\x04\xee\xfe:\xfa\xd1\xf5B\xf2o\xf0A\xf0n\xf1s\xf2t\xf3 
\xf6\x97\xfa\xca\xfe\x96\x01\xac\x02\xc7\x03\x1e\x05E\x06v\x067\x06\x17\x06\x03\x06H\x05\x9f\x04\xce\x04b\x04i\x02\x92\xfe\x11\xfb[\xf8\x95\xf5\xd0\xf2\x19\xf0\x14\xeeC\xed\xb2\xed$\xef\xef\xf0\xb1\xf2t\xf4\xf6\xf5"\xf7N\xf8\x04\xfa\xb6\xfb\xc4\xfc\x7f\xfd\xe8\xfe\x99\x013\x04_\x05[\x05v\x04\x0b\x03\x8b\x00\xce\xfd\xaf\xfb\x04\xfa\x83\xf8\x84\xf6\x19\xf56\xf5[\xf6`\xf7S\xf7\xaf\xf6\x8d\xf6\xf4\xf6\xd4\xf7\x08\xf9\xb2\xfa`\xfc\x88\xfdP\xff:\x02\x9e\x05\xb0\tO\x10\x03\x1a\x7f#_*%1\xdf9\xc7A\xd5C\xecA\x8b@J@\xe2<\xa35\x85.\x19)}"\x08\x19\xe2\x0f\xa8\x08g\x01E\xf7#\xecU\xe3\xc6\xdcu\xd6\xec\xcf\x1f\xcb\xa9\xc9~\xca/\xcc\xe4\xceT\xd46\xdb\x10\xe14\xe6\x14\xec\xcc\xf2\x9e\xf8_\xfdI\x02\xfe\x06\xc2\n\x9b\x0e\xb2\x12}\x15\xc8\x15;\x15\\\x14\n\x12\xad\rr\x08D\x03W\xfd\xad\xf6\x08\xf1\x1d\xed!\xea7\xe7@\xe4\x9a\xe2b\xe2\r\xe3\xbb\xe3\x83\xe4e\xe5\x9e\xe6\xe0\xe8\x11\xec\r\xf0!\xf4@\xf8d\xfc\x94\x00\x88\x04x\x08\xb3\x0b\xab\r\xe0\r\xf7\r\xf8\r*\x0e\xec\r\xf4\x0c\x8d\x0c\xa1\x0b\x06\x0bR\n\x92\t%\t?\x08\x11\x07V\x05\xfd\x03\xb1\x03\xc5\x03\x0c\x04\x1c\x04\x18\x04X\x04\xb6\x04t\x05\xa8\x05\x7f\x05\xeb\x04\x11\x04\x13\x03%\x02\xe0\x01\x85\x01\xe1\x00\xfd\xff@\xff\xf7\xfe\x8f\xfe\x0b\xfe>\xfdQ\xfc\xa4\xfbE\xfbM\xfb(\xfb\x1d\xfbp\xfb\xd7\xfb\'\xfc4\xfcU\xfcC\xfc\xd5\xfb2\xfb\xd3\xfa\xf8\xfa$\xfb\xf4\xfa\xc2\xfa\xb1\xfa\xda\xfa\xb8\xfaI\xfa\xd6\xf9X\xf9\xc2\xf8\xcf\xf7\xd6\xf67\xf6\xd6\xf5\x81\xf5\x00\xf5\xa9\xf4\xc7\xf4\x00\xf5\xc8\xf4I\xf4\xfa\xf3\x05\xf4\xf4\xf3-\xf4K\xf5z\xf7\x18\xfa\x86\xfc\xe2\xfe9\x01\x9a\x03\xfb\x05N\x08.\x0b\xc5\x0fR\x16k\x1dV#\x16()-\x952\xdb6X8G8\xe97\x037\x0e4\xfb/\x8c,\xe7)\xea%\xc1\x1f\x18\x19x\x13m\x0e\xe4\x07\xa9\x00\xea\xf9\x02\xf4\x1c\xee\x1c\xe8/\xe3\x16\xe0\x08\xde/\xdc\xda\xda\xee\xdaj\xdc)\xde\xaf\xdfD\xe1\xa9\xe3G\xe6\xd3\xe8c\xeb\xe4\xedE\xf0\x9b\xf2\xf0\xf4\x17\xf7\xb3\xf8%\xfae\xfb\xdd\xfb\x95\xfb\n\xfb\xb5\xfa\x07\xfa\x8f\xf8\x07\xf7\xcc\xf5\xed\xf4A\xf4\xae\xf3\x96\xf3\xdf\xf3F\xf4\xe7\xf4\xdf\xf5\x0b\xf7;\xf8\x7f\xf9\xd8\xfaM\xfc\xd3\xfdf\xff\xfa\x00\x81\x02\xd5\x03\x17\x05<\x06\x15\x07\xa3\x07\x06\x080\x08/\x08&\x08\xf3\x07\xf2\x07\x10\x08R\x08\xc5\x08\x8e\t\x95\n\xcc\x0b\x1f\r9\x0e\x1a\x0f\x80\x0f\xb9\x0f\xb8\x0f\x80\x0f\xbf\x0e\xdf\r\xe7\x0c\xd2\x0bx\n\x9b\x08\xc3\x06\xed\x04\xeb\x02w\x00\xe1\xfd~\xfbM\xf94\xf7#\xf5W\xf3\xfe\xf1\xf0\xf0\x10\xf0u\xefO\xefj\xef\x85\xef\xb0\xef2\xf0!\xf1\x12\xf2\xd5\xf2x\xf3U\xf4J\xf51\xf6\xf3\xf6\xcb\xf7\xbd\xf8\x8f\xf9&\xfa\x9f\xfa-\xfb\xad\xfb\xea\xfb\xfa\xfb*\xfc\x8b\xfc\x07\xfd~\xfd\xd9\xfdI\xfe\x8e\xfe\xcd\xfe\x16\xffl\xff\xcc\xff!\x00\x7f\x00\xf7\x00X\x01\xb6\x01\x15\x02\x94\x02\'\x03\x91\x03\x1a\x04\xf1\x04\x06\x06\x1b\x07\xfd\x07\xda\x08\r\n1\x0b\xdb\x0b\x19\x0c\x8c\x0cl\r[\x0e^\x0f\xdb\x10\x14\x13(\x15G\x16\xb4\x16b\x17z\x18)\x19\x0c\x19\xe4\x18\xf8\x18\xa6\x18l\x17\xe5\x15\x1a\x15\x81\x14\xe8\x124\x10T\r\xda\n\x0b\x08\x87\x04\x00\x01\xdd\xfd\xf4\xfa\xc2\xf7\x9e\xf4-\xf2\x81\xf0!\xef\x9f\xed 
\xec\xd6\xea\xe5\xe9=\xe9\xc3\xe8\xb3\xe8\xe1\xe8*\xe9/\xe9A\xe9\xf4\xe9>\xeb\xd6\xecR\xee\xa3\xef\x12\xf1\x9b\xf2\x13\xf4\x99\xf5\x16\xf7\x8d\xf8\xd3\xf9\xda\xfa\xbf\xfb\xcf\xfc%\xfe\x8b\xff\xb9\x00\xbe\x01\xbe\x02\xae\x03v\x04\xe6\x043\x05s\x05e\x05\xfb\x04K\x04\xb1\x03.\x03\x88\x02\xb7\x01\xe5\x00j\x00\x1e\x00\xbc\xffS\xffE\xffy\xffv\xff\'\xff\x1a\xffo\xff\xdf\xff\'\x00\x96\x00f\x01w\x02k\x03T\x04_\x05t\x06\x8d\x07J\x08\x96\x08\xe3\x08%\t4\t\xe8\x08v\x08#\x08\xad\x07\xbb\x06e\x05\x14\x04\xe8\x02\x93\x01\xda\xff!\xfe\xa3\xfcW\xfb\x04\xfa\xd9\xf8.\xf8\xfa\xf7\xcf\xf7\x84\xf7[\xf7\x99\xf7\x18\xf8G\xf8V\xf8\x93\xf8\xee\xf82\xf9O\xf9\x8b\xf9!\xfa\xc1\xfa\x0c\xfb*\xfbw\xfb\xfb\xfbH\xfcB\xfc%\xfc)\xfc\x1e\xfc\xca\xfb\x94\xfb\xab\xfb\x06\xfcA\xfcA\xfcX\xfc\xac\xfc\x1d\xfdU\xfd\x83\xfd\xce\xfd \xfe(\xfe\x17\xfeU\xfe\xca\xfe.\xffj\xff\xb9\xff^\x00\x1d\x01\xaa\x01+\x02\xcc\x02l\x03\xc8\x03\x13\x04\xeb\x04e\x06\xd8\x07I\t\r\x0b\x1a\r\x06\x0f\xb5\x10\xe9\x12\xa6\x15\xe0\x17\n\x19P\x1a\'\x1c\xe1\x1d\xa8\x1e\x1b\x1f\x05 h +\x1f\xfd\x1cD\x1b\xc6\x19(\x17\x19\x13\x03\x0f\x89\x0b\xd9\x07Y\x03\x08\xff\xac\xfb\xe9\xf8\x95\xf5\x01\xf27\xef\x91\xed,\xecK\xea{\xe8x\xe7\x0e\xe7d\xe6\x85\xe5\x83\xe5q\xe6v\xe7\x04\xe8\xb8\xe8a\xeab\xec\xf6\xed2\xef\xbd\xf0\x8b\xf2\xf2\xf3\xe2\xf4\xce\xf5\x06\xf7R\xf8(\xf9\xc9\xf9\x9a\xfa\x92\xfb=\xfcv\xfc\xbe\xfcM\xfd\x95\xfdU\xfd\xe0\xfc\x93\xfc[\xfc\xdd\xfbe\xfb\x17\xfb\xfa\xfa\n\xfb\x1c\xfb]\xfb\xdc\xfb\x8d\xfcY\xfd\n\xfe\xd0\xfe\xcc\xff\xef\x00\x08\x02\x06\x03#\x04{\x05\xd6\x06\x01\x08\x12\tB\nu\x0bz\x0c`\r3\x0e\xd0\x0e*\x0f%\x0f\xe3\x0ee\x0e\xaf\r\xd0\x0c\xcf\x0b\xb2\n\x83\t^\x08O\x07N\x065\x05 \x04+\x03%\x02\xf3\x00\xa7\xffZ\xfe\x0b\xfd\xa4\xfbN\xfaJ\xf9P\xf8\\\xf7\x9f\xf61\xf6\xf4\xf5\xa1\xf5:\xf5\xf6\xf4\xa9\xf4/\xf4\xb2\xf3E\xf3\x01\xf3\xd8\xf2\xba\xf2\xe9\xf2S\xf3\xd9\xf3p\xf4\xfe\xf4\xab\xf5C\xf6\xb4\xf6\x12\xf7d\xf7\xc0\xf7\x1b\xf8u\xf8\xf8\xf8\xab\xf9\x98\xfa\x8f\xfb\x88\xfc\x83\xfd}\xfeY\xff\x05\x00\xd5\x00\xdb\x01\xde\x02\x7f\x03\x15\x04\x0c\x05*\x06\x11\x07\xe7\x07\x05\t\x7f\n\xc5\x0b\xd9\x0ce\x0e\xbf\x10/\x13Y\x15\xba\x17\x87\x1a>\x1d\x02\x1f\x88 J"\xdb#V$n$\xf1$\\%\xc4$R#\xf1!k \xc4\x1d\x00\x1a\x18\x166\x12\xc4\r\xa9\x08\xc4\x03\x87\xff\x96\xfbo\xf7~\xf3\x1f\xf0N\xed\xb8\xeaT\xe8U\xe6\xbf\xe4\x83\xe3I\xe2/\xe1\xb6\xe0\xd0\xe0F\xe1\xa6\xe1[\xe2}\xe3\xce\xe4%\xe6z\xe7\x19\xe9\xab\xea\xcb\xeb\xc3\xec\xe2\xed2\xef\xa4\xf0\xff\xf1g\xf3\xfa\xf4\xa5\xf69\xf8\xb2\xf92\xfb\xba\xfc\t\xfe\x0c\xff\xeb\xff\xde\x00\xd1\x01\x92\x02.\x03\xaf\x034\x04\x8f\x04\xad\x04\xba\x04\xd9\x04\xfa\x04\xce\x04m\x04\x12\x04\xe8\x03\xce\x03\x8d\x03y\x03\xb9\x03/\x04\xab\x04\x19\x05\xc3\x05\xa6\x06h\x07\xe2\x07e\x087\t\xfe\tv\n\xd0\nq\x0bM\x0c\xdf\x0c\x0b\r=\rV\r\xe4\x0c\xbc\x0bD\n\xda\x08G\x07Y\x05\x8e\x03,\x02\x0f\x01\xee\xff\xdb\xfe\xee\xfd\xff\xfc\x06\xfc\xdd\xfa\xaa\xf9e\xf8\x15\xf7\xcf\xf5\x90\xf4\x8a\xf3\xdb\xf2\x82\xf2!\xf2\xd6\xf1\xe5\xf1+\xf2u\xf2\x8d\xf2\xaa\xf2\xcf\xf2\xdb\xf2\xd9\xf2\xed\xf2M\xf3\xd7\xf3y\xf4I\xf5[\xf6{\xf7e\xf80\xf9\xea\xf9\x99\xfa\xfc\xfa\'\xfbd\xfb\xaf\xfb#\xfc\xaf\xfcl\xfdb\xfe5\xff\xc4\xffF\x00\xe6\x00l\x01\xc4\x01\x0e\x02\x9c\x02Z\x03\x00\x04\x0b\x05\xe6\x06&\t\t\x0b\xc5\x0c\xfc\x0e\x8b\x11\xd5\x13\xcc\x15!\x18\xbe\x1a\xce\x1c%\x1e\x98\x1f\x95!?#\xd2#\xd6#&$G$!#\xf6 
\xe8\x1e\xe3\x1c\xc1\x19\x8e\x15\x90\x11X\x0e\xd3\np\x06M\x02\t\xff\xf7\xfbN\xf8\x87\xf4\x94\xf1\x1b\xefC\xecO\xe9\xe9\xe63\xe5\xce\xe3s\xe2}\xe1e\xe1\x85\xe1\xbd\xe1\xfa\xe1\x9d\xe2\xcd\xe3\xf2\xe4\x03\xe63\xe7\xb5\xe8f\xea%\xec\x17\xee^\xf0\xc6\xf2\x0f\xf5>\xf7\x84\xf9\x96\xfbu\xfdP\xff\x02\x01\x94\x02\xd2\x03\xda\x04\xc0\x05p\x06\xd5\x06\x02\x07\x08\x07\xd5\x06\x8a\x06\x12\x06_\x05\x93\x04\xd0\x03\x08\x03\x1e\x02-\x01u\x00\x00\x00\x87\xff\xfb\xfe\xb7\xfe\xeb\xfeG\xff\x81\xff\xf6\xff\xf0\x00\'\x02\x15\x03\xfd\x03B\x05\x9b\x06\xb1\x07l\x08D\tK\n\x1c\x0b{\x0b\xc1\x0b\x07\x0c\x18\x0c\xcf\x0bQ\x0b\xce\n@\n~\tb\x08\x1a\x07\xc3\x05Y\x04\xb7\x02\xfc\x00_\xff\xf5\xfd\xa4\xfcZ\xfb+\xfaG\xf9\x91\xf8\xee\xf7S\xf7\xd7\xf6\x88\xf6?\xf6\xf3\xf5\xbc\xf5\xb4\xf5\xc9\xf5\xee\xf5+\xf6\x9b\xf66\xf7\xcb\xf7P\xf8\xcf\xf8\\\xf9\xc2\xf9\x02\xfa6\xfaf\xfa\x8d\xfa\xa5\xfa\xd2\xfa\x10\xfb\\\xfb\x98\xfb\xcf\xfb\x0e\xfc.\xfc4\xfc\x07\xfc\xbf\xfby\xfb\x1f\xfb\xbf\xfan\xfaG\xfa+\xfa\x00\xfa\xfe\xf9u\xfaB\xfb\r\xfc\xc5\xfc\xeb\xfds\xff\x11\x01\xe5\x02f\x05\xb0\x08\xf8\x0b\x9d\x0eB\x11}\x14\xa3\x17\x1e\x1a"\x1c\xa5\x1e&!h"\xa0"K#~$\xa8$N#\xd2!\xe4 (\x1f\x8d\x1b\x92\x17\xac\x14\xc5\x11b\r,\x08\x17\x04\xe8\x00\x1d\xfd\xa1\xf8\x1b\xf5\xcf\xf2u\xf0I\xedQ\xea\x9c\xe8\x90\xe7\x1f\xe6\x89\xe4\xb6\xe3\xe2\xe3\x1d\xe4\xf3\xe3A\xe4\xaa\xe5q\xe7\xae\xe8\xba\xe9J\xebO\xed\xfe\xee>\xf0\xc4\xf1\xb2\xf3h\xf5\xa0\xf6\xe4\xf7|\xf91\xfb\x9d\xfc\xce\xfd\x10\xff4\x009\x01\x06\x02\xab\x02@\x03\xa6\x03\xf5\x03\x15\x04\x0f\x04\xf7\x03\xd0\x03\xa0\x03w\x03\'\x03\xc1\x02T\x02\xe9\x01\x96\x01\x16\x01\xc3\x00\xb1\x00\xac\x00\x96\x00\x80\x00\xd7\x00h\x01\xd5\x01P\x02\'\x03\x13\x04\xcd\x04g\x05#\x06\xed\x06\x84\x07\xfb\x07h\x08\xc8\x08\xe9\x08\xc4\x08y\x08\x04\x08u\x07\xc3\x06\xe1\x05\xee\x04\xf4\x03\xed\x02\xc7\x01\x91\x00r\xfft\xfel\xfdj\xfc\x84\xfb\xb4\xfa\xe3\xf9,\xf9\x95\xf8$\xf8\xef\xf7\xc7\xf7\xbd\xf7\xd2\xf7*\xf8\xa3\xf8\x15\xf9\x81\xf9\xf1\xf9\x96\xfa"\xfb\x95\xfb\x1d\xfc\xd2\xfc\x99\xfd!\xfe\xb1\xfer\xff*\x00\xca\x00%\x01\x9c\x01\x03\x02@\x02O\x02R\x02\x82\x02\xa3\x02\x96\x02u\x02\x81\x02\x9f\x02\xa4\x02\x94\x02\x92\x02\x9b\x02m\x02\x1a\x02\xd1\x01\x96\x01T\x01\xe9\x00s\x00\x06\x00\x94\xff\x16\xff\xa2\xfe\x0b\xfen\xfd\xe0\xfcF\xfc\x91\xfb\xcc\xfa!\xfa\xa0\xf93\xf9\xe2\xf8\r\xf9\xa7\xf9d\xfa)\xfbB\xfc\xdc\xfd\xbb\xffj\x01-\x03<\x05t\x07X\t\x08\x0b\xe1\x0c\x06\x0f\x00\x11U\x12\x7f\x13\xcf\x14%\x16\xd7\x16\xe7\x16\xea\x16\xcd\x16\x05\x16]\x14x\x12\xd3\x10\xff\x0e\x8d\x0c\xc8\ta\x07@\x05\xde\x02)\x00\xd3\xfd\xfc\xfbF\xfa=\xf8-\xf6\xcb\xf4\xdd\xf3\xec\xf2\x0c\xf2\xb1\xf1\x04\xf2`\xf2\x9d\xf2\x1e\xf32\xf4Y\xf5\x1e\xf6\xc6\xf6\xb7\xf7\xa2\xf8\x1e\xf9a\xf9\xda\xf9\x83\xfa\xd6\xfa\xee\xfa\x1a\xfb`\xfbt\xfb\\\xfbO\xfbH\xfb#\xfb\xcc\xfat\xfa6\xfa\xf2\xf9\xba\xf9}\xf9L\xf9\x1e\xf9\xe7\xf8\xda\xf8\xfe\xf8:\xf9l\xf9\x9d\xf9\xc4\xf9\x11\xfaz\xfa\xf1\xfa\x92\xfbG\xfc\xef\xfc\x87\xfdN\xfe<\xffA\x00:\x01"\x02\xff\x02\xd8\x03\x9c\x04E\x05\xe4\x05z\x06\xe3\x06\x16\x070\x07I\x07a\x07g\x07S\x07\x1f\x07\xca\x06\x7f\x06\x15\x06\x98\x05+\x05\xaf\x04*\x04\x8d\x03\xf4\x02u\x02\xf8\x01\x8b\x010\x01\xce\x00o\x00+\x00\xf6\xff\xcb\xff\x99\xff\x82\xffn\xffM\xff&\xff\x0f\xff\r\xff\xf5\xfe\xdc\xfe\xe9\xfe\x0e\xff;\xffc\xff\xbc\xff#\x00A\x001\x00S\x00\x9c\x00\xc5\x00\xab\x00\xba\x00\xfd\x00\x00\x01\xcf\x00\xc3\x00\x04\x012\x01\xd0\x00J\x00\x0c\x00\xd8\xff^\xff\xa9\xfe.\xfe\xd7\xfdP\xfd_\xfc\xa1\xfbM\xfb*\xfb\xe2\xfaY\xfa\xfe\xf9\xe8\xf9\xdd\xf9\xb7\xf9\xb0\xf9\x1a\xfa\xa2\xfa\x01\xfbB\xfb\xe7\xfb\xfb\xfc\x02\xfe\xbe\xfeR\xff\x03\x00\xd4\x00\x94\x01\x0c\x02o\x02\xcb\x02\x0c
\x03\xf1\x02\xa1\x02j\x02<\x02\xe4\x01I\x01\x99\x00\x1b\x00\xaa\xffO\xff\xd0\xfeS\xfe\xf0\xfd\xa6\xfdi\xfdE\xfdQ\xfd\xa7\xfdX\xfe7\xff7\x00w\x01\xbc\x02\xc8\x03\xb6\x04\xb8\x05\xd5\x06\xd8\x07\xa1\x08\x8a\t\x9a\n\xca\x0b\xa0\x0c*\r\xd3\r\x1e\x0e\xe6\rp\r\xd3\x0cG\x0co\x0b~\n\xcc\t\x19\t=\x08]\x07\x9d\x06\xc0\x05\x98\x04D\x031\x02!\x01t\xff\xf7\xfd\x19\xfd2\xfc\xd9\xfaM\xf9\x87\xf8F\xf8!\xf7\xb6\xf5{\xf5d\xf5h\xf4n\xf3I\xf3t\xf3\xb6\xf2=\xf2\xc6\xf2M\xf3\xb6\xf3[\xf4\x88\xf5\x9a\xf6\x11\xf7\x9e\xf7\x9f\xf8\x95\xf9T\xfaO\xfb\x91\xfc\xed\xfdJ\xff\xc1\x00\xff\x01\xb1\x02W\x03\xe0\x03\xcf\x03\xbf\x03\xa8\x03:\x03\xeb\x02\'\x03?\x03\xd4\x02o\x02\x03\x02\x12\x01\xe9\xff\x10\xffA\xfej\xfd5\xfdt\xfd\xc5\xfdf\xfe\x8e\xff\xa4\x00\xfe\x00"\x01f\x01O\x01F\x01\x81\x01\xe8\x01\xac\x02\x97\x03~\x04K\x05\xdf\x05d\x06\x06\x06C\x05\x8d\x04\xae\x03\xf9\x02e\x02%\x02\'\x02\x06\x02\xe3\x01t\x01\x17\x01\xe6\x00\x19\x00\x16\xffW\xfe\xed\xfd\xb8\xfd\xa0\xfd\xba\xfd\xfd\xfd9\xfeX\xfep\xfe\x9c\xfe\xa0\xfe\x92\xfer\xfe?\xfe8\xfe\x9e\xfeu\xffh\xff4\xff\x9b\xff\xfc\xff\xf3\xff\xff\xff\x95\x00\xf5\x00\xb5\x00\x99\x00\xf1\x00\x1f\x01\'\x01\xde\x00~\x00\x8e\x00\x02\x01P\x01`\x01\xac\x01\xc7\x01\xf6\x00\x04\x00\x0e\x00\x08\x00K\xff\xd6\xfe \xffU\xffk\xff\xab\xff0\xff\xf3\xfc\x17\xfb\xfb\xf9\x86\xf8~\xf9w\x01\xbd\t\xce\x08I\x04D\x05Q\x088\x04b\xfeR\xfe\\\x00\xb9\x00\xfa\x00\xb8\x03&\x05p\x02\xef\xfc\xe7\xf7Y\xf6\xc2\xf70\xf9\xfa\xf8\xa5\xfa#\xff\x8e\x03C\x051\x06\x97\x07\x92\x05\xad\x00\xfc\xfe\xce\x01q\x05\xd3\x07\x15\x08+\x07\x91\x05\xaf\x03\xaf\xff~\xf8B\xf4\x82\xf3\xfe\xf1Q\xf1\xcf\xf3\\\xf7\xab\xf6\xa3\xf3,\xf1\xff\xed\xd6\xec\xdc\xedU\xef\x10\xf0\xa9\xf3\xb1\xf8\xd2\xfa_\xfa\x1e\xfa\xd5\xf9\xb2\xf7=\xf5\xb5\xf6U\xfb\x83\xff\x17\x05\xfb\x0f,\x1d\xdd!\x8a\x1eo\x1d\xd6\x1f\x85 \x04\x1f\xcb\x1e\xc4\x1fx\x1fV\x1f\xb4\x1c\r\x16l\r\xa4\x03\x1d\xfa\xca\xf1\x91\xed\xa0\xeb\xfe\xe9\xb0\xe9\xae\xeb\xc3\xed\x83\xef\xd4\xf0\xc0\xf0\xe8\xf0\xb0\xf3K\xf9\x90\xff\x08\x05\xb8\tk\rt\x0e\xd4\r\xf7\x0c\x19\x0b\xed\x07:\x04\x85\x02q\x03\xee\x03\x81\x02;\xffF\xfb\x9d\xf8\xbc\xf6g\xf4\xe0\xf1\x9f\xf1\x91\xf3\x00\xf5\x8e\xf6\xad\xf9x\xfb\xf3\xf9\xe4\xf8{\xfa0\xfc\x8e\xfc\xfd\xfc+\xfeX\xff\xe0\x00\xbd\x02\xde\x02\x13\x01\xfb\xfey\xfd\xd9\xfcm\xfdZ\xfe\x90\xfe\xdb\xfe\xeb\xffw\x01\x08\x02m\x01?\x00:\xff\x87\xff\xf9\x00\xcb\x02(\x04\xb4\x04\x85\x04\xc1\x04~\x05\x03\x06\xc4\x04\xf5\x02i\x02"\x03\xac\x04\x80\x05[\x05\x8d\x04n\x03\x9c\x02\x06\x02\xa4\x01\xeb\x00t\xff:\xfe`\xfe\x81\xff\xc0\xffC\xfeA\xfcK\xfb?\xfb#\xfb\xea\xfa\xda\xfa\xb4\xfao\xfav\xfa 
\xfb\x12\xfc\xcc\xfc\xc8\xfc\x16\xfd\xcf\xfeK\x01\x1e\x03{\x03\x96\x03R\x04u\x05\x80\x06\x00\x07s\x07.\x07\x14\x06K\x05\xc9\x04C\x04\xcb\x02\xc3\x00v\xff\xb6\xfe\x06\xff\x04\xff\xf6\xfd\r\xfd\x94\xfc\xaf\xfc\xe1\xfc\xf6\xfc\xaa\xfd\x10\xfe!\xfe\xe0\xfe,\x00|\x01\x15\x01\xee\xff)\xff\x07\xff\x92\xff\xc6\xff\xab\xff%\xff\x88\xff\x13\x00\x7f\x00\xd2\x00\x05\x01\n\x01\xbe\x00X\x01\x97\x02l\x03\xaf\x03\xad\x03\xdc\x03Z\x04\xd1\x04k\x04\x1c\x037\x01\xa3\xff\x9b\xfep\xfec\xfe\r\xfd\x80\xfb\xbf\xfa\x87\xfa0\xf9*\xf7\x1f\xf6\xb5\xf5\xf6\xf5\x07\xf7\xed\xf8\xf1\xf9\t\xfak\xfaM\xfbQ\xfc\x18\xfdb\xfd*\xfe\xc2\xff\x0e\x02\x9d\x03\xee\x03[\x04\xa7\x04,\x04F\x03\xb1\x02\x0b\x03d\x032\x03W\x03\xda\x03"\x04\xb9\x02\x86\x00@\xffP\xff\xfb\xff\'\x01&\x02#\x02\xc3\x01\x0b\x01;\x01\xe4\x013\x02\x90\x01G\x00/\x02\x0e\x063\x07\xb0\x03\x00\x00\xbc\x01\x12\x04t\x01\x83\xfc\xc4\xfbr\xfe\x96\xff\xc7\xfe\xc5\xfe\xa8\xff5\xff\x02\xffU\x00Z\x01\xec\xff\x8e\xfe\x90\x00[\x04\xf1\x04\xf1\x00\x83\xfc\xee\xfaS\xfcz\xfdL\xfc\x02\xfa\x18\xf9\x96\xfb\x84\xff\xe0\x00\x91\xfe\xe9\xfb<\xfet\x02b\x03>\x01\xdc\x00\xcb\x01\x0e\x00\xc7\xfd)\xfe\xfb\xff9\xff`\xfd\x02\xfd\xc2\xfd\xf7\xfd\x99\xfd[\xfe\xfe\xff_\x02\xf9\x03\xc4\x04}\x05\xb4\x06s\x07?\x06\xa4\x04\x06\x05\x00\tq\r\x1e\x0e\x99\n\xe9\x05\xe7\x03\x06\x03\xd8\x01\x03\x01F\x01b\x01\xda\xff\x17\xffR\x01\x95\x01\x86\xfb\x1f\xf5s\xf6\xe3\xfbO\xfc\xd5\xf8\x96\xf6\xe2\xf6b\xf8\xc0\xf9M\xfa\xa6\xf8\xf3\xf6\\\xf7\xa0\xf86\xfa\x82\xfby\xfd4\x01\xd9\x05x\x08u\x07\xe1\x05\xc1\x04\x0e\x03\x11\x02$\x03\x9c\x04.\x04$\x03\xf5\x01\x0f\x00\xab\xfd`\xfbu\xf9\x8d\xf8>\xf9S\xfa\xdd\xfa\xe8\xfb\xa0\xfd:\xfe\xba\xfd\xaa\xfd\xc0\xfe\x1a\x00q\x00\x9c\x00\x96\x01\xb7\x03\x1c\x05\xa5\x04\xa1\x04I\x05\x83\x04\x83\x02U\x01\xa2\x01=\x02\x87\x02\xef\x01\xd3\x00\xad\x00>\x01\xd6\x00\xfc\xff\x19\x00\x92\x00r\x00\x15\x00p\x00:\x01\x8f\x01\x8b\x01q\x01\x8d\x01c\x01\xaf\x00\xf3\xff\x90\xffy\xff\x91\xff\xac\xff\x87\xfft\xffN\xff\xed\xfe\x9e\xfeh\xfe9\xfe\x19\xfeL\xfe\x80\xfeF\xfe\xd9\xfd\x8d\xfd\x95\xfd\xc2\xfd\x04\xfe/\xfe\x06\xfe\xe6\xfd\x16\xfel\xfe\xc3\xfe\x14\xff\xc8\xff\x8f\x00j\x01S\x02\xd5\x02\xf3\x02\xcb\x02z\x02\x1b\x02\xc2\x01\xb6\x01\x8a\x01\x11\x01@\x01\xc8\x01N\x01\x0b\x00\x11\xff\x82\xfe\x93\xfd\xa5\xfc\xa0\xfcA\xfct\xfd\x0f\x021\x07\xf7\x08\x8d\x06w\x04\x00\x03Z\x01\x1c\x01\xa1\x02e\x04^\x059\x06M\x06^\x05\'\x03\\\xff\xa4\xfct\xfc\x8e\xfe\x9e\x00\x01\x02\x1d\x03\xf7\x01I\xff\xd5\xfd\x19\xfd\xeb\xfb\xd0\xf9\n\xf8\x9e\xf7k\xf8.\xfa\xd7\xfae\xf9-\xf8\xa7\xf7\x1d\xf78\xf7f\xf8\xa1\xf9\xf1\xf9\xb8\xfan\xfc\x12\xfeL\xfee\xfd)\xfc\xf8\xfa\xa8\xfa\xe0\xfaU\xfb.\xfb\x9c\xfa\xa6\xf9:\xf8\xd5\xf7-\xf9\xc9\xf8\x90\xf4\x96\xef)\xef$\xf1*\xf2\xe4\xf1)\xf2\xc1\xf3\xd0\xf3U\xf2\x19\xf2\x18\xf5\xdf\xf9\xd8\xfd\x90\x02_\tj\x11A\x18\x1f\x1f\xae(\xda1\'5a4\xfb6\xad9\xc66\xe8/f*\x1a\'U"$\x1b\xa0\x12\xb2\x08\xe5\xfd`\xf3\x9f\xea\x89\xe6\xc7\xe4\x84\xe3\xa4\xe2\xff\xe2E\xe4\xb2\xe6+\xea\x85\xec\xa5\xed\x96\xf0\x8f\xf6.\xfd\xce\x02_\x07\x04\n\xb2\t%\t\x98\t\x05\t\xeb\x06d\x04\x1a\x03-\x03\x17\x03\xec\x01\xc4\xfe\x1b\xfa\xed\xf4\x87\xf1\xd2\xef\xca\xee\x18\xef%\xf0\xbd\xf0z\xf15\xf3f\xf4\x15\xf4\x90\xf3\xbb\xf3\xc3\xf4\xdb\xf7\x8f\xfc\xdf\xff+\x01\xe1\x01\x89\x029\x02\xac\x01\x8b\x01\xf1\x00+\x00\xf7\x00\xaa\x02\x84\x036\x03\x1f\x02\xbc\x00s\xff\xdc\xfe\xd2\xfe\x95\xfex\xffc\x00\xde\x00\x1b\x01v\x01w\x01\x95\x00\xda\x00\xf4\x01)\x03/\x048\x05f\x06z\x06\xb8\x06=\x07\xd9\x06\xfc\x05\x15\t\x12\x12v\x16 
\x12`\x0b\'\x06\xc6\x01\x87\xfe\xc3\xff\x96\x02\xe6\xffP\xfc%\xfe\xbc\xfeb\xf9\xe3\xf2`\xee*\xec\xa4\xee9\xf6\xc8\xfc\x8c\xfey\xfdS\xfd\x1e\xfd\x88\xfc\xb3\xfc\x00\xfb\x1d\xf9\x18\xfb\x02\xff\xa6\x02\x90\x02\xb0\xff&\xfc\xec\xf7\xdc\xf4\x12\xf4\xfb\xf3\xa6\xf3o\xf4\xef\xf5-\xf9\xb1\xf9\xc0\xf6-\xf4\x07\xf12\xf1\xda\xf4@\xf7\x98\xf9\xb2\xf9\x16\xfa\xf9\xfas\xf8\x13\xf9\x03\xfa\x94\xfbZ\x00\xfe\x02\x12\x02\xd9\x02\xeb\x07\xdc\x0e(\x10\xdb\x10g\x14\xd2\x13U\x17\x0c$\xa34y8$0\x1a.\x920o1],]%\xa9 "\x1c\xee\x1c\xd4\x1b\x97\x12\xf0\x06\x0c\xfb\xc7\xf0\xce\xe8j\xe5,\xe5c\xe1T\xdb\xa3\xdc\xaf\xe20\xe5\xbf\xe2\x08\xe0\x90\xe1\x95\xe7\xbe\xed\xb1\xf5\xd4\xfd2\x02\x11\x04\xe0\x05\xa6\n\x18\x0eO\x0c\xd5\t\\\n&\n\xa3\x08\xe2\x06]\x06 \x03\x0f\xfdh\xf7\x8c\xf3\xc1\xef\xc4\xe9J\xe7\xb8\xe7M\xe8\xba\xe8\xc3\xe9\xce\xea!\xea\x88\xeb\x0e\xeff\xf2\x19\xf6\x95\xfb\x96\x01\xfe\x044\x07\x1f\na\x0b\r\n\xd0\t\xb1\n\x86\n\x82\tv\x08\xc0\x07b\x05\xd3\x03\x10\x04>\x02\xae\xfe\xa6\xfd\xd3\xfe\x8b\x00T\x01\xd9\x02\\\x06\xe7\x052\x051\x07\xa3\x08*\t\x11\x07\x08\x07\xa7\x08\xeb\x06\xcf\x04\x93\x03\xdb\x023\x01\x95\xff%\xff\x14\x00\'\xfe\xd7\xfai\xfaq\xf9O\xf7\x08\xf2\xfd\xee\xda\xf0\x8d\xee\xb7\xeb\xfb\xeaR\xe9L\xe8\x8d\xe4\xfd\xe5R\xe9\xce\xe7)\xe9\xf2\xebt\xf1n\xf5\x86\xf6\x18\xf9Z\xfb\x84\xfd<\x00Q\x03Z\x05v\x05\xd9\x05[\t\x8a\x0b)\x0b{\x06\xb9\x03\t\t\x84\r\xe0\x0e/\t\xc2\x06\x06\x08\x8d\x05\xdb\x03\xa1\x04\xd7\x05\x8a\x05\xf7\x08-\x0e\x1b\x12`\x11\x90\x0f\xde\x12\x1c\x15\xaf\x19\xc5\x1c\xa1\x1f1#v"\x16\'\x81/\x993\xae,*$\x91#\xec"\xc4\x1c\x83\x18\xc1\x16\xdf\x0eh\x055\x00\xf8\xfd\x8f\xf4.\xe8\xd2\xe0\x98\xdf\xa1\xe1\x03\xe2\x9d\xe2.\xe2\xca\xe2\xf9\xe1\x01\xe39\xe9\xe8\xed\x1e\xf1N\xf5\x1e\xfdn\x01\t\x03\x8e\x05\x10\x03O\xfd\xe6\xf9Z\xfa\xf5\xfaQ\xf9\x91\xf8\x89\xf7\xda\xf35\xf1\xe1\xefc\xec\xbe\xe9\xcd\xe8\x81\xeb+\xf1d\xf4\x12\xf7/\xf8\xbb\xf9;\xf9]\xf9\x87\xfc,\x00\xd1\x01r\x03\xd9\x07\x8f\x0b\xcc\x0c\xab\x0b`\x0b^\x08\xd7\x06\xf2\x06n\x08\t\x0b\x8f\x08\xef\x06v\x04\xcf\x020\x02\x0f\xfe4\xfc\xd3\xf9\xce\xf6\xf9\xf6\xca\xf5\x1c\xf5d\xf2\xfb\xeep\xeeL\xef\x11\xf1\x14\xf2\x0f\xf2R\xf2i\xf3\xbd\xf4\x9c\xf7\xa5\xfa\xee\xfc\x9f\xfd\xca\xff\x88\x02\xa3\x03\xaa\x04B\x05]\x05\xdb\x03s\x04r\x06\xa6\x06\x10\x06e\x03r\x00\x0f\xffu\xff\xcc\x01!\x017\xff\xe4\xff\xbf\xfc\x7f\x01@\x04\xc5\x02\xde\x036\x01c\x00W\x00\xe3\x05[\x0b\xfd\n@\n\x0b\x0e\xbd\x0f\xb4\x0e\x10\r\r\x0fo\nW\x06\xda\x0e\xae\x10+\r\x9b\rC\r\x9d\x07!\x047\x06\x82\x06\x05\x00b\x02\x81\x08\xd6\x06\xcc\x08\x08\x0c\xb8\nz\x01\\\xffY\x06m\x05\x05\x04\xd6\n\xc1\x08\xe4\x03\x87\x07?\x07\x13\x02\xdd\xfdp\xfd\xd9\xfb\xb7\xfa\xd2\xfd\x89\x01t\x00\xeb\xfaw\xf9\xd1\xf6\xd6\xf3\x8d\xf3\xb5\xf5?\xf7Q\xf2\xc0\xf4$\xf9q\xf7\x9c\xf7!\xf8\x86\xf43\xf5\x8c\xf8z\xfa\x06\xfe]\xffP\xff\'\xfb\x87\xfc~\x00\x15\x00(\xfe\xd5\xff!\x01\x8c\xfe]\x02\n\x04q\x00\r\xff#\xfc+\xf9e\xfdM\xfe\xf2\xfb\xbc\xfa\xe0\xfc\xca\xf8\xab\xf6\xed\xf8\x9a\xfb|\xfb\xf1\xf3p\xf7\xbb\xfd\xfe\xfb\xd7\xfap\xfe\xd4\xfa\n\xfc\xd2\x003\xff@\x02|\x02\x87\xfd\xef\xf9\xd6\x038\x07;\x01\x87\xfe_\x02m\x05\xfa\xfb\x94\x00\x90\x01K\xffu\xfdB\xffM\x03\x87\xfa9\x08\x0e\xff\xdb\xf3G\x01/\xfe\xa5\xf4\x18\x02\xd0\x05\x0c\xf4\xd3\xf8\x12\x02\xed\xff\xd9\xfa\xb0\xfc;\x02~\xf5\x8e\xf53\x0e\xc1\xfc\x01\xfa8\x0c\x01\xfcB\x02z\x0c&\x00R\xfe\x92\x08\xb5\x04\xe1\x05\x8f\r:\x0c\xf3\x01\xbf\x02\xec\x07\xa4\x04\xa2\x01]\x08\xef\x03~\x03\xf1\x04\x85\x03\x16\x05I\n\xc3\x03\xe1\xff\x97\x0b\x99\xfc\xe8\xfb\x18\x03v\x0b\xff\x06]\x02Y\x06?\x015\x05\xa7\xfc\xc7\xfe+\x03"\xfb|\x08\x9e\x05\x17\x03\x8d\x08\xb3\xfc"\xf8\xad\x01\xba\xf8\xfa\xf9\x9b\x0b\
xfc\xf3\xa0\xfc\xa9\x06\xa1\xfa\xdb\xf9\xf3\x00\x9a\xef\xbd\xf3#\xff\x12\xfd\xbb\x02*\xfd\xe2\x01)\xfd\x1d\xff\x8a\xf2W\x05#\x03\x95\xf6\xa6\x06b\x06\x81\xfe;\x00\xad\n6\xfbV\x04\xc3\xfc\x9d\x00A\x03q\xfe\xc6\x01\x03\x01B\xf7\xa9\x04\xb3\xfc\xca\xf2\xbc\x03:\xfb,\xfbi\xf9\x9c\x04-\x0bd\xf4\xc6\xfa\xf2\x0c\xbb\xf9\xe1\xff\x1a\nR\xfe/\x02\x0c\x0b\xb6\xf7/\x00B\t\xb9\xf3\xa6\x00\x9b\xfbR\x01g\xff\xbc\x01\x11\x04o\xf8\x8a\x00N\x08s\xfc\xdc\xf6\x13\x0e.\xf3\xc1\xf2{\x13:\xf5\xc4\xfce\nU\xf4\xf2\xfeF\xfd\x0c\xee\xee\x08\x8d\x06\x0f\xee\xdc\x04\x08\xffB\xfb?\x05\xc3\x04\xc6\xf56\xfc]\x08"\xf3\xa7\x06\xa3\t\x02\x03\xba\xf6:\x00N\x03\x92\xfc\xf2\x04\xd6\xf9!\x0b\x82\xfac\xf5\xc0\x15\xd5\xf7\xcc\xf8\xc4\n\x8c\xf56\x03\xbc\x0e\xbd\xff\xf1\xfd|\x07u\xfc\xb8\x07D\xfd\xd7\x03\xba\x0e\xaf\xf6\xb5\x03\n\x06\xc9\x03\x02\x01\xbf\xfe\x8e\x01\xe7\xfe\xe7\x00*\xfc\x90\xfd\x8f\x02R\x02u\xf2\xfa\x054\x02\x0c\xf2\xa4\x02H\x00{\xf3\xd3\x00\xfa\x07^\xf8\xe5\x02\x1a\x00\xd0\xfc\xed\x05\xff\xf9\xec\xf9e\x07\xd7\xf6W\x02\xd9\n\xd2\xf4\x15\xfd\x14\x077\xef\x87\xfb\x11\x10\x0b\xf8@\xf4\xe6\x08\x05\x06\xe5\xef\xdb\r\x07\xfb%\xf5\xf2\x05\x14\xf7\x17\x07\x1b\x06\x90\xf2)\xfej\x10\xd5\xf9\xbb\xf9\x1e\x0b\xec\xf8\x15\x03\xa5\xff\xb8\x08\x8b\x00\xae\x07\xf0\x03\x96\xf6\xc8\n\xdd\x02\xc4\xfbz\x02\'\x11K\xf5\xb4\x03\x1f\x01:\xffL\x03\xe2\xee\xb7\x0f\x84\xfc\xce\xf9\xef\x0e\xa2\xf5\x0b\xf7B\x15{\xf4\xa2\xf0\xce\x17\x1c\xfc#\xf0\xef\x0c\xea\x07m\xf1\xcd\x06\xbe\xfb>\xfb\x8b\x02\xc1\xfe\xfd\x03\xc3\x00*\xf6\x08\xff\x10\x0b\xc5\xef9\xfe\x81\ns\xee\xe2\x04\xb2\x00]\xfa/\x07I\xfd\x00\xf8;\x009\xf8\xbc\xf6\x84\x18T\xf89\xfb\x81\x07\x10\xfd\xf9\xfb\xa8\x02\xca\x05\xa7\xfd\xf2\x04\xda\xfd{\x05\xb6\x08\xd6\xfc\xdf\x02\xb0\x04\xed\xf8\xff\n\x11\x03\xa4\x00y\xfd:\x0b\x16\x04\\\xf7\x02\xfe\xd3\x01\xf3\x06G\xf7z\xfe\x91\x08v\x04\xe8\xf8\x93\xfb\x90\xfa\r\x06\xd1\xf9\xa8\xf4\xdb\x14\x87\xfen\xf2\x16\x08\xa6\xfc\x1c\xfc\xb1\xfe\xdd\t\xb0\xf7\xff\x01\xd5\x03:\xf5\x8f\t\x8b\x00\xcc\xfc3\x00R\x06\xe9\xf1\xd1\x06\x8d\x01\x1b\xf8&\xfb4\x00|\x02\xe3\xf5\xa6\x08\xcf\xfa\xb9\xedT\x02{\xff\x06\xf2\xb3\x0e\xcf\x00\xd7\xf1\x90\x06\xa3\x08\xe2\xf9g\xfc|\x01\xb7\t*\xfbe\t\xc0\x0b\xb2\xf60\x05\xb0\x00\xd1\x06\xff\xf8z\x00\x10\x0f\xf6\xfdi\xf8\x98\na\x04%\xf5W\x08\xd0\x03\xeb\xfc\xdc\xf8\x0b\x06\xfa\x04y\xfc<\xfd\xff\x00f\x0c(\xf4\xbf\x001\x07\x8f\xf4\xce\x01\xdb\x00\xb9\xfc\x9d\x06#\x01\x8f\xf6\xe1\xfb_\x04\x8e\xf9|\xff\\\x03l\xedE\x04\x07\x08Q\xf3O\xf8\xea\x10<\xf2\x92\xfa\x1c\x08x\xf5\xa5\xfc\x93\x06\xd9\xff\xc2\xfe\xc4\x01\xab\xff\xf0\x00k\xf6\xe0\rA\xf6)\x05\xa1\x03B\xfe\xdd\xffI\xfcg\x0e\x91\xefN\x0b\x1f\x01\x04\xef\xda\x07\x88\x0c:\xf45\x04\xfb\xf9\xf8\x05j\x08\xae\xf3I\tu\x08\xf8\xf4\xdc\x02\xe3\x13^\xef%\x06>\x07\x0e\x00\xcd\xff\xce\x01\xab\x06\x98\xff\xb5\x06\xac\xf8\xc9\x02&\t=\xf6\xc8\x00\xd6\x01D\x07\xda\xfcy\xf2a\x19\xb0\xed\x81\xff\x0c\x031\xfcZ\x03\xf1\xec\x7f\x12\xb6\xef$\x00\x9b\x085\xf6\xa8\xf5\xaf\x05\x1b\xfc\xa2\xf7 
\x02\xfe\x02g\x03\x06\xf9P\xfc\x0b\x01\xe4\xfc\xa1\xfb\x18\x08\xba\x04\xdd\xf5\x88\x08\x8a\x03\x13\xf0I\x07\xbe\xfe%\x02\xfd\xf8\xc6\r4\xff\x01\xf2\xd7\r\xde\x01\xc7\xf6F\xf61\x1a\xaa\xf3\x9a\xf9\xff\x12\xec\xfb\xca\xfc\x98\x03\xc2\x04H\xf5\x13\x04\x9c\x03\x9b\xfb\x87\x01(\t\xda\xff\xc7\xf5\x84\x04f\x07\xd0\xf6J\x04\xe8\x08`\xefZ\x11G\x02\xb6\xf5\xb1\x04\x13\x07\x04\xf6\xc3\xf2\xd9\x18\x8b\xf9\xbd\xfa\xfa\x04o\x08|\xf5\xa3\xfdf\x0b\x90\xf1\xb2\x03\x94\x03\xf0\xfc\x00\x00\xab\n\xb5\xf2\xdd\xf9\xfd\x07\xfb\xefN\n\x19\xf5\xd9\xf6J\x16R\xf3\x1c\xf5\x80\t\x12\xf6\xe0\xffW\xfe\xcc\xf7\x02\x06\xb4\xfcT\x04\x14\xf5\x86\x06\xd6\t\xc5\xedD\x07A\x01\xea\xf9N\x04\xe6\x01\xa6\n>\x07\xa3\xfa#\xfdV\x05\xe3\xf9\x93\r=\xff\xdf\xff\x9d\tQ\xfc5\xfdB\tV\xffz\xfb\x99\x07\x9b\xf1\x17\x13\x98\xfb\x8f\xfb\xf1\x06E\x05\xab\xeeM\xfe\xac\x12s\xfc\xab\xf7\xdd\x00\x91\nC\xf5\x0f\xfc`\x05\x13\x03v\xee\xc4\x08\x18\x07\xf4\xf5\x14\xfb\x95\n\xfe\xfc\xd5\xee\x8b\x0c\\\x03\x14\xf2\x99\n\xb6\x01\xfe\xee\xf5\np\x05-\xf1\xf2\x00\xb4\x14\x1b\xeb\x90\xfa\x85\x17s\xf5\xd9\xed\xe0\nb\n.\xefJ\x02\xf6\r\xd7\xfc\xef\xf6p\xf8\xc9\x06\x84\x06\xa1\xf4\xe2\xf8\x02\x19E\xf4\xf3\xf3T\x16\x10\xef\xaa\xfb\x11\n\xd5\x01\x80\xf6\xce\x05\xb2\x06\xf8\xf5e\x03~\x10\xdd\xea\x13\xfd\xbe\x16&\xef\xc8\xff\xc8\x12\xd9\xf9c\xf8d\x0e\x81\xff\xc9\xf8\x19\x06\xef\x05\xa1\xf1\xbf\x05d\x18\xc7\xed\x0c\xf5\x06\x1c\x9e\xfc\xeb\xe6\xf4\x0f\xe0\x07\xd8\xf0\xed\x00\xe1\x07Y\x02-\xff)\xf6r\x06\xe8\xfe\xeb\xed"\x15\xf4\xf6#\xf1\xa8\x0b\r\x07?\xf5\xa3\xf5\xb3\x10.\x00\xdd\xed\x1e\x05\x89\x08\xb6\xf6+\xff\x9d\x04\x9a\x04Q\xf8\xdd\x04e\xff\xd4\x00Z\xfb\x06\xffn\nI\xf87\xfe\xc0\x0b!\xf7\xbe\xf8\xa9\x0f\xd7\xfa:\xf7\xdd\x01\xf3\np\xfa\x1b\xffH\xfd\xfb\x030\x02\xb6\xfb^\x01\xaa\x03j\x03\xd7\xfa\xc0\xff\xa8\xff\xe6\r\x15\xfa7\xfc/\r\x9c\xfc\n\xf8\xaf\x10\x80\xf8/\xfc\x8e\x10+\xfc\xf3\xf0\x1f\x08\xd7\x08e\xf6\r\xff\n\x01~\x07\xa5\xed\x90\x04O\x07I\xf1\x9e\x02\x91\xfdn\x06\x14\xff\x1e\xf8\xae\x0bS\xf8o\xf1U\x12a\x02`\xf01\x0f8\t\x18\xed\x88\xfb\x94\x12v\xf2\xbd\xff\xfc\x05\xdd\xfc\xf4\x04Q\xfb2\xff\x10\xfd1\x05\x16\xfcB\xfe\xc4\xfb\xe1\x0f$\xf6\x08\xf7?\x14;\xf6;\xf5\xc2\x06i\x05\xdc\xf0\\\x07\x17\x07\xfc\xfa\xb0\x018\xff\x85\xfd\xe5\xf7\xe2\x0f\x8f\xfd\xa1\xf5\xb5\x06\xec\x01\xd8\x01c\xf5d\r4\xffx\xf5[\x080\x08M\xf2\x92\x02\xe8\n\xef\xf7\xeb\x07\xeb\xf9d\xfe\xa7\x01\xb0\x01\x07\xfe\x9b\xfd5\x04i\x02\xf5\xf1&\rx\x04o\xe7*\x0e\x9b\x0c\xd7\xeb\xf1\xfcC\x12\x80\xfc\x1b\xf0\x97\xfc\x18\x1a\xa7\xf4\x86\xe9\xd1\x16 
\x05t\xe2\x11\r\xef\x15\x08\xea\xb5\xfa9\x14\xac\xf3g\xf3\xa6\x15y\xfb\x7f\xf3I\x03Y\x0e\x81\xf83\xf6\xc8\x19\x12\xf0\xe4\xef\xe2\x10\x87\n\xe7\xe7\xde\x08|\x1b\x88\xe36\xf4\x8d\x1e\xdc\xfa\xb9\xe6t\x17a\xfa\x08\xfb\x9b\x05W\xfe\xba\x01\xcf\xfc\xb4\x00Y\xfeR\x06b\xf1g\x0fb\xff\xdd\xf1m\x0c\x81\x00\xbb\xf6O\x06M\x0b|\xec\xcd\x04d\n\'\xf3\xd1\x04\xd6\x0c~\xf3\x85\xffR\x0cB\xf3\xc5\xff\xf6\x08\x13\xfb\xe1\xfe`\x02.\x05\xc7\xf8\x9c\x00}\x05>\xf3\xed\x05\xab\x064\xf6\xb9\xf9\xd3\x13\x11\xfa\x17\xf1S\x0cg\x02w\xf2\xcf\x02\x06\t\xc6\xf7\xbb\xff\xf0\n9\xff"\xf5n\x06\x8e\xfeN\x00\xbb\xf7\xb6\x05\x17\nZ\xf9E\xfdX\x07O\x02U\xebj\x0b_\x12\xe0\xe4F\x03\xc8\x11\xb4\xf2\xe3\x03g\x0b\xe2\xf0\n\x015\x04\xe5\xfa\xbb\xff\xcb\x04\x7f\x02j\xf9_\xff\xec\tM\x04F\xeb\xae\x01\xcb\n\x8f\xf5\xa3\xfbI\x1a(\xf4z\xf4O\x0fD\xf7\xbf\xf9\xfe\x06\x81\x05\xf3\xf3\x15\x07"\x0ch\xf8\xc4\xf7O\x0c\xe0\xf3S\xfc\x80\x0e\xa6\xfbg\xfa,\x10:\xf6\x9f\xf6\xa7\x0fb\xf9`\xfa\x03\x02^\x07\n\x01\x97\xfd\xd1\xfc\xba\ts\xf7\xa8\xfcD\x05H\xfe\xce\x01P\x07\xfa\xf9\xcd\xfeb\x08\x9a\xf0\xb6\x08.\x03`\xf8\xa0\x04,\x02\xe9\xfb\x16\x05`\xfdi\xf8\xf9\x02\xd6\xf7D\x01}\x05N\x07\xb5\xf3\xe3\xff\x16\t5\xf6\'\x006\t\xa3\xfb\x8f\xfd\xb1\x0b\x8f\xf8V\x04#\x02\xb1\xfat\xfe\xcd\x06\xf5\xfao\xfe\xd4\x02\xd4\xfc\xf1\xfdT\xff\xf7\x00\xc7\xfcY\xffU\xfb\xe1\x0ca\xf2\xb8\x02B\n\x96\xf3l\xf8\x17\x04\x86\x07\x81\xf99\x05\xeb\xfe\xa8\xf7:\x064\x04X\xf8-\xff\x1f\t\xd7\xfe\xb1\xf7\xd7\x0e\xbf\x00\xa6\xed\x9f\x07\xbf\x0e\xb9\xf2\x1a\xfd\xac\x0e\xcf\xfap\xf2\xf2\x10P\x06\xb8\xed\xc3\x08\xd1\x00\x8a\xf5\x94\x045\x0c\xf7\xf7\x11\xf9\r\x0b\x8f\xfe\x0f\xf9\x80\x01\xb6\x000\xfe\xcd\xffZ\x06o\x02\xf4\xfa\x1b\x03\xe8\xfa\xeb\xfd\x8f\x00\x9e\x00\xf0\x03\x8d\xffw\xfd\xcc\x03\xc0\xfa\xb4\xfel\x04\x8f\xf8h\xfdJ\x00n\x01\xcd\x04\xd5\x02\xf9\xf5 
\x02\x12\xfd\xa2\xfb\'\x07\x80\xfd\x12\xfe\xbd\x03k\xf8\xf6\x05\x82\x06\x8b\xf5\xe4\x04_\xfd6\xf8\x87\x01D\x08\\\x02\xae\xfc\xae\x03\x99\xfb\'\xfd\x18\x03\xbb\x03\xfe\xf8y\x02\xf2\x07\xb6\xf6\x89\x07i\x07\xad\xf8\xa1\xfdb\xfe5\xff\xea\xffe\t\x96\xfe5\xfa\xc8\x06g\x00\x9a\xfbG\xff[\x03\x9d\xf7\x8a\x00\x8b\x05+\x06\x8b\xff\xe5\xfd\x1f\xf9\x90\xfd=\x05N\xfc\xe5\x02\x18\x01\x9d\x01\x90\xfc\x10\x00\xb4\x046\x02S\xf9\x91\xf3\xc4\x08y\t\xbb\xf96\x06\x96\x00\x93\xf5\xb7\x00g\x08Z\xfcX\xfaP\x04\xc6\xf9e\x01\xfd\x05\xfc\x04\xb0\xfb|\xf5\x8b\xfe\xc6\x04c\xff)\xfc{\x058\xff"\xfa\x05\xfe\xb1\t%\xfe\x9a\xf9\x1d\xfe\x06\xfdD\x020\x06\xb3\x00\xbb\xfe\xb9\x00\xff\xfb\x19\x00H\x03\x16\x03N\xfc\xbc\xfe\xb6\x01\xba\x01I\x03\x05\x02\x8b\xfd\xa3\xfd\xa4\x00D\x00t\xff5\x02\xe3\xff\xa4\xfdD\x04\xc4\x00\xa9\xff\xe8\xff\xb5\xfcB\xfdC\x01\xa2\x02\x10\x01D\x01\x15\xfe\xbb\x00\xcc\x00k\xfeQ\x00\xc4\xfcV\xff[\x03\x99\xff\xa1\xff=\x01\xab\xff\xba\xfd\x90\x01\x17\x03\xbf\xff\xd0\xfa\xfe\xfc\xb0\x05\xb2\x01\n\x01B\x04\xba\xfd\x0b\xfc\xbf\x01\x93\x00\xe6\xfc\x91\x01P\x02\x9e\xfd\xc4\x01\x1e\x04\x81\xff\xde\xf9\xa4\xfer\x03c\xfdi\x01\xe3\x02\xc5\xfd\x89\x01\xdb\x00[\xfe\xf5\xff\xe9\xff`\xfdv\xfd\x1a\x03\x98\x05\xd3\xfd\r\xfe\x01\x02x\xfe\x87\xfd\xc9\x02\xd2\x00\xd1\xfe\xa0\x00\xd7\xff}\xffS\x01\xc2\x02\xce\xfd\x0b\xfe"\x018\x016\xfdA\x02o\x01h\xfd\xbe\x00*\x00\xe5\x01u\x02\t\xff,\xfb"\x00^\x01\x08\x00\x80\x04\xdf\xff\x92\xfd1\xff\x06\x01z\xfd\x91\xff\xef\x02a\xfc\x92\x00\x89\x03]\x01!\xfe\x01\xfe\xf9\xfd\xf7\xfd\xe3\x00\r\x00T\x02#\x02\xcc\xfb\x07\x00\x92\x01\x8f\xfc\xb8\xfe\xdc\x00\xbd\xfc\xc1\xfd\t\x05\x0f\x024\xfb(\xfe&\x01\xf3\xfa\x80\xfc\x80\x02k\xff\'\xff%\x00X\xfe\xe9\xffp\xff\xb7\xfb\x12\xffa\xfd\x8d\xfd\x9c\x01\x8b\x01q\xfeG\xff2\xfew\xfbv\xff\xc3\x00\x8a\xfd\xf3\xfd\x1b\x030\x00\xb2\xfd\x82\x01?\x00\xc1\xfbZ\x00\x8a\x00K\xfe"\x04\xc8\x02\x9e\xfe\xfb\x00\xc6\x01\xd4\xff\x96\x00p\x01\x05\x03p\x01\x92\x03&\x06x\x03\x87\x03\xa0\x05T\x03\x93\x05\'\n\xae\x07[\x08\x9d\n\x9a\n\x82\t\xcc\x0b.\n\xcb\x06\x8e\ns\n\xca\x07\xcc\x07a\x08_\x042\x02W\x02I\xfeD\xfc\xdb\xfd@\xfbW\xf8\xa5\xf8\xb6\xf6m\xf4\x8e\xf3\xa5\xf3\xa0\xf2\xeb\xf1\x03\xf4\x8d\xf5\x1c\xf5\x9f\xf5K\xf6-\xf7\xdb\xf8F\xfa\xf1\xfa\x8c\xfd\x05\xfe\x0e\xfe\xc4\x00\xb3\x00\x1d\x00\x83\xff\x81\x00\x1c\x01\x80\x00J\x00\x87\xfd\x02\xfd_\xfd\xb0\xfa\x96\xf9\xf8\xfa9\xf8|\xf5:\xf63\xf5\x14\xf5\xb0\xf3F\xf3\xf5\xf4\xd4\xf4\x88\xf3n\xf5\xbb\xf8\xa1\xf54\xf8G\xfb+\xf9\xb0\xfc\x01\x01\x80\xfeh\xfc\x8f\x03\x0e\x05\x85\xffc\x02[\x07\xf6\x02\xab\xfe3\x05(\x05{\x01u\x02`\x04\x13\x00\x8e\xfe\x90\x00\xb2\xfe?\xfe\xed\x03}\x03\x14\xfe7\x03\x82\x06\x94\x06r\x05\x0b\x07\xa1\t\xb5\x0cT\x148\x19\xf4\x1b\xaa \x9f\x1f\xf6\x1dF!5&Y\'h%4(\xf3*\xba)F%: |\x1b\x0c\x14\xdb\x0c\x8a\x0b\x05\r=\x08X\xfeq\xf8a\xf5\x88\xef\xac\xe7\xce\xe1\r\xe01\xdes\xdc\x0e\xde\x8d\xe0>\xdf\xe2\xda\xbd\xd8\xab\xdc\x8e\xe0|\xe1$\xe6y\xed\x15\xf1j\xf3\xa9\xf6I\xfa\x86\xfc}\xfc!\xff\xf8\x04V\t\x83\x0bx\x0bu\x0cY\x0c\x05\t\x7f\x07:\x07\xa9\x06:\x04&\x03%\x03o\x01\xc1\xfd\xc9\xfa\xaf\xf7\xa3\xf4L\xf3\xc1\xf36\xf5\x97\xf4\'\xf3Z\xf3\xf4\xf5l\xf5^\xf4\x98\xf6|\xf9i\xfb\xe4\xfef\x02#\x04\xc9\x04;\x07B\x084\x08\x82\x0b\xca\ru\rF\x0e\x0e\x10\xd0\x0eM\rD\x0cE\x0b\x0f\t\x9a\x08\xea\x07 
\x06\x86\x04\x8d\x02v\x00\n\xfe]\xfc/\xfaR\xf8\xac\xf9#\xf8S\xf6\xc6\xf5x\xf4~\xf3\xba\xf2\xf2\xf2_\xf5\xf3\xf2\x00\xf2\xb1\xf5\x84\xf5s\xf4\x1a\xf6P\xf61\xf5L\xf7e\xf9\x93\xfc!\xf9+\xf9m\x00\xd7\xfe\xb6\xfb\x88\xff\x89\x03\xd2\xfd\x07\x00)\x06\x98\x04A\x01\x03\x04"\x08\x89\x03@\x04\xb8\x05\xd4\x05\xcc\x06\xed\x06\xc6\t\x1e\t\xe4\x08\x10\x08\xf3\x08\x93\n\x1c\r\x8e\x0f\xe9\r\xec\x0f+\x14\xf6\x17m\x16c\x14.\x14d\x15\xb1\x18\xdf\x1b[\x1b\xa8\x18Y\x16u\x14\xd9\x13A\x12\xc1\r\x88\t\xd6\x05\'\x03\xf6\x02\xfc\x00\xcb\xfb\x93\xf6\t\xf2K\xee4\xecO\xeb}\xe9L\xe7\xce\xe5\x9a\xe5w\xe7_\xe8\xd7\xe6\x16\xe63\xe7\x9b\xe9M\xedG\xf1\x16\xf5/\xf6\xa2\xf5\xd0\xf7\x1b\xfb\xe8\xfd\xd4\xfd\xb9\xfe\xd3\x00<\x039\x05\x9f\x05\xe0\x04\x80\x02\xc5\x00\x84\xff\xa8\x01*\x03\xc9\x00\x19\xff\xd3\xfe<\xfe,\xfd\xaa\xfbB\xfbL\xfa\x8c\xf8\x86\xfaf\xfe\n\xff2\xfd\xd3\xfc\xde\xfc\xff\xfdX\xff\xdd\xff\xa9\x01~\x03b\x03^\x03\xed\x07\x14\x07D\x02\xe6\x02\x8e\x04\xb2\x04\xfc\x04\x94\x04\xc5\x06z\x04\'\xff\x1f\x00!\x05C\x00}\xf8\x02\xfe\xc2\xff\xd2\xf9n\xfd\x10\xff\x8e\xf9\x87\xf6\xb0\xfb\x05\xf5x\xf6u\xfb\x9c\xf5\x8e\xf9\xcd\xf4\x9e\xfa>\x02A\xf2,\xf5\xbc\xfe\xbc\xf6\xd8\xf5\xd7\xfb\xd4\x00[\x01\xf5\xfaX\xff\xba\xfe\x12\xfd\x1a\x02N\xfe\xef\xfe\xe8\x07\x07\x07m\x04\x0e\x07z\np\x08\x13\xff\x93\x06\\\x11\xe5\t)\x06\x8e\x10K\x0e\x0e\x087\t\xe0\n\x97\x07B\x06\x98\t\xed\n\x8c\t\x92\x0b)\x07\xbb\x04\xf4\x05\xb5\x07N\x08\xb4\x08B\t\xa7\x0c3\x0f)\n\xe4\x0cS\x0eW\x0b!\re\x10\x8e\x10,\x0fW\x0eq\x0e\xe6\x0c\xca\tj\x08J\x05\xf2\x03\xd1\x02\x05\x01^\xffN\xfb\xc1\xf7\xcb\xf4\x01\xf3 \xf14\xef\xa4\xee#\xee\xb0\xeb\x1c\xebi\xed\xaf\xecR\xeaZ\xed4\xeef\xee\x05\xf1\x8c\xf4o\xf4`\xf5^\xf8\x9d\xf8\'\xfa\xd5\xfb\x9d\xfd\xe9\xfc\x17\xfe\xc8\x01\x9c\x00\x02\xff3\x01x\x00\xdd\xfe@\x00\xa9\xfe \x01\x9d\xff\x00\xfe\xaf\xff\xad\xfe\xd7\xfe\xbc\xffq\xff3\xfd\xc7\xff\xb5\x03\xe1\xfe\xdb\xff\xf8\x05\xca\x01\x87\xff\x92\x06\x0c\x04h\x01\x00\x05\x04\x05Q\xffd\x034\x05\x01\xfe\x1d\x01#\x02\xae\xfd\x8c\xfd\x84\xfd\xb5\x00\xbd\xf76\xfa;\x00M\xf6\xdf\xfbd\xfa\t\xfa*\xfe\x18\xf8\xf2\xf6\xfb\xfd\x0b\xfd_\xfaM\xfd\xd4\xfc\x08\xff,\xf8\xfa\x03C\xfe\x9b\xfa-\x02\xc2\x01\xd3\xfe=\xf9\n\x07!\xfcp\xfa\xd6\xfcc\x02w\x03\x12\xf5\xf8\xfe\x98\t\x04\xf5Q\xf2\xab\n\xc0\x02\xfe\xf6\xb2\xff\xc2\x08\x08\xfe;\xfe\xdc\t0\x00\x0c\xfe\xe3\x0e\xa7\xfe\xa3\x07)\x12/\x04\xa5\x07\x04\n\x86\t\x86\x018\x0f\xc0\x0ft\x03\t\n\xb8\n\x8e\n\xb4\x00:\tT\nd\xfb\xcd\x03(\x10\xba\x08A\xf6~\t\xe3\x04"\xfc6\x00\xeb\x06;\x07\xe4\xf7x\x08\xaf\x03d\xffU\x04\x92\x03\x82\xfb\xf0\xfd\x9a\x08)\x01\xfa\xfe\x08\x01=\x06B\xf9\x95\xfe\xec\x03%\xfc\xa6\xff\xff\xfb\x16\xff\x18\x02\xee\xfdS\xfb\xaa\xfd\x18\xfc\xad\xf9\x8f\xfb\x0e\x02L\xf88\xf5\xac\x03*\xfab\xf2\x06\xfeH\x02l\xf0\x0e\xefo\x08\xa6\xfc\x18\xf4T\xfb\xdc\xfe*\xfd\x14\xf2\xe8\x03\xdf\xfeo\xf4U\xfc\xec\x01\xa0\xff\x84\xfa\x1f\x01>\xfcL\xfa\x1a\x03\xed\xfcZ\xfd\xec\x00\xd4\x07\x8c\xf6\r\xffA\x0b\x9e\x06\xb5\xed[\x02e\x11C\xf9%\xf85\r\xc1\x0br\xe8\xf4\x0b\x94\x05f\xf3\xdc\x01\xe4\xff\xfe\xfe\xbc\xfcC\x01\x96\x03\x9a\xf5\xad\xf5\x7f\x07\xfc\xfa\\\xf3\xdf\t\xe4\x01^\xed\xef\x08\xe0\x01\xce\xf4&\x03[\xf8\xa5\x05\xae\xffx\xfc\x9e\n\x91\x02N\xf4e\x06\xb7\x0cL\xee\xbb\x0b\xb1\nP\xf7i\x08\x8e\x01l\x003\t\xaa\xfc\xc2\x00/\x08\xa9\xf1\x9c\x04\xc6\x19\xf6\xef\x9e\xfcm\x13\x88\xf8\xe6\xec\'\x18\x90\x06\x0e\xe7D\x12\xce\x04\xc4\xfcr\xff\xd1\x08V\xf8\x0e\x04C\xfe\xa5\x03\xe1\x08\xd0\xf5\xb2\x07{\x01*\x04\x1c\xf9\xbd\xfd\xcf\t\x0c\xf8s\xfd\x8c\x06\xa3\xff|\xfeM\xfdq\t\xda\xe9\x16\r\r\x03\x98\xef\x94\x06\x8d\x05\x8a\x03\x99\xefT\r\xae\xfcS\xfc;\x06\xbd\x00I\xfd\xe3\x03\x10\
xff:\x03\xfe\x03+\x01*\x02\xe8\xfd\x89\x06U\xfa%\x04z\x05\xe5\xf8>\x00v\x06h\xfb\xd3\x06\x06\xff\xd8\xed\xb3\x12\x11\xf7\xad\xf5\x92\x11\x17\xffL\xf5/\x05\xed\x08\xa9\xf7\xaa\x01\x8f\x00\xd2\x05\xd9\xf4o\x13\x9a\xff\xa9\xf7~\x05g\x01\xf9\x00\x9e\xf7\xa5\nm\xfef\x00\\\xf8\x05\n\xb0\xfd\xd3\xf4A\x07\x18\xfe\xd6\xfc\xff\xf6\x8a\x059\x03\xa6\xeeW\x06\x87\x01j\xf3.\xffK\x05\xdf\xf7r\xfc\xfa\xfe\x98\xfa\\\x03\xa8\xfd\x1e\xf9\x88\x01]\xfdZ\xf9\x85\x06\xa3\xf9\x16\x06\x18\xf2\xc8\x03\xf3\x053\xfa\xb1\xf8\xd1\x078\x02\xd1\xf6\x18\x08@\xfb\x85\x02\x9b\xf6e\x10a\xfd;\xf1\xb2\x0e\xda\x04\x99\xf4M\x01D\x0bT\xfd\xdd\xf4G\r\xf9\x05\x9b\xefb\t\xc3\x0b\xd0\xf1\x07\xff\x13\r\xae\xfa\x0b\xfd\xce\x01\xac\x0b\x9d\xee\x99\x06\xab\x03?\x02&\xf6H\x02\x95\x0c\xcd\xee\xfd\x05\x81\x05\x91\xfd\xd3\xf0p\x15\x97\xf2f\xfd)\t=\xfa^\x00\xb9\xfe|\x03\xc2\xf9J\x01\xfa\xf9\xe3\x07@\xff$\xfa\xd2\x02m\x02C\xf6\xa0\x05\xf6\x02\x88\xfa\xe2\xfa\xb3\x05\x7f\x02\xff\xf5(\t\x03\x00Q\xf4y\x03\xf8\x0b\xb5\xef\x81\x05\xcf\x03\xdb\xfa\xea\x07\x96\xff\xb2\xfbc\x04L\xff\x0b\xfe\xe3\x04s\x07O\xfc\x99\x03\xef\x017\xfa6\n\xeb\xfc\x13\x00\xa7\x05@\x06&\xf7\x80\x06\xea\x04\xf5\xfc\x06\xfc \x02[\x05\x86\xff|\x02\xac\xfa\x16\x06\x1d\x00;\xfa\x80\x03n\x03^\xf7-\x02\x03\x03\xd6\xfdH\xff\xdf\x03\xaa\xf7!\x00\x8b\x040\xfcf\x00\xf1\x00\x9c\xfc=\x03\xb8\x00\x94\xfa\xb2\x06\x16\xfc\xb7\xfe\x9d\xff\xf2\x01\xd1\x02\x0c\xfcX\x00\xd7\x03\x0c\xff\x98\xf9\xbe\x05\x96\xfd\x11\x01\xd3\x01w\xfa\x07\x04\x9a\xff2\xff\xff\xfc\xdb\x01J\x01\x87\xfa5\x01W\x03p\xfe]\xfcu\x04\x88\xfe?\xfd[\x06\xa7\xfd:\xfb7\x06\x04\x02\xe0\xfa\xb0\x02\x83\x04\x11\xfc\xc6\x03\xde\x01\xa8\xfe\x06\x01x\xff\xf5\x01-\x03\xca\x02\xee\xfc\xd3\xfe\xa9\x06a\xfb\x14\x01W\x05\x19\xfb5\x02\xb4\xfd\x96\x00e\x019\xfe\xcb\x00M\xf9h\x06~\xfb\xb5\xfa0\x07\xfd\xfa\xdd\xff\xa5\xfb\xc7\xfe\xd3\x02\xa8\x00\x1f\xf7\x12\x03/\x02X\xf7Y\x04\x98\xfeH\xf9\xcd\x03\x81\xfe\x8b\xf9\xfe\x05\r\xfd\x1c\xfc\x07\x01.\x01\xa6\xfaQ\x01Y\x03\xc6\xfe\n\xfbB\x04R\x03\xcc\xf9\xa0\x01B\x01\xc2\x04\x02\xf9\xc7\x03\x90\x02\x02\x00T\xfdm\xfde\x08,\xfb\x95\x01\x9c\x02\x9a\xff\xb3\xff\xe3\xfe\xb0\x00\x98\x01\x08\x02G\xfd\xd9\x00/\x04*\xfe\xdb\xfe@\x01\xb1\x00\xb6\x00\x80\xfei\x01\x98\x01\xd8\xfc\xdb\x01p\x00T\xfe\x95\x00\xeb\xfd\x13\x00\xf0\xff\x0f\xff|\xff\x03\x00d\xfd\x86\xff\x91\xff\x04\xfe+\xffv\xfd\xe6\xff\r\xff\xf6\xfc\xc3\xffF\xff8\xfc\x04\xff\xd6\xff-\xfc\xbd\xff\xa0\xff\xf5\xfb\xb4\xff\xdd\x00\xd1\xfcT\xfe}\x00\xba\xfe\x96\xfe\x15\x00\xdc\xff\x9c\xff\xe5\xffu\x003\x01\xd7\x00\x06\x01\x18\x00\x90\x01\xa6\x02k\x00\xb0\x01\xec\x02C\x00\xa8\x02r\x03\x08\x00X\x02\xf6\x02\xd1\x00\xa0\x01R\x02N\x01!\x01\x8a\x01\x14\x02^\x00\xb0\x00c\x01\xb0\xff{\x00\x92\x01}\xff\xbd\xfe+\x01\xb3\x00\xce\xfd\xdf\xff\x98\x00\xd7\xfe\xad\xfe>\x00\xbe\xff\xe4\xfdd\x00\xf4\x00C\xfe\xea\xfd\xfe\x00\xa7\x00\xa8\xfd\x9c\xffw\x000\xff\xdd\xfe\xd2\xff\x9b\xff\x13\xfe.\xff\xb9\x007\xfe8\xfe\x93\xff|\xff\xbc\xfd\xdb\xfe*\xff\x07\xfe*\xff\xf7\xfe\x7f\xfe\xa4\xfe+\xff\xc5\xfe\x80\xfe\xd8\xfe\xe5\xffA\xff\xe6\xfex\x00\x85\xffh\xff\xc3\x00\xca\x00\xea\xffb\x00\x81\x01k\x01\xb4\x00\x83\x01C\x02\xf5\x00\xaf\x01\xc0\x02t\x01p\x01\xc4\x02"\x02\x11\x01\xaa\x02\xe0\x01Z\x01\xab\x01\xa0\x01R\x01\xae\x00u\x01v\x01\x10\x00\xce\xff\xea\x009\x00R\xff\x03\x00\xc5\xff\xdb\xfe-\xffZ\xff\xd5\xfes\xfe\xbb\xfe\x0b\xff_\xfe5\xfe\x8c\xfe\xcb\xfe1\xfe7\xfe\xad\xfe\xa4\xfe\'\xfe\xeb\xfe_\xff\x86\xfe\xde\xfe\x89\xffK\xff\xdd\xfe\x17\x00\xe6\xff\x10\xff\x00\x00\xb9\x00w\xff\x1f\x005\x01\xc1\xff*\x00\xf3\x00\xd3\x00j\x00e\x00R\x01\x0b\x01x\x00b\x01\x04\x01\xb7\x00\x10\x01\x01\x01\t\x01\
xf7\x008\x01R\x01\x08\x01\xd1\x00\x1f\x01\x0b\x01\x9b\x00\x01\x01\xf7\x00y\x00\xd9\x00\x0c\x01w\x00@\x00\xd8\x00u\x00\x14\x00Q\x00\x12\x00\xd0\xff\xe8\xff\xf0\xffN\xff\xa1\xff\xe7\xff\x13\xff\xd5\xfe\xd3\xff*\xffl\xfe~\xff=\xff\x87\xfeB\xff\x02\xff\xed\xfe$\xff\xd3\xfe&\xff\x1e\xff\xfe\xfe<\xff\x1b\xff\x08\xff\x87\xff\x1e\xff \xff\xce\xff\x85\xffH\xffv\xff\x17\x00b\xffY\xff=\x00\xf7\xff\x92\xff%\x00\x88\x00\xca\xff/\x00\\\x00F\x00T\x00y\x00\x86\x00\x8d\x00\xc2\x00\x9a\x00\xa1\x00\x8c\x00\xbf\x00\xcc\x009\x00\x90\x00?\x01\x88\x00\x14\x00\xff\x00\xb8\x00\xef\xff\x86\x00q\x000\x00\xe3\xffM\x00S\x00\xce\xff\xf8\xff\x0c\x00\x80\xff\xea\xff\x01\x00\x98\xffu\xff\xb6\xff\xd2\xff-\xffo\xff\xc9\xffS\xff\x13\xff\x99\xffL\xff%\xffe\xff,\xff^\xffE\xff.\xffz\xff\x7f\xff\x88\xffh\xffs\xff\xa1\xff^\xff\x8a\xff\xf7\xff\xa3\xff\xc8\xff\xe9\xff\xe2\xff\x03\x00"\x00\xf1\xff\xff\xffi\x00(\x00E\x00\x8b\x00?\x00L\x00\x9b\x00R\x00\x8c\x00\x94\x00\x94\x00T\x00\x7f\x00\xb6\x00C\x00F\x00\x93\x00D\x00\xf6\xff\x83\x00[\x00\xfb\xff*\x00)\x00\xeb\xff \x00\x1b\x00\xd8\xff\n\x00\x0f\x00\xdc\xff\x00\x00\xb9\xff\xdc\xff\x0f\x00d\xff\xc5\xff^\x00v\xff\xe1\xff\xfd\xff\x8f\xff\xe1\xff\xb5\xff\xc4\xff\xf5\xff\x14\x00k\xff\xf6\xff\x02\x00\x94\xff\xaa\xff\x0e\x00\xa9\xff\x86\xff<\x00\xb0\xff\x90\xffN\x00<\x00^\xff\xbd\xff\x86\x00\xe9\xff\x99\xffk\x00M\x00\x1f\x00\xe8\xff8\x00\x91\x00\xd0\xff\r\x00\x98\x00\xb4\x00\xc8\xff!\x00\x87\x00\x1b\x00M\x00B\x00\x0f\x00G\x00\x08\x00r\x00F\x00\xbc\xffJ\x00m\x00\xdf\xff\x06\x00\xa0\x00\xdc\xff4\x00P\x00\xfd\xff`\x00\x12\x00\xf6\xffg\x00\xfd\xff:\x00\x19\x00\x11\x00\x11\x00\xee\xff\x1f\x00,\x00\xf7\xff!\x00\x08\x00\x9d\xffd\x00\x03\x00\x80\xff\xe7\xff*\x00\xc6\xff\xf4\xfe\x15\x00\x1d\x00\xbd\xfe\x90\xff\\\x00h\xff\xba\xfeL\x00;\x00_\xff\x8a\xff\xf8\xff\xc7\xff\x19\xff\x1a\x00\xb6\x004\xffL\xffn\x00\xbb\x00\x99\xff\xf1\xff\x83\x00\xb7\xff\x8e\xff\xdf\x00\x1e\x01\xf4\xfex\xff\r\x02\xfb\x00M\xfd\xbb\x01\xee\x02\xf8\xfc\x95\xff\x18\x03 \x00\xc2\xfe\x08\x00,\x01*\x00\xb3\xff4\x01\xb6\xff[\xff;\x00\x9d\x00\x1c\x00\t\x00\x92\xff,\xff\xaf\x00X\xff,\xfe\x88\x02L\xff\xe4\xfd\xd4\xff\xcc\x00t\xff\xd0\xfd\xd2\x01e\xff\x98\x00<\xfe\xe9\xff\xa0\xff\xd0\xff\xd6\x01r\xfd4\xfe\xe1\x00\x16\x01h\xfe\xd0\xfe\x85\x00|\x01\xe3\xfe\x8d\xfe\x84\xfc&\x00D\x03\xa9\x03\\\x02\xcf\xfe\xf2\xfb\xf5\xfe\xca\x07\xf3\xfe\xdf\xfbv\x04\xa1\x00X\xfc8\x02\xc8\x04\x82\xfcY\xfa\x9c\x02 \x03z\xfd\xef\xfdh\x000\x00\x8e\xfe\xfa\xff{\xffc\x00\xa7\xfe\xd4\xff\xa2\x00\xd0\xfc\xc4\xff\x8e\x02\x1d\x05>\xfc\x18\xfd\x0b\x04\xb3\x02\x82\xfdb\xff\xc4\x03\xe6\xfe\xb5\xff\xd1\x00L\x00\xb4\xfc\xc1\x00\\\xff\xaa\xfd\x18\x02\xde\xfd\x07\x01~\xff\x94\xfd\xea\xfe\xac\x01\xfb\xfcZ\x00\x9e\x00\xf1\xfd\xf4\x00U\x01+\xff}\xfd\n\x00!\xff\x89\x02\xa2\xffD\xff\xcd\x02\xbf\x00\xfa\x01S\xfd\n\xfdr\x00\x88\x05\xeb\x00\\\xfdn\x02\xff\xfdB\x03(\x00\xbe\xf7\xe1\x02\x99\x04\\\xf9)\x00\x1f\t\xd7\xf8\xaa\xf8~\x04\xb7\x04j\xf9\xfb\xfbu\x03\xc9\x03\xa9\xfe\t\xfe\x85\x01\x8e\xfd\\\x00\xf6\x01$\x03*\xfa\xa4\x00E\t\xed\xfb\xc9\xfc\xa9\x04D\x00]\xfc;\x03\x16\xfd\x18\xff\x9c\x03^\x01-\xfb2\x01`\xfeh\x02\x82\xfe\t\xf8\xfc\x06\xd2\x07\xba\xf9<\xf7C\t\xff\x03h\xfb\x88\xfb\xca\x00\x81\x01\x08\x01\xe0\x00w\xff^\xfdN\xfc\xae\x05a\xfe\xfa\xf8\x95\x02k\x07\x89\xf7\xa3\xfc3\x0b\xb1\xfe,\xf8\xad\xfd1\x0b\x1d\x00\xd0\xf7\xe4\x03O\x03\t\xfd\xee\xfd\xb2\x06\xef\x02$\xf9\xe5\xffR\n\xf7\xfb\xba\xf8\x1c\t\xb0\x06\xff\xf7\xd8\xffb\tO\xf9 
\xf8\xc1\t\x11\x07\xc2\xfav\xf8\x17\x03\xfc\x01\xe5\xff\xf5\x00E\xfd\xd9\xfct\x03\xb6\x02E\xf8\xb2\x02\x1e\x08q\xfa\xba\xf66\x04\x05\x0b\x8b\xfc\xdb\xf7\xda\x04\xc6\x05\'\xf90\xf4\x95\x0e\xc3\x04\xce\xed\xad\x05\xe6\x0b*\xf48\xf9\x8c\x0f\x16\xf6\x84\xee^\x13\x13\n\xe2\xec|\xf91\x12w\x01K\xea\xaf\x08\xf3\x0b\xee\xf5\x1e\xf7o\r\xb3\x08\x14\xf1\xa5\x02)\x06\xf7\xf5G\xf9e\x13\xb7\x03\x00\xf4m\x02\x88\x02\xd2\xfex\xfe\x7f\xff\x0f\x06\xf3\xfee\xfa\xc7\x03O\x02\xf9\x00l\xfe\xeb\xfdO\xfc\x99\x02\x13\x03\xea\x02j\xfb\n\xfb\xd8\x05A\x00\xed\xfep\xfc\x9c\x02\xd3\xfe\xab\x02\xe7\xfd\xfc\x01g\x08\xdb\xff\xeb\xf4\x88\xfb\xd2\x0bN\x04\xa1\xf6F\xff\x0e\x0fK\xf9\xee\xf0\x1c\x05\xab\re\xf6?\xf2,\x06@\t%\xfc\x95\xf8"\xfe\x80\x02P\xfa\xad\xfcD\x08G\xfc\x9a\xff\x89\xfb\xd3\xfe\xf3\x07\x96\x05\x08\xfc\xb3\xf9\xb4\x01\x99\x07z\x05\x0e\xfaJ\xfb!\x02\xf8\x04\x91\xf9\xc0\xff\x85\x0cp\xfd\x12\xf0\xef\x02\xb9\x08U\xfb\x8f\xf7F\x07$\x04\x03\xfb\x01\xfec\x04h\x02B\xf7\'\xff\xe8\x08&\x03\x88\xf5h\x026\x07\xd6\x01\xe0\xf8/\xfd\n\x05\xfa\xfdN\xfd\xdd\x02\n\x03q\xfdo\xfd=\x00Q\x03v\xff&\xfb\xe7\x00\xbb\x00&\xffT\x070\x00\xf0\xf8\xfe\xffd\x04\x80\xff$\xfey\x04w\x03\'\xf9\x84\xf9\xac\x0cj\x07\x19\xf4\xc8\xf9\'\x08\xd6\x05\x03\xfb\xa4\xfe\xe8\x03\x96\xfd\x1d\xfbg\x03\xb4\x049\xfe\x9b\xfc\xea\xfcg\xffW\x02;\x03~\xfe \xfa\xd1\xffN\x04}\x03\x1e\xfc!\xfd\x06\x01\xef\xfa\xb6\x03\xff\x0b\xea\xfd\x16\xf7\xe8\x00\xb7\x06-\x00p\xfe\xa7\x04N\xffk\xfa\x14\x07>\n\x0c\xfa\xf7\xf5\xcc\x01a\x05\x16\xff&\xff:\x01g\xfa\xf0\xf6\x08\x03e\x08\x84\xfa\xae\xf3\x16\xfd\xdc\x04\x97\xfe#\x00\xe0\x00\xb9\xf5$\xf9\xcf\x06\x88\x06l\xfc\x0b\xfc\xf0\xff\xb4\xfd\xab\xffD\x086\x03\x9a\xf6\x9a\xf9\x0c\x04\xc1\x03\x08\xff+\xfe\x16\xfb\x1b\xf9\xc8\xfe\xda\x02\xfe\xfb\xb1\xf6\x9f\xfa\x01\xfeu\xfcz\xf8=\xf8\x1b\xf9\xde\xf6|\xf9h\xfd\xe4\xfe\x98\xfdJ\xfcM\xfa\xf3\xfd\xc6\x03\xe4\x084\x12\xf8\x16h\x10\x01\x0bB\x14\xb4\x1e\xcd\x1b\x0f\x14\xcb\x19\xf7$\'$0\x1d4\x1a&\x19\x85\x0f\\\x06\x96\n\x10\x12z\x0c\xcd\xfe\x15\xfa\xb1\xfd$\xf7U\xeaG\xe3\xfc\xe5Q\xe8H\xe4V\xe6\xda\xeb\x13\xe8\x9e\xdc\x11\xd9\x90\xe3\'\xec\xf1\xe9\xb8\xe8\xb1\xf2\xfa\xfc}\xfc\x99\xf7X\xf8\xfa\xfd:\xff\x88\x00\x80\x08C\x13\xc2\x11@\x067\x05\x07\x0c\xf9\x0b\xf7\x02\xf4\x00\x9a\x08U\rX\x07r\x00\xd0\xfei\xfa\xa1\xf4\xe9\xf4\xb0\xfa\xea\xfc\xa5\xf8N\xf6\x14\xf8\xf2\xf8\x89\xf3\'\xf0\xb5\xf2{\xf8*\xfe\x8f\x00V\x02\xf7\xfe\x9a\xfb>\xfc\x8f\x01\xc0\x05\xc6\x06\xd3\x08S\x0c\xb5\x0e\xdc\x0cN\x0c\xfb\t\xf4\x07\xbd\t|\x0eH\x13\x18\x12(\x0cy\x08o\x07\xb6\x06\xe7\x05h\x05M\x05\xc2\x04\xe5\x02D\x01\x08\xfe\xb8\xfa\xcb\xf7\xfd\xf5E\xf8\x17\xfa\xf0\xf8\xc9\xf5\x03\xf3\x95\xf1h\xf2\x96\xf1\xbb\xf1#\xf5\x8a\xf6\x1d\xf7\x1e\xf6\xcd\xf7<\xf8\xab\xf6>\xf8\xfa\xfb\xb3\xff\xae\x00a\xff\xa0\xffk\x00\xba\xfe\r\xfe\xa4\xff\xd3\x01(\x02d\x01\x1c\x01\xa0\xff\xed\xfd\x82\xfc\xa8\xfc\xe5\xfc\'\xfd\x02\xfd`\xfb\xb4\xfb\x0b\xf9\x96\xf5\x1c\xf4\x9c\xf2\xa4\xf5\x16\xf6O\xf6\x17\xf6\xe6\xf5L\xf7\xe4\xf7\x1c\xfa\xde\xfb\xfd\xfc\xd9\x01\xb9\x10B"J%L\x18\xf3\x13\xe4"Z1#2I0\xcc6!;\xa88\x825\xd10\xea$\xae\x18\x89\x1a\x9f$\xd3"R\x12s\x02x\xfb?\xf3I\xec\xf0\xe7>\xe4E\xdd\x1a\xd7\xea\xd9\x98\xde\x81\xd8\xa5\xc8\xd0\xc2i\xce\xc8\xdbh\xdei\xdc\x9a\xe1\x98\xe9$\xed\xe4\xec\xfb\xf0\xae\xf8x\xfcy\x00>\t\x10\x15\x91\x15\x0c\n\n\x07P\x0f\xa2\x14x\x0eI\n(\x0e}\x10\xf1\n[\x035\x00\x87\xfb4\xf4\xbe\xf3V\xf9F\xfb\xa1\xf4M\xed\x17\xee*\xf1\xb7\xed\xbf\xea=\xee\xa9\xf4(\xf9\x1b\xfac\xfc\x85\xfc^\xf9z\xf9u\x00\xe0\x07\xcb\n\xa3\x0b\xff\x0cL\x0f\x1d\x0f\xe4\r\x1c\r\xa3\r\x1a\x0f,\x12%\x15\xa7\x14\xd5\x0f\xd6\t\x97\x06S\x05\xda\x05E\x05)\x04\x87\x03O\x01c\x
fe\xa0\xfa\xc0\xf7z\xf4;\xf3\x9a\xf6E\xfc\xa0\xffJ\xfe\xbb\xfa;\xf7\x04\xf8\xeb\xf9\x0b\xfd\xb9\x01\xf0\x02\xb8\x03\'\x046\x03\xff\x00j\xfdQ\xfc\xfd\xfd7\x01\x84\x03\x17\x02\x11\x00\xa9\xfc=\xfa\xc8\xf8h\xf9\xe2\xfa\xdb\xfb\xc0\xfcC\xfd\xd4\xfc\x9e\xfa\x9d\xf8\xde\xf7\xd0\xf8\x80\xfbv\xfdv\xfe\xdb\xfft\xfe\xa6\xfcp\xfc"\xfd\x85\xff(\x00\xb2\x012\x04\xbd\x03\xc0\x02\xa8\x00\x9c\xfe\xe5\xfe<\xff\x9e\x00\x1b\x02\x85\x01:\xfff\xfc\xf1\xfa\x0b\xfb\xef\xfak\xfa*\xfc\x0f\xfeT\xff\xf9\xfe\xd0\xfc\xb1\xfb&\xfb\xa0\xfc\xfb\x00\xe3\x01\xcf\x00\x87\xfet\xfc\x9f\xfd\xeb\xfbF\xfc\xab\xfe\x12\x01\xb7\x00I\xff\x1f\xfe\x16\xfb\xc2\xf8\x94\x01\xb9\x18\xa7%\x81\x19\xd5\x06f\x0e\x7f$\x83(G\x1f\x80!\xc91y4\xb7*N(\xdd&~\x18\xf3\x08\xb9\x10E$\xaa!\xe4\t\x89\xf9\xe7\xfa\xf3\xf5s\xe8>\xe1\xb5\xe5\x9b\xe6i\xdfe\xe0h\xe6\xfb\xde8\xcc\xfe\xc6R\xd8\x0b\xe9\xd5\xe8\xe7\xe3a\xe9\xb3\xf1\xa1\xf1\xb8\xee\xa5\xf2\xd8\xfa\xce\xfc\xec\xfe\xf4\x08q\x13\x86\x0fh\x00\xa0\xfd\x11\x08\x88\r\x8b\x06:\x02\xff\x06\x16\t\xad\x013\xfa\xa2\xf9\xf5\xf6(\xf0\xfe\xf0G\xf9\xc6\xfa\x8d\xf1\xfd\xea\xe2\xee\xe1\xf2\x9b\xee\xbc\xec^\xf3\xf3\xf8\x8c\xf8\xe9\xf8\x81\xfe\xf0\xff\xee\xf9\xa9\xf8,\x02B\x0b\xf8\n)\tq\x0b!\x0e\x80\x0c\xab\x0bk\x0ep\x10q\x0f_\x0f\xa0\x12\x8f\x14\x91\x10\xb7\t1\x07\x1c\to\n\xc5\t\x11\x08\xb3\x06\xbd\x03\xd3\xff\xdf\xfd\xdb\xfdZ\xfcm\xf9\xdd\xf8\xd7\xfa\xc0\xfb\x14\xfa\xab\xf6w\xf5\xd0\xf6F\xf7]\xf8\xf6\xfa\xdc\xfcY\xfd\x84\xfc[\xfd\xf7\xff\xb3\x00\x14\x00\xee\x00c\x04\x00\x07\r\x07\xcf\x06\xbc\x06\xd8\x05=\x04\x19\x04x\x05k\x06;\x05k\x039\x02Y\x01\xd6\xff\x04\xfe\x90\xfd\x10\xfe\x08\xfe\xc0\xfc\xc7\xfcD\xfc[\xfb\x89\xfa&\xfa\xf2\xfbG\xfd\xdc\xfd!\xfe\x82\xfe\xd6\xfe\x9a\xfeo\xff\x14\x02\xfb\x04s\x03*\x02G\x04\x1e\t\x13\x0b\xca\x07\x17\x07B\x07\xe7\x07|\x07K\x08\xd1\x08\x11\x07\xe5\x04 \x04\xb5\x02g\xff\x9f\xfdU\xfc\xa7\xfc\x9e\xfc\xae\xfa]\xf9y\xf7\xe2\xf5\x04\xf5\xa2\xf4\xe9\xf5&\xf6\xec\xf5\x82\xf6\xdc\xf6\xeb\xf6"\xf6\xb4\xf6g\xf8v\xfaA\xfbc\xfc\xef\xfd\xea\xfe\x9f\xff\xce\xff*\x01 \x031\x04\x12\x05\xfc\x05\xaf\x06\xb7\x06\xf4\x05L\x06?\x07\xb0\x06\x00\x06\xef\x05\xef\x05\xf8\x041\x03}\x02\xf5\x01r\x006\xffK\xfe\t\xfeW\xfd\x9d\xfb7\xfa\x99\xf9E\xf9\x81\xf9r\xf9\xe8\xf8j\xf9\xf8\xf9V\xfau\xfa[\xfa\x81\xfa\x9d\xfa\xc9\xfa\xe4\xfb\x02\xfc 
\xfb\xe8\xfa\xe2\xfa\xd6\xfa8\xf9\x1d\xf8(\xfa\xe1\xfc\xcb\xfcK\xfb@\xfc_\x02\x87\x07\x8f\t\x13\x0e\xba\x15\xfc\x18\x9f\x14\xf9\x14m\x1f6(_&\x9f"\x8d&]*l%9\x1d\xdf\x1a\xa1\x1a\xde\x166\x11\xa7\x0f\xdc\x0c\xd6\x03\xa3\xf9k\xf4\xf0\xf1\x81\xed\x91\xe8Y\xe6T\xe5m\xe3\xfc\xe1\xa8\xe1\xe7\xdf\xf9\xdc\xb7\xdd&\xe3\xe0\xe81\xec\xbe\xedF\xf0^\xf3\xbf\xf5d\xf8\xed\xfa\x97\xfdV\x00\xc2\x03u\x07\xfb\t\xa1\to\x06\t\x04\xfb\x03\xe2\x05u\x06}\x05\x10\x04.\x02\x00\x00\xb1\xfd\xc4\xfb1\xf9n\xf6\xb9\xf5\x9c\xf7\xd9\xf8\x13\xf7\x1c\xf5\xd9\xf4\x1b\xf5a\xf4\xa5\xf4\x92\xf7*\xfam\xfa\x05\xfb\xe1\xfd0\x00\x93\xff\xd8\xfe\xd3\x00\x08\x04n\x05$\x06\xf5\x07$\tR\x08I\x07\xf8\x07j\t\x84\t\xc2\x08\xf2\x08\xe0\t\x8f\t\xcc\x07y\x06\r\x061\x05M\x04-\x04R\x04S\x03O\x01\xfe\xff\x9e\xff\xc8\xfe?\xfd[\xfc]\xfc\x99\xfcn\xfc\x9d\xfb\\\xfbO\xfb\xc9\xfa\xd4\xfa\xf3\xfb\x14\xfd\x80\xfd\xb7\xfdC\xfe,\xff\xe9\xff\n\x00\xa6\x00\xdb\x01\xca\x02\xf4\x02\x12\x03\x99\x03\x96\x03<\x03\x1c\x03$\x03\xf8\x02\xe2\x02\xb3\x029\x02\xb4\x01\x0c\x01i\x00\x15\x00\xc9\xff]\xff!\xff/\xff/\xff\xfd\xfe\x9a\xfe\\\xfep\xfer\xfe\x9e\xfe\x07\xff`\xff\xad\xff\xc6\xff\xaf\xff\xb5\xff\xa2\xff\xaa\xff\x18\x00S\x00d\x00\xa8\x00\x94\x00Z\x00M\x00F\x00\x19\x00\xe5\xff\x0f\x00o\x00\xa6\x00\x84\x00\x14\x00\x01\x00>\x00.\x00,\x00\xc6\x00\xbc\x01Q\x02`\x02\xaa\x02\xe7\x02\xfa\x02Q\x03\xf2\x03m\x04?\x04E\x04\x0e\x05\x1a\x06#\x05\xd5\x02\xde\x02b\x04\x16\x04t\x01q\x00\x06\x02\x0b\x02\x13\xffW\xfd\xa2\xfe\xd6\xfe\x9d\xfb\x88\xf9\x98\xfb\xf9\xfc`\xfa\xb9\xf7\x19\xf9\x1c\xfb\xa8\xf9\x8a\xf7y\xf8\x88\xfa:\xfa\xf7\xf8F\xfa\x97\xfc\xae\xfc\x95\xfb\x8c\xfc\xf8\xfe\xdc\xffe\xff\xae\xffx\x01\xe4\x02\xef\x02\xf8\x02\xad\x03\'\x04\xbc\x03-\x03|\x03\r\x04\x88\x03E\x02\xb2\x01\xef\x01t\x01\xe9\xff\xb6\xfe\x8a\xfe0\xfe/\xfdd\xfc:\xfc\xfe\xfb:\xfb\xbf\xfa\x03\xfbe\xfbZ\xfb$\xfbo\xfb7\xfc\xc2\xfc\xfb\xfcr\xfd\x1a\xfe\xa7\xfe\xc0\xfe\xdf\xfe*\xff)\xff\xbc\xfe{\xfe\xa9\xfe\xcf\xfek\xfe\xce\xfd8\xfd\xe5\xfc\xb4\xfc\xa2\xfc&\xfd\x8d\xfe\xca\x00\xf7\x02f\x04b\x05m\x07\xb0\n\xaf\r\x13\x10\xd6\x12\xc9\x15z\x17\xd0\x17k\x18\xc9\x19Y\x1a6\x19e\x17\x15\x16\x9b\x14\xf5\x11\xb5\x0e\xe1\x0b~\tz\x06\xdd\x02\x89\xff\n\xfd\xb7\xfa\x16\xf8\xcb\xf5W\xf4v\xf3\x10\xf2w\xf0\x9c\xef}\xef\x82\xef\x0f\xef\xd2\xeex\xef?\xf0\xb1\xf04\xf1#\xf2$\xf3\xc9\xf3Z\xf4\x8d\xf5\x01\xf7:\xf8C\xf9\x1a\xfa\x14\xfb\x00\xfc\xa4\xfc2\xfd\xde\xfd\x93\xfe\xfc\xfe\xfc\xfe\xe4\xfe\xf2\xfe\xd1\xfel\xfe\x03\xfe\xcb\xfd\x8b\xfd\x18\xfdk\xfc\x00\xfc\x1a\xfc!\xfc&\xfch\xfc\xc0\xfc\xf5\xfc\xf4\xfc#\xfd\x84\xfd\xfc\xfd\x83\xfe\x0f\xff\xa1\xff\x1d\x00s\x00\xbd\x00\x07\x01F\x01\xb9\x01i\x02\x11\x03\xb0\x03X\x04\xda\x04P\x05\xba\x05Y\x06\x0e\x07\x9c\x07\x07\x08\x95\x08B\tq\t\x1a\t\xe2\x08\xd6\x08V\x08{\x07\xf7\x06\x9a\x06\x85\x05\x01\x04\xe1\x02$\x02\xdd\x00?\xff/\xfe\xa7\xfd\xbb\xfc\x8a\xfb\xfd\xfa\xf8\xfa\x8f\xfa\xdc\xf9\xd7\xf9l\xfa\x81\xfa0\xfa\x93\xfa\x85\xfb\xcd\xfb\xc5\xfb9\xfc\xf4\xfc 
\xfd\x06\xfdj\xfd\x1d\xfe[\xfe7\xfew\xfe(\xffK\xff\x11\xff]\xff\xef\xff$\x00\x1c\x00}\x00\x13\x01F\x01O\x01\x94\x01\xec\x01\xf5\x01\xde\x01\xeb\x01\r\x02\x11\x02\xd7\x01\xaa\x01\xb4\x01\x8d\x01>\x01\x18\x01\x18\x01\x16\x01\x08\x01\x1e\x01:\x01x\x01\x8a\x01\x84\x01\xb7\x01\xed\x01\xfc\x01\t\x02#\x02*\x02\x1a\x02\x08\x02\x01\x02\xd8\x01\x85\x01\x0e\x01\xb8\x00\x99\x00r\x00\x02\x00{\xff\xa2\xff\x06\x00\xdb\xff\xbb\xff\xf2\xff[\x00\x17\x00\xf1\xff|\x01\xb6\x03\xd7\x03\x84\x02\r\x03\x12\x05t\x05\n\x04\x1a\x04\x7f\x05[\x05c\x03\x8f\x02M\x03\x87\x02\xf0\xff\\\xfe\xe9\xfe\xdc\xfe\x8a\xfc~\xfa\x82\xfa\xb1\xfa\xb5\xf9\xc0\xf8\x01\xf9[\xf9\xde\xf8k\xf8Q\xf9\xc8\xfa=\xfbF\xfb\x1b\xfcs\xfds\xfe\x0b\xffi\xff.\x00\xf7\x00\x83\x01\x00\x02_\x02\xb9\x02\xb0\x02q\x02f\x02h\x02\xf9\x01\x17\x01\x81\x00\x82\x00?\x00S\xff\x81\xfe-\xfe\xbb\xfd\x0c\xfd\x82\xfcZ\xfc\x18\xfc\x96\xfb|\xfb\xe0\xfb<\xfc,\xfc\x13\xfcd\xfc\xe7\xfcA\xfd\x97\xfd"\xfe\x9b\xfe\xfc\xfeW\xff\xda\xffi\x00\xab\x00\xae\x00\xdb\x00Z\x01\xc8\x01\xd7\x01\xd5\x01\x12\x02q\x02v\x02[\x02}\x02\xa0\x02u\x02E\x02g\x02o\x02\x1a\x02\xbc\x01\x7f\x01\x14\x01a\x00\xc9\xffL\xff\xa6\xfe\xde\xfdA\xfd\xde\xfcw\xfc\r\xfc\xcd\xfb\xc8\xfb\xc7\xfb9\xfc^\xfd\x04\xff\xa8\x00R\x02N\x04\x88\x06\xa8\x08\xbd\n2\r\xd2\x0f\xca\x11\x04\x13e\x14\xe4\x15o\x16\xd8\x15b\x156\x157\x14\xc7\x11l\x0f\xc8\r\x85\x0b\xec\x07x\x049\x02\xbd\xff\xfe\xfbx\xf8{\xf6\xec\xf4S\xf2\xd0\xef\xcb\xee{\xee\x82\xed[\xecq\xecD\xedy\xedZ\xed%\xee\xbb\xef\xd8\xf0S\xf1c\xf2\x0b\xf4L\xf5\x18\xf6\x15\xf7\xa3\xf8\xe7\xf9\xb1\xfa\xb1\xfb\x04\xfd\x1d\xfe\xbb\xfeQ\xff\x07\x00\x8d\x00\xf2\x00i\x01\xd1\x01\xd6\x01\xa9\x01\xae\x01\xb0\x01Y\x01\xf0\x00\xa1\x00W\x00\xe1\xffy\xffn\xffE\xff\xd0\xfey\xfe\x8f\xfe\xd0\xfe\xab\xfe\x98\xfe\xf2\xfeX\xff\xc6\xff<\x00\xd7\x00g\x01\xd0\x01K\x02\xf7\x02\x9b\x03\xef\x03E\x04\xc0\x04F\x05\xb0\x05\x08\x06K\x06e\x06v\x06\x99\x06\xd9\x06\xf5\x06\xd4\x06\x96\x06G\x06\xe8\x05t\x05\xe5\x04\x18\x04<\x03~\x02\xbe\x01\xcc\x00\xad\xff\x95\xfe\xa5\xfd\xc3\xfc\xe0\xfb"\xfb\x92\xfa\xf3\xf9G\xf9\xd4\xf8\xb6\xf8\xa2\xf8\x84\xf8\x9c\xf8\xf3\xf8e\xf9\xd3\xf9F\xfa\xeb\xfa\x9e\xfbH\xfc\xfb\xfc\xb9\xfd\x93\xfe{\xffL\x00\x1b\x01\xd5\x01\xab\x02x\x03\x02\x04t\x04\xd7\x04C\x05\x9c\x05\xd9\x05\xfb\x05\xf5\x05\xd4\x05\x82\x05%\x05\xd9\x04s\x04\xe3\x03X\x03\xdc\x02Q\x02\xc5\x01@\x01\xb6\x00*\x00\xb3\xff{\xffZ\xff0\xff\x19\xff\n\xff\xfb\xfe\r\xff)\xffD\xffa\xff\x92\xff\xd4\xff\x0b\x00-\x00M\x00l\x00q\x00v\x00\xa7\x00\xd7\x00\xbf\x00\x95\x00\x90\x00\x91\x00~\x00A\x00\x13\x00\xf8\xff\xc4\xfft\xff\'\xff\x0f\xff\xf6\xfe\xb2\xfe|\xfe\x9a\xfe\xc5\xfe\xb7\xfe\xd2\xfe%\xff\x95\xff\xba\xff\xda\xff\x9b\x00\xa5\x01\x03\x02\xee\x01t\x02A\x03p\x03\r\x03"\x03\xa4\x03\x9b\x03\x05\x03\xa1\x02\x85\x02\r\x02\x19\x01\\\x009\x00\xe7\xff\xf5\xfe\x05\xfe\x9e\xfdW\xfd\xa6\xfc\xf4\xfb\x99\xfbg\xfb-\xfb\xd8\xfa\xd2\xfa\xf3\xfa\xe2\xfa\xc4\xfa\xdd\xfa.\xfb{\xfb\xa8\xfb\xc9\xfb\x1d\xfc\x8a\xfc\xdc\xfc\x1b\xfd>\xfdy\xfd\xd4\xfd0\xfe~\xfe\xd8\xfe\x1d\xffI\xffu\xff\xc3\xff\x1d\x00T\x00\x8b\x00\xba\x00\x05\x01x\x01\xdf\x01 
\x02V\x02\x99\x02\xeb\x02;\x03}\x03\xe1\x03\x16\x04\x1c\x04B\x04q\x04\x88\x04W\x04\x13\x04\xe7\x03\xca\x03\xaa\x03V\x03\xeb\x02s\x02\x13\x02\xbe\x01o\x01\x1d\x01\xb8\x00`\x00\x1d\x00\xf0\xff\xcf\xff\xc5\xff\xa3\xff\x8a\xff\xa1\xff\xc7\xff\xd5\xff\xdf\xff\x00\x00,\x00^\x00\x87\x00\xae\x00\xc8\x00\xca\x00\xcd\x00\xd5\x00\xe1\x00\xdc\x00\xbb\x00\x9d\x00v\x00`\x00:\x00\xf9\xff\xb9\xff}\xff;\xff\t\xff\xd4\xfe\x9c\xfex\xfeC\xfe\x12\xfe\xee\xfd\xd5\xfd\xce\xfd\xbd\xfd\xc3\xfd\xc8\xfd\xd1\xfd\xdd\xfd\x01\xfe8\xfek\xfe\xa7\xfe\xdd\xfe$\xff^\xff\x97\xff\xd4\xff\x08\x00S\x00\x8b\x00\xa0\x00\xab\x00\xa1\x00\x96\x00\x81\x00Z\x005\x00\xf4\xff\x91\xff!\xff\xca\xfey\xfe+\xfe\xe4\xfd\xb5\xfd\x8a\xfda\xfd3\xfd2\xfdo\xfd\xd4\xfd\x1f\xfeD\xfe\x89\xfe\xe2\xfe2\xff^\xff\xc3\xff6\x00p\x00\x95\x00\xbc\x00\xea\x00\x01\x01\xfe\x00\xef\x00\xff\x00\x00\x01\xf3\x00\xcb\x00\xbb\x00\xbe\x00\xa9\x00\xa0\x00\x91\x00\x90\x00\x93\x00\x98\x00\x97\x00\xa6\x00\xad\x00\xb7\x00\xc1\x00\xc1\x00\xcc\x00\xc6\x00\xb7\x00\xbc\x00\xba\x00\xa4\x00\x83\x00]\x00E\x00$\x00\xfb\xff\xd6\xff\xc1\xff\x92\xffN\xff\x14\xff\xfd\xfe\xec\xfe\xc1\xfe\xb1\xfe\xb3\xfe\xb3\xfe\xa1\xfe\x97\xfe\xb2\xfe\xd4\xfe\xe6\xfe\xfc\xfe4\xffh\xff\x8c\xff\xbd\xff\xfe\xff2\x00=\x00`\x00\x8f\x00\xba\x00\xe5\x00\xfd\x00&\x01=\x01H\x01[\x01c\x01n\x01\x85\x01\x90\x01\xa4\x01\xb7\x01\xbc\x01\xb3\x01\x84\x01~\x01\xa2\x01\xae\x01\xaf\x01\xbe\x01\xdf\x01\xec\x01\xeb\x01\xf7\x01\n\x02"\x02;\x02Q\x02c\x02s\x02t\x02w\x02q\x02]\x02<\x024\x02/\x02\x13\x02\xfc\x01\xb9\x01X\x01\xef\x00\xa5\x00^\x00 \x00\xe7\xff\x98\xffL\xff\xf0\xfe\x97\xfen\xfeG\xfe\x01\xfe\xf0\xfd\xf7\xfd\xdc\xfd\xcc\xfd\xb5\xfd\xa6\xfd\xb5\xfd\x9e\xfd\x96\xfd\xb8\xfd\xb8\xfd\xa5\xfd\x94\xfd\x92\xfd\x98\xfd\x91\xfd}\xfd\x80\xfd\x85\xfdx\xfdh\xfdO\xfdQ\xfdQ\xfd?\xfdN\xfd`\xfdv\xfd\x90\xfd\x93\xfd\xa5\xfd\xc2\xfd\xe3\xfd\xec\xfd\x05\xfe>\xfe]\xfet\xfe\x95\xfe\xdc\xfe\x15\xff\x15\xffB\xff\x92\xff\xc3\xff\xee\xff!\x00d\x00\x94\x00\xc2\x00\x02\x01M\x01z\x01\x99\x01\xca\x01\xf9\x01!\x029\x02H\x02]\x02f\x02i\x02a\x02V\x02D\x025\x02%\x02\x0e\x02\x02\x02\xf0\x01\xe1\x01\xc8\x01\xa8\x01\x8e\x01\x89\x01z\x01j\x01i\x01W\x019\x01\x1f\x01\x05\x01\xf5\x00\xe8\x00\xcd\x00\xc5\x00\xb4\x00\x8e\x00n\x00E\x00\x19\x00\x00\x00\xdc\xff\xaf\xff\x9a\xff~\xffG\xff:\xff(\xff\xf9\xfe\xe2\xfe\xcd\xfe\xbc\xfe\x9c\xfe\x8e\xfe\x8f\xfe}\xfeW\xfeD\xfe]\xfeT\xfeU\xfec\xfe]\xfea\xfeo\xfe|\xfe\x83\xfe\x9c\xfe\xab\xfe\xb8\xfe\xe2\xfe\xf5\xfe\x12\xff.\xff/\xffK\xffx\xff\x96\xff\x9a\xff\xb4\xff\xda\xff\xed\xff\xfa\xff\xee\xff\xfc\xff\x1b\x00\x10\x00\x13\x00/\x00@\x00B\x00I\x00m\x00{\x00\x96\x00\xb4\x00\xd0\x00\xf3\x00\x1a\x018\x01h\x01\x99\x01\xbe\x01\xd0\x01\xe4\x01\x02\x02\x1e\x02$\x02\x07\x02\xfe\x01\xf3\x01\xcd\x01\x9c\x01n\x01F\x01\r\x01\xba\x00w\x00A\x00\x01\x00\xab\xffU\xff)\xff\xf0\xfe\xbf\xfe\x97\xfel\xfeM\xfe+\xfe\x1c\xfe\x15\xfe\x15\xfe\x14\xfe!\xfeS\xfev\xfe\x92\xfe\xb5\xfe\xe7\xfe\x12\xffD\xff|\xff\xb2\xff\xe7\xff\x0c\x000\x00`\x00~\x00\x89\x00\xa1\x00\xb2\x00\xc7\x00\xcb\x00\xc7\x00\xce\x00\xcd\x00\xc3\x00\xb0\x00\xae\x00\xa1\x00~\x00x\x00a\x00K\x00C\x00,\x008\x007\x00!\x00$\x009\x008\x000\x004\x00=\x00Q\x00M\x00A\x00N\x00S\x007\x00-\x006\x00?\x009\x00 
\x00\x1e\x00\x17\x00\x08\x00\xfb\xff\xe3\xff\xce\xff\xb5\xff\x9b\xff\x93\xff\x8e\xff\x8a\xffz\xffe\xffW\xff^\xffa\xffa\xffe\xffs\xff\x86\xff\x88\xff\x89\xff\x92\xff\xa5\xff\xbc\xff\xcc\xff\xe2\xff\x07\x00\x10\x00\x1c\x004\x00H\x00W\x00n\x00\x8b\x00\x99\x00\xa2\x00\x9b\x00\x96\x00\x8d\x00\x88\x00\x7f\x00z\x00\x7f\x00v\x00j\x00g\x00[\x00J\x00H\x00B\x00A\x00?\x00<\x000\x00/\x00(\x00\x1c\x00\x13\x00\x06\x00\xfd\xff\xf0\xff\xf1\xff\xdd\xff\xbd\xff\xb2\xff\xab\xff\xa1\xff\x86\xffz\xff}\xffr\xffc\xffe\xffm\xfff\xff]\xffW\xfft\xff\x7f\xff\x80\xff\x86\xff\x8e\xff\x99\xff\xa2\xff\xa5\xff\xa3\xff\xab\xff\xae\xff\xb2\xff\xb3\xff\xb5\xff\xaf\xff\xaf\xff\xb1\xff\xa6\xff\xa3\xff\xb0\xff\xba\xff\xb5\xff\xbc\xff\xb8\xff\xb8\xff\xb7\xff\xc2\xff\xc6\xff\xcb\xff\xd4\xff\xda\xff\xeb\xff\xf3\xff\xff\xff\x08\x00\x14\x00\x17\x00\x1f\x00:\x00B\x00A\x00\\\x00g\x00o\x00\x7f\x00\x85\x00\x83\x00\x8f\x00\xa2\x00\x9b\x00\x91\x00\x91\x00\x8f\x00\x8b\x00\x90\x00\x98\x00\x8e\x00y\x00v\x00m\x00k\x00m\x00X\x00D\x00@\x00;\x00#\x00\x0c\x00\xf4\xff\xe1\xff\xdf\xff\xce\xff\xc0\xff\xab\xff\x96\xff\x85\xff{\xffj\xffh\xffl\xff\\\xffR\xffR\xffN\xffF\xff:\xff9\xff1\xff4\xff@\xffF\xffH\xffG\xffK\xffV\xffl\xff{\xff\x89\xff\x9d\xff\xb3\xff\xc9\xff\xd4\xff\xec\xff\x01\x00\x12\x00&\x00<\x00P\x00e\x00v\x00\x84\x00\x95\x00\x9d\x00\xa7\x00\xb2\x00\xbc\x00\xbc\x00\xb6\x00\xba\x00\xbc\x00\xb2\x00\xa8\x00\xa7\x00\xa5\x00\xa2\x00\x9e\x00\x9b\x00\x8e\x00{\x00t\x00n\x00j\x00[\x00S\x00I\x00C\x009\x00,\x00\'\x00\x1d\x00\x12\x00\x0e\x00\x04\x00\x07\x00\r\x00\x00\x00\xeb\xff\xe1\xff\xd6\xff\xd1\xff\xc4\xff\xb2\xff\xa3\xff\x8a\xff\x8a\xff\x8d\xff\x82\xff}\xff~\xffv\xffr\xff\x83\xffy\xffs\xff~\xffv\xffz\xff\x82\xff}\xffr\xff\x80\xff\x90\xff\x9b\xff\xa6\xff\xb1\xff\xbb\xff\xc8\xff\xce\xff\xd5\xff\xe1\xff\xf6\xff\x0c\x00\x15\x00\x14\x00\x1f\x008\x00G\x00W\x00[\x00b\x00t\x00\x84\x00\x7f\x00\x84\x00\x89\x00\x91\x00\x90\x00\x90\x00\x93\x00\x8d\x00\x90\x00\x81\x00{\x00d\x00c\x00\\\x00B\x00A\x00/\x00\x1c\x00\x1d\x00\t\x00\xf2\xff\xee\xff\xdc\xff\xcf\xff\xc8\xff\xb3\xff\xa5\xff\xb4\xff\xb3\xff\xb4\xff\x9c\xff\x8a\xff\xa4\xff\xa2\xff\x99\xff\xae\xff\x9b\xff\x93\xff\xab\xff\xc8\xff\xae\xff\x93\xff\xb0\xff\xe2\xff\xde\xff\xb4\xff\xb8\xff\x91\xff\x9d\xff\xb9\xff\xb1\xff\xc4\xff\xc0\xff\xad\xffa\xffC\xff\x84\xffn\xffP\xff\x92\xff\xbc\xff\xb5\xff\x9a\xff\x97\xff\xe4\xff\xc0\xff\x86\xff\xa5\xff\xae\xff\xfc\xff\x1d\x00v\x00\x04\x01\xf5\x00\x81\x01\x1e\x01\xef\x01>\x01\xa7\x01\xdd\x00\x00\xfd\xdc\x08\xea\x10G\x04\xce\xf3.\xfe\x19\x05\x10\x04\xaa\x02A\xff\xb1\xfa/\xf7\xce\x00\xc7\xf9[\xfc\xbd\xf9\xa5\xf40\xfe\xd9\x00\xc7\xfe5\xfe)\xfdy\xfb \xfb\xec\x05-\x0b\xb7\xfb\xba\xfeC\x04\xcf\x01\xe2\x03m\x03v\x03\xee\xffM\xfa\xe1\x00\x98\tR\x03`\xfad\x01\xae\x01\xfe\xf8\xb1\x00\x96\x04h\xff\xd1\xfb\xa5\xfb\xaa\x011\x00\xa1\xfc\xf5\x01+\xfeH\xf33\x02]\x05`\x00}\xfa\x9e\xfd\xee\x01\xc6\x00\xae\xfe@\xfb\x18\x07\x1b\x04a\xfa\x19\x02s\x06\xab\xfe}\x02\xd5\x02-\xff4\x02c\xfdu\x06~\x08\xcc\xfe\xf8\xf5\x18\x07a\x07\xc9\xfey\xff\x01\xff:\x01K\xff\xb0\x04\x9e\xfft\x03\x9a\xf9\xe8\xfb\xce\x01\xc3\xff\xdf\x02\xda\xf6 
\x03\xcb\x00\x9a\xf6\x96\xfeS\x00\xd5\xff2\xf7&\xfei\x04\x9d\xf9\x03\xfd\xec\x06O\xfei\xf4\xf3\x02=\x06\x90\x01\xad\xfe\x18\x02\xe4\x00\xa4\x01L\xffp\x044\x05\x19\x00\xbb\xff\xa3\xfe2\x04\xec\xff\xdf\x04{\x04c\xfa\xbc\xfd\xc0\x0bG\xf9\xaf\xf9\x8a\x06\xf5\x01n\xfc\x0c\xfap\x068\xfd\xc1\xfa\\\xfcL\x05-\x01]\xf8\xd5\xfd\x99\x039\xf9=\x00\x8b\n\xde\xfbz\xf7A\x04\xd1\np\xf2Z\xff\x9e\x10x\xfb\xb7\xf3\x02\t\xe9\t\x08\xef\xfd\x01\xad\r\xf1\xfa)\xf5\xbc\xffa\x0b\xa8\xfci\xfc\x98\x05O\xfd\x7f\xfa>\x00\x13\t-\x00\xc4\xfa|\xfc\x9a\x05\xd3\x01\x15\xfa\r\x05L\xfe\xce\xf4L\x01\xda\x05\xf3\xfb\x91\x02\x1e\x01\xc5\xefS\x03\x18\x0b\xfe\xf6C\xff\x11\x06\xad\xff\x91\xfe\xfe\xff\x05\x04\xb6\x04\xa1\xf9\x03\xfe\xdf\x05\x99\xfb\x02\x03\xb4\x07u\xf6h\xffd\x05\xe1\xfc\xa1\xfd\x87\xfe\xd1\x01\x94\xff\xe6\x00N\xf8\x1d\x04\xb8\xfeL\xfa\xba\x04\xbb\xfd\x85\xf5\x8b\x07\xc0\x08\xe2\xf5\'\x07\x88\xffD\x01\x1f\x00\xb8\x04\xa8\x05\xca\xf8E\x01z\t\xcf\x00L\xfc\xc2\x02\x06\x01\xef\xf8\xb2\xfbH\x06\xc7\x06%\xf8\x86\xf3A\x0e\x02\xfe\'\xf3\xa5\x00\x14\n\x8a\xfeJ\xf0$\x07\x12\x07\xaa\xfc\x04\xfbf\x01U\x04\xac\xff\x9b\x02\x04\x06J\x01~\xfc\x9c\x00\x1a\x06)\x05\x9a\x05 \x04\x14\xf6\x1e\xfe1\x06O\x03\x9f\x03\x05\xf9,\xfc\x8f\x04P\xfb\xc4\xf8N\x03\xe1\x03o\xf7A\xf9{\xff\xf1\xfe\xcf\x05f\xf9\xe4\xfa#\x04\x82\xfcG\x009\x02\xb3\xfe\xe4\xff\x83\x02\x9e\xfb\x0c\xf9\x97\x05\x87\x04\x02\xfd:\x01\x98\xfa\x7f\xfa\xe2\x07\xab\x05[\xf4[\xfc\xe0\x0br\xfc\xc9\xfc\xdd\t\x16\x00\x93\xf3!\xfa:\x11\xcb\x06\xf9\xf24\x01\xb7\x07:\xf5\xfc\x00\xe4\x10\xf6\xfau\xe8\xdb\x00\xab\x16`\xfd\xea\xf9v\x06\xc3\xfdw\xea\xa8\x02\xff\x13\x10\x03v\xf7\x13\xfaq\x03\xcc\xff\x83\x02J\x06\xa2\xfd\x87\xf3#\xfc\x02\x0c#\x06\xb5\xfc\x86\xfd\xba\xfa\xc3\xf8\x13\xfe`\x0c\xe3\x02\x17\xf5\xb9\x00\x85\x05\x0c\xfeP\xff\x81\x08\xee\x01\xfe\xef8\xfd\xc3\x0b\xd3\x06\xdd\x03\xa1\xfb\xdc\xf7\xbf\xffM\x06\xd0\xfc\xf8\xff\xa0\x04\xaa\xfe\xa2\xfe\x98\xfcG\x01r\x05\xea\xfc\xc3\xf7\xe9\xfc\xd2\x04a\x06x\xfe\xec\xf6c\x01\x07\x03;\xff@\x000\x01\xc3\xfe:\xff\xa0\x05\xb4\x00\xe0\xfe\x1d\x02\xf3\x00\x97\xfd\xcd\xffO\x06\xcb\x02p\xfd\x01\xffS\x02\xbe\x01\x9c\xfd\x81\xfd\xf2\x03C\xff\xcf\xfb^\x01L\x03\xb7\x00e\xfb2\xfe;\x03K\xff\xb7\xf8`\x03\x1e\x04\x04\xfc\xe1\xffO\x01\xcd\xfew\xfb\xcc\x00\xc4\x04\xef\x00\xe6\xfa\x14\x01\x82\x04\xc8\xff\x94\x00\xa1\x00\xbd\xff\xb4\xfaU\x011\x084\x02t\xfc%\xfci\xfcZ\xfe\xf5\x06c\x06q\xf8[\xf6X\x02\x89\x04\xd4\xfeM\xff;\x05\xb6\xfe3\xf8\xbb\x00\xea\x04\xa5\x04U\xff\x98\xfc`\xfa\xf4\x02\x18\n\x92\x01\xe6\xf6\x9e\xfa\x9e\x022\x01A\x02\xd5\x01\xe9\x02D\xfbi\xfbb\x01\xae\x03V\x00!\xfd\x87\x00^\x03\xa2\x00\x07\xfc\xf3\x00\xcd\x00\xd2\xfeO\x00\xca\x02\xef\xfd\x02\xfc\x14\x02\xe9\x05S\x01\x17\xfa\xaf\xfc\xf2\x00j\xff\x0e\x03\x15\x06[\xfe\xaf\xf5z\xfdu\x03Y\x06\t\x01\x14\xf6\x9a\x00\x05\x05/\x04\xbb\xfd\xd0\xfcY\xfer\xfeh\x03\x9a\x03\x98\x00\x8f\xfb\xbe\x00x\x03\x1c\xff\xf8\xffc\x00:\x00\xd4\xfa\xa3\xfd_\t\x14\x08\xb5\xfc?\xf2$\xfb\x19\x08b\x04\xd0\x00T\xfb\xe9\xfa\xa5\xfe\xf6\x03\xd1\x05M\xfc\xb9\xf8N\xfe\x08\x04V\x03.\x03\xf8\x01t\xfa\x8d\xf82\x00\x96\x06\xd9\x04y\x00\x08\xfc\x97\xfb\x99\x02e\x06u\xff\xc0\xfbG\xfb\x14\xfe\xa3\x06M\x06\xcc\xffm\xfc\xf0\xfb\xbe\xfe\x14\x01\xaf\x00&\x03\xcf\x02\xeb\xf9%\xfa2\x05B\x06\xc4\xff\x99\xfa\x9f\xf7,\xff\x9a\x06\xd1\x04e\x01B\xfb\xb7\xfc\xb7\xfd\x1e\x03\xab\x04\xce\x02\x86\xf9+\xf8\x84\x04\x0c\x08`\x05s\xfb\xd6\xf8\xc5\xfe~\x01\x1a\x02e\x05\xd3\xfe@\xf9)\x00a\x06\x1f\x00\xf8\xfe\x83\xfe\xeb\xf7f\xff,\t\xb8\x05{\xfe\xa5\xfa\x16\xfb~\xff\xc5\x02\xdf\x03\n\x01p\xfe\xbc\xfa\xf9\xff)\x05D\x02\xcd\x00H\xfb\x1c\xfa\xaa\x01\xf1\x05\xe2\x00
s\xfd\xfb\xfd\t\x01&\x01m\xff\xbf\xfdK\xff\xf1\x00\xe7\x01\x97\x02S\xfc\n\xff4\xffK\xfd%\x02\x92\x04\xac\x01\xff\xfa!\xfc\x8d\x01\x95\x02\xef\xff\x14\xfe\x83\xfeS\x00:\x026\x03%\x01\x18\xfb\xb8\xf8\x80\x00C\x07~\x05>\xfel\xfb\xf7\xff\x0c\x02\x84\x01\x98\x00}\xff\x82\xfd\x15\x00\xed\x01L\x03\xca\x01\x07\xfem\xfc5\xffW\xff\xca\xffI\x04d\x02\x89\xfc]\xfa\xda\x03\x9d\x04\x8f\xfcD\xff\x06\x00\x86\xfd\xa8\x03\x9f\x03q\xfe1\xfd_\xff;\x010\x04\xa8\x01\x87\xfe^\xfe\x0c\xfd\xec\x00I\x03\xa8\x02\xea\x01\xc7\xfd\xe2\xfa\xda\xfe \x04\xfb\x02\x04\xfeh\xfa\xfd\xfcZ\x00\xb2\x00\xf6\x01|\x02n\xfd\xf3\xf8n\xfe\xf0\x02\x9e\x04\x07\x04\xc9\xfc<\xfa`\x03\x0c\x05\x99\x00\xa7\xff\xd6\xfd[\x00s\x01\xd0\x00\x16\xffO\x00d\xffN\xfc\x93\xffw\x02O\x01\xa5\xff\xac\xfc\xae\xfeS\x04W\x00V\xfa\x82\xfdp\x00\t\x01X\x02\x93\x01\x00\xfd\x86\xfcT\x02`\x02\xdb\xfd>\x00\x8f\x02\xdc\xfeb\x00\xa2\x03I\x01\xe9\xfc\xcf\xfc\x02\x02)\x02\xe1\xfed\x00.\x02\x7f\x00\x05\xfd\xeb\xff<\xff\x7f\xfe\x9c\x03h\x00I\xfc@\xfd\x9b\x02\xa8\x04\xe9\x01-\xfd\xb5\xfb\xfa\xfc\xc3\x02\xf8\x03,\x02:\x02m\xff\x1d\xfd\x84\xfd\x88\x02@\x02\xe2\x00K\xfeb\xfe\x07\x01\xc3\x02\xe3\x01\xb1\xfd\xb3\xfc\t\xfdF\xff\xf6\xff\xb3\x00c\x03\x19\x02\xe1\xfc\xa5\xfaS\xfe\x15\x01s\x01:\x02h\xfe\xa4\xfc\x16\x01B\x03~\x00\xb4\xfey\xfd\x8f\xfc/\x01\xd7\x03\x14\x02\xb3\xfd~\xfe\x81\x000\x01\x05\x01R\xffv\xff\xba\xfd\x15\x01\x92\x03\xa2\x02\xf5\x00\n\xfe>\xfc\xe3\xfd\x19\x03E\x04\xa3\x00\x18\xfd\x06\xfe\x91\xff_\x01\x1e\x03\x9f\xfe\xb1\xfb\xc2\xfdU\x01\x81\x02@\x01\xb3\xff\xab\xfd)\xfe\xa7\xff\xca\x00w\x00~\xff\xc0\xffm\x00\xc6\xff\xb3\x00\x00\x03\xd5\xfe\x83\xfc\xa8\xfeN\x00\xfa\x00\xd0\x02\xa4\x024\xfd\x1f\xfe\xb9\x00\x9c\xffb\x00n\x00y\xff`\xff\x8a\x00\x88\x01\x06\x00\t\xff\xfa\xff\x81\xfe\x19\xfe\xf6\xff0\x01$\x01\xc0\xff\x87\xfe\xc7\xff,\x00\x11\x00\xc1\xff\x1c\xff`\xff\xca\x00*\x00>\xff\xc0\x01)\x01U\xfek\xfej\xff\xd2\x00\x9c\x02\xf3\x01j\xff\xd8\xfd\xcf\xfe\xb6\x01\xb8\x01\x94\xff\x9a\xff\xfa\xfe\x99\xff\x95\x01\xa1\x01S\x00\x87\xfe=\xfe\xe9\xfes\x00\xd7\x00\xd6\x01{\x00\xae\xfc\x85\xff\x83\x01\xad\x00!\x00\xdf\xfdU\xfe\xac\x00\xa5\x02\xb4\x01]\xffl\xfe7\xfe\xcd\xff\xeb\x00h\x00\x0b\x00\xbc\xff_\x00H\x01\x8c\x00\xdb\xff\x10\xff\x04\xfe\xe6\xffq\x01\xdf\x00\xfe\x00\x12\x01\xae\x00C\xff\xe8\xfd\x1d\xfe\xa2\xffc\x01\xad\x01\x02\x01\xd4\xff>\xff\xda\xff\xae\xff{\xff\xdb\xff#\x00\xd0\x00%\x01\x17\x00/\x01(\x02E\x00\xa6\xfd\xff\xfc\xa9\xff\xdd\x01\n\x02\x8d\x00\xb0\xfe\xe6\xfe\x0f\x00\xb1\x00#\x00\x1f\xff\x88\xfe\x19\xff\xde\x00\x90\x01\x90\x01\x95\x00\x83\xfe\xa6\xfd\x83\xfe\xbe\x00\xbf\x01\x16\x01F\x00W\xff\x8e\xff\x7f\x00T\x01\xd0\x00Q\xff\xf3\xfe\xb0\xff`\x01J\x01\xed\x00\x9e\xff\x7f\xfd\x03\xfe\x14\x00\xff\x00\x87\x00\xe7\xffY\xff\xe4\xff\xca\x00\x89\x00\x91\xff\xf3\xfe\xc5\xfe\x8a\x00\x93\x01\xaf\x00\x85\x00\x91\x00\xed\xffj\xff\xb1\xff\x93\x00c\x00\xdb\xff_\x00R\x01\x06\x01u\xff\x11\xff\'\xff\xa3\xff\xb0\xff\\\xffq\xffr\xff/\x00\xdc\x00\xd7\xff\xd2\xfe\x80\xfe\xb7\xfel\x00\xab\x01\x10\x01\x10\x00|\xffT\xff\xbc\xff>\x00\xc4\x00\xc4\x00Q\x00\x82\xff\x85\xff^\x00\xb6\x00\x1b\x00%\xff\x0b\xff\xaa\xff\x97\x004\x01\x8e\x00R\xff\xe1\xfeS\xff\xe3\xff+\x00\xff\xff\x02\x00\xe6\xff\xd5\xff\x91\xff8\xffx\xff\xf6\xffE\x00\x08\x00\xed\xff\x83\xff\xaf\xff\xb0\x00\xef\x00\xaf\xff\xcd\xfe\x98\xffs\x00\xc0\x00\x1c\x01\xa0\x00\xf7\xfe\xb4\xfe\xd1\xff\x03\x008\x00\xb7\x00\x89\xff&\xfe^\xff@\x01m\x01O\x00\xa9\xfe\xfe\xfd>\xff\xff\x00}\x01\xa0\x00L\xff\x8e\xfeD\xff8\x00\xe5\x00\xd1\x00C\xff\xfc\xfe\x00\x00\x1b\x01\x10\x01\xa3\xff\xc7\xfee\xff\x89\x00\x03\x01u\x00\xa7\xff\x99\xff\xd9\xff3\x00,
\x00\xc4\xffM\xff\x83\xff5\x00h\x00\x04\x00\xc6\xff\x92\xff`\xff\x8d\xffz\xffg\xff\xfa\xff\x82\x00{\x00\x1b\x00\xbc\xff\x93\xffs\xff\xbd\xff6\x00\x9f\x00\x8b\x00\xe1\xff\xf8\xff\x85\x00\xc1\x00,\x00*\xff\xef\xfe\x14\x00>\x01\x19\x01N\x00\xa9\xff\x7f\xfff\xff*\x00\xab\x00\x00\x00\xb1\xff\xe5\xff?\x00\xcc\x00\xcd\x00z\xff~\xfe;\xff\x91\x00\x00\x01\xc3\x00A\x00\x86\xff\xe2\xfe\x91\xff\xea\x00\xb3\x00\xfa\xff{\xff\xa1\xffM\x00\xe5\x00\xda\x00\xf2\xff\x01\xff\xcb\xfe\xc8\xff\xdb\x00\xd2\x00\\\x00\xd4\xffC\xffm\xff\xf6\xff\xf4\xff\xc7\xff\x9b\xff\xaf\xff<\x00\x9a\x00d\x00\xd9\xffT\xffQ\xff\x0e\x00\xea\x00\xc3\x001\x00V\x00\x7f\x009\x00F\x00\x11\x00}\xff\x8c\xff\x95\x006\x01\xd1\x00\x04\x00\x1e\xff\xae\xfeV\xff\x8e\x00\xfc\x00\x8a\x00\x16\x00_\xffr\xff4\x00o\x00<\x00z\xffE\xff\x17\x00/\x01$\x01F\x00h\xff\x15\xff\xb2\xff\xcf\x00M\x01\xf6\x00q\x00\xf6\xff\xcc\xff0\x00k\x00\xb8\xffM\xff\x97\xff!\x00\x97\x00o\x00\xbf\xff\x0c\xff\xef\xfea\xff\xd8\xff\x15\x00\x00\x00\xc5\xff\xba\xff\xcb\xff\xed\xff\xf8\xff\xbf\xff\x94\xff\xa4\xff\xf9\xffT\x00m\x00-\x00\xa7\xff\x8b\xff\xda\xff?\x00\xbc\x00\xc5\x00b\x00\xf4\xff\xec\xff\xf4\xff\xde\xff\xe1\xff\xe9\xff\xfa\xff\n\x00(\x00.\x00\xd2\xffS\xffx\xff\xe8\xff\x0c\x00\x10\x003\x002\x005\x00U\x00\xff\xff\xc0\xff\xdb\xff\x07\x008\x00j\x00s\x00c\x00L\x00\x18\x00\xae\xff\x96\xff\xf4\xff\x05\x001\x00\x8a\x00W\x00\xbf\xff\x88\xff\xbf\xff\xd5\xff\xfe\xff\x03\x00\xc2\xff\xc6\xff9\x00\xa5\x007\x00_\xff\xf6\xfe`\xff$\x00Y\x00\xf0\xff\xae\xff\x9e\xff\xb7\xff\xee\xff\xdf\xff\x89\xffG\xff\x93\xff\xe9\xffJ\x00g\x00\xf8\xff\x95\xff\xa6\xff\xe5\xff\x18\x00.\x00\x0b\x00\xb7\xffx\xff\xec\xffb\x009\x00\x94\xffR\xff\xa0\xff\xcf\xff\x1e\x00U\x00\xf4\xffp\xff\x92\xff\x00\x008\x001\x00\xf5\xff\x8f\xff\x86\xff\xe2\xff:\x005\x00\xe4\xff\x9f\xff\xa1\xff\xeb\xffV\x00K\x00$\x00!\x00D\x00t\x00g\x00\x1e\x00\xf4\xff\x0b\x00F\x00e\x00<\x00\xdd\xff\xb0\xff\xca\xff\xf5\xff\xff\xff\xca\xff\x9b\xff\xa1\xff\xe5\xff:\x00N\x00\xec\xfft\xffs\xff\xd3\xffA\x00;\x00\xee\xff\x98\xffw\xff\xde\xff]\x00$\x00~\xffW\xff\xaa\xff!\x00r\x002\x00\xb3\xffi\xff\xb0\xff3\x00_\x00 \x00\xd8\xff\xc3\xff\xd8\xff*\x00~\x00I\x00\xb5\xff\\\xff\x82\xff\xfc\xffi\x00w\x00\x0c\x00\x8c\xff\x84\xff\xf2\xff|\x00\x8b\x00.\x00\xe0\xff\xfd\xff\\\x00\xa5\x00\x93\x00\'\x00\xd1\xff\xce\xff\x12\x00Y\x00K\x00\x0e\x00\xe4\xff\xd0\xff\xf9\xff/\x00 \x00\xde\xff\xc2\xff\x03\x005\x00?\x00-\x00\r\x00\xee\xff\xf0\xff*\x00S\x00C\x00\r\x00\xf2\xff\x13\x00T\x00}\x00M\x00\xc4\xff\x9c\xff\x05\x00r\x00f\x00$\x00\xf5\xff\xc8\xff\xf2\xffH\x00M\x00\xf0\xff\xb7\xff\xda\xff\t\x00E\x00y\x008\x00\xb9\xff\x9a\xff\xef\xffF\x00M\x00\t\x00\xd4\xff\xdc\xff&\x00]\x00?\x00\xf0\xff\xdb\xff+\x00S\x00M\x00A\x00 
\x00\xee\xff\xff\xffA\x00:\x00\xf2\xff\xc0\xff\xcd\xff\xf5\xff\x1b\x00\x1e\x00\xfc\xff\xcf\xff\xd3\xff\x01\x00\n\x00\xdb\xff\xd5\xff\xef\xff\x00\x00\x07\x00\xfc\xff\xe6\xff\xc3\xff\xd5\xff\xe6\xff\xfb\xff\x06\x00\xf1\xff\xd3\xff\xf0\xffG\x002\x00\xf8\xff\xd4\xff\xe7\xff\x15\x00F\x008\x00\xf6\xff\xbd\xff\xcb\xff\x15\x001\x00\x08\x00\xb2\xff\x8d\xff\xb7\xff\x07\x002\x00\x08\x00\xbe\xff\x9a\xff\xb5\xff\xfa\xffD\x00%\x00\xd8\xff\xb9\xff\xda\xff:\x00{\x008\x00\xbd\xff\xa6\xff\xdf\xff8\x00h\x004\x00\xaa\xff\x8c\xff\xf6\xff3\x003\x00\x04\x00\xc7\xff\xa4\xff\xea\xffB\x000\x00\xeb\xff\xb5\xff\xa8\xff\xce\xff\x12\x004\x00\x14\x00\xd9\xff\xbe\xff\xda\xff\x04\x00\x0f\x00\x06\x00\xc5\xff\xb4\xff\xf6\xff0\x008\x00\x0f\x00\xde\xff\xa9\xff\xc2\xff\xf0\xff\x00\x00\xf2\xff\xd2\xff\xbe\xff\xb2\xff\xd6\xff\xf7\xff\xfa\xff\xd8\xff\xc6\xff\xde\xff\x08\x00"\x00\x00\x00\xdb\xff\xcd\xff\xe9\xff\xf9\xff\x1b\x00%\x00\x04\x00\xf9\xff\x06\x00\x0c\x00\xff\xff\xf4\xff\xf4\xff\xed\xff\xe5\xff\xf1\xff\xfc\xff\xe6\xff\xc2\xff\xaf\xff\xb6\xff\xc6\xff\xdc\xff\xe2\xff\xd7\xff\xd3\xff\xd3\xff\xd3\xff\xd3\xff\xdf\xff\xe4\xff\xd4\xff\xea\xff\x03\x00\x07\x00\t\x00\x0b\x00\xf5\xff\xe1\xff\xee\xff\x16\x00*\x00\x1a\x00\x0b\x00\x04\x00\x07\x00\x0c\x00\n\x00\x02\x00\xfe\xff\xf8\xff\xf4\xff\x02\x00\x12\x00\t\x00\x03\x00\x02\x00\xf7\xff\xf4\xff\xfe\xff!\x00\x1e\x00\x05\x00\x0f\x00\x11\x00\x01\x00\xfc\xff\x06\x00\x01\x00\x01\x00\x06\x00\x03\x00\x05\x00\x07\x00\x00\x00\xfe\xff\xfc\xff\x00\x00\x10\x00\x0e\x00\t\x00\n\x00\x14\x00\x19\x00\x1a\x00\x11\x00\x00\x00\xfe\xff\x08\x00\x17\x00\x15\x00\x02\x00\xf2\xff\xfa\xff\x0f\x00\x1c\x00\x04\x00\xfc\xff\x11\x00\x1d\x002\x00=\x00A\x00(\x00\x14\x00\x1d\x00-\x001\x00*\x00\x19\x00\x0e\x00\x16\x00\x1f\x00.\x00\x1d\x00\xfe\xff\xf2\xff\x05\x00,\x005\x00(\x00\t\x00\xfb\xff\r\x00%\x003\x00%\x00\x13\x00\x11\x00\x16\x00"\x00+\x00!\x00\t\x00\x00\x00\x12\x00\x05\x00\x07\x00*\x00\x11\x00\xef\xff\xe4\xff\xf4\xff\xfb\xff\xfa\xff\xeb\xff\xd5\xff\xdf\xff\x0f\x00*\x00\x0c\x00\x00\x00\x03\x00\x05\x00\xfc\xff\xfd\xff\x07\x00\x05\x00\n\x00\x14\x00\x12\x00\x01\x00\xfe\xff\xf3\xff\xe9\xff\xe2\xff\xf8\xff\x05\x00\xff\xff\xfa\xff\xf7\xff\xf4\xff\xf0\xff\xf6\xff\xfc\xff\xfb\xff\xfe\xff\x04\x00\x01\x00\xfe\xff\x02\x00\xfe\xff\xf5\xff\xeb\xff\xef\xff\xfc\xff\x00\x00\xf6\xff\xdf\xff\xd6\xff\xea\xff\xf7\xff\xf9\xff\xea\xff\xd7\xff\xdc\xff\xf8\xff\x06\x00\xfc\xff\xf2\xff\xfc\xff\xf4\xff\xfe\xff\x18\x00\x17\x00\xff\xff\xf5\xff\x01\x00\x08\x00\x0c\x00\r\x00\xfc\xff\xef\xff\xf0\xff\xf4\xff\xf4\xff\xe9\xff\xdf\xff\xcf\xff\xcf\xff\xe8\xff\xf1\xff\xf4\xff\xf0\xff\xed\xff\xe4\xff\xf1\xff\x03\x00\x06\x00\x00\x00\x04\x00\x0c\x00\xfb\xff\x0b\x00"\x00\x0e\x00\xf0\xff\xf8\xff\x0e\x00\x12\x00\x02\x00\x0e\x00\x05\x00\xdf\xff\xea\xff\xf3\xff\xec\xff\xda\xff\xd0\xff\xc9\xff\xc4\xff\xc6\xff\xc9\xff\xbc\xff\xaa\xff\xb6\xff\xc1\xff\xc8\xff\xd3\xff\xc8\xff\xce\xff\xd3\xff\xdf\xff\xec\xff\xeb\xff\xea\xff\xf2\xff\x00\x00\t\x00\x10\x00\n\x00\xf8\xff\xf6\xff\xf7\xff\xf1\xff\xf5\xff\xf9\xff\xf3\xff\xea\xff\xe4\xff\xef\xff\xe3\xff\xd8\xff\xd6\xff\xda\xff\xe9\xff\xf1\xff\xf7\xff\xf0\xff\xe5\xff\xde\xff\xeb\xff\xfe\xff\xfb\xff\xfa\xff\xfe\xff\xfd\xff\x00\x00\n\x00\x0c\x00\xfe\xff\xf2\xff\xfb\xff\x05\x00\x15\x00\x1d\x00\x0c\x00\x03\x00\x05\x00\x0c\x00\r\x00\x10\x00\x11\x00\x10\x00\x0b\x00\x12\x00\x12\x00\x08\x00\x06\x00\x00\x00\x03\x00\x0f\x00"\x00\x1e\x00\x0f\x00\t\x00\x07\x00\x06\x00\x15\x00\x16\x00\x00\x00\xf6\xff\x00\x00\x13\x00\x0b\x00\xf9\xff\xe9\xff\xe1\xff\xe3\xff\xf7\xff\x00\x00\xee\xff\xd9\xff\xd8\xff\xec\xff\xf0\xff\xf7\xff\xfa\xff\xf6\xff\xf8\xf
f\x05\x00\x16\x00\x14\x00\t\x00\x04\x00\x04\x00\x0e\x00 \x00,\x00&\x00\x1f\x00\x19\x00%\x00)\x00\x1b\x00\x10\x00\n\x00\x07\x00\r\x00\x18\x00\x04\x00\xfc\xff\t\x00\x07\x00\x05\x00\x0b\x00\t\x00\r\x00\x03\x00\x02\x00\x05\x00\r\x00\x1e\x00 \x00 \x00\x1d\x00\'\x00"\x00\x13\x00\x12\x00\x18\x00\x14\x00\x13\x00\x12\x00\x1c\x00 \x00\x1f\x00#\x00\x18\x00\x17\x00\x1a\x00\x1a\x00\x18\x00 \x00\x1c\x00\x10\x00\x13\x00\x04\x00\xf9\xff\xfe\xff\x0e\x00\x11\x00\t\x00\x05\x00\x00\x00\xfe\xff\x08\x00\t\x00\xfe\xff\xf8\xff\xf8\xff\x00\x00\x00\x00\x00\x00\x08\x00\xfb\xff\xfc\xff\x05\x00\x04\x00\x00\x00\xfe\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xfb\xff\x01\x00\x01\x00\xf6\xff\xf6\xff\xfc\xff\xfc\xff\xf7\xff\xf9\xff\xfc\xff\xf9\xff\xf8\xff\xff\xff\xfe\xff\xf7\xff\xf4\xff\xed\xff\xe8\xff\xed\xff\xec\xff\xe3\xff\xe8\xff\xe4\xff\xde\xff\xe1\xff\xe7\xff\xde\xff\xdc\xff\xe2\xff\xe1\xff\xe7\xff\xf6\xff\xfc\xff\xfb\xff\x00\x00\x0c\x00\x07\x00\x04\x00\x06\x00\xfd\xff\xfd\xff\xfe\xff\xfb\xff\xf8\xff\xed\xff\xee\xff\xf3\xff\xf1\xff\xef\xff\xeb\xff\xea\xff\xeb\xff\xf3\xff\xfc\xff\xfa\xff\xf7\xff\xfb\xff\x05\x00\x06\x00\x00\x00\xfe\xff\xff\xff\xfa\xff\xf3\xff\xfa\xff\x01\x00\xfe\xff\xfb\xff\x00\x00\xf7\xff\xf9\xff\xfa\xff\x02\x00\x06\x00\x05\x00\x02\x00\xff\xff\x05\x00\xfd\xff\xfd\xff\xf5\xff\xf4\xff\xf3\xff\xea\xff\xe9\xff\xe6\xff\xe4\xff\xdb\xff\xd6\xff\xdb\xff\xe5\xff\xe4\xff\xe3\xff\xd4\xff\xd4\xff\xe1\xff\xdd\xff\xe6\xff\xe8\xff\xf1\xff\xf5\xff\xf4\xff\xf6\xff\xf4\xff\xf4\xff\xe8\xff\xe5\xff\xeb\xff\xef\xff\xf1\xff\xf1\xff\xe4\xff\xdb\xff\xe0\xff\xe2\xff\xe9\xff\xec\xff\xeb\xff\xeb\xff\xe9\xff\xf0\xff\xf8\xff\xf0\xff\xee\xff\xf5\xff\xfb\xff\xf6\xff\xfc\xff\x00\x00\xfa\xff\x01\x00\x00\x00\x05\x00\x06\x00\x03\x00\xff\xff\xf6\xff\x03\x00\x06\x00\xfe\xff\x02\x00\x06\x00\xfc\xff\xfc\xff\x00\x00\xfe\xff\x00\x00\xfe\xff\xf9\xff\xfc\xff\x03\x00\x03\x00\x00\x00\xff\xff\x02\x00\x10\x00\x11\x00\x10\x00\x13\x00\x1c\x00"\x00\x1b\x00\x14\x00\x13\x00\x11\x00\x10\x00\x0b\x00\x00\x00\xfc\xff\xfd\xff\x00\x00\xfc\xff\xf5\xff\xf2\xff\xf7\xff\xf9\xff\xfe\xff\t\x00\x0c\x00\x12\x00\x10\x00\x06\x00\x02\x00\r\x00\r\x00\x08\x00\x03\x00\x04\x00\x05\x00\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xf7\xff\xff\xff\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\xfd\xff\xfe\xff\x01\x00\x07\x00\x0c\x00\x12\x00\x18\x00\x11\x00\x14\x00#\x00\x1f\x00\x1f\x00(\x00\x1f\x00\x1e\x00&\x00\x1f\x00\x17\x00\x11\x00\x15\x00\x16\x00\x14\x00\x16\x00\x13\x00\r\x00\x15\x00\x1b\x00\x0f\x00\x16\x00 
\x00\x1c\x00\x16\x00\x13\x00\x0f\x00\x0f\x00\x0f\x00\x10\x00\x0e\x00\n\x00\n\x00\x02\x00\x05\x00\x04\x00\x04\x00\x00\x00\x00\x00\x04\x00\x05\x00\x04\x00\xfd\xff\xf8\xff\xfb\xff\xfa\xff\xfa\xff\xf8\xff\xf8\xff\xfb\xff\xfa\xff\xf5\xff\xf7\xff\xf7\xff\xfa\xff\xf6\xff\xf6\xff\xfa\xff\xf9\xff\xf9\xff\xf8\xff\xfb\xff\xff\xff\x00\x00\xfb\xff\xfb\xff\x04\x00\x04\x00\xfd\xff\xfa\xff\x05\x00\x01\x00\x07\x00\x13\x00\r\x00\x03\x00\x04\x00\x01\x00\xff\xff\xfc\xff\xfd\xff\xfb\xff\x02\x00\t\x00\xff\xff\xfd\xff\xfd\xff\xfd\xff\xf4\xff\xee\xff\xfb\xff\x01\x00\xfb\xff\xfb\xff\x00\x00\xff\xff\x01\x00\x06\x00\x07\x00\x00\x00\x02\x00\x07\x00\x00\x00\x01\x00\xfb\xff\xf3\xff\xee\xff\xee\xff\xf1\xff\xf4\xff\xee\xff\xf5\xff\xfb\xff\xef\xff\xea\xff\xe8\xff\xef\xff\xec\xff\xf2\xff\xf1\xff\xec\xff\xee\xff\xeb\xff\xe7\xff\xe4\xff\xed\xff\xef\xff\xec\xff\xec\xff\xea\xff\xef\xff\xeb\xff\xe7\xff\xed\xff\xf7\xff\xf9\xff\xff\xff\xfe\xff\xf3\xff\xef\xff\xee\xff\xf3\xff\xf0\xff\xee\xff\xee\xff\xea\xff\xe6\xff\xe6\xff\xe9\xff\xe7\xff\xf0\xff\xe9\xff\xe3\xff\xe7\xff\xe7\xff\xec\xff\xef\xff\xf1\xff\xf4\xff\xf5\xff\xf1\xff\xf4\xff\x00\x00\xf8\xff\xf9\xff\x00\x00\xfb\xff\xfc\xff\xfd\xff\x00\x00\xf8\xff\xf9\xff\xfb\xff\xf4\xff\xef\xff\xf8\xff\xf6\xff\xf4\xff\xf8\xff\xf9\xff\xfb\xff\xfa\xff\xfc\xff\xf8\xff\xff\xff\x00\x00\xfd\xff\xf7\xff\xf6\xff\xf8\xff\xfc\xff\x06\x00\x04\x00\x02\x00\x08\x00\x05\x00\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfb\xff\xf8\xff\xef\xff\xf2\xff\xf2\xff\xf1\xff\xf5\xff\xec\xff\xec\xff\xf0\xff\xef\xff\xec\xff\xec\xff\xf1\xff\xf6\xff\xf4\xff\xf5\xff\xfa\xff\xfe\xff\x05\x00\x04\x00\x04\x00\x05\x00\x03\x00\x03\x00\x04\x00\t\x00\x0c\x00\t\x00\t\x00\t\x00\x08\x00\x08\x00\t\x00\r\x00\r\x00\x0c\x00\n\x00\t\x00\x08\x00\x08\x00\x0c\x00\x0c\x00\x0b\x00\n\x00\x08\x00\n\x00\x0c\x00\x03\x00\x04\x00\x00\x00\x05\x00\x0e\x00\r\x00\x0c\x00\r\x00\x15\x00\x0e\x00\t\x00\x0e\x00\x11\x00\x0b\x00\x0b\x00\t\x00\x0e\x00\x15\x00\x14\x00\x15\x00\x0e\x00\x0f\x00\x14\x00\x16\x00\x12\x00\x16\x00\x19\x00\x12\x00\x11\x00\x0b\x00\x01\x00\x01\x00\n\x00\x0b\x00\x0f\x00\x14\x00\x12\x00\x10\x00\x17\x00\x14\x00\x16\x00\x15\x00\n\x00\x0b\x00\x0f\x00\r\x00\x0f\x00\n\x00\r\x00\x10\x00\r\x00\r\x00\x10\x00\r\x00\x04\x00\x00\x00\x07\x00\n\x00\n\x00\n\x00\x08\x00\x05\x00\x02\x00\x06\x00\x05\x00\x07\x00\x04\x00\x02\x00\x05\x00\x02\x00\xff\xff\xfa\xff\xfa\xff\xf9\xff\xf4\xff\xf3\xff\xf6\xff\xf4\xff\xf7\xff\xf7\xff\xf8\xff\xf6\xff\xf8\xff\xee\xff\xea\xff\xee\xff\xf2\xff\xf6\xff\xfa\xff\xfc\xff\x00\x00\x01\x00\x05\x00\x06\x00\x04\x00\x03\x00\x00\x00\x01\x00\xfd\xff\xfb\xff\xfa\xff\xf4\xff\xf4\xff\xf7\xff\xf6\xff\xf3\xff\xf3\xff\xf2\xff\xf3\xff\xf5\xff\xf4\xff\xf4\xff\xfc\xff\xfe\xff\x00\x00\x02\x00\xfe\xff\xfa\xff\xfc\xff\xfc\xff\xf7\xff\xf8\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xfc\xff\xfd\xff\xf6\xff\xf5\xff\xf5\xff\xfa\xff\xfc\xff\xfa\xff\xfc\xff\xf9\xff\xfa\xff\xf4\xff\xf6\xff\xf4\xff\xee\xff\xee\xff\xf0\xff\xee\xff\xed\xff\xea\xff\xea\xff\xea\xff\xe5\xff\xeb\xff\xe8\xff\xed\xff\xf2\xff\xed\xff\xf3\xff\xf1\xff\xf5\xff\xf4\xff\xf7\xff\xfa\xff\xf2\xff\xf6\xff\xf0\xff\xea\xff\xef\xff\xf2\xff\xef\xff\xec\xff\xe6\xff\xe3\xff\xe3\xff\xe1\xff\xe5\xff\xec\xff\xe6\xff\xe6\xff\xea\xff\xee\xff\xf4\xff\xf3\xff\xf5\xff\xfb\xff\xfd\xff\xf9\xff\xfd\xff\xfa\xff\xf5\xff\xfa\xff\xf9\xff\x00\x00\xfc\xff\xff\xff\xff\xff\xf3\xff\xfb\xff\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\x00\x00\xff\xff\xfc\xff\x00\x00\xff\xff\xfe\xff\x01\x00\x04\x00\xff\xff\xfc\xff\xff\xff\x02\x00\x03\x00\x05\x00\x03\x00\x03\x00\x06\x00\x07\x00\x04\x00\x03\x00\x07\x00\x03\x00\x00\x00\xfe\xff\xf9\xff\xfb\xff\xf
9\xff\xfa\xff\xfa\xff\xf7\xff\xf4\xff\xf5\xff\xf4\xff\xf9\xff\xfe\xff\x00\x00\x07\x00\x05\x00\x04\x00\x02\x00\x04\x00\x08\x00\x03\x00\x01\x00\x01\x00\x04\x00\x01\x00\x02\x00\x05\x00\x02\x00\x00\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\n\x00\x02\x00\x04\x00\x05\x00\x07\x00\x04\x00\x07\x00\x02\x00\x00\x00\x04\x00\x0c\x00\t\x00\x08\x00\x0c\x00\x0b\x00\r\x00\r\x00\x0c\x00\x0c\x00\x0f\x00\x10\x00\x11\x00\x12\x00\x13\x00\x18\x00\x16\x00\x18\x00\x15\x00\x13\x00\x14\x00\x13\x00\x12\x00\x0f\x00\r\x00\t\x00\x07\x00\x06\x00\x06\x00\x06\x00\x04\x00\x00\x00\xfc\xff\x01\x00\x03\x00\x07\x00\x02\x00\x02\x00\x07\x00\x07\x00\t\x00\x07\x00\t\x00\t\x00\x06\x00\x08\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xff\xff\x02\x00\x04\x00\x07\x00\x06\x00\x02\x00\x02\x00\x04\x00\x01\x00\x01\x00\x05\x00\x03\x00\xff\xff\xfa\xff\xff\xff\xff\xff\xfa\xff\xf9\xff\x00\x00\xff\xff\xfd\xff\x01\x00\x00\x00\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\x02\x00\x02\x00\x06\x00\x08\x00\x02\x00\x01\x00\x04\x00\x07\x00\x03\x00\x00\x00\x05\x00\x03\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x02\x00\xfe\xff\xf9\xff\xfa\xff\xfb\xff\xfd\xff\xf8\xff\xf5\xff\xf3\xff\xed\xff\xf2\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xf5\xff\xfe\xff\xfd\xff\xff\xff\x04\x00\x00\x00\xfa\xff\xfa\xff\xfc\xff\xf6\xff\xf4\xff\xf8\xff\xf8\xff\xfa\xff\xfa\xff\xf6\xff\xfa\xff\xfc\xff\xfe\xff\x03\x00\x00\x00\xf7\xff\xf4\xff\xf4\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf4\xff\xf1\xff\xf3\xff\xf4\xff\xf2\xff\xf0\xff\xef\xff\xef\xff\xeb\xff\xee\xff\xee\xff\xf0\xff\xf2\xff\xf3\xff\xf5\xff\xf2\xff\xf2\xff\xf7\xff\xf5\xff\xf1\xff\xf6\xff\xf9\xff\xfa\xff\xf3\xff\xf1\xff\xef\xff\xed\xff\xe8\xff\xe9\xff\xea\xff\xf1\xff\xf1\xff\xf0\xff\xf7\xff\xf6\xff\xfc\xff\xf7\xff\xf8\xff\xf7\xff\xfd\xff\x00\x00\xf6\xff\xf6\xff\xfb\xff\xfb\xff\xfe\xff\x04\x00\xfe\xff\xf9\xff\x01\x00\xff\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfb\xff\xfc\xff\xfc\xff\xff\xff\xfb\xff\xfc\xff\xfe\xff\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xf9\xff\xfa\xff\xfd\xff\xfe\xff\xfd\xff\x00\x00\xff\xff\x00\x00\x03\x00\x02\x00\x03\x00\x02\x00\x03\x00\x04\x00\x04\x00\x06\x00\x05\x00\x03\x00\x01\x00\xff\xff\xff\xff\xfa\xff\xfa\xff\xfd\xff\xfe\xff\xfc\xff\xfe\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfd\xff\xf8\xff\xf9\xff\xf6\xff\xf3\xff\xfa\xff\xfa\xff\xf6\xff\xf8\xff\xfc\xff\xfa\xff\xfa\xff\xf9\xff\xfb\xff\xf9\xff\xff\xff\xff\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\xfc\xff\x00\x00\x01\x00\x06\x00\x00\x00\x03\x00\t\x00\t\x00\t\x00\x06\x00\x04\x00\x02\x00\x0c\x00\t\x00\t\x00\x0f\x00\x12\x00\x13\x00\x17\x00\x19\x00\x1d\x00\x1d\x00\x16\x00\x16\x00\x18\x00\x19\x00\x15\x00\x16\x00\x16\x00\x14\x00\x13\x00\x17\x00\x16\x00\x10\x00\r\x00\n\x00\x0b\x00\t\x00\x04\x00\x03\x00\x04\x00\x03\x00\x03\x00\x05\x00\x02\x00\x03\x00\x04\x00\x02\x00\x02\x00\xff\xff\xfc\xff\xfb\xff\xff\xff\xff\xff\xfb\xff\xfe\xff\x01\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xff\xff\x02\x00\x00\x00\x00\x00\x01\x00\x04\x00\x06\x00\x05\x00\x08\x00\x05\x00\x04\x00\x06\x00\x05\x00\x01\x00\x00\x00\x02\x00\xfc\xff\xfd\xff\xff\xff\xfe\xff\xfa\xff\xfa\xff\xf8\xff\xf4\xff\xfa\xff\xfe\xff\xfa\xff\xfe\xff\xfe\xff\x01\x00\x01\x00\xfc\xff\xfd\xff\xfd\xff\xfc\xff\xfb\xff\xfb\xff\xfd\xff\x00\x00\xfd\xff\xfc\xff\x00\x00\x00\x00\xfb\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf9\xff\xf8\xff\xf6\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xf6\xff\xf8\xff\xf7\xff\xf7\xff\xf7\xff\xfa\xff\xfb\xff\xfa\xff\xfc\xff\x01\x00\xfc\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xf7\xff\xfa\xff\xfa\xff\xf6
\xff\xf7\xff\xf9\xff\xf6\xff\xf4\xff\xf3\xff\xf0\xff\xf2\xff\xf0\xff\xed\xff\xf2\xff\xf2\xff\xf2\xff\xf6\xff\xf6\xff\xf7\xff\xf5\xff\xf3\xff\xf6\xff\xf8\xff\xf7\xff\xf4\xff\xf3\xff\xef\xff\xf2\xff\xf0\xff\xf1\xff\xef\xff\xf2\xff\xf4\xff\xeb\xff\xef\xff\xf3\xff\xf5\xff\xf3\xff\xf6\xff\xf7\xff\xf3\xff\xf5\xff\xf5\xff\xf6\xff\xf5\xff\xf4\xff\xf5\xff\xf7\xff\xf3\xff\xf3\xff\xf4\xff\xf8\xff\xf8\xff\xf6\xff\xfa\xff\xfd\xff\xfa\xff\xf8\xff\xfb\xff\xfc\xff\xfe\xff\xfa\xff\xf8\xff\xf6\xff\xf7\xff\xf9\xff\xf8\xff\xf8\xff\xf9\xff\xf9\xff\xf9\xff\xfc\xff\xf8\xff\xf9\xff\xff\xff\xfe\xff\xfd\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x01\x00\x01\x00\x05\x00\xff\xff\xfe\xff\xff\xff\xfd\xff\xfa\xff\xfd\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\xfb\xff\xfc\xff\xfd\xff\xfe\xff\xff\xff\xfa\xff\xf9\xff\xfb\xff\x00\x00\x00\x00\x02\x00\x01\x00\x04\x00\x06\x00\x04\x00\x05\x00\x05\x00\x06\x00\x06\x00\x07\x00\t\x00\t\x00\t\x00\x03\x00\x01\x00\x04\x00\x02\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\x03\x00\x06\x00\x02\x00\x05\x00\x08\x00\x05\x00\x07\x00\t\x00\x08\x00\x05\x00\x06\x00\x0c\x00\x0c\x00\x08\x00\t\x00\x0b\x00\x0b\x00\t\x00\n\x00\x08\x00\x07\x00\n\x00\x04\x00\x06\x00\x08\x00\x08\x00\x08\x00\t\x00\x08\x00\x07\x00\x08\x00\x08\x00\x06\x00\x06\x00\t\x00\n\x00\x06\x00\x08\x00\n\x00\x07\x00\t\x00\x0e\x00\r\x00\x0c\x00\n\x00\x08\x00\x05\x00\x06\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\xfb\xff\xfd\xff\xfa\xff\xf7\xff\xfd\xff\xfd\xff\xf8\xff\xfc\xff\xfc\xff\xfe\xff\xfc\xff\xfb\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfb\xff\xff\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfd\xff\xfd\xff\xfb\xff\xfb\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xf7\xff\xf9\xff\xff\xff\x00\x00\x02\x00\x00\x00\xff\xff\xfb\xff\xfd\xff\xfe\xff\xf8\xff\xfa\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xf9\xff\xf5\xff\xf6\xff\xfa\xff\xf9\xff\xf7\xff\xf6\xff\xf2\xff\xf7\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf5\xff\xf7\xff\xf5\xff\xf5\xff\xf5\xff\xf9\xff\xf7\xff\xf0\xff\xf1\xff\xef\xff\xef\xff\xea\xff\xe8\xff\xea\xff\xed\xff\xee\xff\xf0\xff\xf7\xff\xf6\xff\xf8\xff\xf3\xff\xf1\xff\xf2\xff\xf4\xff\xf7\xff\xf5\xff\xf4\xff\xf5\xff\xf4\xff\xf6\xff\xf7\xff\xf4\xff\xf3\xff\xf6\xff\xf5\xff\xf1\xff\xf5\xff\xfa\xff\xf7\xff\xf7\xff\xf8\xff\xfa\xff\xfe\xff\xfc\xff\xfc\xff\xfa\xff\xf7\xff\xf6\xff\xf7\xff\xf7\xff\xfa\xff\xf7\xff\xf7\xff\xfd\xff\xfb\xff\xfb\xff\xfa\xff\xfb\xff\xff\xff\xff\xff\x00\x00\x02\x00\x03\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\xfe\xff\xfe\xff\xfd\xff\xff\xff\x01\x00\xfd\xff\xfb\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfc\xff\xf9\xff\xfa\xff\xfb\xff\xfc\xff\xfd\xff\xf8\xff\xf6\xff\xf4\xff\xf4\xff\xf9\xff\xf9\xff\xf6\xff\xfa\xff\xf9\xff\xf8\xff\xfb\xff\xfb\xff\xfa\xff\xf8\xff\xfb\xff\xfd\xff\xfb\xff\xfd\xff\xfb\xff\xf9\xff\xf9\xff\xfd\xff\xfe\xff\xfd\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x03\x00\x06\x00\x06\x00\x07\x00\x08\x00\x08\x00\n\x00\x0b\x00\x08\x00\t\x00\x08\x00\n\x00\x08\x00\x08\x00\x05\x00\x08\x00\t\x00\n\x00\x0b\x00\t\x00\x05\x00\x03\x00\x04\x00\x06\x00\x04\x00\x01\x00\x02\x00\x03\x00\x03\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xfc\xff\xfc\xff\x03\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x01\x00\x01\x00\x02\x00\x01\x00\xfd\xff\x02\x00\x03\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x01\x00\x03\x00\x00\x00\x00\x00\x04\x00\x04\x00\x04\x00\x06\x00\n\x00\x0
3\x00\x03\x00\x04\x00\x05\x00\x06\x00\x03\x00\x02\x00\x03\x00\x06\x00\x08\x00\x04\x00\x07\x00\t\x00\x08\x00\x07\x00\x07\x00\x08\x00\x07\x00\x07\x00\x07\x00\x08\x00\t\x00\t\x00\x07\x00\x05\x00\x06\x00\x05\x00\x01\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\xfe\xff\xfc\xff\xfe\xff\xfc\xff\xf9\xff\xf7\xff\xf7\xff\xf8\xff\xf7\xff\xf7\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf5\xff\xf6\xff\xf7\xff\xf8\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xfa\xff\xfc\xff\xfd\xff\xff\xff\xfb\xff\xfd\xff\xf8\xff\xf4\xff\xf6\xff\xf7\xff\xf5\xff\xf6\xff\xf4\xff\xf4\xff\xf4\xff\xf2\xff\xf4\xff\xf7\xff\xf6\xff\xf4\xff\xfa\xff\xfb\xff\xfa\xff\xf4\xff\xf7\xff\xf8\xff\xfa\xff\xf9\xff\xf6\xff\xfa\xff\xf8\xff\xf9\xff\xf8\xff\xf8\xff\xf8\xff\xfa\xff\xf8\xff\xf5\xff\xf8\xff\xf9\xff\xfc\xff\xfc\xff\xfe\xff\xff\xff\xfc\xff\xfb\xff\xfb\xff\xfc\xff\xf9\xff\xf8\xff\xf9\xff\xf8\xff\xf3\xff\xf2\xff\xf3\xff\xf3\xff\xef\xff\xf0\xff\xf2\xff\xf3\xff\xf2\xff\xf2\xff\xf7\xff\xf7\xff\xfa\xff\xf8\xff\xf7\xff\xf7\xff\xf5\xff\xf7\xff\xf8\xff\xf8\xff\xf9\xff\xfa\xff\xf9\xff\xfc\xff\xfa\xff\xfa\xff\xfc\xff\xfb\xff\xfd\xff\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xfb\xff\xff\xff\xfc\xff\xfb\xff\xfd\xff\xfb\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xf8\xff\xfc\xff\xfe\xff\xfd\xff\xfb\xff\xfd\xff\xfc\xff\xf8\xff\xf9\xff\xfa\xff\xf7\xff\xf7\xff\xfa\xff\xf9\xff\xfa\xff\xf6\xff\xf6\xff\xf7\xff\xfb\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\x00\x00\x00\x00\xfb\xff\xf9\xff\xfd\xff\xfc\xff\xfb\xff\xfa\xff\xfa\xff\xf7\xff\xf7\xff\xfc\xff\xfa\xff\xfc\xff\xfd\xff\xfd\xff\x00\x00\x00\x00\xfa\xff\xf8\xff\xfa\xff\xf7\xff\xf3\xff\xf6\xff\xf7\xff\xf6\xff\xf8\xff\xfa\xff\xf7\xff\xf7\xff\xf6\xff\xfb\xff\xfb\xff\xfc\xff\xfd\xff\xfd\xff\x03\x00\x00\x00\x04\x00\t\x00\x05\x00\x07\x00\t\x00\t\x00\x08\x00\t\x00\n\x00\n\x00\x05\x00\x02\x00\x02\x00\x02\x00\x00\x00\x01\x00\x01\x00\x02\x00\x07\x00\x05\x00\x06\x00\x04\x00\x01\x00\x00\x00\x02\x00\x02\x00\x00\x00\x05\x00\t\x00\x0b\x00\n\x00\x0b\x00\x0c\x00\x0b\x00\t\x00\n\x00\t\x00\n\x00\r\x00\x0e\x00\r\x00\x08\x00\x05\x00\x02\x00\x04\x00\x02\x00\x01\x00\x03\x00\x00\x00\xff\xff\x03\x00\x05\x00\x04\x00\x03\x00\x00\x00\x00\x00\x06\x00\t\x00\x04\x00\x01\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\x01\x00\x02\x00\x00\x00\x03\x00\x02\x00\x04\x00\x05\x00\x01\x00\x02\x00\x03\x00\x04\x00\x03\x00\x04\x00\x06\x00\x06\x00\x01\x00\xfe\xff\xfd\xff\xfb\xff\xfa\xff\xfb\xff\xfc\xff\xf8\xff\xf5\xff\xf6\xff\xf6\xff\xf5\xff\xf6\xff\xf9\xff\xf8\xff\xf5\xff\xf6\xff\xf4\xff\xf1\xff\xf4\xff\xf4\xff\xf2\xff\xee\xff\xee\xff\xef\xff\xee\xff\xf0\xff\xf2\xff\xf2\xff\xf5\xff\xef\xff\xf0\xff\xee\xff\xf1\xff\xf3\xff\xf1\xff\xf0\xff\xf3\xff\xf3\xff\xf3\xff\xf0\xff\xf4\xff\xf8\xff\xee\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +# --- +# name: test_pipeline_error + 
b'\'\xff\x9d\xfe\xc7\xfe\x92\xfe\x88\xfe\xe2\xfe\x02\x00\x9a\x00!\x00H\xff$\xff|\xff\x94\xff1\xff\xd6\xfe\xdf\xfe8\xffj\xff*\xff\xba\xfe\x99\xfe\xf1\xfe\\\xff\x87\xff\x84\xffs\xff?\xff\xf5\xfe\xce\xfe\xd7\xfe\x0e\xff\x8e\xff\xed\xff\xea\xff\xd2\xff\xcf\xff\xa4\xffP\xff\x1b\xff=\xff\x8e\xff\xbe\xff\xd1\xff\xe9\xff\x01\x00\xdf\xffe\xff\xc9\xfe\x88\xfe\xd6\xfe[\xff\x9e\xff\x9d\xff\x9c\xff\xbe\xff\xde\xff\xc5\xff\x95\xff\x98\xff\xc7\xff\xf0\xff\n\x00\x15\x00\xf3\xff\xba\xff\x9a\xff\xae\xff\xe5\xff\r\x00\x15\x00!\x00A\x00Z\x00[\x00A\x00\r\x00\xee\xff\r\x00V\x00\x8a\x00\x89\x00p\x00l\x00\x98\x00\xe2\x00\x13\x01\xff\x00\xc6\x00\xa9\x00\xae\x00\x9e\x00x\x00_\x00x\x00\xc9\x00\x10\x01%\x01)\x01\x1c\x01\xea\x00\xa1\x00j\x00\x85\x00\xf7\x00i\x01q\x01\x1e\x01\xe0\x00\xea\x00\n\x01\n\x01\xe0\x00\xb3\x00\xb3\x00\xeb\x00.\x01K\x01=\x01\xff\x00\xae\x00\x81\x00\x97\x00\xd6\x00\x10\x016\x01K\x010\x01\xe6\x00\x9f\x00^\x00\'\x00*\x00|\x00\xdf\x00\xfa\x00\xcc\x00\x94\x00X\x00\xfa\xff\xc0\xff\xfb\xff\x89\x00\xed\x00\xe3\x00\xa5\x00\x81\x00\x88\x00\x95\x00\x89\x00q\x00c\x00S\x00B\x005\x00\'\x000\x00H\x00H\x00<\x007\x00#\x00\xe8\xff\xa3\xff\xba\xff?\x00\x9d\x00l\x00\xf8\xff\xb9\xff\xbf\xff\xd3\xff\xdd\xff\xe6\xff\xf3\xff\x02\x00"\x008\x00+\x00\n\x00\xf8\xff\x04\x00\r\x00\xf4\xff\xc1\xff\xa9\xff\xd8\xffI\x00\xba\x00\xd3\x00u\x00\xf1\xff\x97\xffh\xffY\xff}\xff\xcf\xff8\x00}\x00r\x008\x00\t\x00\xfb\xff\x02\x00\x12\x003\x00l\x00\x8b\x00^\x00!\x004\x00b\x00.\x00\x1a\x00\xa2\x00\xfa\x00\x93\x00\xed\xff\xa7\xff\xd8\xff(\x00<\x00\x04\x00\xd4\xff\xf7\xffR\x00\x88\x00W\x00\xef\xff\x94\xffm\xffW\xff\xde\xfe_\xff\xb3\x01\x86\x02v\x00\x87\xfe\xae\xfe\xb6\xff\xe5\xffg\xff\x1d\xffF\xff\xa4\xff\xe3\xff\xdf\xff\xdb\xff\xed\xff\xf0\xff\xc1\xffl\xffm\xff\xce\xff\xf8\xff\xc1\xff\x8d\xff\xa7\xff\x05\x00\x83\x00\xde\x00\xed\x00\xad\x00\x0c\x00@\xff\xcb\xfe\x0c\xff\xec\xff\xbb\x00\x03\x01\x04\x01\xd6\x00c\x00\xe0\xffz\xff>\xffh\xff\xf7\xffw\x00\xa0\x00{\x00\x0f\x00\\\xff\xb3\xfe\xb3\xfe\xb6\xff\xe7\x00\x0e\x01>\x00\x92\xff\xbc\xffY\x00\xa1\x00N\x00\xcb\xff|\xffn\xff\x81\xff\xb3\xff,\x00\xb9\x00\xc6\x00R\x00\x01\x00\x1e\x00_\x00`\x00 
\x00\xd8\xff\xc5\xff\xf4\xff6\x00`\x00v\x00\x8d\x00\xb4\x00\xe4\x00\xf4\x00\xad\x00,\x00\xbc\xff\x96\xff\xde\xff~\x00.\x01m\x01\xea\x00/\x00\xd8\xff\xb5\xff\xa3\xff\xcb\xff\xfc\xff\xee\xff\xa6\xff\x8d\xff\x00\x00\xd2\x00c\x01$\x01\x11\x00\x0c\xff\xe2\xfe;\xfft\xff\x9f\xff$\x00\xd5\x00\x1e\x01\xce\x00E\x00\xda\xffs\xff\xea\xfep\xfe\x80\xfeH\xffW\x00\xf6\x00\x03\x01\xd1\x00S\x00}\xff\xcb\xfe\x8b\xfe\x96\xfe\xcb\xfeB\xff\xee\xff\x86\x00\xd5\x00\xdf\x00y\x00\x94\xff\x9a\xfe\x14\xfe>\xfe\xf1\xfe\xaa\xff\xe9\xff\xe7\xff\x11\x00;\x00\x13\x00\xaa\xffF\xff\x1b\xff%\xffU\xff\xc7\xff\x82\x00\x1a\x01)\x01\xd3\x00\x80\x00C\x00\xde\xffY\xff)\xff\x9c\xffl\x00\x19\x01\\\x017\x01\xc7\x003\x00\xb3\xffq\xffp\xff\xb0\xff,\x00\x9f\x00\xbb\x00\x9b\x00\x91\x00\x8e\x00P\x00\xdb\xffr\xffF\xff]\xff\x9d\xff\xf2\xff/\x00#\x00\xe1\xff\xa8\xff\x8f\xff\x87\xff\x85\xff|\xfff\xffH\xffJ\xff\x85\xff\xd7\xff\x0c\x00\xfe\xff\x98\xff\xe5\xfe6\xfe\x14\xfe\xc7\xfe\xe7\xff\x9a\x00g\x00\xb6\xff=\xff&\xff\x18\xff\xb6\xfe\x11\xfe\xaa\xfd#\xfen\xff\xb7\x002\x01\xc5\x00\xe8\xff(\xff\xd7\xfe\xf4\xfe=\xffl\xfft\xff\x8c\xff\xda\xff\x14\x00\xdc\xffl\xffY\xff\xd0\xffo\x00\xb7\x00m\x00\xb9\xff\x02\xff\x97\xfe\x9b\xfe\x10\xff\xd2\xff\x89\x00\xcb\x00\x8b\x005\x00:\x00\xa8\x00\r\x01\xeb\x00p\x00\x10\x00\xf3\xff\'\x00\x91\x00\xf8\x00V\x01\xa9\x01\xbf\x01j\x01\xd0\x00)\x00\x8c\xff/\xffw\xffg\x00z\x01+\x027\x02\xbf\x01\x19\x01\x85\x00\x05\x00\x80\xff-\xffp\xffD\x00(\x01\xb9\x01\x15\x02[\x026\x02+\x01V\xff\xa0\xfd\n\xfd\xdc\xfd\x92\xff~\x01\xf4\x02G\x03A\x02\x8c\x00U\xff\xed\xfeP\xfe\xeb\xfc\x13\xfc\xa4\xfd/\x01\xec\x03\xc1\x03v\x01\x84\xff\x15\xffZ\xffZ\xffI\xff\xc5\xff\xad\x00?\x01A\x01b\x01\n\x02d\x02|\x01\xe1\xff\x1c\xff\xe5\xff\x89\x01\xb5\x02\xdb\x02V\x02~\x01\\\x00!\xff\x0c\xfeD\xfd\x03\xfd\x84\xfd\xbb\xfeI\x00p\x01\x8d\x01\xac\x00b\xffV\xfe\xbd\xfdZ\xfd\x0c\xfd\x1c\xfd\xe6\xfdO\xff\xda\x00\x07\x02\x93\x02k\x02\xc2\x01\xe2\x00\n\x00w\xffM\xff\xaa\xff\x93\x00\x94\x01*\x02O\x02\'\x02h\x01\xdb\xff\xfa\xfd\xaf\xfc\x8c\xfct\xfd\xd5\xfe@\x00|\x013\x02\xee\x01t\x00L\xfe\x7f\xfc\xd7\xfbR\xfc~\xfd\x03\xff\xae\x00\x0c\x02\x8a\x02\x14\x023\x01H\x00$\xff\xd2\xfd%\xfd\xb6\xfd\xa4\xfe\x95\xfe\xdf\xfdu\xfe\x18\x01i\x03o\x02\x92\xfe$\xfb\xb5\xfa\x03\xfd"\x00\xc4\x02\x8b\x04 \x05\x15\x04\xd2\x01\x86\xff\x13\xfe}\xfdl\xfd\x1b\xfe\t\x00\xa5\x02F\x04\xd2\x03\xde\x01\xdc\xff\xc0\xfey\xfe}\xfe\x9b\xfe\xa9\xfe`\xfe\xc3\xfd2\xfd\xd6\xfc\x97\xfc\xb6\xfc~\xfd\xa9\xfe\xb1\xffS\x00\xad\x00\xd8\x00\x9b\x00\x04\x00v\xff\xe1\xfe\xe9\xfd\xca\xfci\xfc\x8e\xfd\xd4\xff\xba\x018\x02\xb8\x01C\x01_\x01\xd6\x01\xe9\x01\x19\x01\xc3\xff\x8b\xfe\xc6\xfd\xba\xfd\xab\xfe=\x00\x82\x01\xd2\x01a\x01\x02\x01\x0b\x01\xfc\x00]\x00f\xff\xf8\xfez\xff1\x00d\x00F\x00s\x00\x19\x01\xd8\x01%\x02\xe0\x01\x8f\x01\xac\x01\x02\x02N\x02\xd2\x02\xaa\x03T\x04f\x04\xc4\x03\r\x02\xf2\xfe\xc4\xfb*\xfb\xf8\xfd+\x01\x1e\x01\xcb\xfdr\xfa\xdd\xf9\x17\xfcI\xff\xcf\x01\x07\x03\xbd\x02\x06\x01\xc0\xfe \xfd-\xfc\x17\xfb]\xfa\\\xfc2\x02\xce\x08\x1f\x0bj\x073\x01h\xfd\xa7\xfd\x1d\x00\x7f\x02\x95\x03\xdb\x02M\x00\xaa\xfcI\xf9W\xf7\x06\xf7\xd8\xf7~\xf9\xe8\xfb\xcc\xfe`\x01\x87\x02\x88\x01\x05\xff\xb5\xfc\xcc\xfb\xf5\xfbk\xfc<\xfd\xa3\xfe7\x00\x8d\x01\xee\x02t\x04~\x05`\x05w\x04\xe8\x03\x02\x04\xb7\x03\x00\x02G\xffn\xfd\x13\xfe\xb0\x00\xf1\x02J\x03c\x02\xbe\x01\xf2\x01\xbf\x02\xdc\x03\x0f\x05F\x06;\x07%\x07O\x05 
\x02\xfc\xfeJ\xfd\xb7\xfd\xa3\xff\x87\x01\x1b\x02m\x01p\x00\xf6\xff_\x000\x01*\x01\xb6\xff\xae\xfd$\xfc\n\xfb\xbe\xf9\x08\xf9T\xfbO\x01\x97\x07\x13\t\x06\x04I\xfc\xce\xf7,\xf9U\xfe\x03\x04O\x08\x02\n6\x08\x83\x03\xe6\xfd!\xf9\r\xf66\xf5=\xf7\x8a\xfb\x07\x00"\x03\xdd\x04\xbe\x05\x82\x05\x98\x03h\x00*\xfd\x94\xfa\xc1\xf8\xf3\xf7y\xf88\xfa\xbc\xfc\x80\xff\xc2\x01\xd2\x02\x98\x02\xb2\x01\x1d\x01\xe0\x00\x93\xffX\xfc\xa3\xf8\x1c\xf7@\xf9"\xfeM\x03\xc1\x06!\x08\x01\x08\xef\x06\xce\x05\xb5\x05\xfa\x06\xfb\x08m\n\xf7\t\x1b\x07\x87\x02\x1d\xfe\xe3\xfbZ\xfc7\xfe\xfb\xff\x1d\x01\xa3\x01}\x01\x88\x00\xfc\xfe\x89\xfd\xe3\xfc\'\xfd\xbb\xfd\x16\xfe\x1d\xfe\x9a\xfd\x9d\xfc\xd2\xfc0\x00h\x05\x10\x08\xa8\x05\xca\x00\xfd\xfd\xf1\xfe\xd4\x01F\x04\x9f\x05\xcf\x05\x9d\x03\xe1\xfd=\xf6C\xf1w\xf2\x1a\xf8\x93\xfcO\xfcE\xf9A\xf8I\xfb\xd1\xff)\x02\x8d\x01\x01\x00\xf5\xfe\xc2\xfd\x83\xfb\xfc\xf8\xec\xf7\\\xf9\x03\xfd\xa2\x01-\x05\x12\x06\xea\x04\xdd\x03,\x04\xd8\x04\xec\x03\x91\x00\xea\xfbJ\xf8s\xf7/\xf9\xf3\xfb\xa9\xfe\xea\x00T\x02\xb1\x02\xa9\x029\x03b\x04h\x05\x9d\x054\x04\xce\x00\xb1\xfc\x1a\xfa\\\xfa\x08\xfd\x98\x00m\x03\xdd\x049\x05\xa2\x04!\x03\\\x01N\x00w\x00z\x01r\x02\x95\x02I\x01o\xfeZ\xfb\xb4\xfa4\xfe\xae\x03s\x06\xaa\x04j\x01J\x004\x01X\x02\xaf\x03\x8c\x05<\x06\xc5\x03\xce\xfe\xea\xf9\xfb\xf6B\xf6r\xf7\r\xfaC\xfd]\x00\xb0\x02f\x03G\x02C\x00\x9f\xfe\xf5\xfd\xa8\xfdN\xfc\x1d\xf9\xf9\xf5\xb2\xf6L\xfc\xb1\x02\xa6\x04f\x01\x13\xfd\x1d\xfc]\xff\x06\x04~\x06}\x05d\x02\x1c\xff\x87\xfc\xa5\xfa\xc0\xf9\xe1\xfa\x9d\xfe\xce\x03\x05\x08\xa6\t6\tH\x08\xc1\x07\xa6\x07\xbf\x07v\x07\xad\x05\xdd\x01F\xfd\xfe\xf9E\xf9\xef\xfa\xab\xfd\x8f\xff\xa9\xff\xdf\xfe\xa5\xfe\n\xff!\xff~\xfej\xfdZ\xfc\x84\xfb\xa8\xfav\xf9\x02\xf9\xb5\xfb\x06\x02M\x08e\nM\x08P\x05\xea\x03H\x04\xe1\x05!\x08g\t\xc9\x07<\x03\xb1\xfdT\xf9\x1e\xf7\xf5\xf6y\xf8\xfd\xfae\xfd\x17\xffD\x00\xc5\x00\xfe\xffl\xfe\xb6\xfdG\xfex\xfe \xfd-\xfb6\xfa\xe3\xfa\xda\xfc\x8c\xff\\\x02\x97\x04\xc6\x05\xd8\x05G\x05x\x04\x0c\x03\xb8\x00\x00\xfe\x85\xfb~\xf9C\xf8`\xf9X\xfe\x1b\x05Z\x08\xa2\x05\xa4\x00|\xfe?\x00\xd6\x038\x07Y\t\xa4\t\xdd\x07_\x04\x1e\x00R\xfc%\xfan\xfa\x17\xfd\xc5\x00\x80\x03.\x04U\x03&\x029\x01\xaa\x00\x99\x00\x0f\x01\xf7\x00u\xfe\x90\xf9\xbd\xf5\x1a\xf7=\xfd\x9e\x02\xf9\x02\x1c\x00\xee\xfe \x01"\x04\x9b\x05G\x052\x03c\xff\x02\xfb\x97\xf7\xb4\xf5^\xf5\xe5\xf6c\xfa\xd7\xfe\xbf\x02\xfb\x04$\x05\xa5\x03\xbd\x01\xc3\x00\xdd\x00\xac\x00\xeb\xfe\x1f\xfc\x0b\xfa\xc1\xf9\xd8\xfa\xcb\xfc\xa0\xff\xac\x02b\x04\xf7\x03f\x02=\x01\xd9\x00\x87\x00\x8e\xff\xcf\xfd.\xfc\xd9\xfb\xfc\xfc\xf4\xfeU\x01\x7f\x03^\x04M\x044\x058\x07\xe2\x07 \x06\xef\x03d\x03=\x04=\x04\x84\x01\xcb\xfca\xf9\x9e\xfa\xba\xffI\x036\x01\xa9\xfbI\xf8\xd3\xf9$\xfe\xc4\x01V\x03\xbb\x03\xc0\x03\x7f\x02\xaf\xfe{\xf9\x0e\xf7\x12\xfa\xdc\xff\xa4\x03%\x04\xa3\x03\xd8\x03\x8c\x04\x85\x05\x1c\x07\x91\x08\xe7\x07\xe7\x03w\xfd\xbf\xf6\xe7\xf1\x89\xf0A\xf3\xb0\xf8\xac\xfd\t\x00n\x00\x81\x00Q\x00\xf6\xfe\xb2\xfc\xa2\xfa7\xf9P\xf8\xf2\xf7\x17\xf8\xbb\xf8N\xfa\x88\xfd[\x02\x06\x07\x1d\t\xdf\x071\x05\x9f\x03\xb0\x03\xc3\x03O\x02}\xff\xe3\xfc\xe4\xfb\xc4\xfc\xff\xfe\xb6\x01\xc6\x03\xb0\x04D\x05:\x06\xb1\x06\xc5\x05^\x04\xc3\x03\xaf\x03\xc9\x02\xc4\x00\x03\xff\x99\xfe\x07\xffm\xff\x98\xffw\xffI\xff\xef\xff}\x01<\x02\xaf\x00 
\xfe\x8f\xfd\x02\x00i\x02^\x00\xe8\xf9L\xf5\xde\xf8G\x02\xde\x07\x19\x04\xfe\xfb\x82\xf8\x84\xfc\xd4\x03\x8f\t\xfc\x0b/\x0b\x9f\x077\x02%\xfc~\xf6\xbc\xf2{\xf2\x13\xf6\xf0\xfb\x8f\x01\x07\x05\x88\x05\xa3\x03\xe2\x00\xee\xfe\x9f\xfe>\xff1\xff\x92\xfd\xb2\xfa\x9a\xf7\xd9\xf5\x19\xf7d\xfb%\x00>\x02\x83\x01\x01\x01\xe5\x02k\x05\x05\x05\x8d\x00\x99\xfa\x11\xf7\xb6\xf7\xe4\xfa\x1d\xfe\xd6\x00f\x03\x8f\x05g\x07Z\tz\n*\tw\x06\x98\x05#\x07\n\x08\xfa\x05\x0e\x02\xe0\xfe\xa3\xfd\xcb\xfd`\xfe\x18\xff\xf6\xff\xd4\x00E\x01\xe0\x00\xd5\xff\xd8\xfe\x96\xfeB\xffC\x00V\x00\xd9\xfe\n\xfd<\xfd\xcf\xffK\x02\x9b\x02u\x01f\x01\xab\x031\x06(\x06\xd2\x03\xfa\x01\xf2\x01\xe6\x01S\xff\xdf\xfa\x06\xf8\n\xf9R\xfc)\xfe\x01\xfd\x16\xfb\x99\xfb\xd8\xfe3\x02k\x03\xc3\x02\x9f\x01c\x00\xa4\xfe\x85\xfc\xb1\xfa\xbf\xf9$\xfaJ\xfc\xf7\xff|\x03\xeb\x04\x1d\x04\x9b\x02\xcb\x01\x9c\x01 \x01"\x008\xff\x85\xfev\xfd\x08\xfcc\xfb\x9e\xfc,\xffP\x01\x04\x02\xeb\x01.\x026\x03w\x04\xd7\x04W\x03\'\x00\x13\xfd\xe4\xfb\x80\xfc8\xfd>\xfd\xad\xfd\x8a\xff\xfb\x010\x03S\x02:\x00y\xfeB\xfe\x9a\xff&\x01T\x01\xb8\xffG\xfd\x85\xfb\\\xfb\x8a\xfc\x03\xfeZ\xff\x01\x01\xe7\x02\xf1\x03s\x03?\x02g\x01\xec\x00;\x00\xfa\xfeq\xfd\x93\xfc\xf6\xfc\x1d\xfe\xce\xfe}\xfe\x19\xfe\xe7\xfe\xc1\x00\xe0\x01\xaa\x00\xf6\xfd\xa2\xfc\x0b\xfe0\x00V\x00t\xfe\xb0\xfc]\xfc\xfe\xfc\xfc\xfdb\xff\x0b\x01a\x02\x1c\x03\x96\x03 \x04\x86\x04A\x04\x0e\x03G\x01\x8d\xffG\xfe\xdc\xfd\x8e\xfe\xd9\xff\xfe\x00\xdb\x01\xbc\x02y\x03|\x03\xb7\x02\xab\x01\xb6\x000\x00w\x00\x10\x01\xb0\x00\xb7\xfeI\xfcq\xfb\xf6\xfc`\xff\xb6\x00\xce\x00\xf1\x00\xb4\x01B\x02\xd6\x01\xaf\x00}\xff\xa3\xfe\x1b\xfe\xc2\xfds\xfd#\xfd`\xfd\x0e\xff\xe2\x01\xf0\x03\x84\x03@\x01q\xff\x85\xff\x02\x01g\x02n\x02\xd9\x00\x83\xfe\x9d\xfc\xe1\xfbC\xfc:\xfdT\xfe\xca\xff\xb1\x01\x0f\x03\xac\x02\xb5\x00\xc8\xfeW\xfe=\xff\x1b\x00h\x00\xb7\x00\xa8\x00\x86\xff\xed\xfdR\xfd\x98\xfe\x1f\x018\x03\xa4\x03\xb7\x02\xbe\x01\xbd\x01\x97\x02\r\x03\xdc\x01>\xff\x11\xfd\x0f\xfd\x95\xfes\x00\x08\x03\xbd\x04\t\x04\xd1\x02\xb0\x02(\x03-\x03\x92\x02\xa1\x01\x8b\x00}\xff\xa4\xfe0\xfet\xfeC\xff\x00\x00Z\x00j\x00\x9e\x00\xe4\x00\x96\x00\xa6\xff\xd6\xfe\xd7\xfe\x9b\xffu\x00\x97\x00\xbd\xff\x88\xfe\xbf\xfd\xa3\xfd\x0f\xfe\xc5\xfe\xd6\xff2\x01d\x026\x03a\x03e\x02\x96\x00\xd8\xfe\x9c\xfd\x14\xfd?\xfd\xd1\xfd\x9f\xfe\x8a\xffA\x00:\x006\xff\xeb\xfd\xb5\xfd\x02\xff\xa7\x00j\x01+\x01\x82\x00\xc4\xff\x9f\xfe\xec\xfc\x86\xfb\x8b\xfbO\xfd\x01\x000\x02/\x03Z\x03\xe1\x02\xa7\x014\x00o\xff\xa2\xff\x1e\x00\x00\x00`\xff$\xff\xa0\xff1\x00~\x00\r\x01\x17\x02\xfd\x02\t\x03d\x02\x99\x01\xba\x00\xa8\xff\x9d\xfe\x15\xfeb\xfeZ\xffP\x00\x9d\x00\x82\x00\xc9\x00>\x01?\x01\xfe\x00\xf2\x00\x18\x011\x01\x16\x01\xc5\x00p\x00X\x00g\x00\x0b\x00\x10\xff\x0b\xfe\xbf\xfd\x8d\xfe:\x00\xf3\x01\x94\x02\xb1\x01\x1f\x00.\xffU\xff\x9c\xff\xc8\xfe\x11\xfd\xf4\xfb`\xfc\xde\xfdA\xff\xe9\xff&\x00N\x00P\x00J\x00o\x00\xb4\x00\xe4\x00\xc4\x00D\x00\x8a\xff\xd1\xfeN\xfe\x15\xfe\x15\xfet\xfeW\xffm\x00B\x01\xb9\x01\xbd\x01C\x01\xc1\x00g\x00\xaf\xffN\xfe\xbc\xfc\xc7\xfb\xe4\xfb\x05\xfd\xe2\xfe#\x01\x07\x03\xda\x03\xa6\x03\x00\x03=\x02\x14\x01`\xff\xea\xfd\xa8\xfd\x8f\xfe\xb6\xffW\x00|\x00\x94\x00\xc2\x00\x00\x01 
\x01\xef\x00\xaa\x00\xc4\x00\x1b\x01\xfc\x00.\x00/\xffu\xfeD\xfe\xa0\xfe\x12\xff?\xffR\xff\xbd\xff\x9a\x00>\x01\x05\x01[\x00H\x00\x0f\x01\xd9\x01\xab\x01Q\x00\x96\xfe\x94\xfd\xaa\xfd,\xfe\x86\xfe\xff\xfe\xfb\xff\xff\x00\xff\x00\xc6\xffl\xfe%\xfe=\xff\xe9\x00\xf0\x01\x90\x01\x00\x00\xfd\xfd=\xfc\x13\xfb\xa4\xfa\x1d\xfb\xb4\xfcY\xff0\x02\r\x04\x89\x04\xf7\x03\x9e\x02\xb9\x00\xa7\xfe\x03\xfd\x81\xfc9\xfdo\xfe_\xff\xf0\xff\x8d\x00\xa1\x01\x03\x03\xd5\x03~\x03G\x02\xda\x00\x9b\xff\x90\xfe\x8e\xfdp\xfc}\xfbd\xfb\xa1\xfc\xe8\xfeI\x01\xda\x02K\x03\xf6\x02\\\x02\xa8\x01\xfd\x00\x80\x00$\x00\xf0\xff\xf7\xff\x04\x00 \x00u\x00\xd6\x00\xf2\x00\xef\x00\x1e\x01N\x012\x01\xe3\x00\xab\x00\x99\x00\x95\x00\x82\x00L\x00\xfc\xff\xb4\xffu\xff(\xff\x1e\xff\xcd\xff\xec\x00\x9a\x01^\x01\x0e\x01\xb7\x01\x04\x03\xad\x03\xec\x02\xfb\x00\xb6\xfe\x0f\xfd\x82\xfc\x04\xfd\x17\xfe>\xff\x11\x00\x8a\x00\xfb\x00q\x01\x18\x01<\xff\xf6\xfc\xba\xfcM\xffU\x02\x08\x03%\x01\xa6\xfe/\xfd\xc7\xfc\x16\xfdN\xfe~\x00\xc6\x02\xfb\x03\xfe\x03\xcd\x03\xeb\x03F\x03\xaa\x00\x10\xfd\xf1\xfa\xa5\xfb~\xfe\x95\x01\x81\x03,\x04\xfa\x03\xfd\x02i\x01\xb9\xffE\xfel\xfd\x92\xfd\x92\xfe\xa7\xff\x00\x00H\xff\xf0\xfd\xcd\xfc\x7f\xfc(\xfdp\xfed\xff\x87\xff\x97\xff#\x00\xc8\x00&\x01\\\x01\x7f\x01_\x01\xf2\x00\x82\x00+\x00\xd7\xff\xdc\xff\xc9\x00`\x02\x8f\x03\xbf\x03:\x03O\x02\x06\x01\x99\xffj\xfe\xb6\xfdi\xfd<\xfd#\xfd\x92\xfd\xd1\xfeA\x00\xc3\x00\xce\xff!\xfe\x17\xfdG\xfdJ\xfe\x96\xff\x02\x01T\x02\xc2\x02\xe0\x01\\\x00!\xff-\xfe+\xfd\xac\xfc\xa1\xfd\xa9\xff\xfe\x00\xc1\x00X\x00\x81\x01i\x034\x03\xd1\xff\x99\xfb\x7f\xf9N\xfa\xbe\xfc\x8c\xff]\x02\xa9\x04h\x05B\x047\x02\x91\x00\xd3\xff|\xff\x1b\xff\x1f\xff\x00\x007\x01\xa9\x01\xf9\x00\xdd\xfff\xff\xea\xff\xb5\x00\x02\x01\x8e\x00\x91\xff\x92\xfe\x0f\xfe\xf3\xfd\xb5\xfdc\xfd\xa2\xfdi\xfe\xef\xfe\xcd\xfe\x93\xfe8\xff\xcd\x00b\x02\x04\x035\x02#\x00\xb0\xfd\x19\xfcn\xfc\x9a\xfe\t\x01\x0f\x02\xa0\x01\xde\x00\x8f\x00\xbd\x00\x06\x01\xe9\x00Y\x00\xb7\xff7\xff%\xff\xea\xffT\x01h\x02Q\x020\x01\x03\x00\x7f\xffc\xff*\xff\xf9\xfeo\xff\x95\x00d\x01\x05\x01\x17\x00\xe5\xff\xee\x00\\\x02\xe6\x02\x0e\x02\x95\x00\x9d\xffz\xff\xf5\xff\x12\x01\xd1\x02\x9d\x04\xb0\x05\xae\x05[\x04\x8f\x01\r\xfe0\xfc\xa4\xfd|\x00\xff\x00\x1a\xfeC\xfac\xf8j\xf9l\xfc\xdd\xffF\x02\xa0\x02\xe3\x00}\xfe\x85\xfdH\xfe\xa8\xfe+\xfd\xc7\xfb\xf7\xfd\xa0\x03\xfc\x07\x02\x07>\x02\xea\xfe\xd8\xff4\x03\xe4\x05\x91\x06X\x05\x89\x02q\xfe\xe3\xf9>\xf6\x88\xf4\xe3\xf4\xde\xf6\xc2\xf9\xd3\xfcw\xff\x1f\x01*\x01l\xff\xe1\xfc(\xfb\x15\xfb*\xfc\x86\xfd\xc1\xfe\xd5\xff\xf0\x00w\x02<\x043\x05y\x04\xbb\x02\xf9\x01G\x03`\x05\xe3\x05\x9d\x03\xd8\xffn\xfd\xe8\xfd\t\x00\x82\x01\x82\x01\xec\x00\x96\x00\xbf\x00\xb8\x01\x90\x03\x9b\x05\xe4\x06\xb8\x06\xf0\x04E\x02\xee\xff\xc4\xfe\xf2\xfe\xf5\xff\xe2\x00\x13\x01\x98\x00\xcd\xff\xf3\xfew\xfe\xc5\xfeZ\xffd\xff\xd8\xfe\x18\xfe\x10\xfdw\xfb\r\xfa\x0e\xfb\xda\xff\x13\x06\x96\x08J\x04\x1d\xfc\xaf\xf6\xea\xf7\xdd\xfd\x16\x04M\x08B\n\x8a\t\xde\x05\'\x005\xfa\xd1\xf5[\xf4u\xf6\x08\xfbX\xff\x7f\x01(\x02\n\x03g\x04\xde\x04u\x03\xc6\x00\xfa\xfd\x9f\xfb\xfc\xf9B\xf9c\xf9c\xfar\xfc,\xffm\x01S\x02\r\x02\xab\x01\xa7\x01\xe3\x00)\xfeH\xfa\xcc\xf7\xc3\xf8\xef\xfc\xf6\x01s\x05\xcb\x06\xe2\x06\x85\x06(\x06g\x06\x9b\x07_\t\xd8\n.\x0b\xbe\t,\x06D\x01\\\xfdn\xfc\xfd\xfd\xc9\xffW\x00\xd1\xff\x1e\xff\xc2\xfe\x9d\xfe^\xfe\xdd\xfd?\xfd\xdb\xfc\xff\xfc\xc2\xfdw\xfe\x1b\xfeo\xfd#\xff\x05\x04V\x08\xd9\x07B\x03D\xff\t\xff\x8c\x012\x04\xe1\x05\x9f\x06\x95\x05d\x01\xaa\xfa\xe4\xf4\xc3\xf3\x0c\xf7X\xfa\xee\xf9\xcd\xf6C\xf5\x1b\xf8\xb1\xfd\xe2\x01\x8f\x02U\x01\\\
x00x\xffs\xfd\x89\xfaq\xf8\xb2\xf8\x7f\xfb\xd0\xff\xca\x03\xa2\x05*\x05\xfe\x03\xce\x03\xbc\x047\x05\x85\x03\x80\xff(\xfb\xfa\xf8y\xf9\x01\xfb<\xfcj\xfd\x0b\xff\xca\x00:\x02\xae\x03[\x05\xd4\x06|\x07\x95\x06\x8a\x03,\xff\x88\xfb?\xfa\xa7\xfb\xea\xfeg\x02\x85\x04\xd8\x04\xdb\x03C\x02\xd3\x00&\x00b\x00\x1b\x01\x9d\x01i\x01O\x00Q\xfe!\xfcs\xfb\xe4\xfd\xa2\x02\xfc\x05"\x05\xc0\x01\xcb\xffo\x00\xc8\x01\xfd\x02\xc8\x04l\x06\x9a\x05\xb0\x01\xb1\xfc\xe7\xf8\xfd\xf6\xa2\xf6\xb8\xf7\xf5\xf9\xbc\xfcE\xff\xc2\x00\xe3\x00.\x00N\xff\xa2\xfe-\xfeO\xfd\xfe\xfa\xb3\xf7\x8c\xf6\xe8\xf9\xa4\xff\xbc\x02\xad\x00#\xfc\xdb\xf9\x1b\xfcM\x01\xdb\x05\x12\x07\xce\x04\xd2\x00\x14\xfd{\xfa\x16\xf9T\xf9\xfd\xfb\xb8\x00\x8c\x05\x84\x08a\t6\t\xdc\x08\xaa\x08\xf1\x08z\t\xd3\x08|\x05\x19\x00C\xfb.\xf9\x15\xfa\x85\xfck\xfe\x9f\xfe\xe2\xfd\xc3\xfd\x9c\xfeJ\xff\xec\xfe\xc6\xfd\x86\xfc\xa0\xfb\x04\xfb1\xfav\xf9\xf1\xfaF\x00(\x07\xfb\n\x1d\n\x03\x07\x9f\x04\x06\x04%\x05\xb0\x07G\ns\n\xd5\x06\xad\x00\xe9\xfa\x90\xf7\x91\xf6\x11\xf7\xa2\xf8\xf4\xfat\xfda\xff\xfd\xff\x13\xff\x94\xfd)\xfdM\xfeB\xffI\xfe\xfa\xfb/\xfa\x0c\xfa\x96\xfbV\xfey\x01\xf1\x03!\x05;\x05\xc7\x04-\x04L\x03\xd2\x01\xd5\xff\x86\xfd\xff\xfa\xd3\xf8\xba\xf8\xc2\xfc\xda\x03\xb8\x08/\x07\x85\x01\xc6\xfd\xd9\xfe\xd9\x02\xda\x06y\t\x98\n\x10\nh\x07\xcc\x02\x91\xfd\xe8\xf9r\xf9\xed\xfb\x84\xffG\x02H\x03\xe4\x02\xf8\x01\'\x01\xc4\x00\xf3\x00\x8f\x01\xbc\x01\xf6\xff\xb2\xfb/\xf7z\xf6\t\xfb\xf7\x00\x03\x03\xe0\x00\x07\xff\x99\x00\x13\x04\x9d\x06O\x07\xe9\x05\xfd\x01\xa7\xfc4\xf8\xe5\xf5\x81\xf5\xb0\xf6\x87\xf9x\xfdH\x01\xdf\x03\xb4\x04\xce\x03\xeb\x01Z\x00\x1b\x00\xb3\x00\\\x00\x19\xfe\x1a\xfbq\xf9\xc6\xf9v\xfb\r\xfe"\x01\x8a\x03\x1d\x04\x17\x03\xd0\x01\r\x01m\x00M\xffy\xfd\xb7\xfb_\xfb\xe2\xfcO\xff\x96\x01>\x03\xe9\x03\xfb\x03\xf8\x04=\x07\x7f\x08\x11\x07\xb5\x04\t\x04+\x05\xba\x05s\x03\x9b\xfe(\xfa\xbe\xf9\x14\xfe\xd3\x02\x93\x02K\xfdc\xf8j\xf8\x8f\xfc\xd1\x00\xae\x02\xe6\x02J\x03\x90\x03q\x01b\xfc\x02\xf8\x9f\xf8\xa8\xfdN\x02\xaa\x03B\x03{\x03\xa3\x04\xf6\x05m\x07\xe1\x08\xe4\x08\xd4\x05\xbb\xffy\xf8\x83\xf2\xcd\xefU\xf1b\xf6\xff\xfbJ\xff0\x00|\x00\xc8\x00\xf2\xff\xa8\xfd7\xfb\x8f\xf9\x8c\xf8$\xf8p\xf8,\xf9J\xfaz\xfcQ\x00\xd3\x04\xc9\x07\xd7\x07\xc8\x05\xd7\x03q\x03\xd5\x03+\x03\x9b\x00K\xfdJ\xfb\xa4\xfb\xfe\xfd\x1a\x01m\x034\x04w\x04\x9c\x05\xfe\x06\xc8\x06\xf6\x04a\x03\x0e\x03\x02\x03\xc8\x01\xc5\xff\xab\xfe\xe5\xfe.\xff\xc9\xfe\x00\xfey\xfd\x00\xfe\xe6\xff\xc8\x01}\x01\xf9\xfe\xff\xfc\x0c\xfe\xc4\x00\xc7\x00\x05\xfc\x93\xf6a\xf7u\xff\x00\x07\t\x06-\xfeX\xf8\x14\xfa\xee\x00\xa3\x07\x96\x0b?\x0c\x90\tR\x04\x12\xfe)\xf8\xcb\xf3K\xf2d\xf4U\xf9*\xff\xc0\x03\x95\x05w\x04\xa0\x01\xeb\xfe\xe1\xfd\x8a\xfe>\xffR\xfe\xac\xfbt\xf82\xf6b\xf6\xc1\xf9\xcd\xfe\x14\x02\xe0\x01\xab\x00\xe6\x011\x05\xa2\x06{\x03;\xfd\x1a\xf8>\xf70\xfa\x0b\xfe\x0e\x01e\x03h\x05V\x07\x86\t\x1d\x0b&\n\xb4\x06\xfa\x03`\x04X\x06\\\x064\x03%\xff\x17\xfd\x8f\xfd\xe0\xfe\x80\xffO\xff,\xff\xc5\xff\xb6\x00\xf5\x000\x00N\xffX\xff*\x00s\x003\xffV\xfd\x1e\xfdj\xff*\x02\xc5\x02z\x01\xf9\x00\xf6\x02\xd9\x05\xa7\x06\xd3\x04\xad\x02\t\x02\x06\x02Y\x00t\xfc\xc6\xf8L\xf81\xfb_\xfe\xcf\xfe\xe7\xfc\xf2\xfb\x05\xfe\x97\x01\x9e\x03*\x03\xb6\x01\xab\x00\xf6\xff\xd8\xfe\xf9\xfc\xe0\xfa\xb8\xf9\xa3\xfa\xec\xfd?\x02E\x05\x92\x05\xef\x03I\x02\x9e\x01M\x01\x86\x00z\xff\xac\xfe\xe4\xfd\xc7\xfc\x0e\xfc\x02\xfd\xaf\xffX\x02Y\x03\xee\x02\xbc\x02\xbf\x03]\x05\x0e\x06\x9e\x04J\x01\xd0\xfd\x1d\xfc\xa7\xfc\n\xfe\xa3\xfe\x8b\xfe\x1c\xff\xbf\x00m\x02\xcf\x02r\x01+\xff\xba\xfdr\xfe\xb1\x006\x02?\x01A\xfe\x83\xfb\xef\xfad\xfcF\xfe\xa4\xff\xf2\
x00\xa5\x02\x0e\x04"\x04\xf4\x02\x95\x01\xa3\x00\xe0\xff\x06\xff\x1f\xfel\xfdE\xfd\xb0\xfd\x0f\xfe\xb7\xfd\x10\xfdu\xfdo\xff\x83\x01v\x01\x03\xff\xb5\xfc\x07\xfd$\xff\x1a\x00\xb5\xfe\x82\xfc\x9a\xfbu\xfc\n\xfel\xff\x85\x00\x88\x01\x87\x02|\x03\x1d\x04\x10\x04R\x03;\x02\x1e\x01\x11\x00\x07\xffL\xfe\x88\xfe\xbd\xff\xf3\x00\x81\x01\xe0\x01\x91\x027\x03)\x03c\x02M\x01Y\x00\xd3\xff\xba\xffo\xff$\xfe\x1d\xfc\x1e\xfb\x9d\xfc\xaa\xff\xb6\x01\xa9\x01\xd7\x00\xdf\x00\xb5\x01,\x02\x98\x01N\x00\x00\xff1\xfe\x0f\xfeh\xfe\xaa\xfeq\xfeb\xfe\x83\xff\x85\x01\xa5\x02\xc7\x01\xf3\xff\x1b\xff\xdc\xff \x01h\x01\x03\x00\x83\xfdR\xfb|\xfa\xed\xfa\xfb\xfb5\xfd\xb6\xfe\x88\x00\t\x02?\x02\xf5\x000\xff\'\xfe2\xfe\xd7\xfe|\xff\xb3\xffJ\xffU\xfe<\xfd\xc7\xfc\xa1\xfd\xad\xff\xd0\x01\xd6\x02q\x02V\x01\x95\x00\xbe\x00Q\x01\x1b\x01\x88\xff\xab\xfdY\xfd\r\xff8\x01-\x02(\x02\x8d\x02\x97\x030\x04\xdc\x03K\x03\x03\x03\xa0\x02\xa6\x018\x00\xe3\xfe\x0b\xfe\xc8\xfd2\xfe)\xff)\x00\xc5\x00\x10\x01\x1d\x01\xb7\x00\xe1\xff*\xff\x17\xff\x9a\xff#\x00\x1d\x00{\xff\xaf\xfeD\xfeh\xfe\xd7\xfed\xff\x0e\x00\xee\x00\x01\x02\xd3\x02\xe5\x029\x02G\x01`\x00T\xff\x15\xfe\x1f\xfd\x0c\xfd\xf4\xfdA\xffD\x00\xc2\x00\xa7\x00\x0b\x00`\xff>\xff\xd3\xff\xbc\x00.\x01\x9a\x00D\xff\xe2\xfd\xbf\xfc\xdb\xfb\xb5\xfb\x19\xfd\xdf\xff\x9c\x02\xdd\x03\x95\x03\xb3\x02\xca\x01\xcc\x00\xda\xff^\xffk\xff\x98\xff\xaf\xff\n\x00\xad\x00\xf6\x00\xdd\x006\x01@\x02\xfb\x02\x87\x02d\x01\xbc\x00\xbf\x00r\x00F\xff\xfd\xfd\x9e\xfdV\xfet\xff\x15\x008\x00\xa4\x00\xb1\x01\xa1\x02\x9c\x02\xde\x01O\x01h\x01\xab\x01M\x01;\x009\xff\xf7\xfe`\xff\xb5\xffT\xff\xae\xfe\xd3\xfe\x18\x00\xa7\x017\x02&\x01H\xff#\xfe`\xfe3\xffH\xffA\xfe\x0c\xfd\xda\xfc\xbf\xfd\xc5\xfe?\xff\x87\xff\x12\x00\xe4\x00\xb5\x016\x02C\x02\xe4\x01\x14\x01\xe4\xff\x92\xfez\xfd\xfa\xfc>\xfd$\xfeM\xffd\x00<\x01\xb9\x01\xcf\x01\x93\x01\x00\x01;\x00\xa1\xff"\xffZ\xfeG\xfdt\xfc~\xfcu\xfd\xff\xfe\xcc\x00~\x02v\x03c\x03\xa3\x02\xd1\x01 \x01C\x00\'\xffO\xfe\x16\xfe&\xfe\x1a\xfe!\xfe\xab\xfe\xc0\xff\xf9\x00\xc5\x01\xce\x01=\x01\xa3\x00m\x00d\x00\xf7\xff\xff\xfe\x12\xfe\xdd\xfd\x8b\xfe\x94\xff\x03\x00\xaa\xff\x89\xffv\x00\xd1\x01W\x02\xbf\x01\x04\x01\x04\x01\x83\x01\x86\x01x\x00\xc8\xfe\xaf\xfd\xb0\xfd\xff\xfd\xfc\xfd&\xfe)\xff\xd2\x00\x1e\x02/\x02\x1b\x01\xcb\xff0\xff\x82\xff-\x00p\x00\xeb\xff\xc1\xfeg\xfdg\xfc\x16\xfc\x93\xfc\xe2\xfd\xcb\xff\xb9\x01 \x03\xc2\x03\xad\x03\x04\x03\xd0\x01\x13\x00\x16\xfe\xc7\xfc\x0e\xfd\x91\xfe\xf6\xffs\x00\x86\x00#\x01{\x02\xa6\x03\xab\x03\x81\x02\xda\x00a\xff]\xfe\xae\xfd\xf0\xfc\xf2\xfb)\xfb\x89\xfb\x89\xfd]\x00\x8e\x02,\x03y\x02\x80\x01\x03\x01\r\x01\x18\x01\xaf\x00\xe6\xff 
\xff\xb0\xfe\xa7\xfe\xcc\xfe\xf4\xfeI\xff\xfb\xff\xcf\x00j\x01\xa3\x01{\x01\xe9\x00\x00\x00\t\xffP\xfe\xef\xfd\xf5\xfdR\xfe\xba\xfe\x18\xff\xd5\xff/\x01\xa2\x02;\x03Q\x02v\x00t\xff:\x00\xcf\x01\xb4\x02Y\x02(\x01\xc5\xffw\xfe\'\xfd\xef\xfbs\xfbU\xfcq\xfe\xc0\x006\x02\x80\x02\x06\x02K\x01\x90\x00\xfb\xff\x89\xff\x01\xffo\xfe8\xfee\xfe\xaa\xfe\x03\xff\xe7\xff\x82\x01-\x03\x1f\x043\x04\xa4\x03\x98\x02\xfd\x00\xc9\xfe\x93\xfc[\xfb\xa5\xfb\x14\xfd\xd8\xfe=\x00\x18\x01\xa1\x01\xf3\x01\t\x02\xef\x01\xa4\x010\x01\xaf\x00\x0e\x00(\xff%\xfed\xfd+\xfd}\xfd\x13\xfe\xeb\xfe\'\x01I\x04\xd0\x04I\x02\xf2\xffv\xff\xac\xffb\xff\xf2\xfe\xe4\xfe\x1b\xffL\xff_\xff{\xff\xf6\xff\xd8\x00\xd0\x01I\x02\xf4\x01g\x01@\x01D\x01\x00\x01~\x00$\x00\xf9\xff{\xffJ\xfe\xd2\xfc,\xfc;\xfd\x8f\xff\x8a\x01\x0e\x02\x7f\x01\xd8\x00\xc0\x00L\x01\xcc\x01\x82\x01l\x00\x12\xff\x1c\xfe\x01\xfe}\xfe\xd4\xfe\xf3\xfe\x9d\xff+\x01\xd4\x02\x8b\x03\x1a\x03\xec\x01\x8d\x00\x8d\xff\x01\xff\x8c\xfe\x02\xfe\x9f\xfd\x9a\xfd\x02\xfe\xd2\xfe\xe1\xff\xe6\x00\x9f\x01\x03\x02<\x02s\x02\x80\x02\x08\x02\xb8\x00\xb8\xfe\xe4\xfc9\xfc\xee\xfc[\xfe\xc4\xff\xe3\x00\xac\x01\x0e\x02\xff\x01\xa1\x01>\x01\xe3\x00Z\x00\xa4\xff?\xff~\xff\xc5\xffj\xff\x9f\xfeC\xfe\x10\xff\xf0\x00\xfb\x02\xca\x03\xc3\x02\x0e\x01\x15\x00\xf6\xff#\x00M\x00i\x00=\x00\x9e\xff\xc9\xfe\x18\xfe\xcb\xfd<\xfeS\xfft\x00C\x01\xbc\x01\xe8\x01\xd8\x01\x9c\x01\x1e\x01v\x00\xd9\xffV\xff\xde\xfea\xfe\x17\xfeg\xfev\xff\xc5\x00\x90\x01}\x01\xcd\x00E\x00u\x00\xf5\x00\x07\x01`\x006\xff\xf4\xfd\x10\xfd\xe6\xfc}\xfd\x88\xfe\xc1\xff\xe5\x00\xaf\x01\x1a\x02F\x02#\x02}\x01\x8d\x00\xad\xff\xe4\xfe<\xfe\xdd\xfd\xc2\xfd\xa8\xfd\xa2\xfd\x0e\xfe\xf2\xfe\xdd\xffS\x00D\x00\x1b\x00P\x00\xcb\x00\xed\x00:\x00\xf3\xfe\xc3\xfd&\xfdN\xfdK\xfe\x05\x00\xfc\x01F\x03P\x03|\x02\x92\x01\xea\x00O\x00\x9e\xff\x1e\xff\x0f\xff\x1d\xff\xb2\xfe\xad\xfd\xaa\xfc\x96\xfc\xad\xfd 
\xff\xf8\xff\x12\x00\xfe\xff?\x00\xcb\x00+\x01\xfc\x00b\x00\xbf\xffO\xff\x1b\xff\xfb\xfe\xcc\xfe\xb4\xfe\t\xff\xee\xff\x1d\x01\r\x02=\x02\x97\x01\x8e\x00\xad\xff%\xff\xc7\xfeT\xfe\xce\xfdu\xfd\x94\xfdJ\xfeY\xff_\x00\xf3\x00\xe6\x00r\x003\x00\x99\x00W\x01\x94\x01\xe7\x00\xb6\xff\xbc\xfem\xfe\x86\xfek\xfe9\xfe}\xfe:\xff\xfc\xffr\x00\xa9\x00\xbc\x00\xca\x00\xdc\x00\xed\x00\xde\x00\x96\x00\x06\x000\xffd\xfeI\xfe>\xff\xc0\x00\xbc\x01\xb9\x01J\x01;\x01\xaf\x01!\x02\x0f\x02K\x01\x13\x00\xee\xfe7\xfe\xe9\xfd\x05\xfe\xa7\xfe\xac\xff\xb9\x00\x96\x01\'\x02`\x02P\x02\x03\x02\x85\x01\xf5\x00b\x00\xc3\xff/\xff\xd7\xfe\xec\xfev\xff6\x00\xc4\x00\xf5\x00\x04\x01G\x01\xa8\x01\xd6\x01\x9b\x01\x03\x01J\x00\xb5\xffO\xff\x1d\xffS\xff\xf1\xff\xab\x00;\x01\xa3\x01\x0f\x02\x90\x02\xf2\x02\xca\x02\xe8\x01\xb1\x00\xd7\xff\xa7\xff\xca\xff\xaf\xff\x19\xffe\xfe7\xfe\xdb\xfe\xec\xff\x9c\x00L\x004\xffc\xfe\xc8\xfe?\x00\xa7\x01\x05\x02q\x01\xa7\x00!\x00\xdb\xff\xb7\xff\xb9\xff\xf4\xffZ\x00\xba\x00\x1f\x01\xb8\x01M\x02E\x02\x8b\x01\xbf\x00E\x00\xd9\xff\x17\xff\xfd\xfd\xf1\xfc\x83\xfc\x10\xfdp\xfe\xe1\xff\x9c\x00\x98\x00}\x00\xcb\x00F\x01B\x01s\x00+\xff\x01\xfe\x80\xfd\xd3\xfd\x9c\xfeR\xff\xa7\xff\xb7\xff\xe5\xffx\x00G\x01\xbe\x01{\x01\xb6\x00\xee\xffI\xff\x8e\xfe\xc2\xfdw\xfd#\xfeq\xff\x8e\x00\x11\x013\x01[\x01\xad\x01\xf5\x01\xe1\x01f\x01\xac\x00\xc7\xff\xd0\xfe$\xfe\x0e\xfeo\xfe\xda\xfe\x06\xff\x06\xff\x19\xff?\xff>\xff\x0c\xff\xf1\xfe\x1f\xfft\xff\xc8\xff\x13\x00E\x00\x15\x00\\\xffw\xfe,\xfe\xe3\xfe\x19\x00\xc2\x00\x81\x00\xfe\xff\x07\x00\xa0\x000\x01]\x01*\x01\xa9\x00\xd9\xff\xb4\xfer\xfd\x97\xfc\x9d\xfc\x92\xfd\xe3\xfe\xe7\xffi\x00\xad\x00\xf8\x00$\x01\xec\x00R\x00\xac\xffK\xff\x19\xff\xcf\xfeL\xfe\xd0\xfd\xc9\xfdn\xfe\x80\xffk\x00\xda\x00\xec\x00\xec\x00\xf7\x00\xe5\x00r\x00\x9d\xff\xac\xfe\n\xfe\x1b\xfe\xfa\xfe9\x000\x01\x8d\x01\x84\x01\x98\x01\x11\x02\xa5\x02\xc5\x025\x02&\x01\xf5\xff\x07\xff\x9e\xfe\xc7\xfeM\xff\xd0\xff\x05\x00\xf3\xff\xdc\xff\xd9\xff\xd1\xff\xb8\xff\xc3\xff4\x00\xf7\x00\x8c\x01\x8b\x01\xf9\x00C\x00\xde\xff\x02\x00\x7f\x00\xe6\x00\x13\x01*\x01a\x01\xc9\x01 
\x02\x04\x02h\x01\xba\x00`\x00(\x00\x9a\xff\xbc\xfe\x11\xfe\x0e\xfe\xa9\xfen\xff\xe1\xff\xea\xff\xea\xffJ\x00\xea\x00.\x01\xd0\x008\x00\xda\xff\xb3\xffz\xff\x12\xff\xc2\xfe\xf9\xfe\xbe\xff\x82\x00\xbd\x00\x83\x00Y\x00\x8e\x00\xda\x00\xc9\x00N\x00\xd2\xff\xb4\xff\xe1\xff\x00\x00\xdd\xff\xa3\xff\xa4\xff\t\x00\xaf\x006\x01f\x01^\x01l\x01\xa8\x01\xc5\x01U\x01I\x00\x1a\xffa\xfec\xfe\xeb\xfeu\xff\xa1\xff\x8d\xff\x82\xff\x98\xff\xbf\xff\xe3\xff\r\x00M\x00\x87\x00~\x00\x16\x00\x8b\xffO\xff\xa5\xffK\x00\xb7\x00\xc8\x00\xcc\x00\t\x01p\x01\xb3\x01\x97\x010\x01\xe2\x00\xe1\x00\xd0\x00&\x00\xfe\xfe&\xfe1\xfe\xd5\xfep\xff\xb0\xff\xbf\xff\xe1\xff&\x00`\x00U\x00\xe1\xff9\xff\xca\xfe\xbb\xfe\xcd\xfe\xc9\xfe\xc8\xfe\xfb\xfeV\xff\x90\xffj\xff\x00\xff\xd3\xfe?\xff\xf1\xffC\x00\x08\x00\xb5\xff\xae\xff\xce\xff\xaf\xffJ\xff\x0f\xffm\xffI\x00\xff\x00\n\x01\x95\x00E\x00u\x00\xdb\x00\xee\x00e\x00\x8d\xff\x04\xff"\xff\x9c\xff\xcc\xff~\xff\x1e\xff\r\xffF\xff~\xff\x8a\xff\x81\xff\xa1\xff\x11\x00\xb5\x00<\x01<\x01\x97\x00\xb3\xff:\xffd\xff\xd1\xff(\x00k\x00\xb2\x00\xde\x00\xd5\x00\xad\x00~\x00W\x00F\x00<\x00\x02\x00\x8e\xff6\xffQ\xff\xb4\xff\xe6\xff\xcb\xff\xb0\xff\xde\xffK\x00\xa7\x00\xc0\x00\xa2\x00y\x00^\x00%\x00\xa1\xff\xf2\xfe\x84\xfe\xa8\xfe>\xff\xcf\xff\xf4\xff\xa1\xffK\xff]\xff\xb7\xff\xe9\xff\xbd\xffx\xff\x81\xff\xdc\xff\x17\x00\xf5\xff\xb5\xff\xbd\xff\x19\x00w\x00y\x00\x19\x00\xc6\xff\xfa\xff\xa3\x00%\x01\x12\x01\x9a\x00"\x00\xbf\xff`\xff\x16\xff\x1d\xff|\xff\x00\x00b\x00]\x00\xf3\xff|\xffY\xff\x9b\xff\x0c\x00r\x00\xa5\x00\x9b\x00m\x00J\x00B\x002\x00\x0e\x00\x17\x00n\x00\xca\x00\xe4\x00\xcc\x00\xbb\x00\xcc\x00\xfb\x00(\x01%\x01\xd2\x00L\x00\xdc\xff\xaf\xff\xaf\xff\xa8\xff\x91\xff\x8c\xff\xa8\xff\xdc\xff\x0e\x00\'\x00)\x00!\x00\t\x00\xd8\xff\x82\xff\x1d\xff\xfc\xfeQ\xff\xe1\xffS\x00p\x00A\x00\x0e\x00\x1f\x00]\x00y\x00^\x00I\x00S\x00T\x00#\x00\xdb\xff\xc0\xff\x06\x00\x8b\x00\xd0\x00\x83\x00\xe8\xff\xa3\xff\xe9\xffG\x00E\x00\xf1\xff\xbb\xff\xe1\xff-\x000\x00\xcd\xffY\xffE\xff\xab\xff\x1c\x00\x1f\x00\xab\xff1\xff\x1f\xffx\xff\xf6\xffN\x00[\x00\x12\x00\xac\xff\x86\xff\xbf\xff\x12\x00<\x00G\x00B\x00\x1c\x00\xe4\xff\xd3\xff\xf6\xff/\x00j\x00\x9b\x00\x91\x00>\x00\xf0\xff\xf8\xff&\x00\x19\x00\xd5\xff\xaa\xff\xc0\xff\xf0\xff\x00\x00\xf2\xff\xfd\xff-\x00K\x00\x16\x00\x8f\xff\xf4\xfe\x9b\xfe\xc2\xfeV\xff\x00\x00O\x00\x14\x00\x88\xff\x1f\xff\x18\xffE\xffe\xff\x83\xff\xd4\xffJ\x00\x91\x00\x85\x00e\x00\x80\x00\xbc\x00\xd5\x00\xae\x00d\x00E\x00j\x00\x8e\x00]\x00\xf1\xff\xd8\xffK\x00\xcf\x00\xba\x00\x0b\x00P\xff\x19\xffw\xff\xf4\xff\t\x00\xaf\xffb\xff\x7f\xff\xdc\xff\x15\x00\xfc\xff\xc4\xff\xb9\xff\xf1\xff:\x00V\x00)\x00\xe9\xff\xed\xff+\x00M\x00\x1c\x00\xc8\xff\xb0\xff\xe6\xff7\x00l\x00q\x00L\x00\x07\x00\xd1\xff\xd6\xff\x03\x00\x1f\x00\x1e\x00\x19\x00\x13\x00\xf0\xff\xbc\xff\x9f\xff\xae\xff\xd4\xff\xe2\xff\xba\xffz\xffS\xffj\xff\xb5\xff\n\x00=\x009\x00\x06\x00\xc9\xff\xa0\xff\x91\xff\x9b\xff\xd0\xffC\x00\xc0\x00\xeb\x00\xa1\x00%\x00\xdf\xff\xfc\xffR\x00\x91\x00\x94\x00v\x00T\x00\x1b\x00\xb2\xffO\xffJ\xff\xb3\xff5\x00p\x00^\x00C\x00Q\x00\x81\x00\x9a\x00t\x00 
\x00\xde\xff\xe7\xff)\x00Y\x00W\x00R\x00\x88\x00\xdc\x00\xe5\x00m\x00\xb3\xff=\xff\\\xff\xe9\xffc\x00c\x00\x06\x00\xbf\xff\xd4\xff\x10\x00+\x00\x14\x00\xe9\xff\xbf\xff\xb2\xff\xdb\xff*\x00r\x00\x9b\x00\xb1\x00\x9e\x00:\x00\x95\xff\x12\xff\xfe\xfeA\xff\x97\xff\xc6\xff\xca\xff\xc4\xff\xc6\xff\xb5\xff\x8a\xffh\xffw\xff\xb6\xff\xed\xff\xdf\xff\x89\xff0\xff\x14\xff[\xff\xf1\xffy\x00\x9b\x00_\x00+\x00Q\x00\xb3\x00\xf8\x00\xe0\x00\x86\x001\x00\x08\x00\xea\xff\xbd\xff\x9b\xff\xae\xff\xfb\xffE\x00Y\x00?\x00\x0c\x00\xd6\xff\xb4\xff\xb7\xff\xc8\xff\xc6\xff\xb1\xff\x94\xffu\xffh\xff\x83\xff\xd1\xffD\x00\x9d\x00\x99\x00G\x00\x0c\x001\x00\x93\x00\xca\x00\x92\x00\x10\x00\xb9\xff\xd0\xff&\x00b\x00b\x00J\x009\x002\x00\x14\x00\xd4\xff\x9f\xff\xa6\xff\xf3\xffY\x00y\x00\x17\x00y\xff%\xff8\xfft\xff\x9b\xff\xa7\xff\xb6\xff\xd5\xff\xeb\xff\xe3\xff\xc3\xff\xb3\xff\xd5\xff\x0c\x00\xec\xffZ\xff\xd3\xfe\xc2\xfe\x15\xff}\xff\xbb\xff\xcc\xff\xca\xff\xd3\xff\xf0\xff\x1a\x00>\x00i\x00\xa0\x00\xbc\x00\x91\x00\x1d\x00\x86\xff\t\xff\xf3\xfeO\xff\xce\xff+\x00\\\x00\x80\x00\xab\x00\xd5\x00\xdc\x00\x9f\x00:\x00\xf2\xff\xee\xff\xfc\xff\xda\xff\xac\xff\xba\xff\x16\x00\x8b\x00\xcc\x00\xba\x00}\x00Q\x00<\x003\x00 \x00\xf9\xff\xba\xfft\xffJ\xffX\xff\x91\xff\xca\xff\xed\xff\x14\x00V\x00\x9f\x00\xbe\x00\xa7\x00\x8b\x00\x8d\x00\x93\x00j\x00\x1c\x00\xf0\xff\xf6\xff\xfc\xff\xdd\xff\xba\xff\xca\xff\t\x002\x00\r\x00\xab\xff]\xffj\xff\xcd\xff\x1e\x00\xfc\xff\x88\xff0\xffB\xff\x9f\xff\xec\xff\x03\x00\t\x00-\x00p\x00\x9b\x00\x8c\x00m\x00{\x00\xb7\x00\xd8\x00\xa5\x00\x1d\x00\x86\xff3\xff6\xffd\xff\x8e\xff\xb9\xff\xfc\xffK\x00y\x00x\x00`\x00I\x006\x00.\x00/\x00\x11\x00\xcc\xff\x97\xff\xc3\xffM\x00\xd3\x00\xfd\x00\xdb\x00\xbf\x00\xdc\x00\x06\x01\xf0\x00\x91\x009\x00 \x00\x1b\x00\xf4\xff\xc5\xff\xcd\xff\x07\x001\x00&\x00\x15\x001\x00`\x00o\x00U\x00*\x00\x04\x00\xf6\xff\xf0\xff\xd0\xff\x8d\xffN\xff<\xffb\xff\xc5\xffL\x00\xb0\x00\xb0\x00\\\x00 \x00J\x00\xa0\x00\x92\x00\xfe\xfff\xffN\xff\xa0\xff\xf4\xff\x0e\x00\x11\x00*\x00?\x00)\x00\xeb\xff\xa3\xffz\xff\x9a\xff\xfc\xffI\x001\x00\xb3\xff(\xff\xef\xfe\x19\xffT\xffq\xff\xa0\xff\x19\x00\xab\x00\xe8\x00\xaa\x009\x00\xe9\xff\xd5\xff\xd2\xff\xb8\xff\x86\xffU\xffF\xff^\xff\x8a\xff\xb7\xff\xd2\xff\xd8\xff\xcf\xff\xba\xff\xa5\xff\x97\xff\x8c\xff\x92\xff\xa3\xff\xa3\xff\x8e\xff\x80\xff\x89\xff\x99\xff\xa2\xff\xaf\xff\xd8\xff\x18\x00n\x00\xd6\x00\x1e\x01\x0b\x01\xa5\x004\x00\xda\xffk\xff\xe8\xfe\x9d\xfe\xd6\xfeh\xff\xe6\xff\x14\x00\x08\x00\xe8\xff\xcf\xff\xc5\xff\xc4\xff\xb2\xffx\xff/\xff\x0e\xff:\xff\x82\xff\xac\xff\xb8\xff\xda\xff8\x00\xaa\x00\xde\x00\xb4\x00v\x00\x87\x00\xe4\x00#\x01\xee\x00O\x00\xad\xffj\xff\x8c\xff\xbc\xff\xc2\xff\xc1\xff\xf1\xff;\x00S\x000\x00\x03\x00\x04\x00\'\x00=\x003\x00\x03\x00\xb3\xffm\xffc\xff\xb0\xff3\x00\xa8\x00\xeb\x00\x03\x01\xff\x00\xd8\x00\x96\x00Z\x00A\x00@\x00.\x00\xfa\xff\xc1\xff\x9e\xff\x86\xffg\xffU\xffs\xff\xcd\xff7\x00\x80\x00\x9b\x00\x92\x00w\x00P\x00 
\x00\xe5\xff\xa6\xff\x87\xff\x9b\xff\xd4\xff\xf8\xff\xf2\xff\x03\x00b\x00\xf6\x00c\x01f\x01\n\x01\x91\x00-\x00\xef\xff\xd0\xff\xb9\xff\xa7\xff\xa8\xff\xca\xff\r\x00d\x00\x9e\x00\x84\x00\x18\x00\xbc\xff\xc2\xff\x1b\x00j\x00_\x00\x0e\x00\xbd\xff\x94\xff\x8a\xff\x8a\xff\x9d\xff\xe1\xffY\x00\xcd\x00\x05\x01\n\x01\x0c\x01\x17\x01\xff\x00\xae\x00E\x00\xf2\xff\xc7\xff\xaf\xff\x99\xff\x8d\xff\xb2\xff\x0e\x00g\x00}\x00D\x00\xf4\xff\xc9\xff\xd1\xff\xf0\xff\xfa\xff\xdc\xff\xa0\xffk\xffk\xff\x9d\xff\xcd\xff\xe3\xff\x05\x00N\x00\x9d\x00\xcb\x00\xd1\x00\xaa\x00P\x00\xd7\xffp\xffA\xffC\xff^\xffo\xffT\xff\x1a\xff\xff\xfe/\xff\x9a\xff\xff\xff$\x00\x0e\x00\xef\xff\xdc\xff\xc7\xff\x9c\xffk\xff\\\xff\x85\xff\xbd\xff\xdc\xff\xda\xff\xd4\xff\xe7\xff\x17\x00U\x00\x86\x00\x95\x00\x80\x00_\x003\x00\xe7\xff\x88\xffB\xff#\xff\x1b\xff7\xff\x93\xff\x06\x00F\x00@\x00\x1a\x00\xee\xff\xc9\xff\xb3\xff\xb3\xff\xc7\xff\xcd\xff\xad\xffh\xff4\xffK\xff\xbd\xffS\x00\xba\x00\xc6\x00\x98\x00x\x00{\x00s\x00A\x00\x07\x00\x01\x007\x00_\x009\x00\xbe\xff/\xff\xf6\xfe?\xff\xca\xff,\x00G\x00;\x005\x006\x00)\x00\xf2\xff\x8a\xff#\xff\n\xffR\xff\xc0\xff\x0e\x006\x00d\x00\xa4\x00\xc3\x00\x97\x00U\x00<\x00<\x00(\x00\xfe\xff\xe8\xff\xfe\xff\x0e\x00\xca\xffB\xff\xe9\xfe\x14\xff\x85\xff\xd6\xff\xf7\xff\x18\x00W\x00\x89\x00v\x00&\x00\xd5\xff\xa7\xff\x94\xff|\xffg\xff\x81\xff\xdb\xffO\x00\xb0\x00\xfb\x00/\x01-\x01\xf1\x00\xaa\x00\x80\x00T\x00\xfb\xff\x88\xff:\xff;\xff\x8a\xff\xfd\xffd\x00\x99\x00\x8e\x00]\x00!\x00\xdf\xff\xa6\xff\x9a\xff\xc6\xff\x11\x00J\x00J\x00\x05\x00\xb4\xff\xa7\xff\xec\xffP\x00\x98\x00\xb5\x00\xc5\x00\xd6\x00\xce\x00\x90\x00A\x00\x1c\x00$\x00+\x00\x05\x00\xb0\xffR\xff-\xffa\xff\xd4\xff?\x00j\x00\\\x00D\x00>\x00<\x00%\x00\xe6\xff\x99\xffa\xffP\xffj\xff\x9f\xff\xe1\xff\x1d\x00C\x00M\x00J\x00F\x00H\x00G\x002\x00\x11\x00\xfa\xff\xea\xff\xbb\xffS\xff\xe2\xfe\xc2\xfe$\xff\xd8\xff{\x00\xc6\x00\xc1\x00\xa6\x00\x97\x00\x88\x00T\x00\xeb\xffi\xff\t\xff\xf8\xfe4\xff\x99\xff\x06\x00d\x00\xac\x00\xe2\x00\n\x01\xfe\x00\x93\x00\x00\x00\xaf\xff\xd1\xff 
\x001\x00\xea\xff\x90\xffz\xff\xc7\xffB\x00\x97\x00\x98\x00U\x00\x0b\x00\xf2\xff\r\x005\x00<\x00\x14\x00\xdf\xff\xbd\xff\xa6\xff\x8f\xff\x83\xff\x9d\xff\xea\xffY\x00\xc7\x00\x06\x01\x02\x01\xc9\x00~\x00*\x00\xe4\xff\xbb\xff\xa4\xff\x83\xffU\xff<\xffT\xff\x8c\xff\xbc\xff\xc6\xff\xb4\xff\xac\xff\xc9\xff\x12\x00a\x00\x82\x00H\x00\xdd\xff\x7f\xff:\xff\t\xff\x02\xffT\xff\xf4\xff\x9d\x00\x02\x01\r\x01\xdb\x00\x9c\x00e\x00*\x00\xdf\xff\x98\xffo\xff[\xffA\xff.\xffW\xff\xc1\xff-\x00`\x00b\x00b\x00b\x00I\x00\x19\x00\xfc\xff\x07\x00\x17\x00\xfb\xff\xb0\xffq\xffr\xff\xa0\xff\xbf\xff\xc5\xff\xed\xffS\x00\xc2\x00\xed\x00\xc6\x00\x89\x00`\x00/\x00\xd2\xff_\xff\x17\xff!\xffg\xff\xcc\xffE\x00\xb4\x00\xed\x00\xd3\x00\x80\x00)\x00\xf2\xff\xd6\xff\xb1\xff|\xffH\xff&\xff\x17\xff*\xffY\xff\x90\xff\xc1\xff\xed\xff#\x00S\x00m\x00o\x00W\x00\x1d\x00\xd5\xff\xa1\xff~\xff\\\xff2\xff(\xffo\xff\xf7\xff\x81\x00\xc2\x00\xb2\x00}\x00_\x00[\x00I\x00\x01\x00\x91\xff2\xff\t\xff\x17\xffX\xff\xc8\xff@\x00\x8f\x00\xa1\x00\x8e\x00l\x000\x00\xdb\xff\x9d\xff\xb5\xff\x1c\x00y\x00m\x00\xfd\xff\x83\xffT\xff\x87\xff\xe0\xff!\x00H\x00z\x00\xc0\x00\xf8\x00\xfc\x00\xc2\x00c\x00\xfd\xff\xa8\xffj\xffC\xff9\xff[\xff\xb1\xff\'\x00\x96\x00\xe2\x00\xf7\x00\xdd\x00\xac\x00p\x00.\x00\xeb\xff\xa6\xffc\xff2\xff2\xffc\xff\xb7\xff\x11\x00`\x00\x8e\x00\x84\x00W\x00/\x00\x1c\x00\x0c\x00\xe6\xff\xab\xffp\xffG\xff?\xff^\xff\x9f\xff\xf4\xffV\x00\xc1\x00\x1b\x017\x01\x07\x01\xb3\x00n\x00N\x00G\x002\x00\xf4\xff\x9c\xffh\xff\x85\xff\xe2\xffC\x00z\x00x\x00U\x00/\x00\x18\x00\r\x00\n\x00\x11\x00\x15\x00\x00\x00\xbb\xffk\xffW\xff\x96\xff\x02\x00Q\x00f\x00`\x00_\x00c\x00`\x00_\x00f\x00t\x00\x87\x00~\x00$\x00z\xff\xee\xfe\xf0\xfe\x80\xff3\x00\xa9\x00\xd7\x00\xe6\x00\xe0\x00\xae\x00`\x00\x10\x00\xba\xffZ\xff\n\xff\xe5\xfe\xe7\xfe\xfd\xfe\'\xffg\xff\xbc\xff\x07\x00"\x00\x11\x00\x01\x00\n\x00\x12\x00\xf1\xff\xac\xff_\xff3\xff9\xffl\xff\xbb\xff$\x00\xa2\x00\t\x01!\x01\xdd\x00{\x00F\x00N\x00T\x00"\x00\xc0\xffj\xffR\xffy\xff\xb8\xff\xfb\xffD\x00\x7f\x00\x8b\x00q\x00F\x00\x13\x00\xdc\xff\xcc\xff\x08\x00h\x00\x86\x00/\x00\xad\xff\x84\xff\xce\xff-\x00Q\x00?\x000\x00=\x00_\x00z\x00}\x00m\x00c\x00c\x00K\x00\xfd\xff\x9a\xffs\xff\xaa\xff\x14\x00[\x00]\x004\x00\r\x00\x07\x00\x18\x00#\x00\x16\x00\xf8\xff\xc8\xff}\xff\x16\xff\xb8\xfe\x95\xfe\xd2\xfe\\\xff\xeb\xff6\x00-\x00\x02\x00\xe3\xff\xd4\xff\xc5\xff\xb0\xff\x94\xffh\xff2\xff\x12\xff0\xff\x83\xff\xe9\xff6\x00V\x00L\x00+\x00\x17\x00.\x00]\x00h\x00.\x00\xd4\xff\x8d\xfft\xff\x89\xff\xcb\xff4\x00\xa0\x00\xd0\x00\xab\x00\\\x00\x18\x00\xef\xff\xd4\xff\xc5\xff\xd1\xff\xea\xff\xe5\xff\xba\xff\xa1\xff\xbc\xff\xf5\xff$\x00F\x00e\x00p\x00P\x00!\x00\x1d\x00]\x00\xae\x00\xd0\x00\xa7\x00[\x00\x1a\x00\x06\x00\x19\x00.\x00&\x00\x01\x00\xd6\xff\xca\xff\xf6\xff5\x00H\x00\'\x00\x01\x00\xf6\xff\xed\xff\xb3\xffH\xff\xf1\xfe\xf5\xfeL\xff\xb1\xff\xd5\xff\xac\xffx\xff\x86\xff\xd4\xff\x1f\x005\x00\x17\x00\xcd\xffa\xff\xf3\xfe\xc2\xfe\xf6\xfer\xff\xf3\xff\\\x00\xa9\x00\xcc\x00\xbd\x00\x95\x00~\x00q\x00U\x00+\x00\x0f\x00\xfb\xff\xd6\xff\xa8\xff\xa4\xff\xeb\xffT\x00\x89\x00X\x00\xef\xff\x9b\xff\x80\xff\x9d\xff\xd6\xff\x07\x00\x10\x00\xe4\xff\xb0\xff\xb2\xff\xf5\xffO\x00\x92\x00\xb5\x00\xbc\x00\xa9\x00\x8d\x00{\x00|\x00\x87\x00\xa0\x00\xb5\x00\xa5\x00Z\x00\xfd\xff\xde\xff\x16\x00f\x00x\x00<\x00\xee\xff\xce\xff\xda\xff\xf3\xff\x11\x00;\x00]\x00\\\x00*\x00\xdf\xff\xa6\xff\x92\xff\xa1\xff\xbe\xff\xda\xff\xee\xff\xf6\xff\xf2\xff\xec\xff\xec\xff\xf3\xff\x01\x00\x12\x00\x0f\x00\xdb\xff\x86\xff\\\xff\x88\xff\xeb\xffE\x00c\x00?\x00\x08\x00\x00\x008\x00u\x00x\x00
H\x00\x18\x00\xfe\xff\xdb\xff\xa7\xff\x91\xff\xc4\xff\'\x00x\x00\x88\x00W\x00\x12\x00\xdd\xff\xcd\xff\xe3\xff\x0f\x000\x00-\x00\x07\x00\xd8\xff\xbd\xff\xbf\xff\xd9\xff\x0b\x00J\x00i\x00=\x00\xe7\xff\xb8\xff\xda\xff%\x00]\x00b\x004\x00\xf2\xff\xca\xff\xda\xff\x10\x00N\x00n\x00[\x00,\x00\x04\x00\xf6\xff\xfb\xff\xfd\xff\xf3\xff\xea\xff\xe3\xff\xd2\xff\xa9\xff\x84\xff\x8d\xff\xc8\xff\x0c\x00\'\x00\r\x00\xd3\xff\x92\xffk\xffu\xff\xb8\xff\x12\x00>\x00\x18\x00\xba\xffm\xffn\xff\xb6\xff\x04\x00)\x00,\x00"\x00\x13\x00\x01\x00\xf1\xff\xe8\xff\xf4\xff\x15\x00A\x00T\x006\x00\xfc\xff\xe1\xff\xfc\xff\'\x00 \x00\xd7\xff\x83\xffc\xff\x85\xff\xb4\xff\xc7\xff\xc1\xff\xc1\xff\xc7\xff\xc3\xff\xa8\xff\x84\xff}\xff\xb6\xff\x1c\x00n\x00t\x00<\x00\x08\x00\x05\x00,\x00_\x00\x89\x00\x9a\x00\x7f\x005\x00\xde\xff\xac\xff\xba\xff\xf1\xff#\x00$\x00\xe6\xff\x91\xff^\xffg\xff\x95\xff\xbb\xff\xd8\xff\xf4\xff\x0b\x00\x0b\x00\xf9\xff\xea\xff\xea\xff\xf7\xff\x13\x009\x00L\x00.\x00\xef\xff\xc8\xff\xe0\xff\x19\x001\x00\xfe\xff\xa7\xff{\xff\xa9\xff\x0b\x00Y\x00o\x00_\x00;\x00\t\x00\xd9\xff\xd3\xff\x0e\x00k\x00\xb3\x00\xc0\x00\x90\x00P\x005\x00Q\x00v\x00t\x00A\x00\n\x00\xe8\xff\xc8\xff\x9e\xff\x80\xff\x8e\xff\xc3\xff\r\x00K\x00Y\x006\x00\x0b\x00\x10\x00B\x00b\x00<\x00\xec\xff\xbc\xff\xd4\xff\x17\x00I\x00U\x00X\x00j\x00o\x00H\x00\xf6\xff\xb2\xff\xa7\xff\xd0\xff\x07\x00\x18\x00\xec\xff\xaf\xff\xa6\xff\xe6\xffA\x00\x80\x00\x8d\x00y\x00K\x00\x01\x00\xaa\xffn\xffk\xff\xa2\xff\xf0\xff\x13\x00\xe2\xff\x81\xffK\xffc\xff\xa0\xff\xc7\xff\xba\xff\x89\xffY\xffS\xffq\xff\xa2\xff\xdb\xff%\x00t\x00\x9b\x00v\x00\x1f\x00\xde\xff\xe4\xff*\x00l\x00g\x00"\x00\xef\xff\x08\x00W\x00\x8d\x00}\x00<\x00\xfd\xff\xce\xff\x9c\xff]\xff<\xffq\xff\xed\xff`\x00\x8b\x00n\x00>\x00)\x001\x00:\x00,\x00\x0c\x00\xed\xff\xe0\xff\xe2\xff\xf1\xff\x13\x00J\x00\x89\x00\xae\x00\xa0\x00d\x00,\x00\x1d\x00$\x00\x11\x00\xcc\xffr\xffI\xff\x7f\xff\xf3\xffU\x00r\x00c\x00\\\x00c\x00J\x00\x01\x00\xad\xff\x87\xff\xa1\xff\xed\xff8\x00F\x00\x0b\x00\xc4\xff\xb7\xff\xf9\xffL\x00\\\x00\x17\x00\xb9\xff{\xff`\xff[\xffs\xff\xbe\xff"\x00[\x00H\x00\x04\x00\xc7\xff\xa7\xff\x9c\xff\xaa\xff\xd5\xff\x0f\x00<\x00I\x00A\x004\x00#\x00\x12\x00\x0b\x00\x03\x00\xe4\xff\xac\xffz\xffs\xff\xa8\xff\x00\x00O\x00o\x00c\x00O\x00M\x00O\x00E\x002\x00#\x00\x1b\x00\x14\x00\x08\x00\x05\x00%\x00b\x00\x98\x00\xa2\x00\x82\x00Y\x00A\x00\x1c\x00\xd7\xff\x92\xffl\xffd\xffy\xff\xa1\xff\xd0\xff\xf9\xff\x15\x007\x00e\x00\x82\x00g\x00&\x00\xf3\xff\xed\xff\x02\x00\x0f\x00\xfa\xff\xcc\xff\xa8\xff\xb7\xff\xfa\xff9\x00?\x00\n\x00\xc6\xff\x95\xff\x80\xffs\xffj\xffm\xff\x99\xff\xf0\xffF\x00e\x00D\x00\x05\x00\xe0\xff\xe8\xff\r\x00-\x00>\x00B\x004\x00\x13\x00\xf3\xff\xe8\xff\xf4\xff\xf8\xff\xd4\xff\x91\xffQ\xff1\xff6\xffZ\xff\x96\xff\xe3\xff\'\x00<\x00"\x00\xf9\xff\xdb\xff\xd5\xff\xe6\xff\x04\x00\x12\x00\x00\x00\xec\xff\xfe\xff/\x00a\x00s\x00c\x00?\x00\x1e\x00\n\x00\xff\xff\xf7\xff\xea\xff\xdd\xff\xd9\xff\xe6\xff\xee\xff\xe6\xff\xd9\xff\xe9\xff!\x00c\x00\x87\x00\x85\x00i\x008\x00\xfb\xff\xc7\xff\xb6\xff\xc0\xff\xc5\xff\xb5\xff\xa8\xff\xc3\xff\xf8\xff\x1a\x00\x17\x00\x04\x00\xf3\xff\xdf\xff\xbd\xff\xa1\xff\xaf\xff\xe0\xff\r\x00 
\x00%\x007\x00]\x00}\x00t\x00B\x00\t\x00\xf8\xff\x12\x008\x00N\x00N\x00A\x00.\x00\x15\x00\xf4\xff\xcb\xff\x9e\xff{\xff\x82\xff\xc0\xff\x1d\x00d\x00n\x00D\x00\x18\x00\x13\x00$\x00.\x00\'\x00\x0e\x00\xf1\xff\xdf\xff\xde\xff\xed\xff\n\x00&\x004\x002\x00#\x00\x07\x00\xe6\xff\xcf\xff\xc7\xff\xc6\xff\xbf\xff\xaf\xff\xa9\xff\xb4\xff\xc5\xff\xd8\xff\xf2\xff\x1b\x00G\x00b\x00i\x00c\x00Y\x00O\x00C\x00>\x003\x00\x10\x00\xde\xff\xc1\xff\xcc\xff\xf0\xff\x15\x002\x00<\x001\x00\x03\x00\xaf\xff`\xffN\xff\x84\xff\xd4\xff\x0f\x00$\x00\x1d\x00\x0f\x00\x08\x00\x05\x00\xfe\xff\xfe\xff\x17\x00E\x00q\x00~\x00j\x00C\x00\x12\x00\xe0\xff\xb8\xff\xa9\xff\xbb\xff\xe4\xff\x04\x00\x03\x00\xf2\xff\xf0\xff\x00\x00\x1a\x006\x00T\x00f\x00\\\x00;\x00\x18\x00\xfb\xff\xe8\xff\xe0\xff\xed\xff\x17\x00N\x00j\x00P\x00\x11\x00\xd7\xff\xca\xff\xe6\xff\xfd\xff\xe4\xff\xa9\xff~\xff\x82\xff\xa3\xff\xc0\xff\xc9\xff\xcf\xff\xde\xff\xf7\xff\x19\x004\x006\x00\x1d\x00\xff\xff\xf9\xff\xfd\xff\xe9\xff\xb5\xff\x89\xff\x91\xff\xc5\xff\xfd\xff\r\x00\xfd\xff\xe9\xff\xe1\xff\xda\xff\xcf\xff\xd3\xff\xf3\xff&\x00F\x00;\x00\x11\x00\xeb\xff\xdf\xff\xe8\xff\xf3\xff\xff\xff\x1a\x00>\x00L\x006\x00\x0b\x00\xde\xff\xb6\xff\x9d\xff\x99\xff\xa3\xff\xae\xff\xb4\xff\xaf\xff\xab\xff\xb9\xff\xe2\xff\x1d\x00V\x00{\x00\x8f\x00\x95\x00\x91\x00x\x00A\x00\n\x00\xf7\xff\x12\x00;\x00X\x00Z\x00C\x00 \x00\x06\x00\xfc\xff\x06\x00\x1a\x00#\x00\x10\x00\xe6\xff\xc2\xff\xb7\xff\xc1\xff\xcd\xff\xd7\xff\xef\xff\x1a\x00H\x00i\x00v\x00p\x00^\x00F\x006\x005\x00,\x00\x0b\x00\xe2\xff\xca\xff\xcf\xff\xda\xff\xce\xff\xae\xff\x9b\xff\xa1\xff\xae\xff\xae\xff\xa0\xff\xa2\xff\xc0\xff\xec\xff\xf4\xff\xc6\xff\x8f\xff\x7f\xff\x9d\xff\xce\xff\xf1\xff\x05\x00\x11\x00\x17\x00\x1c\x00.\x00H\x00S\x00?\x00\x1b\x00\xfe\xff\xeb\xff\xd1\xff\xaa\xff\x8b\xff\x8c\xff\xba\xff\xff\xff>\x00]\x00X\x00>\x00#\x00\x06\x00\xdf\xff\xbb\xff\xb5\xff\xd5\xff\x06\x00)\x00+\x00\x18\x00\x0e\x00\'\x00G\x00G\x00\x1d\x00\xf2\xff\xea\xff\xfe\xff\n\x00\x01\x00\xf6\xff\xfc\xff\x15\x000\x007\x00)\x00\x11\x00\x00\x00\x06\x00#\x00P\x00\x81\x00\x9d\x00\x8d\x00P\x00\x07\x00\xd3\xff\xba\xff\xb6\xff\xc3\xff\xe2\xff\t\x00\'\x00)\x00\r\x00\xdd\xff\xb6\xff\xb4\xff\xd4\xff\xf0\xff\xe5\xff\xbd\xff\x9a\xff\x90\xff\x9b\xff\xaf\xff\xbc\xff\xc1\xff\xc5\xff\xca\xff\xd6\xff\xe6\xff\xef\xff\xea\xff\xde\xff\xd7\xff\xd3\xff\xc4\xff\xa6\xff\x8c\xff\x90\xff\xb6\xff\xf7\xffA\x00u\x00\x85\x00\x84\x00\x8c\x00\x9a\x00\x97\x00n\x000\x00\xff\xff\xf4\xff\x03\x00\t\x00\x01\x00\xff\xff\x1b\x00C\x00L\x00&\x00\xed\xff\xd0\xff\xda\xff\xef\xff\xf3\xff\xe9\xff\xe2\xff\xeb\xff\x05\x00"\x001\x000\x001\x00H\x00l\x00~\x00u\x00o\x00w\x00{\x00W\x00\r\x00\xc3\xff\xa5\xff\xbf\xff\xf3\xff\x16\x00\x1d\x00\x15\x00\t\x00\x03\x00\t\x00\x1e\x002\x00=\x00?\x004\x00\x0e\x00\xd4\xff\x9b\xff\x82\xff\x96\xff\xc9\xff\xff\xff \x00&\x00\x1b\x00\x05\x00\xed\xff\xd0\xff\xb3\xff\x9f\xff\x96\xff\x98\xff\xa3\xff\xb8\xff\xda\xff\x01\x00\x1d\x00 
\x00\x11\x00\x0e\x00(\x00R\x00r\x00o\x00M\x00$\x00\x0c\x00\xf6\xff\xce\xff\x9c\xff\x8a\xff\xb5\xff\t\x00B\x004\x00\xfb\xff\xdf\xff\xf9\xff&\x000\x00\n\x00\xda\xff\xbd\xff\xbe\xff\xd1\xff\xf2\xff\x1a\x00@\x00]\x00j\x00b\x00E\x00(\x00\x1d\x00\x1e\x00\x14\x00\xf7\xff\xd5\xff\xc7\xff\xd4\xff\xe6\xff\xe9\xff\xdf\xff\xda\xff\xeb\xff\x06\x00\x0b\x00\xed\xff\xd4\xff\xe6\xff\x1a\x00<\x00\x1e\x00\xd2\xff\x95\xff\x90\xff\xb2\xff\xd9\xff\xee\xff\xf5\xff\xfa\xff\x04\x00\x11\x00\x1e\x00\x1b\x00\x04\x00\xe5\xff\xd7\xff\xde\xff\xe2\xff\xd1\xff\xc3\xff\xd1\xff\xfb\xff#\x00.\x00#\x00\x1c\x00*\x00>\x00@\x00%\x00\xfc\xff\xd4\xff\xb6\xff\xa6\xff\xa8\xff\xba\xff\xd9\xff\xf8\xff\x08\x00\xfa\xff\xd2\xff\xb7\xff\xbf\xff\xe5\xff\x08\x00\x14\x00\x0b\x00\xf9\xff\xdb\xff\xb9\xff\xa8\xff\xbd\xff\xfa\xffA\x00k\x00e\x00D\x00/\x007\x00M\x00N\x00)\x00\xf2\xff\xcb\xff\xc7\xff\xd6\xff\xd7\xff\xc5\xff\xc0\xff\xe0\xff\x10\x00\x1e\x00\xfa\xff\xd1\xff\xd4\xff\x04\x00.\x00\x1d\x00\xe1\xff\xb1\xff\xb3\xff\xd4\xff\xec\xff\xeb\xff\xed\xff\x05\x001\x00M\x00A\x00\x17\x00\xf0\xff\xea\xff\n\x000\x005\x00\x11\x00\xec\xff\xe8\xff\x08\x00/\x00<\x003\x001\x00P\x00~\x00\x93\x00\x81\x00\\\x00>\x00.\x00\x1b\x00\xf2\xff\xc6\xff\xb1\xff\xc2\xff\xe4\xff\xfa\xff\xfb\xff\xf7\xff\xff\xff\x03\x00\xf1\xff\xd9\xff\xd8\xff\xe7\xff\xf3\xff\xf0\xff\xf0\xff\x04\x00$\x00;\x009\x00!\x00\x0c\x00\x11\x009\x00h\x00}\x00c\x000\x00\x03\x00\xe9\xff\xd6\xff\xb5\xff\x8f\xff\x8b\xff\xb6\xff\xf2\xff\x08\x00\xf2\xff\xd5\xff\xdc\xff\x00\x00\x17\x00\x01\x00\xc8\xff\x96\xff\x8f\xff\xb3\xff\xda\xff\xe4\xff\xdc\xff\xe6\xff\x08\x00\'\x00#\x00\x0b\x00\xff\xff\x0f\x000\x00G\x00@\x00$\x00\n\x00\x0e\x00+\x00C\x00C\x007\x002\x006\x005\x00%\x00\x1a\x00#\x00;\x00E\x003\x00\x08\x00\xd7\xff\xad\xff\x91\xff\x96\xff\xb5\xff\xe0\xff\x05\x00-\x00M\x00M\x00+\x00\x08\x00\x04\x00\x1c\x00+\x00\x17\x00\xee\xff\xd2\xff\xde\xff\x04\x00#\x001\x00?\x00O\x00K\x00!\x00\xe6\xff\xbd\xff\xb3\xff\xba\xff\xc7\xff\xd3\xff\xda\xff\xdb\xff\xcd\xff\xb3\xff\x9f\xff\xa0\xff\xb7\xff\xd6\xff\xef\xff\xf7\xff\xf2\xff\xe9\xff\xe8\xff\xec\xff\xde\xff\xbc\xff\x93\xff\x81\xff\x9a\xff\xd8\xff\x11\x00\x1d\x00\x01\x00\xef\xff\xfe\xff\x1a\x00%\x00\x15\x00\xf7\xff\xe6\xff\xf2\xff\x16\x001\x00&\x00\x00\x00\xdd\xff\xd8\xff\xf2\xff\x18\x00J\x00w\x00\x8a\x00r\x00=\x00\r\x00\xee\xff\xd5\xff\xbd\xff\xaf\xff\xbf\xff\xe8\xff\x17\x004\x00>\x004\x00\x19\x00\x02\x00\x00\x00\x1b\x002\x00"\x00\xed\xff\xc0\xff\xc2\xff\xee\xff!\x00D\x00X\x00a\x00W\x008\x00\x0f\x00\xf7\xff\xf3\xff\xf6\xff\xf5\xff\xf9\xff\x02\x00\x00\x00\xec\xff\xd0\xff\xbf\xff\xbb\xff\xba\xff\xb3\xff\xa6\xff\x9a\xff\x9e\xff\xb9\xff\xdb\xff\xf1\xff\xf8\xff\xf5\xff\xe4\xff\xc9\xff\xb1\xff\xab\xff\xb0\xff\xb6\xff\xcb\xff\xfb\xff6\x00\\\x00b\x00_\x00d\x00i\x00Z\x005\x00\r\x00\xf4\xff\xe7\xff\xe9\xff\xfd\xff\x16\x00%\x00\'\x00&\x00+\x00(\x00\x14\x00\xf9\xff\xe3\xff\xd9\xff\xd5\xff\xd0\xff\xca\xff\xc7\xff\xce\xff\xe2\xff\t\x00<\x00f\x00y\x00t\x00e\x00S\x007\x00\x10\x00\xec\xff\xe3\xff\xfd\xff,\x00X\x00k\x00_\x00<\x00\n\x00\xe3\xff\xd8\xff\xe6\xff\xf5\xff\xfb\xff\xfa\xff\xf7\xff\xf6\xff\xee\xff\xdb\xff\xca\xff\xcc\xff\xe0\xff\xf2\xff\xf3\xff\xe8\xff\xdd\xff\xda\xff\xdf\xff\xe9\xff\xf4\xff\xfd\xff\xfc\xff\xee\xff\xdb\xff\xd1\xff\xd2\xff\xd3\xff\xce\xff\xc1\xff\xb9\xff\xc6\xff\x01\x00\\\x00\x8c\x00u\x00=\x00\x0f\x00\xfb\xff\xf5\xff\xed\xff\xde\xff\xd4\xff\xdd\xff\xf7\xff\x19\x00;\x00I\x00@\x00(\x00\x17\x00\x1c\x00"\x00\n\x00\xd3\xff\xa4\xff\xa0\xff\xc0\xff\xe9\xff\x0c\x00(\x00;\x00:\x00/\x00+\x000\x00&\x00\x06\x00\xe2\xff\xd8\xff\xe9\xff\xfb\xff\xfe\xff\xf6\xff\xf3\xff\xfb\xff\x06
\x00\x10\x00\x18\x00!\x00\'\x00\x18\x00\xf9\xff\xe0\xff\xd5\xff\xd7\xff\xdf\xff\xec\xff\x00\x00\x13\x00\x12\x00\xfa\xff\xe4\xff\xe9\xff\xfb\xff\xfd\xff\xe8\xff\xd9\xff\xe9\xff\t\x00\x0f\x00\xea\xff\xb7\xff\xa1\xff\xb6\xff\xe0\xff\xfb\xff\xfa\xff\xee\xff\xf1\xff\x10\x00=\x00[\x00O\x00(\x00\x07\x00\x00\x00\x05\x00\x01\x00\xf5\xff\xea\xff\xe7\xff\xec\xff\xf2\xff\xf8\xff\x03\x00\x11\x00\x15\x00\x07\x00\xf0\xff\xda\xff\xcc\xff\xc4\xff\xc0\xff\xc0\xff\xc8\xff\xe4\xff\x1b\x00R\x00e\x00L\x00)\x00\x1c\x005\x00Z\x00g\x00M\x00!\x00\xfb\xff\xf2\xff\x01\x00\x12\x00\x0b\x00\xee\xff\xd0\xff\xc9\xff\xd6\xff\xe2\xff\xe5\xff\xdb\xff\xce\xff\xca\xff\xdf\xff\xf7\xff\xef\xff\xc5\xff\xa1\xff\xa5\xff\xc9\xff\xed\xff\x06\x00\x1d\x001\x008\x00.\x00\x1e\x00\x18\x00\x12\x00\x00\x00\xe5\xff\xda\xff\xeb\xff\x0b\x00\x1f\x00\x16\x00\xf9\xff\xea\xff\x02\x00)\x00A\x00@\x003\x00&\x00\x1b\x00\x03\x00\xd9\xff\xb8\xff\xb7\xff\xd2\xff\xfb\xff\x1b\x00(\x00\x1f\x00\x0c\x00\xf8\xff\xec\xff\xe7\xff\xe3\xff\xe6\xff\xf1\xff\x03\x00\x05\x00\xf8\xff\xe8\xff\xf0\xff\x10\x000\x008\x00&\x00\x18\x00#\x00=\x00K\x00=\x00 \x00\x0b\x00\x04\x00\x04\x00\x03\x00\x00\x00\xf8\xff\xed\xff\xe5\xff\xe2\xff\xe2\xff\xe6\xff\xef\xff\x04\x00\x1a\x00.\x005\x00$\x00\xf3\xff\xb8\xff\x9b\xff\xab\xff\xd3\xff\xf5\xff\x06\x00\x0b\x00\x03\x00\xfe\xff\x02\x00\r\x00\x16\x00\x18\x00\x08\x00\xe0\xff\xb4\xff\xa5\xff\xb8\xff\xd1\xff\xda\xff\xe3\xff\t\x00G\x00v\x00|\x00`\x00;\x00\x1c\x00\xff\xff\xe3\xff\xd4\xff\xdd\xff\xff\xff"\x000\x00\x1f\x00\xff\xff\xec\xff\xeb\xff\xee\xff\xe8\xff\xdf\xff\xdb\xff\xdb\xff\xdd\xff\xdc\xff\xd1\xff\xc0\xff\xbe\xff\xe0\xff\x1d\x00J\x00J\x00,\x00\x10\x00\x15\x003\x00G\x00=\x00*\x00\x1f\x00\x1f\x00 \x00\x1e\x00\x14\x00\xf8\xff\xd1\xff\xad\xff\xa2\xff\xb7\xff\xdd\xff\xf7\xff\xfb\xff\xf9\xff\r\x00*\x00)\x00\xf7\xff\xb9\xff\xa5\xff\xc4\xff\xf3\xff\x13\x00\x1d\x00\x15\x00\x0c\x00\x14\x001\x00S\x00^\x00D\x00\x11\x00\xd7\xff\xab\xff\x9b\xff\xa5\xff\xbc\xff\xdb\xff\x03\x00,\x00D\x00J\x00=\x00\'\x00\x0f\x00\x04\x00\x02\x00\xf7\xff\xe7\xff\xe2\xff\xef\xff\xff\xff\xfc\xff\xef\xff\xf2\xff\t\x00\x1b\x00\x12\x00\xf9\xff\xe9\xff\xea\xff\xed\xff\xe7\xff\xe2\xff\xea\xff\xff\xff\x10\x00\x15\x00\x1d\x00.\x00A\x00D\x008\x003\x00=\x00@\x00/\x00\x14\x00\x05\x00\n\x00\x12\x00\x07\x00\xf1\xff\xd8\xff\xbe\xff\xa5\xff\x9e\xff\xb3\xff\xdc\xff\xfc\xff\x05\x00\x05\x00\x18\x003\x00/\x00\x04\x00\xd8\xff\xcf\xff\xe1\xff\xef\xff\xf2\xff\xf9\xff\x0f\x00(\x00;\x00C\x00?\x00/\x00\x10\x00\xe8\xff\xc8\xff\xbc\xff\xc5\xff\xd3\xff\xdc\xff\xe6\xff\xf7\xff\n\x00\x15\x00\x19\x00\x1a\x00\x16\x00\x10\x00\x0c\x00\x10\x00\x11\x00\x07\x00\xeb\xff\xcb\xff\xb5\xff\xb5\xff\xca\xff\xe6\xff\xf6\xff\xf7\xff\xf2\xff\xf1\xff\xf4\xff\xf8\xff\xfd\xff\xfc\xff\xf1\xff\xe3\xff\xd8\xff\xd9\xff\xe2\xff\xfb\xff\x1f\x00:\x00C\x00>\x005\x004\x007\x009\x002\x00&\x00\x1c\x00\x16\x00\x0b\x00\xe9\xff\xb5\xff\x8d\xff\x96\xff\xca\xff\x04\x00 \x00\x17\x00\x08\x00\x11\x000\x00;\x00\x1a\x00\xe4\xff\xc5\xff\xc8\xff\xdd\xff\xee\xff\xfd\xff\x15\x00,\x004\x002\x00.\x00/\x00-\x00 \x00\x06\x00\xec\xff\xd7\xff\xca\xff\xbf\xff\xb8\xff\xc0\xff\xdd\xff\x05\x00(\x00:\x00?\x008\x00-\x00"\x00\x1b\x00\x10\x00\x01\x00\xeb\xff\xd5\xff\xcd\xff\xd2\xff\xd4\xff\xd1\xff\xd0\xff\xdb\xff\xef\xff\xfa\xff\xf9\xff\xf0\xff\xec\xff\xec\xff\xe6\xff\xd5\xff\xbe\xff\xb4\xff\xc9\xff\xf4\xff\x1f\x005\x009\x00@\x00R\x00d\x00j\x00\\\x00D\x00,\x00\x12\x00\xf7\xff\xda\xff\xbe\xff\xa3\xff\x9a\xff\xac\xff\xd6\xff\x00\x00\r\x00\xff\xff\xf6\xff\x06\x00\x1e\x00 
\x00\x08\x00\xf2\xff\xf0\xff\xf5\xff\xf1\xff\xed\xff\x03\x00.\x00R\x00[\x00S\x00Q\x00S\x00K\x000\x00\n\x00\xe9\xff\xca\xff\xae\xff\x9a\xff\x97\xff\xb5\xff\xee\xff+\x00H\x00<\x00\x1e\x00\n\x00\x0b\x00\x11\x00\x11\x00\x06\x00\xfc\xff\xf4\xff\xeb\xff\xe0\xff\xd8\xff\xd9\xff\xe5\xff\xf4\xff\xfc\xff\xfc\xff\xf7\xff\xf6\xff\xfd\xff\x05\x00\x06\x00\xfc\xff\xe9\xff\xd6\xff\xd0\xff\xdc\xff\xf2\xff\xff\xff\x00\x00\x07\x00$\x00I\x00_\x00W\x00?\x00$\x00\x08\x00\xec\xff\xd4\xff\xc2\xff\xb4\xff\xab\xff\xb1\xff\xcf\xff\xf4\xff\x0e\x00\x12\x00\x14\x00+\x00M\x00Y\x00=\x00\n\x00\xe7\xff\xdd\xff\xe0\xff\xe0\xff\xdf\xff\xeb\xff\x07\x00$\x00;\x00G\x00H\x00@\x00.\x00\x14\x00\xf4\xff\xd6\xff\xbb\xff\xa8\xff\xa3\xff\xb7\xff\xe7\xff!\x00M\x00U\x00G\x00:\x006\x00/\x00\x17\x00\xfb\xff\xf0\xff\xf2\xff\xf1\xff\xe2\xff\xcd\xff\xc3\xff\xc7\xff\xd4\xff\xe0\xff\xe5\xff\xe4\xff\xe4\xff\xea\xff\xf6\xff\xff\xff\xff\xff\xf8\xff\xeb\xff\xde\xff\xd6\xff\xdf\xff\xf5\xff\x0c\x00\x1f\x003\x00F\x00O\x00K\x00C\x00C\x00C\x003\x00\x0e\x00\xe6\xff\xc8\xff\xb5\xff\xa9\xff\xa9\xff\xb8\xff\xd1\xff\xeb\xff\xff\xff\x15\x000\x00E\x00=\x00\x17\x00\xeb\xff\xd9\xff\xdd\xff\xe1\xff\xe0\xff\xe8\xff\t\x005\x00U\x00^\x00\\\x00Z\x00V\x00E\x00+\x00\x08\x00\xde\xff\xb2\xff\x8f\xff\x89\xff\xa6\xff\xd4\xff\xfb\xff\x0e\x00\x11\x00\x11\x00\x1b\x00\'\x00/\x00#\x00\x04\x00\xde\xff\xc1\xff\xb5\xff\xbc\xff\xd2\xff\xee\xff\x04\x00\r\x00\x11\x00\x12\x00\x0e\x00\n\x00\x02\x00\xf7\xff\xea\xff\xe3\xff\xde\xff\xd6\xff\xc5\xff\xba\xff\xc4\xff\xde\xff\xf5\xff\x01\x00\x0b\x00\x1a\x000\x00E\x00M\x00A\x00$\x00\x02\x00\xe4\xff\xca\xff\xb8\xff\xb1\xff\xba\xff\xd5\xff\xfa\xff\x19\x00$\x00%\x000\x00E\x00O\x00?\x00\x16\x00\xf3\xff\xe5\xff\xea\xff\xed\xff\xf0\xff\xf9\xff\x05\x00\n\x00\x06\x00\x0b\x00 \x00<\x00I\x00=\x00!\x00\xfc\xff\xd4\xff\xb0\xff\x9b\xff\x9e\xff\xba\xff\xe3\xff\x0f\x00,\x006\x006\x00@\x00R\x00T\x007\x00\x06\x00\xd9\xff\xba\xff\xac\xff\xae\xff\xbb\xff\xcd\xff\xe0\xff\xf0\xff\xf8\xff\xf8\xff\xf7\xff\xfd\xff\x0c\x00\x16\x00\r\x00\xf4\xff\xd5\xff\xbf\xff\xbd\xff\xd1\xff\xf7\xff\x15\x00\x1e\x00\x1c\x00$\x00=\x00Y\x00_\x00K\x00\'\x00\x04\x00\xf1\xff\xe6\xff\xd6\xff\xc5\xff\xc4\xff\xe0\xff\n\x00%\x00%\x00\x1b\x00\x1f\x009\x00V\x00W\x007\x00\x0e\x00\xf5\xff\xf4\xff\xfa\xff\x00\x00\x04\x00\r\x00\x1d\x00*\x00(\x00\x1e\x00\x1b\x00(\x00=\x00G\x00@\x00 
[~20 lines of raw escaped byte data omitted — this span appears to be a binary payload (likely little-endian 16-bit PCM audio sample bytes) embedded verbatim in one of the patches as a bytes literal; it carries no readable text and is summarized here rather than reproduced.]
x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x03\x00\x02\x00\x03\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x02\x00\x02\x00\x03\x00\x02\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfd\xff\xfd\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00
\x00\xff\xff\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf
f\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x02\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x02\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x
01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x02\x00\x03\x00\x02\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x02\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\
x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x02\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\x
ff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xf
f\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x
00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x0
0\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x0
0\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\
x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x
00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\
x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01
\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\xff\xf
f\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x
00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\xf
f\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00
\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\xff\xff\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\xff\xff\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xf
f\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x
01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00' +# --- +# name: test_pre_recorded_message + 
b'\xfe\xff\x04\x00\x05\x00\x03\x00\x04\x00\x03\x00\x02\x00\x00\x00\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xfc\xff\xfc\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfd\xff\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x03\x00\x03\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\xff\xff\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x03\x00\x02\x00\x03\x00\x00\x00\x00\x00\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x03\x00\x02\x00\x02\x00\x01\x00\xff\xff\x01\x00\x01\x00\x01\x00\xfe\xff\xfc\xff\xff\xff\x00\x00\xfe\xff\x00\x00\x00\x00\xfd\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xfe\xff\xfd\xff\xfe\xff\x00\x00\xff\xff\xfd\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\xfe\xff\xfe\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfd\xff\xfe\xff\xfc\xff\xfc\xff\xfe\xff\xfd\xff\xfc\xff\xfe\xff\xfc\xff\xfc\xff\xfd\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\xff\xff\xff\xff\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfe\xff\x00\x00\xff\xff\xff\xff\x00\x00\xfe\xff\xfe\xff\x00\x00\x00\x00\xfe\xff\xff\xff\xff\xff\xfe\xff\xff\xff\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xff\xff\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\xfe\xff\xfe\xff\x02\x00\x02\x00\x01\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x01\x00\x01\x00\xff\xff\x00\x00\x02\x00\x01\x00\xff\xff\x00\x00\x00\x00\x00\x00\x02\x00\x01\x00\x01\x00\x00\x00\x00\x00\
x01\x00\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\xff\xff\xff\xff\x00\x00\x01\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\xff\xff\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xfe\xff\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x01\x00\x02\x00\x00\x00\x00\x00\x00\x00\xff\xff\x01\x00\x01\x00\x01\x00\x00\x00\xff\xff\xfd\xff\xfe\xff\xff\xff\xff\xff\xfd\xff\xfd\xff\xfe\xff\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xfe\xff\xfe\xff\xff\xff\xfe\xff\xff\xff\xff\xff\x00\x00\xff\xff\xfe\xff\x00\x00\xfe\xff\xfc\xff\xfd\xff\xfe\xff\xfd\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\xfd\xff\xff\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\xfe\xff\xfe\xff\xfc\xff\xfc\xff\xff\xff\xfe\xff\xfc\xff\xfa\xff\xfb\xff\xfb\xff\xfb\xff\xff\xff\xfe\xff\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\xff\xff\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\xfe\xff\xfa\xff\xfe\xff\x00\x00\xfd\xff\x00\x00\x00\x00\xff\xff\x00\x00\xfd\xff\xfa\xff\xfc\xff\xfc\xff\xfa\xff\xfe\xff\xfd\xff\xf8\xff\xf7\xff\xfa\xff\xfe\xff\xfa\xff\xf8\xff\xf9\xff\xfa\xff\xfd\xff\x00\x00\x00\x00\x00\x00\xfb\xff\xfb\xff\xfa\xff\xfd\xff\xff\xff\xff\xff\x01\x00\xfc\xff\xff\xff\xf8\xff\xff\xff\x00\x00\xf3\xff\xfd\xff\xf3\xff\xfb\xff\x01\x00\xff\xff\xfa\xff\x02\x00\xf4\xff\xeb\xff\xfc\xff\xf7\xff\xe8\xff\xfb\xff\xf8\xff\xf7\xff\r\x00\xfe\xff\x02\x00\xfe\xff\xf9\xff\xfa\xff\xf8\xff\x00\x00\xf6\xff\xfe\xff\x02\x00\x05\x00\x04\x00\xfa\xff\xf4\xff\xe8\xff\xf3\xff\x06\x00\xf9\xff\x06\x00\n\x00\xf8\xff\xfa\xff\x01\x00\xf4\xff\xfd\xff\xf7\xff\xf4\xff\x01\x00\x05\x00\x02\x00\x04\x00\xfc\xff\xef\xff\x03\x00\xf3\xff\xfc\xff\x08\x00\x04\x00\xfd\xff\x08\x00\x04\x00\x00\x00\x00\x00\x06\x00\x03\x00\xfd\xff\x04\x00\x15\x00\x06\x00\x12\x00\x15\x00\x05\x00\x04\x00\x05\x00\x05\x00\x02\x00\x07\x00\x05\x00\xfc\xff\xfd\xff\x06\x00\xff\xff\xf8\xff\x01\x00\xf2\xff\xe6\xff\xf4\xff\xef\xff\xfb\xff\xfc\xff\xf2\xff\xec\xff\xe4\xff\xe6\xff\xf9\xff\xfa\xff\xee\xff\xea\xff\xe9\xff\xf8\xff\x06\x00\x0b\x00\xe9\xff\x03\x00\xea\xff\xfc\xff\x0f\x00\x00\x00\x13\x00\xe6\xff\xfe\xff\x10\x00\x12\x00\xfd\xff\x03\x00\xf1\xff\xfb\xff\x18\x00\x1f\x00\x08\x00\xfa\xff\xf9\xff\xf6\xff\r\x00\x17\x00\x03\x00\xfb\xff\xfc\xff\xf3\xff,\x00\x1c\x00\xf8\xff\xed\xff\x05\x00\x10\x00$\x00@\x00\x19\x00\x00\x00\x19\x004\x00G\x00]\x001\x00\x07\x005\x00J\x00X\x00\\\x00\x03\x00\xf6\xff\x13\x007\x00]\x008\x00\xef\xff\xeb\xff\x00\x00#\x00\x85\x00S\x00\xb6\xff\xcf\xff\x1a\x00\xc3\xff\xb6\x00\x8a\x00^\xff\xe0\xff\xfc\xff\xba\xff4\x00n\x00\xc5\xff5\xff\xf4\xffR\x00\xe8\xff-\x00\x11\x00z\xff\xb0\xff\x92\x00\xeb\xff\xca\xff\t\x00\xa0\xff\xcb\xff6\x00L\x00\x02\x00\x91\xff\xdb\xff\xd3\xff\xed\xff\xc0\xff\x8b\xff\x97\x00\xe2\xff\x16\x00B\x00\xbc\xff\xfb\xff1\x00\xe4\xff\xed\xff\x95\x00\xcc\x00H\x00>\x00\x03\x00g\xff\x18\x01\x8c\x01\xa8\xff?\xff\xc6\xfeO\xff\xaa\x00\x00\x01Q\xff\xaf\xfe\xce\xfe\xd8\xfe\x7f\xff\xce\xfe\x93\xfd\xb6\xfc\x9c\xfd\xb1\xff\xf7\x00H\x00D\xfe\x8d\xfc\xc2\xfco\xffG\x01r\x00\x94\xffG\x007\x01,\x02\xc0\x02\x18\x01\xaa\xff\xf0\xffS\x00\xbf\x029\x03\xa0\x01p\x00/\x00\xc4\xff\xb3\xff\xd4\xffU\xfdB\xfd\x8b\xfe\xfb\xfe\x86\xfe\x0e\xfd\xba\xfd\xb7\xfd\x8e\xfc\xf0\xfc\x88\xfd"\xfe\'\xfe]\xfe\xfb\xfe\x13\x00\x08\x01\xe1\x00&\xff\xf0\xfe\x05\x015\x01E\x02:\x02G\x02*\x02E\x02\xcf\x02\x1f\x03\xcc\x03\x15\x03N\x03\xdf\x03\x82\x04X\x05P\x05f\x04}
\x04Q\x06\xe3\x06\x9a\x06\x8e\x06\xc7\x05a\x05\xe6\x05-\x06g\x066\x06\x9e\x05\xf4\x03\x9b\x03\x14\x03e\x02\x99\x01\xdf\xff\xa1\xfe{\xfe%\xfe2\xfd/\xfc\xc3\xfa-\xf9\xe2\xf8\xa2\xf8\x8d\xf8\xa0\xf9B\xf9\x15\xf9\xf3\xf8<\xf9y\xfa\xe1\xfa\xce\xfa#\xfb\xa1\xfc\xf3\xfd\xec\xfeE\xff\xc5\xfe\x9f\xfe8\xff\x19\xff\xff\xfe5\xff\xd8\xfe\x90\xfe\x87\xfd\xb5\xfcR\xfc\x18\xfc\xae\xfaI\xf9/\xf9\x14\xf9>\xf9\xb6\xf8d\xf8o\xf8E\xf8\x18\xf8c\xf8g\xfaA\xfb\xe2\xfak\xfb\xda\xfbM\xfd\xa0\xfe\x1c\xfft\xfe\xee\xfe\xf9\xff\x0e\x00y\x00*\x00P\xff\xfa\xfe\x84\xfe\xef\xfd\xd4\xfe\xb3\xfdf\xfd\xfa\xfbq\xfb\xfa\xfb \xfd{\xfd\xe4\xfc\xb3\xfc\xe5\xfa\x97\xfd\xee\xffP\x00o\x01o\x00\xfc\x01\x13\x04S\x05R\x08\x13\x07\xda\x08\xa6\t`\x0cX\x11\x1c\x0f\x88\x0b\xb5\x04\x17\x08\x8f\x17\x8f)\x9f4G+\xa0\x1c\x9f\x12\xe9\x13\x88#\xac+++\x94"\x8f\x1bM\x1f\xa0\x1e\x05\x17\xf1\x04\x17\xf4V\xec\x13\xf0\x0e\xfaJ\xfe&\xf9\xcb\xe7\x96\xd7\xa6\xcf\xab\xd2\xd2\xd9\x95\xdbT\xd9J\xdb\x84\xe2\x98\xe8\x06\xeb8\xe8J\xe5\x93\xe5\xfa\xea\xa2\xf8:\t\xe9\x11\xd3\x10c\n\x97\x05\x1a\x08\xbb\r\x94\r\x15\x0e\xef\rz\x0eU\x10\xc1\r+\x08\xbd\xfd(\xf24\xeaj\xec\x1f\xf3H\xf7\x0e\xf5\x07\xed\xcb\xe6\x9f\xe2\xe1\xe2\xdc\xe5&\xe87\xed\xcb\xf1\x13\xf8\xf9\xfe*\x039\x04\xda\x00h\xff\xe3\x03\xdf\x0eY\x18\xf4\x1d\xa7\x1d8\x1a=\x16\xad\x12\xa8\x118\x11\xa7\x10\xaa\x0e\xfb\r`\x0c}\n\xd2\x06|\xfe@\xf5\x11\xf1U\xf2K\xf6\x9c\xf9\xf3\xf8\xf8\xf5\xc6\xf2\xb5\xf0\x1a\xf2\xb6\xf4\xa5\xf6\x87\xf7~\xf9\xa3\xfd1\x01{\x03\xff\x01\xfc\xfc\x1c\xfb\xa7\xfb\xf7\xfd\x85\x00\xb1\x00\xaf\xfe\x01\xfc\xf1\xf9x\xf7\xab\xf6c\xf4M\xf2f\xf2?\xf2\x1f\xf7 \xf8\xf7\xf7!\xf6W\xf1@\xf34\xf5\xce\xf8\xdb\xfdA\x00\x9f\x02[\x03\x18\x04\x0b\x01\xb0\x02\x0c\x06>\x08 \x0b\xfa\n\xc6\x0e\xaa\x12K\x13>\x12y\x11o\x11C\x17\xe6\':7w9 /\x0e$\'$\xcb)\x04,\xc8+\xc3+\x9a)t"`\x18\xf0\x0f\x88\x07s\xfc\xe7\xef\xf7\xe7\x9a\xe9\x0c\xed3\xec\xcf\xe4*\xda\x11\xd1\xab\xcc\xf6\xcc\x00\xd1^\xd8\x93\xdf\xb2\xe4\x08\xe75\xea\xad\xefZ\xf3`\xf4\x0c\xf6\x05\xfd/\tU\x13\xaa\x17\x06\x16,\x13-\x10\xdb\r\x18\x0c"\n\xe7\t\x8b\x08\xef\x04\x19\x00P\xfb\xd8\xf5\xbe\xed{\xe4C\xe0\x04\xe1\xff\xe2\xe4\xe2\xaf\xe2\x89\xe2N\xe2\x9e\xe2 \xe4{\xe8\x1e\xef\xf5\xf5\x1a\xfc\xf2\x01\x9e\x08\xba\x0fh\x12\xf5\x14\xcc\x17\xb1\x1cb \xb8 9!\xa6 
|\x1f\xbe\x1bu\x17\xef\x13~\x0fo\nt\x05\xef\x00;\xfd\xe3\xf9L\xf6\xab\xf1&\xef\xc7\xed\xf0\xec\x1a\xed\xf4\xec;\xee/\xf0\xa9\xf22\xf68\xf9\xf3\xfax\xfb\xd2\xfd(\xff\x00\x01\x9d\x02\xad\x02T\x03R\x02\xf6\x00$\xff\\\xfd\x9e\xfa\xef\xf6\x91\xf4\x1d\xf3K\xf3\x80\xf2\x88\xf0X\xed#\xec\x8f\xebb\xeaK\xeb\xdc\xec;\xf0\xf2\xf3\x15\xf5\xfe\xf7\xb2\xfa\xf3\xfb(\xff\xc9\x00`\x03]\t\x0b\x0cW\x10O\x14\xbf\x14\x9c\x14\xb6\x11\x81\x11X\x14y\x17f\x1b\xce&c9vB\x96:,&\xd5\x1b\xbc%\xf20H4.3\x983\xaa0P%B\x15\xa8\n/\x03i\xf8#\xf0\xcf\xf03\xf9"\xfc\x8a\xf1\xb0\xde{\xd1\xf9\xcc\xa9\xcc>\xcfw\xd67\xe1\xb8\xe7F\xe7\x06\xe6\xf1\xe7\x0c\xe9C\xe8\x0c\xea\xa2\xf2\x83\x00k\r\x91\x13\xb1\x13X\x0f\xa4\n\x13\x07(\x05\xe9\x06L\n\x00\x0b\xbb\x08\xe9\x04\x01\xffN\xf7\x97\xee\'\xe6\x9c\xe0\x95\xdeu\xdfX\xe3\x14\xe5\xba\xe4\xee\xe1\xad\xddE\xdc+\xe0\x10\xe8\xab\xf0\xbc\xf7\x06\xfet\x05\xdf\n\xc3\x0fH\x13)\x16W\x18\x7f\x1c\xc3"\x14)\xaf+<)k$0\x1e8\x19V\x15\r\x13\xd7\x0f3\x0c\x0e\x08\xb8\x01l\xfa\xe3\xf3\x05\xef\x17\xec_\xeah\xea\xa6\xeb\xfd\xec`\xeex\xef\x82\xf0\xcf\xf0U\xf2\x9d\xf5S\xfa\x89\xff\x1f\x02[\x03>\x039\x02L\x01\x0b\x00\x1e\x00\x11\x00\xb5\xfe~\xfcv\xf9!\xf7\xb5\xf3\xde\xf0Z\xed\xac\xeat\xe9\xc3\xe8\xdd\xe9\xf5\xe9\xe5\xe9\xce\xe8+\xe9_\xeb\x0f\xee\xa8\xf3x\xf6\xc7\xf9\x8d\xfc`\xfe\xd3\x03R\tA\x0f\x15\x12C\x12\xdb\x12\xfd\x14%\x1bK\x1eN\x1f\xd3\x1f\x08#\xa5.\xe7<\xb7D\x99?\x101*&\xc8&O.N3\xac2w/\x03(\xf0\x1c\xbe\x0e\xd9\x03\x90\xfd\x9f\xf5*\xefz\xeb\x04\xed\x81\xee\xac\xe9\x04\xe0\xcd\xd4`\xcdL\xcc7\xd2x\xdb0\xe4?\xea\x03\xeb\xe6\xe9\x1d\xeaH\xed\xd2\xf2\xfe\xf6\xc3\xfc\x11\x04\xa6\x0cc\x12H\x14\'\x12\x06\r!\x08h\x04\x1c\x03\x0c\x03\x10\x03{\x02\x18\xff?\xf8\xe8\xef\x1a\xe7\x00\xe1\x1e\xddb\xdb\xcf\xdby\xdd)\xe0=\xe2T\xe2\xf5\xe1\xac\xe2\xcf\xe4\xd4\xe8\xbc\xef?\xfa\xf3\x03k\x0cF\x12A\x16R\x18F\x19U\x1c\xca w%\x9a(\x99)])S&(!\xa5\x1aD\x14\x12\x0fP\x0b\x12\x08g\x04q\x00\xa2\xfb\xf8\xf5x\xef1\xea^\xe7\x82\xe7\x04\xe9p\xeb\xc0\xed\xf6\xef\x94\xf1\x94\xf2#\xf3v\xf4k\xf7j\xfb\xbf\xff\xcc\x03N\x07\x1f\x08\xbc\x06\x03\x04\xe8\x01\xe7\x00\r\x00\xf5\xfeo\xfdE\xfbl\xf8\x8e\xf5|\xf1;\xed\'\xea\n\xe8,\xe7\xe3\xe7,\xe9d\xea`\xea#\xe9\xf5\xe8\x88\xea\x11\xede\xf2\xcb\xf7`\xfc\xda\xff7\x00l\x01H\x04g\tX\x0f\x93\x13\x1c\x162\x18S\x1c\xa5!\xa5,&=\xe5J\x93L\xaf>[1\x0e1\x819x?j>\x0b;\x8c6\xc3,\\\x1de\x0e\xb3\x04X\xfc8\xf35\xeb_\xe87\xebR\xeb\xf2\xe2\x8e\xd4\x8c\xc9\xad\xc6\xb1\xc9\xe4\xce\xc2\xd5c\xde\x8d\xe5x\xe8\xc8\xe8\xd7\xe9\xc7\xed\x9a\xf2\x15\xf7\xa0\xfc\x87\x04\xc1\x0fd\x19V\x1d\x97\x19a\x12\x93\x0b\xee\x06\xf7\x04E\x05\xe8\x06\x05\x07\xbb\x02\xb6\xf9\xbd\xee|\xe5P\xe0\xa5\xdc\x98\xd9*\xd8\x86\xd9;\xdc\x05\xde>\xde]\xdeg\xdf\xd7\xe1\xea\xe6\r\xeeE\xf7`\x01 \nM\x0fI\x12C\x15T\x1a\x1a \x91$q\'\xa6)\x89*H)\xb4%\xf8 6\x1d\x80\x19\xe1\x14d\x0f|\n\x8f\x06\x83\x02[\xfc\xd1\xf5\'\xf0T\xec\x99\xea4\xea\xad\xeaP\xeb1\xec\xfa\xec\x9a\xed\xb2\xeeo\xf0|\xf3]\xf7!\xfb8\xff\xe3\x02\xed\x05\xd4\x07\xc2\x07b\x06\x9a\x04\xc7\x03\x97\x03\xcc\x03\xad\x02\xe1\xff\xe6\xfb0\xf7X\xf3G\xf0t\xed\xcb\xea\x80\xe8\x08\xe7b\xe6\xab\xe6J\xe7\xef\xe75\xe9\xc1\xea\xde\xec\xcd\xef\xd2\xf2\x93\xf5\xdc\xf7#\xfa\xce\xfd\xc7\x02:\x07Y\x0b\xc3\x0e\xc8\x10\x82\x11]\x11\xb9\x12\x98\x16 
\x1c"%b3+C\xd8JVD\x845\xd2*\x89)I-\xca3X;\xc0?\xc3;\xfd,\r\x19\x8f\x08r\xfd\t\xf8c\xf5^\xf2\x06\xf0W\xedR\xea\x0c\xe5\xdb\xdc\xe9\xd2\xee\xca\x87\xc6\xf4\xc5\xe7\xcaP\xd5\xb0\xe1\x8c\xeaM\xec\xdb\xe8\xc5\xe5\x99\xe5U\xe9G\xf1\xd1\xfc\x8b\t\xff\x12+\x16\x90\x15\xc9\x13\xe8\x10J\x0cQ\x07\x99\x04U\x05\x85\x07\x06\t\xb2\x08\xd9\x04\xa1\xfck\xf1)\xe6\xe3\xddA\xda:\xdc\xe1\xe1\xb4\xe7\x93\xe9\x90\xe7\x0f\xe5K\xe3b\xe3\x14\xe6\xc6\xeb=\xf4"\xfdI\x05\xaa\x0c\xd7\x12\xaf\x17}\x1a\xe9\x1a\x8e\x1a\x07\x1b\x97\x1e\xc6#\xc3(M*\'(\xe4"I\x1b\xd4\x13\x9c\r\\\n\xdc\x08\xcd\x07\xaa\x04b\xffH\xf9\xba\xf3f\xef\x0b\xec\xb7\xe9\x89\xe8=\xe8\x8a\xe8\xc4\xe9\x18\xec\xf7\xeeK\xf1\x00\xf3\x02\xf4\x11\xf5\x87\xf6\xd9\xf9\x0b\xfe\xdf\x01b\x04\xee\x04\xd1\x04o\x03\xfc\x00\xd8\xfe\x9f\xfd\x06\xfdd\xfc,\xfa\xa9\xf7A\xf5M\xf3\x85\xf1\xab\xef\xac\xed\xa8\xec\xd0\xecp\xed\xae\xee\xda\xefn\xf1\x8a\xf3"\xf5\xc6\xf6\x19\xf9\x0f\xfc_\xff$\x02\x0e\x05d\x08_\x0c\x01\x10&\x12\x17\x13Q\x13\x14\x142\x15\x0b\x18b\x1fK,\xd89\x86@\x8e<\x991\xf4\'t$\xb5&\xcf+\xfa1\xa16\x036\x7f-s\x1e.\x0e\x1b\x02\x97\xfb\x81\xf93\xf8\x1d\xf55\xf0k\xeat\xe5\x7f\xe0\xaa\xda\x00\xd4\xfb\xcd\xa8\xc9r\xc8i\xcb\x1d\xd2T\xda\xf2\xe0g\xe4\'\xe5]\xe4\xb4\xe3e\xe5\x07\xeb\xf4\xf4\xcd\xffL\x08\x12\rD\x0f\xfc\x0f\xa4\x0f\xea\r\x13\x0cE\x0b\xca\x0b.\rj\x0e\xa3\x0e\xe6\x0c\xf7\x08\xf9\x02\xd0\xfb\x84\xf4\xd9\xee]\xec\xcd\xec\xca\xee \xf0\x16\xf0:\xef\xb4\xed\xfe\xeb\xe5\xea\xdc\xeb\x94\xefW\xf5\xf7\xfb\xe6\x01\x90\x06\x11\n\xf5\x0c?\x0f\xd3\x10\xb1\x11\x0c\x13c\x15\x8a\x18\xef\x1a\xfb\x1bK\x1b+\x19\xa0\x15\xed\x10\xec\x0b/\x07\xe3\x03\xdd\x01\x94\x00\xfc\xfe\xba\xfc\x8d\xf9\xe5\xf5\xef\xf1g\xee\xe3\xeb\xf6\xea\xb1\xebx\xed\xac\xef\xa0\xf1\xea\xf2k\xf3y\xf3q\xf3\xf6\xf3\x0e\xf5\xd4\xf6\xfe\xf8\x7f\xfb\xd0\xfd\xa0\xffX\x00\x1c\x00G\xff&\xfeV\xfd\xbe\xfc\xcb\xfc2\xfd\xe5\xfd\x94\xfe\xce\xfe^\xfep\xfdf\xfc\xb9\xfb\xa3\xfb\x05\xfc\x18\xfdo\xfe\xce\xff\xf0\x00\x88\x01\x8a\x01\x0b\x01B\x00\xea\xff\xa1\x00A\x02|\x04y\x06\xc5\x07V\x08\\\x08\x08\x089\x08\xff\x08\xe5\nj\x0ej\x14\x14\x1ds&\xde,\xc6-\xd4)\x19$\x17 \x8c\x1f\xa9"\xf1\'?-0/\xba+\x88"\xa1\x15\x97\x08\xc7\xfe\x01\xfa\xed\xf8x\xf8\xf3\xf5\x84\xf0$\xe9c\xe1\x19\xdaC\xd4Z\xd0\x97\xce:\xcel\xce\xf4\xceF\xd08\xd3\xe0\xd7\x9d\xdd5\xe3e\xe7\xe5\xe9\xfc\xebr\xef^\xf5\x06\xfd\x90\x05\x86\r\xf0\x13l\x17\x9a\x17\x95\x15P\x13\x88\x12\x99\x13\xdc\x15\xc4\x17$\x18\xed\x15\x94\x11\xd0\x0b\xd4\x05\xb0\x00\xf9\xfc\xcb\xfa^\xf9\xfc\xf7\x05\xf6\xca\xf3\xbd\xf1t\xf0\xab\xef\xfb\xeeA\xee\xcb\xed\'\xeeW\xef\xa4\xf1\x13\xf5Q\xf9\xa3\xfd\xe0\x00\xa2\x02\xd4\x02F\x02\x90\x02[\x04\xb5\x07\xa1\x0b\xd9\x0e\x0f\x11\xc5\x11\xd9\x10\xc1\x0e\x02\x0c\xdf\t\x95\x08E\x08N\x08\x08\x08\x05\x07&\x05\xe8\x02\x0c\x00\x81\xfc\xa2\xf8o\xf5\xc7\xf3\xaf\xf3\x81\xf4\xb5\xf5\xd4\xf6_\xf7\x1a\xf7\xdb\xf5\x07\xf4c\xf2\xc9\xf1\xff\xf2\xd9\xf5\x93\xf9\xcf\xfc\xf4\xfe\x06\x00d\x00M\x00\xd8\xff\xd5\xff\xb2\x00v\x02O\x04\xaa\x05Q\x06;\x06|\x05,\x04\xb0\x02n\x01\xbf\x00n\x00z\x00`\x00\xe1\xff\xdc\xfe\x7f\xfd?\xfca\xfb\xea\xfa\xdf\xfa\xf8\xfa\x13\xfb,\xfbH\xfb\x96\xfb\xce\xfb\x18\xfc\xd4\xfcx\xfe\xc5\x00z\x03\xea\x06$\x0cG\x13\xd5\x1ao \xec"\xea"\xb2!\x7f \xe7\x1f\xfc 
\x00$=(\xba+\xfd+q\'\xb1\x1e\xe7\x13\xd3\t\\\x02<\xfe\x9c\xfc\xde\xfbO\xfa\\\xf6\x96\xef\xb6\xe6\xa9\xddf\xd6\x93\xd2&\xd2:\xd4:\xd7\xfa\xd9\xed\xdb4\xdd<\xde\x85\xdf\xe4\xe1\x8a\xe5\x88\xea/\xf0.\xf63\xfcv\x02o\x08\xaa\rX\x11:\x13\x80\x13\xc9\x12\xee\x11\x85\x117\x12\xbe\x13\x87\x15\xe7\x15\xdb\x13\xf0\x0e0\x08\x1a\x01\x08\xfb\x16\xf7>\xf5#\xf5;\xf5`\xf4\xcd\xf1\xed\xed\x15\xea\x85\xe7\xa2\xe6;\xe7\xc7\xe8+\xeb"\xeeS\xf1\xb5\xf4\xff\xf7B\xfbA\xfe\xb6\x00\xaf\x02Z\x04\x84\x06\xb6\t\xff\r\x8f\x12R\x16C\x188\x18\xc6\x16\xa0\x14\xb2\x12O\x11\xd0\x10\xdc\x10\x8d\x10\xfb\x0e\xe7\x0b\xd2\x07\x98\x03\xe3\xff\xa1\xfc\xb8\xf9\x18\xf7\x02\xf5\xb3\xf3\x16\xf3\xf5\xf2\x10\xf3#\xf3\xf9\xf2X\xf2h\xf1\xba\xf0A\xf1Q\xf3\xa8\xf62\xfa\xf9\xfcV\xfe~\xfe<\xfe8\xfe\xf7\xfe;\x00\xf0\x01\x80\x03\x81\x04\x84\x04\x8e\x03\x06\x02\x91\x00\xb7\xffK\xff\x0e\xffe\xfe\x8b\xfdg\xfcX\xfb`\xfa\x8a\xf9\x1e\xf9\x11\xf9;\xf9!\xf9o\xf8\x8e\xf7\xf6\xf6f\xf7\x07\xf9m\xfb\xea\xfd6\xff[\xff\xe7\xfe\xf5\xfeS\x00\x83\x03+\t\xe2\x11:\x1c\t%\xfd(\xb5\'\xf1#0!\x82!\xdf$K*\xe3/\xb93\xdc3m/|&\xe7\x1a\xec\x0fb\x08\xd5\x04\xa0\x035\x02.\xff\x08\xfa\x96\xf2Q\xe9o\xdf\x1c\xd70\xd2\'\xd1\xbe\xd2f\xd5\n\xd8H\xda\x0e\xdcd\xdd:\xde.\xdf\x93\xe1(\xe6\n\xed\xd4\xf4V\xfc\xb6\x02\xfc\x07\xdd\x0b9\x0e\xf0\x0e\xb1\x0eI\x0e[\x0e\xfe\x0e\xd1\x0f\xaf\x10\x00\x11\x18\x10\xc5\x0c\xda\x067\xff\xce\xf7b\xf2p\xef\xe7\xee\xa8\xef\x80\xf0\x07\xf0\xbc\xed7\xea\xe2\xe6a\xe5\x92\xe68\xea\x00\xef\xce\xf3\x03\xf8\xab\xfb\xc6\xfe~\x01i\x04\x89\x07Z\x0b\x19\x0f\x9e\x12x\x15\x94\x17\x8d\x19\xea\x1a\x8e\x1b\xe7\x1at\x19\xaf\x17\x00\x16]\x14\x87\x12p\x10\xe7\r\xfc\n\xb4\x076\x04v\x00\xc4\xfcF\xf9\xf0\xf6z\xf5w\xf4J\xf3\xdf\xf1\xce\xf0\xf4\xefj\xef\'\xef\xa8\xef\r\xf1\xd7\xf2\\\xf4U\xf5=\xf6O\xf7\xb8\xf8\x18\xfa \xfb\xa2\xfb\xee\xfbl\xfc\x08\xfd\xb0\xfd\xdb\xfd\xc1\xfd[\xfd\xd5\xfc\xf4\xfb\xd8\xfa\x00\xfa\xcb\xf9/\xfaa\xfa\x0c\xfa\x02\xf9\xdf\xf7\xf5\xf6\xb3\xf6\xe2\xf6_\xf7.\xf8\xf0\xf8m\xf9U\xf9\xc9\xf8~\xf8\xef\xf8c\xfa\xa9\xfcp\xff\x81\x02\xfe\x04\x80\x06E\x07T\x08\xa3\n\xe8\x0e\xec\x15\xf8 E.\x9e8\x88:Z4\x0f,\x91(5,=4\x87<\xe9@v?j7u*z\x1br\x0ey\x06\xef\x03\x94\x03\xd2\x00s\xf9\t\xef\xe5\xe4%\xdcE\xd4\xc1\xcc\xc5\xc6\xa8\xc3I\xc4\xe9\xc7I\xcd\xac\xd2.\xd6\x82\xd7\x9f\xd7\n\xd85\xda\x07\xe0\x92\xea\x99\xf8L\x05\xf8\x0c|\x0f\xb0\x0fK\x0f\x9e\x0e\xd6\r}\x0e\x8c\x11\x9b\x15G\x18\xdc\x17D\x14\xaf\r\xe0\x04J\xfb \xf3\xec\xed\\\xec<\xeeN\xf1\x84\xf2\xdd\xef$\xea\x9a\xe4\xcb\xe1\\\xe2b\xe5\x8d\xeaZ\xf1\x9b\xf8\xe3\xfe\xfb\x02n\x05:\x07\xcb\t\xa2\x0cD\x10\x9e\x14\x11\x1a\xc1\x1f\xfa#\x18%\x8a"\x90\x1e\xee\x1a\x98\x18k\x16\xb7\x14\x80\x14\xea\x14\x8c\x13\x1f\x0e\xbd\x05j\xfd\xa3\xf7\xee\xf4Y\xf4\xd5\xf43\xf5W\xf4E\xf2,\xefg\xeb\x19\xe8K\xe7\xb0\xe9\xde\xed\xe8\xf1 \xf5\x95\xf7\x88\xf8\xa0\xf7\x9b\xf5N\xf4\xe8\xf4)\xf7#\xfa\xfc\xfc\xe9\xfen\xff\x9a\xfe\x8e\xfc\x92\xf9\xd3\xf6y\xf5A\xf6z\xf8\xc1\xfa\xa2\xfb\xdb\xfa\x97\xf9K\xf8\x84\xf7\xe6\xf6\xdc\xf6\x0c\xf8\xaf\xf9\x99\xfb\xe1\xfc\'\xfd\x81\xfdc\xfe\xe5\xff-\x017\x01\xcf\x00W\x01\xf3\x02\x1e\x05\xbd\x06\x7f\x07O\x08g\t\x1d\x0b6\r\xd3\x0f\xfd\x12\xef\x17\xe3\x1e\x16\'\x11/ 
414\x84/\xb7)\x1e(\x1e-\x125\xad9\xfc6\xef.\t%\xd0\x1ab\x10?\x07\x8b\x01\xa7\xfe\xba\xfb\xd3\xf5S\xed\xb1\xe3\x96\xda\xd4\xd2\x04\xcd8\xc9\x0e\xc7\xe3\xc6\xc5\xc9\xb4\xce\x9b\xd2I\xd3M\xd2\xe7\xd2\x0e\xd7u\xde\x11\xe8\x8a\xf29\xfc\xf5\x03\xcd\x07j\x086\x08\xba\nO\x105\x16&\x193\x19#\x18\x92\x16\x8d\x13\xe3\x0e\x90\t\xb3\x04\xc0\x00|\xfd\x88\xfa\xad\xf7\xbe\xf4\x03\xf2\xd0\xee\xba\xea\xec\xe6\xaa\xe5\x0f\xe8\x01\xec\xfa\xeeV\xf0D\xf2\xfc\xf5W\xfaN\xfe\xa1\x01\xee\x057\nV\x0ex\x11\xe6\x14\x11\x18\x02\x1b<\x1c\xc0\x1b\x8d\x1a\xee\x18\x80\x18n\x17\x9b\x152\x12\n\x0f\x1b\r\x92\x0bd\x08\xba\x02\x8f\xfc\x14\xf8\t\xf6^\xf5\xe0\xf4\xfb\xf3\xa3\xf2\xb6\xf0\xd1\xee\xb5\xec,\xeb\x96\xea\n\xecr\xefM\xf2\xf7\xf3\xbe\xf46\xf5\xff\xf4\xac\xf3\xc7\xf2/\xf4\x84\xf7\xbd\xfa\xbc\xfc4\xfd\xbc\xfc\x85\xfb\xcf\xf9\x85\xf8P\xf8\x9c\xf9i\xfcP\xffu\x006\xff\xea\xfc\x1e\xfba\xfa\xd4\xf9\xde\xf9\xa2\xfb\x1f\xfey\xff#\x00\x8f\x00T\x00\x10\xfd~\xf8\xe3\xf8\xcf\xfe\xc9\x05B\tW\x07\xfc\x03V\x01P\x01}\x03\xb6\x05=\x08J\n\x04\r\xd1\x0f;\x17S$j/a.\x99!V\x1a<"p0\xb47\xab7N7S6\xe4/\x80%\x86\x1d\xcb\x183\x13z\r\x14\n\xde\x08\xcf\x04\x9d\xfb\xeb\xee^\xe0E\xd5\x90\xd0\x1c\xd2Y\xd5\xde\xd4\xf7\xd1\x14\xcf\xa0\xcc\xdb\xc9\x07\xc8\x8e\xca\xd8\xd1m\xdb\xa0\xe4\x8a\xec\xb9\xf3\xd1\xf7K\xf8\x9b\xf8\'\xfd\x9f\x05\xfd\rr\x13\xa7\x17\\\x1a\t\x1bi\x18\xf2\x12\x86\r\xb0\n\xa8\x0b/\r\xbb\x0bY\x07\xbc\x01\x88\xfb\xb8\xf4r\xee\xf5\xea\x9e\xea\x04\xeb\xf7\xeb-\xedk\xee\xdd\xeda\xec\x1e\xec\x0f\xeeL\xf2\xf7\xf8x\x00\x11\x07\xcf\t\xc3\n\xc7\x0ba\x0e,\x110\x13\x14\x15\xfb\x18\xdc\x1di \xe9\x1e\x1c\x19S\x131\x0f\x11\x0f\xbd\x10\xd7\x10W\r8\t7\x05d\x00\xc4\xfa\xfe\xf5\xda\xf3\x8a\xf2\x0e\xf2\n\xf3Q\xf3r\xf1\x8c\xed\x03\xea\xbb\xe8\x1e\xea\n\xed\xa8\xf0!\xf3\xbb\xf3\xad\xf2O\xf2\x96\xf2\xec\xf2\x98\xf3\xf1\xf5!\xf9\xaf\xfb\x16\xfd\xa3\xfd\xaf\xfc\x95\xfbs\xfb\xfb\xfc\xd8\xff\xfa\x01\xa4\x02b\x01\xe7\xffQ\x00\xa7\x00\xc9\xff\x9d\xfeD\xfdS\xfe7\x00\xd9\xff?\xff\xb6\xfd&\xfd\xdd\xfd\x99\xfe\xdc\x00\x80\x02n\x02\xe9\x02\xc3\x03\xea\x03F\x04\xe7\x04\xe1\x062\t\x01\x0b\x83\r\x8f\x0fH\x15t\x1fJ);\'&\x1e\xc6\x1cU& 0\x0f0p.\x8e1\xc22\xe1,\x06%Z \x9f\x1b\xd7\x13\xe7\rk\x0e\x89\x0e\xe4\x06\xeb\xf9f\xeef\xe7\x0b\xe2A\xdc\xd5\xd8e\xd7\t\xd6\x82\xd3\xbb\xd1\xee\xd0d\xcf?\xcd\xf8\xcc\x14\xd1\xec\xd7\x1b\xdfS\xe5\x06\xea:\xed\x08\xf0\x1b\xf4"\xf9@\xfeO\x02\xb3\x06(\x0c\x8d\x10\xc7\x12\xf6\x12\xa1\x11K\x0f\xa9\x0c\xef\x0be\x0c\x0e\x0c\x06\n\x98\x07\xbb\x04\x08\x00w\xfa\xe9\xf6\xa1\xf4q\xf26\xf1m\xf2\xcc\xf5\xd2\xf5c\xf3.\xf1L\xf1)\xf2m\xf4\x95\xf8~\xfe\x0b\x03/\x060\t\xf0\t\xa6\x08\xe1\x07\x1a\n\xe3\x0e[\x15\xf7\x1a\xfd\x1b\x96\x17\xef\x11\xb7\x0f\x05\x10\x08\x0eM\x0c\xd0\x0be\x0c 
\x0cK\t\xdc\x03\xa2\xfc\x90\xf5\xc5\xf1\xfa\xf2\xec\xf4r\xf5\xe8\xf3\xdf\xef\xe3\xeb\xf7\xe8p\xe7\xdb\xe6\xd3\xe6\xbc\xe7a\xeb`\xef\xde\xf1\xc6\xf1\x86\xf0M\xef\xf1\xf0D\xf4\xb0\xf7Y\xfb\n\xfe\xa0\x00#\x03\\\x03z\x02\x94\x01P\x01`\x02\xf2\x03/\x08\x92\x0bh\t\xa8\x06\xf5\x03\xcc\x03$\x05\x16\x03\x01\x03\xaf\x05\xea\x06\x85\tW\n\x89\x07}\x01%\xfe\xd2\x01o\x04\xee\x05\x11\tN\n\xb9\x07H\x06v\x06\xd5\x06\xf8\x05\x8d\x05]\x07g\x0b\xee\x12b\x1a\xb1\x1b\x05\x17\\\x14?\x16\n\x19H\x1b\xe8\x1d\x8e!\xf7"l!\xf4\x1f\xbd\x1e\xe0\x19\xfb\x112\r\xa6\r\xf4\r\xec\n%\x07c\x03\x94\xfc\xff\xf3Z\xeeV\xeb{\xe7\xa2\xe2\xd3\xe0Y\xe1w\xe0Q\xdd8\xda\xf4\xd7O\xd6;\xd5\x8b\xd7\xb7\xdc{\xe1+\xe4\xef\xe5-\xe91\xec\xa8\xedc\xf0\xfb\xf4\xb9\xf8-\xfd]\x02\xac\x06\x0f\t\x97\t>\tf\t4\nW\x0b9\r\x02\x0eF\x0e\xb7\rM\x0c\x90\n\x9d\x07\xcc\x04L\x03\xc0\x024\x03$\x04]\x03\xfa\x01\x8e\xff\xe1\xfd\xc4\xfd\xdd\xfd\x8d\xfd\xdc\xfe,\x01J\x02B\x03\xdd\x041\x05J\x03\xfb\x01\x87\x03K\x06R\x07\x15\x08\xd1\x07V\x08m\x06\x84\x03\xdd\x02\x87\x008\xffT\x00\r\x002\xff\xe9\xfd\xcf\xfbJ\xf8e\xf6\xe0\xf60\xf5\xbc\xf2\xf5\xf4\x14\xf8\x12\xf6\xd9\xf0%\xf5\x06\xf8\x96\xefi\xf0\xe3\xf7 \xf9\xd0\xf5\xf1\xf6x\xfe\xed\xf9\xae\xf6z\xfc\xf1\xfb\xf1\xf7\x86\xfbc\x02\xae\xfd\x85\xfb+\x06\xdf\x01\x10\xf7V\x00r\x08\xcb\xfe`\xfa\x85\t\x97\x0f\xce\xfcy\x01\x17\x16\x84\x07\xa1\xfc\xe7\ny\x11\x02\t\xa3\x08\x1e\x12\x93\x0e?\x05\x8b\rQ\x11\x87\n(\x08\xe4\x0b[\x0c\xae\t)\x0c\x8c\x0c\x07\tZ\x03a\x05\x01\x07\x9c\x04\xeb\x03\xf9\x02\x8a\x04\xe4\x04\xdf\x00\xc9\x01e\x05\xf9\xfe\x03\xfe!\x06@\x06S\x01\xac\x05V\x08\xba\x05]\x03\xfd\x04\x8d\x07\xb2\x05\xc0\x01\xba\x04\xd8\x07F\x05\xcb\x02\xd8\x02i\x00\xcd\xfb$\xfb>\xfc\x15\xfb\x02\xf9\xe4\xf6\x04\xf5\x9e\xf4\xdc\xf3\xbd\xf1\xa0\xef\x8c\xefD\xee\xe4\xed\xd3\xf0c\xf2\x82\xef\x1b\xf0q\xf2&\xf4\xf6\xf2F\xf7B\xf9\xe0\xf5j\xfb\xae\xff\xe3\xfe\xa3\xfd\xbd\xff#\x03\xa9\x02<\x02\xab\x04F\x07\x1b\x04\xcb\x02\xb5\x07\xc2\x06\xa6\x03\xc2\x05\xa5\x04\xc9\x04\x96\x05\x13\x04\x10\x05\xcc\x04\xa3\x02r\x03\xb7\x04\xcf\x04\xd3\x01\x83\x040\x02\xb2\x01j\x04:\x01\x1b\xff#\x02\xb1\x01\xd3\xfb\x08\xfc\xc7\x00%\xfd\xec\xf4\xa3\xfb"\xfc\x08\xf7\xd4\xf7?\xfb\xb1\xf4|\xf2\x16\xf9\xd9\xf8Z\xf2\xf4\xf4V\x01\'\xf3\x15\xf4)\x00\x1a\xfdY\xef\xaf\xfb\x9c\xffd\x01\xc5\xf8\xcb\xf8\x95\x0c\xbc\xfea\xf6\xa7\t\xd0\x05%\xfaC\x047\x06C\x04\x06\xff5\t\xe7\x06\x07\xf8\xec\x07}\x0c\xa3\xfa\xbf\xf8"\x08\x81\x08\x03\xf7\xeb\xfb\xcb\x0e\xd2\xff\xf3\xf8\xb8\x05\x88\x03U\xfd\xad\x019\t~\x00\xc4\xff\x98\t\xbb\x04*\xfe\xa1\x0bk\x0c\x97\xfdI\x08M\n\xb9\x04\xe7\x04\xb6\n\xc3\tV\x05\xc3\x08\xed\x05\xf3\x03\xe7\x0cY\x07\xdd\xfbk\x05D\x10\xf5\xfa\x1a\xfd\xee\x10E\x01C\xf7\xf7\x04\x88\x05\xf4\xf4G\x04b\x00\xc3\xf8\xea\x00b\xfb\xc9\xfb\x8d\xfcG\xff\x18\xf7j\xf3\xda\x03,\x03\xd3\xf3\xb8\xf8\xc7\x06\x1a\xfa\x8d\xfb\x08\xfe\xdc\x03\x80\x00\xdc\xfa\xbe\x08\xa8\x02:\x00\xd2\x05\x9d\x05\x1f\x00[\x03\xbd\x06\x91\x03\xf4\x03\xf8\x03P\x04\x18\x02f\x01\xc6\x01\xfd\x01\x8a\x01\xa3\xfd@\xfd\x11\x05#\xfdG\xfa\xd2\x01g\xfdg\xf7\xe8\xfd\xb7\x02\x94\xf7\xf8\xf5q\x02\xf7\xfd\xda\xf3\xa8\xfd:\x01\x86\xf7D\xf5\xc7\x03\xa8\xfd\xc8\xfbt\xfc@\x00 
\xfe\x04\xfee\x060\xfa\xf7\xf8\xcc\x06\xdb\x00\\\xfa\x11\x00\x07\x03\xa3\xfb|\xfc\xc7\x04\xef\xf9\x17\xf8\xe5\xfc\xd7\x05\xb0\xef\x81\xfa\x1f\x08\xdd\xf8Q\xf1\xe7\xfd\x88\x01\x89\xf5Q\xf3\x00\x00\x08\x04\xd6\xed\xf1\x01\x8f\x00?\xf4O\x01\xcf\xfa\x89\xff\xfe\x01\xa0\xf8y\x06T\x01\x1c\xfbD\t[\x05\x85\xfa}\t\xf5\x07n\xfd\x0f\n\t\x04\xa1\x02\xa4\t\xff\x01c\x01V\x0bv\x04\x7f\x00\xf9\x08R\xff&\x04]\x07\xfa\xfc\n\x01P\n\xcb\xfc*\xff\xff\x05,\xfdb\xff\xe1\xffH\x06\x1e\xfbS\xf1\xbd\x07@\x10n\xeb\x1c\x01\xcf\x0b0\xf5\x9e\xf2(\x0c\xb1\x06\xb4\xef"\x01\xaf\t\xd3\xfeF\xf7`\x05Y\xffy\xff\x10\xfae\x05\x83\x04\x80\xfc\x97\xfd\x0f\x07I\x02\xfb\xf4\x1e\xfd\xd9\x0b\xa2\xf8\x98\xf6\xd7\x06p\x01\xd6\xfc\x11\xfdB\xff\xc1\xf3\xb6\x04\x98\xfa\xbd\xfb\x95\xffe\x04\xc0\xfe\xc4\xf6\x13\xff9\x06\x18\xff\xc4\xf4e\x0b\xee\x069\xf7g\x01R\x11\xd4\xfd \xfd\x89\t\xc2\nE\xf9D\x06\x07\x0fb\xff\x94\xfd\x9e\x08l\x0b\xc7\xfa\xd5\x03a\x0c\xaa\xf8\x8b\xfb\x9b\x0b%\x02v\xfc,\x04\xf6\x00\'\xff\xfd\x01\xc3\xfd\x94\x03\xfa\xfa\x14\xff,\x03\xe6\x01\xfb\xfc*\xfe+\x01[\xfa^\xff\xbe\xfdQ\x00\xf8\xfd&\xfd\x1e\xfb\x1c\x06\xbf\xfe\x00\xf2\x8e\x0b\xd0\xf8\x96\xf8\xe9\x02\xcc\xfd\xe2\xfeH\xf8\xf8\x04\xc0\xf9\xc0\xfd\x1b\xffh\xfdm\xfc\x0c\x01]\xf5\x90\x00y\x03\xbb\xf5C\x08{\xf6\xe4\xfc\xa2\x00\xca\x00\x8f\xf9\x95\x02\xb6\xf6\xa2\x04&\x00\x85\xf9\xde\x02\'\xffn\x00\x13\xff\xaf\x02\xf8\xf5~\x0b+\xfa6\x02f\x05f\xfd\xe5\xfe6\x07\xff\x02B\xfc\x1e\x00\xc6\x07\xf4\x01\xa9\xfbp\x0bk\xfb\xce\x03\x0b\x05\x96\x01\xb9\xfa8\x05Z\x00\xf7\x02\xcf\x00;\xfc\xf6\x00\xec\x02\xed\xff\xbb\xfe\x85\xfc\xa5\x02\x11\x02\xbd\xf4\xac\x08\x9f\x06\x00\xf1\x17\x01n\x10u\xec\x88\x05x\r\x96\xf3\xdb\xffJ\x0eV\xfa\xa8\xfc\x0c\x07\xe4\xfd\x1b\x06l\xfb\xff\x05\x95\x01[\xfd\xfc\xfeJ\x055\x02\x17\xf6_\x07\xb9\xf7\x15\x04\xa6\x019\xf4\x0e\x06\xb7\xfc\xb7\xf9\xd5\xffV\xfd&\xfbz\x02\xbf\xee\x82\x0f\\\x02x\xea\x17\x08E\x00\xff\xfdo\xf5\x9d\x0f\xc5\xff\xcc\xf9\x8c\xff~\x00\x03\x073\xf49\r\x19\x01\xb7\xfd\xa8\x01!\x02\xf4\x01\xe1\xffh\xfeo\xfcX\n\xa5\xfdi\x03r\x04\x0e\xfd\xed\xffb\x06\xdb\xf9\x02\x00\xba\rL\xf4\xc0\x02I\x08\x92\x038\xf8\xc8\x03T\x03\xd6\xf8\x9d\x08p\x05\x1f\xff\xd8\xf5\x96\x14\xae\xfdH\xf0/\x10\xcf\x07\xfa\xed\xa7\xfd\xff\x15\xa7\xf6\x9f\xf9\xcb\x08\x80\x02\x9f\xf7\t\xf2|\x0c\xca\x00\xd5\xf1I\x00\xe7\x06\xae\xf9s\xef\xe9\x0b@\xf7\x8f\xf2I\x06\xe6\xf4\xe4\xfc&\x05\xaf\xf3`\x02]\xf9\xb7\xfc\xc1\x003\xf9\x1f\x03O\xf9\xeb\x04\xa7\xfa\x1c\x07\x1b\xfb\xd0\xfe5\x05\xbc\xfe\xda\x02\xda\xfdE\x05\x1a\xffH\x08\xfb\xfd\x1e\x05\xbb\x08\x0e\xf4\xa5\x04E\x08(\x02\xbc\xfd\x89\xff\xf9\x0fg\xf3\t\xfe\x90\x12\xa9\xfb\xd6\xef\x18\x06\xd9\x11\x95\xf0B\xfb:\x14\x9a\xfe\xda\xf1\xff\xfc\xb5\x0e\xde\xfd\xe0\xf5]\x0cK\xfd\xc6\xf2\xaa\x14r\x01h\xe9X\x04t\x0b\xaa\xf8\x9f\xf5\xb3\x0b5\x07\x86\xf2\xe0\xfa\x0c\x0f\xce\xf2H\x01\x9d\x00i\xfb\x1c\x05\x10\x00\x7f\xfcs\xfb\x93\x05\x0c\xfa\x90\x00\x1f\xf8O\x05\x17\xff\xb1\xff\xbb\xf2j\x05\xec\x04\'\xec\x99\x10x\xff\xf8\xec\xc3\x06v\x0b\xcc\xe5\x82\x04\xe7\x19`\xe6\x8f\xfa\xe6\x1a\x8a\xf2\x1b\xf7\x88\x05G\x08{\xfbd\xf7\xc7\x16\xc7\xfa\xe0\xf4\xfb\x0c\xd2\x04\xdc\xf9[\x07\xe6\xff"\xfa$\x0f\'\x01P\xfa9\x08\x9e\x04\xe8\xff\x83\xffq\xff\xac\x06R\xff\x80\x01W\x03\x89\x02}\x00\xb0\xfcX\x07x\x00\xed\xf7p\n\x08\xfa\xd0\x00%\x08\xb4\xf6\\\x05o\x02[\xfd&\xf9\x8b\x08\xee\xf9\x89\xfcq\tN\xf8\x80\xf9#\n\x04\xff\x9c\xf2\x9e\r\xcf\xf5\xc1\x005\x05\xe1\xf7\xda\xf9\x91\t\x1b\xfb(\xf4\x1d\x13h\xed\xe2\xfb\xe1\r3\xf5}\xf8\x06\xfe\xf4\x07\x88\xf4\x87\xfa\x02\x05,\xffu\xfa\xd2\xf7\xf4\x04!\x00\x17\xfcQ\xf6\xb9\x0b\xf6\xfc\xb3\xf5\xbc\x06D\x04\xc2\xfbj\xff\\\x06\x8f\xfa\x9c\x01X\
x03}\x02K\x00w\x10r\xf5\xeb\xffK\r`\xfbc\xfd\x82\x0c\xb3\x02\x88\xfb*\x00-\ns\xfc\x86\xf9\x01\t*\xfal\x018\xf5\x82\x11#\xfc\xe2\xef\x9b\n^\x06\xae\xf2\x88\x02@\x0c\x1d\xfa\xae\xf6\xb4\x13\x99\x01\x84\xeer\r\x00\x0c\xf1\xf7\x8e\xf1\x0e\x18\x97\xfc\x89\xef#\x0c\xde\x08\xa1\xefk\xff^\x0c\xde\xf2~\xfa\xfd\x08\x84\xf9\x81\xf3\x89\x06\xd0\x06\xf3\xec\x17\xfci\x0e\xe6\xf2\x99\xf3\x8b\x05_\x06\xb1\xf6\x85\xf3\xf5\x14\xa8\xfeH\xe2I\x12\xa8\x0f\xda\xe80\x01\xbd\x14\x8b\xf5\xd6\xf8#\r\x93\x046\xf3\x0e\x06~\x0b\x0b\xfd\x17\xfe\x08\x08\xfc\x06)\xf5p\x07\x0b\x08\xee\xfc\x8b\xf9\xd4\tf\x03\x0e\xfb\xc7\x02\x8c\x02F\x000\xfcQ\x00\x05\x01\x10\xfec\xfb\x03\x08$\x00\x84\xf9\x87\td\xff\xa6\xeeD\x03\xe4\x07L\xfd\x83\xfe\xb7\x02\xff\xfb]\xfd}\x05\x91\xf9\x97\xfer\xff\xad\xfa\x0e\x00\x14\x02d\x01_\xfcJ\xfb\xa8\xfc\x85\xfd\xd6\xfe\xb7\x02\x9a\xf6\x1a\xfd\xe8\x03\xd3\xfb\xb0\xfb\x9b\xfc6\x02\x94\xf5x\xfdn\x07\x81\xf9\x97\xff\x97\x03\xf2\xf6\xa1\xf6\xfa\x10\xe3\x01y\xf1q\x04\xd3\x04\xc7\xfe\xe9\xfb\xb8\x0c\xc5\xffi\xf7E\x03P\nD\tG\xf4\x96\x08\xe5\x08\xb0\xf6(\x07\x06\x0b\xad\x02W\x03\xec\x01\xa4\x012\x052\x02!\x05#\x01\n\x00\xcd\x03r\x02\xb8\x02E\xff\x19\xffL\xfe\xf1\xfd\xeb\x01A\xff\xec\xffz\xf8\n\xfal\x06\xa0\xf7\x90\xfd\xa2\xfe\xcd\xf9\xfb\xfe\x86\x001\xf8\xfa\xfb\xc2\x08\xac\xf3i\xf6\xa7\x07A\xff\xae\xf7\x9f\x00\x85\x01\xf7\xfa\xc9\xfb\xda\x02\xc6\xfb\xa3\xf8m\x03\xce\xfe\x87\xff\xb7\x00n\xff[\xfcd\xfe\x10\xff\xd1\xfdh\xfe \xfc7\xff\x90\x03\xae\x013\xfe\x1e\x01o\x01\xf4\xfdn\x03Y\x07\x17\x03\xe3\x01K\t;\x0b\xf8\x04\x80\nM\x0e\x8a\x04t\x05\xcd\x0f\xe8\x0c\xdf\x08<\x0cc\x08e\x08v\n\x05\n\xaa\x05\x8f\x01\x07\x05V\x02G\xfe\x8f\x02\xad\x01\x9e\xfa\x9b\xfa\xc0\xfa\xcf\xf8\xd9\xfa\xde\xf8I\xf5\x0e\xf8\x05\xf9]\xf7\x13\xfa\xa1\xfa\xaf\xf8e\xf8\x90\xfa\xee\xfd\x1e\xfe\x0f\xff\t\xfe\xa5\xfd\xc9\x00t\x01I\xff&\x00e\x03\xca\xfc\x95\xfa\x13\x02\xd8\x02\xcb\xfb\xa6\xfa\xa9\xfb\x0e\xf9\x0c\xf9\x04\xfat\xf9\xe3\xf4/\xf6\xb0\xf5\xf3\xf5\x80\xf8\xbe\xf5\x14\xf8\xca\xf2\x07\xf3\x0b\xfa\x84\xfc\xb1\xf6V\xf8\xe1\xfc[\xf7\xfb\xfb;\x00\x03\xff\xad\xf9r\xfb)\xffN\xfc\xbb\x01T\x01\xd9\xf9\x9f\xfd\xb9\x00d\xfd\x11\xfd\x0e\x00\xcc\xff\x8a\xf9,\xfc\xb1\x03\x87\x05\n\x02\x9c\xfdx\xffj\x03\x16\x04E\x05v\x05%\x07\x8d\t\'\x0bu\r\xa2\x0e\x90\rB\x0fM\x11=\x13\xcd\x19\xef\x1dH 
\xa7"k#\xd2"\xcc\x1e$\x1e\xd8\x1f\xef\x1f\xe6\x1b`\x18|\x18>\x17}\x11\xfc\n\x92\x03\'\xfdV\xf7u\xf2\x93\xf1\xbc\xef~\xeb\xb2\xe7\xd2\xe6\xc0\xe5[\xe3E\xe2#\xe0\xca\xde\x93\xdf\x0f\xe3\x14\xe8\xb7\xec\x14\xf0\x1f\xf2\x99\xf3=\xf7\x9c\xfb\xac\xfd\xc5\xfeb\x01\xbb\x02u\x05j\n\xba\x0b\n\x0c\xa5\x0b+\x07\xec\x035\x03=\x02*\xff\xb7\xfa\x08\xf8E\xf7\xb9\xf6\xcf\xf5\x8f\xf5&\xf1\xbc\xec\xa2\xebV\xecq\xee\x9f\xf0\xfd\xf1\xfa\xf2\xdb\xf6\xa1\xfc\x07\x01\x04\x03\x08\x05G\x04q\x06\x05\n\xd5\rJ\x11\x9f\x12R\x12\xc3\x11\xa1\x12\xef\x12\x1c\x10s\x0b\xb0\x07\r\x05u\x03\xb4\x02\x84\x01\x93\xfe\xbf\xfa"\xf7^\xf5\xfc\xf3\x10\xf22\xefG\xed\xa8\xed(\xf0\xd8\xf2\xf4\xf2\x19\xf3\xd4\xf3\xbd\xf3\x10\xf4\xa0\xf5\xca\xf6\xd1\xf7n\xf9\x18\xfbt\xfc\xce\xfdO\xfd\xf3\xfb\xe9\xfa#\xfa\xb0\xfa\x9d\xf9\x9d\xf8\xcc\xf8\xf4\xf9`\xfc\'\xfc\x9a\xfb\xa9\xfb\x87\xfc\x03\xff~\xffs\xfd\xce\x03U\x16O%?*\xc9\'\x0b*\xb52r5\xff2\x872F5T5e2\x9d2\xf14\xc10\x15#\x99\x13\xa4\t\x03\x04\x94\xfco\xf3\xc7\xeb5\xe7\xb2\xe3\xf5\xdf\xfa\xdc\xd7\xd9\xf3\xd5\x89\xd0\x89\xcb#\xcd\x84\xd5\xdf\xdc\x1b\xe1\x94\xe6\x86\xee#\xf6\x80\xfc|\x01D\x06+\x08\xe6\t\xa9\x0e\xb4\x13b\x19\xd5\x1d[\x1cL\x19\xb7\x16\x9f\x13\xe2\x0eR\t)\x03\xb1\xfbl\xf4\xac\xef\x8d\xef\x86\xed^\xe9\x06\xe4;\xde\xc0\xdaU\xd9P\xda\xb9\xdc\x17\xdes\xe1\x1d\xe8\xa2\xee\xa9\xf5\xe0\xfa\xab\xfe\x93\x01y\x05\xfe\t\xbf\x0f\x85\x15\x06\x1a\xc1\x1d\xcb\x1e\xd2\x1d\x16\x1e=\x1d\xfb\x19\xcd\x15v\x11[\x0e\x0c\x0b\xe8\x07\xe4\x04T\x01\xd7\xfc\xa4\xf8?\xf5\xcf\xf2%\xf2~\xf1\x99\xf0\xcc\xef{\xf0\xa0\xf2`\xf4A\xf5m\xf6R\xf7\xf8\xf8H\xfa\xc6\xfa\xe9\xfb4\xfc\xd6\xfb\x12\xfa\xa0\xf8A\xf8\xa8\xf8N\xf6\xc6\xf3\x8b\xf2l\xf1\xd4\xf1q\xf1\xa6\xf0\xd2\xf0\xd3\xf0f\xf2\xae\xf3\xcf\xf3\x9c\xf5\x8c\xf9\x9c\xfcX\x00\n\x05\xcb\x07\x8e\t[\x0b3\x0f\xad\x16\xff n+\xfa2_6\xd86\xc87\xd48\xf96\\2^,\x98(k\'\xa8$\xd1\x1f\xaf\x19\x9d\x11Z\x076\xfc\xa7\xf2\x90\xeb\xc4\xe5\x1d\xdf\xf5\xd8\x97\xd6\x8a\xd7D\xda\x89\xdc\xf8\xdc\x11\xdc\xa2\xdc&\xdf\x86\xe2X\xe7k\xed\xf6\xf3\xe9\xf8N\xfe\x99\x05f\ry\x12T\x14\xcc\x13w\x13\xbb\x13\xf8\x13E\x13\x88\x11\x03\x0f\xd7\x0bg\x08y\x04\x1f\x01\xf0\xfc\xf5\xf6\x9c\xefM\xe9\x85\xe5\xab\xe3\xf8\xe1o\xe0\xd3\xdf6\xe0\x83\xe1\x94\xe3\xb9\xe5~\xe8-\xeb\x9a\xedO\xf18\xf6x\xfcr\x02V\x07"\x0b\xf2\x0e\x18\x12m\x14\xc7\x15\x85\x16\xb2\x16+\x16\xa4\x14\xb0\x13\x12\x13\xcc\x11i\x0f\xaf\x0b\xdd\x07i\x05\xa6\x02T\xff\x05\xfd\xee\xfa\x18\xf9\xd7\xf7\xfd\xf6\x89\xf6\xed\xf6\x86\xf7_\xf7p\xf6\xd7\xf5\xf7\xf6G\xf8\x7f\xf8\xd1\xf8\x0e\xfa\'\xfb\x9a\xfbo\xfc\xa8\xfc\xcb\xfc\xce\xfd\x0b\xfe\x8a\xfdh\xfd:\xfe\xf3\xfe\x88\xffp\x00\xd4\x01\xd0\x02\x8a\x03d\x04\xdc\x04\xde\x05\x8b\x06\xdc\x06t\x07\xe2\x07\x97\x08s\tN\tz\x08z\x083\x08\x9c\x06\xf7\x04\xa1\x044\x05.\x05,\x04\r\x02\x8b\x00\x80\x00\xac\x00\xd5\x00o\x00\xf6\xff\x03\x00D\xff\x99\xfeE\xffr\x00\xc8\x01\xd8\x02\x19\x03.\x031\x04\x8b\x05k\x06i\x07\xf1\x08E\n\xa3\n\x8e\n\xd4\ns\x0b\xb3\x0b\xfa\n\xad\t>\x08\xdf\x06j\x05\xad\x03R\x02(\x01\xa4\xff\xc2\xfd\xb7\xfbT\xfa\x94\xf9g\xf8\xa9\xf6$\xf5c\xf4+\xf4\xda\xf3H\xf3/\xf3@\xf3D\xf3\xbf\xf3\x86\xf4\xbd\xf5\x1f\xf7\x9f\xf7z\xf8\xac\xf9{\xfa\x89\xfb\'\xfc\xbf\xfc\x85\xfd\xd2\xfdR\xfeq\xfey\xfe\xe5\xfe\xdb\xfe\xb3\xfe\x7f\xfe\x1c\xfe\xbf\xfd\xc9\xfd\xd7\xfd\xd7\xfd\x9a\xfd\xc1\xfdp\xfe\x8a\xfe\xf6\xfe\xcf\xff^\x00\x07\x01\x85\x01\xdf\x01\x99\x02i\x03\xf5\x03f\x04\x8a\x04\xa1\x04\xf5\x04]\x05]\x05\x1c\x05\xcb\x04_\x04\xc5\x03\x17\x03\x15\x03\xc9\x02\x1d\x02\xaf\x01\xde\x00j\x006\x00\xc9\xffW\xff;\xff=\xff\xcf\xfe\x95\xfe\xc9\xfe\xd6\xfe\x9f\xfe\xa2\xfe\xf0\xfe\xa3\xfe\xbb\xfe\x08\xff\xbd\xfe\xb4\xfe\xd5\xfe\x97\xfe4\x
fe\t\xfe\xcf\xfd:\xfdy\xfc\x0e\xfc\xbf\xfb\x04\xfbl\xfa5\xfa\xca\xf9R\xf92\xf9*\xf9\x19\xf9\\\xf9\x02\xfa\xbc\xfa3\xfb\xfd\xfbA\xfdU\xfee\xff|\x00\xc3\x01\xda\x02?\x04h\x05e\x064\x07h\x08\x01\t\x18\t\x88\t\xb2\t\xc5\t\xb9\t\x9d\t"\t\xc7\x08W\x08\xda\x07\x1c\x07\x8f\x06\x11\x06\x82\x05\xa1\x04\xdc\x03s\x03[\x03\r\x03\x86\x02\x85\x02\x93\x02l\x02:\x02u\x02z\x02w\x02C\x02(\x02\xed\x01\xd7\x01\xac\x01<\x01\xcf\x00N\x00\xd4\xff?\xff\x86\xfe\xac\xfd\x1d\xfd\xb5\xfc\x13\xfc0\xfb3\xfa\xa3\xf9(\xf9v\xf8\xdb\xf7g\xf7\x0c\xf7\x01\xf7B\xf7a\xf7\xa4\xf7\x12\xf8u\xf8\xa6\xf8\x02\xf9\x0b\xfa\xb8\xfa\x0b\xfb\xdb\xfb\xf4\xfc\x01\xfe\xe4\xfe\xdc\xffL\x00\xa5\x00M\x01\xd3\x01\x02\x02v\x02\xb4\x02\xbf\x020\x031\x03\x0c\x035\x03\x1b\x03\xa5\x02\x9c\x02u\x02,\x02\xf8\x01\x00\x02\xdb\x01~\x01~\x01\x98\x01I\x01\x0f\x01/\x01#\x01\xde\x00\xb9\x00\xdf\x00\xe1\x00\xb5\x00\xa3\x00\xad\x00_\x00\xfd\xff\xcf\xffz\xff\x02\xff\xa1\xfe\x0e\xfel\xfd\xe5\xfc\xa0\xfc8\xfc\xbc\xfb,\xfb\xbe\xfa\xb0\xfa\xab\xfa\xa0\xfa\xd5\xfaW\xfb\xbd\xfb)\xfc\xf9\xfc\t\xfe\xa2\xfe0\xff\x04\x00E\x01\x0c\x02\xc1\x02\x8f\x03J\x04\xd5\x04b\x05\xef\x05\x01\x06\xee\x05\xf0\x05\xbd\x05I\x05\x1c\x05\xa8\x04\x15\x04\x97\x03;\x03}\x02\xe9\x01\xb8\x01-\x01\xb0\x00q\x00o\x00;\x00a\x00\x93\x00\x94\x00\xc2\x00_\x01\xc8\x01\xff\x01]\x02\xd4\x02$\x03t\x03\xcb\x03\xe0\x03\x0c\x04\x19\x04\x10\x04\xda\x03\x9e\x03M\x03\x97\x02\xfa\x01[\x01=\x00X\xff\x97\xfe\x88\xfd\x99\xfc\xc2\xfb\xe9\xfa0\xfa\xbf\xf9Y\xf9\xf6\xf8\xae\xf8\xcd\xf8\xbb\xf8\xfe\xf8\x85\xf9\x15\xfa\xf3\xfa\xa9\xfb\xa0\xfc\x99\xfd\x7f\xfe;\xff\x0f\x00\xdb\x00\x82\x01\xf7\x01r\x02\xf8\x02B\x03f\x03\x81\x03f\x03\x14\x03\x1a\x03\xe4\x029\x02\xb2\x01\x8c\x01&\x01\x98\x00\x0f\x00\xf4\xff\xb3\xff\n\xff\xe3\xfe\x01\xff\xef\xfe\xe5\xfe\xf1\xfe\xfa\xfe6\xffv\xff\xa5\xff\xd8\xff\x00\x00=\x00Z\x00\x80\x00\xca\x00\xe8\x00\xed\x00\xb5\x00\xa2\x00\x9e\x00\x7f\x004\x00\xe4\xff\x7f\xff\x1f\xff\xba\xfel\xfe]\xfe\xfc\xfd\xbc\xfd\x87\xfdN\xfd$\xfd2\xfd\'\xfd1\xfdu\xfd\xbc\xfd0\xfe\x90\xfe\xe7\xfeG\xff\xb0\xff/\x00\x80\x00\xe3\x00+\x01&\x01:\x01\x84\x01\xa4\x01w\x01m\x01n\x013\x01\xd9\x00\x97\x00W\x00\x07\x00\xdf\xff\x9c\xff\x8e\xff\x95\xffy\xff`\xff\x98\xff\xb4\xff\xba\xff\xf1\xff9\x00\x96\x00\x0f\x01k\x01\xc4\x015\x02\x96\x02\xc9\x02\t\x03+\x034\x039\x039\x03+\x03\xef\x02\xc5\x02{\x02\x1e\x02\xb5\x014\x01\xb1\x008\x00\xaf\xff\x08\xffX\xfe\xc8\xfd\x9a\xfd\x1d\xfd\x94\xfcB\xfc\x13\xfc\x06\xfc\xfc\xfb\x10\xfc?\xfcb\xfc\xa9\xfc\x06\xfd\\\xfd\xd1\xfdA\xfe\x8f\xfe\xec\xfeU\xff\xa2\xff\xf7\xff2\x00k\x00\xa3\x00\xd4\x00\xd3\x00\xc9\x00\xe1\x00\xdd\x00\xc8\x00\xa8\x00\xab\x00\x9b\x00a\x00S\x00[\x008\x00\x16\x00\x00\x00\xfa\xff\xfa\xff\x1b\x007\x00:\x00K\x00o\x00\x80\x00\xac\x00\xf2\x00\x06\x01\x13\x01?\x01|\x01\xa1\x01\xdb\x01\xd9\x01\xcb\x01\xdd\x01\xbe\x01\xbd\x01\xb8\x01\x95\x01e\x01]\x01"\x01\xd0\x00\xa9\x00\x8e\x00G\x00\xeb\xff\xb1\xfff\xff8\xff)\xff\x04\xff\xd6\xfe\xc8\xfe\xcb\xfe\xbc\xfe\xbc\xfe\xc3\xfe\xdc\xfe\xd1\xfe\xba\xfe\xcb\xfe\xe2\xfe\xf4\xfe\x06\xff\x07\xff\x0b\xff\x1c\xff\x03\xff\xfd\xfe%\xff\x14\xff\x0c\xff\x04\xff\r\xff7\xffa\xffj\xff\x80\xff\x9f\xff\xcf\xff\t\x00<\x00r\x00\xa7\x00\xba\x00\xeb\x00/\x01n\x01\xb0\x01\xde\x01\x11\x02L\x02z\x02\xa4\x02\xe0\x02\xf6\x02\xf8\x02\xea\x02\xd5\x02\xb4\x02\x97\x02\x85\x02;\x02\xeb\x01\x92\x01;\x01\xc7\x00W\x00\xf1\xff\x8d\xff7\xff\xea\xfe\xbb\xfe\xa0\xfe{\xfeZ\xfe8\xfe*\xfe1\xfe0\xfe>\xfeg\xfe\xa1\xfe\xd2\xfe\xf8\xfe 
ca\x00\xda\x00\xfd\x00O\x01\xb6\x01\xee\x01\xe7\x01\xf3\x01!\x02K\x02[\x02i\x02}\x02q\x02I\x02\x0b\x02\x04\x02\xfd\x01\xe2\x01\xca\x01\x98\x01|\x01l\x01T\x01S\x01n\x01\xa4\x01\xa0\x01\x98\x01\x96\x01\xc5\x01\xf9\x01*\x02e\x02\x83\x02\x80\x02G\x021\x020\x02(\x02\x05\x02\xbc\x01~\x01@\x01\xed\x00\x91\x00B\x00\xe3\xffv\xff\xe5\xfem\xfe1\xfe\x13\xfe\xe3\xfd\x8c\xfd&\xfd\xc8\xfc\xa9\xfc\xbe\xfc\xfc\xfc=\xfdr\xfd\x90\xfd\x97\xfd\xaa\xfd\xf7\xfd\x82\xfe*\xff\xc3\xff:\x00\x99\x00\xe6\x004\x01\x98\x01\x18\x02\x8e\x02\xe7\x02.\x03n\x03\x87\x03z\x03B\x03\x10\x03\xd7\x02\xb7\x02\x8c\x02T\x02%\x02\xdb\x01Y\x01\xa4\x00\xfe\xff\xa3\xff\x80\xffn\xffF\xff\xe3\xfe_\xfe\xe1\xfd\x93\xfdw\xfd\x83\xfd\x8b\xfd{\xfd\x82\xfd\x8d\xfd\x9d\xfd\xaa\xfd\xb8\xfd\xd1\xfd\xfb\xfd.\xfe\x94\xfe\x17\xffa\xffh\xffA\xff$\xffX\xff\xbc\xff\x1c\x006\x00\t\x00\xc0\xff\x9c\xffv\xff`\xffY\xff4\xff\xfb\xfe\xcb\xfe\x98\xfeg\xfeQ\xfeF\xfe\x03\xfe\xaf\xfd\xc4\xfd\x15\xfe[\xfe\x83\xfe\x8c\xfe\xab\xfe\xae\xfe\xb7\xfe\x11\xff\xaf\xffG\x00\xad\x00\xd3\x00\xf3\x00C\x01\x89\x01\xce\x01A\x02\x90\x02\xa9\x02\xa4\x02\xc6\x02\xf6\x02\n\x03\x00\x03\xd8\x02\xc2\x02\xc3\x02\xc1\x02\xc7\x02\xbf\x02\x88\x02K\x02(\x02\x01\x02\x05\x02\xed\x01\xa9\x01h\x018\x01\x1c\x01\xf4\x00\x90\x00>\x00\xf5\xff\x93\xff*\xff\xf4\xfe\xeb\xfe\xc7\xfe\x89\xfe&\xfe\xed\xfd\xc0\xfd\x8c\xfd\x83\xfd\x9b\xfd\xcc\xfd\xdb\xfd\xbb\xfd\xd1\xfd\x0f\xfe+\xfe7\xfeJ\xfe\x92\xfe\xf3\xfeL\xff\x90\xff\xe5\xff\x15\x00/\x00U\x00\xa2\x00\x13\x01w\x01\xc2\x01\xe8\x01\xf8\x01\x03\x02\x16\x02:\x02V\x02g\x02d\x02L\x02\'\x02\x00\x02\xd5\x01\xaf\x01w\x012\x01\x01\x01\xe9\x00\xcc\x00\xa9\x00}\x00L\x00\x12\x00\xd8\xff\xae\xff\xaa\xff\xa8\xff\x9e\xff\x7f\xffF\xff\x19\xff\x0f\xff\t\xff\xf2\xfe\xd3\xfe\xb5\xfe\x89\xfeV\xfe2\xfe:\xfe6\xfe\x11\xfe\xec\xfd\xe4\xfd\xdf\xfd\xc7\xfd\xbf\xfd\xda\xfd\xf3\xfd\xfb\xfd\x08\xfe;\xfeu\xfev\xfet\xfe\x92\xfe\xc4\xfe\xe8\xfe\x04\xff,\xffL\xffq\xffw\xffu\xff\x8b\xff\xaf\xff\xd4\xff\xfd\xff\x1e\x00O\x00w\x00\x92\x00\xaa\x00\xdc\x00\xf6\x00\xf0\x00\x00\x01F\x01\x92\x01\xba\x01\xd8\x01\xfc\x01\x0e\x02\x04\x02\x0e\x02F\x02o\x02j\x02Q\x02Q\x02\\\x02Q\x02=\x02"\x02\xef\x01\xa8\x01k\x01K\x01/\x01\x04\x01\xc7\x00\x81\x00/\x00\xe7\xff\xac\xff|\xffF\xff\x10\xff\xd3\xfe\x97\xfeX\xfe!\xfe\xed\xfd\xbd\xfd\x89\xfdT\xfd?\xfd/\xfd\x1f\xfd\x19\xfd\x03\xfd\xef\xfc\xe0\xfc\xe2\xfc\xf6\xfc!\xfd?\xfdb\xfd\x86\xfd\x99\xfd\xbb\xfd\xf1\xfd.\xfet\xfe\xb0\xfe\xfa\xfeA\xff\x85\xff\xc5\xff\x15\x00h\x00\xbd\x00\x1b\x01\x87\x01\xfb\x01^\x02\xb5\x02\x01\x03K\x03\x99\x03\xe5\x031\x04r\x04\x97\x04\xa1\x04\x97\x04\x84\x04{\x04`\x046\x04\xfb\x03\xa9\x03L\x03\xe7\x02v\x02\x05\x02\x93\x01\x1d\x01\xa0\x00!\x00\xa6\xff)\xff\xb0\xfe6\xfe\xc0\xfdY\xfd\x05\xfd\xca\xfc\x8f\xfcN\xfc\x13\xfc\xf1\xfb\xe0\xfb\xe4\xfb\xfd\xfb"\xfcA\xfcV\xfcw\xfc\xbe\xfc\x16\xfdk\xfd\xbb\xfd\x03\xfeJ\xfe\x90\xfe\xde\xfe;\xff\xa0\xff\x01\x00Q\x00\x94\x00\xcf\x00\r\x01A\x01c\x01\x87\x01\xb6\x01\xda\x01\xf7\x01\x0c\x02\x13\x02\x06\x02\xe6\x01\xc7\x01\xb8\x01\xbb\x01\xc7\x01\xc8\x01\xcb\x01\xb4\x01\x94\x01~\x01\x82\x01\x8f\x01\x90\x01\x8f\x01\x8b\x01\x86\x01\x85\x01\x91\x01\x9c\x01\x9f\x01\x92\x01z\x01q\x01g\x01_\x01Y\x01S\x015\x01\xfc\x00\xd4\x00\xab\x00|\x00H\x00\x14\x00\xe5\xff\xab\xff\x85\xffV\xff\x17\xff\xcf\xfe\x86\xfe@\xfe\x04\xfe\xde\xfd\xc4\xfd\x9c\xfdb\xfd"\xfd\xed\xfc\xc6\xfc\xb2\xfc\xb1\xfc\xc0\xfc\xc8\xfc\xd0\xfc\xe7\xfc\x04\xfd!\xfdG\xfd|\xfd\xcb\xfd\x1e\xfex\xfe\xdc\xfe,\xffn\xff\xb4\xff\t\x00q\x00\xd4\x00<\x01\x8a\x01\xc6\x01\xf4\x01\x1e\x02R\x02\x82\x02\xae\x02\xd0\x02\xe2\x02\xf1\x02\xf5\x02\xf9\x02\xed\x02\xd3\x02\xab\
x02\x8a\x02v\x02m\x02S\x02&\x02\xee\x01\xb0\x01y\x01=\x01\x17\x01\xed\x00\xb5\x00p\x00*\x00\xf4\xff\xb9\xff\x7f\xffA\xff\t\xff\xcb\xfe\x8c\xfeW\xfe$\xfe\xf4\xfd\xc3\xfd\x9a\xfd\x81\xfdg\xfdI\xfd3\xfd\'\xfd\x1d\xfd!\xfd0\xfdN\xfdd\xfdq\xfd\x85\xfd\xa2\xfd\xc8\xfd\xf2\xfd\x1e\xfeS\xfe\x88\xfe\xb8\xfe\xe0\xfe\r\xff8\xffm\xff\x9f\xff\xd8\xff\x18\x00P\x00\x7f\x00\xa7\x00\xd2\x00\xfb\x00\x18\x014\x01^\x01\x90\x01\xbd\x01\xd3\x01\xd8\x01\xdd\x01\xe0\x01\xe8\x01\xfc\x01\x15\x02\x1d\x02\x0c\x02\x04\x02\xff\x01\xfb\x01\xe4\x01\xd2\x01\xcf\x01\xbb\x01\xb2\x01\xa5\x01\x90\x01q\x01A\x01\x18\x01\x03\x01\xf4\x00\xde\x00\xc6\x00\x9a\x00^\x00 \x00\xe6\xff\xc4\xff\xa4\xff\x89\xffa\xff.\xff\xfd\xfe\xbb\xfev\xfe;\xfe\x05\xfe\xe3\xfd\xc5\xfd\xaa\xfd\x94\xfds\xfdM\xfd"\xfd\x11\xfd!\xfd;\xfdY\xfdn\xfd\x84\xfd\x9b\xfd\xbc\xfd\xea\xfd*\xfeu\xfe\xc2\xfe\x12\xffY\xff\x9b\xff\xe2\xff\x1e\x00h\x00\xc0\x00\x0b\x01R\x01\x92\x01\xc8\x01\xe5\x01\x07\x02!\x02D\x02\x81\x02\x94\x02\xa3\x02\xa7\x02\x92\x02y\x02l\x02V\x02F\x02:\x02,\x02\x11\x02\xe2\x01\xb2\x01~\x01S\x015\x01\x1b\x01\xf0\x00\xca\x00\x94\x00a\x00;\x00\x0e\x00\xf3\xff\xd0\xff\xaa\xff\x8a\xff]\xff1\xff\x03\xff\xe0\xfe\xc5\xfe\xae\xfe\x9a\xfe\x7f\xfee\xfeF\xfe9\xfe,\xfe\x1c\xfe \xfe$\xfe.\xfe:\xfe;\xfeC\xfeR\xfef\xfe{\xfe\x98\xfe\xbd\xfe\xdb\xfe\x01\xff \xff0\xffM\xff\x88\xff\xc6\xff\xdf\xff\xed\xff\x1e\x00W\x00\x84\x00\xad\x00\xce\x00\x00\x01(\x01=\x01M\x01R\x01Y\x01v\x01r\x01r\x01\x86\x01\x99\x01\x9f\x01\x98\x01\xb2\x01\xd8\x01\xe0\x01\xda\x01\xce\x01\x95\x01G\x01.\x01K\x01l\x01\xad\x01\xe3\x01\x96\x01\xdf\x00\xfd\xff\xd9\xff\x03\x00:\x00\x82\x00\x7f\x002\x00\x8b\xff\xdc\xfe\xa1\xfe\xc4\xfe\xe2\xfe\x0c\xff\xf1\xfe\xb6\xfen\xfe#\xfeL\xfe\x88\xfeK\xfeQ\xfe\xdf\xfe\xc2\xfe\xb0\xfe;\xfe\xcb\xfd\x04\xfe\xa8\xfd\xae\xfd\xfd\xfd\x19\xfe\x15\xfe\x8f\xfeJ\xff\x81\xff\xab\xff\xdf\xff7\x00\xe5\xff\x9e\xff\xfd\xfe\x16\xfe\xd4\xfc\xfc\x00\xd9\x0e\xbc\x14P\x05\xc9\xf5\x93\xf8\xdd\xfa\xe7\xf8\x7f\xfe\xaf\t6\t\xbf\x00#\xfd\x86\xfa\xec\xf6S\xf5\xba\xfd$\x064\tY\x08\x98\x02\x1d\xfe\x9d\xfc\xe2\xfb\xcf\x00\xea\x05\xda\x08\xc4\x04n\x01\xf0\xff8\xfe\x13\xff\x00\x00L\x03\xc7\x02\x9a\x00$\xfd\x9b\xfc\xf8\xfc\x95\xfd:\xfe\x97\x00\x86\x00\x7f\xfeJ\xfb4\xfc\x1f\xfb\x0b\xfb\xf8\xfe\xde\xff5\x02\xf2\xfe\x03\xfe\x8b\xfa\x04\xfd\xa0\xff\x81\x05\x8e\x04d\x03f\x02\xaa\xfb\xe4\xfcT\xff`\x07\x16\x07\xf8\x05f\x03\xf3\xfe9\xfc\xe4\xfb\x92\xffK\x02@\x02*\x002\xfc\xf3\xfa\xc3\xf9\xa8\xfb\xf9\xfe\xb0\x00\xfd\xff_\xfe\xf2\xfba\xff\x98\xfb\xd3\xf4%\x06)\x19E\x17H\x06-\xffH\xfb\x0c\xf7\xd7\xfc\xf4\x0bV\x14O\r\xbb\x02\x1a\xf8\x11\xf1\xe0\xf0\xab\xfb\x01\x03\x1e\x03\xcb\x01x\xff0\xf9\x8e\xf3>\xf5+\xfb\x88\xfes\x02\x95\x08\xdf\x02\x15\xf8\xa8\xf7\x9a\xfb\xd4\xfeQ\x04D\x0b\x01\t\x11\xfe\x8a\xf9\xfc\xfaI\x03\xaf\x06a\tk\t\xda\x00k\xfa\xb9\xf8\x9d\xfb\xcd\xfd\\\x02Z\x08\xb2\x04\xa0\xfas\xf59\xf4;\xf9?\xfd\xc8\x01\x9d\x03)\x00U\xfd\xb6\xfbx\xfc\x05\xfe\xae\x03\x1b\x04F\x04\x84\x03V\xffi\xfe\xe3\xff[\x04\xf2\x02\xb0\x01\xbe\x04\x83\x05\x18\x01Y\xfe@\xfe\xd7\xfe5\x02\x8d\x05\xd5\x06\x18\x02\xd9\x00\x05\xfe\xf1\xfe\xd1\x03g\x03g\x02\xa2\xfdL\xfc3\xfb/\xff\xe0\x00\xa1\x03\xec\x02\x18\xfa6\xfb&\xfe\xed\xff\x07\x02\x0f\x03 
\x01\xb7\x00\xdb\x01\x07\xffy\xfbw\xff\xe1\x01_\x00\xcf\xffx\x02\xcb\x02!\xfbm\xf9h\xfem\xfer\xfc\xee\xfa\x9b\xfdY\x01\xf1\xfd\xea\xfd\xd2\xfd\xd9\xfb\xaf\xfc\x1f\xffA\xff\xeb\xff\xbf\x03a\x03\x9e\x02\x86\x02I\x01!\x01\xee\xff\xba\xff\xb9\x02#\x03v\x01\xd1\x02\xa0\x07\x1c\x04\xdd\xfd\x1c\xfe\x04\xff\xa1\x02\xbd\x03$\x02\xd6\x03\xaa\x00\x08\xff\xdc\xfe=\x009\x00s\xfe\xeb\xfep\xfec\xff<\xff\xa9\xff\x99\xfc\x80\xf9J\xfa\xab\xfd{\xfe\xce\xfew\x01\xc6\x00h\xfe\x07\xfa\x07\xfa\x1f\xff\xc6\x03\x0f\x06-\x016\xfe\x1c\x00\xe1\x02\x14\x02\xb9\xff"\xfe\xb0\x07D\x05\xb2\xf7\xc9\xfa\xe4\x06\xd9\x07y\xfb7\xfe\xa0\x00&\xfa{\xfb\xfb\x00\xf8\x00\x84\xfco\x01E\x06\xfe\x02\xe2\x00\x12\x00\xe0\xfd\x8c\xfe\xbb\x07\x9f\x0bm\x01\xc8\xfcl\xff \xfb\x9f\xf68\xf9V\x03\x89\x06\xe5\x02\x03\xfd\xa8\xf7,\xf7\xf0\xf8j\xfe\xd4\x05m\nd\x06\xfc\x00s\x01\xb4\xfe\x03\xfb6\x03\xfc\n\xd8\x0eA\n\xc1\x04\xa7\xfa\xed\xf2\xb8\xf8"\x02\xa9\x06m\xfc\xb2\xfe\\\xf9\x10\xf4\x0e\xf7\xce\xf7\xbd\xf9G\xf7\xe4\x00Q\x07\x19\x01\xaa\xfe\xb3\xfea\xfe\xbc\xfa\xc9\xfcj\x08\xa6\x0b`\x07\xf5\x03\x04\x04[\xfde\xfb\xa0\xfe\x0e\x003\x044\x08\xd2\x08\xd5\x00\x9d\xfd^\xfa\xbf\xfb\xbb\xfd\xdd\xfbo\x00\xb1\xfd\xd3\xffS\x01\x89\xf8v\xf6\x07\xf7k\xfbe\x00\xd9\x02W\x08P\x03\xd2\xfa\xf5\xfdh\x00\xc4\x00\xe9\x05\xb5\x12\x93\x0e\xbd\xfc:\xfc\xda\x02\xd3\x06r\x03\xbf\x06-\t}\xff\x01\xfc\x9a\xfe\xfc\x00&\xfb\xf6\xfb\xe2\xff\x17\xfdM\xfa\x1e\xf8v\xfc\xa1\xfe\xe0\xff~\xfdd\xf8\r\xfb\xe8\xff\xd9\x03j\x01u\xfd\x1a\x01f\x055\x03 \x03\xa6\x03I\x04^\xfe\xe0\xfd\xd9\x03\xde\x03\xd6\x01W\x03V\x03s\xfd\x12\xf5\xef\xf8\xc6\x00\xec\xffT\xfd\x98\xfcK\x00W\xfaE\xfaY\x00\xc8\xfeg\xfc\xda\xffp\x04\xd1\xfd)\xfc\xc5\x03\xe2\x04G\x02\x98\x03\xa3\x03\x1d\x00&\xff{\x04\xb9\x04\x1a\x00h\x00\x8b\x03\xe6\x04d\x02\x8a\xfd\n\xfd\xe9\xfc\xac\xfd\x14\xfft\xfeC\x00P\xff\'\xfd%\xf9;\xfc\x92\x00A\xfd\xd3\xfc\xe3\x01\xdb\x05\x00\x01R\xffU\xfe\xdd\x02\xd8\t\xb3\x05\xaa\x02\x00\x01j\xfe\xb3\x01\x91\x04\x8a\x07(\t\x8c\x02\xfc\xfb/\xfbW\xfc\xc6\xfbY\xfc\xeb\xfe\xfa\xfe\x93\xff\xb4\xfe\xa9\xf9\xcd\xf8u\xfb\xa9\xfba\xfaC\xfd\x9b\x01\xb0\x00\x06\xfc\xd0\xfb\x1d\x01/\xff\x11\xfc 
\xfd\xe8\x02\xc0\x02\xb6\x01\xfa\x04\xed\x02\xb9\xfe\x17\xfeg\x06\x12\x08\xda\x05>\x05J\x03~\xff\xa1\xff\xba\x03=\x01\xed\xfd\xa9\xfe2\x02\x0f\x00s\xfa\x13\xfb\xd1\xfci\xff\x8b\xfb\xea\xfb<\xff2\x00\t\x08<\x00l\xfb\x14\xfeJ\x00\xd2\x02\xb5\x05\x9f\nh\x04%\xfeD\xfa\x9c\xf8\x84\xfc;\x04\x8e\x07\x9c\x06&\x05\xe1\xfe\xfe\xf6\x0b\xf6\xed\xfcY\x01\xd4\x00\x12\x01\xcf\x03}\xff;\xf9\x11\xfa\xfa\xfb\x0e\xfc[\xfag\xfc\xc3\x01\xc3\x05&\x04[\xfeO\xfb\x12\xfd\xdb\x01\xc6\x04\xab\x06\n\x049\x01+\x02\xb3\x02\r\x05\x05\x07\x19\x05\x94\xfd\xa5\xf8a\xfb:\xfe\xd9\xfd\xa9\xfdi\xfe\x84\xfd\x08\xfa\x7f\xfc\xdd\xfe\t\xfe\xdb\xfd\xfd\xfdx\x01:\x03m\x05\xf9\x01#\xff\x8d\x00x\x02\x18\x05\xd2\x04\x89\x06\xe6\x04\x86\x00W\xfd\xf5\xfc\xaa\x01\x13\x05\xea\x07^\x06H\x01\xdf\xfb\x97\xfb\x87\xfd\xff\xf9\xca\xfay\xfeE\x02\xa7\x00\x17\xfd.\xfd\x05\xfai\xf6\x07\xf6\xba\xfc\\\x042\x05\x87\x02\xe2\xfd\xb6\xf9\xb3\xfbH\xff\xe2\x03\xe0\x04\xaa\x03\xe2\x01\xb2\x00\xd4\x02U\x01\xd9\x00\n\x00"\xff-\x01\xe0\x03\xab\x02\xc7\xfe\xbd\xfd\xc5\xfb\xa7\xfa\x91\xfd\x16\x00\x93\x00+\x00m\x00u\xff\x80\xfd\xa2\xfb\x9b\xf9\xc2\xfc?\x00q\x03\xdf\x04m\x04\x06\x03\x82\xfe\xe4\xf9L\xfb;\x01\x8d\x03\xb3\x02C\x02\xd9\x02O\x02E\xff\xf1\xfd\xc0\xfe\xe8\xff\x7f\x00f\xfe\xe8\xfff\x02\xfe\x00m\xff\x00\xff\xf0\xfd(\xfd>\xfa"\xfa\x02\xfc\x05\xfc\xf8\xff\xc0\x01"\x02\x83\x01\x19\xff;\xfd\x0f\xfbQ\xfb\x1a\x02\xda\x06\xdc\x08\xa4\x08\xdb\x05*\x02\x86\xfc\xe1\xfaX\xff\xfd\x04?\x07\x9a\t\x0c\x06\x1e\x01\xb9\xff|\xfeL\xff;\x00\xf8\x04T\x08\x19\t\xf4\x08&\x06:\x03\xa8\x00\x17\x00^\x04\xd7\x08\xf0\n\xa9\tI\x08\xfa\x04R\x00\xee\xfe9\x00\x9d\x04\x8a\x04\xa6\x04c\x05`\x03\x0b\x02\xc0\xff}\xfd\xf3\xfcy\xfe\xe5\xfe\xa1\xfeJ\xff`\xff\xf9\xfc\xf2\xf9\xa8\xf8O\xf6l\xf4\xda\xf6\xbc\xf9\xd5\xfaP\xfb\x80\xf9*\xf7\x14\xf6E\xf7\x0b\xf8\x95\xf7\xc3\xf8\xf2\xf9\xb1\xfa\x14\xf9\x89\xf8Y\xf9\xc3\xf8\x9d\xf9\x8e\xf9\x98\xf8\xb4\xf7\xf5\xf8\x11\xf9i\xf9\xa6\xfb\xa3\xfaZ\xf9\x1d\xf8\x81\xf6\x04\xf5\xe4\xf2\xe3\xf2\xf6\xf32\xf5\xd7\xf7\x1a\xf9U\xf8]\xf9\xf4\xf9\x94\xf78\xf8\xdc\xfc\x8b\x03,\x0b\x15\x0b\x8f\x08K\x07\xf7\x02\x11\x04\xb9\x06\xb4\x08J\r*\x0c\x92\t\xee\x07j\x08\x82\t\xac\x07\xc0\x07N\x05\'\x04\xbd\x03\xa5\x03\x8f\x07\xf9\x04J\x04\t\x07\xb5\x01\x81\xfb1\x00\x92\x15\xd7)\xd5,k$Q\x1bc\x18\x8a\x17\x81\x1a5&01\xa12k)\xe3\x1fx\x18\x91\x11=\x07{\xfe\x96\xfb\x14\xfd\x1b\x00\x06\x01`\x00y\xf4M\xed\xe3\xe3\x12\xd8\xbc\xdaI\xe6\xe2\xf0-\xf2r\xf1\x99\xee\x18\xedc\xe9\x0c\xe8\xd8\xef\x84\xf5A\xfaL\xfd_\xfdM\x04=\x07\xb3\x01Z\xff\xb0\xf9\x01\xf6\x1d\xfaL\xff)\xfe\xdf\xfcN\xfav\xf2\x1b\xe9\x95\xe4\xd9\xe8\xb9\xeb\xbc\xec\xba\xed\xcb\xea\xf0\xe7\xde\xe6k\xe7\xd9\xe8;\xec\xab\xf17\xf5\xf8\xf7\xe0\xf9\x1a\xfb!\xfa\xe4\xf8\x9f\xf9\xec\xfb\xcf\xfeN\x00\xd1\x00G\x01\x11\xff8\xf9\t\xf5\xa5\xf3\xc1\xf3\xaf\xf7\xa2\xfb\xb5\xfc\xf9\xfc\x82\xfb\xcc\xf8\xec\xf5\xa3\xf5\xfa\xfa\xf5\x02U\x05\xcd\x03\x19\x07\xae\r\x06\x13\x1e\x11R\n\xda\x07\xa3\x069\x0bs\x105\x15\xd7\x1a\x14\x16\x90\x0e\x1a\x07\xec\x01z\xfd\xd8\xfb\xeb\x0e\xb9,\x9e>\x1d=\xaa-\xa0"\xaf 
\xfc\x1bv\x1d\xa1,\xac;@?\x0c8\xb0*h\x1b\x88\x0e\x03\xfd\xf0\xf1\x16\xf3\x9f\xf8\x0b\x03{\x04\x99\xfd\x89\xf2\x10\xe2\x16\xd3\xd2\xcc\xda\xd0\xe4\xdf\xaa\xef/\xf4\xcc\xf5\xe2\xf6\xb3\xf1\\\xec\x88\xe9\xb7\xeb\xf3\xf3\xa1\xfb\x00\x03\xb9\nF\x13\x14\x11\xad\x04\xcf\xf9\x86\xf0\xec\xee\xa1\xf4-\xfb\xc0\xffI\x01\xee\xfd>\xf4\xca\xe9f\xe4\xb5\xe4\xc9\xe6\x13\xe8\xc5\xee~\xf4R\xf7\xbf\xf7\xe3\xf2\xea\xec^\xe8\x95\xe8\x9d\xec\x11\xf5s\xfea\x03j\x03\xb0\x01j\xfd\xde\xf8\t\xf7\xdc\xf8C\xfd\xd9\x02\xca\x06\x17\x04\x17\xfe\xe0\xf7\x8e\xef\xf5\xe9\xcb\xe9P\xec\xf8\xf0k\xf3\xc9\xf6\x80\xf7 \xf4Q\xf2\xa5\xf0n\xf4\x8c\xfb\xa4\x01 \x07~\x08L\x0c\xf3\x0e\xf6\x08@\x03\xec\x04\x81\x07\xd3\x0c\xe2\x10\x91\x10l\x11\xdb\x0b\xa5\x05\x02\x04p\xfe<\xfb\xa0\x0c\xf5.\xefK\x04T\x8dA-*\x94"\'!S\'\x9c9\xebH\xcfJ@;\x90%\xb4\x16\x95\x06\xda\xf8\xea\xed\xfb\xe5*\xe9p\xf0\xdb\xf4S\xf11\xecO\xe0Y\xd0L\xc8Y\xca\xd2\xd8\xb4\xedE\xfb\xda\x01\xce\x05\xc5\x01:\xfb\xb2\xf6\xe5\xf6y\xfa\xab\x00.\x07I\x0e\xb8\x11;\x10T\x0cq\xfeB\xf4\xca\xef\xc2\xe6\x9c\xe3\xdd\xe7K\xee\x95\xf4]\xf5\x8b\xf0\xb4\xe6\x80\xdeW\xdd\x0f\xe2i\xe9\xcd\xf2Z\xfd\xec\x01"\x02\x14\x02\xc3\x01g\x00\x14\xfdn\xfa\xad\xfc\xee\x014\x08o\x0b\t\t!\x05\xd6\xfe\x04\xf8\xb4\xf3\xf2\xf3,\xf9B\xfcW\xfc\x86\xf9\xa8\xf5\xef\xf4*\xf2b\xee<\xeb\xdf\xe9\x1b\xeb)\xed\xb7\xf3\xea\xfa\xfe\xfe\x83\x01\xfb\xfdp\xf8\x88\xf7\xed\xf8\x95\xfc\x94\x028\x057\x06\xe0\x05\xeb\x03\x90\x01\xf7\x00\x98\xfc\xfa\xf5\xa7\xf5\xcd\xf5d\xf2\xdc\xf1\xce\xfd\xef \xb8O\xe6f\x83^\xa6I\xd96F.\x913\x9b?!Pv[\xceR\xb1>\x0c*\x19\x12\x17\xfc\x94\xe5\x19\xcf\xfe\xc6\xbb\xcb\x13\xd4~\xdc\xbd\xe1\xd7\xdfZ\xd8\x9d\xcb\xf5\xc2A\xc8%\xd9\xc1\xee\x0c\x04\x13\x14\x95\x1e\xb4%\xd7\'\x02"\xba\x17\xcb\x0ca\x05\xfc\x04]\x07b\x0e`\x14\x0e\x10&\x07O\xf5\xde\xdeP\xcd\x18\xc0\xea\xbe\xf8\xc6\xd9\xd1C\xdej\xe6*\xe8\xe9\xe5\xa8\xe5\xff\xe6\x1b\xe8\\\xee\x8a\xfa\xef\x08s\x18\x05(\xe9.\x00*2!J\x16e\n\x9e\x04\xb4\x01"\x00^\x02\x01\x03\xf3\xfd\xa7\xf6&\xf1"\xeb0\xe6\xa7\xe2 \xe2I\xe7\xb9\xee\xf0\xf31\xf7\xec\xf6\x1e\xf5\xf8\xf4\xc0\xf14\xf3\x98\xf9\xf3\xfc\xf2\x02\x8e\x06\xc6\x05\xfa\x06U\x04\xed\x00\xc3\xfe\xe5\xf93\xfc9\xfe\x9e\xfe \x02\xda\xfe\xb2\xf9\x90\xf5\x10\xef\x86\xedA\xef\x88\xee\x1a\xf0\xf9\xf2\x84\xed\x07\xe9=\xf6\xa2\x15\xb1@e[\xa1W\x08K>B\xffB\xbaH\x0fL\xd8P\xa0Q\xf3Jk>\xea-\x0b\x1f`\r\x1d\xf3,\xd7\xa2\xc4\x95\xc0\xdc\xc7p\xd0\xfb\xd5D\xd9\x89\xd9\xb2\xd7\x97\xd6)\xd9\x12\xe2\xe1\xed\xfa\xfa\xd5\x08p\x19\x97)\x913\x8f3\x9a(\xcf\x1a/\rk\x03;\xfeH\xfc\xcf\x00f\x00\xc1\xfa!\xf2z\xe1a\xd1\x95\xc4\xa3\xbc;\xbe\xc7\xc7Q\xd5\xeb\xe2\xba\xee\xd4\xf5c\xf9l\xfa(\xfbp\xfe\xc3\x03\xf9\n\x0e\x15\xb2 \x06)M*\xfb#p\x18\xbe\nk\xfe\x1e\xf7\n\xf3\xba\xf0\x95\xf1\x1d\xf1r\xee\xc9\xee\xec\xedX\xec\xa4\xebF\xe9M\xe9\x85\xed\x0e\xf5\x9a\xfbu\x01W\x05I\x04\xfb\x02\xf7\xff@\xfc\xa1\xfb\x1d\xfbM\xfc\xe4\xfe\x0e\x01\xf1\x02\xf0\x01\xc4\xff\xc4\xf9X\xf4\xdc\xf0p\xedl\xf06\xf4\x85\xf5\xc9\xf8\xf5\xf5e\xf0\xa9\xefX\xebZ\xeb*\xf2\x89\xf0<\xec\xed\xf3l\x0cC6\xf5]\xf5f{ZKL\xa2?i<\x8e@\xccDrL!NDBu/\xb7\x1b\x0c\n?\xf7\x9e\xdc\xb3\xc3\xc7\xb7\x00\xba/\xc5\xd6\xd2\xba\xdd\xa7\xe3\xe1\xe2:\xdeh\xdb\x18\xe0W\xec\xaf\xfb\xc8\t\xf6\x16\xb4%\xe8116T/?!\xb9\x12\xe2\x06\xa3\xfdk\xfc\x10\x02\x9d\x04.\x02/\xf5\xde\xe3n\xd5D\xcaw\xc6\x00\xc6?\xc9\xb6\xd2 \xdd\x89\xea\xc6\xf6L\xfe\n\x02d\x00c\xfeY\xff.\x04\xc8\r\xb3\x18\x9f 
\xa1!Z\x1dg\x15/\nO\x00\x7f\xf8o\xf0\xef\xea\xf9\xe9,\xeb8\xee\xb4\xf2\xb1\xf4\x81\xf3k\xf1\x1b\xee\xa2\xece\xf0F\xf7a\xfe\x8d\x05\xbf\x08)\t\xdf\x07W\x05\x99\x03\x87\x00\xd7\xfdL\xfb\xb0\xfa\xc5\xfc@\xff\xd3\x01\xac\x01/\xfd\xf1\xf7\xd6\xf1\xb9\xebA\xe9Q\xe9.\xea\xc6\xec\x9a\xefH\xf1\x1d\xf2\x94\xf1\xce\xee[\xec\xa9\xeb\x93\xea\x08\xea\xd1\xec\x8f\xf8\xe4\x18\xf1C(d(n\xeccWS$I\xc8D\xdeB\x92E\xecJhM\xd1G\xe76\x11"\x96\x0e\xbd\xf5\x01\xd9\xfa\xbd\xc9\xacz\xadg\xba\xee\xca\r\xd7\x86\xdd\xda\xdd\xc6\xdbH\xd9c\xdar\xe4\xa1\xf4D\x06\x01\x17H%\xb61\xd09\xe980/?\x1f\xe8\x0f\x18\x04\xc3\xfc\xa2\xf9\xec\xf9\x17\xfd\x9a\xfb\xa5\xf4)\xe8\xe0\xd89\xcc$\xc2\xf0\xbe\x9a\xc2s\xcb\x08\xdb:\xeb#\xf8\xb2\xff\xa1\x02\xb1\x02&\x01.\x01i\x03\x0c\tM\x13\x8b\x1e\xcd$]#\x87\x1c\xc8\x12q\x07\xd0\xfd\x8e\xf5/\xefq\xedx\xef\n\xf1\xbd\xf1\xf7\xf2o\xf2\xc0\xf0\x1d\xef\x12\xec\xc0\xeb\x9f\xf0\xb8\xf6\xcd\xfeY\x05\xb7\x07\xc1\t#\x08\x12\x05G\x03\\\x01\xc1\x00d\x01P\x02\xfd\x02o\x03\xf2\x02\xbb\xff\xb3\xfa\xa6\xf6\x84\xf1\xbe\xed\x82\xec7\xebZ\xec\t\xee\xe9\xedQ\xedp\xecR\xe9\xb9\xe8\xe7\xecz\xefR\xf1p\xf0Q\xec4\xf3\x1e\x0c\xfe0\xf1V\x95iif\x9dY\xf6NxK\x9bL\x9cK\xe2H\xe8G\\Bs6l&\x0e\x12\xcf\xfd\xb3\xe9\x8b\xd2$\xbd\xa6\xb1\xcf\xb2Z\xbe\x83\xcc\x01\xd5\xdd\xd6\x92\xd6/\xd5\xe1\xd6T\xde+\xea\x1d\xf9\xec\x08\xd2\x15\x85 g*\xe40\xed2\xcf-\x9d"\xc7\x15\xcb\n\x0e\x04\x97\x01\xf1\x02\xa8\x02E\xfe\xed\xf4\xb3\xe5\x9e\xd6I\xccM\xc7\xdd\xc79\xcb\x82\xd0\x00\xd8\x98\xe1O\xebH\xf4\xb8\xfa\xf6\xfc.\xfd\x1a\xfd\x1f\xfes\x03\xf3\x0cx\x17\x1d\x1f\xce #\x1d|\x16F\x0f}\x083\x02\xba\xfc\x15\xf8p\xf4\xce\xf2\xf9\xf3\r\xf7c\xf9\'\xf9\x01\xf5(\xef\xab\xea\xf4\xe9\x1c\xee\xeb\xf4\xf7\xfa\xe2\xff\xe0\x01b\x02.\x02f\x01\x90\x01i\x01\x0c\x01p\x01*\x02\xce\x04R\x07L\x08[\x07\x92\x01\xee\xfa\x19\xf5?\xf0=\xee\x9a\xec\x8e\xeb\xb2\xec\n\xed\x0b\xecS\xeb\x9e\xea\xc5\xea\xea\xe9\xb4\xe6\xbe\xe6I\xf2\x94\x0c\xa80zP\xd1]\xf2Y\xdfN+E.B{B\x7fC\xb2G/K\x85IH>\x92)*\x13M\x00T\xf0z\xdf\r\xcd[\xbeS\xb9\xcc\xbe\xfd\xc9A\xd4\x13\xd8\xa6\xd6\x8c\xd4s\xd4`\xd8R\xe0i\xebI\xfaK\n\xc3\x178"x(\xd8+=-\x91*\xde"\x12\x18\xea\rq\x08\x90\x084\tT\x06+\xffB\xf4\x92\xe81\xddd\xd2{\xcab\xc7\xd4\xc8\x02\xcez\xd4S\xda\xf7\xe0\x17\xe9`\xf0\x89\xf5S\xf7\xd8\xf7N\xfb[\x02\xa9\x0bD\x15I\x1d\xa7"\x81$\xaa!\xfd\x1a\xca\x12\xa3\x0b\xf0\x06\xa5\x03\x85\x00,\xfd\x04\xfa\xf9\xf7\xef\xf5\x16\xf3\x03\xf0\xa4\xec\xa1\xea\x06\xea\xcd\xe9\x7f\xeb)\xefS\xf3Q\xf9\x92\xfe\xba\x01q\x04Z\x05z\x05p\x07\xc3\x07\x01\t"\x0b\xa3\x0b`\r\x9f\x0c\x1d\t\x9e\x03l\xfc\x1b\xf6\xa3\xf1\xb5\xefB\xee\xe7\xecO\xec\xab\xe9q\xe7X\xe6\xf4\xe3\xd3\xe3~\xe4<\xe5\xed\xecO\xfe\xf0\x16\x9c1^E\xaaM\xe5M\xdbH\xa9A`=4=9A\xc2G\x1fK\x10EU7o%\xa9\x12\xa6\x02\x82\xf1\xd4\xe0\x82\xd4\x97\xcd\xa9\xcc\x87\xce\xf9\xcf$\xd1\xae\xd2\xb4\xd3j\xd4\xb4\xd4\xdc\xd5%\xdbc\xe5\xbc\xf2\xb6\x00\xa2\x0c\x0f\x17\xf3\x1f\xac%!\'\t$\x0b\x1f\xc5\x1a\x9d\x17/\x15\xb6\x12\x95\x0f\xbb\x0b\xf3\x05T\xfd\xe1\xf2k\xe8\xb4\xdf}\xd9\xad\xd4\xeb\xd0\xae\xce3\xcf\xa7\xd2\xf7\xd77\xde,\xe4\x99\xe9d\xee\xe6\xf1\x17\xf5\xa3\xf9b\xffE\x07\xee\x0f\x9b\x16\xf1\x1bL\x1f\xf6\x1f)\x1f\xbd\x1b4\x16\xad\x10\r\x0b\x86\x06\xfd\x03\xaa\x01\t\xff\x10\xfc\x99\xf70\xf24\xedY\xe9\xc3\xe7:\xe8*\xea\xef\xecz\xf0=\xf4q\xf7\x8a\xfa\xde\xfd{\x01\x82\x05b\t|\x0b\\\x0c\x0b\x0c\x96\n"\t\xdd\x06Q\x04x\x01\xe4\xfd\x17\xfa\xe2\xf5a\xf2T\xef\x1f\xed\xf8\xeb\xb5\xea\xe8\xeaR\xeb\x90\xeb\'\xec\xed\xeb\x1c\xee\x0b\xf5\x93\x02\xe3\x16\xed+2<[D\xdbD\xe5A\x16=\x9f8\xb16e7\x9d;\xa9?\xb0>\xff6K)\x87\x19\xed\nO\xfd\x00\xefg\xe1\xe3\xd6\xe7\xd0J\xd0X\xd2\xcb\xd4X\xd7\xa8\xd8M\xd8w\xd6w\xd3F\xd2\xdd\
xd5]\xdf\xc7\xed\x87\xfdZ\x0bh\x15=\x1b\xb1\x1d\xf3\x1c\xdf\x1a\xfc\x18\x96\x17c\x17\x10\x17P\x15G\x12\xc9\r\xa2\x08h\x034\xfc\xf7\xf2\xf8\xe8\xc0\xdf\x0b\xd9u\xd5\xd2\xd3\x80\xd4\x89\xd7\x12\xdcg\xe1\xd1\xe5L\xe8$\xe9\x0f\xea\xf3\xeb\xff\xef\xae\xf66\xff.\tE\x13,\x1b}\x1f\xb8\x1f\xb4\x1c\xfb\x18\xbe\x15_\x13\xd2\x11A\x10\xa7\x0e\x9a\x0c}\t\x8a\x05\xe4\x00m\xfb\x1b\xf6@\xf1\x10\xedB\xea&\xe9\r\xea\xdd\xec\xca\xf0\x83\xf44\xf7n\xf8\xf6\xf8o\xf9\x8e\xfa\xa9\xfc\xfa\xfe`\x010\x03\x81\x03}\x03\x92\x02g\x01\x8c\x00\x0c\xff\xd8\xfdZ\xfc\x82\xfa\x1b\xf9\xbf\xf7\xdd\xf6\xcc\xf6\xdd\xf6s\xf6@\xf5#\xf3\xd4\xf2p\xf7c\x02\xc9\x11\x99!Q-F3\xad4t2\x8c.a+\xd1*L.\xfe4;;\xcd\xf63\xf5\x01\xf4\x88\xf2[\xf1\x00\xf1T\xf1\xaf\xf2\xf6\xf44\xf7>\xf9\x91\xfa\xf0\xfa\x00\xfb\xc6\xfa_\xfa\x89\xfa\xc1\xfaZ\xfb\xab\xfc\xe7\xfdR\xff\xee\xff\xa3\xff\xa7\xfe\xd8\xfc\xf9\xfa\t\xf9/\xf8\xc3\xf9\x07\xffo\x08Z\x14\xba\x1f$(\xe3+R+\x9a(}%\xfa#\xe9$\x03(\xdd,11\xc92\x130M(d\x1d\xd0\x11\xca\x07\xac\x00\xde\xfb\x19\xf9d\xf7\xee\xf5\x9b\xf3\x11\xf0\x8b\xeb\xb5\xe6\xf7\xe2\x93\xe0<\xdf\x8e\xde\xfc\xdd\x1f\xde\xb3\xdf\xf2\xe2\xd7\xe7\x9c\xedx\xf3\xc2\xf8p\xfc&\xfe\xf4\xfd\xdc\xfc1\xfc\t\xfd\xff\xffw\x04H\t\xf5\x0cK\x0e\x8b\x0c\xda\x07.\x01r\xfa\x84\xf5<\xf3S\xf3\x89\xf4\xdd\xf5\xef\xf5\xb3\xf4@\xf2\xf4\xee\x0c\xec\x1b\xea\x8c\xe9\xcb\xea<\xed\x90\xf0/\xf4\xc8\xf7\xb3\xfb\xd0\xff\xe3\x03[\x07\n\n\xad\x0br\x0c\x9a\x0c~\x0c\xc1\x0c\xad\rq\x0f\xaf\x11~\x13\xb5\x13\xd6\x11\x19\x0e<\t\x04\x04O\xff\xd1\xfb9\xfa8\xfa\x18\xfb\xeb\xfb\xc3\xfbc\xfa\x05\xf8\xed\xf4\x18\xf2\xea\xef\x0e\xef\xea\xef\x0f\xf2\xde\xf45\xf7v\xf8\x9d\xf8\xc7\xf7\xb7\xf6\xbd\xf5u\xf5\x06\xf6H\xf7P\xf9p\xfbB\xfdX\xfeQ\xfep\xfd\x19\xfc\x94\xfa\xa0\xf9/\xfa\xa8\xfcl\x02\x16\x0b>\x15\xe8\x1ex%\x11(\x9b\'X%{#0#\x7f$\xf3\'\x0e,\xc5/c1\xff.\x01)\r Y\x16\xf7\r/\x076\x02\x89\xfei\xfb\xfe\xf8\n\xf6T\xf2\xd6\xed\xb5\xe8\x87\xe4O\xe1\x19\xdf\xbb\xdd\x80\xdci\xdc\xb2\xdd\xad\xe0,\xe5\x11\xea\xf5\xeee\xf3\xb6\xf6\xe9\xf8\xe3\xf9q\xfak\xfb\x8a\xfd\x1e\x01\x84\x05\xee\t@\r\xac\x0e\xbd\r\x87\n\xb5\x05\xa6\x00\x99\xfcd\xfa\xed\xf95\xfa\xab\xfa-\xfa\xbc\xf8g\xf6`\xf3\x90\xf0M\xee*\xeds\xed\xc9\xee5\xf1\xef\xf3\xe9\xf6:\xfa\x8f\xfd\xfc\x00\xae\x03\xc0\x05\xfa\x06{\x07\xca\x07\xdd\x07r\x08\xc1\t\xa5\x0b\x14\x0e%\x10\xd2\x10\xed\x0fW\r\xa9\t\x99\x05\xca\x01\xe1\xfe\x80\xfdd\xfd\x17\xfe\xf7\xfe\x19\xff\x17\xfe!\xfcC\xf9+\xf6\x94\xf3\xc8\xf13\xf1\xd7\xf1\xf7\xf2S\xf4I\xf5\x88\xf50\xf5~\xf4\x8e\xf3\x17\xf3\xc8\xf2\xe7\xf2\x99\xf3v\xf4,\xf6\x01\xf8\xa0\xf9\x0c\xfb\x90\xfb\xd5\xfb\xcd\xfb\x7f\xfb,\xfc\x93\xfe5\x04\x81\rM\x18\xaa"\xdc)\xe1,\xfe,\x0f+\xff(\x02(\xb1(\t,z0+4\xf34\xbd0\xa9(j\x1e?\x14\x01\x0c\x07\x05R\xff\xcb\xfa"\xf7\xd4\xf3\xef\xefm\xeb\xc2\xe6\xb8\xe2\x97\xdf\xe1\xdc\x88\xdar\xd8/\xd7\xae\xd7\xfd\xd9\x0f\xde\x84\xe3\xe2\xe9\x94\xf0F\xf6X\xfas\xfc^\xfdM\xfe\xfc\xff\xf7\x02\xfd\x06o\x0b\x93\x0fm\x12\x02\x13\xfd\x10\x8d\x0c$\x07^\x02\x08\xff\x06\xfdz\xfb\x13\xfa@\xf8$\xf6\xd0\xf3:\xf1\xd0\xee\xb8\xec,\xeb\xae\xea\x18\xebn\xecj\xee\r\xf1\x91\xf4\xe1\xf8~\xfdx\x01u\x045\x06\x0e\x07U\x07k\x07\xfb\x07U\t\x93\x0bO\x0e\xcc\x10\x1f\x12\xcf\x11\xe8\x0f\xcb\x0c\xd9\x08\xe3\x04{\x01W\xff_\xfe5\xfe`\xfe;\xfe\x8f\xfd=\xfc\x0c\xfaP\xf7b\xf4\xf8\xf1\xb4\xf0\x9d\xf0S\xf1t\xf2f\xf3\x00\xf4\xe9\xf3A\xf3"\xf2*\xf1\xd7\xf0]\xf1\xd5\xf2\x7f\xf4\x9d\xf5.\xf6U\xf6\xb8\xf6d\xf8\x99\xfa0\xfdu\xffK\x00}\x00n\x00j\x01W\x05\xc9\x0c\xa3\x17i$T/"6\x1a7K3\xc3.\xc3+\xc8,\x900\x9a4c7T6=1\xd7(\xa7\x1d\x89\x12\x93\x08d\x006\xfaE\xf4Z\xee\x0e\xe8G\xe2B\xde\xbb\xdb\\\xda-\xd9;\xd7 
\xd5\xe2\xd2\xc8\xd1\x04\xd3M\xd7I\xdf[\xe9\\\xf3 \xfb\xd8\xff\xb1\x02\xdd\x04G\x07\xf8\t\xb7\x0c\xe9\x0fb\x13u\x16\xf1\x17\x1b\x17\x88\x14\x12\x11C\r\x07\t\xed\x03\x0c\xfe-\xf8\x05\xf3T\xef\x1a\xed\xd2\xeb;\xeb\xb6\xea\xfe\xe9\xd3\xe8,\xe7\xdf\xe5}\xe5\xf4\xe6\x85\xea\x85\xefg\xf5\x06\xfb\x0b\x00o\x04\xa2\x07\xe2\t\xe6\n\x82\x0b\xac\x0c5\x0ey\x10\x90\x12\xd9\x137\x14>\x13\x89\x11\x0f\x0f\xef\x0b\xd3\x08r\x05b\x02\x92\xff\x04\xfd]\xfb3\xfa\xba\xf9\x81\xf9\xb1\xf8{\xf7\xba\xf5\xc5\xf3H\xf2)\xf1.\xf1\x18\xf2\x97\xf3 \xf5\xd7\xf5\x82\xf5\x9f\xf4\xa3\xf3|\xf3M\xf44\xf5F\xf6\xcb\xf6\xb0\xf6\x9c\xf6i\xf6\x9f\xf6\xc2\xf7\x7f\xf9x\xfc.\xff\xc0\x00e\x01\xb0\x00\x1e\x00\x06\x00\x14\x01\x01\x06D\x0fo\x1cn*6429%9\xa26m4\xff1<1\xdc0\xd30\xb00F.u*\xdd#\xf0\x1a\xf4\x0f\xce\x02\xfd\xf5\x96\xeao\xe2\xce\xdd\x9a\xdbk\xdb[\xdbW\xda\xc5\xd7o\xd4\xd9\xd1b\xd1{\xd3\xcf\xd7\x81\xddI\xe4\xf3\xebI\xf4\xaa\xfd\x9f\x06a\x0e\xea\x13\x94\x16\xe4\x16\xb0\x15\xb8\x13\xe7\x12\xab\x13\xaf\x15\xe0\x17\x12\x18W\x15/\x0f\x9b\x06/\xfd?\xf4\xb6\xecC\xe7\xd4\xe3$\xe2z\xe1\x81\xe1\xc1\xe1\xbf\xe1.\xe1Q\xe0\xba\xdf*\xe0\x8e\xe2Z\xe7m\xee\t\xf7\xaf\xff\x1e\x07\xa5\x0c%\x10&\x12.\x13\xbe\x13+\x14\xa8\x14\xfe\x14?\x15\x1e\x15\xa6\x14w\x13K\x11\xcb\r\x02\tO\x03\xe4\xfd[\xf9\x90\xf6\xcc\xf5\n\xf6\xf3\xf6\x1a\xf7\x81\xf6\xb9\xf5\xc4\xf4<\xf4\xdc\xf3\xfe\xf3\xd5\xf4\xe2\xf5w\xf7\xc5\xf8\xe0\xf9\xcf\xfa\xb3\xfb\xb6\xfc7\xfds\xfc\x9c\xfa\x80\xf8O\xf6\xff\xf4\x03\xf4\xc5\xf3[\xf5\xf3\xf6g\xf8T\xf8\xb3\xf6\xa1\xf6\xd5\xf6\xba\xf8\xcc\xfa\xc0\xfbs\xfd\x96\xfd\x9b\xff\x9a\x05\xa2\x10\xce R0p;\xd4?\xe7=m9N4b1\xcd0\xe81c4\xab3\xf1/p(\x00\x1e\x14\x13\xf1\x05 \xf8\xca\xeb\xf3\xe1$\xdc\x8d\xd9\x1e\xd8\x94\xd8G\xd9\xf5\xd8\xc4\xd7f\xd5\xcd\xd3\xf5\xd4F\xd8;\xde1\xe6q\xef\x8a\xfaR\x05\xe4\x0e\\\x15\xa3\x18\xc5\x19\xa8\x19\x9a\x19\x86\x19\xa5\x19\x0f\x1a\x05\x1a\xe1\x18\xef\x15\x00\x11\xb8\n,\x03\x8e\xfa+\xf1:\xe8\x02\xe1\xb9\xdc\xf9\xda\x10\xdb\xb4\xdb\x1d\xdc\xdc\xdb\xde\xda\xd5\xd9\x04\xdaO\xdcO\xe1T\xe8t\xf0\xa2\xf8S\x00G\x08\xad\x0f\xba\x15\x04\x19\\\x19G\x184\x17\x0b\x17S\x18\xa7\x19r\x1aM\x19\x0f\x16\xb3\x10h\n\xbc\x04\xfb\xff\xd4\xfc\xa0\xf9\xe3\xf6\xfd\xf4p\xf4S\xf5\x92\xf6$\xf7\x9e\xf6\xa8\xf5\xd6\xf4\x02\xf5\xe8\xf5\x90\xf7\xe1\xf9\x8a\xfc\x1d\xff\xfd\x00e\x01\xd2\x00\xa6\xff%\xfe\x93\xfc\x1c\xfa\xe0\xf76\xf6\x1e\xf6\xe6\xf6}\xf7\xde\xf6&\xf5?\xf3[\xf1\xce\xf0R\xf0\xb5\xf18\xf4\xfd\xf6\xa9\xfa\xfa\xfc\xef\xff[\x02\x7f\x039\x04\xdf\x02x\x04n\x0c\xc5\x1a\x85-%:\xdf>\xe8;\x0c6&3\xf91;2\xbe1\x9f03.k)c!\xd3\x17\xdf\x0e<\x06e\xfcQ\xef\xf8\xe1\xfd\xd8$\xd7+\xdaf\xdd\xbc\xdd\xbd\xdb\xbb\xd9\x02\xd9\xac\xd9\xe1\xdbc\xe0c\xe7\x86\xf0\x7f\xf9\x85\x01/\tk\x10\xff\x16\x99\x1a\xf9\x19\xc0\x16\x9e\x13F\x13\x94\x15\xbc\x17\x93\x17N\x14\xa5\x0eY\x07]\xfe\x82\xf4\x13\xeb\xd6\xe33\xdf\xc9\xdbo\xd91\xd8\xfb\xd8\x05\xdb;\xdc\x1b\xdbw\xd8\x02\xd7[\xd9\x02\xe0L\xe9:\xf3\x90\xfc\xe0\x04\x9d\x0b^\x10;\x13,\x15|\x16\x14\x18\xfc\x18(\x1a\x8c\x1b0\x1d0\x1e:\x1c8\x17\x05\x10\xbb\x08\xe2\x02W\xfex\xfb\xf2\xf9\x94\xf9\xb0\xf9\x85\xf8x\xf6\xce\xf3\x88\xf1\'\xf0Y\xef4\xf0\xc7\xf2&\xf7\xce\xfbK\xff\x9c\x00\xd0\x00\x1a\x005\xff\xc0\xfeC\xfe\x86\xff\x8b\x00\x00\x01D\x00\xd5\xfd\\\xfb 
\xf8\xeb\xf4\xe2\xf1\x9e\xee\x88\xedZ\xed\xc5\xee\xc0\xf0\x83\xf0,\xf1\xaf\xf18\xf3\x99\xf5\xfe\xf6\x84\xf9I\xfc\x8b\x00\xbf\x04\xed\x07!\x0b\x93\r?\x14\xe3\x1fo-5:F>3:\xa93c.\xab.U1\xd01\xba0>-\xe8\'\xc9!]\x18\x82\r\xdc\x01\xe9\xf5\xc7\xebK\xe3<\xdf\x89\xdfb\xe2\t\xe5-\xe4a\xe0&\xdc\xa9\xd9>\xdb\x06\xe0\xc2\xe6\x8d\xee\xe3\xf6\x84\xffG\x07\xa5\r\n\x11<\x12g\x11\xed\x0f_\x0f\xeb\x0f\xc1\x115\x14G\x15\x16\x134\r\x89\x04N\xfb\xe7\xf2\xfa\xeaQ\xe4\xb4\xdf\xb8\xdd\xbb\xde\x9a\xdf\x83\xdf\xf4\xdd\xe2\xdb\xaa\xda1\xda\xf1\xda=\xde5\xe4S\xed\x06\xf7\\\xffW\x06A\x0b\xe5\x0f}\x12z\x13P\x14\xa0\x15\x81\x18\x89\x1b\x9a\x1d\x80\x1e\x82\x1c\x01\x19E\x13\xa0\x0cT\x06\xdf\x00\xc6\xfd\xba\xfb\xc8\xfa?\xfa\x92\xf9\x07\xf8\xd4\xf5N\xf3\t\xf1\xd4\xf0\x10\xf2\xd3\xf4w\xf8\xe7\xfb+\xff9\x01C\x02\xa1\x02(\x02\x99\x01\x9f\x00\x94\xff\xee\xfe\xd5\xfe\x87\xfeo\xfd$\xfb\xa7\xf7U\xf4\xa2\xf0\xfa\xed[\xec\x0b\xeb\xb3\xeb\x13\xec\xa4\xedb\xef+\xf0\x83\xf2\xcf\xf3\xbe\xf6:\xfa\xca\xfd\xa0\x02l\x06\x11\t\xfc\x08\x12\x08\x80\n\x1c\x12\xff\x1f\xa7-\xb25\xbf6\xb12\xd6.\x08,\xeb*8+\xb5+\xcc+V*\xb2&\x06!\x1a\x19\xc6\x0e\x82\x02\x8a\xf62\xed\xce\xe8\xf6\xe8\xb1\xea\xbd\xec\x05\xecm\xe8\x8d\xe3E\xdf\xfb\xdda\xdf\xc7\xe2\n\xe8,\xef\xd7\xf7h\x00d\x06\xa1\x08f\x07\xd5\x05\xc4\x044\x05\x96\x07,\x0b\xe1\x0fx\x12\xe8\x10/\x0c\x19\x05\xd6\xfd\xce\xf6#\xf0\xa5\xeb\xc3\xe9Q\xea\x08\xeb\x0c\xeb\x90\xe9L\xe7%\xe4\xeb\xe0N\xde\x8d\xde\xb5\xe2\xff\xe8\xab\xef\xbb\xf5<\xfb\x97\x00\xf5\x03\xff\x04&\x05\xd6\x05\xc9\x08\xe5\x0b)\x10\xcf\x13\xa4\x17N\x19\n\x17\x98\x12\xe8\x0c\x1c\t\x17\x06"\x03i\x01,\x00\xec\x00\x19\x01\x0e\xff\x1e\xfc"\xf8L\xf5\x9c\xf3\xd7\xf3\xba\xf5\x84\xf9M\xfd\x94\xff\x10\x01A\x01L\x01\xa2\x00\x9b\xffz\xfe0\xfe{\xff\xec\x00\x10\x02\x17\x01f\xfdB\xf9A\xf5\xd9\xf2\xde\xf1\xed\xf0\xb8\xef\x08\xeeq\xed:\xee\x0f\xf0\x1c\xf2\xc3\xf1z\xf1X\xf2h\xf4\x1f\xf9+\xfd\xf7\xff\x98\x02\x0f\x04\xdc\x05\x9d\x08\xf0\x07\x8d\x06\xff\x07\xcb\x0fp\x1f=->2P.\x85\'\x0e&\xb2(0+\xc4*\xd4(^)\x1b)\x89&\xad!\xe9\x19\x1b\x10S\x03\xa9\xf6\xf9\xef\x13\xf1.\xf5\xad\xf6\x15\xf3\x80\xed\x19\xeaB\xe7\x03\xe4\xbb\xe0b\xdfs\xe2\xc0\xe8u\xf0\xdd\xf7\x93\xfd\x98\x00\xfd\xfe\x0c\xfb\x17\xf8v\xfa\x8c\x00\xf7\x05\xcf\x08\xb1\t\xb9\x0b\x9a\x0c!\t\x13\x03\x91\xfc\xfb\xf7\xa5\xf5\xba\xf3I\xf3\xcf\xf4\xe7\xf6>\xf6\xe7\xf1O\xecH\xe9\x98\xe8O\xe8\xf7\xe7\x9e\xe9<\xee\xbb\xf4\xc6\xf9\x99\xfb\xe3\xfc.\xfd\xc0\xfd,\xfe~\xff\xd1\x03\xc9\x08\x84\x0c\x1b\x0e\x8c\x0e\xeb\x0fB\x0f~\x0c\xdf\x07\xe9\x04\xbe\x04\xe5\x05\x7f\x06\x19\x05\xf0\x03\x16\x02\xe5\xff\x0c\xfd[\xfb\xe3\xfb\xb3\xfc;\xfd\x7f\xfd\xa2\xfe\xd2\x00g\x025\x02\x0c\x00\x9e\xfef\xfe$\xff\xe3\xff\'\xff\x7f\xfe\xb6\xfcI\xfb\x82\xf9\xfa\xf6\x9e\xf5\x02\xf4n\xf3\x93\xf1\xb6\xef\xa1\xef\x1e\xf0\xa5\xf1\xce\xf0\xfd\xef\xc1\xf0G\xf2\xae\xf5\xc6\xf7\xe3\xf9-\xfc\x15\xfd\x97\xfe\x17\x00\x99\x02\xb5\x05\x1a\x06\x88\x04\xe2\x02\xde\x07u\x14k"\x80*\xf4(\x0b$o"\x86%\xe3**,*+\xdb*\xf8*\xd4+%)\xda"\x94\x19Q\rR\x02\xb1\xfcX\xfc\xae\xfd\xda\xfb{\xf5\xb1\xee\x00\xea\x1a\xe7-\xe5\x95\xe1 \xde\xed\xdd*\xe1\xb9\xe7\x8b\xef\xea\xf4\x01\xf6\x06\xf3a\xf0\x81\xf2\xff\xf8\x0c\x002\x05\xf5\x07\xf7\x08\x9e\n 
\x0c\x96\x0c\xa3\n8\x04g\xfe\xca\xfb.\xfdR\x01\xeb\x01\xd8\xfe5\xf9A\xf3-\xef^\xec\x00\xea\xc4\xe8U\xe8\xad\xe8\x86\xeb\xba\xefE\xf3\x07\xf5\xfd\xf2/\xf0\xe1\xefE\xf3f\xfa\xc1\xff\x1f\x04\xa3\x06n\t\x83\x0b\n\x0cR\x0b;\t\x8f\x07\x0e\x07\xb9\x08\xc1\x0b\xf3\r\xb2\rj\n\xfc\x04\x8f\x01\xe1\xfft\xffq\xfe\xc1\xfdQ\xfe\xdb\xff}\x01\xc1\x00\xb7\xfe;\xfcn\xfa\xee\xf9F\xfaS\xfb\x06\xfdx\xfd\xd2\xfck\xfb\xe1\xf9\xc8\xf8\x14\xf7Z\xf5[\xf3\xe2\xf2\xd4\xf3\xac\xf4\x8a\xf4i\xf3T\xf2\xcd\xf2\xde\xf2\x7f\xf3\xdf\xf4\x03\xf6\x1a\xf9\xf8\xf9\xe1\xfa\x18\xfd\x99\xfe\x9c\x01\x00\x02\x18\x02\xe2\x03\x94\x03\xbb\x02\x10\x03\x86\t[\x19\x96%\xe9\'\xc6#j!\x00&u*\xcd)\x9f(\x8e+80a2W/%)\x12!Q\x16\xeb\t3\x01\xbf\xfe\xec\xffH\xfe\xc9\xf7?\xf1\x13\xed\x8b\xe9\x01\xe3g\xdbi\xd7\xb2\xd8\xfd\xdd#\xe4\xdf\xe9B\xef\xa6\xf1\xe3\xf0\'\xeea\xeeC\xf4\x80\xfbE\x02\x1b\x06/\nK\x0f\xda\x11\x02\x11\x1b\r\xa1\x08\x0c\x06H\x04N\x04[\x05\x86\x06\xe2\x05\xc3\x00=\xfa\n\xf4|\xf0\x84\xedH\xea\xac\xe7O\xe7I\xe9-\xec(\xeec\xee\x17\xee\x1b\xed6\xedh\xee\xad\xf2$\xf9Q\xffA\x03[\x06\x14\t\x92\x0b\xf9\x0bM\n\x90\t\x88\n\xd2\x0c[\x0f\x02\x11\xe2\x10.\x0fj\x0bz\x07\x00\x05\xa0\x03\xd9\x02&\x02\xcf\x00\xbb\x00\xe5\x00k\x00\xe2\xfe\x16\xfb2\xf8I\xf7(\xf8T\xfa\x90\xfb\xed\xfb%\xfb"\xf9 \xf7\xbc\xf5\xec\xf4\r\xf5\x80\xf5\xd1\xf6\x9b\xf7m\xf7\x90\xf69\xf5\x0f\xf4\x1f\xf3N\xf3\x82\xf5\xd7\xf7s\xf8\x95\xf8\xa9\xf8\x1c\xfbM\xfd\xcf\xfd\x8d\xfd\xa4\xfc\x02\xff\xcb\x01\x05\x04i\x05\xca\x04\xf7\x05\xad\ni\x13\x12\x1f\xa9%v$\x9d\x1f\x93\x1e\xb9${+\xed,\x1b+O+M-\xcc,E&\x0b\x1d\xb1\x14;\rR\x064\x01D\x00\x0c\x00\x9b\xfb4\xf3\x9b\xeaD\xe5\x82\xe2o\xdf\x00\xdd^\xdc+\xdfU\xe4l\xe9\xd5\xec\xe4\xed\xb7\xed\x0c\xed\xc3\xee{\xf3\x0e\xfb\xbe\x02\x1d\x08\xf6\tk\n\x88\x0b\x11\x0cu\x0b\xd7\x08\xd2\x06\xad\x07#\tI\t)\x07$\x03\x98\xfe\xca\xf8R\xf3;\xefE\xed\x0b\xed:\xec\x00\xebW\xea\xa3\xea}\xeb\xd7\xeaS\xe9\xa3\xe9E\xec\xb7\xf0+\xf5p\xf9\xb3\xfd\xc8\x00\xe6\x01\xe5\x02G\x057\x08\xa8\t\x17\nm\n\xb0\x0c\xf0\x0et\x0f\xa1\x0e\xac\x0b\xbd\tK\x083\x07\xd0\x06\xf4\x05{\x05V\x04~\x02\x1d\x01\xa1\xff\xc5\xfed\xfda\xfb@\xfa*\xfa\x06\xfb\xfe\xfa\x9f\xf9C\xf8\xac\xf6I\xf6\x9b\xf69\xf6K\xf6s\xf5\xeb\xf4\x92\xf5\xd4\xf4:\xf4\xc4\xf39\xf3\xe3\xf4\xeb\xf5/\xf7n\xf8\xf1\xf7\x9b\xf8~\xf9\xd8\xfaB\xfd\xea\xfe>\x01\x11\x03\t\x02\x15\x00r\xff\xed\x02~\t\x95\x10m\x15%\x19\x07\x1ds\x1fg \x11\x1f_\x1e1!\xb8&\x83++,\xd4(&$\xe9\x1f;\x1b\xa5\x14\xd7\r\xc6\x08\xe4\x066\x06\xac\x03\xa7\xff\xfc\xf9i\xf3\xb0\xec$\xe7\xd5\xe4\t\xe55\xe6E\xe7^\xe8M\xea\xe2\xeb_\xec\xdb\xeb\xbf\xeb\xb3\xedJ\xf1\x9d\xf6\xf8\xfc\xa6\x02\xc3\x05Y\x05\xac\x03\xad\x030\x05?\x06\xd2\x05#\x05\xc5\x05(\x07)\x07\xe5\x04\xbd\x00\x82\xfcp\xf9\xa5\xf7\xb6\xf6w\xf5P\xf4\xe1\xf3\xe1\xf3\x8c\xf3G\xf2|\xf0c\xef\xfd\xee\xd2\xefQ\xf2\xca\xf5\xe5\xf9\x17\xfc\xa2\xfc0\xfcG\xfc\xc3\xfd\xcb\xff\xb3\x01\x88\x03\r\x05\xcf\x06\x0b\x08 \x08X\x077\x06\x95\x05\xa9\x05\xc0\x06\xfc\x07\xf5\x08\xb4\x08\x8c\x07;\x06\x95\x05\x10\x05G\x04\x93\x03\x9a\x03x\x04\xff\x04\x82\x04\x84\x02[\x00\xe2\xfd\xac\xfby\xfaa\xf9\xd3\xf8M\xf8p\xf7#\xf7?\xf6\x87\xf4\xc4\xf2x\xf1\xdd\xf1v\xf3\r\xf5a\xf6[\xf7\x19\xf8\x1f\xf9\xd4\xfaf\xfcu\xfdV\xfes\xff\xd9\x01\xa5\x047\x06\xf2\x06\x1e\x06\xb6\x04\xe0\x03\x8f\x04\xf3\x07\xdf\x0b\xdf\rK\r(\x0cn\r\x84\x0f\xbb\x10\x87\x10\xc7\x0f\x00\x110\x13\x1a\x15\x00\x161\x15\xd2\x13\x0f\x12\x0f\x10\xc8\x0e\xa3\rn\x0c\xb7\n\xb8\x08\xf8\x07 
\x07\x11\x05*\x01\xe0\xfc\x96\xfa\xbf\xf9>\xf9\t\xf9\xc7\xf8\xa1\xf8r\xf7\xcd\xf54\xf5\xfc\xf4\x81\xf4.\xf3\x80\xf2\x84\xf3I\xf5\x80\xf6\xa8\xf6\x92\xf6w\xf6\x0b\xf6\x92\xf5\r\xf6\x9b\xf7\x1e\xf9\xb0\xf9\xb3\xf9?\xfa\x1e\xfb\x8e\xfb\x7f\xfb\xac\xfb\x1a\xfcx\xfc\x01\xfd\xcb\xfd\x8d\xfe[\xfe[\xfd\xd7\xfc\x01\xfd0\xfd\xe8\xfca\xfc\x9f\xfc\xca\xfcf\xfc\x06\xfcc\xfcp\xfd#\xfe1\xfeC\xfe\x93\xfe\x05\xff?\xff\xd1\xff\xe3\x00\xb4\x01@\x02b\x02\xf4\x02\x12\x04\xc1\x04\xf9\x04:\x05\xb0\x05\x18\x06J\x06J\x06n\x06a\x06\xde\x05(\x05\x98\x04\xf7\x03\xed\x02\x97\x01N\x00!\xff\xd9\xfdu\xfc\x1f\xfb\xf0\xf9\xce\xf8\x92\xf7M\xf6e\xf5\x9e\xf4\xe8\xf3q\xf3R\xf3\xaa\xf3\x17\xf4s\xf4\xfb\xf4\xda\xf5\xe3\xf6\xf3\xf7\xdd\xf8\xcf\xf9\xba\xfa\xa4\xfb\x9b\xfc\xb5\xfd\xc7\xfe\x9b\xff\xd1\xff\x96\xff=\xffO\xff$\x00L\x01\xa0\x02\xe3\x039\x05\xe2\x06\x96\x08[\n~\x0c\x0f\x0f\xc8\x11\x89\x14\x1b\x17\xba\x19!\x1c\x82\x1d#\x1e\xc9\x1eC\x1f\xcc\x1e)\x1d^\x1b\x05\x1au\x18\xe1\x15\xa7\x12\xf8\x0f|\rb\n\xeb\x06(\x045\x02\xcf\xff\xec\xfc:\xfar\xf8\xae\xf6E\xf4<\xf2\xf9\xf0\x1b\xf0\xcc\xeet\xed\x19\xed_\xedk\xed\x19\xed.\xed\x08\xee\xe8\xeea\xef\xea\xef\xd1\xf0\x07\xf2\x17\xf3\xc6\xf3\xa4\xf4\xba\xf5\xdf\xf6\xff\xf7\x05\xf9\xef\xf9z\xfa\xa8\xfa\xab\xfa\xf6\xfa\x98\xfb\x14\xfcz\xfc\x97\xfc\x81\xfc\x8d\xfc\xaf\xfc1\xfd\xfe\xfd\x9e\xfe\x11\xffz\xff0\x00p\x01\x89\x02\r\x03*\x03p\x03\xff\x03\x9c\x04\x18\x05{\x05\xaf\x05\x89\x05$\x05\xdd\x04\xd0\x04\xa0\x04;\x04\xe8\x03\xbd\x03\x91\x03%\x03\x9e\x02d\x02V\x02\xfb\x01m\x01\x10\x01\xd9\x00\x91\x00\xf9\xffm\xff\x19\xff\x95\xfe\xcf\xfd&\xfd\x9e\xfc\xfd\xfb\x1e\xfbL\xfa\xdb\xf9x\xf9\xdd\xf8E\xf8\x16\xf8\x13\xf8\xf8\xf7\xa9\xf7c\xf7\x95\xf7\xee\xf7\x0f\xf8\x1e\xf8D\xf8r\xf8\xac\xf8\x01\xf9R\xf9\x96\xf9\xbd\xf9\xcf\xf9\xa0\xfaL\xfc\x0b\xfe\xe5\xff\xa3\x01\x87\x03#\x06\x15\tm\x0c\x1a\x10d\x13\x1a\x16K\x18l\x1a\xc2\x1c!\x1f\x92 \xc1 . 
^\x1f\x84\x1e8\x1d\x1a\x1b\x9a\x18\xdc\x15\xe4\x12\xd9\x0f\xd4\x0c\xf8\t\xc4\x06/\x03\x03\x00\xc5\xfd\x11\xfc\xfc\xf9\xcc\xf7\x08\xf6\xc3\xf4\x8c\xf3\x19\xf2\xfc\xf0H\xf0\xa3\xef"\xefH\xef\xbf\xef\xfb\xef\xd1\xef\x98\xef\x08\xf0\xa6\xf0$\xf1\xa9\xf1\x18\xf2\x89\xf2\x02\xf3\xc5\xf3\xd8\xf4\xc0\xf59\xf6u\xf6\xda\xf6[\xf7\xe2\xf7T\xf8\xac\xf8\x0c\xf9x\xf9\x02\xfa\x9d\xfa\x1c\xfb\x81\xfb\xd5\xfbK\xfc\xf4\xfc\xe8\xfd\xf9\xfe\xe4\xff\x9a\x00:\x01\xfb\x01\xcd\x02\x97\x03I\x04\xc8\x042\x05\x82\x05\xb0\x05\xdc\x05\xd8\x05\xb2\x05p\x05/\x05\x00\x05\xa1\x04\x18\x04\xa2\x03N\x03\r\x03\xc5\x02\x88\x02P\x02\x02\x02\x95\x011\x01\xdb\x00[\x00\xc1\xff\'\xff\xab\xfe \xfek\xfd\xa4\xfc\xfd\xfb^\xfb\xac\xfa\xfc\xf9l\xf9\xf8\xf8\x87\xf8U\xf8D\xf8D\xf8X\xf8^\xf8s\xf8\xba\xf8\xd9\xf8\x03\xf9U\xf9x\xf9\xb5\xf9\x11\xfal\xfa\xe9\xfaU\xfb\x94\xfb\x08\xfc\x95\xfcT\xfdz\xfe\xf5\xff\x05\x02c\x04\xc6\x06`\t\xd7\x0b\x1c\x0eq\x10\xf4\x12\xce\x15\x95\x18\x84\x1a\xdd\x1b\xe6\x1c\xa5\x1d\n\x1e\xde\x1d^\x1dt\x1c\xb6\x1a\x99\x18\x89\x16\xbc\x14\xaa\x12\xdd\x0f\xd2\x0c\xce\t\x1a\x07\x9d\x046\x02?\x00;\xfe2\xfcB\xfa\xa4\xf8u\xf7\x1f\xf6\xcf\xf4z\xf3p\xf2\xa0\xf1\xba\xf0#\xf0\xb4\xefU\xef\x1d\xef\xe4\xee\xe7\xee\xf3\xee\xfb\xee0\xef\x9b\xef\x1b\xf0\xa4\xf0_\xf1S\xf2)\xf3\x02\xf4\xe3\xf4\xbb\xf5q\xf6\xe5\xf6\x96\xf7x\xf84\xf9\xc6\xf9k\xfa5\xfb\xcc\xfb3\xfc\xc2\xfcr\xfd\x0e\xfe\xa8\xfe\x82\xff\xbc\x00\xac\x01>\x02\xc5\x02T\x03\xf5\x03U\x04\xc7\x04d\x05\xbd\x05\xd6\x05\xca\x05\xf5\x05\x10\x06\xd1\x05r\x05D\x050\x05\xd8\x04X\x04\x1a\x04\xf7\x03\xac\x03=\x03\xea\x02\xac\x025\x02\xad\x018\x01\xe9\x00y\x00\xde\xff@\xff\xaa\xfe/\xfe\x93\xfd\xf9\xfcg\xfc\xd9\xfbY\xfb\xea\xfa\x8d\xfa\x1d\xfa\xbf\xf9t\xf99\xf9"\xf9.\xf9R\xf9i\xf9O\xf9=\xf9f\xf9\x91\xf9\x88\xf9\x9c\xf9\xe7\xf96\xfaM\xfaB\xfa\x7f\xfa)\xfb\xcb\xfb\x04\xfc\xa5\xfc\xe1\xfd(\xff\x8e\x00/\x02A\x04\xc2\x06\xf6\x080\x0b\xe5\r\\\x10\\\x12!\x14\x1c\x16\x0c\x18\x99\x19\xa0\x1aS\x1b\xd0\x1b\x95\x1b\xd6\x1a\x13\x1a\x01\x19\xae\x17\xc6\x15\xb8\x13\xe6\x11\xbf\x0f|\r\x05\x0b\xa7\x08w\x06\xfd\x03\x9c\x01j\xffp\xfd\x82\xfbz\xf9\xbd\xf7A\xf6\xc9\xf4W\xf3\xf5\xf1\xde\xf0\n\xf03\xef\x88\xee\x08\xee\x8f\xedC\xed,\xed]\xed\xbb\xed\xd4\xed\xf6\xedb\xee\x0c\xef\xe1\xef\xa6\xf0o\xf1F\xf2#\xf3\r\xf4\x18\xf5&\xf6\t\xf7\xcb\xf7\x89\xf8u\xf9\xa3\xfa\xa2\xfb_\xfc\xfc\xfc\xa5\xfdj\xfe\x1c\xff\xd8\xff\x94\x008\x01\xb4\x01&\x02\xc7\x02h\x03\xd3\x03\x19\x04X\x04\xb3\x04\x00\x05(\x057\x05C\x05M\x05]\x05`\x05R\x058\x05\r\x05\xd6\x04\xd0\x04\xde\x04\xcc\x04\x93\x04I\x04\x1f\x04\xdf\x03z\x03?\x03 
\x03\xd2\x02H\x02\xea\x01\xb6\x012\x01X\x00s\xff\xe4\xfe_\xfe\xb0\xfd\xf4\xfca\xfc\xc5\xfb\x0c\xfbz\xfa\x11\xfa\xbd\xf96\xf9\x92\xf81\xf8+\xf8=\xf8G\xf8\\\xf8\x88\xf8\xc7\xf8\x0b\xf9C\xf9\xb4\xf9S\xfa\xdb\xfae\xfb\xf6\xfb\xb8\xfc\x8c\xfdA\xfe\x11\xff\x0e\x00"\x01\x16\x02\xfd\x02\x1a\x04H\x05r\x06\xd8\x07\x80\t\x17\x0bs\x0c\xb3\r\x1d\x0f\x85\x10\xac\x11\xc1\x12\xe1\x13\xbd\x14:\x15\x88\x15\xe1\x15!\x16\xe0\x15#\x15M\x14o\x13Z\x12\xe6\x106\x0f\x8d\r\xd3\x0b\xf6\t\x17\x08?\x06G\x04\x1d\x02\t\x00:\xfe\x9d\xfc\xe1\xfa"\xf9\xac\xf7j\xf6$\xf5\xf5\xf3\xf1\xf2\x1d\xf2L\xf1o\xf0\xe7\xef\x9e\xeff\xef2\xef*\xefw\xef\xca\xef\t\xf0_\xf0\xe4\xf0\x91\xf1"\xf2\xc1\xf2\x8e\xf3^\xf4)\xf5\xe1\xf5\xba\xf6\x9c\xf7R\xf8\xf7\xf8\xa8\xf9p\xfa\'\xfb\xc8\xfbk\xfc\x0f\xfd\xa0\xfd&\xfe\xb5\xfea\xff\xf9\xff\x80\x00\x0e\x01\xc2\x01\x8b\x02.\x03\xb9\x03@\x04\xc0\x041\x05\xa0\x05\'\x06\x96\x06\xca\x06\xe7\x06\x1a\x07Z\x07^\x07%\x07\xe5\x06\xa4\x06T\x06\xf2\x05y\x05\xff\x04h\x04\xcc\x03:\x03\xab\x02\n\x02>\x01]\x00\x96\xff\xf0\xfeU\xfe\xba\xfd\x1d\xfd\x82\xfc\xf7\xfb\x8c\xfb\x1f\xfb\x95\xfa$\xfa\xbb\xf9S\xf9\x10\xf9\r\xf99\xf9?\xf9\x14\xf9\x04\xf9H\xf9\xb8\xf9\r\xfaW\xfa\xc7\xfa[\xfb\xc6\xfb/\xfc\xea\xfc\xd9\xfd\xa8\xfe\x16\xff\xae\xff\xa3\x00\x96\x01g\x02,\x03\xfc\x03\xe0\x04\xa0\x05J\x06,\x07\xf0\x07k\x08\xd9\x08L\t\xc8\tM\n\xb7\n\x15\x0b[\x0b\x88\x0b\xdb\x0b@\x0ch\x0cX\x0cC\x0c=\x0c\x1b\x0c\xc9\x0b\\\x0b\xf2\n\x87\n\xeb\t5\t|\x08\xcd\x07\xf6\x06\xf6\x05\xe8\x04\xe8\x03\xf4\x02\xd8\x01\xaf\x00\x9b\xff\x99\xfe\x9b\xfd\x84\xfc\x85\xfb\x91\xfam\xf9M\xf8U\xf7}\xf6\xac\xf5\xb6\xf4\xf7\xf3o\xf3\xeb\xf2z\xf2#\xf2\xea\xf1\xc7\xf1\xa8\xf1\xb9\xf1\xec\xf18\xf2\xa5\xf21\xf3\xef\xf3\xaa\xf4d\xf5<\xf6K\xf7e\xf8j\xf9r\xfaz\xfb~\xfcn\xfdS\xfeL\xff=\x00\x16\x01\xe2\x01\xb7\x02\x87\x031\x04\xb2\x043\x05\xb1\x05\x19\x06h\x06\x9a\x06\xbc\x06\xde\x06\xe0\x06\xce\x06\xbd\x06\xb7\x06\x9d\x06T\x06\x0e\x06\xc8\x05m\x05\xf2\x04x\x04\x00\x04u\x03\xd9\x02P\x02\xe4\x01b\x01\xb1\x00\xef\xffE\xff\xaf\xfe\x15\xfeO\xfd\x90\xfc\xee\xfbg\xfb\xe2\xfa\x7f\xfaM\xfa(\xfa\xd4\xf9x\xf9l\xf9\x91\xf9\xaa\xf9\xbb\xf9\r\xfa{\xfa\xc3\xfa\xf0\xfaa\xfb\x15\xfc\x9b\xfc\x11\xfd\x98\xfd3\xfe\xc8\xfec\xff\x19\x00\xce\x00^\x01\xca\x01R\x02\x0b\x03\x9a\x03\xef\x03\\\x04\xfc\x04|\x05\xcd\x05\x13\x06t\x06\xc1\x06\xdf\x06\t\x07R\x07\xa8\x07\xec\x07\x14\x08;\x08W\x08:\x08\xfa\x07\xc4\x07\x9e\x07\x85\x07g\x07\x18\x07\xc7\x06\x87\x06\x1f\x06\xad\x05A\x05\xf1\x04\x90\x04\x13\x04\x8b\x03\xf9\x02y\x02\xfa\x01\x8b\x01A\x01\xf9\x00\x93\x00\x1c\x00\x95\xff\x0c\xff\x81\xfe\xf4\xfdq\xfd\x01\xfd\x9a\xfc\x10\xfc{\xfb\xe2\xfaJ\xfa\xc1\xf9^\xf90\xf9\x14\xf9\xf5\xf8\xbd\xf8\x7f\xf8]\xf8C\xf8Y\xf8\x8c\xf8\xc8\xf8\x08\xf9K\xf9\xb8\xf9/\xfa\xa7\xfa\x1d\xfb\xa1\xfb>\xfc\xdf\xfcs\xfd\n\xfe\xa4\xfe\'\xff\xaf\xff:\x00\xc0\x00P\x01\xc9\x014\x02\x98\x02\xf6\x02b\x03\xb1\x03\xe6\x03\xf6\x03\xe4\x03\xe6\x03\xf1\x03\xf7\x03\xfa\x03\xe4\x03\xb7\x03U\x03\xea\x02\x89\x02\x1c\x02\xc1\x01`\x01\xf4\x00\x8e\x00\x1a\x00\xa6\xff>\xff\xc9\xfed\xfe\xfb\xfd\xa1\xfd/\xfd\xb7\xfci\xfc\x1b\xfc\xdc\xfb\xbb\xfbd\xfb-\xfb\x00\xfb\xc3\xfa\xcd\xfa\xa4\xfa\x98\xfa\xbc\xfa\xd9\xfa\xfe\xfaD\xfb\x8e\xfb\xdf\xfbN\xfc\xa1\xfc 
\xfd\x94\xfd3\xfe\xaa\xfe"\xff\xb5\xffn\x00\x13\x01\xdd\x01U\x02\xf8\x02\x95\x03\xec\x03`\x04\x87\x04\xd3\x04\xed\x04\xef\x04\xfd\x04"\x05$\x05~\x05\x7f\x05\xca\x05\xb4\x05`\x05q\x05\x11\x053\x05\x86\x04\xed\x04|\x04\xe1\x03u\x04\xb3\x03o\x030\x03\x19\x03\xb7\x02Y\x02c\x02\x8f\x02\x9d\x02\xf4\x01\xe9\x01\xd1\x01\'\x01\xcd\x00\r\x00\xe7\xffj\xff\xf3\xfe\xc5\xfe"\xfey\xfbH\xfa\x8d\x01\x9c\x0fV\x15\x8a\x03\x89\xee\x1c\xe8\xd7\xee\\\xf7\x91\x03|\x05\x86\xfd\n\xf3\xe9\xe7\xc7\xeee\xf3=\xf8\xde\xf8\x1e\xf6\x00\xf6\x80\xf6\xe9\xf8\xbd\xfe\n\x04\x1f\x02W\xfb\x17\xf8B\xfcI\x028\r+\r\xcf\x04u\xff\x7f\x00\x93\x06\xa5\x0c\xaf\n\xa1\x02\xbb\x005\x05\n\x0b5\x0b\xe4\x06"\x02\x90\xff\xbf\x04\xb3\x06\x0c\xfff\x04\xde\x02\xa8\xff\x01\x01 \x03\x17\x07\xd3\xff\xed\xfd\xc0\xfe\x97\xfc\xd3\xfe\xbf\x03a\x04\xed\x01\x92\xfc\x02\xfbt\xfe\xfd\x00\xd3\xfe\x80\xfdO\xffj\xfeE\xfc^\xfe\xab\xfcH\xfe\xf4\xfd\x13\xf8\x97\xf7\xa8\xf9\xea\x034\xfcN\xfa\x88\xfbQ\xf6c\xf8S\xfc+\x02\x1d\xf9\xac\xfc3\xfeZ\xfau\xfb\xe8\xfeP\x02\xe3\x00\xd3\xff\xa1\xfc(\xfdf\xfd\xe7\x01t\x07\xe8\x04?\x02\xda\x01\xc2\x00\x8e\x01\xac\x05-\x05\x00\x03\x1c\x04\xae\x07\xb2\x04 \x03\x87\x04f\x04\xce\x06\xa1\x06F\x03\xc3\x01\xb5\x02\x93\x03\xba\x05\xbc\x04\xeb\x04\xb9\x01\xd1\xfe`\x00\xa4\x04\x00\x03\xc0\xffx\xff\x9a\x01\x00\x00w\x00\xa4\x02u\x00\xcd\xfd"\xfd\xda\xfdZ\xfeA\x02\\\x001\xfes\xfb\xfa\xfa\xe1\xff0\x01\x9b\xfd\xdb\xfc_\xfdB\xfc<\xfc\x9e\xfd\xad\xfeG\xfd\xf5\xfb\xb4\xfa\xcb\xfb\x0e\xfdd\xfc\x0c\xfd\xdd\xfd\x8f\xfco\xfb-\xfb\xae\xfc\xc3\xfe\xe5\xfeg\xff\xdc\xfe\x0e\xfe\x80\xfc\x11\x00\xf1\x03\xce\x01\x0f\x03=\x02\xc1\xfe\xbc\xff\xb6\x04\x00\x05\xbd\x025\x03a\x01\xfe\x00\xd1\x05+\x05\xca\x01s\x01\x06\x05\xb6\x03$\x02\xad\x019\x02\x9e\x02\xf0\x01\x84\x01C\xff\xab\xfe\xa2\xfe,\x02z\xff\x0c\xfd\x99\xfb\x85\xfc\xc5\xfe\xb4\xfd\x81\xfc\xd9\xfcn\xfe_\xfd\x88\xfb\x8f\xfc2\xfe\xfc\xfd\xd2\xfb\xbd\xfa\x9c\xfe\xc0\xfb\xa0\xfcC\xfe\xa0\xfc\xfc\xfb?\xfc\xc8\xfe?\xfdw\xfc|\xfd\x8d\xfd!\xff\xe9\xff8\x00V\xfe\xce\xfdO\xff\x0e\xff\x15\x00\xb3\xff\'\x02\xd6\x01\xc4\x00\xc5\x01_\x03F\x01\x94\x00h\x02\xbc\x02\xe1\x04W\x04\x03\x04\x87\x02R\x04v\x04\x0e\x04U\x07n\x05q\x02\x18\x03\xe4\x03\x14\x03\xeb\x03\x8b\x06^\x04\xa4\x02\t\x02F\x02\x92\x00|\x02\'\x03\xc7\xff\x16\x017\x01m\x01\xeb\xff\xc2\xffr\xfd\xf6\xfd\x0c\x00h\xfe$\xff\x92\xffw\xfc\xe0\xfa\x16\xfe+\xfc2\xfbC\xfd\x7f\xfbw\xfb\xa0\xfc?\xfc\xd0\xfb/\xfbY\xfb\xdc\xf9\x05\xfc>\xfdk\xfd\\\xfd\x8a\xfc\x18\xfe\xa8\xfd"\xfe\x06\xff\x19\x00!\xfd\xfa\xfe\xce\x00\xae\x00\x82\x01S\xff\xfa\xff,\x01;\x02X\x03\xf2\x03\xf0\x01\xc2\x01\xf9\x01E\x02\xdf\x03e\x05\xb2\x04\x81\x02\xc6\x03\xdf\x02\xab\x03^\x03\xc2\x03\x19\x02\xa6\x02"\x04\x1c\x02\x84\x02\x95\x01n\x02\x96\x00X\x00:\x00\xf3\x00\xb7\xff\xbe\xfe\xac\xff\xa1\xfe\x90\xfe3\xfe:\xff|\xfe\xe3\xfe4\xfe\x15\xfe\xa5\xfe%\xfe\x82\xfc\x07\xfd\x00\xfe\xbb\xfe\xbf\xfd0\xfc\x1d\xfe\xf9\xfd\xb4\xfd\x8c\xfc\x91\xfd\xe6\xfb\x02\xfc\x86\xfd\n\xfd\xea\xfeb\xfd\xa8\xfd\xd0\xfc\x8f\xfe\x04\x00-\x00r\xfdG\xfd\x15\x01v\xfe{\x00\xff\x00s\x01\xb2\x00W\x01\x7f\x025\x02\x85\x019\x00b\x04\xd1\x03T\x03\xe7\x02\x9c\x02 
\x04\xc4\x03\r\x04\xcf\x04j\x04$\x02e\x02\xed\x04\xa9\x03Y\x03\xfc\x04\xe7\x03{\x02\xeb\x01\x9d\x02\xc9\x00c\x02R\x02\x9e\x02\xbd\xffr\x01\xc4\x01\x9f\x00\xda\xfe\xeb\xfcu\xff9\xfc\xca\xfe5\xfeG\xff\xdf\xfd\xe8\xfb\xb0\xfb}\xfbI\xfb\x8b\xfd\xd5\xfa\xad\xfa\x11\xfe\x94\xfb\x0c\xfd\x99\xfdG\xfd\x98\xfa\xec\xfa\x00\xfd\x86\xfe\xc3\xfb\x1e\xff0\xfeX\xfeZ\xfe\x95\xfd\xf4\xfe\x9f\xfe{\x01\xfc\xfe\x0b\x01-\x00\xe3\x00\xf5\x00e\x01b\x02"\x02\x9a\x01\xd7\x01\x82\x01I\x04l\x04.\x01\x91\x03e\x03\xb0\x02\xc9\x03\x9a\x03I\x02\xda\x02;\x01\xc6\x03\xb7\x02\x08\x01\xf1\x02\x8f\x01\xfb\xff\xc4\x00\xe2\x01\xb6\xff"\x00(\x00\x01\x01d\x00c\x00\xba\xffW\xfe\xcc\xfe\xb3\xfe\xaf\x00O\x00\xa8\xfe]\xfe\xb4\xff\xed\xfe$\xfe[\xfe\x93\xfe\xe5\xfe\xdf\xfeF\xff\xe0\xfd\x96\xfdN\xfe\xf3\xfdQ\xfc\xd0\xfdN\xfeQ\xfe\xd8\xfe\xbf\xfb\x17\xfb\x03\xfd\xef\xfc\x81\xfc\xae\xfdM\xfch\xfdd\xfee\xfd\x9f\xfb<\xfd^\xfe\xf9\xfe\xd3\xff%\xfe[\xff\xce\xfe\xb2\x00J\x01\xa6\x00s\x00\xc4\x01\xae\x01\xad\x00H\x03\x8a\x03\x9c\x02w\x02\xbf\x02\x81\x02\xfd\x05:\x033\x01\xcf\x03\x8e\x03\xee\x03\xba\x02\x1e\x03F\x02<\x02p\x03\xc4\x02m\x01\x15\x01\xb7\x01<\x01~\x00_\x00}\x00(\xff\xc2\x00d\xff\xeb\xfd4\x00\\\xfe:\xfd,\xfe\x0e\xfe\x0b\xfe\xc9\xfdX\xfe\xf5\xfc%\xfd\xa9\xfd\xb5\xfc\x1b\xfe=\xfe*\xfd\x85\xfdG\xfet\xfd\x8e\xfe4\xfe\x0b\xfe;\xfd\xb2\xff\xfb\xff[\xff\xe6\x00\xd1\xfe\x05\xff\xa7\xff\xd9\x00\'\x01\xe4\x00P\x00\xf1\x01p\x01t\x01\xbc\x01\x10\x01\xd9\x011\x01;\x02\x9c\x02\x1e\x022\x02s\x01\x86\x01\xab\x013\x01N\x02\xf6\x01\xc1\x00\x94\x01\xe2\x01\x84\x01\xf9\x01#\x01\xf1\x00H\x01\x06\x01\x86\x00\xac\x01F\x02\x01\x01I\x00\x18\x01\xb0\xff5\x00\xd1\x01\xe8\xffH\x01F\xff\xb5\xff\xd7\xffq\xff\xf2\xfe0\xffR\xffD\xfe\x08\xffS\xfd\x0e\xfe\xdd\xfd\xa0\xfdY\xfeD\xfd\xdf\xfc\xae\xfd\xa2\xfd\xaf\xfd>\xfd\xea\xfc\x95\xfc\x95\xfd\xc6\xfd\x8b\xfe\x92\xfe\xc2\xfdb\xfeD\xfd\xb5\xfe\xe0\xfd\x1b\x00\xe6\xfe\x89\xff\xa3\xff~\xff\xf9\x01\x16\x00\'\x01\x0e\x01\x84\x01$\x00J\x02\xed\x01>\x03\xcf\x03\xc8\x02\xfa\x020\x028\x03\xfb\x02\xb2\x02\xc4\x02P\x04j\x03\xa3\x03w\x02\xb6\x03\x85\x01N\x02\xa2\x02\x0c\x03_\x017\x00\xf3\x00t\x00L\x02\xaa\xff\xce\x00\x94\xfe\xc0\xfe]\xfe&\xff\xd8\xfe\x11\xfe\xb2\xfd\'\xfd\xea\xfdr\xfc\xea\xfc\xe3\xfc\x95\xfc\xc8\xfd\x14\xfcT\xfc\x87\xfc\x92\xfc\x9d\xfc\xc7\xfdS\xfe\xdd\xfc<\xffR\xfd\r\xfeJ\xfe2\xffo\xfeG\xff\xc0\xffN\xff\xf2\x007\xff~\x01\x03\x00F\x00\xa5\x00\x7f\x01\x85\x01\xfa\x00\x82\x02\xe2\x00A\x02~\x02\xa2\x01\x8b\x00$\x01J\x02\xe0\x017\x03;\x01\x8a\x00\xc0\x00\x11\x01V\x01c\x02\xff\x01\x05\x00\xd1\x00\xf8\x00Z\x00\xa1\x01_\x01\x06\x00\xb8\x00\x16\x00c\x00\x81\x00{\x00\xbe\xffP\x01V\xff\x95\xff\xf7\xfe\x97\x00\xd8\xfe\xc2\xfe\\\x01<\xfe\xa3\xfeS\xfd\'\xff\xdb\xfe\x0f\xffO\xfdv\xfd\xc8\xfd\xb7\xfc\xd0\xfd\xad\xfd\x84\xfd\xef\xfct\xfd\xe8\xfcX\xfe\x05\xfd\x03\xfd\xbd\xfe+\xfd\xf3\xfe\xd7\xfd\x9f\xffc\xfe\x0c\xff\x15\x011\xfe\xbd\x00\x95\xff~\x01^\x00F\x01\xc2\x00\xf4\x01l\x02[\x00\t\x03\xb2\x01\x06\x03\t\x02\xb8\x02\xdc\x01{\x02\x95\x02|\x02Y\x02<\x02i\x02\t\x02B\x02.\x01e\x02v\x01B\x01\x9a\x01=\x00\xbd\x01\x9d\x00\xcc\x00\x0f\x00\xcd\xfev\x01\xf8\xfe\x8b\xff\xa9\xff"\xff\xb5\xfe\xad\xff\x1d\xfe\xf2\xfdT\xff\n\xfe8\xff>\xfe\xed\xfd\x8f\xfd\x12\xfe[\xfe\xd1\xff\xd7\xfe\xd1\xff\xfc\xfd\x00\xfe2\xfen\xfe\xa5\xffR\xffc\x00Q\xfe\xf5\x00\xad\xfd\x95\x00\x9e\x00\xfa\xff\x17\x00\xaf\xfe(\x01\xf8\x00\x87\x02\x1d\x00w\x02\xcb\x00\x1d\x02\xe6\x02I\x01\x05\x03\x99\x01\xb3\x02\xcf\x01\xe3\x02"\x02?\x02\xe6\x01=\x01\xfb\x01\x7f\x00O\x01u\x00:\x00n\x00\x08\x01T\xff\x98\xff\x16\xff\xbc\xffm\xff\x89\xffV\xff\xd9\xfd\xbb\xff9\xff0\xfe\xc2\xffO\xf
e\xa9\xfe\xc5\x00_\xfd\xa7\xff\x02\xfe\xdb\xff<\xfe\x93\xff\x1c\xff\x1b\xff\xdd\xfer\xfe\x8d\xff/\xfe\x1e\x00U\xfd[\xfff\xfd\xa1\xff\x15\xff\x8c\xfe\xb0\xfe\x13\xfew\xfe\x87\xfeW\xff\x07\xff\x15\x00\xc3\xfe\xa5\xff\xfd\xfe\x1f\x00\xae\xff\xa3\xff\xc2\x00Q\x00g\xff\xed\x00\xcf\xff\xac\x01b\x01\xa1\xff<\x01\xd2\x00\xd2\x01S\x00\xee\x01\x91\x00\x0b\x02\xa8\x02\x96\x01\xcf\x01\x88\xff\xe1\x00y\x01G\x01g\x03\xa5\x01c\x00\x19\x01\xeb\x01\xc6\x00E\x02\xcc\x00\x08\x01a\x01\x89\x00\x1c\x01\xad\xffd\x02\x8b\xff\x81\xff\xc3\xff\x86\xfe\xd9\xffy\xff!\xff\x13\xff\x12\xfer\xfe\x19\xff@\xfe\xa6\xfe&\xfd\xe0\xfd9\xffp\xfe=\xfe\xce\xfd\xde\xff\x86\xfe;\xfe\x85\xfft\xfd?\xffq\xff\xa6\x00\x18\xff\xce\xff\xa6\xff~\xfe9\x01u\x00\x83\xff\xbc\x00\xce\x00\xda\xff\x01\x00\x9e\x00\xa1\x00I\x00\x96\x015\x00\x96\x018\xff\x15\x02\x03\x00\xc8\x00\xcf\x01E\xff\xfe\x01\x95\x00\xb7\x01\xfe\xffO\x01\'\x01\xe4\x00\xec\xfe\x15\x01\xed\xff\x9c\xff\xc8\x00\xc8\xff*\x01\xde\xfe*\x00\xc6\xfe\xd0\xffl\xffP\xfe/\x01\xbb\xfe\xaf\xff\xa3\xfe\xf6\xfe2\xff?\xff_\x00\xc4\xfd\xd2\xff\xeb\xfe\xea\xff\x7f\xfe\'\xff\n\xff\xb0\xfe\xc0\xffm\xfe\xf7\xffr\xff\x89\xff\xd9\xfe\xdc\xfe\xab\xfe\x1c\xff\xe8\xff\xf7\xfe7\xfe\xdc\xff8\xff;\x00\xbe\xff\x94\xfe\x94\xff\xc5\xff{\xff\xc6\xff*\x00\xc4\xff\xb8\xff\x8c\x01\xea\xff#\xff\xe6\x00K\x01\xe3\x00Z\x00\xd0\xff\xf8\x01,\x00\xb9\x011\x02b\x00q\x02\x1b\xff\x08\x03$\x00\xb4\x01&\x01\x8c\x00\xee\x01a\x00\x9c\x03_\x01\xf5\xff]\xff\xdc\xff\xc0\xffN\x01\xd5\x00\x93\x00\x10\x00t\xff\x95\xff\xae\x00S\xff\x1c\xff\xa6\xfe\xa9\xfeR\x00g\x00u\xff\x08\xffQ\x00\xaa\xfeD\xff`\xfea\x01\x88\xff\xd6\xffU\xff\xf2\xfek\x00/\xff\xcd\xff\xc4\xffq\x00\x9b\xff\x93\x00\xac\xff\x00\xffR\xff`\x00\xf4\x00N\x00\xef\xff\xbd\x00\r\x00w\xff\x91\x00\xc1\xff\xe5\xff#\x01\x15\x00\xc6\x00\x08\x00\t\x00\xd0\x00F\xffd\x00\x83\xff\xde\x01\x8f\xff\xd4\x00\xa2\x00\x7f\x00\xea\x00\x08\x01\xe2\x01\xe0\xff\xc8\x01\xce\xfe\xa7\x01\xdc\x00/\x01\x1c\x01-\x00Z\x01\xa2\xff4\x00\xcb\xff\xb0\xff\xfe\xfe\xa2\x01F\xff\x81\xff\x9a\xff\xf2\xfe~\xff5\xff\x1b\x00\xf5\xfe\x13\xff\x96\xfe\x14\xfe\xa0\xff\xe2\xffx\xfe\x17\xfe\x94\xfd\xf6\xff\xf6\xff\xba\x00:\xff6\xff\xaa\xff.\xfey\x00\xe8\xfd\x04\xff9\x00\x86\xff.\x02\xba\xfe6\x00(\xff`\x00t\xfe\xe7\xff2\x01*\xff\xe3\x01\x0e\x00\x19\x01\xc9\xfe\x9d\x00\xeb\xfe\x00\xff\xce\x00W\x01\x9d\x01\xdc\xfea\x00\xdd\xff\xd3\xfe\xa6\xff\xe1\x00\xe7\x00\xbe\xff|\x01\x11\x00\\\x00@\x01\xc1\xfe\x94\xff\xb6\xff\x07\x01\x81\xff+\x01\x05\x00\xa1\x00v\xff\x90\xff&\xff\xf7\xff}\x01i\xfe\xe9\xffn\x00`\x00\xde\xfe\xdb\x00Y\xfeh\x00\xac\xff\xd0\xff\xda\xffR\xfe]\xff\xb0\x00\xc9\x00\x90\xff.\xff\xcf\xfe\xbf\x00\xd3\xff\xcc\xff>\xfe3\x00\xde\xffJ\x00\x8e\x00\x8f\x006\xff\xd2\xff\x98\xfe\xea\xfe\x95\x00i\xff7\x01\xc9\xfe:\x02\xbc\xff\x19\xff\xba\xfe$\xff\xb3\x00\n\x00\xd8\x01S\x00K\x00\xbe\x00\x12\xff\xa8\x00\xc8\xff\x91\xfe\xd1\x01L\xff\x9d\x00\xc2\x00\xd2\x01\xb1\x00\n\x00\xf7\xff\xdc\x00\x10\x01\xe6\xfd\xd3\x00W\x00\xa7\x00{\x02\x08\x01}\xfe|\xff\xef\xffI\xff\xb1\xff_\x00\xab\xfe\xdd\xff\x8c\x00\xde\xfe\x8f\xffh\xffI\xff|\xff\xea\xfe\\\x00C\xffG\x00~\x00\xb0\xff\x04\xffJ\xffR\x00\xee\xff\xca\xff\xc0\xfd\xe9\x00\xe3\xffN\xff\xb6\xfew\xff\xf7\xffd\x01\xab\x00t\x00<\x00\xbb\xfe\xaa\xff\x7f\xfeK\xff\x14\x01L\x01m\x01\x10\x01a\xff~\xff.\xffT\x00\x96\x00\xcf\x00G\x00m\x00\x88\xff2\x00R\x00e\x00\x83\x01\xe7\xff\xef\x00}\x00"\x02\xce\xff]\xfe\x10\x01\r\x00\x8a\xff\x0e\x00w\x00\x8b\x00\x07\x02\xd3\x00\xd0\xfe\x8b\xfe\x8a\xff\x80\x00q\x00\xf9\x00\xc9\xff\xcb\xfe\xcf\xfe.\x00\xbe\x01T\xff\xbb\xfc\x80\x00\xca\x00\xa5\xff\'\x00\xc5\xff\x8a\xfd_\
xfc\xd7\xff"\x022\x02Q\x00\x95\xff\xf7\xff\x8b\x00\x80\xfe\xbd\xfe\xaa\xff\xbb\xff\xd9\xfe\xbb\xff\xf1\x01<\xff\x03\x01%\x01\xd3\xff\xe5\xff\x85\xff\x88\x01`\xff\x15\xff\x06\x00\x13\x00<\xffV\x01\xe0\x02\xa4\x01\xca\xff\xd4\xff\x03\x000\xfe%\xff\x12\xff\xea\x01\xca\xff\x94\x00\xb2\x00\xd6\xff\xab\x01\x0e\xff\x16\x00x\xff\xa2\x00X\x029\x00\x81\x00*\x01W\x00\x81\xff\xcc\x008\xfe{\xfd\xae\xff\xcd\x02<\x04n\x00\x12\xff*\xfe\xff\xfd\xe5\xffz\x02\x9a\xff\xa4\xfdW\xff\x9c\x01\xc6\x01J\x00\x0b\x00J\xff2\xfd\x00\xfe\x18\x00`\xffk\xff\xaf\xff\x8a\x00\xf5\xff#\x01\xfe\xff\xac\xfe\x99\xff\xe3\xfe\r\x00H\xffn\x01\xeb\x01G\x00\xe2\xff\xc9\xfe\xf2\xfe\x00\xff\x91\x00\xb5\x01\xda\x01\x9f\x01\xd5\x01\xb3\xffN\xfe\xec\xfe\xc1\xffA\x01i\x00\xf7\x00\x00\x01h\x00\x88\x00\xd4\xfei\xfe\x8b\xfe\x86\xff\xf6\x00\xc4\x00\xa9\xff\xf4\x00\xd0\x00a\xfe\xcb\xfe\x1d\x00v\xfe\xe5\xfd\x06\x00C\x00D\x01\xfc\xff\x8c\xfe\x14\xfe\xe3\xfd\\\xfeX\xfdB\xfe(\xff\xb3\xfeh\xff\x97\xff\x83\xfe?\xfd\xed\xfc\x8d\xffO\xfe\x06\xfe\x9c\xfd\x82\xfd\xfd\xff\xec\xffB\xff\xb3\xfd\xb3\xfdy\xfem\xfe\xc1\xff\xee\xfe8\xff\x14\xff\x80\xff1\xff7\xfe\xe5\xfd$\xfc\x1b\xff\xa5\x004\xff\x08\xfe\xce\xfd\x81\xfec\xfe\xce\xfeC\xfe\xc2\xffo\xff\xe7\xff\xb5\x00H\x00G\x00\xb5\xff\xc0\xfe\xe6\xfe\x89\xff7\x00\xb0\xff\xb0\xff\xc5\xff\x83\xfe\x9a\xfeQ\xff\xbb\xfeX\xff\x1a\x00\xf7\x00o\x02\x1c\x04s\x05\x86\x07\xbd\t\xac\x0bH\x0e\xa9\x10\xef\x12U\x14O\x15\xd5\x15\xaf\x15\x1f\x15\x0b\x14h\x12\x14\x10\x10\r8\nG\x07z\x03\xe9\xff\x01\xfdt\xfa\xa5\xf7\x9b\xf5\xdb\xf3\xf8\xf1B\xf1\xc6\xf0$\xf0\x05\xf0,\xf0g\xf0\xd0\xf0\xbf\xf1\\\xf2\x91\xf2\xa7\xf3\t\xf5\x01\xf6\xe2\xf7\x1e\xfa\x92\xfb%\xfd!\xff\xec\x003\x02_\x03\xbe\x03\xc9\x031\x04\x13\x04\xaa\x03\x1c\x03\x06\x02\xd3\x00\xa7\xff\xc0\xfe\xeb\xfd\xa0\xfc\xfe\xfb\xa3\xfb]\xfb7\xfb{\xfb&\xfc9\xfc}\xfcD\xfd\r\xfe\x17\xff\xc6\xff+\x00O\x01t\x02&\x037\x03`\x03\xb9\x034\x04\x01\x04\xb3\x03\x92\x03T\x03\xb7\x02\xc7\x01k\x00\xf0\xfff\xff\xff\xfe\x15\xff\x91\xfe.\xfe6\xfd\xdc\xfb4\xfb\x0c\xfd\xd4\xfeI\xff\x9c\xfd\xcc\xfc\xc8\xfd%\xff\x81\xff8\xfeb\xfd\x9c\xfe\x87\xfff\xfe\x03\xfeO\xfe4\xfe\xb9\xfc\xf8\xfb\xcc\xfbv\xfbT\xfb\xd4\xf9\xe8\xf8Z\xf8\xdc\xf8@\xf8\xe2\xf7\x95\xf7\xe9\xf7\xf1\xf7#\xf8\xd4\xf8\x0f\xf9\x10\xfa\xde\xfa\n\xfcf\xfd$\xff\x16\x01L\x04\xb9\x07\r\x0bR\x0f\xa3\x13\xa1\x18@\x1e"#$\'0*\xb7,I.\x9f.\x82-\x0f+d\'\xb1!R\x1b\xb1\x14\xae\r\xb6\x06^\xff\x05\xf8\xf9\xf1\x02\xed\xdd\xe8r\xe5\x99\xe2\x9a\xe0\xb8\xdf\x9c\xdf\xd8\xdf\x96\xe0\xff\xe1t\xe3\xc3\xe4\x87\xe6\xfb\xe8\x05\xec\xe1\xeej\xf1_\xf47\xf8h\xfcg\x00:\x04\xc2\x07\x00\x0b\xcb\r\xc4\x0f\xdf\x10"\x11{\x10\xd5\x0e;\x0c\xe9\x08|\x05\xa4\x01`\xfd\xf9\xf8\x18\xf5\xef\xf1S\xefW\xed\xec\xebw\xeb\xc9\xeb\xb5\xec\xe3\xedy\xef\xd1\xf17\xf4\'\xf6/\xf8\x9e\xfa6\xfd\t\x00N\x02\x11\x04|\x06)\tD\x0b\xa4\r\xa3\x0f 
\x11c\x12Y\x13\xd3\x13\xac\x13\x1b\x13\xbe\x11\x91\x0f\x14\r\x8e\n\xcb\x07\xc1\x04\xfd\x01F\xff\x83\xfc\xac\xfa=\xf9\xb8\xf7\xc8\xf6_\xf6!\xf6\xd1\xf5\xde\xf5\xf1\xf5\xe0\xf5)\xf6Z\xf6I\xf64\xf6V\xf6M\xf6\x11\xf6&\xf6Z\xf6\x8f\xf6\xb4\xf6\x89\xf6\xa6\xf6\xe4\xf6N\xf7\xef\xf74\xf8\xa5\xf84\xf9\xe4\xf9]\xfa\xe9\xfa\xf4\xfb\x97\xfc\xc3\xfc\t\xfd^\xfd\xd7\xfd\xfb\xfd"\xfe\x04\xfe\xab\xfd\xfa\xfd\x0f\xfe\x9a\xfe$\x002\x02\xd6\x04{\x085\r\x83\x12\x7f\x18u\x1f2&\x19,\xad1V6)::\x11\xc6\x14[\x17\xbc\x18\xf2\x18\n\x18\xbf\x15\x00\x12)\r\xc7\x07\xd9\x01Q\xfbW\xf4\xcc\xedJ\xe8\xc9\xe3M\xe0\xde\xdd\xbb\xdc\x11\xdd\xdc\xde\xe1\xe1\xca\xe5k\xea\x93\xef\n\xf5X\xfa\x97\xff\xb4\x04p\t\x8a\r\r\x11\xe1\x13]\x16\xa2\x18b\x1a\\\x1b\xd7\x1b\x16\x1c\xd3\x1b\xfa\x1a\xb7\x19\xe0\x17n\x15Z\x12\x93\x0ea\n!\x06\xb1\x01V\xfd#\xf9"\xf5\xc3\xf1E\xef\xa6\xed\xfe\xec\xdc\xecR\xedm\xee\x16\xf0\x1c\xf2V\xf4\xb5\xf6\xcf\xf8|\xfa\xe3\xfb\xf5\xfc\xc5\xfdl\xfe\x86\xfe\'\xfe\xa8\xfd\x07\xfdi\xfc\xb0\xfb\xc7\xfa\xe8\xf9\xf3\xf8\xf6\xf7-\xf7\x8b\xf6\xe2\xf5N\xf5\xcc\xf4E\xf4M\xf4\xaa\xf4E\xf5\xf6\xf5\xe0\xf6\xec\xf7\x0e\xf9\x88\xfa\x05\xfc`\xfd\xd6\xfe\xd3\xff\xbb\x00\xa3\x01\x8a\x02 \x04_\x05Z\x067\x08~\x0b\xf3\x10i\x17\x15\x1d\x87"a(\x02/\xe45\xea:\x96=\xaa>N>\xe9; 7O0\x12(\xa4\x1e\xb0\x13\xbf\x07\x96\xfc\xe4\xf2E\xea\xe9\xe1 \xda\xac\xd4\x15\xd2[\xd1m\xd1\x15\xd2\xfe\xd3\x03\xd7c\xda\xef\xdd\xa4\xe1\x97\xe5^\xe9u\xec`\xef\x12\xf3\xc4\xf7\xb7\xfc\xd3\x00>\x04X\x08C\r\x01\x12\x84\x15\xae\x17\xbe\x18\xb2\x18\xff\x16\x9b\x13\xd8\x0e/\ts\x02\xb2\xfaV\xf2\xb1\xeam\xe4\x1b\xdf\xa3\xdaI\xd7\xd4\xd5\xa9\xd6\x8a\xd9\xb5\xdd\xc1\xe2\xab\xe8J\xef`\xf6V\xfd\xe1\x03\xfd\tH\x0f~\x13\xaa\x16\xf8\x18\xfd\x1aX\x1c\xd8\x1cy\x1c\xad\x1b\xd4\x1a\xf2\x19\x8f\x18\xb8\x16\x89\x14\xf7\x11\x08\x0f\xb5\x0bT\x08\xc0\x04\xcd\x00\xad\xfc\xb8\xf84\xf5l\xf2y\xf0,\xef\x9e\xee\xcd\xee\xde\xef\xbe\xf1.\xf4\xc5\xf6\x93\xf9Z\xfc\xf0\xfe(\x01\xcb\x02\xce\x038\x04\xf5\x03\x12\x03\xb2\x01\x12\x00;\xfe\x10\xfc\xc0\xf9\xb5\xf7\x02\xf6q\xf4\xe6\xf2\x94\xf1\xb9\xf01\xf0\xea\xef\xee\xef3\xf0\xd2\xf0\x98\xf1U\xf2n\xf3 \xf59\xf7\'\xf9\xdf\xfa\xea\xfcZ\xff\xa0\x01\xf2\x02\xb6\x03\\\x04\xb3\x04a\x04\xf4\x02Q\x01V\x00\x1a\xff\xcd\xfd\x1a\xfd\xee\xfe\xa4\x03 \t\xbf\x0eq\x15\xf6\x1e\xb7*!5\xd4<\xa0B\x0fH\x11L\xf7K\xc7G\x00A\x868\xc2-* \xad\x11t\x04Q\xf8C\xec\xa4\xe0p\xd7\r\xd2y\xcf\x0b\xceB\xcd\x12\xce\xd2\xd0\x8c\xd4\x03\xd8\t\xdbo\xde\x16\xe27\xe5\xc0\xe7\xab\xea\xf0\xee$\xf42\xf9\x12\xfe\xfa\x036\x0b\x94\x12\xb5\x18?\x1d\x9a \xb9"\xca"K 
%\x1b3\x14\xc0\x0b:\x02%\xf8D\xeeR\xe5\x83\xdd\x1b\xd7\xbc\xd2\xc5\xd0Y\xd1\xf7\xd3\xfd\xd7!\xddL\xe3p\xea\n\xf2\x85\xf9K\x00\xeb\x05\xb8\n\x0b\x0f\x1d\x13\xae\x16\\\x19\x02\x1b\x1d\x1c\xe6\x1c\xcc\x1dn\x1e{\x1e\x81\x1d\\\x1bQ\x18\xcc\x14\x0e\x11\xa2\x0c\'\x07\xf8\x00\xd7\xfaJ\xf5\x9d\xf0$\xed\xe9\xea\xbf\xe9\x87\xe9\x99\xeaN\xedp\xf15\xf6\xea\xfa\t\xff\xba\x02\x16\x06+\t\x85\x0b\x83\x0c\x02\x0cr\nj\x08g\x06P\x04\x01\x025\xffA\xfc\x8c\xf9m\xf7\xe7\xf5\x9b\xf4\x1f\xf3I\xf1]\xef\xee\xed\x1b\xed\xb7\xec\x8d\xec\xb3\xec\x04\xed\xf0\xed\xc2\xef^\xf2\xea\xf5\x80\xf9\x9c\xfc\x87\xff&\x02\xde\x04\x1e\x07\xfc\x07\x08\x08\xab\x07s\x06\xc5\x04\xa2\x02~\x00\x03\xff4\xfdq\xfa\x8e\xf7`\xf5V\xf4\xc2\xf3\x85\xf2Y\xf1\x9c\xf1\xa8\xf3X\xf8\n\x01\xb7\r\x0b\x1c\xac(\xf52\'>=K\x89V\xd6[\x1d[\rW\xe5P\xe6F\xf08\\)\xda\x19\xa4\t\x7f\xf8\xe8\xe8\xaf\xddo\xd6\xf4\xd0\x92\xcbx\xc7\xd8\xc5,\xc6]\xc7\xa3\xc8\xc0\xc9\x00\xcb\x88\xcc7\xcf&\xd4\x9d\xdb1\xe5\x82\xef3\xfa\x9c\x05\t\x12\xb5\x1e\xea)t2\x017\x957\xbc4\xfa.\xbe&k\x1c@\x10m\x03q\xf6R\xeaK\xe0\xed\xd8/\xd4\x10\xd1\xf7\xcei\xce\xa2\xcfu\xd2\xef\xd5\x98\xd9\xa2\xdd=\xe2\x88\xe7\x82\xed\xa1\xf4\r\xfd\xeb\x05\x85\x0e\xef\x16*\x1f\x98&\xe5,H1;3{2D/\x14*\x18#]\x1aa\x10\xad\x05\x90\xfb\xe8\xf2\xdc\xebm\xe6\xdc\xe2:\xe1G\xe1\xd0\xe2\xa1\xe5n\xe9\xbd\xed\xa8\xf1.\xf5\xe9\xf8\xca\xfda\x03j\x08T\x0c&\x10\xa5\x146\x19\xa8\x1c\xac\x1em\x1f\xd5\x1eM\x1c\xb6\x17\x04\x12\xcc\x0b\xe2\x04D\xfdB\xf5\xe3\xed\xfb\xe7\xdc\xe3\xf8\xe05\xdf\xc1\xde\xf9\xdf]\xe2\x8d\xe5o\xe9\xb4\xed\x0e\xf2\xd6\xf5_\xf9o\xfd\x9e\x01\x7f\x05\xfd\x07n\tt\x0b\xbb\rR\x0fP\x0f\xdd\r\x07\x0c\xb5\t\t\x06.\x01\x95\xfc0\xf8a\xf3\xec\xed\x07\xe9\xa3\xe6|\xe6\x17\xe6j\xe5\xbe\xe5&\xe8"\xec\x8c\xef\x9f\xf2\xc5\xf6$\xfbA\xff\x96\x05U\x12\x97%\x898\xabDdK\xefR,\\\x7fax^WUmK\x8c?X/b\x1d\n\x0f\\\x04\xa4\xf8\xef\xe9\xb2\xdc\xc6\xd5\xf9\xd2\xc8\xceh\xc8h\xc3\x10\xc11\xc0\xf7\xbf\xad\xc1\x9a\xc7J\xd01\xd9g\xe2\x0b\xee\xd1\xfdP\x0e \x1b\xd3#<*\xe7.\x061\xab0O-\xd4&\x1d\x1e\xd4\x14\xb8\x0b\x07\x03\xaf\xfa\x90\xf3\xb3\xec\x8e\xe5h\xde\xa5\xd8\x9b\xd4\xa8\xd0F\xcc$\xc8]\xc6\xd3\xc7\n\xcc^\xd2x\xdb\xca\xe6[\xf2=\xfdt\x08#\x14*\x1e\xae$\xfb\'\x87)\x9f)\x8c(\x17&\x9e"T\x1e\x97\x19\xcd\x14\xcc\x0fr\n\x88\x04\xc7\xfd\xed\xf6\x99\xf0\xb9\xeaY\xe5\x1b\xe1\xfd\xde\xc2\xde\xe8\xdfv\xe2X\xe7\t\xee\xfd\xf3\xd5\xf8\x02\x00<\r\x1a\x1c\xb5$\xb4%\xdb%\xe8)\xab-p+\xc7$\xf3\x1eY\x1an\x13=\nl\x03\x1c\x01\x97\xfeh\xf7\xaa\xed\x9e\xe62\xe4h\xe2\x01\xde_\xd9g\xd8\xbb\xdb1\xdf\xff\xe1\xc0\xe7\xaa\xf1\xd9\xfb\x14\x01&\x03\xf1\x07=\x0fO\x14^\x13\x07\x10\xf9\x0e\xf5\x0el\x0c\xef\x07\x12\x05-\x04\x9e\x01\x0b\xfb\xdb\xf3\xbb\xefS\xed}\xe8\x89\xe0\x91\xd9\xfd\xd6A\xd8\xcb\xd98\xdc0\xe1L\xe8\xe0\xef\xf1\xf5\xad\xfby\x01\xd7\x05\xef\x08\x18\n\xa8\n\xb8\x0b\xb6\x0eI\x14\x93\x1aO%\x116\x08H\x90S\xbeV\xd6WVX\x86R\xf6C(4\xa2(\xb6\x1d)\x0f>\x01D\xfbx\xfa(\xf6\x1e\xed\x90\xe3l\xda\xac\xd1\x84\xc8\x97\xc1\xa1\xbf\xa1\xc1\xf9\xc6\xe0\xce\xda\xd8\x14\xe6i\xf5n\x02\xdb\n\x9c\x0f$\x12\xe7\x132\x15\x98\x15\x06\x16\xe9\x16\xef\x18\x1e\x1b\xc3\x1aM\x18K\x15+\x10N\x06-\xf8\x16\xea\xe8\xde\xcd\xd5\x17\xce\n\xcak\xcb\x92\xcf\xa9\xd3\xea\xd6W\xdb\xb9\xe0\x11\xe51\xe8\x9e\xeb\xac\xf0\xe7\xf6\xcf\xfe\x9a\x08\xcb\x13\x0f\x1e\xe2%\xc1*\xc7,w+\xa0\'\x94"\xdb\x1c\xa8\x16U\x10\x95\nd\x06\xa8\x03K\x01\xb0\xfd\x9d\xf8-\xf3\x9a\xeeE\xea\x9a\xe6\x03\xe4\x92\xe3\x17\xe5)\xe8s\xedz\xf5P\xfeP\x06\xa7\x0b\xf5\x0e4\x11_\x13\t\x16_\x17U\x19\x02\x1e/%\xb5*\xc7+5) 
$\xb0\x1b/\x10\r\x04/\xf9\xc2\xef]\xe7\xc2\xe0\xa8\xdc\x99\xdb\xa1\xdb\xac\xdcK\xddu\xdd\x0b\xde\xb9\xde\xb4\xe1\xd3\xe6\x0e\xef"\xf9\xb9\x02M\x0bB\x12\x8f\x17[\x1a\x90\x1a\x1b\x19\x1e\x16\xda\x11\xa2\x0c~\x08\t\x06\xf8\x03\xc1\x00\xbf\xfb\x84\xf5\x87\xee\xd9\xe7k\xe2R\xdf\x1d\xde\xd8\xdd\xf9\xdeA\xe1\x96\xe5m\xeb^\xf0l\xf4r\xf7\x03\xfb\x87\xfe\x03\x02\x94\x06\xa2\n\xc2\rZ\x0f\x0b\x10=\x11\xd5\x12\xbe\x13~\x13\x11\x10I\x0c\x02\n\x8e\x08Q\rf\x1e\xd55\xf9B\xc9;,+_$\x07&\x0c#d\x1b\xd8\x17\xe1\x19\x8a\x18\xd6\x0e\x11\t|\r\xef\r\xc7\xff\xb0\xe9g\xda\'\xd6\xa9\xd6\x83\xd8#\xdf\x9f\xe6\x90\xe8\x04\xe5\x88\xe1\x92\xe3\xfb\xe9\x8a\xed\x0e\xec\x9f\xeaX\xee\x97\xf7\xfa\x02\x04\x0eg\x163\x18\x06\x13\x9b\x0c\r\t\xca\x07\x86\x07\xa2\x06\xc4\x03\xa5\xfe#\xf9\xe3\xf7l\xf9\xc4\xf8\x9b\xf3\xc5\xeb:\xe4\x15\xdfR\xde/\xe2\xe2\xe89\xefX\xf3L\xf5k\xf7z\xfb\x93\x00\x98\x04\x07\x07\xd8\x08\xd5\n\xd0\rS\x12\xe6\x17\x89\x1b\xaf\x1a\x1e\x15\xdf\r}\x07\xde\x025\xffU\xfc#\xfa\x1c\xf8\x8b\xf5\xf0\xf3\x0b\xf4@\xf4)\xf2I\xee\xe5\xeb\x1a\xec\xbb\xee\xc3\xf3\xe7\xfa\x08\x03\x88\x08\xb2\n\xc3\n\xa4\x0b\x18\x0e\xf8\x10\xe4\x12\xe1\x13\xf0\x15\xfb\x18\x1d\x1au\x19\xd9\x17\x0b\x16\xe1\x10\xe0\x07\x85\xff\'\xfaI\xf7\xcf\xf4\xc9\xf2^\xf1\xa7\xef\xd1\xec\xc2\xe9\x1c\xe7\xd7\xe6\\\xe8\x05\xebd\xee\xc9\xf3\xd2\xfaU\x01c\x05\xaf\x07\xce\t6\x0bL\x0b\x8e\n\x80\nh\x0bG\x0c\xfb\x0b\xa0\nW\x08\xf3\x04C\x00\xcd\xfa\xa9\xf6\xb3\xf4=\xf4j\xf4\xa4\xf3_\xf3\xd7\xf3T\xf4\xc3\xf4\xc3\xf3K\xf3\x10\xf4\x91\xf6\xb1\xf9\x9d\xfc!\xff\x8d\xff\x18\xff \xfdQ\xfc\x10\xfd\xd4\xfcn\xfc8\xfb`\xfb\xba\xfbD\xfae\xf7\x98\xf5\xb2\xf5\xf7\xf5V\xf5\xd1\xf5w\xf88\xfc\xb1\x03y\x15D.\x01=\x9c7\xc6\'\x7f#(+\x821\x0f3\xe17uB\xe0@s.\xb7\x1bR\x167\x13x\x03\x02\xf0\x83\xe8\'\xecX\xec\x12\xe6\x9e\xe2\xa5\xde\xb9\xd4:\xc6\xfe\xbeo\xc6-\xd6,\xe4t\xebJ\xf0v\xf5=\xf9G\xfb\x16\xff0\x08\xbc\x10.\x16\xe2\x1b\x81"?\'\x9b&3!\x1f\x18:\r\xc1\x047\x02*\x03o\x02\xac\xfdP\xf5!\xeb\x9d\xe0\xd1\xd8k\xd5\xd4\xd5\xb2\xd7\x97\xd9F\xdc\x92\xe0\xc9\xe5\x05\xe9k\xea\xc8\xec\xe6\xf1,\xf8g\xff\x1c\x08\x1d\x11\x1b\x16/\x16h\x14;\x13\x8e\x12F\x12\x8f\x12)\x13}\x12I\x0f"\n\x88\x04\x16\x00\xca\xfc~\xf9R\xf6\xd6\xf4O\xf5\x82\xf5{\xf5\xbb\xf5\x04\xf7\x9c\xf7\xaf\xf7\x18\xf9\xa1\xfc\xcb\x00y\x04\xf6\x06e\t\'\x0b\xc4\x0c\xfd\r\xda\r\xb9\x0c\x8c\x0c}\x10\xc2\x15\x80\x18d\x16\x92\x11\xcc\x0b\xb9\x05\xc0\x01_\x01\xce\x02\x13\x02\x94\xfd\x0b\xf8\xda\xf3\xfb\xf0\x18\xf0w\xf0\x19\xf1C\xf1D\xf1n\xf2L\xf4P\xf6j\xf8$\xfa\x04\xfb\'\xfc\x88\xfe\xc7\x01\xe8\x03\x10\x05\x9f\x05:\x05\xe9\x03]\x02#\x02\xb4\x02\x9a\x02\x0b\x01\xa2\xfe@\xfc\x17\xfan\xf8(\xf7\r\xf6R\xf4\xe6\xf1:\xf0\t\xf0\xce\xf0(\xf1p\xf1\xea\xf18\xf2\xb3\xf1\x9f\xf1\xb3\xf3c\xf7P\xfa\xe8\xfb0\xfd\xda\xfe&\x00\xeb\xff\x0b\x00\xdd\x00u\x03]\x06\x12\x08\x9b\x08\xef\x06\xfb\x057\x058\x04\xd7\x07\xf7\x16\x801e?\'3\x0b\x1a\x1e\x10,\x1a\x92$\xad+R7MA\x1d5\x8d\x16\xeb\x04I\t\x9a\x0e\xcf\x05-\xfdi\xff\x89\x01\xbb\xf6\xda\xe7^\xe2\xae\xe2z\xdc\xa8\xd2S\xd2\x0b\xe0\x08\xee-\xee\x15\xe6\xd8\xe1z\xe3S\xe5;\xead\xf8X\x086\rq\t\\\x08\xee\x0cn\x0e\xdd\x0c<\x0f\xaf\x14q\x16\xd2\x11\xca\x0e\xb1\x0e\xe2\x0b\xae\x01@\xf6x\xf1\x80\xf1)\xf1U\xefT\xee+\xec?\xe4\xab\xda\xf6\xd7O\xdd]\xe4\xc4\xe8\xa5\xecq\xf0\xb7\xf1\x92\xf0\xa2\xf1y\xf7;\xff\xef\x05S\x0b\xfb\x0fY\x13\x81\x13x\x12\xa1\x12?\x14\xb7\x15@\x16\x0f\x17\xf5\x16\xc8\x13#\x0e>\t]\x06\x9f\x03(\x01:\x00,\x00\xaf\xfdH\xf9\xfa\xf5"\xf5\x88\xf4\x8c\xf4\x81\xf6\xfb\xf9\xc4\xfb\xa9\xfb\xb9\xfc\x05\xff\xa2\xfe6\xfe\x8a\x01Z\nZ\x0f\xa8\r\xff\ni\t\xd5\x07\xa3\x04\x1b\x08r\x0fw\x11J\x0b\xac\x03\xcd\xff~\xfc\xbc\xf9c\xfcM\x01\xff
\x00\x87\xfb \xf7\x9f\xf6\xe7\xf5\xd6\xf4\xa7\xf6$\xfaN\xfb\xb2\xfa\xf9\xfb\xe3\xfd\xc2\xfd\xdb\xfb\xd0\xfbk\xfd\xe5\xfe\xa9\x00\xed\x02\x0b\x04\xd8\x017\xfe>\xfc\xc4\xfc\xb5\xfew\x00Z\x00T\xfe\xcb\xfa\x89\xf8\xee\xf7\xd0\xf8\xf0\xf9\xd8\xf9|\xf8}\xf6"\xf62\xf7\xab\xf8]\xf9R\xf9K\xf9A\xf9f\xfa\xf2\xfc%\xff\x94\xff\x9c\xfem\xfd\xef\xfc\x84\xfc\x97\xfdJ\xff\x08\x00\x8b\xfe\x0c\xfc\xab\xfb\\\xfbM\xfb\x9b\xfa\xff\xf9Y\xf7\x94\xf2\x1c\xf4\x81\xff\xd0\x11N\x1f\x0f d\x14\xa0\x03\xf4\x02\xc9\x17\xa15\x88Aa8\xc0+\x9c!\x02\x1a\xd9\x15b!\xcc2F19\x1b=\x07\xe7\x06\x94\x07\x0e\xfd6\xf2\x9c\xf4$\xf82\xee\xf2\xe2n\xe4\x11\xe7\xd9\xda\xb8\xcb\x88\xd0\xd9\xe3A\xee\xc0\xe9S\xe5z\xe6\xa7\xe3\xcb\xe1n\xec\xf0\x01\xd7\r\xf2\x07J\x00w\x01\xfd\x05R\x06R\t?\x13\xad\x1a\x13\x15i\t\x93\x03\x90\x03@\x02\xf5\xfe\xd6\xff\x95\x02[\xff\xb0\xf5\xc0\xed^\xeb2\xea-\xe8\x81\xe9\xd1\xee\xeb\xf1\xac\xed\xe3\xe7?\xe7\xac\xeam\xee\xd0\xf2\x0f\xfal\x00\xb1\x00p\xfd\xf9\xfc\x11\x01\xab\x05\xa3\t\x80\x0e\xec\x12]\x13\xa9\x0f\x96\x0cQ\x0cm\r\x9a\x0ek\x10\x94\x12\x19\x12\xb1\rf\x07<\x03\xa3\x02Q\x04\xcf\x05e\x06U\x05t\x02I\xfd\xf2\xf8O\xf8\xeb\xfa8\xfd.\xfeg\xfeI\xfe`\xfbP\xf8\xb3\xf7\x84\xfa\xe0\xfd\xf8\xff\x0e\x03\x8d\x04\x95\x03\xe6\xfe\xb6\xfc\x9d\xff\xe4\x05W\x0c\xaf\x0f\x9b\x0e8\x08X\x01\x1e\xff\x9d\x02\x97\x08 \x0cx\x0b\x7f\x05\xf4\xfd\x0f\xf9\xaa\xf8h\xfbs\xfd\x95\xfey\xfd\x15\xfa\xa5\xf6\xc7\xf4\x00\xf6X\xf7m\xf8\xb9\xf9\xf6\xfa*\xfb\x15\xfa4\xf9*\xf9\x88\xf9\x93\xfa\xd7\xfcF\xff\xad\xffu\xfd\xe0\xfa\xb0\xf9\xb4\xfa\x9e\xfc\xb7\xfe\xcf\xffJ\xfe\x9e\xfbg\xf8l\xf77\xf8\xad\xfa\xe5\xfc\x9d\xfc\x8a\xfb\xc6\xf8\xab\xf7@\xf6\x86\xf6m\xf8F\xfaz\xfc\x94\xfc\xd2\xfc%\xfc*\xfa\xb0\xf8.\xf9\x98\xfc\x19\xfe\x0b\xfe!\x06\xaf\x15\x02\x1d\x13\x0fn\xff\x9c\x05u\x19\xc3\'&//6\xf3/X\x17\x82\n\xcd\x1c\xa55A7F-x(B\x1d\x07\x07b\xfe_\x0f\xfd\x1b\x06\x0f\xe8\xfd"\xfb\x13\xf7~\xe7P\xdf\xb3\xe7\xbb\xed\xf0\xe3\xf3\xdc\xc4\xe2\xb2\xe5\xeb\xdb\xd2\xd3\xec\xda\x81\xe6\x9d\xec\x8b\xef\xeb\xf3\\\xf4\xea\xedG\xebe\xf4W\x03\xff\r\xae\x0eF\x08$\x02w\x00\x05\x04$\x08\x9a\x0b\x0b\x0eY\x0c\x90\x04\x83\xfc\x01\xfb\xa3\xfcE\xfal\xf5\x85\xf5\x89\xf8\xc7\xf6/\xf03\xec\xcd\xeb\x11\xea\x9f\xe8\x16\xed1\xf5\xae\xf7\x08\xf3\x8f\xef,\xf1\xad\xf4>\xf8q\xfe\x00\x06\xbf\x08\x97\x05\x17\x03\xb1\x05\x1e\nu\x0c\xa6\x0es\x12"\x14\xe4\x10\xcb\x0c\xdb\x0c?\x0f;\x0f\xef\r\x91\x0e\x05\x0f\x9d\x0b\x83\x06\x12\x05(\x06\xba\x04\xba\x02\x03\x03\xe4\x04f\x01y\xfbM\xf9\xeb\xfa\x8f\xfaL\xf9\xdb\xfc\xc9\x02^\x00\x93\xf7P\xf4y\xf9\xd0\xfee\xff\xd0\x02X\x07\x07\x05m\xfd\x86\xfc#\x03\xc5\x07+\x06[\x06\x0f\t\xb3\x06\x12\x01\x00\xff\xf9\x02?\x04\xdc\x01r\x01}\x02&\x00\x9e\xf9\x10\xf7\x1c\xf9\xc4\xfa\xf2\xf9\x08\xf9\xd3\xf8\x8b\xf5f\xf1-\xf0\xc2\xf2\n\xf5(\xf5\x0c\xf5S\xf4\xff\xf26\xf2\xa7\xf30\xf7\x0f\xf9Q\xfa\xbd\xfaQ\xfb\xfd\xfb\xc4\xfc\x05\xff\x9f\x00\x94\x02\x89\x03%\x04\xcb\x03\x10\x03(\x03\xb7\x03\x99\x04W\x05\x12\x06\x86\x05,\x04r\x02c\x02\xa4\x02L\x03!\x04\x8c\x04\x03\x045\x02o\x02\xa5\x03V\x04C\x04\x1d\x04\xdc\x04[\x03\xc8\x03;\x05\xe6\x06\xa4\x07\xbc\x07\xcf\n\xfe\n\\\x0c\x84\x119\x17\x8f\x16\xce\x0f\xef\r\x9d\x12\xe5\x15\x99\x16m\x19q\x1b\xa0\x16\xe0\x0bk\t1\rI\x0e\xe9\n\x0c\x08\x9c\x08\xc8\x02\xe5\xfa\xd0\xf7u\xf9\x9c\xf8\x07\xf3\xf5\xf0%\xf2\xf2\xf0,\xec\x81\xe8\xf8\xe8\xda\xe9E\xe9\xb9\xe9*\xed\x80\xef\xfb\xec\xba\xe9\xcf\xeaQ\xf0\x86\xf3\xd3\xf4V\xf7\x90\xf9P\xf94\xf7\xbe\xf9J\xfe\x12\x00\xa0\xff\xe4\x00\xe7\x03\xa8\x032\x01\xe1\x00\xb8\x02\xd4\x02\xf6\x00\x88\x01\xfe\x03\xc6\x035\x00\xc5\xfdt\xfe\xa2\xfe\x03\xfe`\xfe\x80\x00\x98\xffz\xfc\x11\xfb\xcf\xfcf\xfe\x0f\xfe\r\xffg\x00\
x1d\x00L\xfeH\xfe\xfb\x006\x02\xe7\x01\xe7\x01S\x03\x02\x04A\x03\xed\x03a\x05\xf4\x05\xcf\x04\xe4\x04"\x070\x08\xf1\x06\x98\x06\x9d\x07_\x076\x06\r\x06\xa0\x08\xdd\x08\x80\x06\xfe\x04\xc5\x04\xdf\x03E\x02\xf6\x01\xde\x02\xb6\x01\x94\xffX\xfe\xbf\xfd\x97\xfcd\xfb\xf6\xfbE\xfcd\xfc\xc9\xfb\x0c\xfc\x81\xfb;\xfa\xe2\xf9I\xfa\xa3\xfbX\xfcc\xfd\x9b\xfd0\xfd\x12\xfcP\xfb\xe9\xfb\xc7\xfc4\xfe \xff0\xff\x92\xfe7\xfd\xa9\xfc\xfe\xfcA\xfd\xcd\xfd\xc9\xfd\xa6\xfd\xbc\xfc;\xfbj\xfa\x9d\xf9\xfd\xf9[\xfag\xfb]\xfc\r\xfcP\xfa\x96\xfa\\\xfa*\xfbA\xfd\x01\xff}\x01s\x00\xf9\xff\x07\x00\xe4\x00j\x01\xa3\x01\xa1\x02L\x04\x81\x04\xdb\x03*\x05\x05\x05\xb1\x03N\x04\xc3\x05:\x06\xeb\x04\x89\x05\xc9\x07F\x07+\x07\x0e\x07i\x06?\x05\x81\x06B\x08\xbf\x08\xd6\x06\xcd\x030\x04l\x06m\x06\xbc\x052\x07\xb5\x06\xb5\x01o\x00\xff\x03X\x01\xba\xffO\x03\xb0\x06t\x02]\xfb\x9d\xfd\xf7\xfe\xe5\xfd\xf3\xfc\xa5\xfe\xfe\xfe\xb2\xfc\xf0\xfb\xeb\xfe\xa2\x02\x88\xff\xa7\xfb{\xfd\xe1\x00\r\x01Z\xff\xb9\x01\x0c\x04B\x02\xdc\xff/\x02\xee\x03p\x00\xcb\xfep\x01\xed\x02\x1a\x00x\xfd&\xfeG\xfe~\xfbF\xfb/\xfc\xc8\xfa)\xf9\xec\xf8\xcb\xf9\x1b\xfa\xdf\xf9\x84\xf9\xf0\xfaz\xfa \xf9W\xfa4\xfc \xfd6\xfe\xd3\xfe\xfd\xfe\xa4\xfe\xbf\xfc?\xfe\xc3\x00n\x01\x0c\x01l\xff\xb4\x00Q\x02"\x01\x7f\x00~\x00\xde\x00\x1b\x00&\xff\x9b\x019\x02\xd1\xfe\xd4\xfd\xba\xfe&\xfe<\xffr\xffK\xff\'\xfe]\xfd\x10\xfeK\xfeZ\xff\xb5\xfe\xb2\x00\x1a\xff-\xfd\x1f\xff\x92\xffS\xfeg\xff]\x01M\xfd\xd3\xfb\xce\xff,\x02\x12\xfe\xb6\xfe\xd3\x00%\xfeo\xff\x9e\xff\xa0\xffN\xfe\x9d\x02\xfa\x00\x00\x01\xf3\x03/\x00\x92\x00\xaa\x05\xa8\x03\xc8\xff\x98\x01\xce\x04\xbf\x05/\x00U\x06\xdd\x06\x12\x00\xb4\xffq\x02\xee\xff\xb9\x00\xfe\x03\xc5\xffN\x00\xa3\xff6\xff\xc3\xfb?\xfeX\x00\x16\xfd\xde\xfeg\x02\x9a\x00\xce\xfb\xc6\xfc\xdc\xf9W\xfd\xf5\x01b\x00\x00\x03I\xfe\x17\x01\xcb\xfe\x80\xfb\x93\xff\x8f\x002\x08!\x01s\xfd\x0e\x05\xcc\x04\x04\x00\xa3\xfd\'\x07~\x04\xaf\xfe\xa2\x01\xfa\x04X\x04\xf0\xfe_\x02\x1c\x05\xcc\x01\xd3\xfb\xee\x07\x10\x05\xa3\xf8B\xfe\xbb\x04\x90\x03\x9b\xfbW\x06\xc9\x03\xa3\xf5V\xff9\nG\xf9?\xfc\\\n\x90\xfc%\xfc\xd4\x028\x05\x0f\xfco\xfa\x8b\x04\x93\xfe\x1a\xfd%\x04\xba\x04/\xf9\x98\xfeJ\x00p\xf9\x86\xfe\xc0\x00\x04\xfe3\xfd\xcc\x01{\xfd\n\xfa\xba\xfe\x87\xfe\x90\xfa\xcb\xff;\x02\xb6\x00R\xfdx\x01\xd2\xfd\xb7\xfd$\xff\x98\xfd\xb4\x04\x9b\xfen\xfd\xed\xffK\x00\x1a\xfe=\xfe\xe7\xfb\x98\xfeI\x01\x87\xfdj\xff\xec\x03\xf1\xfd~\xfa\xea\x02\x00\xff\x10\xfa\xf5\x00\x1a\x01\x00\xffL\x01\x92\xfe\xc9\xff.\x04\x04\xfc\x8f\xfd\x96\x02I\xfe\x97\x06\x99\x00x\x02\x93\x01\x1d\xfe\xc9\x00F\xff\xca\x04\xf7\x04y\xfe\xef\xff]\x03\xe0\xfa\xd0\x01\xbe\x00&\xfd\x01\xff\xe3\xfc\xaa\x02r\x00S\xfc\xd5\xf8G\xfb\xf9\x00\xbd\xfb\xde\x01\xd9\x03\x88\xf9J\xfbj\xff\xd2\xfb\xc2\xfa|\x03\x15\x04\xcb\xf9=\x00|\x03W\xff\xea\xfb\xcb\x00z\x01\xa7\xfbU\x04\xaa\x07\xed\xfa\x96\x01\xfd\t\xca\xfb#\xf7\x1b\x06\xd6\x03\xe8\xfd\x1f\n\x80\x00\xf5\xf6\xef\t\xf5\x04\x8f\xf99\x02\xb4\x02g\xff,\xfa\xb6\x0bX\x0c\xbd\xf8a\xff[\x04\x16\xf3\x1e\x00\x95\x0e\x83\xf9\x87\x06P\x06\x07\xfc\x1c\xf9\x04\x01\x93\xfe\xd4\xfc\xfa\x06\xfd\xf9>\x02\xc8\x05\x17\x01E\xfa\xa9\xef\xa7\t\x9e\xfd,\xf8t\x0e\xfb\xfa\x1a\xfa\x93\x02\x0e\x01\x1a\xf4\xce\x03\xfa\x021\xf7\xfe\x00\xcb\r~\x01\xc8\xf6\xa5\t}\xfb\xd4\xf0\xa3\x0c\xf0\x07\xc5\xf7\x04\x14R\x02B\xf7\xf3\xfc\xf1\x05\x83\xfb)\xfcJ\n\xea\xfeT\x02v\x01N\x05[\xf8n\xf2\xdf\x01\xdd\x040\xf9\xc2\t9\n\xc8\xf0-\x01\x17\x08\xa9\xf6C\xfeF\x05k\x02\x99\xff\n\x03A\x06\x9b\xfb\x80\xffv\xf7\x07\x02\xf5\x00\x96\xfb\x89\x0c\x06\xfc\xd2\xf6\xec\x02H\x01$\xf7\x0b\t\xf5\xfc\x10\xfcB\x00\xa9\xfc\x0c\x07\x10\xfb\xa3\xf
eO\x01\xc5\x04\xaa\xfa\x8b\x02\x8b\x07\xa8\xf8w\xfd\xf1\x03\x07\xff7\x04\x95\n\xa4\xfbs\xfa\x91\x02\x8a\x02\xc2\xff\x95\xfd@\xffx\x06v\xffh\xfc\xcc\xfcM\x04\xb1\xfb\x16\x00\x9a\xfe\x0c\xfbx\xfar\x03\x1e\x06}\xf8Z\x00:\xfd\x1d\xf9\xa3\x00c\x05\xe6\xf3@\x06?\x08\xf4\xf4^\xfe\xd0\x03\xb7\x04\x19\xf9\x8b\xfd\xdf\x05\x00\xfc\xe5\xfe\xd7\n\x85\xfe\xc3\xfc\t\xfe\x18\x00\xfc\x02\xfd\xffz\xfb.\x0c\x7f\x03\xeb\xf3\xf4\x02\x06\r\xfc\xf7D\xf0\x1a\x15\xaf\xfc\xad\xf9\xd3\x06\x0f\x07[\xff\x1b\xf9o\xff\x87\xff\xc1\x01\xac\xfb$\x0b.\xff\x7f\xfe\x8b\x00\x17\x04\xb5\xf7@\xf6\xcf\x0f\xe6\xfe\x14\xf5b\x05e\x13\xf8\xee\xbf\xf1\xbe\x18\xbd\xf9\x15\xe7\x85\x13\x04\x0b\x81\xed\xa7\x05\x00\x0b\xbc\xfc\x8c\xf2l\x06\xbb\x00~\xf3H\t\xa2\t\x14\x00\x8a\xf8D\x05)\xff\xb0\xe8\xa4\n\xee\t\xb3\xf1\\\x0c~\x08\xd2\xef\x0f\xfd}\x03P\xfb\x91\xf7\xce\xff\x17\x0b?\x01g\xf7\x9c\x08V\xfb\x01\xf4\xff\x08K\xf8\x18\x03\x81\x08\xce\xf77\x02o\t\xa1\xf5q\x03\x13\xfd\xee\xf6\'\x0c_\xfeh\x03\xe8\x06\x14\xfa\xf4\xff\xb5\x04\x1b\xf6\xd9\x07\x18\x03A\xf8%\x039\x087\x03o\xf9\xf6\x07\x8f\xfd\xd1\xf8\xb8\x01-\x03\xe2\x00d\x03\xb6\x07\xe0\xf7\xb8\x01\x18\xff\xb5\x00\xa1\xfb3\xfd\xa2\t?\xf9j\x02\x8d\xfd\xc4\x06\xeb\xfci\xf4\xf6\x02\x88\x03\xe7\xfd~\xfe&\x01\x11\xff\xc1\x03[\xfd\x16\xf8\x99\x03\x9b\x03^\xf8H\x06\x8f\xfe\x9f\xfc\xa8\x073\x01\xb0\xf2?\x0b0\x07\xee\xe9\xfa\t{\x06\xa5\xffT\xfe\xa8\xffd\x05\xfe\xfb\xfc\xf9\xcc\x04M\x000\xfcW\x06\x8b\xfb\x9b\x03\x13\x00\xcc\xfbF\x010\x01L\xf2Z\n\xa3\x04~\xf7Z\x07\xd0\xff\xcb\xfc\xb5\xfd(\xfd\xe1\xfc&\x04/\xfd\xe5\x06b\xfc\xa5\xfcR\x08\xa8\xf7f\xf6\\\x0b\xf2\xfeM\xf9s\x07\xfd\x07\xfe\xf9\x08\xf7\xf3\n#\xf9\xaf\xff\xfd\xfe\x8f\r\x7f\xf8q\xfa\xe5\x11\xb9\xf7\xf9\xf3\x1a\x02\xa3\r\xe3\xf3o\x04\xbb\r\t\xf7\x7f\xf8\xcd\x05\x00\x009\xf7\xe8\x05\xc1\x06\xc5\xfd\x9b\xfb\x8f\n\x11\xfdC\xed\xf8\x0c\xf5\x02\xc1\xf3\x14\x06\xb0\n\x1e\xf8\xfa\xf7\xe1\n\xdb\xfe\xbb\xf1\x9f\x01\x11\x0e\xd0\xf2\xff\x00\xc6\x0f\xcc\xf7:\xf7\xc3\x0co\x00\xe4\xefL\x058\x11d\xf3\x07\xfb\xe5\x1f\xd3\xe8\x04\xf5\xfc\x1ah\xf1\x08\xf8\xaf\x0c\x80\x03\x08\xfa6\xfb\x0b\x0b\xb2\xfc`\xf6\t\x08\x95\xfc\xdc\xf3\x90\x07\x1c\t\x9f\xed\xaa\t\xde\x02\xa3\xf4\xef\x008\x04}\xfb\xd2\xfeF\x070\xfb@\x02\xf9\xfe\xf3\x04\x8b\xf7c\x07k\x01Z\xf9O\x03\x0e\t\xc6\xfa\x91\xfcr\n\xc6\xfa\xbd\xfeX\xfc%\x07\xfa\xfcC\xfc!\t\x18\xfdX\xf8V\x0b\x1e\xf7e\x01\xc2\xf8#\x08g\xfe\x05\xf8y\x0c\x95\xf7\x1a\x07\xeb\xf2\x92\x04\xec\x00\xf8\xfd\xcf\x01\x9b\x03\x00\x01a\xfb\xc3\x01Z\x04\xaf\xfa<\xfe\x11\x08=\xfd\x8c\x00&\xfbQ\x08\x9f\x02\x0c\xfa\x02\xfc\xb5\x06\xee\xfd\xfa\xf3g\x0e\x91\xfe\xdb\xf7\xbd\x07\xda\x04j\xf3\xf2\xff\x16\r\x1e\xf35\xf8,\x12\x00\x02!\xf4\r\x07\xba\x07\x03\xf2\x0b\xfa\xe6\n\xda\xffS\xffC\xfb-\x0b\xb1\xffO\xf6:\x03\xcc\xfe\x8e\xfe\xb7\xfd\x82\x04\xed\x00s\x08\xf5\xf3\x14\x02\x81\x07d\xf1/\x03\xee\x05d\xfe\'\xf9\x91\x06\x05\x06\'\xf7\xfc\x00<\x01\xa6\xf8-\xfd\x03\x0b\xbe\xfc)\xfd\x80\x051\xf9\'\x01`\xfdt\x04\x87\xfb\x00\xf9c\x0c\x00\x04&\xf0\xe4\t:\x05M\xef`\n\xf6\x00\xbb\xf8A\x01\xf1\x07Y\xfd\xe5\xf9\x15\x07\x9a\x022\xf2\xbc\x076\t\xd6\xef\xac\x03\xf9\x11U\xf3\xa5\xf7\xab\x0f3\xfe+\xf1\x13\x07\xdf\r\x0b\xf5}\xfa\x16\r\x07\xfe\x8e\xf1\xbd\t\xdb\x07\xe9\xf8\x7f\xfa\x95\r\xd1\xf9B\xf6\xd5\r8\xfd\xf7\xf5\xc2\x05\xad\x07\x82\xf6~\x03d\n\xa3\xf2\xa8\xfel\x00\x85\x07j\xfa\xee\xfb\xaf\x0f\xd1\xf4n\xfc\xbf\x06\x86\xfe\x1d\xfa\xeb\x035\xff\xda\xfd\x93\x02\xd6\x00\xe3\xfeZ\xfb\xfa\x01\xa7\x03m\xfeO\xf3\xa5\x10\x96\xfe\xa5\xf2s\t\x9f\x04\xc8\xf7\xd7\x02\xfb\x03\x0e\xfby\x02\r\xfa\xf8\nx\xfa\xba\xff\x85\x0b3\xf6\x1f\xff+\x04X\x00\xc1\xfa\xd7\x05\xb0\x01P\xfdp\x03\x1f\xfdr\x03
e\xfa.\x01S\x01 \xffR\x00\xc1\xfc\xe9\t\xb4\xf9\x7f\xfd\x0b\x02\x9e\xffD\xfaq\x05<\x01V\xf5\xf2\x0bp\x01\xfa\xf7\xe9\x04%\xff5\xf8\xe9\x06\x03\xf88\x06l\x04\x82\xfb\xaf\xff\xad\x02=\x02\xd5\xf2\x98\x07\xa7\x07\x94\xf1\xeb\x02n\t\xab\xfa;\x01L\x04C\xfaJ\xfd-\x008\x00\xd8\x01$\xfc\x02\x06\x1e\x02\xc9\xf8^\x05|\x01G\xf8&\xfb\x99\x08D\xfd\xce\xfa:\x13.\xf5\xf5\xfcU\x08\xf3\xf6y\xfeT\x01\xf2\x07\xe6\xfcP\xff\x0c\x07\x02\x02\xde\xf4\xa1\x04\x17\x01k\xf8s\t_\xfe\xab\x01\xcf\x05\x8c\xf8\xb5\x01\x19\xfe{\xfe\xbc\x055\xf7\xc6\x08\xf9\x04Y\xf9\xaa\xfd\xa2\x08\xc1\xf6\xed\xfd\x0f\x07\xd9\xf8\x16\x08\x83\xfe~\xfc\xf0\x02\x8b\xfe\x92\xf8\xe6\x07\xd0\xf9y\xffF\x08V\xf6t\x05c\x03\x7f\xf5\xf7\xff\xd0\x05s\xf6\x98\x01\x1e\x06\x84\xff\x1a\xfd\xd2\x00\x94\x02\xb4\xf8\n\x01\xc3\x06\xce\xfa\xd4\x02\xbf\x04\x10\xff\x17\xfc\xd7\x03\xdd\x02\x0c\xf5\xd1\t\xe3\xfd\xd9\xfd\xc6\x01\x89\x00\r\xffG\xfe\x0c\xfe\xe3\xff`\x02\x86\xf7\x7f\x0bs\xf8\xfa\x00\xbd\x05\xcb\xf6\xcb\xfd4\x04\xb1\xfe6\xfe\x1c\x06l\xfb\xcf\xff\x1b\x02j\x00\xeb\xfd\x9e\xfc\xdd\x05\xbc\x03\x12\xf75\x07z\x06\xc0\xf5\xe8\xfe\xb0\x0b\xc3\xfad\xf9@\t%\x01\x06\xf8~\x06N\x06\xaa\xf5!\x05\x14\xff\x18\xf7\xbb\x07\x9d\x06\x1c\xf3\xf0\x05\x9d\x05\xb2\xf7}\x02N\xfd\x96\xfe.\x00~\x00:\x01b\x02N\xff\x1e\x00\x08\xfc=\xff\r\xfdo\x03\xe0\xfeN\x00r\x04\r\xfbu\x00\xe4\x01\xe0\xfan\x00>\x01.\xf8\xaf\x075\x01d\xff\xed\xffF\xfd\x15\xff6\x01(\x00\'\xff3\x00\x91\x01\xa3\xff\x1a\xff\xfe\x03|\x03\xa4\xfbi\xfd\xe3\x02\xb7\xfb\x94\x03j\x060\xfc\xce\x00b\x05\xd2\xf9\x02\xff\x9d\x06\xc0\xfb-\xff\xba\x06\xaa\xfa\xfe\x01\x06\x08\x89\xfa\x9e\x00;\xff\xdc\xffV\xfa\xa7\x04-\x07\x98\xf7J\x04l\x03\xcd\xfaz\xfd\x82\x05\xdc\xfb\xbc\xfad\x05\xde\x02\x11\xfbl\x04b\x02\x15\xfa\xdf\xfc\xe5\x00V\x02\xcc\xf9\x97\x060\x00q\xf9v\x06}\x00\\\xfc\xfc\xfc\x1e\x02\r\xfc\xb6\xffl\x05%\xfd\x9e\x01\xcd\xff\x02\x00\x14\xffT\xfe\xf9\xfe\xba\xfc\xde\x02\xb6\xff\'\x01\r\x04\xa3\xff\x81\xfa\x01\x02\x01\xfeo\xfa\x88\x04\x89\x00\x01\x01\xe8\xfd\xd2\x05\xcf\x00;\xf9n\x03\x19\xfb,\xfe\x99\x02S\x00\xcf\x04d\x01\x90\xffD\xff\x1b\xffr\x00\xff\xfb\n\x04<\xff\x9d\xfe\xb4\x07b\xff\xb9\x00\x94\x01\x8e\xfc\xce\xfd\xdb\xfee\x00\x1b\x00\\\x03N\x02\x12\xfe\xb1\x01[\xfdA\xfe\x0e\x00@\xfc\xc7\x00\xab\xff\xf7\x00\xc1\x02\xcc\x01\xb8\xfd\'\xfe\xf9\x00\xa8\xf8\x05\x04-\x03\xf1\xf9\xc3\x04s\x00\x16\xfe\xcf\x01\\\x02\xe5\xfcx\x01\xf9\xfb"\xfd\r\x077\xfd\x0b\x031\x02/\xfd\xc9\xff\x9d\xfey\xfe7\xff\x8c\x00*\x00\xa7\xfe\x87\xff!\x04\xa9\xff\xa9\xfc,\xff\xd1\xfd\xc0\xfen\x01\xd4\x01\x87\x00\x08\x03\xf6\xfd7\xff\xbc\x01h\xff>\x01L\xfd.\xff<\x04r\xff%\x00\xd6\x03u\xfd\xc6\xfc\xbe\x02\x9d\xfd\x1c\xff\n\x03\xcb\xfd]\xfe\xe2\x02\xbd\x00\x08\xfe\x07\x00\xa3\xff\xda\xfd(\xfe\x8a\x01\xe5\x00K\xfe\xf0\x00\xc2\x01\xb7\xfd\'\xff[\x02\xec\xfd^\xfd\x98\x02\x92\x00\xca\xffd\x017\x00\x1e\xff\xd0\xfe\xf8\xff\x1b\xff\xce\x003\x00\xd0\xff3\x01\x94\x00\xb3\xfe\xfc\xff\xc5\xff\xc3\xfc2\x02\xfd\x00#\xff\x8a\x01\x19\x00\xa3\xff8\xff\x12\x00\x18\xffC\xffT\x01\xfa\x00\xef\x01&\x00\x0e\xff\xc4\x00\x80\xff\x80\xfd\xab\x00\x8f\x02x\xfe\xbd\x01\xc1\x01<\xfe*\x00\xe7\x00\xf8\xfd\xd4\xfe=\x01&\x00\xc1\xff\xff\x00\x05\x03G\xff\xbb\xfd\xbf\x00\xdd\xff\xd8\xfc[\x00Y\x04(\xfeF\x00\x8e\x02\x88\xfes\xff\x1e\x00\xc7\xfdD\xfdr\x02\xeb\x01^\xfe<\x01\xd8\x00&\xfe\xaa\xfe\xca\xfeR\xfeQ\x00\x8e\xff\x19\x00\xc7\x00y\xff\x1b\x00\x83\xffi\xfc\r\xff\'\x00\xff\xfc\xe1\xff\x9c\x03\r\x00u\xfc\xf3\xff\xa0\xfe\x98\xfb`\xfe\xaa\xffI\xfd\xbb\xff\xcc\x01\xae\xfd\xf1\xfdO\xff\x8c\xfdt\xfd\x00\x00\x8f\x00\xaa\x00\xe3\x02F\x03\xaf\x03?\x04\x8f\x04Y\x05)\x04\xb9\x05o\x07\xfc\x07\r\tK\t\x90\x07
O\x07\x0f\x06\xba\x044\x04@\x03\xd9\x02\xbb\x00\x8a\x00\x81\xff\xce\xfd\xa0\xfc\xa6\xfa\xca\xf8\xc5\xf7\xc7\xf7Q\xf8:\xf8\xca\xf7\x82\xf8\xba\xf8\x7f\xf8\xd4\xf9s\xfa\xa9\xfa>\xfc\x82\xfd.\xff\x84\x00\xfe\x015\x02Z\x02\xa4\x03\xee\x02\xce\x03\xc6\x04\xe5\x04\xac\x04\xb6\x03\xba\x03-\x03\xc8\x02\xa9\x01\x9f\x00[\x00\x04\xff\xd0\xfe\xc9\xfe\xc6\xfd{\xfd\x0b\xfda\xfb\x0c\xfc\xbc\xfc\xc9\xfb9\xfc\xb8\xfc\xcc\xfc\xf5\xfc|\xfe\x1f\xff\xc8\xfe`\xff\xdf\xff:\x00\xea\x00\x8e\x02V\x02\x08\x02\xbc\x02\xe3\x02\xff\x02?\x037\x03\n\x02\xa6\x02\xc6\x02%\x02\x8f\x02\xed\x01\xbf\x00\x00\x01\xce\x00d\x00\xa3\x00c\x00\x90\xff}\xff\xe8\xffI\xff^\xff\x98\xff\xd3\xfe\xc6\xfe0\xff\x8e\xff\xb5\xff\xfe\xfeI\xff\xee\xfe\xaf\xfe\x18\xff\x11\xff\xf8\xfe-\xff \xff\x05\xffe\xff)\xff\xe0\xfe\xe9\xfe\x08\xff\xeb\xfe\x9c\xff\xbd\xff\xfb\xfe\x80\xff\x97\xff0\xff\x97\xff\x06\x00\xc4\xfft\xff8\x00\x1a\x00\x00\x00{\x00`\x00j\x00\x07\x00\xb0\x00\xf0\x00n\x00\xa1\x00\xb9\x00p\x00\x7f\x00\x9a\x00\xbe\x00\xc4\x00e\x00\xbe\x00\x0f\x00\x13\x00\x82\x00\xe9\xff\xff\xff\'\x00\xd8\xff\xf4\xff\x0b\x00X\xff\xa0\xffT\xff.\xff\xd8\xff\x89\xff\xb0\xff\xfc\xff\x88\xff\x83\xffo\xff\x88\xff\xbc\xff{\xff\xf5\xff+\x00\xdc\xff\xef\xff8\x00\xb8\xff\x98\xff\xdc\xff\xbb\xff\n\x00w\x00<\x00`\x00}\x00\xb6\xff;\x00\x02\x00\xdb\xffo\x00K\x00R\x005\x00\x98\x00\xfb\xff\xdd\xff\xdc\xff\xb9\xff\xb7\xff\xb3\xff&\x00\x04\x00\xa9\xff\x03\x00\xb2\xff6\xff\xac\xff\x90\xffm\xff\xb2\xff\xfd\xff\xf5\xff\x02\x00\x1a\x00(\x00\xf5\xff;\x00\x1c\x00E\x00\x96\x00\x95\x00\x87\x00\xb0\x00\xd0\x00y\x00\xca\x00E\x00n\x00\x93\x00J\x00x\x00\x9d\x00D\x00U\x00?\x00\xe3\xff\x1a\x00\x0c\x00\xc8\xff\xde\xff\xbb\xff\x90\xff\x11\x00\xa1\xff\x85\xff\xdb\xff\x1d\xff;\xff6\xff\xe8\xfe\xa2\xff\x8b\xffr\xff\x96\x00\xd1\xffy\x00\xcc\x00\x93\xff=\x00\x15\x00\xfe\xff\x1d\x04\x11\x05\xf4\x03o\x03\x11\x02\x7f\x01W\x01P\x02\xc6\x02\xc3\x02`\x022\x02\xb0\x01]\x00\xda\xfe\x16\xfc\xd2\xfa\xaf\xfb{\xfc\r\xfdD\xfd\xe3\xfc\x1c\xfb\xc5\xfaF\xfbt\xfa\x83\xfb\xb3\xfb\xea\xfa\xab\xfd\xf1\xfe\xae\xfe\xbb\xff\xd6\xfe\xcb\xfd\x0f\xfef\xfe\\\xff\xc2\xff\xe9\xffg\x00\xca\x00\x9f\x00\xe6\x00g\x00t\xff\x86\xffO\x00\x07\x01\x0c\x02+\x03\x8f\x03a\x03\xb7\x038\x04W\x04\xa3\x04\xdd\x04]\x05$\x06\xf3\x06Y\x071\x07^\x06*\x05S\x04\xc3\x03j\x03\xc7\x02\x01\x02t\x01\xbd\x00\x00\x00\xeb\xfe\x9d\xfd`\xfct\xfb\x13\xfb\x00\xfb\x1a\xfb`\xfb|\xfbr\xfbu\xfb\xa3\xfb\xde\xfbs\xfcW\xfdQ\xfe_\xffR\x009\x01\xd3\x01%\x026\x024\x02\x84\x02\x06\x03\x8a\x03\xb8\x03b\x03\xb8\x02\x16\x02M\x01\x82\x00\xe5\xff\x1b\xffP\xfe\xa8\xfd)\xfd\xa7\xfcD\xfc\xb0\xfb\xf5\xfa\x90\xfam\xfau\xfa\t\xfbN\xfbs\xfb\x0f\xfc\x97\xfc\x18\xfd\xef\xfd\xee\xfe\x1b\xff\xb1\xff\x96\x00\x0e\x01\xd7\x01c\x02\xae\x02\xf8\x021\x03F\x03a\x03I\x03\xd3\x02u\x02F\x02\x12\x02\x0e\x02\xab\x01\x06\x01\x8f\x00\xff\xff\x9d\xff\x80\xffF\xff\xe5\xfe\xd4\xfe\xc9\xfe\xca\xfe\xfc\xfe\x0c\xff\xe4\xfe\x17\xffG\xff\x90\xff\x10\x00R\x00o\x00\xba\x00\xbb\x00\xb4\x00\xc1\x00\x13\x01\xfd\x00\xd1\x00\r\x01\x03\x01\xcf\x00\x8b\x00I\x00\xf6\xff\xbf\xff\x90\xffd\xffd\xffI\xff\x16\xff\xeb\xfe\xd3\xfe\xba\xfe\x8c\xfe\x83\xfe\x82\xfe\xa9\xfe\x0e\xff#\xff{\xffb\xff\x80\xff\xbf\xff\xb8\xff\x13\x00L\x00\x89\x00\xe6\x000\x01@\x01A\x01]\x016\x01\x02\x01\x07\x01\x06\x01\xdd\x00\xd2\x00\x8e\x00\x1d\x00\xda\xff\x80\xff\xfa\xfe\xf5\xfe\xcf\xfe\xc0\xfe\xdd\xfe\xf3\xfe\x06\xff\xbc\xfez\xfe\x9c\xfe\x03\xff\xfe\xffZ\x01J\x03g\x02\x10\x01\x1e\x02\x97\x01\x17\x02\xeb\x02}\x02 
\x04\xae\x03\xfe\x02/\x03\xc0\x01\x1e\x00\xeb\xfeY\xfe\x9c\xfe\xda\xfe\xae\xfdZ\xfe\xa8\xfd\xf4\xfc\\\xfdP\xfc\x99\xfd\xf0\xfc}\xfb\xbf\xfdY\xfe\x9b\xff\x7f\x03\x80\x03\xde\x02W\x01\x9a\x00\xca\x01\x95\x01\xc8\x01\x83\x02\xa4\x02\x82\x01\xef\x01\x9e\x01\xb2\xfe\x91\xfc\xb9\xfa\t\xfa\xe3\xfap\xfb\x1b\xfc<\xfb\xa0\xfa\xaf\xf9\xcb\xf8\xa2\xf8\x84\xf8W\xf9\x0f\xfas\xfa\xe7\xfbI\xfd%\xfd\xa2\xfd\xc7\xfd)\xfeI\xff\xa5\x00j\x02\x07\x04\xb7\x05\x8e\x07"\tS\n|\x0c\xd3\rq\rc\r>\x0e\x1c\x0f\xb8\x0f\x10\x10r\x0f\xe3\r\xe7\x0b\xf4\t\x16\x08\xd6\x05:\x03\x87\x00\x97\xfe\xdb\xfd\x8b\xfca\xfa\x11\xf8$\xf6\x9e\xf4y\xf38\xf3\xa5\xf3R\xf4\x8b\xf4\xd7\xf4:\xf6\x91\xf7|\xf8i\xf9\xb5\xfaG\xfc\x1d\xfe\xf4\xff\xe2\x01\xc3\x03\xb5\x04\xc9\x04\r\x05\xd8\x05V\x06\x18\x06\xc7\x05Z\x05\xc9\x04\xfe\x03\xe5\x02\xd4\x01P\x003\xfeG\xfcg\xfbu\xfb\x18\xfb\xa6\xf9t\xf8#\xf8\xf8\xf7\xdf\xf7\xf1\xf7k\xf8\xd6\xf8=\xf9~\xfa\x99\xfcR\xfe\xff\xfe\x13\xff\xe4\xffg\x01\xa5\x02\xd2\x03\xcb\x043\x05\xf8\x05\x02\x06\x96\x062\x07\xce\x06\\\x05\xa4\x04\x90\x04A\x04u\x04V\x03E\x02\x97\x01\x98\x003\x00\xa5\xff\x1f\xff"\xfe\x9d\xfc\x0e\xfc\xbf\xfc\x1e\xffr\x00o\xfd8\xfc\xa8\xfc\xb2\xfc\xef\xfd\x1b\xfe\x97\xfeb\xfe\x93\xfe\xb2\x00p\x02\xcb\x02\xd3\x00X\xfe\xb5\xfe\x95\x028\x04\xad\x04\xe3\x04-\x03!\x03\xac\x034\x03`\x03G\x01\n\xff\xad\xff\xab\x00\xa7\x01\x88\x00\xff\xfd\xf1\xfb\xb5\xfam\xfa\x1c\xfae\xfa\xe6\xf9\x82\xf8\x14\xf9>\xfa4\xfas\xfa\x11\xf94\xf8v\xf9\x89\xfa\xa7\xfb\xd0\xfcp\xfc\xb0\xfd>\xfe\xd5\xfd\x12\x00h\x00<\xff\xf2\xff@\x01\x82\x01G\x02a\x01\x16\x01\xd3\x00\x97\x00\xbe\x02\xbd\x01\xa5\x01/\x01\xbb\x00\xf3\x01\x0c\x01c\x01]\x02h\x02\x18\x04\xc1\x08H\x0c\x82\n\xce\x08=\t\xb2\x08m\n\x83\x0b\xe2\x0c\xd2\x0e\x8e\x0b\'\x0b\x00\x0c\xde\x08\xd0\x05B\x02L\x00s\x00\x85\xff\xf1\xfeh\xfe\x06\xfc\x99\xf9L\xf8\xaf\xf7\xa1\xf7\xb5\xf5\x12\xf4j\xf5\xa4\xf6r\xf8 \xfa\xf1\xfal\xfb\xb0\xfa\x11\xfbB\xfd6\xff\xb9\xffr\x00\xb1\x01\x9b\x03\xc5\x04E\x05\x8c\x05\xa4\x04\xa8\x02\xca\x01\x9f\x01V\x02\x82\x01U\x00\xa7\xff\xc3\xfeN\xfey\xfc\xe8\xfa\x18\xf9>\xf7|\xf5\xc9\xf5\xac\xf76\xf9\x08\xf9\xea\xf8E\xf9%\xf9\x98\xfa}\xfb\xa1\xfd\x01\xfe\xd7\xff\x89\x05\xa5\t0\r\xf0\r\x0e\x0c@\x0bH\n\xfe\nd\x0cP\x0e\x01\r\xc3\x0b\x1d\x0e\x88\x0b\x07\x08\xf5\x02\xf1\xfb\xce\xf78\xf7u\xf9\x8b\xfb\x9f\xf8A\xf6\xcb\xf4[\xf2\xba\xf1\xe9\xf1\xd9\xf0\xe4\xef\x8f\xf0\x8e\xf4\x03\xfa\xca\xf9_\xfa\x9e\xf7>\xf6\x16\xf8\x0e\xfb 
\xfe\xce\xfb\xa9\xfdZ\x00[\xff\x08\xfe\x91\x01\xdd\x01\x9f\xfa\x13\xfb\x96\x01v\x009\xffR\x03H\x05\x15\x02\xf3\xfeA\x05\xba\x05\xe2\x01\xd5\x06\x9f\x08\xa7\x064\x08\xc9\x0b\x1c\n\x0e\x05\xb0\x07\x04\x08[\x05R\x08l\tK\x04\x12\x02i\x06\x1c\x05`\x04o\t\xd8\x08l\x04\x15\x07I\x0bl\n\x15\x0c\x84\x0c0\x0b+\x0b\x1a\r%\x0e\x06\r\xe0\x0b-\x08W\x06\xa4\x06\xfe\x06\xf8\x03\x82\xff,\xfd\xf6\xfb\x9c\xfc<\xfbd\xf9`\xf7N\xf5\x9c\xf3\xd2\xf3\x88\xf48\xf3\xe4\xf2\xc9\xf2\xb7\xf3g\xf5\x81\xf5\xd5\xf6\xc0\xf79\xf6)\xf6n\xf8\xcc\xfa\xc6\xfc`\xfdp\xfdo\xfd\x0e\xfe\x11\xff(\xff\xfd\xfe\x00\xfeG\xfd\r\xfe<\xff_\xfe\xe4\xfd\x0b\xfd\xee\xfb\x1c\xfc9\xfc\xa7\xfc4\xfd\\\xfdE\xfcf\xff\xef\x00\xee\x00\xf9\x02u\x02\x11\x03\xa6\x02\x16\x04\x8d\x05\xe3\x05\xf0\x06\x0c\x06\x90\x05\xec\x05\x00\x04`\x03\xf6\x01\xfd\xff[\x00o\xfd\x18\xfe@\xfe\xc4\xfa\xb7\xfb9\xf8\x88\xf6\x18\xf9\x08\xf7\xca\xf6\x96\xfa\xce\xf9\x12\xf8\n\xf9\xa0\xfb&\xfc\xae\xfa\x0f\xfek\xfd\xf2\xfe\xd3\x00r\x00\xeb\x02\xa7\x00~\xff\\\xff-\xfe\xa4\x01J\x02B\xfe\xe7\x00\xc4\xff\xc6\xfbG\xfc\xd5\x00\xe2\xfb\xe7\xfc+\x02\xfc\xfb;\xff\xc7\x03\xf8\x00\x8f\xff\x1b\x03P\x05<\x04\xdd\x02t\t\xe4\x08O\x04\xa8\t\x9d\n\xb1\x07\x19\x08\x18\n\x07\x08\x17\x06b\x07\x8a\x08!\x07 \x06h\x05\x1c\x04\x11\x04\xf7\x02\xed\x03\xdf\x04\x1d\x02\xd8\xfe\n\x05?\x05}\xfbE\x02\xcc\x04*\xfb\xa4\xfdV\x04\x0b\x00\xa8\xff\xef\x01\x0c\xff+\xff\x95\x01E\xfe\xad\xfd\xa3\x00N\x00\xfc\xff0\x023\x02\xb5\xfe\xde\x00\x1c\xfd\xd4\xfc\xa7\x03T\xfa6\xfcG\x03:\xfd\xb2\xfb\xc9\xfd\xbf\xf9\xf5\xfa\x13\xfc\xe4\xf9}\xfaf\xfd\xf9\xfd\xb1\xf9`\xffg\xfd\xc5\xfb\xc1\xfb\x10\xfdK\x01\x98\xfa!\x01\xe5\x044\xfb#\x02\x03\x06\xdd\xf6\xac\x00\xf4\x061\xfcS\xfa\xd0\n\xc2\x03-\xfb#\x03\xcd\x05\x06\x03\x8f\xf7L\x05\xe4\x04"\xfa\x83\xfdt\x05\x98\x00\xb0\xf9#\x04\x9c\x01\x00\xf8\x1f\xfb_\x04/\xf9N\xf6<\x08\xe2\xfe\x87\xf7\xcb\xfc\x92\t\x91\xf4\xa9\xf8\xe2\x06\x0c\xf7\x88\xf9\xee\x05|\xfe\x8d\xf5\xb4\x0b\xa9\xfd}\xef,\x052\x08N\xf0\x1b\xff\xf1\x0c\xa0\xf6m\xf8P\t\xe9\x04Z\xf3-\x02\xbc\t\x1c\xf5\x8a\xf7W\x13s\xfb\xe4\xef\xd3\r\x11\x06`\xf3 \x01L\x0eT\xf3\x8c\xfe\xd0\x0b\xde\xfa\xdc\xff\xb7\tM\xff\xe3\xfbM\x07\xee\x05!\xf5^\x05\x1f\x0f\xce\xf2\x88\x04\xdb\x0f%\xf1\xc2\x04.\x11C\xfa-\x01\xf9\t\xd4\xfc\x93\xfbB\x0e\x88\x02\xa7\xfeP\x0b\x16\xfe\x02\x00\x1b\n\xbb\xfe7\xf8%\n\x00\x034\xf98\x07#\x08g\xf6\xee\xfdk\x0e\xba\xf0R\xf2\xf9\x16\xa0\xf7S\xeeo\x0b\x04\x0c\x00\xeb\xb3\xfbO\r*\xef\x92\xf4\xa8\x02.\x08%\xf8)\xfd\x00\x02\xd8\x00\xc1\xf1\x0e\x01\xc8\t\xd2\xf4b\xfa+\x0f\xa1\xfeh\xf0\x1b\x10\xec\x02\xf8\xfbV\xf9\xac\x08\xed\xfe\xf9\xfdK\x02\x17\xfe\xff\xfes\x01\x1c\xff\x8a\xfaa\x07\x89\xf7\x8c\xf9\xc3\x02\x1f\x00\xdb\xfb\xfc\n\xc8\xfcd\xf7o\x0c\xe4\xfd@\xfe\x08\x03\\\xfbo\x0e\xe9\xf9j\x00\x1b\x11\xcb\xee[\x02B\x05%\xfb\x0c\xfc\xfd\x07<\x00\xe3\xf8\x7f\x01\xd7\x04\x1c\x05\x9a\xee\xa2\t\x1b\x00.\xf1/\x04K\x05\xc3\xfe-\xfa\x0e\x01H\x02\xb5\xfb7\xf7\xde\t\xd1\xfa\xf5\xf7\xed\x06;\xfen\xfb(\x07]\x06\xfd\xea4\x05\xcc\x0eW\xea\xad\x02\x08\x11+\xf1\xcf\xf9\x15\x0b(\x01\x1d\xef\x87\x068\x06-\xf5 
\xff\xfd\x04\xc3\xfb\xc4\xf8\x85\x0c\r\xfa\xab\xf1\xee\x12\x92\xfe~\xf1\xd6\x11"\x00W\xf5\xa2\x08\xb6\x07\xa6\xfc~\xfc\xd4\x0e\xae\x03\x10\xf3\xe9\r2\x06Z\xf9>\x03\xe1\x08#\xfbh\xfe\xc4\x08\xf0\xf9\x8c\n\xdc\xf75\xfd\x13\x0c\xff\xf7\xec\xf8\x0e\x0eQ\xfa\xaf\xf1\xa1\x10\xe1\x00\x8a\xf36\x056\rY\xf6\x0e\xf9\xce\x0c\x89\x02G\xf0\x91\x04\xae\x16C\xf13\xfa\xcd\x0f\xe9\xf6\x08\xff[\x00\x17\x05\xc3\xfb\xaf\x00\xc8\xfb\x8b\x02\x0c\x0b\x97\xe71\x10\x01\xfa+\xf5\xe3\x08\xea\xfe1\xf7\xb5\xf8\x83\x0f\xa7\xfa\n\xf25\nv\x04\xf8\xe8\xa3\t\x9c\n\xaf\xf8*\xfd\xa9\x08`\x01b\xeft\x12\x0e\xfb\xa3\xf9\xfb\r\xba\x034\xf6\xb1\x05a\x05e\xff\xc2\xf3h\x04B\x13\xc6\xeb\x05\x0b\x85\x061\xf7\xe9\x01U\x05\xae\xf6\x15\x03\xdf\x07\x9a\xf1\xf5\x06\xea\x08I\xf2\xde\x04\xfd\x00\xb8\xf3\xcd\x05j\x02]\xf9\xb6\n\xc3\xf1\x8e\x02\xca\x11\x82\xe2\xf6\n\x13\x0c\xc2\xee\xe0\xf9\xe1\x11K\xfdB\xf7\xac\x08I\x03O\xf8\xa3\xf4\xda\r\xa0\xff\xa8\xfaS\x06W\x02\x91\xfa\xe0\xfc\x94\x0cB\xf4S\xfbP\r]\xf5\x06\xff(\t\xb2\xfb\x01\xfe~\x02t\xf9\x0b\x04 \xfe\x8a\x00\x8a\xff\xb1\xfd\xa3\x08\xaa\xfc\xed\xf5\x9f\x063\x07b\xf2_\x03o\x08\xac\xf8\x8d\x02\x0f\x05\x18\xf2d\r\xe5\x00\x0c\xf03\n;\x03\xfd\xfd\x98\xfc9\x05\xbb\xfe\xf1\xfc\xbd\x02V\xff\xc1\x00\x94\xf9\xc2\x040\x00A\xfe\xb7\x04u\xfe\xfa\xffC\x01\xbe\xf8\xcd\x04V\x01V\xfa\x1c\x03\x1d\x03\xd4\xfe\xf5\x00\xba\xfc]\xfc\xbe\xff\xaf\xf6\x8b\n\x13\xfb\x88\xfdv\x08`\xf6\xb9\x00.\x05\xd9\xf5\x98\x01\'\x02\x88\x01\xf3\x01\x81\x006\x03b\xfa\xea\x04$\xfdP\x04\xab\xf9\xfa\x06\x9c\x06\xc1\xf84\x00\xd8\x04\x99\x00\x1b\xfc\x98\x02f\x08S\xf7\xc7\xfe\xa8\x04\x1e\xfe\x13\x02p\xfa\xbe\x07\xb8\x00\xce\xfa\x0f\xfe6\x07\x18\xf4X\x00\xa5\x06\xcf\xfcm\xfe\x9e\xff>\x01\xf0\xf8\x8e\x02i\x02y\xfb1\xf3\xe3\x133\xf5\x91\xf9\x10\x0f\x9a\xf8\xb7\xfbM\x06\x92\x016\xf2\xc1\x0b\xc2\x00=\xfa\x86\x04D\x08\x95\xf6\xa4\x00/\x07\x1f\xf7\xd0\x04r\xfdY\x01\xcc\x05\xf0\xf5\t\x01\xf4\n\xa7\xf2\x89\x04\x05\x01D\xf5\xdd\x02\xf0\x07\xd6\xf2\x1a\x04\x8e\x03\xf5\xf7c\x06\x1d\xf9\xbf\n\x00\xf8\xef\x01\x8c\x04I\xfdS\x02\x7f\x00\x8d\xfe\xd1\x08\xcd\xfcD\xfaH\x0e\x81\x02\xc9\xf0\x12\x0b\xd9\te\xef\xa3\x08\xf2\x02\xfb\xf8H\x06\xf3\x03M\xf3#\r\xc2\xfe\x96\xf6B\x02\x9b\x0b\x1f\xef[\xff\xba\x11\x9b\xeb\xbd\x06W\x03\xa3\x03X\xed\xe7\t\xb0\x02U\xf7X\x04\xf0\x02\x9f\x01Y\xfa\x8a\x01\xe6\xfe\xdc\x04n\xf8E\x07\xfc\x03\xff\xf6\x18\x00:\x07\xf5\xfe\xe8\xfb\xa4\xfeh\x04\\\xf9\x8b\x05\x12\x01\x82\xf2\xd0\x0eT\xfb\x07\xf9\xae\x07\x86\x01\x1f\xfeA\xfb\'\xfd\xa1\r\x99\xfaL\xf8\xfd\x12\xb8\xf52\xfb\xc5\x06\x1e\xff8\xf8\xf2\r\xc6\xf8\xd3\xfbI\r%\xf9?\x02,\xf8\xa5\n\xa4\xfa\xaf\xf9\xe5\x0c\xa7\xfe\xa4\xf6\x00\x0b\x16\xfe\xfd\xf7i\nd\xf6K\x03\xbb\x06\xf3\xf1\x13\t{\t\x03\xf6t\xfdr\x07\xf2\xf6\xc9\xfe\x13\x0e9\xf3\x8e\x01\x16\x05\x00\xfe\x10\xf6n\x0e2\xf7\xb0\xf38\x14\xfa\xfa\x07\xf1\x9c\x11\x98\xfcv\xee.\x16\xc0\xf4\x19\xf4,\x0cD\x03\xf5\xed1\x11\xdf\xfds\xf2\xf0\x0c\xe0\xf7k\x00\xbf\xfe\xd9\x00=\x07\xb3\xfc[\xfe\xfa\xff\x10\x07\xe1\xef\xac\x0c\x85\x06\xca\xec\xa6\x16M\xf6A\xfa]\x0b\x03\xfc\xbf\xfc\xca\n\x80\xf8\xdc\xfe\x7f\n\x9f\xf8\xbc\x017\x04\x8f\xf78\x05q\xfbz\x08\xf1\x06{\xeeO\nJ\x03i\xf3\xe5\x00\xb6\r\x06\xf1\xad\x06~\x01\x80\xfc\x07\x01\x1b\xfc\xab\x03\x1e\xfe\xc6\xf8b\x03\xd4\x08|\xf0\xb7\t\x86\xfb)\xfe\'\x03\xdb\x003\xf5\xce\x06s\x06\x9f\xf2\xa7\x01s\x0f5\xf0\xfc\xfd\xe2\x14\x85\xe92\x066\x03\xf2\x02\x12\xf8B\x05\xb8\t\x95\xee\xd4\n\x9b\xfd\xa5\xff<\xfa\xe1\rR\xf7\x9e\xfb\xeb\x10\xc1\xec\t\x0c\xed\xfb\xff\xfb4\x04\xe2\x00\xe0\xff\xd5\xf5\xa8\x12=\xf9\x85\xf3-\x11\x08\xfa\xa2\xf4$\x0eh\x04t\xe8\x94\x11\x8d\r\x9a\xe85\n\xc0\x05\x10\xf3\xbe\x04\xed\x01V\xfc8\x
06\xb7\x02)\xf3\xf5\x10\xcc\xf7\xd9\xf4U\x10\x10\xfd\xb9\xf5\xba\x06\xe5\x04\xbf\xf4\x1b\x0c\xcb\xfd\xd7\xf9\xb5\x032\xfb\xa2\x02\x14\x04\x8c\xf1\x11\nQ\x06,\xf1+\x04\x8e\x0e\xf8\xf0G\xf6\x01\x12\x8d\xf0\xf6\x02\x93\x0c\x90\xf2K\x03\xe4\x08_\xf7\x8e\xfd\xb3\x01 \x01d\x03\xda\xfc\xe5\xffc\nD\xf7\xbb\x01y\xfb\x88\x02\xce\n3\xf2\xe7\x01V\x12D\xf2\xa3\xf5\x15\x18\x08\xf3h\xf9\x0c\x0bm\x03\x95\xf8J\x02\xbd\x06\x9d\xf9\x9a\xfb\xe9\x08\xca\xfe\xae\xf9\xf6\x03\xe1\x07#\xf8\xd8\xfa\xe1\x0c6\xf4\x89\x05\x10\xfbA\x05j\xfcl\xfc\xfa\x0e\x0f\xf1c\x00a\x03U\x03\x96\xec\x8e\r\xb1\x03"\xf9\xb5\x04\x1f\xfbo\x04\xd9\xf8\x00\n\xde\xf3\x93\x07\xfc\x05R\xf7[\x07V\xfb0\x06\xe6\xfb\xd1\xf6\x12\x13\xcb\xf3\xc3\xfcY\x08\x16\xfc\xa1\xfb\x91\x01\xa2\x06\x7f\xf1r\x08\xe8\xfa\xf8\x06\xe7\xf8y\x03*\x04\xf6\xf8\xfa\xf9\xab\x04Z\x03\xab\xfa:\x07\xee\xf3\x90\x0e\xa8\xf6>\xfd\xb1\x08\xf2\xfd\x19\xfe*\xfd;\x06w\x05\xd2\xf5\xf7\x00\x00\nT\xfdF\xf5v\rW\x01\xea\xf0\x82\n\x99\x03\x16\xf9\xb3\x05B\x02\x86\xf4{\x06b\xffE\x03\xb6\xf5\x18\x08\xd7\xfcy\x03e\x00\xab\xf4Q\x0cn\xf7I\x00Y\x01n\x03\x15\x01$\xf9#\xffG\r\xcc\xec\xdc\x05\xf0\x08w\xf6 \x01\x0b\xff\x16\x06\xe3\xf2.\x0f\xd8\xf8\xbf\xf7\x98\x08T\xff\xa8\xfc\xa8\xfcQ\n\xe1\xf8\xe7\xf6U\x13\x92\xf73\xfb`\x06G\xfb\x1b\x00\x80\xfcS\tB\xffO\xfb\x1f\x01G\x04\x85\xfa*\xfa3\x15\xb4\xeew\xf8Y\x1ep\xeao\xfb\xd2\x14\x03\xf72\xf5\x02\x11\xb0\xf9_\xfb\x87\x08\xb2\xf8\x91\x0e\xe5\xf0\x0c\x05H\x026\x006\xfc/\xfe\x91\n\x8c\xf66\x08l\xf5C\x06\xb0\x02`\xf9E\xfb\xc6\x0c$\x002\xef\x84\n\x9f\x06"\xf5\xa8\xfel\x05\xb2\xffK\x01\x1c\xf5\xa9\x06\xb1\xff\x8b\x04N\xfa\x8b\xfd!\x04\xf3\xfe\xe1\xfe\x11\xfb\xac\x0c-\xf0\xa6\x04\xd3\x08X\xfbm\xfcr\x03\x8c\x03S\xefH\x0c\x98\t\xff\xeb\x00\x07\x8d\x0e"\xee\xcd\x05"\x04\xad\xfb\x16\xfdW\x00\xbc\x05}\xfc\xfe\xff\xe0\x036\x03\xd5\xf0\xf8\x08\\\x06 \xf5\xd0\x01U\x03\xda\x01\xc4\xfe\x19\xfc\xb1\x01\x1e\x0b\xfa\xf3?\xfe\xd8\t\x98\xfcv\xfb\xa7\x04\xa3\x02\xff\xf8\xf6\x0b\xee\xf21\x07\x92\x01\xb9\xf8i\x07\xce\xfc\x01\x04\xa4\xf8\xf2\x04\xa1\xff\xa9\xfd\xe5\x01\xd5\x01\xad\xf7\xd0\x06\xdf\x01\x14\xfd\xc9\xfbb\x00\xc8\x07c\xf8\xcd\x01\x8a\xfbd\x10\xb7\xef}\xfc"\t\x02\x02\x93\xff\xfc\xf3j\x10\xba\xf9\x17\xf8\xa9\x0b\x1d\xfa\x8e\x00\xe5\x08:\xf8\xf5\xfa\xdc\x04\x98\x00\x9a\xf9\xfe\x0b7\xfd\xc0\xf71\x04e\x05w\xf5\xf1\xfa\x05\x0fP\xf5\xd3\xfeT\r\x1e\xfe\xd5\xf7\xf9\x05?\xf7\xf9\x027\x07\xb1\xf7\x18\x0f\x8a\xf8\xce\xfe2\x04~\xf6H\x07\xdc\x01C\xf7\x8b\x05\x88\tv\xf3=\x02{\x07r\xf5y\xfe\x92\x0b@\xf5\xc3\x02\xa8\x03\xf7\xfc\x89\xff\xbc\x01p\x04\xf2\xeeC\x0c\x08\x03&\xee\x0c\x0c\x13\x0b\xf4\xee\xa7\x03S\x08-\xef\xbf\ny\x05\x82\xf3\xc3\x08\xd8\xf8"\x05S\x00{\xfcM\x01x\x02\x0e\xfd\xbd\x00\x9f\x02\xd4\xfb\xe5\x06\x03\xf7\xee\x08Q\xfc0\xff\xa3\xff\x99\xfb\x8b\x0c\xbd\xf9\x82\xf8\x92\x040\x0c|\xee\x85\xffG\x11\xd5\xf4\x8b\xfe\xd2\x04k\x00Y\xf8\xbd\x08\x94\xf9\x9b\x02\xef\xfe\xe9\xff\x87\tF\xed\x8e\x0cY\xfe\xe2\xfa\xc1\x00\xdb\t\x04\xf9\x19\xf92\x0e\xe5\xf1\x8b\x08\xc0\xfeL\xfbZ\xfe<\t\xfc\xfb\xf3\xf8.\x13\xe8\xeb\xf0\x06\x1b\xfc\xfc\x03\x15\x06\x99\xee\x99\x10\xba\xfc\xbc\xfa\x13\xff\xe6\x06\x08\xfa\xb9\x01Z\x01\xeb\xf6\x06\x0e\x85\xf9\xaa\xf8\xc0\tv\xfd\xc4\xf9;\x0e_\xee\x9f\x08@\x06&\xf4\xb8\x00\x12\x06\xf6\xff\xb7\x01\xda\xf98\x00(\x0f{\xe8Y\x08\xf6\n\xeb\xfa\xc5\xf8{\x07\x80\xffs\x00\xec\xfb5\xfeC\r\x10\xf3\x00\x027\x08,\xf5\xd4\x03\xbb\x04\x9a\xf6q\t\x97\xfbH\x02 
\xf9\xe0\x01N\x08\xbd\xf8\x85\xfb\xc6\x0c\x1f\xf6\xd2\xff&\x0cB\xec\x19\n\x7f\x03\xe4\x01\xe3\xf1\xeb\x0c\x81\x02\x82\xf0\xed\x05d\x08\xee\xfa!\xf7\x9b\n\xd0\x018\xf7\xbb\xfe\xa1\n\xa2\xff\xcb\xf5\xe0\x01\x89\x03\xd7\xfbT\x06w\x01&\xf8\x19\x05\xa7\x03k\xf6\xb8\x02\x8c\x02T\xffK\xfaW\x08Q\x03t\xf6u\x07\x8e\xfd?\xf5/\n\xde\xfd\x85\x03\x05\xfd\xc2\x00\x03\x06\x05\xf7\xd9\x01(\xfe\x1d\x08&\xfb\x92\xfd\x07\x0c+\xfb\x01\xf3\xe2\x12&\xf4\t\xfa\xa5\x0e|\x00i\xf2\xfe\x0c;\xfd2\xfar\x07\x08\xf2L\r\xc2\x00@\xf6\xaa\x08\xc3\x089\xe9\xe4\x11\xe9\xf7P\xfe\x97\x039\xfeW\x02\x19\x01\x93\x00\x96\xfc\x88\x02\x97\xf5-\x13\x16\xeb\x8e\x07\x05\n\x1b\xf4\xd7\x08-\xfe\x0c\xf9\xdc\x03\xd5\x04"\xe7\xfd\x11U\x0fw\xe8\x95\x02w\x122\xf0n\xf7\xbc\x177\xee\xed\xff\x7f\x0b4\x00<\xf7\x82\x04\x07\x0c\n\xef#\xfe\x96\x13+\xf4\x05\xf6\xc6\x0e2\xfb\x90\xfb\xa6\x02\x15\x07\xe8\xf2(\x01\x1d\x01\xef\x07\xab\xf4b\x07\xe7\x05\xf2\xf1\xbe\x01\xde\x00}\x05n\xf9\xf1\x07\xd8\xf3\xd8\x04\x84\x04>\xfet\xfd;\x031\x03\xee\xf0o\x0f\x9d\x01\x1f\xf3a\x08\xdd\xff=\x01\x8d\x00\xb4\xff\x94\xfd\xb5\xfb8\x05\xe0\xffb\xfd\x8a\tc\xfb\x05\xf7\xa5\x06\xc2\xff\xe8\x02M\xf1\'\n\x1b\x02\xa7\xff\xff\x01[\xf6\x01\ti\xf7A\xfe\x89\t\xbb\x01\xeb\xf4\x8e\x0b\x80\xffy\xf1\x1d\x0b\xdf\x01K\xf6I\x03U\t\t\xf6x\xfc\xd9\rT\xf8\x8f\xf7|\x08\xac\xfex\xf8&\t\x80\x06\x9b\xf0\xd6\xff\xe0\t\x89\x01\xe6\xf5"\x05x\x05\t\xf6^\x02\xee\x04\x87\xfe\x11\x00s\x04"\xf9\xa9\xf7G\x15\xb8\xf3\xa9\xfdw\r\xcb\xf1.\x07\xc4\xfc\xbe\x00\xdd\xff{\x04\x90\xfd0\x00^\x01\xf2\xfb\x98\x05\xc3\xf6J\n\xb6\xf5\xd0\x07H\x00\x11\xfa#\x0bU\xf23\x04&\x02\xf9\xfd\x93\xfa0\x0b \xfc=\xf7\xf2\r\x02\x00\xe0\xef\xcc\x01J\r\x9b\xf7\xf5\xf9/\x0c!\x059\xf0A\t\x82\xf6Y\x01\x16\x04\x16\xff\xec\x04\xf1\xf7j\n\xe8\xf6\xc9\xff\x8c\x05\xc5\xfb\x8e\xf5\xf0\x07u\x14\xee\xe8e\x01\xa8\x13\x11\xe6\xc4\x01\x10\x10\x01\xfbM\xf6\xec\x07\xef\x08+\xf3#\x02\x1a\x07%\xf5\xd3\xf9\x87\x12\x08\xf9\xd1\xfcy\x07\x9f\xff\xb9\xf7\x10\x00\x14\t\xb0\xf46\x08\xa4\x02\x16\xf9\xf4\x006\t\xf1\xfc.\xec8\x16\xc4\x07-\xe9\xfb\x05\xad\x0ft\xf6\xbf\xf2\x87\x113\xfe\xbf\xf6t\t\xb7\xfc?\xfa\x8a\x0cA\xf56\xff\xf9\x02\'\x05\xe1\xf9\x0f\xf8C\x0f\xba\xf1\xd8\x06\x15\xfe\xce\x00\x88\x02$\xfe\xdb\xf6\xfb\x06\xb2\x00\xe4\xfd\xa6\xfe%\x02)\tC\xefG\ry\xf4R\x05D\x02\xd4\x00%\xfc\xbc\x07\xee\xfb\xec\xfd\xd5\n\xd9\xf6A\x07t\xfe\x8d\xf5\xc8\x03\x83\x0f}\xef\x12\x06\xe1\tS\xf0\xcd\xfap\x0b\xee\xfd\xf5\xf5O\xfd\x90\nk\x05\xa1\xf3\x07\x05H\x00\xa7\xfd2\xfb\xb1\xfe\xce\r\xba\xff\x16\xfc\r\x00Q\x00\x87\x04\x80\xf8\xd2\xf7X\x0e.\x02z\xf3T\n\xcf\x02R\xf5\x80\x00!\xff\xc8\xfa\n\x0b\xe2\xfd\x05\xfeN\xffL\xff\xd5\x04_\xf2\xff\x02\x83\x0b\x93\xefK\x04\xba\n\x84\xfc\x93\x02\xd7\xee\xdd\x0c\xa8\x05\xe3\xf0\x0e\x0b7\x0b3\xf4\xcb\xfa\x9d\t\xae\xfe\x88\xfa\xeb\x05u\x06\xaa\xf1d\x06\x18\x0f\x12\xef]\xf9\xf9\x17\x01\xf5\xc2\xf5\x8f\n(\x05\xc7\xfa\xbf\xf5}\x0c\x83\x00\xa2\xf6\xdb\x05\x9c\x01@\xfe\xad\xfb\xeb\x00\x8f\x05u\xfb"\xfc\xc4\t\\\xfe\xfd\xf1\x80\x05\xc4\x08\xf5\xfb$\xf8\xc3\x06\xe2\xfe\x98\xf5\xf3\x08\x19\x06\xb2\xf6\\\x00P\x04\x8e\xf7f\x02m\x05\xa9\xfb_\xfeL\x00\xe7\x02\xdb\xfe\x87\x01e\x00\xd8\xf9;\xff\x10\x02d\xffB\x08J\xfaV\xf8\xd5\x0b\x99\xfbO\xfb\xec\x07%\xfd\xb5\xf4*\x04g\rm\xf8\xc0\x00r\x01\xe5\xf6\x9f\x009\x02J\x01\'\x00\xe5\x018\xf9:\x02\x10\x08*\xf7h\x01\xbe\xffQ\xfa\x1a\x05j\x03\xdc\x02\xd7\x01\x80\xfbW\xfa9\x05\xb2\x00\xd9\xff\x8c\x05\x91\xfe\n\xfcO\x05\x13\x01\x0b\xfa\x1d\x04\xd1\x02p\xf7\xd1\x03\x8e\n\xc6\xfa|\xf9+\xff\x07\x05\xc4\xfa\x9f\xfb\x03\x08\xad\x03\xaa\xf6+\xfe4\x02q\xfdk\x02y\xfd\x85\xfe\xc5\xfe|\x02Q\xff\xae\x02\xac\xfc3\xfd2\x01\xbb\xfa\x10\x0
7\xbe\x03E\xfa9\xfc\xb2\x06\xef\xfeg\xfa\x9b\x01\x03\x04F\xfe\xd1\xfb\x1e\x05\n\x01\x83\xfc\xa2\x00\xea\xfd\x8a\xfe;\x02d\x00L\xff\xfa\x01\xa0\x00\x99\xfc\x1f\xff\x7f\xff\x92\x01\xc6\xff\xa6\xfd=\x03\x9b\x02G\xfe\x14\xfe\x1b\x047\xfdq\xfd\xf7\x02}\x02U\x02\xce\xff\xa7\x00\x0b\xfc\xbb\x00L\x00\x91\x01\r\x01\xf7\xfe\xae\xff\xbb\xfb\xf6\x02\x1b\x03\x1c\xfb\x07\xfd\xda\x01\x05\xfea\x00\x0f\x00\x8f\xff0\xff\xa9\xfbK\x02R\x02\xfe\xff\xd3\xfdi\xffC\x014\x00T\x04\xb7\x00\xb6\xff\xbe\xfd~\x000\x04\x15\x02\x0f\x03\x00\x00\x98\xfe\xba\xfea\x02a\x02\xe9\xff\x0f\x017\x01\xd6\xff\x98\x00\xc5\x02\x85\xfe\xed\xfc+\x02\xf4\x01d\x00A\x02\x1f\x02\xc0\xffI\xfe\x96\x01\x9c\x00z\x01\x98\x02\xa7\x01\xfb\xff\x92\x02\xea\x01i\xff\xfa\xfe#\x00\xe5\x01\x99\xff\xc4\x01\x8a\x01\x89\xff\xf1\xfc\xb4\xfe\xc0\xfe\xf9\xfd1\x00\x15\xff\xff\xfd#\xff\xfb\xfd\x15\xfe\xa9\xfes\xfc\xab\xfe\x9f\xfe~\xfe;\x00]\xff\x7f\xfc\xbb\xfa_\xfe0\x00\xd8\xfd!\xfb\xf1\xfd\xfc\xfb\xf6\xf7x\xfcd\xfa\xc2\xf8\xcd\xf7-\xf9\xec\xf8\xca\xf7b\xf7\x9b\xf3\xb9\xf3N\xf7\xb5\xf9\xd9\xf5L\xfa_\xf99\xf5\xe8\xf8N\xfc\xbe\xff2\xff\t\x03\xdc\x05\xc9\x05\x85\x08\x9b\x0c6\x0f\x85\x11=\x13\x87\x16\x05\x19{\x1c,\x1d\xcd\x1b\xb4\x1b\x83\x1a\xa7\x1c5\x1c\xd1\x1a\xd1\x1aT\x16\xe4\x10\xc1\x0f\x93\r-\t+\x05\x14\x02^\xfe\xd6\xfb\xf2\xf8\xd9\xf4/\xf1W\xed\xf6\xeb\x8c\xec\xb7\xec\xa8\xeb\x03\xea\xf8\xe8e\xea\x96\xec\xa3\xedA\xf0\x91\xf3\xc7\xf3\xb7\xf6\x10\xfa\xac\xfbN\xfe\xc6\xff\xd0\x00d\x04!\x07\x18\x066\x06\x04\x06\xb4\x04\xb0\x02\x9c\x02z\x02T\x00<\xfe\xef\xfb\xd4\xf8_\xf4\xce\xf1!\xf0\x15\xee\xbd\xec\xac\xea;\xea\x12\xe8]\xe5\x13\xe5]\xe5\xa6\xe4\xe5\xe5p\xe7\xaf\xe7F\xea[\xeb\xf3\xe9\x1a\xec{\xf2\x04\xf5\xf7\xf6\x90\xf9\xa0\xf8\xe8\xfd\xab\x01\xd0\x03\xf5\t\xab\r&\x16\xf0\x1f\xc0%\r&\t%\x04(e,\xb75A;\xbc>\xb3AA\n\xb1\n\xed\x0c\xbb\x0cK\ry\x0f\x02\x0f2\x0c\x06\x06\xbc\x01\x15\x01\x92\x00\xbc\xfd\xf0\xf9J\xf3\x92\xeco\xea\x01\xeaP\xe8\x0c\xe6M\xe3b\xe3>\xe5/\xe5j\xe6\x19\xe7s\xe6\xd7\xe8\x8c\xedS\xf1C\xf5}\xf7\xbc\xf6\xad\xf8\xc8\xfb\x11\xfd\x9b\xff\x0b\x00\xc3\xfd\xd7\xfe\xa3\xff\x97\xff\xe1\x01\x15\x00B\xfco\xfd\xdf\xfc\x90\xfdc\xfe\xbe\xfc\xde\xfc\xcd\xfb\xda\xfd8\x08\xdc\x14q\x14\x86\rM\nr\x10\xb0\x1f\xca(L,\x1a-2,d-\xc1/\xa50\r/\x1a*\xa9\'\xa8+\xbb.C)\x0f\x1c\'\x0f\x0e\x08s\x06\xc3\x06\xbf\x06\x01\xff-\xf4\x9d\xec|\xe6l\xe5\x82\xe4\x1b\xe0\xb8\xdd\\\xdf\x9b\xe1\x91\xe3\xa4\xe3o\xe1a\xe1\xf8\xe3\x1d\xea\xb2\xf3?\xf9P\xfb\x82\xfc@\xfd\xc0\xff \x05\xe4\t,\x0bC\x0c\x1d\r\xea\r\xf9\x0eD\x0c\x96\x07\xd6\x02r\x01\x9d\x02\xa8\x012\xfdU\xf7l\xf0\xde\xed\x95\xec\x13\xeax\xe9\xed\xe7{\xe5\xde\xe4\xe3\xe6\xb2\xe6\x11\xe6\x1f\xe7\x16\xe9\x1c\xed6\xf2\xf3\xf4\x90\xf4x\xf4\xf0\xf6\xa2\xf9\xac\xfb0\xff\x7f\x00\x90\x00\xdb\x02\xc9\x01\xbd\xffR\x01\xca\x00\x9b\x02v\x02\x8c\x02=\x03\x8e\x01\xe4\x00\x98\x00Y\x02\xff\x03\xbc\x02\xe4\xff\x1c\xffB\x04,\x0c\xa3\n\x85\x0b\r\x11\x81\x14\x18\x18k\x1au\x1e\x1f \x92\x1f\xb1#\xe8+\x1a1\xe8+-%\x80#\x1e$\x17%\xae#\xff!\t\x1d\xad\x13\x83\x0f\xa2\r\xb5\n\xc6\x05L\xfd\xfd\xf9\xff\xf7)\xf4^\xf2\xef\xedv\xe7u\xe3\xdb\xe1P\xe5\x80\xe7\xd9\xe5\xdb\xe3\x04\xe2M\xe2t\xe6\xe9\xea\x08\xed"\xef\xbe\xef(\xf3\xd5\xf7?\xfb*\xfd\x95\xfc\x82\xfc\xf9\x00\xae\x07\xe0\x07M\x07\x0f\x05\x8f\x01\x16\x02\xcf\x03\xa8\x04g\x02\x89\xfd\xf3\xf9\x07\xf9\x03\xfa\x0e\xf8\x14\xf4z\xf0[\xed\xaa\xebi\xf1\t\xf2x\xee\xcb\xec\xaa\xe89\xec 
\xf2X\xf26\xf1o\xf4\xa0\xf3,\xf3\x81\xf8A\xfbo\xfa\xec\xf8\xb5\xf9@\xfc\xf7\x00\\\x01\xbd\xff\xbf\xff\xac\xff\xfa\x028\t\xc9\x07Q\x05\x00\x02\xd0\x02u\x07\x97\n6\x11K\x0c\xa6\x05\xf5\x07\xea\x04k\x08\xa9\x0e\x84\x08\xec\x07\x15\x0e.\x15\xf9\x15\xab\x11=\x0b@\x0ba\x10J\x17\xda\x1cD\x1e>\x1b#\x16\xd1\x14v\x14\x92\x17\x8b\x15g\x13\xeb\x14\xcd\x13\x1d\x13<\x0f\x91\x08C\x03<\x00\x0b\x01\xfa\x048\x02:\xfc^\xf8"\xf36\xf0\n\xf0a\xefV\xef\xd4\xee\xb5\xed\'\xec\xf3\xea:\xeb\x91\xe9\xb8\xe82\xee\x88\xf2L\xf2\xc5\xf4\xbb\xf5\x8e\xf3d\xf7\xe9\xf9\xfe\xfc}\x02\xb6\x02-\x03\xb0\x02\xd8\x02\x83\x02V\x02\xeb\x02Y\x02\x98\x01s\x01\xd9\xfey\xfb\xfb\xf6\xcd\xf5N\xf5Y\xf3f\xf3\xfd\xf6\xb1\xf1\xb4\xed\x93\xef\xd4\xeb\x14\xedM\xf1@\xf2\xa4\xf4j\xf6\x08\xf3\xea\xf3\xf6\xf7\x0c\xf6j\xf8\'\xffO\x01/\x03\xcc\x06\xb9\x08\xaa\xff\xe3\xfe[\r\xe5\ra\x0c\x81\x0fd\x0c@\x0c\xe9\n\xa6\x0bi\x0e\xed\x08\\\x06E\x0eY\x0f\xea\n\xe9\x04)\x02.\x04\xdd\xfdw\x06\xe8\x10\x1d\x05\x16\xfa\xba\x00\xb5\x02\x8f\xfan\xff\x96\x07b\xfdp\xf8\x16\x08(\x04\x14\xf9\x14\xfe\x1c\x01\xc6\x00\x7f\x00\xfa\x06\xcb\x08\x88\tu\x08p\x03\x82\n3\x0b\xd5\x08Q\x11\x13\x14\xfd\x0e~\r\xef\x0f\xad\x0e\xb2\n\x82\x0c@\x0cy\x0c\x9c\x0c\x95\x08\xfa\x08\xbd\x04 \xff\xdd\xfd\xdd\xff\x93\xff\x8e\xfb\xaa\xfb\x96\xf9\x92\xf5\xb7\xf4,\xf4`\xf1\xb3\xf1\x96\xf2]\xf09\xf5\x90\xf5!\xf1\x19\xf1_\xf5\x1d\xf4I\xf2\xba\xf7\xce\xf9i\xf8\x1c\xf8\xcd\xfdA\xfa>\xf9O\xfd\xad\xf9\x99\xfb\x12\xffc\x02.\xfag\xfd\x8c\x00M\x00\x92\xf72\xfa!\xff\xb4\xfb\xb6\xfb\xe9\xffe\xf8M\xfc\xad\xfc\xf6\xfa\xc9\xfbk\xfa^\xfa\xb2\xf9=\x02\xbe\xfb\xb8\x00T\xf8\xa5\x00\x8f\xf8v\xfe\'\x01"\xf7\x07\x03\xdb\xfe&\x05\x9a\xf9x\x022\x00\'\xff\r\xfb\xe7\x06(\tT\xfb4\x0f\xaa\x04\x14\xfbX\xfc\x98\x11l\x00$\xfci\x16c\x082\xfaL\x06\xb2\x11\xbe\xf6\xcd\xfd\x1a\x1b\xf2\x03\xc0\xf4\xf7\x10\xb1\n/\xfaH\xfe>\x10\xe9\xfc\xa8\xf9b\x06\xf1\x04h\x06\xc3\xf2m\x07\xbe\x07\xbd\xf4R\x02\x7f\x06\x06\xfc\x0f\xff\xe0\x00\x83\xfe\xe3\x02\xf9\x02X\xffm\xfc\xaa\xfat\x055\x08\x0b\xfa\x89\xfb\xeb\x06\xef\x00\xb9\xfea\x02\x1a\x07\xbc\xffL\xfc\xa6\x02v\x07r\x07\xdd\xfcx\x06\x82\x02\xd1\x02M\x03\xfa\x02\x05\x07\x0b\xfeY\x03\xca\x08\xaa\xfbi\x05\x19\x07i\xfd\xae\xfc(\x04\xe5\x05\x83\xfd\xb9\x01#\xff\x95\xfe\xf5\xf9\xae\x02\t\x00{\xf3\xe6\xfc6\xfe\x14\xf9\x9e\xf2\x07\xfay\xfdf\xebU\xf5\xf8\xfe\x93\xf5S\xf6\xa4\xef\x9a\xfc\x94\xf5\x1a\xf1\x90\x04\xa9\xf7O\xf4\xc4\xf9\xee\xfc\xff\xf9S\xfe\xed\xfc\xed\xf5\x16\x05\xea\x05\xe3\xef2\x05\x94\x05\xdc\xfbv\xfb\xbf\xfc~\x11\x9a\x02Z\xed;\n\x19\x06\\\xfe\xf5\xfbn\x06\xfc\x02O\xf7\xff\x0f\xe5\xf2B\x01\x1b\x12\xfd\xefQ\xfa\xa8\x18\x05\x00a\xf2\xc0\n\x06\x0c)\xf8\x81\xff\x8a\x12\xc5\x07\xfb\xfa[\xff\x87\x12\xe2\x03\x9d\xf4B\x0b\x81\x0f\x0c\xfb\xa4\xfe\x0c\x0e,\x04\xfd\xfeC\xfc 
\n%\x01\x0e\x00h\x06E\x01\xa0\x04\x9d\x02\x11\xf6Y\t2\t\xca\xedS\x10\xc9\x0b\x11\xe7H\x07\xe9\x19\xb1\xf2\xfe\xf5\xec\x05\xff\x0b2\xf1\x01\x02T\x1a\xed\xeed\xf3\xf6\x0e\xbc\x02e\xec\xea\x04\x7f\n\xe0\xfa\xd7\xf2\x11\x0b\x83\x03\xc2\xe7\x87\xfa\xb9\r\x1a\xf8\x05\xf0\x03\x08S\x04x\xef\x1a\xf8\xf3\xfdl\xfd\x1b\xff\xf4\xf1X\x0c\xd2\xf4@\x00\xc5\xfe\x16\xfd*\xf7\xb5\x01\xd4\n\x05\xef\x8c\t\x96\x05\x9c\x07\xc9\xf0\x9f\x07\x85\x0c\x80\xec\xa8\x07\x01\x10v\xfc`\x07\x1c\x05\xc6\xf8/\x01\xb7\x03\xb1\x02\xaf\x00\xa6\xff\x15\x07\xcd\xfb\xd6\xfe`\x02\x15\x01\xaf\xf3\xee\xfc|\x0f\xc4\xf4\xee\x05{\x00\xda\xfc\xd5\xf86\x06\x9a\xfd\x02\x06n\xfe\xf2\xfb?\n\xff\xfcp\x03\xc5\xf5.\t0\xff\xaf\xf3\x18\x0c\xb4\ta\xf0q\x03\x86\xff\xf2\xff\x1e\x02U\xf5\x1c\x0b\x0c\xff\x82\xfb\xfc\xfa|\t\xcd\x04\xd4\xe8\xb7\x0e}\xff\xe8\xf6F\x0f?\xfb\x80\xfb\t\x08\x81\xfa\xaf\xfa\x93\x04\xe8\rG\xf1\xb4\x01B\t\xba\xf4\x83\x0c\xcf\xf4\xa2\x06\xfe\xf5\xbf\x08\x80\xf5\x9b\x0b\xe6\xf6h\xfbw\x05\xc6\xf8\xb4\x07\'\xf2U\n\x8f\xef*\x0f?\xf9>\xfeQ\xf9\x93\r\x9c\x02\x17\xec\xa9\x0c\t\x0c\xa2\xf0\xb9\xf9e\x1e\xa7\xee\xbf\x00\x9c\n\xe0\xff\xf6\xf4\xe3\x08\x83\x03\xe1\xf8\xcb\t\xd3\xfb)\xfd\xca\x02\x0f\xf96\x07\xa8\xed\xd2\x086\t\xe8\xe5\xbe\x0e\xda\x01Y\xf3\xcb\xee7\x12\xf3\xfe\x0c\xe4\x08\x18\xf3\x01\x19\xef6\x02n\x03K\xfe\xcf\xf1\xb4\x0c\xa5\x059\xf7\x8f\x08K\x00\x91\xf8\xb1\x06s\x04Q\xfc\x87\x02I\x07\xa9\xff\xd8\xfe+\x10\xe0\xf6i\xfeI\x06K\x03C\x04\x1e\xfa\t\x10 \xf5\xfa\xfe>\x18<\xf3\x97\xf8\xf6\x0f\x97\xf6\xb4\x03\x97\x06\x0f\x05S\xff"\xff\xe9\xfdZ\xf9\x7f\x0f\xa2\xedy\rJ\x07\xeb\xf0V\x03\xb8\x01\xc9\xf5\xcc\x00*\xfe7\xf7F\t~\xf9\x9a\xfb\xa7\x00\x05\xfb\x0e\xf2\xc9\x10"\xf1\x1e\xf9S\rZ\xf2\xaf\xf8\xb0\t\xec\xf8@\xfa/\x05C\xefL\n*\x03\x9d\xf8Q\x04-\xf9\x9e\xff?\x0c(\xee\xc0\x07\xa8\x14\xe2\xe8\xab\xfa\xd1\x1a\xa5\xfay\xef\xda\x0e\x83\x0c\\\xf3+\xf70\x16R\x00\xab\xf7\x81\x01\xb7\x0b\x13\xfaw\xf7\\\x1bs\xef}\xfbB\x17\xd5\xed\xac\xfb\xb1\x13\xe9\xf8\x87\xf6q\r\xc5\xfa\x07\x05\x14\xfd\x85\xfb\x92\x07\xdf\xf7\x03\x03\x89\x07\xfe\xfa\x8b\xfa\xdc\x06\xc7\x02j\xf6\xde\x00\xb8\x0e\x18\xedg\x07\x11\x01`\x02/\x04\x19\xf0\x11\nw\xff\x1a\x01X\xf5\xcc\x0e\xf4\xf9\xef\xf8,\x08a\xfd\x0f\x04\xbd\xf3\x9d\x00\xc1\x07\xda\xfa\xc3\x00\x8d\x021\xfb\x8a\ta\xea<\x0c\xdf\x0b\x06\xeb\xc8\x08\xec\x00]\xfe\r\x06\xa4\xfb\x08\xf4\x9f\x10L\xf6v\xfb:\t\xa1\x04\x13\xf7\xc5\xfa\n\x0e\xf5\xfc\xb3\xf3\xe8\x0bp\x04\x91\xf1\x99\x11\xd6\xfb\xe3\xfb\xb9\xfd\x18\tm\xf5B\t\x1b\xff\x95\xf6\xf6\x14\xa1\xf1\xf8\xf4\xf6\x10\xa5\xfc]\xf0Y\x0f\xd3\x01\x8c\xf7\xac\xfd\xab\t\xa3\xf4C\xfeY\x08\x12\xf9U\x01\x1a\x06"\xfa\xf0\xff\xcf\xff\xe6\xfcD\x01:\x01\xb7\x00\xf0\xf7C\x0b\x0f\xf6p\xffM\n\x83\xf7\xeb\xf8\xff\x064\x04h\xfaD\xfd\xd4\x08\xd7\xfeR\xf8\xb1\n\x94\xfc\x99\xfc*\x06\xd1\x04\xc8\xf63\x08\x1b\x06_\xf4+\x05x\x08\x9f\xfa\xa1\xfc\xec\t)\xfc{\xfcR\x05\x14\x00~\xfeT\x03v\xf7^\x01\xe0\x07\xec\xf1P\x07r\x06\xac\xe8\xe7\x12y\x01\xf1\xea9\x0c\xa1\x07X\xec\xcc\x06d\r6\xe9\xc4\x0b\x10\xfa\x0f\x05\x11\xfd\n\xf4g\x13\xa9\xf1{\xfe\xf1\x08\xac\xf6\xeb\xfc~\x0b\xac\xf11\xfb\xf8\x13\x89\xf0E\xf6\xdf\x12\xe3\xf7l\xf6\x92\x0b\xe8\xfc\x93\xf7c\x02\x7f\x05\x12\xfb\xc8\xfe:\x07\xfd\xfeE\xf7/\t\xb0\xff!\xfd\xdc\x02\x15\x07\x17\xfe\xe4\x00\xb5\x06\xca\xf9\xf6\x04t\x06k\xfbD\x07\xca\xfd\xd2\x01q\xfe\xb2\x05\xb2\x06\x93\xeb5\x13\xec\xf9\x1c\xfa\'\x08\x85\xfc\xf5\xf88\x08\x9e\x03\xd7\xf03\x07G\x0f~\xe7a\xfd#\x1f\xd0\xe6e\xfd\xa4\x17\xb5\xf4\x15\xef\\\x15\x8b\xf9\xb6\xf3\x91\x11\x82\xf8\'\xfe\xeb\x02E\x01e\x00\x99\xf8c\x06\xe1\xfb\xa1\xfeZ\x08/\xff\xef\xf2\xcc\x0b\xe1\x04\xe7\xe7\x99\x11\xed\xff_\xf7\xdf\xfa\xb3\x
0c6\x01\x7f\xecg\x107\xf8\x05\xf9s\x04\x03\x01\x13\xff\x9d\xfd\xbc\xfd\x97\xf8\xd2\r\x87\xf0\xf3\x01:\t\xa4\xeb\x9f\x0c"\x07G\xef\xc1\xff\xce\x0f\x9b\xf0>\x00\x94\t\x7f\xfb\x90\xf8\xad\x0c\xe7\xfd\xee\xf0\xc8\x19\xfb\xf1\xd9\xfe\xe7\t^\xfbU\x01N\xff\x97\x07\x18\x02\xc0\xfe\xf7\xf7\x80\x11@\xf6\xe2\xfcY\x14\x93\xf1:\x05\xdf\x001\xfeU\x04\\\xfd{\x06"\xf9=\x06k\xfcL\xff;\x03<\xfc^\tV\xeb\xf2\r=\x04\x9f\xf6\xc7\x002\x06\xce\xf9\x1b\xf0t\x18\x1b\xf7\x0c\xf0\xd3\x0f\x9c\xfe\xd1\xf3\x8a\x05\xd1\x04a\xf4\xe0\x00I\x06\x84\xf7,\x01\xc7\x06\x88\xff+\xed\xb0\x10\xa7\x02e\xf2\x8e\x04\xb3\x05\xa1\x00\xc1\xf1\xbd\x16J\xf4w\xfd\xaf\x04\xed\x00\xe1\xfb\xbb\x02~\x0c\xa6\xee\xd7\x0eq\xfdI\xf4i\x0c\xf6\x00\xdd\xf82\x03\xf8\x06\xa7\xf8\x13\x00\xb8\x074\xf7G\xff\xbb\x03\x14\x04\x00\xf9.\x02\xb5\xfd\xc7\x00\x05\x03\xf2\xf6\xaa\x08\x94\xfb\xcb\xf9|\x07\x02\x05\xad\xefW\t5\x05E\xef\x19\x0e_\x03\xaa\xf3J\x07\xe9\x02\x07\xf3\xce\x11=\xfb\x84\xf3\xbb\x12\x80\xf7\xd4\x009\x05\xf2\xf8-\x03\xef\x02\x94\xfe\x8f\xf9\xb2\x0eb\xfe\t\xeb^\x11H\x05\\\xee~\x07k\x07\xb0\xf0\n\t(\x05\xe8\xf0\x06\x06\x19\n/\xf1\xc0\xff\x9f\r\xf5\xf3\x1e\x02^\x00\xdc\x01\x8c\xfe\x8d\xfey\x02(\x01\xd4\xfb\xc0\xfc\xfa\x0b\xfa\xfa\xed\xf6\xfa\x0cS\xfb\xcc\xf8_\x0b\x12\xfd2\xfa}\x03l\x04Z\xf9\x10\x082\xf7\xab\x02^\x05\xdc\xf9(\x004\x02\x8d\x08\x19\xee\x1d\x0c\x0f\x00\x7f\xf6{\x0c`\xfa\xd1\xfa\x85\x07X\xfd\xea\xfd\x90\x005\xff8\x00z\x013\xfa\x91\xfc\x8a\x06\\\xfd\x00\xfa[\x03\x1a\x01e\xf6\xa2\x06s\xfbQ\xfc~\x04\x1d\xfc3\xfe\xe0\x05\x8d\xff\x03\xf9\x8c\x08\x0b\xfcn\xfe`\x04+\x02\xf7\x01/\xff\x0f\x00N\x05h\xfe\x94\xfd \x07\xd8\xfe\x15\xffR\x04\xaa\x00\x17\xff\xf3\xfdi\x045\xff9\xfdL\x043\xfd\xf1\xfe\xec\x02\xed\xfd\xf1\xfd\x04\x03\xf5\xf9\x88\x01\xa6\x00\xd6\xfb\xd4\xff\xe9\x00\xaa\x01u\xf8\x83\x01\xd7\x03\x8e\xf74\x05\x0f\x00\x01\xf9;\x06\xaf\xfde\xfc-\x04z\x025\xf9\x86\x04\xff\x02\x8e\xfa\xb8\x02\xe3\x03^\xfe\xc6\xff\xb3\x04\xa1\xfa\xd5\x03U\x02\x1c\xfdy\x01 
a0\xff\x88\x002\x01_\x01\x85\xf9\x98\xff\xd9\x00\xed\xfb|\x01\x9d\x04\xef\xfe\x93\xfc0\x03g\xfe\x96\xfcG\x00\xf7\xff\xe0\xff1\x006\x05\xb2\x00\x1d\xfb\xa1\x02F\x00\x07\xfc_\xff\xc1\x01d\x02+\xff\xff\xffc\x02\x03\xffP\xfd.\xff5\x04l\xfdU\xfa\xb4\x05\xd1\x01X\xfcn\x01\xd8\x03\xac\xfd\x96\xfa|\x06\xe6\xfd\xbf\xf7t\x05\xdd\x01\x90\xfd\xaa\x02l\x03\xdf\xfa\xaa\xfe\xc0\x01\xd1\xfc%\xff\x01\x01\xf7\xfe\x08\x01=\x01\xbd\x01\xa5\xff\x96\xfcQ\x03\xbc\xfe\xeb\xfc6\x00f\x01\xb3\x03N\xfe%\x00\xfc\x06\x83\xfc\x15\xf9[\x07\x11\x01n\xf7\xd7\x03\xe7\x07\xca\xfa\xa5\xfc\x17\x07\xbc\x01\xaf\xf6\xaa\x04\xa7\x02a\xf5(\x05\x99\x04[\xf9\xaf\x03\xb8\x00\xe7\xfb\xab\x02\x82\x02\x11\xfc\xc7\xfd\xd3\x05\x16\xfd)\xffu\x00\xcf\x00\xe4\x03\xd4\xfc\xce\xfex\x02&\x00h\xfe1\xff\xc5\x01c\xff^\xfek\x032\xff\xcf\xfa\xa4\x06;\x01\xbc\xf6\x85\x04\x08\x02\xab\xfc%\xfe\x07\x04\xec\xff\xe4\xfa\xf5\x03s\x00\x1a\xfd\x87\x00\xcd\x01\x94\xfcF\xff\x1d\x02\x12\x011\xfe\x89\x02%\x00\x9f\x00\xc4\xfc\xe5\xfen\x06\xce\xfc)\xfe\xac\x07\xa2\xfe6\xf9\x02\x07q\x00u\xfa%\x01\xa9\x05\xee\xf9\x98\x00\x88\tp\xf7\xde\xfb:\x08\xeb\xfe&\xf7\x9e\x06^\x05d\xf9\x11\xfe\xcb\x07\xa4\xff\xe3\xf7q\x06I\x00v\xf9\xa2\x02j\x04w\xfc\xcc\xfek\x04\x1e\xfe:\xfb\xa5\x04>\x01\x83\xf9B\x02P\x02c\x01_\xfcA\x03\x8f\x02 \xf9\xf7\xff|\x03/\xfd\xca\xfd(\x07G\xfb\xba\xff\x97\x04e\x00d\xf9\x85\xfe\x86\x06^\xfb\xef\xff\xd1\x05\xd1\x00\xb1\xfa\x81\xffy\x04\x06\xfa\x83\xfd\x97\x04\xae\xfe\x99\x01\xd2\x02;\xfb\x13\x02T\x00\xf8\xf9L\x04\xdb\xfe$\xfc\xbc\x05\xdd\x03\xa7\xf6\xf4\x05\xc7\x05d\xf3\xfa\x01\x0e\x06\xf9\xf9\xa1\xfe\x98\x07:\x02\xb0\xfb_\x04\xb7\xfek\xf86\x032\x03\xac\xfei\xfeH\x05a\xfd\x10\xfeB\x05}\xf8\xe6\x01X\x025\xf7\xd3\x06v\x02\x9c\xfbc\xff\xab\x05/\xf9\xaf\xfa~\x08s\x03\xa5\xf5\xcc\x01\xc9\n\xbe\xf4\xf6\xfdM\t.\xfc\x1d\xf8\xb2\x07\xda\x01\xee\xf7L\x03\xc2\x04\xd0\xfcl\xfa\xea\x04\xff\x04q\xf5\xf1\x05\xd8\x03\x04\xf8\xba\x01\x97\x08s\xf8\xec\xfa\x03\x11\xa1\xf5\xa9\xf8Y\rc\x00\x81\xf1\xf0\x08\xd1\x07\xa2\xf6\xd4\xff`\x06?\x02>\xf5Q\x03~\x04\xd0\xfc\x8d\xfd%\x03\xfd\x05j\xf8\xcb\x02\xb2\x02\xbd\xf8\xd6\x01\x1d\x02\x15\x01@\xfc\xe5\x03~\x01D\xfa\xe2\x02}\x04%\xf6n\xff_\n\xb3\xf7\x86\xfc\xd3\x0b(\xfb\xbe\xf7r\n\x84\x01y\xf3+\x05\xf7\x07v\xf1\xb4\x01\xf8\x0fH\xf6\xa5\xf7\x0c\x0e\xca\xfe/\xf46\x06R\x01;\xfb\x8d\xff\xc2\x05\xb2\x03\xe8\xf9D\xff\x91\x06u\xf9b\xf9c\x0c\x97\xfc\xdf\xf5P\n\xce\x06s\xf6\x17\xfd\xe6\n\xe9\xfc\xf5\xf6\x98\x05\x96\x05\x90\xf6\xbc\x00\xb7\x0eU\xf5O\xfd\xbf\x0b\xf2\xf8\x0e\xfd\xd3\x034\x00B\xffM\xffT\x05\xa7\xfe\r\xfb\x8b\x05>\x03\x01\xf7|\xfe\x99\t+\xfb\x81\xfbc\t\xe5\xfd\x97\xf9S\x04x\x03Q\xfa\xd3\xfc\x12\t-\xfd\x9d\xf8\xd4\x03\xff\x07\xa3\xf9`\xfb\xf6\x07-\x01\xda\xf4\x9d\x05\x15\x06q\xf7\\\x02\xea\x05)\xfc(\xf7\x1a\x07\xc7\x05\x1c\xf6\xdd\xff\xf8\n\xb8\xf4\'\xfen\x08\x90\xfc\xb1\xf8{\x01\xf8\t\xca\xf7\x7f\x00r\x07E\xfb\xea\xf8\xc7\x04\xe0\x06!\xf9\x05\x03\x17\x06:\xfb\xb0\xfa\xc8\x06\xe9\x01\xcc\xf8\xd7\x02\xe6\x05\xd5\xfa\x13\x00\xe0\x05{\xf9J\xff\x9a\x04\x0e\xff\xf2\xf8\xf1\x06\xd4\x04\xdc\xf7\x87\x01\xce\x04\x9b\xfa^\xfdS\x05 
\xfc\xe8\xfcd\x04\xe2\x04\x8b\xfb\x16\xfb\x84\x06\'\xfc\x97\xfc\xf4\x05\xc8\xfe\xb0\xfc\xc5\x00\xdd\x05z\xfb\xd7\xfcj\x03\xf7\x01\xbf\xfaR\x00{\x04\xa4\xfd\xf9\xfe7\x03\x96\x01\xc1\xf8\xee\x02\xb0\xfe\x83\x01Y\x01\xba\xfa\x0e\x05\x86\xff\xb8\xfa\xee\x06\xdb\xff\xf9\xf2\x80\n\xed\x05G\xf2\xc3\x02+\x0b\xfa\xfa\xe1\xf5\x0b\t\xb9\x038\xf6\x01\x01:\x08g\xfc\xa8\xf7\xac\t\x81\x03l\xf5\xa8\x02\xe5\x08W\xf6\xbe\xfbd\x0bg\xff*\xf6\xd5\x04y\x06\x02\xfbe\xfe\xea\x03\x19\x02G\xf6\xa0\xffK\r$\xf8\x12\xf9\xf3\x0e\x98\xfe\x19\xef\x14\n\xc8\x08w\xf2X\xffs\x08h\x00\x9f\xf8;\x01\xae\x08\xe5\xfb\x93\xf4\xf3\n\xef\x05\x97\xedH\n\x9d\x08\x97\xf4!\xfe\xcd\x071\xff=\xfa\xf7\x04\x80\x00B\xfb\xda\xffi\x05W\xffA\xfc|\x03\xf4\x01?\xfa\x03\x00\x01\x06W\xfd\xb7\xfb\xf2\x05\x9c\x01n\xf9\xee\x02(\x06^\xf5\xb6\xff\n\n}\xfa\xb1\xfc\t\x04A\x04-\xfb{\xfb\x16\x08t\xfc\xab\xf9M\t\xd0\xfd\xd1\xfb\x95\x02:\x07_\xfb\x01\xf9\xb4\x08\xfb\xfa\x1a\xff5\x01\xc3\x00\xde\x02U\xff\xa0\xfd\xf6\x02X\xfe\xfd\xf9\xe2\x05\xb7\x022\xf8\xd3\x03\x9e\x03)\xfb\x9d\x03>\x02\x12\xfb/\xfdI\x04\xd4\xff\x94\xfc\xa9\x01-\x06\xe0\xf9K\xfcF\x0b\xc7\xfd\xff\xf4e\x04\xba\x05\xb5\xf8\x9e\xff\x0c\tE\xfd\xa8\xfb\x86\x03r\x01-\xfa\xd1\xff\xc1\x04a\xff\x14\xfd0\x04\xeb\x03$\xf8\x1b\x04E\xfc*\x00Q\x04r\xfc\xd1\x00X\x02\xcf\x00\x9e\xfc\x03\x01k\x00d\xff~\xff\xe2\xff\xeb\x03c\xff\xe0\xfb-\x04\x9a\xff\xe1\xfc\xa0\xff\xe3\x05&\xfdA\xfb$\x08\x12\xff\'\xfa\x8a\x00x\x07\x9d\xf8\x92\xfe\xd0\x07\x9d\xfb\x8c\xff\xcc\x03\x91\xfd\xb5\xf9\xf9\x06\x08\xfd\xa5\xfa}\x059\x03\x95\xfb\x15\xfe>\x03\x90\x00\xc7\xfct\xfe\xaa\x06\xf3\xfc_\xfe\x94\x04\x7f\x00\xd5\xf9;\x04\xda\x03:\xf9\x95\xfe[\x08\xf1\xfb\x05\xfb\x05\x03\xcb\x03\xad\xfd\xce\xf93\x04\x0e\x02/\xfe\'\xfdx\x06F\xfb$\x00\xb1\x01!\xf8\x95\x04V\x06\x95\xfb\xb4\xfa\xca\x05\x94\x00\x97\xfb\xf0\x00\x84\x04\'\xfd\xdc\xfb\xe9\x05[\x03\xf2\xf8\xd6\xfe\x9a\x07G\xfdJ\xfb\xb5\x04e\xfev\x01\x80\x00\xc9\xfc\xdb\x015\x04\xbd\xfc%\xf9\xbb\x06\xda\x00\x85\xff\x17\xfc\xa0\x03g\xfd\xe6\x01\xea\x04\x15\xf5\xb3\x02\xa9\x03}\x00\x7f\xfaR\x03\xe4\x05\x9c\xf8\x14\xfe\xed\x07z\xfa\x07\xfb\xd5\x07\xd5\x00\x08\xfc\x07\xfc/\x05\xc8\x04\xf6\xf9R\xfa\xb6\x08\x90\xfe6\xf9\x01\x02\xeb\x08\x1c\xfc\xb3\xf3\xdc\x0bi\x05?\xf5~\xff~\tn\xfai\xf7\xb4\nJ\x02\xa4\xf9\x97\x03\x02\x010\xfc\xd8\xfdJ\x03y\x04i\xf9\xe8\xfcV\x0c,\xfb\xab\xf6\r\x0c\xe3\x01M\xf2\'\x05D\x06\x8f\xfap\x00,\x02\x03\x04u\xf8\xc0\x00\xbd\x03\xf1\xfc\x1a\xfef\x01\xd7\x04\xa6\xfa\xc5\x02\xc6\x001\xfd\xd4\xfd\xec\x03M\xfe\xee\xfcx\x04\xb7\x03q\xf8\xb8\xfer\x0bi\xf5\xc8\xfd\x0b\x07[\x01\xb7\xf7V\xff\xe3\x0c\xfb\xf9\xec\xfa~\x02\x19\x03\x94\xfa\xab\xfei\x08-\xfbj\xfc\xb4\x02\x04\x06\xcd\xfb\xaa\xf9k\x08\x88\xfe\x87\xf6W\x05p\x08\x8a\xf8`\xfd\xd9\x07J\xfd\xa1\xfc\x1f\x01\x98\x00\xf9\xfe\xcc\xfe\xde\x00s\x04\xff\xfd\xc5\xfeg\x03#\xfb\xf5\xfe\xe8\x03\xb5\xfc9\x01#\x01\xf7\xff\x99\x03\x94\xfa\xf4\xff]\x05\x12\x00\xbd\xf6a\x03\xe6\x07\xc8\xf9\x87\x00\xfc\x01\xfb\x02\xa6\xfc\xa9\xfb\x9c\x07\xc3\xfd\x94\xf9\xb0\x05\x7f\x05\x15\xf9q\xfc\x8e\t\xaa\xfd\xd2\xf7\x9d\x06\xa0\x02\x17\xf7\xb8\x01~\x08\xf1\xfb8\xfa|\x03R\x05\xa2\xfaQ\xfc9\x06\x8e\x00\x15\xfa\x87\x01{\x02<\x00\x19\xff~\x01\x0c\xfdJ\x03\xf4\xfc\x9b\xff\xb3\x04H\xfc\xa3\x03\x8b\x00$\xfe\xca\xfbm\x01!\x06\xda\xfb\xc3\xfb\x89\x05\x00\x00\xb0\xfb\xd4\xff\xf2\x03d\xfb\xd3\xfb\xdb\x08\x84\xffd\xfa\xcd\x03K\x054\xf7F\xfd\x97\tx\x01e\xfa\xd1\xff\x12\x06l\xfd\xc4\xfc\xaf\x02\x87\x01Y\xfc\xad\x00t\x02\xe3\xff\x88\xffv\xfeL\x01\xf5\xfe\xe9\xfe\xe3\xffG\x03\xa4\xfc\x05\x00I\x03u\xfe\xb2\xfd\x91\xffr\x02\xc1\xfcB\xfd\xde\x04H\x03\xc8\xfaL\x00\xf2\x03-\xf9\xd6\x0
1\xe6\x06\xbd\xf9Z\xfd\x08\x05\xe1\x02\x9d\xf8\xfd\x02\x03\x05\xf3\xf9;\xfc\xca\x08\x1d\x00\xb5\xf6_\x06\x85\x06O\xf8\x00\xfb\xfe\t\xee\xfd\x9b\xf9\xe4\x03\xc6\x04\xc0\xf9[\xffC\x06\xa6\xfd\xd8\xfa\x81\x02F\x04\xed\xfa\xec\x00\xd9\x02!\xfe\x02\x00\xc7\xfeF\x00^\x017\xfdK\x02\x98\x00p\xfc9\x00\x14\x04\xa4\xff\xf7\xfbX\xff\xb5\x04\x0b\x00\r\xf8a\x05\xb2\x04\xea\xf8n\xfd\xb0\x07\xc3\x00\xd3\xf8i\x02\x9c\x04"\xfb\xcd\xfc\x98\x06q\x00*\xf9w\x03\xa9\x05\x9e\xf8z\xfe\x85\x05J\x00\xa5\xfa\xa0\x00\xd4\x03\xf0\xfd\x93\xfe\xf8\x01l\x00\x8f\xfcg\x00~\x03\x8a\xfdC\xfd\x08\x05\xdc\xfen\xfb\x17\x02\x8b\x05P\xfb3\xfc^\x07{\xfe\xb0\xfa\x90\x02\xb5\x04\x01\xfdX\xfcs\x04\xa7\x01\n\xfcL\x00`\x01\x06\x01\xc6\xfdD\xff\xd6\x04\x02\xfd\xd0\xfeO\x01\x80\x00\r\xff\x9e\xff\x9e\x00\xe8\xff\xd5\x00\xfb\xfd\t\x020\x00\x8c\xfd\xc0\xfe\x0f\x04\xbd\xfe\x19\xfc\xfa\x02\xfe\x03\x02\xfd\xf3\xfa\x9a\x06\xdf\xff\x1e\xfa\xb7\x00.\x05>\xfeb\xfe8\x01K\x00\xa9\xff\x8a\xfeX\x00\xbb\x00}\xffB\x00:\xff\xad\xff\xfe\x02D\x00#\xfcM\x00Q\x03\x15\xfcX\x00\xbb\x03N\xfd\x84\xfd}\x03\xfd\x03H\xfbe\xfc&\x05\xa5\xff\xeb\xfb\x95\x01v\x03\xc5\xfe\x8c\xff\x02\x01\xaf\xfe\x03\x00\x0e\x01\xa5\xff\x98\xfd\xbd\x01\xd0\x03/\xfe\x1f\xfe\xb2\x02\x08\xfe\xce\xffS\x00\x96\x00c\xffY\x00\x80\x01\x16\xff\xe6\xffg\x00\xb2\x00\x17\xfeh\x00\xfb\x01:\xff\xaa\xfd\x0e\x03)\x01\xea\xfc\xec\xff\xfe\x03\xe3\xfc\x82\xfe\x92\x05\x84\xfd\xce\xfb\x0e\x03l\x05u\xfa\x85\xfd\xe2\x05\x8f\xff\xf8\xfb\x0f\x01\xb0\x02\x02\xfc\xe1\xfe\x08\x02\x8f\xff~\xfeI\x01!\x01\x9e\xfd\'\xfe\r\x03\xa9\x01\xaf\xfc\x82\xff\x10\x04\xa1\x00\xf4\xfc\xf4\x01A\x01\xd3\xfeY\xff \x01\x1b\x00\x9c\xfe\xc1\xff\x00\x01\xd0\xfe\xa8\xfe"\x03\xfd\xfdP\xfd`\x01\xa3\x02\xbe\xfe3\xfd\xda\x011\x01\xda\xfc\xdc\xfe\xaf\x02\xbd\x01\x0c\xfe\xb3\xfdQ\x01n\x03.\xfdc\xfe\xb4\x02\x7f\x00\xa6\xfe\xda\x00\x8f\x02~\xfd\x16\xfe#\x02\x98\x02`\xfdh\xfe\x8c\x02r\x01\x11\xfc\x18\x01%\x03\x80\xfd\xa6\xfeN\x00\xef\x01e\xfe.\x01\x18\x00\xfc\xfde\x00\x8c\x01\xc6\xff\x9c\xfe\xdb\x00\xd1\xff\x89\xff\x01\x01\x0b\x01\x81\xff\xfc\xff\x93\xfe1\x00\'\x01\x03\xffA\x01\x1d\x00\xc2\xfe\x03\xff\x03\x01\x9e\x01-\xfe}\xfe\x90\x00\xe9\xfe\x8e\x00\xee\x01\x8e\xff\xd3\xfd\x89\xffg\x01\xec\xffE\x00\xea\xff(\xff,\xff\xa4\x006\x01\xa9\xff\xb3\xff\xcc\x01\xc8\xfd7\xfe\xed\x02+\x00\xc0\xfe\x7f\x00\xf3\xff@\xff\xc5\x01\xd5\xff\xe0\xff\xba\xff)\xff2\x00>\x00\\\x01c\xff\xfb\xffA\x00Y\xfe\x9e\x00\x8f\x01\xba\xfe\xa7\xfe\xe1\x00\xf7\x00\x9d\xffX\xff\xe7\x00\x8a\xff\xd6\xfe\x12\x00\xaf\x00f\x00g\x00f\xff,\xfe\xdf\x00\x89\x01:\xff\x9f\xfe\xde\xffF\x01A\xff\x02\x00\xdb\x00\x94\xff\xad\xfem\xff\x14\x01\xc8\xff\xc0\xff\xd3\x00\x81\xff\xbd\xfe`\x00-\x01s\xff\x8d\xfe\xe5\xff\x91\x00/\x00\x16\x00C\x00-\x00\xe2\xfen\xff\xb7\x00\xb8\xff\xb4\xff\x82\x00\x98\xff\x08\x00U\x00\xcb\xff\x11\x00I\x00V\xff\xfc\xfe\xc1\xffe\x01{\x00\x95\xff\xe7\xff\xdb\xff1\x00\x0e\x00\xd1\x00\xc3\xffb\xff\xde\x00r\x00B\xff\x80\x00\xd1\x01T\xff\xa0\xfe\xae\x00\xf9\x00p\xff\x03\x00\xba\x00\xd4\xfe\xef\xffz\x01T\xff\xb8\xfe\xfb\x00"\x00\xf1\xfeA\x00\x12\x00\xde\xff+\x00Y\x00i\xff*\xff\x0b\x01\x95\x00\xab\xfe\x91\xff\x0c\x01W\x00\x0e\xfe\x8d\x00T\x021\xff\xfb\xfe\x9b\x00S\x00l\xff\xfe\xff\xd4\x00s\x00\xfc\xff\x01\x00\xbe\xff\xec\xff\x87\xff\xd9\xff<\x00i\xff\x00\x00\x7f\xff\x93\x00\xf2\xff\xad\xfe}\x00Y\x00\xd2\xfe\x96\x00E\x01d\xff\xe1\xff\x0e\x01\x1d\x00\x92\xff\x85\x00\x86\x00\xcd\xff\x98\xff\\\x00\xbb\x00Y\xff\x87\xffB\x01J\x00\x9a\xfe\xbd\xff\x19\x01\n\x00\xc0\xfe\xa5\xff\x96\x01?\x00\xa6\xfeU\x00\x95\x00\x06\xff\xca\xff\xad\x00\x91\xff&\x00a\x00\x00\x00 
\x00y\x00\x83\x00\x96\xffb\xff\x89\x00^\x00}\x00W\x00\xe8\xfe\x11\x00\xaf\x00\xb0\xff\xb5\xff%\x00\x01\x00\xcb\xffp\x00#\x00L\xff\x10\x00\x9b\x00\xb9\xffv\xff\xf3\xff\xc1\x00\x07\x00\xeb\xff1\x00\xa1\xff\xd7\xff\xd3\x00\xb6\xffL\xff\xa7\x00\xd1\x00\xdd\xff\xb2\xfe\xd4\x00\xd4\x00x\xfe/\xff\xb5\x00\xd1\x00(\xffQ\xff\xc1\x00\x00\x00n\xfe2\x00\xff\x00~\xff\x84\xfe\xb8\x00\x82\x01\x19\xff\xf0\xfe[\x00\xdd\x00\x86\xff(\xff\xfc\x00\x95\x00\xe6\xfe\n\x00\xfe\x00\x14\x00e\xfe\xfd\xff\xcc\x01\xa4\xff}\xfe\x91\x00A\x01\x80\xff\xba\xfej\x00\xe3\x00\xf3\xfe\xd7\xff_\x00\xc3\xff\xfb\xff\x00\x00\xe7\xff\xc3\xff\xae\xff\xff\xff\x08\x00\xf6\xff\x1f\x00\x89\xff\xf2\xff\x9e\x00\x19\x00A\xff\x84\xff\xbc\x00_\x00\xac\xff\x8d\xffM\x00m\x00\xf8\xff\xab\xff\xca\xffX\x00\xc7\xff\xbe\xff\x99\x00\xce\xff\\\xff\x84\x00_\x00<\xff\xa4\xffX\x00\xb6\xffS\xffA\x00;\x00`\xff\xbf\xffq\x00\xec\xffZ\xff\r\x00o\x00\x86\xff\xb4\xff{\x00a\x00\xc7\xff\xff\xff`\x00\xaa\xff\x08\x00\x8a\x00q\xff\x8c\xff\xa5\x00M\x00P\xff\xb3\xff\x95\x00\xed\xff2\xff\x02\x00\xe1\xff\x91\xff\xbb\xff\x99\x00\xf6\xffA\xffj\x00p\x00^\xff\xa4\xff\xdc\x00s\x00k\xff\xce\xff\xc3\x00o\x00\x7f\xff\r\x00\xaa\x003\x00\xb3\xff8\x00h\x00\xfe\xff\xd8\xff\xf7\xff\xf1\xff\xd4\xff\x13\x00-\x00\xf9\xff|\xff\xd5\xffg\x00\xf1\xff\x9b\xff\xbe\xff2\x00\xfb\xff\xd6\xffT\x00B\x00\xb2\xff\x87\xff\xf1\xffa\x00.\x00\xf5\xff\x02\x00\xfe\xff\x14\x00\x1d\x00\x19\x00\x14\x00\xff\xff\xd5\xff\xc1\xff=\x00G\x00\x16\x00\xc5\xff\x9f\xff\xe1\xff\'\x00\x0f\x00\xc2\xff\xba\xff\xdd\xff\x06\x00\x19\x00\t\x00\xd2\xff\x0b\x00\xec\xff\x01\x00-\x00(\x00N\x00\x0b\x00\x0e\x00\x0c\x00^\x00V\x00\x00\x00\xe0\xff\x16\x00N\x006\x00\x02\x00\xff\xff\x13\x00\xf9\xff\xda\xff#\x00;\x00\xfe\xff\xc2\xff\x06\x00 \x00\t\x00*\x00\xe8\xff\x00\x00=\x00\xf4\xff\xdc\xff>\x00M\x00\xf5\xff\r\x00h\x00R\x00\xcd\xff\'\x00\\\x00\xd5\xff\xed\xffB\x00&\x00\xd6\xff\xfe\xff+\x00\xe0\xff\xb2\xff\x10\x00(\x00\xc7\xff\xeb\xff\x0b\x00\xfc\xff\xef\xff\t\x00\x11\x00\xdd\xff\xd3\xff\x16\x000\x00\xcf\xff\xeb\xff5\x00\xea\xff\xf6\xff\x08\x00\xde\xff\xf4\xff\x17\x00\r\x00\xe0\xff\x06\x00!\x00\xe5\xff\xd8\xff\xff\xff\xd1\xff\xbf\xff\xf1\xff\xff\xff\xee\xff\xe8\xff\n\x00\xe1\xff\xf1\xff\xf7\xff\xf6\xff\xda\xff\x12\x00 \x00\xee\xff\xf6\xff:\x00!\x00\xb7\xff\x12\x00\x0f\x00\xf6\xff\x16\x00\r\x00\x1a\x00\x08\x00\x01\x00\x06\x00\x0e\x00\x17\x00\xdd\xff\xdf\xff\'\x00\x05\x00\xf2\xff\xf2\xff\xf9\xff\xea\xff\xe6\xff\x0e\x00\xe2\xff\xcb\xff\x00\x00\x1e\x00\xdc\xff\xdd\xff\x0b\x00\xfa\xff\xee\xff\xdd\xff\xff\xff\xeb\xff\xe9\xff\x05\x00\xdc\xff\xef\xff\x02\x00\xe5\xff\xdf\xff\xf9\xff\x13\x00\xe7\xff\xe1\xff\xd8\xff\t\x00\x11\x00\xcb\xff\xe7\xff\x04\x00\xde\xff\xc4\xff\xf6\xff\x04\x00\xe0\xff\xdf\xff\x1e\x00\xf2\xff\xcb\xff\x1d\x00A\x00\xba\xff\xbe\xff*\x00\x04\x00\xee\xff\xff\xff\x01\x00\xf5\xff\xf1\xff\xfc\xff\x0e\x00\x1b\x00\xf0\xff\xcf\xff\t\x00(\x00\x1a\x00\xf8\xff\xf8\xff\xfa\xff\x05\x00\x0e\x00\xf3\xff\xe0\xff\x00\x00\xf5\xff\xd8\xff\x00\x00\x16\x00\x03\x00\xe7\xff\xd8\xff\xf6\xff\x03\x00\x08\x00\x0f\x00\x00\x00\xf7\xff\x1e\x00\t\x00\x1b\x00\r\x00\xe9\xff\xf7\xff\x0c\x00\x13\x00\x05\x00\x17\x00\xdb\xff\xc5\xff\xe5\xff\x11\x00\xfc\xff\xbb\xff\xeb\xff\t\x00\xe3\xff\xc2\xff\x1a\x00\x1f\x00\xb3\xff\xb9\xff\x04\x00\x16\x00\xdd\xff\xe9\xff\xef\xff\xe1\xff\xf3\xff\xf8\xff\xf6\xff\xfc\xff\xf6\xff\xf0\xff\xf3\xff\x00\x00 \x00\x11\x00\xf5\xff\xf8\xff\x11\x00\r\x00\x16\x00\x0f\x00\x14\x00\xec\xff\x1e\x00)\x00\xf3\xff\x05\x00\xfd\xff\x00\x00\xed\xff\xfb\xff\x13\x00\t\x00\x03\x00\x08\x00 \x00 
\x00\xf2\xff\x03\x00$\x00\xf4\xff\x07\x00!\x00\t\x00\xe6\xff!\x006\x00\xec\xff\xf9\xff#\x00\x17\x00\x01\x00\x12\x003\x00"\x00\xf7\xff\x1a\x00<\x00\'\x00\n\x00\n\x00+\x00+\x00\x14\x00\x18\x00=\x00\x12\x00\xf3\xff-\x004\x00\xfc\xff\x10\x00\x1f\x00\xeb\xff\xf4\xff\x14\x00\x16\x00\xe9\xff\xe7\xff\x00\x00\xe5\xff\xf3\xff\x08\x00\xf0\xff\xdb\xff\x01\x00\x19\x00\xf2\xff\xef\xff\x0f\x00\x08\x00\xed\xff\x12\x00\x0e\x00\xf2\xff\x18\x00"\x00\xf4\xff\xef\xff:\x00\x11\x00\xf5\xff\x0f\x00\x08\x00\x15\x00\x04\x00\x07\x00\x02\x00\x07\x00\x11\x00\xff\xff\xf7\xff\xff\xff\x05\x00\xf9\xff\xe2\xff\xfa\xff+\x00\x1a\x00\xf3\xff\xf3\xff/\x00\x1a\x00\xde\xff\xed\xff0\x00\x10\x00\xf5\xff\xf5\xff\x15\x00\x00\x00\xc5\xff\n\x00\xf6\xff\xe4\xff\xf7\xff\xeb\xff\xed\xff\xf3\xff\xf8\xff\xe4\xff\xec\xff\xf8\xff\xe5\xff\xda\xff\x05\x00\x00\x00\xf3\xff\xea\xff\xf7\xff\xfc\xff\xec\xff\x01\x00\xf6\xff\xe6\xff\xe9\xff\xfe\xff\x10\x00\xfa\xff\xef\xff\xf8\xff\x01\x00\xf0\xff\xe1\xff\xff\xff\xf9\xff\xe6\xff\xf3\xff\xf3\xff\xe2\xff\xe3\xff\xf6\xff\xf5\xff\xe4\xff\xea\xff\xf6\xff\xf0\xff\x00\x00\xf8\xff\xf7\xff\x02\x00\xff\xff\xf9\xff\xff\xff\x10\x00\x00\x00\xee\xff\xea\xff\x14\x00\x0e\x00\xe4\xff\r\x00\x13\x00\xd1\xff\xd9\xff\x15\x00\xf6\xff\xd3\xff\xf0\xff\x01\x00\xee\xff\xea\xff\xf6\xff\xf5\xff\xf1\xff\xde\xff\xe7\xff\n\x00\x00\x00\x02\x00\xf0\xff\xf0\xff\x05\x00\x12\x00\xfe\xff\xe4\xff\xfa\xff\x11\x00\xf8\xff\xe9\xff\xfc\xff\r\x00\xe9\xff\xe4\xff\xec\xff\xea\xff\xe0\xff\xf4\xff\x02\x00\xd1\xff\xcf\xff\x00\x00\xff\xff\xd1\xff\xdf\xff\xf6\xff\xe5\xff\xdd\xff\xf1\xff\x0b\x00\x06\x00\xe1\xff\xe3\xff\xff\xff\x18\x00\x08\x00\xf4\xff\r\x00\x1e\x00\n\x00\xfa\xff\x19\x00\x18\x00\x00\x00\xf9\xff\x0c\x00\x19\x00\x08\x00\x01\x00\x01\x00\xfc\xff\xf5\xff\xfd\xff\t\x00\xfa\xff\xeb\xff\xeb\xff\xf5\xff\xf3\xff\xf0\xff\xf0\xff\xe4\xff\xdc\xff\xf2\xff\xf3\xff\xee\xff\x06\x00\x00\x00\xe1\xff\xf5\xff\x12\x00\xf7\xff\xed\xff\x01\x00\x0b\x00\xef\xff\xf7\xff\x17\x00\x05\x00\xf8\xff\xf9\xff\x10\x00\x0c\x00\xfd\xff\xff\xff\x0f\x00\x0e\x00\xf8\xff\x06\x00\x0e\x00\xff\xff\x0f\x00\x15\x00\x0b\x00\r\x00\x15\x00\n\x00\t\x00\x16\x00\x16\x00\x0e\x00\x06\x00\x13\x00\x18\x00\x11\x00\x0f\x00\x0f\x00\x12\x00\x16\x00\x16\x00\x1a\x00\x1b\x00\r\x00\x1d\x00"\x00\x17\x00\x15\x00$\x00\x17\x00\x05\x00\x1b\x00\x19\x00\x13\x00\x17\x00\x12\x00\x10\x00\x10\x00\x1b\x00\x1d\x00\x02\x00\xfc\xff\x1c\x00\x18\x00\x04\x00\x0c\x00\x0e\x00\xfe\xff\n\x00&\x00\xff\xff\xf3\xff\x1a\x00\x0c\x00\xe5\xff\x07\x00\x16\x00\xe5\xff\xf5\xff\r\x00\xee\xff\xf1\xff\x10\x00\xf9\xff\xf2\xff\t\x00\x11\x00\xf5\xff\xfa\xff\x0b\x00\xf7\xff\xf8\xff\xfc\xff\x03\x00\x05\x00\x03\x00\xfc\xff\x00\x00\x13\x00\x01\x00\xf4\xff\xf7\xff\x03\x00\xfc\xff\xff\xff\xfc\xff\xff\xff\xf0\xff\xff\xff\x11\x00\xf4\xff\xee\xff\x05\x00\xfe\xff\xee\xff\x04\x00\x01\x00\xf0\xff\xf6\xff\x07\x00\xf8\xff\xed\xff\x00\x00\xf7\xff\xed\xff\xfe\xff\xff\xff\xf2\xff\xf9\xff\t\x00\x06\x00\xed\xff\xfd\xff\x06\x00\xf5\xff\xf8\xff\x00\x00\xf7\xff\xf8\xff\xf3\xff\xf3\xff\x00\x00\xf6\xff\xe9\xff\xfe\xff\xfb\xff\xe4\xff\xe9\xff\x00\x00\xf7\xff\xe8\xff\xf0\xff\x04\x00\xf8\xff\xe9\xff\xfd\xff\xf7\xff\xf3\xff\xf2\xff\x04\x00\xfa\xff\xf9\xff\xff\xff\xfb\xff\xf6\xff\x03\x00\x02\x00\xf1\xff\xf9\xff\xfc\xff\xed\xff\xe8\xff\xf4\xff\xe1\xff\xee\xff\xef\xff\xe3\xff\xe7\xff\xef\xff\xe1\xff\xe3\xff\xf5\xff\xea\xff\xe4\xff\xf1\xff\xf3\xff\xe8\xff\xe9\xff\xf2\xff\xee\xff\xf0\xff\xf9\xff\xee\xff\xec\xff\xfe\xff\xf0\xff\xee\xff\x01\x00\xfc\xff\xe8\xff\xf4\xff\x05\x00\xf0\xff\xe6\xff\x04\x00\x05\x00\xf6\xff\xf5\xff\n\x00\xfc\xff\xf1\xff\x04\x00\xfd\xff\xeb\xff\xf9\x
ff\x02\x00\xf3\xff\xf9\xff\x00\x00\xeb\xff\xed\xff\t\x00\xfc\xff\xe4\xff\xff\xff\x10\x00\xea\xff\xeb\xff\x11\x00\xfc\xff\xe2\xff\x02\x00\x12\x00\xf3\xff\xf7\xff\x0b\x00\x00\x00\xf7\xff\x07\x00\x01\x00\xfe\xff\x05\x00\x04\x00\x00\x00\xfe\xff\x04\x00\x07\x00\xfb\xff\x02\x00\x08\x00\x00\x00\x01\x00\x01\x00\x04\x00\xfd\xff\xfc\xff\x04\x00\xff\xff\xfb\xff\x04\x00\x04\x00\xfc\xff\xfc\xff\x07\x00\xf9\xff\xfc\xff\x11\x00\x01\x00\x00\x00\r\x00\x0c\x00\xfc\xff\xfa\xff\x07\x00\x06\x00\xfd\xff\x00\x00\x04\x00\x04\x00\xf7\xff\x02\x00\x05\x00\xf5\xff\xf7\xff\x07\x00\x04\x00\xf6\xff\x00\x00\x10\x00\x0c\x00\x00\x00\n\x00\x11\x00\x00\x00\xff\xff\x1a\x00\x0e\x00\x04\x00\x1a\x00\x0c\x00\x08\x00\x12\x00\x10\x00\t\x00\x0b\x00\x0e\x00\x0f\x00\n\x00\x0c\x00\x0f\x00\x0c\x00\x0c\x00\x13\x00\n\x00\t\x00\x17\x00\x12\x00\x0c\x00\x13\x00\x15\x00\x08\x00\r\x00\x17\x00\x05\x00\x04\x00\x14\x00\x07\x00\xff\xff\x10\x00\x11\x00\x02\x00\r\x00\x1b\x00\x01\x00\xfd\xff\x17\x00\x13\x00\xfe\xff\x07\x00\x11\x00\x04\x00\x01\x00\x0b\x00\x06\x00\xfa\xff\x04\x00\x05\x00\xfe\xff\xf8\xff\x05\x00\x0b\x00\x02\x00\xf8\xff\xfc\xff\x13\x00\x02\x00\xf7\xff\x02\x00\x03\x00\xf7\xff\xff\xff\x00\x00\xf6\xff\xf7\xff\x02\x00\xff\xff\xf6\xff\xfb\xff\xfe\xff\xf7\xff\xf4\xff\x00\x00\xfb\xff\xf2\xff\x00\x00\x03\x00\xfc\xff\xff\xff\x03\x00\xfc\xff\xf9\xff\x04\x00\x00\x00\xfc\xff\x01\x00\x06\x00\x05\x00\xfc\xff\x00\x00\x05\x00\xfd\xff\xff\xff\x08\x00\xfd\xff\xf9\xff\x00\x00\x03\x00\xf7\xff\xf0\xff\xfb\xff\xfd\xff\xf4\xff\xf0\xff\xf2\xff\xf4\xff\xf1\xff\xf0\xff\xf7\xff\xf2\xff\xf1\xff\xfb\xff\xfd\xff\xf4\xff\xf9\xff\x00\x00\x01\x00\xf7\xff\xfc\xff\x07\x00\xf7\xff\xf0\xff\x05\x00\x03\x00\xef\xff\xfc\xff\xff\xff\xef\xff\xf2\xff\xfa\xff\xf1\xff\xf0\xff\xf6\xff\xfb\xff\xf5\xff\xf2\xff\xf7\xff\xf6\xff\xf3\xff\xf6\xff\xf6\xff\xf7\xff\xf9\xff\xf5\xff\xed\xff\xf3\xff\xf3\xff\xf6\xff\xf4\xff\xf4\xff\xf5\xff\xf2\xff\xf3\xff\xf9\xff\xf7\xff\xef\xff\xf6\xff\xfa\xff\xee\xff\xef\xff\xfb\xff\xf9\xff\xf2\xff\xfa\xff\xfd\xff\xf5\xff\xf3\xff\xf7\xff\xf8\xff\xef\xff\xf8\xff\x00\x00\xf9\xff\xf2\xff\xf7\xff\xfd\xff\xf5\xff\xf8\xff\xfc\xff\xfa\xff\xf4\xff\xfd\xff\x01\x00\xf1\xff\xf5\xff\x01\x00\xfb\xff\xf2\xff\xfd\xff\xff\xff\xf3\xff\xee\xff\xfe\xff\xfd\xff\xf1\xff\xfd\xff\xff\xff\xf6\xff\xf8\xff\x00\x00\xf5\xff\xf4\xff\x00\x00\x04\x00\xf9\xff\xfb\xff\x07\x00\x00\x00\xf9\xff\x02\x00\n\x00\xfc\xff\xff\xff\x0b\x00\t\x00\xfc\xff\x07\x00\x0f\x00\xfc\xff\xf9\xff\x08\x00\t\x00\xfe\xff\xff\xff\x02\x00\x01\x00\xfd\xff\x07\x00\x02\x00\xfb\xff\x05\x00\n\x00\x01\x00\xfd\xff\x03\x00\x0b\x00\t\x00\x04\x00\n\x00\n\x00\x03\x00\x02\x00\x03\x00\x02\x00\x02\x00\x03\x00\x00\x00\xff\xff\x00\x00\x00\x00\xfd\xff\xfb\xff\xff\xff\xff\xff\xfc\xff\xfa\xff\xff\xff\x06\x00\x03\x00\x00\x00\x00\x00\xfc\xff\xfa\xff\xfb\xff\xfd\xff\xf5\xff\xf8\xff\x01\x00\xff\xff\xff\xff\x00\x00\x03\x00\x02\x00\x00\x00\x02\x00\x06\x00\x04\x00\x06\x00\n\x00\x0b\x00\x0b\x00\r\x00\x10\x00\x0c\x00\x13\x00\x17\x00\x14\x00\x0e\x00\x17\x00\x13\x00\n\x00\x11\x00\x0e\x00\x06\x00\x06\x00\x11\x00\x0c\x00\x0b\x00\x13\x00\x12\x00\x08\x00\n\x00\x12\x00\x0c\x00\x06\x00\x05\x00\t\x00\x07\x00\n\x00\x06\x00\x00\x00\t\x00\x05\x00\x00\x00\x00\x00\xfe\xff\xfa\xff\xfe\xff\x02\x00\x00\x00\x00\x00\x02\x00\x03\x00\x01\x00\xff\xff\xfd\xff\xfb\xff\xf8\xff\xfd\xff\xfb\xff\xf9\xff\xfc\xff\xfc\xff\xf4\xff\xf1\xff\xfb\xff\xfd\xff\xf7\xff\xfc\xff\x00\x00\xf7\xff\xfb\xff\xff\xff\xfb\xff\xf9\xff\xff\xff\xfe\xff\xfb\xff\xfd\xff\xfe\xff\xf9\xff\xf9\xff\xfc\xff\xff\xff\xfc\xff\xfa\xff\xfc\xff\xfe\xff\xfd\xff\xfa\xff\xfa\xff\xfb\xff\xfb\xff\xfb\xff\xfd\xff\xfd\xf
f\xf9\xff\xf8\xff\xfb\xff\xfa\xff\xfc\xff\xff\xff\x01\x00\xfc\xff\xf7\xff\x05\x00\x01\x00\xfd\xff\x03\x00\x03\x00\xfd\xff\xfd\xff\x00\x00\xfb\xff\xf8\xff\xfd\xff\xfe\xff\xfc\xff\xfd\xff\xfb\xff\xfa\xff\xf5\xff\xf2\xff\xf5\xff\xf9\xff\xf4\xff\xf5\xff\xf6\xff\xf3\xff\xfa\xff\xf3\xff\xf5\xff\xf7\xff\xf3\xff\xf2\xff\xf5\xff\xf2\xff\xef\xff\xf1\xff\xf4\xff\xf5\xff\xf1\xff\xf6\xff\xf6\xff\xf0\xff\xf4\xff\xf7\xff\xf5\xff\xf3\xff\xfa\xff\xfb\xff\xf4\xff\xf7\xff\xf8\xff\xf8\xff\xf7\xff\xf8\xff\xfd\xff\xf7\xff\xf5\xff\xf5\xff\xf5\xff\xf6\xff\xf2\xff\xf3\xff\xf5\xff\xf7\xff\xf9\xff\xf6\xff\xf6\xff\xfa\xff\xfd\xff\xf9\xff\xfa\xff\xfe\xff\xfe\xff\xfc\xff\xff\xff\xfe\xff\xfa\xff\xfc\xff\xfb\xff\xfb\xff\xf9\xff\xfc\xff\xfa\xff\xf6\xff\xfc\xff\xfb\xff\xfd\xff\x00\x00\xfe\xff\xfa\xff\xf9\xff\xfc\xff\xfd\xff\xfe\xff\xfa\xff\xfb\xff\xfe\xff\xfc\xff\xf9\xff\xfb\xff\xfa\xff\xfe\xff\xfe\xff\x00\x00\xff\xff\xfe\xff\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x03\x00\x01\x00\x02\x00\x04\x00\x04\x00\x04\x00\x02\x00\x01\x00\x00\x00\x01\x00\x03\x00\x00\x00\xff\xff\xff\xff\xff\xff\x01\x00\x02\x00\x02\x00\x02\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00\x01\x00\x04\x00\x04\x00\x03\x00\x06\x00\x05\x00\x06\x00\x04\x00\x07\x00\x07\x00\x03\x00\x04\x00\x06\x00\x06\x00\x04\x00\x06\x00\x03\x00\x00\x00\x01\x00\x04\x00\x04\x00\x04\x00\x08\x00\x07\x00\x06\x00\x07\x00\x04\x00\x03\x00\x06\x00\x06\x00\x04\x00\x04\x00\x06\x00\x08\x00\t\x00\x0b\x00\n\x00\x0c\x00\r\x00\x0c\x00\x0b\x00\x0b\x00\x0c\x00\x07\x00\x05\x00\x07\x00\x06\x00\x03\x00\x04\x00\x04\x00\x02\x00\x03\x00\x03\x00\x06\x00\x03\x00\x00\x00\x01\x00\x07\x00\x07\x00\x05\x00\x06\x00\x06\x00\x06\x00\x06\x00\x05\x00\x04\x00\x01\x00\x03\x00\x04\x00\x02\x00\x00\x00\x00\x00\x02\x00\x01\x00\xfe\xff\xff\xff\x00\x00\x00\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfd\xff\xfc\xff\xfb\xff\xfe\xff\xff\xff\xfd\xff\xfc\xff\xfd\xff\xfd\xff\xff\xff\xfe\xff\x01\x00\x00\x00\xff\xff\xfd\xff\xfa\xff\xfb\xff\xfb\xff\xfe\xff\xfd\xff\xfc\xff\xfd\xff\xfb\xff\xfa\xff\xfb\xff\xfa\xff\xf9\xff\xfa\xff\xfa\xff\xfc\xff\xff\xff\x00\x00\xff\xff\xfc\xff\xfd\xff\xfd\xff\xfa\xff\xf5\xff\xf3\xff\xf8\xff\xf7\xff\xf6\xff\xf6\xff\xf5\xff\xf4\xff\xf6\xff\xf6\xff\xf4\xff\xf4\xff\xf7\xff\xf8\xff\xf7\xff\xfa\xff\xf6\xff\xf3\xff\xf3\xff\xf5\xff\xf6\xff\xf7\xff\xf8\xff\xf7\xff\xf9\xff\xf8\xff\xf5\xff\xf6\xff\xf9\xff\xf9\xff\xf9\xff\xfa\xff\xf8\xff\xf9\xff\xfb\xff\xff\xff\xfe\xff\xfc\xff\xfd\xff\xfd\xff\xfd\xff\xfd\xff\xfb\xff\xf8\xff\xfa\xff\xfb\xff\xfa\xff\xf6\xff\xf4\xff\xf3\xff\xf6\xff\xf6\xff\xf8\xff\xf8\xff\xf5\xff\xf7\xff\xfb\xff\xf9\xff\xf8\xff\xfa\xff\xfd\xff\xfe\xff\xf9\xff\xfc\xff\xfa\xff\xf9\xff\xf8\xff\xf5\xff\xf4\xff\xf4\xff\xf1\xff\xf2\xff\xf4\xff\xf4\xff\xf5\xff\xf3\xff\xf5\xff\xf3\xff\xf6\xff\xf8\xff\xf8\xff\xfa\xff\xf9\xff\xfb\xff\xfc\xff\xfe\xff\xfc\xff\xfb\xff\x00\x00\x00\x00\xfb\xff\xfd\xff\xfe\xff\xf9\xff\xfe\xff\xfc\xff\xfa\xff\xfe\xff\xfd\xff\xff\xff\x00\x00\x00\x00\xfe\xff\xff\xff\x01\x00\x02\x00\x00\x00\x01\x00\x02\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x01\x00\x03\x00\x05\x00\x05\x00\x04\x00\x02\x00\x03\x00\x02\x00\x03\x00\x02\x00\x01\x00\x03\x00\x03\x00\x02\x00\x03\x00\x03\x00\x03\x00\x02\x00\x03\x00\x03\x00\x02\x00\x04\x00\x07\x00\x03\x00\x04\x00\x06\x00\x06\x00\x07\x00\x03\x00\x03\x00\x02\x00\x04\x00\x07\x00\x05\x00\x02\x00\x03\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\x00\x00\xff\xff\x00\x00\x01\x00\x02\x00\x02\x00\x02\x00\x05\x00\x03\x00\x05\x00\x04\x00\x06\x00\x08\x00\x08\x00\t\x00\x08\x00\x06\x00\x05\x00\x05\x00\x03\x00\x05\x00\x07\x00\x
08\x00\x06\x00\t\x00\x07\x00\x05\x00\x06\x00\x04\x00\x04\x00\x04\x00\x02\x00\x02\x00\x00\x00\xff\xff\x01\x00\x01\x00\x04\x00\x05\x00\x04\x00\x03\x00\x05\x00\x05\x00\x05\x00\x06\x00\x06\x00\x06\x00\x05\x00\x04\x00\x04\x00\x07\x00\x05\x00\x04\x00\x05\x00\x04\x00\x02\x00\x03\x00\x05\x00\x05\x00\x03\x00\x03\x00\x02\x00\x00\x00\xfe\xff\xf8\xff\xfa\xff\xff\xff\xfe\xff\xfe\xff\xff\xff\x00\x00\x00\x00\xfe\xff\x00\x00\x00\x00\xfe\xff\xff\xff\x00\x00\x02\x00\x03\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x03\x00\x05\x00\x05\x00\x04\x00\x02\x00\x01\x00\x00\x00\xfb\xff\xfd\xff\xfd\xff\xfa\xff\xfd\xff\xff\xff\xfd\xff\xf9\xff\xfc\xff\xfd\xff\xfb\xff\xfb\xff\xf9\xff\xf9\xff\xfb\xff\xfa\xff\xf8\xff\xf8\xff\xf9\xff\xf9\xff\xfa\xff\xf7\xff\xf8\xff\xf9\xff\xf7\xff\xf8\xff\xf7\xff\xf7\xff\xf8\xff\xfa\xff\xfa\xff\xf8\xff\xf6\xff\xf6\xff\xf5\xff\xf8\xff\xf7\xff\xf4\xff\xf7\xff\xf5\xff\xf8\xff\xfb\xff\xf9\xff\xfb\xff\xfc\xff\xfa\xff\xf7\xff\xf9\xff\xfa\xff\xfb\xff\xfc\xff\xfc\xff\xf9\xff\xf7\xff\xf6\xff\xf6\xff\xf3\xff\xf0\xff\xf1\xff\xf1\xff\xf1\xff\xf3\xff\xef\xff\xee\xff\xf1\xff\xf3\xff\xf3\xff\xf1\xff\xf2\xff\xf3\xff\xf3\xff\xf4\xff\xf6\xff\xf4\xff\xf7\xff\xf5\xff\xf6\xff\xf6\xff\xf7\xff\xf7\xff\xf8\xff\xf6\xff\xf6\xff\xf6\xff\xf5\xff\xf3\xff\xf8\xff\xfc\xff\xfc\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xc9\x00\xb5\x00\x9d\x00/\x00\x16\x01F\x01\xcb\x00#\x01\xc3\x00p\x00\xe7\x00\x9f\x01\x87\x01\t\x01\xd3\x00\xa0\x01`\x00\xc2\xfe9\x00\xae\x01\x9f\x00\xc5\xff\xfe\xff\x85\xff\xea\xfe\xf3\x00;\x00o\xfdQ\xfd\xd3\x00U\x01\xdb\xfeh\x00:\x02\x9d\xffb\xfe\t\x00<\x00\xbb\xffk\x02\r\x01\xd9\xfe5\x00\x7f\x01\x9f\x00\xa5\xfe`\xfe\xcc\xfd\x91\xfe\x9e\xff6\x00\x83\xfed\xfe\x13\xfe\x9c\xfd\xa4\xfe}\xff\xf6\xfe\xb4\xfd\xe9\xff\xff\x00\xd6\xff\xc9\x00L\x01#\x00\xf0\xff\xa4\x00\x16\x02\xfd\x01N\x02\xba\x02\xf1\x01\x9c\x01\xa7\x01,\x02\x9c\x01\x86\x01w\x01\x9f\x01\xca\x01\\\x01\xe4\x00\x9f\x00\xcc\xffh\xff\xa8\xff\xc0\xff\xc3\xffE\xff\xae\xfeE\xfe$\xfe \xfeT\xfe|\xfe!\xfe\x99\xfe\x83\xfe\x16\xff\xbe\xfe\xaf\xfd+\xff\xa9\xff\r\xff\x81\xff\xde\xff\xf9\xfe4\xff\x06\x00a\xffF\xff\x85\xff@\x00J\x00u\x00\xae\xff\xb0\xfe\x9e\xff\x9d\xffP\xff\x82\x00\x1c\x00\xbc\x00 
f\x94\x01C\x00\xcc\xf92\xfb\x9f\xfd\xf7\xfe\xa4\xff\xef\x00\xc7\xfd\xf4\xfe\x87\x01~\x01\x17\xfc\x08\xfd\x9b\xffX\xfd\xca\xfe\xcb\xff \x00h\x02\x7f\x04\xcd\x02!\x01i\xfcq\xfe.\x01\xf6\xfe\x16\x02 \x04\xb7\x02x\x05P\x05E\x03\xd9\x01y\x01\xea\xfe\xd5\x00x\x02\xf1\x01\xb9\x03\x1c\x01p\x01R\xff\xb5\x02\xea\x05r\x00(\xfeg\x01\x92\x02J\x00\x1e\xfc\xd9\xfce\xfc\x1f\xff\xe0\x01\x19\x02 \x03z\x012\xffM\xfc\xee\xfa\xce\xfb\xbe\xfd\x0f\xfe\xe9\x00\xd1\x01\xb1\x01\xec\x00\x0f\x01R\x00}\xfc\xca\xf9\x85\xfb\x95\x00\x9c\x00\xec\x00\xea\xfdf\xff1\x00\xec\x00\x04\x02\xe3\xfd"\xfe\xe7\xfez\x03\xe7\x03\x14\x01%\x00\xbf\xfcr\xfc\x99\xff\xe7\xffd\xfek\xff!\x02;\x02\xf6\xff\xb3\xff6\x005\x01\xc1\xfd\xdb\xfd\xec\xff\xe9\x01m\x02\xf3\x03\x96\x05\x0b\x03\\\xffS\xfd\x15\xff\xce\xfeM\xfe\xa3\x000\x03\xad\x01I\x02\n\x01|\x00\xf2\x01\x1b\xff\xc4\xff?\xff\xb0\x00\xe3\x00\x1d\xfed\xfe\x8d\x00\xf1\xffE\xfe\x8e\x00\xc4\xfd\xa6\xfd\x95\xff/\x01\xf8\x01u\xff\x07\xfeY\xfc:\xfd\xb6\xff\x87\x03i\x00r\xfc\xf6\xfb\xa6\xfd\xc4\x00m\x01\x81\xff\xfe\xfcB\xf9\xa2\xfb\x86\x01\x1f\x02:\x017\xff\xda\xfd\xc4\xfe\x03\x04c\x03]\xff\xd5\xffM\xff\xee\xfe\x1f\x017\x03-\x02\x8e\x00\xb3\xfe*\xfe\x03\xff\x9b\x00\x0f\x01<\x00\xb4\xfe\xce\xfe\\\xff\x02\x01;\x01S\x00\x9e\x01\x82\x01V\xffC\x00\x11\x021\x01\x16\x01\xff\xffW\xffU\xff\x81\x02B\x02\xe6\xfe\x9a\xfe3\xff\x86\xfes\xfeP\x00z\x00T\xffX\xff\xc0\x000\x00\x8e\x01\xa5\x01\xd7\xff=\xfec\xff\xd3\xffi\xffd\xff\xed\xfd>\xfe\xd3\xfe\n\xff~\xfeU\xfe(\xfe\xf1\xfe\xd6\xfey\xfe\xa6\xff\xa9\xff\r\x00^\x00\xed\x00P\x01q\x01_\x01\xdf\xff\xa5\xff\x83\xff5\x00\xbe\x00\xa7\x00L\x01\x1c\x01\r\x00(\xffh\x00A\x01\x7f\x01\x08\x01\xa9\xff\x8d\x00c\x01\x00\x01\x9e\x00\xf6\xff_\x008\x01P\x01\x11\x010\x01\xec\x00!\x00\x82\x00\x1c\x01V\x017\x01\x13\x01\xc1\x00\x9c\x00\xbc\x00\x03\x01\xa7\x006\x00O\x00\\\x00@\x00h\xff\xdd\xff\xf8\xff/\xffK\xfe\x15\xfe\xb9\xfe\x14\xffM\xff-\xff9\xfeC\xfe\x85\xff\xba\xffc\xffF\xff\xde\xfe)\xff\xa3\xff\xcf\xff5\x00\xcf\xff\xaa\xfe7\xfd\x1b\xfd\x02\xfd\x10\xfd\'\xfd\xf8\xfc\xde\xfd\x19\xfe\x11\xfeW\xfdN\xfd\xe2\xfe\xc3\xfe[\xfd\xb5\xfc!\xfdv\xfe\xbc\xfea\xfe*\xfe#\xfe\x17\xff\t\xff\xbd\xfe\xfe\xff\x05\x00m\xfe\xb2\xfd\xb7\xfdm\xfe\x9c\xfe\xdc\xfdO\xfd\xc2\xfd\xc8\xfe\xf8\xfe\xbb\xff\x80\x00\xfe\xff\x1b\x009\x00C\x00\xac\xff\xcc\xfe\xfb\xfc6\xfa\xcc\xf8\x95\xf7\x05\xf7\xb3\xf9M\x01`\x0c;\x17Y"\x13,\x8c2u5\xb73\x080\x06*|"Z\x1a\xf8\x11\x94\t\x9b\x01p\xfb\n\xf7\x1f\xf3H\xf0\x0c\xed\xdb\xe9\xf7\xe7\xeb\xe5\xf4\xe56\xe6\xcc\xe6\xc0\xe7W\xe9\xa0\xed\x15\xf3\xaf\xf8J\xfe\xff\x02\xd5\x05;\x08\xb8\t\xd4\tM\x08\x17\x05\x12\x00q\xfb\x96\xf7\xcf\xf4c\xf3|\xf18\xf0\x1d\xf0V\xf0\xa4\xf1\xad\xf2x\xf3U\xf4\xc0\xf4\x87\xf5W\xf6\xa8\xf7&\xf8\x9c\xf8\xb9\xf9\x0b\xfb!\xfd3\xfe\r\xff\xe6\x00\xa1\x02\x9c\x05\xa6\x07\xc0\x08\xfe\t\x87\t\x1c\t\x96\x08e\x08\xd1\n%\x0fQ\x12Z\x15t\x16\x9c\x16\xd8\x15\x0f\x12\xe7\x0bx\x03\xa1\xfb\x9e\xf4&\xef\x86\xea\xb4\xe7\x02\xe6f\xe5&\xe8\xa5\xec\xa2\xf1\xa1\xf6\xe3\xfaT\xfe\xa6\x01\xa4\x04d\x06\xc4\x06\xc9\x05\x9e\x03<\x01H\xfe\x97\xfc\x81\xfa\xcc\xf7\x15\xf6O\xf4\xff\xf2d\xf3\xa2\xf3\xef\xf2\xb3\xf3\xb9\xf4r\xf7\xa5\xfa\x89\xfdi\xff\xec\xff\x15\xffO\xfd\x9c\xfb\x1f\xfa4\xf9%\xf8\xe9\xf7\x9e\xf7\x81\xf8J\xfb\xa5\xff@\x04U\x08m\t\x81\t\xf3\t\x18\n\x9d\t\x18\x05x\x01j\xffW\xfc:\xf93\xf6\x00\xf6n\xf7Q\xfa=\x05V!\x8bJ"i\x0bp;gHd\x0fk\x88gOR\xe40A\x12\xc9\xfa\xda\xe6/\xd7 
\xce\xb0\xcci\xcco\xc7\xb4\xc3\x07\xcc\xa7\xdd\x8c\xe9\xd2\xe5\xd9\xdcT\xde\xa7\xecc\xfb\xb1\x00\xe5\xff]\x02\x80\x0b\x04\x15\xbf\x1b\x07\x1e\xfe\x1b\xea\x14;\x07\x85\xf8\xaa\xf0\x88\xeb\x1c\xe4\xa5\xd67\xcb\xef\xc9C\xd0\x9e\xd9\x1c\xdfQ\xe0\x85\xe2\xce\xe7V\xee\x98\xf3\x1a\xf9\xd1\xfe\xdc\x00\xaa\x00\xcc\x02S\nt\x13\x94\x18\xb2\x17>\x13\xa7\x0f\x07\x0e\xb9\n|\x03\xb4\xf9\x8b\xef\x07\xe8\x15\xe5\xa0\xe7J\xed\xe5\xf2\xb7\xf7\xbb\xfdn\x06T\x10E\x18\x06\x1b\x8c\x18\xc1\x13\xa9\x0f\x0e\r\xfb\nx\x07w\x02\x92\xfe\x96\xfd1\x00q\x04d\x07@\x08=\x07\xf7\x054\x06\xc0\x05\xa9\x04X\x01a\xfdy\xfb\x19\xfc\x00\x00\xf4\x03{\x06\xd8\x07\x94\x08-\x0b\xe7\r\x1b\x0e\xcd\n\xe0\x03\x85\xfd\x92\xf8\xac\xf4\xf8\xf1K\xef%\xee\x9c\xed_\xeel\xf1\xa6\xf6\xd6\xfb\xd5\xfe\x98\x00\xd3\x022\x06\x91\t\xed\ni\n5\t\xb3\x08\xcf\x08\xb9\x07\x08\x05\x1a\x01o\xfc\x96\xf7\xb4\xf3 \xf1\x0e\xef \xee\xb3\xedg\xeeZ\xf13\xf6\xb4\xfb\x95\xff(\x02(\x04\x84\x05\x85\x060\x06\x9b\x04\x12\x02\xe9\xffI\xfe\'\xfd\x94\xfc\x14\xfcc\xfb\x0e\xfa\x00\xf9#\xf9M\xfa\xbb\xfan\xfa\x04\xfaX\xfa\x8e\xfb\xc5\xfc\x99\xfd\xec\xfd\x06\xff\xa6\x00x\x02n\x04\xd2\x05+\x06\x10\x05(\x03\xf8\x01n\x01\xd4\xffD\xfc{\xf6\xc9\xee\xbd\xea\xea\xeez\xfb\\\x0b\xd8\x17\r#{7GU\x13i\xfdd\xc5M\x898\x102\x97-\x04\x1d\x92\x015\xe9-\xdf\xee\xdfH\xe0\xf0\xdem\xdf\xf5\xe0W\xe1\xab\xe0\x81\xe3\x7f\xeb>\xf3\x9e\xf5\x0c\xf5*\xfa\xec\x08t\x19\tL\x0e}\x11\x9f\x10\xad\n\x0f\x01)\xf7\x8e\xefH\xeb\xb4\xe9P\xe9/\xea\xd3\xec\xe9\xf2\xdf\xfa>\x01\xea\x03s\x02r\xff\x8b\xfd\xf1\xfc&\xfdi\xfc\'\xfb\xd5\xfa\xd5\xfc\xe5\x00T\x04\xad\x04\x08\x02\x0c\xfe\x86\xfa\x13\xf8\xf2\xf5s\xf3h\xf0\xc8\xee\x13\xf1\xb2\xf6\xe6\xfd!\x03\xab\x05\xef\x06\xda\x07j\t\xd4\t\x9d\x071\x04\x91\x00\x04\xff\x03\xff\xad\xff\xe9\xffS\xff\x89\xfe/\xffW\x01_\x03\x13\x04\x13\x02\xb0\xff\xf7\xfe\xb9\x00o\x03\x1c\x05\xe3\x05\xf3\x06\xfb\x08\x03\n\xfe\x08\x96\x06a\x04\xb7\x02\xe2\x01\xb1\x00\x87\xfe\x88\xfa\x14\xf5\xf1\xf2\x10\xf9\x1a\x07\xc7\x16\xb5%}9JS\xdegDi\xfaV{?0.\xbb\x1e\xb2\x08\xcc\xebZ\xd2f\xc65\xc6\xdd\xc9\xdd\xcd\xae\xd4A\xe0G\xeba\xf1\x06\xf4c\xf8C\xff!\x04\x83\x03\x91\x01\xd7\x04D\r\x8e\x13\xd4\x11\xad\t\x7f\x00\x08\xf8:\xee\xff\xe1\xce\xd6\xc5\xcf\xb1\xcc\x85\xcc\x8c\xd1\x10\xdd\\\xed\xe5\xfc6\x08`\x0f\x90\x14\x15\x19\x1e\x1a!\x14\xf4\x07\xff\xf9\x9f\xef`\xeb\xd6\xebg\xec\x8e\xea\xd6\xe8\x17\xeb\x81\xf1\x89\xf8\x99\xfb`\xf9\xb3\xf5\xc6\xf5\xf8\xfa\xfe\x01V\x07\xd4\n$\x0e\x80\x13\\\x1a\t \x0b!\x83\x1ba\x11\x96\x06\x0b\xfe3\xf8o\xf2\xcd\xeb*\xe6\x00\xe5\x8c\xea/\xf5\xfa\xff\xea\x06\x99\n\xb0\x0e\xbe\x14\xb6\x19$\x1a\xe0\x15\xf2\x0fa\x0c\xfb\x0b\xee\x0c\x16\x0c\x89\x08\x0c\x04\x1c\x00\xc4\xfc\xb1\xf9\xe2\xf5q\xf1$\xed\x17\xeb7\xed9\xf3\\\xfa\xe9\xff3\x03\xa6\x06\xcf\x0b:\x11\xa3\x12\xea\rI\x05\xdd\xfd\x1c\xfa\x80\xf8\x99\xf5n\xf14\xef2\xf2\x82\xf9y\x01\xcc\x06\x07\t<\tb\x08\x95\x06Y\x03#\xfe\xb5\xf7s\xf1s\xed\xa0\xed\xbd\xf1Z\xf7.\xfck\xff/\x02%\x05\x80\x07W\x07\xac\x03\x97\xfd\x8e\xf8\x92\xf6\xc2\xf7j\xf9N\xfa%\xfbR\xfd,\x01\x03\x05\xce\x06u\x05\x0c\x02\x04\xff\x9e\xfd\x93\xfd\x9e\xfd\xbb\xfd"\xfek\xff\x15\x02@\x05 \x07\xbd\x06\x14\x04\x99\x00\x13\xfe@\xfd 
\xfd\xa4\xfc\xb7\xfbj\xfb\x03\xfd\xf5\x00\xa5\x05}\x08f\x08\x84\x06\xe3\x04\x10\x04\x8d\x03a\x02\x82\xff\x90\xfc\xa6\xfb\xe0\xfc!\xff\x11\x00G\xff\xeb\xfd\xf0\xfc\x87\xfd\xfd\xfe7\x00\x04\x00\xde\xfe\x86\xfe\x8a\xff\xc4\x01j\x03\xdf\x03\x1d\x03\xcd\x01\xe3\x00q\x00R\xff\xf9\xfc/\xfa\x8c\xf8\x94\xf9\xad\xfdW\x02S\x043\x03\x1f\x01\xd4\x00\xa8\x03#\x06\xb0\x03w\xfb?\xf3\xe5\xf5\x01\tn$q8\xcd<\xe6:%A\xa9L\xd0J\xe50\xbf\t|\xec\x15\xe3\x9c\xe4\xab\xe3;\xdbg\xd4\xfa\xd7\x15\xe6\x16\xf5;\xfcX\xfa9\xf3\xd2\xed+\xee\xdf\xf3\x89\xfb\x8c\x009\x02f\x03\xf6\x07/\x0e\xb7\x0f\xa8\x07o\xf7c\xe6\xeb\xda\x8c\xd6\xa1\xd7A\xdbc\xe1\xa7\xe9\x86\xf3n\x00\x1b\x0fB\x1a\xa0\x1b\xe3\x12\xef\x06\x08\xff\xab\xfbB\xf8#\xf1\x99\xe8\xef\xe5\xa3\xeb\x0e\xf5\x1c\xfbH\xfb\xd5\xf8\x8d\xf7\x8b\xf8g\xfa\x90\xfbb\xfc\xfc\xfeL\x04\x10\x0c\xa8\x153\x1e\xb6!\xa7\x1e\xe4\x17P\x11X\x0bi\x03\xab\xf8\xc0\xed\n\xe72\xe7\xdb\xec\xcd\xf3\xac\xf9D\xff\n\x06\x18\r\xdb\x11:\x13[\x11\x9f\rg\t0\x06P\x05,\x06\xef\x06~\x06n\x05w\x04\xf0\x03\xba\x02x\xff\x1f\xfa@\xf5\xbc\xf3\x03\xf6\xf5\xf9\xd5\xfc9\xfe\x1c\x00J\x03\xb0\x06e\x07\xd1\x04\xc8\x00\xd8\xfdV\xfd(\xfe\x7f\xfep\xfd\xd9\xfb\xa2\xfb\x05\xfd\xa0\xff\xd3\x01\xac\x02k\x02\x16\x02\xb0\x02\xcb\x03\xea\x03H\x02\x84\xff\xdb\xfdo\xfe\xed\x00\x1d\x03_\x030\x02\xde\x00,\x00\x80\xff\xac\xfdq\xfaK\xf6\r\xf33\xf2\x12\xf4\xdc\xf6\xe3\xf8\xcd\xf9)\xfb3\xfes\x02\x8b\x05a\x05=\x02\xd3\xfe)\xfd\x9e\xfdW\xfe]\xfe\xa7\xfd@\xfdq\xfe9\x00\x0f\x01\xad\xffU\xfc\x04\xf9\xc2\xf7\x10\xf9B\xfb\x85\xfcq\xfcj\xfc\xe1\xfd\x91\x00\x99\x02b\x02\xcc\xff\xfa\xfc\x12\xfc\x87\xfd\xf6\xff\xe9\x00>\xff]\xfd\x84\xfd\x96\xff\xd8\x01\xc4\x01\xea\xff.\xfe\xec\xfd\x11\xff?\x00\x18\x00f\xfeA\xfc-\xfbA\xfc\x1a\xff\xce\x00\xe9\xff\x9c\xfd\x8d\xfcP\xfd\xc6\xfd\xc1\xfbE\xf7\x18\xf2;\xeei\xed-\xf6:\x0f\xd53\xd7R\x84\\KUJN\x9bK\x94A\xaa\'\xfe\x07\xe7\xf40\xf4\xf8\xfa\x9b\xfb\xdc\xf4d\xeeq\xee\xa6\xefr\xeb\x80\xe2\xc1\xdac\xda\xb1\xe0\xeb\xebi\xfay\t\xf5\x14\xb3\x17\x1d\x13C\x0br\x02\xf2\xf7M\xea[\xdc\t\xd4\xcf\xd4S\xdd3\xe8A\xf1\xeb\xf5\xe3\xf7o\xfbi\x023\x08\xa3\x06\x0c\xff\x1a\xf84\xf8y\xfe[\x04\x98\x05\x83\x010\xfc\x01\xf85\xf4x\xef\xe3\xe8\xff\xe1\xb1\xdd\xe8\xdeb\xe6\xb9\xf1f\xfdx\x06;\r\xfc\x12\xfe\x17\xbf\x1ad\x18?\x11\xcb\x08n\x03\xfa\x01\x01\x02\x07\x01\xbe\xfe\x86\xfd\x91\xfe\xf7\x00\x12\x01v\xfd\xbe\xf8\xef\xf6\xf0\xf9G\xff\xb9\x04(\nn\x10B\x169\x19\xe9\x18\xde\x16h\x14\xa6\x10:\x0b\xa2\x05[\x02\\\x02\x0f\x03\x1e\x02,\x00\xd3\xff\xaa\x01\xbc\x02\x81\x00\xf8\xfb\x0c\xf9\x98\xf9\xa2\xfbG\xfc\x1d\xfc\xb3\xfd\n\x02\xe3\x06\x92\tz\t\xac\x07v\x04\r\x008\xfb\x97\xf7\x86\xf5\x1d\xf4\xfd\xf2I\xf3\xec\xf5\x0b\xfa{\xfcv\xfbg\xf8\xf4\xf6f\xf8\x1d\xfb$\xfc\xc8\xfb\x17\xfd\x8f\x01\xc4\x06C\t}\x07n\x03r\xffh\xfc\xa4\xf9\xb9\xf66\xf4[\xf3\xea\xf4\x82\xf8\xd7\xfc\x90\x00\n\x02,\x01\x06\xff\xaf\xfd\xb5\xfd/\xfe\xbb\xfd\xa3\xfc\x89\xfc\x0b\xfeY\x00\xe2\x01\x8f\x01\xd3\xff\xd0\xfd\xa3\xfcZ\xfc>\xfc\xb5\xfbE\xfb\xe0\xfb/\xfeo\x01d\x04\xa7\x05L\x05F\x04x\x03\x10\x03r\x02Z\x01\x0f\x00\x8a\xff\x03\x00%\x01=\x02g\x02<\x015\xff\x81\xfdL\xfds\xfe\xb8\xff\xcb\xffJ\xfe\xb7\xfcV\xfd\x9d\xff\xa3\x00G\xfe=\xfa\x0b\xf9#\xfc\x08\x01.\x04\xdc\x03\x8a\x02y\x02\xeb\x05~\x0b~\x0f\x7f\x0f\n\r\xba\x0c\xb0\x11\x19\x1al%\xb31\xc7=\x81E\xbfB\x995 
#\xed\x11\xde\x04^\xfa\x94\xf2\xe0\xec\x0e\xe8\x07\xe5\xcd\xe3G\xe3\xb1\xe0f\xde#\xe0\xb7\xe5\xee\xec\xbb\xf3\xf0\xfb\xd9\x04\xa1\n\xb6\x0be\t\xf6\x04U\xfd_\xf2\x1b\xe8\x08\xe3\xe2\xe3\xbc\xe6\x06\xe7o\xe6\x14\xeaj\xf3\x86\xfa\xb7\xf7\x97\xef\x12\xee\x8e\xf6L\xff\x1f\x01k\x00\x1b\x04\x97\t\xf7\x08@\x01\xb9\xf8\x8d\xf3T\xef\x7f\xe9\xd7\xe4\x89\xe5\x0c\xec\xf6\xf3\xd2\xf8\xbf\xfa\xa1\xfd=\x03\x97\x08\xed\x08\xd7\x04\xc3\x02Z\x07J\x0f\xe8\x13\xc8\x12Y\x0f\x1d\rT\x0b\\\x07\xc2\x00\n\xfa\xa7\xf5M\xf4\xaf\xf5r\xf9c\xfe\xdf\x01\xdf\x02\xdd\x02)\x05_\n\x98\x0f\xa3\x11\x9f\x10k\x10Q\x13\xd9\x16\x8e\x16\xcb\x10\x0c\x08\x99\x00\x99\xfd\x1c\xfe~\xfe\xd0\xfc\x08\xfb\x94\xfcz\x01%\x05O\x03\x1a\xfd\x9a\xf8[\xfb\xf1\x03s\x0c\xcd\x0fU\x0e\xc9\x0b\xdb\t4\x07=\x01H\xf8\r\xf0%\xec\x18\xee\t\xf4\xf5\xfa\x82\xffv\x00g\xfe\x08\xfcr\xfb\xff\xfbP\xfc\x8a\xfbz\xfb\x16\xfen\x02\xc5\x056\x05c\x00\xed\xf9\xe5\xf4?\xf3\x14\xf4o\xf5C\xf7e\xfaL\xff\x0b\x04~\x06n\x05\x80\x01\xd8\xfc\x90\xf9\x8c\xf8)\xf9Q\xfa\xcf\xfb\xe0\xfcK\xfd|\xfc\x93\xfa\\\xf8s\xf6\xed\xf5\x87\xf7$\xfb\n\x00\x80\x04\xb0\x07}\t\xc3\t\xc0\x089\x06\n\x03\xd5\x00I\x00\xb7\x00\\\x004\xff/\xfeN\xfeU\xfe;\xfd\x89\xfb\xae\xfa;\xfd\x96\x01\x96\x05\xb5\x07-\x08\xa6\x08\n\t\xaf\x07Y\x04F\x00\x82\xfd\x82\xfc\xfa\xfc\x9f\xfdq\xfd\xc4\xfc\xc4\xfb\x15\xfc,\xfd\xbd\xfd\xca\xfe\x00\x02\x93\x08\x00\x0e\xf5\x0e\xc9\x0b\xc0\x08\xb0\t\x80\r{\x0e\x03\t\x9e\x07i\x19\xac:\x18QSF\x1a$B\t\x82\x06A\x12R\x17\xf0\x0f(\x05>\x01U\x04\x1c\x060\xfe\xeb\xebP\xd8\xc0\xcd\x0e\xd13\xdeU\xee\x8d\xfa\xfc\xfe\x94\xfcu\xf7\xf5\xf32\xf2\xa5\xf0\xa5\xef\xd9\xf0z\xf6\x81\xff+\x07^\x08U\x02q\xf7C\xeao\xdeb\xdb\x80\xe5\x87\xf5\x01\xfe-\xfbi\xf5\xae\xf4W\xf7*\xf6\xab\xf0\xb3\xec\xbd\xef\xe7\xf9\x86\x04\x9a\t5\x08\x84\x02\x80\xfb$\xf6\x8c\xf3\xfa\xf3s\xf5\xf6\xf7\xb9\xfbG\x00\x17\x05\x9e\x08\xc0\x08\xa6\x03\xa1\xfc\xd9\xf9j\xfe#\x06L\ng\t\x92\x07\x8a\x07S\x07\xe9\x04\xde\x00\x9e\xfd\x03\xfc\xc9\xfb\x9e\xfd\xdb\x01&\x06\x9a\x07=\x06x\x04\t\x05\x83\x07\x80\n\x88\x0c-\r4\x0eC\x10M\x12\xcf\x11\r\r\xf7\x07\xd4\x05\x9b\x06\xd3\x05[\x01\x02\xfe\xbd\xfe\xd6\x01C\x02\x1e\xfe\xe7\xf8\xbf\xf5\xae\xf4\r\xf5\xb1\xf5]\xf7\x85\xf9\xd8\xfa\xdb\xfb\x91\xfc[\xfd\xd2\xfcd\xfb\x9b\xfa!\xfb\xae\xfc\x1d\xfe`\xff\xe2\xff\x86\xffT\xfes\xfc\x94\xfa"\xf94\xf9^\xfa\xd0\xfb\xae\xfc&\xfd\xe5\xfdC\xfe`\xfe\xb6\xfd\x0e\xfd\xb7\xfc\xd8\xfcn\xfd\xfc\xfd\x9c\xfe\xec\xfe\xd5\xfe\x1c\xfe:\xfd\x9b\xfc\x17\xfc\x8a\xfb\xb4\xfb\xab\xfcE\xfe#\xff\xbe\xfe\x0f\xfe\x85\xfd\x9f\xfd\xe1\xfd}\xfe\xdc\xff.\x02=\x04\xbb\x04\\\x03\xe7\x01\x91\x01\xc9\x02\xc8\x04t\x06\x85\x07\xa3\x07w\x06Q\x04p\x01\xd4\xff$\xff\'\xfe\xd0\xfc\xfa\xfc\xa0\xff\x18\x03\xe3\x02\x00\xff%\xfb\x92\xf9\xe2\xfbw\xff(\x03\x01\x07\xd0\x06\xd6\x02\x83\xfb\x0b\xfc\x14\r\xd2\'V8\xce1\xf7!\x9c\x1bs"\xec*\x7f+l(\xa5\'\xb2(\xdb&\x17\x1e?\x10\x8c\x03\xcb\xf9\xd0\xf1p\xeb(\xea\xe3\xec\x04\xec\x87\xe2\xca\xd8\xae\xd8\x0b\xdf\x1b\xe2+\xe0\x05\xe2s\xeb]\xf4F\xf7\xa1\xf6.\xf8\r\xfc\'\xfe\xb0\xff\x94\x03\x00\n\xef\x0bJ\x07\xc9\x01\xbc\x00\xc1\x01\xed\xfe\xb9\xf8\xca\xf3\x97\xf2A\xf3.\xf26\xee,\xe9\xec\xe5\x12\xe5\xf3\xe4N\xe5A\xe7\x82\xea+\xedt\xee1\xf0\xcc\xf3\x7f\xf8\x1e\xfc\xf5\xfd\xe5\xff\x00\x04*\t\xc8\x0ca\r\x9c\r\x0b\x0f\x8a\x10(\x10\x88\x0e\xa6\r\x95\x0c\xe3\t\xaf\x06\x94\x05\xa1\x06\xfa\x05\n\x03g\xffz\xfd{\xfc\x95\xfbL\xfc\xf6\xfd\xb3\xff\x89\x01\x95\x03\x1e\x05\xa1\x04*\x04\xc8\x06\xb1\n\x14\x0e~\x119\x15o\x16\xb5\x11h\n\xbf\x06<\x07\x89\x08\x1b\x08~\x06\xa0\x03\x1b\xff\x03\xfa#\xf6S\xf3J\xf1\x8f\xf0\x95\xf1\xdb\xf2\xd2\xf2\xd3\xf1\xb5\xf0b\xf07\xf1\x8e\xf3b\xf7\xff\xfa\x02\xfd\x
e4\xfd\x0f\xfe"\xfe/\xfe\x16\xff,\x01\xd8\x02;\x034\x02\xa9\x00\xdd\xfe\x01\xfd\xad\xfbg\xfb\x7f\xfb3\xfbX\xfa\xf2\xf8\x88\xf7\x1c\xf6\x1f\xf5\xd2\xf43\xf5\x86\xf6\xa5\xf8P\xfa\xb2\xfaa\xfad\xfaT\xfb\xf0\xfb#\xfd\x1d\x00\xba\x04n\x07\xc9\x06;\x04\x05\x028\x02\x15\x046\x07\xfb\t\xb0\n\xad\t\xab\x05g\x00z\xfdI\x00\x80\x06\x14\t\xb4\x05I\x00s\xfeD\x01\xd7\x05\x94\t"\tq\x05\xdc\x01\x8a\x03\x15\t]\x0e\xc6\x14\xaa \x03,M*m\x1b&\x11\xc2\x16\x93#\xa9+\xcb-\xda-\xca\'\x85\x1a\xfb\r\xdc\x08\xac\x08\xaf\x07C\x05?\x02\x14\xfd\x1f\xf4>\xe9Y\xe0\x13\xdb%\xda\xce\xdcL\xe0\xd6\xe27\xe1\x8f\xdcU\xd8-\xd7a\xdbP\xe45\xefh\xf7\x9f\xf8\xc6\xf5\x98\xf5h\xf9\xca\xfe\xdb\x03Y\n/\x10\xb2\x10J\x0c8\x07[\x05f\x05z\x05W\x05\xb5\x04\x02\x03E\xfem\xf6\xb0\xee\x8d\xeb\xe8\xec\xc1\xee\xf8\xed/\xec\x07\xeb\xc1\xe9\xd5\xe7\xb8\xe7M\xeb\xf3\xf0\x84\xf5\xe2\xf8(\xfc\x83\xff\xbb\x01\x88\x03\x07\x06<\n\xcc\x0e\x8e\x12\xef\x14\xbd\x15V\x14G\x12+\x11`\x12S\x14\x82\x14\x8f\x11H\x0c\x97\x08\xc8\tD\x0e\xaa\x0f\xc9\n\xa7\x03\x0b\x01\x9e\x02\x86\x03\n\x03\xb7\x03t\x04\x1c\x02p\xfdc\xfb\xa3\xfc\xba\xfda\xfd\x1f\xfd\x9d\xfd\xac\xfd\\\xfc7\xfb\xf9\xf9$\xf9j\xf9\xeb\xfa\x1e\xfc\x9a\xfb\x87\xf9S\xf7+\xf6\xbf\xf6\x9d\xf8~\xfaE\xfbi\xfa\xd6\xf8\xd1\xf7\xd4\xf8\xb4\xfa>\xfc\x18\xfd\xf2\xfd]\xffo\xff\xeb\xfdA\xfcg\xfc2\xfe\xaa\xff\xd4\xff\xfe\xfe\xb0\xfd\x15\xfc\xef\xf9<\xf9\x15\xfa_\xfb-\xfc\x93\xfb\x0f\xfb\x0f\xfa\xce\xf8\x91\xf8\x89\xf96\xfb\xea\xfc!\xfe\xc6\xfe\r\xff\xe9\xfe\xc2\xff\xd5\x00q\x01\xfe\x02n\x06\x9b\x08\x01\x08r\x05\x13\x05\xb5\x07&\t\xc8\t\xf9\nT\x0c\xa6\x0b/\x08S\x06\xaa\x07\xa0\n\xe0\x0cr\rm\x0c\xba\nF\n(\x0c\xeb\x10\x9a\x18C\x1f\xd3\x1fB\x18\xe8\x0f\x11\x10\xfd\x17) \xc3!\xb0\x1e\x88\x1a}\x15\x9c\x0f\xeb\n\x99\t\xca\n\x91\t\x93\x05\xcf\x00\xc4\xfb\xda\xf5\x0e\xef\xdd\xea\n\xea\x08\xea\x83\xe9\xf5\xe7\xfb\xe4\x02\xe1T\xddw\xdcX\xde\x81\xe1w\xe5\xaa\xe8?\xe9G\xe7\x97\xe6G\xe9\xb5\xed\x8e\xf1\\\xf5\xfa\xf9k\xfc\x91\xfb\x13\xfa\x9f\xfaR\xfd\xcb\xff\x9e\x02\xdc\x052\x075\x05`\x01\xc5\xfeW\xfeX\xff=\x01\x9b\x02v\x01\xc3\xfd\r\xfa>\xf8\xcf\xf7\xa2\xf7\xcf\xf7\xf4\xf8\xca\xf9\x02\xf9\x0e\xf7/\xf6o\xf7/\xf9v\xfb\xa1\xfdI\x00\x9c\x01\x8b\x02\x92\x04\xa0\x07\xb1\t\xf9\t$\x0b(\x0fL\x14[\x17A\x17\xf8\x13h\x10\xb3\x0fh\x13\xcb\x16\x8b\x15\x02\x11D\r!\x0b\xf3\x07\x9f\x04\xb4\x03\x0e\x04\x98\x02\xee\xfe\xef\xfb\xa5\xf9\xc2\xf6{\xf4\xb1\xf4\xef\xf5q\xf5\x1c\xf4l\xf3\t\xf3\xcf\xf1\xed\xf0z\xf2\xfd\xf4^\xf6\xf4\xf6\xf2\xf7\xfb\xf8%\xf9K\xf9\x7f\xfaI\xfc|\xfd6\xfe\xf9\xfe9\xff\xb5\xfec\xfe\xe3\xfe\xeb\xffu\x00Z\x00 
\x00\xdc\xff0\xffO\xfe\x8d\xfdy\xfd\x1e\xfe\x9d\xfeT\xfe`\xfd\xf4\xfc\x86\xfd)\xfeb\xfe\xe4\xfe\xc8\xff\x9f\x00\xa7\x00\x98\x00e\x01\'\x02b\x02(\x02+\x03\x9b\x04U\x04;\x03\x82\x03\x9e\x05\xc5\x06\xda\x05\xd1\x04x\x05\xdb\x06\x0e\x07\xaa\x05k\x04\xa1\x04a\x06\xa9\x07\xc5\x07\x9f\x08\xca\x0bJ\x0fb\x0f\\\r\x94\r\x01\x11\xa2\x13\xb3\x14F\x15\xe1\x15\x8e\x14\x9f\x12\x04\x13\xb8\x14\x94\x14.\x12U\x10\x0f\x0e[\nn\x07\x18\x07a\x07,\x04}\xff?\xfc\xb2\xf9!\xf6&\xf3\x9b\xf3\x12\xf4\xf3\xf0\x07\xec\r\xea\x1e\xebJ\xebp\xea\xc6\xea\x04\xec\xed\xeb\x14\xeb\x07\xec\x8a\xee\x10\xf0\xb8\xf08\xf2g\xf4<\xf5\xe7\xf4\xc4\xf5U\xf8U\xfa\xeb\xfa\x8a\xfb\x96\xfc\xcb\xfc\xf3\xfb!\xfc\xa5\xfe\x1a\x01T\x01\xe6\xff\xbe\xfew\xfe\xd4\xfe\x8e\xff|\x00\x8f\x00H\xff\x89\xfd\xd5\xfcV\xfd\xf2\xfd\x9e\xfd\xa5\xfc\xc0\xfb\xc4\xfb\x11\xfcm\xfc\x98\xfc\xc5\xfc"\xfd\xba\xfd\xed\xfe\x98\x00\'\x02\xf8\x02\x84\x03\xde\x04\xa0\x06v\x08\xb9\t\xb5\n5\x0b6\x0bg\x0b\xe9\x0b\x94\x0ce\x0c\xec\x0bx\x0b\xd0\n\xbe\t\x1d\x08\xfb\x06\x0f\x06\\\x05\x98\x04|\x03>\x02\xaf\x005\xff\xea\xfd\xee\xfcR\xfc\xcd\xfb/\xfb<\xfa\x02\xf9\xa8\xf7\xe9\xf6\xfc\xf6\\\xf7G\xf7\xb6\xf6\x1f\xf6\xc9\xf5\x94\xf5\xf5\xf5\x00\xf7\xfb\xf78\xf8\xf7\xf7\xd5\xf7\x19\xf8\x99\xf8p\xf9\x85\xfa\\\xfb\x8d\xfb\xa4\xfb(\xfc\x0b\xfd~\xfd\x83\xfd\xdf\xfd\x9e\xfeR\xff\xe4\xff\xb4\x00|\x01h\x01\xe2\x00\xdd\x00\x83\x01\x87\x01\xed\x00\r\x01=\x02?\x03\xb0\x02\x8c\x01\x88\x01\x0e\x02J\x02Q\x02\x7f\x02\xa7\x02#\x02\xd9\x01\xc8\x02A\x04\xe1\x04\\\x04W\x04\xe1\x04\x94\x05\'\x06\xa4\x07s\n\xcc\x0cx\rw\r\x80\x0e\xc0\x10\xab\x12\xfd\x13E\x15$\x16\xf4\x15h\x15\xfe\x15\x1c\x17\xeb\x16\x0f\x15F\x13?\x12\xca\x10i\x0e\xbe\x0b\xb7\t\x16\x07\xbf\x03\x82\x00\x03\xfe\xca\xfb\xb1\xf8H\xf5\xc6\xf25\xf1\xd2\xef\xf2\xed\x06\xec\x8e\xea\x9d\xe9A\xe9\x8e\xe9\x1f\xeau\xeae\xea\xa7\xea\x8a\xeb\xee\xecO\xee\xb5\xef#\xf1k\xf2U\xf39\xf4\xac\xf5\x86\xf7%\xf9\'\xfa\xe3\xfa\xe1\xfb\xd7\xfc\xce\xfd\xdd\xfe\xd9\xff;\x00\x0e\x003\x00\xe7\x00\x8a\x01\xae\x01s\x01O\x01J\x01b\x01k\x01_\x014\x01)\x011\x01{\x01\xdc\x01\'\x02\t\x02\xad\x01\xca\x01\xd9\x02R\x04%\x05\x19\x05\x8a\x04\n\x04J\x04T\x05\xac\x06E\x07\x0b\x07\x8a\x06C\x06\x02\x06\x10\x06\xbb\x06h\x07@\x07A\x06w\x052\x05\xcb\x04J\x04%\x04\x0f\x04\x1e\x03\xaf\x01\xb9\x00R\x00\x9f\xff\x88\xfe\xc0\xfdT\xfd\x8f\xfcx\xfb\xc2\xfah\xfa\xeb\xf9k\xf9\x1c\xf9\xe2\xf8k\xf8\x06\xf8\x04\xf8\x13\xf8\xe3\xf7\xc5\xf7\x0e\xf8\xa6\xf84\xf9\x89\xf9\xa9\xf9\xb7\xf9\xc4\xf9[\xfa\x9a\xfb\xf9\xfc\x90\xfd`\xfd\xfc\xfc;\xfd\x1c\xfe$\xff\xde\xff"\x00\x1c\x00!\x00a\x00\xe8\x00q\x01\xcc\x01\xb2\x01\x9c\x01\xe4\x01u\x02\xe7\x02\x07\x03\x14\x03!\x03\xfb\x02\xb7\x02\xc6\x021\x03\x81\x03\x9c\x03\x81\x03Z\x03\x0e\x03\xf1\x02.\x03{\x03\xe3\x03\xe4\x04\xcf\x06\xc8\x08\x8d\t\xa4\t$\nx\x0b<\re\x0f\xc9\x11p\x13\xba\x13\x9e\x13\xfa\x13\x97\x14w\x14*\x14`\x14N\x14\x0b\x13\xd7\x10\xd9\x0e\x01\r\x87\n\x18\x089\x06I\x04I\x01\xe3\xfd%\xfb\x06\xf9\x8c\xf6\xfa\xf3\xed\xf1S\xf0\xa4\xee\x01\xed\x01\xec_\xeb\xae\xeaE\xeaz\xea/\xeb\xa0\xeb\x07\xec\xc6\xec\xc4\xed\xc7\xee\xed\xefm\xf1\x12\xf3O\xf4I\xf5I\xf6a\xf7i\xf8s\xf9\xac\xfa\xf3\xfb\xcf\xfc5\xfd~\xfd\x0b\xfe\xa7\xfe\n\xff6\xff\x92\xff\x17\x00]\x00\x0e\x00\xab\xff\xb0\xff\xcd\xff\xa2\xffj\xff\xcc\xffg\x00_\x00\xf4\xff\xf0\xffq\x00\xb6\x00\xcf\x00|\x01d\x02\xaf\x02\x9e\x024\x03\x1f\x040\x04\xd4\x03-\x04\x1c\x05b\x05\x1c\x05F\x05\xc7\x05\x95\x05\xe1\x04\xe0\x04i\x05~\x05\xfd\x04\xdb\x04I\x05R\x05\xf6\x04\xd3\x04\xb3\x04(\x04\x86\x03\x84\x03\xb1\x03+\x03%\x02"\x01d\x00\x9a\xff\xdf\xfe;\xfex\xfdk\xfcb\xfb\x86\xfa\xed\xf9l\xf9\x02\xf9Y\xf8\x96\xf7"\xf7+\xf7`\xf7l\xf7U\x
f7@\xf7@\xf7\x86\xf7!\xf8\xfa\xf8\x90\xf9\x1e\xfa\xb0\xfa(\xfbU\xfb\xc1\xfb\xf8\xfcb\xfe\'\xff=\xffB\xfft\xff\xb4\xffe\x00\x82\x01a\x02\xf7\x01\xfa\x00\xe9\x00%\x02o\x03\xe9\x03\xe9\x03\xa8\x03\x16\x03\xb2\x02w\x03\x1c\x05:\x06\x1f\x06e\x05\x1e\x05~\x05+\x06\xf2\x06\xb5\x07\xde\x07\x88\x07\xb0\x07}\th\x0c\xae\x0e\x07\x0f\x0e\x0e\xce\r\x83\x0f\x90\x12m\x15\x01\x17!\x176\x16\x0f\x15\xc8\x14\xea\x15W\x17M\x17\x0e\x15\x99\x11\x85\x0e~\x0c/\x0b\xd7\t\x93\x07\xda\x03\xfd\xfe\xab\xfa\xbb\xf7\xe1\xf5\xfc\xf3\x80\xf1\xa9\xeey\xeb\xa0\xe8\x12\xe7\xc4\xe6\xd9\xe6C\xe6`\xe5\xee\xe41\xe5W\xe6J\xe8U\xea\xad\xebN\xec \xed\xff\xee\xcb\xf1\xe9\xf4\x9d\xf7\x90\xf9^\xfa\xc6\xfa\xce\xfb\xe2\xfd\x90\x00\xad\x02\xb6\x03\xbd\x033\x03\xef\x02G\x03\x1b\x04\xa4\x04\xad\x04J\x04Y\x03g\x02\x02\x02;\x02\x90\x02.\x02\x80\x01\x97\x00\x0b\x00\xcd\x00\xac\x02E\x04\xa8\x03\xb0\x01x\x00!\x01\xe3\x02\x8e\x04a\x05\x02\x05y\x03\xc2\x01X\x01\x1b\x02\x82\x03\x1b\x04\xc1\x03\xa9\x02\\\x01\x97\x00\x8a\x00\x16\x01\xc8\x01\xef\x01\xa2\x01D\x01\x18\x01\xe9\x00K\x00\xe0\xff\xd8\xff&\x00N\x00_\x00p\x00\xcd\xffY\xfe\xc2\xfc\'\xfc{\xfc\xde\xfc\x10\xfd\xe8\xfc<\xfc\xdc\xfa;\xf9Q\xf8D\xf8\xa3\xf8\xff\xf8V\xf9d\xf9\xb4\xf8\xb1\xf7\x0c\xf7\x17\xf7\x93\xf7O\xf80\xf9\xf9\xf93\xfa\x03\xfa\xed\xf9C\xfa?\xfbp\xfce\xfd\xb1\xfdZ\xfdW\xfd\x0c\xfe\x18\xff\xf6\xff\x80\x00\xa9\x00\x91\x00/\x00P\x00\xfa\x00\xb3\x01\xfd\x01\xc5\x01\xee\x01\x14\x03\xae\x04\xa1\x05C\x05M\x04Y\x040\x06\x85\nz\x10\xac\x15.\x17\x03\x15\xdd\x12W\x14s\x19\xe6\x1fw%\xca(\x06(\xc2#i\x1f\xa9\x1e\x87!\xe0#\\#\xee\x1f:\x1b\x94\x15\xe4\x0fI\x0b\xde\x07\xb1\x04\xb3\xff\xb2\xf9\x0c\xf4\xb5\xef^\xec"\xe8;\xe3\x80\xde\x00\xdb)\xd9\xb9\xd8\x9f\xd9\x82\xda&\xdad\xd89\xd7\xaa\xd8\x90\xdc\xc3\xe1~\xe6\xd3\xe9V\xeb\x8a\xec\'\xef\x9b\xf3\xdf\xf8\x03\xfe\'\x02-\x04|\x04Q\x05Y\x08\x02\x0c\x0c\x0e6\x0e\x0f\x0e\xe9\r+\r\xb6\x0c,\r\x1e\r;\x0b\xf6\x07T\x05\x13\x04\xd5\x02$\x02\xb1\x00\xa2\xfe\xf4\xfb\xfd\xf9_\xf9Q\xf9\x92\xf9\xfb\xf9\x99\xf9\xe1\xf7\xe1\xf5G\xf6\x8d\xf9\x17\xfe0\x01\xd2\x01:\x00J\xfeh\xfe\x93\x00~\x04\xc0\x07\xff\t\x9c\t\xa0\x07!\x06\x80\x06C\x08\xb8\x08G\x08l\x07\xb7\x06\xe1\x05|\x05\xf0\x05l\x05\xe3\x02v\xffU\xfd\x1b\xfdj\xfd\x90\xfdu\xfd\xc1\xfb\xf3\xf8\xe4\xf5\xb4\xf4\xc3\xf4\xe1\xf4\x14\xf5<\xf5\xdc\xf4\xc2\xf3\xe5\xf2\r\xf3s\xf3d\xf3e\xf3:\xf4\xa0\xf5\xba\xf6\xc8\xf7A\xf8`\xf8\xed\xf7%\xf8k\xf9*\xfb\x18\xfd\xad\xfe\x8a\xff=\xff;\xfeg\xfe\\\xffX\x00\xde\x00J\x01\x1f\x02\xcc\x02\xd8\x03\xb0\x04\xf3\x04\xd5\x04\xd3\x04;\x05\xa0\x07\x82\x0ex\x19\xcf!\x89#\xcc!\xe5!\xbc#\xd6%\xf9+\xfd6\xb8?\x1b?\x958\xf33-1\xc3,.(\xe7%\xfd"/\x1b\xa5\x11\x8f\n\xd1\x04\xd2\xfc\x07\xf3\xb7\xe9\x16\xe0\xc3\xd6a\xd0/\xcei\xcez\xcda\xcb\\\xc8\x1e\xc5F\xc4O\xc7G\xcd|\xd3G\xd9\x8a\xdf\xe9\xe5\x19\xec9\xf2\x00\xf9\x16\xffV\x032\x06{\t\xdb\r\xda\x12\x14\x17\xac\x19\xb8\x19\xb2\x16!\x12\x1e\x0e\x9b\x0b\r\n)\x08\xf7\x05\xe2\x02V\xfe\xe9\xf8r\xf4t\xf1\x01\xefO\xecx\xea$\xea\x17\xeb\xe7\xeb\xa2\xed\x95\xefS\xf1=\xf2\x12\xf3\xa4\xf5\xc1\xf98\xff\xcd\x044\t\x11\x0cb\r\xed\r7\x0ec\x0f5\x11\xf2\x12\xf3\x13\xb3\x13A\x13\x05\x12P\x10\xd3\r\x1b\x0b\xd6\x07\x8f\x041\x02\xa7\x00\x1e\x00\x08\xff<\xfd=\xfa\x11\xf7?\xf48\xf2F\xf1%\xf1\xa9\xf1:\xf2\xa5\xf2\x04\xf3\x13\xf3\r\xf3\xe6\xf2\x8c\xf2\x94\xf2\xb2\xf2\x0f\xf4\xba\xf5\x1b\xf7\xbe\xf7\xf1\xf7\xa7\xf7T\xf6\xf6\xf4\xf1\xf4\xf3\xf5\x8b\xf6\x19\xf7\xaf\xf75\xf8\xe6\xf6\xa4\xf6\x9b\xf7e\xf8\xb6\xf7~\xf6J\xf7\xb8\xf8\xde\xfa\x05\xfe6\x01\xab\x02@\x02\xe6\x02\x94\x05\xa5\x08\t\r\xf3\x11\xde\x18B =)w4\x97<\xd7?\t? 
@\xe8BGD\tFMH\x18JkEz;J2T*\n"\xb2\x15G\t\xd2\xfe\xc3\xf4,\xeaF\xe0\xf1\xdaf\xd62\xcf\xda\xc5[\xbf\xaf\xbcQ\xba\x16\xba\xe2\xbd\xff\xc3\xb3\xc8\x08\xcdW\xd4\x94\xdd\xae\xe5\xb0\xed\x05\xf6\x06\xfd\xb8\x01\x08\x07%\x0eR\x14_\x19C\x1dK\x1f\xfc\x1d;\x1a1\x17\xc7\x13>\x0f\x07\n/\x04a\xfd\x82\xf6x\xf1\xa8\xed\xfa\xe8\x84\xe4\xf5\xe0U\xdd\x9a\xd9:\xd8\xc6\xda)\xde\x8c\xe1\xac\xe5\xd6\xea\x8a\xf0\xba\xf5\xf0\xfc=\x04\x19\x0b\x97\x10G\x15\xdd\x18\xd1\x1d\xf9"K\'N)6)B(\xbf$\xbb!\x91\x1e\x92\x1b%\x17\xd6\x11\xac\x0c\x11\x07\x95\x01n\xfdr\xf9\xb8\xf5#\xf2\xad\xee\xbf\xebM\xe9*\xe9\x19\xea0\xeah\xeav\xebE\xed\x1f\xef\x84\xf0Y\xf3-\xf5=\xf6\xc2\xf6A\xf7\x9d\xf8\x18\xf9\x1a\xf9\x8b\xf8\xf0\xf6\xd8\xf5\xb6\xf4S\xf3\xa4\xf1\x10\xef\x1a\xee\xf9\xec\xf5\xeb\xe4\xea8\xea\xf8\xea\x03\xea\xf4\xe94\xec\x8a\xf0\x82\xf3\xd9\xf2\xaa\xf3\x8c\xf6\x0c\xfa\xba\xfc\xad\x02\xe5\n\xac\x10\xb7\x14w\x1a!"\xa5(\xc80%=7H;NLR\xf4VUY\xafV[T\xf6R\xcfN\x94F`=\x165\xac+#!p\x153\x08=\xfa\xbd\xec"\xe0p\xd5\x00\xcde\xc7\xa1\xc1o\xbc\x80\xb9\x94\xb9X\xbb@\xbe\xef\xc2\xb5\xc7\x9b\xcde\xd4\xe1\xdc\x14\xe6\x0f\xf0\x16\xfb\xae\x03\xc3\t\x8a\x0f\xa9\x15\xc1\x1a\xdb\x1b\xc5\x1b\xf1\x1a\t\x18J\x13\xf6\r\xc2\t{\x04m\xfd\xde\xf5\x9b\xee(\xe8+\xe2\x96\xdc\x04\xd8e\xd4\x9b\xd1\xc8\xcf\x1e\xd0\xb5\xd2\xed\xd6,\xdbr\xe0\xdb\xe6\x98\xedd\xf5$\xfd<\x05\xde\x0b]\x12 \x18\x13\x1d\xc7"M*\t0\xc51\xef0\xd00\xc6.\x86)\x9a%;"k\x1d\xf4\x14L\r\x14\t\x10\x04\xdd\xff&\xfc\xc2\xf8C\xf4\x94\xee4\xec\x00\xea6\xe9Q\xe9\x1f\xe9\xa7\xe9n\xe9b\xec\x9c\xf0\t\xf4\xec\xf6T\xf88\xfa\x97\xf9;\xf9\xc9\xf9D\xfa\xb6\xf9\xd7\xf6\xb2\xf4v\xf3?\xf2\x82\xf0\x7f\xee\xfb\xec8\xea]\xe7\xe6\xe4\x87\xe3.\xe3\x98\xe2\x8b\xe3z\xe3E\xe5\xfa\xe7\xe0\xeb\xa2\xefX\xf2\xa5\xf6\xc7\xf9\xd8\xfc\\\xffK\x04P\n\xf8\x0fX\x17\xf6\x1f\x8a(\xd80B\xbb\x90\xb7\x8a\xb7N\xba\xf3\xbev\xc6m\xcf\xdb\xd7\xc4\xe0\xe8\xea\xb6\xf4\xcc\xfc\xb5\x04\xff\x0c\x0b\x13P\x16\x96\x19o\x1d\x12\x1f\xcb\x1dI\x1b\x9c\x17\x87\x11\xab\t\xd3\x01a\xf9\x1f\xf0\xf2\xe6N\xde\xad\xd6\xa3\xd0\xf4\xcc\xcf\xca\xc5\xc9\xc2\xca\x11\xcd\x9c\xcf7\xd3\x13\xd9\xba\xdf[\xe6!\xee\xe0\xf6\x86\xff/\x08e\x11O\x1a\xdd!\x81(\xc7-\xd90L1I2k1i.L*\x11&\x87!\xc6\x1a\xfd\x14\xdd\x0f\x9f\tG\x03J\xfdE\xf8-\xf3\x9e\xeeO\xec\x05\xea\r\xe9X\xe9\xbc\xea\xd4\xec\xd1\xeeV\xf2\x13\xf6\xc2\xf82\xfb%\xfd\x8e\xfeL\xff\xe0\xfeA\xff\xa0\xfe\xe7\xfd|\xfc`\xfa\xb1\xf8\xd6\xf5\xcc\xf2\xf3\xee\xba\xea,\xe7\xb7\xe3\x01\xe1\xd6\xdeS\xdd\xb1\xddD\xde\x0e\xdf\xf7\xdf\xc4\xe1h\xe4\xbd\xe5\xf9\xe6\x03\xea\x17\xef\x9c\xf3\x08\xf7/\xfb\x9d\x00\xab\x06\x85\x0e\xc2\x18\x86"\x9c+\x1a7\xd4D)P\xf8X\xa2c\xd7l\xb0n\xecj@h\x0beZ[\xbbM\\A 5\x87%@\x14q\x05\xce\xf8\xff\xec4\xe1w\xd5\x85\xcb^\xc4\xd8\xbe\xae\xb9\xf9\xb6\xe7\xb7\xa0\xb9\xc0\xbb\x1b\xc1z\xca\x93\xd4\xef\xdex\xeb\'\xf8p\x02\xab\x0bw\x15n\x1d\x9b!\xf6#w%j#\xb2\x1e\x89\x1aB\x16\xa3\x0f$\x07\xd6\xfe\xef\xf5\xee\xebq\xe2\x92\xda\xdf\xd2\x19\xcb\xd6\xc4\x0e\xc0\xbd\xbcZ\xbc/\xbf\x9e\xc3\x1f\xc9\xc9\xd0\xfe\xd9\xab\xe3\x1a\xee\x94\xf9\xd5\x04\x9c\x0e\xd0\x17\xbf 
5(Y.\xb03\xc97:939\xd37\xf84i0\xf4*\x93$\x92\x1c\xff\x13\x8c\x0b)\x03\x83\xfbW\xf5r\xf02\xec\xad\xe8/\xe77\xe7\xf9\xe7\xb1\xe9G\xec2\xef\xde\xf1\x14\xf5\xe8\xf8\xab\xfc\xbe\xff\x92\x02\x87\x04\x00\x06[\x06R\x06U\x05(\x03\x90\x00\xe6\xfc\xab\xf8\xfa\xf3\xc1\xef\xf7\xeb\x0e\xe8^\xe4\xd5\xe1\xbf\xdfN\xdd\xe8\xdb\xd4\xdb=\xdc\xfb\xdb\x10\xdc\xaa\xddl\xdf,\xe1\xe9\xe3\xce\xe7\x9d\xec!\xf0\xf0\xf3\x0c\xf9\x08\xfek\x022\x06}\x0c\xd3\x14\xe3\x1c\xbc%m2LBRP\xe8ZRd7m\x9ar\x80sxp\xaajca\xfaS\x19C\xc31o"\xc7\x13\xd3\x03*\xf4\x96\xe7\x87\xdd\xb3\xd3F\xcb\xfa\xc5?\xc2f\xbe*\xbb\x1a\xba\xc9\xbb\r\xc0A\xc6K\xcd\xe1\xd5\x01\xe1f\xed~\xf9\xee\x05\xd2\x12\xb4\x1d\xdd$b)2,\x83,\xad)d$\x11\x1d\xee\x13"\n\x9f\x00\xbd\xf6\xda\xec\xad\xe3\xca\xda\xc6\xd1\xb3\xc9\x8f\xc3\x8f\xbe\xdb\xbay\xb9\xd4\xb9Q\xbb\x91\xbf\x80\xc7\xba\xd0\xc8\xdaU\xe7T\xf5\xd9\x01\x90\r.\x1aM%\xed,?3k8\x16:\xf98}8n7\xb42\xbb,Q(p"\x06\x1a\\\x12{\r\xfb\x064\xfe\x0f\xf7C\xf2p\xedH\xe8\x16\xe6)\xe6\x80\xe6\x84\xe7\x94\xea<\xef\x96\xf4\xa9\xfay\x00\xe2\x04\xe8\x07m\n\x9d\x0b\x01\x0b)\t\xd1\x06h\x03e\xfei\xf9\x93\xf5\x16\xf2:\xeeb\xea8\xe7\x07\xe4\xc0\xe0\xe5\xdd^\xdb\xb8\xd8z\xd65\xd5|\xd4D\xd4\xa3\xd5R\xd9\x91\xdd\xc0\xe1\xe8\xe6.\xee\xb6\xf5V\xfb\x8b\x00o\x06\x98\x0b\x88\x0e\t\x12\xf2\x18\xb8!\\)\xc71\x04=SI\xf9S8]\x00fDk?k\x9dg\x02a\\WJJq;K+K\x1ap\n\x01\xfd\xa9\xf1\xe4\xe7\x17\xe0g\xda\xa9\xd5\xe0\xd1_\xcf\xae\xcd\x90\xcc\xfa\xcb\xe2\xcbK\xcd\xc8\xd0;\xd6P\xdd\xb6\xe5\xa4\xef\x1e\xfa@\x04\xe6\r\x84\x16V\x1d\xae!\t#\x1b!\xdf\x1c&\x17\x1e\x10.\x07\xbe\xfd\x94\xf5\x16\xee\xd0\xe5F\xde\x13\xd9\xa5\xd4Y\xcf\x80\xca\xc1\xc7\x9c\xc5\xf8\xc2\xe5\xc1\xb8\xc3\xe3\xc6\x05\xcb\xa2\xd1"\xdb\xd3\xe5,\xf1\x1d\xfe\x8d\x0b9\x17\xd4 \xef(\xd0.W2\x813\xc22Z0\xe5,.(\xfa"T\x1e\xe7\x19\x14\x15\x9b\x0f\x01\x0bG\x07"\x03\xc7\xfe\xc2\xfb\xa4\xf9"\xf7\xe3\xf4%\xf4\xc3\xf4\xf9\xf5\x17\xf8\xe2\xfa\x15\xfe\xc5\x00e\x03\xad\x050\x07\xce\x07G\x07i\x05\xa0\x02\x80\xff\x89\xfc\x15\xf9\x0b\xf5+\xf1e\xedi\xe9\xb8\xe5Y\xe3\x89\xe1\x0e\xdfM\xdc\xa9\xda\xd1\xd9\xe6\xd8\xd7\xd8\x10\xda{\xdc\x90\xde\xa6\xe1\x8c\xe7h\xee\x9f\xf4\x0e\xfa\xe4\xff\x88\x05\x82\x08\xbb\n;\x0eS\x11\x86\x12\xc6\x13%\x18X\x1e\xfb$\x17.\x849\x97C\xdcJ\x85Q\x9eW\x83Y\x9eV\xc4QtJ\xaf?H2\xfa%\xc4\x1aU\x0f\x9a\x04\xda\xfb3\xf5\xf1\xef\x00\xecA\xe9\xd8\xe65\xe4<\xe1~\xde\x91\xdc^\xdbq\xda\x92\xda\x85\xdcL\xe0j\xe5\x0e\xecQ\xf4v\xfc\x8d\x03\x94\t\xc7\x0eo\x12\\\x13J\x12d\x0f\x02\x0bF\x05\t\xff\xa9\xf9\xd8\xf4\xd2\xef\xd3\xea\xa3\xe6\x8f\xe3\xb5\xe0\xb0\xdd5\xdb\x17\xd99\xd63\xd3\xa6\xd1\xea\xd1:\xd2q\xd3u\xd7w\xdd#\xe4\xce\xeb\xaa\xf5\xe2\xff\x12\x08\x19\x0fW\x16\xcc\x1b\xa2\x1e\xb1 u"p"\xb7 
l\x1fk\x1e2\x1c[\x19j\x17G\x15\xe4\x11\x7f\x0e(\x0c~\t\xe8\x05\xa6\x02\x91\x00\x00\xff\xa6\xfdX\xfd\xf0\xfd\xa2\xfep\xff!\x00\r\x01\xf3\x01Q\x02\x85\x01\xed\xffk\xfe\xea\xfc\xe7\xfa\xda\xf89\xf7\x92\xf5/\xf3\xb7\xf0%\xef\xf8\xed\x8b\xec\r\xeb}\xe90\xe8\xe1\xe6\x0b\xe6\x89\xe5L\xe5\xcd\xe5\xa4\xe6\xe3\xe7\xd0\xe9\xf4\xec\x1c\xf1#\xf5Z\xf9\x82\xfdo\x01\xec\x04\xcd\x07u\nP\x0c\x8c\ra\x0e\xbd\x0e\xb9\x0f\xd9\x11\xee\x14\x8b\x18\xdf\x1c\x8d!\xb2%\x8f)\xc6-~1\xf42Q2\x1d1\xec.++\x8a&\xae"\x98\x1e\x8a\x19z\x14\xac\x10\xb7\rB\n~\x06\xfa\x02q\xffV\xfb\xe1\xf6\xec\xf2\xc0\xef\xe0\xec\xd4\xe9\xba\xe7\x03\xe7(\xe7\x98\xe7\xc0\xe8R\xeb\x1c\xee)\xf0\x04\xf2\x81\xf4\xe8\xf6\xce\xf7\xd4\xf76\xf8j\xf8q\xf7\t\xf6}\xf5\x84\xf5\x84\xf4\xfa\xf2)\xf2\xb4\xf1N\xf0\x96\xee\x9f\xed\xc1\xec\xdf\xea\xb6\xe8\xfc\xe7\xf9\xe7C\xe7.\xe7\xee\xe8\xab\xeb\x18\xee\xfa\xf0\x84\xf5\t\xfa[\xfd7\x00r\x03\x0c\x06I\x07y\x080\n\x96\x0bK\x0c}\rX\x0f\xea\x10\xe9\x11\xec\x12\xb9\x13\x86\x13\x91\x12s\x11\xfb\x0f\xa1\r\xb7\n"\x08\xc6\x05\x9b\x03\xeb\x01\xba\x00\xfd\xffz\xffp\xff\xb6\xff\xf4\xff!\x00\xf3\xff^\xffy\xfek\xfd\x93\xfc\xcd\xfb\xdd\xfa\x14\xfa\x96\xf9L\xf9\xd1\xf8i\xf8Y\xf8\xf8\xf7\x9f\xf6\xd6\xf4\x98\xf3Z\xf2w\xf0\xd8\xee_\xee\xae\xee\x18\xefZ\xf0\xeb\xf2\xd7\xf5\x01\xf8\xba\xf9\x91\xfb&\xfd\xdf\xfd<\xfe\xe5\xfe\x00\x00\xfd\x00G\x02\x01\x04s\x06B\t\x87\x0b\xb8\r\x00\x10*\x12\xbf\x13~\x14C\x158\x16\xf7\x16Q\x17,\x18\xd5\x194\x1b\xcb\x1bS\x1c"\x1d8\x1d\x10\x1cv\x1a\xd0\x18Z\x16\x1c\x13u\x10}\x0ex\x0cq\n\x01\t\xeb\x07z\x06\xe2\x04\xa3\x03#\x02\xfb\xff\xc1\xfd\xfe\xfb\'\xfa\x0f\xf8@\xf6\x98\xf4\x0c\xf3\xa5\xf1\x88\xf0\xf2\xef\x89\xefA\xef\x14\xef\x11\xef\xfd\xee\xb6\xeeD\xee\xb9\xed\x0b\xedh\xec\xd0\xebO\xeb\xe5\xea\xb9\xea\xc5\xea\xd6\xea\xf7\xeav\xebX\xec4\xed\x16\xeed\xef3\xf1\x02\xf3\xba\xf4\r\xf7\xa3\xf9\xfe\xfb\xf1\xfd\xc9\xff\xe0\x01\xa7\x03\xfe\x044\x06s\x07n\x08\x14\t\xbe\t{\n"\x0bx\x0b\xcb\x0bM\x0c\xa1\x0cg\x0c\n\x0c\xea\x0b\x93\x0b~\nk\t\xcd\x08U\x08u\x07\xae\x06\x9d\x06\x8d\x06\x1d\x06\x94\x05\x8c\x05n\x05\xa8\x04\xb0\x03\xef\x02.\x02\xd6\x00\x98\xff\xc3\xfe\x08\xfe\x05\xfd\xf9\xfbV\xfb\xb3\xfa\xb1\xf9\x89\xf8\x90\xf7\x98\xf6:\xf5\x07\xf4;\xf3\x9d\xf2\x14\xf2\xbb\xf1\x01\xf2z\xf2\xd8\xf2]\xf3\x19\xf4\xda\xf4z\xf5S\xf6u\xf7\x8a\xf8\x8b\xf9\xcd\xfaE\xfc\xb1\xfd\xff\xfeL\x00\x86\x01\xb0\x02\xcc\x03,\x05\x8b\x06\xc4\x07\xd5\x08\xe6\t\xdf\n\x9e\x0bF\x0c\xeb\x0cH\rL\rX\r\x94\r\xc5\r\xdd\r\x03\x0e1\x0e,\x0e\xff\r\xe4\r\xaa\r;\r\xa5\x0c\x0f\x0cy\x0b\xb8\n\xf1\t\\\t\xae\x08\xef\x07#\x07N\x06c\x05E\x04T\x03h\x02b\x01[\x00b\xff\x83\xfe\x94\xfd\x9d\xfc\xce\xfb\xed\xfa\xc6\xf9f\xf8\xfd\xf6\xa9\xf5Z\xf4\x08\xf3\xea\xf1\x11\xf1{\xf0\x0e\xf0\xf2\xef2\xf0\xa7\xf0\x14\xf1~\xf1\x02\xf2\xa1\xf2<\xf3\xd4\xf3\x9b\xf4\x89\xf5\x98\xf6\xc4\xf7\x1d\xf9\x9f\xfa\x08\xfcU\xfd\x95\xfe\xd4\xff\xec\x00\xd4\x01\x8f\x024\x03\xf2\x03\xc3\x04p\x05\x17\x06\xcd\x06\xa0\x07O\x08\xc4\x08.\t\x88\t\xa8\t\x7f\tR\t/\t\x07\t\xad\x08k\x08:\x08\xde\x07q\x07\x11\x07\xb4\x06\x13\x064\x05a\x04{\x03m\x02K\x01c\x00\x93\xff{\xfe<\xfd+\xfc2\xfb%\xfa\x00\xf9\n\xf87\xf7P\xf6\x98\xf5d\xf5\x82\xf5\x9e\xf5\xdb\xf5|\xf6,\xf7\x89\xf7\xd0\xf7.\xf8\xa4\xf8\xb3\xf8\xbe\xf8U\xf9\x1b\xfa\xd3\xfa\xaf\xfb\xf5\xfcE\xfe\x1e\xff\xb5\xffP\x00\xc0\x00\xc7\x00\xa9\x00\xb7\x00\x03\x01+\x01m\x01+\x02A\x03=\x04 
\x05?\x06J\x07\x01\x08\x92\x08\x0e\tz\t\xba\t\xe1\t@\n\xb5\n\x04\x0bK\x0b\x8c\x0b\x8b\x0bS\x0b\x02\x0b\x8e\n\xf4\tC\t\x91\x08\xfa\x07;\x07q\x06\xc7\x05"\x05N\x04K\x03>\x02\x14\x01\xd5\xff\x92\xfe`\xfdF\xfc.\xfb)\xfah\xf9\xb8\xf8\x0f\xf8x\xf7\xf8\xf6o\xf6\xe3\xf5j\xf5\r\xf5\xc7\xf4\x9d\xf4\xa9\xf4\xfe\xf4i\xf5\xea\xf5\x85\xf60\xf7\xdf\xf7\x86\xf8.\xf9\xc7\xf9e\xfa\x0c\xfb\xbb\xfbr\xfc\x18\xfd\xcb\xfd\x8c\xfeX\xff\x0c\x00\xb3\x00m\x01&\x02\xea\x02\xbb\x03\x88\x04O\x05\xef\x05_\x06\xb8\x06\x02\x07"\x07 \x07\x19\x07\x13\x07\x1b\x07\x0e\x07\x15\x078\x07I\x07D\x075\x07!\x07\xf9\x06\x9e\x063\x06\xcc\x059\x05\x93\x04\xdf\x03$\x03Z\x02c\x01x\x00\x88\xff\x93\xfe\x89\xfdw\xfc\x7f\xfb\x86\xfa\xa1\xf9\xce\xf8\x1c\xf8\x98\xf7D\xf7$\xf7\'\xf7L\xf7\x9a\xf7\r\xf8\xa0\xf8*\xf9\xb0\xf9&\xfa\x8b\xfa\xdd\xfa8\xfb\x93\xfb\xef\xfbH\xfc\x90\xfc\xd8\xfc\x13\xfdN\xfd\x8c\xfd\xcc\xfd\n\xfeQ\xfe\xa0\xfe\xfd\xfe^\xff\xdd\xff\x94\x00P\x01\x02\x02\xba\x02{\x03,\x04\xd0\x04x\x05\x1d\x06\xa5\x06\xfa\x06D\x07\x86\x07\xa3\x07\xbb\x07\xcb\x07\xc9\x07\xb4\x07z\x07P\x073\x07\xfd\x06\xa3\x06O\x06\xe5\x05^\x05\xd8\x04S\x04\xba\x03\x1f\x03o\x02\xa8\x01\xea\x00\x15\x00S\xff\x97\xfe\xe0\xfd6\xfd\x9a\xfc\r\xfc\x8e\xfb\x1a\xfb\xb9\xfal\xfa\x1f\xfa\xd7\xf9\x97\xf9Z\xf9\x1c\xf9\xe0\xf8\xc1\xf8\xa2\xf8\x8b\xf8~\xf8\x91\xf8\xb8\xf8\xdd\xf8&\xf9}\xf9\xd8\xf9:\xfa\xa6\xfa\x1c\xfb\x99\xfb\x16\xfc\xa9\xfcP\xfd\xfe\xfd\xcb\xfe\xb3\xff\xa5\x00\xa0\x01\x93\x02p\x03=\x04\xed\x04|\x05\xe2\x05(\x06`\x06\x87\x06\xa8\x06\xc1\x06\xd5\x06\xd7\x06\xde\x06\xf4\x06\xec\x06\xcd\x06\xa0\x06m\x06+\x06\xd1\x05\x87\x05=\x05\xdf\x04f\x04\xed\x03\x8c\x03\x16\x03z\x02\xdc\x01,\x01t\x00\xa2\xff\xca\xfe\xfc\xfd/\xfdc\xfc\xae\xfb\x11\xfb\x83\xfa\r\xfa\xae\xf9d\xf9&\xf9\x05\xf9\xf4\xf8\xf8\xf8\x05\xf9\x10\xf97\xf9r\xf9\xb8\xf9\x04\xfaI\xfa\xa9\xfa\x11\xfb\x81\xfb\xfd\xfbu\xfc\x01\xfd\x8a\xfd\x17\xfe\x99\xfe\x15\xff\x98\xff\x0c\x00{\x00\xe9\x00a\x01\xd1\x010\x02\x94\x02\x07\x03l\x03\xb9\x03\x0e\x04z\x04\xda\x04/\x05\x8d\x05\xec\x05D\x06\x7f\x06\xb3\x06\xe4\x06\xfe\x06\xf7\x06\xe4\x06\xc7\x06\xa4\x06|\x06U\x06"\x06\xf4\x05\xc1\x05\x86\x057\x05\xd4\x04V\x04\xcd\x033\x03u\x02\xb3\x01\xe3\x00\r\x00.\xffU\xfe\x84\xfd\xbb\xfc\xeb\xfb&\xfb\x86\xfa\xe9\xf9X\xf9\xd7\xf8[\xf8\xfb\xf7\x98\xf7V\xf7,\xf7\x00\xf7\xf2\xf6\x10\xf7=\xf7\x88\xf7\xea\xf7u\xf8)\xf9\xdb\xf9\x8b\xfaD\xfb\xfe\xfb\xba\xfc`\xfd\xf5\xfd\x9c\xfeR\xff\x03\x00\x96\x00c\x01U\x02)\x03\xf8\x03\xc3\x04e\x05\xc1\x05\x01\x06\x1a\x06\'\x06#\x06\x03\x06\xe8\x05\xf8\x05\xf8\x05\xe2\x05\xd9\x05\xc4\x05\xab\x05q\x054\x05\xe7\x04~\x04\xe7\x03e\x03\xf4\x02y\x02\x04\x02\xa6\x01F\x01\xe5\x00\x81\x00\x12\x00\xb3\xff/\xff\x9b\xfe\x01\xfee\xfd\xda\xfcS\xfc\xdb\xfb\x80\xfb.\xfb\xfc\xfa\xe3\xfa\xc5\xfa\xa8\xfa\x94\xfa\x87\xfax\xfa_\xfaT\xfaU\xfaU\xfar\xfa\xbc\xfa\x1a\xfb\x8c\xfb\xfc\xfb\x82\xfc\xfa\xfcl\xfd\xdb\xfdD\xfe\xac\xfe\xff\xfe]\xff\xc4\xff+\x00\x9b\x00\n\x01~\x01\xe2\x01I\x02\x8d\x02\xba\x02\xed\x02\x18\x03T\x03\x86\x03\xbe\x03\xea\x03\x10\x049\x04\\\x04m\x04a\x04e\x04_\x04S\x04i\x04\x8e\x04\xd0\x04\xf9\x04/\x05n\x05\x84\x05\x83\x05g\x054\x05\xe6\x04k\x04\xdf\x03O\x03\xad\x02\xf9\x015\x01~\x00\xaf\xff\xca\xfe\xea\xfd\x15\xfdG\xfc\x84\xfb\xdd\xfa@\xfa\xc5\xf9a\xf9\x1d\xf9\xf0\xf8\xd4\xf8\xe2\xf8\x03\xf9C\xf9\x92\xf9\xe1\xf9d\xfa\xe4\xfaj\xfb\xee\xfbf\xfc\xe1\xfc6\xfd\x97\xfd\xf3\xfdQ\xfe\xc0\xfe9\xff\xb6\xff>\x00\xe9\x00\x8f\x017\x02\xd0\x02L\x03\xbe\x03\xf7\x03 
\x04A\x045\x049\x041\x04&\x04\x1c\x04\x04\x04\xde\x03\xc7\x03\x95\x03I\x03\xfd\x02\xa7\x02\\\x02\xf6\x01\xa3\x01y\x01J\x01\x0e\x01\xd8\x00\xd3\x00\x01\x01\xe9\x00\xc0\x00\xb8\x00\xb3\x00\x93\x00M\x00&\x00$\x00\xf0\xff\x9b\xffW\xff\x10\xff\xac\xfe\x14\xfe\x9f\xfdh\xfd\x18\xfd\xac\xfcd\xfcD\xfc\x17\xfc\xe7\xfb\xb9\xfb\x8d\xfb\x99\xfb\xb3\xfb\xc0\xfb\xfd\xfb\x17\xfc\xea\xfb\xc3\xfb\x99\xfb\xbd\xfb%\xfc\xaa\xfc\x16\xfd\x19\xfdh\xfd\x0e\xfe\xa2\xfec\xff\xc9\xff~\x00\x02\x01\x02\x02\xc3\x02\xb7\x02"\x02\x1b\x02\t\x038\x05"\rs\x1a\xb6\x1f\xd3\x13\xf8\x03\xf8\xfd\xba\xfb~\xf5a\xf50\x00c\x0c\xda\x0c\xec\x06\x02\x04\xcd\x01\xcb\xfa6\xf1\x99\xf03\xf8V\xff\xe0\x01H\x06g\x0b\xf0\t\xe8\x01b\xfbU\xf9\xd6\xf86\xf9\xc8\xfc\x03\x02|\x05\x9f\x06\xf4\x03l\xff\xbd\xf9\x05\xf7\xba\xf4\x14\xf7]\xfc\x97\x02\x19\x04\x01\x01&\xff\xbd\xfb\xc0\xf8A\xf6[\xf9\xaf\xfd\x83\x01b\x02\xe3\x01\x80\x00&\xfd\x83\xfa\x9b\xf96\xfb\x8b\xfd\xa6\x01\x1b\x06\xd9\x06\xdc\x02\x80\xfe\x1a\xfdM\xfc\xe5\xfb3\xff\xf0\x04>\x08b\x06\x9d\x03\xd0\x02\xfd\x00\xb1\xfd\'\xfc\x85\x00\xec\x05t\x07C\x06\x87\x078\tJ\x04\xb7\xfd\xcf\xfd\xd7\x02\xeb\x04\x88\x03\xba\x03\x84\x05\xad\x01\x84\xfc.\xfa\x1b\xfc\xd1\xfef\xfe\xed\xfew\xfe\x9c\xff9\x01B\x01\xd3\xfd\xc5\xf9\x96\xfb]\xfd=\xfd\x17\xfc&\xfe\xbb\xfe\x86\xfb\xa5\xfb\x89\x00\xf5\x02\xbe\xfe\xa2\xfcX\x00\x81\x02\x13\xfe\xae\xfa\xd5\xfe]\x05\xa6\x04S\xfd\xa1\xf8.\xf9\xee\xfb\xd7\xfbg\xfc\x0e\x01F\x02\x96\xff\xf1\xfa_\xfb\x17\xfeX\xff\x1f\x00;\x00c\x04\x97\x02\x8e\xff\x8b\xfc\x1e\x00\x87\x01\xd5\xfa\xe4\xf7\xa4\xfdQ\t3\t\xad\x04S\x02\xf0\x03\xed\x00\x99\xfc6\xfe{\x04\x8a\t\xf1\t\xb9\x07>\x06N\x05\xac\x04\xea\x03\x8d\x01H\x01\x19\x03\x86\x04x\x05\xfc\x07\x1b\x07\xc4\x02Y\xfcq\xfa\x00\xfd\'\x02Q\x05Q\x02,\xff{\xfa\xd8\xf5G\xf5\xcf\xfb&\xff\x98\xf7\x18\xf1s\xf8R\x039\x01\x98\xfa\xf0\xfd7\x01H\xfa\xc2\xf3\xa7\xfb6\x06\n\x03o\x00\x90\x046\x05\xb7\xf9\xf5\xf5\x9c\x02\x91\n\xce\x06,\x01\x1e\x042\x01E\xfb\x8f\xfe:\x07:\x07d\xf9\xe4\xf6Q\xfc\xc7\xffo\xfdE\xfau\xfc\x9d\xf8N\xf5\xd5\xf9*\x00\x80\xffW\xfb\x0b\xfd\xe0\x00F\xfe\xb2\xfcN\x00F\x07T\x0b\x92\rt\x10\xc5\x0e\xfc\x08\xa1\x06\xe2\x0b\xac\x0c\x92\t\xb7\t\x1c\tg\x05\xed\x03\xe2\x00\xd9\xf9\xc0\xf4\xff\xf1\x05\xf5\x92\xf5\xc7\xf4x\xf7\x9a\xf5\x80\xf1u\xeez\xf0\xb3\xf2\'\xf5\x05\xfcV\x03^\x02\x00\xfc\xb8\xfdk\x02\x9c\x03\x07\x05a\nc\r\xd9\x0bD\t\xeb\tV\nu\x08\x94\x08\x8c\x08A\x04U\x00L\x01d\x01\x88\xfd\x87\xf8\xd1\xf9\xc9\xf8i\xf5\x94\xf4;\xf7;\xf9\xe0\xf5\x17\xf5\xce\xf8\x91\xfd6\xfe\xe8\x00^\x05A\x05\xc4\x01\x9b\x01T\x07/\x0b\xd2\x08\x14\t\x88\r,\r\x1a\x07\xec\x04\n\x08\xac\x04\x03\x00m\x03O\tr\x05\xbc\xfc,\xfd\xf8\xfc\x8a\xf9S\xfa\xbe\xfe\x8e\xff|\xfba\xf9\xa9\xf7?\xf6\x00\xf7\xcb\xf9>\xfd\xc5\xfc\xea\xf9\x19\xfb\x17\xfc\x9c\xfd\xa7\xfc\n\xfc*\xff\xe6\x006\x02\xa4\x04\xdf\x05\xe3\x00\x85\xfc\xcb\x00Z\x04\xa4\x03\xd6\x03\x95\x06s\x04\xeb\xfc#\xfd\x8a\x02.\x00\x10\xfe\x00\x01D\x03\xcd\x01U\xfe\xf8\xff\xd2\xfe\x81\xfc]\xfe7\x02\xbd\xfe(\xfa,\x00\xe8\x04(\x04\xd1\x00(\x00M\x02\xf2\xfe(\xfb\xac\xfe\x17\x02U\x00|\xff\x1d\x05\xe9\x05\x17\xff;\xfb\xf0\xfd\xc6\x019\x01\xa2\x02"\x05~\x02\x88\xfe\xe3\xfc\xba\x00\x08\x02\x90\x01\xea\x01\xaa\x00)\xff=\xffR\x003\xfe\x12\xfd\xc2\xfe#\x01\x0f\x00|\xff\xe7\x00J\xff\xcc\xfa\x9c\xfa\xcf\xff\xd7\xffM\xfc\x90\xfd\xf9\x00@\xfe\xf2\xf9\xbb\xfb\xd8\xfe\xd0\xfeJ\xfc\x89\xfeP\x02(\xffk\xfd\xf7\xfe/\x03\x1b\x02y\xfe\xbe\x008\x04W\x03&\xff\xfa\xff\xb1\x00;\xff\xae\x00f\x05\x01\x06\xdc\x02\x16\x02\xa6\xffA\xfb\x9a\xfd\xab\x06\xad\x08\xca\x02-\xff\xb4\xfeF\xfc\xd7\xfa\xc3\xff\xf3\x04\xa5\x02e\xfd4\xfe\x0e\x01\x80\xff\x90\xfd6\xfd\xbb\xfc\x
0b\xfdF\x01g\x04\xda\x00\x08\xfcA\xfd\xfb\xff[\xfeO\x00\x19\x05b\x03\x89\xfb_\xfb\xc6\x02\xb8\x00\xd2\xfc\xb6\xfdL\x04;\x03\xc2\xfcE\xff\xf7\x00\xaa\xff\x05\xff\xe6\x01W\x04\xc6\x00\x81\x01a\x01\xd2\xfe\xf9\xff\xda\x02$\x03d\xfe\x10\xff@\x016\x00\xbb\xfdr\xfc\xad\xff8\x00\x82\x00X\x00\xef\xff\xa6\xfd?\xfd\x8c\x00\xd6\x00x\x01\xfd\x00\xd8\x02\xc3\x02\xe9\xfez\x01]\x03\x81\x01\x0b\x00R\x03l\x06D\x01\x14\xfe\xce\xffV\x00\x19\x00\xd7\x00I\x04\xa2\x01Y\xfc\xcb\xfa\xf1\xfdQ\x02\x85\x00b\xffy\xfe\x07\xff\xa8\xff%\xff\xfb\xfdG\xfdr\xfdU\xfd\xbb\xfe0\x00^\x00L\xfd\xd6\xf9Z\xf9\xf1\xfb|\xfeF\x00n\xff\xa7\xfe\xe5\xfa%\xf8\xeb\xf9\xd1\xfd\x80\x00x\xff\xf0\xfe\x8a\xfd\xa1\xfcS\xf9\x9b\xf8\xcb\xfc\x00\x00\xba\xffr\xfd^\xfdO\xfd\xba\xfb\xe0\xfa\xf4\xfcu\xff\xc9\xfe\x1a\xfdM\xffk\x02\xf7\x00\x1d\xff#\xfe\xa6\xfe\xcf\xfe\xd7\xff2\x04*\x06O\x03\xb9\xffn\x00\xd3\x03\xb1\x06\xdb\x05\xd2\x05\xc5\x054\x06[\x07\xc6\x08\x89\t\xe9\x07"\x06\x8c\x05-\x07t\x07\n\tR\x05\xe4\x00\x08\x01\xc5\x03\x94\x07\xbc\x05w\x05\xb2\x05\x8a\x04\xbb\x04v\x08\xa0\x0f\xf1\x10%\r\x0c\r\xdf\x0f\xf6\x11\x1d\x11H\x11\x9e\x12\x84\x10\xd8\x0c\x17\x0cv\x0e)\x0c\xb0\x06t\x02)\x01\'\x00\x03\xfd\xd7\xfb\xfc\xfa\x1d\xf7L\xf2\xe5\xef\x99\xef\x8b\xee0\xec\xa0\xebP\xeb-\xea\xac\xe9\xf2\xe9\x99\xea\x0f\xea\xf6\xe9H\xebe\xed\xb1\xef\xbf\xf1\xe7\xf2P\xf3]\xf3\x85\xf4\x03\xf6q\xf7\x0f\xf9\xdc\xf9w\xf9\xb7\xf8C\xf8\xa5\xf7\xda\xf6k\xf6\xbd\xf5B\xf5\xe5\xf4u\xf3\xa4\xf2X\xf1\x1a\xf0\x9d\xef\x8c\xef(\xf0\xdf\xf0]\xf1u\xf11\xf2\xd3\xf2\x00\xf4\\\xf6\xf4\xf8"\xfa\x9a\xfb\xda\xfc\xd6\xff5\x02\xa4\x04\xe3\t\x0c\n\x98\t\x10\x0b.\x0e\x14\x12\x8b\x12\x87\x14\x9f\x14D\x12\xc2\x10\xdb\x10\x08\x16\xdd\x1c\xa0%f&\x8e\x1e^\x1b\x1c#U/\x020e)\x89\'\xc4)\xd8-)1(4\xd1/\xa9#\xa7\x19k\x17~\x1aZ\x1a4\x16}\r\xd7\x03\x94\xfc\xce\xf8\xfb\xf5Z\xf1*\xec\xc9\xe6\x18\xe3\xc5\xe2\x05\xe4X\xe3\xb7\xdf\xe0\xdc\r\xdeT\xe0{\xe3\xd7\xe5\x85\xe7\xe8\xe7\x08\xe8\xff\xea,\xf1&\xf6\xd2\xf6}\xf3\x0f\xf1\xc5\xf5L\xfe=\x01\xdd\xffw\xfc]\xf9O\xf9\x93\xf9\xe0\xfd\x97\x009\xfd\x9f\xf5\xdb\xf2)\xf5\x0f\xf73\xf7?\xf4\x19\xf2m\xef\x13\xefO\xf2|\xf5>\xf5\xdd\xf1b\xef\xbb\xef\xb8\xf3%\xf8\x8c\xfa\xeb\xf9\xac\xf6\x12\xf5v\xf50\xf7\xec\xf9\x07\xfb}\xfa\x04\xf9\x94\xf8Z\xf8\xf3\xf8\xbb\xf8Z\xf8d\xf81\xf9\xa1\xfb+\xfdL\xfd\x06\xfcp\xfa\xa2\xfa(\xfd\x88\x00\x99\x033\x037\x02y\x01\xec\x02\\\x06\xa1\x08\x13\n\xc6\x0b1\r\xf4\r\xbf\x0e\xe6\x0e\xe4\rB\x0f\xf0\x16\xc5!>\'\xdb#\xf0\x1f\xec\x1eU"\x9f\'\xfe+R/\xb8.f,\x13+\x06*\n(i#q\x1d\xe7\x18\x15\x16b\x16\xd4\x15\x18\x11l\x08\x8c\xff\xbb\xf9\xc5\xf6\xc3\xf44\xf2\x17\xf0c\xec\x10\xe8b\xe6\xa9\xe6\xb4\xe7\n\xe6\xe9\xe2\xaa\xe1\xef\xe2h\xe6K\xea\xfa\xec\xd6\xed%\xed\xf8\xec\xc3\xefK\xf3\x7f\xf4T\xf44\xf4\x8a\xf6S\xf9\x8c\xfaU\xfa\xc9\xf8\x10\xf7\x87\xf5*\xf6\xdf\xf8\r\xfa\xbc\xf9;\xf8d\xf7\xed\xf6\x01\xf6\x8d\xf53\xf5\xf2\xf5\x08\xf7\xab\xf7\xcf\xf7\x80\xf7s\xf7\x9f\xf7\xea\xf7\r\xf8\xfd\xf8.\xfa;\xfb-\xfc\x87\xfc\xe4\xfcd\xfd\x12\xfe.\xff;\x00\xe7\x00d\x01\xde\x01\xf8\x01q\x02\xe9\x02\x18\x03-\x03\x9e\x02j\x02\xef\x02\xee\x02@\x02\xae\x01\xe2\x01h\x025\x02\xb4\x01\xc3\x01\x16\x02\xce\x01\xf8\x00"\x01\xa5\x01\xdd\x01i\x01\x84\x00\xe0\xff\xbb\xff\xb1\xffR\xff\xad\xff\xea\xff\x91\xff\x85\xff\x15\x00\x88\x01>\x03\xe5\x04O\x06\xae\x07P\t\x01\x0bt\r\xbc\x10}\x13\xd3\x15\xc3\x16\xab\x17\xeb\x18\xec\x19A\x1b\xb8\x1b\xe7\x1bX\x1b\'\x19\xb5\x17u\x16g\x14P\x126\x0f7\x0c\xa5\t\xab\x06+\x04\xa8\x01:\xff=\xfc\xc6\xf9\xcd\xf7\x18\xf6\xe2\xf4B\xf3\xf9\xf1\xdf\xf0\x8f\xef\xd4\xee\x99\xee\xee\xee1\xef\x8d\xeef\xee\xed\xee\x9f\xef~\xf0B\xf1B\xf2\x1d\xf3\'\xf3Q\xf3q\xf4\xb0\xf5\xe2\xf6\
xe2\xf7\x9f\xf8Q\xf9\x13\xfa\xbb\xfa\xbb\xfb5\xfc8\xfc\xe8\xfc\xad\xfd\'\xfez\xfeD\xfe\xe7\xfdv\xfd\xcf\xfc\xd6\xfc\x05\xfd\x92\xfc/\xfc\xde\xfb\x1a\xfb\xc4\xfa\xd1\xfa\x84\xfar\xfa\x06\xfa\x8d\xf9\xb1\xf9\x91\xf9M\xf9\x83\xf9g\xf9,\xf9H\xf9G\xf96\xf9F\xf9\xe2\xf8\xd0\xf8+\xf9\x89\xf9\xfe\xf9`\xfa\xa0\xfa\xee\xfa\xfe\xfa\xfa\xfa\x9e\xfb\x94\xfc\x00\xfd\x0e\xfd;\xfd`\xfd\x8e\xfd\x01\xfeG\xfe\xbc\xfe<\xff\x08\x00r\x01\xf2\x02\x01\x05E\x08\x07\x0c\xdd\x0e\x80\x11\xdc\x14\xbc\x18y\x1cL\x1f\n"N%\xc6\'p(\x80(\xe0(\x93(\x80&\xd5#\xec!\xb9\x1f\xb8\x1b"\x17\x8e\x13$\x10\xa3\x0b\xc3\x061\x03\x08\x01\x1a\xfes\xfa\xd9\xf7=\xf6q\xf4l\xf2.\xf1+\xf1\xa9\xf0$\xefm\xee!\xef\xa1\xef\x8a\xef]\xef\x8d\xef\xc7\xefm\xef"\xef\xba\xef\'\xf0\x08\xf0\xd5\xef=\xf0\r\xf1`\xf1\xd3\xf1z\xf2$\xf3\xc6\xf3\x1a\xf4\xf2\xf4\x10\xf6\x00\xf7\xcc\xf7n\xf8\xd3\xf8Q\xf9\xef\xf9\x81\xfa\xfd\xfa5\xfb_\xfb\xb6\xfb\xf3\xfb\xe2\xfb\xce\xfb\xd2\xfb\x8b\xfb\x8a\xfb\xcf\xfb8\xfc\xa2\xfc\xbe\xfc\xe4\xfc\x1c\xfdV\xfd\x80\xfd\xb6\xfd\x14\xfe[\xfe\x80\xfek\xfeR\xfej\xfet\xfey\xfe\x81\xfep\xfe^\xfe,\xfe\x0f\xfeG\xfe\xa2\xfe\xbc\xfe\xcc\xfe\xaf\xfe\x93\xfe\x9d\xfeR\xfe8\xfee\xfe~\xfe\xc9\xfe\x14\xff&\xff\x1b\xff\xe4\xfe\x99\xfe\xfd\xfep\x00A\x03\x83\x06v\x08\x13\n\x87\x0c;\x0f\xe3\x11\x93\x14i\x18W\x1c\x0f\x1e\xbe\x1e< \x0b"\t#\xed!\xaf v |\x1e\x0e\x1bY\x18G\x16\xf6\x13\x83\x0f\xb2\nt\x08\x90\x06\xc8\x02\x11\xff\xd1\xfc\xbb\xfb\xaa\xf9W\xf7\xd8\xf6\x1e\xf7\xd0\xf5H\xf3\x97\xf2s\xf3\xc8\xf3_\xf3A\xf3\x07\xf4\xbf\xf3j\xf2\x1a\xf2i\xf2]\xf2\x97\xf11\xf1\xf7\xf1,\xf2\xd4\xf1\xc0\xf1\x1c\xf2\'\xf2\x0c\xf2\x97\xf2\xa5\xf3G\xf4q\xf4\xd0\xf4\x7f\xf5%\xf6\xb9\xf6\x08\xf7C\xf7}\xf7\x96\xf7\xe5\xf7M\xf8\xa4\xf8\xfa\xf8\xe9\xf8\xb8\xf8\xd9\xf8\'\xf9Z\xf9\x8f\xf9\x16\xfa\xb2\xfa\x13\xfb\x9d\xfb\x87\xfc[\xfd\xb1\xfd\xed\xfd\x87\xfei\xff\x05\x00R\x00\xbd\x00\x12\x01 
\x01+\x01\x80\x01\xc1\x01u\x01\xe8\x00l\x00\xc2\x00\x14\x01\x06\x01\x01\x01\xa9\x00\x0b\x00\xa0\xff\xb7\xff\x04\x00\x1d\x00\xb0\xffA\xffA\xff\xea\xfe\xc6\xfe\xe2\xff\xe5\x00\xcc\x00\x8a\x00%\x01\x04\x03\x8a\x05/\x08S\x0b\x01\x0e\x0c\x0fG\x10\x0b\x13|\x16"\x19\xf7\x1a\xaf\x1c\x19\x1e\xe4\x1d\x1e\x1d\x80\x1d\x1f\x1e\xee\x1c\xfb\x19:\x17I\x15\xb3\x12\x86\x0f\xb1\x0c\n\n\x84\x06S\x028\xff\xba\xfd>\xfc\xd8\xf9I\xf7\xd4\xf5\xf4\xf4-\xf4\x99\xf3\xa3\xf3\xc7\xf3\xfc\xf2\x11\xf2k\xf2\xb9\xf3\xdb\xf4\x15\xf5,\xf5\xf2\xf56\xf6\xbb\xf5\x82\xf5\xe5\xf5j\xf6\x02\xf6V\xf5\xce\xf5Q\xf6\xc4\xf5\xaa\xf4J\xf4\xcc\xf4\xdb\xf4r\xf4\xac\xf4\x85\xf5\x7f\xf5\xb6\xf4\xa4\xf4q\xf5"\xf66\xf6\x85\xf6Q\xf7\xf1\xf7F\xf8\xb5\xf8\x90\xf9O\xfa\xb5\xfa\xe5\xfar\xfb1\xfc\xae\xfc4\xfd\xdb\xfd\x9c\xfe.\xffY\xff\x8d\xff\xc4\xff\x06\x00J\x00\x92\x00\x13\x01\x8a\x01\xa2\x01\x93\x01\x80\x01\xa1\x01\xa9\x01p\x01i\x01\x95\x01\xac\x01\xbe\x01\xd0\x01\xfd\x01\xda\x01c\x01\xf9\x00\xde\x00\xf1\x00\xee\x00\xf4\x00\xc9\x00V\x00\xf0\xff\xc5\xff\xc2\xff\xf4\xff\x0b\x00&\x00s\x00p\x00\x9e\x00\xd5\x01\xc3\x03\xc8\x05\xca\x07y\t\x11\x0b1\r\xc7\x0f\xa1\x12\xeb\x14\xf5\x15Y\x17\x0f\x19>\x1a<\x1b\x98\x1b}\x1bU\x1a\t\x18\x95\x16\xba\x156\x14\xa1\x11\x88\x0e\xbe\x0b\'\tt\x06N\x04\x81\x02I\x00|\xfdB\xfbL\xfa\xed\xf97\xf9\x10\xf8\x18\xf7\x81\xf6\xe2\xf5\x9e\xf5\xf3\xf5n\xf6S\xf6\x91\xf5l\xf5\x18\xf6q\xf6\x18\xf6\x86\xf5c\xf5<\xf5\xbd\xf4\x96\xf4\xe3\xf4\xbf\xf4\xec\xf3?\xf3U\xf3\xc6\xf3\xbc\xf3s\xf3u\xf3\x98\xf3\xdd\xf3L\xf4\xf8\xf4O\xf5V\xf5i\xf5\xc7\xf5\xa3\xf6\x81\xf7G\xf8\xd2\xf8\t\xf9Z\xf9\xd8\xf9{\xfa\xfb\xfa\x94\xfb\n\xfc=\xfc\x8f\xfc\t\xfd\x96\xfd\xcc\xfd\xb2\xfd\xcf\xfdI\xfe\xd5\xfec\xff\xe7\xff\x0e\x00<\x00j\x00\xd8\x00\x87\x01\x0e\x02J\x02n\x02\xa1\x02\xf6\x029\x03;\x03L\x03L\x03\xf9\x02\x8f\x02\x90\x02\xc2\x02\x8b\x02\xf8\x01p\x01s\x01M\x01\xa3\x007\x00\xf3\xff\xc8\xff\xc4\xff\xb4\xff\xf0\xff\xd8\xff^\xffq\xff\x05\x00\n\x01Q\x02\x1b\x045\x06i\x07\x15\x08\x9b\t(\x0c\x99\x0e\xc5\x10\xec\x12\xda\x14\xdb\x15)\x16\xdf\x16\x1a\x18\xc5\x18\x7f\x18\xaf\x17\xa2\x16\x18\x15F\x13}\x11\x00\x10U\x0e\xf4\x0b\n\tU\x06E\x04X\x02%\x00.\xfe\xae\xfcX\xfb}\xf9\xda\xf7,\xf7\xac\xf6\x87\xf53\xf4\x94\xf3e\xf3\xbf\xf2\n\xf2 
\xf2H\xf2\xb0\xf1\xf1\xf0\xd5\xf09\xf1\x1c\xf1\xc0\xf0#\xf1\xba\xf1\xbe\xf1\xac\xf1@\xf2[\xf3\xeb\xf3\x15\xf4\xcf\xf4\xd5\xf5\\\xf6\xb3\xf6\x95\xf7\xbe\xf8_\xf9\x92\xf9\xf5\xf9\xb4\xfa\x14\xfb2\xfb\xac\xfbO\xfc\x81\xfcq\xfc\x8a\xfc\xe2\xfc\xf6\xfc\xbf\xfc\xd7\xfc6\xfdt\xfd\x87\xfd\xb5\xfd\xf3\xfd0\xfeh\xfe\xdb\xfe\x7f\xff\x08\x00\x8e\x00\r\x01\xb7\x01R\x02\x08\x03\xbe\x03E\x04\xbd\x048\x05\x98\x05\xe4\x05V\x06\xd6\x06\xe7\x06t\x069\x06T\x06\r\x06N\x05\xdc\x04\xb9\x04j\x04\x9b\x03\xae\x02%\x02\x15\x02\xe3\x01+\x01\x00\x00/\xff"\xff\xc0\xfew\xfe:\xfe\x18\xfem\xfe\xa1\xfed\xfe\xb8\xfe\xb8\xff\xb1\x00\xf8\x00!\x01\x8a\x02Y\x04a\x05\xc3\x06\xe6\x08\x93\n\xfa\na\x0b\xf8\x0c\xd2\x0e\xb2\x0f\x01\x10\xcd\x10\x9e\x11Y\x11\xb2\x10\x9e\x10\xc3\x10\x11\x10\x80\x0eM\r\x82\x0cD\x0b\xa4\t\xfc\x07\x8d\x06\xc5\x04\xc2\x02\x00\x01\xb5\xffz\xfe\xe2\xfc4\xfb\xca\xf9\x9b\xf8\x94\xf7\x90\xf6\xc6\xf5!\xf5T\xf4\xc8\xf3|\xf35\xf3&\xf3#\xf38\xf3A\xf3O\xf3\xb2\xf3+\xf4w\xf4\xc8\xf4H\xf5\xec\xf5p\xf6\xbb\xf6\x19\xf7\x99\xf7\x04\xf8j\xf8\x0c\xf9\xc6\xf90\xfaa\xfa\x90\xfa\xdd\xfa!\xfbI\xfb\x82\xfb\xd4\xfb\x16\xfc=\xfcU\xfc\x83\xfc\xa6\xfc\xae\xfc\x9f\xfc\xb6\xfc1\xfd\xb5\xfd-\xfe\x9f\xfe\xde\xfe\xf5\xfe!\xff\x9a\xffr\x007\x01\xc6\x01&\x02s\x02\n\x03\x94\x03\t\x04r\x04\xda\x04\x1f\x052\x05y\x05\xff\x050\x06\xd2\x05a\x05V\x05w\x05!\x05\xb6\x04`\x04\xa2\x03\xd5\x029\x02+\x02\xfa\x01\x16\x01x\x00Q\x00\xe3\xff>\xff\x0b\xffr\xff\x88\xff\x05\xff\xb1\xfe\r\xff{\xff\xb6\xff\x1f\x00L\x00\x9e\x00\xd8\x00\x1b\x01\x14\x02\xae\x02\xec\x02)\x03\x87\x03\xac\x03\x08\x04\xa8\x04`\x05\xe9\x05\xef\x05\xec\x05,\x06\x18\x06\x12\x06\x01\x06\xc2\x05\xc2\x05M\x05\xa4\x04\x06\x04v\x03\x0c\x03t\x02\x9a\x01\x01\x01\xa2\x00\xcc\xffY\xfe\xa0\xfd\xba\xfd\xaa\xfd\xde\xfc\x1c\xfc\r\xfc[\xfbB\xfa\xeb\xf9\x98\xfa#\xfb\xe7\xfa\xcb\xfa\xee\xfa\xe0\xfa\xcb\xfa9\xfb\xe7\xfb\x83\xfc\x88\xfc\xc0\xfc\xaf\xfd\x8a\xfe\xf2\xfe/\xffo\xff\x86\xff\x9c\xffU\xff\xb5\xffK\x00]\x00e\x00\x1a\x00\xf4\xff\xe6\xff\x99\xff\x99\xffy\xff*\xff!\xff\x11\xff%\xff\xe5\xfe\xb6\xfe\x9b\xfef\xfeD\xfeL\xfe\x98\xfeq\xfe\xcf\xfe\xab\xfe\x8e\xfe\xcf\xfe\xdf\xfe\xb7\xfeV\xff\x9c\xffJ\xff\xa3\xffi\xff\x88\xffV\xff4\xff\x95\xff\xa0\xff\x96\xff\xe9\xfe\xf4\xfe\xc3\xfeV\xfe_\xfe_\xfe8\xff\x80\x00\xca\x02\xb8\x012\xfe\x82\xfc\x89\xfd!\xfei\xffr\x00\xc4\x00l\x004\xfe\xa2\xfe[\xff\x17\xff\x81\xffH\x01\x12\x02Z\x02\x84\x02M\x03\xf5\x03\x1c\x04B\x04Z\x04r\x05\xe9\x05\xdf\x05U\x06\xa3\x06*\x06\xf9\x05\x07\x05.\x05\x02\x05\xed\x03\x7f\x03\xef\x02\xfa\x01G\x01\x08\x01(\x00W\x00\x01\xff\xd9\xfd\x92\xfc^\xfc\xe6\xfc<\xfc+\xfc:\xfcV\xfcw\xfb\x13\xfc\x7f\xfc\x80\xfc\xed\xfc\x87\xfd\xb0\xfd\xca\xfd\x86\xfer\xfe\xb3\xfe\xb6\xfe\x0c\xff 
\xff\xaf\xff\x92\xff\x18\x00Q\x00\xa0\xff\xc0\xff\r\x00\x92\x00\xd8\x00\x05\x01\\\x01\xc9\x01H\x02\xcd\x02\x98\x02\x13\x03S\x03\x08\x04\xc7\x03N\x044\x04t\x03\xd7\x03%\x03\xd6\x02\x13\x03\xfd\x02E\x02\xbb\x018\x01&\x01%\x00l\xffl\xff\x90\xfe\x1b\xffH\xfe\xd8\xffu\xfe\n\xfdd\xfd\x90\xfc\x05\xfd\xfa\xfb\x0c\xfd\x1c\xfe\xc7\xfdG\xfd\x08\xfdn\xfc\\\xfcI\xfb3\xfd\xae\xfd\x8e\xfe\xcd\xfe\x1f\xfe\x86\xfe\x1b\xfdZ\xfe\xf6\xfd\x05\xff\xc0\xfe\x1f\xff\xc7\xff\x81\xff\xb9\xff#\x00\xee\xff\xba\xff\x9b\xfe\xe5\xfe\x02\x00\xd2\xff1\x00\xd9\x00,\x01\xa8\xff\x02\x00Z\x00\xac\x00u\x00\xc1\x00\x8e\x01V\x01\xdf\x00\x03\x02d\x02,\x01\x9d\x01\xd1\x01/\x02^\x01\xee\x01V\x02\xe5\x01\xec\x01\xb4\x00\xa9\x00\xc3\xffM\x00{\x00\xe1\xff\xb5\xff\xba\xfeh\xfe\x0c\xfe\xd9\xfd1\xfe+\xff\xcc\xfd^\xfd\xb4\xfd,\xfe\xac\xfdB\xfeY\xfe\x8f\xfe,\xfe\xe5\xfc\xe2\xfc\x99\xfd\x05\xfe\xa9\xfd\xf4\xfe\x08\xff0\xfe\xb6\xfc\x9f\xfd\xa5\xfd\x91\xfe\x14\xffg\xff\xcd\xff\x83\x00p\x01\xd5\x00\xe2\xff\x90\x00\x94\x03\xc1\x02$\x03\xbf\x03\xba\x04c\x03\x95\x02\x8e\x03h\x05\x0f\x06\xbf\x046\x03\xa4\x04M\x03,\x02b\x01\x85\x03R\x02\x03\x00\xad\x00\x81\x00\x95\xff\x8e\xfd\xd3\xffs\xff\xcd\xfe\xc5\xfd\x1b\xff\xa9\xfdS\xfem\xfdE\xfeK\xfe2\xfd\r\x00E\xfdy\xff(\xfe@\xfe&\xfdO\xfc.\xfeh\xfe\xe8\xfd\x92\xfe:\xff\xad\xfdK\xfd#\xfe\x83\xfe\x10\xff\x94\x00L\xff\x04\xff\xc8\xff!\xff?\x00\xe7\x00\xe0\x02\xac\x01\x7f\x00\xf5\xff\x00\x01\x9b\x01>\x01\x8d\x02\xdb\x01\xff\x01?\x01p\x01\xb8\x00\xc1\x01\xe4\xff_\x00\xa2\x00\xe8\x01\xfb\x01V\x00/\x02\xe0\xff\t\x006\xff\x83\xff\xd6\xff\xa3\x00\xa4\x00u\x01\x83\x00\xf5\xfe\xc9\xff\x9f\xfe\xe7\xff\x1f\xffW\x00`\x01\xf5\xff\x06\xff\xdc\xfeW\xffh\x00\xf0\x00\x83\xff\xe5\xff^\xfe\x96\xffG\x00\xee\xfe9\xff0\xfee\xffV\xfec\xfe\x9f\xfe|\xfe\xa0\xfe[\xfd\x1b\xfd<\xfeO\xfd\xea\xfe\'\xff\x97\xff\xd3\xffX\xfeO\x01/\x01\xf4\x00\xe6\xff\x9e\x02\xb1\x02\xda\x02\x0b\x05\xc4\x03\x91\x04\xa8\x02G\x02\xc1\x03\x16\x04\xf1\x01a\x03\xff\x01\xdc\x01_\x01\xa2\x01G\xff\xdc\xff\xe8\x00\xca\xfd\xeb\xff\x98\xfd\xf0\xfe\xfd\xfc\x1a\x01\xb0\xfc\x8f\xfd<\xfd\x00\xfdv\xff\x81\xfe\xfd\x00\xe1\xf9\xaf\x008\xff\x89\x00J\x00#\xff\xac\xff\xe0\xfe\xfe\xfe\xba\x01y\x01\xec\xfe\x19\x00\xc5\xff\xc0\x02\xf5\x01\x18\x00\x0c\xfe\xd5\xff\xb4\xfds\x01#\x02\xc4\x00\xd5\xff(\xfe\xf0\xfe2\xff\xf2\xfe\x8f\x005\x00 \xfe\x9a\x01.\xffr\x00\xc1\x00\x93\xff\x89\xff"\x00\x17\x02R\x00\x94\xff\xc9\x01\xa7\xff\xd5\xfe(\x00\xa5\xfd\xdb\xffe\x00\x12\xff\xe8\x01\x9a\xff\x84\xfe}\xff4\xfen\xfd\xed\xfcY\x00#\x03\x1a\x00\x9d\xfe$\x00I\xfc%\xff\n\x00\x7f\x00\xca\xfe\xfa\xff\xd3\x01\xbb\xfd\xbc\xfeO\xff\xe0\x00$\xfe\xd2\xff\xb0\x01\xf6\xfb\n\x01U\x00e\x00\xd3\xfdn\xfdX\x01\x0f\xff\xdf\x00\x06\xff\xad\x02\xaa\xfc\x86\x01`\x01\xba\x01\xdd\xfe\xec\x00\n\x05\xbd\x00\xfd\x01\xb3\xfe\x96\x03`\x02?\x01+\x00\xbe\x02\xbb\x00\x12\xff\xeb\xfec\x02\xf6\x025\xfc8\xfc\x19\x01\xee\xfda\x00L\xfd\x0b\x00Z\xfe1\xfbO\x01\xe8\xfbq\xff\x9f\xfcr\xfe\xb0\xfe\xd7\x00\x7f\x00\xa9\xffJ\xfe\x96\xfdt\xfd\xf3\xfd\x9e\x02)\x01\xc1\xff$\x01\x1d\x01\xbb\xff\x12\xff\xa9\x00G\x03~\x01\xda\xff\xcc\xfc\xcb\x02\x91\x02\x86\x06\x98\x01\xdc\x00\xf5\x00\x89\xfc\x7f\x03\xd7\x00\x08\x03\xdf\x01\x85\x00$\x04F\x00\xba\xfc\x87\x01t\x02B\xff\t\xfff\xfe9\x00\xc0\xfeH\x00:\x02\x01\x02\x91\xfeR\xf9\x99\xff\r\xfe\xee\x00\x17\x00\xc1\xfe\x9b\x00^\xfd\xef\x02\x87\x00\x08\xfe\xb2\x00\xd2\xfd\xba\xfe\xb8\xfe\x94\xff\x04\x02\xf8\xfe\xab\xffA\xff\xac\x03\xb5\xfe\x1d\xfd\n\xfb\xf1\xfb@\x02\xeb\x003\xff\x0b\xff@\x00\xce\xfe\xc6\xfe\x94\xfe\xa3\x04\xe5\xfeT\xfd\x1d\x02e\xfb1\x00\xc4\x04\xbc\x02|\x02 
\x03O\x05\x7f\x07{\x08q\x07!\x07<\t3\n\xe5\n#\x0c!\x0f\x03\x12\x16\x11\xf7\x0f\xbf\x0e\x82\x0c\x81\x0b&\x0b\xd5\x0c\xed\r\xfd\x0b\xa8\n\x14\n\x08\t\x13\x06i\x029\x00\xc3\xff\xaa\x00=\x02\xb4\x03\x94\x04\xc0\x02\xf8\xffP\xfeV\x00(\x05\xca\x08>\n\t\x0b\x05\x0eE\x11\xdb\x11#\x0f\xa3\x0cl\r\xf0\x11T\x16B\x18\x9d\x14\x1e\r\xc2\x07\xde\x06\x1e\t|\x07\x8c\x01\x8b\xfc[\xfbn\xfc\x1d\xfaD\xf3\xe9\xea\x0c\xe5\'\xe5\xdb\xe8~\xec\x02\xebP\xe5\xd9\xe1\x95\xe3\xf7\xe7e\xe8X\xe5l\xe6\xf5\xed$\xf6|\xf9\x07\xf7\xd1\xf4\x9f\xf4\x15\xf7O\xfbI\xff0\x02\x03\x03r\x04\xe2\x04G\x03s\xff\xb0\xfc:\xfeb\x01\xa4\x03_\x03V\x01\x8d\xfep\xfa\x93\xf7\x8a\xf6\xea\xf7Y\xf9\xcc\xf9\x18\xfal\xf9\xbb\xf8x\xf6\xf8\xf4\xed\xf3*\xf5z\xf8;\xfc\xe2\xfec\xfe!\xfc\x07\xfa\x98\xf9\n\xfbm\xfc\x12\xfe\xb7\xffL\x01\xeb\x01C\x00\xd7\xfdr\xfb\x84\xfb\xfc\xfc\x19\x00q\x02\x00\x03z\x02\xef\x00\xe1\xff\xa4\xfe\x80\xfe9\x00\xa3\x02\xdb\x044\x05u\x05\xc2\x04^\x02\xb7\xff\xd5\xfe\x98\x01\xe1\x04A\x06\xea\x04\xd2\x02\x04\x01\\\x00\xc6\x02\x07\x03\x86\x01\xb3\xfe\x1a\xff\xbb\x04\xcd\x061\x03\x80\xfe\xbf\xfc\xac\x00\x14\x03p\x02\xcb\x011\x01\x1d\x04\xc5\x06c\x08\x07\x07\xec\x03>\x04\\\x08\xb4\x0cB\x0e\xc2\nR\x083\t\xeb\n\xe2\r\xbe\x0b\xf6\x08m\x07v\x07\\\t\x14\t\xe9\x06b\x04W\x02\x9a\x02\xe4\x03\x89\x04\xd9\x04\r\x03\xa9\x00\xb0\xfe\x1d\xfd\x8d\xfc\x1f\xfd$\x02!\n\x1f\x0f\x16\x0f\xe6\x08\x05\x05\xc5\x05\x01\r\xa5\x15 \x19\xe9\x17\xcf\x13\xc4\x14s\x14\xda\x10\xe4\x08\x99\x02\xfb\x04V\n\xd1\r\x9f\n\xc3\x00\x02\xf7\x0c\xf0\x8b\xef\xd8\xf1\xd1\xf2\xbd\xf3}\xf2>\xf1d\xec\x18\xe71\xe5%\xe6\x1a\xebB\xef\x83\xf2\xf4\xf3\xc9\xf2\xb8\xf2\xeb\xf1\xfb\xf0\x8c\xf1\xff\xf4\xb6\xfc\xe3\x02\xec\x04\xc7\x00$\xfa\xf9\xf6P\xf8\xda\xfd\xa4\x01\xda\x02\xbd\x01(\xfe\xad\xfb%\xf9\x07\xf9u\xf8\xdf\xf6\xff\xf6\x00\xf9O\xfcD\xfcl\xf8\x01\xf3m\xf1\n\xf4\xdf\xf9(\xfe_\xfd*\xfb\x84\xf8p\xfa\x02\xfd\xc1\xfdb\xfd\x14\xfd"\x00$\x03b\x03$\x00\xda\xfb\x1d\xf9\x1c\xfa\xb6\xfd\xb4\x005\x01,\xfe\x11\xfb\xad\xf9k\xfa}\xfbu\xfb+\xfb\xa8\xfd\x91\x01\x9e\x04\x90\x03F\xfeh\xfbj\xfcK\x00\xa2\x03\x98\x02\\\x01\x0c\x02\x16\x04\xc2\x05?\x02\xe2\xfc\xbf\xfee\x03=\to\n\xc5\x07/\x08Y\x06k\x02\xe7\x01\xe6\x01o\x07\x08\n\x8c\x07\xbc\x06\x1d\x03\xc3\x01\x00\x01\xbf\x00\xe5\x033\x07\xa4\nP\x0et\x0b4\x07\xc2\x019\x02g\x08\xb4\x0c\x97\x11\x90\x0f\xbc\x0c\xc0\x08S\x06\xf5\x06\x1e\x05H\x01}\x02\xad\x06=\x0c\xfb\x0cl\x07O\x05"\x01\x10\xff\x9d\xff!\x01\xf0\x05\xb7\x08\x84\t\xd9\t\xe0\x04%\xff\x11\xfb$\xfb\xa3\x01\x15\x08\xdb\x0c\xa8\n\x00\x03~\xfa\x9a\xf7\xb1\xfb\xf5\x00-\x03\x9c\x01\x88\x00\x8d\xffS\xfd\xce\xfa\xa6\xf80\xfb\x90\xfd\x93\x01\xfe\x01\x95\xff\x01\xfd*\xf7\t\xf6>\xf4\x8c\xf7\xa3\xfa\xdc\xf9m\xf8\xc4\xf3\x92\xf3\xcd\xf4\x91\xf7\xb8\xf9y\xf9\xcc\xfa)\xfb\xb0\xfdF\xfe\xeb\xfe(\x00q\xffC\x00X\xfc\x86\xf9\xaa\xf8\x06\xf9\x88\xfbo\xfb2\xf90\xf8^\xf6>\xf6\x89\xf5q\xf4c\xf8\xec\xfc\xca\x01\x84\x00\'\xfbC\xf8k\xf9\xe6\xfdr\x00-\x00%\x00\xdc\x00\xd9\x02\x06\x03!\x00v\xfd\xb6\xfc\xbc\x01\xa6\x08\xf4\t\xea\x04i\xfe8\xfde\x00\x04\x02\xd1\x02G\x01\xbb\x00\xe3\xff\xf4\xfe\x9a\xfc\xec\xf9\xe6\xf8y\xf9u\xfbl\xfa\xc8\xfa@\xfb,\xfc\x90\xfcf\xf9\xe7\xf9\xf4\xfc\xd7\x01\x04\x06L\x05\x0f\x05\xb9\x04x\x05\x84\x07\x93\x08z\x08\xd9\x03\xf2\x01\xf3\x02O\x07\xef\x08m\x06w\x04u\x01>\x02\xe2\x02>\x03\x1e\x03o\xff\xe9\xfd\xd8\xfef\x02#\x07-\x077\x03\x9d\xfd5\xf9\xe0\xf9\x9e\xff\xf0\x05\xc9\t\x9c\x08\xb8\x04\t\x03:\x02\xe8\x03\xe6\x024\x01E\x02\x89\x06\x81\x0b\xb7\t\xed\x03:\xfcr\xf8\xff\xfa\xf7\x00\xed\x06\xc8\x06\x1c\x04\x1a\x00]\xfc\x85\xf9\x9f\xf8\xd9\xfb\x88\x00\x8f\x02\x06\x01\xa4\xfd\xb0\xfbt\xfa\xe9\xf9\xca\xfa,\xfc\
xca\xfe\xd3\x01c\x05\x1a\x067\x04j\x01\xad\x00\xd9\x01\xbb\x01\xe9\x01V\x01\x16\x02;\x01\xcb\xfe3\xfd\x8d\xfcR\xfcE\xfc\xe3\xfc\'\xff\xba\x01\xdb\x03}\x05\x1c\x04\xc9\x00\x91\xfc\xeb\xfb\x06\xff\xd2\x03\x19\x07`\x06s\x02\x93\xfd\x90\xfav\xfa\x12\xfd\xf2\xffW\x03\xf7\x06\xe5\n6\x0cI\t\xee\x01=\xfbV\xf9F\xff\xec\x06}\x08\xfc\x02}\xf9\xd1\xf4\x90\xf4\x90\xf7)\xfb\x05\xfc\x80\xfd \xff\xb0\x001\x01\x1c\xfc\xd6\xf6\xd4\xf2\xe1\xf4\xaa\xfb\xba\x00\x82\x03\xa8\x00\xb8\xfb\xcf\xf5\x81\xf3$\xf7K\xfc\xd5\xfe\xb4\xfe\x80\xfe\xa2\xffQ\x01\x17\x00G\xfe\x8b\xfbU\xfb\x04\xff\xc5\x01\xe6\x02\xae\xff\x07\xfd\xff\xfd\x9e\x01\xdc\x04y\x03\xb5\xff\xb0\xfd\xcb\xfdy\xff\x8f\x01\xd7\x02\xad\x01\xa7\xff\x91\xff\x15\x005\x00"\xff\xdc\xff\x80\x00,\x02\x81\x03\x9b\x04\x08\x06\x0f\x04W\x02\x06\x00\xa7\x00j\x03\x90\x04\x99\x04K\x02k\x00\x9b\x00\x7f\x00@\x01\xbc\x00\x17\x00\xf1\x01\xe7\x02b\x03\xc0\x00\xc5\xfd\xc6\xfb?\xfb\xe7\xff\xb9\x04\xc0\x04O\xfe\x1d\xf7i\xf6D\xfcp\x01\x17\x02\xc9\xfe\xcf\xfc\xa0\xfe\xc5\xffZ\xfe"\xfa7\xf7 \xf8\xe5\xfc/\x02\xf4\x04\xad\x06\xbc\x05T\x02\x01\xfd\xb0\xf8D\xfb%\x03i\x0b\xf1\x0c\xc8\x06\x8a\xfe\xce\xf7\xf7\xf4\xfa\xf5v\xfa\xeb\xfd\xa8\xff\x1e\x03*\x05r\x04\x01\x01l\xfdY\xfe\\\x00\xc5\x04\x00\nR\x0c#\x0b\x82\x05\xab\xfe\xf4\xf9J\xfa\xb7\xfdw\x01u\x03Z\x03\x8b\x00_\xfdn\xfdF\x00\xd2\x04p\x08\x83\x06\x10\x02\xdd\xff\x18\x00\xaa\xffa\xfd\x93\xfd\x86\x00\x84\x05\xf3\x07`\x07\x11\x033\xfe\xfa\xfb\x98\xf8\xae\xfa\xfb\xff\x0e\x06q\t\xfe\x04\x05\xfe\xe9\xf6?\xf6\x12\xfc\xbb\x00\xa1\x01j\xff\x92\xff\x1c\x02\xae\x03\x99\x01\x97\xfeM\xfc\xbe\xfb\x11\xfc\xc1\xfe0\x03\x1c\x03\x08\xff\xdc\xf9\xb1\xf8z\xfcq\x00\x7f\x02\xf5\x01\xf0\xff\xf7\xfe\r\xfez\xffr\x01\x8c\x01\xba\x00\xb1\xfe\xca\xff\x81\x00\x1b\xff\x9c\xfbP\xf7\x92\xf8\xa1\xfc\x85\x01\xf2\x04\xd8\x04\xe2\x02\xbe\xfek\xfe6\xff\xcc\xff?\x01L\x04\x88\x07\xed\x04e\x01\x01\xff\x9d\xfeQ\xffj\x01\xe8\x03\x12\x06\xba\x06\x87\x04o\x00\xfc\xfc\xb5\xfc\x1c\xff\xb3\x03\xa5\x057\x01\x87\xf9\x1e\xf7\x99\xfa\x15\xff_\x01\\\x01y\x003\xfdv\xfbQ\xfc\x81\xfe\x87\xff\xfd\xfe`\x01\xdb\x04\xa4\x03k\xff\x1a\xfd\xe9\xfd\t\xfe\xfd\xfc\xb6\xfd\x04\x02\xf4\x04\xf4\x01\x01\xfb\xea\xf3\x1e\xf3\xa3\xf6\x90\xfdP\x04\xe0\x03\xbb\x00\n\xfeb\xfd\xb3\xfe\xdc\xfd\xef\xfd\x93\xfe\xb1\x01 \x06K\x064\x02o\xfa\xfa\xf5\x11\xf5h\xf8^\xfe\xee\x04\x12\t\xb5\x07\xee\x05\xff\x04\x97\x02\xca\xfdB\xfd\x1c\x016\x06U\nq\t\xef\x03\x82\xf9\xdb\xf0\xa8\xf1B\xfc\x89\x08\x9b\x0c`\x08\xed\x00\xf9\xf8\x06\xf6D\xfa\xce\x01\xf0\x06{\t\x86\x0c2\n\xaf\x01\xfa\xf8\x93\xf5{\xf7\x04\xfd\xeb\x04:\x0b\xb5\x0c\x93\x07\xed\xfd\xf3\xf3\xf1\xf1\xc8\xf9\xb8\x02J\ne\x0c\x9f\x07N\xff\x1b\xf7&\xf4\xf7\xf4\xd8\xf9\x8c\x01\x9f\x08o\ro\x0b\x01\x04)\xfa\xb2\xf1l\xf1M\xf7\xc8\x00 \tC\n1\x07*\x02v\xfc.\xf9\xb2\xf6\xa9\xf7B\xfdh\x03\xc1\x08\x82\x06\x12\xff\xf5\xf8\xa6\xf6V\xf9\x0c\xfey\x04k\x086\x05h\xff\x99\xf99\xf8\x99\xfd\xe1\x04\xb4\t\xf0\x08\xc1\x04\x00\x00 \xfc\x7f\xfb\x82\xfc:\xfd\xb3\xfdW\x00\x04\x05j\x08O\x08u\x04\x88\xff\xab\xfa\xe9\xf9\xd5\xfd_\x04\x82\t\xec\x08\r\x07\xdf\x03\xcb\xff\x0b\xfc\xa1\xf7\xcb\xf7\x17\xfb\xc2\xfd\xd3\x01\x16\x06:\x08%\x06n\x00\x95\xfa\xd1\xf4\xee\xf4\xfb\xfa\x17\xff\xac\x01"\x03L\x04\x80\x03n\xfe\xaf\xfa\xa9\xf8\xfe\xf86\xfc\x92\xfe\x16\x03:\x05\x9c\x04\x9b\x02}\xfd\xda\xf9\xfa\xf5\xc0\xf5"\xfd\xfe\x03U\x07>\x03\x9d\xfc\x08\xfb9\xfc4\xfe\x9d\xfe\xb9\xfei\x00\x95\x02\x16\x05\x10\x07[\x03`\xfd\xc4\xfaS\xfc\xa0\xfe\x90\x01\x9b\x05F\x08\xbb\x06\x82\x02j\xffS\xfc\x86\xfa\x0c\xfc\x9f\xff\xa1\x03\xc5\x06\xce\x06\xcf\x04\xba\x00 
\xfb*\xf6(\xf5\x81\xfa\xa4\x02\x83\x08\x97\n1\x07>\x01c\xfdn\xfd\xa0\xfd{\xfdA\x01V\x05:\x08\xeb\x07\xd1\x05$\x01@\xfc\x12\xf9M\xf8\x05\xfd\x9a\x04\xa6\x08\xa0\x06\xde\x01\x00\xff1\xfd\x03\xfbq\xf9\x96\xfa\x97\xffe\x03\xad\x05\xec\x04U\x00\xfd\xfa\x8f\xf6\xde\xf6\x96\xfaD\x01\xc7\t\xe4\x0ct\x08v\x013\xfc\x84\xf8I\xf6\x08\xf8\x8b\xfd\xe2\x05\x96\x0c<\r\xf3\x07\xaa\xfe\xb8\xf3E\xec\xf2\xed\xc3\xf5\x88\x01\xaa\x0c\xa7\x10b\x0e\xa8\x06\x8b\xfd\xda\xf6X\xf1\xfd\xf2\x93\xfa\xb7\x041\x0e\x96\x11\xe0\x0c\xf0\xff\xd2\xf3s\xed\x98\xf2\x04\xff\xf5\x06U\tz\x07\xa4\x07\xec\x07\x0c\x05\x16\xfe\x98\xf6\xeb\xf4T\xf9\xcf\x00\xb5\x05\xfa\x06\x9f\x03\x03\xfc\x00\xf7\xe8\xfa\xd1\x01\xfd\x04{\x04j\x02o\xff\t\xfe|\x01\xb8\x04=\x02\x11\xfd\x87\xfb\xc6\xfa\xd2\xfb\x1a\x00F\x03\xa6\xff\xa2\xfa9\xfaA\xfd\x05\x02\x9c\x04i\x04\xfd\x00T\xfe\xe8\xfd\xe9\xfcR\xfdc\xfe\xc4\x00\t\x02q\xfem\xfa<\xf7\x07\xf6\xc6\xf9\x84\xff\x8e\x05\xc7\x07\xc8\x06h\x02\x8c\xf9\x13\xf5\x18\xf6\x08\xfb\x82\x02\xb2\t\xc4\x0f\x1c\x0f\xaa\x07L\xff\xf8\xf6\x1f\xf3\xcc\xf74\xff\xae\x08\xee\x0cj\x0b\x11\x05\x82\xf9\xb9\xf0I\xee>\xf8c\x07=\x11\xcb\x13N\x0f\xcb\x07\xf8\xff\xa9\xf8>\xf4\xe1\xf5/\xfd\xb8\x04G\x08\xe8\x07\xc3\x02\xf3\xf7\xfe\xec\x92\xecj\xf6\xae\x03b\x0e\x03\x14\x9e\x11\xa8\x07T\xfb\xf3\xf1\xdd\xf2\xe7\xfbc\x04\'\n\xd6\n<\x08\x01\x02\xe2\xf6+\xf1\xf0\xf2\xf9\xf6\x8d\xfd-\x07\xbe\x0f\xde\x0f;\x08\x8b\xfc\xed\xf0x\xeeR\xf3|\xf8\xa3\xfe:\x05\x99\x0b\xd6\x0b\xee\x03t\xfd\xf6\xf8\xd7\xf5e\xf6\xd7\xf9\x1a\x03Y\x0b\x8a\r\xaf\tv\xffl\xf8\x84\xf4@\xf3A\xfb\xdc\x03Q\t\xd6\x07V\x03\x00\x02\x97\x01;\x00\x96\xfc\xad\xfa6\xfc \xff\xb4\x019\x04\xec\x02C\xff\xaf\xfd\x00\xfe\x89\xfe]\xff\xaa\x00f\x01\xa2\xff\xa1\x00\xca\x04\x84\x06\x81\x03\xa2\xfe^\xfe*\xff`\x00\xfe\x00x\xff\x16\xfeF\xfb\xea\xf8\x9c\xf7\x1d\xf8\xd2\xfe\xfb\x05\x94\n\xc6\t9\x05V\x01\x8b\xfdE\xfa\xc1\xf9\xe6\xfd\xe9\x01\xf1\x04\x1f\x07\xb3\x07\xda\x04\xd5\xff\x0f\xfb\xfa\xf6\xce\xf8\xd7\xfe\xc3\x03\xe5\x06\xb2\x08\xc2\x08\xd8\x03g\xfc\x98\xf8\x02\xf8\x87\xfaG\xff`\x04\x7f\x07n\x05\xa8\x00\xf1\xfc\xb7\xfaT\xfa\x07\xfb\x91\xfd\xfe\x01\x8b\x06\xfd\x07|\x05\xef\xff\x07\xfb\xad\xf9$\xfb\xd6\xff\x80\x05\xcd\ta\n.\x07\x1b\x01N\xfb*\xf8\xca\xf7%\xfb\xf2\xff\xbb\x03^\x05!\x03\xa5\xfe\xd0\xfb\xfb\xf8h\xf8i\xf9\xcb\xfc\xf3\x01\xc5\x05\x8e\x08^\x08\xa1\x05\x02\x00u\xfb\x06\xfb\xff\xfbU\xfd;\xfe-\x01\xad\x04!\x06\xcc\x02\xd4\xfb\x97\xf9\xfd\xfd\xf1\x00\xb4\x00\x99\x012\x03n\x00A\xfdM\x00\x9a\x02\xa5\x01\x81\x00T\xff\xa1\xfb\x19\xf8\xba\xf8Y\xfb\x03\xfe\r\x01g\x04\xf6\x02\x7f\x00h\xff\xa2\xffF\xfe\xfe\xfcH\xfe\xdb\xfeD\xff\xff\xff\xef\x01b\x02\x80\x00\xbd\xfe\t\xff\x95\x00\x95\x00\t\x01\x02\x03\x1e\x027\xff\xd0\xfez\xff\xaf\xfe\xa4\xfc\xff\xfb@\xfd\xe0\xff\xae\x02t\x02\x17\xffQ\xfb*\xf9\xaf\xfa\xd8\xfd|\x01d\x03\xc5\x04\xc3\x04,\x01\x0b\xfd\x80\xfb\xa8\xfbb\xfd\xf7\xffg\x03D\x05\x80\x03\x1f\x01\xbc\xfe\xdd\xff\x90\x03<\x04\x87\x01\x81\xfe>\xfe\x99\x01\x80\x03&\x02\xe9\x00+\xff6\xfc<\xfa#\xfb\x83\xfb;\xfbM\xffH\x06\\\x06\xf7\x00\xb0\xfd\xd8\xfd\x84\xffJ\xff\x86\xfd\xb1\xfe\xb5\x04-\nH\ta\x02\x00\xfb!\xf8\x9d\xf8M\xfd\xaa\x04Q\x08\x8e\x07\x17\x04"\x01\x9c\xfdj\xfa\xdb\xfb+\xff\xab\x01e\x03\x93\x02\xa9\x01\r\x02\xe3\xff\xe7\xfd\xfe\xfcD\xfc\xbd\xff\x94\x04\xa9\x07\x82\x07\x01\x03\x9d\xfet\xfbY\xfa\xb0\xfe\xf9\x01\x92\x01\xf5\x00f\x00\x05\x02\t\x00\xbe\xfd\xaa\xfc\xfc\xf9\x17\xfb\xbd\xffP\x04x\x08\xa8\x07/\x02\xcd\xfb\xb6\xf8\xc7\xfc\xf5\x02\x11\x07,\x07g\x04\x81\x03\x0f\x04\xb8\x02_\xfe\xf0\xf8\xbe\xf7\xc9\xf9\xd9\xfe\x9a\x04}\x05C\x03\xa4\xfd\x9a\xf9\'\xf7\xe7\xf5\xb5\xf8\xa9\xfb\xaf\xffW\x03%\x04\xc8\x03
\xc0\x02u\x028\x01\x03\x00\xb0\x01\xe1\x02\x9a\x02\xc8\x03+\x04%\x02\xfb\xffp\xff\xf9\xff\x95\xff\xba\xff\xc6\x00\x90\xff\xbb\xfe\x94\xfd\xd5\xfc\x19\xfe:\xff\x91\xfe\xf3\xfb\xb1\xfc#\xff\x87\xff%\xff\x84\xfe\x1d\xfd/\xfd\xe4\xfe\x87\x00\n\x01\xe7\x00\x1b\x019\xff6\xfc\x8a\xfb\xd4\xfa\x82\xf9\x87\xfb\xb0\xfc%\xfe\\\xff\x9d\xff\x1d\xff\x18\xfc!\xfb\xe8\xfa\x11\xfc\xfc\xfei\x02\xdf\x04I\x04\xd8\x02\xee\x00Q\xffu\xfe^\xfc\xd0\xf9\xfb\xf9\xea\xfeW\x04\xbe\x05\xb1\x04\x97\x00\xc8\xfa\n\xf9\x15\xfd\xea\x02\xf9\x05\xe0\x06S\x06-\x06\x93\x05\xdf\x01W\xfe\x89\xf9\xd9\xf6\x13\xf9\xfa\xfb\xbf\x01?\t\x01\x0c\x03\x06\xe3\xfc\x99\xf94\xf9.\xfa\xb5\x00P\t\\\x0e\x95\x0f\x82\re\x07\xe6\xfd\xff\xf5\xcb\xf3\x18\xf7\x86\x00\x13\nu\r\xa0\x0ce\x06>\xff\xd4\xfa\xe2\xf8C\xfb\xfb\xfc.\x00\xe5\x04B\x08\xd9\t\xc8\x04\x12\xfc1\xf8\x06\xf7\x85\xf7E\xfc\xaa\x020\x075\x04"\x01\x9d\x01\xce\xfer\xfb\x02\xf8g\xf5l\xf8\x1b\xfb\xab\xfc\xc6\xfcH\x02\xcd\r\x13\x12K\x11D\x0f\xa2\x10\x06\x13\x19\x13=\x15B\x17G\x18\xe8\x16y\x13a\x10P\x0b\xb8\x04\xf4\xfd\x1e\xf9\xe0\xf7\xfb\xf8\xcb\xfa,\xfa\x13\xf8\xbf\xf5\x12\xf5\xb8\xf5\xde\xf5\xb3\xf4$\xf3\x97\xf4|\xf5\x1a\xf4\x10\xf6e\xf8^\xf9\xea\xf7\xa6\xf6p\xf7W\xf8\x98\xfaH\xfcl\xfd\x08\xfe~\xff\xed\xffL\xfe\xf0\xfd_\xfc\\\xf8\r\xf5k\xf4o\xf4\x18\xf4\x1c\xf4C\xf4R\xf3\'\xf2\x05\xf2\xae\xf1\xde\xf1z\xf3M\xf5\x05\xf6\x11\xf7\x8b\xf8K\xf8\xeb\xf6\xd8\xf5\xa5\xf5\xd3\xf59\xf7$\xf9\xee\xf9n\xfa\x10\xfb\x97\xfbj\xfbf\xfb\x0e\xfc\xec\xfc\x9b\xfd@\xfda\xfd\xc0\xfd?\xff\x05\x01\xf8\x01\x9b\x01\x19\x02p\x05\x82\x08\x82\x08@\x07\xcb\x06\xab\x06\xb0\x08\xf7\x0b\x07\r \x0b\x97\nz\x0b\xaa\tr\x03\xbc\x00\xce\x04\x1d\x08\xfa\x08\xdd\nI\x0e\xee\x10\xa9\x0f\x1a\x0b$\x08\x9c\t\x86\x0bz\n\x91\tc\x0bV\r&\x0c\x89\te\x05&\x02\xee\x00?\x00\xac\x00\xc9\xfd.\xf9\xd5\xf9(\x06\xb4\x1d\x9e3\x97:\x994\xe4,i(\xc3#e f!\xc4$A%\xd6!G\x1f\x7f\x1aE\x11r\x01\xe3\xef;\xe3\xc9\xdb\x1b\xdb\x1e\xde@\xe2h\xe4N\xe3\x12\xe2\x96\xe1\xe0\xdf\x10\xdc\x83\xd8~\xd8\x1c\xdd\x10\xe5r\xf0\x00\xfd\x16\x03^\x00\x96\xfb\xd3\xf8\xf4\xf8\xbe\xf88\xfa\xed\xfd\xf2\xff\x11\x02W\x03f\x03R\x01\x14\xfbJ\xf4\x87\xee\xd8\xeb\xbf\xeb\x0c\xebg\xecR\xf0\x0e\xf4Y\xf4\xf0\xf3\xf0\xf4u\xf4e\xf3\xa0\xf4\xad\xfae\x01\xa5\x07\xeb\x0e\xec\x12\xc3\x12\xfe\x12\xf7\x13\xae\x14\xa9\x11c\x0e\xa4\r\xdb\ng\x08\xf6\x05&\x02r\xfd\xf8\xf7\xeb\xf41\xf4\xc3\xf3\x12\xf4\x16\xf3A\xf1\x04\xf1\xf1\xf1\xa7\xf3\xfc\xf4\x82\xf5Q\xf6Y\xf6k\xf6\x14\xf8C\xf9\xd5\xf9R\xf9h\xf9`\xfb\x11\xfd.\xfe\x06\xfed\xfc\xe0\xfa\xce\xf9\xe1\xf8\xef\xf8\xe8\xf8c\xf8\xa7\xf5\x1d\xf1\xf3\xedn\xed\x97\xedO\xed*\xeeA\xf1,\xf5\x07\xf9T\xfc5\xffb\x00\xe9\xff\x82\x01&\x05\x7f\t\xee\x0e\xd8\x11\xfd\x13\x07\x14\xf0\x11\xa6\x0c>\tE\x14\xca.DJ9WJW\xb6S@P\xbbI\xb5A?=\xad;X8\x982\x19.<+\xe7")\x12\xe1\xfaM\xe5\xd0\xd6&\xce\xda\xcbU\xcdC\xd2)\xd8"\xdb\xc5\xdb\xb3\xdb\x8d\xdb+\xda\xda\xd7\x12\xda1\xe4R\xf3\xe5\x01\xa2\x0b6\x10]\x0fz\x0c\xc0\x07\x9b\x03r\x00\x88\xfeL\xfd\xa1\xfb\x03\xfc:\xfcE\xf8\xed\xf0\xd3\xe7\x90\xdfA\xd8\xa4\xd3\xee\xd3X\xd5\xe3\xd7\x8a\xdb>\xe0d\xe4\x15\xe7\x08\xea\xef\xec\xc2\xef\xa6\xf3\xb4\xf9%\x01b\t\\\x11\x05\x17\xf0\x19B\x1c\xaf\x1d\xcd\x1d\xf1\x1b\xb2\x19\xdb\x17g\x16\x96\x17x\x1a\xde\x1a\x0f\x18^\x12\x13\x0b\x97\x02(\xfbQ\xf7Z\xf5\x9c\xf3i\xf3X\xf59\xf8\xeb\xfa\x91\xfc\xdc\xfd\x05\xfe\xc9\xfd\xff\xfe\x00\x02\xd6\x05\x07\t)\x0b\xd2\x0bi\n=\x06h\x00\xf2\xf8\xdc\xf0\x10\xea(\xe5\x83\xe2R\xe1\xf7\xe0\xd8\xe0\xaa\xdfH\xdda\xda\x02\xd8V\xd7\x1a\xd9\x0c\xdd\xfd\xe1-\xe7p\xeb\x8b\xee=\xf1\x85\xf34\xf6\xe9\xf9\x98\xfd\xb7\xffD\x02\x9a\x06\xda\x0c\xb2\x13\xac\x18\xed\x1cF\x1f\xe0\x1fO 
>#\xa2)\xd30A8\xc5A\xdbK&R\xdaPLJeD\x8b=\xc04H*\xce"\xbc\x1f\xac\x1b\xa3\x14\x12\x0b\xd0\x02\xbb\xfb\xb3\xf2\xdd\xe8\xaa\xe0(\xdc\xe6\xdbB\xdd!\xe0\xe2\xe45\xebn\xf0~\xf2\x91\xf3\x88\xf6g\xfa\x0f\xfc\xf2\xfc2\xff\xd0\x02\xb1\x05\xa9\x07\xc4\x08\xe8\x07\x97\x04\xfc\xff\xeb\xf9\xe7\xf3W\xef\x91\xeb\xd4\xe6I\xe1=\xde\xdc\xdca\xdbj\xd9X\xd8\x83\xd7\x0b\xd6\x83\xd6\t\xdaU\xdf\x87\xe5\xb5\xeb\x07\xf1\xfd\xf5\xf4\xfb\x0b\x03\x05\x083\n\xf2\x0b^\x0e#\x10\xcc\x11\xc7\x13\xd3\x14\x9f\x13\x19\x11\xf7\x0e\x0f\r\xb2\x0b\x00\x0b\xe4\x08-\x068\x04\xd3\x04\xf1\x05\xf3\x05\xb5\x05p\x05]\x05\x92\x05\xe1\x06\xc6\x08x\n\xed\n\x8e\np\n9\x0b\xf9\x0c \x0e\xa4\r\xd6\x0b\xd7\t\xff\x07\xdd\x05-\x03\xc8\xff~\xfb\xaf\xf6G\xf2$\xef\x9e\xec\xce\xe9;\xe6\xae\xe2\x05\xe0\xf9\xde0\xdf\x80\xdfG\xe0\xc7\xe1\xed\xe3Y\xe6T\xe9>\xed\x12\xf1\xb5\xf2+\xf37\xf4\xbf\xf6\xb4\xf8\r\xf8w\xf6\x16\xf6\x9b\xf6\xf8\xf5Y\xf4\xb3\xf3\xdf\xf3\xab\xf3;\xf26\xf1\xb1\xf2\x16\xf6\xce\xf9\xd4\xfd\xe8\x02\xc6\n\x9a\x13\x07\x1d9*\xe7=\xaeR;]\xf5\\\x1a[(_\xd8aJ[\xc6O\xdbH\xc1E\xc5=@0\xed#\x18\x1bT\x0f\n\xfcN\xe7\xeb\xda6\xd6\x9a\xd1!\xca\xd1\xc6\xcb\xca\xf3\xd04\xd4#\xd79\xddZ\xe3\xb0\xe6A\xe9\xfd\xee\xf7\xf7\xa9\x01\xac\x08\x11\r\x9c\x10\x8d\x14\\\x16\x14\x14s\x0f?\n\x97\x02\x92\xf9\xcb\xf2@\xee6\xe9\xc2\xe2\xb1\xdd\xae\xd9\xd7\xd4\x07\xd1\xcf\xcf\x15\xcf\x05\xce\x00\xce\x01\xd1\xf9\xd6\n\xdf]\xe8\x91\xf0k\xf7^\xfe\xae\x05\xb9\x0b}\x10\xf7\x14/\x19\xf8\x1aU\x1b&\x1d7 \xfa!\x94 z\x1c\xa6\x18\xe8\x14\xe3\x11<\r\x82\x08]\x04Z\x01\x1e\xff\xd6\xfcH\xfd`\xffb\x00t\xffY\xff\x04\x03\x11\x07\xaf\x08g\t\x80\n\xf9\x0b\x08\x0cN\x0cu\r\xbd\r\xdf\x0cZ\n\xea\x07\xba\x05\xf9\x03\xfc\x00\xa5\xfbX\xf6*\xf2\x1b\xef\x18\xec\xbe\xe96\xe8~\xe6\xba\xe4.\xe3\xa8\xe2c\xe2\xfc\xe1\xf3\xe1&\xe2\x8b\xe3\x9c\xe6\xbf\xeak\xee\xed\xf0\x04\xf3\xc5\xf4\x95\xf5\x8c\xf5\xae\xf5\xe8\xf5\xbf\xf4\x9a\xf2\xd2\xf0*\xf1\xbf\xf2i\xf4\r\xf5\xf7\xf4\x05\xf5y\xf6u\xf9K\xfc#\xff\xc5\x02q\x06@\n\xba\x104\x1b\x91\'\xf93\x04C#U\xa0a5c{`\xafa\x15d\xcc]GQ\x85G\xedB\xb5;\xa1.B"Q\x18 \x0c+\xfa&\xe7\xad\xd9\xcd\xd1\xd6\xc9\xb3\xc0\xb5\xba\x9f\xbc\x86\xc3j\xc9\x11\xcdl\xd2\xe0\xd9\x89\xdfZ\xe3\xf0\xe8\xe4\xf1\x10\xfap\xff\t\x04y\n\xf1\x11)\x17\\\x17\x01\x14\x97\x0f\x96\n\xc3\x02\x0f\xf9l\xf1B\xec\xa6\xe6\x93\xdf\xe3\xda\xe3\xd9v\xd9\x14\xd7\x0f\xd4"\xd2\xc0\xd1\x11\xd3\x01\xd6\x8e\xda\xd7\xe0\xb9\xe8\xd0\xf0\x1d\xf9l\x02\xd7\x0b\x07\x13a\x17\x03\x1a\x9d\x1c\xa4\x1ej \xc0!N"\xd2!\xb8 \xaf\x1e\x90\x1c,\x1a#\x16K\x0fA\x07@\x01\x92\xfdV\xfa>\xf8=\xf8\x17\xf9\xe3\xf8\x1e\xf9<\xfc\xa8\xff\xca\xff_\xfd5\xfd\xf0\xff\x9a\x02\xf9\x04\x14\x08\xd7\x0b\xe3\r\xac\x0eR\x0f-\x0f\x0f\ru\x08\xae\x02\xeb\xfc\xcf\xf8\x92\xf5\'\xf2O\xee\xc4\xeav\xe8e\xe65\xe4!\xe2a\xe0\xd2\xde9\xdde\xddz\xe0$\xe5\xc6\xe9\xb3\xed\xf6\xf1\x03\xf7\x85\xfb\x04\xff#\x01\'\x02\x93\x02\x9b\x02\xca\x021\x03(\x04\x95\x04r\x03:\x01\xfe\xfe\xf3\xfc[\xfa\xe4\xf6\xa4\xf3\xb4\xf1%\xf1\xed\xf1\xad\xf3\xf4\xf6\xae\xfa\xb7\xfd?\x00\x99\x02<\x06\xa6\x0b\xa3\x13\x86 \x9f1\xf2AJL\x13R>YKaxc\xe6\\\xdfS\xd7M\xe0H\x1b@\xea5\xcf-\xab%_\x19\x8e\t\xd4\xfb\x90\xf0\x99\xe4\x87\xd5\xc4\xc6\xa6\xbc\xd4\xb7\xd9\xb5\x98\xb5a\xb7=\xbc\xb7\xc2;\xc9"\xd0\t\xd8\xf6\xdf\xbc\xe5Y\xea\x85\xf07\xf9\x1b\x03\xeb\x0b\xb8\x12\xbd\x17\xa9\x1b\x9a\x1e&\x1f\xeb\x1cd\x17\xe7\x0f\xda\x06\xc9\xfd^\xf6o\xf0k\xebg\xe66\xe2N\xdf\xa9\xdd\xee\xdc\xab\xdc\xf6\xdb\x88\xdb\x8e\xdcg\xdfN\xe4\xfe\xea\xdf\xf2\xf4\xfa\xb6\x02\xa3\n\xb8\x12\xbc\x19$\x1f\xab!a"v!S +\x1f\xb2\x1d\xca\x1a-\x17\xf5\x12\x8d\x0fx\x0c\x9c\x08v\x04\xee\xfe:\xfa\xd1\xf5B\xf2o\xf0A\xf0n\xf1s\xf2t\xf3 
\xf6\x97\xfa\xca\xfe\x96\x01\xac\x02\xc7\x03\x1e\x05E\x06v\x067\x06\x17\x06\x03\x06H\x05\x9f\x04\xce\x04b\x04i\x02\x92\xfe\x11\xfb[\xf8\x95\xf5\xd0\xf2\x19\xf0\x14\xeeC\xed\xb2\xed$\xef\xef\xf0\xb1\xf2t\xf4\xf6\xf5"\xf7N\xf8\x04\xfa\xb6\xfb\xc4\xfc\x7f\xfd\xe8\xfe\x99\x013\x04_\x05[\x05v\x04\x0b\x03\x8b\x00\xce\xfd\xaf\xfb\x04\xfa\x83\xf8\x84\xf6\x19\xf56\xf5[\xf6`\xf7S\xf7\xaf\xf6\x8d\xf6\xf4\xf6\xd4\xf7\x08\xf9\xb2\xfa`\xfc\x88\xfdP\xff:\x02\x9e\x05\xb0\tO\x10\x03\x1a\x7f#_*%1\xdf9\xc7A\xd5C\xecA\x8b@J@\xe2<\xa35\x85.\x19)}"\x08\x19\xe2\x0f\xa8\x08g\x01E\xf7#\xecU\xe3\xc6\xdcu\xd6\xec\xcf\x1f\xcb\xa9\xc9~\xca/\xcc\xe4\xceT\xd46\xdb\x10\xe14\xe6\x14\xec\xcc\xf2\x9e\xf8_\xfdI\x02\xfe\x06\xc2\n\x9b\x0e\xb2\x12}\x15\xc8\x15;\x15\\\x14\n\x12\xad\rr\x08D\x03W\xfd\xad\xf6\x08\xf1\x1d\xed!\xea7\xe7@\xe4\x9a\xe2b\xe2\r\xe3\xbb\xe3\x83\xe4e\xe5\x9e\xe6\xe0\xe8\x11\xec\r\xf0!\xf4@\xf8d\xfc\x94\x00\x88\x04x\x08\xb3\x0b\xab\r\xe0\r\xf7\r\xf8\r*\x0e\xec\r\xf4\x0c\x8d\x0c\xa1\x0b\x06\x0bR\n\x92\t%\t?\x08\x11\x07V\x05\xfd\x03\xb1\x03\xc5\x03\x0c\x04\x1c\x04\x18\x04X\x04\xb6\x04t\x05\xa8\x05\x7f\x05\xeb\x04\x11\x04\x13\x03%\x02\xe0\x01\x85\x01\xe1\x00\xfd\xff@\xff\xf7\xfe\x8f\xfe\x0b\xfe>\xfdQ\xfc\xa4\xfbE\xfbM\xfb(\xfb\x1d\xfbp\xfb\xd7\xfb\'\xfc4\xfcU\xfcC\xfc\xd5\xfb2\xfb\xd3\xfa\xf8\xfa$\xfb\xf4\xfa\xc2\xfa\xb1\xfa\xda\xfa\xb8\xfaI\xfa\xd6\xf9X\xf9\xc2\xf8\xcf\xf7\xd6\xf67\xf6\xd6\xf5\x81\xf5\x00\xf5\xa9\xf4\xc7\xf4\x00\xf5\xc8\xf4I\xf4\xfa\xf3\x05\xf4\xf4\xf3-\xf4K\xf5z\xf7\x18\xfa\x86\xfc\xe2\xfe9\x01\x9a\x03\xfb\x05N\x08.\x0b\xc5\x0fR\x16k\x1dV#\x16()-\x952\xdb6X8G8\xe97\x037\x0e4\xfb/\x8c,\xe7)\xea%\xc1\x1f\x18\x19x\x13m\x0e\xe4\x07\xa9\x00\xea\xf9\x02\xf4\x1c\xee\x1c\xe8/\xe3\x16\xe0\x08\xde/\xdc\xda\xda\xee\xdaj\xdc)\xde\xaf\xdfD\xe1\xa9\xe3G\xe6\xd3\xe8c\xeb\xe4\xedE\xf0\x9b\xf2\xf0\xf4\x17\xf7\xb3\xf8%\xfae\xfb\xdd\xfb\x95\xfb\n\xfb\xb5\xfa\x07\xfa\x8f\xf8\x07\xf7\xcc\xf5\xed\xf4A\xf4\xae\xf3\x96\xf3\xdf\xf3F\xf4\xe7\xf4\xdf\xf5\x0b\xf7;\xf8\x7f\xf9\xd8\xfaM\xfc\xd3\xfdf\xff\xfa\x00\x81\x02\xd5\x03\x17\x05<\x06\x15\x07\xa3\x07\x06\x080\x08/\x08&\x08\xf3\x07\xf2\x07\x10\x08R\x08\xc5\x08\x8e\t\x95\n\xcc\x0b\x1f\r9\x0e\x1a\x0f\x80\x0f\xb9\x0f\xb8\x0f\x80\x0f\xbf\x0e\xdf\r\xe7\x0c\xd2\x0bx\n\x9b\x08\xc3\x06\xed\x04\xeb\x02w\x00\xe1\xfd~\xfbM\xf94\xf7#\xf5W\xf3\xfe\xf1\xf0\xf0\x10\xf0u\xefO\xefj\xef\x85\xef\xb0\xef2\xf0!\xf1\x12\xf2\xd5\xf2x\xf3U\xf4J\xf51\xf6\xf3\xf6\xcb\xf7\xbd\xf8\x8f\xf9&\xfa\x9f\xfa-\xfb\xad\xfb\xea\xfb\xfa\xfb*\xfc\x8b\xfc\x07\xfd~\xfd\xd9\xfdI\xfe\x8e\xfe\xcd\xfe\x16\xffl\xff\xcc\xff!\x00\x7f\x00\xf7\x00X\x01\xb6\x01\x15\x02\x94\x02\'\x03\x91\x03\x1a\x04\xf1\x04\x06\x06\x1b\x07\xfd\x07\xda\x08\r\n1\x0b\xdb\x0b\x19\x0c\x8c\x0cl\r[\x0e^\x0f\xdb\x10\x14\x13(\x15G\x16\xb4\x16b\x17z\x18)\x19\x0c\x19\xe4\x18\xf8\x18\xa6\x18l\x17\xe5\x15\x1a\x15\x81\x14\xe8\x124\x10T\r\xda\n\x0b\x08\x87\x04\x00\x01\xdd\xfd\xf4\xfa\xc2\xf7\x9e\xf4-\xf2\x81\xf0!\xef\x9f\xed 
\xec\xd6\xea\xe5\xe9=\xe9\xc3\xe8\xb3\xe8\xe1\xe8*\xe9/\xe9A\xe9\xf4\xe9>\xeb\xd6\xecR\xee\xa3\xef\x12\xf1\x9b\xf2\x13\xf4\x99\xf5\x16\xf7\x8d\xf8\xd3\xf9\xda\xfa\xbf\xfb\xcf\xfc%\xfe\x8b\xff\xb9\x00\xbe\x01\xbe\x02\xae\x03v\x04\xe6\x043\x05s\x05e\x05\xfb\x04K\x04\xb1\x03.\x03\x88\x02\xb7\x01\xe5\x00j\x00\x1e\x00\xbc\xffS\xffE\xffy\xffv\xff\'\xff\x1a\xffo\xff\xdf\xff\'\x00\x96\x00f\x01w\x02k\x03T\x04_\x05t\x06\x8d\x07J\x08\x96\x08\xe3\x08%\t4\t\xe8\x08v\x08#\x08\xad\x07\xbb\x06e\x05\x14\x04\xe8\x02\x93\x01\xda\xff!\xfe\xa3\xfcW\xfb\x04\xfa\xd9\xf8.\xf8\xfa\xf7\xcf\xf7\x84\xf7[\xf7\x99\xf7\x18\xf8G\xf8V\xf8\x93\xf8\xee\xf82\xf9O\xf9\x8b\xf9!\xfa\xc1\xfa\x0c\xfb*\xfbw\xfb\xfb\xfbH\xfcB\xfc%\xfc)\xfc\x1e\xfc\xca\xfb\x94\xfb\xab\xfb\x06\xfcA\xfcA\xfcX\xfc\xac\xfc\x1d\xfdU\xfd\x83\xfd\xce\xfd \xfe(\xfe\x17\xfeU\xfe\xca\xfe.\xffj\xff\xb9\xff^\x00\x1d\x01\xaa\x01+\x02\xcc\x02l\x03\xc8\x03\x13\x04\xeb\x04e\x06\xd8\x07I\t\r\x0b\x1a\r\x06\x0f\xb5\x10\xe9\x12\xa6\x15\xe0\x17\n\x19P\x1a\'\x1c\xe1\x1d\xa8\x1e\x1b\x1f\x05 h +\x1f\xfd\x1cD\x1b\xc6\x19(\x17\x19\x13\x03\x0f\x89\x0b\xd9\x07Y\x03\x08\xff\xac\xfb\xe9\xf8\x95\xf5\x01\xf27\xef\x91\xed,\xecK\xea{\xe8x\xe7\x0e\xe7d\xe6\x85\xe5\x83\xe5q\xe6v\xe7\x04\xe8\xb8\xe8a\xeab\xec\xf6\xed2\xef\xbd\xf0\x8b\xf2\xf2\xf3\xe2\xf4\xce\xf5\x06\xf7R\xf8(\xf9\xc9\xf9\x9a\xfa\x92\xfb=\xfcv\xfc\xbe\xfcM\xfd\x95\xfdU\xfd\xe0\xfc\x93\xfc[\xfc\xdd\xfbe\xfb\x17\xfb\xfa\xfa\n\xfb\x1c\xfb]\xfb\xdc\xfb\x8d\xfcY\xfd\n\xfe\xd0\xfe\xcc\xff\xef\x00\x08\x02\x06\x03#\x04{\x05\xd6\x06\x01\x08\x12\tB\nu\x0bz\x0c`\r3\x0e\xd0\x0e*\x0f%\x0f\xe3\x0ee\x0e\xaf\r\xd0\x0c\xcf\x0b\xb2\n\x83\t^\x08O\x07N\x065\x05 \x04+\x03%\x02\xf3\x00\xa7\xffZ\xfe\x0b\xfd\xa4\xfbN\xfaJ\xf9P\xf8\\\xf7\x9f\xf61\xf6\xf4\xf5\xa1\xf5:\xf5\xf6\xf4\xa9\xf4/\xf4\xb2\xf3E\xf3\x01\xf3\xd8\xf2\xba\xf2\xe9\xf2S\xf3\xd9\xf3p\xf4\xfe\xf4\xab\xf5C\xf6\xb4\xf6\x12\xf7d\xf7\xc0\xf7\x1b\xf8u\xf8\xf8\xf8\xab\xf9\x98\xfa\x8f\xfb\x88\xfc\x83\xfd}\xfeY\xff\x05\x00\xd5\x00\xdb\x01\xde\x02\x7f\x03\x15\x04\x0c\x05*\x06\x11\x07\xe7\x07\x05\t\x7f\n\xc5\x0b\xd9\x0ce\x0e\xbf\x10/\x13Y\x15\xba\x17\x87\x1a>\x1d\x02\x1f\x88 J"\xdb#V$n$\xf1$\\%\xc4$R#\xf1!k \xc4\x1d\x00\x1a\x18\x166\x12\xc4\r\xa9\x08\xc4\x03\x87\xff\x96\xfbo\xf7~\xf3\x1f\xf0N\xed\xb8\xeaT\xe8U\xe6\xbf\xe4\x83\xe3I\xe2/\xe1\xb6\xe0\xd0\xe0F\xe1\xa6\xe1[\xe2}\xe3\xce\xe4%\xe6z\xe7\x19\xe9\xab\xea\xcb\xeb\xc3\xec\xe2\xed2\xef\xa4\xf0\xff\xf1g\xf3\xfa\xf4\xa5\xf69\xf8\xb2\xf92\xfb\xba\xfc\t\xfe\x0c\xff\xeb\xff\xde\x00\xd1\x01\x92\x02.\x03\xaf\x034\x04\x8f\x04\xad\x04\xba\x04\xd9\x04\xfa\x04\xce\x04m\x04\x12\x04\xe8\x03\xce\x03\x8d\x03y\x03\xb9\x03/\x04\xab\x04\x19\x05\xc3\x05\xa6\x06h\x07\xe2\x07e\x087\t\xfe\tv\n\xd0\nq\x0bM\x0c\xdf\x0c\x0b\r=\rV\r\xe4\x0c\xbc\x0bD\n\xda\x08G\x07Y\x05\x8e\x03,\x02\x0f\x01\xee\xff\xdb\xfe\xee\xfd\xff\xfc\x06\xfc\xdd\xfa\xaa\xf9e\xf8\x15\xf7\xcf\xf5\x90\xf4\x8a\xf3\xdb\xf2\x82\xf2!\xf2\xd6\xf1\xe5\xf1+\xf2u\xf2\x8d\xf2\xaa\xf2\xcf\xf2\xdb\xf2\xd9\xf2\xed\xf2M\xf3\xd7\xf3y\xf4I\xf5[\xf6{\xf7e\xf80\xf9\xea\xf9\x99\xfa\xfc\xfa\'\xfbd\xfb\xaf\xfb#\xfc\xaf\xfcl\xfdb\xfe5\xff\xc4\xffF\x00\xe6\x00l\x01\xc4\x01\x0e\x02\x9c\x02Z\x03\x00\x04\x0b\x05\xe6\x06&\t\t\x0b\xc5\x0c\xfc\x0e\x8b\x11\xd5\x13\xcc\x15!\x18\xbe\x1a\xce\x1c%\x1e\x98\x1f\x95!?#\xd2#\xd6#&$G$!#\xf6 
\xe8\x1e\xe3\x1c\xc1\x19\x8e\x15\x90\x11X\x0e\xd3\np\x06M\x02\t\xff\xf7\xfbN\xf8\x87\xf4\x94\xf1\x1b\xefC\xecO\xe9\xe9\xe63\xe5\xce\xe3s\xe2}\xe1e\xe1\x85\xe1\xbd\xe1\xfa\xe1\x9d\xe2\xcd\xe3\xf2\xe4\x03\xe63\xe7\xb5\xe8f\xea%\xec\x17\xee^\xf0\xc6\xf2\x0f\xf5>\xf7\x84\xf9\x96\xfbu\xfdP\xff\x02\x01\x94\x02\xd2\x03\xda\x04\xc0\x05p\x06\xd5\x06\x02\x07\x08\x07\xd5\x06\x8a\x06\x12\x06_\x05\x93\x04\xd0\x03\x08\x03\x1e\x02-\x01u\x00\x00\x00\x87\xff\xfb\xfe\xb7\xfe\xeb\xfeG\xff\x81\xff\xf6\xff\xf0\x00\'\x02\x15\x03\xfd\x03B\x05\x9b\x06\xb1\x07l\x08D\tK\n\x1c\x0b{\x0b\xc1\x0b\x07\x0c\x18\x0c\xcf\x0bQ\x0b\xce\n@\n~\tb\x08\x1a\x07\xc3\x05Y\x04\xb7\x02\xfc\x00_\xff\xf5\xfd\xa4\xfcZ\xfb+\xfaG\xf9\x91\xf8\xee\xf7S\xf7\xd7\xf6\x88\xf6?\xf6\xf3\xf5\xbc\xf5\xb4\xf5\xc9\xf5\xee\xf5+\xf6\x9b\xf66\xf7\xcb\xf7P\xf8\xcf\xf8\\\xf9\xc2\xf9\x02\xfa6\xfaf\xfa\x8d\xfa\xa5\xfa\xd2\xfa\x10\xfb\\\xfb\x98\xfb\xcf\xfb\x0e\xfc.\xfc4\xfc\x07\xfc\xbf\xfby\xfb\x1f\xfb\xbf\xfan\xfaG\xfa+\xfa\x00\xfa\xfe\xf9u\xfaB\xfb\r\xfc\xc5\xfc\xeb\xfds\xff\x11\x01\xe5\x02f\x05\xb0\x08\xf8\x0b\x9d\x0eB\x11}\x14\xa3\x17\x1e\x1a"\x1c\xa5\x1e&!h"\xa0"K#~$\xa8$N#\xd2!\xe4 (\x1f\x8d\x1b\x92\x17\xac\x14\xc5\x11b\r,\x08\x17\x04\xe8\x00\x1d\xfd\xa1\xf8\x1b\xf5\xcf\xf2u\xf0I\xedQ\xea\x9c\xe8\x90\xe7\x1f\xe6\x89\xe4\xb6\xe3\xe2\xe3\x1d\xe4\xf3\xe3A\xe4\xaa\xe5q\xe7\xae\xe8\xba\xe9J\xebO\xed\xfe\xee>\xf0\xc4\xf1\xb2\xf3h\xf5\xa0\xf6\xe4\xf7|\xf91\xfb\x9d\xfc\xce\xfd\x10\xff4\x009\x01\x06\x02\xab\x02@\x03\xa6\x03\xf5\x03\x15\x04\x0f\x04\xf7\x03\xd0\x03\xa0\x03w\x03\'\x03\xc1\x02T\x02\xe9\x01\x96\x01\x16\x01\xc3\x00\xb1\x00\xac\x00\x96\x00\x80\x00\xd7\x00h\x01\xd5\x01P\x02\'\x03\x13\x04\xcd\x04g\x05#\x06\xed\x06\x84\x07\xfb\x07h\x08\xc8\x08\xe9\x08\xc4\x08y\x08\x04\x08u\x07\xc3\x06\xe1\x05\xee\x04\xf4\x03\xed\x02\xc7\x01\x91\x00r\xfft\xfel\xfdj\xfc\x84\xfb\xb4\xfa\xe3\xf9,\xf9\x95\xf8$\xf8\xef\xf7\xc7\xf7\xbd\xf7\xd2\xf7*\xf8\xa3\xf8\x15\xf9\x81\xf9\xf1\xf9\x96\xfa"\xfb\x95\xfb\x1d\xfc\xd2\xfc\x99\xfd!\xfe\xb1\xfer\xff*\x00\xca\x00%\x01\x9c\x01\x03\x02@\x02O\x02R\x02\x82\x02\xa3\x02\x96\x02u\x02\x81\x02\x9f\x02\xa4\x02\x94\x02\x92\x02\x9b\x02m\x02\x1a\x02\xd1\x01\x96\x01T\x01\xe9\x00s\x00\x06\x00\x94\xff\x16\xff\xa2\xfe\x0b\xfen\xfd\xe0\xfcF\xfc\x91\xfb\xcc\xfa!\xfa\xa0\xf93\xf9\xe2\xf8\r\xf9\xa7\xf9d\xfa)\xfbB\xfc\xdc\xfd\xbb\xffj\x01-\x03<\x05t\x07X\t\x08\x0b\xe1\x0c\x06\x0f\x00\x11U\x12\x7f\x13\xcf\x14%\x16\xd7\x16\xe7\x16\xea\x16\xcd\x16\x05\x16]\x14x\x12\xd3\x10\xff\x0e\x8d\x0c\xc8\ta\x07@\x05\xde\x02)\x00\xd3\xfd\xfc\xfbF\xfa=\xf8-\xf6\xcb\xf4\xdd\xf3\xec\xf2\x0c\xf2\xb1\xf1\x04\xf2`\xf2\x9d\xf2\x1e\xf32\xf4Y\xf5\x1e\xf6\xc6\xf6\xb7\xf7\xa2\xf8\x1e\xf9a\xf9\xda\xf9\x83\xfa\xd6\xfa\xee\xfa\x1a\xfb`\xfbt\xfb\\\xfbO\xfbH\xfb#\xfb\xcc\xfat\xfa6\xfa\xf2\xf9\xba\xf9}\xf9L\xf9\x1e\xf9\xe7\xf8\xda\xf8\xfe\xf8:\xf9l\xf9\x9d\xf9\xc4\xf9\x11\xfaz\xfa\xf1\xfa\x92\xfbG\xfc\xef\xfc\x87\xfdN\xfe<\xffA\x00:\x01"\x02\xff\x02\xd8\x03\x9c\x04E\x05\xe4\x05z\x06\xe3\x06\x16\x070\x07I\x07a\x07g\x07S\x07\x1f\x07\xca\x06\x7f\x06\x15\x06\x98\x05+\x05\xaf\x04*\x04\x8d\x03\xf4\x02u\x02\xf8\x01\x8b\x010\x01\xce\x00o\x00+\x00\xf6\xff\xcb\xff\x99\xff\x82\xffn\xffM\xff&\xff\x0f\xff\r\xff\xf5\xfe\xdc\xfe\xe9\xfe\x0e\xff;\xffc\xff\xbc\xff#\x00A\x001\x00S\x00\x9c\x00\xc5\x00\xab\x00\xba\x00\xfd\x00\x00\x01\xcf\x00\xc3\x00\x04\x012\x01\xd0\x00J\x00\x0c\x00\xd8\xff^\xff\xa9\xfe.\xfe\xd7\xfdP\xfd_\xfc\xa1\xfbM\xfb*\xfb\xe2\xfaY\xfa\xfe\xf9\xe8\xf9\xdd\xf9\xb7\xf9\xb0\xf9\x1a\xfa\xa2\xfa\x01\xfbB\xfb\xe7\xfb\xfb\xfc\x02\xfe\xbe\xfeR\xff\x03\x00\xd4\x00\x94\x01\x0c\x02o\x02\xcb\x02\x0c
\x03\xf1\x02\xa1\x02j\x02<\x02\xe4\x01I\x01\x99\x00\x1b\x00\xaa\xffO\xff\xd0\xfeS\xfe\xf0\xfd\xa6\xfdi\xfdE\xfdQ\xfd\xa7\xfdX\xfe7\xff7\x00w\x01\xbc\x02\xc8\x03\xb6\x04\xb8\x05\xd5\x06\xd8\x07\xa1\x08\x8a\t\x9a\n\xca\x0b\xa0\x0c*\r\xd3\r\x1e\x0e\xe6\rp\r\xd3\x0cG\x0co\x0b~\n\xcc\t\x19\t=\x08]\x07\x9d\x06\xc0\x05\x98\x04D\x031\x02!\x01t\xff\xf7\xfd\x19\xfd2\xfc\xd9\xfaM\xf9\x87\xf8F\xf8!\xf7\xb6\xf5{\xf5d\xf5h\xf4n\xf3I\xf3t\xf3\xb6\xf2=\xf2\xc6\xf2M\xf3\xb6\xf3[\xf4\x88\xf5\x9a\xf6\x11\xf7\x9e\xf7\x9f\xf8\x95\xf9T\xfaO\xfb\x91\xfc\xed\xfdJ\xff\xc1\x00\xff\x01\xb1\x02W\x03\xe0\x03\xcf\x03\xbf\x03\xa8\x03:\x03\xeb\x02\'\x03?\x03\xd4\x02o\x02\x03\x02\x12\x01\xe9\xff\x10\xffA\xfej\xfd5\xfdt\xfd\xc5\xfdf\xfe\x8e\xff\xa4\x00\xfe\x00"\x01f\x01O\x01F\x01\x81\x01\xe8\x01\xac\x02\x97\x03~\x04K\x05\xdf\x05d\x06\x06\x06C\x05\x8d\x04\xae\x03\xf9\x02e\x02%\x02\'\x02\x06\x02\xe3\x01t\x01\x17\x01\xe6\x00\x19\x00\x16\xffW\xfe\xed\xfd\xb8\xfd\xa0\xfd\xba\xfd\xfd\xfd9\xfeX\xfep\xfe\x9c\xfe\xa0\xfe\x92\xfer\xfe?\xfe8\xfe\x9e\xfeu\xffh\xff4\xff\x9b\xff\xfc\xff\xf3\xff\xff\xff\x95\x00\xf5\x00\xb5\x00\x99\x00\xf1\x00\x1f\x01\'\x01\xde\x00~\x00\x8e\x00\x02\x01P\x01`\x01\xac\x01\xc7\x01\xf6\x00\x04\x00\x0e\x00\x08\x00K\xff\xd6\xfe \xffU\xffk\xff\xab\xff0\xff\xf3\xfc\x17\xfb\xfb\xf9\x86\xf8~\xf9w\x01\xbd\t\xce\x08I\x04D\x05Q\x088\x04b\xfeR\xfe\\\x00\xb9\x00\xfa\x00\xb8\x03&\x05p\x02\xef\xfc\xe7\xf7Y\xf6\xc2\xf70\xf9\xfa\xf8\xa5\xfa#\xff\x8e\x03C\x051\x06\x97\x07\x92\x05\xad\x00\xfc\xfe\xce\x01q\x05\xd3\x07\x15\x08+\x07\x91\x05\xaf\x03\xaf\xff~\xf8B\xf4\x82\xf3\xfe\xf1Q\xf1\xcf\xf3\\\xf7\xab\xf6\xa3\xf3,\xf1\xff\xed\xd6\xec\xdc\xedU\xef\x10\xf0\xa9\xf3\xb1\xf8\xd2\xfa_\xfa\x1e\xfa\xd5\xf9\xb2\xf7=\xf5\xb5\xf6U\xfb\x83\xff\x17\x05\xfb\x0f,\x1d\xdd!\x8a\x1eo\x1d\xd6\x1f\x85 \x04\x1f\xcb\x1e\xc4\x1fx\x1fV\x1f\xb4\x1c\r\x16l\r\xa4\x03\x1d\xfa\xca\xf1\x91\xed\xa0\xeb\xfe\xe9\xb0\xe9\xae\xeb\xc3\xed\x83\xef\xd4\xf0\xc0\xf0\xe8\xf0\xb0\xf3K\xf9\x90\xff\x08\x05\xb8\tk\rt\x0e\xd4\r\xf7\x0c\x19\x0b\xed\x07:\x04\x85\x02q\x03\xee\x03\x81\x02;\xffF\xfb\x9d\xf8\xbc\xf6g\xf4\xe0\xf1\x9f\xf1\x91\xf3\x00\xf5\x8e\xf6\xad\xf9x\xfb\xf3\xf9\xe4\xf8{\xfa0\xfc\x8e\xfc\xfd\xfc+\xfeX\xff\xe0\x00\xbd\x02\xde\x02\x13\x01\xfb\xfey\xfd\xd9\xfcm\xfdZ\xfe\x90\xfe\xdb\xfe\xeb\xffw\x01\x08\x02m\x01?\x00:\xff\x87\xff\xf9\x00\xcb\x02(\x04\xb4\x04\x85\x04\xc1\x04~\x05\x03\x06\xc4\x04\xf5\x02i\x02"\x03\xac\x04\x80\x05[\x05\x8d\x04n\x03\x9c\x02\x06\x02\xa4\x01\xeb\x00t\xff:\xfe`\xfe\x81\xff\xc0\xffC\xfeA\xfcK\xfb?\xfb#\xfb\xea\xfa\xda\xfa\xb4\xfao\xfav\xfa 
\xfb\x12\xfc\xcc\xfc\xc8\xfc\x16\xfd\xcf\xfeK\x01\x1e\x03{\x03\x96\x03R\x04u\x05\x80\x06\x00\x07s\x07.\x07\x14\x06K\x05\xc9\x04C\x04\xcb\x02\xc3\x00v\xff\xb6\xfe\x06\xff\x04\xff\xf6\xfd\r\xfd\x94\xfc\xaf\xfc\xe1\xfc\xf6\xfc\xaa\xfd\x10\xfe!\xfe\xe0\xfe,\x00|\x01\x15\x01\xee\xff)\xff\x07\xff\x92\xff\xc6\xff\xab\xff%\xff\x88\xff\x13\x00\x7f\x00\xd2\x00\x05\x01\n\x01\xbe\x00X\x01\x97\x02l\x03\xaf\x03\xad\x03\xdc\x03Z\x04\xd1\x04k\x04\x1c\x037\x01\xa3\xff\x9b\xfep\xfec\xfe\r\xfd\x80\xfb\xbf\xfa\x87\xfa0\xf9*\xf7\x1f\xf6\xb5\xf5\xf6\xf5\x07\xf7\xed\xf8\xf1\xf9\t\xfak\xfaM\xfbQ\xfc\x18\xfdb\xfd*\xfe\xc2\xff\x0e\x02\x9d\x03\xee\x03[\x04\xa7\x04,\x04F\x03\xb1\x02\x0b\x03d\x032\x03W\x03\xda\x03"\x04\xb9\x02\x86\x00@\xffP\xff\xfb\xff\'\x01&\x02#\x02\xc3\x01\x0b\x01;\x01\xe4\x013\x02\x90\x01G\x00/\x02\x0e\x063\x07\xb0\x03\x00\x00\xbc\x01\x12\x04t\x01\x83\xfc\xc4\xfbr\xfe\x96\xff\xc7\xfe\xc5\xfe\xa8\xff5\xff\x02\xffU\x00Z\x01\xec\xff\x8e\xfe\x90\x00[\x04\xf1\x04\xf1\x00\x83\xfc\xee\xfaS\xfcz\xfdL\xfc\x02\xfa\x18\xf9\x96\xfb\x84\xff\xe0\x00\x91\xfe\xe9\xfb<\xfet\x02b\x03>\x01\xdc\x00\xcb\x01\x0e\x00\xc7\xfd)\xfe\xfb\xff9\xff`\xfd\x02\xfd\xc2\xfd\xf7\xfd\x99\xfd[\xfe\xfe\xff_\x02\xf9\x03\xc4\x04}\x05\xb4\x06s\x07?\x06\xa4\x04\x06\x05\x00\tq\r\x1e\x0e\x99\n\xe9\x05\xe7\x03\x06\x03\xd8\x01\x03\x01F\x01b\x01\xda\xff\x17\xffR\x01\x95\x01\x86\xfb\x1f\xf5s\xf6\xe3\xfbO\xfc\xd5\xf8\x96\xf6\xe2\xf6b\xf8\xc0\xf9M\xfa\xa6\xf8\xf3\xf6\\\xf7\xa0\xf86\xfa\x82\xfby\xfd4\x01\xd9\x05x\x08u\x07\xe1\x05\xc1\x04\x0e\x03\x11\x02$\x03\x9c\x04.\x04$\x03\xf5\x01\x0f\x00\xab\xfd`\xfbu\xf9\x8d\xf8>\xf9S\xfa\xdd\xfa\xe8\xfb\xa0\xfd:\xfe\xba\xfd\xaa\xfd\xc0\xfe\x1a\x00q\x00\x9c\x00\x96\x01\xb7\x03\x1c\x05\xa5\x04\xa1\x04I\x05\x83\x04\x83\x02U\x01\xa2\x01=\x02\x87\x02\xef\x01\xd3\x00\xad\x00>\x01\xd6\x00\xfc\xff\x19\x00\x92\x00r\x00\x15\x00p\x00:\x01\x8f\x01\x8b\x01q\x01\x8d\x01c\x01\xaf\x00\xf3\xff\x90\xffy\xff\x91\xff\xac\xff\x87\xfft\xffN\xff\xed\xfe\x9e\xfeh\xfe9\xfe\x19\xfeL\xfe\x80\xfeF\xfe\xd9\xfd\x8d\xfd\x95\xfd\xc2\xfd\x04\xfe/\xfe\x06\xfe\xe6\xfd\x16\xfel\xfe\xc3\xfe\x14\xff\xc8\xff\x8f\x00j\x01S\x02\xd5\x02\xf3\x02\xcb\x02z\x02\x1b\x02\xc2\x01\xb6\x01\x8a\x01\x11\x01@\x01\xc8\x01N\x01\x0b\x00\x11\xff\x82\xfe\x93\xfd\xa5\xfc\xa0\xfcA\xfct\xfd\x0f\x021\x07\xf7\x08\x8d\x06w\x04\x00\x03Z\x01\x1c\x01\xa1\x02e\x04^\x059\x06M\x06^\x05\'\x03\\\xff\xa4\xfct\xfc\x8e\xfe\x9e\x00\x01\x02\x1d\x03\xf7\x01I\xff\xd5\xfd\x19\xfd\xeb\xfb\xd0\xf9\n\xf8\x9e\xf7k\xf8.\xfa\xd7\xfae\xf9-\xf8\xa7\xf7\x1d\xf78\xf7f\xf8\xa1\xf9\xf1\xf9\xb8\xfan\xfc\x12\xfeL\xfee\xfd)\xfc\xf8\xfa\xa8\xfa\xe0\xfaU\xfb.\xfb\x9c\xfa\xa6\xf9:\xf8\xd5\xf7-\xf9\xc9\xf8\x90\xf4\x96\xef)\xef$\xf1*\xf2\xe4\xf1)\xf2\xc1\xf3\xd0\xf3U\xf2\x19\xf2\x18\xf5\xdf\xf9\xd8\xfd\x90\x02_\tj\x11A\x18\x1f\x1f\xae(\xda1\'5a4\xfb6\xad9\xc66\xe8/f*\x1a\'U"$\x1b\xa0\x12\xb2\x08\xe5\xfd`\xf3\x9f\xea\x89\xe6\xc7\xe4\x84\xe3\xa4\xe2\xff\xe2E\xe4\xb2\xe6+\xea\x85\xec\xa5\xed\x96\xf0\x8f\xf6.\xfd\xce\x02_\x07\x04\n\xb2\t%\t\x98\t\x05\t\xeb\x06d\x04\x1a\x03-\x03\x17\x03\xec\x01\xc4\xfe\x1b\xfa\xed\xf4\x87\xf1\xd2\xef\xca\xee\x18\xef%\xf0\xbd\xf0z\xf15\xf3f\xf4\x15\xf4\x90\xf3\xbb\xf3\xc3\xf4\xdb\xf7\x8f\xfc\xdf\xff+\x01\xe1\x01\x89\x029\x02\xac\x01\x8b\x01\xf1\x00+\x00\xf7\x00\xaa\x02\x84\x036\x03\x1f\x02\xbc\x00s\xff\xdc\xfe\xd2\xfe\x95\xfex\xffc\x00\xde\x00\x1b\x01v\x01w\x01\x95\x00\xda\x00\xf4\x01)\x03/\x048\x05f\x06z\x06\xb8\x06=\x07\xd9\x06\xfc\x05\x15\t\x12\x12v\x16 
\x12`\x0b\'\x06\xc6\x01\x87\xfe\xc3\xff\x96\x02\xe6\xffP\xfc%\xfe\xbc\xfeb\xf9\xe3\xf2`\xee*\xec\xa4\xee9\xf6\xc8\xfc\x8c\xfey\xfdS\xfd\x1e\xfd\x88\xfc\xb3\xfc\x00\xfb\x1d\xf9\x18\xfb\x02\xff\xa6\x02\x90\x02\xb0\xff&\xfc\xec\xf7\xdc\xf4\x12\xf4\xfb\xf3\xa6\xf3o\xf4\xef\xf5-\xf9\xb1\xf9\xc0\xf6-\xf4\x07\xf12\xf1\xda\xf4@\xf7\x98\xf9\xb2\xf9\x16\xfa\xf9\xfas\xf8\x13\xf9\x03\xfa\x94\xfbZ\x00\xfe\x02\x12\x02\xd9\x02\xeb\x07\xdc\x0e(\x10\xdb\x10g\x14\xd2\x13U\x17\x0c$\xa34y8$0\x1a.\x920o1],]%\xa9 "\x1c\xee\x1c\xd4\x1b\x97\x12\xf0\x06\x0c\xfb\xc7\xf0\xce\xe8j\xe5,\xe5c\xe1T\xdb\xa3\xdc\xaf\xe20\xe5\xbf\xe2\x08\xe0\x90\xe1\x95\xe7\xbe\xed\xb1\xf5\xd4\xfd2\x02\x11\x04\xe0\x05\xa6\n\x18\x0eO\x0c\xd5\t\\\n&\n\xa3\x08\xe2\x06]\x06 \x03\x0f\xfdh\xf7\x8c\xf3\xc1\xef\xc4\xe9J\xe7\xb8\xe7M\xe8\xba\xe8\xc3\xe9\xce\xea!\xea\x88\xeb\x0e\xeff\xf2\x19\xf6\x95\xfb\x96\x01\xfe\x044\x07\x1f\na\x0b\r\n\xd0\t\xb1\n\x86\n\x82\tv\x08\xc0\x07b\x05\xd3\x03\x10\x04>\x02\xae\xfe\xa6\xfd\xd3\xfe\x8b\x00T\x01\xd9\x02\\\x06\xe7\x052\x051\x07\xa3\x08*\t\x11\x07\x08\x07\xa7\x08\xeb\x06\xcf\x04\x93\x03\xdb\x023\x01\x95\xff%\xff\x14\x00\'\xfe\xd7\xfai\xfaq\xf9O\xf7\x08\xf2\xfd\xee\xda\xf0\x8d\xee\xb7\xeb\xfb\xeaR\xe9L\xe8\x8d\xe4\xfd\xe5R\xe9\xce\xe7)\xe9\xf2\xebt\xf1n\xf5\x86\xf6\x18\xf9Z\xfb\x84\xfd<\x00Q\x03Z\x05v\x05\xd9\x05[\t\x8a\x0b)\x0b{\x06\xb9\x03\t\t\x84\r\xe0\x0e/\t\xc2\x06\x06\x08\x8d\x05\xdb\x03\xa1\x04\xd7\x05\x8a\x05\xf7\x08-\x0e\x1b\x12`\x11\x90\x0f\xde\x12\x1c\x15\xaf\x19\xc5\x1c\xa1\x1f1#v"\x16\'\x81/\x993\xae,*$\x91#\xec"\xc4\x1c\x83\x18\xc1\x16\xdf\x0eh\x055\x00\xf8\xfd\x8f\xf4.\xe8\xd2\xe0\x98\xdf\xa1\xe1\x03\xe2\x9d\xe2.\xe2\xca\xe2\xf9\xe1\x01\xe39\xe9\xe8\xed\x1e\xf1N\xf5\x1e\xfdn\x01\t\x03\x8e\x05\x10\x03O\xfd\xe6\xf9Z\xfa\xf5\xfaQ\xf9\x91\xf8\x89\xf7\xda\xf35\xf1\xe1\xefc\xec\xbe\xe9\xcd\xe8\x81\xeb+\xf1d\xf4\x12\xf7/\xf8\xbb\xf9;\xf9]\xf9\x87\xfc,\x00\xd1\x01r\x03\xd9\x07\x8f\x0b\xcc\x0c\xab\x0b`\x0b^\x08\xd7\x06\xf2\x06n\x08\t\x0b\x8f\x08\xef\x06v\x04\xcf\x020\x02\x0f\xfe4\xfc\xd3\xf9\xce\xf6\xf9\xf6\xca\xf5\x1c\xf5d\xf2\xfb\xeep\xeeL\xef\x11\xf1\x14\xf2\x0f\xf2R\xf2i\xf3\xbd\xf4\x9c\xf7\xa5\xfa\xee\xfc\x9f\xfd\xca\xff\x88\x02\xa3\x03\xaa\x04B\x05]\x05\xdb\x03s\x04r\x06\xa6\x06\x10\x06e\x03r\x00\x0f\xffu\xff\xcc\x01!\x017\xff\xe4\xff\xbf\xfc\x7f\x01@\x04\xc5\x02\xde\x036\x01c\x00W\x00\xe3\x05[\x0b\xfd\n@\n\x0b\x0e\xbd\x0f\xb4\x0e\x10\r\r\x0fo\nW\x06\xda\x0e\xae\x10+\r\x9b\rC\r\x9d\x07!\x047\x06\x82\x06\x05\x00b\x02\x81\x08\xd6\x06\xcc\x08\x08\x0c\xb8\nz\x01\\\xffY\x06m\x05\x05\x04\xd6\n\xc1\x08\xe4\x03\x87\x07?\x07\x13\x02\xdd\xfdp\xfd\xd9\xfb\xb7\xfa\xd2\xfd\x89\x01t\x00\xeb\xfaw\xf9\xd1\xf6\xd6\xf3\x8d\xf3\xb5\xf5?\xf7Q\xf2\xc0\xf4$\xf9q\xf7\x9c\xf7!\xf8\x86\xf43\xf5\x8c\xf8z\xfa\x06\xfe]\xffP\xff\'\xfb\x87\xfc~\x00\x15\x00(\xfe\xd5\xff!\x01\x8c\xfe]\x02\n\x04q\x00\r\xff#\xfc+\xf9e\xfdM\xfe\xf2\xfb\xbc\xfa\xe0\xfc\xca\xf8\xab\xf6\xed\xf8\x9a\xfb|\xfb\xf1\xf3p\xf7\xbb\xfd\xfe\xfb\xd7\xfap\xfe\xd4\xfa\n\xfc\xd2\x003\xff@\x02|\x02\x87\xfd\xef\xf9\xd6\x038\x07;\x01\x87\xfe_\x02m\x05\xfa\xfb\x94\x00\x90\x01K\xffu\xfdB\xffM\x03\x87\xfa9\x08\x0e\xff\xdb\xf3G\x01/\xfe\xa5\xf4\x18\x02\xd0\x05\x0c\xf4\xd3\xf8\x12\x02\xed\xff\xd9\xfa\xb0\xfc;\x02~\xf5\x8e\xf53\x0e\xc1\xfc\x01\xfa8\x0c\x01\xfcB\x02z\x0c&\x00R\xfe\x92\x08\xb5\x04\xe1\x05\x8f\r:\x0c\xf3\x01\xbf\x02\xec\x07\xa4\x04\xa2\x01]\x08\xef\x03~\x03\xf1\x04\x85\x03\x16\x05I\n\xc3\x03\xe1\xff\x97\x0b\x99\xfc\xe8\xfb\x18\x03v\x0b\xff\x06]\x02Y\x06?\x015\x05\xa7\xfc\xc7\xfe+\x03"\xfb|\x08\x9e\x05\x17\x03\x8d\x08\xb3\xfc"\xf8\xad\x01\xba\xf8\xfa\xf9\x9b\x0b\
xfc\xf3\xa0\xfc\xa9\x06\xa1\xfa\xdb\xf9\xf3\x00\x9a\xef\xbd\xf3#\xff\x12\xfd\xbb\x02*\xfd\xe2\x01)\xfd\x1d\xff\x8a\xf2W\x05#\x03\x95\xf6\xa6\x06b\x06\x81\xfe;\x00\xad\n6\xfbV\x04\xc3\xfc\x9d\x00A\x03q\xfe\xc6\x01\x03\x01B\xf7\xa9\x04\xb3\xfc\xca\xf2\xbc\x03:\xfb,\xfbi\xf9\x9c\x04-\x0bd\xf4\xc6\xfa\xf2\x0c\xbb\xf9\xe1\xff\x1a\nR\xfe/\x02\x0c\x0b\xb6\xf7/\x00B\t\xb9\xf3\xa6\x00\x9b\xfbR\x01g\xff\xbc\x01\x11\x04o\xf8\x8a\x00N\x08s\xfc\xdc\xf6\x13\x0e.\xf3\xc1\xf2{\x13:\xf5\xc4\xfce\nU\xf4\xf2\xfeF\xfd\x0c\xee\xee\x08\x8d\x06\x0f\xee\xdc\x04\x08\xffB\xfb?\x05\xc3\x04\xc6\xf56\xfc]\x08"\xf3\xa7\x06\xa3\t\x02\x03\xba\xf6:\x00N\x03\x92\xfc\xf2\x04\xd6\xf9!\x0b\x82\xfac\xf5\xc0\x15\xd5\xf7\xcc\xf8\xc4\n\x8c\xf56\x03\xbc\x0e\xbd\xff\xf1\xfd|\x07u\xfc\xb8\x07D\xfd\xd7\x03\xba\x0e\xaf\xf6\xb5\x03\n\x06\xc9\x03\x02\x01\xbf\xfe\x8e\x01\xe7\xfe\xe7\x00*\xfc\x90\xfd\x8f\x02R\x02u\xf2\xfa\x054\x02\x0c\xf2\xa4\x02H\x00{\xf3\xd3\x00\xfa\x07^\xf8\xe5\x02\x1a\x00\xd0\xfc\xed\x05\xff\xf9\xec\xf9e\x07\xd7\xf6W\x02\xd9\n\xd2\xf4\x15\xfd\x14\x077\xef\x87\xfb\x11\x10\x0b\xf8@\xf4\xe6\x08\x05\x06\xe5\xef\xdb\r\x07\xfb%\xf5\xf2\x05\x14\xf7\x17\x07\x1b\x06\x90\xf2)\xfej\x10\xd5\xf9\xbb\xf9\x1e\x0b\xec\xf8\x15\x03\xa5\xff\xb8\x08\x8b\x00\xae\x07\xf0\x03\x96\xf6\xc8\n\xdd\x02\xc4\xfbz\x02\'\x11K\xf5\xb4\x03\x1f\x01:\xffL\x03\xe2\xee\xb7\x0f\x84\xfc\xce\xf9\xef\x0e\xa2\xf5\x0b\xf7B\x15{\xf4\xa2\xf0\xce\x17\x1c\xfc#\xf0\xef\x0c\xea\x07m\xf1\xcd\x06\xbe\xfb>\xfb\x8b\x02\xc1\xfe\xfd\x03\xc3\x00*\xf6\x08\xff\x10\x0b\xc5\xef9\xfe\x81\ns\xee\xe2\x04\xb2\x00]\xfa/\x07I\xfd\x00\xf8;\x009\xf8\xbc\xf6\x84\x18T\xf89\xfb\x81\x07\x10\xfd\xf9\xfb\xa8\x02\xca\x05\xa7\xfd\xf2\x04\xda\xfd{\x05\xb6\x08\xd6\xfc\xdf\x02\xb0\x04\xed\xf8\xff\n\x11\x03\xa4\x00y\xfd:\x0b\x16\x04\\\xf7\x02\xfe\xd3\x01\xf3\x06G\xf7z\xfe\x91\x08v\x04\xe8\xf8\x93\xfb\x90\xfa\r\x06\xd1\xf9\xa8\xf4\xdb\x14\x87\xfen\xf2\x16\x08\xa6\xfc\x1c\xfc\xb1\xfe\xdd\t\xb0\xf7\xff\x01\xd5\x03:\xf5\x8f\t\x8b\x00\xcc\xfc3\x00R\x06\xe9\xf1\xd1\x06\x8d\x01\x1b\xf8&\xfb4\x00|\x02\xe3\xf5\xa6\x08\xcf\xfa\xb9\xedT\x02{\xff\x06\xf2\xb3\x0e\xcf\x00\xd7\xf1\x90\x06\xa3\x08\xe2\xf9g\xfc|\x01\xb7\t*\xfbe\t\xc0\x0b\xb2\xf60\x05\xb0\x00\xd1\x06\xff\xf8z\x00\x10\x0f\xf6\xfdi\xf8\x98\na\x04%\xf5W\x08\xd0\x03\xeb\xfc\xdc\xf8\x0b\x06\xfa\x04y\xfc<\xfd\xff\x00f\x0c(\xf4\xbf\x001\x07\x8f\xf4\xce\x01\xdb\x00\xb9\xfc\x9d\x06#\x01\x8f\xf6\xe1\xfb_\x04\x8e\xf9|\xff\\\x03l\xedE\x04\x07\x08Q\xf3O\xf8\xea\x10<\xf2\x92\xfa\x1c\x08x\xf5\xa5\xfc\x93\x06\xd9\xff\xc2\xfe\xc4\x01\xab\xff\xf0\x00k\xf6\xe0\rA\xf6)\x05\xa1\x03B\xfe\xdd\xffI\xfcg\x0e\x91\xefN\x0b\x1f\x01\x04\xef\xda\x07\x88\x0c:\xf45\x04\xfb\xf9\xf8\x05j\x08\xae\xf3I\tu\x08\xf8\xf4\xdc\x02\xe3\x13^\xef%\x06>\x07\x0e\x00\xcd\xff\xce\x01\xab\x06\x98\xff\xb5\x06\xac\xf8\xc9\x02&\t=\xf6\xc8\x00\xd6\x01D\x07\xda\xfcy\xf2a\x19\xb0\xed\x81\xff\x0c\x031\xfcZ\x03\xf1\xec\x7f\x12\xb6\xef$\x00\x9b\x085\xf6\xa8\xf5\xaf\x05\x1b\xfc\xa2\xf7 
\x02\xfe\x02g\x03\x06\xf9P\xfc\x0b\x01\xe4\xfc\xa1\xfb\x18\x08\xba\x04\xdd\xf5\x88\x08\x8a\x03\x13\xf0I\x07\xbe\xfe%\x02\xfd\xf8\xc6\r4\xff\x01\xf2\xd7\r\xde\x01\xc7\xf6F\xf61\x1a\xaa\xf3\x9a\xf9\xff\x12\xec\xfb\xca\xfc\x98\x03\xc2\x04H\xf5\x13\x04\x9c\x03\x9b\xfb\x87\x01(\t\xda\xff\xc7\xf5\x84\x04f\x07\xd0\xf6J\x04\xe8\x08`\xefZ\x11G\x02\xb6\xf5\xb1\x04\x13\x07\x04\xf6\xc3\xf2\xd9\x18\x8b\xf9\xbd\xfa\xfa\x04o\x08|\xf5\xa3\xfdf\x0b\x90\xf1\xb2\x03\x94\x03\xf0\xfc\x00\x00\xab\n\xb5\xf2\xdd\xf9\xfd\x07\xfb\xefN\n\x19\xf5\xd9\xf6J\x16R\xf3\x1c\xf5\x80\t\x12\xf6\xe0\xffW\xfe\xcc\xf7\x02\x06\xb4\xfcT\x04\x14\xf5\x86\x06\xd6\t\xc5\xedD\x07A\x01\xea\xf9N\x04\xe6\x01\xa6\n>\x07\xa3\xfa#\xfdV\x05\xe3\xf9\x93\r=\xff\xdf\xff\x9d\tQ\xfc5\xfdB\tV\xffz\xfb\x99\x07\x9b\xf1\x17\x13\x98\xfb\x8f\xfb\xf1\x06E\x05\xab\xeeM\xfe\xac\x12s\xfc\xab\xf7\xdd\x00\x91\nC\xf5\x0f\xfc`\x05\x13\x03v\xee\xc4\x08\x18\x07\xf4\xf5\x14\xfb\x95\n\xfe\xfc\xd5\xee\x8b\x0c\\\x03\x14\xf2\x99\n\xb6\x01\xfe\xee\xf5\np\x05-\xf1\xf2\x00\xb4\x14\x1b\xeb\x90\xfa\x85\x17s\xf5\xd9\xed\xe0\nb\n.\xefJ\x02\xf6\r\xd7\xfc\xef\xf6p\xf8\xc9\x06\x84\x06\xa1\xf4\xe2\xf8\x02\x19E\xf4\xf3\xf3T\x16\x10\xef\xaa\xfb\x11\n\xd5\x01\x80\xf6\xce\x05\xb2\x06\xf8\xf5e\x03~\x10\xdd\xea\x13\xfd\xbe\x16&\xef\xc8\xff\xc8\x12\xd9\xf9c\xf8d\x0e\x81\xff\xc9\xf8\x19\x06\xef\x05\xa1\xf1\xbf\x05d\x18\xc7\xed\x0c\xf5\x06\x1c\x9e\xfc\xeb\xe6\xf4\x0f\xe0\x07\xd8\xf0\xed\x00\xe1\x07Y\x02-\xff)\xf6r\x06\xe8\xfe\xeb\xed"\x15\xf4\xf6#\xf1\xa8\x0b\r\x07?\xf5\xa3\xf5\xb3\x10.\x00\xdd\xed\x1e\x05\x89\x08\xb6\xf6+\xff\x9d\x04\x9a\x04Q\xf8\xdd\x04e\xff\xd4\x00Z\xfb\x06\xffn\nI\xf87\xfe\xc0\x0b!\xf7\xbe\xf8\xa9\x0f\xd7\xfa:\xf7\xdd\x01\xf3\np\xfa\x1b\xffH\xfd\xfb\x030\x02\xb6\xfb^\x01\xaa\x03j\x03\xd7\xfa\xc0\xff\xa8\xff\xe6\r\x15\xfa7\xfc/\r\x9c\xfc\n\xf8\xaf\x10\x80\xf8/\xfc\x8e\x10+\xfc\xf3\xf0\x1f\x08\xd7\x08e\xf6\r\xff\n\x01~\x07\xa5\xed\x90\x04O\x07I\xf1\x9e\x02\x91\xfdn\x06\x14\xff\x1e\xf8\xae\x0bS\xf8o\xf1U\x12a\x02`\xf01\x0f8\t\x18\xed\x88\xfb\x94\x12v\xf2\xbd\xff\xfc\x05\xdd\xfc\xf4\x04Q\xfb2\xff\x10\xfd1\x05\x16\xfcB\xfe\xc4\xfb\xe1\x0f$\xf6\x08\xf7?\x14;\xf6;\xf5\xc2\x06i\x05\xdc\xf0\\\x07\x17\x07\xfc\xfa\xb0\x018\xff\x85\xfd\xe5\xf7\xe2\x0f\x8f\xfd\xa1\xf5\xb5\x06\xec\x01\xd8\x01c\xf5d\r4\xffx\xf5[\x080\x08M\xf2\x92\x02\xe8\n\xef\xf7\xeb\x07\xeb\xf9d\xfe\xa7\x01\xb0\x01\x07\xfe\x9b\xfd5\x04i\x02\xf5\xf1&\rx\x04o\xe7*\x0e\x9b\x0c\xd7\xeb\xf1\xfcC\x12\x80\xfc\x1b\xf0\x97\xfc\x18\x1a\xa7\xf4\x86\xe9\xd1\x16 
\x05t\xe2\x11\r\xef\x15\x08\xea\xb5\xfa9\x14\xac\xf3g\xf3\xa6\x15y\xfb\x7f\xf3I\x03Y\x0e\x81\xf83\xf6\xc8\x19\x12\xf0\xe4\xef\xe2\x10\x87\n\xe7\xe7\xde\x08|\x1b\x88\xe36\xf4\x8d\x1e\xdc\xfa\xb9\xe6t\x17a\xfa\x08\xfb\x9b\x05W\xfe\xba\x01\xcf\xfc\xb4\x00Y\xfeR\x06b\xf1g\x0fb\xff\xdd\xf1m\x0c\x81\x00\xbb\xf6O\x06M\x0b|\xec\xcd\x04d\n\'\xf3\xd1\x04\xd6\x0c~\xf3\x85\xffR\x0cB\xf3\xc5\xff\xf6\x08\x13\xfb\xe1\xfe`\x02.\x05\xc7\xf8\x9c\x00}\x05>\xf3\xed\x05\xab\x064\xf6\xb9\xf9\xd3\x13\x11\xfa\x17\xf1S\x0cg\x02w\xf2\xcf\x02\x06\t\xc6\xf7\xbb\xff\xf0\n9\xff"\xf5n\x06\x8e\xfeN\x00\xbb\xf7\xb6\x05\x17\nZ\xf9E\xfdX\x07O\x02U\xebj\x0b_\x12\xe0\xe4F\x03\xc8\x11\xb4\xf2\xe3\x03g\x0b\xe2\xf0\n\x015\x04\xe5\xfa\xbb\xff\xcb\x04\x7f\x02j\xf9_\xff\xec\tM\x04F\xeb\xae\x01\xcb\n\x8f\xf5\xa3\xfbI\x1a(\xf4z\xf4O\x0fD\xf7\xbf\xf9\xfe\x06\x81\x05\xf3\xf3\x15\x07"\x0ch\xf8\xc4\xf7O\x0c\xe0\xf3S\xfc\x80\x0e\xa6\xfbg\xfa,\x10:\xf6\x9f\xf6\xa7\x0fb\xf9`\xfa\x03\x02^\x07\n\x01\x97\xfd\xd1\xfc\xba\ts\xf7\xa8\xfcD\x05H\xfe\xce\x01P\x07\xfa\xf9\xcd\xfeb\x08\x9a\xf0\xb6\x08.\x03`\xf8\xa0\x04,\x02\xe9\xfb\x16\x05`\xfdi\xf8\xf9\x02\xd6\xf7D\x01}\x05N\x07\xb5\xf3\xe3\xff\x16\t5\xf6\'\x006\t\xa3\xfb\x8f\xfd\xb1\x0b\x8f\xf8V\x04#\x02\xb1\xfat\xfe\xcd\x06\xf5\xfao\xfe\xd4\x02\xd4\xfc\xf1\xfdT\xff\xf7\x00\xc7\xfcY\xffU\xfb\xe1\x0ca\xf2\xb8\x02B\n\x96\xf3l\xf8\x17\x04\x86\x07\x81\xf99\x05\xeb\xfe\xa8\xf7:\x064\x04X\xf8-\xff\x1f\t\xd7\xfe\xb1\xf7\xd7\x0e\xbf\x00\xa6\xed\x9f\x07\xbf\x0e\xb9\xf2\x1a\xfd\xac\x0e\xcf\xfap\xf2\xf2\x10P\x06\xb8\xed\xc3\x08\xd1\x00\x8a\xf5\x94\x045\x0c\xf7\xf7\x11\xf9\r\x0b\x8f\xfe\x0f\xf9\x80\x01\xb6\x000\xfe\xcd\xffZ\x06o\x02\xf4\xfa\x1b\x03\xe8\xfa\xeb\xfd\x8f\x00\x9e\x00\xf0\x03\x8d\xffw\xfd\xcc\x03\xc0\xfa\xb4\xfel\x04\x8f\xf8h\xfdJ\x00n\x01\xcd\x04\xd5\x02\xf9\xf5 
\x02\x12\xfd\xa2\xfb\'\x07\x80\xfd\x12\xfe\xbd\x03k\xf8\xf6\x05\x82\x06\x8b\xf5\xe4\x04_\xfd6\xf8\x87\x01D\x08\\\x02\xae\xfc\xae\x03\x99\xfb\'\xfd\x18\x03\xbb\x03\xfe\xf8y\x02\xf2\x07\xb6\xf6\x89\x07i\x07\xad\xf8\xa1\xfdb\xfe5\xff\xea\xffe\t\x96\xfe5\xfa\xc8\x06g\x00\x9a\xfbG\xff[\x03\x9d\xf7\x8a\x00\x8b\x05+\x06\x8b\xff\xe5\xfd\x1f\xf9\x90\xfd=\x05N\xfc\xe5\x02\x18\x01\x9d\x01\x90\xfc\x10\x00\xb4\x046\x02S\xf9\x91\xf3\xc4\x08y\t\xbb\xf96\x06\x96\x00\x93\xf5\xb7\x00g\x08Z\xfcX\xfaP\x04\xc6\xf9e\x01\xfd\x05\xfc\x04\xb0\xfb|\xf5\x8b\xfe\xc6\x04c\xff)\xfc{\x058\xff"\xfa\x05\xfe\xb1\t%\xfe\x9a\xf9\x1d\xfe\x06\xfdD\x020\x06\xb3\x00\xbb\xfe\xb9\x00\xff\xfb\x19\x00H\x03\x16\x03N\xfc\xbc\xfe\xb6\x01\xba\x01I\x03\x05\x02\x8b\xfd\xa3\xfd\xa4\x00D\x00t\xff5\x02\xe3\xff\xa4\xfdD\x04\xc4\x00\xa9\xff\xe8\xff\xb5\xfcB\xfdC\x01\xa2\x02\x10\x01D\x01\x15\xfe\xbb\x00\xcc\x00k\xfeQ\x00\xc4\xfcV\xff[\x03\x99\xff\xa1\xff=\x01\xab\xff\xba\xfd\x90\x01\x17\x03\xbf\xff\xd0\xfa\xfe\xfc\xb0\x05\xb2\x01\n\x01B\x04\xba\xfd\x0b\xfc\xbf\x01\x93\x00\xe6\xfc\x91\x01P\x02\x9e\xfd\xc4\x01\x1e\x04\x81\xff\xde\xf9\xa4\xfer\x03c\xfdi\x01\xe3\x02\xc5\xfd\x89\x01\xdb\x00[\xfe\xf5\xff\xe9\xff`\xfdv\xfd\x1a\x03\x98\x05\xd3\xfd\r\xfe\x01\x02x\xfe\x87\xfd\xc9\x02\xd2\x00\xd1\xfe\xa0\x00\xd7\xff}\xffS\x01\xc2\x02\xce\xfd\x0b\xfe"\x018\x016\xfdA\x02o\x01h\xfd\xbe\x00*\x00\xe5\x01u\x02\t\xff,\xfb"\x00^\x01\x08\x00\x80\x04\xdf\xff\x92\xfd1\xff\x06\x01z\xfd\x91\xff\xef\x02a\xfc\x92\x00\x89\x03]\x01!\xfe\x01\xfe\xf9\xfd\xf7\xfd\xe3\x00\r\x00T\x02#\x02\xcc\xfb\x07\x00\x92\x01\x8f\xfc\xb8\xfe\xdc\x00\xbd\xfc\xc1\xfd\t\x05\x0f\x024\xfb(\xfe&\x01\xf3\xfa\x80\xfc\x80\x02k\xff\'\xff%\x00X\xfe\xe9\xffp\xff\xb7\xfb\x12\xffa\xfd\x8d\xfd\x9c\x01\x8b\x01q\xfeG\xff2\xfew\xfbv\xff\xc3\x00\x8a\xfd\xf3\xfd\x1b\x030\x00\xb2\xfd\x82\x01?\x00\xc1\xfbZ\x00\x8a\x00K\xfe"\x04\xc8\x02\x9e\xfe\xfb\x00\xc6\x01\xd4\xff\x96\x00p\x01\x05\x03p\x01\x92\x03&\x06x\x03\x87\x03\xa0\x05T\x03\x93\x05\'\n\xae\x07[\x08\x9d\n\x9a\n\x82\t\xcc\x0b.\n\xcb\x06\x8e\ns\n\xca\x07\xcc\x07a\x08_\x042\x02W\x02I\xfeD\xfc\xdb\xfd@\xfbW\xf8\xa5\xf8\xb6\xf6m\xf4\x8e\xf3\xa5\xf3\xa0\xf2\xeb\xf1\x03\xf4\x8d\xf5\x1c\xf5\x9f\xf5K\xf6-\xf7\xdb\xf8F\xfa\xf1\xfa\x8c\xfd\x05\xfe\x0e\xfe\xc4\x00\xb3\x00\x1d\x00\x83\xff\x81\x00\x1c\x01\x80\x00J\x00\x87\xfd\x02\xfd_\xfd\xb0\xfa\x96\xf9\xf8\xfa9\xf8|\xf5:\xf63\xf5\x14\xf5\xb0\xf3F\xf3\xf5\xf4\xd4\xf4\x88\xf3n\xf5\xbb\xf8\xa1\xf54\xf8G\xfb+\xf9\xb0\xfc\x01\x01\x80\xfeh\xfc\x8f\x03\x0e\x05\x85\xffc\x02[\x07\xf6\x02\xab\xfe3\x05(\x05{\x01u\x02`\x04\x13\x00\x8e\xfe\x90\x00\xb2\xfe?\xfe\xed\x03}\x03\x14\xfe7\x03\x82\x06\x94\x06r\x05\x0b\x07\xa1\t\xb5\x0cT\x148\x19\xf4\x1b\xaa \x9f\x1f\xf6\x1dF!5&Y\'h%4(\xf3*\xba)F%: |\x1b\x0c\x14\xdb\x0c\x8a\x0b\x05\r=\x08X\xfeq\xf8a\xf5\x88\xef\xac\xe7\xce\xe1\r\xe01\xdes\xdc\x0e\xde\x8d\xe0>\xdf\xe2\xda\xbd\xd8\xab\xdc\x8e\xe0|\xe1$\xe6y\xed\x15\xf1j\xf3\xa9\xf6I\xfa\x86\xfc}\xfc!\xff\xf8\x04V\t\x83\x0bx\x0bu\x0cY\x0c\x05\t\x7f\x07:\x07\xa9\x06:\x04&\x03%\x03o\x01\xc1\xfd\xc9\xfa\xaf\xf7\xa3\xf4L\xf3\xc1\xf36\xf5\x97\xf4\'\xf3Z\xf3\xf4\xf5l\xf5^\xf4\x98\xf6|\xf9i\xfb\xe4\xfef\x02#\x04\xc9\x04;\x07B\x084\x08\x82\x0b\xca\ru\rF\x0e\x0e\x10\xd0\x0eM\rD\x0cE\x0b\x0f\t\x9a\x08\xea\x07 
\x06\x86\x04\x8d\x02v\x00\n\xfe]\xfc/\xfaR\xf8\xac\xf9#\xf8S\xf6\xc6\xf5x\xf4~\xf3\xba\xf2\xf2\xf2_\xf5\xf3\xf2\x00\xf2\xb1\xf5\x84\xf5s\xf4\x1a\xf6P\xf61\xf5L\xf7e\xf9\x93\xfc!\xf9+\xf9m\x00\xd7\xfe\xb6\xfb\x88\xff\x89\x03\xd2\xfd\x07\x00)\x06\x98\x04A\x01\x03\x04"\x08\x89\x03@\x04\xb8\x05\xd4\x05\xcc\x06\xed\x06\xc6\t\x1e\t\xe4\x08\x10\x08\xf3\x08\x93\n\x1c\r\x8e\x0f\xe9\r\xec\x0f+\x14\xf6\x17m\x16c\x14.\x14d\x15\xb1\x18\xdf\x1b[\x1b\xa8\x18Y\x16u\x14\xd9\x13A\x12\xc1\r\x88\t\xd6\x05\'\x03\xf6\x02\xfc\x00\xcb\xfb\x93\xf6\t\xf2K\xee4\xecO\xeb}\xe9L\xe7\xce\xe5\x9a\xe5w\xe7_\xe8\xd7\xe6\x16\xe63\xe7\x9b\xe9M\xedG\xf1\x16\xf5/\xf6\xa2\xf5\xd0\xf7\x1b\xfb\xe8\xfd\xd4\xfd\xb9\xfe\xd3\x00<\x039\x05\x9f\x05\xe0\x04\x80\x02\xc5\x00\x84\xff\xa8\x01*\x03\xc9\x00\x19\xff\xd3\xfe<\xfe,\xfd\xaa\xfbB\xfbL\xfa\x8c\xf8\x86\xfaf\xfe\n\xff2\xfd\xd3\xfc\xde\xfc\xff\xfdX\xff\xdd\xff\xa9\x01~\x03b\x03^\x03\xed\x07\x14\x07D\x02\xe6\x02\x8e\x04\xb2\x04\xfc\x04\x94\x04\xc5\x06z\x04\'\xff\x1f\x00!\x05C\x00}\xf8\x02\xfe\xc2\xff\xd2\xf9n\xfd\x10\xff\x8e\xf9\x87\xf6\xb0\xfb\x05\xf5x\xf6u\xfb\x9c\xf5\x8e\xf9\xcd\xf4\x9e\xfa>\x02A\xf2,\xf5\xbc\xfe\xbc\xf6\xd8\xf5\xd7\xfb\xd4\x00[\x01\xf5\xfaX\xff\xba\xfe\x12\xfd\x1a\x02N\xfe\xef\xfe\xe8\x07\x07\x07m\x04\x0e\x07z\np\x08\x13\xff\x93\x06\\\x11\xe5\t)\x06\x8e\x10K\x0e\x0e\x087\t\xe0\n\x97\x07B\x06\x98\t\xed\n\x8c\t\x92\x0b)\x07\xbb\x04\xf4\x05\xb5\x07N\x08\xb4\x08B\t\xa7\x0c3\x0f)\n\xe4\x0cS\x0eW\x0b!\re\x10\x8e\x10,\x0fW\x0eq\x0e\xe6\x0c\xca\tj\x08J\x05\xf2\x03\xd1\x02\x05\x01^\xffN\xfb\xc1\xf7\xcb\xf4\x01\xf3 \xf14\xef\xa4\xee#\xee\xb0\xeb\x1c\xebi\xed\xaf\xecR\xeaZ\xed4\xeef\xee\x05\xf1\x8c\xf4o\xf4`\xf5^\xf8\x9d\xf8\'\xfa\xd5\xfb\x9d\xfd\xe9\xfc\x17\xfe\xc8\x01\x9c\x00\x02\xff3\x01x\x00\xdd\xfe@\x00\xa9\xfe \x01\x9d\xff\x00\xfe\xaf\xff\xad\xfe\xd7\xfe\xbc\xffq\xff3\xfd\xc7\xff\xb5\x03\xe1\xfe\xdb\xff\xf8\x05\xca\x01\x87\xff\x92\x06\x0c\x04h\x01\x00\x05\x04\x05Q\xffd\x034\x05\x01\xfe\x1d\x01#\x02\xae\xfd\x8c\xfd\x84\xfd\xb5\x00\xbd\xf76\xfa;\x00M\xf6\xdf\xfbd\xfa\t\xfa*\xfe\x18\xf8\xf2\xf6\xfb\xfd\x0b\xfd_\xfaM\xfd\xd4\xfc\x08\xff,\xf8\xfa\x03C\xfe\x9b\xfa-\x02\xc2\x01\xd3\xfe=\xf9\n\x07!\xfcp\xfa\xd6\xfcc\x02w\x03\x12\xf5\xf8\xfe\x98\t\x04\xf5Q\xf2\xab\n\xc0\x02\xfe\xf6\xb2\xff\xc2\x08\x08\xfe;\xfe\xdc\t0\x00\x0c\xfe\xe3\x0e\xa7\xfe\xa3\x07)\x12/\x04\xa5\x07\x04\n\x86\t\x86\x018\x0f\xc0\x0ft\x03\t\n\xb8\n\x8e\n\xb4\x00:\tT\nd\xfb\xcd\x03(\x10\xba\x08A\xf6~\t\xe3\x04"\xfc6\x00\xeb\x06;\x07\xe4\xf7x\x08\xaf\x03d\xffU\x04\x92\x03\x82\xfb\xf0\xfd\x9a\x08)\x01\xfa\xfe\x08\x01=\x06B\xf9\x95\xfe\xec\x03%\xfc\xa6\xff\xff\xfb\x16\xff\x18\x02\xee\xfdS\xfb\xaa\xfd\x18\xfc\xad\xf9\x8f\xfb\x0e\x02L\xf88\xf5\xac\x03*\xfab\xf2\x06\xfeH\x02l\xf0\x0e\xefo\x08\xa6\xfc\x18\xf4T\xfb\xdc\xfe*\xfd\x14\xf2\xe8\x03\xdf\xfeo\xf4U\xfc\xec\x01\xa0\xff\x84\xfa\x1f\x01>\xfcL\xfa\x1a\x03\xed\xfcZ\xfd\xec\x00\xd4\x07\x8c\xf6\r\xffA\x0b\x9e\x06\xb5\xed[\x02e\x11C\xf9%\xf85\r\xc1\x0br\xe8\xf4\x0b\x94\x05f\xf3\xdc\x01\xe4\xff\xfe\xfe\xbc\xfcC\x01\x96\x03\x9a\xf5\xad\xf5\x7f\x07\xfc\xfa\\\xf3\xdf\t\xe4\x01^\xed\xef\x08\xe0\x01\xce\xf4&\x03[\xf8\xa5\x05\xae\xffx\xfc\x9e\n\x91\x02N\xf4e\x06\xb7\x0cL\xee\xbb\x0b\xb1\nP\xf7i\x08\x8e\x01l\x003\t\xaa\xfc\xc2\x00/\x08\xa9\xf1\x9c\x04\xc6\x19\xf6\xef\x9e\xfcm\x13\x88\xf8\xe6\xec\'\x18\x90\x06\x0e\xe7D\x12\xce\x04\xc4\xfcr\xff\xd1\x08V\xf8\x0e\x04C\xfe\xa5\x03\xe1\x08\xd0\xf5\xb2\x07{\x01*\x04\x1c\xf9\xbd\xfd\xcf\t\x0c\xf8s\xfd\x8c\x06\xa3\xff|\xfeM\xfdq\t\xda\xe9\x16\r\r\x03\x98\xef\x94\x06\x8d\x05\x8a\x03\x99\xefT\r\xae\xfcS\xfc;\x06\xbd\x00I\xfd\xe3\x03\x10\
0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x
00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +# --- diff --git a/tests/components/voip/test_util.py 
b/tests/components/voip/test_util.py new file mode 100644 index 00000000000..85dfdbac2be --- /dev/null +++ b/tests/components/voip/test_util.py @@ -0,0 +1,47 @@ +"""Test VoIP utils.""" + +import asyncio + +import pytest + +from homeassistant.components.voip.util import queue_to_iterable + + +async def test_queue_to_iterable() -> None: + """Test queue_to_iterable.""" + queue: asyncio.Queue[int | None] = asyncio.Queue() + expected_items = list(range(10)) + + for i in expected_items: + await queue.put(i) + + # Will terminate the stream + await queue.put(None) + + actual_items = [item async for item in queue_to_iterable(queue)] + + assert expected_items == actual_items + + # Check timeout + assert queue.empty() + + # Time out on first item + async with asyncio.timeout(1): + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + # Should time out very quickly + async for _item in queue_to_iterable(queue, timeout=0.01): + await asyncio.sleep(1) + + # Check timeout on second item + assert queue.empty() + await queue.put(12345) + + # Time out on second item + async with asyncio.timeout(1): + with pytest.raises(asyncio.TimeoutError): # noqa: PT012 + # Should time out very quickly + async for item in queue_to_iterable(queue, timeout=0.01): + if item != 12345: + await asyncio.sleep(1) + + assert queue.empty() diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index aab35bfd029..e6a635619a1 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -3,15 +3,26 @@ import asyncio import io from pathlib import Path -import time from unittest.mock import AsyncMock, Mock, patch import wave import pytest +from syrupy.assertion import SnapshotAssertion +from voip_utils import CallInfo -from homeassistant.components import assist_pipeline, voip -from homeassistant.components.voip.devices import VoIPDevice +from homeassistant.components import assist_pipeline, assist_satellite, voip +from homeassistant.components.assist_satellite.entity import ( + AssistSatelliteEntity, + AssistSatelliteState, +) +from homeassistant.components.voip import HassVoipDatagramProtocol +from homeassistant.components.voip.assist_satellite import Tones, VoipAssistSatellite +from homeassistant.components.voip.devices import VoIPDevice, VoIPDevices +from homeassistant.components.voip.voip import PreRecordMessageProtocol, make_protocol +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_component import EntityComponent from homeassistant.setup import async_setup_component _ONE_SECOND = 16000 * 2 # 16Khz 16-bit @@ -35,33 +46,180 @@ def _empty_wav() -> bytes: return wav_io.getvalue() +def async_get_satellite_entity( + hass: HomeAssistant, domain: str, unique_id_prefix: str +) -> AssistSatelliteEntity | None: + """Get Assist satellite entity.""" + ent_reg = er.async_get(hass) + satellite_entity_id = ent_reg.async_get_entity_id( + Platform.ASSIST_SATELLITE, domain, f"{unique_id_prefix}-assist_satellite" + ) + if satellite_entity_id is None: + return None + + component: EntityComponent[AssistSatelliteEntity] = hass.data[ + assist_satellite.DOMAIN + ] + return component.get_entity(satellite_entity_id) + + +async def test_is_valid_call( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, +) -> None: + """Test that a call is now allowed from an unknown device.""" + assert 
await async_setup_component(hass, "voip", {}) + protocol = HassVoipDatagramProtocol(hass, voip_devices) + assert not protocol.is_valid_call(call_info) + + ent_reg = er.async_get(hass) + allowed_call_entity_id = ent_reg.async_get_entity_id( + "switch", voip.DOMAIN, f"{voip_device.voip_id}-allow_call" + ) + assert allowed_call_entity_id is not None + state = hass.states.get(allowed_call_entity_id) + assert state is not None + assert state.state == STATE_OFF + + # Allow calls + hass.states.async_set(allowed_call_entity_id, STATE_ON) + assert protocol.is_valid_call(call_info) + + +async def test_calls_not_allowed( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, + snapshot: SnapshotAssertion, +) -> None: + """Test that a pre-recorded message is played when calls aren't allowed.""" + assert await async_setup_component(hass, "voip", {}) + protocol: PreRecordMessageProtocol = make_protocol(hass, voip_devices, call_info) + assert isinstance(protocol, PreRecordMessageProtocol) + assert protocol.file_name == "problem.pcm" + + # Test the playback + done = asyncio.Event() + played_audio_bytes = b"" + + def send_audio(audio_bytes: bytes, **kwargs): + nonlocal played_audio_bytes + + # Should be problem.pcm from components/voip + played_audio_bytes = audio_bytes + done.set() + + protocol.transport = Mock() + protocol.loop_delay = 0 + with patch.object(protocol, "send_audio", send_audio): + protocol.on_chunk(bytes(_ONE_SECOND)) + + async with asyncio.timeout(1): + await done.wait() + + assert sum(played_audio_bytes) > 0 + assert played_audio_bytes == snapshot() + + +async def test_pipeline_not_found( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, + snapshot: SnapshotAssertion, +) -> None: + """Test that a pre-recorded message is played when a pipeline isn't found.""" + assert await async_setup_component(hass, "voip", {}) + + with patch( + "homeassistant.components.voip.voip.async_get_pipeline", return_value=None + ): + protocol: PreRecordMessageProtocol = make_protocol( + hass, voip_devices, call_info + ) + + assert isinstance(protocol, PreRecordMessageProtocol) + assert protocol.file_name == "problem.pcm" + + +async def test_satellite_prepared( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + call_info: CallInfo, + snapshot: SnapshotAssertion, +) -> None: + """Test that satellite is prepared for a call.""" + assert await async_setup_component(hass, "voip", {}) + + pipeline = assist_pipeline.Pipeline( + conversation_engine="test", + conversation_language="en", + language="en", + name="test", + stt_engine="test", + stt_language="en", + tts_engine="test", + tts_language="en", + tts_voice=None, + wake_word_entity=None, + wake_word_id=None, + ) + + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + + with ( + patch( + "homeassistant.components.voip.voip.async_get_pipeline", + return_value=pipeline, + ), + ): + protocol = make_protocol(hass, voip_devices, call_info) + assert protocol == satellite + + async def test_pipeline( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, + call_info: CallInfo, ) -> None: """Test that pipeline function is called from RTP protocol.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 + satellite = async_get_satellite_entity(hass, 
voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + voip_user_id = satellite.config_entry.data["user"] + assert voip_user_id - return 0 + # Satellite is muted until a call begins + assert satellite.state == AssistSatelliteState.LISTENING_WAKE_WORD done = asyncio.Event() # Used to test that audio queue is cleared before pipeline starts bad_chunk = bytes([1, 2, 3, 4]) - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + async def async_pipeline_from_audio_stream( + hass: HomeAssistant, context: Context, *args, device_id: str | None, **kwargs + ): + assert context.user_id == voip_user_id assert device_id == voip_device.device_id stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: + in_command = False + async for chunk in stt_stream: # Stream will end when VAD detects end of "speech" - assert _chunk != bad_chunk + assert chunk != bad_chunk + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command # Test empty data event_callback( @@ -71,6 +229,38 @@ async def test_pipeline( ) ) + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_START, + data={"engine": "test", "metadata": {}}, + ) + ) + + assert satellite.state == AssistSatelliteState.LISTENING_COMMAND + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) + + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.INTENT_START, + data={ + "engine": "test", + "language": hass.config.language, + "intent_input": "fake-text", + "conversation_id": None, + "device_id": None, + }, + ) + ) + + assert satellite.state == AssistSatelliteState.PROCESSING + # Fake intent result event_callback( assist_pipeline.PipelineEvent( @@ -83,6 +273,21 @@ async def test_pipeline( ) ) + # Fake tts result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.TTS_START, + data={ + "engine": "test", + "language": hass.config.language, + "voice": "test", + "tts_input": "fake-text", + }, + ) + ) + + assert satellite.state == AssistSatelliteState.RESPONDING + # Proceed with media output event_callback( assist_pipeline.PipelineEvent( @@ -91,6 +296,18 @@ async def test_pipeline( ) ) + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.RUN_END + ) + ) + + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + done.set() + async def async_get_media_source_audio( hass: HomeAssistant, media_source_id: str, @@ -100,102 +317,56 @@ async def test_pipeline( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), + patch.object(satellite, "tts_response_finished", tts_response_finished), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - listening_tone_enabled=False, - 
processing_tone_enabled=False, - error_tone_enabled=False, - silence_seconds=assist_pipeline.vad.VadSensitivity.to_seconds("aggressive"), - ) - rtp_protocol.transport = Mock() + satellite._tones = Tones(0) + satellite.transport = Mock() + + satellite.connection_made(satellite.transport) + assert satellite.state == AssistSatelliteState.LISTENING_WAKE_WORD # Ensure audio queue is cleared before pipeline starts - rtp_protocol._audio_queue.put_nowait(bad_chunk) + satellite._audio_queue.put_nowait(bad_chunk) def send_audio(*args, **kwargs): - # Test finished successfully - done.set() + # Don't send audio + pass - rtp_protocol.send_audio = Mock(side_effect=send_audio) + satellite.send_audio = Mock(side_effect=send_audio) # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) - # silence (assumes aggressive VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + # silence + satellite.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): await done.wait() - -async def test_pipeline_timeout(hass: HomeAssistant, voip_device: VoIPDevice) -> None: - """Test timeout during pipeline run.""" - assert await async_setup_component(hass, "voip", {}) - - done = asyncio.Event() - - async def async_pipeline_from_audio_stream(*args, **kwargs): - await asyncio.sleep(10) - - with ( - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch( - "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._wait_for_speech", - return_value=True, - ), - ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - pipeline_timeout=0.001, - listening_tone_enabled=False, - processing_tone_enabled=False, - error_tone_enabled=False, - ) - transport = Mock(spec=["close"]) - rtp_protocol.connection_made(transport) - - # Closing the transport will cause the test to succeed - transport.close.side_effect = done.set - - # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) - - # Wait for mock pipeline to time out - async with asyncio.timeout(1): - await done.wait() + # Finished speaking + assert satellite.state == AssistSatelliteState.LISTENING_WAKE_WORD -async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) -> None: +async def test_stt_stream_timeout( + hass: HomeAssistant, voip_devices: VoIPDevices, voip_device: VoIPDevice +) -> None: """Test timeout in STT stream during pipeline run.""" assert await async_setup_component(hass, "voip", {}) + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): @@ -205,28 +376,19 @@ async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) pass with patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - audio_timeout=0.001, - listening_tone_enabled=False, - processing_tone_enabled=False, - error_tone_enabled=False, 
- ) + satellite._tones = Tones(0) + satellite._audio_chunk_timeout = 0.001 transport = Mock(spec=["close"]) - rtp_protocol.connection_made(transport) + satellite.connection_made(transport) # Closing the transport will cause the test to succeed transport.close.side_effect = done.set # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to time out async with asyncio.timeout(1): @@ -235,26 +397,34 @@ async def test_stt_stream_timeout(hass: HomeAssistant, voip_device: VoIPDevice) async def test_tts_timeout( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will time out based on its length.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -278,15 +448,7 @@ async def test_tts_timeout( tone_bytes = bytes([1, 2, 3, 4]) - def send_audio(audio_bytes, **kwargs): - if audio_bytes == tone_bytes: - # Not TTS - return - - # Block here to force a timeout in _send_tts - time.sleep(2) - - async def async_send_audio(audio_bytes, **kwargs): + async def async_send_audio(audio_bytes: bytes, **kwargs): if audio_bytes == tone_bytes: # Not TTS return @@ -303,37 +465,22 @@ async def test_tts_timeout( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - tts_extra_timeout=0.001, - listening_tone_enabled=True, - processing_tone_enabled=True, - error_tone_enabled=True, - silence_seconds=assist_pipeline.vad.VadSensitivity.to_seconds("relaxed"), - ) - rtp_protocol._tone_bytes = tone_bytes - rtp_protocol._processing_bytes = tone_bytes - rtp_protocol._error_bytes = tone_bytes - rtp_protocol.transport = Mock() - rtp_protocol.send_audio = Mock() + satellite._tts_extra_timeout = 0.001 + for tone in Tones: + satellite._tone_bytes[tone] = tone_bytes - original_send_tts = rtp_protocol._send_tts + satellite.transport = Mock() + satellite.send_audio = Mock() + + original_send_tts = satellite._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -342,17 +489,17 @@ async def test_tts_timeout( done.set() - rtp_protocol._async_send_audio = 
AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + satellite._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] + satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) - # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + # silence + satellite.on_chunk(bytes(_ONE_SECOND)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -361,26 +508,34 @@ async def test_tts_timeout( async def test_tts_wrong_extension( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will only stream WAV audio.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -411,28 +566,17 @@ async def test_tts_wrong_extension( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - ) - rtp_protocol.transport = Mock() + satellite.transport = Mock() - original_send_tts = rtp_protocol._send_tts + original_send_tts = satellite._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -441,16 +585,16 @@ async def test_tts_wrong_extension( done.set() - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + satellite.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -459,26 +603,34 @@ 
async def test_tts_wrong_extension( async def test_tts_wrong_wav_format( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will only stream WAV audio with a specific format.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) done = asyncio.Event() async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -516,28 +668,17 @@ async def test_tts_wrong_wav_format( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.tts.async_get_media_source_audio", + "homeassistant.components.voip.assist_satellite.tts.async_get_media_source_audio", new=async_get_media_source_audio, ), ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - ) - rtp_protocol.transport = Mock() + satellite.transport = Mock() - original_send_tts = rtp_protocol._send_tts + original_send_tts = satellite._send_tts async def send_tts(*args, **kwargs): # Call original then end test successfully @@ -546,16 +687,16 @@ async def test_tts_wrong_wav_format( done.set() - rtp_protocol._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] + satellite._send_tts = AsyncMock(side_effect=send_tts) # type: ignore[method-assign] # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + satellite.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to exhaust the audio stream async with asyncio.timeout(1): @@ -564,24 +705,32 @@ async def test_tts_wrong_wav_format( async def test_empty_tts_output( hass: HomeAssistant, + voip_devices: VoIPDevices, voip_device: VoIPDevice, ) -> None: """Test that TTS will not stream when output is empty.""" assert await async_setup_component(hass, "voip", {}) - def process_10ms(self, chunk): - """Anything non-zero is speech.""" - if sum(chunk) > 0: - return 1 - - return 0 + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) async def async_pipeline_from_audio_stream(*args, **kwargs): stt_stream = kwargs["stt_stream"] event_callback = kwargs["event_callback"] - async for _chunk in stt_stream: - # Stream will end when VAD detects end of "speech" - pass + in_command = False + async for 
chunk in stt_stream: + if sum(chunk) > 0: + in_command = True + elif in_command: + break # done with command + + # Fake STT result + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.STT_END, + data={"stt_output": {"text": "fake-text"}}, + ) + ) # Fake intent result event_callback( @@ -605,37 +754,78 @@ async def test_empty_tts_output( with ( patch( - "pymicro_vad.MicroVad.Process10ms", - new=process_10ms, - ), - patch( - "homeassistant.components.voip.voip.async_pipeline_from_audio_stream", + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", new=async_pipeline_from_audio_stream, ), patch( - "homeassistant.components.voip.voip.PipelineRtpDatagramProtocol._send_tts", + "homeassistant.components.voip.assist_satellite.VoipAssistSatellite._send_tts", ) as mock_send_tts, ): - rtp_protocol = voip.voip.PipelineRtpDatagramProtocol( - hass, - hass.config.language, - voip_device, - Context(), - opus_payload_type=123, - ) - rtp_protocol.transport = Mock() + satellite.transport = Mock() # silence - rtp_protocol.on_chunk(bytes(_ONE_SECOND)) + satellite.on_chunk(bytes(_ONE_SECOND)) # "speech" - rtp_protocol.on_chunk(bytes([255] * _ONE_SECOND * 2)) + satellite.on_chunk(bytes([255] * _ONE_SECOND * 2)) # silence (assumes relaxed VAD sensitivity) - rtp_protocol.on_chunk(bytes(_ONE_SECOND * 4)) + satellite.on_chunk(bytes(_ONE_SECOND * 4)) # Wait for mock pipeline to finish async with asyncio.timeout(1): - await rtp_protocol._tts_done.wait() + await satellite._tts_done.wait() mock_send_tts.assert_not_called() + + +async def test_pipeline_error( + hass: HomeAssistant, + voip_devices: VoIPDevices, + voip_device: VoIPDevice, + snapshot: SnapshotAssertion, +) -> None: + """Test that a pipeline error causes the error tone to be played.""" + assert await async_setup_component(hass, "voip", {}) + + satellite = async_get_satellite_entity(hass, voip.DOMAIN, voip_device.voip_id) + assert isinstance(satellite, VoipAssistSatellite) + + done = asyncio.Event() + played_audio_bytes = b"" + + async def async_pipeline_from_audio_stream(*args, **kwargs): + # Fake error + event_callback = kwargs["event_callback"] + event_callback( + assist_pipeline.PipelineEvent( + type=assist_pipeline.PipelineEventType.ERROR, + data={"code": "error-code", "message": "error message"}, + ) + ) + + async def async_send_audio(audio_bytes: bytes, **kwargs): + nonlocal played_audio_bytes + + # Should be error.pcm from components/voip + played_audio_bytes = audio_bytes + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + ): + satellite._tones = Tones.ERROR + satellite.transport = Mock() + satellite._async_send_audio = AsyncMock(side_effect=async_send_audio) # type: ignore[method-assign] + + satellite.on_chunk(bytes(_ONE_SECOND)) + + # Wait for error tone to be played + async with asyncio.timeout(1): + await done.wait() + + assert sum(played_audio_bytes) > 0 + assert played_audio_bytes == snapshot() From e58cf00a96359ba530025e6c2ec2e2d7780dd40a Mon Sep 17 00:00:00 2001 From: G Johansson Date: Fri, 6 Sep 2024 16:18:24 +0200 Subject: [PATCH 1528/1635] Remove deprecated aux_heat from ecobee (#125246) --- homeassistant/components/ecobee/climate.py | 48 ------------- tests/components/ecobee/test_climate.py | 84 +--------------------- tests/components/ecobee/test_repairs.py | 35 --------- 3 files changed, 2 insertions(+), 165 deletions(-) diff --git 
a/homeassistant/components/ecobee/climate.py b/homeassistant/components/ecobee/climate.py index 8dcc7285590..f9119f05394 100644 --- a/homeassistant/components/ecobee/climate.py +++ b/homeassistant/components/ecobee/climate.py @@ -36,7 +36,6 @@ from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.util.unit_conversion import TemperatureConverter from . import EcobeeData @@ -387,8 +386,6 @@ class Thermostat(ClimateEntity): supported = SUPPORT_FLAGS if self.has_humidifier_control: supported = supported | ClimateEntityFeature.TARGET_HUMIDITY - if self.has_aux_heat: - supported = supported | ClimateEntityFeature.AUX_HEAT if len(self.hvac_modes) > 1 and HVACMode.OFF in self.hvac_modes: supported = ( supported | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON @@ -449,11 +446,6 @@ class Thermostat(ClimateEntity): and self.settings.get("humidifierMode") == HUMIDIFIER_MANUAL_MODE ) - @property - def has_aux_heat(self) -> bool: - """Return true if the ecobee has a heat pump.""" - return bool(self.settings.get(HAS_HEAT_PUMP)) - @property def target_humidity(self) -> int | None: """Return the desired humidity set point.""" @@ -573,46 +565,6 @@ class Thermostat(ClimateEntity): "fan_min_on_time": self.settings["fanMinOnTime"], } - @property - def is_aux_heat(self) -> bool: - """Return true if aux heater.""" - return self.settings["hvacMode"] == ECOBEE_AUX_HEAT_ONLY - - async def async_turn_aux_heat_on(self) -> None: - """Turn auxiliary heater on.""" - async_create_issue( - self.hass, - DOMAIN, - "migrate_aux_heat", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - translation_key="migrate_aux_heat", - severity=IssueSeverity.WARNING, - ) - _LOGGER.debug("Setting HVAC mode to auxHeatOnly to turn on aux heat") - self._last_hvac_mode_before_aux_heat = self.hvac_mode - await self.hass.async_add_executor_job( - self.data.ecobee.set_hvac_mode, self.thermostat_index, ECOBEE_AUX_HEAT_ONLY - ) - self.update_without_throttle = True - - async def async_turn_aux_heat_off(self) -> None: - """Turn auxiliary heater off.""" - async_create_issue( - self.hass, - DOMAIN, - "migrate_aux_heat", - breaks_in_ha_version="2024.10.0", - is_fixable=True, - is_persistent=True, - translation_key="migrate_aux_heat", - severity=IssueSeverity.WARNING, - ) - _LOGGER.debug("Setting HVAC mode to last mode to disable aux heat") - await self.async_set_hvac_mode(self._last_hvac_mode_before_aux_heat) - self.update_without_throttle = True - def set_preset_mode(self, preset_mode: str) -> None: """Activate a preset.""" preset_mode = HASS_TO_ECOBEE_PRESET.get(preset_mode, preset_mode) diff --git a/tests/components/ecobee/test_climate.py b/tests/components/ecobee/test_climate.py index 1c9dcec0ad2..559153874a5 100644 --- a/tests/components/ecobee/test_climate.py +++ b/tests/components/ecobee/test_climate.py @@ -1,24 +1,16 @@ """The test for the Ecobee thermostat module.""" -import copy from http import HTTPStatus from unittest import mock -from unittest.mock import MagicMock import pytest from homeassistant import const -from homeassistant.components import climate from homeassistant.components.climate import ClimateEntityFeature -from homeassistant.components.ecobee.climate import ( - ECOBEE_AUX_HEAT_ONLY, - 
PRESET_AWAY_INDEFINITELY, - Thermostat, -) -from homeassistant.const import ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, STATE_OFF +from homeassistant.components.ecobee.climate import PRESET_AWAY_INDEFINITELY, Thermostat +from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_OFF from homeassistant.core import HomeAssistant -from . import GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP from .common import setup_platform ENTITY_ID = "climate.ecobee" @@ -111,25 +103,6 @@ async def test_aux_heat_not_supported_by_default(hass: HomeAssistant) -> None: ) -async def test_aux_heat_supported_with_heat_pump(hass: HomeAssistant) -> None: - """Aux Heat should be supported if thermostat has heatpump.""" - mock_get_thermostat = mock.Mock() - mock_get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): - await setup_platform(hass, const.Platform.CLIMATE) - state = hass.states.get(ENTITY_ID) - assert ( - state.attributes.get(ATTR_SUPPORTED_FEATURES) - == ClimateEntityFeature.PRESET_MODE - | ClimateEntityFeature.FAN_MODE - | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE - | ClimateEntityFeature.TARGET_TEMPERATURE - | ClimateEntityFeature.AUX_HEAT - | ClimateEntityFeature.TURN_OFF - | ClimateEntityFeature.TURN_ON - ) - - async def test_current_temperature(ecobee_fixture, thermostat) -> None: """Test current temperature.""" assert thermostat.current_temperature == 30 @@ -255,29 +228,6 @@ async def test_extra_state_attributes(ecobee_fixture, thermostat) -> None: } -async def test_is_aux_heat_on(hass: HomeAssistant) -> None: - """Test aux heat property is only enabled for auxHeatOnly.""" - mock_get_thermostat = mock.Mock() - mock_get_thermostat.return_value = copy.deepcopy( - GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - ) - mock_get_thermostat.return_value["settings"]["hvacMode"] = "auxHeatOnly" - with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): - await setup_platform(hass, const.Platform.CLIMATE) - state = hass.states.get(ENTITY_ID) - assert state.attributes[climate.ATTR_AUX_HEAT] == "on" - - -async def test_is_aux_heat_off(hass: HomeAssistant) -> None: - """Test aux heat property is only enabled for auxHeatOnly.""" - mock_get_thermostat = mock.Mock() - mock_get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - with mock.patch("pyecobee.Ecobee.get_thermostat", mock_get_thermostat): - await setup_platform(hass, const.Platform.CLIMATE) - state = hass.states.get(ENTITY_ID) - assert state.attributes[climate.ATTR_AUX_HEAT] == "off" - - async def test_set_temperature(ecobee_fixture, thermostat, data) -> None: """Test set temperature.""" # Auto -> Auto @@ -400,36 +350,6 @@ async def test_set_fan_mode_auto(thermostat, data) -> None: ) -async def test_turn_aux_heat_on(hass: HomeAssistant, mock_ecobee: MagicMock) -> None: - """Test when aux heat is set on. 
This must change the HVAC mode.""" - mock_ecobee.get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - mock_ecobee.thermostats = [GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP] - await setup_platform(hass, const.Platform.CLIMATE) - await hass.services.async_call( - climate.DOMAIN, - climate.SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: ENTITY_ID, climate.ATTR_AUX_HEAT: True}, - blocking=True, - ) - assert mock_ecobee.set_hvac_mode.call_count == 1 - assert mock_ecobee.set_hvac_mode.call_args == mock.call(0, ECOBEE_AUX_HEAT_ONLY) - - -async def test_turn_aux_heat_off(hass: HomeAssistant, mock_ecobee: MagicMock) -> None: - """Test when aux heat is tuned off. Must change HVAC mode back to last used.""" - mock_ecobee.get_thermostat.return_value = GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP - mock_ecobee.thermostats = [GENERIC_THERMOSTAT_INFO_WITH_HEATPUMP] - await setup_platform(hass, const.Platform.CLIMATE) - await hass.services.async_call( - climate.DOMAIN, - climate.SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: ENTITY_ID, climate.ATTR_AUX_HEAT: False}, - blocking=True, - ) - assert mock_ecobee.set_hvac_mode.call_count == 1 - assert mock_ecobee.set_hvac_mode.call_args == mock.call(0, "auto") - - async def test_preset_indefinite_away(ecobee_fixture, thermostat) -> None: """Test indefinite away showing correctly, and not as temporary away.""" ecobee_fixture["program"]["currentClimateRef"] = "away" diff --git a/tests/components/ecobee/test_repairs.py b/tests/components/ecobee/test_repairs.py index 1473f8eb3a1..43b3cc5b7d0 100644 --- a/tests/components/ecobee/test_repairs.py +++ b/tests/components/ecobee/test_repairs.py @@ -3,11 +3,6 @@ from http import HTTPStatus from unittest.mock import MagicMock -from homeassistant.components.climate import ( - ATTR_AUX_HEAT, - DOMAIN as CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, -) from homeassistant.components.ecobee import DOMAIN from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN from homeassistant.components.repairs.issue_handler import ( @@ -17,7 +12,6 @@ from homeassistant.components.repairs.websocket_api import ( RepairsFlowIndexView, RepairsFlowResourceView, ) -from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.helpers import issue_registry as ir @@ -83,32 +77,3 @@ async def test_ecobee_notify_repair_flow( issue_id=f"migrate_notify_{DOMAIN}_{DOMAIN}", ) assert len(issue_registry.issues) == 0 - - -async def test_ecobee_aux_heat_repair_flow( - hass: HomeAssistant, - mock_ecobee: MagicMock, - hass_client: ClientSessionGenerator, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the ecobee aux_heat service repair flow is triggered.""" - await setup_platform(hass, CLIMATE_DOMAIN) - await async_process_repairs_platforms(hass) - - ENTITY_ID = "climate.ecobee2" - - # Simulate legacy service being used - assert hass.services.has_service(CLIMATE_DOMAIN, SERVICE_SET_AUX_HEAT) - await hass.services.async_call( - CLIMATE_DOMAIN, - SERVICE_SET_AUX_HEAT, - {ATTR_ENTITY_ID: ENTITY_ID, ATTR_AUX_HEAT: True}, - blocking=True, - ) - - # Assert the issue is present - assert issue_registry.async_get_issue( - domain="ecobee", - issue_id="migrate_aux_heat", - ) - assert len(issue_registry.issues) == 1 From c4cc158a7794e64da4cb0a1bd09c67ea391e5395 Mon Sep 17 00:00:00 2001 From: Alexandre TRUPIN <72858385+AlexT59@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:18:47 +0200 Subject: [PATCH 1529/1635] Bump sfrbox-api to 0.0.10 (#125405) * bump sfr_box requirement to 0.0.10 * upate manifest file * Handle 
None values --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> --- homeassistant/components/sfr_box/__init__.py | 3 ++ .../components/sfr_box/binary_sensor.py | 14 +++++--- homeassistant/components/sfr_box/button.py | 8 +++-- .../components/sfr_box/config_flow.py | 4 ++- .../components/sfr_box/coordinator.py | 6 ++-- .../components/sfr_box/diagnostics.py | 31 ++++++++---------- .../components/sfr_box/manifest.json | 2 +- homeassistant/components/sfr_box/sensor.py | 32 +++++++++++++++---- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- .../sfr_box/snapshots/test_diagnostics.ambr | 4 +-- 11 files changed, 68 insertions(+), 40 deletions(-) diff --git a/homeassistant/components/sfr_box/__init__.py b/homeassistant/components/sfr_box/__init__.py index dade1af0e52..d386c670365 100644 --- a/homeassistant/components/sfr_box/__init__.py +++ b/homeassistant/components/sfr_box/__init__.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from typing import TYPE_CHECKING from sfrbox_api.bridge import SFRBox from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError @@ -46,6 +47,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # Preload system information await data.system.async_config_entry_first_refresh() system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None # Preload other coordinators (based on net infrastructure) tasks = [data.wan.async_config_entry_first_refresh()] diff --git a/homeassistant/components/sfr_box/binary_sensor.py b/homeassistant/components/sfr_box/binary_sensor.py index b299af33513..4ef5e87761d 100644 --- a/homeassistant/components/sfr_box/binary_sensor.py +++ b/homeassistant/components/sfr_box/binary_sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from sfrbox_api.models import DslInfo, FtthInfo, SystemInfo, WanInfo @@ -65,19 +66,22 @@ async def async_setup_entry( ) -> None: """Set up the sensors.""" data: DomainData = hass.data[DOMAIN][entry.entry_id] + system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None entities: list[SFRBoxBinarySensor] = [ - SFRBoxBinarySensor(data.wan, description, data.system.data) + SFRBoxBinarySensor(data.wan, description, system_info) for description in WAN_SENSOR_TYPES ] - if (net_infra := data.system.data.net_infra) == "adsl": + if (net_infra := system_info.net_infra) == "adsl": entities.extend( - SFRBoxBinarySensor(data.dsl, description, data.system.data) + SFRBoxBinarySensor(data.dsl, description, system_info) for description in DSL_SENSOR_TYPES ) elif net_infra == "ftth": entities.extend( - SFRBoxBinarySensor(data.ftth, description, data.system.data) + SFRBoxBinarySensor(data.ftth, description, system_info) for description in FTTH_SENSOR_TYPES ) @@ -111,4 +115,6 @@ class SFRBoxBinarySensor[_T]( @property def is_on(self) -> bool | None: """Return the native value of the device.""" + if self.coordinator.data is None: + return None return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/sfr_box/button.py b/homeassistant/components/sfr_box/button.py index f6d3100d692..bddb1e8f926 100644 --- a/homeassistant/components/sfr_box/button.py +++ b/homeassistant/components/sfr_box/button.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass from 
functools import wraps -from typing import Any, Concatenate +from typing import TYPE_CHECKING, Any, Concatenate from sfrbox_api.bridge import SFRBox from sfrbox_api.exceptions import SFRBoxError @@ -69,10 +69,12 @@ async def async_setup_entry( ) -> None: """Set up the buttons.""" data: DomainData = hass.data[DOMAIN][entry.entry_id] + system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None entities = [ - SFRBoxButton(data.box, description, data.system.data) - for description in BUTTON_TYPES + SFRBoxButton(data.box, description, system_info) for description in BUTTON_TYPES ] async_add_entities(entities) diff --git a/homeassistant/components/sfr_box/config_flow.py b/homeassistant/components/sfr_box/config_flow.py index f7d72c01ccd..a4f14e59069 100644 --- a/homeassistant/components/sfr_box/config_flow.py +++ b/homeassistant/components/sfr_box/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from sfrbox_api.bridge import SFRBox from sfrbox_api.exceptions import SFRBoxAuthenticationError, SFRBoxError @@ -51,6 +51,8 @@ class SFRBoxFlowHandler(ConfigFlow, domain=DOMAIN): except SFRBoxError: errors["base"] = "cannot_connect" else: + if TYPE_CHECKING: + assert system_info is not None await self.async_set_unique_id(system_info.mac_addr) self._abort_if_unique_id_configured() self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) diff --git a/homeassistant/components/sfr_box/coordinator.py b/homeassistant/components/sfr_box/coordinator.py index af3195723f4..5877d5a454a 100644 --- a/homeassistant/components/sfr_box/coordinator.py +++ b/homeassistant/components/sfr_box/coordinator.py @@ -15,7 +15,7 @@ _LOGGER = logging.getLogger(__name__) _SCAN_INTERVAL = timedelta(minutes=1) -class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): +class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT | None]): """Coordinator to manage data updates.""" def __init__( @@ -23,14 +23,14 @@ class SFRDataUpdateCoordinator[_DataT](DataUpdateCoordinator[_DataT]): hass: HomeAssistant, box: SFRBox, name: str, - method: Callable[[SFRBox], Coroutine[Any, Any, _DataT]], + method: Callable[[SFRBox], Coroutine[Any, Any, _DataT | None]], ) -> None: """Initialize coordinator.""" self.box = box self._method = method super().__init__(hass, _LOGGER, name=name, update_interval=_SCAN_INTERVAL) - async def _async_update_data(self) -> _DataT: + async def _async_update_data(self) -> _DataT | None: """Update data.""" try: return await self._method(self.box) diff --git a/homeassistant/components/sfr_box/diagnostics.py b/homeassistant/components/sfr_box/diagnostics.py index b5aca834af5..0553bfe4233 100644 --- a/homeassistant/components/sfr_box/diagnostics.py +++ b/homeassistant/components/sfr_box/diagnostics.py @@ -3,7 +3,7 @@ from __future__ import annotations import dataclasses -from typing import Any +from typing import TYPE_CHECKING, Any from homeassistant.components.diagnostics import async_redact_data from homeassistant.config_entries import ConfigEntry @@ -12,9 +12,18 @@ from homeassistant.core import HomeAssistant from .const import DOMAIN from .models import DomainData +if TYPE_CHECKING: + from _typeshed import DataclassInstance + TO_REDACT = {"mac_addr", "serial_number", "ip_addr", "ipv6_addr"} +def _async_redact_data(obj: DataclassInstance | None) -> dict[str, Any] | None: + if obj is None: + return None + return 
async_redact_data(dataclasses.asdict(obj), TO_REDACT) + + async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: @@ -27,21 +36,9 @@ async def async_get_config_entry_diagnostics( "data": dict(entry.data), }, "data": { - "dsl": async_redact_data( - dataclasses.asdict(await data.system.box.dsl_get_info()), - TO_REDACT, - ), - "ftth": async_redact_data( - dataclasses.asdict(await data.system.box.ftth_get_info()), - TO_REDACT, - ), - "system": async_redact_data( - dataclasses.asdict(await data.system.box.system_get_info()), - TO_REDACT, - ), - "wan": async_redact_data( - dataclasses.asdict(await data.system.box.wan_get_info()), - TO_REDACT, - ), + "dsl": _async_redact_data(await data.system.box.dsl_get_info()), + "ftth": _async_redact_data(await data.system.box.ftth_get_info()), + "system": _async_redact_data(await data.system.box.system_get_info()), + "wan": _async_redact_data(await data.system.box.wan_get_info()), }, } diff --git a/homeassistant/components/sfr_box/manifest.json b/homeassistant/components/sfr_box/manifest.json index bf4d91a50f1..cd42997cec5 100644 --- a/homeassistant/components/sfr_box/manifest.json +++ b/homeassistant/components/sfr_box/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/sfr_box", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["sfrbox-api==0.0.8"] + "requirements": ["sfrbox-api==0.0.10"] } diff --git a/homeassistant/components/sfr_box/sensor.py b/homeassistant/components/sfr_box/sensor.py index d19ff82b393..ee3285a8f38 100644 --- a/homeassistant/components/sfr_box/sensor.py +++ b/homeassistant/components/sfr_box/sensor.py @@ -2,6 +2,7 @@ from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from sfrbox_api.models import DslInfo, SystemInfo, WanInfo @@ -129,7 +130,7 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = ( "unknown", ], translation_key="dsl_line_status", - value_fn=lambda x: x.line_status.lower().replace(" ", "_"), + value_fn=lambda x: _value_to_option(x.line_status), ), SFRBoxSensorEntityDescription[DslInfo]( key="training", @@ -149,7 +150,7 @@ DSL_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[DslInfo], ...] = ( "unknown", ], translation_key="dsl_training", - value_fn=lambda x: x.training.lower().replace(" ", "_").replace(".", "_"), + value_fn=lambda x: _value_to_option(x.training), ), ) SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = ( @@ -181,7 +182,7 @@ SYSTEM_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[SystemInfo], ...] = ( entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda x: None if x.temperature is None else x.temperature / 1000, + value_fn=lambda x: _get_temperature(x.temperature), ), ) WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] = ( @@ -203,23 +204,38 @@ WAN_SENSOR_TYPES: tuple[SFRBoxSensorEntityDescription[WanInfo], ...] 
= ( ) +def _value_to_option(value: str | None) -> str | None: + if value is None: + return value + return value.lower().replace(" ", "_").replace(".", "_") + + +def _get_temperature(value: float | None) -> float | None: + if value is None or value < 1000: + return value + return value / 1000 + + async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the sensors.""" data: DomainData = hass.data[DOMAIN][entry.entry_id] + system_info = data.system.data + if TYPE_CHECKING: + assert system_info is not None entities: list[SFRBoxSensor] = [ - SFRBoxSensor(data.system, description, data.system.data) + SFRBoxSensor(data.system, description, system_info) for description in SYSTEM_SENSOR_TYPES ] entities.extend( - SFRBoxSensor(data.wan, description, data.system.data) + SFRBoxSensor(data.wan, description, system_info) for description in WAN_SENSOR_TYPES ) - if data.system.data.net_infra == "adsl": + if system_info.net_infra == "adsl": entities.extend( - SFRBoxSensor(data.dsl, description, data.system.data) + SFRBoxSensor(data.dsl, description, system_info) for description in DSL_SENSOR_TYPES ) @@ -251,4 +267,6 @@ class SFRBoxSensor[_T](CoordinatorEntity[SFRDataUpdateCoordinator[_T]], SensorEn @property def native_value(self) -> StateType: """Return the native value of the device.""" + if self.coordinator.data is None: + return None return self.entity_description.value_fn(self.coordinator.data) diff --git a/requirements_all.txt b/requirements_all.txt index 8a5c6a34a07..00ae78cc4bf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2616,7 +2616,7 @@ sensoterra==2.0.1 sentry-sdk==1.40.3 # homeassistant.components.sfr_box -sfrbox-api==0.0.8 +sfrbox-api==0.0.10 # homeassistant.components.sharkiq sharkiq==1.0.2 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 614fb06b132..6faedc8ec8c 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2074,7 +2074,7 @@ sensoterra==2.0.1 sentry-sdk==1.40.3 # homeassistant.components.sfr_box -sfrbox-api==0.0.8 +sfrbox-api==0.0.10 # homeassistant.components.sharkiq sharkiq==1.0.2 diff --git a/tests/components/sfr_box/snapshots/test_diagnostics.ambr b/tests/components/sfr_box/snapshots/test_diagnostics.ambr index 22a914f8a79..69139c2c374 100644 --- a/tests/components/sfr_box/snapshots/test_diagnostics.ambr +++ b/tests/components/sfr_box/snapshots/test_diagnostics.ambr @@ -31,7 +31,7 @@ 'product_id': 'NB6VAC-FXC-r0', 'refclient': '', 'serial_number': '**REDACTED**', - 'temperature': 27560, + 'temperature': 27560.0, 'uptime': 2353575, 'version_bootloader': 'NB6VAC-BOOTLOADER-R4.0.8', 'version_dsldriver': 'NB6VAC-XDSL-A2pv6F039p', @@ -90,7 +90,7 @@ 'product_id': 'NB6VAC-FXC-r0', 'refclient': '', 'serial_number': '**REDACTED**', - 'temperature': 27560, + 'temperature': 27560.0, 'uptime': 2353575, 'version_bootloader': 'NB6VAC-BOOTLOADER-R4.0.8', 'version_dsldriver': 'NB6VAC-XDSL-A2pv6F039p', From f6c681eb5d7eb87c3fb7d8e630738166e064371c Mon Sep 17 00:00:00 2001 From: Kevin Worrel <37058192+dieselrabbit@users.noreply.github.com> Date: Fri, 6 Sep 2024 07:46:06 -0700 Subject: [PATCH 1530/1635] Remove support for area, device, or entity targets for screenlogic actions (#123432) * Remove non-configentry service target * Remove unneeded tests * Remove unneeded issue strings --- .../components/screenlogic/services.py | 53 ++------------ .../components/screenlogic/services.yaml | 2 +- .../components/screenlogic/strings.json | 13 ---- 
tests/components/screenlogic/test_services.py | 72 ------------------- 4 files changed, 7 insertions(+), 133 deletions(-) diff --git a/homeassistant/components/screenlogic/services.py b/homeassistant/components/screenlogic/services.py index 3177f27ab2a..44d8ad3ed81 100644 --- a/homeassistant/components/screenlogic/services.py +++ b/homeassistant/components/screenlogic/services.py @@ -10,12 +10,7 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers import ( - config_validation as cv, - issue_registry as ir, - selector, -) -from homeassistant.helpers.service import async_extract_config_entry_ids +from homeassistant.helpers import selector from .const import ( ATTR_COLOR_MODE, @@ -44,19 +39,10 @@ BASE_SERVICE_SCHEMA = vol.Schema( } ) -SET_COLOR_MODE_SCHEMA = vol.All( - vol.Schema( - { - vol.Optional(ATTR_CONFIG_ENTRY): selector.ConfigEntrySelector( - { - "integration": DOMAIN, - } - ), - **cv.ENTITY_SERVICE_FIELDS, - vol.Required(ATTR_COLOR_MODE): vol.In(SUPPORTED_COLOR_MODES), - } - ), - cv.has_at_least_one_key(ATTR_CONFIG_ENTRY, *cv.ENTITY_SERVICE_FIELDS), +SET_COLOR_MODE_SCHEMA = BASE_SERVICE_SCHEMA.extend( + { + vol.Required(ATTR_COLOR_MODE): vol.In(SUPPORTED_COLOR_MODES), + } ) TURN_ON_SUPER_CHLOR_SCHEMA = BASE_SERVICE_SCHEMA.extend( @@ -72,37 +58,10 @@ TURN_ON_SUPER_CHLOR_SCHEMA = BASE_SERVICE_SCHEMA.extend( def async_load_screenlogic_services(hass: HomeAssistant): """Set up services for the ScreenLogic integration.""" - async def extract_screenlogic_config_entry_ids(service_call: ServiceCall): - if not ( - screenlogic_entry_ids := await async_extract_config_entry_ids( - hass, service_call - ) - ): - raise ServiceValidationError( - f"Failed to call service '{service_call.service}'. 
Config entry for " - "target not found" - ) - return screenlogic_entry_ids - async def get_coordinators( service_call: ServiceCall, ) -> list[ScreenlogicDataUpdateCoordinator]: - entry_ids: set[str] - if entry_id := service_call.data.get(ATTR_CONFIG_ENTRY): - entry_ids = {entry_id} - else: - ir.async_create_issue( - hass, - DOMAIN, - "service_target_deprecation", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=ir.IssueSeverity.WARNING, - translation_key="service_target_deprecation", - ) - entry_ids = await extract_screenlogic_config_entry_ids(service_call) - + entry_ids = {service_call.data[ATTR_CONFIG_ENTRY]} coordinators: list[ScreenlogicDataUpdateCoordinator] = [] for entry_id in entry_ids: config_entry = cast( diff --git a/homeassistant/components/screenlogic/services.yaml b/homeassistant/components/screenlogic/services.yaml index f05537640ca..1dc2e0339f2 100644 --- a/homeassistant/components/screenlogic/services.yaml +++ b/homeassistant/components/screenlogic/services.yaml @@ -2,7 +2,7 @@ set_color_mode: fields: config_entry: - required: false + required: true selector: config_entry: integration: screenlogic diff --git a/homeassistant/components/screenlogic/strings.json b/homeassistant/components/screenlogic/strings.json index 2370d78a6ce..91395a0e86d 100644 --- a/homeassistant/components/screenlogic/strings.json +++ b/homeassistant/components/screenlogic/strings.json @@ -75,18 +75,5 @@ } } } - }, - "issues": { - "service_target_deprecation": { - "title": "Deprecating use of target for ScreenLogic actions", - "fix_flow": { - "step": { - "confirm": { - "title": "Deprecating target for ScreenLogic actions", - "description": "Use of an Area, Device, or Entity as a target for ScreenLogic actions is being deprecated. Instead, use `config_entry` with the entry_id of the desired ScreenLogic integration.\n\nPlease update your automations and scripts and select **submit** to fix this issue." - } - } - } - } } } diff --git a/tests/components/screenlogic/test_services.py b/tests/components/screenlogic/test_services.py index 0fc79fad0e5..8a414ba2596 100644 --- a/tests/components/screenlogic/test_services.py +++ b/tests/components/screenlogic/test_services.py @@ -18,11 +18,9 @@ from homeassistant.components.screenlogic.const import ( SERVICE_STOP_SUPER_CHLORINATION, ) from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import ATTR_AREA_ID, ATTR_DEVICE_ID, ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr -from homeassistant.util import slugify from . 
import ( DATA_FULL_CHEM, @@ -102,22 +100,6 @@ async def setup_screenlogic_services_fixture( }, None, ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_AREA_ID: MOCK_DEVICE_AREA, - }, - ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_ENTITY_ID: f"{Platform.SENSOR}.{slugify(f'{MOCK_ADAPTER_NAME} Air Temperature')}", - }, - ), ], ) async def test_service_set_color_mode( @@ -148,30 +130,6 @@ async def test_service_set_color_mode( mocked_async_set_color_lights.assert_awaited_once() -async def test_service_set_color_mode_with_device( - hass: HomeAssistant, - service_fixture: dict[str, Any], -) -> None: - """Test set_color_mode service with a device target.""" - mocked_async_set_color_lights: AsyncMock = service_fixture["gateway"][ - "async_set_color_lights" - ] - - assert hass.services.has_service(DOMAIN, SERVICE_SET_COLOR_MODE) - - sl_device: dr.DeviceEntry = service_fixture["device"] - - await hass.services.async_call( - DOMAIN, - SERVICE_SET_COLOR_MODE, - service_data={ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower()}, - blocking=True, - target={ATTR_DEVICE_ID: sl_device.id}, - ) - - mocked_async_set_color_lights.assert_awaited_once() - - @pytest.mark.parametrize( ("data", "target", "error_msg"), [ @@ -193,36 +151,6 @@ async def test_service_set_color_mode_with_device( f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry " "'test' is not a screenlogic config", ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_AREA_ID: "invalidareaid", - }, - f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " - "target not found", - ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_DEVICE_ID: "invaliddeviceid", - }, - f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. Config entry for " - "target not found", - ), - ( - { - ATTR_COLOR_MODE: COLOR_MODE.ALL_ON.name.lower(), - }, - { - ATTR_ENTITY_ID: "sensor.invalidentityid", - }, - f"Failed to call service '{SERVICE_SET_COLOR_MODE}'. 
Config entry for " - "target not found", - ), ], ) async def test_service_set_color_mode_error( From b6d45a5a07d6e5cb7fb294caaf43f297a905d6c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Jaworski?= Date: Fri, 6 Sep 2024 16:46:54 +0200 Subject: [PATCH 1531/1635] Bump blebox_uniapi to v2.5.0 (#124298) blebox: bump blebox_uniapi to v2.5.0 --- homeassistant/components/blebox/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/blebox/manifest.json b/homeassistant/components/blebox/manifest.json index a2c6495cc56..83ec27f6eef 100644 --- a/homeassistant/components/blebox/manifest.json +++ b/homeassistant/components/blebox/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/blebox", "iot_class": "local_polling", "loggers": ["blebox_uniapi"], - "requirements": ["blebox-uniapi==2.4.2"], + "requirements": ["blebox-uniapi==2.5.0"], "zeroconf": ["_bbxsrv._tcp.local."] } diff --git a/requirements_all.txt b/requirements_all.txt index 00ae78cc4bf..be2ddf6a97c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -575,7 +575,7 @@ bleak-retry-connector==3.5.0 bleak==0.22.2 # homeassistant.components.blebox -blebox-uniapi==2.4.2 +blebox-uniapi==2.5.0 # homeassistant.components.blink blinkpy==0.23.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6faedc8ec8c..b4472ca9144 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -506,7 +506,7 @@ bleak-retry-connector==3.5.0 bleak==0.22.2 # homeassistant.components.blebox -blebox-uniapi==2.4.2 +blebox-uniapi==2.5.0 # homeassistant.components.blink blinkpy==0.23.0 From f126a6024ed7d4ac57475afc7df2370ce7ea3eb7 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Fri, 6 Sep 2024 10:48:42 -0400 Subject: [PATCH 1532/1635] Migrate ESPHome to assist satellite (#125383) * Migrate ESPHome to assist satellite * Address comments --- .../components/esphome/assist_satellite.py | 504 +++++++++ homeassistant/components/esphome/manager.py | 114 +-- .../components/esphome/voice_assistant.py | 479 --------- tests/components/esphome/conftest.py | 60 -- .../esphome/test_assist_satellite.py | 822 +++++++++++++++ tests/components/esphome/test_manager.py | 108 +- .../esphome/test_voice_assistant.py | 964 ------------------ 7 files changed, 1337 insertions(+), 1714 deletions(-) create mode 100644 homeassistant/components/esphome/assist_satellite.py delete mode 100644 homeassistant/components/esphome/voice_assistant.py create mode 100644 tests/components/esphome/test_assist_satellite.py delete mode 100644 tests/components/esphome/test_voice_assistant.py diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py new file mode 100644 index 00000000000..48bb9ec5507 --- /dev/null +++ b/homeassistant/components/esphome/assist_satellite.py @@ -0,0 +1,504 @@ +"""Support for assist satellites in ESPHome.""" + +from __future__ import annotations + +import asyncio +from collections.abc import AsyncIterable +from functools import partial +import io +import logging +import socket +from typing import Any, cast +import wave + +from aioesphomeapi import ( + VoiceAssistantAudioSettings, + VoiceAssistantCommandFlag, + VoiceAssistantEventType, + VoiceAssistantFeature, + VoiceAssistantTimerEventType, +) + +from homeassistant.components import assist_satellite, tts +from homeassistant.components.assist_pipeline import ( + 
PipelineEvent, + PipelineEventType, + PipelineStage, +) +from homeassistant.components.intent import async_register_timer_handler +from homeassistant.components.intent.timers import TimerEventType, TimerInfo +from homeassistant.components.media_player import async_process_play_media_url +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN +from .entity import EsphomeAssistEntity +from .entry_data import ESPHomeConfigEntry, RuntimeEntryData +from .enum_mapper import EsphomeEnumMapper + +_LOGGER = logging.getLogger(__name__) + +_VOICE_ASSISTANT_EVENT_TYPES: EsphomeEnumMapper[ + VoiceAssistantEventType, PipelineEventType +] = EsphomeEnumMapper( + { + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: PipelineEventType.ERROR, + VoiceAssistantEventType.VOICE_ASSISTANT_RUN_START: PipelineEventType.RUN_START, + VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END: PipelineEventType.RUN_END, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: PipelineEventType.STT_START, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: PipelineEventType.STT_END, + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START: PipelineEventType.INTENT_START, + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: PipelineEventType.INTENT_END, + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: PipelineEventType.TTS_START, + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: PipelineEventType.TTS_END, + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START: PipelineEventType.WAKE_WORD_START, + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: PipelineEventType.WAKE_WORD_END, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_START: PipelineEventType.STT_VAD_START, + VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_END: PipelineEventType.STT_VAD_END, + } +) + +_TIMER_EVENT_TYPES: EsphomeEnumMapper[VoiceAssistantTimerEventType, TimerEventType] = ( + EsphomeEnumMapper( + { + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED: TimerEventType.STARTED, + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED: TimerEventType.UPDATED, + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_CANCELLED: TimerEventType.CANCELLED, + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_FINISHED: TimerEventType.FINISHED, + } + ) +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ESPHomeConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Assist satellite entity.""" + entry_data = entry.runtime_data + assert entry_data.device_info is not None + if entry_data.device_info.voice_assistant_feature_flags_compat( + entry_data.api_version + ): + async_add_entities( + [ + EsphomeAssistSatellite(entry, entry_data), + ] + ) + + +class EsphomeAssistSatellite( + EsphomeAssistEntity, assist_satellite.AssistSatelliteEntity +): + """Satellite running ESPHome.""" + + entity_description = assist_satellite.AssistSatelliteEntityDescription( + key="assist_satellite", + translation_key="assist_satellite", + entity_category=EntityCategory.CONFIG, + ) + + def __init__( + self, + config_entry: ConfigEntry, + entry_data: RuntimeEntryData, + ) -> None: + """Initialize satellite.""" + super().__init__(entry_data) + + self.config_entry = config_entry + self.entry_data = entry_data + self.cli = self.entry_data.client + + self._is_running: bool = True + 
self._pipeline_task: asyncio.Task | None = None + self._audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + self._tts_streaming_task: asyncio.Task | None = None + self._udp_server: VoiceAssistantUDPServer | None = None + + @property + def pipeline_entity_id(self) -> str | None: + """Return the entity ID of the pipeline to use for the next conversation.""" + assert self.entry_data.device_info is not None + ent_reg = er.async_get(self.hass) + return ent_reg.async_get_entity_id( + Platform.SELECT, + DOMAIN, + f"{self.entry_data.device_info.mac_address}-pipeline", + ) + + @property + def vad_sensitivity_entity_id(self) -> str | None: + """Return the entity ID of the VAD sensitivity to use for the next conversation.""" + assert self.entry_data.device_info is not None + ent_reg = er.async_get(self.hass) + return ent_reg.async_get_entity_id( + Platform.SELECT, + DOMAIN, + f"{self.entry_data.device_info.mac_address}-vad_sensitivity", + ) + + async def async_added_to_hass(self) -> None: + """Run when entity about to be added to hass.""" + await super().async_added_to_hass() + + assert self.entry_data.device_info is not None + feature_flags = ( + self.entry_data.device_info.voice_assistant_feature_flags_compat( + self.entry_data.api_version + ) + ) + if feature_flags & VoiceAssistantFeature.API_AUDIO: + # TCP audio + self.entry_data.disconnect_callbacks.add( + self.cli.subscribe_voice_assistant( + handle_start=self.handle_pipeline_start, + handle_stop=self.handle_pipeline_stop, + handle_audio=self.handle_audio, + ) + ) + else: + # UDP audio + self.entry_data.disconnect_callbacks.add( + self.cli.subscribe_voice_assistant( + handle_start=self.handle_pipeline_start, + handle_stop=self.handle_pipeline_stop, + ) + ) + + if feature_flags & VoiceAssistantFeature.TIMERS: + # Device supports timers + assert (self.registry_entry is not None) and ( + self.registry_entry.device_id is not None + ) + self.entry_data.disconnect_callbacks.add( + async_register_timer_handler( + self.hass, self.registry_entry.device_id, self.handle_timer_event + ) + ) + + async def async_will_remove_from_hass(self) -> None: + """Run when entity will be removed from hass.""" + await super().async_will_remove_from_hass() + + self._is_running = False + self._stop_pipeline() + + def on_pipeline_event(self, event: PipelineEvent) -> None: + """Handle pipeline events.""" + try: + event_type = _VOICE_ASSISTANT_EVENT_TYPES.from_hass(event.type) + except KeyError: + _LOGGER.debug("Received unknown pipeline event type: %s", event.type) + return + + data_to_send: dict[str, Any] = {} + if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: + self.entry_data.async_set_assist_pipeline_state(True) + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: + assert event.data is not None + data_to_send = {"text": event.data["stt_output"]["text"]} + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: + assert event.data is not None + data_to_send = { + "conversation_id": event.data["intent_output"]["conversation_id"] or "", + } + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: + assert event.data is not None + data_to_send = {"text": event.data["tts_input"]} + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: + assert event.data is not None + if tts_output := event.data["tts_output"]: + path = tts_output["url"] + url = async_process_play_media_url(self.hass, path) + data_to_send = {"url": url} + + assert self.entry_data.device_info is not None + feature_flags = 
( + self.entry_data.device_info.voice_assistant_feature_flags_compat( + self.entry_data.api_version + ) + ) + if feature_flags & VoiceAssistantFeature.SPEAKER: + media_id = tts_output["media_id"] + self._tts_streaming_task = ( + self.config_entry.async_create_background_task( + self.hass, + self._stream_tts_audio(media_id), + "esphome_voice_assistant_tts", + ) + ) + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: + assert event.data is not None + if not event.data["wake_word_output"]: + event_type = VoiceAssistantEventType.VOICE_ASSISTANT_ERROR + data_to_send = { + "code": "no_wake_word", + "message": "No wake word detected", + } + elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: + assert event.data is not None + data_to_send = { + "code": event.data["code"], + "message": event.data["message"], + } + + self.cli.send_voice_assistant_event(event_type, data_to_send) + + async def handle_pipeline_start( + self, + conversation_id: str, + flags: int, + audio_settings: VoiceAssistantAudioSettings, + wake_word_phrase: str | None, + ) -> int | None: + """Handle pipeline run request.""" + # Clear audio queue + while not self._audio_queue.empty(): + await self._audio_queue.get() + + if self._tts_streaming_task is not None: + # Cancel current TTS response + self._tts_streaming_task.cancel() + self._tts_streaming_task = None + + # API or UDP output audio + port: int = 0 + assert self.entry_data.device_info is not None + feature_flags = ( + self.entry_data.device_info.voice_assistant_feature_flags_compat( + self.entry_data.api_version + ) + ) + if (feature_flags & VoiceAssistantFeature.SPEAKER) and not ( + feature_flags & VoiceAssistantFeature.API_AUDIO + ): + port = await self._start_udp_server() + _LOGGER.debug("Started UDP server on port %s", port) + + # Device triggered pipeline (wake word, etc.) 
+ if flags & VoiceAssistantCommandFlag.USE_WAKE_WORD: + start_stage = PipelineStage.WAKE_WORD + else: + start_stage = PipelineStage.STT + + end_stage = PipelineStage.TTS + + # Run the pipeline + _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage) + self.entry_data.async_set_assist_pipeline_state(True) + self._pipeline_task = self.config_entry.async_create_background_task( + self.hass, + self.async_accept_pipeline_from_satellite( + audio_stream=self._wrap_audio_stream(), + start_stage=start_stage, + end_stage=end_stage, + wake_word_phrase=wake_word_phrase, + ), + "esphome_assist_satellite_pipeline", + ) + self._pipeline_task.add_done_callback( + lambda _future: self.handle_pipeline_finished() + ) + + return port + + async def handle_audio(self, data: bytes) -> None: + """Handle incoming audio chunk from API.""" + self._audio_queue.put_nowait(data) + + async def handle_pipeline_stop(self) -> None: + """Handle request for pipeline to stop.""" + self._stop_pipeline() + + def handle_pipeline_finished(self) -> None: + """Handle when pipeline has finished running.""" + self.entry_data.async_set_assist_pipeline_state(False) + self._stop_udp_server() + _LOGGER.debug("Pipeline finished") + + def handle_timer_event( + self, event_type: TimerEventType, timer_info: TimerInfo + ) -> None: + """Handle timer events.""" + try: + native_event_type = _TIMER_EVENT_TYPES.from_hass(event_type) + except KeyError: + _LOGGER.debug("Received unknown timer event type: %s", event_type) + return + + self.cli.send_voice_assistant_timer_event( + native_event_type, + timer_info.id, + timer_info.name, + timer_info.created_seconds, + timer_info.seconds_left, + timer_info.is_active, + ) + + async def _stream_tts_audio( + self, + media_id: str, + sample_rate: int = 16000, + sample_width: int = 2, + sample_channels: int = 1, + samples_per_chunk: int = 512, + ) -> None: + """Stream TTS audio chunks to device via API or UDP.""" + self.cli.send_voice_assistant_event( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, {} + ) + + try: + if not self._is_running: + return + + extension, data = await tts.async_get_media_source_audio( + self.hass, + media_id, + ) + + if extension != "wav": + _LOGGER.error("Only WAV audio can be streamed, got %s", extension) + return + + with io.BytesIO(data) as wav_io, wave.open(wav_io, "rb") as wav_file: + if ( + (wav_file.getframerate() != sample_rate) + or (wav_file.getsampwidth() != sample_width) + or (wav_file.getnchannels() != sample_channels) + ): + _LOGGER.error("Can only stream 16Khz 16-bit mono WAV") + return + + _LOGGER.debug("Streaming %s audio samples", wav_file.getnframes()) + + while self._is_running: + chunk = wav_file.readframes(samples_per_chunk) + if not chunk: + break + + if self._udp_server is not None: + self._udp_server.send_audio_bytes(chunk) + else: + self.cli.send_voice_assistant_audio(chunk) + + # Wait for 90% of the duration of the audio that was + # sent for it to be played. This will overrun the + # device's buffer for very long audio, so using a media + # player is preferred. 
+ samples_in_chunk = len(chunk) // (sample_width * sample_channels) + seconds_in_chunk = samples_in_chunk / sample_rate + await asyncio.sleep(seconds_in_chunk * 0.9) + except asyncio.CancelledError: + return # Don't trigger state change + finally: + self.cli.send_voice_assistant_event( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, {} + ) + + # State change + self.tts_response_finished() + + async def _wrap_audio_stream(self) -> AsyncIterable[bytes]: + """Yield audio chunks from the queue until None.""" + while True: + chunk = await self._audio_queue.get() + if not chunk: + break + + yield chunk + + def _stop_pipeline(self) -> None: + """Request pipeline to be stopped.""" + self._audio_queue.put_nowait(None) + _LOGGER.debug("Requested pipeline stop") + + async def _start_udp_server(self) -> int: + """Start a UDP server on a random free port.""" + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(("", 0)) # random free port + + ( + _transport, + protocol, + ) = await asyncio.get_running_loop().create_datagram_endpoint( + partial(VoiceAssistantUDPServer, self._audio_queue), sock=sock + ) + + assert isinstance(protocol, VoiceAssistantUDPServer) + self._udp_server = protocol + + # Return port + return cast(int, sock.getsockname()[1]) + + def _stop_udp_server(self) -> None: + """Stop the UDP server if it's running.""" + if self._udp_server is None: + return + + try: + self._udp_server.close() + finally: + self._udp_server = None + + _LOGGER.debug("Stopped UDP server") + + +class VoiceAssistantUDPServer(asyncio.DatagramProtocol): + """Receive UDP packets and forward them to the audio queue.""" + + transport: asyncio.DatagramTransport | None = None + remote_addr: tuple[str, int] | None = None + + def __init__( + self, audio_queue: asyncio.Queue[bytes | None], *args: Any, **kwargs: Any + ) -> None: + """Initialize protocol.""" + super().__init__(*args, **kwargs) + self._audio_queue = audio_queue + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + """Store transport for later use.""" + self.transport = cast(asyncio.DatagramTransport, transport) + + def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: + """Handle incoming UDP packet.""" + if self.remote_addr is None: + self.remote_addr = addr + + self._audio_queue.put_nowait(data) + + def error_received(self, exc: Exception) -> None: + """Handle when a send or receive operation raises an OSError. + + (Other than BlockingIOError or InterruptedError.) 
+ """ + _LOGGER.error("ESPHome Voice Assistant UDP server error received: %s", exc) + + # Stop pipeline + self._audio_queue.put_nowait(None) + + def close(self) -> None: + """Close the receiver.""" + if self.transport is not None: + self.transport.close() + + self.remote_addr = None + + def send_audio_bytes(self, data: bytes) -> None: + """Send bytes to the device via UDP.""" + if self.transport is None: + _LOGGER.error("No transport to send audio to") + return + + if self.remote_addr is None: + _LOGGER.error("No address to send audio to") + return + + self.transport.sendto(data, self.remote_addr) diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index 93e8d7b5bc2..09c3cc3b7cb 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -20,19 +20,17 @@ from aioesphomeapi import ( RequiresEncryptionAPIError, UserService, UserServiceArgType, - VoiceAssistantAudioSettings, - VoiceAssistantFeature, ) from awesomeversion import AwesomeVersion import voluptuous as vol from homeassistant.components import tag, zeroconf -from homeassistant.components.intent import async_register_timer_handler from homeassistant.const import ( ATTR_DEVICE_ID, CONF_MODE, EVENT_HOMEASSISTANT_CLOSE, EVENT_LOGGING_CHANGED, + Platform, ) from homeassistant.core import ( Event, @@ -73,12 +71,6 @@ from .domain_data import DomainData # Import config flow so that it's added to the registry from .entry_data import ESPHomeConfigEntry, RuntimeEntryData -from .voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantPipeline, - VoiceAssistantUDPPipeline, - handle_timer_event, -) _LOGGER = logging.getLogger(__name__) @@ -149,7 +141,6 @@ class ESPHomeManager: "cli", "device_id", "domain_data", - "voice_assistant_pipeline", "reconnect_logic", "zeroconf_instance", "entry_data", @@ -173,7 +164,6 @@ class ESPHomeManager: self.cli = cli self.device_id: str | None = None self.domain_data = domain_data - self.voice_assistant_pipeline: VoiceAssistantPipeline | None = None self.reconnect_logic: ReconnectLogic | None = None self.zeroconf_instance = zeroconf_instance self.entry_data = entry.runtime_data @@ -338,77 +328,6 @@ class ESPHomeManager: entity_id, attribute, self.hass.states.get(entity_id) ) - def _handle_pipeline_finished(self) -> None: - self.entry_data.async_set_assist_pipeline_state(False) - - if self.voice_assistant_pipeline is not None: - if isinstance(self.voice_assistant_pipeline, VoiceAssistantUDPPipeline): - self.voice_assistant_pipeline.close() - self.voice_assistant_pipeline = None - - async def _handle_pipeline_start( - self, - conversation_id: str, - flags: int, - audio_settings: VoiceAssistantAudioSettings, - wake_word_phrase: str | None, - ) -> int | None: - """Start a voice assistant pipeline.""" - if self.voice_assistant_pipeline is not None: - _LOGGER.warning("Previous Voice assistant pipeline was not stopped") - self.voice_assistant_pipeline.stop() - self.voice_assistant_pipeline = None - - hass = self.hass - assert self.entry_data.device_info is not None - if ( - self.entry_data.device_info.voice_assistant_feature_flags_compat( - self.entry_data.api_version - ) - & VoiceAssistantFeature.API_AUDIO - ): - self.voice_assistant_pipeline = VoiceAssistantAPIPipeline( - hass, - self.entry_data, - self.cli.send_voice_assistant_event, - self._handle_pipeline_finished, - self.cli, - ) - port = 0 - else: - self.voice_assistant_pipeline = VoiceAssistantUDPPipeline( - hass, - self.entry_data, - 
self.cli.send_voice_assistant_event, - self._handle_pipeline_finished, - ) - port = await self.voice_assistant_pipeline.start_server() - - assert self.device_id is not None, "Device ID must be set" - hass.async_create_background_task( - self.voice_assistant_pipeline.run_pipeline( - device_id=self.device_id, - conversation_id=conversation_id or None, - flags=flags, - audio_settings=audio_settings, - wake_word_phrase=wake_word_phrase, - ), - "esphome.voice_assistant_pipeline.run_pipeline", - ) - - return port - - async def _handle_pipeline_stop(self) -> None: - """Stop a voice assistant pipeline.""" - if self.voice_assistant_pipeline is not None: - self.voice_assistant_pipeline.stop() - - async def _handle_audio(self, data: bytes) -> None: - if self.voice_assistant_pipeline is None: - return - assert isinstance(self.voice_assistant_pipeline, VoiceAssistantAPIPipeline) - self.voice_assistant_pipeline.receive_audio_bytes(data) - async def on_connect(self) -> None: """Subscribe to states and list entities on successful API login.""" try: @@ -509,29 +428,14 @@ class ESPHomeManager: ) ) - flags = device_info.voice_assistant_feature_flags_compat(api_version) - if flags: - if flags & VoiceAssistantFeature.API_AUDIO: - entry_data.disconnect_callbacks.add( - cli.subscribe_voice_assistant( - handle_start=self._handle_pipeline_start, - handle_stop=self._handle_pipeline_stop, - handle_audio=self._handle_audio, - ) - ) - else: - entry_data.disconnect_callbacks.add( - cli.subscribe_voice_assistant( - handle_start=self._handle_pipeline_start, - handle_stop=self._handle_pipeline_stop, - ) - ) - if flags & VoiceAssistantFeature.TIMERS: - entry_data.disconnect_callbacks.add( - async_register_timer_handler( - hass, self.device_id, partial(handle_timer_event, cli) - ) - ) + if device_info.voice_assistant_feature_flags_compat(api_version) and ( + Platform.ASSIST_SATELLITE not in entry_data.loaded_platforms + ): + # Create assist satellite entity + await self.hass.config_entries.async_forward_entry_setups( + self.entry, [Platform.ASSIST_SATELLITE] + ) + entry_data.loaded_platforms.add(Platform.ASSIST_SATELLITE) cli.subscribe_states(entry_data.async_update_state) cli.subscribe_service_calls(self.async_on_service_call) diff --git a/homeassistant/components/esphome/voice_assistant.py b/homeassistant/components/esphome/voice_assistant.py deleted file mode 100644 index eb55be2ced6..00000000000 --- a/homeassistant/components/esphome/voice_assistant.py +++ /dev/null @@ -1,479 +0,0 @@ -"""ESPHome voice assistant support.""" - -from __future__ import annotations - -import asyncio -from collections.abc import AsyncIterable, Callable -import io -import logging -import socket -from typing import cast -import wave - -from aioesphomeapi import ( - APIClient, - VoiceAssistantAudioSettings, - VoiceAssistantCommandFlag, - VoiceAssistantEventType, - VoiceAssistantFeature, - VoiceAssistantTimerEventType, -) - -from homeassistant.components import stt, tts -from homeassistant.components.assist_pipeline import ( - AudioSettings, - PipelineEvent, - PipelineEventType, - PipelineNotFound, - PipelineStage, - WakeWordSettings, - async_pipeline_from_audio_stream, - select as pipeline_select, -) -from homeassistant.components.assist_pipeline.error import ( - WakeWordDetectionAborted, - WakeWordDetectionError, -) -from homeassistant.components.assist_pipeline.vad import VadSensitivity -from homeassistant.components.intent.timers import TimerEventType, TimerInfo -from homeassistant.components.media_player import async_process_play_media_url 
-from homeassistant.core import Context, HomeAssistant, callback - -from .const import DOMAIN -from .entry_data import RuntimeEntryData -from .enum_mapper import EsphomeEnumMapper - -_LOGGER = logging.getLogger(__name__) - -UDP_PORT = 0 # Set to 0 to let the OS pick a free random port -UDP_MAX_PACKET_SIZE = 1024 - -_VOICE_ASSISTANT_EVENT_TYPES: EsphomeEnumMapper[ - VoiceAssistantEventType, PipelineEventType -] = EsphomeEnumMapper( - { - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: PipelineEventType.ERROR, - VoiceAssistantEventType.VOICE_ASSISTANT_RUN_START: PipelineEventType.RUN_START, - VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END: PipelineEventType.RUN_END, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: PipelineEventType.STT_START, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: PipelineEventType.STT_END, - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START: PipelineEventType.INTENT_START, - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: PipelineEventType.INTENT_END, - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: PipelineEventType.TTS_START, - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: PipelineEventType.TTS_END, - VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START: PipelineEventType.WAKE_WORD_START, - VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: PipelineEventType.WAKE_WORD_END, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_START: PipelineEventType.STT_VAD_START, - VoiceAssistantEventType.VOICE_ASSISTANT_STT_VAD_END: PipelineEventType.STT_VAD_END, - } -) - -_TIMER_EVENT_TYPES: EsphomeEnumMapper[VoiceAssistantTimerEventType, TimerEventType] = ( - EsphomeEnumMapper( - { - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED: TimerEventType.STARTED, - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED: TimerEventType.UPDATED, - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_CANCELLED: TimerEventType.CANCELLED, - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_FINISHED: TimerEventType.FINISHED, - } - ) -) - - -class VoiceAssistantPipeline: - """Base abstract pipeline class.""" - - started = False - stop_requested = False - - def __init__( - self, - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - ) -> None: - """Initialize the pipeline.""" - self.context = Context() - self.hass = hass - self.entry_data = entry_data - assert entry_data.device_info is not None - self.device_info = entry_data.device_info - - self.queue: asyncio.Queue[bytes] = asyncio.Queue() - self.handle_event = handle_event - self.handle_finished = handle_finished - self._tts_done = asyncio.Event() - self._tts_task: asyncio.Task | None = None - - @property - def is_running(self) -> bool: - """True if the pipeline is started and hasn't been asked to stop.""" - return self.started and (not self.stop_requested) - - async def _iterate_packets(self) -> AsyncIterable[bytes]: - """Iterate over incoming packets.""" - while data := await self.queue.get(): - if not self.is_running: - break - - yield data - - def _event_callback(self, event: PipelineEvent) -> None: - """Handle pipeline events.""" - - try: - event_type = _VOICE_ASSISTANT_EVENT_TYPES.from_hass(event.type) - except KeyError: - _LOGGER.debug("Received unknown pipeline event type: %s", event.type) - return - - data_to_send = None - error = False - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_START: - self.entry_data.async_set_assist_pipeline_state(True) - elif 
event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: - assert event.data is not None - data_to_send = {"text": event.data["stt_output"]["text"]} - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END: - assert event.data is not None - data_to_send = { - "conversation_id": event.data["intent_output"]["conversation_id"] or "", - } - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: - assert event.data is not None - data_to_send = {"text": event.data["tts_input"]} - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: - assert event.data is not None - tts_output = event.data["tts_output"] - if tts_output: - path = tts_output["url"] - url = async_process_play_media_url(self.hass, path) - data_to_send = {"url": url} - - if ( - self.device_info.voice_assistant_feature_flags_compat( - self.entry_data.api_version - ) - & VoiceAssistantFeature.SPEAKER - ): - media_id = tts_output["media_id"] - self._tts_task = self.hass.async_create_background_task( - self._send_tts(media_id), "esphome_voice_assistant_tts" - ) - else: - self._tts_done.set() - else: - # Empty TTS response - data_to_send = {} - self._tts_done.set() - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: - assert event.data is not None - if not event.data["wake_word_output"]: - event_type = VoiceAssistantEventType.VOICE_ASSISTANT_ERROR - data_to_send = { - "code": "no_wake_word", - "message": "No wake word detected", - } - error = True - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: - assert event.data is not None - data_to_send = { - "code": event.data["code"], - "message": event.data["message"], - } - error = True - - self.handle_event(event_type, data_to_send) - if error: - self._tts_done.set() - self.handle_finished() - - async def run_pipeline( - self, - device_id: str, - conversation_id: str | None, - flags: int = 0, - audio_settings: VoiceAssistantAudioSettings | None = None, - wake_word_phrase: str | None = None, - ) -> None: - """Run the Voice Assistant pipeline.""" - if audio_settings is None or audio_settings.volume_multiplier == 0: - audio_settings = VoiceAssistantAudioSettings() - - if ( - self.device_info.voice_assistant_feature_flags_compat( - self.entry_data.api_version - ) - & VoiceAssistantFeature.SPEAKER - ): - tts_audio_output = "wav" - else: - tts_audio_output = "mp3" - - _LOGGER.debug("Starting pipeline") - if flags & VoiceAssistantCommandFlag.USE_WAKE_WORD: - start_stage = PipelineStage.WAKE_WORD - else: - start_stage = PipelineStage.STT - try: - await async_pipeline_from_audio_stream( - self.hass, - context=self.context, - event_callback=self._event_callback, - stt_metadata=stt.SpeechMetadata( - language="", # set in async_pipeline_from_audio_stream - format=stt.AudioFormats.WAV, - codec=stt.AudioCodecs.PCM, - bit_rate=stt.AudioBitRates.BITRATE_16, - sample_rate=stt.AudioSampleRates.SAMPLERATE_16000, - channel=stt.AudioChannels.CHANNEL_MONO, - ), - stt_stream=self._iterate_packets(), - pipeline_id=pipeline_select.get_chosen_pipeline( - self.hass, DOMAIN, self.device_info.mac_address - ), - conversation_id=conversation_id, - device_id=device_id, - tts_audio_output=tts_audio_output, - start_stage=start_stage, - wake_word_settings=WakeWordSettings(timeout=5), - wake_word_phrase=wake_word_phrase, - audio_settings=AudioSettings( - noise_suppression_level=audio_settings.noise_suppression_level, - auto_gain_dbfs=audio_settings.auto_gain, - volume_multiplier=audio_settings.volume_multiplier, - is_vad_enabled=bool(flags 
& VoiceAssistantCommandFlag.USE_VAD), - silence_seconds=VadSensitivity.to_seconds( - pipeline_select.get_vad_sensitivity( - self.hass, DOMAIN, self.device_info.mac_address - ) - ), - ), - ) - - # Block until TTS is done sending - await self._tts_done.wait() - - _LOGGER.debug("Pipeline finished") - except PipelineNotFound as e: - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - { - "code": e.code, - "message": e.message, - }, - ) - _LOGGER.warning("Pipeline not found") - except WakeWordDetectionAborted: - pass # Wake word detection was aborted and `handle_finished` is enough. - except WakeWordDetectionError as e: - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - { - "code": e.code, - "message": e.message, - }, - ) - finally: - self.handle_finished() - - async def _send_tts(self, media_id: str) -> None: - """Send TTS audio to device via UDP.""" - # Always send stream start/end events - self.handle_event(VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, {}) - - try: - if not self.is_running: - return - - extension, data = await tts.async_get_media_source_audio( - self.hass, - media_id, - ) - - if extension != "wav": - raise ValueError(f"Only WAV audio can be streamed, got {extension}") - - with io.BytesIO(data) as wav_io: - with wave.open(wav_io, "rb") as wav_file: - sample_rate = wav_file.getframerate() - sample_width = wav_file.getsampwidth() - sample_channels = wav_file.getnchannels() - - if ( - (sample_rate != 16000) - or (sample_width != 2) - or (sample_channels != 1) - ): - raise ValueError( - "Expected rate/width/channels as 16000/2/1," - " got {sample_rate}/{sample_width}/{sample_channels}}" - ) - - audio_bytes = wav_file.readframes(wav_file.getnframes()) - - audio_bytes_size = len(audio_bytes) - - _LOGGER.debug("Sending %d bytes of audio", audio_bytes_size) - - bytes_per_sample = stt.AudioBitRates.BITRATE_16 // 8 - sample_offset = 0 - samples_left = audio_bytes_size // bytes_per_sample - - while (samples_left > 0) and self.is_running: - bytes_offset = sample_offset * bytes_per_sample - chunk: bytes = audio_bytes[bytes_offset : bytes_offset + 1024] - samples_in_chunk = len(chunk) // bytes_per_sample - samples_left -= samples_in_chunk - - self.send_audio_bytes(chunk) - await asyncio.sleep( - samples_in_chunk / stt.AudioSampleRates.SAMPLERATE_16000 * 0.9 - ) - - sample_offset += samples_in_chunk - finally: - self.handle_event( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, {} - ) - self._tts_task = None - self._tts_done.set() - - def send_audio_bytes(self, data: bytes) -> None: - """Send bytes to the device.""" - raise NotImplementedError - - def stop(self) -> None: - """Stop the pipeline.""" - self.queue.put_nowait(b"") - - -class VoiceAssistantUDPPipeline(asyncio.DatagramProtocol, VoiceAssistantPipeline): - """Receive UDP packets and forward them to the voice assistant.""" - - transport: asyncio.DatagramTransport | None = None - remote_addr: tuple[str, int] | None = None - - async def start_server(self) -> int: - """Start accepting connections.""" - - def accept_connection() -> VoiceAssistantUDPPipeline: - """Accept connection.""" - if self.started: - raise RuntimeError("Can only start once") - if self.stop_requested: - raise RuntimeError("No longer accepting connections") - - self.started = True - return self - - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.setblocking(False) - - sock.bind(("", UDP_PORT)) - - await asyncio.get_running_loop().create_datagram_endpoint( - accept_connection, sock=sock - ) - - 
return cast(int, sock.getsockname()[1]) - - @callback - def connection_made(self, transport: asyncio.BaseTransport) -> None: - """Store transport for later use.""" - self.transport = cast(asyncio.DatagramTransport, transport) - - @callback - def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: - """Handle incoming UDP packet.""" - if not self.is_running: - return - if self.remote_addr is None: - self.remote_addr = addr - self.queue.put_nowait(data) - - def error_received(self, exc: Exception) -> None: - """Handle when a send or receive operation raises an OSError. - - (Other than BlockingIOError or InterruptedError.) - """ - _LOGGER.error("ESPHome Voice Assistant UDP server error received: %s", exc) - self.handle_finished() - - @callback - def stop(self) -> None: - """Stop the receiver.""" - super().stop() - self.close() - - def close(self) -> None: - """Close the receiver.""" - self.started = False - self.stop_requested = True - - if self.transport is not None: - self.transport.close() - - def send_audio_bytes(self, data: bytes) -> None: - """Send bytes to the device via UDP.""" - if self.transport is None: - _LOGGER.error("No transport to send audio to") - return - self.transport.sendto(data, self.remote_addr) - - -class VoiceAssistantAPIPipeline(VoiceAssistantPipeline): - """Send audio to the voice assistant via the API.""" - - def __init__( - self, - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - api_client: APIClient, - ) -> None: - """Initialize the pipeline.""" - super().__init__(hass, entry_data, handle_event, handle_finished) - self.api_client = api_client - self.started = True - - def send_audio_bytes(self, data: bytes) -> None: - """Send bytes to the device via the API.""" - self.api_client.send_voice_assistant_audio(data) - - @callback - def receive_audio_bytes(self, data: bytes) -> None: - """Receive audio bytes from the device.""" - if not self.is_running: - return - self.queue.put_nowait(data) - - @callback - def stop(self) -> None: - """Stop the pipeline.""" - super().stop() - - self.started = False - self.stop_requested = True - - -def handle_timer_event( - api_client: APIClient, event_type: TimerEventType, timer_info: TimerInfo -) -> None: - """Handle timer events.""" - try: - native_event_type = _TIMER_EVENT_TYPES.from_hass(event_type) - except KeyError: - _LOGGER.debug("Received unknown timer event type: %s", event_type) - return - - api_client.send_voice_assistant_timer_event( - native_event_type, - timer_info.id, - timer_info.name, - timer_info.created_seconds, - timer_info.seconds_left, - timer_info.is_active, - ) diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index b3966875a31..af68df89360 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -20,7 +20,6 @@ from aioesphomeapi import ( ReconnectLogic, UserService, VoiceAssistantAudioSettings, - VoiceAssistantEventType, VoiceAssistantFeature, ) import pytest @@ -34,11 +33,6 @@ from homeassistant.components.esphome.const import ( DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS, DOMAIN, ) -from homeassistant.components.esphome.entry_data import RuntimeEntryData -from homeassistant.components.esphome.voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantUDPPipeline, -) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant 
from homeassistant.setup import async_setup_component @@ -625,57 +619,3 @@ async def mock_esphome_device( ) return _mock_device - - -@pytest.fixture -def mock_voice_assistant_api_pipeline() -> VoiceAssistantAPIPipeline: - """Return the API Pipeline factory.""" - mock_pipeline = Mock(spec=VoiceAssistantAPIPipeline) - - def mock_constructor( - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - api_client: APIClient, - ): - """Fake the constructor.""" - mock_pipeline.hass = hass - mock_pipeline.entry_data = entry_data - mock_pipeline.handle_event = handle_event - mock_pipeline.handle_finished = handle_finished - mock_pipeline.api_client = api_client - return mock_pipeline - - mock_pipeline.side_effect = mock_constructor - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantAPIPipeline", - new=mock_pipeline, - ): - yield mock_pipeline - - -@pytest.fixture -def mock_voice_assistant_udp_pipeline() -> VoiceAssistantUDPPipeline: - """Return the API Pipeline factory.""" - mock_pipeline = Mock(spec=VoiceAssistantUDPPipeline) - - def mock_constructor( - hass: HomeAssistant, - entry_data: RuntimeEntryData, - handle_event: Callable[[VoiceAssistantEventType, dict[str, str] | None], None], - handle_finished: Callable[[], None], - ): - """Fake the constructor.""" - mock_pipeline.hass = hass - mock_pipeline.entry_data = entry_data - mock_pipeline.handle_event = handle_event - mock_pipeline.handle_finished = handle_finished - return mock_pipeline - - mock_pipeline.side_effect = mock_constructor - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantUDPPipeline", - new=mock_pipeline, - ): - yield mock_pipeline diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py new file mode 100644 index 00000000000..f024ca3b078 --- /dev/null +++ b/tests/components/esphome/test_assist_satellite.py @@ -0,0 +1,822 @@ +"""Test ESPHome voice assistant server.""" + +import asyncio +from collections.abc import Awaitable, Callable +import io +import socket +from unittest.mock import ANY, Mock, patch +import wave + +from aioesphomeapi import ( + APIClient, + EntityInfo, + EntityState, + UserService, + VoiceAssistantAudioSettings, + VoiceAssistantCommandFlag, + VoiceAssistantEventType, + VoiceAssistantFeature, + VoiceAssistantTimerEventType, +) +import pytest + +from homeassistant.components import assist_satellite +from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType +from homeassistant.components.assist_satellite.entity import ( + AssistSatelliteEntity, + AssistSatelliteState, +) +from homeassistant.components.esphome import DOMAIN +from homeassistant.components.esphome.assist_satellite import ( + EsphomeAssistSatellite, + VoiceAssistantUDPServer, +) +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, intent as intent_helper +import homeassistant.helpers.device_registry as dr +from homeassistant.helpers.entity_component import EntityComponent + +from .conftest import MockESPHomeDevice + + +def get_satellite_entity( + hass: HomeAssistant, mac_address: str +) -> EsphomeAssistSatellite | None: + """Get the satellite entity for a device.""" + ent_reg = er.async_get(hass) + satellite_entity_id = ent_reg.async_get_entity_id( + Platform.ASSIST_SATELLITE, DOMAIN, 
f"{mac_address}-assist_satellite" + ) + if satellite_entity_id is None: + return None + + component: EntityComponent[AssistSatelliteEntity] = hass.data[ + assist_satellite.DOMAIN + ] + if (entity := component.get_entity(satellite_entity_id)) is not None: + assert isinstance(entity, EsphomeAssistSatellite) + return entity + + return None + + +@pytest.fixture +def mock_wav() -> bytes: + """Return test WAV audio.""" + with io.BytesIO() as wav_io: + with wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(b"test-wav") + + return wav_io.getvalue() + + +async def test_no_satellite_without_voice_assistant( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that an assist satellite entity is not created if a voice assistant is not present.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={}, + ) + await hass.async_block_till_done() + + # No satellite entity should be created + assert get_satellite_entity(hass, mock_device.device_info.mac_address) is None + + +async def test_pipeline_api_audio( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test a complete pipeline run with API audio (over the TCP connection).""" + conversation_id = "test-conversation-id" + media_url = "http://test.url" + media_id = "test-media-id" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + }, + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + # Block TTS streaming until we're ready. + # This makes it easier to verify the order of pipeline events. 
+ stream_tts_audio_ready = asyncio.Event() + original_stream_tts_audio = satellite._stream_tts_audio + + async def _stream_tts_audio(*args, **kwargs): + await stream_tts_audio_ready.wait() + await original_stream_tts_audio(*args, **kwargs) + + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + assert device_id == dev.id + + stt_stream = kwargs["stt_stream"] + + chunks = [chunk async for chunk in stt_stream] + + # Verify test API audio + assert chunks == [b"test-mic"] + + event_callback = kwargs["event_callback"] + + # Test unknown event type + event_callback( + PipelineEvent( + type="unknown-event", + data={}, + ) + ) + + mock_client.send_voice_assistant_event.assert_not_called() + + # Test error event + event_callback( + PipelineEvent( + type=PipelineEventType.ERROR, + data={"code": "test-error-code", "message": "test-error-message"}, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, + {"code": "test-error-code", "message": "test-error-message"}, + ) + + # Wake word + assert satellite.state == AssistSatelliteState.LISTENING_WAKE_WORD + + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_START, + data={ + "entity_id": "test-wake-word-entity-id", + "metadata": {}, + "timeout": 0, + }, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_START, + {}, + ) + + # Test no wake word detected + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_END, data={"wake_word_output": {}} + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, + {"code": "no_wake_word", "message": "No wake word detected"}, + ) + + # Correct wake word detection + event_callback( + PipelineEvent( + type=PipelineEventType.WAKE_WORD_END, + data={"wake_word_output": {"wake_word_phrase": "test-wake-word"}}, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END, + {}, + ) + + # STT + event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={"engine": "test-stt-engine", "metadata": {}}, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_STT_START, + {}, + ) + assert satellite.state == AssistSatelliteState.LISTENING_COMMAND + + event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": "test-stt-text"}}, + ) + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_STT_END, + {"text": "test-stt-text"}, + ) + + # Intent + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_START, + data={ + "engine": "test-intent-engine", + "language": hass.config.language, + "intent_input": "test-intent-text", + "conversation_id": conversation_id, + "device_id": device_id, + }, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START, + {}, + ) + assert satellite.state == AssistSatelliteState.PROCESSING + + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_END, + data={"intent_output": {"conversation_id": conversation_id}}, + ) + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + 
VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, + {"conversation_id": conversation_id}, + ) + + # TTS + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={ + "engine": "test-stt-engine", + "language": hass.config.language, + "voice": "test-voice", + "tts_input": "test-tts-text", + }, + ) + ) + + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START, + {"text": "test-tts-text"}, + ) + assert satellite.state == AssistSatelliteState.RESPONDING + + # Should return mock_wav audio + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": media_url, "media_id": media_id}}, + ) + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, + {"url": media_url}, + ) + + event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_RUN_END, + {}, + ) + + # Allow TTS streaming to proceed + stream_tts_audio_ready.set() + + pipeline_finished = asyncio.Event() + original_handle_pipeline_finished = satellite.handle_pipeline_finished + + def handle_pipeline_finished(): + original_handle_pipeline_finished() + pipeline_finished.set() + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("wav", mock_wav) + + tts_finished = asyncio.Event() + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + tts_finished.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ), + patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), + patch.object(satellite, "_stream_tts_audio", _stream_tts_audio), + patch.object(satellite, "tts_response_finished", tts_response_finished), + ): + # Should be cleared at pipeline start + satellite._audio_queue.put_nowait(b"leftover-data") + + # Should be cancelled at pipeline start + mock_tts_streaming_task = Mock() + satellite._tts_streaming_task = mock_tts_streaming_task + + async with asyncio.timeout(1): + await satellite.handle_pipeline_start( + conversation_id=conversation_id, + flags=VoiceAssistantCommandFlag.USE_WAKE_WORD, + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase="", + ) + mock_tts_streaming_task.cancel.assert_called_once() + await satellite.handle_audio(b"test-mic") + await satellite.handle_pipeline_stop() + await pipeline_finished.wait() + + await tts_finished.wait() + + # Verify TTS streaming events. + # These are definitely the last two events because we blocked TTS streaming + # until after RUN_END above. 
+ assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, + {}, + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, + {}, + ) + + # Verify TTS WAV audio chunk came through + mock_client.send_voice_assistant_audio.assert_called_once_with(b"test-wav") + + +@pytest.mark.usefixtures("socket_enabled") +async def test_pipeline_udp_audio( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test a complete pipeline run with legacy UDP audio. + + This test is not as comprehensive as test_pipeline_api_audio since we're + mainly focused on the UDP server. + """ + conversation_id = "test-conversation-id" + media_url = "http://test.url" + media_id = "test-media-id" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + mic_audio_event = asyncio.Event() + + async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): + stt_stream = kwargs["stt_stream"] + + chunks = [] + async for chunk in stt_stream: + chunks.append(chunk) + mic_audio_event.set() + + # Verify test UDP audio + assert chunks == [b"test-mic"] + + event_callback = kwargs["event_callback"] + + # STT + event_callback( + PipelineEvent( + type=PipelineEventType.STT_START, + data={"engine": "test-stt-engine", "metadata": {}}, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.STT_END, + data={"stt_output": {"text": "test-stt-text"}}, + ) + ) + + # Intent + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_START, + data={ + "engine": "test-intent-engine", + "language": hass.config.language, + "intent_input": "test-intent-text", + "conversation_id": conversation_id, + "device_id": device_id, + }, + ) + ) + + event_callback( + PipelineEvent( + type=PipelineEventType.INTENT_END, + data={"intent_output": {"conversation_id": conversation_id}}, + ) + ) + + # TTS + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_START, + data={ + "engine": "test-stt-engine", + "language": hass.config.language, + "voice": "test-voice", + "tts_input": "test-tts-text", + }, + ) + ) + + # Should return mock_wav audio + event_callback( + PipelineEvent( + type=PipelineEventType.TTS_END, + data={"tts_output": {"url": media_url, "media_id": media_id}}, + ) + ) + + event_callback(PipelineEvent(type=PipelineEventType.RUN_END)) + + pipeline_finished = asyncio.Event() + original_handle_pipeline_finished = satellite.handle_pipeline_finished + + def handle_pipeline_finished(): + original_handle_pipeline_finished() + pipeline_finished.set() + + async def async_get_media_source_audio( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("wav", mock_wav) + + tts_finished = asyncio.Event() + original_tts_response_finished = satellite.tts_response_finished + + def tts_response_finished(): + original_tts_response_finished() + tts_finished.set() + + class TestProtocol(asyncio.DatagramProtocol): + def 
__init__(self) -> None: + self.transport = None + self.data_received: list[bytes] = [] + + def connection_made(self, transport): + self.transport = transport + + def datagram_received(self, data: bytes, addr): + self.data_received.append(data) + + with ( + patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + new=async_pipeline_from_audio_stream, + ), + patch( + "homeassistant.components.tts.async_get_media_source_audio", + new=async_get_media_source_audio, + ), + patch.object(satellite, "handle_pipeline_finished", handle_pipeline_finished), + patch.object(satellite, "tts_response_finished", tts_response_finished), + ): + async with asyncio.timeout(1): + port = await satellite.handle_pipeline_start( + conversation_id=conversation_id, + flags=VoiceAssistantCommandFlag(0), # stt + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase="", + ) + assert (port is not None) and (port > 0) + + ( + transport, + protocol, + ) = await asyncio.get_running_loop().create_datagram_endpoint( + TestProtocol, remote_addr=("127.0.0.1", port) + ) + assert isinstance(protocol, TestProtocol) + + # Send audio over UDP + transport.sendto(b"test-mic") + + # Wait for audio chunk to be delivered + await mic_audio_event.wait() + + await satellite.handle_pipeline_stop() + await pipeline_finished.wait() + + await tts_finished.wait() + + # Verify TTS audio (from UDP) + assert protocol.data_received == [b"test-wav"] + + # Check that UDP server was stopped + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setblocking(False) + sock.bind(("", port)) # will fail if UDP server is still running + sock.close() + + +async def test_udp_errors() -> None: + """Test UDP protocol error conditions.""" + audio_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + protocol = VoiceAssistantUDPServer(audio_queue) + + protocol.datagram_received(b"test", ("", 0)) + assert audio_queue.qsize() == 1 + assert (await audio_queue.get()) == b"test" + + # None will stop the pipeline + protocol.error_received(RuntimeError()) + assert audio_queue.qsize() == 1 + assert (await audio_queue.get()) is None + + # No transport + assert protocol.transport is None + protocol.send_audio_bytes(b"test") + + # No remote address + protocol.transport = Mock() + protocol.remote_addr = None + protocol.send_audio_bytes(b"test") + protocol.transport.sendto.assert_not_called() + + +async def test_timer_events( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that injecting timer events results in the correct api client calls.""" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.TIMERS + }, + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + + total_seconds = (1 * 60 * 60) + (2 * 60) + 3 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "name": {"value": "test timer"}, + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + 
VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, + ANY, + "test timer", + total_seconds, + total_seconds, + True, + ) + + # Increase timer beyond original time and check total_seconds has increased + mock_client.send_voice_assistant_timer_event.reset_mock() + + total_seconds += 5 * 60 + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_INCREASE_TIMER, + { + "name": {"value": "test timer"}, + "minutes": {"value": 5}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_called_with( + VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, + ANY, + "test timer", + total_seconds, + ANY, + True, + ) + + +async def test_unknown_timer_event( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that unknown (new) timer event types do not result in api calls.""" + + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.TIMERS + }, + ) + await hass.async_block_till_done() + assert mock_device.entry.unique_id is not None + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + assert dev is not None + + with patch( + "homeassistant.components.esphome.assist_satellite._TIMER_EVENT_TYPES.from_hass", + side_effect=KeyError, + ): + await intent_helper.async_handle( + hass, + "test", + intent_helper.INTENT_START_TIMER, + { + "name": {"value": "test timer"}, + "hours": {"value": 1}, + "minutes": {"value": 2}, + "seconds": {"value": 3}, + }, + device_id=dev.id, + ) + + mock_client.send_voice_assistant_timer_event.assert_not_called() + + +async def test_streaming_tts_errors( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], + mock_wav: bytes, +) -> None: + """Test error conditions for _stream_tts_audio function.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + # Should not stream if not running + satellite._is_running = False + await satellite._stream_tts_audio("test-media-id") + mock_client.send_voice_assistant_audio.assert_not_called() + satellite._is_running = True + + # Should only stream WAV + async def get_mp3( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + return ("mp3", b"") + + with patch( + "homeassistant.components.tts.async_get_media_source_audio", new=get_mp3 + ): + await satellite._stream_tts_audio("test-media-id") + mock_client.send_voice_assistant_audio.assert_not_called() + + # Needs to be the correct sample rate, etc. 
+ async def get_bad_wav( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + with io.BytesIO() as wav_io: + with wave.open(wav_io, "wb") as wav_file: + wav_file.setframerate(48000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(b"test-wav") + + return ("wav", wav_io.getvalue()) + + with patch( + "homeassistant.components.tts.async_get_media_source_audio", new=get_bad_wav + ): + await satellite._stream_tts_audio("test-media-id") + mock_client.send_voice_assistant_audio.assert_not_called() + + # Check that TTS_STREAM_* events still get sent after cancel + media_fetched = asyncio.Event() + + async def get_slow_wav( + hass: HomeAssistant, + media_source_id: str, + ) -> tuple[str, bytes]: + media_fetched.set() + await asyncio.sleep(1) + return ("wav", mock_wav) + + mock_client.send_voice_assistant_event.reset_mock() + with patch( + "homeassistant.components.tts.async_get_media_source_audio", new=get_slow_wav + ): + task = asyncio.create_task(satellite._stream_tts_audio("test-media-id")) + async with asyncio.timeout(1): + # Wait for media to be fetched + await media_fetched.wait() + + # Cancel task + task.cancel() + await task + + # No audio should have gone out + mock_client.send_voice_assistant_audio.assert_not_called() + assert len(mock_client.send_voice_assistant_event.call_args_list) == 2 + + # The TTS_STREAM_* events should have gone out + assert mock_client.send_voice_assistant_event.call_args_list[-2].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_START, + {}, + ) + assert mock_client.send_voice_assistant_event.call_args_list[-1].args == ( + VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, + {}, + ) diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index a14c83bf265..4b322c8744e 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -2,7 +2,7 @@ import asyncio from collections.abc import Awaitable, Callable -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, call from aioesphomeapi import ( APIClient, @@ -17,7 +17,6 @@ from aioesphomeapi import ( UserService, UserServiceArg, UserServiceArgType, - VoiceAssistantFeature, ) import pytest @@ -29,10 +28,6 @@ from homeassistant.components.esphome.const import ( DOMAIN, STABLE_BLE_VERSION_STR, ) -from homeassistant.components.esphome.voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantUDPPipeline, -) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -44,7 +39,7 @@ from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr, issue_registry as ir from homeassistant.setup import async_setup_component -from .conftest import _ONE_SECOND, MockESPHomeDevice +from .conftest import MockESPHomeDevice from tests.common import MockConfigEntry, async_capture_events, async_mock_service @@ -1214,102 +1209,3 @@ async def test_entry_missing_unique_id( await mock_esphome_device(mock_client=mock_client, mock_storage=True) await hass.async_block_till_done() assert entry.unique_id == "11:22:33:44:55:aa" - - -async def test_manager_voice_assistant_handlers_api( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - caplog: pytest.LogCaptureFixture, - mock_voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the handlers 
are correctly executed in manager.py.""" - - device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.API_AUDIO - }, - ) - - await hass.async_block_till_done() - - with ( - patch( - "homeassistant.components.esphome.manager.VoiceAssistantAPIPipeline", - new=mock_voice_assistant_api_pipeline, - ), - ): - port: int | None = await device.mock_voice_assistant_handle_start( - "", 0, None, None - ) - - assert port == 0 - - port: int | None = await device.mock_voice_assistant_handle_start( - "", 0, None, None - ) - - assert "Previous Voice assistant pipeline was not stopped" in caplog.text - - await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) - - mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_called_with( - bytes(_ONE_SECOND) - ) - - mock_voice_assistant_api_pipeline.receive_audio_bytes.reset_mock() - - await device.mock_voice_assistant_handle_stop() - mock_voice_assistant_api_pipeline.handle_finished() - - await device.mock_voice_assistant_handle_audio(bytes(_ONE_SECOND)) - - mock_voice_assistant_api_pipeline.receive_audio_bytes.assert_not_called() - - -async def test_manager_voice_assistant_handlers_udp( - hass: HomeAssistant, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], - mock_voice_assistant_udp_pipeline: VoiceAssistantUDPPipeline, -) -> None: - """Test the handlers are correctly executed in manager.py.""" - - device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - }, - ) - - await hass.async_block_till_done() - - with ( - patch( - "homeassistant.components.esphome.manager.VoiceAssistantUDPPipeline", - new=mock_voice_assistant_udp_pipeline, - ), - ): - await device.mock_voice_assistant_handle_start("", 0, None, None) - - mock_voice_assistant_udp_pipeline.run_pipeline.assert_called() - - await device.mock_voice_assistant_handle_stop() - mock_voice_assistant_udp_pipeline.handle_finished() - - mock_voice_assistant_udp_pipeline.stop.assert_called() - mock_voice_assistant_udp_pipeline.close.assert_called() diff --git a/tests/components/esphome/test_voice_assistant.py b/tests/components/esphome/test_voice_assistant.py deleted file mode 100644 index eafc0243dc6..00000000000 --- a/tests/components/esphome/test_voice_assistant.py +++ /dev/null @@ -1,964 +0,0 @@ -"""Test ESPHome voice assistant server.""" - -import asyncio -from collections.abc import Awaitable, Callable -import io -import socket -from unittest.mock import ANY, Mock, patch -import wave - -from aioesphomeapi import ( - APIClient, - EntityInfo, - EntityState, - UserService, - VoiceAssistantEventType, - VoiceAssistantFeature, - VoiceAssistantTimerEventType, -) -import pytest - -from homeassistant.components.assist_pipeline import ( - PipelineEvent, - PipelineEventType, - PipelineStage, -) -from homeassistant.components.assist_pipeline.error import ( - PipelineNotFound, - WakeWordDetectionAborted, - WakeWordDetectionError, -) -from homeassistant.components.esphome import DomainData -from homeassistant.components.esphome.voice_assistant import ( - VoiceAssistantAPIPipeline, - VoiceAssistantUDPPipeline, -) -from homeassistant.core import 
HomeAssistant -from homeassistant.helpers import intent as intent_helper -import homeassistant.helpers.device_registry as dr - -from .conftest import _ONE_SECOND, MockESPHomeDevice - -_TEST_INPUT_TEXT = "This is an input test" -_TEST_OUTPUT_TEXT = "This is an output test" -_TEST_OUTPUT_URL = "output.mp3" -_TEST_MEDIA_ID = "12345" - - -@pytest.fixture -def voice_assistant_udp_pipeline( - hass: HomeAssistant, -) -> VoiceAssistantUDPPipeline: - """Return the UDP pipeline factory.""" - - def _voice_assistant_udp_server(entry): - entry_data = DomainData.get(hass).get_entry_data(entry) - - server: VoiceAssistantUDPPipeline = None - - def handle_finished(): - nonlocal server - assert server is not None - server.close() - - server = VoiceAssistantUDPPipeline(hass, entry_data, Mock(), handle_finished) - return server # noqa: RET504 - - return _voice_assistant_udp_server - - -@pytest.fixture -def voice_assistant_api_pipeline( - hass: HomeAssistant, - mock_client, - mock_voice_assistant_api_entry, -) -> VoiceAssistantAPIPipeline: - """Return the API Pipeline factory.""" - entry_data = DomainData.get(hass).get_entry_data(mock_voice_assistant_api_entry) - return VoiceAssistantAPIPipeline(hass, entry_data, Mock(), Mock(), mock_client) - - -@pytest.fixture -def voice_assistant_udp_pipeline_v1( - voice_assistant_udp_pipeline, - mock_voice_assistant_v1_entry, -) -> VoiceAssistantUDPPipeline: - """Return the UDP pipeline.""" - return voice_assistant_udp_pipeline(entry=mock_voice_assistant_v1_entry) - - -@pytest.fixture -def voice_assistant_udp_pipeline_v2( - voice_assistant_udp_pipeline, - mock_voice_assistant_v2_entry, -) -> VoiceAssistantUDPPipeline: - """Return the UDP pipeline.""" - return voice_assistant_udp_pipeline(entry=mock_voice_assistant_v2_entry) - - -@pytest.fixture -def mock_wav() -> bytes: - """Return one second of empty WAV audio.""" - with io.BytesIO() as wav_io: - with wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(16000) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(_ONE_SECOND)) - - return wav_io.getvalue() - - -async def test_pipeline_events( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the pipeline function is called.""" - - async def async_pipeline_from_audio_stream(*args, device_id, **kwargs): - assert device_id == "mock-device-id" - - event_callback = kwargs["event_callback"] - - event_callback( - PipelineEvent( - type=PipelineEventType.WAKE_WORD_END, - data={"wake_word_output": {}}, - ) - ) - - # Fake events - event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": _TEST_INPUT_TEXT}}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={"tts_input": _TEST_OUTPUT_TEXT}, - ) - ) - - event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": _TEST_OUTPUT_URL}}, - ) - ) - - def handle_event( - event_type: VoiceAssistantEventType, data: dict[str, str] | None - ) -> None: - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_STT_END: - assert data is not None - assert data["text"] == _TEST_INPUT_TEXT - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START: - assert data is not None - assert data["text"] == _TEST_OUTPUT_TEXT - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END: - assert data is not None - assert data["url"] == 
_TEST_OUTPUT_URL - elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_WAKE_WORD_END: - assert data is None - - voice_assistant_udp_pipeline_v1.handle_event = handle_event - - with patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - voice_assistant_udp_pipeline_v1.transport = Mock() - - await voice_assistant_udp_pipeline_v1.run_pipeline( - device_id="mock-device-id", conversation_id=None - ) - - -@pytest.mark.usefixtures("socket_enabled") -async def test_udp_server( - unused_udp_port_factory: Callable[[], int], - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server runs and queues incoming data.""" - port_to_use = unused_udp_port_factory() - - with patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", new=port_to_use - ): - port = await voice_assistant_udp_pipeline_v1.start_server() - assert port == port_to_use - - sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 0 - sock.sendto(b"test", ("127.0.0.1", port)) - - # Give the socket some time to send/receive the data - async with asyncio.timeout(1): - while voice_assistant_udp_pipeline_v1.queue.qsize() == 0: - await asyncio.sleep(0.1) - - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 - - voice_assistant_udp_pipeline_v1.stop() - voice_assistant_udp_pipeline_v1.close() - - assert voice_assistant_udp_pipeline_v1.transport.is_closing() - - -async def test_udp_server_queue( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server queues incoming data.""" - - voice_assistant_udp_pipeline_v1.started = True - - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 0 - - voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 - - voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 2 - - async for data in voice_assistant_udp_pipeline_v1._iterate_packets(): - assert data == bytes(1024) - break - assert voice_assistant_udp_pipeline_v1.queue.qsize() == 1 # One message removed - - voice_assistant_udp_pipeline_v1.stop() - assert ( - voice_assistant_udp_pipeline_v1.queue.qsize() == 2 - ) # An empty message added by stop - - voice_assistant_udp_pipeline_v1.datagram_received(bytes(1024), ("localhost", 0)) - assert ( - voice_assistant_udp_pipeline_v1.queue.qsize() == 2 - ) # No new messages added after stop - - voice_assistant_udp_pipeline_v1.close() - - # Stopping the UDP server should cause _iterate_packets to break out - # immediately without yielding any data. 
- has_data = False - async for _data in voice_assistant_udp_pipeline_v1._iterate_packets(): - has_data = True - - assert not has_data, "Server was stopped" - - -async def test_api_pipeline_queue( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline queues incoming data.""" - - voice_assistant_api_pipeline.started = True - - assert voice_assistant_api_pipeline.queue.qsize() == 0 - - voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) - assert voice_assistant_api_pipeline.queue.qsize() == 1 - - voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) - assert voice_assistant_api_pipeline.queue.qsize() == 2 - - async for data in voice_assistant_api_pipeline._iterate_packets(): - assert data == bytes(1024) - break - assert voice_assistant_api_pipeline.queue.qsize() == 1 # One message removed - - voice_assistant_api_pipeline.stop() - assert ( - voice_assistant_api_pipeline.queue.qsize() == 2 - ) # An empty message added by stop - - voice_assistant_api_pipeline.receive_audio_bytes(bytes(1024)) - assert ( - voice_assistant_api_pipeline.queue.qsize() == 2 - ) # No new messages added after stop - - # Stopping the API Pipeline should cause _iterate_packets to break out - # immediately without yielding any data. - has_data = False - async for _data in voice_assistant_api_pipeline._iterate_packets(): - has_data = True - - assert not has_data, "Pipeline was stopped" - - -async def test_error_calls_handle_finished( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the handle_finished callback is called when an error occurs.""" - voice_assistant_udp_pipeline_v1.handle_finished = Mock() - - voice_assistant_udp_pipeline_v1.error_received(Exception()) - - voice_assistant_udp_pipeline_v1.handle_finished.assert_called() - - -@pytest.mark.usefixtures("socket_enabled") -async def test_udp_server_multiple( - unused_udp_port_factory: Callable[[], int], - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the UDP server raises an error if started twice.""" - with patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", - new=unused_udp_port_factory(), - ): - await voice_assistant_udp_pipeline_v1.start_server() - - with ( - patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", - new=unused_udp_port_factory(), - ), - pytest.raises(RuntimeError), - ): - await voice_assistant_udp_pipeline_v1.start_server() - - -@pytest.mark.usefixtures("socket_enabled") -async def test_udp_server_after_stopped( - unused_udp_port_factory: Callable[[], int], - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test that the UDP server raises an error if started after stopped.""" - voice_assistant_udp_pipeline_v1.close() - with ( - patch( - "homeassistant.components.esphome.voice_assistant.UDP_PORT", - new=unused_udp_port_factory(), - ), - pytest.raises(RuntimeError), - ): - await voice_assistant_udp_pipeline_v1.start_server() - - -async def test_events_converted_correctly( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the pipeline events produce the correct data to send to the device.""" - - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts", - ): - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.STT_START, - data={}, - ) - ) - - 
voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_STT_START, None - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.STT_END, - data={"stt_output": {"text": "text"}}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_STT_END, {"text": "text"} - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_START, - data={}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_START, None - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.INTENT_END, - data={ - "intent_output": { - "conversation_id": "conversation-id", - } - }, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END, - {"conversation_id": "conversation-id"}, - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_START, - data={"tts_input": "text"}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START, {"text": "text"} - ) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={"tts_output": {"url": "url", "media_id": "media-id"}}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_TTS_END, {"url": "url"} - ) - - -async def test_unknown_event_type( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline does not call handle_event for unknown events.""" - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type="unknown-event", - data={}, - ) - ) - - assert not voice_assistant_api_pipeline.handle_event.called - - -async def test_error_event_type( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline calls event handler with error.""" - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.ERROR, - data={"code": "code", "message": "message"}, - ) - ) - - voice_assistant_api_pipeline.handle_event.assert_called_with( - VoiceAssistantEventType.VOICE_ASSISTANT_ERROR, - {"code": "code", "message": "message"}, - ) - - -async def test_send_tts_not_called( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server with a v1 device does not call _send_tts.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_udp_pipeline_v1._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - mock_send_tts.assert_not_called() - - -async def test_send_tts_called_udp( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, -) -> None: - """Test the UDP server with a v2 device calls _send_tts.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": 
_TEST_OUTPUT_URL} - }, - ) - ) - - mock_send_tts.assert_called_with(_TEST_MEDIA_ID) - - -async def test_send_tts_called_api( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the API pipeline calls _send_tts.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - mock_send_tts.assert_called_with(_TEST_MEDIA_ID) - - -async def test_send_tts_not_called_when_empty( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v1: VoiceAssistantUDPPipeline, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test the pipelines do not call _send_tts when the output is empty.""" - with patch( - "homeassistant.components.esphome.voice_assistant.VoiceAssistantPipeline._send_tts" - ) as mock_send_tts: - voice_assistant_udp_pipeline_v1._event_callback( - PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) - ) - - mock_send_tts.assert_not_called() - - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) - ) - - mock_send_tts.assert_not_called() - - voice_assistant_api_pipeline._event_callback( - PipelineEvent(type=PipelineEventType.TTS_END, data={"tts_output": {}}) - ) - - mock_send_tts.assert_not_called() - - -async def test_send_tts_udp( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - mock_wav: bytes, -) -> None: - """Test the UDP server calls sendto to transmit audio data to device.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_udp_pipeline_v2.started = True - voice_assistant_udp_pipeline_v2.transport = Mock(spec=asyncio.DatagramTransport) - with patch.object( - voice_assistant_udp_pipeline_v2.transport, "is_closing", return_value=False - ): - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": { - "media_id": _TEST_MEDIA_ID, - "url": _TEST_OUTPUT_URL, - } - }, - ) - ) - - await voice_assistant_udp_pipeline_v2._tts_done.wait() - - voice_assistant_udp_pipeline_v2.transport.sendto.assert_called() - - -async def test_send_tts_api( - hass: HomeAssistant, - mock_client: APIClient, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, - mock_wav: bytes, -) -> None: - """Test the API pipeline calls cli.send_voice_assistant_audio to transmit audio data to device.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_api_pipeline.started = True - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": { - "media_id": _TEST_MEDIA_ID, - "url": _TEST_OUTPUT_URL, - } - }, - ) - ) - - await voice_assistant_api_pipeline._tts_done.wait() - - mock_client.send_voice_assistant_audio.assert_called() - - -async def test_send_tts_wrong_sample_rate( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that only 16000Hz audio will be streamed.""" - with io.BytesIO() as wav_io: - with 
wave.open(wav_io, "wb") as wav_file: - wav_file.setframerate(22050) - wav_file.setsampwidth(2) - wav_file.setnchannels(1) - wav_file.writeframes(bytes(_ONE_SECOND)) - - wav_bytes = wav_io.getvalue() - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", wav_bytes), - ): - voice_assistant_api_pipeline.started = True - voice_assistant_api_pipeline.transport = Mock(spec=asyncio.DatagramTransport) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - assert voice_assistant_api_pipeline._tts_task is not None - with pytest.raises(ValueError): - await voice_assistant_api_pipeline._tts_task - - -async def test_send_tts_wrong_format( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that only WAV audio will be streamed.""" - with ( - patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("raw", bytes(1024)), - ), - ): - voice_assistant_api_pipeline.started = True - voice_assistant_api_pipeline.transport = Mock(spec=asyncio.DatagramTransport) - - voice_assistant_api_pipeline._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - assert voice_assistant_api_pipeline._tts_task is not None - with pytest.raises(ValueError): - await voice_assistant_api_pipeline._tts_task - - -async def test_send_tts_not_started( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - mock_wav: bytes, -) -> None: - """Test the UDP server does not call sendto when not started.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_udp_pipeline_v2.started = False - voice_assistant_udp_pipeline_v2.transport = Mock(spec=asyncio.DatagramTransport) - - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - - await voice_assistant_udp_pipeline_v2._tts_done.wait() - - voice_assistant_udp_pipeline_v2.transport.sendto.assert_not_called() - - -async def test_send_tts_transport_none( - hass: HomeAssistant, - voice_assistant_udp_pipeline_v2: VoiceAssistantUDPPipeline, - mock_wav: bytes, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the UDP server does not call sendto when transport is None.""" - with patch( - "homeassistant.components.esphome.voice_assistant.tts.async_get_media_source_audio", - return_value=("wav", mock_wav), - ): - voice_assistant_udp_pipeline_v2.started = True - voice_assistant_udp_pipeline_v2.transport = None - - voice_assistant_udp_pipeline_v2._event_callback( - PipelineEvent( - type=PipelineEventType.TTS_END, - data={ - "tts_output": {"media_id": _TEST_MEDIA_ID, "url": _TEST_OUTPUT_URL} - }, - ) - ) - await voice_assistant_udp_pipeline_v2._tts_done.wait() - - assert "No transport to send audio to" in caplog.text - - -async def test_wake_word( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - async def async_pipeline_from_audio_stream(*args, start_stage, **kwargs): - assert start_stage == 
PipelineStage.WAKE_WORD - - with ( - patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch("asyncio.Event.wait"), # TTS wait event - ): - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) - - -async def test_wake_word_exception( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - raise WakeWordDetectionError("pipeline-not-found", "Pipeline not found") - - with patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - - def handle_event( - event_type: VoiceAssistantEventType, data: dict[str, str] | None - ) -> None: - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: - assert data is not None - assert data["code"] == "pipeline-not-found" - assert data["message"] == "Pipeline not found" - - voice_assistant_api_pipeline.handle_event = handle_event - - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) - - -async def test_wake_word_abort_exception( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - raise WakeWordDetectionAborted - - with ( - patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ), - patch.object(voice_assistant_api_pipeline, "handle_event") as mock_handle_event, - ): - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) - - mock_handle_event.assert_not_called() - - -async def test_timer_events( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that injecting timer events results in the correct api client calls.""" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.TIMERS - }, - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - total_seconds = (1 * 60 * 60) + (2 * 60) + 3 - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "name": {"value": "test timer"}, - "hours": {"value": 1}, - "minutes": {"value": 2}, - "seconds": {"value": 3}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_called_with( - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_STARTED, - ANY, - "test timer", - total_seconds, - total_seconds, - True, - ) - - # Increase timer beyond original time and check total_seconds has increased - mock_client.send_voice_assistant_timer_event.reset_mock() - - total_seconds += 5 * 60 - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_INCREASE_TIMER, - { - 
"name": {"value": "test timer"}, - "minutes": {"value": 5}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_called_with( - VoiceAssistantTimerEventType.VOICE_ASSISTANT_TIMER_UPDATED, - ANY, - "test timer", - total_seconds, - ANY, - True, - ) - - -async def test_unknown_timer_event( - hass: HomeAssistant, - device_registry: dr.DeviceRegistry, - mock_client: APIClient, - mock_esphome_device: Callable[ - [APIClient, list[EntityInfo], list[UserService], list[EntityState]], - Awaitable[MockESPHomeDevice], - ], -) -> None: - """Test that unknown (new) timer event types do not result in api calls.""" - - mock_device: MockESPHomeDevice = await mock_esphome_device( - mock_client=mock_client, - entity_info=[], - user_service=[], - states=[], - device_info={ - "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT - | VoiceAssistantFeature.TIMERS - }, - ) - await hass.async_block_till_done() - dev = device_registry.async_get_device( - connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} - ) - - with patch( - "homeassistant.components.esphome.voice_assistant._TIMER_EVENT_TYPES.from_hass", - side_effect=KeyError, - ): - await intent_helper.async_handle( - hass, - "test", - intent_helper.INTENT_START_TIMER, - { - "name": {"value": "test timer"}, - "hours": {"value": 1}, - "minutes": {"value": 2}, - "seconds": {"value": 3}, - }, - device_id=dev.id, - ) - - mock_client.send_voice_assistant_timer_event.assert_not_called() - - -async def test_invalid_pipeline_id( - hass: HomeAssistant, - voice_assistant_api_pipeline: VoiceAssistantAPIPipeline, -) -> None: - """Test that the pipeline is set to start with Wake word.""" - - invalid_pipeline_id = "invalid-pipeline-id" - - async def async_pipeline_from_audio_stream(*args, **kwargs): - raise PipelineNotFound( - "pipeline_not_found", f"Pipeline {invalid_pipeline_id} not found" - ) - - with patch( - "homeassistant.components.esphome.voice_assistant.async_pipeline_from_audio_stream", - new=async_pipeline_from_audio_stream, - ): - - def handle_event( - event_type: VoiceAssistantEventType, data: dict[str, str] | None - ) -> None: - if event_type == VoiceAssistantEventType.VOICE_ASSISTANT_ERROR: - assert data is not None - assert data["code"] == "pipeline_not_found" - assert data["message"] == f"Pipeline {invalid_pipeline_id} not found" - - voice_assistant_api_pipeline.handle_event = handle_event - - await voice_assistant_api_pipeline.run_pipeline( - device_id="mock-device-id", - conversation_id=None, - flags=2, - ) From 33814d118036152ec260b38529e2611cf9e27fe5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:50:17 +0200 Subject: [PATCH 1533/1635] Add model ID to sfr_box (#125400) --- homeassistant/components/sfr_box/__init__.py | 1 + tests/components/sfr_box/snapshots/test_binary_sensor.ambr | 4 ++-- tests/components/sfr_box/snapshots/test_button.ambr | 2 +- tests/components/sfr_box/snapshots/test_sensor.ambr | 2 +- 4 files changed, 5 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/sfr_box/__init__.py b/homeassistant/components/sfr_box/__init__.py index d386c670365..927e3cb0ef2 100644 --- a/homeassistant/components/sfr_box/__init__.py +++ b/homeassistant/components/sfr_box/__init__.py @@ -66,6 +66,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: identifiers={(DOMAIN, system_info.mac_addr)}, name="SFR Box", model=system_info.product_id, + model_id=system_info.product_id, 
sw_version=system_info.version_mainfirmware, configuration_url=f"http://{entry.data[CONF_HOST]}", ) diff --git a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr index 0023f65c90e..15308fad91f 100644 --- a/tests/components/sfr_box/snapshots/test_binary_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_binary_sensor.ambr @@ -22,7 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , @@ -150,7 +150,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_button.ambr b/tests/components/sfr_box/snapshots/test_button.ambr index df097b58c51..67b2198fd2b 100644 --- a/tests/components/sfr_box/snapshots/test_button.ambr +++ b/tests/components/sfr_box/snapshots/test_button.ambr @@ -22,7 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , diff --git a/tests/components/sfr_box/snapshots/test_sensor.ambr b/tests/components/sfr_box/snapshots/test_sensor.ambr index 46b22448d25..7645a4ad8bf 100644 --- a/tests/components/sfr_box/snapshots/test_sensor.ambr +++ b/tests/components/sfr_box/snapshots/test_sensor.ambr @@ -22,7 +22,7 @@ }), 'manufacturer': None, 'model': 'NB6VAC-FXC-r0', - 'model_id': None, + 'model_id': 'NB6VAC-FXC-r0', 'name': 'SFR Box', 'name_by_user': None, 'primary_config_entry': , From e3e48ff9b7f4d5c3426373678ffb60cffa59d900 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 6 Sep 2024 16:52:03 +0200 Subject: [PATCH 1534/1635] Use PEP 695 for decorator typing with type aliases in zha (#124235) --- homeassistant/components/zha/helpers.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index f70c8a9cb3e..56e7d481f2c 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -14,7 +14,7 @@ import logging import re import time from types import MappingProxyType -from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast +from typing import TYPE_CHECKING, Any, Concatenate, NamedTuple, cast from zoneinfo import ZoneInfo import voluptuous as vol @@ -172,9 +172,6 @@ if TYPE_CHECKING: _LogFilterType = Filter | Callable[[LogRecord], bool] -_P = ParamSpec("_P") -_EntityT = TypeVar("_EntityT", bound="ZHAEntity") - _LOGGER = logging.getLogger(__name__) DEBUG_COMP_BELLOWS = "bellows" @@ -1277,7 +1274,7 @@ def create_zha_config(hass: HomeAssistant, ha_zha_data: HAZHAData) -> ZHAData: ) -def convert_zha_error_to_ha_error( +def convert_zha_error_to_ha_error[**_P, _EntityT: ZHAEntity]( func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], ) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: """Decorate ZHA commands and re-raises ZHAException as HomeAssistantError.""" From 069b7a45ed95637c1d80f5ba8cc0a5903caae94c Mon Sep 17 00:00:00 2001 From: Marlon Date: Fri, 6 Sep 2024 16:52:32 +0200 Subject: [PATCH 1535/1635] Set min_power similar to max_power to support all inverters from apsystems (#124247) Set min_power similar to max_power to support all inverters from apsystems ez1 series --- 
homeassistant/components/apsystems/coordinator.py | 5 +++-- homeassistant/components/apsystems/number.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index 6ba4f01dbc8..b6e951343f7 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -36,10 +36,11 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): async def _async_setup(self) -> None: try: - max_power = (await self.api.get_device_info()).maxPower + device_info = await self.api.get_device_info() except (ConnectionError, TimeoutError): raise UpdateFailed from None - self.api.max_power = max_power + self.api.max_power = device_info.maxPower + self.api.min_power = device_info.minPower async def _async_update_data(self) -> ApSystemsSensorData: output_data = await self.api.get_output_data() diff --git a/homeassistant/components/apsystems/number.py b/homeassistant/components/apsystems/number.py index 51e7130587f..01e991f5188 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -26,7 +26,6 @@ async def async_setup_entry( class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): """Base sensor to be used with description.""" - _attr_native_min_value = 30 _attr_native_step = 1 _attr_device_class = NumberDeviceClass.POWER _attr_mode = NumberMode.BOX @@ -42,6 +41,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): self._api = data.coordinator.api self._attr_unique_id = f"{data.device_id}_output_limit" self._attr_native_max_value = data.coordinator.api.max_power + self._attr_native_min_value = data.coordinator.api.min_power async def async_update(self) -> None: """Set the state with the value fetched from the inverter.""" From f86bd3dfee7e50bc4a2201b50c35e37962a00fd1 Mon Sep 17 00:00:00 2001 From: tdfountain <174762217+tdfountain@users.noreply.github.com> Date: Fri, 6 Sep 2024 07:52:49 -0700 Subject: [PATCH 1536/1635] Improve consistency of sensor strings to reduce confusion in NUT (#124184) Improve consistency of sensor strings to reduce confusion --- homeassistant/components/nut/strings.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/nut/strings.json b/homeassistant/components/nut/strings.json index d5b9acbdaad..ec5905fc16c 100644 --- a/homeassistant/components/nut/strings.json +++ b/homeassistant/components/nut/strings.json @@ -127,8 +127,8 @@ "input_l1_current": { "name": "Input L1 current" }, "input_l2_current": { "name": "Input L2 current" }, "input_l3_current": { "name": "Input L3 current" }, - "input_frequency": { "name": "Input line frequency" }, - "input_frequency_nominal": { "name": "Nominal input line frequency" }, + "input_frequency": { "name": "Input frequency" }, + "input_frequency_nominal": { "name": "Input nominal frequency" }, "input_frequency_status": { "name": "Input frequency status" }, "input_l1_frequency": { "name": "Input L1 line frequency" }, "input_l2_frequency": { "name": "Input L2 line frequency" }, From 6b75c86a17110ca1488037f55346ef0aaad52c2f Mon Sep 17 00:00:00 2001 From: tdfountain <174762217+tdfountain@users.noreply.github.com> Date: Fri, 6 Sep 2024 07:53:05 -0700 Subject: [PATCH 1537/1635] Move ambient sensors (temperature and humidity) to diagnostic in NUT (#124180) Move ambient sensors (temperature and humidity) to Diagnostic --- homeassistant/components/nut/sensor.py 
| 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/nut/sensor.py b/homeassistant/components/nut/sensor.py index 7b61342866b..d2398a560b7 100644 --- a/homeassistant/components/nut/sensor.py +++ b/homeassistant/components/nut/sensor.py @@ -927,6 +927,7 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), "ambient.temperature": SensorEntityDescription( key="ambient.temperature", @@ -934,6 +935,7 @@ SENSOR_TYPES: Final[dict[str, SensorEntityDescription]] = { native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + entity_category=EntityCategory.DIAGNOSTIC, ), "watts": SensorEntityDescription( key="watts", From 49b07b3749e6f68d5659490b66e7cc9575d586c3 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Fri, 6 Sep 2024 16:56:43 +0200 Subject: [PATCH 1538/1635] Provide same entities for all Enphase_envoy CT types (#124531) Provide same entities for all Enphase_envoy CT types. --- .../components/enphase_envoy/sensor.py | 101 + .../components/enphase_envoy/strings.json | 54 + .../enphase_envoy/snapshots/test_sensor.ambr | 3926 +++++++++++++++++ 3 files changed, 4081 insertions(+) diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index e6c7a585eb7..4dd7f158305 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -36,6 +36,7 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( PERCENTAGE, UnitOfApparentPower, + UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, @@ -295,6 +296,28 @@ CT_NET_CONSUMPTION_SENSORS = ( value_fn=attrgetter("voltage"), on_phase=None, ), + EnvoyCTSensorEntityDescription( + key="net_ct_current", + translation_key="net_ct_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=3, + entity_registry_enabled_default=False, + value_fn=attrgetter("current"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="net_ct_powerfactor", + translation_key="net_ct_powerfactor", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + value_fn=attrgetter("power_factor"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="net_consumption_ct_metering_status", translation_key="net_ct_metering_status", @@ -331,6 +354,51 @@ CT_NET_CONSUMPTION_PHASE_SENSORS = { } CT_PRODUCTION_SENSORS = ( + EnvoyCTSensorEntityDescription( + key="production_ct_frequency", + translation_key="production_ct_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=1, + entity_registry_enabled_default=False, + value_fn=attrgetter("frequency"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="production_ct_voltage", + translation_key="production_ct_voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, 
+ device_class=SensorDeviceClass.VOLTAGE, + suggested_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=1, + entity_registry_enabled_default=False, + value_fn=attrgetter("voltage"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="production_ct_current", + translation_key="production_ct_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=3, + entity_registry_enabled_default=False, + value_fn=attrgetter("current"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="production_ct_powerfactor", + translation_key="production_ct_powerfactor", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + value_fn=attrgetter("power_factor"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="production_ct_metering_status", translation_key="production_ct_metering_status", @@ -399,6 +467,17 @@ CT_STORAGE_SENSORS = ( value_fn=attrgetter("active_power"), on_phase=None, ), + EnvoyCTSensorEntityDescription( + key="storage_ct_frequency", + translation_key="storage_ct_frequency", + native_unit_of_measurement=UnitOfFrequency.HERTZ, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.FREQUENCY, + suggested_display_precision=1, + entity_registry_enabled_default=False, + value_fn=attrgetter("frequency"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="storage_voltage", translation_key="storage_ct_voltage", @@ -411,6 +490,28 @@ CT_STORAGE_SENSORS = ( value_fn=attrgetter("voltage"), on_phase=None, ), + EnvoyCTSensorEntityDescription( + key="storage_ct_current", + translation_key="storage_ct_current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.CURRENT, + suggested_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=3, + entity_registry_enabled_default=False, + value_fn=attrgetter("current"), + on_phase=None, + ), + EnvoyCTSensorEntityDescription( + key="storage_ct_powerfactor", + translation_key="storage_ct_powerfactor", + device_class=SensorDeviceClass.POWER_FACTOR, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=2, + entity_registry_enabled_default=False, + value_fn=attrgetter("power_factor"), + on_phase=None, + ), EnvoyCTSensorEntityDescription( key="storage_ct_metering_status", translation_key="storage_ct_metering_status", diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index f7964bf2f45..3c48776e448 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -180,12 +180,30 @@ "net_ct_voltage": { "name": "Voltage net consumption CT" }, + "net_ct_current": { + "name": "Net consumption CT current" + }, + "net_ct_powerfactor": { + "name": "Powerfactor net consumption CT" + }, "net_ct_metering_status": { "name": "Metering status net consumption CT" }, "net_ct_status_flags": { "name": "Meter status flags active net consumption CT" }, + "production_ct_frequency": { + "name": "Frequency production CT" + }, + "production_ct_voltage": { + "name": "Voltage production CT" + }, + "production_ct_current": { + "name": "Production CT current" + }, + 
"production_ct_powerfactor": { + "name": "powerfactor production CT" + }, "production_ct_metering_status": { "name": "Metering status production CT" }, @@ -201,9 +219,18 @@ "battery_discharge": { "name": "Current battery discharge" }, + "storage_ct_frequency": { + "name": "Frequency storage CT" + }, "storage_ct_voltage": { "name": "Voltage storage CT" }, + "storage_ct_current": { + "name": "Storage CT current" + }, + "storage_ct_powerfactor": { + "name": "Powerfactor storage CT" + }, "storage_ct_metering_status": { "name": "Metering status storage CT" }, @@ -225,12 +252,30 @@ "net_ct_voltage_phase": { "name": "Voltage net consumption CT {phase_name}" }, + "net_ct_current_phase": { + "name": "Net consumption CT current {phase_name}" + }, + "net_ct_powerfactor_phase": { + "name": "Powerfactor net consumption CT {phase_name}" + }, "net_ct_metering_status_phase": { "name": "Metering status net consumption CT {phase_name}" }, "net_ct_status_flags_phase": { "name": "Meter status flags active net consumption CT {phase_name}" }, + "production_ct_frequency_phase": { + "name": "Frequency production CT {phase_name}" + }, + "production_ct_voltage_phase": { + "name": "Voltage production CT {phase_name}" + }, + "production_ct_current_phase": { + "name": "Production CT current {phase_name}" + }, + "production_ct_powerfactor_phase": { + "name": "Powerfactor production CT {phase_name}" + }, "production_ct_metering_status_phase": { "name": "Metering status production CT {phase_name}" }, @@ -246,9 +291,18 @@ "battery_discharge_phase": { "name": "Current battery discharge {phase_name}" }, + "storage_ct_frequency_phase": { + "name": "Frequency storage CT {phase_name}" + }, "storage_ct_voltage_phase": { "name": "Voltage storage CT {phase_name}" }, + "storage_ct_current_phase": { + "name": "Storage CT current {phase_name}" + }, + "storage_ct_powerfactor_phase": { + "name": "Powerfactor storage CT {phase_name}" + }, "storage_ct_metering_status_phase": { "name": "Metering status storage CT {phase_name}" }, diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index dde6a6add41..ad937b27167 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -783,6 +783,61 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 
'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1227,6 +1282,230 @@ 'state': 'normal', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1285,6 +1564,64 @@ 'state': '112', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 
'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.inverter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -3918,6 +4255,446 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 
'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct', + 'has_entity_name': True, + 'hidden_by': 
None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency', + 'unique_id': '1234_storage_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency_phase', + 'unique_id': '1234_storage_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency_phase', + 'unique_id': '1234_storage_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT l2', 
+ 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_frequency_phase', + 'unique_id': '1234_storage_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_frequency_storage_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency storage CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.2', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6582,6 +7359,1118 @@ 'state': 'normal', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), 
+ 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 
'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, 
+ 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 
'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor', + 'unique_id': '1234_storage_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor_phase', + 'unique_id': '1234_storage_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.32', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor_phase', + 'unique_id': '1234_storage_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor storage CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_powerfactor_phase', + 'unique_id': '1234_storage_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_powerfactor_storage_ct_l3-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor storage CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_storage_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_reserve_battery_energy-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6680,6 +8569,238 @@ 'state': '15', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 
'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current', + 'unique_id': '1234_storage_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.4', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current_phase', + 'unique_id': '1234_storage_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.4', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current_phase', + 'unique_id': '1234_storage_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 
'friendly_name': 'Envoy 1234 Storage CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Storage CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'storage_ct_current_phase', + 'unique_id': '1234_storage_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_storage_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Storage CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_storage_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -6912,6 +9033,238 @@ 'state': '112', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l1-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 
'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_voltage_storage_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -9064,6 +11417,226 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l1-state] + StateSnapshot({ + 
'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency_phase', + 'unique_id': '1234_production_ct_frequency_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_frequency_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -10840,6 +13413,902 @@ 'state': 'normal', }) # --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current', + 'unique_id': '1234_net_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Net consumption CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_current_phase', + 'unique_id': '1234_net_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_net_consumption_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Net consumption CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_net_consumption_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.3', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor', + 'unique_id': '1234_net_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct-state] + 
StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.21', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.22', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.23', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor net consumption CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'net_ct_powerfactor_phase', + 'unique_id': '1234_net_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_net_consumption_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor net consumption CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_net_consumption_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.24', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': 
'1234_production_ct_powerfactor_l1', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.12', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l2', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.13', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Powerfactor production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor_phase', + 'unique_id': '1234_production_ct_powerfactor_l3', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_powerfactor_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 Powerfactor production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.14', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , 
+ }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current_phase', + 'unique_id': '1234_production_ct_current_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_production_ct_current_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_net_consumption_ct-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11072,6 +14541,238 @@ 'state': '112', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l3-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage_phase', + 'unique_id': '1234_production_ct_voltage_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_voltage_production_ct_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.inverter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11343,6 +15044,61 @@ 'state': '1.234', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_frequency_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Frequency production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_frequency', + 'unique_id': '1234_production_ct_frequency', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_frequency_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'frequency', + 'friendly_name': 'Envoy 1234 Frequency production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_frequency_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '50.1', + }) +# --- # name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11507,6 +15263,176 @@ 'state': 'normal', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 
'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'powerfactor production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_powerfactor', + 'unique_id': '1234_production_ct_powerfactor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_powerfactor_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power_factor', + 'friendly_name': 'Envoy 1234 powerfactor production CT', + 'icon': 'mdi:flash', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_powerfactor_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.11', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_production_ct_current-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Production CT current', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_current', + 'unique_id': '1234_production_ct_current', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_production_ct_current-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'current', + 'friendly_name': 'Envoy 1234 Production CT current', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_production_ct_current', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.2', + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_voltage_production_ct-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Voltage production CT', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'production_ct_voltage', + 'unique_id': '1234_production_ct_voltage', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_voltage_production_ct-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'voltage', + 'friendly_name': 'Envoy 1234 Voltage production CT', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_voltage_production_ct', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '111', + }) +# --- # name: test_sensor[envoy_tot_cons_metered][sensor.inverter_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ From 883e33e72ab75de5a0b01dceb35d9105202d2825 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Jaworski?= Date: Fri, 6 Sep 2024 16:59:14 +0200 Subject: [PATCH 1539/1635] Fix mired range in blebox color temp mode lights (#124258) * fix: use default mired range in belbox lights running in color temp mode * fix: ruff --- homeassistant/components/blebox/light.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/homeassistant/components/blebox/light.py b/homeassistant/components/blebox/light.py index 1f994db7243..34f9b24b17b 100644 --- a/homeassistant/components/blebox/light.py +++ b/homeassistant/components/blebox/light.py @@ -60,6 +60,9 @@ COLOR_MODE_MAP = { class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): """Representation of BleBox lights.""" + _attr_max_mireds = 370 # 1,000,000 divided by 2700 Kelvin = 370 Mireds + _attr_min_mireds = 154 # 1,000,000 divided by 6500 Kelvin = 154 Mireds + def __init__(self, feature: blebox_uniapi.light.Light) -> None: """Initialize a BleBox light.""" super().__init__(feature) @@ -87,12 +90,7 @@ class BleBoxLightEntity(BleBoxEntity[blebox_uniapi.light.Light], LightEntity): Set values to _attr_ibutes if needed. """ - color_mode_tmp = COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF) - if color_mode_tmp == ColorMode.COLOR_TEMP: - self._attr_min_mireds = 1 - self._attr_max_mireds = 255 - - return color_mode_tmp + return COLOR_MODE_MAP.get(self._feature.color_mode, ColorMode.ONOFF) @property def supported_color_modes(self): From 09989e6184044782ddf7ecc5f071b7cf4be36d55 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Fri, 6 Sep 2024 17:14:25 +0200 Subject: [PATCH 1540/1635] Fix UnboundLocalError in recorder (#125419) --- homeassistant/components/recorder/migration.py | 1 + 1 file changed, 1 insertion(+) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 4d9978c641b..df7ff5c4fed 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -2594,6 +2594,7 @@ class EventIDPostMigration(BaseRunTimeMigration): # removing the index is the likely all that needs to happen. all_gone = not result + fk_remove_ok = False if all_gone: # Only drop the index if there are no more event_ids in the states table # ex all NULL From ea7b2ecec038bae3328a0a37c4e4e22bfc2f6a9c Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Fri, 6 Sep 2024 17:14:37 +0200 Subject: [PATCH 1541/1635] Improve coordinator test coverage for enphase_envoy (#122375) * Improve coordinator test coverage for enphase_envoy * rename to test_coordinator to test_init for enphase_envoy * Mock pyenphase _obtain_token instead of httpx auth requests in enphase_envoy tests. 
* Move EnvoyTokenAuth patch to mock_envoy of enphase_envoy --- tests/components/enphase_envoy/conftest.py | 9 + tests/components/enphase_envoy/test_init.py | 221 ++++++++++++++++++++ 2 files changed, 230 insertions(+) create mode 100644 tests/components/enphase_envoy/test_init.py diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index ab6e0e4f097..58627211344 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -69,6 +69,11 @@ async def mock_envoy( request: pytest.FixtureRequest, ) -> AsyncGenerator[AsyncMock]: """Define a mocked Envoy fixture.""" + new_token = jwt.encode( + payload={"name": "envoy", "exp": 2007837780}, + key="secret", + algorithm="HS256", + ) with ( patch( "homeassistant.components.enphase_envoy.config_flow.Envoy", @@ -78,6 +83,10 @@ async def mock_envoy( "homeassistant.components.enphase_envoy.Envoy", new=mock_client, ), + patch( + "pyenphase.auth.EnvoyTokenAuth._obtain_token", + return_value=new_token, + ), ): mock_envoy = mock_client.return_value # Add the fixtures specified diff --git a/tests/components/enphase_envoy/test_init.py b/tests/components/enphase_envoy/test_init.py new file mode 100644 index 00000000000..7b10e784d50 --- /dev/null +++ b/tests/components/enphase_envoy/test_init.py @@ -0,0 +1,221 @@ +"""Test Enphase Envoy runtime.""" + +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from jwt import encode +from pyenphase import EnvoyAuthenticationError, EnvoyError, EnvoyTokenAuth +from pyenphase.auth import EnvoyLegacyAuth +import pytest +import respx + +from homeassistant.components.enphase_envoy import DOMAIN +from homeassistant.components.enphase_envoy.const import Platform +from homeassistant.components.enphase_envoy.coordinator import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + CONF_HOST, + CONF_NAME, + CONF_PASSWORD, + CONF_TOKEN, + CONF_USERNAME, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_with_pre_v7_firmware( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test enphase_envoy coordinator with pre V7 firmware.""" + mock_envoy.firmware = "5.1.1" + mock_envoy.auth = EnvoyLegacyAuth( + "127.0.0.1", username="test-username", password="test-password" + ) + await setup_integration(hass, config_entry) + + assert config_entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") +async def test_token_in_config_file( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test coordinator with token provided from config.""" + token = encode( + payload={"name": "envoy", "exp": 1907837780}, + key="secret", + algorithm="HS256", + ) + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: token, + }, + ) + mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") + await setup_integration(hass, entry) + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +@respx.mock +@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") +async def test_expired_token_in_config( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test coordinator with expired token provided from config.""" + current_token = encode( + # some time in 2021 + payload={"name": "envoy", "exp": 1627314600}, + key="secret", + algorithm="HS256", + ) + + # mock envoy with expired token in config + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: current_token, + }, + ) + # Make sure to mock pyenphase.auth.EnvoyTokenAuth._obtain_token + # when specifying username and password in EnvoyTokenauth + mock_envoy.auth = EnvoyTokenAuth( + "127.0.0.1", + token=current_token, + envoy_serial="1234", + cloud_username="test_username", + cloud_password="test_password", + ) + await setup_integration(hass, entry) + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" + + +async def test_coordinator_update_error( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test coordinator update error handling.""" + await setup_integration(hass, config_entry) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + original_state = entity_state + + # force HA to detect changed data by changing raw + mock_envoy.data.raw = {"I": "am changed 1"} + mock_envoy.update.side_effect = EnvoyError + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entity_state := 
hass.states.get("sensor.inverter_1")) + assert entity_state.state == STATE_UNAVAILABLE + + mock_envoy.reset_mock(return_value=True, side_effect=True) + + mock_envoy.data.raw = {"I": "am changed 2"} + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == original_state.state + + +async def test_coordinator_update_authentication_error( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test enphase_envoy coordinator update authentication error handling.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + # force HA to detect changed data by changing raw + mock_envoy.data.raw = {"I": "am changed 1"} + mock_envoy.update.side_effect = EnvoyAuthenticationError("This must fail") + + # Move time to next update + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == STATE_UNAVAILABLE + + +@pytest.mark.freeze_time("2024-07-23 00:00:00+00:00") +async def test_coordinator_token_refresh_error( + hass: HomeAssistant, + mock_envoy: AsyncMock, +) -> None: + """Test coordinator with token provided from config.""" + # 63, 69-79 _async_try_refresh_token + token = encode( + # some time in 2021 + payload={"name": "envoy", "exp": 1627314600}, + key="secret", + algorithm="HS256", + ) + entry = MockConfigEntry( + domain=DOMAIN, + entry_id="45a36e55aaddb2007c5f6602e0c38e72", + title="Envoy 1234", + unique_id="1234", + data={ + CONF_HOST: "1.1.1.1", + CONF_NAME: "Envoy 1234", + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", + CONF_TOKEN: token, + }, + ) + # token refresh without username and password specified in + # EnvoyTokenAuthwill force token refresh error + mock_envoy.auth = EnvoyTokenAuth("127.0.0.1", token=token, envoy_serial="1234") + await setup_integration(hass, entry) + await hass.async_block_till_done(wait_background_tasks=True) + assert entry.state is ConfigEntryState.LOADED + + assert (entity_state := hass.states.get("sensor.inverter_1")) + assert entity_state.state == "1" From 20639b0f023aabc4047c64bb51f3a07136e0b1e1 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Fri, 6 Sep 2024 17:56:46 +0200 Subject: [PATCH 1542/1635] Add tests for LCN climate and scene platform (#124466) * Add tests for LCN climate and scene platform * Add type hints * Add snapshots for test_climate * Add snapshots for test_scene * Replace await_called assertion with snapshots * Remove snapshots for simple status changes * Test platform setup using snapshot_platform * Fix type hints * Patch homeassistant.components.lcn context instead of pypck module * Fix side effects caused by patching PchkConnectionManager in lcn platform context --- homeassistant/components/lcn/__init__.py | 3 +- tests/components/lcn/conftest.py | 23 +- tests/components/lcn/fixtures/config.json | 29 ++ .../lcn/fixtures/config_entry_pchk.json | 38 +++ .../lcn/snapshots/test_climate.ambr | 63 ++++ .../components/lcn/snapshots/test_scene.ambr | 93 ++++++ tests/components/lcn/test_climate.py | 287 ++++++++++++++++++ tests/components/lcn/test_init.py | 4 +- tests/components/lcn/test_scene.py | 64 ++++ 
tests/components/lcn/test_services.py | 111 ++++--- 10 files changed, 656 insertions(+), 59 deletions(-) create mode 100644 tests/components/lcn/snapshots/test_climate.ambr create mode 100644 tests/components/lcn/snapshots/test_scene.ambr create mode 100644 tests/components/lcn/test_climate.py create mode 100644 tests/components/lcn/test_scene.py diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 75f417cb3a5..9817a254d59 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -7,6 +7,7 @@ from functools import partial import logging import pypck +from pypck.connection import PchkConnectionManager from homeassistant import config_entries from homeassistant.const import ( @@ -87,7 +88,7 @@ async def async_setup_entry( } # connect to PCHK - lcn_connection = pypck.connection.PchkConnectionManager( + lcn_connection = PchkConnectionManager( config_entry.data[CONF_IP_ADDRESS], config_entry.data[CONF_PORT], config_entry.data[CONF_USERNAME], diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index b1f28b28465..67c5b9c0b9c 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -1,16 +1,15 @@ """Test configuration and mocks for LCN component.""" -from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, Mock, patch import pypck -from pypck.connection import PchkConnectionManager import pypck.module from pypck.module import GroupConnection, ModuleConnection import pytest +from homeassistant.components.lcn import PchkConnectionManager from homeassistant.components.lcn.const import DOMAIN from homeassistant.components.lcn.helpers import AddressType, generate_unique_id from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST @@ -76,11 +75,10 @@ def create_config_entry(name: str) -> MockConfigEntry: options = {} title = entry_data[CONF_HOST] - unique_id = fixture_filename return MockConfigEntry( + entry_id=fixture_filename, domain=DOMAIN, title=title, - unique_id=unique_id, data=entry_data, options=options, ) @@ -98,10 +96,9 @@ def create_config_entry_myhome() -> MockConfigEntry: return create_config_entry("myhome") -@pytest.fixture(name="lcn_connection") async def init_integration( hass: HomeAssistant, entry: MockConfigEntry -) -> AsyncGenerator[MockPchkConnectionManager]: +) -> MockPchkConnectionManager: """Set up the LCN integration in Home Assistant.""" hass.http = Mock() # needs to be mocked as hass.http.register_static_path is called when registering the frontend lcn_connection = None @@ -113,12 +110,22 @@ async def init_integration( entry.add_to_hass(hass) with patch( - "pypck.connection.PchkConnectionManager", + "homeassistant.components.lcn.PchkConnectionManager", side_effect=lcn_connection_factory, ): await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - yield lcn_connection + + return lcn_connection + + +@pytest.fixture(name="lcn_connection") +async def init_lcn_connection( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> MockPchkConnectionManager: + """Set up the LCN integration in Home Assistantand yield connection object.""" + return await init_integration(hass, entry) async def setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/lcn/fixtures/config.json b/tests/components/lcn/fixtures/config.json index 13b3dd5feed..ed3e3500900 100644 --- a/tests/components/lcn/fixtures/config.json 
+++ b/tests/components/lcn/fixtures/config.json @@ -91,6 +91,35 @@ "motor": "motor1" } ], + "climates": [ + { + "name": "Climate1", + "address": "s0.m7", + "source": "var1", + "setpoint": "r1varsetpoint", + "lockable": true, + "min_temp": 0, + "max_temp": 40, + "unit_of_measurement": "°C" + } + ], + "scenes": [ + { + "name": "Romantic", + "address": "s0.m7", + "register": 0, + "scene": 0, + "outputs": ["output1", "output2", "relay1"] + }, + { + "name": "Romantic Transition", + "address": "s0.m7", + "register": 0, + "scene": 1, + "outputs": ["output1", "output2", "relay1"], + "transition": 10 + } + ], "binary_sensors": [ { "name": "Sensor_LockRegulator1", diff --git a/tests/components/lcn/fixtures/config_entry_pchk.json b/tests/components/lcn/fixtures/config_entry_pchk.json index 08ccd194578..9a8095ff16d 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk.json +++ b/tests/components/lcn/fixtures/config_entry_pchk.json @@ -121,6 +121,44 @@ "reverse_time": "RT1200" } }, + { + "address": [0, 7, false], + "name": "Climate1", + "resource": "var1.r1varsetpoint", + "domain": "climate", + "domain_data": { + "source": "VAR1", + "setpoint": "R1VARSETPOINT", + "lockable": true, + "min_temp": 0.0, + "max_temp": 40.0, + "unit_of_measurement": "°C" + } + }, + { + "address": [0, 7, false], + "name": "Romantic", + "resource": "0.0", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 0, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": null + } + }, + { + "address": [0, 7, false], + "name": "Romantic Transition", + "resource": "0.1", + "domain": "scene", + "domain_data": { + "register": 0, + "scene": 1, + "outputs": ["OUTPUT1", "OUTPUT2", "RELAY1"], + "transition": 10 + } + }, { "address": [0, 7, false], "name": "Sensor_LockRegulator1", diff --git a/tests/components/lcn/snapshots/test_climate.ambr b/tests/components/lcn/snapshots/test_climate.ambr new file mode 100644 index 00000000000..443b13312d1 --- /dev/null +++ b/tests/components/lcn/snapshots/test_climate.ambr @@ -0,0 +1,63 @@ +# serializer version: 1 +# name: test_setup_lcn_climate[climate.climate1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 40.0, + 'min_temp': 0.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.climate1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Climate1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1.r1varsetpoint', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_climate[climate.climate1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'Climate1', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 40.0, + 'min_temp': 0.0, + 'supported_features': , + 'temperature': None, + }), + 'context': , + 'entity_id': 'climate.climate1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_scene.ambr b/tests/components/lcn/snapshots/test_scene.ambr new file mode 100644 index 00000000000..c039c4ef951 --- /dev/null +++ 
b/tests/components/lcn/snapshots/test_scene.ambr @@ -0,0 +1,93 @@ +# serializer version: 1 +# name: test_setup_lcn_scene[scene.romantic-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'scene', + 'entity_category': None, + 'entity_id': 'scene.romantic', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Romantic', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-0.0', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_scene[scene.romantic-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Romantic', + }), + 'context': , + 'entity_id': 'scene.romantic', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_scene[scene.romantic_transition-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'scene', + 'entity_category': None, + 'entity_id': 'scene.romantic_transition', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Romantic Transition', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-0.1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_scene[scene.romantic_transition-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Romantic Transition', + }), + 'context': , + 'entity_id': 'scene.romantic_transition', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lcn/test_climate.py b/tests/components/lcn/test_climate.py new file mode 100644 index 00000000000..db9f137d6bf --- /dev/null +++ b/tests/components/lcn/test_climate.py @@ -0,0 +1,287 @@ +"""Test for the LCN climate platform.""" + +from unittest.mock import patch + +from pypck.inputs import ModStatusVar, Unknown +from pypck.lcn_addr import LcnAddr +from pypck.lcn_defs import Var, VarUnit, VarValue +from syrupy.assertion import SnapshotAssertion + +# pylint: disable=hass-component-root-import +from homeassistant.components.climate import DOMAIN as DOMAIN_CLIMATE +from homeassistant.components.climate.const import ( + ATTR_CURRENT_TEMPERATURE, + ATTR_HVAC_MODE, + ATTR_TARGET_TEMP_HIGH, + ATTR_TARGET_TEMP_LOW, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.components.lcn.helpers import get_device_connection +from homeassistant.const import ( + ATTR_ENTITY_ID, + ATTR_TEMPERATURE, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform + + +async def test_setup_lcn_climate( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + 
entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the setup of climate.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.CLIMATE]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_set_hvac_mode_heat(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the hvac mode is set to heat.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + state = hass.states.get("climate.climate1") + state.state = HVACMode.OFF + + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state != HVACMode.HEAT + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, False) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.HEAT + + +async def test_set_hvac_mode_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the hvac mode is set off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: + state = hass.states.get("climate.climate1") + state.state = HVACMode.HEAT + + # command failed + lock_regulator.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state != HVACMode.OFF + + # command success + lock_regulator.reset_mock(return_value=True) + lock_regulator.return_value = True + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_HVAC_MODE: HVACMode.OFF}, + blocking=True, + ) + + lock_regulator.assert_awaited_with(0, True) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.OFF + + +async def test_set_temperature(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the temperature is set.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "var_abs") as var_abs: + state = hass.states.get("climate.climate1") + state.state = HVACMode.HEAT + + # wrong temperature set via service call with high/low attributes + var_abs.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_TEMPERATURE, + { + ATTR_ENTITY_ID: "climate.climate1", + ATTR_TARGET_TEMP_LOW: 24.5, + ATTR_TARGET_TEMP_HIGH: 25.5, + }, + blocking=True, + ) + + var_abs.assert_not_awaited() + + # command failed + var_abs.reset_mock(return_value=True) + var_abs.return_value = False + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_TEMPERATURE: 25.5}, + blocking=True, + ) + + 
var_abs.assert_awaited_with(Var.R1VARSETPOINT, 25.5, VarUnit.CELSIUS) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.attributes[ATTR_TEMPERATURE] != 25.5 + + # command success + var_abs.reset_mock(return_value=True) + var_abs.return_value = True + + await hass.services.async_call( + DOMAIN_CLIMATE, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: "climate.climate1", ATTR_TEMPERATURE: 25.5}, + blocking=True, + ) + + var_abs.assert_awaited_with(Var.R1VARSETPOINT, 25.5, VarUnit.CELSIUS) + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.attributes[ATTR_TEMPERATURE] == 25.5 + + +async def test_pushed_current_temperature_status_change( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the climate changes its current temperature on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + + temperature = VarValue.from_celsius(25.5) + + inp = ModStatusVar(address, Var.VAR1, temperature) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 25.5 + assert state.attributes[ATTR_TEMPERATURE] is None + + +async def test_pushed_setpoint_status_change( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the climate changes its setpoint on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + + temperature = VarValue.from_celsius(25.5) + + inp = ModStatusVar(address, Var.R1VARSETPOINT, temperature) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.HEAT + assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None + assert state.attributes[ATTR_TEMPERATURE] == 25.5 + + +async def test_pushed_lock_status_change( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the climate changes its setpoint on status received.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + address = LcnAddr(0, 7, False) + + temperature = VarValue(0x8000) + + inp = ModStatusVar(address, Var.R1VARSETPOINT, temperature) + await device_connection.async_process_input(inp) + await hass.async_block_till_done() + + state = hass.states.get("climate.climate1") + assert state is not None + assert state.state == HVACMode.OFF + assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None + assert state.attributes[ATTR_TEMPERATURE] is None + + +async def test_pushed_wrong_input( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the climate handles wrong input correctly.""" + await init_integration(hass, entry) + + device_connection = get_device_connection(hass, (0, 7, False), entry) + + await device_connection.async_process_input(Unknown("input")) + await hass.async_block_till_done() + + state = hass.states.get("climate.climate1") + assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None + assert state.attributes[ATTR_TEMPERATURE] is None + + +async def test_unload_config_entry( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the climate is 
removed when the config entry is unloaded.""" + await init_integration(hass, entry) + + await hass.config_entries.async_forward_entry_unload(entry, DOMAIN_CLIMATE) + state = hass.states.get("climate.climate1") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index c118b98ecef..120db8a1333 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -32,7 +32,9 @@ async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) -> None: """Test a successful setup and unload of multiple entries.""" hass.http = Mock() - with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): + with patch( + "homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager + ): for config_entry in (entry, entry2): config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/lcn/test_scene.py b/tests/components/lcn/test_scene.py new file mode 100644 index 00000000000..558893bb76f --- /dev/null +++ b/tests/components/lcn/test_scene.py @@ -0,0 +1,64 @@ +"""Test for the LCN scene platform.""" + +from unittest.mock import patch + +from pypck.lcn_defs import OutputPort, RelayPort +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.scene import DOMAIN as DOMAIN_SCENE +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_ON, + STATE_UNAVAILABLE, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform + + +async def test_setup_lcn_scene( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the setup of switch.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SCENE]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_scene_activate( + hass: HomeAssistant, + entry: MockConfigEntry, +) -> None: + """Test the scene is activated.""" + await init_integration(hass, entry) + with patch.object(MockModuleConnection, "activate_scene") as activate_scene: + await hass.services.async_call( + DOMAIN_SCENE, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "scene.romantic"}, + blocking=True, + ) + + state = hass.states.get("scene.romantic") + assert state is not None + + activate_scene.assert_awaited_with( + 0, 0, [OutputPort.OUTPUT1, OutputPort.OUTPUT2], [RelayPort.RELAY1], None + ) + + +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the scene is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_forward_entry_unload(entry, DOMAIN_SCENE) + + state = hass.states.get("scene.romantic") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py index 9cb53289065..27253a0c7e5 100644 --- a/tests/components/lcn/test_services.py +++ b/tests/components/lcn/test_services.py @@ -32,16 +32,21 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .conftest import 
MockModuleConnection, MockPchkConnectionManager, setup_component +from .conftest import ( + MockConfigEntry, + MockModuleConnection, + MockPchkConnectionManager, + init_integration, +) -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_output_abs( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test output_abs service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "dim_output") as dim_output: await hass.services.async_call( @@ -59,13 +64,13 @@ async def test_service_output_abs( assert dim_output.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_output_rel( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test output_rel service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "rel_output") as rel_output: await hass.services.async_call( @@ -82,13 +87,13 @@ async def test_service_output_rel( assert rel_output.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_output_toggle( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test output_toggle service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "toggle_output") as toggle_output: await hass.services.async_call( @@ -105,11 +110,13 @@ async def test_service_output_toggle( assert toggle_output.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_relays(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_relays( + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion +) -> None: """Test relays service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "control_relays") as control_relays: await hass.services.async_call( @@ -122,11 +129,13 @@ async def test_service_relays(hass: HomeAssistant, snapshot: SnapshotAssertion) assert control_relays.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_led(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_led( + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion +) -> None: """Test led service.""" await async_setup_component(hass, 
"persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "control_led") as control_led: await hass.services.async_call( @@ -139,13 +148,13 @@ async def test_service_led(hass: HomeAssistant, snapshot: SnapshotAssertion) -> assert control_led.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_var_abs( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test var_abs service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "var_abs") as var_abs: await hass.services.async_call( @@ -163,13 +172,13 @@ async def test_service_var_abs( assert var_abs.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_var_rel( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test var_rel service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "var_rel") as var_rel: await hass.services.async_call( @@ -188,13 +197,13 @@ async def test_service_var_rel( assert var_rel.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_var_reset( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test var_reset service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "var_reset") as var_reset: await hass.services.async_call( @@ -207,13 +216,13 @@ async def test_service_var_reset( assert var_reset.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_lock_regulator( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test lock_regulator service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "lock_regulator") as lock_regulator: await hass.services.async_call( @@ -230,13 +239,13 @@ async def test_service_lock_regulator( assert lock_regulator.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_send_keys( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test send_keys service.""" await 
async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "send_keys") as send_keys: await hass.services.async_call( @@ -254,13 +263,13 @@ async def test_service_send_keys( assert send_keys.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_send_keys_hit_deferred( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test send_keys (hit_deferred) service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) keys = [[False] * 8 for i in range(4)] keys[0][0] = True @@ -306,13 +315,13 @@ async def test_service_send_keys_hit_deferred( ) -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_lock_keys( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test lock_keys service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "lock_keys") as lock_keys: await hass.services.async_call( @@ -325,13 +334,13 @@ async def test_service_lock_keys( assert lock_keys.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_lock_keys_tab_a_temporary( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test lock_keys (tab_a_temporary) service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) # success with patch.object( @@ -372,13 +381,13 @@ async def test_service_lock_keys_tab_a_temporary( ) -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_dyn_text( - hass: HomeAssistant, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion ) -> None: """Test dyn_text service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "dyn_text") as dyn_text: await hass.services.async_call( @@ -391,11 +400,13 @@ async def test_service_dyn_text( assert dyn_text.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_pck(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_pck( + hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion +) -> None: """Test pck service.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with 
patch.object(MockModuleConnection, "pck") as pck: await hass.services.async_call( @@ -408,11 +419,13 @@ async def test_service_pck(hass: HomeAssistant, snapshot: SnapshotAssertion) -> assert pck.await_args.args == snapshot() -@patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_called_with_invalid_host_id(hass: HomeAssistant) -> None: +@patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) +async def test_service_called_with_invalid_host_id( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test service was called with non existing host id.""" await async_setup_component(hass, "persistent_notification", {}) - await setup_component(hass) + await init_integration(hass, entry) with patch.object(MockModuleConnection, "pck") as pck, pytest.raises(ValueError): await hass.services.async_call( From ee59303d3c60eabe8aba13239af4050353f1d193 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Fri, 6 Sep 2024 10:57:09 -0500 Subject: [PATCH 1543/1635] Use first media player announcement format for TTS (#125237) * Use ANNOUNCEMENT format from first media player for tts * Fix formatting --------- Co-authored-by: Paulus Schoutsen --- .../components/assist_satellite/entity.py | 11 ++- .../components/esphome/assist_satellite.py | 27 +++++++ .../components/esphome/entry_data.py | 4 + .../components/esphome/media_player.py | 10 ++- .../assist_satellite/test_entity.py | 2 +- .../esphome/test_assist_satellite.py | 73 ++++++++++++++++++- 6 files changed, 123 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/assist_satellite/entity.py b/homeassistant/components/assist_satellite/entity.py index 6ec40ae24f7..38973f15f55 100644 --- a/homeassistant/components/assist_satellite/entity.py +++ b/homeassistant/components/assist_satellite/entity.py @@ -72,6 +72,7 @@ class AssistSatelliteEntity(entity.Entity): _run_has_tts: bool = False _is_announcing = False _wake_word_intercept_future: asyncio.Future[str | None] | None = None + _attr_tts_options: dict[str, Any] | None = None __assist_satellite_state = AssistSatelliteState.LISTENING_WAKE_WORD @@ -91,6 +92,11 @@ class AssistSatelliteEntity(entity.Entity): """Entity ID of the VAD sensitivity to use for the next conversation.""" return self._attr_vad_sensitivity_entity_id + @property + def tts_options(self) -> dict[str, Any] | None: + """Options passed for text-to-speech.""" + return self._attr_tts_options + async def async_intercept_wake_word(self) -> str | None: """Intercept the next wake word from the satellite. 
@@ -137,6 +143,9 @@ class AssistSatelliteEntity(entity.Entity): if pipeline.tts_voice is not None: tts_options[tts.ATTR_VOICE] = pipeline.tts_voice + if self.tts_options is not None: + tts_options.update(self.tts_options) + media_id = tts_generate_media_source_id( self.hass, message, @@ -253,7 +262,7 @@ class AssistSatelliteEntity(entity.Entity): pipeline_id=self._resolve_pipeline(), conversation_id=self._conversation_id, device_id=device_id, - tts_audio_output="wav", + tts_audio_output=self.tts_options, wake_word_phrase=wake_word_phrase, audio_settings=AudioSettings( silence_seconds=self._resolve_vad_sensitivity() diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index 48bb9ec5507..f84940eadc4 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -6,12 +6,14 @@ import asyncio from collections.abc import AsyncIterable from functools import partial import io +from itertools import chain import logging import socket from typing import Any, cast import wave from aioesphomeapi import ( + MediaPlayerFormatPurpose, VoiceAssistantAudioSettings, VoiceAssistantCommandFlag, VoiceAssistantEventType, @@ -288,6 +290,18 @@ class EsphomeAssistSatellite( end_stage = PipelineStage.TTS + if feature_flags & VoiceAssistantFeature.SPEAKER: + # Stream WAV audio + self._attr_tts_options = { + tts.ATTR_PREFERRED_FORMAT: "wav", + tts.ATTR_PREFERRED_SAMPLE_RATE: 16000, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + else: + # ANNOUNCEMENT format from media player + self._update_tts_format() + # Run the pipeline _LOGGER.debug("Running pipeline from %s to %s", start_stage, end_stage) self.entry_data.async_set_assist_pipeline_state(True) @@ -340,6 +354,19 @@ class EsphomeAssistSatellite( timer_info.is_active, ) + def _update_tts_format(self) -> None: + """Update the TTS format from the first media player.""" + for supported_format in chain(*self.entry_data.media_player_formats.values()): + # Find first announcement format + if supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT: + self._attr_tts_options = { + tts.ATTR_PREFERRED_FORMAT: supported_format.format, + tts.ATTR_PREFERRED_SAMPLE_RATE: supported_format.sample_rate, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: supported_format.num_channels, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } + break + async def _stream_tts_audio( self, media_id: str, diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index 6fc40612c48..f1b5218eec7 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -31,6 +31,7 @@ from aioesphomeapi import ( LightInfo, LockInfo, MediaPlayerInfo, + MediaPlayerSupportedFormat, NumberInfo, SelectInfo, SensorInfo, @@ -148,6 +149,9 @@ class RuntimeEntryData: tuple[type[EntityInfo], int], list[Callable[[EntityInfo], None]] ] = field(default_factory=dict) original_options: dict[str, Any] = field(default_factory=dict) + media_player_formats: dict[str, list[MediaPlayerSupportedFormat]] = field( + default_factory=lambda: defaultdict(list) + ) @property def name(self) -> str: diff --git a/homeassistant/components/esphome/media_player.py b/homeassistant/components/esphome/media_player.py index f7c5d7011f8..4d57552bb19 100644 --- a/homeassistant/components/esphome/media_player.py +++ b/homeassistant/components/esphome/media_player.py @@ -3,7 +3,7 @@ from 
__future__ import annotations from functools import partial -from typing import Any +from typing import Any, cast from aioesphomeapi import ( EntityInfo, @@ -66,6 +66,9 @@ class EsphomeMediaPlayer( if self._static_info.supports_pause: flags |= MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY self._attr_supported_features = flags + self._entry_data.media_player_formats[self.entity_id] = cast( + MediaPlayerInfo, static_info + ).supported_formats @property @esphome_state_property @@ -103,6 +106,11 @@ class EsphomeMediaPlayer( self._key, media_url=media_id, announcement=announcement ) + async def async_will_remove_from_hass(self) -> None: + """Handle entity being removed.""" + await super().async_will_remove_from_hass() + self._entry_data.media_player_formats.pop(self.entity_id, None) + async def async_browse_media( self, media_content_type: MediaType | str | None = None, diff --git a/tests/components/assist_satellite/test_entity.py b/tests/components/assist_satellite/test_entity.py index 2e4caca030b..ec52d8abff4 100644 --- a/tests/components/assist_satellite/test_entity.py +++ b/tests/components/assist_satellite/test_entity.py @@ -61,7 +61,7 @@ async def test_entity_state( assert kwargs["stt_stream"] is audio_stream assert kwargs["pipeline_id"] is None assert kwargs["device_id"] is None - assert kwargs["tts_audio_output"] == "wav" + assert kwargs["tts_audio_output"] is None assert kwargs["wake_word_phrase"] is None assert kwargs["audio_settings"] == AudioSettings( silence_seconds=vad.VadSensitivity.to_seconds(vad.VadSensitivity.DEFAULT) diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py index f024ca3b078..1c7f7320a85 100644 --- a/tests/components/esphome/test_assist_satellite.py +++ b/tests/components/esphome/test_assist_satellite.py @@ -11,6 +11,9 @@ from aioesphomeapi import ( APIClient, EntityInfo, EntityState, + MediaPlayerFormatPurpose, + MediaPlayerInfo, + MediaPlayerSupportedFormat, UserService, VoiceAssistantAudioSettings, VoiceAssistantCommandFlag, @@ -20,7 +23,7 @@ from aioesphomeapi import ( ) import pytest -from homeassistant.components import assist_satellite +from homeassistant.components import assist_satellite, tts from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType from homeassistant.components.assist_satellite.entity import ( AssistSatelliteEntity, @@ -820,3 +823,71 @@ async def test_streaming_tts_errors( VoiceAssistantEventType.VOICE_ASSISTANT_TTS_STREAM_END, {}, ) + + +async def test_tts_format_from_media_player( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the text-to-speech format is pulled from the first media player.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.DEFAULT, + ), + # This is the format that should be used for tts + MediaPlayerSupportedFormat( + format="mp3", + sample_rate=22050, + num_channels=1, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + ), + ], + ) + ], + user_service=[], + states=[], + device_info={ + 
"voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + with patch( + "homeassistant.components.assist_satellite.entity.async_pipeline_from_audio_stream", + ) as mock_pipeline_from_audio_stream: + await satellite.handle_pipeline_start( + conversation_id="", + flags=0, + audio_settings=VoiceAssistantAudioSettings(), + wake_word_phrase=None, + ) + + mock_pipeline_from_audio_stream.assert_called_once() + kwargs = mock_pipeline_from_audio_stream.call_args_list[0].kwargs + + # Should be ANNOUNCEMENT format from media player + assert kwargs.get("tts_audio_output") == { + tts.ATTR_PREFERRED_FORMAT: "mp3", + tts.ATTR_PREFERRED_SAMPLE_RATE: 22050, + tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, + tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, + } From 741add066677000504d43cbb7dafd7ed33bd0bfe Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Fri, 6 Sep 2024 18:09:43 +0200 Subject: [PATCH 1544/1635] Replace strings with constants in Bang & Olufsen testing (#125423) Replace strings with constants in service calls --- .../bang_olufsen/test_media_player.py | 104 ++++++++++-------- 1 file changed, 58 insertions(+), 46 deletions(-) diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 9928a626a4f..70743cd2cca 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -36,6 +36,18 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_TRACK, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, + DOMAIN as MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PLAY_PAUSE, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_SEEK, + SERVICE_MEDIA_STOP, + SERVICE_PLAY_MEDIA, + SERVICE_SELECT_SOURCE, + SERVICE_TURN_OFF, + SERVICE_VOLUME_MUTE, + SERVICE_VOLUME_SET, MediaPlayerState, MediaType, ) @@ -385,8 +397,8 @@ async def test_async_turn_off( ) await hass.services.async_call( - "media_player", - "turn_off", + MEDIA_PLAYER_DOMAIN, + SERVICE_TURN_OFF, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -416,8 +428,8 @@ async def test_async_set_volume_level( assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes await hass.services.async_call( - "media_player", - "volume_set", + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_SET, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_VOLUME_LEVEL: TEST_VOLUME_HOME_ASSISTANT_FORMAT, @@ -454,8 +466,8 @@ async def test_async_mute_volume( assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes await hass.services.async_call( - "media_player", - "volume_mute", + MEDIA_PLAYER_DOMAIN, + SERVICE_VOLUME_MUTE, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_VOLUME_MUTED: TEST_VOLUME_HOME_ASSISTANT_FORMAT, @@ -509,8 +521,8 @@ async def test_async_media_play_pause( assert states.state == BANG_OLUFSEN_STATES[initial_state.value] await hass.services.async_call( - "media_player", - "media_play_pause", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PLAY_PAUSE, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -539,8 +551,8 @@ async def test_async_media_stop( assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] await hass.services.async_call( - "media_player", - "media_stop", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_STOP, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -560,8 
+572,8 @@ async def test_async_media_next_track( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "media_next_track", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_NEXT_TRACK, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -601,8 +613,8 @@ async def test_async_media_seek( # Check results with expected_result: await hass.services.async_call( - "media_player", - "media_seek", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_SEEK, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_SEEK_POSITION: TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, @@ -624,8 +636,8 @@ async def test_async_media_previous_track( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "media_previous_track", + MEDIA_PLAYER_DOMAIN, + SERVICE_MEDIA_PREVIOUS_TRACK, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -644,8 +656,8 @@ async def test_async_clear_playlist( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "clear_playlist", + MEDIA_PLAYER_DOMAIN, + SERVICE_CLEAR_PLAYLIST, {ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID}, blocking=True, ) @@ -680,8 +692,8 @@ async def test_async_select_source( with expected_result: await hass.services.async_call( - "media_player", - "select_source", + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_SOURCE, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_INPUT_SOURCE: source, @@ -705,8 +717,8 @@ async def test_async_play_media_invalid_type( with pytest.raises(ServiceValidationError) as exc_info: await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "test", @@ -734,8 +746,8 @@ async def test_async_play_media_url( await async_setup_component(hass, "media_source", {"media_source": {}}) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", @@ -760,8 +772,8 @@ async def test_async_play_media_overlay_absolute_volume_uri( await async_setup_component(hass, "media_source", {"media_source": {}}) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", @@ -792,8 +804,8 @@ async def test_async_play_media_overlay_invalid_offset_volume_tts( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "Dette er en test", @@ -829,8 +841,8 @@ async def test_async_play_media_overlay_offset_volume_tts( volume_callback(TEST_VOLUME) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "This is a test", @@ -859,8 +871,8 @@ async def test_async_play_media_tts( await async_setup_component(hass, "media_source", {"media_source": {}}) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: 
"media-source://media_source/local/doorbell.mp3", @@ -883,8 +895,8 @@ async def test_async_play_media_radio( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "1234567890123456", @@ -909,8 +921,8 @@ async def test_async_play_media_favourite( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "1", @@ -934,8 +946,8 @@ async def test_async_play_media_deezer_flow( # Send a service call await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "flow", @@ -961,8 +973,8 @@ async def test_async_play_media_deezer_playlist( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "playlist:1234567890", @@ -988,8 +1000,8 @@ async def test_async_play_media_deezer_track( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "1234567890", @@ -1017,8 +1029,8 @@ async def test_async_play_media_invalid_deezer( with pytest.raises(HomeAssistantError) as exc_info: await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "flow", @@ -1054,8 +1066,8 @@ async def test_async_play_media_url_m3u( ), ): await hass.services.async_call( - "media_player", - "play_media", + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, { ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, ATTR_MEDIA_CONTENT_ID: "media-source://media_source/local/doorbell.mp3", From cd3059aa14e5972d829fd24b54bc6fb37777fcf7 Mon Sep 17 00:00:00 2001 From: IceBotYT <34712694+IceBotYT@users.noreply.github.com> Date: Fri, 6 Sep 2024 12:22:59 -0400 Subject: [PATCH 1545/1635] Nice G.O. code quality improvements (#124319) * Bring Nice G.O. 
up to platinum * Switch to listen in coordinator * Tests * Remove parallel updates from coordinator * Unsub from events on config entry unload * Detect WS disconnection * Tests * Fix tests * Set unsub to None after unsubbing * Wait 5 seconds before setting update error to prevent excessive errors * Tweaks * More tweaks * Tweaks part 2 * Potential test for hass stopping * Improve reconnect handling and test on Homeassistant stop event * Move event handler to entry init * Patch const instead of asyncio.sleep --------- Co-authored-by: jbouwh --- homeassistant/components/nice_go/__init__.py | 8 +- .../components/nice_go/coordinator.py | 85 +++++++++-- homeassistant/components/nice_go/event.py | 6 +- .../components/nice_go/manifest.json | 3 +- tests/components/nice_go/test_diagnostics.py | 2 + tests/components/nice_go/test_event.py | 4 +- tests/components/nice_go/test_init.py | 141 ++++++++++++++++-- 7 files changed, 221 insertions(+), 28 deletions(-) diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py index ab3dc06e3c1..b217112c192 100644 --- a/homeassistant/components/nice_go/__init__.py +++ b/homeassistant/components/nice_go/__init__.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform +from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from .coordinator import NiceGOUpdateCoordinator @@ -25,8 +25,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bo """Set up Nice G.O. from a config entry.""" coordinator = NiceGOUpdateCoordinator(hass) + entry.async_on_unload( + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, coordinator.async_ha_stop) + ) await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator entry.async_create_background_task( @@ -35,6 +39,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bo "nice_go_websocket_task", ) + entry.async_on_unload(coordinator.unsubscribe) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py index 323e0a08fe8..d6693db2d8a 100644 --- a/homeassistant/components/nice_go/coordinator.py +++ b/homeassistant/components/nice_go/coordinator.py @@ -3,11 +3,12 @@ from __future__ import annotations import asyncio +from collections.abc import Callable from dataclasses import dataclass from datetime import datetime import json import logging -from typing import Any +from typing import TYPE_CHECKING, Any from nice_go import ( BARRIER_STATUS, @@ -20,7 +21,7 @@ from nice_go import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import issue_registry as ir from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -35,6 +36,9 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +RECONNECT_ATTEMPTS = 3 +RECONNECT_DELAY = 5 + @dataclass class NiceGODevice: @@ -70,7 +74,16 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): self.email = self.config_entry.data[CONF_EMAIL] self.password = 
self.config_entry.data[CONF_PASSWORD] self.api = NiceGOApi() - self.ws_connected = False + self._unsub_connected: Callable[[], None] | None = None + self._unsub_data: Callable[[], None] | None = None + self._unsub_connection_lost: Callable[[], None] | None = None + self.connected = False + self._hass_stopping: bool = hass.is_stopping + + @callback + def async_ha_stop(self, event: Event) -> None: + """Stop reconnecting if hass is stopping.""" + self._hass_stopping = True async def _parse_barrier(self, barrier_state: BarrierState) -> NiceGODevice | None: """Parse barrier data.""" @@ -178,16 +191,30 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): async def client_listen(self) -> None: """Listen to the websocket for updates.""" - self.api.event(self.on_connected) - self.api.event(self.on_data) - try: - await self.api.connect(reconnect=True) - except ApiError: - _LOGGER.exception("API error") + self._unsub_connected = self.api.listen("on_connected", self.on_connected) + self._unsub_data = self.api.listen("on_data", self.on_data) + self._unsub_connection_lost = self.api.listen( + "on_connection_lost", self.on_connection_lost + ) - if not self.hass.is_stopping: - await asyncio.sleep(5) - await self.client_listen() + for _ in range(RECONNECT_ATTEMPTS): + if self._hass_stopping: + return + + try: + await self.api.connect(reconnect=True) + except ApiError: + _LOGGER.exception("API error") + else: + return + + await asyncio.sleep(RECONNECT_DELAY) + + self.async_set_update_error( + TimeoutError( + "Failed to connect to the websocket, reconnect attempts exhausted" + ) + ) async def on_data(self, data: dict[str, Any]) -> None: """Handle incoming data from the websocket.""" @@ -220,4 +247,38 @@ class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): async def on_connected(self) -> None: """Handle the websocket connection.""" _LOGGER.debug("Connected to the websocket") + self.connected = True + await self.api.subscribe(self.organization_id) + + if not self.last_update_success: + self.async_set_updated_data(self.data) + + async def on_connection_lost(self, data: dict[str, Exception]) -> None: + """Handle the websocket connection loss. 
Don't need to do much since the library will automatically reconnect.""" + _LOGGER.debug("Connection lost to the websocket") + self.connected = False + + # Give some time for reconnection + await asyncio.sleep(RECONNECT_DELAY) + if self.connected: + _LOGGER.debug("Reconnected, not setting error") + return + + # There's likely a problem with the connection, and not the server being flaky + self.async_set_update_error(data["exception"]) + + def unsubscribe(self) -> None: + """Unsubscribe from the websocket.""" + if TYPE_CHECKING: + assert self._unsub_connected is not None + assert self._unsub_data is not None + assert self._unsub_connection_lost is not None + + self._unsub_connection_lost() + self._unsub_connected() + self._unsub_data() + self._unsub_connected = None + self._unsub_data = None + self._unsub_connection_lost = None + _LOGGER.debug("Unsubscribed from the websocket") diff --git a/homeassistant/components/nice_go/event.py b/homeassistant/components/nice_go/event.py index a19511b0b11..cd9198bcd26 100644 --- a/homeassistant/components/nice_go/event.py +++ b/homeassistant/components/nice_go/event.py @@ -40,7 +40,11 @@ class NiceGOEventEntity(NiceGOEntity, EventEntity): async def async_added_to_hass(self) -> None: """Listen for events.""" await super().async_added_to_hass() - self.coordinator.api.event(self.on_barrier_obstructed) + self.async_on_remove( + self.coordinator.api.listen( + "on_barrier_obstructed", self.on_barrier_obstructed + ) + ) async def on_barrier_obstructed(self, data: dict[str, Any]) -> None: """Handle barrier obstructed event.""" diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json index 884f2eb7b18..315f23d949d 100644 --- a/homeassistant/components/nice_go/manifest.json +++ b/homeassistant/components/nice_go/manifest.json @@ -4,7 +4,8 @@ "codeowners": ["@IceBotYT"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/nice_go", + "integration_type": "hub", "iot_class": "cloud_push", - "loggers": ["nice-go"], + "loggers": ["nice_go"], "requirements": ["nice-go==0.3.8"] } diff --git a/tests/components/nice_go/test_diagnostics.py b/tests/components/nice_go/test_diagnostics.py index f91f5748792..5c8647f3d6e 100644 --- a/tests/components/nice_go/test_diagnostics.py +++ b/tests/components/nice_go/test_diagnostics.py @@ -2,6 +2,7 @@ from unittest.mock import AsyncMock +import pytest from syrupy import SnapshotAssertion from syrupy.filters import props @@ -14,6 +15,7 @@ from tests.components.diagnostics import get_diagnostics_for_config_entry from tests.typing import ClientSessionGenerator +@pytest.mark.freeze_time("2024-08-27") async def test_entry_diagnostics( hass: HomeAssistant, hass_client: ClientSessionGenerator, diff --git a/tests/components/nice_go/test_event.py b/tests/components/nice_go/test_event.py index 0038b2882ad..1c1b70532f4 100644 --- a/tests/components/nice_go/test_event.py +++ b/tests/components/nice_go/test_event.py @@ -19,10 +19,10 @@ async def test_barrier_obstructed( mock_config_entry: MockConfigEntry, ) -> None: """Test barrier obstructed.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.EVENT]) - await mock_nice_go.event.call_args_list[2][0][0]({"deviceId": "1"}) + await mock_nice_go.listen.call_args_list[3][0][1]({"deviceId": "1"}) await hass.async_block_till_done() event_state = hass.states.get("event.test_garage_1_barrier_obstructed") diff --git 
a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py index 5568a7ea62a..9c9bf28ca7a 100644 --- a/tests/components/nice_go/test_init.py +++ b/tests/components/nice_go/test_init.py @@ -1,7 +1,8 @@ """Test Nice G.O. init.""" +import asyncio from datetime import timedelta -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch from freezegun.api import FrozenDateTimeFactory from nice_go import ApiError, AuthFailedError, Barrier, BarrierState @@ -10,8 +11,8 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.nice_go.const import DOMAIN from homeassistant.config_entries import ConfigEntryState -from homeassistant.const import Platform -from homeassistant.core import HomeAssistant +from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.helpers import issue_registry as ir from . import setup_integration @@ -209,11 +210,11 @@ async def test_on_data_none_parsed( ) -> None: """Test on data with None parsed.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.COVER]) - await mock_nice_go.event.call_args[0][0]( + await mock_nice_go.listen.call_args_list[1][0][1]( { "data": { "devicesStatesUpdateFeed": { @@ -243,18 +244,74 @@ async def test_on_connected( ) -> None: """Test on connected.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert mock_nice_go.event.call_count == 2 + assert mock_nice_go.listen.call_count == 3 mock_nice_go.subscribe = AsyncMock() - await mock_nice_go.event.call_args_list[0][0][0]() + await mock_nice_go.listen.call_args_list[0][0][1]() assert mock_nice_go.subscribe.call_count == 1 +async def test_on_connection_lost( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test on connection lost.""" + + mock_nice_go.listen = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.listen.call_count == 3 + + with patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0): + await mock_nice_go.listen.call_args_list[2][0][1]( + {"exception": ValueError("test")} + ) + + assert hass.states.get("cover.test_garage_1").state == "unavailable" + + # Now fire connected + + mock_nice_go.subscribe = AsyncMock() + + await mock_nice_go.listen.call_args_list[0][0][1]() + + assert mock_nice_go.subscribe.call_count == 1 + + assert hass.states.get("cover.test_garage_1").state == "closed" + + +async def test_on_connection_lost_reconnect( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test on connection lost with reconnect.""" + + mock_nice_go.listen = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.listen.call_count == 3 + + assert hass.states.get("cover.test_garage_1").state == "closed" + + with patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0): + await mock_nice_go.listen.call_args_list[2][0][1]( + {"exception": ValueError("test")} + ) + + assert hass.states.get("cover.test_garage_1").state == "unavailable" + + async def test_no_connection_state( hass: HomeAssistant, mock_nice_go: AsyncMock, @@ -262,13 +319,13 @@ 
async def test_no_connection_state( ) -> None: """Test parsing barrier with no connection state.""" - mock_nice_go.event = MagicMock() + mock_nice_go.listen = MagicMock() await setup_integration(hass, mock_config_entry, [Platform.COVER]) - assert mock_nice_go.event.call_count == 2 + assert mock_nice_go.listen.call_count == 3 - await mock_nice_go.event.call_args[0][0]( + await mock_nice_go.listen.call_args_list[1][0][1]( { "data": { "devicesStatesUpdateFeed": { @@ -286,3 +343,65 @@ async def test_no_connection_state( ) assert hass.states.get("cover.test_garage_1").state == "unavailable" + + +async def test_connection_attempts_exhausted( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test connection attempts exhausted.""" + + mock_nice_go.connect.side_effect = ApiError + + with ( + patch("homeassistant.components.nice_go.coordinator.RECONNECT_ATTEMPTS", 1), + patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0), + ): + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert "API error" in caplog.text + assert "Error requesting Nice G.O. data" in caplog.text + + +async def test_reconnect_hass_stopping( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test reconnect with hass stopping.""" + + mock_nice_go.listen = MagicMock() + mock_nice_go.connect.side_effect = ApiError + + wait_for_hass = asyncio.Event() + + @callback + def _async_ha_stop(event: Event) -> None: + """Stop reconnecting if hass is stopping.""" + wait_for_hass.set() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_ha_stop) + + with ( + patch("homeassistant.components.nice_go.coordinator.RECONNECT_DELAY", 0.1), + patch("homeassistant.components.nice_go.coordinator.RECONNECT_ATTEMPTS", 20), + ): + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + await hass.async_block_till_done() + hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP) + await wait_for_hass.wait() + await hass.async_block_till_done(wait_background_tasks=True) + + assert mock_nice_go.connect.call_count < 10 + + assert len(hass._background_tasks) == 0 + + assert "API error" in caplog.text + assert ( + "Failed to connect to the websocket, reconnect attempts exhausted" + not in caplog.text + ) From b9bd8f6b34e82343ce66e26bb5f583c0f68e0b61 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Fri, 6 Sep 2024 18:30:04 +0200 Subject: [PATCH 1546/1635] Add switch platform to opentherm_gw (#125410) * WIP * * Add switch platform * Add tests for switches * Remove unnecessary block_till_done-s * Test that entities get added in a disabled state separately * Convert to parametrized test * Use fixture to add entities enabled. 
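For reference, a minimal usage sketch (not part of the diff that follows; the entity ID is an assumption, since the actual slug depends on the configured gateway name): once one of the new override switches has been enabled in the entity registry — they are created disabled by default — it is driven through the standard switch services, and turning it on simply maps to set_ch_enable_bit(1) / set_ch2_enable_bit(1) on the gateway, with the entity state following the value the gateway reports back (hence assumed_state).

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN, SERVICE_TURN_ON
from homeassistant.const import ATTR_ENTITY_ID


async def force_central_heating_on(hass):
    """Turn on the 'Force central heating 1 on' override switch (illustrative)."""
    await hass.services.async_call(
        SWITCH_DOMAIN,
        SERVICE_TURN_ON,
        # Assumed entity ID; adjust to the gateway's actual name/slug.
        {ATTR_ENTITY_ID: "switch.opentherm_gateway_force_central_heating_1_on"},
        blocking=True,
    )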
--- .../components/opentherm_gw/__init__.py | 8 +- .../components/opentherm_gw/strings.json | 5 + .../components/opentherm_gw/switch.py | 79 +++++++++++++ tests/components/opentherm_gw/test_switch.py | 111 ++++++++++++++++++ 4 files changed, 202 insertions(+), 1 deletion(-) create mode 100644 homeassistant/components/opentherm_gw/switch.py create mode 100644 tests/components/opentherm_gw/test_switch.py diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index dfce2206df7..c7a52e3d5d3 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -90,7 +90,13 @@ CONFIG_SCHEMA = vol.Schema( extra=vol.ALLOW_EXTRA, ) -PLATFORMS = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CLIMATE, + Platform.SENSOR, + Platform.SWITCH, +] async def options_updated(hass: HomeAssistant, entry: ConfigEntry) -> None: diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index 006ccd1909b..b23e1eb7687 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -309,6 +309,11 @@ "outside_temperature": { "name": "Outside temperature" } + }, + "switch": { + "central_heating_override_n": { + "name": "Force central heating {circuit_number} on" + } } }, "options": { diff --git a/homeassistant/components/opentherm_gw/switch.py b/homeassistant/components/opentherm_gw/switch.py new file mode 100644 index 00000000000..6076634b160 --- /dev/null +++ b/homeassistant/components/opentherm_gw/switch.py @@ -0,0 +1,79 @@ +"""Support for OpenTherm Gateway switches.""" + +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import Any + +from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_ID, EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import OpenThermGatewayHub +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, GATEWAY_DEVICE_DESCRIPTION +from .entity import OpenThermEntity, OpenThermEntityDescription + + +@dataclass(frozen=True, kw_only=True) +class OpenThermSwitchEntityDescription( + OpenThermEntityDescription, SwitchEntityDescription +): + """Describes opentherm_gw switch entity.""" + + turn_off_action: Callable[[OpenThermGatewayHub], Awaitable[int | None]] + turn_on_action: Callable[[OpenThermGatewayHub], Awaitable[int | None]] + + +SWITCH_DESCRIPTIONS: tuple[OpenThermSwitchEntityDescription, ...] 
= ( + OpenThermSwitchEntityDescription( + key="central_heating_1_override", + translation_key="central_heating_override_n", + translation_placeholders={"circuit_number": "1"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + turn_off_action=lambda hub: hub.gateway.set_ch_enable_bit(0), + turn_on_action=lambda hub: hub.gateway.set_ch_enable_bit(1), + ), + OpenThermSwitchEntityDescription( + key="central_heating_2_override", + translation_key="central_heating_override_n", + translation_placeholders={"circuit_number": "2"}, + device_description=GATEWAY_DEVICE_DESCRIPTION, + turn_off_action=lambda hub: hub.gateway.set_ch2_enable_bit(0), + turn_on_action=lambda hub: hub.gateway.set_ch2_enable_bit(1), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the OpenTherm Gateway switches.""" + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + + async_add_entities( + OpenThermSwitch(gw_hub, description) for description in SWITCH_DESCRIPTIONS + ) + + +class OpenThermSwitch(OpenThermEntity, SwitchEntity): + """Represent an OpenTherm Gateway switch.""" + + _attr_assumed_state = True + _attr_entity_category = EntityCategory.CONFIG + _attr_entity_registry_enabled_default = False + entity_description: OpenThermSwitchEntityDescription + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn switch on.""" + value = await self.entity_description.turn_off_action(self._gateway) + self._attr_is_on = bool(value) if value is not None else None + self.async_write_ha_state() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn switch on.""" + value = await self.entity_description.turn_on_action(self._gateway) + self._attr_is_on = bool(value) if value is not None else None + self.async_write_ha_state() diff --git a/tests/components/opentherm_gw/test_switch.py b/tests/components/opentherm_gw/test_switch.py new file mode 100644 index 00000000000..5eb8e906892 --- /dev/null +++ b/tests/components/opentherm_gw/test_switch.py @@ -0,0 +1,111 @@ +"""Test opentherm_gw switches.""" + +from unittest.mock import AsyncMock, MagicMock, call + +import pytest + +from homeassistant.components.opentherm_gw import DOMAIN as OPENTHERM_DOMAIN +from homeassistant.components.opentherm_gw.const import OpenThermDeviceIdentifier +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + CONF_ID, + STATE_OFF, + STATE_ON, + STATE_UNKNOWN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + "entity_key", ["central_heating_1_override", "central_heating_2_override"] +) +async def test_switch_added_disabled( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, + entity_key: str, +) -> None: + """Test switch gets added in disabled state.""" + + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + switch_entity_id := entity_registry.async_get_entity_id( + SWITCH_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", + ) + ) is not None + + assert (entity_entry := entity_registry.async_get(switch_entity_id)) is 
not None + assert entity_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize( + ("entity_key", "target_func"), + [ + ("central_heating_1_override", "set_ch_enable_bit"), + ("central_heating_2_override", "set_ch2_enable_bit"), + ], +) +async def test_ch_override_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, + entity_key: str, + target_func: str, +) -> None: + """Test central heating override switch.""" + + setattr(mock_pyotgw.return_value, target_func, AsyncMock(side_effect=[0, 1])) + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert ( + switch_entity_id := entity_registry.async_get_entity_id( + SWITCH_DOMAIN, + OPENTHERM_DOMAIN, + f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.GATEWAY}-{entity_key}", + ) + ) is not None + assert hass.states.get(switch_entity_id).state == STATE_UNKNOWN + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: switch_entity_id, + }, + blocking=True, + ) + assert hass.states.get(switch_entity_id).state == STATE_OFF + + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: switch_entity_id, + }, + blocking=True, + ) + assert hass.states.get(switch_entity_id).state == STATE_ON + + mock_func = getattr(mock_pyotgw.return_value, target_func) + assert mock_func.await_count == 2 + mock_func.assert_has_awaits([call(0), call(1)]) From 457e66527a7bd8f5936ddff88ceaeb1869a3cfc6 Mon Sep 17 00:00:00 2001 From: Hessel Date: Fri, 6 Sep 2024 20:40:47 +0200 Subject: [PATCH 1547/1635] Add model ID to WallboxEntity (#125434) * chore: Update WallboxEntity model ID to use CHARGER_PART_NUMBER_KEY The WallboxEntity model ID is updated to use the CHARGER_PART_NUMBER_KEY value from the coordinator data. This change ensures consistency and accuracy in identifying the model of the Wallbox entity. * Update WallboxEntity model ID to use CHARGER_PART_NUMBER_KEY * chore: Update WallboxEntity model ID to use CHARGER_PART_NUMBER_KEY * remove obsolete key from test --- homeassistant/components/wallbox/entity.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/wallbox/entity.py b/homeassistant/components/wallbox/entity.py index 489e81ed6b0..3fe1865af4a 100644 --- a/homeassistant/components/wallbox/entity.py +++ b/homeassistant/components/wallbox/entity.py @@ -34,7 +34,8 @@ class WallboxEntity(CoordinatorEntity[WallboxCoordinator]): }, name=f"Wallbox {self.coordinator.data[CHARGER_NAME_KEY]}", manufacturer="Wallbox", - model=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_PART_NUMBER_KEY], + model=self.coordinator.data[CHARGER_NAME_KEY].split(" SN")[0], + model_id=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_PART_NUMBER_KEY], sw_version=self.coordinator.data[CHARGER_DATA_KEY][CHARGER_SOFTWARE_KEY][ CHARGER_CURRENT_VERSION_KEY ], From ce28d8a92c3bf14be6e9447d93aa40b8716c30ba Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Fri, 6 Sep 2024 19:35:57 -0500 Subject: [PATCH 1548/1635] Bump yalexs to 8.6.4 (#125442) adds a debounce to the updates to ensure we do not request the activities api too often if the websocket sends rapid updates fixes #125277 --- homeassistant/components/august/manifest.json | 2 +- homeassistant/components/yale/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index 6635a95f1cf..e2c35fc155f 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -24,5 +24,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.6.3", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.4", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json index fc93d259891..8b8095a0863 100644 --- a/homeassistant/components/yale/manifest.json +++ b/homeassistant/components/yale/manifest.json @@ -13,5 +13,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale", "iot_class": "cloud_push", "loggers": ["socketio", "engineio", "yalexs"], - "requirements": ["yalexs==8.6.3", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.4", "yalexs-ble==2.4.3"] } diff --git a/requirements_all.txt b/requirements_all.txt index be2ddf6a97c..ee9b9b4bedf 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3003,7 +3003,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.6.3 +yalexs==8.6.4 # homeassistant.components.yeelight yeelight==0.7.14 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index b4472ca9144..abe84df9270 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2383,7 +2383,7 @@ yalexs-ble==2.4.3 # homeassistant.components.august # homeassistant.components.yale -yalexs==8.6.3 +yalexs==8.6.4 # homeassistant.components.yeelight yeelight==0.7.14 From b8c3a44d81174b4ae1f03628bb44ecec77d580bf Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 7 Sep 2024 00:36:34 -0500 Subject: [PATCH 1549/1635] Bump yarl to 1.10.0 (#125446) changelog: https://github.com/aio-libs/yarl/compare/v1.9.11...v1.10.0 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index c8fc265cee8..0a4ead9e5b1 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.11 +yarl==1.10.0 zeroconf==0.133.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index a8c43ada99f..358abd934be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.11", + "yarl==1.10.0", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index 8d5c01b5c27..ba7b89bd9e9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.11 +yarl==1.10.0 From cbd884d54a04bd73563de4501295e262c2ec287c Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Sat, 7 Sep 2024 07:55:08 +0200 Subject: [PATCH 1550/1635] Add discovery schemas for Matter 1.3 power/energy sensors (#125403) * Add missing discovery schemas for (Matter 1.3) Power/Energy measurements * Prevent discovery of custom cluster if 1.3 cluster present * add tests * Use f-strings --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/matter/sensor.py | 83 ++++ .../nodes/eve-energy-plug-patched.json | 382 ++++++++++++++++++ tests/components/matter/test_sensor.py | 81 +++- 3 files changed, 540 insertions(+), 6 deletions(-) create mode 100644 tests/components/matter/fixtures/nodes/eve-energy-plug-patched.json diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index 5d4ad900d8e..dd8467e24c9 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -175,6 +175,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.Watt,), + absent_attributes=(clusters.ElectricalPowerMeasurement.Attributes.ActivePower,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -188,6 +189,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.Voltage,), + absent_attributes=(clusters.ElectricalPowerMeasurement.Attributes.Voltage,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -201,6 +203,9 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.WattAccumulated,), + absent_attributes=( + clusters.ElectricalEnergyMeasurement.Attributes.CumulativeEnergyImported, + ), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -214,6 +219,9 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(EveCluster.Attributes.Current,), + absent_attributes=( + clusters.ElectricalPowerMeasurement.Attributes.ActiveCurrent, + ), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -377,6 +385,7 @@ DISCOVERY_SCHEMAS = [ required_attributes=( ThirdRealityMeteringCluster.Attributes.InstantaneousDemand, ), + 
absent_attributes=(clusters.ElectricalPowerMeasurement.Attributes.ActivePower,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -393,6 +402,9 @@ DISCOVERY_SCHEMAS = [ required_attributes=( ThirdRealityMeteringCluster.Attributes.CurrentSummationDelivered, ), + absent_attributes=( + clusters.ElectricalEnergyMeasurement.Attributes.CumulativeEnergyImported, + ), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -407,6 +419,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.Watt,), + absent_attributes=(clusters.ElectricalPowerMeasurement.Attributes.ActivePower,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -420,6 +433,9 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.WattAccumulated,), + absent_attributes=( + clusters.ElectricalEnergyMeasurement.Attributes.CumulativeEnergyImported, + ), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -434,6 +450,7 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.Voltage,), + absent_attributes=(clusters.ElectricalPowerMeasurement.Attributes.Voltage,), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -447,6 +464,9 @@ DISCOVERY_SCHEMAS = [ ), entity_class=MatterSensor, required_attributes=(NeoCluster.Attributes.Current,), + absent_attributes=( + clusters.ElectricalPowerMeasurement.Attributes.ActiveCurrent, + ), ), MatterDiscoverySchema( platform=Platform.SENSOR, @@ -462,4 +482,67 @@ DISCOVERY_SCHEMAS = [ required_attributes=(clusters.Switch.Attributes.CurrentPosition,), allow_multi=True, # also used for event entity ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalPowerMeasurementWatt", + device_class=SensorDeviceClass.POWER, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfPower.WATT, + suggested_display_precision=2, + state_class=SensorStateClass.MEASUREMENT, + measurement_to_ha=lambda x: x / 1000, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.ElectricalPowerMeasurement.Attributes.ActivePower, + ), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalPowerMeasurementVoltage", + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + suggested_display_precision=0, + state_class=SensorStateClass.MEASUREMENT, + measurement_to_ha=lambda x: x / 1000, + ), + entity_class=MatterSensor, + required_attributes=(clusters.ElectricalPowerMeasurement.Attributes.Voltage,), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalPowerMeasurementActiveCurrent", + device_class=SensorDeviceClass.CURRENT, + entity_category=EntityCategory.DIAGNOSTIC, + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + suggested_display_precision=2, + state_class=SensorStateClass.MEASUREMENT, + measurement_to_ha=lambda x: x / 1000, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.ElectricalPowerMeasurement.Attributes.ActiveCurrent, + ), + ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="ElectricalEnergyMeasurementCumulativeEnergyImported", + device_class=SensorDeviceClass.ENERGY, + entity_category=EntityCategory.DIAGNOSTIC, + 
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=3, + state_class=SensorStateClass.TOTAL_INCREASING, + # id 0 of the EnergyMeasurementStruct is the cumulative energy (in mWh) + measurement_to_ha=lambda x: x.energy / 1000000, + ), + entity_class=MatterSensor, + required_attributes=( + clusters.ElectricalEnergyMeasurement.Attributes.CumulativeEnergyImported, + ), + ), ] diff --git a/tests/components/matter/fixtures/nodes/eve-energy-plug-patched.json b/tests/components/matter/fixtures/nodes/eve-energy-plug-patched.json new file mode 100644 index 00000000000..6b449643e8e --- /dev/null +++ b/tests/components/matter/fixtures/nodes/eve-energy-plug-patched.json @@ -0,0 +1,382 @@ +{ + "node_id": 183, + "date_commissioned": "2023-11-30T14:39:37.020026", + "last_interview": "2023-11-30T14:39:37.020029", + "interview_version": 5, + "available": true, + "is_bridge": false, + "attributes": { + "0/29/0": [ + { + "0": 22, + "1": 1 + } + ], + "0/29/1": [29, 31, 40, 42, 48, 49, 51, 53, 60, 62, 63], + "0/29/2": [41], + "0/29/3": [1], + "0/29/65532": 0, + "0/29/65533": 1, + "0/29/65528": [], + "0/29/65529": [], + "0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/31/0": [ + { + "254": 1 + }, + { + "254": 2 + }, + { + "1": 5, + "2": 2, + "3": [112233], + "4": null, + "254": 5 + } + ], + "0/31/1": [], + "0/31/2": 4, + "0/31/3": 3, + "0/31/4": 3, + "0/31/65532": 0, + "0/31/65533": 1, + "0/31/65528": [], + "0/31/65529": [], + "0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/40/0": 1, + "0/40/1": "Eve Systems", + "0/40/2": 4874, + "0/40/3": "Eve Energy Plug Patched", + "0/40/4": 80, + "0/40/5": "", + "0/40/6": "XX", + "0/40/7": 1, + "0/40/8": "1.3", + "0/40/9": 6650, + "0/40/10": "3.2.1", + "0/40/15": "RV44L221A00081", + "0/40/18": "26E822F90561D17C42", + "0/40/19": { + "0": 3, + "1": 3 + }, + "0/40/65532": 0, + "0/40/65533": 1, + "0/40/65528": [], + "0/40/65529": [], + "0/40/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 18, 19, 65528, 65529, 65531, 65532, + 65533 + ], + "0/42/0": [ + { + "1": 2312386028615903905, + "2": 0, + "254": 1 + } + ], + "0/42/1": true, + "0/42/2": 1, + "0/42/3": null, + "0/42/65532": 0, + "0/42/65533": 1, + "0/42/65528": [], + "0/42/65529": [0], + "0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "0/48/0": 0, + "0/48/1": { + "0": 60, + "1": 900 + }, + "0/48/2": 0, + "0/48/3": 0, + "0/48/4": true, + "0/48/65532": 0, + "0/48/65533": 1, + "0/48/65528": [1, 3, 5], + "0/48/65529": [0, 2, 4], + "0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533], + "0/49/0": 1, + "0/49/1": [ + { + "0": "cfUKbvsdfsBjT+0=", + "1": true + } + ], + "0/49/2": 10, + "0/49/3": 20, + "0/49/4": true, + "0/49/5": 0, + "0/49/6": "cfUKbvBjdsffwT+0=", + "0/49/7": null, + "0/49/65532": 2, + "0/49/65533": 1, + "0/49/65528": [1, 5, 7], + "0/49/65529": [0, 3, 4, 6, 8], + "0/49/65531": [0, 1, 2, 3, 4, 5, 6, 7, 65528, 65529, 65531, 65532, 65533], + "0/51/0": [], + "0/51/1": 95, + "0/51/2": 268574, + "0/51/3": 4406, + "0/51/5": [], + "0/51/6": [], + "0/51/7": [], + "0/51/8": false, + "0/51/65532": 0, + "0/51/65533": 1, + "0/51/65528": [], + "0/51/65529": [0], + "0/51/65531": [0, 1, 2, 3, 5, 6, 7, 8, 65528, 65529, 65531, 65532, 65533], + "0/53/0": 25, + "0/53/1": 5, + "0/53/2": "MyHome23", + "0/53/3": 14707, + "0/53/4": 8211480967175688173, + "0/53/5": "aabbccdd", + "0/53/6": 0, + "0/53/7": [], + "0/53/8": [], + "0/53/9": 1828774034, + "0/53/10": 68, + "0/53/11": 237, + "0/53/12": 170, + "0/53/13": 23, + "0/53/14": 2, + 
"0/53/15": 1, + "0/53/16": 2, + "0/53/17": 0, + "0/53/18": 0, + "0/53/19": 2, + "0/53/20": 0, + "0/53/21": 0, + "0/53/22": 293884, + "0/53/23": 278934, + "0/53/24": 14950, + "0/53/25": 278894, + "0/53/26": 278468, + "0/53/27": 14990, + "0/53/28": 293844, + "0/53/29": 0, + "0/53/30": 40, + "0/53/31": 0, + "0/53/32": 0, + "0/53/33": 65244, + "0/53/34": 426, + "0/53/35": 0, + "0/53/36": 87, + "0/53/37": 0, + "0/53/38": 0, + "0/53/39": 6687540, + "0/53/40": 142626, + "0/53/41": 106835, + "0/53/42": 246171, + "0/53/43": 0, + "0/53/44": 541, + "0/53/45": 40, + "0/53/46": 0, + "0/53/47": 0, + "0/53/48": 6360718, + "0/53/49": 2141, + "0/53/50": 35259, + "0/53/51": 4374, + "0/53/52": 0, + "0/53/53": 568, + "0/53/54": 18599, + "0/53/55": 19143, + "0/53/59": { + "0": 672, + "1": 8335 + }, + "0/53/60": "AB//wA==", + "0/53/61": { + "0": true, + "1": false, + "2": true, + "3": true, + "4": true, + "5": true, + "6": false, + "7": true, + "8": true, + "9": true, + "10": true, + "11": true + }, + "0/53/62": [0, 0, 0, 0], + "0/53/65532": 15, + "0/53/65533": 1, + "0/53/65528": [], + "0/53/65529": [0], + "0/53/65531": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 59, + 60, 61, 62, 65528, 65529, 65531, 65532, 65533 + ], + "0/60/0": 0, + "0/60/1": null, + "0/60/2": null, + "0/60/65532": 0, + "0/60/65533": 1, + "0/60/65528": [], + "0/60/65529": [0, 1, 2], + "0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533], + "0/62/0": [], + "0/62/1": [], + "0/62/2": 5, + "0/62/3": 3, + "0/62/4": [ + "FTABAQAkAgE3AycUxofpv3kE1HwkFQEYJgS2Ty8rJgU2gxAtNwYnFMaH6b95BNR8JBUBGCQHASQIATAJQQSG0eCLvAjSHcSkZEo029SymN58wmxVcA645EXuFg6KwojGRyZsqWVtuMAYAB8TaPA9NEFsNvZZbvBR9XjrZhyKNwo1ASkBGCQCYDAEFNnFRJ+9qQIJtsM+LRdMdmCY3bQ4MAUU2cVEn72pAgm2wz4tF0x2YJjdtDgYMAtAFDv6Ouh7ugAGLiCjBQaEXCIAe0AkaaN8dBPskCZXOODjuZ1DCr4/f5IYg0rN2zFDUDTvG3GCxoI1+A7BvSjiNRg=", + "FTABAQAkAgE3AycUjuqR8vTQCmEkFQIYJgTFTy8rJgVFgxAtNwYnFI7qkfL00AphJBUCGCQHASQIATAJQQS5ZOLouMEkPsc/PYweZwUUFFWHWPR9nQVGsBl1VMWtm7CodpPAh4o79bZM9XU4T1wPVCvIzgGfuzIvsuwT7gHINwo1ASkBGCQCYDAEFKEEplpzAvCzsc5ga6CFmqmsv5onMAUUoQSmWnMC8LOxzmBroIWaqay/micYMAtAYkkA8OZFIGpxBEYYT+3A7Okba4WOq4NtwctIIZvCM48VU8pxQNjVvHMcJWPOP1Wh2Bw1VH7/Sg9lt9DL4DAwjBg=", + "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEECDlp5HtG4UpmG6QLEwaCUJ3TR0qWHEarwFuN7JkKUrPmQ3Zi3Nq/TFayJYQRvez268whgWhBhQudIm84xNwPXjcKNQEpARgkAmAwBBTJ3+WZAQkWgZboUpiyZL3FV8R8UzAFFMnf5ZkBCRaBluhSmLJkvcVXxHxTGDALQO9QSAdvJkM6b/wIc07MCw1ma46lTyGYG8nvpn0ICI73nuD3QeaWwGIQTkVGEpzF+TuDK7gtTz7YUrR+PSnvMk8Y" + ], + "0/62/5": 5, + "0/62/65532": 0, + "0/62/65533": 1, + "0/62/65528": [1, 3, 5, 8], + "0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11], + "0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533], + "0/63/0": [], + "0/63/1": [], + "0/63/2": 3, + "0/63/3": 3, + "0/63/65532": 0, + "0/63/65533": 1, + "0/63/65528": [2, 5], + "0/63/65529": [0, 1, 3, 4], + "0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/3/0": 0, + "1/3/1": 2, + "1/3/65532": 0, + "1/3/65533": 4, + "1/3/65528": [], + "1/3/65529": [0], + "1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533], + "1/4/0": 128, + "1/4/65532": 1, + "1/4/65533": 4, + "1/4/65528": [0, 1, 2, 3], + "1/4/65529": [0, 1, 2, 3, 4, 5], + "1/4/65531": [0, 65528, 65529, 65531, 65532, 65533], + "1/6/0": false, + "1/6/16384": true, + "1/6/16385": 0, + "1/6/16386": 0, + "1/6/16387": null, + "1/6/65532": 1, + "1/6/65533": 4, + 
"1/6/65528": [], + "1/6/65529": [0, 1, 2, 64, 65, 66], + "1/6/65531": [ + 0, 16384, 16385, 16386, 16387, 65528, 65529, 65531, 65532, 65533 + ], + "1/29/0": [ + { + "0": 266, + "1": 1 + } + ], + "1/29/1": [3, 4, 6, 29, 319486977], + "1/29/2": [], + "1/29/3": [], + "1/29/65532": 0, + "1/29/65533": 1, + "1/29/65528": [], + "1/29/65529": [], + "1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533], + "1/319486977/319422464": "AAFQCwIAAAMC+xkEDFJWNDRMMUEwMDA4MZwBAP8EAQIA1PkBAWABZNAEAAAAAEUFBQAAAABGCQUAAAAOAABCBkkGBQwIEIABRBEFFAAFAzwAAAAAAAAAAAAAAEcRBSoh/CGWImgjeAAAADwAAABIBgUAAAAAAEoGBQAAAAAA", + "1/319486977/319422466": "BEZiAQAAAAAAAAAABgsCDAINAgcCDgEBAn4PABAAWgAAs8c+AQEA", + "1/319486977/319422467": "EgtaAAB74T4BDwAANwkAAAAA", + "1/319486977/319422471": 0, + "1/319486977/319422472": 238.8000030517578, + "1/319486977/319422473": 0.0, + "1/319486977/319422474": 0.0, + "1/319486977/319422475": 0.2200000286102295, + "1/319486977/319422476": 0, + "1/319486977/319422478": 0, + "1/319486977/319422481": false, + "1/319486977/319422482": 54272, + "1/319486977/65533": 1, + "1/319486977/65528": [], + "1/319486977/65529": [], + "1/319486977/65531": [ + 65528, 65529, 65531, 319422464, 319422465, 319422466, 319422467, + 319422468, 319422469, 319422471, 319422472, 319422473, 319422474, + 319422475, 319422476, 319422478, 319422481, 319422482, 65533 + ], + "1/144/0": 2, + "1/144/1": 3, + "1/144/2": [ + { + "0": 1, + "1": true, + "2": 0, + "3": 100, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + }, + { + "0": 2, + "1": true, + "2": 0, + "3": 100, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + }, + { + "0": 5, + "1": true, + "2": 0, + "3": 100, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + } + ], + "1/144/4": 220000, + "1/144/5": 2000, + "1/144/8": 550000, + "1/144/65533": 1, + "1/144/65532": 2, + "1/144/65531": [0, 1, 2, 4, 5, 8, 65528, 65529, 65530, 65531, 65532, 65533], + "1/144/65530": [], + "1/144/65529": [], + "1/144/65528": [], + "1/145/0": { + "0": 14, + "1": true, + "2": 0, + "3": 0, + "4": [ + { + "0": 0, + "1": 4611686018427387904 + } + ] + }, + "1/145/65533": 1, + "1/145/65532": 7, + "1/145/65531": [0, 1, 2, 65528, 65529, 65530, 65531, 65532, 65533], + "1/145/65530": [0], + "1/145/65529": [], + "1/145/65528": [], + "1/145/1": { + "0": 2500 + }, + "1/145/2": null + }, + "attribute_subscriptions": [], + "last_subscription_attempt": 0 +} diff --git a/tests/components/matter/test_sensor.py b/tests/components/matter/test_sensor.py index 2c9bfae94ce..17cff38787c 100644 --- a/tests/components/matter/test_sensor.py +++ b/tests/components/matter/test_sensor.py @@ -74,6 +74,16 @@ async def eve_energy_plug_node_fixture( ) +@pytest.fixture(name="eve_energy_plug_patched_node") +async def eve_energy_plug_patched_node_fixture( + hass: HomeAssistant, matter_client: MagicMock +) -> MatterNode: + """Fixture for a Eve Energy Plug node (patched to include Matter 1.3 energy clusters).""" + return await setup_integration_with_node_fixture( + hass, "eve-energy-plug-patched", matter_client + ) + + @pytest.fixture(name="air_quality_sensor_node") async def air_quality_sensor_node_fixture( hass: HomeAssistant, matter_client: MagicMock @@ -243,14 +253,14 @@ async def test_battery_sensor( # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_eve_energy_sensors( +async def test_energy_sensors_custom_cluster( hass: HomeAssistant, entity_registry: er.EntityRegistry, matter_client: MagicMock, 
eve_energy_plug_node: MatterNode, ) -> None: - """Test Energy sensors created from Eve Energy custom cluster.""" - # power sensor + """Test Energy sensors created from (Eve) custom cluster (Matter 1.3 energy clusters absent).""" + # power sensor on Eve custom cluster entity_id = "sensor.eve_energy_plug_power" state = hass.states.get(entity_id) assert state @@ -259,7 +269,7 @@ async def test_eve_energy_sensors( assert state.attributes["device_class"] == "power" assert state.attributes["friendly_name"] == "Eve Energy Plug Power" - # voltage sensor + # voltage sensor on Eve custom cluster entity_id = "sensor.eve_energy_plug_voltage" state = hass.states.get(entity_id) assert state @@ -268,7 +278,7 @@ async def test_eve_energy_sensors( assert state.attributes["device_class"] == "voltage" assert state.attributes["friendly_name"] == "Eve Energy Plug Voltage" - # energy sensor + # energy sensor on Eve custom cluster entity_id = "sensor.eve_energy_plug_energy" state = hass.states.get(entity_id) assert state @@ -278,7 +288,7 @@ async def test_eve_energy_sensors( assert state.attributes["friendly_name"] == "Eve Energy Plug Energy" assert state.attributes["state_class"] == "total_increasing" - # current sensor + # current sensor on Eve custom cluster entity_id = "sensor.eve_energy_plug_current" state = hass.states.get(entity_id) assert state @@ -288,6 +298,65 @@ async def test_eve_energy_sensors( assert state.attributes["friendly_name"] == "Eve Energy Plug Current" +# This tests needs to be adjusted to remove lingering tasks +@pytest.mark.parametrize("expected_lingering_tasks", [True]) +async def test_energy_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + matter_client: MagicMock, + eve_energy_plug_patched_node: MatterNode, +) -> None: + """Test Energy sensors created from official Matter 1.3 energy clusters.""" + # power sensor on Matter 1.3 ElectricalPowermeasurement cluster + entity_id = "sensor.eve_energy_plug_patched_power" + state = hass.states.get(entity_id) + assert state + assert state.state == "550.0" + assert state.attributes["unit_of_measurement"] == "W" + assert state.attributes["device_class"] == "power" + assert state.attributes["friendly_name"] == "Eve Energy Plug Patched Power" + # ensure we do not have a duplicated entity from the custom cluster + state = hass.states.get(f"{entity_id}_1") + assert state is None + + # voltage sensor on Matter 1.3 ElectricalPowermeasurement cluster + entity_id = "sensor.eve_energy_plug_patched_voltage" + state = hass.states.get(entity_id) + assert state + assert state.state == "220.0" + assert state.attributes["unit_of_measurement"] == "V" + assert state.attributes["device_class"] == "voltage" + assert state.attributes["friendly_name"] == "Eve Energy Plug Patched Voltage" + # ensure we do not have a duplicated entity from the custom cluster + state = hass.states.get(f"{entity_id}_1") + assert state is None + + # energy sensor on Matter 1.3 ElectricalEnergymeasurement cluster + entity_id = "sensor.eve_energy_plug_patched_energy" + state = hass.states.get(entity_id) + assert state + assert state.state == "0.0025" + assert state.attributes["unit_of_measurement"] == "kWh" + assert state.attributes["device_class"] == "energy" + assert state.attributes["friendly_name"] == "Eve Energy Plug Patched Energy" + assert state.attributes["state_class"] == "total_increasing" + # ensure we do not have a duplicated entity from the custom cluster + state = hass.states.get(f"{entity_id}_1") + assert state is None + + # current sensor on Matter 1.3 
ElectricalPowermeasurement cluster + entity_id = "sensor.eve_energy_plug_patched_current" + state = hass.states.get(entity_id) + assert state + assert state.state == "2.0" + assert state.attributes["unit_of_measurement"] == "A" + assert state.attributes["device_class"] == "current" + assert state.attributes["friendly_name"] == "Eve Energy Plug Patched Current" + # ensure we do not have a duplicated entity from the custom cluster + state = hass.states.get(f"{entity_id}_1") + assert state is None + + # This tests needs to be adjusted to remove lingering tasks @pytest.mark.parametrize("expected_lingering_tasks", [True]) async def test_air_quality_sensor( From 17994ff245cc32dd27a8455e49cf530feb5383b6 Mon Sep 17 00:00:00 2001 From: tronikos Date: Sat, 7 Sep 2024 01:47:27 -0700 Subject: [PATCH 1551/1635] Request one data point in statistics_during_period in Opower (#124480) --- homeassistant/components/opower/coordinator.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index 1e00243f657..cd2e28ed638 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -128,19 +128,22 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): if not cost_reads: _LOGGER.debug("No recent usage/cost data. Skipping update") continue + start = cost_reads[0].start_time + _LOGGER.debug("Getting statistics at: %s", start) stats = await get_instance(self.hass).async_add_executor_job( statistics_during_period, self.hass, - cost_reads[0].start_time, - None, + start, + start + timedelta(seconds=1), {cost_statistic_id, consumption_statistic_id}, - "hour" if account.meter_type == MeterType.ELEC else "day", + "hour", None, {"sum"}, ) cost_sum = cast(float, stats[cost_statistic_id][0]["sum"]) consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"]) last_stats_time = stats[consumption_statistic_id][0]["start"] + assert last_stats_time == start.timestamp() cost_statistics = [] consumption_statistics = [] From 3e703422655ed227e73b76aec3d4c68d1fddaa11 Mon Sep 17 00:00:00 2001 From: tmenguy Date: Sat, 7 Sep 2024 12:38:59 +0200 Subject: [PATCH 1552/1635] Fix renault plug state (#125421) * Added PlugState 3, that is coming with renault-api 0.2.7, it fixes #124682 HA ticket * Added PlugState 3, that is coming with renault-api 0.2.7, it fixes #124682 HA ticket --- .../components/renault/binary_sensor.py | 16 +++++++++---- homeassistant/components/renault/sensor.py | 8 ++++++- homeassistant/components/renault/strings.json | 1 + tests/components/renault/const.py | 24 ++++++++++++++++--- .../renault/snapshots/test_sensor.ambr | 12 ++++++++++ 5 files changed, 52 insertions(+), 9 deletions(-) diff --git a/homeassistant/components/renault/binary_sensor.py b/homeassistant/components/renault/binary_sensor.py index 2041499b711..98c298761ce 100644 --- a/homeassistant/components/renault/binary_sensor.py +++ b/homeassistant/components/renault/binary_sensor.py @@ -28,7 +28,7 @@ class RenaultBinarySensorEntityDescription( """Class describing Renault binary sensor entities.""" on_key: str - on_value: StateType + on_value: StateType | list[StateType] async def async_setup_entry( @@ -58,6 +58,9 @@ class RenaultBinarySensor( """Return true if the binary sensor is on.""" if (data := self._get_data_attr(self.entity_description.on_key)) is None: return None + + if isinstance(self.entity_description.on_value, list): + return data in 
self.entity_description.on_value return data == self.entity_description.on_value @@ -68,7 +71,10 @@ BINARY_SENSOR_TYPES: tuple[RenaultBinarySensorEntityDescription, ...] = tuple( coordinator="battery", device_class=BinarySensorDeviceClass.PLUG, on_key="plugStatus", - on_value=PlugState.PLUGGED.value, + on_value=[ + PlugState.PLUGGED.value, + PlugState.PLUGGED_WAITING_FOR_CHARGE.value, + ], ), RenaultBinarySensorEntityDescription( key="charging", @@ -104,13 +110,13 @@ BINARY_SENSOR_TYPES: tuple[RenaultBinarySensorEntityDescription, ...] = tuple( ] + [ RenaultBinarySensorEntityDescription( - key=f"{door.replace(' ','_').lower()}_door_status", + key=f"{door.replace(' ', '_').lower()}_door_status", coordinator="lock_status", # On means open, Off means closed device_class=BinarySensorDeviceClass.DOOR, - on_key=f"doorStatus{door.replace(' ','')}", + on_key=f"doorStatus{door.replace(' ', '')}", on_value="open", - translation_key=f"{door.lower().replace(' ','_')}_door_status", + translation_key=f"{door.lower().replace(' ', '_')}_door_status", ) for door in ("Rear Left", "Rear Right", "Driver", "Passenger") ], diff --git a/homeassistant/components/renault/sensor.py b/homeassistant/components/renault/sensor.py index 5cb4ee333cc..78e64ae9acc 100644 --- a/homeassistant/components/renault/sensor.py +++ b/homeassistant/components/renault/sensor.py @@ -197,7 +197,13 @@ SENSOR_TYPES: tuple[RenaultSensorEntityDescription[Any], ...] = ( translation_key="plug_state", device_class=SensorDeviceClass.ENUM, entity_class=RenaultSensor[KamereonVehicleBatteryStatusData], - options=["unplugged", "plugged", "plug_error", "plug_unknown"], + options=[ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], value_lambda=_get_plug_state_formatted, ), RenaultSensorEntityDescription( diff --git a/homeassistant/components/renault/strings.json b/homeassistant/components/renault/strings.json index 5217b4ff65a..54864387869 100644 --- a/homeassistant/components/renault/strings.json +++ b/homeassistant/components/renault/strings.json @@ -141,6 +141,7 @@ "state": { "unplugged": "Unplugged", "plugged": "Plugged in", + "plugged_waiting_for_charge": "Plugged in, waiting for charge", "plug_error": "Plug error", "plug_unknown": "Plug unknown" } diff --git a/tests/components/renault/const.py b/tests/components/renault/const.py index 2d0263e40de..c552321ef97 100644 --- a/tests/components/renault/const.py +++ b/tests/components/renault/const.py @@ -246,7 +246,13 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug", - ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], + ATTR_OPTIONS: [ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], ATTR_STATE: "plugged", ATTR_UNIQUE_ID: "vf1aaaaa555777999_plug_state", }, @@ -487,7 +493,13 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug-off", - ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", "plug_unknown"], + ATTR_OPTIONS: [ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], ATTR_STATE: "unplugged", ATTR_UNIQUE_ID: "vf1aaaaa555777999_plug_state", }, @@ -725,7 +737,13 @@ MOCK_VEHICLES = { ATTR_DEVICE_CLASS: SensorDeviceClass.ENUM, ATTR_ENTITY_ID: "sensor.reg_number_plug_state", ATTR_ICON: "mdi:power-plug", - ATTR_OPTIONS: ["unplugged", "plugged", "plug_error", 
"plug_unknown"], + ATTR_OPTIONS: [ + "unplugged", + "plugged", + "plugged_waiting_for_charge", + "plug_error", + "plug_unknown", + ], ATTR_STATE: "plugged", ATTR_UNIQUE_ID: "vf1aaaaa555777123_plug_state", }, diff --git a/tests/components/renault/snapshots/test_sensor.ambr b/tests/components/renault/snapshots/test_sensor.ambr index 80e73347b07..b092222c9f3 100644 --- a/tests/components/renault/snapshots/test_sensor.ambr +++ b/tests/components/renault/snapshots/test_sensor.ambr @@ -494,6 +494,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -921,6 +922,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1249,6 +1251,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -1674,6 +1677,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -2000,6 +2004,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -2456,6 +2461,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3104,6 +3110,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3531,6 +3538,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -3859,6 +3867,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -4284,6 +4293,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -4610,6 +4620,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), @@ -5066,6 +5077,7 @@ 'options': list([ 'unplugged', 'plugged', + 'plugged_waiting_for_charge', 'plug_error', 'plug_unknown', ]), From 066503b838eb4f4b379b439564343771bef039a4 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Sat, 7 Sep 2024 13:18:54 +0200 Subject: [PATCH 1553/1635] Fix docstrings in opentherm_gw (#125456) --- homeassistant/components/opentherm_gw/switch.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/opentherm_gw/switch.py b/homeassistant/components/opentherm_gw/switch.py index 6076634b160..41ffa03a932 100644 --- a/homeassistant/components/opentherm_gw/switch.py +++ b/homeassistant/components/opentherm_gw/switch.py @@ -19,7 +19,7 @@ from .entity import OpenThermEntity, OpenThermEntityDescription class OpenThermSwitchEntityDescription( OpenThermEntityDescription, SwitchEntityDescription ): - """Describes opentherm_gw switch entity.""" + """Describes an opentherm_gw switch entity.""" turn_off_action: Callable[[OpenThermGatewayHub], Awaitable[int | None]] turn_on_action: Callable[[OpenThermGatewayHub], Awaitable[int | None]] @@ -67,13 +67,13 @@ class OpenThermSwitch(OpenThermEntity, SwitchEntity): entity_description: OpenThermSwitchEntityDescription async def async_turn_off(self, **kwargs: Any) -> None: - """Turn switch on.""" + """Turn the switch off.""" value = await self.entity_description.turn_off_action(self._gateway) self._attr_is_on = bool(value) if value is not None else None self.async_write_ha_state() async def async_turn_on(self, **kwargs: Any) -> None: - """Turn switch on.""" + """Turn the switch on.""" value = await 
self.entity_description.turn_on_action(self._gateway) self._attr_is_on = bool(value) if value is not None else None self.async_write_ha_state() From 6e38cf878e82a15d7084242c7c8d2cc252accc5b Mon Sep 17 00:00:00 2001 From: Hessel Date: Sat, 7 Sep 2024 15:34:48 +0200 Subject: [PATCH 1554/1635] Clean up test for Wallbox integration (#125433) feat: Update API requests in wallbox integration tests --- tests/components/wallbox/__init__.py | 7 +++--- tests/components/wallbox/test_config_flow.py | 10 ++++++++- tests/components/wallbox/test_init.py | 4 +--- tests/components/wallbox/test_lock.py | 14 ++---------- tests/components/wallbox/test_number.py | 23 +++++--------------- tests/components/wallbox/test_sensor.py | 2 -- tests/components/wallbox/test_switch.py | 14 +++--------- 7 files changed, 23 insertions(+), 51 deletions(-) diff --git a/tests/components/wallbox/__init__.py b/tests/components/wallbox/__init__.py index f4258ea0d49..9ec10dc72aa 100644 --- a/tests/components/wallbox/__init__.py +++ b/tests/components/wallbox/__init__.py @@ -1,7 +1,6 @@ """Tests for the Wallbox integration.""" from http import HTTPStatus -import json import requests_mock @@ -121,7 +120,7 @@ async def setup_integration(hass: HomeAssistant, entry: MockConfigEntry) -> None ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=HTTPStatus.OK, ) @@ -144,7 +143,7 @@ async def setup_integration_bidir(hass: HomeAssistant, entry: MockConfigEntry) - ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=HTTPStatus.OK, ) @@ -169,7 +168,7 @@ async def setup_integration_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=HTTPStatus.FORBIDDEN, ) diff --git a/tests/components/wallbox/test_config_flow.py b/tests/components/wallbox/test_config_flow.py index cc38576eb2f..467e20c51c1 100644 --- a/tests/components/wallbox/test_config_flow.py +++ b/tests/components/wallbox/test_config_flow.py @@ -186,7 +186,15 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) with requests_mock.Mocker() as mock_request: mock_request.get( "https://user-api.wall-box.com/users/signin", - text='{"jwt":"fakekeyhere","refresh_token": "refresh_fakekeyhere","user_id":12345,"ttl":145656758,"refresh_token_ttl":145756758,"error":false,"status":200}', + json={ + "jwt": "fakekeyhere", + "refresh_token": "refresh_fakekeyhere", + "user_id": 12345, + "ttl": 145656758, + "refresh_token_ttl": 145756758, + "error": False, + "status": 200, + }, status_code=200, ) mock_request.get( diff --git a/tests/components/wallbox/test_init.py b/tests/components/wallbox/test_init.py index f1362489c50..b4b5a199243 100644 --- a/tests/components/wallbox/test_init.py +++ b/tests/components/wallbox/test_init.py @@ -1,7 +1,5 @@ """Test Wallbox Init Component.""" -import json - import requests_mock from homeassistant.components.wallbox.const import ( @@ -90,7 +88,7 @@ async def test_wallbox_refresh_failed_invalid_auth( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=403, ) diff 
--git a/tests/components/wallbox/test_lock.py b/tests/components/wallbox/test_lock.py index 637f0c827f4..1d48e53b515 100644 --- a/tests/components/wallbox/test_lock.py +++ b/tests/components/wallbox/test_lock.py @@ -1,7 +1,5 @@ """Test Wallbox Lock component.""" -import json - import pytest import requests_mock @@ -38,7 +36,7 @@ async def test_wallbox_lock_class(hass: HomeAssistant, entry: MockConfigEntry) - ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_LOCKED_UNLOCKED_KEY: False})), + json={CHARGER_LOCKED_UNLOCKED_KEY: False}, status_code=200, ) @@ -60,8 +58,6 @@ async def test_wallbox_lock_class(hass: HomeAssistant, entry: MockConfigEntry) - blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_lock_class_connection_error( hass: HomeAssistant, entry: MockConfigEntry @@ -78,7 +74,7 @@ async def test_wallbox_lock_class_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_LOCKED_UNLOCKED_KEY: False})), + json={CHARGER_LOCKED_UNLOCKED_KEY: False}, status_code=404, ) @@ -101,8 +97,6 @@ async def test_wallbox_lock_class_connection_error( blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_lock_class_authentication_error( hass: HomeAssistant, entry: MockConfigEntry @@ -115,8 +109,6 @@ async def test_wallbox_lock_class_authentication_error( assert state is None - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_lock_class_platform_not_ready( hass: HomeAssistant, entry: MockConfigEntry @@ -128,5 +120,3 @@ async def test_wallbox_lock_class_platform_not_ready( state = hass.states.get(MOCK_LOCK_ENTITY_ID) assert state is None - - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_number.py b/tests/components/wallbox/test_number.py index 0a8b1aa1207..c319668c161 100644 --- a/tests/components/wallbox/test_number.py +++ b/tests/components/wallbox/test_number.py @@ -1,7 +1,5 @@ """Test Wallbox Switch component.""" -import json - import pytest import requests_mock @@ -47,7 +45,7 @@ async def test_wallbox_number_class( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=200, ) state = hass.states.get(MOCK_NUMBER_ENTITY_ID) @@ -63,7 +61,6 @@ async def test_wallbox_number_class( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_bidir( @@ -76,7 +73,6 @@ async def test_wallbox_number_class_bidir( state = hass.states.get(MOCK_NUMBER_ENTITY_ID) assert state.attributes["min"] == -25 assert state.attributes["max"] == 25 - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_energy_class( @@ -95,7 +91,7 @@ async def test_wallbox_number_energy_class( mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), + json={CHARGER_ENERGY_PRICE_KEY: 1.1}, status_code=200, ) @@ -108,7 +104,6 @@ async def test_wallbox_number_energy_class( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_connection_error( @@ -126,7 +121,7 @@ async def test_wallbox_number_class_connection_error( ) mock_request.put( "https://api.wall-box.com/v2/charger/12345", - 
json=json.loads(json.dumps({CHARGER_MAX_CHARGING_CURRENT_KEY: 20})), + json={CHARGER_MAX_CHARGING_CURRENT_KEY: 20}, status_code=404, ) @@ -140,7 +135,6 @@ async def test_wallbox_number_class_connection_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_energy_price_connection_error( @@ -158,7 +152,7 @@ async def test_wallbox_number_class_energy_price_connection_error( ) mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), + json={CHARGER_ENERGY_PRICE_KEY: 1.1}, status_code=404, ) @@ -172,7 +166,6 @@ async def test_wallbox_number_class_energy_price_connection_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_energy_price_auth_error( @@ -190,7 +183,7 @@ async def test_wallbox_number_class_energy_price_auth_error( ) mock_request.post( "https://api.wall-box.com/chargers/config/12345", - json=json.loads(json.dumps({CHARGER_ENERGY_PRICE_KEY: 1.1})), + json={CHARGER_ENERGY_PRICE_KEY: 1.1}, status_code=403, ) @@ -204,7 +197,6 @@ async def test_wallbox_number_class_energy_price_auth_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_platform_not_ready( @@ -218,8 +210,6 @@ async def test_wallbox_number_class_platform_not_ready( assert state is None - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_number_class_icp_energy( hass: HomeAssistant, entry: MockConfigEntry @@ -250,7 +240,6 @@ async def test_wallbox_number_class_icp_energy( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_icp_energy_auth_error( @@ -282,7 +271,6 @@ async def test_wallbox_number_class_icp_energy_auth_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) async def test_wallbox_number_class_icp_energy_connection_error( @@ -314,4 +302,3 @@ async def test_wallbox_number_class_icp_energy_connection_error( }, blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_sensor.py b/tests/components/wallbox/test_sensor.py index 5a8b3c290c1..69d0cc57340 100644 --- a/tests/components/wallbox/test_sensor.py +++ b/tests/components/wallbox/test_sensor.py @@ -30,5 +30,3 @@ async def test_wallbox_sensor_class( # Test round with precision '0' works state = hass.states.get(MOCK_SENSOR_MAX_AVAILABLE_POWER) assert state.state == "25.0" - - await hass.config_entries.async_unload(entry.entry_id) diff --git a/tests/components/wallbox/test_switch.py b/tests/components/wallbox/test_switch.py index d06251db003..b7c3a81dc73 100644 --- a/tests/components/wallbox/test_switch.py +++ b/tests/components/wallbox/test_switch.py @@ -1,7 +1,5 @@ """Test Wallbox Lock component.""" -import json - import pytest import requests_mock @@ -36,7 +34,7 @@ async def test_wallbox_switch_class( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), + json={CHARGER_STATUS_ID_KEY: 193}, status_code=200, ) @@ -58,8 +56,6 @@ async def test_wallbox_switch_class( blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_switch_class_connection_error( hass: HomeAssistant, entry: MockConfigEntry @@ -76,7 +72,7 @@ async def test_wallbox_switch_class_connection_error( ) mock_request.post( 
"https://api.wall-box.com/v3/chargers/12345/remote-action", - json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), + json={CHARGER_STATUS_ID_KEY: 193}, status_code=404, ) @@ -99,8 +95,6 @@ async def test_wallbox_switch_class_connection_error( blocking=True, ) - await hass.config_entries.async_unload(entry.entry_id) - async def test_wallbox_switch_class_authentication_error( hass: HomeAssistant, entry: MockConfigEntry @@ -117,7 +111,7 @@ async def test_wallbox_switch_class_authentication_error( ) mock_request.post( "https://api.wall-box.com/v3/chargers/12345/remote-action", - json=json.loads(json.dumps({CHARGER_STATUS_ID_KEY: 193})), + json={CHARGER_STATUS_ID_KEY: 193}, status_code=403, ) @@ -139,5 +133,3 @@ async def test_wallbox_switch_class_authentication_error( }, blocking=True, ) - - await hass.config_entries.async_unload(entry.entry_id) From c53c2d7e640df464257938d6083fb6d2e88dbd69 Mon Sep 17 00:00:00 2001 From: Marcel van der Veldt Date: Sat, 7 Sep 2024 17:57:57 +0200 Subject: [PATCH 1555/1635] Add model ID to Matter DeviceInfo (#125341) * Add model ID to Matter DeviceInfo * convert to string * Test device registry --------- Co-authored-by: Paulus Schoutsen --- homeassistant/components/matter/adapter.py | 1 + tests/components/matter/test_adapter.py | 1 + 2 files changed, 2 insertions(+) diff --git a/homeassistant/components/matter/adapter.py b/homeassistant/components/matter/adapter.py index a3536435ded..b56c82f8b9a 100644 --- a/homeassistant/components/matter/adapter.py +++ b/homeassistant/components/matter/adapter.py @@ -207,6 +207,7 @@ class MatterAdapter: sw_version=basic_info.softwareVersionString, manufacturer=basic_info.vendorName or endpoint.node.device_info.vendorName, model=model, + model_id=str(basic_info.productID) if basic_info.productID else None, serial_number=serial_number, via_device=(DOMAIN, bridge_device_id) if bridge_device_id else None, ) diff --git a/tests/components/matter/test_adapter.py b/tests/components/matter/test_adapter.py index da2ef179c44..522128e5968 100644 --- a/tests/components/matter/test_adapter.py +++ b/tests/components/matter/test_adapter.py @@ -54,6 +54,7 @@ async def test_device_registry_single_node_device( assert entry.name == name assert entry.manufacturer == "Nabu Casa" assert entry.model == "Mock Light" + assert entry.model_id == "32768" assert entry.hw_version == "v1.0" assert entry.sw_version == "v1.0" assert entry.serial_number == "12345678" From 7e7a6e4937115845ece3db7cd65f7e03215f9e0d Mon Sep 17 00:00:00 2001 From: Dian Date: Sun, 8 Sep 2024 03:33:48 +0800 Subject: [PATCH 1556/1635] Bump xiaomi-ble to 0.32.0 (#125461) --- homeassistant/components/xiaomi_ble/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/xiaomi_ble/manifest.json b/homeassistant/components/xiaomi_ble/manifest.json index da7169635e9..e4c643e491e 100644 --- a/homeassistant/components/xiaomi_ble/manifest.json +++ b/homeassistant/components/xiaomi_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/xiaomi_ble", "iot_class": "local_push", - "requirements": ["xiaomi-ble==0.31.1"] + "requirements": ["xiaomi-ble==0.32.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index ee9b9b4bedf..3047748dc04 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2975,7 +2975,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble 
-xiaomi-ble==0.31.1 +xiaomi-ble==0.32.0 # homeassistant.components.knx xknx==3.1.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index abe84df9270..2f28c496769 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2358,7 +2358,7 @@ wyoming==1.5.4 xbox-webapi==2.0.11 # homeassistant.components.xiaomi_ble -xiaomi-ble==0.31.1 +xiaomi-ble==0.32.0 # homeassistant.components.knx xknx==3.1.1 From ab29718a455ed6c625a0459400c816f293e695b8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Sat, 7 Sep 2024 22:32:36 +0200 Subject: [PATCH 1557/1635] Update aioairzone to v0.9.0 (#125476) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Álvaro Fernández Rojas --- homeassistant/components/airzone/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index 31ff7423ad6..a782006efef 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.8.2"] + "requirements": ["aioairzone==0.9.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 3047748dc04..5844e51260f 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -179,7 +179,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.5 # homeassistant.components.airzone -aioairzone==0.8.2 +aioairzone==0.9.0 # homeassistant.components.ambient_network # homeassistant.components.ambient_station diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2f28c496769..85b6262a8ad 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -167,7 +167,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.5 # homeassistant.components.airzone -aioairzone==0.8.2 +aioairzone==0.9.0 # homeassistant.components.ambient_network # homeassistant.components.ambient_station From 0a11acf7aed62ce8f76a6fd76bda1dacc89d8a0f Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sat, 7 Sep 2024 22:49:44 -0500 Subject: [PATCH 1558/1635] Replace linear search in unit_system with dict lookup (#125485) --- homeassistant/util/unit_system.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/homeassistant/util/unit_system.py b/homeassistant/util/unit_system.py index bd31b4286ab..98cfb2f1368 100644 --- a/homeassistant/util/unit_system.py +++ b/homeassistant/util/unit_system.py @@ -58,23 +58,21 @@ WIND_SPEED_UNITS = SpeedConverter.VALID_UNITS TEMPERATURE_UNITS: set[str] = {UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS} +_VALID_BY_TYPE: dict[str, set[str] | set[str | None]] = { + LENGTH: LENGTH_UNITS, + ACCUMULATED_PRECIPITATION: LENGTH_UNITS, + WIND_SPEED: WIND_SPEED_UNITS, + TEMPERATURE: TEMPERATURE_UNITS, + MASS: MASS_UNITS, + VOLUME: VOLUME_UNITS, + PRESSURE: PRESSURE_UNITS, +} + def _is_valid_unit(unit: str, unit_type: str) -> bool: """Check if the unit is valid for it's type.""" - if unit_type == LENGTH: - return unit in LENGTH_UNITS - if unit_type == ACCUMULATED_PRECIPITATION: - return unit in LENGTH_UNITS - if unit_type == WIND_SPEED: - return unit in WIND_SPEED_UNITS - if unit_type == TEMPERATURE: - return unit in TEMPERATURE_UNITS - if unit_type == MASS: - return unit in MASS_UNITS - if unit_type == VOLUME: - return unit in VOLUME_UNITS - if unit_type == PRESSURE: - return unit in PRESSURE_UNITS + if units := _VALID_BY_TYPE.get(unit_type): + return unit in units return False From 03a6eb26bede9086c3fb12bd45f5324c05a404e5 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 8 Sep 2024 02:10:46 -0500 Subject: [PATCH 1559/1635] Bump zeroconf to 0.134.0 (#125491) changelog: https://github.com/python-zeroconf/python-zeroconf/compare/0.133.0...0.134.0 --- homeassistant/components/zeroconf/manifest.json | 2 +- homeassistant/package_constraints.txt | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 8b332400805..1176be80839 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.133.0"] + "requirements": ["zeroconf==0.134.0"] } diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 0a4ead9e5b1..e043740e15a 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -63,7 +63,7 @@ voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 yarl==1.10.0 -zeroconf==0.133.0 +zeroconf==0.134.0 # Constrain pycryptodome to avoid vulnerability # see https://github.com/home-assistant/core/pull/16238 diff --git a/requirements_all.txt b/requirements_all.txt index 5844e51260f..16b0cc537d7 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -3030,7 +3030,7 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.133.0 +zeroconf==0.134.0 # homeassistant.components.zeversolar zeversolar==0.3.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 85b6262a8ad..53e29289127 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -2404,7 +2404,7 @@ yt-dlp==2024.08.06 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.133.0 +zeroconf==0.134.0 # homeassistant.components.zeversolar 
zeversolar==0.3.1 From 5e1b4b2d23998e763157656486e7b52338f2e091 Mon Sep 17 00:00:00 2001 From: Andre Lengwenus Date: Sun, 8 Sep 2024 09:20:57 +0200 Subject: [PATCH 1560/1635] Clean up tests for LCN (#125493) * Remove patches on 3rd party module level * Cleanup test_init * Cleanup platform tests * Cleanup test_services * Cleanup test_websockets * Cleanup test_device_trigger * Cleanup test_events * Remove unused fixture --- homeassistant/components/lcn/config_flow.py | 3 +- tests/components/lcn/conftest.py | 21 +- .../lcn/snapshots/test_binary_sensor.ambr | 139 +++++ .../components/lcn/snapshots/test_cover.ambr | 97 ++++ .../components/lcn/snapshots/test_light.ambr | 167 ++++++ .../components/lcn/snapshots/test_sensor.ambr | 187 +++++++ .../lcn/snapshots/test_services.ambr | 203 -------- .../components/lcn/snapshots/test_switch.ambr | 231 +++++++++ tests/components/lcn/test_binary_sensor.py | 76 ++- tests/components/lcn/test_climate.py | 2 +- tests/components/lcn/test_config_flow.py | 27 +- tests/components/lcn/test_cover.py | 490 +++++++++--------- tests/components/lcn/test_device_trigger.py | 40 +- tests/components/lcn/test_events.py | 35 +- tests/components/lcn/test_init.py | 74 ++- tests/components/lcn/test_light.py | 453 ++++++++-------- tests/components/lcn/test_scene.py | 2 +- tests/components/lcn/test_sensor.py | 87 +--- tests/components/lcn/test_services.py | 107 ++-- tests/components/lcn/test_switch.py | 332 ++++++------ tests/components/lcn/test_websocket.py | 60 ++- 21 files changed, 1715 insertions(+), 1118 deletions(-) create mode 100644 tests/components/lcn/snapshots/test_binary_sensor.ambr create mode 100644 tests/components/lcn/snapshots/test_cover.ambr create mode 100644 tests/components/lcn/snapshots/test_light.ambr create mode 100644 tests/components/lcn/snapshots/test_sensor.ambr delete mode 100644 tests/components/lcn/snapshots/test_services.ambr create mode 100644 tests/components/lcn/snapshots/test_switch.ambr diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index c38a16cc21e..e3979effc07 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -25,6 +25,7 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType +from . 
import PchkConnectionManager from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN from .helpers import purge_device_registry, purge_entity_registry @@ -78,7 +79,7 @@ async def validate_connection(data: ConfigType) -> str | None: _LOGGER.debug("Validating connection parameters to PCHK host '%s'", host_name) - connection = pypck.connection.PchkConnectionManager( + connection = PchkConnectionManager( host, port, username, password, settings=settings ) diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 67c5b9c0b9c..16797f6065d 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -53,12 +53,20 @@ class MockPchkConnectionManager(PchkConnectionManager): async def async_close(self) -> None: """Mock closing a connection to PCHK.""" - @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) - @patch.object(pypck.connection, "GroupConnection", MockGroupConnection) def get_address_conn(self, addr, request_serials=False): """Get LCN address connection.""" return super().get_address_conn(addr, request_serials) + @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) + def get_module_conn(self, addr, request_serials=False): + """Get LCN module connection.""" + return super().get_module_conn(addr, request_serials) + + @patch.object(pypck.connection, "GroupConnection", MockGroupConnection) + def get_group_conn(self, addr): + """Get LCN group connection.""" + return super().get_group_conn(addr) + scan_modules = AsyncMock() send_command = AsyncMock() @@ -119,15 +127,6 @@ async def init_integration( return lcn_connection -@pytest.fixture(name="lcn_connection") -async def init_lcn_connection( - hass: HomeAssistant, - entry: MockConfigEntry, -) -> MockPchkConnectionManager: - """Set up the LCN integration in Home Assistantand yield connection object.""" - return await init_integration(hass, entry) - - async def setup_component(hass: HomeAssistant) -> None: """Set up the LCN component.""" fixture_filename = "lcn/config.json" diff --git a/tests/components/lcn/snapshots/test_binary_sensor.ambr b/tests/components/lcn/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..0ad31437dd1 --- /dev/null +++ b/tests/components/lcn/snapshots/test_binary_sensor.ambr @@ -0,0 +1,139 @@ +# serializer version: 1 +# name: test_setup_lcn_binary_sensor[binary_sensor.binary_sensor1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.binary_sensor1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Binary_Sensor1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-binsensor1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.binary_sensor1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Binary_Sensor1', + }), + 'context': , + 'entity_id': 'binary_sensor.binary_sensor1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: 
test_setup_lcn_binary_sensor[binary_sensor.sensor_keylock-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_keylock', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_KeyLock', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-a5', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_keylock-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_KeyLock', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_keylock', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_lockregulator1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.sensor_lockregulator1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_LockRegulator1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_binary_sensor[binary_sensor.sensor_lockregulator1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_LockRegulator1', + }), + 'context': , + 'entity_id': 'binary_sensor.sensor_lockregulator1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_cover.ambr b/tests/components/lcn/snapshots/test_cover.ambr new file mode 100644 index 00000000000..82a19060d73 --- /dev/null +++ b/tests/components/lcn/snapshots/test_cover.ambr @@ -0,0 +1,97 @@ +# serializer version: 1 +# name: test_setup_lcn_cover[cover.cover_outputs-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.cover_outputs', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cover_Outputs', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-outputs', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_cover[cover.cover_outputs-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'friendly_name': 'Cover_Outputs', + 'supported_features': , + }), + 'context': , 
+ 'entity_id': 'cover.cover_outputs', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_setup_lcn_cover[cover.cover_relays-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.cover_relays', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cover_Relays', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-motor1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_cover[cover.cover_relays-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'assumed_state': True, + 'friendly_name': 'Cover_Relays', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.cover_relays', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_light.ambr b/tests/components/lcn/snapshots/test_light.ambr new file mode 100644 index 00000000000..f53d1fdf2dc --- /dev/null +++ b/tests/components/lcn/snapshots/test_light.ambr @@ -0,0 +1,167 @@ +# serializer version: 1 +# name: test_setup_lcn_light[light.light_output1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light_output1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light_Output1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_light[light.light_output1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Light_Output1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light_output1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_light[light.light_output2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light_output2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light_Output2', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output2', + 'unit_of_measurement': None, + 
}) +# --- +# name: test_setup_lcn_light[light.light_output2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Light_Output2', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light_output2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_light[light.light_relay1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.light_relay1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light_Relay1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_light[light.light_relay1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Light_Relay1', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.light_relay1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_sensor.ambr b/tests/components/lcn/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..d6ac73b5822 --- /dev/null +++ b/tests/components/lcn/snapshots/test_sensor.ambr @@ -0,0 +1,187 @@ +# serializer version: 1 +# name: test_setup_lcn_sensor[sensor.sensor_led6-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_led6', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_Led6', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-led6', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_led6-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_Led6', + }), + 'context': , + 'entity_id': 'sensor.sensor_led6', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_logicop1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_logicop1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_LogicOp1', + 'platform': 'lcn', + 'previous_unique_id': 
None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-logicop1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_logicop1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_LogicOp1', + }), + 'context': , + 'entity_id': 'sensor.sensor_logicop1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_setpoint1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_setpoint1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_Setpoint1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-r1varsetpoint', + 'unit_of_measurement': '°C', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_setpoint1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_Setpoint1', + 'unit_of_measurement': '°C', + }), + 'context': , + 'entity_id': 'sensor.sensor_setpoint1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_var1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.sensor_var1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Sensor_Var1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-var1', + 'unit_of_measurement': '°C', + }) +# --- +# name: test_setup_lcn_sensor[sensor.sensor_var1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Sensor_Var1', + 'unit_of_measurement': '°C', + }), + 'context': , + 'entity_id': 'sensor.sensor_var1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/lcn/snapshots/test_services.ambr b/tests/components/lcn/snapshots/test_services.ambr deleted file mode 100644 index 29e8da72fd7..00000000000 --- a/tests/components/lcn/snapshots/test_services.ambr +++ /dev/null @@ -1,203 +0,0 @@ -# serializer version: 1 -# name: test_service_dyn_text - tuple( - 0, - 'text in row 1', - ) -# --- -# name: test_service_led - tuple( - , - , - ) -# --- -# name: test_service_lock_keys - tuple( - 0, - list([ - , - , - , - , - , - , - , - , - ]), - ) -# --- -# name: test_service_lock_keys_tab_a_temporary - tuple( - 10, - , - list([ - , - , - , - , - , - , - , - , - ]), - ) -# --- -# name: test_service_lock_regulator - tuple( - 0, - True, - ) -# --- -# name: test_service_output_abs - tuple( - 0, - 100, - 9, - ) -# --- -# name: test_service_output_rel - tuple( - 0, - 25, - ) -# --- -# name: 
test_service_output_toggle - tuple( - 0, - 9, - ) -# --- -# name: test_service_pck - tuple( - 'PIN4', - ) -# --- -# name: test_service_relays - tuple( - list([ - , - , - , - , - , - , - , - , - ]), - ) -# --- -# name: test_service_send_keys - tuple( - list([ - list([ - True, - False, - False, - False, - True, - False, - False, - False, - ]), - list([ - False, - False, - False, - False, - False, - False, - False, - False, - ]), - list([ - False, - False, - False, - False, - False, - False, - False, - False, - ]), - list([ - False, - False, - False, - False, - False, - False, - False, - True, - ]), - ]), - , - ) -# --- -# name: test_service_send_keys_hit_deferred - tuple( - list([ - list([ - True, - False, - False, - False, - True, - False, - False, - False, - ]), - list([ - False, - False, - False, - False, - False, - False, - False, - False, - ]), - list([ - False, - False, - False, - False, - False, - False, - False, - False, - ]), - list([ - False, - False, - False, - False, - False, - False, - False, - True, - ]), - ]), - 5, - , - ) -# --- -# name: test_service_var_abs - tuple( - , - 75.0, - , - ) -# --- -# name: test_service_var_rel - tuple( - , - 10.0, - , - , - ) -# --- -# name: test_service_var_reset - tuple( - , - ) -# --- diff --git a/tests/components/lcn/snapshots/test_switch.ambr b/tests/components/lcn/snapshots/test_switch.ambr new file mode 100644 index 00000000000..1f2aac041aa --- /dev/null +++ b/tests/components/lcn/snapshots/test_switch.ambr @@ -0,0 +1,231 @@ +# serializer version: 1 +# name: test_setup_lcn_switch[switch.switch_group5-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_group5', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Group5', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-g000005-relay1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_group5-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Group5', + }), + 'context': , + 'entity_id': 'switch.switch_group5', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_output1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Output1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Output1', + }), + 'context': , + 
'entity_id': 'switch.switch_output1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_output2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Output2', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-output2', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_output2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Output2', + }), + 'context': , + 'entity_id': 'switch.switch_output2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_relay1', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Relay1', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay1', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Relay1', + }), + 'context': , + 'entity_id': 'switch.switch_relay1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_relay2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Switch_Relay2', + 'platform': 'lcn', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'lcn/config_entry_pchk.json-m000007-relay2', + 'unit_of_measurement': None, + }) +# --- +# name: test_setup_lcn_switch[switch.switch_relay2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Switch_Relay2', + }), + 'context': , + 'entity_id': 'switch.switch_relay2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/lcn/test_binary_sensor.py b/tests/components/lcn/test_binary_sensor.py index 9ba04ac94c7..7abae6e0d89 100644 --- a/tests/components/lcn/test_binary_sensor.py +++ 
b/tests/components/lcn/test_binary_sensor.py @@ -1,68 +1,46 @@ """Test for the LCN binary sensor platform.""" +from unittest.mock import patch + from pypck.inputs import ModStatusBinSensors, ModStatusKeyLocks, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import Var, VarValue +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from .conftest import MockConfigEntry, init_integration + +from tests.common import snapshot_platform + BINARY_SENSOR_LOCKREGULATOR1 = "binary_sensor.sensor_lockregulator1" BINARY_SENSOR_SENSOR1 = "binary_sensor.binary_sensor1" BINARY_SENSOR_KEYLOCK = "binary_sensor.sensor_keylock" -async def test_setup_lcn_binary_sensor(hass: HomeAssistant, lcn_connection) -> None: - """Test the setup of binary sensor.""" - for entity_id in ( - BINARY_SENSOR_LOCKREGULATOR1, - BINARY_SENSOR_SENSOR1, - BINARY_SENSOR_KEYLOCK, - ): - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_UNKNOWN - - -async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: - """Test state of entity.""" - state = hass.states.get(BINARY_SENSOR_LOCKREGULATOR1) - assert state - - state = hass.states.get(BINARY_SENSOR_SENSOR1) - assert state - - state = hass.states.get(BINARY_SENSOR_KEYLOCK) - assert state - - -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +async def test_setup_lcn_binary_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: - """Test the attributes of an entity.""" + """Test the setup of binary sensor.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.BINARY_SENSOR]): + await init_integration(hass, entry) - entity_setpoint1 = entity_registry.async_get(BINARY_SENSOR_LOCKREGULATOR1) - assert entity_setpoint1 - assert entity_setpoint1.unique_id == f"{entry.entry_id}-m000007-r1varsetpoint" - assert entity_setpoint1.original_name == "Sensor_LockRegulator1" - - entity_binsensor1 = entity_registry.async_get(BINARY_SENSOR_SENSOR1) - assert entity_binsensor1 - assert entity_binsensor1.unique_id == f"{entry.entry_id}-m000007-binsensor1" - assert entity_binsensor1.original_name == "Binary_Sensor1" - - entity_keylock = entity_registry.async_get(BINARY_SENSOR_KEYLOCK) - assert entity_keylock - assert entity_keylock.unique_id == f"{entry.entry_id}-m000007-a5" - assert entity_keylock.original_name == "Sensor_KeyLock" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_pushed_lock_setpoint_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, + entry: MockConfigEntry, ) -> None: """Test the lock setpoint sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -86,9 +64,11 @@ async def test_pushed_lock_setpoint_status_change( async def test_pushed_binsensor_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the binary port sensor changes its state on status 
received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -114,9 +94,11 @@ async def test_pushed_binsensor_status_change( async def test_pushed_keylock_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the keylock sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [[False] * 8 for i in range(4)] @@ -141,8 +123,10 @@ async def test_pushed_keylock_status_change( assert state.state == STATE_ON -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the binary sensor is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(BINARY_SENSOR_LOCKREGULATOR1).state == STATE_UNAVAILABLE assert hass.states.get(BINARY_SENSOR_SENSOR1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_climate.py b/tests/components/lcn/test_climate.py index db9f137d6bf..c1a9d094c6b 100644 --- a/tests/components/lcn/test_climate.py +++ b/tests/components/lcn/test_climate.py @@ -282,6 +282,6 @@ async def test_unload_config_entry( """Test the climate is removed when the config entry is unloaded.""" await init_integration(hass, entry) - await hass.config_entries.async_forward_entry_unload(entry, DOMAIN_CLIMATE) + await hass.config_entries.async_unload(entry.entry_id) state = hass.states.get("climate.climate1") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index d002c5fe625..9f46202ac8a 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -48,7 +48,7 @@ async def test_step_import( """Test for import step.""" with ( - patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): @@ -76,7 +76,7 @@ async def test_step_import_existing_host( mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data) mock_entry.add_to_hass(hass) # Initialize a config flow with different data but same host address - with patch("pypck.connection.PchkConnectionManager.async_connect"): + with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): imported_data = IMPORT_DATA.copy() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data @@ -105,7 +105,8 @@ async def test_step_import_error( ) -> None: """Test for error in import is handled correctly.""" with patch( - "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=error, ): data = IMPORT_DATA.copy() data.update({CONF_HOST: "pchk"}) @@ -132,7 +133,7 @@ async def test_show_form(hass: HomeAssistant) -> None: async def test_step_user(hass: HomeAssistant) -> None: """Test for user step.""" with ( - patch("pypck.connection.PchkConnectionManager.async_connect"), + 
patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): @@ -156,7 +157,7 @@ async def test_step_user_existing_host( """Test for user defined host already exists.""" entry.add_to_hass(hass) - with patch("pypck.connection.PchkConnectionManager.async_connect"): + with patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"): config_data = entry.data.copy() result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, data=config_data @@ -179,7 +180,8 @@ async def test_step_user_error( ) -> None: """Test for error in user step is handled correctly.""" with patch( - "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=error, ): data = CONNECTION_DATA.copy() data.update({CONF_HOST: "pchk"}) @@ -197,7 +199,7 @@ async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> old_entry_data = entry.data.copy() with ( - patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.PchkConnectionManager.async_connect"), patch("homeassistant.components.lcn.async_setup", return_value=True), patch("homeassistant.components.lcn.async_setup_entry", return_value=True), ): @@ -235,7 +237,8 @@ async def test_step_reconfigure_error( """Test for error in reconfigure step is handled correctly.""" entry.add_to_hass(hass) with patch( - "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=error, ): data = {**CONNECTION_DATA, CONF_HOST: "pchk"} result = await hass.config_entries.flow.async_init( @@ -256,8 +259,12 @@ async def test_validate_connection() -> None: data = CONNECTION_DATA.copy() with ( - patch("pypck.connection.PchkConnectionManager.async_connect") as async_connect, - patch("pypck.connection.PchkConnectionManager.async_close") as async_close, + patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect" + ) as async_connect, + patch( + "homeassistant.components.lcn.PchkConnectionManager.async_close" + ) as async_close, ): result = await validate_connection(data=data) diff --git a/tests/components/lcn/test_cover.py b/tests/components/lcn/test_cover.py index f50921c08a1..0067e755b5a 100644 --- a/tests/components/lcn/test_cover.py +++ b/tests/components/lcn/test_cover.py @@ -5,6 +5,7 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import MotorReverseTime, MotorStateModifier +from syrupy.assertion import SnapshotAssertion from homeassistant.components.cover import DOMAIN as DOMAIN_COVER from homeassistant.components.lcn.helpers import get_device_connection @@ -18,318 +19,319 @@ from homeassistant.const import ( STATE_OPEN, STATE_OPENING, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockModuleConnection +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform COVER_OUTPUTS = "cover.cover_outputs" COVER_RELAYS = "cover.cover_relays" -async def test_setup_lcn_cover(hass: HomeAssistant, entry, lcn_connection) -> None: +async def 
test_setup_lcn_cover( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: """Test the setup of cover.""" - for entity_id in ( - COVER_OUTPUTS, - COVER_RELAYS, - ): - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_OPEN + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.COVER]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection -) -> None: - """Test the attributes of an entity.""" - - entity_outputs = entity_registry.async_get(COVER_OUTPUTS) - - assert entity_outputs - assert entity_outputs.unique_id == f"{entry.entry_id}-m000007-outputs" - assert entity_outputs.original_name == "Cover_Outputs" - - entity_relays = entity_registry.async_get(COVER_RELAYS) - - assert entity_relays - assert entity_relays.unique_id == f"{entry.entry_id}-m000007-motor1" - assert entity_relays.original_name == "Cover_Relays" - - -@patch.object(MockModuleConnection, "control_motors_outputs") -async def test_outputs_open( - control_motors_outputs, hass: HomeAssistant, lcn_connection -) -> None: +async def test_outputs_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the outputs cover opens.""" - state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSED + await init_integration(hass, entry) - # command failed - control_motors_outputs.return_value = False + with patch.object( + MockModuleConnection, "control_motors_outputs" + ) as control_motors_outputs: + state = hass.states.get(COVER_OUTPUTS) + state.state = STATE_CLOSED - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.UP, MotorReverseTime.RT1200 - ) + # command failed + control_motors_outputs.return_value = False - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state != STATE_OPENING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True + control_motors_outputs.assert_awaited_with( + MotorStateModifier.UP, MotorReverseTime.RT1200 + ) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.UP, MotorReverseTime.RT1200 - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state != STATE_OPENING - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == STATE_OPENING + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + + control_motors_outputs.assert_awaited_with( + MotorStateModifier.UP, MotorReverseTime.RT1200 + ) + + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == STATE_OPENING -@patch.object(MockModuleConnection, "control_motors_outputs") -async def 
test_outputs_close( - control_motors_outputs, hass: HomeAssistant, lcn_connection -) -> None: +async def test_outputs_close(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the outputs cover closes.""" - state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_OPEN + await init_integration(hass, entry) - # command failed - control_motors_outputs.return_value = False + with patch.object( + MockModuleConnection, "control_motors_outputs" + ) as control_motors_outputs: + state = hass.states.get(COVER_OUTPUTS) + state.state = STATE_OPEN - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.DOWN, MotorReverseTime.RT1200 - ) + # command failed + control_motors_outputs.return_value = False - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state != STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - control_motors_outputs.return_value = True + control_motors_outputs.assert_awaited_with( + MotorStateModifier.DOWN, MotorReverseTime.RT1200 + ) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with( - MotorStateModifier.DOWN, MotorReverseTime.RT1200 - ) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state != STATE_CLOSING - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == STATE_CLOSING + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + + control_motors_outputs.assert_awaited_with( + MotorStateModifier.DOWN, MotorReverseTime.RT1200 + ) + + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == STATE_CLOSING -@patch.object(MockModuleConnection, "control_motors_outputs") -async def test_outputs_stop( - control_motors_outputs, hass: HomeAssistant, lcn_connection -) -> None: +async def test_outputs_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the outputs cover stops.""" - state = hass.states.get(COVER_OUTPUTS) - state.state = STATE_CLOSING + await init_integration(hass, entry) - # command failed - control_motors_outputs.return_value = False + with patch.object( + MockModuleConnection, "control_motors_outputs" + ) as control_motors_outputs: + state = hass.states.get(COVER_OUTPUTS) + state.state = STATE_CLOSING - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + # command failed + control_motors_outputs.return_value = False - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state == STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) - # command success - control_motors_outputs.reset_mock(return_value=True) - 
control_motors_outputs.return_value = True + control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_OUTPUTS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state == STATE_CLOSING - state = hass.states.get(COVER_OUTPUTS) - assert state is not None - assert state.state not in (STATE_CLOSING, STATE_OPENING) + # command success + control_motors_outputs.reset_mock(return_value=True) + control_motors_outputs.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_OUTPUTS}, + blocking=True, + ) + + control_motors_outputs.assert_awaited_with(MotorStateModifier.STOP) + + state = hass.states.get(COVER_OUTPUTS) + assert state is not None + assert state.state not in (STATE_CLOSING, STATE_OPENING) -@patch.object(MockModuleConnection, "control_motors_relays") -async def test_relays_open( - control_motors_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relays_open(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relays cover opens.""" - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.UP + await init_integration(hass, entry) - state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSED + with patch.object( + MockModuleConnection, "control_motors_relays" + ) as control_motors_relays: + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.UP - # command failed - control_motors_relays.return_value = False + state = hass.states.get(COVER_RELAYS) + state.state = STATE_CLOSED - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + # command failed + control_motors_relays.return_value = False - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state != STATE_OPENING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state != STATE_OPENING - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == STATE_OPENING + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + + control_motors_relays.assert_awaited_with(states) + + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == STATE_OPENING -@patch.object(MockModuleConnection, "control_motors_relays") -async def test_relays_close( - control_motors_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relays_close(hass: HomeAssistant, entry: 
MockConfigEntry) -> None: """Test the relays cover closes.""" - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.DOWN + await init_integration(hass, entry) - state = hass.states.get(COVER_RELAYS) - state.state = STATE_OPEN + with patch.object( + MockModuleConnection, "control_motors_relays" + ) as control_motors_relays: + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.DOWN - # command failed - control_motors_relays.return_value = False + state = hass.states.get(COVER_RELAYS) + state.state = STATE_OPEN - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + # command failed + control_motors_relays.return_value = False - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state != STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True + control_motors_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state != STATE_CLOSING - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == STATE_CLOSING + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + + control_motors_relays.assert_awaited_with(states) + + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == STATE_CLOSING -@patch.object(MockModuleConnection, "control_motors_relays") -async def test_relays_stop( - control_motors_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relays_stop(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relays cover stops.""" - states = [MotorStateModifier.NOCHANGE] * 4 - states[0] = MotorStateModifier.STOP + await init_integration(hass, entry) - state = hass.states.get(COVER_RELAYS) - state.state = STATE_CLOSING + with patch.object( + MockModuleConnection, "control_motors_relays" + ) as control_motors_relays: + states = [MotorStateModifier.NOCHANGE] * 4 + states[0] = MotorStateModifier.STOP - # command failed - control_motors_relays.return_value = False + state = hass.states.get(COVER_RELAYS) + state.state = STATE_CLOSING - await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + # command failed + control_motors_relays.return_value = False - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state == STATE_CLOSING + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) - # command success - control_motors_relays.reset_mock(return_value=True) - control_motors_relays.return_value = True + control_motors_relays.assert_awaited_with(states) - 
await hass.services.async_call( - DOMAIN_COVER, - SERVICE_STOP_COVER, - {ATTR_ENTITY_ID: COVER_RELAYS}, - blocking=True, - ) - await hass.async_block_till_done() - control_motors_relays.assert_awaited_with(states) + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state == STATE_CLOSING - state = hass.states.get(COVER_RELAYS) - assert state is not None - assert state.state not in (STATE_CLOSING, STATE_OPENING) + # command success + control_motors_relays.reset_mock(return_value=True) + control_motors_relays.return_value = True + + await hass.services.async_call( + DOMAIN_COVER, + SERVICE_STOP_COVER, + {ATTR_ENTITY_ID: COVER_RELAYS}, + blocking=True, + ) + + control_motors_relays.assert_awaited_with(states) + + state = hass.states.get(COVER_RELAYS) + assert state is not None + assert state.state not in (STATE_CLOSING, STATE_OPENING) async def test_pushed_outputs_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the outputs cover changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -365,9 +367,11 @@ async def test_pushed_outputs_status_change( async def test_pushed_relays_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the relays cover changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -406,8 +410,10 @@ async def test_pushed_relays_status_change( assert state.state == STATE_CLOSING -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the cover is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(COVER_OUTPUTS).state == STATE_UNAVAILABLE assert hass.states.get(COVER_RELAYS).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_device_trigger.py b/tests/components/lcn/test_device_trigger.py index 6c5ab7d6f4e..6537c108981 100644 --- a/tests/components/lcn/test_device_trigger.py +++ b/tests/components/lcn/test_device_trigger.py @@ -15,15 +15,17 @@ from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import config_validation as cv, device_registry as dr from homeassistant.setup import async_setup_component -from .conftest import get_device +from .conftest import MockConfigEntry, get_device, init_integration from tests.common import async_get_device_automations async def test_get_triggers_module_device( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected triggers from a LCN module device.""" + await init_integration(hass, entry) + device = get_device(hass, entry, (0, 7, False)) expected_triggers = [ @@ -50,9 +52,11 @@ async def test_get_triggers_module_device( async def test_get_triggers_non_module_device( - hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry, lcn_connection + hass: HomeAssistant, device_registry: dr.DeviceRegistry, entry: MockConfigEntry ) -> None: """Test we get the expected triggers from a LCN non-module device.""" + await 
init_integration(hass, entry) + not_included_types = ("transmitter", "transponder", "fingerprint", "send_keys") host_device = device_registry.async_get_device( @@ -72,9 +76,10 @@ async def test_get_triggers_non_module_device( async def test_if_fires_on_transponder_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for transponder event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -119,9 +124,10 @@ async def test_if_fires_on_transponder_event( async def test_if_fires_on_fingerprint_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for fingerprint event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -166,9 +172,10 @@ async def test_if_fires_on_fingerprint_event( async def test_if_fires_on_codelock_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for codelock event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -213,9 +220,10 @@ async def test_if_fires_on_codelock_event( async def test_if_fires_on_transmitter_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for transmitter event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -269,9 +277,10 @@ async def test_if_fires_on_transmitter_event( async def test_if_fires_on_send_keys_event( - hass: HomeAssistant, service_calls: list[ServiceCall], entry, lcn_connection + hass: HomeAssistant, service_calls: list[ServiceCall], entry: MockConfigEntry ) -> None: """Test for send_keys event triggers firing.""" + lcn_connection = await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -318,9 +327,10 @@ async def test_if_fires_on_send_keys_event( async def test_get_transponder_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a transponder device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -341,9 +351,10 @@ async def test_get_transponder_trigger_capabilities( async def test_get_fingerprint_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a fingerprint device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -364,9 +375,10 @@ async def test_get_fingerprint_trigger_capabilities( async def test_get_transmitter_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a transmitter device trigger.""" + await 
init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -397,9 +409,10 @@ async def test_get_transmitter_trigger_capabilities( async def test_get_send_keys_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get the expected capabilities from a send_keys device trigger.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) @@ -435,9 +448,10 @@ async def test_get_send_keys_trigger_capabilities( async def test_unknown_trigger_capabilities( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test we get empty capabilities if trigger is unknown.""" + await init_integration(hass, entry) address = (0, 7, False) device = get_device(hass, entry, address) diff --git a/tests/components/lcn/test_events.py b/tests/components/lcn/test_events.py index eb62f820103..c6c3559e821 100644 --- a/tests/components/lcn/test_events.py +++ b/tests/components/lcn/test_events.py @@ -3,10 +3,11 @@ from pypck.inputs import Input, ModSendKeysHost, ModStatusAccessControl from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import AccessControlPeriphery, KeyAction, SendKeyCommand -import pytest from homeassistant.core import HomeAssistant +from .conftest import MockConfigEntry, init_integration + from tests.common import async_capture_events LCN_TRANSPONDER = "lcn_transponder" @@ -15,8 +16,11 @@ LCN_TRANSMITTER = "lcn_transmitter" LCN_SEND_KEYS = "lcn_send_keys" -async def test_fire_transponder_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_transponder_event( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test the transponder event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_TRANSPONDER) inp = ModStatusAccessControl( @@ -33,8 +37,11 @@ async def test_fire_transponder_event(hass: HomeAssistant, lcn_connection) -> No assert events[0].data["code"] == "aabbcc" -async def test_fire_fingerprint_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_fingerprint_event( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test the fingerprint event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_FINGERPRINT) inp = ModStatusAccessControl( @@ -51,8 +58,9 @@ async def test_fire_fingerprint_event(hass: HomeAssistant, lcn_connection) -> No assert events[0].data["code"] == "aabbcc" -async def test_fire_codelock_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_codelock_event(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the codelock event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, "lcn_codelock") inp = ModStatusAccessControl( @@ -69,8 +77,11 @@ async def test_fire_codelock_event(hass: HomeAssistant, lcn_connection) -> None: assert events[0].data["code"] == "aabbcc" -async def test_fire_transmitter_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_transmitter_event( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test the transmitter event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_TRANSMITTER) inp = ModStatusAccessControl( @@ -93,8 +104,9 @@ async def test_fire_transmitter_event(hass: HomeAssistant, lcn_connection) -> No 
assert events[0].data["action"] == "hit" -async def test_fire_sendkeys_event(hass: HomeAssistant, lcn_connection) -> None: +async def test_fire_sendkeys_event(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the send_keys event is fired.""" + lcn_connection = await init_integration(hass, entry) events = async_capture_events(hass, LCN_SEND_KEYS) inp = ModSendKeysHost( @@ -122,9 +134,10 @@ async def test_fire_sendkeys_event(hass: HomeAssistant, lcn_connection) -> None: async def test_dont_fire_on_non_module_input( - hass: HomeAssistant, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test for no event is fired if a non-module input is received.""" + lcn_connection = await init_integration(hass, entry) inp = Input() for event_name in ( @@ -139,16 +152,16 @@ async def test_dont_fire_on_non_module_input( assert len(events) == 0 -# This tests needs to be adjusted to remove lingering tasks -@pytest.mark.parametrize("expected_lingering_tasks", [True]) -async def test_dont_fire_on_unknown_module(hass: HomeAssistant, lcn_connection) -> None: +async def test_dont_fire_on_unknown_module( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: """Test for no event is fired if an input from an unknown module is received.""" + lcn_connection = await init_integration(hass, entry) inp = ModStatusAccessControl( LcnAddr(0, 10, False), # unknown module periphery=AccessControlPeriphery.FINGERPRINT, code="aabbcc", ) - events = async_capture_events(hass, LCN_FINGERPRINT) await lcn_connection.async_process_input(inp) await hass.async_block_till_done() diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 120db8a1333..ece0e95e501 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -2,11 +2,8 @@ from unittest.mock import Mock, patch -from pypck.connection import ( - PchkAuthenticationError, - PchkConnectionManager, - PchkLicenseError, -) +from pypck.connection import PchkAuthenticationError, PchkLicenseError +import pytest from homeassistant import config_entries from homeassistant.components.lcn.const import DOMAIN @@ -14,11 +11,18 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from .conftest import MockPchkConnectionManager, setup_component +from .conftest import ( + MockConfigEntry, + MockPchkConnectionManager, + init_integration, + setup_component, +) -async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_async_setup_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test a successful setup entry and unload of entry.""" + await init_integration(hass, entry) + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 assert entry.state is ConfigEntryState.LOADED @@ -29,16 +33,16 @@ async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> assert not hass.data.get(DOMAIN) -async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) -> None: +async def test_async_setup_multiple_entries( + hass: HomeAssistant, entry: MockConfigEntry, entry2 +) -> None: """Test a successful setup and unload of multiple entries.""" hass.http = Mock() with patch( "homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager ): for config_entry in (entry, entry2): - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await 
hass.async_block_till_done() + await init_integration(hass, config_entry) assert config_entry.state is ConfigEntryState.LOADED assert len(hass.config_entries.async_entries(DOMAIN)) == 2 @@ -56,7 +60,7 @@ async def test_async_setup_entry_update( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - entry, + entry: MockConfigEntry, ) -> None: """Test a successful setup entry if entry with same id already exists.""" # setup first entry @@ -79,7 +83,10 @@ async def test_async_setup_entry_update( assert dummy_device in device_registry.devices.values() # setup new entry with same data via import step (should cleanup dummy device) - with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): + with patch( + "homeassistant.components.lcn.config_flow.validate_connection", + return_value=None, + ): await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=entry.data ) @@ -88,12 +95,16 @@ async def test_async_setup_entry_update( assert dummy_entity not in entity_registry.entities.values() +@pytest.mark.parametrize( + "exception", [PchkAuthenticationError, PchkLicenseError, TimeoutError] +) async def test_async_setup_entry_raises_authentication_error( - hass: HomeAssistant, entry + hass: HomeAssistant, entry: MockConfigEntry, exception: Exception ) -> None: """Test that an authentication error is handled properly.""" - with patch.object( - PchkConnectionManager, "async_connect", side_effect=PchkAuthenticationError + with patch( + "homeassistant.components.lcn.PchkConnectionManager.async_connect", + side_effect=exception, ): entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -102,36 +113,13 @@ async def test_async_setup_entry_raises_authentication_error( assert entry.state is ConfigEntryState.SETUP_ERROR -async def test_async_setup_entry_raises_license_error( - hass: HomeAssistant, entry -) -> None: - """Test that an authentication error is handled properly.""" - with patch.object( - PchkConnectionManager, "async_connect", side_effect=PchkLicenseError - ): - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state is ConfigEntryState.SETUP_ERROR - - -async def test_async_setup_entry_raises_timeout_error( - hass: HomeAssistant, entry -) -> None: - """Test that an authentication error is handled properly.""" - with patch.object(PchkConnectionManager, "async_connect", side_effect=TimeoutError): - entry.add_to_hass(hass) - await hass.config_entries.async_setup(entry.entry_id) - await hass.async_block_till_done() - - assert entry.state is ConfigEntryState.SETUP_ERROR - - async def test_async_setup_from_configuration_yaml(hass: HomeAssistant) -> None: """Test a successful setup using data from configuration.yaml.""" with ( - patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager), + patch( + "homeassistant.components.lcn.config_flow.validate_connection", + return_value=None, + ), patch("homeassistant.components.lcn.async_setup_entry") as async_setup_entry, ): await setup_component(hass) diff --git a/tests/components/lcn/test_light.py b/tests/components/lcn/test_light.py index b91f3d5b17c..4251d997724 100644 --- a/tests/components/lcn/test_light.py +++ b/tests/components/lcn/test_light.py @@ -5,297 +5,278 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import 
RelayStateModifier +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.components.light import ( ATTR_BRIGHTNESS, - ATTR_SUPPORTED_COLOR_MODES, ATTR_TRANSITION, DOMAIN as DOMAIN_LIGHT, - ColorMode, - LightEntityFeature, ) from homeassistant.const import ( ATTR_ENTITY_ID, - ATTR_SUPPORTED_FEATURES, SERVICE_TURN_OFF, SERVICE_TURN_ON, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockModuleConnection +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform LIGHT_OUTPUT1 = "light.light_output1" LIGHT_OUTPUT2 = "light.light_output2" LIGHT_RELAY1 = "light.light_relay1" -async def test_setup_lcn_light(hass: HomeAssistant, lcn_connection) -> None: +async def test_setup_lcn_light( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: """Test the setup of light.""" - for entity_id in ( - LIGHT_OUTPUT1, - LIGHT_OUTPUT2, - LIGHT_RELAY1, - ): - state = hass.states.get(entity_id) + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.LIGHT]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output light turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 9) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state != STATE_ON + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 9) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_ON + + +async def test_output_turn_on_with_attributes( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test the output light turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + { + ATTR_ENTITY_ID: LIGHT_OUTPUT1, + ATTR_BRIGHTNESS: 50, + ATTR_TRANSITION: 2, + }, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 19, 6) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_ON + + +async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output light turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + state = hass.states.get(LIGHT_OUTPUT1) + state.state = STATE_ON + + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 9) + + state = 
hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state != STATE_OFF + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 9) + + state = hass.states.get(LIGHT_OUTPUT1) assert state is not None assert state.state == STATE_OFF -async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: - """Test state of entity.""" - state = hass.states.get(LIGHT_OUTPUT1) - assert state - assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION - assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.BRIGHTNESS] - - state = hass.states.get(LIGHT_OUTPUT2) - assert state - assert state.attributes[ATTR_SUPPORTED_FEATURES] == LightEntityFeature.TRANSITION - assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == [ColorMode.ONOFF] - - -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection -) -> None: - """Test the attributes of an entity.""" - entity_output = entity_registry.async_get(LIGHT_OUTPUT1) - - assert entity_output - assert entity_output.unique_id == f"{entry.entry_id}-m000007-output1" - assert entity_output.original_name == "Light_Output1" - - entity_relay = entity_registry.async_get(LIGHT_RELAY1) - - assert entity_relay - assert entity_relay.unique_id == f"{entry.entry_id}-m000007-relay1" - assert entity_relay.original_name == "Light_Relay1" - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_on(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output light turns on.""" - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state != STATE_ON - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_ON - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_on_with_attributes( - dim_output, hass: HomeAssistant, lcn_connection -) -> None: - """Test the output light turns on.""" - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - { - ATTR_ENTITY_ID: LIGHT_OUTPUT1, - ATTR_BRIGHTNESS: 50, - ATTR_TRANSITION: 2, - }, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 19, 6) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_ON - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_off(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output light turns off.""" - state = hass.states.get(LIGHT_OUTPUT1) - state.state = STATE_ON - - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: 
LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state != STATE_OFF - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 9) - - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_OFF - - -@patch.object(MockModuleConnection, "dim_output") async def test_output_turn_off_with_attributes( - dim_output, hass: HomeAssistant, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the output light turns off.""" - dim_output.return_value = True + await init_integration(hass, entry) - state = hass.states.get(LIGHT_OUTPUT1) - state.state = STATE_ON + with patch.object(MockModuleConnection, "dim_output") as dim_output: + dim_output.return_value = True - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - { - ATTR_ENTITY_ID: LIGHT_OUTPUT1, - ATTR_TRANSITION: 2, - }, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 6) + state = hass.states.get(LIGHT_OUTPUT1) + state.state = STATE_ON - state = hass.states.get(LIGHT_OUTPUT1) - assert state is not None - assert state.state == STATE_OFF + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + { + ATTR_ENTITY_ID: LIGHT_OUTPUT1, + ATTR_TRANSITION: 2, + }, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 6) + + state = hass.states.get(LIGHT_OUTPUT1) + assert state is not None + assert state.state == STATE_OFF -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_on( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay light turns on.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.ON + await init_integration(hass, entry) - # command failed - control_relays.return_value = False + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.ON - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state != STATE_ON + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state != STATE_ON - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state == STATE_ON + # command success + control_relays.reset_mock(return_value=True) + 
control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state == STATE_ON -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_off( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay light turns off.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.OFF + await init_integration(hass, entry) - state = hass.states.get(LIGHT_RELAY1) - state.state = STATE_ON + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.OFF - # command failed - control_relays.return_value = False + state = hass.states.get(LIGHT_RELAY1) + state.state = STATE_ON - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state != STATE_OFF + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_LIGHT, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: LIGHT_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state != STATE_OFF - state = hass.states.get(LIGHT_RELAY1) - assert state is not None - assert state.state == STATE_OFF + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_LIGHT, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: LIGHT_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(LIGHT_RELAY1) + assert state is not None + assert state.state == STATE_OFF async def test_pushed_output_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the output light changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -320,9 +301,11 @@ async def test_pushed_output_status_change( async def test_pushed_relay_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the relay light changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -348,7 +331,9 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test 
the light is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(LIGHT_OUTPUT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_scene.py b/tests/components/lcn/test_scene.py index 558893bb76f..fcd59693479 100644 --- a/tests/components/lcn/test_scene.py +++ b/tests/components/lcn/test_scene.py @@ -58,7 +58,7 @@ async def test_scene_activate( async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the scene is removed when the config entry is unloaded.""" await init_integration(hass, entry) - await hass.config_entries.async_forward_entry_unload(entry, DOMAIN_SCENE) + await hass.config_entries.async_unload(entry.entry_id) state = hass.states.get("scene.romantic") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_sensor.py b/tests/components/lcn/test_sensor.py index cdcd5a195a3..18335f4b073 100644 --- a/tests/components/lcn/test_sensor.py +++ b/tests/components/lcn/test_sensor.py @@ -1,85 +1,46 @@ """Test for the LCN sensor platform.""" +from unittest.mock import patch + from pypck.inputs import ModStatusLedsAndLogicOps, ModStatusVar from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import LedStatus, LogicOpStatus, Var, VarValue +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection -from homeassistant.const import ( - ATTR_UNIT_OF_MEASUREMENT, - STATE_UNAVAILABLE, - STATE_UNKNOWN, - UnitOfTemperature, -) +from homeassistant.const import STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from .conftest import MockConfigEntry, init_integration + +from tests.common import snapshot_platform + SENSOR_VAR1 = "sensor.sensor_var1" SENSOR_SETPOINT1 = "sensor.sensor_setpoint1" SENSOR_LED6 = "sensor.sensor_led6" SENSOR_LOGICOP1 = "sensor.sensor_logicop1" -async def test_setup_lcn_sensor(hass: HomeAssistant, entry, lcn_connection) -> None: - """Test the setup of sensor.""" - for entity_id in ( - SENSOR_VAR1, - SENSOR_SETPOINT1, - SENSOR_LED6, - SENSOR_LOGICOP1, - ): - state = hass.states.get(entity_id) - assert state is not None - assert state.state == STATE_UNKNOWN - - -async def test_entity_state(hass: HomeAssistant, lcn_connection) -> None: - """Test state of entity.""" - state = hass.states.get(SENSOR_VAR1) - assert state - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS - - state = hass.states.get(SENSOR_SETPOINT1) - assert state - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == UnitOfTemperature.CELSIUS - - state = hass.states.get(SENSOR_LED6) - assert state - - state = hass.states.get(SENSOR_LOGICOP1) - assert state - - -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection +async def test_setup_lcn_sensor( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, ) -> None: - """Test the attributes of an entity.""" + """Test the setup of sensor.""" + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SENSOR]): + await init_integration(hass, entry) - entity_var1 = entity_registry.async_get(SENSOR_VAR1) - assert entity_var1 - assert entity_var1.unique_id == f"{entry.entry_id}-m000007-var1" - assert entity_var1.original_name == "Sensor_Var1" - - entity_r1varsetpoint = 
entity_registry.async_get(SENSOR_SETPOINT1) - assert entity_r1varsetpoint - assert entity_r1varsetpoint.unique_id == f"{entry.entry_id}-m000007-r1varsetpoint" - assert entity_r1varsetpoint.original_name == "Sensor_Setpoint1" - - entity_led6 = entity_registry.async_get(SENSOR_LED6) - assert entity_led6 - assert entity_led6.unique_id == f"{entry.entry_id}-m000007-led6" - assert entity_led6.original_name == "Sensor_Led6" - - entity_logicop1 = entity_registry.async_get(SENSOR_LOGICOP1) - assert entity_logicop1 - assert entity_logicop1.unique_id == f"{entry.entry_id}-m000007-logicop1" - assert entity_logicop1.original_name == "Sensor_LogicOp1" + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) async def test_pushed_variable_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the variable sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -103,9 +64,11 @@ async def test_pushed_variable_status_change( async def test_pushed_ledlogicop_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the led and logicop sensor changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -129,8 +92,10 @@ async def test_pushed_ledlogicop_status_change( assert state.state == "all" -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the sensor is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(SENSOR_VAR1).state == STATE_UNAVAILABLE assert hass.states.get(SENSOR_SETPOINT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_services.py b/tests/components/lcn/test_services.py index 27253a0c7e5..a4ea559cd72 100644 --- a/tests/components/lcn/test_services.py +++ b/tests/components/lcn/test_services.py @@ -2,8 +2,8 @@ from unittest.mock import patch +import pypck import pytest -from syrupy import SnapshotAssertion from homeassistant.components.lcn import DOMAIN from homeassistant.components.lcn.const import ( @@ -41,9 +41,7 @@ from .conftest import ( @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_abs( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_output_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test output_abs service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -61,13 +59,11 @@ async def test_service_output_abs( blocking=True, ) - assert dim_output.await_args.args == snapshot() + dim_output.assert_awaited_with(0, 100, 9) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_output_rel( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_output_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test output_rel service.""" await async_setup_component(hass, "persistent_notification", {}) await 
init_integration(hass, entry) @@ -84,12 +80,12 @@ async def test_service_output_rel( blocking=True, ) - assert rel_output.await_args.args == snapshot() + rel_output.assert_awaited_with(0, 25) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_output_toggle( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test output_toggle service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -107,13 +103,11 @@ async def test_service_output_toggle( blocking=True, ) - assert toggle_output.await_args.args == snapshot() + toggle_output.assert_awaited_with(0, 9) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_relays( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_relays(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test relays service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -126,13 +120,14 @@ async def test_service_relays( blocking=True, ) - assert control_relays.await_args.args == snapshot() + states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] + relay_states = [pypck.lcn_defs.RelayStateModifier[state] for state in states] + + control_relays.assert_awaited_with(relay_states) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_led( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_led(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test led service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -145,13 +140,14 @@ async def test_service_led( blocking=True, ) - assert control_led.await_args.args == snapshot() + led = pypck.lcn_defs.LedPort["LED6"] + led_state = pypck.lcn_defs.LedStatus["BLINK"] + + control_led.assert_awaited_with(led, led_state) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_abs( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_var_abs(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test var_abs service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -169,13 +165,13 @@ async def test_service_var_abs( blocking=True, ) - assert var_abs.await_args.args == snapshot() + var_abs.assert_awaited_with( + pypck.lcn_defs.Var["VAR1"], 75, pypck.lcn_defs.VarUnit.parse("%") + ) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_var_rel( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_var_rel(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test var_rel service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -194,13 +190,16 @@ async def test_service_var_rel( blocking=True, ) - assert var_rel.await_args.args == snapshot() + var_rel.assert_awaited_with( + pypck.lcn_defs.Var["VAR1"], + 10, + pypck.lcn_defs.VarUnit.parse("%"), + pypck.lcn_defs.RelVarRef["CURRENT"], + ) @patch("homeassistant.components.lcn.PchkConnectionManager", 
MockPchkConnectionManager) -async def test_service_var_reset( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_var_reset(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test var_reset service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -213,12 +212,12 @@ async def test_service_var_reset( blocking=True, ) - assert var_reset.await_args.args == snapshot() + var_reset.assert_awaited_with(pypck.lcn_defs.Var["VAR1"]) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_lock_regulator( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test lock_regulator service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -236,13 +235,11 @@ async def test_service_lock_regulator( blocking=True, ) - assert lock_regulator.await_args.args == snapshot() + lock_regulator.assert_awaited_with(0, True) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_send_keys( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_send_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test send_keys service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -260,12 +257,12 @@ async def test_service_send_keys( keys[0][4] = True keys[3][7] = True - assert send_keys.await_args.args == snapshot() + send_keys.assert_awaited_with(keys, pypck.lcn_defs.SendKeyCommand["HIT"]) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_send_keys_hit_deferred( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test send_keys (hit_deferred) service.""" await async_setup_component(hass, "persistent_notification", {}) @@ -292,7 +289,9 @@ async def test_service_send_keys_hit_deferred( blocking=True, ) - assert send_keys_hit_deferred.await_args.args == snapshot() + send_keys_hit_deferred.assert_awaited_with( + keys, 5, pypck.lcn_defs.TimeUnit.parse("S") + ) # wrong key action with ( @@ -316,9 +315,7 @@ async def test_service_send_keys_hit_deferred( @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_lock_keys( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_lock_keys(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test lock_keys service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -331,12 +328,15 @@ async def test_service_lock_keys( blocking=True, ) - assert lock_keys.await_args.args == snapshot() + states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] + lock_states = [pypck.lcn_defs.KeyLockStateModifier[state] for state in states] + + lock_keys.assert_awaited_with(0, lock_states) @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) async def test_service_lock_keys_tab_a_temporary( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test lock_keys (tab_a_temporary) service.""" await 
async_setup_component(hass, "persistent_notification", {}) @@ -358,7 +358,12 @@ async def test_service_lock_keys_tab_a_temporary( blocking=True, ) - assert lock_keys_tab_a_temporary.await_args.args == snapshot() + states = ["OFF", "OFF", "ON", "ON", "TOGGLE", "TOGGLE", "NOCHANGE", "NOCHANGE"] + lock_states = [pypck.lcn_defs.KeyLockStateModifier[state] for state in states] + + lock_keys_tab_a_temporary.assert_awaited_with( + 10, pypck.lcn_defs.TimeUnit.parse("S"), lock_states + ) # wrong table with ( @@ -382,9 +387,7 @@ async def test_service_lock_keys_tab_a_temporary( @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_dyn_text( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_dyn_text(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test dyn_text service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -397,13 +400,11 @@ async def test_service_dyn_text( blocking=True, ) - assert dyn_text.await_args.args == snapshot() + dyn_text.assert_awaited_with(0, "text in row 1") @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) -async def test_service_pck( - hass: HomeAssistant, entry: MockConfigEntry, snapshot: SnapshotAssertion -) -> None: +async def test_service_pck(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test pck service.""" await async_setup_component(hass, "persistent_notification", {}) await init_integration(hass, entry) @@ -416,7 +417,7 @@ async def test_service_pck( blocking=True, ) - assert pck.await_args.args == snapshot() + pck.assert_awaited_with("PIN4") @patch("homeassistant.components.lcn.PchkConnectionManager", MockPchkConnectionManager) diff --git a/tests/components/lcn/test_switch.py b/tests/components/lcn/test_switch.py index f24828c5fcb..f57a51bc8a3 100644 --- a/tests/components/lcn/test_switch.py +++ b/tests/components/lcn/test_switch.py @@ -5,6 +5,7 @@ from unittest.mock import patch from pypck.inputs import ModStatusOutput, ModStatusRelays from pypck.lcn_addr import LcnAddr from pypck.lcn_defs import RelayStateModifier +from syrupy.assertion import SnapshotAssertion from homeassistant.components.lcn.helpers import get_device_connection from homeassistant.components.switch import DOMAIN as DOMAIN_SWITCH @@ -15,11 +16,14 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, STATE_UNAVAILABLE, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er -from .conftest import MockModuleConnection +from .conftest import MockConfigEntry, MockModuleConnection, init_integration + +from tests.common import snapshot_platform SWITCH_OUTPUT1 = "switch.switch_output1" SWITCH_OUTPUT2 = "switch.switch_output2" @@ -27,197 +31,185 @@ SWITCH_RELAY1 = "switch.switch_relay1" SWITCH_RELAY2 = "switch.switch_relay2" -async def test_setup_lcn_switch(hass: HomeAssistant, lcn_connection) -> None: +async def test_setup_lcn_switch( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: """Test the setup of switch.""" - for entity_id in ( - SWITCH_OUTPUT1, - SWITCH_OUTPUT2, - SWITCH_RELAY1, - SWITCH_RELAY2, - ): - state = hass.states.get(entity_id) - assert state is not None + with patch("homeassistant.components.lcn.PLATFORMS", [Platform.SWITCH]): + await init_integration(hass, entry) + + await snapshot_platform(hass, entity_registry, 
snapshot, entry.entry_id) + + +async def test_output_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output switch turns on.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 0) + + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_OFF + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 100, 0) + + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_ON + + +async def test_output_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test the output switch turns off.""" + await init_integration(hass, entry) + + with patch.object(MockModuleConnection, "dim_output") as dim_output: + state = hass.states.get(SWITCH_OUTPUT1) + state.state = STATE_ON + + # command failed + dim_output.return_value = False + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 0) + + state = hass.states.get(SWITCH_OUTPUT1) + assert state.state == STATE_ON + + # command success + dim_output.reset_mock(return_value=True) + dim_output.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, + blocking=True, + ) + + dim_output.assert_awaited_with(0, 0, 0) + + state = hass.states.get(SWITCH_OUTPUT1) assert state.state == STATE_OFF -async def test_entity_attributes( - hass: HomeAssistant, entity_registry: er.EntityRegistry, entry, lcn_connection -) -> None: - """Test the attributes of an entity.""" - - entity_output = entity_registry.async_get(SWITCH_OUTPUT1) - - assert entity_output - assert entity_output.unique_id == f"{entry.entry_id}-m000007-output1" - assert entity_output.original_name == "Switch_Output1" - - entity_relay = entity_registry.async_get(SWITCH_RELAY1) - - assert entity_relay - assert entity_relay.unique_id == f"{entry.entry_id}-m000007-relay1" - assert entity_relay.original_name == "Switch_Relay1" - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_on(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the output switch turns on.""" - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_OFF - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 100, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_ON - - -@patch.object(MockModuleConnection, "dim_output") -async def test_output_turn_off(dim_output, hass: HomeAssistant, lcn_connection) -> None: - """Test the 
output switch turns off.""" - state = hass.states.get(SWITCH_OUTPUT1) - state.state = STATE_ON - - # command failed - dim_output.return_value = False - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_ON - - # command success - dim_output.reset_mock(return_value=True) - dim_output.return_value = True - - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_OUTPUT1}, - blocking=True, - ) - await hass.async_block_till_done() - dim_output.assert_awaited_with(0, 0, 0) - - state = hass.states.get(SWITCH_OUTPUT1) - assert state.state == STATE_OFF - - -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_on( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_on(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay switch turns on.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.ON + await init_integration(hass, entry) - # command failed - control_relays.return_value = False + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.ON - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_OFF + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_ON, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_OFF - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_ON + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_ON -@patch.object(MockModuleConnection, "control_relays") -async def test_relay_turn_off( - control_relays, hass: HomeAssistant, lcn_connection -) -> None: +async def test_relay_turn_off(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the relay switch turns off.""" - states = [RelayStateModifier.NOCHANGE] * 8 - states[0] = RelayStateModifier.OFF + await init_integration(hass, entry) - state = hass.states.get(SWITCH_RELAY1) - state.state = STATE_ON + with patch.object(MockModuleConnection, "control_relays") as control_relays: + states = [RelayStateModifier.NOCHANGE] * 8 + states[0] = RelayStateModifier.OFF - # command failed - control_relays.return_value = False + state = hass.states.get(SWITCH_RELAY1) + state.state = STATE_ON - await hass.services.async_call( - 
DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + # command failed + control_relays.return_value = False - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_ON + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) - # command success - control_relays.reset_mock(return_value=True) - control_relays.return_value = True + control_relays.assert_awaited_with(states) - await hass.services.async_call( - DOMAIN_SWITCH, - SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: SWITCH_RELAY1}, - blocking=True, - ) - await hass.async_block_till_done() - control_relays.assert_awaited_with(states) + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_ON - state = hass.states.get(SWITCH_RELAY1) - assert state.state == STATE_OFF + # command success + control_relays.reset_mock(return_value=True) + control_relays.return_value = True + + await hass.services.async_call( + DOMAIN_SWITCH, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: SWITCH_RELAY1}, + blocking=True, + ) + + control_relays.assert_awaited_with(states) + + state = hass.states.get(SWITCH_RELAY1) + assert state.state == STATE_OFF async def test_pushed_output_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the output switch changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) @@ -239,9 +231,11 @@ async def test_pushed_output_status_change( async def test_pushed_relay_status_change( - hass: HomeAssistant, entry, lcn_connection + hass: HomeAssistant, entry: MockConfigEntry ) -> None: """Test the relay switch changes its state on status received.""" + await init_integration(hass, entry) + device_connection = get_device_connection(hass, (0, 7, False), entry) address = LcnAddr(0, 7, False) states = [False] * 8 @@ -265,7 +259,9 @@ async def test_pushed_relay_status_change( assert state.state == STATE_OFF -async def test_unload_config_entry(hass: HomeAssistant, entry, lcn_connection) -> None: +async def test_unload_config_entry(hass: HomeAssistant, entry: MockConfigEntry) -> None: """Test the switch is removed when the config entry is unloaded.""" + await init_integration(hass, entry) + await hass.config_entries.async_unload(entry.entry_id) assert hass.states.get(SWITCH_OUTPUT1).state == STATE_UNAVAILABLE diff --git a/tests/components/lcn/test_websocket.py b/tests/components/lcn/test_websocket.py index f1f0a19b572..2c5fff89e19 100644 --- a/tests/components/lcn/test_websocket.py +++ b/tests/components/lcn/test_websocket.py @@ -1,8 +1,11 @@ """LCN Websocket Tests.""" +from typing import Any + from pypck.lcn_addr import LcnAddr import pytest +from homeassistant.components.lcn import AddressType from homeassistant.components.lcn.const import CONF_DOMAIN_DATA from homeassistant.components.lcn.helpers import get_device_config, get_resource from homeassistant.const import ( @@ -16,6 +19,8 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant +from .conftest import MockConfigEntry, init_integration + from tests.typing import WebSocketGenerator DEVICES_PAYLOAD = {CONF_TYPE: "lcn/devices", "entry_id": ""} @@ -52,11 +57,12 @@ ENTITIES_DELETE_PAYLOAD = { async def test_lcn_devices_command( - hass: HomeAssistant, hass_ws_client: 
WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices command.""" - client = await hass_ws_client(hass) + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id({**DEVICES_PAYLOAD, "entry_id": entry.entry_id}) res = await client.receive_json() @@ -79,11 +85,12 @@ async def test_lcn_devices_command( async def test_lcn_entities_command( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - entry, - lcn_connection, + entry: MockConfigEntry, payload, ) -> None: """Test lcn/entities command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id( { @@ -107,10 +114,11 @@ async def test_lcn_entities_command( async def test_lcn_devices_scan_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices/scan command.""" # add new module which is not stored in config_entry + lcn_connection = await init_integration(hass, entry) lcn_connection.get_address_conn(LcnAddr(0, 10, False)) client = await hass_ws_client(hass) @@ -129,9 +137,11 @@ async def test_lcn_devices_scan_command( async def test_lcn_devices_add_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices/add command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) assert get_device_config((0, 10, False), entry) is None @@ -144,9 +154,11 @@ async def test_lcn_devices_add_command( async def test_lcn_devices_delete_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/devices/delete command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) assert get_device_config((0, 7, False), entry) @@ -160,9 +172,11 @@ async def test_lcn_devices_delete_command( async def test_lcn_entities_add_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/entities/add command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) entity_config = { @@ -185,9 +199,11 @@ async def test_lcn_entities_add_command( async def test_lcn_entities_delete_command( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry: MockConfigEntry ) -> None: """Test lcn/entities/delete command.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) assert ( @@ -239,12 +255,14 @@ async def test_lcn_entities_delete_command( async def test_lcn_command_host_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - lcn_connection, - payload, - entity_id, - result, + entry: MockConfigEntry, + payload: dict[str, str], + entity_id: str, + result: bool, ) -> None: """Test lcn commands for unknown host.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id({**payload, "entry_id": entity_id}) @@ -265,13 +283,14 @@ async def test_lcn_command_host_error( async def 
test_lcn_command_address_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - entry, - lcn_connection, - payload, - address, - result, + entry: MockConfigEntry, + payload: dict[str, Any], + address: AddressType, + result: bool, ) -> None: """Test lcn commands for address error.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id( {**payload, "entry_id": entry.entry_id, CONF_ADDRESS: address} @@ -285,10 +304,11 @@ async def test_lcn_command_address_error( async def test_lcn_entities_add_existing_error( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - entry, - lcn_connection, + entry: MockConfigEntry, ) -> None: """Test lcn commands for address error.""" + await init_integration(hass, entry) + client = await hass_ws_client(hass) await client.send_json_auto_id( { From 5108e1a1cd08b26bd1968d6edc16a0546f035895 Mon Sep 17 00:00:00 2001 From: Avi Miller Date: Sun, 8 Sep 2024 17:53:32 +1000 Subject: [PATCH 1561/1635] Bump aiolifx and aiolifx-themes to support more than 82 zones (#125487) Signed-off-by: Avi Miller --- homeassistant/components/lifx/manifest.json | 4 +- requirements_all.txt | 4 +- requirements_test_all.txt | 4 +- tests/components/lifx/__init__.py | 20 +++-- tests/components/lifx/test_diagnostics.py | 33 ++++++++ tests/components/lifx/test_light.py | 85 +++++++++++++-------- 6 files changed, 109 insertions(+), 41 deletions(-) diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 3ef70f16467..c7d8a27a1c7 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -48,8 +48,8 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.9", + "aiolifx==1.1.1", "aiolifx-effects==0.3.2", - "aiolifx-themes==0.5.0" + "aiolifx-themes==0.5.5" ] } diff --git a/requirements_all.txt b/requirements_all.txt index 16b0cc537d7..adf4f7e064b 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -273,10 +273,10 @@ aiokef==0.2.16 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.5.0 +aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.0.9 +aiolifx==1.1.1 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 53e29289127..8d1e1d24b13 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -255,10 +255,10 @@ aiokafka==0.10.0 aiolifx-effects==0.3.2 # homeassistant.components.lifx -aiolifx-themes==0.5.0 +aiolifx-themes==0.5.5 # homeassistant.components.lifx -aiolifx==1.0.9 +aiolifx==1.1.1 # homeassistant.components.livisi aiolivisi==0.0.19 diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 432e7673db6..81b913da6ce 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -65,10 +65,13 @@ class MockLifxCommand: """Init command.""" self.bulb = bulb self.calls = [] - self.msg_kwargs = kwargs + self.msg_kwargs = { + k.removeprefix("msg_"): v for k, v in kwargs.items() if k.startswith("msg_") + } for k, v in kwargs.items(): - if k != "callb": - setattr(self.bulb, k, v) + if k.startswith("msg_") or k == "callb": + continue + setattr(self.bulb, k, v) def __call__(self, *args, **kwargs): """Call command.""" @@ -156,9 +159,16 @@ def _mocked_infrared_bulb() -> Light: def _mocked_light_strip() -> Light: bulb = _mocked_bulb() bulb.product = 31 # LIFX Z - bulb.color_zones = 
[MagicMock(), MagicMock()] + bulb.zones_count = 3 + bulb.color_zones = [MagicMock()] * 3 bulb.effect = {"effect": "MOVE", "speed": 3, "duration": 0, "direction": "RIGHT"} - bulb.get_color_zones = MockLifxCommand(bulb) + bulb.get_color_zones = MockLifxCommand( + bulb, + msg_seq_num=bulb.seq_next(), + msg_count=bulb.zones_count, + msg_index=0, + msg_color=bulb.color_zones, + ) bulb.set_color_zones = MockLifxCommand(bulb) bulb.get_multizone_effect = MockLifxCommand(bulb) bulb.set_multizone_effect = MockLifxCommand(bulb) diff --git a/tests/components/lifx/test_diagnostics.py b/tests/components/lifx/test_diagnostics.py index e3588dd3ed1..22e335612f8 100644 --- a/tests/components/lifx/test_diagnostics.py +++ b/tests/components/lifx/test_diagnostics.py @@ -9,6 +9,7 @@ from . import ( DEFAULT_ENTRY_TITLE, IP_ADDRESS, SERIAL, + MockLifxCommand, _mocked_bulb, _mocked_clean_bulb, _mocked_infrared_bulb, @@ -188,6 +189,22 @@ async def test_legacy_multizone_bulb_diagnostics( ) config_entry.add_to_hass(hass) bulb = _mocked_light_strip() + bulb.get_color_zones = MockLifxCommand( + bulb, + msg_seq_num=0, + msg_count=8, + msg_color=[ + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + ], + msg_index=0, + ) bulb.zones_count = 8 bulb.color_zones = [ (54612, 65535, 65535, 3500), @@ -302,6 +319,22 @@ async def test_multizone_bulb_diagnostics( config_entry.add_to_hass(hass) bulb = _mocked_light_strip() bulb.product = 38 + bulb.get_color_zones = MockLifxCommand( + bulb, + msg_seq_num=0, + msg_count=8, + msg_color=[ + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (54612, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + (46420, 65535, 65535, 3500), + ], + msg_index=0, + ) bulb.zones_count = 8 bulb.color_zones = [ (54612, 65535, 65535, 3500), diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index a642347b4e6..1ce7c69d7fa 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -192,15 +192,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 100}, blocking=True, ) - call_dict = bulb.set_color_zones.calls[0][1] - call_dict.pop("callb") - assert call_dict == { - "apply": 0, - "color": [], - "duration": 0, - "end_index": 0, - "start_index": 0, - } + assert len(bulb.set_color_zones.calls) == 0 bulb.set_color_zones.reset_mock() await hass.services.async_call( @@ -209,15 +201,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: {ATTR_ENTITY_ID: entity_id, ATTR_HS_COLOR: (10, 30)}, blocking=True, ) - call_dict = bulb.set_color_zones.calls[0][1] - call_dict.pop("callb") - assert call_dict == { - "apply": 0, - "color": [], - "duration": 0, - "end_index": 0, - "start_index": 0, - } + assert len(bulb.set_color_zones.calls) == 0 bulb.set_color_zones.reset_mock() bulb.color_zones = [ @@ -238,7 +222,7 @@ async def test_light_strip(hass: HomeAssistant) -> None: blocking=True, ) # Single color uses the fast path - assert bulb.set_color.calls[0][0][0] == [1820, 19660, 65535, 3500] + assert bulb.set_color.calls[1][0][0] == [1820, 19660, 65535, 3500] bulb.set_color.reset_mock() assert len(bulb.set_color_zones.calls) == 0 @@ -422,7 +406,9 @@ async def test_light_strip(hass: HomeAssistant) -> 
None: blocking=True, ) - bulb.get_color_zones = MockLifxCommand(bulb) + bulb.get_color_zones = MockLifxCommand( + bulb, msg_seq_num=0, msg_color=[0, 0, 65535, 3500] * 3, msg_index=0, msg_count=3 + ) bulb.get_color = MockFailingLifxCommand(bulb) with pytest.raises(HomeAssistantError): @@ -587,14 +573,14 @@ async def test_extended_multizone_messages(hass: HomeAssistant) -> None: bulb.set_extended_color_zones.reset_mock() bulb.color_zones = [ - (0, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (54612, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), - (46420, 65535, 65535, 3500), + [0, 65535, 65535, 3500], + [54612, 65535, 65535, 3500], + [54612, 65535, 65535, 3500], + [54612, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], + [46420, 65535, 65535, 3500], ] await hass.services.async_call( @@ -1308,7 +1294,11 @@ async def test_config_zoned_light_strip_fails( def __call__(self, callb=None, *args, **kwargs): """Call command.""" self.call_count += 1 - response = None if self.call_count >= 2 else MockMessage() + response = ( + None + if self.call_count >= 2 + else MockMessage(seq_num=0, color=[], index=0, count=0) + ) if callb: callb(self.bulb, response) @@ -1349,7 +1339,15 @@ async def test_legacy_zoned_light_strip( self.call_count += 1 self.bulb.color_zones = [None] * 12 if callb: - callb(self.bulb, MockMessage()) + callb( + self.bulb, + MockMessage( + seq_num=0, + index=0, + count=self.bulb.zones_count, + color=self.bulb.color_zones, + ), + ) get_color_zones_mock = MockPopulateLifxZonesCommand(light_strip) light_strip.get_color_zones = get_color_zones_mock @@ -1946,6 +1944,33 @@ async def test_light_strip_zones_not_populated_yet(hass: HomeAssistant) -> None: bulb.power_level = 65535 bulb.color_zones = None bulb.color = [65535, 65535, 65535, 65535] + bulb.get_color_zones = next( + iter( + [ + MockLifxCommand( + bulb, + msg_seq_num=0, + msg_color=[0, 0, 65535, 3500] * 8, + msg_index=0, + msg_count=16, + ), + MockLifxCommand( + bulb, + msg_seq_num=1, + msg_color=[0, 0, 65535, 3500] * 8, + msg_index=0, + msg_count=16, + ), + MockLifxCommand( + bulb, + msg_seq_num=2, + msg_color=[0, 0, 65535, 3500] * 8, + msg_index=8, + msg_count=16, + ), + ] + ) + ) assert bulb.get_color_zones.calls == [] with ( From bfe19e82ff5ce024308e310f3803d9612c01fd45 Mon Sep 17 00:00:00 2001 From: Willem-Jan van Rootselaar Date: Sun, 8 Sep 2024 10:41:54 +0200 Subject: [PATCH 1562/1635] Add tests for BSBLAN climate component (#124524) * chore: Add tests for BSBLAN climate component * fix return types * fix MAC data * chore: Update BSBLAN climate component tests used setup from conftest added setup for farhenheit temp unit * chore: Update BSBLAN climate component tests use syrupy to compare results * add test for temp_unit * update climate tests set current_temperature to None in test case. Is this the correct way for testing? 
* chore: Update BSBLAN diagnostics to handle asynchronous data retrieval * chore: Refactor BSBLAN conftest.py to simplify fixture and patching * chore: Update BSBLAN climate component tests 100% test coverage * chore: Update BSBLAN diagnostics to handle asynchronous data retrieval * chore: Update snapshots * Fix BSBLAN climate test for async_set_preset_mode - Update test_async_set_preset_mode to correctly handle ServiceValidationError - Check for specific translation key instead of full error message - Ensure consistency between local tests and CI environment - Import ServiceValidationError explicitly for clarity * Update homeassistant/components/bsblan/entity.py Co-authored-by: Joost Lekkerkerker * chore: Update BSBLAN conftest.py to simplify fixture and patching * chore: Update BSBLAN integration setup function parameter name * chore: removed set_static_value * refactor: Improve BSBLANClimate async_set_preset_mode method This commit refactors the async_set_preset_mode method in the BSBLANClimate class to improve code readability and maintainability. The method now checks if the HVAC mode is not set to AUTO and the preset mode is not NONE before raising a ServiceValidationError. Co-authored-by: Joost Lekkerkerker * refactor: Improve tests test_celsius_fahrenheit test_climate_entity_properties test_async_set_hvac_mode test_async_set_preset_mode still broken. Not sure why hvac mode will not set. THis causes error with preset mode set * update snapshot * fix DOMAIN bsblan * refactor: Improve BSBLANClimate async_set_data method * refactor: fix last tests * refactor: Simplify async_get_config_entry_diagnostics method * refactor: Improve BSBLANClimate async_set_temperature method This commit improves the async_set_temperature method in the BSBLANClimate class. It removes the unnecessary parameter "expected_result" and simplifies the code by directly calling the service to set the temperature. The method now correctly asserts that the thermostat method is called with the correct temperature. * refactor: Add static data to async_get_config_entry_diagnostics * refactor: Add static data to async_get_config_entry_diagnostics right place * refactor: Improve error message for setting preset mode This commit updates the error message in the BSBLANClimate class when trying to set the preset mode. 
* refactor: Improve tests * Fix --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/bsblan/climate.py | 17 +- tests/components/bsblan/__init__.py | 17 + tests/components/bsblan/conftest.py | 3 +- .../components/bsblan/fixtures/static_F.json | 20 ++ .../bsblan/snapshots/test_climate.ambr | 220 +++++++++++++ tests/components/bsblan/test_climate.py | 307 ++++++++++++++++++ tests/components/bsblan/test_diagnostics.py | 4 + 7 files changed, 577 insertions(+), 11 deletions(-) create mode 100644 tests/components/bsblan/fixtures/static_F.json create mode 100644 tests/components/bsblan/snapshots/test_climate.ambr create mode 100644 tests/components/bsblan/test_climate.py diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index ae7116143df..3a204a9e0c2 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -126,15 +126,14 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): async def async_set_preset_mode(self, preset_mode: str) -> None: """Set preset mode.""" - # only allow preset mode when hvac mode is auto - if self.hvac_mode == HVACMode.AUTO: - await self.async_set_data(preset_mode=preset_mode) - else: + if self.hvac_mode != HVACMode.AUTO and preset_mode != PRESET_NONE: raise ServiceValidationError( + "Preset mode can only be set when HVAC mode is set to 'auto'", translation_domain=DOMAIN, translation_key="set_preset_mode_error", translation_placeholders={"preset_mode": preset_mode}, ) + await self.async_set_data(preset_mode=preset_mode) async def async_set_temperature(self, **kwargs: Any) -> None: """Set new target temperatures.""" @@ -148,11 +147,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity): if ATTR_HVAC_MODE in kwargs: data[ATTR_HVAC_MODE] = kwargs[ATTR_HVAC_MODE] if ATTR_PRESET_MODE in kwargs: - # If preset mode is None, set hvac to auto - if kwargs[ATTR_PRESET_MODE] == PRESET_NONE: - data[ATTR_HVAC_MODE] = HVACMode.AUTO - else: - data[ATTR_HVAC_MODE] = kwargs[ATTR_PRESET_MODE] + if kwargs[ATTR_PRESET_MODE] == PRESET_ECO: + data[ATTR_HVAC_MODE] = PRESET_ECO + elif kwargs[ATTR_PRESET_MODE] == PRESET_NONE: + data[ATTR_HVAC_MODE] = PRESET_NONE + try: await self.coordinator.client.thermostat(**data) except BSBLANError as err: diff --git a/tests/components/bsblan/__init__.py b/tests/components/bsblan/__init__.py index d233fa068ea..3892fcaaaca 100644 --- a/tests/components/bsblan/__init__.py +++ b/tests/components/bsblan/__init__.py @@ -1 +1,18 @@ """Tests for the bsblan integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_with_selected_platforms( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> None: + """Set up the BSBLAN integration with the selected platforms.""" + config_entry.add_to_hass(hass) + with patch("homeassistant.components.bsblan.PLATFORMS", platforms): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 13d4017d7c8..96445a4bb23 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -40,7 +40,7 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_bsblan() -> Generator[MagicMock]: +def mock_bsblan() -> Generator[MagicMock, None, None]: """Return a mocked 
BSBLAN client.""" with ( patch("homeassistant.components.bsblan.BSBLAN", autospec=True) as bsblan_mock, @@ -52,7 +52,6 @@ def mock_bsblan() -> Generator[MagicMock]: load_fixture("device.json", DOMAIN) ) bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) - bsblan.static_values.return_value = StaticState.from_json( load_fixture("static.json", DOMAIN) ) diff --git a/tests/components/bsblan/fixtures/static_F.json b/tests/components/bsblan/fixtures/static_F.json new file mode 100644 index 00000000000..a61e870f6e5 --- /dev/null +++ b/tests/components/bsblan/fixtures/static_F.json @@ -0,0 +1,20 @@ +{ + "min_temp": { + "name": "Room temp frost protection setpoint", + "error": 0, + "value": "8.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°F" + }, + "max_temp": { + "name": "Summer/winter changeover temp heat circuit 1", + "error": 0, + "value": "20.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°F" + } +} diff --git a/tests/components/bsblan/snapshots/test_climate.ambr b/tests/components/bsblan/snapshots/test_climate.ambr new file mode 100644 index 00000000000..4eb70fe2658 --- /dev/null +++ b/tests/components/bsblan/snapshots/test_climate.ambr @@ -0,0 +1,220 @@ +# serializer version: 1 +# name: test_celsius_fahrenheit[static.json][climate.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_modes': list([ + 'eco', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:80:41:19:69:90-climate', + 'unit_of_measurement': None, + }) +# --- +# name: test_celsius_fahrenheit[static.json][climate.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 18.6, + 'friendly_name': 'BSB-LAN', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'eco', + 'none', + ]), + 'supported_features': , + 'temperature': 18.5, + }), + 'context': , + 'entity_id': 'climate.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_celsius_fahrenheit[static_F.json][climate.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': -6.7, + 'min_temp': -13.3, + 'preset_modes': list([ + 'eco', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:80:41:19:69:90-climate', + 
'unit_of_measurement': None, + }) +# --- +# name: test_celsius_fahrenheit[static_F.json][climate.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': -7.4, + 'friendly_name': 'BSB-LAN', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': -6.7, + 'min_temp': -13.3, + 'preset_mode': 'none', + 'preset_modes': list([ + 'eco', + 'none', + ]), + 'supported_features': , + 'temperature': -7.5, + }), + 'context': , + 'entity_id': 'climate.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- +# name: test_climate_entity_properties[climate.bsb_lan-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_modes': list([ + 'eco', + 'none', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.bsb_lan', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'bsblan', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:80:41:19:69:90-climate', + 'unit_of_measurement': None, + }) +# --- +# name: test_climate_entity_properties[climate.bsb_lan-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 18.6, + 'friendly_name': 'BSB-LAN', + 'hvac_modes': list([ + , + , + , + ]), + 'max_temp': 20.0, + 'min_temp': 8.0, + 'preset_mode': 'none', + 'preset_modes': list([ + 'eco', + 'none', + ]), + 'supported_features': , + 'temperature': 18.5, + }), + 'context': , + 'entity_id': 'climate.bsb_lan', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'heat', + }) +# --- diff --git a/tests/components/bsblan/test_climate.py b/tests/components/bsblan/test_climate.py new file mode 100644 index 00000000000..c519c3043da --- /dev/null +++ b/tests/components/bsblan/test_climate.py @@ -0,0 +1,307 @@ +"""Tests for the BSB-Lan climate platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from bsblan import BSBLANError, StaticState +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.bsblan.const import DOMAIN +from homeassistant.components.climate import ( + ATTR_HVAC_MODE, + ATTR_PRESET_MODE, + DOMAIN as CLIMATE_DOMAIN, + PRESET_ECO, + PRESET_NONE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_PRESET_MODE, + SERVICE_SET_TEMPERATURE, + HVACMode, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +import homeassistant.helpers.entity_registry as er + +from . 
import setup_with_selected_platforms + +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_json_object_fixture, + snapshot_platform, +) + +ENTITY_ID = "climate.bsb_lan" + + +@pytest.mark.parametrize( + ("static_file"), + [ + ("static.json"), + ("static_F.json"), + ], +) +async def test_celsius_fahrenheit( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + static_file: str, +) -> None: + """Test Celsius and Fahrenheit temperature units.""" + + static_data = load_json_object_fixture(static_file, DOMAIN) + + mock_bsblan.static_values.return_value = StaticState.from_dict(static_data) + + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_climate_entity_properties( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test the climate entity properties.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + # Test when current_temperature is "---" + mock_current_temp = MagicMock() + mock_current_temp.value = "---" + mock_bsblan.state.return_value.current_temperature = mock_current_temp + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes["current_temperature"] is None + + # Test target_temperature + mock_target_temp = MagicMock() + mock_target_temp.value = "23.5" + mock_bsblan.state.return_value.target_temperature = mock_target_temp + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes["temperature"] == 23.5 + + # Test hvac_mode + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = HVACMode.AUTO + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.state == HVACMode.AUTO + + # Test preset_mode + mock_hvac_mode.value = PRESET_ECO + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get(ENTITY_ID) + assert state.attributes["preset_mode"] == PRESET_ECO + + +@pytest.mark.parametrize( + "mode", + [HVACMode.HEAT, HVACMode.AUTO, HVACMode.OFF], +) +async def test_async_set_hvac_mode( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + mode: HVACMode, +) -> None: + """Test setting HVAC mode via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # Call the service to set HVAC mode + await hass.services.async_call( + domain=CLIMATE_DOMAIN, + service=SERVICE_SET_HVAC_MODE, + service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: mode}, + blocking=True, + ) + + # Assert that the thermostat method was called + mock_bsblan.thermostat.assert_called_once_with(hvac_mode=mode) + mock_bsblan.thermostat.reset_mock() + + +@pytest.mark.parametrize( + ("hvac_mode", "preset_mode"), + [ + (HVACMode.AUTO, 
PRESET_ECO), + (HVACMode.AUTO, PRESET_NONE), + ], +) +async def test_async_set_preset_mode_succes( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + hvac_mode: HVACMode, + preset_mode: str, +) -> None: + """Test setting preset mode via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # patch hvac_mode + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = hvac_mode + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + # Attempt to set the preset mode + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset_mode}, + blocking=True, + ) + await hass.async_block_till_done() + + +@pytest.mark.parametrize( + ("hvac_mode", "preset_mode"), + [ + ( + HVACMode.HEAT, + PRESET_ECO, + ) + ], +) +async def test_async_set_preset_mode_error( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + hvac_mode: HVACMode, + preset_mode: str, +) -> None: + """Test setting preset mode via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # patch hvac_mode + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = hvac_mode + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + # Attempt to set the preset mode + error_message = "Preset mode can only be set when HVAC mode is set to 'auto'" + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset_mode}, + blocking=True, + ) + + +@pytest.mark.parametrize( + ("target_temp"), + [ + (8.0), # Min temperature + (15.0), # Mid-range temperature + (20.0), # Max temperature + ], +) +async def test_async_set_temperature( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, + target_temp: float, +) -> None: + """Test setting temperature via service call.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + await hass.services.async_call( + domain=CLIMATE_DOMAIN, + service=SERVICE_SET_TEMPERATURE, + service_data={ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: target_temp}, + blocking=True, + ) + # Assert that the thermostat method was called with the correct temperature + mock_bsblan.thermostat.assert_called_once_with(target_temperature=target_temp) + + +async def test_async_set_data( + hass: HomeAssistant, + mock_bsblan: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test setting data via service calls.""" + await setup_with_selected_platforms(hass, mock_config_entry, [Platform.CLIMATE]) + + # Test setting temperature + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 19}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once_with(target_temperature=19) + mock_bsblan.thermostat.reset_mock() + + # Test setting HVAC mode + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once_with(hvac_mode=HVACMode.HEAT) + mock_bsblan.thermostat.reset_mock() + + # Patch HVAC mode to AUTO + mock_hvac_mode = MagicMock() + mock_hvac_mode.value = HVACMode.AUTO + mock_bsblan.state.return_value.hvac_mode = mock_hvac_mode + + # Test setting preset mode to ECO + await 
hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once_with(hvac_mode=PRESET_ECO) + mock_bsblan.thermostat.reset_mock() + + # Test setting preset mode to NONE + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_PRESET_MODE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_NONE}, + blocking=True, + ) + mock_bsblan.thermostat.assert_called_once() + mock_bsblan.thermostat.reset_mock() + + # Test error handling + mock_bsblan.thermostat.side_effect = BSBLANError("Test error") + error_message = "An error occurred while updating the BSBLAN device" + with pytest.raises(HomeAssistantError, match=error_message): + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 20}, + blocking=True, + ) diff --git a/tests/components/bsblan/test_diagnostics.py b/tests/components/bsblan/test_diagnostics.py index 8939456c2ac..aea53f8a1a2 100644 --- a/tests/components/bsblan/test_diagnostics.py +++ b/tests/components/bsblan/test_diagnostics.py @@ -1,5 +1,7 @@ """Tests for the diagnostics data provided by the BSBLan integration.""" +from unittest.mock import AsyncMock + from syrupy import SnapshotAssertion from homeassistant.core import HomeAssistant @@ -11,11 +13,13 @@ from tests.typing import ClientSessionGenerator async def test_diagnostics( hass: HomeAssistant, + mock_bsblan: AsyncMock, hass_client: ClientSessionGenerator, init_integration: MockConfigEntry, snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" + diagnostics_data = await get_diagnostics_for_config_entry( hass, hass_client, init_integration ) From 3fa24f87c0573b486aa75aace82de99631d2a287 Mon Sep 17 00:00:00 2001 From: Alan Murray Date: Sun, 8 Sep 2024 18:42:54 +1000 Subject: [PATCH 1563/1635] Change of acmeda element unique_id (#124963) * Update base.py Change unique_id to be explicitly a string. 
* Update __init__.py Add unique id migration * unique_id migration unit tests * Update __init__.py Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> * Update __init__.py Fixed ruff formatting issue * Update __init__.py * Update __init__.py * In tests, load entity registries as test fixtures * Fix * Fix --------- Co-authored-by: epenet <6771947+epenet@users.noreply.github.com> Co-authored-by: Joostlek --- homeassistant/components/acmeda/__init__.py | 17 +++++++++ homeassistant/components/acmeda/base.py | 2 +- tests/components/acmeda/conftest.py | 20 +++++++++++ tests/components/acmeda/test_cover.py | 28 +++++++++++++++ tests/components/acmeda/test_sensor.py | 39 +++++++++++++++++++++ 5 files changed, 105 insertions(+), 1 deletion(-) create mode 100644 tests/components/acmeda/conftest.py create mode 100644 tests/components/acmeda/test_cover.py create mode 100644 tests/components/acmeda/test_sensor.py diff --git a/homeassistant/components/acmeda/__init__.py b/homeassistant/components/acmeda/__init__.py index d6491767dcc..62a62795a05 100644 --- a/homeassistant/components/acmeda/__init__.py +++ b/homeassistant/components/acmeda/__init__.py @@ -3,6 +3,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant +import homeassistant.helpers.entity_registry as er from .hub import PulseHub @@ -17,6 +18,9 @@ async def async_setup_entry( hass: HomeAssistant, config_entry: AcmedaConfigEntry ) -> bool: """Set up Rollease Acmeda Automate hub from a config entry.""" + + await _migrate_unique_ids(hass, config_entry) + hub = PulseHub(hass, config_entry) if not await hub.async_setup(): @@ -28,6 +32,19 @@ async def async_setup_entry( return True +async def _migrate_unique_ids(hass: HomeAssistant, entry: AcmedaConfigEntry) -> None: + """Migrate pre-config flow unique ids.""" + entity_registry = er.async_get(hass) + registry_entries = er.async_entries_for_config_entry( + entity_registry, entry.entry_id + ) + for reg_entry in registry_entries: + if isinstance(reg_entry.unique_id, int): # type: ignore[unreachable] + entity_registry.async_update_entity( # type: ignore[unreachable] + reg_entry.entity_id, new_unique_id=str(reg_entry.unique_id) + ) + + async def async_unload_entry( hass: HomeAssistant, config_entry: AcmedaConfigEntry ) -> bool: diff --git a/homeassistant/components/acmeda/base.py b/homeassistant/components/acmeda/base.py index 7596374684d..149fceaa2df 100644 --- a/homeassistant/components/acmeda/base.py +++ b/homeassistant/components/acmeda/base.py @@ -67,7 +67,7 @@ class AcmedaBase(entity.Entity): @property def unique_id(self) -> str: """Return the unique ID of this roller.""" - return self.roller.id # type: ignore[no-any-return] + return str(self.roller.id) @property def device_id(self) -> str: diff --git a/tests/components/acmeda/conftest.py b/tests/components/acmeda/conftest.py new file mode 100644 index 00000000000..2c980351c09 --- /dev/null +++ b/tests/components/acmeda/conftest.py @@ -0,0 +1,20 @@ +"""Define fixtures available for all Acmeda tests.""" + +import pytest + +from homeassistant.components.acmeda.const import DOMAIN +from homeassistant.const import CONF_HOST +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + data={CONF_HOST: "127.0.0.1"}, + ) + 
mock_config_entry.add_to_hass(hass) + return mock_config_entry diff --git a/tests/components/acmeda/test_cover.py b/tests/components/acmeda/test_cover.py new file mode 100644 index 00000000000..0d908ecc915 --- /dev/null +++ b/tests/components/acmeda/test_cover.py @@ -0,0 +1,28 @@ +"""Define tests for the Acmeda config flow.""" + +from homeassistant.components.acmeda.const import DOMAIN +from homeassistant.components.cover import DOMAIN as COVER_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_cover_id_migration( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test migrating unique id.""" + mock_config_entry.add_to_hass(hass) + entity_registry.async_get_or_create( + COVER_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry + ) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + await hass.async_block_till_done() + entities = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + assert len(entities) == 1 + assert entities[0].unique_id == "1234567890123" diff --git a/tests/components/acmeda/test_sensor.py b/tests/components/acmeda/test_sensor.py new file mode 100644 index 00000000000..bf7c16dda46 --- /dev/null +++ b/tests/components/acmeda/test_sensor.py @@ -0,0 +1,39 @@ +"""Define tests for the Acmeda config flow.""" + +import pytest + +from homeassistant.components.acmeda.const import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Return the default mocked config entry.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + data={"host": "127.0.0.1"}, + ) + mock_config_entry.add_to_hass(hass) + return mock_config_entry + + +async def test_sensor_id_migration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test migrating unique id.""" + mock_config_entry.add_to_hass(hass) + entity_registry = er.async_get(hass) + entity_registry.async_get_or_create( + SENSOR_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry + ) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + entities = er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + assert len(entities) == 1 + assert entities[0].unique_id == "1234567890123" From 2ea41c90b588c2193a7e001041297b702c6225bf Mon Sep 17 00:00:00 2001 From: TimL Date: Sun, 8 Sep 2024 19:00:10 +1000 Subject: [PATCH 1564/1635] Bump pymslight to 0.0.15 (#125455) Bump pymslight 0.0.15 for Smlight integration --- homeassistant/components/smlight/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json index 1a91b29234c..6c0a2c39025 100644 --- a/homeassistant/components/smlight/manifest.json +++ b/homeassistant/components/smlight/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/smlight", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["pysmlight==0.0.14"], + 
"requirements": ["pysmlight==0.0.15"], "zeroconf": [ { "type": "_slzb-06._tcp.local." diff --git a/requirements_all.txt b/requirements_all.txt index adf4f7e064b..9b6814c5c21 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2232,7 +2232,7 @@ pysmarty2==0.10.1 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.14 +pysmlight==0.0.15 # homeassistant.components.snmp pysnmp==6.2.5 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 8d1e1d24b13..2c8471b768a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1786,7 +1786,7 @@ pysmartthings==0.7.8 pysml==0.0.12 # homeassistant.components.smlight -pysmlight==0.0.14 +pysmlight==0.0.15 # homeassistant.components.snmp pysnmp==6.2.5 From 1f80b803f7ac28afb32514c3cb14ca5d93d87cbd Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 8 Sep 2024 11:03:18 +0200 Subject: [PATCH 1565/1635] Fix after review comments for Acmeda (#125501) Fix --- tests/components/acmeda/test_sensor.py | 18 +++--------------- 1 file changed, 3 insertions(+), 15 deletions(-) diff --git a/tests/components/acmeda/test_sensor.py b/tests/components/acmeda/test_sensor.py index bf7c16dda46..3d7090ce7dd 100644 --- a/tests/components/acmeda/test_sensor.py +++ b/tests/components/acmeda/test_sensor.py @@ -1,7 +1,5 @@ """Define tests for the Acmeda config flow.""" -import pytest - from homeassistant.components.acmeda.const import DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant @@ -10,23 +8,13 @@ from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry -@pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: - """Return the default mocked config entry.""" - mock_config_entry = MockConfigEntry( - domain=DOMAIN, - data={"host": "127.0.0.1"}, - ) - mock_config_entry.add_to_hass(hass) - return mock_config_entry - - async def test_sensor_id_migration( - hass: HomeAssistant, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, ) -> None: """Test migrating unique id.""" mock_config_entry.add_to_hass(hass) - entity_registry = er.async_get(hass) entity_registry.async_get_or_create( SENSOR_DOMAIN, DOMAIN, 1234567890123, config_entry=mock_config_entry ) From d3badb88ef291881b5ff51e5d746a6db35dd2910 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Sun, 8 Sep 2024 11:43:50 +0200 Subject: [PATCH 1566/1635] Fix solarlog test RuntimeWarning (#125504) --- tests/components/solarlog/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/components/solarlog/conftest.py b/tests/components/solarlog/conftest.py index b363f655c57..1b315fa3e8c 100644 --- a/tests/components/solarlog/conftest.py +++ b/tests/components/solarlog/conftest.py @@ -1,7 +1,7 @@ """Test helpers.""" from collections.abc import Generator -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest from solarlog_cli.solarlog_models import InverterData, SolarlogData @@ -53,6 +53,7 @@ def mock_solarlog_connector(): data.inverter_data = INVERTER_DATA mock_solarlog_api = AsyncMock() + mock_solarlog_api.set_enabled_devices = MagicMock() mock_solarlog_api.test_connection.return_value = True mock_solarlog_api.update_data.return_value = data mock_solarlog_api.update_device_list.return_value = INVERTER_DATA From 
2ef1c9632532c25a924b7a9e56cdb04e733e19a9 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 8 Sep 2024 11:56:23 +0200 Subject: [PATCH 1567/1635] Include all enphase_envoy devices in async_remove_config_entry_device (#124533) * Include all enphase_envoy devices in async_remove_config_entry_device * refactor if tests --- homeassistant/components/enphase_envoy/__init__.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/enphase_envoy/__init__.py b/homeassistant/components/enphase_envoy/__init__.py index f6438230789..ba590fa0337 100644 --- a/homeassistant/components/enphase_envoy/__init__.py +++ b/homeassistant/components/enphase_envoy/__init__.py @@ -60,8 +60,16 @@ async def async_remove_config_entry_device( envoy_serial_num = config_entry.unique_id if envoy_serial_num in dev_ids: return False - if envoy_data and envoy_data.inverters: - for inverter in envoy_data.inverters: - if str(inverter) in dev_ids: + if envoy_data: + if envoy_data.inverters: + for inverter in envoy_data.inverters: + if str(inverter) in dev_ids: + return False + if envoy_data.encharge_inventory: + for encharge in envoy_data.encharge_inventory: + if str(encharge) in dev_ids: + return False + if envoy_data.enpower: + if str(envoy_data.enpower.serial_number) in dev_ids: return False return True From cee695da28f38e87b223dd9190f84f6fc677d0a6 Mon Sep 17 00:00:00 2001 From: Simon Lamon <32477463+silamon@users.noreply.github.com> Date: Sun, 8 Sep 2024 12:00:03 +0200 Subject: [PATCH 1568/1635] Add missing previous and next commands in LinkPlay (#125450) Previous / Next commands --- homeassistant/components/linkplay/media_player.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 20b0f63f6a3..af18b018403 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -214,6 +214,16 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): """Send play command.""" await self._bridge.player.resume() + @exception_wrap + async def async_media_next_track(self) -> None: + """Send next command.""" + await self._bridge.player.next() + + @exception_wrap + async def async_media_previous_track(self) -> None: + """Send previous command.""" + await self._bridge.player.previous() + @exception_wrap async def async_set_repeat(self, repeat: RepeatMode) -> None: """Set repeat mode.""" From f5b754a38285907a8ce88fc818d9ed89423c8eee Mon Sep 17 00:00:00 2001 From: Nerdix <70015952+N3rdix@users.noreply.github.com> Date: Sun, 8 Sep 2024 12:03:14 +0200 Subject: [PATCH 1569/1635] Reorder openweathermap modes according to recommendation in documentation (#125395) Reorder modes and default to new API version 3 --- homeassistant/components/openweathermap/const.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/openweathermap/const.py b/homeassistant/components/openweathermap/const.py index d34125a2405..81a6544c7ce 100644 --- a/homeassistant/components/openweathermap/const.py +++ b/homeassistant/components/openweathermap/const.py @@ -63,12 +63,12 @@ OWM_MODE_FREE_FORECAST = "forecast" OWM_MODE_V30 = "v3.0" OWM_MODE_V25 = "v2.5" OWM_MODES = [ - OWM_MODE_FREE_CURRENT, - OWM_MODE_FREE_FORECAST, OWM_MODE_V30, OWM_MODE_V25, + OWM_MODE_FREE_CURRENT, + OWM_MODE_FREE_FORECAST, ] -DEFAULT_OWM_MODE = OWM_MODE_FREE_CURRENT +DEFAULT_OWM_MODE = 
OWM_MODE_V30 LANGUAGES = [ "af", From c0492d4af4c22df4987af1ea140f5d36eb4441d2 Mon Sep 17 00:00:00 2001 From: Josef Zweck <24647999+zweckj@users.noreply.github.com> Date: Sun, 8 Sep 2024 12:04:35 +0200 Subject: [PATCH 1570/1635] Add reconfigure for lamarzocco (#122160) * add reconfigure * fix strings, add to label * Update homeassistant/components/lamarzocco/config_flow.py Co-authored-by: G Johansson * Update test_config_flow.py Co-authored-by: Joost Lekkerkerker * ruff --------- Co-authored-by: G Johansson Co-authored-by: Joost Lekkerkerker --- .../components/lamarzocco/config_flow.py | 99 +++++++++++++++++-- .../components/lamarzocco/strings.json | 17 ++++ .../components/lamarzocco/test_config_flow.py | 71 ++++++++++++- 3 files changed, 177 insertions(+), 10 deletions(-) diff --git a/homeassistant/components/lamarzocco/config_flow.py b/homeassistant/components/lamarzocco/config_flow.py index b4fed615733..5a5cad00f64 100644 --- a/homeassistant/components/lamarzocco/config_flow.py +++ b/homeassistant/components/lamarzocco/config_flow.py @@ -10,7 +10,10 @@ from lmcloud.exceptions import AuthFail, RequestNotSuccessful from lmcloud.models import LaMarzoccoDeviceInfo import voluptuous as vol -from homeassistant.components.bluetooth import BluetoothServiceInfo +from homeassistant.components.bluetooth import ( + BluetoothServiceInfo, + async_discovered_service_info, +) from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -53,6 +56,7 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self.reauth_entry: ConfigEntry | None = None + self.reconfigure_entry: ConfigEntry | None = None self._config: dict[str, Any] = {} self._fleet: dict[str, LaMarzoccoDeviceInfo] = {} self._discovered: dict[str, str] = {} @@ -92,13 +96,9 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: if self.reauth_entry: - self.hass.config_entries.async_update_entry( - self.reauth_entry, data=data + return self.async_update_reload_and_abort( + self.reauth_entry, data=data, reason="reauth_successful" ) - await self.hass.config_entries.async_reload( - self.reauth_entry.entry_id - ) - return self.async_abort(reason="reauth_successful") if self._discovered: if self._discovered[CONF_MACHINE] not in self._fleet: errors["base"] = "machine_not_found" @@ -134,8 +134,9 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): if user_input: if not self._discovered: serial_number = user_input[CONF_MACHINE] - await self.async_set_unique_id(serial_number) - self._abort_if_unique_id_configured() + if self.reconfigure_entry is None: + await self.async_set_unique_id(serial_number) + self._abort_if_unique_id_configured() else: serial_number = self._discovered[CONF_MACHINE] @@ -153,6 +154,13 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): self._config[CONF_HOST] = user_input[CONF_HOST] if not errors: + if self.reconfigure_entry: + for service_info in async_discovered_service_info(self.hass): + self._discovered[service_info.name] = service_info.address + + if self._discovered: + return await self.async_step_bluetooth_selection() + return self.async_create_entry( title=selected_device.name, data={ @@ -191,6 +199,45 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) + async def async_step_bluetooth_selection( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle Bluetooth device selection.""" + + assert self.reconfigure_entry + + if user_input is not None: + return self.async_update_reload_and_abort( + self.reconfigure_entry, + data={ + 
**self._config, + CONF_MAC: user_input[CONF_MAC], + }, + reason="reconfigure_successful", + ) + + bt_options = [ + SelectOptionDict( + value=device_mac, + label=f"{device_name} ({device_mac})", + ) + for device_name, device_mac in self._discovered.items() + ] + + return self.async_show_form( + step_id="bluetooth_selection", + data_schema=vol.Schema( + { + vol.Required(CONF_MAC): SelectSelector( + SelectSelectorConfig( + options=bt_options, + mode=SelectSelectorMode.DROPDOWN, + ) + ), + }, + ), + ) + async def async_step_bluetooth( self, discovery_info: BluetoothServiceInfo ) -> ConfigFlowResult: @@ -240,6 +287,40 @@ class LmConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user(user_input) + async def async_step_reconfigure( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reconfiguration of the config entry.""" + self.reconfigure_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reconfigure_confirm() + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Confirm reconfiguration of the device.""" + assert self.reconfigure_entry + + if not user_input: + return self.async_show_form( + step_id="reconfigure_confirm", + data_schema=vol.Schema( + { + vol.Required( + CONF_USERNAME, + default=self.reconfigure_entry.data[CONF_USERNAME], + ): str, + vol.Required( + CONF_PASSWORD, + default=self.reconfigure_entry.data[CONF_PASSWORD], + ): str, + } + ), + ) + + return await self.async_step_user(user_input) + @staticmethod @callback def async_get_options_flow( diff --git a/homeassistant/components/lamarzocco/strings.json b/homeassistant/components/lamarzocco/strings.json index 08e3e764379..39cc24388ab 100644 --- a/homeassistant/components/lamarzocco/strings.json +++ b/homeassistant/components/lamarzocco/strings.json @@ -3,6 +3,7 @@ "flow_title": "La Marzocco Espresso {host}", "abort": { "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]", "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" }, "error": { @@ -21,6 +22,12 @@ "password": "Your password from the La Marzocco app" } }, + "bluetooth_selection": { + "description": "Select your device from available Bluetooth devices.", + "data": { + "mac": "Bluetooth device" + } + }, "machine_selection": { "description": "Select the machine you want to integrate. 
Set the \"IP\" to get access to shot time related sensors.", "data": { @@ -39,6 +46,16 @@ "data_description": { "password": "[%key:component::lamarzocco::config::step::user::data_description::password%]" } + }, + "reconfigure_confirm": { + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "username": "[%key:component::lamarzocco::config::step::user::data_description::username%]", + "password": "[%key:component::lamarzocco::config::step::user::data_description::password%]" + } } } }, diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index 39896926c61..4bb26fb5d30 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -7,7 +7,12 @@ from lmcloud.models import LaMarzoccoDeviceInfo from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN -from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import ( + SOURCE_BLUETOOTH, + SOURCE_RECONFIGURE, + SOURCE_USER, + ConfigEntryState, +) from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -259,6 +264,70 @@ async def test_reauth_flow( assert mock_config_entry.data[CONF_PASSWORD] == "new_password" +async def test_reconfigure_flow( + hass: HomeAssistant, + mock_cloud_client: MagicMock, + mock_config_entry: MockConfigEntry, + mock_device_info: LaMarzoccoDeviceInfo, +) -> None: + """Testing reconfgure flow.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": SOURCE_RECONFIGURE, + "unique_id": mock_config_entry.unique_id, + "entry_id": mock_config_entry.entry_id, + }, + data=mock_config_entry.data, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reconfigure_confirm" + + result2 = await __do_successful_user_step(hass, result, mock_cloud_client) + service_info = get_bluetooth_service_info( + mock_device_info.model, mock_device_info.serial_number + ) + + with ( + patch( + "homeassistant.components.lamarzocco.config_flow.LaMarzoccoLocalClient.validate_connection", + return_value=True, + ), + patch( + "homeassistant.components.lamarzocco.config_flow.async_discovered_service_info", + return_value=[service_info], + ), + ): + result3 = await hass.config_entries.flow.async_configure( + result2["flow_id"], + { + CONF_HOST: "192.168.1.1", + CONF_MACHINE: mock_device_info.serial_number, + }, + ) + await hass.async_block_till_done() + + assert result3["type"] is FlowResultType.FORM + assert result3["step_id"] == "bluetooth_selection" + + result4 = await hass.config_entries.flow.async_configure( + result3["flow_id"], + {CONF_MAC: service_info.address}, + ) + + assert result4["type"] is FlowResultType.ABORT + assert result4["reason"] == "reconfigure_successful" + + assert mock_config_entry.title == "My LaMarzocco" + assert mock_config_entry.data == { + **mock_config_entry.data, + CONF_MAC: service_info.address, + } + + async def test_bluetooth_discovery( hass: HomeAssistant, mock_lamarzocco: MagicMock, From 74b78307eef58eec6efee6251014149acb05d003 Mon Sep 17 00:00:00 2001 From: Arie Catsman <120491684+catsmanac@users.noreply.github.com> Date: Sun, 8 Sep 2024 12:15:00 +0200 Subject: [PATCH 1571/1635] Add balanced grid import/export to enphase_envoy (#123154) * 
Add balanced grid import/export to enphase_envoy * rebuild sensor snapshot after dev merge * Cleanup snapshot file --- .../components/enphase_envoy/sensor.py | 91 ++ .../components/enphase_envoy/strings.json | 12 + tests/components/enphase_envoy/conftest.py | 8 + .../enphase_envoy/fixtures/envoy.json | 2 + .../fixtures/envoy_1p_metered.json | 7 + .../fixtures/envoy_metered_batt_relay.json | 26 + .../fixtures/envoy_nobatt_metered_3p.json | 26 + .../fixtures/envoy_tot_cons_metered.json | 7 + .../enphase_envoy/snapshots/test_sensor.ambr | 1160 +++++++++++++++++ tests/components/enphase_envoy/test_sensor.py | 84 ++ 10 files changed, 1423 insertions(+) diff --git a/homeassistant/components/enphase_envoy/sensor.py b/homeassistant/components/enphase_envoy/sensor.py index 4dd7f158305..20d610e4b71 100644 --- a/homeassistant/components/enphase_envoy/sensor.py +++ b/homeassistant/components/enphase_envoy/sensor.py @@ -228,6 +228,50 @@ CONSUMPTION_PHASE_SENSORS = { } +NET_CONSUMPTION_SENSORS = ( + EnvoyConsumptionSensorEntityDescription( + key="balanced_net_consumption", + translation_key="balanced_net_consumption", + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + suggested_unit_of_measurement=UnitOfPower.KILO_WATT, + suggested_display_precision=3, + value_fn=attrgetter("watts_now"), + on_phase=None, + ), + EnvoyConsumptionSensorEntityDescription( + key="lifetime_balanced_net_consumption", + translation_key="lifetime_balanced_net_consumption", + entity_registry_enabled_default=False, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + state_class=SensorStateClass.TOTAL, + device_class=SensorDeviceClass.ENERGY, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + suggested_display_precision=3, + value_fn=attrgetter("watt_hours_lifetime"), + on_phase=None, + ), +) + + +NET_CONSUMPTION_PHASE_SENSORS = { + (on_phase := PHASENAMES[phase]): [ + replace( + sensor, + key=f"{sensor.key}_l{phase + 1}", + translation_key=f"{sensor.translation_key}_phase", + entity_registry_enabled_default=False, + on_phase=on_phase, + translation_placeholders={"phase_name": f"l{phase + 1}"}, + ) + for sensor in list(NET_CONSUMPTION_SENSORS) + ] + for phase in range(3) +} + + @dataclass(frozen=True, kw_only=True) class EnvoyCTSensorEntityDescription(SensorEntityDescription): """Describes an Envoy CT sensor entity.""" @@ -697,6 +741,11 @@ async def async_setup_entry( EnvoyConsumptionEntity(coordinator, description) for description in CONSUMPTION_SENSORS ) + if envoy_data.system_net_consumption: + entities.extend( + EnvoyNetConsumptionEntity(coordinator, description) + for description in NET_CONSUMPTION_SENSORS + ) # For each production phase reported add production entities if envoy_data.system_production_phases: entities.extend( @@ -713,6 +762,14 @@ async def async_setup_entry( for description in CONSUMPTION_PHASE_SENSORS[use_phase] if phase is not None ) + # For each net_consumption phase reported add consumption entities + if envoy_data.system_net_consumption_phases: + entities.extend( + EnvoyNetConsumptionPhaseEntity(coordinator, description) + for use_phase, phase in envoy_data.system_net_consumption_phases.items() + for description in NET_CONSUMPTION_PHASE_SENSORS[use_phase] + if phase is not None + ) # Add net consumption CT entities if ctmeter := envoy_data.ctmeter_consumption: entities.extend( @@ -846,6 +903,19 @@ class EnvoyConsumptionEntity(EnvoySystemSensorEntity): return 
self.entity_description.value_fn(system_consumption) +class EnvoyNetConsumptionEntity(EnvoySystemSensorEntity): + """Envoy consumption entity.""" + + entity_description: EnvoyConsumptionSensorEntityDescription + + @property + def native_value(self) -> int | None: + """Return the state of the sensor.""" + system_net_consumption = self.data.system_net_consumption + assert system_net_consumption is not None + return self.entity_description.value_fn(system_net_consumption) + + class EnvoyProductionPhaseEntity(EnvoySystemSensorEntity): """Envoy phase production entity.""" @@ -888,6 +958,27 @@ class EnvoyConsumptionPhaseEntity(EnvoySystemSensorEntity): return self.entity_description.value_fn(system_consumption) +class EnvoyNetConsumptionPhaseEntity(EnvoySystemSensorEntity): + """Envoy phase consumption entity.""" + + entity_description: EnvoyConsumptionSensorEntityDescription + + @property + def native_value(self) -> int | None: + """Return the state of the sensor.""" + if TYPE_CHECKING: + assert self.entity_description.on_phase + assert self.data.system_net_consumption_phases + + if ( + system_net_consumption := self.data.system_net_consumption_phases[ + self.entity_description.on_phase + ] + ) is None: + return None + return self.entity_description.value_fn(system_net_consumption) + + class EnvoyConsumptionCTEntity(EnvoySystemSensorEntity): """Envoy net consumption CT entity.""" diff --git a/homeassistant/components/enphase_envoy/strings.json b/homeassistant/components/enphase_envoy/strings.json index 3c48776e448..2e7ce831efc 100644 --- a/homeassistant/components/enphase_envoy/strings.json +++ b/homeassistant/components/enphase_envoy/strings.json @@ -165,6 +165,18 @@ "lifetime_consumption_phase": { "name": "Lifetime energy consumption {phase_name}" }, + "balanced_net_consumption": { + "name": "balanced net power consumption" + }, + "lifetime_balanced_net_consumption": { + "name": "Lifetime balanced net energy consumption" + }, + "balanced_net_consumption_phase": { + "name": "balanced net power consumption {phase_name}" + }, + "lifetime_balanced_net_consumption_phase": { + "name": "Lifetime balanced net energy consumption {phase_name}" + }, "lifetime_net_consumption": { "name": "Lifetime net energy consumption" }, diff --git a/tests/components/enphase_envoy/conftest.py b/tests/components/enphase_envoy/conftest.py index 58627211344..541b6f96e19 100644 --- a/tests/components/enphase_envoy/conftest.py +++ b/tests/components/enphase_envoy/conftest.py @@ -150,6 +150,8 @@ def _load_json_2_production_data( """Fill envoy production data from fixture.""" if item := json_fixture["data"].get("system_consumption"): mocked_data.system_consumption = EnvoySystemConsumption(**item) + if item := json_fixture["data"].get("system_net_consumption"): + mocked_data.system_net_consumption = EnvoySystemConsumption(**item) if item := json_fixture["data"].get("system_production"): mocked_data.system_production = EnvoySystemProduction(**item) if item := json_fixture["data"].get("system_consumption_phases"): @@ -158,6 +160,12 @@ def _load_json_2_production_data( mocked_data.system_consumption_phases[sub_item] = EnvoySystemConsumption( **item_data ) + if item := json_fixture["data"].get("system_net_consumption_phases"): + mocked_data.system_net_consumption_phases = {} + for sub_item, item_data in item.items(): + mocked_data.system_net_consumption_phases[sub_item] = ( + EnvoySystemConsumption(**item_data) + ) if item := json_fixture["data"].get("system_production_phases"): mocked_data.system_production_phases = {} for 
sub_item, item_data in item.items(): diff --git a/tests/components/enphase_envoy/fixtures/envoy.json b/tests/components/enphase_envoy/fixtures/envoy.json index 8c9be429931..3431dba6766 100644 --- a/tests/components/enphase_envoy/fixtures/envoy.json +++ b/tests/components/enphase_envoy/fixtures/envoy.json @@ -17,6 +17,7 @@ "encharge_aggregate": null, "enpower": null, "system_consumption": null, + "system_net_consumption": null, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -24,6 +25,7 @@ "watts_now": 1234 }, "system_consumption_phases": null, + "system_net_consumption_phases": null, "system_production_phases": null, "ctmeter_production": null, "ctmeter_consumption": null, diff --git a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json index e72829280da..05a6f265dfb 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json +++ b/tests/components/enphase_envoy/fixtures/envoy_1p_metered.json @@ -22,6 +22,12 @@ "watt_hours_today": 1234, "watts_now": 1234 }, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -29,6 +35,7 @@ "watts_now": 1234 }, "system_consumption_phases": null, + "system_net_consumption_phases": null, "system_production_phases": null, "ctmeter_production": { "eid": "100000010", diff --git a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json index 72b510e2328..7affc1bea0d 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json +++ b/tests/components/enphase_envoy/fixtures/envoy_metered_batt_relay.json @@ -79,6 +79,12 @@ "watt_hours_today": 1234, "watts_now": 1234 }, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -105,6 +111,26 @@ "watts_now": 3324 } }, + "system_net_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 12341 + }, + "L2": { + "watt_hours_lifetime": 2321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 22341 + }, + "L3": { + "watt_hours_lifetime": 3321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 32341 + } + }, "system_production_phases": { "L1": { "watt_hours_lifetime": 1232, diff --git a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json index f9b6ae31196..ff975b690ed 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json +++ b/tests/components/enphase_envoy/fixtures/envoy_nobatt_metered_3p.json @@ -22,6 +22,12 @@ "watt_hours_today": 1234, "watts_now": 1234 }, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -48,6 +54,26 @@ "watts_now": 3324 } }, + "system_net_consumption_phases": { + "L1": { + "watt_hours_lifetime": 1321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 12341 + }, + "L2": { + 
"watt_hours_lifetime": 2321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 22341 + }, + "L3": { + "watt_hours_lifetime": 3321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 32341 + } + }, "system_production_phases": { "L1": { "watt_hours_lifetime": 1232, diff --git a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json index ca2a976b6d1..62df69c6d88 100644 --- a/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json +++ b/tests/components/enphase_envoy/fixtures/envoy_tot_cons_metered.json @@ -17,6 +17,12 @@ "encharge_aggregate": null, "enpower": null, "system_consumption": null, + "system_net_consumption": { + "watt_hours_lifetime": 4321, + "watt_hours_last_7_days": -1, + "watt_hours_today": -1, + "watts_now": 2341 + }, "system_production": { "watt_hours_lifetime": 1234, "watt_hours_last_7_days": 1234, @@ -24,6 +30,7 @@ "watts_now": 1234 }, "system_consumption_phases": null, + "system_net_consumption_phases": null, "system_production_phases": null, "ctmeter_production": { "eid": "100000010", diff --git a/tests/components/enphase_envoy/snapshots/test_sensor.ambr b/tests/components/enphase_envoy/snapshots/test_sensor.ambr index ad937b27167..f0d4006f05c 100644 --- a/tests/components/enphase_envoy/snapshots/test_sensor.ambr +++ b/tests/components/enphase_envoy/snapshots/test_sensor.ambr @@ -328,6 +328,64 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_current_net_power_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -838,6 +896,64 @@ 'state': '50.1', }) # --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- # name: test_sensor[envoy_1p_metered][sensor.envoy_1234_lifetime_energy_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2105,6 +2221,238 @@ 'state': '525', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.341', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': 
'1234_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_balanced_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.341', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -4695,6 +5043,238 @@ 'state': '50.2', }) # --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: 
test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', + 'icon': 
'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.321', + }) +# --- # name: test_sensor[envoy_metered_batt_relay][sensor.envoy_1234_lifetime_battery_energy_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -9597,6 +10177,238 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.341', 
+ }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22.341', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption_phase', + 'unique_id': '1234_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_balanced_net_power_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.341', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_current_net_power_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -11637,6 +12449,238 @@ 'state': '50.1', }) # --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l1', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l1', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l1', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, 
+ 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l2', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l2', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l2', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.321', + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption l3', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption_phase', + 'unique_id': '1234_lifetime_balanced_net_consumption_l3', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption l3', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption_l3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.321', + }) +# --- # name: test_sensor[envoy_nobatt_metered_3p][sensor.envoy_1234_lifetime_energy_consumption-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -14873,6 +15917,64 @@ 'state': '1970-01-01T00:00:01+00:00', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_balanced_net_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ 
+ 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'balanced net power consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'balanced_net_consumption', + 'unique_id': '1234_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_balanced_net_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Envoy 1234 balanced net power consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_balanced_net_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.341', + }) +# --- # name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_current_power_production-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -15099,6 +16201,64 @@ 'state': '50.1', }) # --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 3, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:flash', + 'original_name': 'Lifetime balanced net energy consumption', + 'platform': 'enphase_envoy', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lifetime_balanced_net_consumption', + 'unique_id': '1234_lifetime_balanced_net_consumption', + 'unit_of_measurement': , + }) +# --- +# name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_balanced_net_energy_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'Envoy 1234 Lifetime balanced net energy consumption', + 'icon': 'mdi:flash', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.envoy_1234_lifetime_balanced_net_energy_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.321', + }) +# --- # name: test_sensor[envoy_tot_cons_metered][sensor.envoy_1234_lifetime_energy_production-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/enphase_envoy/test_sensor.py b/tests/components/enphase_envoy/test_sensor.py index 273f81173ff..90b36e23555 100644 --- a/tests/components/enphase_envoy/test_sensor.py +++ b/tests/components/enphase_envoy/test_sensor.py @@ -179,6 +179,47 @@ async def test_sensor_consumption_data( assert float(entity_state.state) == target +NET_CONSUMPTION_NAMES: tuple[str, ...] 
= ( + "balanced_net_power_consumption", + "lifetime_balanced_net_energy_consumption", +) + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_1p_metered", + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + "envoy_tot_cons_metered", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_net_consumption_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test net consumption entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + data = mock_envoy.data.system_net_consumption + NET_CONSUMPTION_TARGETS = ( + data.watts_now / 1000.0, + data.watt_hours_lifetime / 1000.0, + ) + for name, target in list( + zip(NET_CONSUMPTION_NAMES, NET_CONSUMPTION_TARGETS, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CONSUMPTION_PHASE_NAMES: list[str] = [ f"{name}_{phase.lower()}" for phase in PHASENAMES for name in CONSUMPTION_NAMES ] @@ -224,6 +265,48 @@ async def test_sensor_consumption_phase_data( assert float(entity_state.state) == target +NET_CONSUMPTION_PHASE_NAMES: list[str] = [ + f"{name}_{phase.lower()}" for phase in PHASENAMES for name in NET_CONSUMPTION_NAMES +] + + +@pytest.mark.parametrize( + ("mock_envoy"), + [ + "envoy_metered_batt_relay", + "envoy_nobatt_metered_3p", + ], + indirect=["mock_envoy"], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensor_net_consumption_phase_data( + hass: HomeAssistant, + mock_envoy: AsyncMock, + config_entry: MockConfigEntry, +) -> None: + """Test consumption phase entities values.""" + with patch("homeassistant.components.enphase_envoy.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, config_entry) + + sn = mock_envoy.serial_number + ENTITY_BASE: str = f"{Platform.SENSOR}.envoy_{sn}" + + NET_CONSUMPTION_PHASE_TARGET = chain( + *[ + ( + phase_data.watts_now / 1000.0, + phase_data.watt_hours_lifetime / 1000.0, + ) + for phase_data in mock_envoy.data.system_net_consumption_phases.values() + ] + ) + for name, target in list( + zip(NET_CONSUMPTION_PHASE_NAMES, NET_CONSUMPTION_PHASE_TARGET, strict=False) + ): + assert (entity_state := hass.states.get(f"{ENTITY_BASE}_{name}")) + assert float(entity_state.state) == target + + CT_PRODUCTION_NAMES_INT = ("meter_status_flags_active_production_ct",) CT_PRODUCTION_NAMES_STR = ("metering_status_production_ct",) @@ -877,6 +960,7 @@ async def test_sensor_missing_data( # force missing data to test 'if == none' code sections mock_envoy.data.system_production_phases["L2"] = None mock_envoy.data.system_consumption_phases["L2"] = None + mock_envoy.data.system_net_consumption_phases["L2"] = None mock_envoy.data.ctmeter_production = None mock_envoy.data.ctmeter_consumption = None mock_envoy.data.ctmeter_storage = None From 943b96e7a1f40a816f844872666084d577522c41 Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Sun, 8 Sep 2024 12:18:32 +0200 Subject: [PATCH 1572/1635] Fix Bang & Olufsen testing typing (#125427) * Fix test parameter typed as callable instead of context manager * Add missing AsyncMock typing --- tests/components/bang_olufsen/test_media_player.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git 
a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 70743cd2cca..352a90cd07c 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -1,7 +1,6 @@ """Test the Bang & Olufsen media_player entity.""" -from collections.abc import Callable -from contextlib import nullcontext as does_not_raise +from contextlib import AbstractContextManager, nullcontext as does_not_raise import logging from unittest.mock import AsyncMock, patch @@ -150,7 +149,9 @@ async def test_async_update_sources_outdated_api( async def test_async_update_sources_remote( - hass: HomeAssistant, mock_mozart_client, mock_config_entry: MockConfigEntry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_sources is called when there are new video sources.""" @@ -595,7 +596,7 @@ async def test_async_media_seek( mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, source: Source, - expected_result: Callable, + expected_result: AbstractContextManager, seek_called_times: int, ) -> None: """Test async_media_seek.""" @@ -681,7 +682,7 @@ async def test_async_select_source( mock_mozart_client: AsyncMock, mock_config_entry: MockConfigEntry, source: str, - expected_result: Callable, + expected_result: AbstractContextManager, audio_source_call: int, video_source_call: int, ) -> None: From 31aef86c0f0bc90bb43338fcea0808b4568e004b Mon Sep 17 00:00:00 2001 From: Markus Jacobsen Date: Sun, 8 Sep 2024 12:22:21 +0200 Subject: [PATCH 1573/1635] Add various assertions to Bang & Olufsen testing (#125429) Add various assertions --- .../bang_olufsen/test_media_player.py | 39 ++++++++++--------- .../components/bang_olufsen/test_websocket.py | 14 +++++-- 2 files changed, 31 insertions(+), 22 deletions(-) diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 352a90cd07c..76f0d842648 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -104,7 +104,7 @@ async def test_initialization( # Check state (The initial state in this test does not contain all that much. # States are tested using simulated WebSocket events.) 
- states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_SOURCES assert states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] @@ -126,7 +126,7 @@ async def test_async_update_sources_audio_only( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_AUDIO_SOURCES @@ -141,7 +141,7 @@ async def test_async_update_sources_outdated_api( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_INPUT_SOURCE_LIST] == TEST_FALLBACK_SOURCES + TEST_VIDEO_SOURCES @@ -187,7 +187,7 @@ async def test_async_update_playback_metadata( mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_DURATION not in states.attributes assert ATTR_MEDIA_TITLE not in states.attributes assert ATTR_MEDIA_ALBUM_NAME not in states.attributes @@ -198,7 +198,7 @@ async def test_async_update_playback_metadata( # Send the WebSocket event dispatch playback_metadata_callback(TEST_PLAYBACK_METADATA) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_MEDIA_DURATION] == TEST_PLAYBACK_METADATA.total_duration_seconds @@ -250,14 +250,14 @@ async def test_async_update_playback_progress( mock_mozart_client.get_playback_progress_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_POSITION not in states.attributes old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] assert old_updated_at playback_progress_callback(TEST_PLAYBACK_PROGRESS) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress new_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] assert new_updated_at @@ -278,12 +278,12 @@ async def test_async_update_playback_state( mock_mozart_client.get_playback_state_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.state == MediaPlayerState.PLAYING playback_state_callback(TEST_PLAYBACK_STATE_PAUSED) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.state == TEST_PLAYBACK_STATE_PAUSED.value @@ -366,7 +366,7 @@ async def test_async_update_source_change( mock_mozart_client.get_source_change_notifications.call_args[0][0] ) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_INPUT_SOURCE not in states.attributes assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC @@ -377,7 +377,7 @@ async def test_async_update_source_change( playback_metadata_callback(metadata) 
source_change_callback(reported_source) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == content_type assert states.attributes[ATTR_MEDIA_POSITION] == progress @@ -406,7 +406,8 @@ async def test_async_turn_off( playback_state_callback(TEST_PLAYBACK_STATE_TURN_OFF) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert TEST_PLAYBACK_STATE_TURN_OFF.value assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] # Check API call @@ -425,7 +426,7 @@ async def test_async_set_volume_level( volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes await hass.services.async_call( @@ -441,7 +442,7 @@ async def test_async_set_volume_level( # The service call will trigger a WebSocket notification volume_callback(TEST_VOLUME) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_MEDIA_VOLUME_LEVEL] == TEST_VOLUME_HOME_ASSISTANT_FORMAT ) @@ -463,7 +464,7 @@ async def test_async_mute_volume( volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes await hass.services.async_call( @@ -479,7 +480,7 @@ async def test_async_mute_volume( # The service call will trigger a WebSocket notification volume_callback(TEST_VOLUME_MUTED) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) assert ( states.attributes[ATTR_MEDIA_VOLUME_MUTED] == TEST_VOLUME_MUTED_HOME_ASSISTANT_FORMAT @@ -518,7 +519,8 @@ async def test_async_media_play_pause( # Set the initial state playback_state_callback(initial_state) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert initial_state.value assert states.state == BANG_OLUFSEN_STATES[initial_state.value] await hass.services.async_call( @@ -548,7 +550,8 @@ async def test_async_media_stop( # Set the state to playing playback_state_callback(TEST_PLAYBACK_STATE_PLAYING) - states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) + assert (states := hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID)) + assert TEST_PLAYBACK_STATE_PLAYING.value assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] await hass.services.async_call( diff --git a/tests/components/bang_olufsen/test_websocket.py b/tests/components/bang_olufsen/test_websocket.py index 209550faee5..b17859a4f4e 100644 --- a/tests/components/bang_olufsen/test_websocket.py +++ b/tests/components/bang_olufsen/test_websocket.py @@ -101,8 +101,11 @@ async def test_on_software_update_state( await hass.async_block_till_done() - device = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) ) assert device.sw_version == "1.0.0" @@ 
-135,8 +138,11 @@ async def test_on_all_notifications_raw( raw_notification_full = raw_notification # Get device ID for the modified notification that is sent as an event and in the log - device = device_registry.async_get_device( - identifiers={(DOMAIN, mock_config_entry.unique_id)} + assert mock_config_entry.unique_id + assert ( + device := device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) ) raw_notification_full.update( { From fd0c63fe529249d95f87a9b51144f4c10b2ea4b8 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Sun, 8 Sep 2024 12:29:42 +0200 Subject: [PATCH 1574/1635] Add text-selector autocomplete in Bring config flow (#124063) Add autocomplete to Bring config flow schema --- homeassistant/components/bring/config_flow.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/homeassistant/components/bring/config_flow.py b/homeassistant/components/bring/config_flow.py index c675eda3cd2..6a90ff153e5 100644 --- a/homeassistant/components/bring/config_flow.py +++ b/homeassistant/components/bring/config_flow.py @@ -33,11 +33,13 @@ STEP_USER_DATA_SCHEMA = vol.Schema( vol.Required(CONF_EMAIL): TextSelector( TextSelectorConfig( type=TextSelectorType.EMAIL, + autocomplete="email", ), ), vol.Required(CONF_PASSWORD): TextSelector( TextSelectorConfig( type=TextSelectorType.PASSWORD, + autocomplete="current-password", ), ), } From ec9f50317fcdf351f5f25f71283a444157b76497 Mon Sep 17 00:00:00 2001 From: Kevin Stillhammer Date: Sun, 8 Sep 2024 12:50:04 +0200 Subject: [PATCH 1575/1635] Allow waze_travel_time multiple excl/incl filter (#117252) * Allow multiple excl/incl filter * Use list comprehension for should_include * Do not use mutable object as default param * Inline migration func --- .../components/waze_travel_time/__init__.py | 95 +++++++++++++++---- .../waze_travel_time/config_flow.py | 25 ++++- .../components/waze_travel_time/const.py | 5 +- .../components/waze_travel_time/sensor.py | 4 +- tests/components/waze_travel_time/conftest.py | 2 + .../waze_travel_time/test_config_flow.py | 31 +++--- .../components/waze_travel_time/test_init.py | 79 ++++++++++++++- .../waze_travel_time/test_sensor.py | 15 ++- 8 files changed, 211 insertions(+), 45 deletions(-) diff --git a/homeassistant/components/waze_travel_time/__init__.py b/homeassistant/components/waze_travel_time/__init__.py index 83b2e2aa7c7..1abcf9d391d 100644 --- a/homeassistant/components/waze_travel_time/__init__.py +++ b/homeassistant/components/waze_travel_time/__init__.py @@ -1,6 +1,7 @@ """The waze_travel_time component.""" import asyncio +from collections.abc import Collection import logging from pywaze.route_calculator import CalcRoutesResponse, WazeRouteCalculator, WRCError @@ -28,10 +29,13 @@ from .const import ( CONF_AVOID_SUBSCRIPTION_ROADS, CONF_AVOID_TOLL_ROADS, CONF_DESTINATION, + CONF_EXCL_FILTER, + CONF_INCL_FILTER, CONF_ORIGIN, CONF_REALTIME, CONF_UNITS, CONF_VEHICLE_TYPE, + DEFAULT_FILTER, DEFAULT_VEHICLE_TYPE, DOMAIN, METRIC_UNITS, @@ -86,6 +90,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b """Load the saved entities.""" if SEMAPHORE not in hass.data.setdefault(DOMAIN, {}): hass.data.setdefault(DOMAIN, {})[SEMAPHORE] = asyncio.Semaphore(1) + await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) async def async_get_travel_times_service(service: ServiceCall) -> ServiceResponse: @@ -124,11 +129,14 @@ async def async_get_travel_times( avoid_subscription_roads: bool, avoid_ferries: bool, realtime: bool, - incl_filter: 
str | None = None, - excl_filter: str | None = None, + incl_filters: Collection[str] | None = None, + excl_filters: Collection[str] | None = None, ) -> list[CalcRoutesResponse] | None: """Get all available routes.""" + incl_filters = incl_filters or () + excl_filters = excl_filters or () + _LOGGER.debug( "Getting update for origin: %s destination: %s", origin, @@ -147,28 +155,46 @@ async def async_get_travel_times( real_time=realtime, alternatives=3, ) + _LOGGER.debug("Got routes: %s", routes) - if incl_filter not in {None, ""}: - routes = [ - r - for r in routes - if any( - incl_filter.lower() == street_name.lower() # type: ignore[union-attr] - for street_name in r.street_names + incl_routes: list[CalcRoutesResponse] = [] + + def should_include_route(route: CalcRoutesResponse) -> bool: + if len(incl_filters) < 1: + return True + should_include = any( + street_name in incl_filters or "" in incl_filters + for street_name in route.street_names + ) + if not should_include: + _LOGGER.debug( + "Excluding route [%s], because no inclusive filter matched any streetname", + route.name, ) - ] + return False + return True - if excl_filter not in {None, ""}: - routes = [ - r - for r in routes - if not any( - excl_filter.lower() == street_name.lower() # type: ignore[union-attr] - for street_name in r.street_names - ) - ] + incl_routes = [route for route in routes if should_include_route(route)] - if len(routes) < 1: + filtered_routes: list[CalcRoutesResponse] = [] + + def should_exclude_route(route: CalcRoutesResponse) -> bool: + for street_name in route.street_names: + for excl_filter in excl_filters: + if excl_filter == street_name: + _LOGGER.debug( + "Excluding route, because exclusive filter [%s] matched streetname: %s", + excl_filter, + route.name, + ) + return True + return False + + filtered_routes = [ + route for route in incl_routes if not should_exclude_route(route) + ] + + if len(filtered_routes) < 1: _LOGGER.warning("No routes found") return None except WRCError as exp: @@ -176,9 +202,36 @@ async def async_get_travel_times( return None else: - return routes + return filtered_routes async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate an old config entry.""" + + if config_entry.version == 1: + _LOGGER.debug( + "Migrating from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + options = dict(config_entry.options) + if (incl_filters := options.pop(CONF_INCL_FILTER, None)) not in {None, ""}: + options[CONF_INCL_FILTER] = [incl_filters] + else: + options[CONF_INCL_FILTER] = DEFAULT_FILTER + if (excl_filters := options.pop(CONF_EXCL_FILTER, None)) not in {None, ""}: + options[CONF_EXCL_FILTER] = [excl_filters] + else: + options[CONF_EXCL_FILTER] = DEFAULT_FILTER + hass.config_entries.async_update_entry(config_entry, options=options, version=2) + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + return True diff --git a/homeassistant/components/waze_travel_time/config_flow.py b/homeassistant/components/waze_travel_time/config_flow.py index 12dc8336f92..b684dd0bb80 100644 --- a/homeassistant/components/waze_travel_time/config_flow.py +++ b/homeassistant/components/waze_travel_time/config_flow.py @@ -20,6 +20,8 @@ from homeassistant.helpers.selector import ( 
SelectSelectorConfig, SelectSelectorMode, TextSelector, + TextSelectorConfig, + TextSelectorType, ) from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM @@ -34,6 +36,7 @@ from .const import ( CONF_REALTIME, CONF_UNITS, CONF_VEHICLE_TYPE, + DEFAULT_FILTER, DEFAULT_NAME, DEFAULT_OPTIONS, DOMAIN, @@ -46,8 +49,18 @@ from .helpers import is_valid_config_entry OPTIONS_SCHEMA = vol.Schema( { - vol.Optional(CONF_INCL_FILTER, default=""): TextSelector(), - vol.Optional(CONF_EXCL_FILTER, default=""): TextSelector(), + vol.Optional(CONF_INCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), + vol.Optional(CONF_EXCL_FILTER): TextSelector( + TextSelectorConfig( + type=TextSelectorType.TEXT, + multiple=True, + ), + ), vol.Optional(CONF_REALTIME): BooleanSelector(), vol.Required(CONF_VEHICLE_TYPE): SelectSelector( SelectSelectorConfig( @@ -88,7 +101,7 @@ CONFIG_SCHEMA = vol.Schema( ) -def default_options(hass: HomeAssistant) -> dict[str, str | bool]: +def default_options(hass: HomeAssistant) -> dict[str, str | bool | list[str]]: """Get the default options.""" defaults = DEFAULT_OPTIONS.copy() if hass.config.units is US_CUSTOMARY_SYSTEM: @@ -106,6 +119,10 @@ class WazeOptionsFlow(OptionsFlow): async def async_step_init(self, user_input=None) -> ConfigFlowResult: """Handle the initial step.""" if user_input is not None: + if user_input.get(CONF_INCL_FILTER) is None: + user_input[CONF_INCL_FILTER] = DEFAULT_FILTER + if user_input.get(CONF_EXCL_FILTER) is None: + user_input[CONF_EXCL_FILTER] = DEFAULT_FILTER return self.async_create_entry( title="", data=user_input, @@ -122,7 +139,7 @@ class WazeOptionsFlow(OptionsFlow): class WazeConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Waze Travel Time.""" - VERSION = 1 + VERSION = 2 def __init__(self) -> None: """Init Config Flow.""" diff --git a/homeassistant/components/waze_travel_time/const.py b/homeassistant/components/waze_travel_time/const.py index 84e41c3963f..7c77f43574d 100644 --- a/homeassistant/components/waze_travel_time/const.py +++ b/homeassistant/components/waze_travel_time/const.py @@ -22,6 +22,7 @@ DEFAULT_VEHICLE_TYPE = "car" DEFAULT_AVOID_TOLL_ROADS = False DEFAULT_AVOID_SUBSCRIPTION_ROADS = False DEFAULT_AVOID_FERRIES = False +DEFAULT_FILTER = [""] IMPERIAL_UNITS = "imperial" METRIC_UNITS = "metric" @@ -30,11 +31,13 @@ UNITS = [METRIC_UNITS, IMPERIAL_UNITS] REGIONS = ["us", "na", "eu", "il", "au"] VEHICLE_TYPES = ["car", "taxi", "motorcycle"] -DEFAULT_OPTIONS: dict[str, str | bool] = { +DEFAULT_OPTIONS: dict[str, str | bool | list[str]] = { CONF_REALTIME: DEFAULT_REALTIME, CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, CONF_UNITS: METRIC_UNITS, CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + CONF_INCL_FILTER: DEFAULT_FILTER, + CONF_EXCL_FILTER: DEFAULT_FILTER, } diff --git a/homeassistant/components/waze_travel_time/sensor.py b/homeassistant/components/waze_travel_time/sensor.py index 7663b4a102e..c2d3ee12cf8 100644 --- a/homeassistant/components/waze_travel_time/sensor.py +++ b/homeassistant/components/waze_travel_time/sensor.py @@ -183,8 +183,8 @@ class WazeTravelTimeData: ) if self.origin is not None and self.destination is not None: # Grab options on every update - incl_filter = self.config_entry.options.get(CONF_INCL_FILTER) - excl_filter = self.config_entry.options.get(CONF_EXCL_FILTER) + incl_filter = self.config_entry.options[CONF_INCL_FILTER] + 
excl_filter = self.config_entry.options[CONF_EXCL_FILTER] realtime = self.config_entry.options[CONF_REALTIME] vehicle_type = self.config_entry.options[CONF_VEHICLE_TYPE] avoid_toll_roads = self.config_entry.options[CONF_AVOID_TOLL_ROADS] diff --git a/tests/components/waze_travel_time/conftest.py b/tests/components/waze_travel_time/conftest.py index c929fc219f9..c9214ed8b71 100644 --- a/tests/components/waze_travel_time/conftest.py +++ b/tests/components/waze_travel_time/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import patch import pytest from pywaze.route_calculator import CalcRoutesResponse, WRCError +from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import DOMAIN from homeassistant.core import HomeAssistant @@ -19,6 +20,7 @@ async def mock_config_fixture(hass: HomeAssistant, data, options): data=data, options=options, entry_id="test", + version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/waze_travel_time/test_config_flow.py b/tests/components/waze_travel_time/test_config_flow.py index 5b1e3417bfc..87cb92f1522 100644 --- a/tests/components/waze_travel_time/test_config_flow.py +++ b/tests/components/waze_travel_time/test_config_flow.py @@ -3,6 +3,7 @@ import pytest from homeassistant import config_entries +from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, @@ -60,6 +61,7 @@ async def test_reconfigure(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, + version=WazeConfigFlow.VERSION, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -103,6 +105,7 @@ async def test_options(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, + version=WazeConfigFlow.VERSION, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -119,8 +122,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "exclude", - CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: ["exclude"], + CONF_INCL_FILTER: ["include"], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -132,8 +135,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "exclude", - CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: ["exclude"], + CONF_INCL_FILTER: ["include"], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -143,8 +146,8 @@ async def test_options(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "exclude", - CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: ["exclude"], + CONF_INCL_FILTER: ["include"], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -209,10 +212,14 @@ async def test_invalid_config_entry( async def test_reset_filters(hass: HomeAssistant) -> None: """Test resetting inclusive and exclusive filters to empty string.""" options = {**DEFAULT_OPTIONS} - options[CONF_INCL_FILTER] = "test" - options[CONF_EXCL_FILTER] = "test" + options[CONF_INCL_FILTER] = ["test"] + 
options[CONF_EXCL_FILTER] = ["test"] config_entry = MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG, options=options, entry_id="test" + domain=DOMAIN, + data=MOCK_CONFIG, + options=options, + entry_id="test", + version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) @@ -228,8 +235,6 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "", - CONF_INCL_FILTER: "", CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", @@ -240,8 +245,8 @@ async def test_reset_filters(hass: HomeAssistant) -> None: CONF_AVOID_FERRIES: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_TOLL_ROADS: True, - CONF_EXCL_FILTER: "", - CONF_INCL_FILTER: "", + CONF_EXCL_FILTER: [""], + CONF_INCL_FILTER: [""], CONF_REALTIME: False, CONF_UNITS: IMPERIAL_UNITS, CONF_VEHICLE_TYPE: "taxi", diff --git a/tests/components/waze_travel_time/test_init.py b/tests/components/waze_travel_time/test_init.py index 58aaa8983a7..9c59278ff99 100644 --- a/tests/components/waze_travel_time/test_init.py +++ b/tests/components/waze_travel_time/test_init.py @@ -2,11 +2,32 @@ import pytest -from homeassistant.components.waze_travel_time.const import DEFAULT_OPTIONS +from homeassistant.components.waze_travel_time.const import ( + CONF_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS, + CONF_EXCL_FILTER, + CONF_INCL_FILTER, + CONF_REALTIME, + CONF_UNITS, + CONF_VEHICLE_TYPE, + DEFAULT_AVOID_FERRIES, + DEFAULT_AVOID_SUBSCRIPTION_ROADS, + DEFAULT_AVOID_TOLL_ROADS, + DEFAULT_FILTER, + DEFAULT_OPTIONS, + DEFAULT_REALTIME, + DEFAULT_VEHICLE_TYPE, + DOMAIN, + METRIC_UNITS, +) +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from .const import MOCK_CONFIG +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("data", "options"), @@ -43,3 +64,59 @@ async def test_service_get_travel_times(hass: HomeAssistant) -> None: }, ] } + + +@pytest.mark.usefixtures("mock_update") +async def test_migrate_entry_v1_v2(hass: HomeAssistant) -> None: + """Test successful migration of entry data.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=MOCK_CONFIG, + options={ + CONF_REALTIME: DEFAULT_REALTIME, + CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, + CONF_UNITS: METRIC_UNITS, + CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + }, + ) + + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) + + assert updated_entry.state is ConfigEntryState.LOADED + assert updated_entry.version == 2 + assert updated_entry.options[CONF_INCL_FILTER] == DEFAULT_FILTER + assert updated_entry.options[CONF_EXCL_FILTER] == DEFAULT_FILTER + + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=MOCK_CONFIG, + options={ + CONF_REALTIME: DEFAULT_REALTIME, + CONF_VEHICLE_TYPE: DEFAULT_VEHICLE_TYPE, + CONF_UNITS: METRIC_UNITS, + CONF_AVOID_FERRIES: DEFAULT_AVOID_FERRIES, + CONF_AVOID_SUBSCRIPTION_ROADS: DEFAULT_AVOID_SUBSCRIPTION_ROADS, + CONF_AVOID_TOLL_ROADS: DEFAULT_AVOID_TOLL_ROADS, + CONF_INCL_FILTER: "include", + CONF_EXCL_FILTER: "exclude", + }, + ) + + mock_entry.add_to_hass(hass) + await 
hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + updated_entry = hass.config_entries.async_get_entry(mock_entry.entry_id) + + assert updated_entry.state is ConfigEntryState.LOADED + assert updated_entry.version == 2 + assert updated_entry.options[CONF_INCL_FILTER] == ["include"] + assert updated_entry.options[CONF_EXCL_FILTER] == ["exclude"] diff --git a/tests/components/waze_travel_time/test_sensor.py b/tests/components/waze_travel_time/test_sensor.py index e09a7199ff4..94e3a0cf9d7 100644 --- a/tests/components/waze_travel_time/test_sensor.py +++ b/tests/components/waze_travel_time/test_sensor.py @@ -3,6 +3,7 @@ import pytest from pywaze.route_calculator import WRCError +from homeassistant.components.waze_travel_time.config_flow import WazeConfigFlow from homeassistant.components.waze_travel_time.const import ( CONF_AVOID_FERRIES, CONF_AVOID_SUBSCRIPTION_ROADS, @@ -74,6 +75,8 @@ async def test_sensor(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, + CONF_INCL_FILTER: [""], + CONF_EXCL_FILTER: [""], }, ) ], @@ -98,7 +101,8 @@ async def test_imperial(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_INCL_FILTER: "IncludeThis", + CONF_INCL_FILTER: ["IncludeThis"], + CONF_EXCL_FILTER: [""], }, ) ], @@ -121,7 +125,8 @@ async def test_incl_filter(hass: HomeAssistant) -> None: CONF_AVOID_TOLL_ROADS: True, CONF_AVOID_SUBSCRIPTION_ROADS: True, CONF_AVOID_FERRIES: True, - CONF_EXCL_FILTER: "ExcludeThis", + CONF_INCL_FILTER: [""], + CONF_EXCL_FILTER: ["ExcludeThis"], }, ) ], @@ -138,7 +143,11 @@ async def test_sensor_failed_wrcerror( ) -> None: """Test that sensor update fails with log message.""" config_entry = MockConfigEntry( - domain=DOMAIN, data=MOCK_CONFIG, options=DEFAULT_OPTIONS, entry_id="test" + domain=DOMAIN, + data=MOCK_CONFIG, + options=DEFAULT_OPTIONS, + entry_id="test", + version=WazeConfigFlow.VERSION, ) config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) From e7cb646a581699c837622f009c1cd9414d8903f2 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Sun, 8 Sep 2024 12:50:29 +0200 Subject: [PATCH 1576/1635] Use json data instead of timedelta for tests in generic hygrostat (#124111) Use json data instead of timedelta for tests --- .../components/generic_hygrostat/test_humidifier.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index 2beaf423201..afe183cae5e 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -920,7 +920,7 @@ async def setup_comp_4(hass: HomeAssistant) -> None: "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, "device_class": "dehumidifier", - "min_cycle_duration": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1062,7 +1062,7 @@ async def setup_comp_6(hass: HomeAssistant) -> None: "wet_tolerance": 3, "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, - "min_cycle_duration": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1219,8 +1219,8 @@ async def setup_comp_7(hass: HomeAssistant) -> None: "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, 
"device_class": "dehumidifier", - "min_cycle_duration": datetime.timedelta(minutes=15), - "keep_alive": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 15}, + "keep_alive": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } @@ -1285,8 +1285,8 @@ async def setup_comp_8(hass: HomeAssistant) -> None: "wet_tolerance": 3, "humidifier": ENT_SWITCH, "target_sensor": ENT_SENSOR, - "min_cycle_duration": datetime.timedelta(minutes=15), - "keep_alive": datetime.timedelta(minutes=10), + "min_cycle_duration": {"minutes": 15}, + "keep_alive": {"minutes": 10}, "initial_state": True, "target_humidity": 40, } From 3139a7e4313944a00e91be57020e17412db5b838 Mon Sep 17 00:00:00 2001 From: Joakim Plate Date: Sun, 8 Sep 2024 12:51:08 +0200 Subject: [PATCH 1577/1635] Adjust generic hygrostat to detect reported events for stale tracking (#124109) * Listen to reported events for stale check * Always enable stale sensor tracking There is no reason not to have this enabled now that we track reported events for sensors. * Remove default stale code * Adjust for ruff change --- .../generic_hygrostat/humidifier.py | 50 +++++++++---------- .../generic_hygrostat/test_humidifier.py | 28 +++++++++-- 2 files changed, 49 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/generic_hygrostat/humidifier.py b/homeassistant/components/generic_hygrostat/humidifier.py index ab29e587232..0aa4ba2e515 100644 --- a/homeassistant/components/generic_hygrostat/humidifier.py +++ b/homeassistant/components/generic_hygrostat/humidifier.py @@ -36,6 +36,7 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, Event, EventStateChangedData, + EventStateReportedData, HomeAssistant, State, callback, @@ -45,6 +46,7 @@ from homeassistant.helpers.device import async_device_info_to_link_from_entity from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import ( async_track_state_change_event, + async_track_state_report_event, async_track_time_interval, ) from homeassistant.helpers.restore_state import RestoreEntity @@ -72,7 +74,6 @@ _LOGGER = logging.getLogger(__name__) ATTR_SAVED_HUMIDITY = "saved_humidity" - PLATFORM_SCHEMA = HUMIDIFIER_PLATFORM_SCHEMA.extend(HYGROSTAT_SCHEMA.schema) @@ -222,18 +223,21 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): """Run when entity about to be added.""" await super().async_added_to_hass() - # Add listener self.async_on_remove( async_track_state_change_event( - self.hass, self._sensor_entity_id, self._async_sensor_changed_event + self.hass, self._sensor_entity_id, self._async_sensor_event + ) + ) + self.async_on_remove( + async_track_state_report_event( + self.hass, self._sensor_entity_id, self._async_sensor_event ) ) self.async_on_remove( async_track_state_change_event( - self.hass, self._switch_entity_id, self._async_switch_changed_event + self.hass, self._switch_entity_id, self._async_switch_event ) ) - if self._keep_alive: self.async_on_remove( async_track_time_interval( @@ -253,7 +257,8 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): sensor_state.state if sensor_state is not None else "None", ) return - await self._async_sensor_changed(self._sensor_entity_id, None, sensor_state) + + await self._async_sensor_update(sensor_state) self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, _async_startup) @@ -391,25 +396,23 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): # Get default humidity from super class return super().max_humidity - async def 
_async_sensor_changed_event( - self, event: Event[EventStateChangedData] - ) -> None: - """Handle ambient humidity changes.""" - data = event.data - await self._async_sensor_changed( - data["entity_id"], data["old_state"], data["new_state"] - ) - - async def _async_sensor_changed( - self, entity_id: str, old_state: State | None, new_state: State | None + async def _async_sensor_event( + self, event: Event[EventStateChangedData] | Event[EventStateReportedData] ) -> None: """Handle ambient humidity changes.""" + new_state = event.data["new_state"] if new_state is None: return + await self._async_sensor_update(new_state) + + async def _async_sensor_update(self, new_state: State) -> None: + """Update state based on humidity sensor.""" + if self._sensor_stale_duration: if self._remove_stale_tracking: self._remove_stale_tracking() + self._remove_stale_tracking = async_track_time_interval( self.hass, self._async_sensor_not_responding, @@ -426,23 +429,18 @@ class GenericHygrostat(HumidifierEntity, RestoreEntity): state = self.hass.states.get(self._sensor_entity_id) _LOGGER.debug( "Sensor has not been updated for %s", - now - state.last_updated if now and state else "---", + now - state.last_reported if now and state else "---", ) _LOGGER.warning("Sensor is stalled, call the emergency stop") await self._async_update_humidity("Stalled") @callback - def _async_switch_changed_event(self, event: Event[EventStateChangedData]) -> None: + def _async_switch_event(self, event: Event[EventStateChangedData]) -> None: """Handle humidifier switch state changes.""" - data = event.data - self._async_switch_changed( - data["entity_id"], data["old_state"], data["new_state"] - ) + self._async_switch_changed(event.data["new_state"]) @callback - def _async_switch_changed( - self, entity_id: str, old_state: State | None, new_state: State | None - ) -> None: + def _async_switch_changed(self, new_state: State | None) -> None: """Handle humidifier switch state changes.""" if new_state is None: return diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index afe183cae5e..9cd51baa576 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -3,6 +3,7 @@ import datetime from freezegun import freeze_time +from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -520,6 +521,7 @@ async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: calls = await _setup_switch(hass, False) _setup_sensor(hass, 36) await hass.async_block_till_done() + calls.clear() await hass.services.async_call( DOMAIN, SERVICE_SET_HUMIDITY, @@ -540,6 +542,7 @@ async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: calls = await _setup_switch(hass, True) _setup_sensor(hass, 45) await hass.async_block_till_done() + calls.clear() await hass.services.async_call( DOMAIN, SERVICE_SET_HUMIDITY, @@ -1733,7 +1736,9 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: @pytest.mark.usefixtures("setup_comp_1") async def test_sensor_stale_duration( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, ) -> None: """Test turn off on sensor stale.""" @@ -1775,14 +1780,31 @@ async def test_sensor_stale_duration( assert hass.states.get(humidifier_switch).state == STATE_ON # Wait 11 minutes - async_fire_time_changed(hass, dt_util.utcnow() + 
datetime.timedelta(minutes=11)) + freezer.tick(datetime.timedelta(minutes=11)) + async_fire_time_changed(hass) await hass.async_block_till_done() # 11 minutes later, no news from the sensor : emergency cut off assert hass.states.get(humidifier_switch).state == STATE_OFF assert "emergency" in caplog.text - # Updated value from sensor received + # Updated value from sensor received (same value) + _setup_sensor(hass, 23) + await hass.async_block_till_done() + + # A new value has arrived, the humidifier should go ON + assert hass.states.get(humidifier_switch).state == STATE_ON + + # Wait 11 minutes + freezer.tick(datetime.timedelta(minutes=11)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + # 11 minutes later, no news from the sensor : emergency cut off + assert hass.states.get(humidifier_switch).state == STATE_OFF + assert "emergency" in caplog.text + + # Updated value from sensor received (new value) _setup_sensor(hass, 24) await hass.async_block_till_done() From 8acc027f383438003e60d2ca8d7376aca0c71e84 Mon Sep 17 00:00:00 2001 From: Simon <80467011+sorgfresser@users.noreply.github.com> Date: Sun, 8 Sep 2024 13:11:26 +0200 Subject: [PATCH 1578/1635] Add voice settings to ElevenLabs options flow (#123265) Add voice settings to options flow --- .../components/elevenlabs/config_flow.py | 87 ++++++++++- homeassistant/components/elevenlabs/const.py | 11 ++ .../components/elevenlabs/strings.json | 22 ++- homeassistant/components/elevenlabs/tts.py | 43 +++++- .../components/elevenlabs/test_config_flow.py | 59 +++++++- tests/components/elevenlabs/test_tts.py | 138 +++++++++++++++++- 6 files changed, 349 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/elevenlabs/config_flow.py b/homeassistant/components/elevenlabs/config_flow.py index cf04304510a..6eec35d0583 100644 --- a/homeassistant/components/elevenlabs/config_flow.py +++ b/homeassistant/components/elevenlabs/config_flow.py @@ -23,7 +23,23 @@ from homeassistant.helpers.selector import ( SelectSelectorConfig, ) -from .const import CONF_MODEL, CONF_VOICE, DEFAULT_MODEL, DOMAIN +from .const import ( + CONF_CONFIGURE_VOICE, + CONF_MODEL, + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, + CONF_VOICE, + DEFAULT_MODEL, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, + DOMAIN, +) USER_STEP_SCHEMA = vol.Schema({vol.Required(CONF_API_KEY): str}) @@ -92,6 +108,8 @@ class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): # id -> name self.voices: dict[str, str] = {} self.models: dict[str, str] = {} + self.model: str | None = None + self.voice: str | None = None async def async_step_init( self, user_input: dict[str, Any] | None = None @@ -103,6 +121,11 @@ class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): assert self.models and self.voices if user_input is not None: + self.model = user_input[CONF_MODEL] + self.voice = user_input[CONF_VOICE] + configure_voice = user_input.pop(CONF_CONFIGURE_VOICE) + if configure_voice: + return await self.async_step_voice_settings() return self.async_create_entry( title="ElevenLabs", data=user_input, @@ -139,7 +162,69 @@ class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry): ] ) ), + vol.Required(CONF_CONFIGURE_VOICE, default=False): bool, } ), self.options, ) + + async def async_step_voice_settings( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle voice settings.""" + assert self.voices and self.models + if 
user_input is not None: + user_input[CONF_MODEL] = self.model + user_input[CONF_VOICE] = self.voice + return self.async_create_entry( + title="ElevenLabs", + data=user_input, + ) + return self.async_show_form( + step_id="voice_settings", + data_schema=self.elevenlabs_config_options_voice_schema(), + ) + + def elevenlabs_config_options_voice_schema(self) -> vol.Schema: + """Elevenlabs options voice schema.""" + return vol.Schema( + { + vol.Optional( + CONF_STABILITY, + default=self.config_entry.options.get( + CONF_STABILITY, DEFAULT_STABILITY + ), + ): vol.All( + vol.Coerce(float), + vol.Range(min=0, max=1), + ), + vol.Optional( + CONF_SIMILARITY, + default=self.config_entry.options.get( + CONF_SIMILARITY, DEFAULT_SIMILARITY + ), + ): vol.All( + vol.Coerce(float), + vol.Range(min=0, max=1), + ), + vol.Optional( + CONF_OPTIMIZE_LATENCY, + default=self.config_entry.options.get( + CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY + ), + ): vol.All(int, vol.Range(min=0, max=4)), + vol.Optional( + CONF_STYLE, + default=self.config_entry.options.get(CONF_STYLE, DEFAULT_STYLE), + ): vol.All( + vol.Coerce(float), + vol.Range(min=0, max=1), + ), + vol.Optional( + CONF_USE_SPEAKER_BOOST, + default=self.config_entry.options.get( + CONF_USE_SPEAKER_BOOST, DEFAULT_USE_SPEAKER_BOOST + ), + ): bool, + } + ) diff --git a/homeassistant/components/elevenlabs/const.py b/homeassistant/components/elevenlabs/const.py index c0fc3c7b1b0..040d38d272c 100644 --- a/homeassistant/components/elevenlabs/const.py +++ b/homeassistant/components/elevenlabs/const.py @@ -2,6 +2,17 @@ CONF_VOICE = "voice" CONF_MODEL = "model" +CONF_CONFIGURE_VOICE = "configure_voice" +CONF_STABILITY = "stability" +CONF_SIMILARITY = "similarity" +CONF_OPTIMIZE_LATENCY = "optimize_streaming_latency" +CONF_STYLE = "style" +CONF_USE_SPEAKER_BOOST = "use_speaker_boost" DOMAIN = "elevenlabs" DEFAULT_MODEL = "eleven_multilingual_v2" +DEFAULT_STABILITY = 0.5 +DEFAULT_SIMILARITY = 0.75 +DEFAULT_OPTIMIZE_LATENCY = 0 +DEFAULT_STYLE = 0 +DEFAULT_USE_SPEAKER_BOOST = True diff --git a/homeassistant/components/elevenlabs/strings.json b/homeassistant/components/elevenlabs/strings.json index 16b40137090..b346f94a963 100644 --- a/homeassistant/components/elevenlabs/strings.json +++ b/homeassistant/components/elevenlabs/strings.json @@ -19,11 +19,29 @@ "init": { "data": { "voice": "Voice", - "model": "Model" + "model": "Model", + "configure_voice": "Configure advanced voice settings" }, "data_description": { "voice": "Voice to use for the TTS.", - "model": "ElevenLabs model to use. Please note that not all models support all languages equally well." + "model": "ElevenLabs model to use. Please note that not all models support all languages equally well.", + "configure_voice": "Configure advanced voice settings. Find more information in the ElevenLabs documentation." + } + }, + "voice_settings": { + "data": { + "stability": "Stability", + "similarity": "Similarity", + "optimize_streaming_latency": "Latency", + "style": "Style", + "use_speaker_boost": "Speaker boost" + }, + "data_description": { + "stability": "Stability of the generated audio. Higher values lead to less emotional audio.", + "similarity": "Similarity of the generated audio to the original voice. Higher values may result in more similar audio, but may also introduce background noise.", + "optimize_streaming_latency": "Optimize the model for streaming. This may reduce the quality of the generated audio.", + "style": "Style of the generated audio. 
Recommended to keep at 0 for most almost all use cases.", + "use_speaker_boost": "Use speaker boost to increase the similarity of the generated audio to the original voice." } } } diff --git a/homeassistant/components/elevenlabs/tts.py b/homeassistant/components/elevenlabs/tts.py index 35ba6053cd8..e7f35775560 100644 --- a/homeassistant/components/elevenlabs/tts.py +++ b/homeassistant/components/elevenlabs/tts.py @@ -3,11 +3,12 @@ from __future__ import annotations import logging +from types import MappingProxyType from typing import Any from elevenlabs.client import AsyncElevenLabs from elevenlabs.core import ApiError -from elevenlabs.types import Model, Voice as ElevenLabsVoice +from elevenlabs.types import Model, Voice as ElevenLabsVoice, VoiceSettings from homeassistant.components.tts import ( ATTR_VOICE, @@ -21,11 +22,36 @@ from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import EleventLabsConfigEntry -from .const import CONF_VOICE, DOMAIN +from .const import ( + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, + CONF_VOICE, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, + DOMAIN, +) _LOGGER = logging.getLogger(__name__) +def to_voice_settings(options: MappingProxyType[str, Any]) -> VoiceSettings: + """Return voice settings.""" + return VoiceSettings( + stability=options.get(CONF_STABILITY, DEFAULT_STABILITY), + similarity_boost=options.get(CONF_SIMILARITY, DEFAULT_SIMILARITY), + style=options.get(CONF_STYLE, DEFAULT_STYLE), + use_speaker_boost=options.get( + CONF_USE_SPEAKER_BOOST, DEFAULT_USE_SPEAKER_BOOST + ), + ) + + async def async_setup_entry( hass: HomeAssistant, config_entry: EleventLabsConfigEntry, @@ -35,6 +61,7 @@ async def async_setup_entry( client = config_entry.runtime_data.client voices = (await client.voices.get_all()).voices default_voice_id = config_entry.options[CONF_VOICE] + voice_settings = to_voice_settings(config_entry.options) async_add_entities( [ ElevenLabsTTSEntity( @@ -44,6 +71,10 @@ async def async_setup_entry( default_voice_id, config_entry.entry_id, config_entry.title, + voice_settings, + config_entry.options.get( + CONF_OPTIMIZE_LATENCY, DEFAULT_OPTIMIZE_LATENCY + ), ) ] ) @@ -62,6 +93,8 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): default_voice_id: str, entry_id: str, title: str, + voice_settings: VoiceSettings, + latency: int = 0, ) -> None: """Init ElevenLabs TTS service.""" self._client = client @@ -77,6 +110,10 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): ] if voice_indices: self._voices.insert(0, self._voices.pop(voice_indices[0])) + self._voice_settings = voice_settings + self._latency = latency + + # Entity attributes self._attr_unique_id = entry_id self._attr_name = title self._attr_device_info = DeviceInfo( @@ -105,6 +142,8 @@ class ElevenLabsTTSEntity(TextToSpeechEntity): audio = await self._client.generate( text=message, voice=voice_id, + optimize_streaming_latency=self._latency, + voice_settings=self._voice_settings, model=self._model.model_id, ) bytes_combined = b"".join([byte_seg async for byte_seg in audio]) diff --git a/tests/components/elevenlabs/test_config_flow.py b/tests/components/elevenlabs/test_config_flow.py index 853c49d48ff..971fa75939a 100644 --- a/tests/components/elevenlabs/test_config_flow.py +++ b/tests/components/elevenlabs/test_config_flow.py @@ -3,9 +3,20 @@ from unittest.mock import 
AsyncMock from homeassistant.components.elevenlabs.const import ( + CONF_CONFIGURE_VOICE, CONF_MODEL, + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, CONF_VOICE, DEFAULT_MODEL, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, DOMAIN, ) from homeassistant.config_entries import SOURCE_USER @@ -89,6 +100,52 @@ async def test_options_flow_init( ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert mock_entry.options == {CONF_MODEL: "model1", CONF_VOICE: "voice1"} + assert mock_entry.options == { + CONF_MODEL: "model1", + CONF_VOICE: "voice1", + } mock_setup_entry.assert_called_once() + + +async def test_options_flow_voice_settings_default( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_async_client: AsyncMock, + mock_entry: MockConfigEntry, +) -> None: + """Test options flow voice settings.""" + mock_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(mock_entry.entry_id) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_MODEL: "model1", + CONF_VOICE: "voice1", + CONF_CONFIGURE_VOICE: True, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "voice_settings" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert mock_entry.options == { + CONF_MODEL: "model1", + CONF_VOICE: "voice1", + CONF_OPTIMIZE_LATENCY: DEFAULT_OPTIMIZE_LATENCY, + CONF_SIMILARITY: DEFAULT_SIMILARITY, + CONF_STABILITY: DEFAULT_STABILITY, + CONF_STYLE: DEFAULT_STYLE, + CONF_USE_SPEAKER_BOOST: DEFAULT_USE_SPEAKER_BOOST, + } diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index 8b14ab26487..9ed96117daa 100644 --- a/tests/components/elevenlabs/test_tts.py +++ b/tests/components/elevenlabs/test_tts.py @@ -8,11 +8,25 @@ from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from elevenlabs.core import ApiError -from elevenlabs.types import GetVoicesResponse +from elevenlabs.types import GetVoicesResponse, VoiceSettings import pytest from homeassistant.components import tts -from homeassistant.components.elevenlabs.const import CONF_MODEL, CONF_VOICE, DOMAIN +from homeassistant.components.elevenlabs.const import ( + CONF_MODEL, + CONF_OPTIMIZE_LATENCY, + CONF_SIMILARITY, + CONF_STABILITY, + CONF_STYLE, + CONF_USE_SPEAKER_BOOST, + CONF_VOICE, + DEFAULT_OPTIMIZE_LATENCY, + DEFAULT_SIMILARITY, + DEFAULT_STABILITY, + DEFAULT_STYLE, + DEFAULT_USE_SPEAKER_BOOST, + DOMAIN, +) from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_ID, DOMAIN as DOMAIN_MP, @@ -53,17 +67,32 @@ async def setup_internal_url(hass: HomeAssistant) -> None: ) +@pytest.fixture +def mock_similarity(): + """Mock similarity.""" + return DEFAULT_SIMILARITY / 2 + + +@pytest.fixture +def mock_latency(): + """Mock latency.""" + return (DEFAULT_OPTIMIZE_LATENCY + 1) % 5 # 0, 1, 2, 3, 4 + + @pytest.fixture(name="setup") async def setup_fixture( hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any], + config_options_voice: dict[str, Any], request: pytest.FixtureRequest, mock_async_client: AsyncMock, ) 
-> AsyncMock: """Set up the test environment.""" if request.param == "mock_config_entry_setup": await mock_config_entry_setup(hass, config_data, config_options) + elif request.param == "mock_config_entry_setup_voice": + await mock_config_entry_setup(hass, config_data, config_options_voice) else: raise RuntimeError("Invalid setup fixture") @@ -83,6 +112,18 @@ def config_options_fixture() -> dict[str, Any]: return {} +@pytest.fixture(name="config_options_voice") +def config_options_voice_fixture(mock_similarity, mock_latency) -> dict[str, Any]: + """Return config options.""" + return { + CONF_OPTIMIZE_LATENCY: mock_latency, + CONF_SIMILARITY: mock_similarity, + CONF_STABILITY: DEFAULT_STABILITY, + CONF_STYLE: DEFAULT_STYLE, + CONF_USE_SPEAKER_BOOST: DEFAULT_USE_SPEAKER_BOOST, + } + + async def mock_config_entry_setup( hass: HomeAssistant, config_data: dict[str, Any], config_options: dict[str, Any] ) -> None: @@ -146,6 +187,12 @@ async def test_tts_service_speak( """Test tts service.""" tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) tts_entity._client.generate.reset_mock() + assert tts_entity._voice_settings == VoiceSettings( + stability=DEFAULT_STABILITY, + similarity_boost=DEFAULT_SIMILARITY, + style=DEFAULT_STYLE, + use_speaker_boost=DEFAULT_USE_SPEAKER_BOOST, + ) await hass.services.async_call( tts.DOMAIN, @@ -161,7 +208,11 @@ async def test_tts_service_speak( ) tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", voice="voice2", model="model1" + text="There is a person at the front door.", + voice="voice2", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, ) @@ -219,7 +270,11 @@ async def test_tts_service_speak_lang_config( ) tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", voice="voice1", model="model1" + text="There is a person at the front door.", + voice="voice1", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, ) @@ -266,5 +321,78 @@ async def test_tts_service_speak_error( ) tts_entity._client.generate.assert_called_once_with( - text="There is a person at the front door.", voice="voice1", model="model1" + text="There is a person at the front door.", + voice="voice1", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, + ) + + +@pytest.mark.parametrize( + "config_data", + [ + {}, + {tts.CONF_LANG: "de"}, + {tts.CONF_LANG: "en"}, + {tts.CONF_LANG: "ja"}, + {tts.CONF_LANG: "es"}, + ], +) +@pytest.mark.parametrize( + ("setup", "tts_service", "service_data"), + [ + ( + "mock_config_entry_setup_voice", + "speak", + { + ATTR_ENTITY_ID: "tts.mock_title", + tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", + tts.ATTR_MESSAGE: "There is a person at the front door.", + tts.ATTR_OPTIONS: {tts.ATTR_VOICE: "voice2"}, + }, + ), + ], + indirect=["setup"], +) +async def test_tts_service_speak_voice_settings( + setup: AsyncMock, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + calls: list[ServiceCall], + tts_service: str, + service_data: dict[str, Any], + mock_similarity: float, + mock_latency: int, +) -> None: + """Test tts service.""" + tts_entity = hass.data[tts.DOMAIN].get_entity(service_data[ATTR_ENTITY_ID]) + tts_entity._client.generate.reset_mock() + assert tts_entity._voice_settings == VoiceSettings( + stability=DEFAULT_STABILITY, + 
similarity_boost=mock_similarity, + style=DEFAULT_STYLE, + use_speaker_boost=DEFAULT_USE_SPEAKER_BOOST, + ) + assert tts_entity._latency == mock_latency + + await hass.services.async_call( + tts.DOMAIN, + tts_service, + service_data, + blocking=True, + ) + + assert len(calls) == 1 + assert ( + await retrieve_media(hass, hass_client, calls[0].data[ATTR_MEDIA_CONTENT_ID]) + == HTTPStatus.OK + ) + + tts_entity._client.generate.assert_called_once_with( + text="There is a person at the front door.", + voice="voice2", + model="model1", + voice_settings=tts_entity._voice_settings, + optimize_streaming_latency=tts_entity._latency, ) From 1ffd797e0af32ad5a2452e13cb64df8cba5c9b91 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 13:11:57 +0200 Subject: [PATCH 1579/1635] Clean up Mold indicator (#123080) * Improve code quality on mold_indicator * mypy * Fix mypy --- .strict-typing | 1 + .../components/mold_indicator/sensor.py | 113 +++++++++--------- mypy.ini | 10 ++ 3 files changed, 65 insertions(+), 59 deletions(-) diff --git a/.strict-typing b/.strict-typing index 84c22d1cfca..bea0b1be991 100644 --- a/.strict-typing +++ b/.strict-typing @@ -316,6 +316,7 @@ homeassistant.components.minecraft_server.* homeassistant.components.mjpeg.* homeassistant.components.modbus.* homeassistant.components.modem_callerid.* +homeassistant.components.mold_indicator.* homeassistant.components.monzo.* homeassistant.components.moon.* homeassistant.components.mopeka.* diff --git a/homeassistant/components/mold_indicator/sensor.py b/homeassistant/components/mold_indicator/sensor.py index 9064e0387e5..2d80bc9f6e1 100644 --- a/homeassistant/components/mold_indicator/sensor.py +++ b/homeassistant/components/mold_indicator/sensor.py @@ -4,13 +4,16 @@ from __future__ import annotations import logging import math +from typing import TYPE_CHECKING, Any import voluptuous as vol from homeassistant import util from homeassistant.components.sensor import ( PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, + SensorDeviceClass, SensorEntity, + SensorStateClass, ) from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -30,7 +33,7 @@ from homeassistant.core import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_state_change_event -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType from homeassistant.util.unit_conversion import TemperatureConverter from homeassistant.util.unit_system import METRIC_SYSTEM @@ -67,11 +70,11 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up MoldIndicator sensor.""" - name = config.get(CONF_NAME, DEFAULT_NAME) - indoor_temp_sensor = config.get(CONF_INDOOR_TEMP) - outdoor_temp_sensor = config.get(CONF_OUTDOOR_TEMP) - indoor_humidity_sensor = config.get(CONF_INDOOR_HUMIDITY) - calib_factor = config.get(CONF_CALIBRATION_FACTOR) + name: str = config[CONF_NAME] + indoor_temp_sensor: str = config[CONF_INDOOR_TEMP] + outdoor_temp_sensor: str = config[CONF_OUTDOOR_TEMP] + indoor_humidity_sensor: str = config[CONF_INDOOR_HUMIDITY] + calib_factor: float = config[CONF_CALIBRATION_FACTOR] async_add_entities( [ @@ -92,36 +95,39 @@ class MoldIndicator(SensorEntity): """Represents a MoldIndication sensor.""" _attr_should_poll = False + _attr_native_unit_of_measurement = PERCENTAGE + _attr_device_class = 
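The ElevenLabs options-flow and TTS tests above all assert the same contract: voice options stored on the config entry are merged with the integration defaults and handed to the client as an elevenlabs VoiceSettings object, alongside optimize_streaming_latency. The sketch below shows that mapping in isolation; it uses only constants that appear in the imports above, while the helper name is hypothetical and the real entity code is not part of this diff.

from elevenlabs.types import VoiceSettings

from homeassistant.components.elevenlabs.const import (
    CONF_SIMILARITY,
    CONF_STABILITY,
    CONF_STYLE,
    CONF_USE_SPEAKER_BOOST,
    DEFAULT_SIMILARITY,
    DEFAULT_STABILITY,
    DEFAULT_STYLE,
    DEFAULT_USE_SPEAKER_BOOST,
)


def voice_settings_from_options(options: dict) -> VoiceSettings:
    """Build VoiceSettings from entry options, falling back to the defaults.

    Hypothetical helper for illustration only.
    """
    return VoiceSettings(
        stability=options.get(CONF_STABILITY, DEFAULT_STABILITY),
        similarity_boost=options.get(CONF_SIMILARITY, DEFAULT_SIMILARITY),
        style=options.get(CONF_STYLE, DEFAULT_STYLE),
        use_speaker_boost=options.get(CONF_USE_SPEAKER_BOOST, DEFAULT_USE_SPEAKER_BOOST),
    )

With an empty options dict this yields the all-defaults object asserted in test_tts_service_speak; with the similarity option overridden it matches the object asserted in test_tts_service_speak_voice_settings.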
SensorDeviceClass.HUMIDITY + _attr_state_class = SensorStateClass.MEASUREMENT def __init__( self, - name, - is_metric, - indoor_temp_sensor, - outdoor_temp_sensor, - indoor_humidity_sensor, - calib_factor, - ): + name: str, + is_metric: bool, + indoor_temp_sensor: str, + outdoor_temp_sensor: str, + indoor_humidity_sensor: str, + calib_factor: float, + ) -> None: """Initialize the sensor.""" - self._state = None - self._name = name + self._state: str | None = None + self._attr_name = name self._indoor_temp_sensor = indoor_temp_sensor self._indoor_humidity_sensor = indoor_humidity_sensor self._outdoor_temp_sensor = outdoor_temp_sensor self._calib_factor = calib_factor self._is_metric = is_metric - self._available = False + self._attr_available = False self._entities = { - self._indoor_temp_sensor, - self._indoor_humidity_sensor, - self._outdoor_temp_sensor, + indoor_temp_sensor, + indoor_humidity_sensor, + outdoor_temp_sensor, } - self._dewpoint = None - self._indoor_temp = None - self._outdoor_temp = None - self._indoor_hum = None - self._crit_temp = None + self._dewpoint: float | None = None + self._indoor_temp: float | None = None + self._outdoor_temp: float | None = None + self._indoor_hum: float | None = None + self._crit_temp: float | None = None async def async_added_to_hass(self) -> None: """Register callbacks.""" @@ -145,7 +151,7 @@ class MoldIndicator(SensorEntity): self.async_schedule_update_ha_state(True) @callback - def mold_indicator_startup(event): + def mold_indicator_startup(event: Event) -> None: """Add listeners and get 1st state.""" _LOGGER.debug("Startup for %s", self.entity_id) @@ -199,11 +205,11 @@ class MoldIndicator(SensorEntity): return False if entity == self._indoor_temp_sensor: - self._indoor_temp = MoldIndicator._update_temp_sensor(new_state) + self._indoor_temp = self._update_temp_sensor(new_state) elif entity == self._outdoor_temp_sensor: - self._outdoor_temp = MoldIndicator._update_temp_sensor(new_state) + self._outdoor_temp = self._update_temp_sensor(new_state) elif entity == self._indoor_humidity_sensor: - self._indoor_hum = MoldIndicator._update_hum_sensor(new_state) + self._indoor_hum = self._update_hum_sensor(new_state) return True @@ -295,7 +301,7 @@ class MoldIndicator(SensorEntity): _LOGGER.debug("Update state for %s", self.entity_id) # check all sensors if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp): - self._available = False + self._attr_available = False self._dewpoint = None self._crit_temp = None return @@ -304,15 +310,17 @@ class MoldIndicator(SensorEntity): self._calc_dewpoint() self._calc_moldindicator() if self._state is None: - self._available = False + self._attr_available = False self._dewpoint = None self._crit_temp = None else: - self._available = True + self._attr_available = True - def _calc_dewpoint(self): + def _calc_dewpoint(self) -> None: """Calculate the dewpoint for the indoor air.""" # Use magnus approximation to calculate the dew point + if TYPE_CHECKING: + assert self._indoor_temp and self._indoor_hum alpha = MAGNUS_K2 * self._indoor_temp / (MAGNUS_K3 + self._indoor_temp) beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + self._indoor_temp) @@ -326,8 +334,11 @@ class MoldIndicator(SensorEntity): ) _LOGGER.debug("Dewpoint: %f %s", self._dewpoint, UnitOfTemperature.CELSIUS) - def _calc_moldindicator(self): + def _calc_moldindicator(self) -> None: """Calculate the humidity at the (cold) calibration point.""" + if TYPE_CHECKING: + assert self._outdoor_temp and self._indoor_temp and self._dewpoint + if None in 
(self._dewpoint, self._calib_factor) or self._calib_factor == 0: _LOGGER.debug( "Invalid inputs - dewpoint: %s, calibration-factor: %s", @@ -335,7 +346,7 @@ class MoldIndicator(SensorEntity): self._calib_factor, ) self._state = None - self._available = False + self._attr_available = False self._crit_temp = None return @@ -374,37 +385,21 @@ class MoldIndicator(SensorEntity): _LOGGER.debug("Mold indicator humidity: %s", self._state) @property - def name(self): - """Return the name.""" - return self._name - - @property - def native_unit_of_measurement(self): - """Return the unit of measurement.""" - return PERCENTAGE - - @property - def native_value(self): + def native_value(self) -> StateType: """Return the state of the entity.""" return self._state @property - def available(self): - """Return the availability of this sensor.""" - return self._available - - @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" if self._is_metric: - return { - ATTR_DEWPOINT: round(self._dewpoint, 2), - ATTR_CRITICAL_TEMP: round(self._crit_temp, 2), - } + convert_to = UnitOfTemperature.CELSIUS + else: + convert_to = UnitOfTemperature.FAHRENHEIT dewpoint = ( TemperatureConverter.convert( - self._dewpoint, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT + self._dewpoint, UnitOfTemperature.CELSIUS, convert_to ) if self._dewpoint is not None else None @@ -412,13 +407,13 @@ class MoldIndicator(SensorEntity): crit_temp = ( TemperatureConverter.convert( - self._crit_temp, UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT + self._crit_temp, UnitOfTemperature.CELSIUS, convert_to ) if self._crit_temp is not None else None ) return { - ATTR_DEWPOINT: round(dewpoint, 2), - ATTR_CRITICAL_TEMP: round(crit_temp, 2), + ATTR_DEWPOINT: round(dewpoint, 2) if dewpoint else None, + ATTR_CRITICAL_TEMP: round(crit_temp, 2) if crit_temp else None, } diff --git a/mypy.ini b/mypy.ini index 2686fbe3062..d7604012305 100644 --- a/mypy.ini +++ b/mypy.ini @@ -2916,6 +2916,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.mold_indicator.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.monzo.*] check_untyped_defs = true disallow_incomplete_defs = true From 5b434aae6ed5fdf015cfe5b309071edfb2b50c02 Mon Sep 17 00:00:00 2001 From: "Mr. Bubbles" Date: Sun, 8 Sep 2024 13:45:12 +0200 Subject: [PATCH 1580/1635] Add DeviceInfo to Bring integration (#122419) * Add DeviceInfo to Bring integration * deeplink to shopping list * Move device info to a entity base class --- homeassistant/components/bring/entity.py | 37 ++++++++++++++++++++++++ homeassistant/components/bring/todo.py | 28 +++--------------- 2 files changed, 41 insertions(+), 24 deletions(-) create mode 100644 homeassistant/components/bring/entity.py diff --git a/homeassistant/components/bring/entity.py b/homeassistant/components/bring/entity.py new file mode 100644 index 00000000000..c5e0b84a190 --- /dev/null +++ b/homeassistant/components/bring/entity.py @@ -0,0 +1,37 @@ +"""Base entity for the Bring! 
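The mold_indicator _calc_dewpoint hunk above shows only the Magnus alpha/beta intermediate terms; the line that actually computes the dew point falls outside the hunk context. For reference, the approximation they feed into works out as in the sketch below. The constant values are assumptions (the module defines MAGNUS_K2 and MAGNUS_K3 elsewhere; 17.62 and 243.12 °C are the commonly used Magnus parameters), and the final return line is reconstructed rather than copied from the diff.

import math

MAGNUS_K2 = 17.62   # assumed value of the module constant
MAGNUS_K3 = 243.12  # assumed value, in °C


def dewpoint(temp_c: float, rel_humidity: float) -> float:
    """Magnus approximation of the dew point for the given indoor air."""
    alpha = MAGNUS_K2 * temp_c / (MAGNUS_K3 + temp_c)
    beta = MAGNUS_K2 * MAGNUS_K3 / (MAGNUS_K3 + temp_c)
    ln_rh = math.log(rel_humidity / 100)
    # Equivalent to the textbook form K3 * gamma / (K2 - gamma) with
    # gamma = ln(RH/100) + alpha, because K2 - alpha == beta.
    return MAGNUS_K3 * (alpha + ln_rh) / (beta - ln_rh)


# 21 °C indoor air at 50 % relative humidity condenses at roughly 10.2 °C.
print(round(dewpoint(21.0, 50.0), 1))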
integration.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import DOMAIN +from .coordinator import BringData, BringDataUpdateCoordinator + + +class BringBaseEntity(CoordinatorEntity[BringDataUpdateCoordinator]): + """Bring base entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: BringDataUpdateCoordinator, + bring_list: BringData, + ) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + + self._list_uuid = bring_list["listUuid"] + self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{self._list_uuid}" + + self.device_info = DeviceInfo( + entry_type=DeviceEntryType.SERVICE, + name=bring_list["name"], + identifiers={ + (DOMAIN, f"{coordinator.config_entry.unique_id}_{self._list_uuid}") + }, + manufacturer="Bring! Labs AG", + model="Bring! Grocery Shopping List", + configuration_url=f"https://web.getbring.com/app/lists/{list(self.coordinator.data.keys()).index(self._list_uuid)}", + ) diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index 4fb90860899..97d7eba48bd 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -23,7 +23,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import CoordinatorEntity from . import BringConfigEntry from .const import ( @@ -32,7 +31,8 @@ from .const import ( DOMAIN, SERVICE_PUSH_NOTIFICATION, ) -from .coordinator import BringData, BringDataUpdateCoordinator +from .coordinator import BringData +from .entity import BringBaseEntity async def async_setup_entry( @@ -43,16 +43,10 @@ async def async_setup_entry( """Set up the sensor from a config entry created in the integrations UI.""" coordinator = config_entry.runtime_data - unique_id = config_entry.unique_id - - if TYPE_CHECKING: - assert unique_id - async_add_entities( BringTodoListEntity( coordinator, bring_list=bring_list, - unique_id=unique_id, ) for bring_list in coordinator.data.values() ) @@ -71,13 +65,11 @@ async def async_setup_entry( ) -class BringTodoListEntity( - CoordinatorEntity[BringDataUpdateCoordinator], TodoListEntity -): +class BringTodoListEntity(BringBaseEntity, TodoListEntity): """A To-do List representation of the Bring! 
Shopping List.""" _attr_translation_key = "shopping_list" - _attr_has_entity_name = True + _attr_name = None _attr_supported_features = ( TodoListEntityFeature.CREATE_TODO_ITEM | TodoListEntityFeature.UPDATE_TODO_ITEM @@ -85,18 +77,6 @@ class BringTodoListEntity( | TodoListEntityFeature.SET_DESCRIPTION_ON_ITEM ) - def __init__( - self, - coordinator: BringDataUpdateCoordinator, - bring_list: BringData, - unique_id: str, - ) -> None: - """Initialize BringTodoListEntity.""" - super().__init__(coordinator) - self._list_uuid = bring_list["listUuid"] - self._attr_name = bring_list["name"] - self._attr_unique_id = f"{unique_id}_{self._list_uuid}" - @property def todo_items(self) -> list[TodoItem]: """Return the todo items.""" From d4f0aaa089a306877c18a5cc61234bf3ad1ccc03 Mon Sep 17 00:00:00 2001 From: Guido Schmitz Date: Sun, 8 Sep 2024 13:50:36 +0200 Subject: [PATCH 1581/1635] Add last restart sensor to devolo_home_network (#122190) * Add last restart sensor to devolo_home_network * Add missing test * Rename fetch function * Fix mypy --- .../devolo_home_network/__init__.py | 22 +++++ .../components/devolo_home_network/const.py | 1 + .../components/devolo_home_network/entity.py | 1 + .../components/devolo_home_network/sensor.py | 85 ++++++++++++++----- .../devolo_home_network/strings.json | 3 + tests/components/devolo_home_network/const.py | 2 + tests/components/devolo_home_network/mock.py | 2 + .../snapshots/test_sensor.ambr | 59 +++++++++++-- .../devolo_home_network/test_sensor.py | 71 +++++++++++++--- 9 files changed, 207 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/devolo_home_network/__init__.py b/homeassistant/components/devolo_home_network/__init__.py index 59aafb1eb9c..f8a0f015543 100644 --- a/homeassistant/components/devolo_home_network/__init__.py +++ b/homeassistant/components/devolo_home_network/__init__.py @@ -39,6 +39,7 @@ from .const import ( CONNECTED_WIFI_CLIENTS, DOMAIN, FIRMWARE_UPDATE_INTERVAL, + LAST_RESTART, LONG_UPDATE_INTERVAL, NEIGHBORING_WIFI_NETWORKS, REGULAR_FIRMWARE, @@ -127,6 +128,19 @@ async def async_setup_entry( except DeviceUnavailable as err: raise UpdateFailed(err) from err + async def async_update_last_restart() -> int: + """Fetch data from API endpoint.""" + assert device.device + update_sw_version(device_registry, device) + try: + return await device.device.async_uptime() + except DeviceUnavailable as err: + raise UpdateFailed(err) from err + except DevicePasswordProtected as err: + raise ConfigEntryAuthFailed( + err, translation_domain=DOMAIN, translation_key="password_wrong" + ) from err + async def async_update_wifi_connected_station() -> list[ConnectedStationInfo]: """Fetch data from API endpoint.""" assert device.device @@ -166,6 +180,14 @@ async def async_setup_entry( update_method=async_update_led_status, update_interval=SHORT_UPDATE_INTERVAL, ) + if device.device and "restart" in device.device.features: + coordinators[LAST_RESTART] = DataUpdateCoordinator( + hass, + _LOGGER, + name=LAST_RESTART, + update_method=async_update_last_restart, + update_interval=SHORT_UPDATE_INTERVAL, + ) if device.device and "update" in device.device.features: coordinators[REGULAR_FIRMWARE] = DataUpdateCoordinator( hass, diff --git a/homeassistant/components/devolo_home_network/const.py b/homeassistant/components/devolo_home_network/const.py index 4caa4f5b60b..92b97d59423 100644 --- a/homeassistant/components/devolo_home_network/const.py +++ b/homeassistant/components/devolo_home_network/const.py @@ -23,6 +23,7 @@ CONNECTED_TO_ROUTER = 
"connected_to_router" CONNECTED_WIFI_CLIENTS = "connected_wifi_clients" IDENTIFY = "identify" IMAGE_GUEST_WIFI = "image_guest_wifi" +LAST_RESTART = "last_restart" NEIGHBORING_WIFI_NETWORKS = "neighboring_wifi_networks" PAIRING = "pairing" PLC_RX_RATE = "plc_rx_rate" diff --git a/homeassistant/components/devolo_home_network/entity.py b/homeassistant/components/devolo_home_network/entity.py index 9d469ccfb16..d381f48ca05 100644 --- a/homeassistant/components/devolo_home_network/entity.py +++ b/homeassistant/components/devolo_home_network/entity.py @@ -26,6 +26,7 @@ type _DataType = ( | list[NeighborAPInfo] | WifiGuestAccessGet | bool + | int ) diff --git a/homeassistant/components/devolo_home_network/sensor.py b/homeassistant/components/devolo_home_network/sensor.py index 2fd8ab9220c..667bbc2c557 100644 --- a/homeassistant/components/devolo_home_network/sensor.py +++ b/homeassistant/components/devolo_home_network/sensor.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime, timedelta from enum import StrEnum from typing import Any, Generic, TypeVar @@ -20,11 +21,13 @@ from homeassistant.const import EntityCategory, UnitOfDataRate from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.util.dt import utcnow from . import DevoloHomeNetworkConfigEntry from .const import ( CONNECTED_PLC_DEVICES, CONNECTED_WIFI_CLIENTS, + LAST_RESTART, NEIGHBORING_WIFI_NETWORKS, PLC_RX_RATE, PLC_TX_RATE, @@ -33,13 +36,36 @@ from .entity import DevoloCoordinatorEntity PARALLEL_UPDATES = 1 + +def _last_restart(runtime: int) -> datetime: + """Calculate uptime. 
As fetching the data might also take some time, let's floor to the nearest 5 seconds.""" + now = utcnow() + return ( + now + - timedelta(seconds=runtime) + - timedelta(seconds=(now.timestamp() - runtime) % 5) + ) + + _CoordinatorDataT = TypeVar( "_CoordinatorDataT", - bound=LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo], + bound=LogicalNetwork + | DataRate + | list[ConnectedStationInfo] + | list[NeighborAPInfo] + | int, ) _ValueDataT = TypeVar( "_ValueDataT", - bound=LogicalNetwork | DataRate | list[ConnectedStationInfo] | list[NeighborAPInfo], + bound=LogicalNetwork + | DataRate + | list[ConnectedStationInfo] + | list[NeighborAPInfo] + | int, +) +_SensorDataT = TypeVar( + "_SensorDataT", + bound=int | float | datetime, ) @@ -52,15 +78,15 @@ class DataRateDirection(StrEnum): @dataclass(frozen=True, kw_only=True) class DevoloSensorEntityDescription( - SensorEntityDescription, Generic[_CoordinatorDataT] + SensorEntityDescription, Generic[_CoordinatorDataT, _SensorDataT] ): """Describes devolo sensor entity.""" - value_func: Callable[[_CoordinatorDataT], float] + value_func: Callable[[_CoordinatorDataT], _SensorDataT] -SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { - CONNECTED_PLC_DEVICES: DevoloSensorEntityDescription[LogicalNetwork]( +SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any, Any]] = { + CONNECTED_PLC_DEVICES: DevoloSensorEntityDescription[LogicalNetwork, int]( key=CONNECTED_PLC_DEVICES, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, @@ -68,18 +94,20 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { {device.mac_address_from for device in data.data_rates} ), ), - CONNECTED_WIFI_CLIENTS: DevoloSensorEntityDescription[list[ConnectedStationInfo]]( + CONNECTED_WIFI_CLIENTS: DevoloSensorEntityDescription[ + list[ConnectedStationInfo], int + ]( key=CONNECTED_WIFI_CLIENTS, state_class=SensorStateClass.MEASUREMENT, value_func=len, ), - NEIGHBORING_WIFI_NETWORKS: DevoloSensorEntityDescription[list[NeighborAPInfo]]( + NEIGHBORING_WIFI_NETWORKS: DevoloSensorEntityDescription[list[NeighborAPInfo], int]( key=NEIGHBORING_WIFI_NETWORKS, entity_category=EntityCategory.DIAGNOSTIC, entity_registry_enabled_default=False, value_func=len, ), - PLC_RX_RATE: DevoloSensorEntityDescription[DataRate]( + PLC_RX_RATE: DevoloSensorEntityDescription[DataRate, float]( key=PLC_RX_RATE, entity_category=EntityCategory.DIAGNOSTIC, name="PLC downlink PHY rate", @@ -88,7 +116,7 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { value_func=lambda data: getattr(data, DataRateDirection.RX, 0), suggested_display_precision=0, ), - PLC_TX_RATE: DevoloSensorEntityDescription[DataRate]( + PLC_TX_RATE: DevoloSensorEntityDescription[DataRate, float]( key=PLC_TX_RATE, entity_category=EntityCategory.DIAGNOSTIC, name="PLC uplink PHY rate", @@ -97,6 +125,13 @@ SENSOR_TYPES: dict[str, DevoloSensorEntityDescription[Any]] = { value_func=lambda data: getattr(data, DataRateDirection.TX, 0), suggested_display_precision=0, ), + LAST_RESTART: DevoloSensorEntityDescription[int, datetime]( + key=LAST_RESTART, + entity_category=EntityCategory.DIAGNOSTIC, + entity_registry_enabled_default=False, + device_class=SensorDeviceClass.TIMESTAMP, + value_func=_last_restart, + ), } @@ -109,7 +144,7 @@ async def async_setup_entry( device = entry.runtime_data.device coordinators = entry.runtime_data.coordinators - entities: list[BaseDevoloSensorEntity[Any, Any]] = [] + entities: list[BaseDevoloSensorEntity[Any, Any, Any]] = [] if 
device.plcnet: entities.append( DevoloSensorEntity( @@ -139,6 +174,14 @@ async def async_setup_entry( peer, ) ) + if device.device and "restart" in device.device.features: + entities.append( + DevoloSensorEntity( + entry, + coordinators[LAST_RESTART], + SENSOR_TYPES[LAST_RESTART], + ) + ) if device.device and "wifi1" in device.device.features: entities.append( DevoloSensorEntity( @@ -158,7 +201,7 @@ async def async_setup_entry( class BaseDevoloSensorEntity( - Generic[_CoordinatorDataT, _ValueDataT], + Generic[_CoordinatorDataT, _ValueDataT, _SensorDataT], DevoloCoordinatorEntity[_CoordinatorDataT], SensorEntity, ): @@ -168,34 +211,38 @@ class BaseDevoloSensorEntity( self, entry: DevoloHomeNetworkConfigEntry, coordinator: DataUpdateCoordinator[_CoordinatorDataT], - description: DevoloSensorEntityDescription[_ValueDataT], + description: DevoloSensorEntityDescription[_ValueDataT, _SensorDataT], ) -> None: """Initialize entity.""" self.entity_description = description super().__init__(entry, coordinator) -class DevoloSensorEntity(BaseDevoloSensorEntity[_CoordinatorDataT, _CoordinatorDataT]): +class DevoloSensorEntity( + BaseDevoloSensorEntity[_CoordinatorDataT, _CoordinatorDataT, _SensorDataT] +): """Representation of a generic devolo sensor.""" - entity_description: DevoloSensorEntityDescription[_CoordinatorDataT] + entity_description: DevoloSensorEntityDescription[_CoordinatorDataT, _SensorDataT] @property - def native_value(self) -> float: + def native_value(self) -> int | float | datetime: """State of the sensor.""" return self.entity_description.value_func(self.coordinator.data) -class DevoloPlcDataRateSensorEntity(BaseDevoloSensorEntity[LogicalNetwork, DataRate]): +class DevoloPlcDataRateSensorEntity( + BaseDevoloSensorEntity[LogicalNetwork, DataRate, float] +): """Representation of a devolo PLC data rate sensor.""" - entity_description: DevoloSensorEntityDescription[DataRate] + entity_description: DevoloSensorEntityDescription[DataRate, float] def __init__( self, entry: DevoloHomeNetworkConfigEntry, coordinator: DataUpdateCoordinator[LogicalNetwork], - description: DevoloSensorEntityDescription[DataRate], + description: DevoloSensorEntityDescription[DataRate, float], peer: str, ) -> None: """Initialize entity.""" diff --git a/homeassistant/components/devolo_home_network/strings.json b/homeassistant/components/devolo_home_network/strings.json index 97348c5c43c..0799bb14172 100644 --- a/homeassistant/components/devolo_home_network/strings.json +++ b/homeassistant/components/devolo_home_network/strings.json @@ -60,6 +60,9 @@ "connected_wifi_clients": { "name": "Connected Wi-Fi clients" }, + "last_restart": { + "name": "Last restart of the device" + }, "neighboring_wifi_networks": { "name": "Neighboring Wi-Fi networks" }, diff --git a/tests/components/devolo_home_network/const.py b/tests/components/devolo_home_network/const.py index 9d8faab9b13..7b0551b1daf 100644 --- a/tests/components/devolo_home_network/const.py +++ b/tests/components/devolo_home_network/const.py @@ -171,3 +171,5 @@ PLCNET_ATTACHED = LogicalNetwork( }, ], ) + +UPTIME = 100 diff --git a/tests/components/devolo_home_network/mock.py b/tests/components/devolo_home_network/mock.py index 4b999667e53..fc7786669b7 100644 --- a/tests/components/devolo_home_network/mock.py +++ b/tests/components/devolo_home_network/mock.py @@ -19,6 +19,7 @@ from .const import ( IP, NEIGHBOR_ACCESS_POINTS, PLCNET, + UPTIME, ) @@ -64,6 +65,7 @@ class MockDevice(Device): ) self.device.async_get_led_setting = AsyncMock(return_value=False) 
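The _last_restart helper added to the devolo_home_network sensor platform above anchors the reported boot time to a 5-second grid, so jitter in when the coordinator polls (and in how long the request takes) does not produce a slightly different timestamp on every update. The sketch below repeats the same arithmetic with the current time injected as a parameter, which makes the flooring easy to verify in isolation; the wrapper name and the sample timestamps are illustrative only.

from datetime import datetime, timedelta, timezone


def last_restart(now: datetime, runtime: int) -> datetime:
    """Same arithmetic as _last_restart, with `now` passed in for clarity."""
    return (
        now
        - timedelta(seconds=runtime)
        - timedelta(seconds=(now.timestamp() - runtime) % 5)
    )


poll_1 = datetime(2023, 1, 13, 12, 0, 3, tzinfo=timezone.utc)
poll_2 = datetime(2023, 1, 13, 12, 0, 4, tzinfo=timezone.utc)
# One second later the device reports one more second of uptime, yet both
# polls floor to the same boot time: 2023-01-13 11:58:20+00:00.
assert last_restart(poll_1, 100) == last_restart(poll_2, 101)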
self.device.async_restart = AsyncMock(return_value=True) + self.device.async_uptime = AsyncMock(return_value=UPTIME) self.device.async_start_wps = AsyncMock(return_value=True) self.device.async_get_wifi_connected_station = AsyncMock( return_value=CONNECTED_STATIONS diff --git a/tests/components/devolo_home_network/snapshots/test_sensor.ambr b/tests/components/devolo_home_network/snapshots/test_sensor.ambr index d985ac35495..2e6730cdb21 100644 --- a/tests/components/devolo_home_network/snapshots/test_sensor.ambr +++ b/tests/components/devolo_home_network/snapshots/test_sensor.ambr @@ -1,5 +1,5 @@ # serializer version: 1 -# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2] +# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2-1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Connected PLC devices', @@ -12,7 +12,7 @@ 'state': '1', }) # --- -# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2].1 +# name: test_sensor[connected_plc_devices-async_get_network_overview-interval2-1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -45,7 +45,7 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0] +# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0-1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Connected Wi-Fi clients', @@ -59,7 +59,7 @@ 'state': '1', }) # --- -# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0].1 +# name: test_sensor[connected_wi_fi_clients-async_get_wifi_connected_station-interval0-1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), @@ -94,7 +94,54 @@ 'unit_of_measurement': None, }) # --- -# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1] +# name: test_sensor[last_restart_of_the_device-async_uptime-interval3-2023-01-13T11:58:50+00:00] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Mock Title Last restart of the device', + }), + 'context': , + 'entity_id': 'sensor.mock_title_last_restart_of_the_device', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2023-01-13T11:58:20+00:00', + }) +# --- +# name: test_sensor[last_restart_of_the_device-async_uptime-interval3-2023-01-13T11:58:50+00:00].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_last_restart_of_the_device', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Last restart of the device', + 'platform': 'devolo_home_network', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'last_restart', + 'unique_id': '1234567890_last_restart', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1-1] StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mock Title Neighboring Wi-Fi networks', @@ -107,7 +154,7 @@ 'state': '1', }) # --- -# name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1].1 +# 
name: test_sensor[neighboring_wi_fi_networks-async_get_wifi_neighbor_access_points-interval1-1].1 EntityRegistryEntrySnapshot({ 'aliases': set({ }), diff --git a/tests/components/devolo_home_network/test_sensor.py b/tests/components/devolo_home_network/test_sensor.py index efcbaa803df..cf0207a2800 100644 --- a/tests/components/devolo_home_network/test_sensor.py +++ b/tests/components/devolo_home_network/test_sensor.py @@ -3,16 +3,18 @@ from datetime import timedelta from unittest.mock import AsyncMock -from devolo_plc_api.exceptions.device import DeviceUnavailable +from devolo_plc_api.exceptions.device import DevicePasswordProtected, DeviceUnavailable from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.devolo_home_network.const import ( + DOMAIN, LONG_UPDATE_INTERVAL, SHORT_UPDATE_INTERVAL, ) -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as PLATFORM +from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -33,59 +35,74 @@ async def test_sensor_setup(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert ( - hass.states.get(f"{DOMAIN}.{device_name}_connected_wi_fi_clients") is not None + hass.states.get(f"{PLATFORM}.{device_name}_connected_wi_fi_clients") is not None + ) + assert hass.states.get(f"{PLATFORM}.{device_name}_connected_plc_devices") is None + assert ( + hass.states.get(f"{PLATFORM}.{device_name}_neighboring_wi_fi_networks") is None ) - assert hass.states.get(f"{DOMAIN}.{device_name}_connected_plc_devices") is None - assert hass.states.get(f"{DOMAIN}.{device_name}_neighboring_wi_fi_networks") is None assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" + f"{PLATFORM}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" ) is not None ) assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" + f"{PLATFORM}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" ) is not None ) assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_downlink_phyrate_{PLCNET.devices[2].user_device_name}" + f"{PLATFORM}.{device_name}_plc_downlink_phyrate_{PLCNET.devices[2].user_device_name}" ) is None ) assert ( hass.states.get( - f"{DOMAIN}.{device_name}_plc_uplink_phyrate_{PLCNET.devices[2].user_device_name}" + f"{PLATFORM}.{device_name}_plc_uplink_phyrate_{PLCNET.devices[2].user_device_name}" ) is None ) + assert ( + hass.states.get(f"{PLATFORM}.{device_name}_last_restart_of_the_device") is None + ) await hass.config_entries.async_unload(entry.entry_id) @pytest.mark.parametrize( - ("name", "get_method", "interval"), + ("name", "get_method", "interval", "expected_state"), [ ( "connected_wi_fi_clients", "async_get_wifi_connected_station", SHORT_UPDATE_INTERVAL, + "1", ), ( "neighboring_wi_fi_networks", "async_get_wifi_neighbor_access_points", LONG_UPDATE_INTERVAL, + "1", ), ( "connected_plc_devices", "async_get_network_overview", LONG_UPDATE_INTERVAL, + "1", + ), + ( + "last_restart_of_the_device", + "async_uptime", + SHORT_UPDATE_INTERVAL, + "2023-01-13T11:58:50+00:00", ), ], ) @pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.freeze_time("2023-01-13 12:00:00+00:00") async def test_sensor( hass: 
HomeAssistant, mock_device: MockDevice, @@ -95,11 +112,12 @@ async def test_sensor( name: str, get_method: str, interval: timedelta, + expected_state: str, ) -> None: """Test state change of a sensor device.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key = f"{DOMAIN}.{device_name}_{name}" + state_key = f"{PLATFORM}.{device_name}_{name}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -125,7 +143,7 @@ async def test_sensor( state = hass.states.get(state_key) assert state is not None - assert state.state == "1" + assert state.state == expected_state await hass.config_entries.async_unload(entry.entry_id) @@ -140,8 +158,8 @@ async def test_update_plc_phyrates( """Test state change of plc_downlink_phyrate and plc_uplink_phyrate sensor devices.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key_downlink = f"{DOMAIN}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" - state_key_uplink = f"{DOMAIN}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" + state_key_downlink = f"{PLATFORM}.{device_name}_plc_downlink_phy_rate_{PLCNET.devices[1].user_device_name}" + state_key_uplink = f"{PLATFORM}.{device_name}_plc_uplink_phy_rate_{PLCNET.devices[1].user_device_name}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() @@ -181,3 +199,28 @@ async def test_update_plc_phyrates( assert state.state == str(PLCNET.data_rates[0].tx_rate) await hass.config_entries.async_unload(entry.entry_id) + + +async def test_update_last_update_auth_failed( + hass: HomeAssistant, mock_device: MockDevice +) -> None: + """Test getting the last update state with wrong password triggers the reauth flow.""" + entry = configure_integration(hass) + mock_device.device.async_uptime.side_effect = DevicePasswordProtected + + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.SETUP_ERROR + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + + flow = flows[0] + assert flow["step_id"] == "reauth_confirm" + assert flow["handler"] == DOMAIN + + assert "context" in flow + assert flow["context"]["source"] == SOURCE_REAUTH + assert flow["context"]["entry_id"] == entry.entry_id + + await hass.config_entries.async_unload(entry.entry_id) From 2b2f5d6693d9be6e239442b5ed86b11d3ecb0d03 Mon Sep 17 00:00:00 2001 From: Luke Lashley Date: Sun, 8 Sep 2024 07:56:42 -0400 Subject: [PATCH 1582/1635] Add sleep to map select for Roborock (#122625) * Add sleep to map select * Update homeassistant/components/roborock/select.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/roborock/select.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/homeassistant/components/roborock/select.py b/homeassistant/components/roborock/select.py index f047ec475c2..d9e87fbcd08 100644 --- a/homeassistant/components/roborock/select.py +++ b/homeassistant/components/roborock/select.py @@ -1,5 +1,6 @@ """Support for Roborock select.""" +import asyncio from collections.abc import Callable from dataclasses import dataclass @@ -13,6 +14,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import RoborockConfigEntry +from .const import MAP_SLEEP from .coordinator import RoborockDataUpdateCoordinator from .device import RoborockCoordinatedEntityV1 @@ -133,6 +135,9 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity): RoborockCommand.LOAD_MULTI_MAP, [map_id], ) + # We need to wait after updating the map + # so that other commands will be executed correctly. + await asyncio.sleep(MAP_SLEEP) break @property From 926ffe536cdbeb0ae9f906623a56e8fa931a6e0d Mon Sep 17 00:00:00 2001 From: dougiteixeira <31328123+dougiteixeira@users.noreply.github.com> Date: Sun, 8 Sep 2024 08:59:54 -0300 Subject: [PATCH 1583/1635] Fix UI config validation for button and switch actions in Template (#121810) Fix IU config validation for button and switch actions in Template --- homeassistant/components/template/button.py | 2 +- homeassistant/components/template/switch.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/template/button.py b/homeassistant/components/template/button.py index 52435d88971..67ce7e7a16b 100644 --- a/homeassistant/components/template/button.py +++ b/homeassistant/components/template/button.py @@ -51,7 +51,7 @@ BUTTON_SCHEMA = ( CONFIG_BUTTON_SCHEMA = vol.Schema( { vol.Optional(CONF_NAME): cv.template, - vol.Optional(CONF_PRESS): selector.ActionSelector(), + vol.Optional(CONF_PRESS): cv.SCRIPT_SCHEMA, vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), } diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index 9145625f706..bddb51e5e67 100644 --- a/homeassistant/components/template/switch.py +++ b/homeassistant/components/template/switch.py @@ -64,8 +64,8 @@ SWITCH_CONFIG_SCHEMA = vol.Schema( { vol.Required(CONF_NAME): cv.template, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, - vol.Optional(CONF_TURN_ON): selector.ActionSelector(), - vol.Optional(CONF_TURN_OFF): selector.ActionSelector(), + vol.Optional(CONF_TURN_ON): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_TURN_OFF): cv.SCRIPT_SCHEMA, vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), } ) From c0ee12ca415035277d04e1f5b4ab14291fc1bd54 Mon Sep 17 00:00:00 2001 From: Jan Rieger Date: Sun, 8 Sep 2024 14:00:34 +0200 Subject: [PATCH 1584/1635] Add translation to Jellyfin (#123857) * Add translation to Jellyfin * Fix * Address feedback --- homeassistant/components/jellyfin/sensor.py | 4 ++-- homeassistant/components/jellyfin/strings.json | 7 +++++++ tests/components/jellyfin/test_sensor.py | 12 +++--------- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/jellyfin/sensor.py b/homeassistant/components/jellyfin/sensor.py index 3be4ccf2559..37926567b4e 100644 --- a/homeassistant/components/jellyfin/sensor.py +++ b/homeassistant/components/jellyfin/sensor.py @@ -35,9 +35,8 @@ SENSOR_TYPES: dict[str, JellyfinSensorEntityDescription] = { "sessions": JellyfinSensorEntityDescription( key="watching", translation_key="watching", - name=None, - native_unit_of_measurement="Watching", value_fn=_count_now_playing, + native_unit_of_measurement="clients", ) } @@ -59,6 +58,7 @@ async def async_setup_entry( class JellyfinSensor(JellyfinEntity, SensorEntity): """Defines a Jellyfin sensor entity.""" + _attr_has_entity_name = True entity_description: JellyfinSensorEntityDescription @property diff --git a/homeassistant/components/jellyfin/strings.json b/homeassistant/components/jellyfin/strings.json index 
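The template button and switch fix above swaps selector.ActionSelector() for cv.SCRIPT_SCHEMA in the config-entry schemas, so actions created from the UI validate as a script (a single action mapping or a list of action steps). A minimal sketch of what that validation accepts is below; the schema name and the sample action are illustrative, not copied from the integration.

import voluptuous as vol

import homeassistant.helpers.config_validation as cv

# A "press" action the way the UI flow stores it: a list of script steps.
CONFIG_BUTTON_SCHEMA = vol.Schema({vol.Optional("press"): cv.SCRIPT_SCHEMA})

validated = CONFIG_BUTTON_SCHEMA(
    {"press": [{"service": "notify.notify", "data": {"message": "Button pressed"}}]}
)
print(validated["press"])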
fd11d8fbad2..f2afa0c8ad5 100644 --- a/homeassistant/components/jellyfin/strings.json +++ b/homeassistant/components/jellyfin/strings.json @@ -26,6 +26,13 @@ "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" } }, + "entity": { + "sensor": { + "watching": { + "name": "Active clients" + } + } + }, "options": { "step": { "init": { diff --git a/tests/components/jellyfin/test_sensor.py b/tests/components/jellyfin/test_sensor.py index 40a3e62a6c0..82d42d7a27a 100644 --- a/tests/components/jellyfin/test_sensor.py +++ b/tests/components/jellyfin/test_sensor.py @@ -4,12 +4,7 @@ from unittest.mock import MagicMock from homeassistant.components.jellyfin.const import DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS -from homeassistant.const import ( - ATTR_DEVICE_CLASS, - ATTR_FRIENDLY_NAME, - ATTR_ICON, - ATTR_UNIT_OF_MEASUREMENT, -) +from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, ATTR_ICON from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er @@ -24,13 +19,12 @@ async def test_watching( mock_jellyfin: MagicMock, ) -> None: """Test the Jellyfin watching sensor.""" - state = hass.states.get("sensor.jellyfin_server") + state = hass.states.get("sensor.jellyfin_server_active_clients") assert state assert state.attributes.get(ATTR_DEVICE_CLASS) is None - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "JELLYFIN-SERVER" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "JELLYFIN-SERVER Active clients" assert state.attributes.get(ATTR_ICON) is None assert state.attributes.get(ATTR_STATE_CLASS) is None - assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == "Watching" assert state.state == "3" entry = entity_registry.async_get(state.entity_id) From 26ede9a6790dd8da6104a5f25c29fe23e23ce800 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 14:06:40 +0200 Subject: [PATCH 1585/1635] Fix yale_smart_alarm on missing key (#125508) --- .../yale_smart_alarm/coordinator.py | 13 +- tests/components/yale_smart_alarm/conftest.py | 24 +- .../snapshots/test_diagnostics.ambr | 1644 +++++++++-------- .../components/yale_smart_alarm/test_lock.py | 6 +- 4 files changed, 854 insertions(+), 833 deletions(-) diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py index b47545ea88b..3bfd13b2152 100644 --- a/homeassistant/components/yale_smart_alarm/coordinator.py +++ b/homeassistant/components/yale_smart_alarm/coordinator.py @@ -154,10 +154,15 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): except YALE_BASE_ERRORS as error: raise UpdateFailed from error + cycle = data.cycle["data"] if data.cycle else None + status = data.status["data"] if data.status else None + online = data.online["data"] if data.online else None + panel_info = data.panel_info["data"] if data.panel_info else None + return { "arm_status": arm_status, - "cycle": data.cycle, - "status": data.status, - "online": data.online, - "panel_info": data.panel_info, + "cycle": cycle, + "status": status, + "online": online, + "panel_info": panel_info, } diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 6ac6dfc6871..0499b6212d6 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -82,10 +82,10 @@ def get_fixture_data() -> dict[str, Any]: def get_update_data(loaded_fixture: dict[str, Any]) -> 
YaleSmartAlarmData: """Load update data and return.""" - status = loaded_fixture["STATUS"] - cycle = loaded_fixture["CYCLE"] - online = loaded_fixture["ONLINE"] - panel_info = loaded_fixture["PANEL INFO"] + status = {"data": loaded_fixture["STATUS"]} + cycle = {"data": loaded_fixture["CYCLE"]} + online = {"data": loaded_fixture["ONLINE"]} + panel_info = {"data": loaded_fixture["PANEL INFO"]} return YaleSmartAlarmData( status=status, cycle=cycle, @@ -98,14 +98,14 @@ def get_update_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: def get_diag_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: """Load all data and return.""" - devices = loaded_fixture["DEVICES"] - mode = loaded_fixture["MODE"] - status = loaded_fixture["STATUS"] - cycle = loaded_fixture["CYCLE"] - online = loaded_fixture["ONLINE"] - history = loaded_fixture["HISTORY"] - panel_info = loaded_fixture["PANEL INFO"] - auth_check = loaded_fixture["AUTH CHECK"] + devices = {"data": loaded_fixture["DEVICES"]} + mode = {"data": loaded_fixture["MODE"]} + status = {"data": loaded_fixture["STATUS"]} + cycle = {"data": loaded_fixture["CYCLE"]} + online = {"data": loaded_fixture["ONLINE"]} + history = {"data": loaded_fixture["HISTORY"]} + panel_info = {"data": loaded_fixture["PANEL INFO"]} + auth_check = {"data": loaded_fixture["AUTH CHECK"]} return YaleSmartAlarmData( devices=devices, mode=mode, diff --git a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr index d4bbd42aaeb..750430b529a 100644 --- a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr @@ -2,27 +2,661 @@ # name: test_diagnostics dict({ 'auth_check': dict({ - 'agent': False, - 'dealer_group': 'yale', - 'dealer_id': '605', - 'first_login': '1', - 'id': '**REDACTED**', - 'is_auth': '1', - 'mac': '**REDACTED**', - 'mail_address': '**REDACTED**', - 'master': '1', - 'name': '**REDACTED**', - 'token_time': '2023-08-17 16:19:20', - 'user_id': '**REDACTED**', - 'xml_version': '2', + 'data': dict({ + 'agent': False, + 'dealer_group': 'yale', + 'dealer_id': '605', + 'first_login': '1', + 'id': '**REDACTED**', + 'is_auth': '1', + 'mac': '**REDACTED**', + 'mail_address': '**REDACTED**', + 'master': '1', + 'name': '**REDACTED**', + 'token_time': '2023-08-17 16:19:20', + 'user_id': '**REDACTED**', + 'xml_version': '2', + }), }), 'cycle': dict({ - 'alarm_event_latest': None, - 'capture_latest': None, - 'device_status': list([ + 'data': dict({ + 'alarm_event_latest': None, + 'capture_latest': None, + 'device_status': list([ + dict({ + '_state': 'locked', + '_state2': 'closed', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '35', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '1', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 
'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + '_state': 'unlocked', + '_state2': 'unknown', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': None, + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '2', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.unlock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + '_state': 'locked', + '_state2': 'unknown', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': None, + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '3', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': 
None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + '_state': 'closed', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '4', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.dc_close', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.dc_close', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + '_state': 'open', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '5', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.dc_open', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.dc_open', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + '_state': 'unavailable', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '000', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 
'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '6', + 'rf': None, + 'rssi': '0', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'unknwon', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_contact', + 'type_no': '4', + }), + dict({ + '_state': 'unlocked', + '_state2': 'closed', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '36', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '7', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.lock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.lock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + '_state': 'unlocked', + '_state2': 'open', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '4', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '8', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 
'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.unlock', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.unlock', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + '_state': 'unavailable', + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '002', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '02FF000001000000000000000000001E000100', + 'minigw_lock_status': '10', + 'minigw_number_of_credentials_supported': '10', + 'minigw_product_data': '21020120', + 'minigw_protocol': 'DM', + 'minigw_syncing': '0', + 'name': '**REDACTED**', + 'no': '9', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': 'device_status.error', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + 'device_status.error', + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': None, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.door_lock', + 'type_no': '72', + }), + dict({ + 'address': '**REDACTED**', + 'area': '1', + 'bypass': '0', + 'device_group': '001', + 'device_id': '**REDACTED**', + 'device_id2': '', + 'extension': None, + 'group_id': None, + 'group_name': None, + 'ipcam_trigger_by_zone1': None, + 'ipcam_trigger_by_zone2': None, + 'ipcam_trigger_by_zone3': None, + 'ipcam_trigger_by_zone4': None, + 'mac': '**REDACTED**', + 'minigw_configuration_data': '', + 'minigw_lock_status': '', + 'minigw_number_of_credentials_supported': '', + 'minigw_product_data': '', + 'minigw_protocol': '', + 'minigw_syncing': '', + 'name': '**REDACTED**', + 'no': '8', + 'rf': None, + 'rssi': '9', + 'scene_restore': None, + 'scene_trigger': '0', + 'sresp_button_1': None, + 'sresp_button_2': None, + 'sresp_button_3': None, + 'sresp_button_4': None, + 'status1': '', + 'status2': None, + 'status_dim_level': None, + 'status_fault': list([ + ]), + 'status_hue': None, + 'status_humi': None, + 'status_lux': '', + 'status_open': list([ + ]), + 'status_power': None, + 'status_saturation': None, + 'status_switch': None, + 'status_temp': 21, + 'status_temp_format': 'C', + 'status_total_energy': None, + 'thermo_c_setpoint': None, + 'thermo_c_setpoint_away': None, + 'thermo_fan_mode': None, + 'thermo_mode': None, + 
'thermo_schd_setting': None, + 'thermo_setpoint': None, + 'thermo_setpoint_away': None, + 'trigger_by_zone': list([ + ]), + 'type': 'device_type.temperature_sensor', + 'type_no': '40', + }), + ]), + 'model': list([ + dict({ + 'area': '1', + 'mode': 'disarm', + }), + ]), + 'panel_status': dict({ + 'warning_snd_mute': '0', + }), + 'report_event_latest': dict({ + 'cid_code': '1807', + 'event_time': None, + 'id': '**REDACTED**', + 'report_id': '1027299996', + 'time': '1692271914', + 'utc_event_time': None, + }), + }), + }), + 'devices': dict({ + 'data': list([ dict({ - '_state': 'locked', - '_state2': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -83,8 +717,6 @@ 'type_no': '72', }), dict({ - '_state': 'unlocked', - '_state2': 'unknown', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -144,8 +776,6 @@ 'type_no': '72', }), dict({ - '_state': 'locked', - '_state2': 'unknown', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -206,7 +836,6 @@ 'type_no': '72', }), dict({ - '_state': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -267,7 +896,6 @@ 'type_no': '4', }), dict({ - '_state': 'open', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -328,7 +956,6 @@ 'type_no': '4', }), dict({ - '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -388,8 +1015,6 @@ 'type_no': '4', }), dict({ - '_state': 'unlocked', - '_state2': 'closed', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -450,8 +1075,6 @@ 'type_no': '72', }), dict({ - '_state': 'unlocked', - '_state2': 'open', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -512,7 +1135,6 @@ 'type_no': '72', }), dict({ - '_state': 'unavailable', 'address': '**REDACTED**', 'area': '1', 'bypass': '0', @@ -632,799 +1254,193 @@ 'type_no': '40', }), ]), - 'model': list([ + }), + 'history': dict({ + 'data': list([ + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027299996', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:54', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180201101', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1802', + 'name': '**REDACTED**', + 'report_id': '1027299889', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:43', + 'type': 'device_type.door_lock', + 'user': 101, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027299587', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:31:11', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180101001', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1801', + 'name': '**REDACTED**', + 'report_id': '1027296099', + 'status_temp_format': 'C', + 'time': '2023/08/17 11:24:52', + 'type': 'device_type.door_lock', + 'user': 1, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027273782', + 'status_temp_format': 'C', + 'time': '2023/08/17 10:43:21', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180201101', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1802', + 'name': '**REDACTED**', + 
'report_id': '1027273230', + 'status_temp_format': 'C', + 'time': '2023/08/17 10:42:09', + 'type': 'device_type.door_lock', + 'user': 101, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1027100172', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:28:57', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + dict({ + 'area': 1, + 'cid': '18180101001', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1801', + 'name': '**REDACTED**', + 'report_id': '1027099978', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:28:39', + 'type': 'device_type.door_lock', + 'user': 1, + 'zone': 1, + }), + dict({ + 'area': 0, + 'cid': '18160200000', + 'cid_source': 'SYSTEM', + 'event_time': None, + 'event_type': '1602', + 'name': '', + 'report_id': '1027093266', + 'status_temp_format': 'C', + 'time': '2023/08/17 05:17:12', + 'type': '', + 'user': '', + 'zone': 0, + }), + dict({ + 'area': 1, + 'cid': '18180701000', + 'cid_source': 'DEVICE', + 'event_time': None, + 'event_type': '1807', + 'name': '**REDACTED**', + 'report_id': '1026912623', + 'status_temp_format': 'C', + 'time': '2023/08/16 20:29:36', + 'type': 'device_type.door_lock', + 'user': 0, + 'zone': 1, + }), + ]), + }), + 'mode': dict({ + 'data': list([ dict({ 'area': '1', 'mode': 'disarm', }), ]), - 'panel_status': dict({ - 'warning_snd_mute': '0', - }), - 'report_event_latest': dict({ - 'cid_code': '1807', - 'event_time': None, - 'id': '**REDACTED**', - 'report_id': '1027299996', - 'time': '1692271914', - 'utc_event_time': None, - }), }), - 'devices': list([ - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '35', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '1', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': 
None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': None, - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '2', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.unlock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': None, - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '3', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '4', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.dc_close', - 'status2': None, - 'status_dim_level': None, - 
'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.dc_close', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '5', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.dc_open', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.dc_open', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '000', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '6', - 'rf': None, - 'rssi': '0', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'unknwon', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_contact', - 'type_no': '4', - }), - dict({ - 'address': 
'**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '36', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '7', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.lock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.lock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '4', - 'minigw_number_of_credentials_supported': '10', - 'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '8', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.unlock', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.unlock', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '002', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '02FF000001000000000000000000001E000100', - 'minigw_lock_status': '10', - 'minigw_number_of_credentials_supported': '10', - 
'minigw_product_data': '21020120', - 'minigw_protocol': 'DM', - 'minigw_syncing': '0', - 'name': '**REDACTED**', - 'no': '9', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': 'device_status.error', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - 'device_status.error', - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': None, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.door_lock', - 'type_no': '72', - }), - dict({ - 'address': '**REDACTED**', - 'area': '1', - 'bypass': '0', - 'device_group': '001', - 'device_id': '**REDACTED**', - 'device_id2': '', - 'extension': None, - 'group_id': None, - 'group_name': None, - 'ipcam_trigger_by_zone1': None, - 'ipcam_trigger_by_zone2': None, - 'ipcam_trigger_by_zone3': None, - 'ipcam_trigger_by_zone4': None, - 'mac': '**REDACTED**', - 'minigw_configuration_data': '', - 'minigw_lock_status': '', - 'minigw_number_of_credentials_supported': '', - 'minigw_product_data': '', - 'minigw_protocol': '', - 'minigw_syncing': '', - 'name': '**REDACTED**', - 'no': '8', - 'rf': None, - 'rssi': '9', - 'scene_restore': None, - 'scene_trigger': '0', - 'sresp_button_1': None, - 'sresp_button_2': None, - 'sresp_button_3': None, - 'sresp_button_4': None, - 'status1': '', - 'status2': None, - 'status_dim_level': None, - 'status_fault': list([ - ]), - 'status_hue': None, - 'status_humi': None, - 'status_lux': '', - 'status_open': list([ - ]), - 'status_power': None, - 'status_saturation': None, - 'status_switch': None, - 'status_temp': 21, - 'status_temp_format': 'C', - 'status_total_energy': None, - 'thermo_c_setpoint': None, - 'thermo_c_setpoint_away': None, - 'thermo_fan_mode': None, - 'thermo_mode': None, - 'thermo_schd_setting': None, - 'thermo_setpoint': None, - 'thermo_setpoint_away': None, - 'trigger_by_zone': list([ - ]), - 'type': 'device_type.temperature_sensor', - 'type_no': '40', - }), - ]), - 'history': list([ - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027299996', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:54', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180201101', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1802', - 'name': '**REDACTED**', - 'report_id': '1027299889', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:43', - 'type': 'device_type.door_lock', - 'user': 101, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027299587', - 'status_temp_format': 'C', - 'time': '2023/08/17 11:31:11', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180101001', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1801', - 'name': '**REDACTED**', - 'report_id': '1027296099', - 
'status_temp_format': 'C', - 'time': '2023/08/17 11:24:52', - 'type': 'device_type.door_lock', - 'user': 1, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027273782', - 'status_temp_format': 'C', - 'time': '2023/08/17 10:43:21', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180201101', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1802', - 'name': '**REDACTED**', - 'report_id': '1027273230', - 'status_temp_format': 'C', - 'time': '2023/08/17 10:42:09', - 'type': 'device_type.door_lock', - 'user': 101, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1027100172', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:28:57', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - dict({ - 'area': 1, - 'cid': '18180101001', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1801', - 'name': '**REDACTED**', - 'report_id': '1027099978', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:28:39', - 'type': 'device_type.door_lock', - 'user': 1, - 'zone': 1, - }), - dict({ - 'area': 0, - 'cid': '18160200000', - 'cid_source': 'SYSTEM', - 'event_time': None, - 'event_type': '1602', - 'name': '', - 'report_id': '1027093266', - 'status_temp_format': 'C', - 'time': '2023/08/17 05:17:12', - 'type': '', - 'user': '', - 'zone': 0, - }), - dict({ - 'area': 1, - 'cid': '18180701000', - 'cid_source': 'DEVICE', - 'event_time': None, - 'event_type': '1807', - 'name': '**REDACTED**', - 'report_id': '1026912623', - 'status_temp_format': 'C', - 'time': '2023/08/16 20:29:36', - 'type': 'device_type.door_lock', - 'user': 0, - 'zone': 1, - }), - ]), - 'mode': list([ - dict({ - 'area': '1', - 'mode': 'disarm', - }), - ]), - 'online': 'online', + 'online': dict({ + 'data': 'online', + }), 'panel_info': dict({ - 'SMS_Balance': '50', - 'contact': '', - 'dealer_name': 'Poland', - 'mac': '**REDACTED**', - 'mail_address': '**REDACTED**', - 'name': '', - 'net_version': 'MINIGW-MZ-1_G 1.0.1.29A', - 'phone': 'UK-01902364606 / Sweden-0770373710 / Demark-89887818 / Norway-81569036', - 'report_account': '**REDACTED**', - 'rf51_version': '', - 'service_time': 'UK - Mon to Fri 8:30 til 17:30 / Scandinavia - Mon to Fri 8:00 til 20:00, Sat to Sun 10:00 til 15:00', - 'version': 'MINIGW-MZ-1_G 1.0.1.29A,,4.1.2.6.2,00:1D:94:0B:5E:A7,10111112,ML_yamga', - 'voice_balance': '0', - 'xml_version': '2', - 'zb_version': '4.1.2.6.2', - 'zw_version': '', + 'data': dict({ + 'SMS_Balance': '50', + 'contact': '', + 'dealer_name': 'Poland', + 'mac': '**REDACTED**', + 'mail_address': '**REDACTED**', + 'name': '', + 'net_version': 'MINIGW-MZ-1_G 1.0.1.29A', + 'phone': 'UK-01902364606 / Sweden-0770373710 / Demark-89887818 / Norway-81569036', + 'report_account': '**REDACTED**', + 'rf51_version': '', + 'service_time': 'UK - Mon to Fri 8:30 til 17:30 / Scandinavia - Mon to Fri 8:00 til 20:00, Sat to Sun 10:00 til 15:00', + 'version': 'MINIGW-MZ-1_G 1.0.1.29A,,4.1.2.6.2,00:1D:94:0B:5E:A7,10111112,ML_yamga', + 'voice_balance': '0', + 'xml_version': '2', + 'zb_version': '4.1.2.6.2', + 'zw_version': '', + }), }), 'status': dict({ - 'acfail': 'main.normal', - 'battery': 'main.normal', - 'gsm_rssi': '0', - 'imei': '', - 'imsi': '', - 'jam': 'main.normal', - 'rssi': '1', - 'tamper': 'main.normal', + 
'data': dict({ + 'acfail': 'main.normal', + 'battery': 'main.normal', + 'gsm_rssi': '0', + 'imei': '', + 'imsi': '', + 'jam': 'main.normal', + 'rssi': '1', + 'tamper': 'main.normal', + }), }), }) # --- diff --git a/tests/components/yale_smart_alarm/test_lock.py b/tests/components/yale_smart_alarm/test_lock.py index 7c67703924b..b1bbbaabc57 100644 --- a/tests/components/yale_smart_alarm/test_lock.py +++ b/tests/components/yale_smart_alarm/test_lock.py @@ -55,7 +55,7 @@ async def test_lock_service_calls( client = load_config_entry[1] data = deepcopy(get_data.cycle) - data["data"] = data.pop("device_status") + data["data"] = data["data"].pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "000"}) @@ -109,7 +109,7 @@ async def test_lock_service_call_fails( client = load_config_entry[1] data = deepcopy(get_data.cycle) - data["data"] = data.pop("device_status") + data["data"] = data["data"].pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(side_effect=UnknownError("test_side_effect")) @@ -161,7 +161,7 @@ async def test_lock_service_call_fails_with_incorrect_status( client = load_config_entry[1] data = deepcopy(get_data.cycle) - data["data"] = data.pop("device_status") + data["data"] = data["data"].pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "FFF"}) From 84def0c0414a67dd218aa661f6b49ab45b8da29c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 14:23:00 +0200 Subject: [PATCH 1586/1635] Deprecate aux_heat in elkm1 (#125372) * Deprecate aux_heat in elkm1 * Update homeassistant/components/elkm1/switch.py --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/elkm1/climate.py | 23 ++++++++++++- homeassistant/components/elkm1/strings.json | 13 ++++++++ homeassistant/components/elkm1/switch.py | 37 +++++++++++++++++++++ 3 files changed, 72 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/elkm1/climate.py b/homeassistant/components/elkm1/climate.py index 6281cca8592..177f17d6e7e 100644 --- a/homeassistant/components/elkm1/climate.py +++ b/homeassistant/components/elkm1/climate.py @@ -20,8 +20,9 @@ from homeassistant.components.climate import ( from homeassistant.const import PRECISION_WHOLE from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from . import ElkEntity, ElkM1ConfigEntry, create_elk_entities +from . 
import DOMAIN, ElkEntity, ElkM1ConfigEntry, create_elk_entities SUPPORT_HVAC = [ HVACMode.OFF, @@ -151,10 +152,30 @@ class ElkThermostat(ElkEntity, ClimateEntity): async def async_turn_aux_heat_on(self) -> None: """Turn auxiliary heater on.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._elk_set(ThermostatMode.EMERGENCY_HEAT, None) async def async_turn_aux_heat_off(self) -> None: """Turn auxiliary heater off.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._elk_set(ThermostatMode.HEAT, None) async def async_set_fan_mode(self, fan_mode: str) -> None: diff --git a/homeassistant/components/elkm1/strings.json b/homeassistant/components/elkm1/strings.json index c854307dd92..302f14b3f44 100644 --- a/homeassistant/components/elkm1/strings.json +++ b/homeassistant/components/elkm1/strings.json @@ -189,5 +189,18 @@ "name": "Sensor zone trigger", "description": "Triggers zone." } + }, + "issues": { + "migrate_aux_heat": { + "title": "Migration of Elk-M1 set_aux_heat action", + "fix_flow": { + "step": { + "confirm": { + "description": "The Elk-M1 `set_aux_heat` action has been migrated. A new emergency heat switch entity is available for each thermostat.\n\nUpdate any automations to use the new emergency heat switch entity. When this is done, Press submit to fix this issue.", + "title": "[%key:component::elkm1::issues::migrate_aux_heat::title%]" + } + } + } + } } } diff --git a/homeassistant/components/elkm1/switch.py b/homeassistant/components/elkm1/switch.py index f4820f57b3d..70b38802a42 100644 --- a/homeassistant/components/elkm1/switch.py +++ b/homeassistant/components/elkm1/switch.py @@ -4,13 +4,18 @@ from __future__ import annotations from typing import Any +from elkm1_lib.const import ThermostatMode, ThermostatSetting +from elkm1_lib.elements import Element +from elkm1_lib.elk import Elk from elkm1_lib.outputs import Output +from elkm1_lib.thermostats import Thermostat from homeassistant.components.switch import SwitchEntity from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import ElkAttachedEntity, ElkEntity, ElkM1ConfigEntry, create_elk_entities +from .models import ELKM1Data async def async_setup_entry( @@ -23,6 +28,9 @@ async def async_setup_entry( elk = elk_data.elk entities: list[ElkEntity] = [] create_elk_entities(elk_data, elk.outputs, "output", ElkOutput, entities) + create_elk_entities( + elk_data, elk.thermostats, "thermostat", ElkThermostatEMHeat, entities + ) async_add_entities(entities) @@ -43,3 +51,32 @@ class ElkOutput(ElkAttachedEntity, SwitchEntity): async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the output.""" self._element.turn_off() + + +class ElkThermostatEMHeat(ElkEntity, SwitchEntity): + """Elk Thermostat emergency heat as switch.""" + + _element: Thermostat + + def __init__(self, element: Element, elk: Elk, elk_data: ELKM1Data) -> None: + """Initialize the emergency heat switch.""" + super().__init__(element, elk, elk_data) + self._unique_id = f"{self._unique_id}emheat" + self._attr_name = f"{element.name} emergency heat" + + @property + def is_on(self) -> bool: + """Get the current emergency heat status.""" + return self._element.mode == ThermostatMode.EMERGENCY_HEAT + + def _elk_set(self, mode: ThermostatMode) -> None: + """Set the thermostat mode.""" + self._element.set(ThermostatSetting.MODE, mode) + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the output.""" + self._elk_set(ThermostatMode.EMERGENCY_HEAT) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the output.""" + self._elk_set(ThermostatMode.EMERGENCY_HEAT) From 2ef37f01b1c6e26f3a462c64eef69a6bb085f2a3 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 14:23:24 +0200 Subject: [PATCH 1587/1635] Deprecate aux_heat from Nexia climate entity, implement switch (#125250) * Remove deprecated aux_heat from nexia * Add back aux_heat * Raise issue --- homeassistant/components/nexia/climate.py | 22 ++++++++++++++ homeassistant/components/nexia/strings.json | 13 +++++++++ homeassistant/components/nexia/switch.py | 32 ++++++++++++++++++++- 3 files changed, 66 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/nexia/climate.py b/homeassistant/components/nexia/climate.py index a4bcc03c210..9b22607d5a8 100644 --- a/homeassistant/components/nexia/climate.py +++ b/homeassistant/components/nexia/climate.py @@ -35,6 +35,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_platform import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import VolDictType from .const import ( @@ -42,6 +43,7 @@ from .const import ( ATTR_DEHUMIDIFY_SETPOINT, ATTR_HUMIDIFY_SETPOINT, ATTR_RUN_MODE, + DOMAIN, ) from .coordinator import NexiaDataUpdateCoordinator from .entity import NexiaThermostatZoneEntity @@ -378,11 +380,31 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateEntity): async def async_turn_aux_heat_off(self) -> None: """Turn Aux Heat off.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) await self._thermostat.set_emergency_heat(False) self._signal_thermostat_update() async def async_turn_aux_heat_on(self) -> None: """Turn Aux Heat on.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + 
breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) await self._thermostat.set_emergency_heat(True) self._signal_thermostat_update() diff --git a/homeassistant/components/nexia/strings.json b/homeassistant/components/nexia/strings.json index 9e49f4bb793..acb57352d24 100644 --- a/homeassistant/components/nexia/strings.json +++ b/homeassistant/components/nexia/strings.json @@ -96,5 +96,18 @@ } } } + }, + "issues": { + "migrate_aux_heat": { + "title": "Migration of Nexia set_aux_heat action", + "fix_flow": { + "step": { + "confirm": { + "description": "The Nexia `set_aux_heat` action has been migrated. A new `aux_heat_only` switch entity is available for each thermostat.\n\nUpdate any automations to use the new Emergency heat switch entity. When this is done, press submit to fix this issue.", + "title": "[%key:component::nexia::issues::migrate_aux_heat::title%]" + } + } + } + } } } diff --git a/homeassistant/components/nexia/switch.py b/homeassistant/components/nexia/switch.py index 0a874ba1817..f92443517c8 100644 --- a/homeassistant/components/nexia/switch.py +++ b/homeassistant/components/nexia/switch.py @@ -25,12 +25,14 @@ async def async_setup_entry( """Set up switches for a Nexia device.""" coordinator = config_entry.runtime_data nexia_home = coordinator.nexia_home - entities: list[NexiaHoldSwitch] = [] + entities: list[NexiaHoldSwitch | NexiaEmergencyHeatSwitch] = [] for thermostat_id in nexia_home.get_thermostat_ids(): thermostat: NexiaThermostat = nexia_home.get_thermostat_by_id(thermostat_id) for zone_id in thermostat.get_zone_ids(): zone: NexiaThermostatZone = thermostat.get_zone_by_id(zone_id) entities.append(NexiaHoldSwitch(coordinator, zone)) + if thermostat.has_emergency_heat(): + entities.append(NexiaEmergencyHeatSwitch(coordinator, zone)) async_add_entities(entities) @@ -64,3 +66,31 @@ class NexiaHoldSwitch(NexiaThermostatZoneEntity, SwitchEntity): """Disable permanent hold.""" await self._zone.call_return_to_schedule() self._signal_zone_update() + + +class NexiaEmergencyHeatSwitch(NexiaThermostatZoneEntity, SwitchEntity): + """Provides Nexia emergency heat switch support.""" + + _attr_translation_key = "emergency_heat" + + def __init__( + self, coordinator: NexiaDataUpdateCoordinator, zone: NexiaThermostatZone + ) -> None: + """Initialize the emergency heat mode switch.""" + zone_id = zone.zone_id + super().__init__(coordinator, zone, zone_id) + + @property + def is_on(self) -> bool: + """Return if the zone is in hold mode.""" + return self._thermostat.is_emergency_heat_active() + + async def async_turn_on(self, **kwargs: Any) -> None: + """Enable permanent hold.""" + await self._thermostat.set_emergency_heat(True) + self._signal_thermostat_update() + + async def async_turn_off(self, **kwargs: Any) -> None: + """Disable permanent hold.""" + await self._thermostat.set_emergency_heat(False) + self._signal_thermostat_update() From c2d5696b5b5175f5ca5ee4555ed03aa975a4ae4c Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 14:24:12 +0200 Subject: [PATCH 1588/1635] Add validation to climate hvac mode (#125178) * Add validation to climate hvac mode * Make softer * Remove string --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/climate/__init__.py | 41 +++++++++++++++----- tests/components/climate/test_init.py | 30 ++++++++++++-- 2 files changed, 58 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/climate/__init__.py 
b/homeassistant/components/climate/__init__.py index f752a3dcc7a..38d8e89269a 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -175,7 +175,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SET_HVAC_MODE, {vol.Required(ATTR_HVAC_MODE): vol.Coerce(HVACMode)}, - "async_set_hvac_mode", + "async_handle_set_hvac_mode_service", ) component.async_register_entity_service( SERVICE_SET_PRESET_MODE, @@ -694,20 +694,35 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): @callback def _valid_mode_or_raise( self, - mode_type: Literal["preset", "swing", "fan"], - mode: str, - modes: list[str] | None, + mode_type: Literal["preset", "swing", "fan", "hvac"], + mode: str | HVACMode, + modes: list[str] | list[HVACMode] | None, ) -> None: """Raise ServiceValidationError on invalid modes.""" if modes and mode in modes: return modes_str: str = ", ".join(modes) if modes else "" - if mode_type == "preset": - translation_key = "not_valid_preset_mode" - elif mode_type == "swing": - translation_key = "not_valid_swing_mode" - elif mode_type == "fan": - translation_key = "not_valid_fan_mode" + translation_key = f"not_valid_{mode_type}_mode" + if mode_type == "hvac": + report_issue = async_suggest_report_issue( + self.hass, + integration_domain=self.platform.platform_name, + module=type(self).__module__, + ) + _LOGGER.warning( + ( + "%s::%s sets the hvac_mode %s which is not " + "valid for this entity with modes: %s. " + "This will stop working in 2025.3 and raise an error instead. " + "Please %s" + ), + self.platform.platform_name, + self.__class__.__name__, + mode, + modes_str, + report_issue, + ) + return raise ServiceValidationError( translation_domain=DOMAIN, translation_key=translation_key, @@ -749,6 +764,12 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_): """Set new target fan mode.""" await self.hass.async_add_executor_job(self.set_fan_mode, fan_mode) + @final + async def async_handle_set_hvac_mode_service(self, hvac_mode: HVACMode) -> None: + """Validate and set new preset mode.""" + self._valid_mode_or_raise("hvac", hvac_mode, self.hvac_modes) + await self.async_set_hvac_mode(hvac_mode) + def set_hvac_mode(self, hvac_mode: HVACMode) -> None: """Set new target hvac mode.""" raise NotImplementedError diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py index 64c94ccfc6f..6342313d1da 100644 --- a/tests/components/climate/test_init.py +++ b/tests/components/climate/test_init.py @@ -27,6 +27,7 @@ from homeassistant.components.climate.const import ( ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, SERVICE_SET_PRESET_MODE, SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, @@ -138,6 +139,10 @@ class MockClimateEntity(MockEntity, ClimateEntity): """Set swing mode.""" self._attr_swing_mode = swing_mode + def set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set new target hvac mode.""" + self._attr_hvac_mode = hvac_mode + class MockClimateEntityTestMethods(MockClimateEntity): """Mock Climate device.""" @@ -237,10 +242,12 @@ def test_deprecated_current_constants( ) -async def test_preset_mode_validation( - hass: HomeAssistant, register_test_integration: MockConfigEntry +async def test_mode_validation( + hass: HomeAssistant, + register_test_integration: MockConfigEntry, + caplog: pytest.LogCaptureFixture, ) -> None: - """Test mode validation for 
fan, swing and preset.""" + """Test mode validation for hvac_mode, fan, swing and preset.""" climate_entity = MockClimateEntity(name="test", entity_id="climate.test") setup_test_component_platform( @@ -250,6 +257,7 @@ async def test_preset_mode_validation( await hass.async_block_till_done() state = hass.states.get("climate.test") + assert state.state == "heat" assert state.attributes.get(ATTR_PRESET_MODE) == "home" assert state.attributes.get(ATTR_FAN_MODE) == "auto" assert state.attributes.get(ATTR_SWING_MODE) == "auto" @@ -286,6 +294,22 @@ async def test_preset_mode_validation( assert state.attributes.get(ATTR_FAN_MODE) == "off" assert state.attributes.get(ATTR_SWING_MODE) == "off" + await hass.services.async_call( + DOMAIN, + SERVICE_SET_HVAC_MODE, + { + "entity_id": "climate.test", + "hvac_mode": "auto", + }, + blocking=True, + ) + + assert ( + "MockClimateEntity sets the hvac_mode auto which is not valid " + "for this entity with modes: off, heat. This will stop working " + "in 2025.3 and raise an error instead. Please" in caplog.text + ) + with pytest.raises( ServiceValidationError, match="Preset mode invalid is not valid. Valid preset modes are: home, away", From a7a219b99bb4057b57c7b58ed2f28d6ab94bf0ba Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 14:24:39 +0200 Subject: [PATCH 1589/1635] Deprecate aux_heat in econet (#125365) * Deprecate aux_heat in econet * strings * Use generator --- homeassistant/components/econet/__init__.py | 1 + homeassistant/components/econet/climate.py | 21 ++++++++ homeassistant/components/econet/strings.json | 13 +++++ homeassistant/components/econet/switch.py | 57 ++++++++++++++++++++ 4 files changed, 92 insertions(+) create mode 100644 homeassistant/components/econet/switch.py diff --git a/homeassistant/components/econet/__init__.py b/homeassistant/components/econet/__init__.py index 84e636e660b..4aba79f779f 100644 --- a/homeassistant/components/econet/__init__.py +++ b/homeassistant/components/econet/__init__.py @@ -31,6 +31,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.CLIMATE, Platform.SENSOR, + Platform.SWITCH, Platform.WATER_HEATER, ] PUSH_UPDATE = "econet.push_update" diff --git a/homeassistant/components/econet/climate.py b/homeassistant/components/econet/climate.py index f6bd52c9702..1d6cefc9645 100644 --- a/homeassistant/components/econet/climate.py +++ b/homeassistant/components/econet/climate.py @@ -20,6 +20,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from . 
import EcoNetEntity from .const import DOMAIN, EQUIPMENT @@ -203,10 +204,30 @@ class EcoNetThermostat(EcoNetEntity, ClimateEntity): def turn_aux_heat_on(self) -> None: """Turn auxiliary heater on.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._econet.set_mode(ThermostatOperationMode.EMERGENCY_HEAT) def turn_aux_heat_off(self) -> None: """Turn auxiliary heater off.""" + async_create_issue( + self.hass, + DOMAIN, + "migrate_aux_heat", + breaks_in_ha_version="2025.4.0", + is_fixable=True, + is_persistent=True, + translation_key="migrate_aux_heat", + severity=IssueSeverity.WARNING, + ) self._econet.set_mode(ThermostatOperationMode.HEATING) @property diff --git a/homeassistant/components/econet/strings.json b/homeassistant/components/econet/strings.json index 6e81085a9bf..83d66dde144 100644 --- a/homeassistant/components/econet/strings.json +++ b/homeassistant/components/econet/strings.json @@ -18,5 +18,18 @@ } } } + }, + "issues": { + "migrate_aux_heat": { + "title": "Migration of EcoNet set_aux_heat action", + "fix_flow": { + "step": { + "confirm": { + "description": "The EcoNet `set_aux_heat` action has been migrated. A new `aux_heat_only` switch entity is available for each thermostat.\n\nUpdate any automations to use the new `aux_heat_only` switch entity. When this is done, Press submit to fix this issue.", + "title": "[%key:component::econet::issues::migrate_aux_heat::title%]" + } + } + } + } } } diff --git a/homeassistant/components/econet/switch.py b/homeassistant/components/econet/switch.py new file mode 100644 index 00000000000..107cd7dc586 --- /dev/null +++ b/homeassistant/components/econet/switch.py @@ -0,0 +1,57 @@ +"""Support for using switch with ecoNet thermostats.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pyeconet.equipment import EquipmentType +from pyeconet.equipment.thermostat import ThermostatOperationMode + +from homeassistant.components.switch import SwitchEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import EcoNetEntity +from .const import DOMAIN, EQUIPMENT + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the ecobee thermostat switch entity.""" + equipment = hass.data[DOMAIN][EQUIPMENT][entry.entry_id] + async_add_entities( + EcoNetSwitchAuxHeatOnly(thermostat) + for thermostat in equipment[EquipmentType.THERMOSTAT] + ) + + +class EcoNetSwitchAuxHeatOnly(EcoNetEntity, SwitchEntity): + """Representation of a aux_heat_only EcoNet switch.""" + + def __init__(self, thermostat) -> None: + """Initialize EcoNet ventilator platform.""" + super().__init__(thermostat) + self._attr_name = f"{thermostat.device_name} emergency heat" + self._attr_unique_id = ( + f"{thermostat.device_id}_{thermostat.device_name}_auxheat" + ) + + def turn_on(self, **kwargs: Any) -> None: + """Set the hvacMode to auxHeatOnly.""" + self._econet.set_mode(ThermostatOperationMode.EMERGENCY_HEAT) + + def turn_off(self, **kwargs: Any) -> None: + """Set the hvacMode back to the prior setting.""" + self._econet.set_mode(ThermostatOperationMode.HEATING) + + @property + def is_on(self) -> bool: + """Return true if auxHeatOnly mode is active.""" + return self._econet.mode == ThermostatOperationMode.EMERGENCY_HEAT From 45ab6e9b063155e6fc8025c0c12a870cad01be65 Mon Sep 17 00:00:00 2001 From: mvn23 Date: Sun, 8 Sep 2024 14:45:37 +0200 Subject: [PATCH 1590/1635] Deprecate opentherm_gw configuration through configuration.yaml (#125045) * Create an issue in the issue registry if deprecated config is found in configuration.yaml * Add deprecation comments to functions that can be removed after deprecation period * Add test for the creation of a deprecation issue Co-authored-by: Joost Lekkerkerker --- .../components/opentherm_gw/__init__.py | 14 +++++++++ .../components/opentherm_gw/config_flow.py | 1 + .../components/opentherm_gw/strings.json | 6 ++++ .../opentherm_gw/test_config_flow.py | 1 + tests/components/opentherm_gw/test_init.py | 29 ++++++++++++++++++- 5 files changed, 50 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index c7a52e3d5d3..d5dae367959 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -32,6 +32,7 @@ from homeassistant.helpers import ( config_validation as cv, device_registry as dr, entity_registry as er, + issue_registry as ir, ) from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.helpers.typing import ConfigType @@ -68,6 +69,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) +# *_SCHEMA required for deprecated import from configuration.yaml, can be removed in 2025.4.0 CLIMATE_SCHEMA = vol.Schema( { vol.Optional(CONF_PRECISION): vol.In( @@ -159,8 +161,20 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b return True +# Deprecated import from configuration.yaml, can be removed in 2025.4.0 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the OpenTherm Gateway component.""" + if DOMAIN in config: + ir.async_create_issue( + hass, + DOMAIN, + "deprecated_import_from_configuration_yaml", + breaks_in_ha_version="2025.4.0", + is_fixable=False, + is_persistent=False, + severity=ir.IssueSeverity.WARNING, + translation_key="deprecated_import_from_configuration_yaml", + ) if not 
hass.config_entries.async_entries(DOMAIN) and DOMAIN in config: conf = config[DOMAIN] for device_id, device_config in conf.items(): diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 3cf8a1c4594..1f52b47cbad 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -95,6 +95,7 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): """Handle manual initiation of the config flow.""" return await self.async_step_init(user_input) + # Deprecated import from configuration.yaml, can be removed in 2025.4.0 async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import an OpenTherm Gateway device as a config entry. diff --git a/homeassistant/components/opentherm_gw/strings.json b/homeassistant/components/opentherm_gw/strings.json index b23e1eb7687..f0573db0531 100644 --- a/homeassistant/components/opentherm_gw/strings.json +++ b/homeassistant/components/opentherm_gw/strings.json @@ -316,6 +316,12 @@ } } }, + "issues": { + "deprecated_import_from_configuration_yaml": { + "title": "Deprecated configuration", + "description": "Configuration of the OpenTherm Gateway integration through configuration.yaml is deprecated. Your configuration has been migrated to config entries. Please remove any OpenTherm Gateway configuration from your configuration.yaml." + } + }, "options": { "step": { "init": { diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index 4f4a6cfce31..57bea4e55dc 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -54,6 +54,7 @@ async def test_form_user( assert mock_pyotgw.return_value.disconnect.await_count == 1 +# Deprecated import from configuration.yaml, can be removed in 2025.4.0 async def test_form_import( hass: HomeAssistant, mock_pyotgw: MagicMock, diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index 4085e25c614..3e85afbf782 100644 --- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -4,13 +4,18 @@ from unittest.mock import MagicMock from pyotgw.vars import OTGW, OTGW_ABOUT +from homeassistant import setup from homeassistant.components.opentherm_gw.const import ( DOMAIN, OpenThermDeviceIdentifier, ) from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from .conftest import MOCK_GATEWAY_ID, VERSION_TEST @@ -148,3 +153,25 @@ async def test_climate_entity_migration( updated_entry.unique_id == f"{mock_config_entry.data[CONF_ID]}-{OpenThermDeviceIdentifier.THERMOSTAT}-thermostat_entity" ) + + +# Deprecation test, can be removed in 2025.4.0 +async def test_configuration_yaml_deprecation( + hass: HomeAssistant, + issue_registry: ir.IssueRegistry, + mock_config_entry: MockConfigEntry, + mock_pyotgw: MagicMock, +) -> None: + """Test that existing configuration in configuration.yaml creates an issue.""" + + await setup.async_setup_component( + hass, DOMAIN, {DOMAIN: {"legacy_gateway": {"device": "/dev/null"}}} + ) + + await hass.async_block_till_done() + assert ( + issue_registry.async_get_issue( + DOMAIN, "deprecated_import_from_configuration_yaml" + ) + is not 
None + ) From af62e8267fbd0a117b62dba592aebd5381a3e03f Mon Sep 17 00:00:00 2001 From: treetip Date: Sun, 8 Sep 2024 16:07:42 +0300 Subject: [PATCH 1591/1635] Add set_profile service for Vallox integration (#120225) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add set_profile service for Vallox integration * Merge profile constants, use str input for service * add service test and some related refactoring * Change service uom to 'minutes' Co-authored-by: Sebastian Lövdahl * Update icons.js format after rebase * Translate profile names for service * Fix test using wrong dict --------- Co-authored-by: Sebastian Lövdahl --- homeassistant/components/vallox/__init__.py | 33 +++++++++++++ homeassistant/components/vallox/const.py | 17 +++---- homeassistant/components/vallox/fan.py | 12 ++--- homeassistant/components/vallox/icons.json | 3 ++ homeassistant/components/vallox/sensor.py | 4 +- homeassistant/components/vallox/services.yaml | 21 ++++++++ homeassistant/components/vallox/strings.json | 25 ++++++++++ tests/components/vallox/test_init.py | 48 ++++++++++++++++++- 8 files changed, 146 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/vallox/__init__.py b/homeassistant/components/vallox/__init__.py index 292786e4c0e..09080f1a5f6 100644 --- a/homeassistant/components/vallox/__init__.py +++ b/homeassistant/components/vallox/__init__.py @@ -22,6 +22,7 @@ from .const import ( DEFAULT_FAN_SPEED_HOME, DEFAULT_NAME, DOMAIN, + I18N_KEY_TO_VALLOX_PROFILE, ) from .coordinator import ValloxDataUpdateCoordinator @@ -61,6 +62,18 @@ SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED = vol.Schema( } ) +ATTR_PROFILE = "profile" +ATTR_DURATION = "duration" + +SERVICE_SCHEMA_SET_PROFILE = vol.Schema( + { + vol.Required(ATTR_PROFILE): vol.In(I18N_KEY_TO_VALLOX_PROFILE), + vol.Optional(ATTR_DURATION): vol.All( + vol.Coerce(int), vol.Clamp(min=1, max=65535) + ), + } +) + class ServiceMethodDetails(NamedTuple): """Details for SERVICE_TO_METHOD mapping.""" @@ -72,6 +85,7 @@ class ServiceMethodDetails(NamedTuple): SERVICE_SET_PROFILE_FAN_SPEED_HOME = "set_profile_fan_speed_home" SERVICE_SET_PROFILE_FAN_SPEED_AWAY = "set_profile_fan_speed_away" SERVICE_SET_PROFILE_FAN_SPEED_BOOST = "set_profile_fan_speed_boost" +SERVICE_SET_PROFILE = "set_profile" SERVICE_TO_METHOD = { SERVICE_SET_PROFILE_FAN_SPEED_HOME: ServiceMethodDetails( @@ -86,6 +100,9 @@ SERVICE_TO_METHOD = { method="async_set_profile_fan_speed_boost", schema=SERVICE_SCHEMA_SET_PROFILE_FAN_SPEED, ), + SERVICE_SET_PROFILE: ServiceMethodDetails( + method="async_set_profile", schema=SERVICE_SCHEMA_SET_PROFILE + ), } @@ -183,6 +200,22 @@ class ValloxServiceHandler: return False return True + async def async_set_profile( + self, profile: str, duration: int | None = None + ) -> bool: + """Activate profile for given duration.""" + _LOGGER.debug("Activating profile %s for %s min", profile, duration) + try: + await self._client.set_profile( + I18N_KEY_TO_VALLOX_PROFILE[profile], duration + ) + except ValloxApiException as err: + _LOGGER.error( + "Error setting profile %d for duration %s: %s", profile, duration, err + ) + return False + return True + async def async_handle(self, call: ServiceCall) -> None: """Dispatch a service call.""" service_details = SERVICE_TO_METHOD.get(call.service) diff --git a/homeassistant/components/vallox/const.py b/homeassistant/components/vallox/const.py index a2494c594f5..418f57a22c8 100644 --- a/homeassistant/components/vallox/const.py +++ b/homeassistant/components/vallox/const.py @@ 
-22,14 +22,15 @@ DEFAULT_FAN_SPEED_HOME = 50 DEFAULT_FAN_SPEED_AWAY = 25 DEFAULT_FAN_SPEED_BOOST = 65 -VALLOX_PROFILE_TO_PRESET_MODE_SETTABLE = { - VALLOX_PROFILE.HOME: "Home", - VALLOX_PROFILE.AWAY: "Away", - VALLOX_PROFILE.BOOST: "Boost", - VALLOX_PROFILE.FIREPLACE: "Fireplace", +I18N_KEY_TO_VALLOX_PROFILE = { + "home": VALLOX_PROFILE.HOME, + "away": VALLOX_PROFILE.AWAY, + "boost": VALLOX_PROFILE.BOOST, + "fireplace": VALLOX_PROFILE.FIREPLACE, + "extra": VALLOX_PROFILE.EXTRA, } -VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE = { +VALLOX_PROFILE_TO_PRESET_MODE = { VALLOX_PROFILE.HOME: "Home", VALLOX_PROFILE.AWAY: "Away", VALLOX_PROFILE.BOOST: "Boost", @@ -37,8 +38,8 @@ VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE = { VALLOX_PROFILE.EXTRA: "Extra", } -PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE = { - value: key for (key, value) in VALLOX_PROFILE_TO_PRESET_MODE_SETTABLE.items() +PRESET_MODE_TO_VALLOX_PROFILE = { + value: key for (key, value) in VALLOX_PROFILE_TO_PRESET_MODE.items() } VALLOX_CELL_STATE_TO_STR = { diff --git a/homeassistant/components/vallox/fan.py b/homeassistant/components/vallox/fan.py index 4fe2cfd45d4..c9226110332 100644 --- a/homeassistant/components/vallox/fan.py +++ b/homeassistant/components/vallox/fan.py @@ -23,8 +23,8 @@ from .const import ( METRIC_KEY_PROFILE_FAN_SPEED_HOME, MODE_OFF, MODE_ON, - PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE, - VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE, + PRESET_MODE_TO_VALLOX_PROFILE, + VALLOX_PROFILE_TO_PRESET_MODE, ) from .coordinator import ValloxDataUpdateCoordinator @@ -97,7 +97,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): self._client = client self._attr_unique_id = str(self._device_uuid) - self._attr_preset_modes = list(PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE) + self._attr_preset_modes = list(PRESET_MODE_TO_VALLOX_PROFILE) @property def is_on(self) -> bool: @@ -108,7 +108,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): def preset_mode(self) -> str | None: """Return the current preset mode.""" vallox_profile = self.coordinator.data.profile - return VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE.get(vallox_profile) + return VALLOX_PROFILE_TO_PRESET_MODE.get(vallox_profile) @property def percentage(self) -> int | None: @@ -204,7 +204,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): return False try: - profile = PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE[preset_mode] + profile = PRESET_MODE_TO_VALLOX_PROFILE[preset_mode] await self._client.set_profile(profile) except ValloxApiException as err: @@ -220,7 +220,7 @@ class ValloxFanEntity(ValloxEntity, FanEntity): Returns true if speed has been changed, false otherwise. 
""" vallox_profile = ( - PRESET_MODE_TO_VALLOX_PROFILE_SETTABLE[preset_mode] + PRESET_MODE_TO_VALLOX_PROFILE[preset_mode] if preset_mode is not None else self.coordinator.data.profile ) diff --git a/homeassistant/components/vallox/icons.json b/homeassistant/components/vallox/icons.json index f6beb55f1da..9123d1bfe9b 100644 --- a/homeassistant/components/vallox/icons.json +++ b/homeassistant/components/vallox/icons.json @@ -45,6 +45,9 @@ }, "set_profile_fan_speed_boost": { "service": "mdi:speedometer" + }, + "set_profile": { + "service": "mdi:fan" } } } diff --git a/homeassistant/components/vallox/sensor.py b/homeassistant/components/vallox/sensor.py index 0bb509a9c5a..fb9977cefaf 100644 --- a/homeassistant/components/vallox/sensor.py +++ b/homeassistant/components/vallox/sensor.py @@ -31,7 +31,7 @@ from .const import ( METRIC_KEY_MODE, MODE_ON, VALLOX_CELL_STATE_TO_STR, - VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE, + VALLOX_PROFILE_TO_PRESET_MODE, ) from .coordinator import ValloxDataUpdateCoordinator @@ -78,7 +78,7 @@ class ValloxProfileSensor(ValloxSensorEntity): def native_value(self) -> StateType: """Return the value reported by the sensor.""" vallox_profile = self.coordinator.data.profile - return VALLOX_PROFILE_TO_PRESET_MODE_REPORTABLE.get(vallox_profile) + return VALLOX_PROFILE_TO_PRESET_MODE.get(vallox_profile) # There is a quirk with respect to the fan speed reporting. The device keeps on reporting the last diff --git a/homeassistant/components/vallox/services.yaml b/homeassistant/components/vallox/services.yaml index e6bd3edad11..f2a55032b93 100644 --- a/homeassistant/components/vallox/services.yaml +++ b/homeassistant/components/vallox/services.yaml @@ -27,3 +27,24 @@ set_profile_fan_speed_boost: min: 0 max: 100 unit_of_measurement: "%" + +set_profile: + fields: + profile: + required: true + selector: + select: + translation_key: "profile" + options: + - "home" + - "away" + - "boost" + - "fireplace" + - "extra" + duration: + required: false + selector: + number: + min: 1 + max: 65535 + unit_of_measurement: "minutes" diff --git a/homeassistant/components/vallox/strings.json b/homeassistant/components/vallox/strings.json index 4df57b81bb5..8a30ed4ad01 100644 --- a/homeassistant/components/vallox/strings.json +++ b/homeassistant/components/vallox/strings.json @@ -133,6 +133,31 @@ "description": "[%key:component::vallox::services::set_profile_fan_speed_home::fields::fan_speed::description%]" } } + }, + "set_profile": { + "name": "Activate profile for duration", + "description": "Activate a profile and optionally set duration.", + "fields": { + "profile": { + "name": "Profile", + "description": "Profile to activate" + }, + "duration": { + "name": "Duration", + "description": "Activation duration, if omitted device uses stored duration. Duration of 65535 activates profile without timeout. Duration only applies to Boost, Fireplace and Extra profiles." 
+ } + } + } + }, + "selector": { + "profile": { + "options": { + "home": "Home", + "away": "Away", + "boost": "Boost", + "fireplace": "Fireplace", + "extra": "Extra" + } } } } diff --git a/tests/components/vallox/test_init.py b/tests/components/vallox/test_init.py index 58e46acd689..4fbde7e0357 100644 --- a/tests/components/vallox/test_init.py +++ b/tests/components/vallox/test_init.py @@ -4,7 +4,11 @@ import pytest from vallox_websocket_api import Profile from homeassistant.components.vallox import ( + ATTR_DURATION, + ATTR_PROFILE, ATTR_PROFILE_FAN_SPEED, + I18N_KEY_TO_VALLOX_PROFILE, + SERVICE_SET_PROFILE, SERVICE_SET_PROFILE_FAN_SPEED_AWAY, SERVICE_SET_PROFILE_FAN_SPEED_BOOST, SERVICE_SET_PROFILE_FAN_SPEED_HOME, @@ -12,7 +16,7 @@ from homeassistant.components.vallox import ( from homeassistant.components.vallox.const import DOMAIN from homeassistant.core import HomeAssistant -from .conftest import patch_set_fan_speed +from .conftest import patch_set_fan_speed, patch_set_profile from tests.common import MockConfigEntry @@ -47,3 +51,45 @@ async def test_create_service( # Assert set_fan_speed.assert_called_once_with(profile, 30) + + +@pytest.mark.parametrize( + ("profile", "duration"), + [ + ("home", None), + ("home", 15), + ("away", None), + ("away", 15), + ("boost", None), + ("boost", 15), + ("fireplace", None), + ("fireplace", 15), + ("extra", None), + ("extra", 15), + ], +) +async def test_set_profile_service( + hass: HomeAssistant, mock_entry: MockConfigEntry, profile: str, duration: int | None +) -> None: + """Test service for setting profile and duration.""" + # Act + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + with patch_set_profile() as set_profile: + service_data = {ATTR_PROFILE: profile} | ( + {ATTR_DURATION: duration} if duration is not None else {} + ) + + await hass.services.async_call( + DOMAIN, + SERVICE_SET_PROFILE, + service_data=service_data, + ) + + await hass.async_block_till_done() + + # Assert + set_profile.assert_called_once_with( + I18N_KEY_TO_VALLOX_PROFILE[profile], duration + ) From 65b48aa90388c3240001f164bc115cf3b0d3cd42 Mon Sep 17 00:00:00 2001 From: G Johansson Date: Sun, 8 Sep 2024 15:21:53 +0200 Subject: [PATCH 1592/1635] Add config flow to Mold indicator (#122600) * Add config flow to Mold indicator * strings * Add tests * Is a helper * Add back platform yaml * Fixes * Remove wait --- .../components/mold_indicator/__init__.py | 25 ++++ .../components/mold_indicator/config_flow.py | 96 +++++++++++++++ .../components/mold_indicator/const.py | 12 ++ .../components/mold_indicator/manifest.json | 4 +- .../components/mold_indicator/sensor.py | 44 +++++-- .../components/mold_indicator/strings.json | 42 +++++++ homeassistant/generated/config_flows.py | 1 + homeassistant/generated/integrations.json | 12 +- tests/components/mold_indicator/conftest.py | 90 ++++++++++++++ .../mold_indicator/test_config_flow.py | 115 ++++++++++++++++++ tests/components/mold_indicator/test_init.py | 17 +++ .../components/mold_indicator/test_sensor.py | 12 ++ 12 files changed, 456 insertions(+), 14 deletions(-) create mode 100644 homeassistant/components/mold_indicator/config_flow.py create mode 100644 homeassistant/components/mold_indicator/const.py create mode 100644 homeassistant/components/mold_indicator/strings.json create mode 100644 tests/components/mold_indicator/conftest.py create mode 100644 tests/components/mold_indicator/test_config_flow.py create mode 100644 tests/components/mold_indicator/test_init.py diff --git 
a/homeassistant/components/mold_indicator/__init__.py b/homeassistant/components/mold_indicator/__init__.py index adadf41b2b0..c426b942af5 100644 --- a/homeassistant/components/mold_indicator/__init__.py +++ b/homeassistant/components/mold_indicator/__init__.py @@ -1 +1,26 @@ """Calculates mold growth indication from temperature and humidity.""" + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +PLATFORMS = [Platform.SENSOR] + + +async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Set up Mold indicator from a config entry.""" + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + entry.async_on_unload(entry.add_update_listener(update_listener)) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Mold indicator config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/mold_indicator/config_flow.py b/homeassistant/components/mold_indicator/config_flow.py new file mode 100644 index 00000000000..cc8f05c102d --- /dev/null +++ b/homeassistant/components/mold_indicator/config_flow.py @@ -0,0 +1,96 @@ +"""Config flow for Mold indicator.""" + +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, cast + +import voluptuous as vol + +from homeassistant.components.sensor import SensorDeviceClass +from homeassistant.const import CONF_NAME, Platform +from homeassistant.helpers.schema_config_entry_flow import ( + SchemaCommonFlowHandler, + SchemaConfigFlowHandler, + SchemaFlowFormStep, +) +from homeassistant.helpers.selector import ( + EntitySelector, + EntitySelectorConfig, + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, + TextSelector, +) + +from .const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, + DOMAIN, +) + + +async def validate_duplicate( + handler: SchemaCommonFlowHandler, user_input: dict[str, Any] +) -> dict[str, Any]: + """Validate already existing entry.""" + handler.parent_handler._async_abort_entries_match({**handler.options, **user_input}) # noqa: SLF001 + return user_input + + +DATA_SCHEMA_OPTIONS = vol.Schema( + { + vol.Required(CONF_CALIBRATION_FACTOR): NumberSelector( + NumberSelectorConfig(min=0, step="any", mode=NumberSelectorMode.BOX) + ) + } +) + +DATA_SCHEMA_CONFIG = vol.Schema( + { + vol.Required(CONF_NAME, default=DEFAULT_NAME): TextSelector(), + vol.Required(CONF_INDOOR_TEMP): EntitySelector( + EntitySelectorConfig( + domain=Platform.SENSOR, device_class=SensorDeviceClass.TEMPERATURE + ) + ), + vol.Required(CONF_INDOOR_HUMIDITY): EntitySelector( + EntitySelectorConfig( + domain=Platform.SENSOR, device_class=SensorDeviceClass.HUMIDITY + ) + ), + vol.Required(CONF_OUTDOOR_TEMP): EntitySelector( + EntitySelectorConfig( + domain=Platform.SENSOR, device_class=SensorDeviceClass.TEMPERATURE + ) + ), + } +).extend(DATA_SCHEMA_OPTIONS.schema) + + +CONFIG_FLOW = { + "user": SchemaFlowFormStep( + schema=DATA_SCHEMA_CONFIG, + validate_user_input=validate_duplicate, + ), +} +OPTIONS_FLOW = { + "init": SchemaFlowFormStep( + DATA_SCHEMA_OPTIONS, + validate_user_input=validate_duplicate, + ) +} + + +class 
MoldIndicatorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): + """Handle a config flow for Mold indicator.""" + + config_flow = CONFIG_FLOW + options_flow = OPTIONS_FLOW + + def async_config_entry_title(self, options: Mapping[str, Any]) -> str: + """Return config entry title.""" + return cast(str, options[CONF_NAME]) diff --git a/homeassistant/components/mold_indicator/const.py b/homeassistant/components/mold_indicator/const.py new file mode 100644 index 00000000000..15fdf51bce3 --- /dev/null +++ b/homeassistant/components/mold_indicator/const.py @@ -0,0 +1,12 @@ +"""Constants for Mold indicator component.""" + +from __future__ import annotations + +DOMAIN = "mold_indicator" + +CONF_CALIBRATION_FACTOR = "calibration_factor" +CONF_INDOOR_HUMIDITY = "indoor_humidity_sensor" +CONF_INDOOR_TEMP = "indoor_temp_sensor" +CONF_OUTDOOR_TEMP = "outdoor_temp_sensor" + +DEFAULT_NAME = "Mold Indicator" diff --git a/homeassistant/components/mold_indicator/manifest.json b/homeassistant/components/mold_indicator/manifest.json index 5ebccb5f92d..b57f1c471ef 100644 --- a/homeassistant/components/mold_indicator/manifest.json +++ b/homeassistant/components/mold_indicator/manifest.json @@ -2,7 +2,9 @@ "domain": "mold_indicator", "name": "Mold Indicator", "codeowners": [], + "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mold_indicator", - "iot_class": "local_polling", + "integration_type": "helper", + "iot_class": "calculated", "quality_scale": "internal" } diff --git a/homeassistant/components/mold_indicator/sensor.py b/homeassistant/components/mold_indicator/sensor.py index 2d80bc9f6e1..e96f53a17bb 100644 --- a/homeassistant/components/mold_indicator/sensor.py +++ b/homeassistant/components/mold_indicator/sensor.py @@ -15,6 +15,7 @@ from homeassistant.components.sensor import ( SensorEntity, SensorStateClass, ) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, @@ -37,17 +38,19 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateTyp from homeassistant.util.unit_conversion import TemperatureConverter from homeassistant.util.unit_system import METRIC_SYSTEM +from .const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, +) + _LOGGER = logging.getLogger(__name__) ATTR_CRITICAL_TEMP = "estimated_critical_temp" ATTR_DEWPOINT = "dewpoint" -CONF_CALIBRATION_FACTOR = "calibration_factor" -CONF_INDOOR_HUMIDITY = "indoor_humidity_sensor" -CONF_INDOOR_TEMP = "indoor_temp_sensor" -CONF_OUTDOOR_TEMP = "outdoor_temp_sensor" - -DEFAULT_NAME = "Mold Indicator" MAGNUS_K2 = 17.62 MAGNUS_K3 = 243.12 @@ -70,7 +73,7 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up MoldIndicator sensor.""" - name: str = config[CONF_NAME] + name: str = config.get(CONF_NAME, DEFAULT_NAME) indoor_temp_sensor: str = config[CONF_INDOOR_TEMP] outdoor_temp_sensor: str = config[CONF_OUTDOOR_TEMP] indoor_humidity_sensor: str = config[CONF_INDOOR_HUMIDITY] @@ -91,6 +94,33 @@ async def async_setup_platform( ) +async def async_setup_entry( + hass: HomeAssistant, + entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Mold indicator sensor entry.""" + name: str = entry.options[CONF_NAME] + indoor_temp_sensor: str = entry.options[CONF_INDOOR_TEMP] + outdoor_temp_sensor: str = entry.options[CONF_OUTDOOR_TEMP] + indoor_humidity_sensor: str = 
entry.options[CONF_INDOOR_HUMIDITY] + calib_factor: float = entry.options[CONF_CALIBRATION_FACTOR] + + async_add_entities( + [ + MoldIndicator( + name, + hass.config.units is METRIC_SYSTEM, + indoor_temp_sensor, + outdoor_temp_sensor, + indoor_humidity_sensor, + calib_factor, + ) + ], + False, + ) + + class MoldIndicator(SensorEntity): """Represents a MoldIndication sensor.""" diff --git a/homeassistant/components/mold_indicator/strings.json b/homeassistant/components/mold_indicator/strings.json new file mode 100644 index 00000000000..2e34bcc1ba1 --- /dev/null +++ b/homeassistant/components/mold_indicator/strings.json @@ -0,0 +1,42 @@ +{ + "config": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "step": { + "user": { + "description": "Add Mold indicator helper", + "data": { + "name": "[%key:common::config_flow::data::name%]", + "indoor_humidity_sensor": "Indoor humidity sensor", + "indoor_temp_sensor": "Indoor temperature sensor", + "outdoor_temp_sensor": "Outdoor temperature sensor", + "calibration_factor": "Calibration factor" + }, + "data_description": { + "name": "Name for the created entity.", + "indoor_humidity_sensor": "The entity ID of the indoor humidity sensor.", + "indoor_temp_sensor": "The entity ID of the indoor temperature sensor.", + "outdoor_temp_sensor": "The entity ID of the outdoor temperature sensor.", + "calibration_factor": "Needs to be calibrated to the critical point in the room." + } + } + } + }, + "options": { + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + }, + "step": { + "init": { + "description": "Adjust the calibration factor as required", + "data": { + "calibration_factor": "[%key:component::mold_indicator::config::step::user::data::calibration_factor%]" + }, + "data_description": { + "calibration_factor": "[%key:component::mold_indicator::config::step::user::data_description::calibration_factor%]" + } + } + } + } +} diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index f03c980a2d4..2e38d608bd9 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -12,6 +12,7 @@ FLOWS = { "history_stats", "integration", "min_max", + "mold_indicator", "random", "statistics", "switch_as_x", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index a01e20909b6..4a6be3f0a1a 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3790,12 +3790,6 @@ "config_flow": true, "iot_class": "local_push" }, - "mold_indicator": { - "name": "Mold Indicator", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "monessen": { "name": "Monessen", "integration_type": "virtual", @@ -7307,6 +7301,12 @@ "config_flow": true, "iot_class": "calculated" }, + "mold_indicator": { + "name": "Mold Indicator", + "integration_type": "helper", + "config_flow": true, + "iot_class": "calculated" + }, "random": { "name": "Random", "integration_type": "helper", diff --git a/tests/components/mold_indicator/conftest.py b/tests/components/mold_indicator/conftest.py new file mode 100644 index 00000000000..11f07e1db35 --- /dev/null +++ b/tests/components/mold_indicator/conftest.py @@ -0,0 +1,90 @@ +"""Fixtures for the Mold indicator integration.""" + +from __future__ import annotations + +from collections.abc import Generator +from typing import Any +from unittest.mock import 
AsyncMock, patch + +import pytest + +from homeassistant.components.mold_indicator.const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import ( + ATTR_UNIT_OF_MEASUREMENT, + CONF_NAME, + PERCENTAGE, + UnitOfTemperature, +) +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Automatically path mold indicator.""" + with patch( + "homeassistant.components.mold_indicator.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture(name="get_config") +async def get_config_to_integration_load() -> dict[str, Any]: + """Return configuration. + + To override the config, tests can be marked with: + @pytest.mark.parametrize("get_config", [{...}]) + """ + return { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + } + + +@pytest.fixture(name="loaded_entry") +async def load_integration( + hass: HomeAssistant, get_config: dict[str, Any] +) -> MockConfigEntry: + """Set up the Mold indicator integration in Home Assistant.""" + config_entry = MockConfigEntry( + domain=DOMAIN, + source=SOURCE_USER, + options=get_config, + entry_id="1", + title=DEFAULT_NAME, + ) + + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + hass.states.async_set( + "sensor.indoor_temp", + "10", + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.outdoor_temp", + "10", + {ATTR_UNIT_OF_MEASUREMENT: UnitOfTemperature.CELSIUS}, + ) + hass.states.async_set( + "sensor.indoor_humidity", "0", {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE} + ) + await hass.async_block_till_done() + + return config_entry diff --git a/tests/components/mold_indicator/test_config_flow.py b/tests/components/mold_indicator/test_config_flow.py new file mode 100644 index 00000000000..7a766be11f5 --- /dev/null +++ b/tests/components/mold_indicator/test_config_flow.py @@ -0,0 +1,115 @@ +"""Test the Mold indicator config flow.""" + +from __future__ import annotations + +from unittest.mock import AsyncMock + +from homeassistant.components.mold_indicator.const import ( + CONF_CALIBRATION_FACTOR, + CONF_INDOOR_HUMIDITY, + CONF_INDOOR_TEMP, + CONF_OUTDOOR_TEMP, + DEFAULT_NAME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_NAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form_sensor(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test we get the form for sensor.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + 
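The abort exercised by test_entry_already_exist further below comes from validate_duplicate in config_flow.py; conceptually the check behaves roughly like this simplified sketch (the real logic lives in the _async_abort_entries_match helper, not in a function like this):

def is_duplicate(candidate: dict, existing_option_sets: list[dict]) -> bool:
    """Very simplified stand-in for the duplicate check.

    Returns True if every key/value in the candidate already appears in
    some existing entry's stored configuration.
    """
    return any(
        all(existing.get(key) == value for key, value in candidate.items())
        for existing in existing_option_sets
    )

# Example: a second flow submitting the same three sensors and calibration
# factor as an existing entry is detected and aborted as "already_configured".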
assert result["title"] == DEFAULT_NAME + assert result["version"] == 1 + assert result["options"] == { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + } + + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options_flow(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test options flow.""" + + result = await hass.config_entries.options.async_init(loaded_entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={ + CONF_CALIBRATION_FACTOR: 3.0, + }, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 3.0, + } + + await hass.async_block_till_done() + + # Check the entity was updated, no new entity was created + # 3 input entities + resulting mold indicator sensor + assert len(hass.states.async_all()) == 4 + + state = hass.states.get("sensor.mold_indicator") + assert state is not None + + +async def test_entry_already_exist( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test abort when entry already exist.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["step_id"] == "user" + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_NAME: DEFAULT_NAME, + CONF_INDOOR_TEMP: "sensor.indoor_temp", + CONF_INDOOR_HUMIDITY: "sensor.indoor_humidity", + CONF_OUTDOOR_TEMP: "sensor.outdoor_temp", + CONF_CALIBRATION_FACTOR: 2.0, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/mold_indicator/test_init.py b/tests/components/mold_indicator/test_init.py new file mode 100644 index 00000000000..5fd6b11c8fe --- /dev/null +++ b/tests/components/mold_indicator/test_init.py @@ -0,0 +1,17 @@ +"""Test Mold indicator component setup process.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_unload_entry(hass: HomeAssistant, loaded_entry: MockConfigEntry) -> None: + """Test unload an entry.""" + + assert loaded_entry.state is ConfigEntryState.LOADED + assert await hass.config_entries.async_unload(loaded_entry.entry_id) + await hass.async_block_till_done() + assert loaded_entry.state is ConfigEntryState.NOT_LOADED diff --git a/tests/components/mold_indicator/test_sensor.py b/tests/components/mold_indicator/test_sensor.py index 2de1d34b403..bb3f7c4fc93 100644 --- a/tests/components/mold_indicator/test_sensor.py +++ b/tests/components/mold_indicator/test_sensor.py @@ -16,6 +16,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry + @pytest.fixture(autouse=True) def init_sensors_fixture(hass: HomeAssistant) -> None: @@ -52,6 +54,16 @@ async def test_setup(hass: HomeAssistant) -> None: assert 
moldind.attributes.get("unit_of_measurement") == PERCENTAGE +async def test_setup_from_config_entry( + hass: HomeAssistant, loaded_entry: MockConfigEntry +) -> None: + """Test the mold indicator sensor setup from a config entry.""" + + moldind = hass.states.get("sensor.mold_indicator") + assert moldind + assert moldind.attributes.get("unit_of_measurement") == PERCENTAGE + + async def test_invalidcalib(hass: HomeAssistant) -> None: """Test invalid sensor values.""" hass.states.async_set( From 99a50fe874254e41cdce3ad5a1c5ddba50781d52 Mon Sep 17 00:00:00 2001 From: Andrew Jackson Date: Sun, 8 Sep 2024 14:40:53 +0100 Subject: [PATCH 1593/1635] Correct Mastodon IOT class (#125511) * Correct iot class * Fix hassfest --- homeassistant/components/mastodon/manifest.json | 2 +- homeassistant/generated/integrations.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/mastodon/manifest.json b/homeassistant/components/mastodon/manifest.json index 40fd9d2f7b3..20c506e7766 100644 --- a/homeassistant/components/mastodon/manifest.json +++ b/homeassistant/components/mastodon/manifest.json @@ -5,7 +5,7 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/mastodon", "integration_type": "service", - "iot_class": "cloud_push", + "iot_class": "cloud_polling", "loggers": ["mastodon"], "requirements": ["Mastodon.py==1.8.1"] } diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 4a6be3f0a1a..1265cc842da 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3512,7 +3512,7 @@ "name": "Mastodon", "integration_type": "service", "config_flow": true, - "iot_class": "cloud_push" + "iot_class": "cloud_polling" }, "matrix": { "name": "Matrix", From aa8c4a6eb7b74b3e689a4cc7e0c83436f8b907c0 Mon Sep 17 00:00:00 2001 From: Ian Date: Sun, 8 Sep 2024 06:42:26 -0700 Subject: [PATCH 1594/1635] Add ability to play plex media as the non-primary user (#122039) * Adds ability to play media as the non-primary user * Add return type for set function --- homeassistant/components/plex/server.py | 12 +++++++++++ homeassistant/components/plex/services.py | 5 +++++ tests/components/plex/test_media_search.py | 25 ++++++++++++++++++++++ 3 files changed, 42 insertions(+) diff --git a/homeassistant/components/plex/server.py b/homeassistant/components/plex/server.py index fbb98e8e19f..0716b3606af 100644 --- a/homeassistant/components/plex/server.py +++ b/homeassistant/components/plex/server.py @@ -2,6 +2,7 @@ from __future__ import annotations +from copy import copy import logging import ssl import time @@ -664,3 +665,14 @@ class PlexServer: def sensor_attributes(self): """Return active session information for use in activity sensor.""" return {x.sensor_user: x.sensor_title for x in self.active_sessions.values()} + + def set_plex_server(self, plex_server: PlexServer) -> None: + """Set the PlexServer instance.""" + self._plex_server = plex_server + + def switch_user(self, username: str) -> PlexServer: + """Return a shallow copy of a PlexServer as the provided user.""" + new_server = copy(self) + new_server.set_plex_server(self.plex_server.switchUser(username)) + + return new_server diff --git a/homeassistant/components/plex/services.py b/homeassistant/components/plex/services.py index e0fe79be182..cbf72966413 100644 --- a/homeassistant/components/plex/services.py +++ b/homeassistant/components/plex/services.py @@ -161,6 +161,11 @@ def process_plex_payload( if not 
plex_server: plex_server = get_plex_server(hass) + if isinstance(content, dict): + if plex_user := content.pop("username", None): + _LOGGER.debug("Switching to Plex user: %s", plex_user) + plex_server = plex_server.switch_user(plex_user) + if content_type == "station": if not supports_playqueues: raise HomeAssistantError("Plex stations are not supported on this device") diff --git a/tests/components/plex/test_media_search.py b/tests/components/plex/test_media_search.py index 8219cbe27b6..04d91e8825c 100644 --- a/tests/components/plex/test_media_search.py +++ b/tests/components/plex/test_media_search.py @@ -57,6 +57,31 @@ async def test_media_lookups( ) assert "Media for key 123 not found" in str(excinfo.value) + # Search with a different specified username + with ( + patch( + "plexapi.library.LibrarySection.search", + __qualname__="search", + ) as search, + patch( + "plexapi.myplex.MyPlexAccount.user", + __qualname__="user", + ) as plex_account_user, + ): + plex_account_user.return_value.get_token.return_value = "token" + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player_id, + ATTR_MEDIA_CONTENT_TYPE: MediaType.EPISODE, + ATTR_MEDIA_CONTENT_ID: '{"library_name": "TV Shows", "show_name": "TV Show", "username": "Kids"}', + }, + True, + ) + search.assert_called_with(**{"show.title": "TV Show", "libtype": "show"}) + plex_account_user.assert_called_with("Kids") + # TV show searches with pytest.raises(MediaNotFound) as excinfo: await hass.services.async_call( From 7bab3579ecd21305f49dfb99fbc07b606a5b3387 Mon Sep 17 00:00:00 2001 From: Janusz Gregorczyk Date: Sun, 8 Sep 2024 16:50:24 +0200 Subject: [PATCH 1595/1635] Set required attribute when using Todoist Sync API reminder_add command (#122644) * Set type=absolute when using Todoist Sync API reminder_add command. 
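For reference, the command body that the calendar.py change further below assembles looks roughly like the sketch here; the ids and the due payload are placeholders, and the only functional addition is the "type": "absolute" field.

command = {
    "type": "reminder_add",
    "temp_id": "placeholder-temp-uuid",   # real code uses str(uuid.uuid1())
    "uuid": "placeholder-request-uuid",   # real code uses str(uuid.uuid1())
    "args": {
        "item_id": "placeholder-task-id",        # task the reminder attaches to
        "type": "absolute",                      # newly added, required by the Sync API
        "due": {"date": "2024-09-08T16:00:00"},  # example absolute due payload
    },
}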
This argument is required: ref.: https://developer.todoist.com/sync/v8/#add-a-reminder * Fix --------- Co-authored-by: Joost Lekkerkerker --- homeassistant/components/todoist/calendar.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index 2acd4ea6dc6..31470633cc6 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -331,7 +331,11 @@ def async_register_services( # noqa: C901 "type": "reminder_add", "temp_id": str(uuid.uuid1()), "uuid": str(uuid.uuid1()), - "args": {"item_id": api_task.id, "due": reminder_due}, + "args": { + "item_id": api_task.id, + "type": "absolute", + "due": reminder_due, + }, } ] } From 6967c7058067e4ab8368b00c1c1bd6603b52b749 Mon Sep 17 00:00:00 2001 From: Joost Lekkerkerker Date: Sun, 8 Sep 2024 17:11:19 +0200 Subject: [PATCH 1596/1635] Change Knocki integration type to hub (#124863) * Change Knocki integration type * Fix --- homeassistant/components/knocki/manifest.json | 2 +- homeassistant/generated/integrations.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/homeassistant/components/knocki/manifest.json b/homeassistant/components/knocki/manifest.json index 4195320f382..fb751d90cac 100644 --- a/homeassistant/components/knocki/manifest.json +++ b/homeassistant/components/knocki/manifest.json @@ -4,7 +4,7 @@ "codeowners": ["@joostlek", "@jgatto1", "@JakeBosh"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/knocki", - "integration_type": "device", + "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["knocki"], "requirements": ["knocki==0.3.1"] diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 1265cc842da..cd37adc3f71 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -3089,7 +3089,7 @@ }, "knocki": { "name": "Knocki", - "integration_type": "device", + "integration_type": "hub", "config_flow": true, "iot_class": "cloud_push" }, From 8d0dda652324b84ca18cff1ead9201565b072ff9 Mon Sep 17 00:00:00 2001 From: Whitney Young Date: Sun, 8 Sep 2024 08:31:58 -0700 Subject: [PATCH 1597/1635] Remove notify support for templates (#122820) --- homeassistant/components/notify/__init__.py | 16 ++------- homeassistant/components/notify/const.py | 4 +-- homeassistant/components/notify/legacy.py | 24 +++---------- tests/auth/mfa_modules/test_notify.py | 12 +++---- tests/components/notify/test_legacy.py | 38 ++------------------- 5 files changed, 15 insertions(+), 79 deletions(-) diff --git a/homeassistant/components/notify/__init__.py b/homeassistant/components/notify/__init__.py index 31c7b8e4d70..f9b0a64db3d 100644 --- a/homeassistant/components/notify/__init__.py +++ b/homeassistant/components/notify/__init__.py @@ -18,7 +18,6 @@ import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.restore_state import RestoreEntity -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType from homeassistant.util import dt as dt_util @@ -39,7 +38,6 @@ from .legacy import ( # noqa: F401 async_reload, async_reset_platform, async_setup_legacy, - check_templates_warn, ) from .repairs import migrate_notify_issue # noqa: F401 @@ -90,22 +88,14 @@ async def 
async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def persistent_notification(service: ServiceCall) -> None: """Send notification via the built-in persistent_notify integration.""" - message: Template = service.data[ATTR_MESSAGE] - check_templates_warn(hass, message) - - title = None - title_tpl: Template | None - if title_tpl := service.data.get(ATTR_TITLE): - check_templates_warn(hass, title_tpl) - title = title_tpl.async_render(parse_result=False) + message: str = service.data[ATTR_MESSAGE] + title: str | None = service.data.get(ATTR_TITLE) notification_id = None if data := service.data.get(ATTR_DATA): notification_id = data.get(pn.ATTR_NOTIFICATION_ID) - pn.async_create( - hass, message.async_render(parse_result=False), title, notification_id - ) + pn.async_create(hass, message, title, notification_id) hass.services.async_register( DOMAIN, diff --git a/homeassistant/components/notify/const.py b/homeassistant/components/notify/const.py index 6cd957e3afe..29064f24a66 100644 --- a/homeassistant/components/notify/const.py +++ b/homeassistant/components/notify/const.py @@ -30,8 +30,8 @@ SERVICE_PERSISTENT_NOTIFICATION = "persistent_notification" NOTIFY_SERVICE_SCHEMA = vol.Schema( { - vol.Required(ATTR_MESSAGE): cv.template, - vol.Optional(ATTR_TITLE): cv.template, + vol.Required(ATTR_MESSAGE): cv.string, + vol.Optional(ATTR_TITLE): cv.string, vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [cv.string]), vol.Optional(ATTR_DATA): dict, } diff --git a/homeassistant/components/notify/legacy.py b/homeassistant/components/notify/legacy.py index dcb148a99f5..a210e80242e 100644 --- a/homeassistant/components/notify/legacy.py +++ b/homeassistant/components/notify/legacy.py @@ -13,7 +13,6 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import discovery from homeassistant.helpers.service import async_set_service_schema -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.loader import async_get_integration, bind_hass from homeassistant.setup import ( @@ -155,19 +154,6 @@ def async_setup_legacy( ] -@callback -def check_templates_warn(hass: HomeAssistant, tpl: Template) -> None: - """Warn user that passing templates to notify service is deprecated.""" - if tpl.is_static or hass.data.get("notify_template_warned"): - return - - hass.data["notify_template_warned"] = True - LOGGER.warning( - "Passing templates to notify service is deprecated and will be removed in" - " 2021.12. 
Automations and scripts handle templates automatically" - ) - - @bind_hass async def async_reload(hass: HomeAssistant, integration_name: str) -> None: """Register notify services for an integration.""" @@ -255,19 +241,17 @@ class BaseNotificationService: async def _async_notify_message_service(self, service: ServiceCall) -> None: """Handle sending notification message service calls.""" kwargs = {} - message: Template = service.data[ATTR_MESSAGE] - title: Template | None + message: str = service.data[ATTR_MESSAGE] + title: str | None if title := service.data.get(ATTR_TITLE): - check_templates_warn(self.hass, title) - kwargs[ATTR_TITLE] = title.async_render(parse_result=False) + kwargs[ATTR_TITLE] = title if self.registered_targets.get(service.service) is not None: kwargs[ATTR_TARGET] = [self.registered_targets[service.service]] elif service.data.get(ATTR_TARGET) is not None: kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET) - check_templates_warn(self.hass, message) - kwargs[ATTR_MESSAGE] = message.async_render(parse_result=False) + kwargs[ATTR_MESSAGE] = message kwargs[ATTR_DATA] = service.data.get(ATTR_DATA) await self.async_send_message(**kwargs) diff --git a/tests/auth/mfa_modules/test_notify.py b/tests/auth/mfa_modules/test_notify.py index d6f4d80f99e..8047ba2fef3 100644 --- a/tests/auth/mfa_modules/test_notify.py +++ b/tests/auth/mfa_modules/test_notify.py @@ -165,8 +165,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test-notify" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE in message.async_render() + assert MOCK_CODE in message with patch("pyotp.HOTP.verify", return_value=False): result = await hass.auth.login_flow.async_configure( @@ -224,8 +223,7 @@ async def test_login_flow_validates_mfa(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test-notify" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE in message.async_render() + assert MOCK_CODE in message with patch("pyotp.HOTP.verify", return_value=True): result = await hass.auth.login_flow.async_configure( @@ -264,8 +262,7 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test1" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE in message.async_render() + assert MOCK_CODE in message with patch("pyotp.HOTP.at", return_value=MOCK_CODE_2): step = await flow.async_step_setup({"code": "invalid"}) @@ -281,8 +278,7 @@ async def test_setup_user_notify_service(hass: HomeAssistant) -> None: assert notify_call.domain == "notify" assert notify_call.service == "test1" message = notify_call.data["message"] - message.hass = hass - assert MOCK_CODE_2 in message.async_render() + assert MOCK_CODE_2 in message with patch("pyotp.HOTP.verify", return_value=True): step = await flow.async_step_setup({"code": MOCK_CODE_2}) diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index 79a1b75dcae..eeacf915b03 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -19,7 +19,7 @@ from homeassistant.helpers.reload import async_setup_reload_service from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from tests.common import MockPlatform, async_get_persistent_notifications, 
mock_platform +from tests.common import MockPlatform, mock_platform class NotificationService(notify.BaseNotificationService): @@ -186,24 +186,6 @@ async def test_remove_targets(hass: HomeAssistant) -> None: assert test.registered_targets == {"test_c": 1} -async def test_warn_template( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture -) -> None: - """Test warning when template used.""" - assert await async_setup_component(hass, "notify", {}) - - await hass.services.async_call( - "notify", - "persistent_notification", - {"message": "{{ 1 + 1 }}", "title": "Test notif {{ 1 + 1 }}"}, - blocking=True, - ) - # We should only log it once - assert caplog.text.count("Passing templates to notify service is deprecated") == 1 - notifications = async_get_persistent_notifications(hass) - assert len(notifications) == 1 - - async def test_invalid_platform( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path ) -> None: @@ -550,27 +532,11 @@ async def test_sending_none_message(hass: HomeAssistant, tmp_path: Path) -> None notify.DOMAIN, notify.SERVICE_NOTIFY, {notify.ATTR_MESSAGE: None} ) assert ( - str(exc.value) - == "template value is None for dictionary value @ data['message']" + str(exc.value) == "string value is None for dictionary value @ data['message']" ) send_message_mock.assert_not_called() -async def test_sending_templated_message(hass: HomeAssistant, tmp_path: Path) -> None: - """Send a templated message.""" - send_message_mock = await help_setup_notify(hass, tmp_path) - hass.states.async_set("sensor.temperature", 10) - data = { - notify.ATTR_MESSAGE: "{{states.sensor.temperature.state}}", - notify.ATTR_TITLE: "{{ states.sensor.temperature.name }}", - } - await hass.services.async_call(notify.DOMAIN, notify.SERVICE_NOTIFY, data) - await hass.async_block_till_done() - send_message_mock.assert_called_once_with( - "10", {"title": "temperature", "data": None} - ) - - async def test_method_forwards_correct_data( hass: HomeAssistant, tmp_path: Path ) -> None: From 2c48f9aa4cad0719999cebb625fc862d788dcc2e Mon Sep 17 00:00:00 2001 From: Pete Sage <76050312+PeteRager@users.noreply.github.com> Date: Sun, 8 Sep 2024 11:34:27 -0400 Subject: [PATCH 1598/1635] FIx Sonos announce regression issue (#125515) * initial commit * initial commit --- .../components/sonos/media_player.py | 24 +++++++++++++++---- tests/components/sonos/test_media_player.py | 21 ++++++++++++++++ 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index c4d417b0394..7711a1e88ea 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -84,6 +84,7 @@ REPEAT_TO_SONOS = { SONOS_TO_REPEAT = {meaning: mode for mode, meaning in REPEAT_TO_SONOS.items()} UPNP_ERRORS_TO_IGNORE = ["701", "711", "712"] +ANNOUNCE_NOT_SUPPORTED_ERRORS: list[str] = ["globalError"] SERVICE_SNAPSHOT = "snapshot" SERVICE_RESTORE = "restore" @@ -556,11 +557,24 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): ) from exc if response.get("success"): return - raise HomeAssistantError( - translation_domain=SONOS_DOMAIN, - translation_key="announce_media_error", - translation_placeholders={"media_id": media_id, "response": response}, - ) + if response.get("type") in ANNOUNCE_NOT_SUPPORTED_ERRORS: + # If the speaker does not support announce do not raise and + # fall through to_play_media to play the clip directly. 
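In other words, the announce handling in this hunk boils down to three outcomes; a simplified standalone sketch, not the integration code:

def classify_announce_response(response: dict) -> str:
    """Simplified sketch of the three outcomes handled in this hunk."""
    if response.get("success"):
        return "clip played via announce"
    if response.get("type") in ("globalError",):  # ANNOUNCE_NOT_SUPPORTED_ERRORS
        return "announce unsupported, fall back to normal play_media"
    return "unexpected failure, raise announce_media_error"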
+ _LOGGER.debug( + "Speaker %s does not support announce, media_id %s response %s", + self.speaker.zone_name, + media_id, + response, + ) + else: + raise HomeAssistantError( + translation_domain=SONOS_DOMAIN, + translation_key="announce_media_error", + translation_placeholders={ + "media_id": media_id, + "response": response, + }, + ) if spotify.is_spotify_media_type(media_type): media_type = spotify.resolve_spotify_media_type(media_type) diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index 9887601a0a3..63b2c8889ec 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -1163,6 +1163,27 @@ async def test_play_media_announce( ) assert sonos_websocket.play_clip.call_count == 1 + # Test speakers that do not support announce. This + # will result in playing the clip directly via play_uri + sonos_websocket.play_clip.reset_mock() + sonos_websocket.play_clip.side_effect = None + retval = {"success": 0, "type": "globalError"} + sonos_websocket.play_clip.return_value = [retval, {}] + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + soco.play_uri.assert_called_with(content_id, force_radio=False) + async def test_media_get_queue( hass: HomeAssistant, From 634582eab73f8f111d3f6d157f995160551027d9 Mon Sep 17 00:00:00 2001 From: Paulus Schoutsen Date: Sun, 8 Sep 2024 11:36:36 -0400 Subject: [PATCH 1599/1635] Ensure Linkplay model_id is always defined (#125488) Linkplay: ensure model_id always defined --- homeassistant/components/linkplay/media_player.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index af18b018403..e6ea5c5f11c 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -156,7 +156,9 @@ class LinkPlayMediaPlayerEntity(MediaPlayerEntity): ] manufacturer, model = get_info_from_project(bridge.device.properties["project"]) - if model != MANUFACTURER_GENERIC: + if model == MANUFACTURER_GENERIC: + model_id = None + else: model_id = bridge.device.properties["project"] self._attr_device_info = dr.DeviceInfo( From 54052792738e62366b2e07ad6dfe3b1d9049b0c1 Mon Sep 17 00:00:00 2001 From: David Knowles Date: Sun, 8 Sep 2024 11:39:23 -0400 Subject: [PATCH 1600/1635] Fix Schlage removed locks (#123627) * Fix bugs when a lock is no longer returned by the API * Changes requested during review * Only mark unavailable if lock is not present * Remove stale comment * Remove over-judicious nullability checks * Remove another unnecessary null check --- homeassistant/components/schlage/entity.py | 3 +- homeassistant/components/schlage/lock.py | 5 +- homeassistant/components/schlage/sensor.py | 5 +- .../components/schlage/test_binary_sensor.py | 34 +++++++++--- tests/components/schlage/test_lock.py | 54 +++++++++++++++++-- 5 files changed, 84 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/schlage/entity.py b/homeassistant/components/schlage/entity.py index 61bdbcb7730..cc4745e51cc 100644 --- a/homeassistant/components/schlage/entity.py +++ b/homeassistant/components/schlage/entity.py @@ -42,5 +42,4 @@ class 
SchlageEntity(CoordinatorEntity[SchlageDataUpdateCoordinator]): @property def available(self) -> bool: """Return if entity is available.""" - # When is_locked is None the lock is unavailable. - return super().available and self._lock.is_locked is not None + return super().available and self.device_id in self.coordinator.data.locks diff --git a/homeassistant/components/schlage/lock.py b/homeassistant/components/schlage/lock.py index 7e6f60211b0..59ce00e809a 100644 --- a/homeassistant/components/schlage/lock.py +++ b/homeassistant/components/schlage/lock.py @@ -42,8 +42,9 @@ class SchlageLockEntity(SchlageEntity, LockEntity): @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._update_attrs() - return super()._handle_coordinator_update() + if self.device_id in self.coordinator.data.locks: + self._update_attrs() + super()._handle_coordinator_update() def _update_attrs(self) -> None: """Update our internal state attributes.""" diff --git a/homeassistant/components/schlage/sensor.py b/homeassistant/components/schlage/sensor.py index 2cf1694e111..8de09fa4cbb 100644 --- a/homeassistant/components/schlage/sensor.py +++ b/homeassistant/components/schlage/sensor.py @@ -64,5 +64,6 @@ class SchlageBatterySensor(SchlageEntity, SensorEntity): @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._attr_native_value = getattr(self._lock, self.entity_description.key) - return super()._handle_coordinator_update() + if self.device_id in self.coordinator.data.locks: + self._attr_native_value = getattr(self._lock, self.entity_description.key) + super()._handle_coordinator_update() diff --git a/tests/components/schlage/test_binary_sensor.py b/tests/components/schlage/test_binary_sensor.py index 97f11577b86..dbbc5b07b87 100644 --- a/tests/components/schlage/test_binary_sensor.py +++ b/tests/components/schlage/test_binary_sensor.py @@ -3,37 +3,56 @@ from datetime import timedelta from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from pyschlage.exceptions import UnknownError from homeassistant.components.binary_sensor import BinarySensorDeviceClass from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant -from homeassistant.util.dt import utcnow from tests.common import async_fire_time_changed async def test_keypad_disabled_binary_sensor( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_schlage: Mock, + mock_lock: Mock, + mock_added_config_entry: ConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the keypad_disabled binary_sensor.""" mock_lock.keypad_disabled.reset_mock() mock_lock.keypad_disabled.return_value = True # Make the coordinator refresh data. - async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None - assert keypad.state == "on" + assert keypad.state == STATE_ON assert keypad.attributes["device_class"] == BinarySensorDeviceClass.PROBLEM mock_lock.keypad_disabled.assert_called_once_with([]) + mock_schlage.locks.return_value = [] + # Make the coordinator refresh data. 
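The membership guards added to entity.py, lock.py and sensor.py above all defend against the same situation; a simplified standalone illustration with a placeholder id, not integration code:

# Coordinator data after the account no longer returns the lock.
locks_by_device_id: dict[str, object] = {}
device_id = "placeholder-lock-id"

if device_id in locks_by_device_id:       # guard added in this commit
    lock = locks_by_device_id[device_id]  # safe: only looked up while present
else:
    # Without the guard the update path would look up an id that is no longer
    # there; with it, the entity simply reports itself unavailable instead.
    available = False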
+ freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") + assert keypad is not None + assert keypad.state == STATE_UNAVAILABLE + async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_schlage: Mock, + mock_lock: Mock, + mock_added_config_entry: ConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test the keypad_disabled binary_sensor.""" mock_lock.keypad_disabled.reset_mock() @@ -42,12 +61,13 @@ async def test_keypad_disabled_binary_sensor_use_previous_logs_on_failure( mock_lock.logs.side_effect = UnknownError("Cannot load logs") # Make the coordinator refresh data. - async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) keypad = hass.states.get("binary_sensor.vault_door_keypad_disabled") assert keypad is not None - assert keypad.state == "on" + assert keypad.state == STATE_ON assert keypad.attributes["device_class"] == BinarySensorDeviceClass.PROBLEM mock_lock.keypad_disabled.assert_called_once_with([]) diff --git a/tests/components/schlage/test_lock.py b/tests/components/schlage/test_lock.py index 6c06f124693..ab0f4f5d863 100644 --- a/tests/components/schlage/test_lock.py +++ b/tests/components/schlage/test_lock.py @@ -3,12 +3,20 @@ from datetime import timedelta from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ATTR_ENTITY_ID, SERVICE_LOCK, SERVICE_UNLOCK +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_UNLOCK, + STATE_JAMMED, + STATE_UNAVAILABLE, + STATE_UNLOCKED, +) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.util.dt import utcnow from tests.common import async_fire_time_changed @@ -26,6 +34,40 @@ async def test_lock_device_registry( assert device.manufacturer == "Schlage" +async def test_lock_attributes( + hass: HomeAssistant, + mock_added_config_entry: ConfigEntry, + mock_schlage: Mock, + mock_lock: Mock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lock attributes.""" + lock = hass.states.get("lock.vault_door") + assert lock is not None + assert lock.state == STATE_UNLOCKED + assert lock.attributes["changed_by"] == "thumbturn" + + mock_lock.is_locked = False + mock_lock.is_jammed = True + # Make the coordinator refresh data. + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + lock = hass.states.get("lock.vault_door") + assert lock is not None + assert lock.state == STATE_JAMMED + + mock_schlage.locks.return_value = [] + # Make the coordinator refresh data. 
+ freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + lock = hass.states.get("lock.vault_door") + assert lock is not None + assert lock.state == STATE_UNAVAILABLE + assert "changed_by" not in lock.attributes + + async def test_lock_services( hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry ) -> None: @@ -52,14 +94,18 @@ async def test_lock_services( async def test_changed_by( - hass: HomeAssistant, mock_lock: Mock, mock_added_config_entry: ConfigEntry + hass: HomeAssistant, + mock_lock: Mock, + mock_added_config_entry: ConfigEntry, + freezer: FrozenDateTimeFactory, ) -> None: """Test population of the changed_by attribute.""" mock_lock.last_changed_by.reset_mock() mock_lock.last_changed_by.return_value = "access code - foo" # Make the coordinator refresh data. - async_fire_time_changed(hass, utcnow() + timedelta(seconds=31)) + freezer.tick(timedelta(seconds=30)) + async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) mock_lock.last_changed_by.assert_called_once_with() From b3d6f8861fb248489af8d348b652a5c7c990b32a Mon Sep 17 00:00:00 2001 From: "Steven B." <51370195+sdb9696@users.noreply.github.com> Date: Sun, 8 Sep 2024 17:17:30 +0100 Subject: [PATCH 1601/1635] Fix ring notifications (#124879) * Enable ring event listener to fix missing notifications * Fix pylint test CI fail * Reinstate binary sensor and add deprecation issues * Add tests * Update post review * Remove PropertyMock * Update post review * Split out adding event platform --- homeassistant/components/ring/__init__.py | 53 ++++- .../components/ring/binary_sensor.py | 140 +++++++----- homeassistant/components/ring/config_flow.py | 13 +- homeassistant/components/ring/const.py | 2 +- homeassistant/components/ring/coordinator.py | 142 +++++++++++-- homeassistant/components/ring/entity.py | 95 ++++++++- homeassistant/components/ring/sensor.py | 25 ++- homeassistant/components/ring/strings.json | 15 ++ tests/components/ring/common.py | 16 ++ tests/components/ring/conftest.py | 15 +- tests/components/ring/device_mocks.py | 18 +- tests/components/ring/test_binary_sensor.py | 200 ++++++++++++++++-- tests/components/ring/test_init.py | 68 +++++- tests/components/ring/test_sensor.py | 62 ++++-- 14 files changed, 720 insertions(+), 144 deletions(-) diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index 3714802b63a..88c7467af91 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -5,18 +5,23 @@ from __future__ import annotations from dataclasses import dataclass import logging from typing import Any, cast +import uuid from ring_doorbell import Auth, Ring, RingDevices from homeassistant.config_entries import ConfigEntry -from homeassistant.const import APPLICATION_NAME, CONF_TOKEN, __version__ +from homeassistant.const import APPLICATION_NAME, CONF_TOKEN from homeassistant.core import HomeAssistant, ServiceCall, callback -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + instance_id, +) from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from .const import DOMAIN, PLATFORMS -from .coordinator import RingDataCoordinator, RingNotificationsCoordinator +from .const import 
CONF_LISTEN_CREDENTIALS, DOMAIN, PLATFORMS +from .coordinator import RingDataCoordinator, RingListenCoordinator _LOGGER = logging.getLogger(__name__) @@ -28,12 +33,26 @@ class RingData: api: Ring devices: RingDevices devices_coordinator: RingDataCoordinator - notifications_coordinator: RingNotificationsCoordinator + listen_coordinator: RingListenCoordinator type RingConfigEntry = ConfigEntry[RingData] +async def get_auth_agent_id(hass: HomeAssistant) -> tuple[str, str]: + """Return user-agent and hardware id for Auth instantiation. + + user_agent will be the display name in the ring.com authorised devices. + hardware_id will uniquely describe the authorised HA device. + """ + user_agent = f"{APPLICATION_NAME}/{DOMAIN}-integration" + + # Generate a new uuid from the instance_uuid to keep the HA one private + instance_uuid = uuid.UUID(hex=await instance_id.async_get(hass)) + hardware_id = str(uuid.uuid5(instance_uuid, user_agent)) + return user_agent, hardware_id + + async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool: """Set up a config entry.""" @@ -44,26 +63,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool data={**entry.data, CONF_TOKEN: token}, ) + def listen_credentials_updater(token: dict[str, Any]) -> None: + """Handle from async context when token is updated.""" + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_LISTEN_CREDENTIALS: token}, + ) + + user_agent, hardware_id = await get_auth_agent_id(hass) + client_session = async_get_clientsession(hass) auth = Auth( - f"{APPLICATION_NAME}/{__version__}", + user_agent, entry.data[CONF_TOKEN], token_updater, - http_client_session=async_get_clientsession(hass), + hardware_id=hardware_id, + http_client_session=client_session, ) ring = Ring(auth) await _migrate_old_unique_ids(hass, entry.entry_id) devices_coordinator = RingDataCoordinator(hass, ring) - notifications_coordinator = RingNotificationsCoordinator(hass, ring) + listen_credentials = entry.data.get(CONF_LISTEN_CREDENTIALS) + listen_coordinator = RingListenCoordinator( + hass, ring, listen_credentials, listen_credentials_updater + ) + await devices_coordinator.async_config_entry_first_refresh() - await notifications_coordinator.async_config_entry_first_refresh() entry.runtime_data = RingData( api=ring, devices=ring.devices(), devices_coordinator=devices_coordinator, - notifications_coordinator=notifications_coordinator, + listen_coordinator=listen_coordinator, ) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -91,7 +123,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: RingConfigEntry) -> bool ) for loaded_entry in hass.config_entries.async_loaded_entries(DOMAIN): await loaded_entry.runtime_data.devices_coordinator.async_refresh() - await loaded_entry.runtime_data.notifications_coordinator.async_refresh() # register service hass.services.async_register(DOMAIN, "update", async_refresh_all) diff --git a/homeassistant/components/ring/binary_sensor.py b/homeassistant/components/ring/binary_sensor.py index 2fb557ddde0..85a916e95cd 100644 --- a/homeassistant/components/ring/binary_sensor.py +++ b/homeassistant/components/ring/binary_sensor.py @@ -2,46 +2,62 @@ from __future__ import annotations -from collections.abc import Callable, Mapping +from collections.abc import Mapping from dataclasses import dataclass from datetime import datetime -from typing import Any +from typing import Any, Generic -from ring_doorbell import Ring, RingEvent, RingGeneric +from 
ring_doorbell import RingCapability, RingEvent +from ring_doorbell.const import KIND_DING, KIND_MOTION from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.core import HomeAssistant, callback +from homeassistant.const import Platform +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_at from . import RingConfigEntry -from .coordinator import RingNotificationsCoordinator -from .entity import RingBaseEntity +from .coordinator import RingListenCoordinator +from .entity import ( + DeprecatedInfo, + RingBaseEntity, + RingDeviceT, + RingEntityDescription, + async_check_create_deprecated, +) @dataclass(frozen=True, kw_only=True) -class RingBinarySensorEntityDescription(BinarySensorEntityDescription): +class RingBinarySensorEntityDescription( + BinarySensorEntityDescription, RingEntityDescription, Generic[RingDeviceT] +): """Describes Ring binary sensor entity.""" - exists_fn: Callable[[RingGeneric], bool] + capability: RingCapability BINARY_SENSOR_TYPES: tuple[RingBinarySensorEntityDescription, ...] = ( RingBinarySensorEntityDescription( - key="ding", - translation_key="ding", + key=KIND_DING, + translation_key=KIND_DING, device_class=BinarySensorDeviceClass.OCCUPANCY, - exists_fn=lambda device: device.family - in {"doorbots", "authorized_doorbots", "other"}, + capability=RingCapability.DING, + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), RingBinarySensorEntityDescription( - key="motion", + key=KIND_MOTION, + translation_key=KIND_MOTION, device_class=BinarySensorDeviceClass.MOTION, - exists_fn=lambda device: device.family - in {"doorbots", "authorized_doorbots", "stickup_cams"}, + capability=RingCapability.MOTION_DETECTION, + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), ) @@ -53,70 +69,84 @@ async def async_setup_entry( ) -> None: """Set up the Ring binary sensors from a config entry.""" ring_data = entry.runtime_data + listen_coordinator = ring_data.listen_coordinator - entities = [ - RingBinarySensor( - ring_data.api, - device, - ring_data.notifications_coordinator, - description, - ) + async_add_entities( + RingBinarySensor(device, listen_coordinator, description) for description in BINARY_SENSOR_TYPES for device in ring_data.devices.all_devices - if description.exists_fn(device) - ] - - async_add_entities(entities) + if device.has_capability(description.capability) + and async_check_create_deprecated( + hass, + Platform.BINARY_SENSOR, + f"{device.id}-{description.key}", + description, + ) + ) class RingBinarySensor( - RingBaseEntity[RingNotificationsCoordinator], BinarySensorEntity + RingBaseEntity[RingListenCoordinator, RingDeviceT], BinarySensorEntity ): """A binary sensor implementation for Ring device.""" _active_alert: RingEvent | None = None - entity_description: RingBinarySensorEntityDescription + RingBinarySensorEntityDescription[RingDeviceT] def __init__( self, - ring: Ring, - device: RingGeneric, - coordinator: RingNotificationsCoordinator, - description: RingBinarySensorEntityDescription, + device: RingDeviceT, + coordinator: RingListenCoordinator, + description: RingBinarySensorEntityDescription[RingDeviceT], ) -> None: - """Initialize a sensor for Ring device.""" + """Initialize a binary sensor for Ring device.""" 
super().__init__( device, coordinator, ) self.entity_description = description - self._ring = ring self._attr_unique_id = f"{device.id}-{description.key}" - self._update_alert() + self._attr_is_on = False + self._active_alert: RingEvent | None = None + self._cancel_callback: CALLBACK_TYPE | None = None @callback - def _handle_coordinator_update(self, _: Any = None) -> None: - """Call update method.""" - self._update_alert() - super()._handle_coordinator_update() + def _async_handle_event(self, alert: RingEvent) -> None: + """Handle the event.""" + self._attr_is_on = True + self._active_alert = alert + loop = self.hass.loop + when = loop.time() + alert.expires_in + if self._cancel_callback: + self._cancel_callback() + self._cancel_callback = async_call_at(self.hass, self._async_cancel_event, when) @callback - def _update_alert(self) -> None: - """Update active alert.""" - self._active_alert = next( - ( - alert - for alert in self._ring.active_alerts() - if alert["kind"] == self.entity_description.key - and alert["doorbot_id"] == self._device.id - ), - None, + def _async_cancel_event(self, _now: Any) -> None: + """Clear the event.""" + self._cancel_callback = None + self._attr_is_on = False + self._active_alert = None + self.async_write_ha_state() + + def _get_coordinator_alert(self) -> RingEvent | None: + return self.coordinator.alerts.get( + (self._device.device_api_id, self.entity_description.key) ) + @callback + def _handle_coordinator_update(self) -> None: + if alert := self._get_coordinator_alert(): + self._async_handle_event(alert) + super()._handle_coordinator_update() + @property - def is_on(self) -> bool: - """Return True if the binary sensor is on.""" - return self._active_alert is not None + def available(self) -> bool: + """Return if entity is available.""" + return self.coordinator.event_listener.started + + async def async_update(self) -> None: + """All updates are passive.""" @property def extra_state_attributes(self) -> Mapping[str, Any] | None: @@ -127,9 +157,9 @@ class RingBinarySensor( return attrs assert isinstance(attrs, dict) - attrs["state"] = self._active_alert["state"] - now = self._active_alert.get("now") - expires_in = self._active_alert.get("expires_in") + attrs["state"] = self._active_alert.state + now = self._active_alert.now + expires_in = self._active_alert.expires_in assert now and expires_in attrs["expires_at"] = datetime.fromtimestamp(now + expires_in).isoformat() diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index 74546567270..8b933e8580d 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -8,17 +8,12 @@ from ring_doorbell import Auth, AuthenticationError, Requires2FAError import voluptuous as vol from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult -from homeassistant.const import ( - APPLICATION_NAME, - CONF_PASSWORD, - CONF_TOKEN, - CONF_USERNAME, - __version__ as ha_version, -) +from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.aiohttp_client import async_get_clientsession +from . 
import get_auth_agent_id from .const import CONF_2FA, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -32,9 +27,11 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, Any]: """Validate the user input allows us to connect.""" + user_agent, hardware_id = await get_auth_agent_id(hass) auth = Auth( - f"{APPLICATION_NAME}/{ha_version}", + user_agent, http_client_session=async_get_clientsession(hass), + hardware_id=hardware_id, ) try: diff --git a/homeassistant/components/ring/const.py b/homeassistant/components/ring/const.py index 70813a78c76..c67adbf5984 100644 --- a/homeassistant/components/ring/const.py +++ b/homeassistant/components/ring/const.py @@ -26,6 +26,6 @@ PLATFORMS = [ SCAN_INTERVAL = timedelta(minutes=1) -NOTIFICATIONS_SCAN_INTERVAL = timedelta(seconds=5) CONF_2FA = "2fa" +CONF_LISTEN_CREDENTIALS = "listen_token" diff --git a/homeassistant/components/ring/coordinator.py b/homeassistant/components/ring/coordinator.py index 600743005eb..b143fd3dda0 100644 --- a/homeassistant/components/ring/coordinator.py +++ b/homeassistant/components/ring/coordinator.py @@ -3,15 +3,28 @@ from asyncio import TaskGroup from collections.abc import Callable, Coroutine import logging -from typing import Any +from typing import TYPE_CHECKING, Any -from ring_doorbell import AuthenticationError, Ring, RingDevices, RingError, RingTimeout +from ring_doorbell import ( + AuthenticationError, + Ring, + RingDevices, + RingError, + RingEvent, + RingTimeout, +) +from ring_doorbell.listen import RingEventListener -from homeassistant.core import HomeAssistant +from homeassistant import config_entries +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import ( + BaseDataUpdateCoordinatorProtocol, + DataUpdateCoordinator, + UpdateFailed, +) -from .const import NOTIFICATIONS_SCAN_INTERVAL, SCAN_INTERVAL +from .const import SCAN_INTERVAL _LOGGER = logging.getLogger(__name__) @@ -91,19 +104,112 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): return devices -class RingNotificationsCoordinator(DataUpdateCoordinator[None]): +class RingListenCoordinator(BaseDataUpdateCoordinatorProtocol): """Global notifications coordinator.""" - def __init__(self, hass: HomeAssistant, ring_api: Ring) -> None: - """Initialize my coordinator.""" - super().__init__( - hass, - logger=_LOGGER, - name="active dings", - update_interval=NOTIFICATIONS_SCAN_INTERVAL, - ) - self.ring_api: Ring = ring_api + config_entry: config_entries.ConfigEntry - async def _async_update_data(self) -> None: - """Fetch data from API endpoint.""" - await _call_api(self.hass, self.ring_api.async_update_dings) + def __init__( + self, + hass: HomeAssistant, + ring_api: Ring, + listen_credentials: dict[str, Any] | None, + listen_credentials_updater: Callable[[dict[str, Any]], None], + ) -> None: + """Initialize my coordinator.""" + self.hass = hass + self.logger = _LOGGER + self.ring_api: Ring = ring_api + self.event_listener = RingEventListener( + ring_api, listen_credentials, listen_credentials_updater + ) + self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {} + self._listen_callback_id: int | None = None + + config_entry = config_entries.current_entry.get() + if TYPE_CHECKING: + assert config_entry + 
self.config_entry = config_entry + self.start_timeout = 10 + self.config_entry.async_on_unload(self.async_shutdown) + self.index_alerts() + + def index_alerts(self) -> None: + "Index the active alerts." + self.alerts = { + (alert.doorbot_id, alert.kind): alert + for alert in self.ring_api.active_alerts() + } + + async def async_shutdown(self) -> None: + """Cancel any scheduled call, and ignore new runs.""" + if self.event_listener.started: + await self._async_stop_listen() + + async def _async_stop_listen(self) -> None: + self.logger.debug("Stopped ring listener") + await self.event_listener.stop() + self.logger.debug("Stopped ring listener") + + async def _async_start_listen(self) -> None: + """Start listening for realtime events.""" + self.logger.debug("Starting ring listener.") + await self.event_listener.start( + timeout=self.start_timeout, + ) + if self.event_listener.started is True: + self.logger.debug("Started ring listener") + else: + self.logger.warning( + "Ring event listener failed to start after %s seconds", + self.start_timeout, + ) + self._listen_callback_id = self.event_listener.add_notification_callback( + self._on_event + ) + self.index_alerts() + # Update the listeners so they switch from Unavailable to Unknown + self._async_update_listeners() + + def _on_event(self, event: RingEvent) -> None: + self.logger.debug("Ring event received: %s", event) + self.index_alerts() + self._async_update_listeners(event.doorbot_id) + + @callback + def _async_update_listeners(self, doorbot_id: int | None = None) -> None: + """Update all registered listeners.""" + for update_callback, device_api_id in list(self._listeners.values()): + if not doorbot_id or device_api_id == doorbot_id: + update_callback() + + @callback + def async_add_listener( + self, update_callback: CALLBACK_TYPE, context: Any = None + ) -> Callable[[], None]: + """Listen for data updates.""" + start_listen = not self._listeners + + @callback + def remove_listener() -> None: + """Remove update listener.""" + self._listeners.pop(remove_listener) + if not self._listeners: + self.config_entry.async_create_task( + self.hass, + self._async_stop_listen(), + "Ring event listener stop", + eager_start=True, + ) + + self._listeners[remove_listener] = (update_callback, context) + + # This is the first listener, start the event listener. 
+ if start_listen: + self.config_entry.async_create_task( + self.hass, + self._async_start_listen(), + "Ring event listener start", + eager_start=True, + ) + return remove_listener diff --git a/homeassistant/components/ring/entity.py b/homeassistant/components/ring/entity.py index 72deb09b76f..0d050e7697f 100644 --- a/homeassistant/components/ring/entity.py +++ b/homeassistant/components/ring/entity.py @@ -1,6 +1,7 @@ """Base class for Ring entity.""" from collections.abc import Callable, Coroutine +from dataclasses import dataclass from typing import Any, Concatenate, Generic, cast from ring_doorbell import ( @@ -12,22 +13,46 @@ from ring_doorbell import ( ) from typing_extensions import TypeVar -from homeassistant.core import callback +from homeassistant.components.automation import automations_with_entity +from homeassistant.components.script import scripts_with_entity +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import CoordinatorEntity +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue +from homeassistant.helpers.update_coordinator import ( + BaseCoordinatorEntity, + CoordinatorEntity, +) from .const import ATTRIBUTION, DOMAIN -from .coordinator import RingDataCoordinator, RingNotificationsCoordinator +from .coordinator import RingDataCoordinator, RingListenCoordinator RingDeviceT = TypeVar("RingDeviceT", bound=RingGeneric, default=RingGeneric) _RingCoordinatorT = TypeVar( "_RingCoordinatorT", - bound=(RingDataCoordinator | RingNotificationsCoordinator), + bound=(RingDataCoordinator | RingListenCoordinator), ) +@dataclass(slots=True) +class DeprecatedInfo: + """Class to define deprecation info for deprecated entities.""" + + new_platform: Platform + breaks_in_ha_version: str + + +@dataclass(frozen=True, kw_only=True) +class RingEntityDescription(EntityDescription): + """Base class for a ring entity description.""" + + deprecated_info: DeprecatedInfo | None = None + + def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R]( async_func: Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]], ) -> Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]]: @@ -51,8 +76,66 @@ def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R]( return _wrap +def async_check_create_deprecated( + hass: HomeAssistant, + platform: Platform, + unique_id: str, + entity_description: RingEntityDescription, +) -> bool: + """Return true if the entitty should be created based on the deprecated_info. + + If deprecated_info is not defined will return true. + If entity not yet created will return false. + If entity disabled will delete it and return false. + Otherwise will return true and create issues for scripts or automations. + """ + if not entity_description.deprecated_info: + return True + + ent_reg = er.async_get(hass) + entity_id = ent_reg.async_get_entity_id( + platform, + DOMAIN, + unique_id, + ) + if not entity_id: + return False + + entity_entry = ent_reg.async_get(entity_id) + assert entity_entry + if entity_entry.disabled: + # If the entity exists and is disabled then we want to remove + # the entity so that the user is just using the new entity. 
+ ent_reg.async_remove(entity_id) + return False + + # Check for issues that need to be created + entity_automations = automations_with_entity(hass, entity_id) + entity_scripts = scripts_with_entity(hass, entity_id) + if entity_automations or entity_scripts: + deprecated_info = entity_description.deprecated_info + for item in entity_automations + entity_scripts: + async_create_issue( + hass, + DOMAIN, + f"deprecated_entity_{entity_id}_{item}", + breaks_in_ha_version=deprecated_info.breaks_in_ha_version, + is_fixable=False, + is_persistent=False, + severity=IssueSeverity.WARNING, + translation_key="deprecated_entity", + translation_placeholders={ + "entity": entity_id, + "info": item, + "platform": platform, + "new_platform": deprecated_info.new_platform, + }, + ) + return True + + class RingBaseEntity( - CoordinatorEntity[_RingCoordinatorT], Generic[_RingCoordinatorT, RingDeviceT] + BaseCoordinatorEntity[_RingCoordinatorT], Generic[_RingCoordinatorT, RingDeviceT] ): """Base implementation for Ring device.""" @@ -77,7 +160,7 @@ class RingBaseEntity( ) -class RingEntity(RingBaseEntity[RingDataCoordinator, RingDeviceT]): +class RingEntity(RingBaseEntity[RingDataCoordinator, RingDeviceT], CoordinatorEntity): """Implementation for Ring devices.""" def _get_coordinator_data(self) -> RingDevices: diff --git a/homeassistant/components/ring/sensor.py b/homeassistant/components/ring/sensor.py index 83d07dbd9b4..219f1b0224c 100644 --- a/homeassistant/components/ring/sensor.py +++ b/homeassistant/components/ring/sensor.py @@ -25,6 +25,7 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -32,7 +33,13 @@ from homeassistant.helpers.typing import StateType from . import RingConfigEntry from .coordinator import RingDataCoordinator -from .entity import RingDeviceT, RingEntity +from .entity import ( + DeprecatedInfo, + RingDeviceT, + RingEntity, + RingEntityDescription, + async_check_create_deprecated, +) async def async_setup_entry( @@ -49,6 +56,12 @@ async def async_setup_entry( for description in SENSOR_TYPES for device in ring_data.devices.all_devices if description.exists_fn(device) + and async_check_create_deprecated( + hass, + Platform.SENSOR, + f"{device.id}-{description.key}", + description, + ) ] async_add_entities(entities) @@ -120,7 +133,9 @@ def _get_last_event_attrs( @dataclass(frozen=True, kw_only=True) -class RingSensorEntityDescription(SensorEntityDescription, Generic[RingDeviceT]): +class RingSensorEntityDescription( + SensorEntityDescription, RingEntityDescription, Generic[RingDeviceT] +): """Describes Ring sensor entity.""" value_fn: Callable[[RingDeviceT], StateType] = lambda _: True @@ -172,6 +187,9 @@ SENSOR_TYPES: tuple[RingSensorEntityDescription[Any], ...] = ( ) else None, exists_fn=lambda device: device.has_capability(RingCapability.HISTORY), + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingGeneric]( key="last_motion", @@ -188,6 +206,9 @@ SENSOR_TYPES: tuple[RingSensorEntityDescription[Any], ...] 
= ( ) else None, exists_fn=lambda device: device.has_capability(RingCapability.HISTORY), + deprecated_info=DeprecatedInfo( + new_platform=Platform.EVENT, breaks_in_ha_version="2025.4.0" + ), ), RingSensorEntityDescription[RingDoorBell | RingChime]( key="volume", diff --git a/homeassistant/components/ring/strings.json b/homeassistant/components/ring/strings.json index 6bd7d194136..80598eab314 100644 --- a/homeassistant/components/ring/strings.json +++ b/homeassistant/components/ring/strings.json @@ -35,6 +35,17 @@ "binary_sensor": { "ding": { "name": "Ding" + }, + "motion": { + "name": "Motion" + } + }, + "event": { + "ding": { + "name": "Ding" + }, + "intercom_unlock": { + "name": "Intercom unlock" } }, "button": { @@ -104,6 +115,10 @@ } } } + }, + "deprecated_entity": { + "title": "Detected deprecated `{platform}` entity usage", + "description": "We detected that entity `{entity}` is being used in `{info}`\n\nWe have created a new `{new_platform}` entity and you should migrate `{info}` to use this new entity.\n\nWhen you are done migrating `{info}` and are ready to have the deprecated `{entity}` entity removed, disable the entity and restart Home Assistant." } } } diff --git a/tests/components/ring/common.py b/tests/components/ring/common.py index 3b78adf0e09..71274fe1ee1 100644 --- a/tests/components/ring/common.py +++ b/tests/components/ring/common.py @@ -2,6 +2,7 @@ from unittest.mock import patch +from homeassistant.components.automation import DOMAIN as AUTOMATION_DOMAIN from homeassistant.components.ring import DOMAIN from homeassistant.const import Platform from homeassistant.core import HomeAssistant @@ -18,3 +19,18 @@ async def setup_platform(hass: HomeAssistant, platform: Platform) -> None: with patch("homeassistant.components.ring.PLATFORMS", [platform]): assert await async_setup_component(hass, DOMAIN, {}) await hass.async_block_till_done(wait_background_tasks=True) + + +async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None: + """Set up an automation for tests.""" + assert await async_setup_component( + hass, + AUTOMATION_DOMAIN, + { + AUTOMATION_DOMAIN: { + "alias": alias, + "trigger": {"platform": "state", "entity_id": entity_id, "to": "on"}, + "action": {"action": "notify.notify", "metadata": {}, "data": {}}, + } + }, + ) diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index 4456a9daa26..90f2fd2a956 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -11,7 +11,7 @@ from homeassistant.components.ring import DOMAIN from homeassistant.const import CONF_USERNAME from homeassistant.core import HomeAssistant -from .device_mocks import get_active_alerts, get_devices_data, get_mock_devices +from .device_mocks import get_devices_data, get_mock_devices from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 @@ -103,7 +103,7 @@ def mock_ring_client(mock_ring_auth, mock_ring_devices): mock_client = create_autospec(ring_doorbell.Ring) mock_client.return_value.devices_data = get_devices_data() mock_client.return_value.devices.return_value = mock_ring_devices - mock_client.return_value.active_alerts.side_effect = get_active_alerts + mock_client.return_value.active_alerts.return_value = [] with patch("homeassistant.components.ring.Ring", new=mock_client): yield mock_client.return_value @@ -135,3 +135,14 @@ async def mock_added_config_entry( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await 
hass.async_block_till_done() return mock_config_entry + + +@pytest.fixture(autouse=True) +def mock_ring_event_listener_class(): + """Fixture to mock the ring event listener.""" + + with patch( + "homeassistant.components.ring.coordinator.RingEventListener", autospec=True + ) as mock_ring_listener: + mock_ring_listener.return_value.started = True + yield mock_ring_listener diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index d2671c3896d..8ac5948d6a0 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -7,9 +7,7 @@ Each device entry in the devices.json will have a MagicMock instead of the RingO Mocks the api calls on the devices such as history() and health(). """ -from copy import deepcopy from datetime import datetime -from time import time from unittest.mock import AsyncMock, MagicMock from ring_doorbell import ( @@ -30,7 +28,10 @@ DOORBOT_HISTORY = load_json_value_fixture("doorbot_history.json", DOMAIN) INTERCOM_HISTORY = load_json_value_fixture("intercom_history.json", DOMAIN) DOORBOT_HEALTH = load_json_value_fixture("doorbot_health_attrs.json", DOMAIN) CHIME_HEALTH = load_json_value_fixture("chime_health_attrs.json", DOMAIN) -DEVICE_ALERTS = load_json_value_fixture("ding_active.json", DOMAIN) + +FRONT_DOOR_DEVICE_ID = 987654 +INGRESS_DEVICE_ID = 185036587 +FRONT_DEVICE_ID = 765432 def get_mock_devices(): @@ -54,14 +55,6 @@ def get_devices_data(): } -def get_active_alerts(): - """Return active alerts set to now.""" - dings_fixture = deepcopy(DEVICE_ALERTS) - for ding in dings_fixture: - ding["now"] = time() - return dings_fixture - - DEVICE_TYPES = { "doorbots": RingDoorBell, "authorized_doorbots": RingDoorBell, @@ -76,6 +69,7 @@ DEVICE_CAPABILITIES = { RingCapability.VOLUME, RingCapability.MOTION_DETECTION, RingCapability.VIDEO, + RingCapability.DING, RingCapability.HISTORY, ], RingStickUpCam: [ @@ -88,7 +82,7 @@ DEVICE_CAPABILITIES = { RingCapability.LIGHT, ], RingChime: [RingCapability.VOLUME], - RingOther: [RingCapability.OPEN, RingCapability.HISTORY], + RingOther: [RingCapability.OPEN, RingCapability.HISTORY, RingCapability.DING], } diff --git a/tests/components/ring/test_binary_sensor.py b/tests/components/ring/test_binary_sensor.py index 16bc6e872c1..6a4ce652573 100644 --- a/tests/components/ring/test_binary_sensor.py +++ b/tests/components/ring/test_binary_sensor.py @@ -1,24 +1,196 @@ """The tests for the Ring binary sensor platform.""" -from homeassistant.const import Platform +import time +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +import pytest +from ring_doorbell import Ring + +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN +from homeassistant.components.ring.binary_sensor import RingEvent +from homeassistant.components.ring.const import DOMAIN +from homeassistant.components.ring.coordinator import RingEventListener +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import STATE_OFF, STATE_ON, Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er, issue_registry as ir +from homeassistant.setup import async_setup_component -from .common import setup_platform +from .common import setup_automation +from .device_mocks import FRONT_DOOR_DEVICE_ID, INGRESS_DEVICE_ID + +from tests.common import async_fire_time_changed -async def test_binary_sensor(hass: HomeAssistant, mock_ring_client) -> None: +@pytest.mark.parametrize( + 
("device_id", "device_name", "alert_kind", "device_class"), + [ + pytest.param( + FRONT_DOOR_DEVICE_ID, + "front_door", + "motion", + "motion", + id="front_door_motion", + ), + pytest.param( + FRONT_DOOR_DEVICE_ID, + "front_door", + "ding", + "occupancy", + id="front_door_ding", + ), + pytest.param( + INGRESS_DEVICE_ID, "ingress", "ding", "occupancy", id="ingress_ding" + ), + ], +) +async def test_binary_sensor( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + mock_ring_client: Ring, + mock_ring_event_listener_class: RingEventListener, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + device_id: int, + device_name: str, + alert_kind: str, + device_class: str, +) -> None: """Test the Ring binary sensors.""" - await setup_platform(hass, Platform.BINARY_SENSOR) + # Create the entity so it is not ignored by the deprecation check + mock_config_entry.add_to_hass(hass) - motion_state = hass.states.get("binary_sensor.front_door_motion") - assert motion_state is not None - assert motion_state.state == "on" - assert motion_state.attributes["device_class"] == "motion" + entity_id = f"binary_sensor.{device_name}_{alert_kind}" + unique_id = f"{device_id}-{alert_kind}" - front_ding_state = hass.states.get("binary_sensor.front_door_ding") - assert front_ding_state is not None - assert front_ding_state.state == "off" + entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{alert_kind}", + config_entry=mock_config_entry, + ) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) - ingress_ding_state = hass.states.get("binary_sensor.ingress_ding") - assert ingress_ding_state is not None - assert ingress_ding_state.state == "off" + on_event_cb = mock_ring_event_listener_class.return_value.add_notification_callback.call_args.args[ + 0 + ] + + # Default state is set to off + + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OFF + assert state.attributes["device_class"] == device_class + + # A new alert sets to on + event = RingEvent( + 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None + ) + mock_ring_client.active_alerts.return_value = [event] + on_event_cb(event) + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_ON + + # Test that another event resets the expiry callback + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done() + event = RingEvent( + 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None + ) + mock_ring_client.active_alerts.return_value = [event] + on_event_cb(event) + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_ON + + freezer.tick(120) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_ON + + # Test the second alert has expired + freezer.tick(60) + async_fire_time_changed(hass) + await hass.async_block_till_done() + state = hass.states.get(entity_id) + assert state is not None + assert state.state == STATE_OFF + + +async def test_binary_sensor_not_exists_with_deprecation( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + mock_ring_client: Ring, + entity_registry: er.EntityRegistry, +) -> None: + """Test the deprecated Ring binary sensors are deleted 
or raise issues.""" + mock_config_entry.add_to_hass(hass) + + entity_id = "binary_sensor.front_door_motion" + + assert not hass.states.get(entity_id) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) + + assert not entity_registry.async_get(entity_id) + assert not er.async_entries_for_config_entry( + entity_registry, mock_config_entry.entry_id + ) + assert not hass.states.get(entity_id) + + +@pytest.mark.parametrize( + ("entity_disabled", "entity_has_automations"), + [ + pytest.param(False, False, id="without-automations"), + pytest.param(False, True, id="with-automations"), + pytest.param(True, False, id="disabled"), + ], +) +async def test_binary_sensor_exists_with_deprecation( + hass: HomeAssistant, + mock_config_entry: ConfigEntry, + mock_ring_client: Ring, + entity_registry: er.EntityRegistry, + issue_registry: ir.IssueRegistry, + entity_disabled: bool, + entity_has_automations: bool, +) -> None: + """Test the deprecated Ring binary sensors are deleted or raise issues.""" + mock_config_entry.add_to_hass(hass) + + entity_id = "binary_sensor.front_door_motion" + unique_id = f"{FRONT_DOOR_DEVICE_ID}-motion" + issue_id = f"deprecated_entity_{entity_id}_automation.test_automation" + + if entity_has_automations: + await setup_automation(hass, "test_automation", entity_id) + + entity = entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id="front_door_motion", + config_entry=mock_config_entry, + disabled_by=er.RegistryEntryDisabler.USER if entity_disabled else None, + ) + assert entity.entity_id == entity_id + assert not hass.states.get(entity_id) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) + + entity = entity_registry.async_get(entity_id) + # entity and state will be none if removed from registry + assert (entity is None) == entity_disabled + assert (hass.states.get(entity_id) is None) == entity_disabled + + assert ( + issue_registry.async_get_issue(DOMAIN, issue_id) is not None + ) == entity_has_automations diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index 97392e0c93b..10d183a22e9 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -2,19 +2,23 @@ from freezegun.api import FrozenDateTimeFactory import pytest -from ring_doorbell import AuthenticationError, RingError, RingTimeout +from ring_doorbell import AuthenticationError, Ring, RingError, RingTimeout from homeassistant.components import ring +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.ring import DOMAIN -from homeassistant.components.ring.const import SCAN_INTERVAL +from homeassistant.components.ring.const import CONF_LISTEN_CREDENTIALS, SCAN_INTERVAL +from homeassistant.components.ring.coordinator import RingEventListener from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.setup import async_setup_component +from .device_mocks import FRONT_DOOR_DEVICE_ID + from tests.common import 
MockConfigEntry, async_fire_time_changed @@ -413,3 +417,63 @@ async def test_token_updated( async_fire_time_changed(hass) await hass.async_block_till_done() assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "new-mock-token"} + + +async def test_listen_token_updated( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_ring_client, + mock_ring_event_listener_class, +) -> None: + """Test that the listener token value is updated in the config entry. + + This simulates the api calling the callback. + """ + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert mock_ring_event_listener_class.call_count == 1 + token_updater = mock_ring_event_listener_class.call_args.args[2] + + assert mock_config_entry.data.get(CONF_LISTEN_CREDENTIALS) is None + token_updater({"listen_access_token": "mock-token"}) + assert mock_config_entry.data.get(CONF_LISTEN_CREDENTIALS) == { + "listen_access_token": "mock-token" + } + + +async def test_no_listen_start( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + entity_registry: er.EntityRegistry, + mock_ring_event_listener_class: type[RingEventListener], + mock_ring_client: Ring, +) -> None: + """Test behaviour if listener doesn't start.""" + mock_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data={"username": "foo", "token": {}}, + ) + # Create a binary sensor entity so it is not ignored by the deprecation check + # and the listener will start + entity_registry.async_get_or_create( + domain=BINARY_SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=f"{FRONT_DOOR_DEVICE_ID}-motion", + suggested_object_id=f"{FRONT_DOOR_DEVICE_ID}_motion", + config_entry=mock_entry, + ) + mock_ring_event_listener_class.do_not_start = True + + mock_ring_event_listener_class.return_value.started = False + + mock_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + assert "Ring event listener failed to start after 10 seconds" in [ + record.message for record in caplog.records if record.levelname == "WARNING" + ] diff --git a/tests/components/ring/test_sensor.py b/tests/components/ring/test_sensor.py index 1f05c120251..dead52a5acc 100644 --- a/tests/components/ring/test_sensor.py +++ b/tests/components/ring/test_sensor.py @@ -1,17 +1,26 @@ """The tests for the Ring sensor platform.""" import logging +from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest +from ring_doorbell import Ring -from homeassistant.components.ring.const import SCAN_INTERVAL -from homeassistant.components.sensor import ATTR_STATE_CLASS, SensorStateClass +from homeassistant.components.ring.const import DOMAIN, SCAN_INTERVAL +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) +from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er +from homeassistant.setup import async_setup_component from .common import setup_platform +from .device_mocks import FRONT_DEVICE_ID, FRONT_DOOR_DEVICE_ID, INGRESS_DEVICE_ID from tests.common import async_fire_time_changed @@ -107,13 +116,23 @@ async def test_health_sensor( @pytest.mark.parametrize( - ("device_name", "sensor_name", "expected_value"), + ("device_id", "device_name", "sensor_name", "expected_value"), [ - ("front_door", 
"last_motion", "2017-03-05T15:03:40+00:00"), - ("front_door", "last_ding", "2018-03-05T15:03:40+00:00"), - ("front_door", "last_activity", "2018-03-05T15:03:40+00:00"), - ("front", "last_motion", "2017-03-05T15:03:40+00:00"), - ("ingress", "last_activity", "2024-02-02T11:21:24+00:00"), + ( + FRONT_DOOR_DEVICE_ID, + "front_door", + "last_motion", + "2017-03-05T15:03:40+00:00", + ), + (FRONT_DOOR_DEVICE_ID, "front_door", "last_ding", "2018-03-05T15:03:40+00:00"), + ( + FRONT_DOOR_DEVICE_ID, + "front_door", + "last_activity", + "2018-03-05T15:03:40+00:00", + ), + (FRONT_DEVICE_ID, "front", "last_motion", "2017-03-05T15:03:40+00:00"), + (INGRESS_DEVICE_ID, "ingress", "last_activity", "2024-02-02T11:21:24+00:00"), ], ids=[ "doorbell-motion", @@ -125,14 +144,31 @@ async def test_health_sensor( ) async def test_history_sensor( hass: HomeAssistant, - mock_ring_client, + mock_ring_client: Ring, + mock_config_entry: ConfigEntry, + entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - device_name, - sensor_name, - expected_value, + device_id: int, + device_name: str, + sensor_name: str, + expected_value: str, ) -> None: """Test the Ring sensors.""" - await setup_platform(hass, "sensor") + # Create the entity so it is not ignored by the deprecation check + mock_config_entry.add_to_hass(hass) + + entity_id = f"sensor.{device_name}_{sensor_name}" + unique_id = f"{device_id}-{sensor_name}" + + entity_registry.async_get_or_create( + domain=SENSOR_DOMAIN, + platform=DOMAIN, + unique_id=unique_id, + suggested_object_id=f"{device_name}_{sensor_name}", + config_entry=mock_config_entry, + ) + with patch("homeassistant.components.ring.PLATFORMS", [Platform.SENSOR]): + assert await async_setup_component(hass, DOMAIN, {}) entity_id = f"sensor.{device_name}_{sensor_name}" sensor_state = hass.states.get(entity_id) From 20600123f8b2f6fabe4d89ae866bc467dc24c04d Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Sun, 8 Sep 2024 18:52:21 +0200 Subject: [PATCH 1602/1635] Update bring todo entity snapshots (#125518) Update bring todo snapshot --- tests/components/bring/snapshots/test_todo.ambr | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/components/bring/snapshots/test_todo.ambr b/tests/components/bring/snapshots/test_todo.ambr index 6a24b4148b7..6a7104727a1 100644 --- a/tests/components/bring/snapshots/test_todo.ambr +++ b/tests/components/bring/snapshots/test_todo.ambr @@ -23,7 +23,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Baumarkt', + 'original_name': None, 'platform': 'bring', 'previous_unique_id': None, 'supported_features': , @@ -70,7 +70,7 @@ }), 'original_device_class': None, 'original_icon': None, - 'original_name': 'Einkauf', + 'original_name': None, 'platform': 'bring', 'previous_unique_id': None, 'supported_features': , From 26ac8e35cb1814857538073ec2dd6a1c7d91b83f Mon Sep 17 00:00:00 2001 From: "Steven B." 
<51370195+sdb9696@users.noreply.github.com> Date: Sun, 8 Sep 2024 19:32:34 +0100 Subject: [PATCH 1603/1635] Add event platform to ring (#125506) --- homeassistant/components/ring/const.py | 1 + homeassistant/components/ring/event.py | 109 +++++++++++++++++++++++++ tests/components/ring/test_event.py | 80 ++++++++++++++++++ 3 files changed, 190 insertions(+) create mode 100644 homeassistant/components/ring/event.py create mode 100644 tests/components/ring/test_event.py diff --git a/homeassistant/components/ring/const.py b/homeassistant/components/ring/const.py index c67adbf5984..5fac77d63bb 100644 --- a/homeassistant/components/ring/const.py +++ b/homeassistant/components/ring/const.py @@ -18,6 +18,7 @@ PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, Platform.CAMERA, + Platform.EVENT, Platform.LIGHT, Platform.SENSOR, Platform.SIREN, diff --git a/homeassistant/components/ring/event.py b/homeassistant/components/ring/event.py new file mode 100644 index 00000000000..e6d9d25542f --- /dev/null +++ b/homeassistant/components/ring/event.py @@ -0,0 +1,109 @@ +"""Component providing support for ring events.""" + +from dataclasses import dataclass +from typing import Generic + +from ring_doorbell import RingCapability, RingEvent as RingAlert +from ring_doorbell.const import KIND_DING, KIND_INTERCOM_UNLOCK, KIND_MOTION + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import RingConfigEntry +from .coordinator import RingListenCoordinator +from .entity import RingBaseEntity, RingDeviceT + + +@dataclass(frozen=True, kw_only=True) +class RingEventEntityDescription(EventEntityDescription, Generic[RingDeviceT]): + """Base class for event entity description.""" + + capability: RingCapability + + +EVENT_DESCRIPTIONS: tuple[RingEventEntityDescription, ...] 
= ( + RingEventEntityDescription( + key=KIND_DING, + translation_key=KIND_DING, + device_class=EventDeviceClass.DOORBELL, + event_types=[KIND_DING], + capability=RingCapability.DING, + ), + RingEventEntityDescription( + key=KIND_MOTION, + translation_key=KIND_MOTION, + device_class=EventDeviceClass.MOTION, + event_types=[KIND_MOTION], + capability=RingCapability.MOTION_DETECTION, + ), + RingEventEntityDescription( + key=KIND_INTERCOM_UNLOCK, + translation_key=KIND_INTERCOM_UNLOCK, + device_class=EventDeviceClass.BUTTON, + event_types=[KIND_INTERCOM_UNLOCK], + capability=RingCapability.OPEN, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: RingConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up events for a Ring device.""" + ring_data = entry.runtime_data + listen_coordinator = ring_data.listen_coordinator + + async_add_entities( + RingEvent(device, listen_coordinator, description) + for description in EVENT_DESCRIPTIONS + for device in ring_data.devices.all_devices + if device.has_capability(description.capability) + ) + + +class RingEvent(RingBaseEntity[RingListenCoordinator, RingDeviceT], EventEntity): + """An event implementation for Ring device.""" + + entity_description: RingEventEntityDescription[RingDeviceT] + + def __init__( + self, + device: RingDeviceT, + coordinator: RingListenCoordinator, + description: RingEventEntityDescription[RingDeviceT], + ) -> None: + """Initialize a event entity for Ring device.""" + super().__init__(device, coordinator) + self.entity_description = description + self._attr_unique_id = f"{device.id}-{description.key}" + + @callback + def _async_handle_event(self, event: str) -> None: + """Handle the event.""" + self._trigger_event(event) + + def _get_coordinator_alert(self) -> RingAlert | None: + return self.coordinator.alerts.get( + (self._device.device_api_id, self.entity_description.key) + ) + + @callback + def _handle_coordinator_update(self) -> None: + if alert := self._get_coordinator_alert(): + self._async_handle_event(alert.kind) + super()._handle_coordinator_update() + + @property + def available(self) -> bool: + """Return if entity is available.""" + return self.coordinator.event_listener.started + + async def async_update(self) -> None: + """All updates are passive.""" diff --git a/tests/components/ring/test_event.py b/tests/components/ring/test_event.py new file mode 100644 index 00000000000..c546f9ea136 --- /dev/null +++ b/tests/components/ring/test_event.py @@ -0,0 +1,80 @@ +"""The tests for the Ring event platform.""" + +from datetime import datetime +import time + +from freezegun.api import FrozenDateTimeFactory +import pytest +from ring_doorbell import Ring + +from homeassistant.components.ring.binary_sensor import RingEvent +from homeassistant.components.ring.coordinator import RingEventListener +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .common import setup_platform +from .device_mocks import FRONT_DOOR_DEVICE_ID, INGRESS_DEVICE_ID + + +@pytest.mark.parametrize( + ("device_id", "device_name", "alert_kind", "device_class"), + [ + pytest.param( + FRONT_DOOR_DEVICE_ID, + "front_door", + "motion", + "motion", + id="front_door_motion", + ), + pytest.param( + FRONT_DOOR_DEVICE_ID, "front_door", "ding", "doorbell", id="front_door_ding" + ), + pytest.param( + INGRESS_DEVICE_ID, "ingress", "ding", "doorbell", id="ingress_ding" + ), + pytest.param( + INGRESS_DEVICE_ID, + "ingress", + "intercom_unlock", + "button", + id="ingress_unlock", + ), 
+ ], +) +async def test_event( + hass: HomeAssistant, + mock_ring_client: Ring, + mock_ring_event_listener_class: RingEventListener, + freezer: FrozenDateTimeFactory, + device_id: int, + device_name: str, + alert_kind: str, + device_class: str, +) -> None: + """Test the Ring event platforms.""" + + await setup_platform(hass, Platform.EVENT) + + start_time_str = "2024-09-04T15:32:53.892+00:00" + start_time = datetime.strptime(start_time_str, "%Y-%m-%dT%H:%M:%S.%f%z") + freezer.move_to(start_time) + on_event_cb = mock_ring_event_listener_class.return_value.add_notification_callback.call_args.args[ + 0 + ] + + # Default state is unknown + entity_id = f"event.{device_name}_{alert_kind}" + state = hass.states.get(entity_id) + assert state is not None + assert state.state == "unknown" + assert state.attributes["device_class"] == device_class + + # A new alert sets to on + event = RingEvent( + 1234546, device_id, "Foo", "Bar", time.time(), 180, kind=alert_kind, state=None + ) + mock_ring_client.active_alerts.return_value = [event] + on_event_cb(event) + state = hass.states.get(entity_id) + assert state is not None + assert state.state == start_time_str From 8b8083a6394d5e1d66de37a88057c02a2ec175cf Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 21:18:08 +0200 Subject: [PATCH 1604/1635] Migrate smappee to use runtime_data (#125529) --- homeassistant/components/smappee/__init__.py | 17 ++++++++--------- .../components/smappee/binary_sensor.py | 6 +++--- homeassistant/components/smappee/sensor.py | 6 +++--- homeassistant/components/smappee/switch.py | 6 +++--- 4 files changed, 17 insertions(+), 18 deletions(-) diff --git a/homeassistant/components/smappee/__init__.py b/homeassistant/components/smappee/__init__.py index c7edd46c7e2..7fa30965aa8 100644 --- a/homeassistant/components/smappee/__init__.py +++ b/homeassistant/components/smappee/__init__.py @@ -25,6 +25,8 @@ from .const import ( TOKEN_URL, ) +type SmappeeConfigEntry = ConfigEntry[SmappeeBase] + CONFIG_SCHEMA = vol.Schema( { DOMAIN: vol.Schema( @@ -72,7 +74,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: SmappeeConfigEntry) -> bool: """Set up Smappee from a zeroconf or config entry.""" if CONF_IP_ADDRESS in entry.data: if helper.is_smappee_genius(entry.data[CONF_SERIALNUMBER]): @@ -103,31 +105,28 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: smappee = Smappee(api=smappee_api) await hass.async_add_executor_job(smappee.load_service_locations) - hass.data[DOMAIN][entry.entry_id] = SmappeeBase(hass, smappee) + entry.runtime_data = SmappeeBase(hass, smappee) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: SmappeeConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id, None) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) class SmappeeBase: """An object to hold the PySmappee instance.""" - def __init__(self, hass, smappee): + def __init__(self, hass: HomeAssistant, smappee: Smappee) -> None: """Initialize the Smappee API wrapper class.""" 
self.hass = hass self.smappee = smappee @Throttle(MIN_TIME_BETWEEN_UPDATES) - async def async_update(self): + async def async_update(self) -> None: """Update all Smappee trends and appliance states.""" await self.hass.async_add_executor_job( self.smappee.update_trends_and_appliance_states diff --git a/homeassistant/components/smappee/binary_sensor.py b/homeassistant/components/smappee/binary_sensor.py index a653896f1c2..86bc225dba1 100644 --- a/homeassistant/components/smappee/binary_sensor.py +++ b/homeassistant/components/smappee/binary_sensor.py @@ -6,11 +6,11 @@ from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, BinarySensorEntity, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import SmappeeConfigEntry from .const import DOMAIN BINARY_SENSOR_PREFIX = "Appliance" @@ -36,11 +36,11 @@ ICON_MAPPING = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SmappeeConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Smappee binary sensor.""" - smappee_base = hass.data[DOMAIN][config_entry.entry_id] + smappee_base = config_entry.runtime_data entities: list[BinarySensorEntity] = [] for service_location in smappee_base.smappee.service_locations.values(): diff --git a/homeassistant/components/smappee/sensor.py b/homeassistant/components/smappee/sensor.py index c984d936b06..2f9d6443568 100644 --- a/homeassistant/components/smappee/sensor.py +++ b/homeassistant/components/smappee/sensor.py @@ -10,12 +10,12 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfElectricPotential, UnitOfEnergy, UnitOfPower from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import SmappeeConfigEntry from .const import DOMAIN @@ -188,11 +188,11 @@ VOLTAGE_SENSORS: tuple[SmappeeVoltageSensorEntityDescription, ...] = ( async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SmappeeConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Smappee sensor.""" - smappee_base = hass.data[DOMAIN][config_entry.entry_id] + smappee_base = config_entry.runtime_data entities = [] for service_location in smappee_base.smappee.service_locations.values(): diff --git a/homeassistant/components/smappee/switch.py b/homeassistant/components/smappee/switch.py index 1bc5d159145..bccf816c823 100644 --- a/homeassistant/components/smappee/switch.py +++ b/homeassistant/components/smappee/switch.py @@ -3,11 +3,11 @@ from typing import Any from homeassistant.components.switch import SwitchEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import SmappeeConfigEntry from .const import DOMAIN SWITCH_PREFIX = "Switch" @@ -15,11 +15,11 @@ SWITCH_PREFIX = "Switch" async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: SmappeeConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Smappee Comfort Plugs.""" - smappee_base = hass.data[DOMAIN][config_entry.entry_id] + smappee_base = config_entry.runtime_data entities = [] for service_location in smappee_base.smappee.service_locations.values(): From 8ce236de802d02e1dbf410f3a77c0c02e5e6b3a5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 21:29:14 +0200 Subject: [PATCH 1605/1635] Migrate amberelectric to use runtime_data (#125533) --- .../components/amberelectric/__init__.py | 16 +++++++--------- .../components/amberelectric/binary_sensor.py | 8 ++++---- homeassistant/components/amberelectric/sensor.py | 8 ++++---- 3 files changed, 15 insertions(+), 17 deletions(-) diff --git a/homeassistant/components/amberelectric/__init__.py b/homeassistant/components/amberelectric/__init__.py index 9d9eef49b36..cd44886c9ef 100644 --- a/homeassistant/components/amberelectric/__init__.py +++ b/homeassistant/components/amberelectric/__init__.py @@ -7,11 +7,13 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant -from .const import CONF_SITE_ID, DOMAIN, PLATFORMS +from .const import CONF_SITE_ID, PLATFORMS from .coordinator import AmberUpdateCoordinator +type AmberConfigEntry = ConfigEntry[AmberUpdateCoordinator] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + +async def async_setup_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: """Set up Amber Electric from a config entry.""" configuration = Configuration(access_token=entry.data[CONF_API_TOKEN]) api_instance = amber_api.AmberApi.create(configuration) @@ -19,15 +21,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = AmberUpdateCoordinator(hass, api_instance, site_id) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AmberConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/amberelectric/binary_sensor.py b/homeassistant/components/amberelectric/binary_sensor.py index cd06fb04f39..a9fa00d0129 100644 --- a/homeassistant/components/amberelectric/binary_sensor.py +++ b/homeassistant/components/amberelectric/binary_sensor.py @@ -8,12 +8,12 @@ from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTRIBUTION, DOMAIN +from . 
import AmberConfigEntry +from .const import ATTRIBUTION from .coordinator import AmberUpdateCoordinator PRICE_SPIKE_ICONS = { @@ -85,11 +85,11 @@ class AmberDemandWindowBinarySensor(AmberPriceGridSensor): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: AmberConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a config entry.""" - coordinator: AmberUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data price_spike_description = BinarySensorEntityDescription( key="price_spike", diff --git a/homeassistant/components/amberelectric/sensor.py b/homeassistant/components/amberelectric/sensor.py index aafdd730a0c..52c0c42e7bc 100644 --- a/homeassistant/components/amberelectric/sensor.py +++ b/homeassistant/components/amberelectric/sensor.py @@ -17,13 +17,13 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CURRENCY_DOLLAR, PERCENTAGE, UnitOfEnergy from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ATTRIBUTION, DOMAIN +from . import AmberConfigEntry +from .const import ATTRIBUTION from .coordinator import AmberUpdateCoordinator, normalize_descriptor UNIT = f"{CURRENCY_DOLLAR}/{UnitOfEnergy.KILO_WATT_HOUR}" @@ -196,11 +196,11 @@ class AmberGridSensor(CoordinatorEntity[AmberUpdateCoordinator], SensorEntity): async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: AmberConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up a config entry.""" - coordinator: AmberUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data current: dict[str, CurrentInterval] = coordinator.data["current"] forecasts: dict[str, list[ForecastInterval]] = coordinator.data["forecasts"] From 513361ef0fd1501cf702ac2e920ca2925a42b97c Mon Sep 17 00:00:00 2001 From: Jason Hunter Date: Sun, 8 Sep 2024 15:38:31 -0400 Subject: [PATCH 1606/1635] Fix failing template config flow tests (#125534) fix: failing template config flow tests --- tests/components/template/test_config_flow.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index ee748ce41f5..eb2c6e57f85 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -63,7 +63,7 @@ from tests.typing import WebSocketGenerator "device_class": "restart", "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -73,7 +73,7 @@ from tests.typing import WebSocketGenerator "device_class": "restart", "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -410,7 +410,7 @@ def get_suggested(schema, key): "device_class": "restart", "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } @@ -419,7 +419,7 @@ def get_suggested(schema, key): { "press": [ { - "service": "input_boolean.toggle", + "action": "input_boolean.toggle", "target": {"entity_id": "input_boolean.test"}, "data": {}, } From 
7f4fc4d371240aaaac60eeda39c733319892b84e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 21:39:05 +0200 Subject: [PATCH 1607/1635] Migrate airvisual to use runtime_data (#125532) * Migrate airvisual to use runtime_data * Remove usedefault * Adjust --- .../components/airvisual/__init__.py | 30 +++++++++---------- .../components/airvisual/diagnostics.py | 9 +++--- homeassistant/components/airvisual/sensor.py | 10 ++++--- 3 files changed, 24 insertions(+), 25 deletions(-) diff --git a/homeassistant/components/airvisual/__init__.py b/homeassistant/components/airvisual/__init__.py index 60fdbf12ca1..f8f045859b3 100644 --- a/homeassistant/components/airvisual/__init__.py +++ b/homeassistant/components/airvisual/__init__.py @@ -53,6 +53,8 @@ from .const import ( LOGGER, ) +type AirVisualConfigEntry = ConfigEntry[DataUpdateCoordinator] + # We use a raw string for the airvisual_pro domain (instead of importing the actual # constant) so that we can avoid listing it as a dependency: DOMAIN_AIRVISUAL_PRO = "airvisual_pro" @@ -91,10 +93,9 @@ def async_get_cloud_coordinators_by_api_key( ) -> list[DataUpdateCoordinator]: """Get all DataUpdateCoordinator objects related to a particular API key.""" return [ - coordinator - for entry_id, coordinator in hass.data[DOMAIN].items() - if (entry := hass.config_entries.async_get_entry(entry_id)) - and entry.data.get(CONF_API_KEY) == api_key + entry.runtime_data + for entry in hass.config_entries.async_entries(DOMAIN) + if entry.data.get(CONF_API_KEY) == api_key and hasattr(entry, "runtime_data") ] @@ -172,7 +173,7 @@ def _standardize_geography_config_entry( hass.config_entries.async_update_entry(entry, **entry_updates) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> bool: """Set up AirVisual as config entry.""" if CONF_API_KEY not in entry.data: # If this is a migrated AirVisual Pro entry, there's no actual setup to do; @@ -220,8 +221,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry.async_on_unload(entry.add_update_listener(async_reload_entry)) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][entry.entry_id] = coordinator + entry.runtime_data = coordinator # Reassess the interval between 2 server requests async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY]) @@ -231,7 +231,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_migrate_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> bool: """Migrate an old config entry.""" version = entry.version @@ -388,21 +388,19 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> bool: """Unload an AirVisual config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) - if CONF_API_KEY in entry.data: - # Re-calculate the update interval period for any remaining consumers of - # this API key: - async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY]) + if unload_ok and CONF_API_KEY in 
entry.data: + # Re-calculate the update interval period for any remaining consumers of + # this API key: + async_sync_geo_coordinator_update_intervals(hass, entry.data[CONF_API_KEY]) return unload_ok -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/airvisual/diagnostics.py b/homeassistant/components/airvisual/diagnostics.py index 348bb249b0f..2e7c60364f9 100644 --- a/homeassistant/components/airvisual/diagnostics.py +++ b/homeassistant/components/airvisual/diagnostics.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_COUNTRY, @@ -15,9 +14,9 @@ from homeassistant.const import ( CONF_UNIQUE_ID, ) from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import CONF_CITY, DOMAIN +from . import AirVisualConfigEntry +from .const import CONF_CITY CONF_COORDINATES = "coordinates" CONF_TITLE = "title" @@ -37,10 +36,10 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: AirVisualConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: DataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data return { "entry": async_redact_data(entry.as_dict(), TO_REDACT), diff --git a/homeassistant/components/airvisual/sensor.py b/homeassistant/components/airvisual/sensor.py index df0e3da1f45..c9df2f72233 100644 --- a/homeassistant/components/airvisual/sensor.py +++ b/homeassistant/components/airvisual/sensor.py @@ -26,8 +26,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import AirVisualEntity -from .const import CONF_CITY, DOMAIN +from . 
import AirVisualConfigEntry, AirVisualEntity +from .const import CONF_CITY ATTR_CITY = "city" ATTR_COUNTRY = "country" @@ -105,10 +105,12 @@ POLLUTANT_UNITS = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AirVisualConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up AirVisual sensors based on a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( AirVisualGeographySensor(coordinator, entry, description, locale) for locale in GEOGRAPHY_SENSOR_LOCALES From 4bcde36a97de521ffbb34b051c97b02602781984 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Benecke Date: Sun, 8 Sep 2024 21:42:33 +0200 Subject: [PATCH 1608/1635] Fix failing blebox climate tests (#125522) --- tests/components/blebox/test_climate.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/components/blebox/test_climate.py b/tests/components/blebox/test_climate.py index 8ba0c3f630e..e402a3d5fbd 100644 --- a/tests/components/blebox/test_climate.py +++ b/tests/components/blebox/test_climate.py @@ -21,6 +21,7 @@ from homeassistant.components.climate import ( ) from homeassistant.const import ( ATTR_DEVICE_CLASS, + ATTR_ENTITY_ID, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE, STATE_UNKNOWN, @@ -152,6 +153,7 @@ async def test_on_when_below_desired(saunabox, hass: HomeAssistant) -> None: feature_mock.desired = 64.8 feature_mock.current = 25.7 + feature_mock.mode = 1 feature_mock.async_on = AsyncMock(side_effect=turn_on) await hass.services.async_call( "climate", @@ -186,12 +188,13 @@ async def test_on_when_above_desired(saunabox, hass: HomeAssistant) -> None: feature_mock.desired = 23.4 feature_mock.current = 28.7 + feature_mock.mode = 1 feature_mock.async_on = AsyncMock(side_effect=turn_on) await hass.services.async_call( "climate", SERVICE_SET_HVAC_MODE, - {"entity_id": entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, + {ATTR_ENTITY_ID: entity_id, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, ) feature_mock.async_off.assert_not_called() From 6f88b6e64efcbc9e6036088c2b28fd1862b56ec7 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 22:04:34 +0200 Subject: [PATCH 1609/1635] Migrate anthemav to use runtime_data (#125537) --- homeassistant/components/anthemav/__init__.py | 20 +++++++++---------- .../components/anthemav/media_player.py | 7 +++---- 2 files changed, 12 insertions(+), 15 deletions(-) diff --git a/homeassistant/components/anthemav/__init__.py b/homeassistant/components/anthemav/__init__.py index 4efeb9245c8..9616d554424 100644 --- a/homeassistant/components/anthemav/__init__.py +++ b/homeassistant/components/anthemav/__init__.py @@ -13,14 +13,16 @@ from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.dispatcher import async_dispatcher_send -from .const import ANTHEMAV_UPDATE_SIGNAL, DEVICE_TIMEOUT_SECONDS, DOMAIN +from .const import ANTHEMAV_UPDATE_SIGNAL, DEVICE_TIMEOUT_SECONDS + +type AnthemavConfigEntry = ConfigEntry[anthemav.Connection] PLATFORMS = [Platform.MEDIA_PLAYER] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AnthemavConfigEntry) -> bool: """Set up Anthem A/V Receivers from a config entry.""" @callback @@ -41,7 +43,7 @@ async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except (OSError, DeviceError) as err: raise ConfigEntryNotReady from err - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = avr + entry.runtime_data = avr await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) @@ -56,16 +58,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AnthemavConfigEntry) -> bool: """Unload a config entry.""" unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - avr = hass.data[DOMAIN][entry.entry_id] + avr = entry.runtime_data + _LOGGER.debug("Close avr connection") + avr.close() - if avr is not None: - _LOGGER.debug("Close avr connection") - avr.close() - - if unload_ok: - hass.data[DOMAIN].pop(entry.entry_id) return unload_ok diff --git a/homeassistant/components/anthemav/media_player.py b/homeassistant/components/anthemav/media_player.py index 1dbfdf275f2..be5a6ad2258 100644 --- a/homeassistant/components/anthemav/media_player.py +++ b/homeassistant/components/anthemav/media_player.py @@ -4,7 +4,6 @@ from __future__ import annotations import logging -from anthemav.connection import Connection from anthemav.protocol import AVR from homeassistant.components.media_player import ( @@ -13,13 +12,13 @@ from homeassistant.components.media_player import ( MediaPlayerEntityFeature, MediaPlayerState, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_MODEL from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . 
import AnthemavConfigEntry from .const import ANTHEMAV_UPDATE_SIGNAL, DOMAIN, MANUFACTURER _LOGGER = logging.getLogger(__name__) @@ -27,7 +26,7 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: AnthemavConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up entry.""" @@ -35,7 +34,7 @@ async def async_setup_entry( mac_address = config_entry.data[CONF_MAC] model = config_entry.data[CONF_MODEL] - avr: Connection = hass.data[DOMAIN][config_entry.entry_id] + avr = config_entry.runtime_data _LOGGER.debug("Connection data dump: %s", avr.dump_conndata) From 7209b3c7d323e8681e2ec6734c7fc7d967bfd281 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 22:05:48 +0200 Subject: [PATCH 1610/1635] Migrate aosmith to use runtime_data (#125538) --- homeassistant/components/aosmith/__init__.py | 13 ++++++------- homeassistant/components/aosmith/diagnostics.py | 8 +++----- homeassistant/components/aosmith/sensor.py | 10 +++++----- homeassistant/components/aosmith/water_heater.py | 10 +++++----- 4 files changed, 19 insertions(+), 22 deletions(-) diff --git a/homeassistant/components/aosmith/__init__.py b/homeassistant/components/aosmith/__init__.py index c42096cd3a7..dd60f69c4b9 100644 --- a/homeassistant/components/aosmith/__init__.py +++ b/homeassistant/components/aosmith/__init__.py @@ -16,6 +16,8 @@ from .coordinator import AOSmithEnergyCoordinator, AOSmithStatusCoordinator PLATFORMS: list[Platform] = [Platform.SENSOR, Platform.WATER_HEATER] +type AOSmithConfigEntry = ConfigEntry[AOSmithData] + @dataclass class AOSmithData: @@ -26,7 +28,7 @@ class AOSmithData: energy_coordinator: AOSmithEnergyCoordinator -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool: """Set up A. O. Smith from a config entry.""" email = entry.data[CONF_EMAIL] password = entry.data[CONF_PASSWORD] @@ -55,7 +57,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) await energy_coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AOSmithData( + entry.runtime_data = AOSmithData( client, status_coordinator, energy_coordinator, @@ -66,9 +68,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AOSmithConfigEntry) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/aosmith/diagnostics.py b/homeassistant/components/aosmith/diagnostics.py index 96b049b904f..94726731f75 100644 --- a/homeassistant/components/aosmith/diagnostics.py +++ b/homeassistant/components/aosmith/diagnostics.py @@ -5,11 +5,9 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import AOSmithData -from .const import DOMAIN +from . 
import AOSmithConfigEntry TO_REDACT = { "address", @@ -31,10 +29,10 @@ TO_REDACT = { async def async_get_config_entry_diagnostics( - hass: HomeAssistant, config_entry: ConfigEntry + hass: HomeAssistant, config_entry: AOSmithConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: AOSmithData = hass.data[DOMAIN][config_entry.entry_id] + data = config_entry.runtime_data all_device_info = await data.client.get_all_device_info() return async_redact_data(all_device_info, TO_REDACT) diff --git a/homeassistant/components/aosmith/sensor.py b/homeassistant/components/aosmith/sensor.py index e33c388af8b..89b383744e5 100644 --- a/homeassistant/components/aosmith/sensor.py +++ b/homeassistant/components/aosmith/sensor.py @@ -11,13 +11,11 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfEnergy from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AOSmithData -from .const import DOMAIN +from . import AOSmithConfigEntry from .coordinator import AOSmithEnergyCoordinator, AOSmithStatusCoordinator from .entity import AOSmithEnergyEntity, AOSmithStatusEntity @@ -49,10 +47,12 @@ HOT_WATER_STATUS_MAP: dict[HotWaterStatus, str] = { async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AOSmithConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up A. O. Smith sensor platform.""" - data: AOSmithData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( AOSmithStatusSensorEntity(data.status_coordinator, description, junction_id) diff --git a/homeassistant/components/aosmith/water_heater.py b/homeassistant/components/aosmith/water_heater.py index dceba13ba34..f3dc8b3413f 100644 --- a/homeassistant/components/aosmith/water_heater.py +++ b/homeassistant/components/aosmith/water_heater.py @@ -12,14 +12,12 @@ from homeassistant.components.water_heater import ( WaterHeaterEntity, WaterHeaterEntityFeature, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import AOSmithData -from .const import DOMAIN +from . import AOSmithConfigEntry from .coordinator import AOSmithStatusCoordinator from .entity import AOSmithStatusEntity @@ -46,10 +44,12 @@ DEFAULT_OPERATION_MODE_PRIORITY = [ async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, + entry: AOSmithConfigEntry, + async_add_entities: AddEntitiesCallback, ) -> None: """Set up A. O. 
Smith water heater platform.""" - data: AOSmithData = hass.data[DOMAIN][entry.entry_id] + data = entry.runtime_data async_add_entities( AOSmithWaterHeaterEntity(data.status_coordinator, junction_id) From 4d804649fc636aa6f2b917bb2c4660b025eea1b0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 22:07:19 +0200 Subject: [PATCH 1611/1635] Migrate apcupsd to use runtime_data (#125539) --- homeassistant/components/apcupsd/__init__.py | 18 +++++++----------- .../components/apcupsd/binary_sensor.py | 9 +++------ .../components/apcupsd/diagnostics.py | 10 ++++------ homeassistant/components/apcupsd/sensor.py | 8 ++++---- tests/components/apcupsd/__init__.py | 2 +- tests/components/apcupsd/test_config_flow.py | 2 +- 6 files changed, 20 insertions(+), 29 deletions(-) diff --git a/homeassistant/components/apcupsd/__init__.py b/homeassistant/components/apcupsd/__init__.py index 7293a42f7e7..44edc5c151f 100644 --- a/homeassistant/components/apcupsd/__init__.py +++ b/homeassistant/components/apcupsd/__init__.py @@ -2,22 +2,22 @@ from __future__ import annotations -import logging from typing import Final from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN from .coordinator import APCUPSdCoordinator -_LOGGER = logging.getLogger(__name__) +type APCUPSdConfigEntry = ConfigEntry[APCUPSdCoordinator] PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR) -async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, config_entry: APCUPSdConfigEntry +) -> bool: """Use config values to set up a function enabling status retrieval.""" host, port = config_entry.data[CONF_HOST], config_entry.data[CONF_PORT] coordinator = APCUPSdCoordinator(hass, host, port) @@ -25,17 +25,13 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b await coordinator.async_config_entry_first_refresh() # Store the coordinator for later uses. - hass.data.setdefault(DOMAIN, {}) - hass.data[DOMAIN][config_entry.entry_id] = coordinator + config_entry.runtime_data = coordinator # Forward the config entries to the supported platforms. 
await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: APCUPSdConfigEntry) -> bool: """Unload a config entry.""" - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - if unload_ok and DOMAIN in hass.data: - hass.data[DOMAIN].pop(entry.entry_id) - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/apcupsd/binary_sensor.py b/homeassistant/components/apcupsd/binary_sensor.py index 5f86ceb6eec..cd9e60f7ae4 100644 --- a/homeassistant/components/apcupsd/binary_sensor.py +++ b/homeassistant/components/apcupsd/binary_sensor.py @@ -2,24 +2,21 @@ from __future__ import annotations -import logging from typing import Final from homeassistant.components.binary_sensor import ( BinarySensorEntity, BinarySensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN +from . import APCUPSdConfigEntry from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 -_LOGGER = logging.getLogger(__name__) _DESCRIPTION = BinarySensorEntityDescription( key="statflag", translation_key="online_status", @@ -30,11 +27,11 @@ _VALUE_ONLINE_MASK: Final = 0b1000 async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: APCUPSdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up an APCUPSd Online Status binary sensor.""" - coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data # Do not create the binary sensor if APCUPSd does not provide STATFLAG field for us # to determine the online status. diff --git a/homeassistant/components/apcupsd/diagnostics.py b/homeassistant/components/apcupsd/diagnostics.py index d375a8bc248..fa0908f3144 100644 --- a/homeassistant/components/apcupsd/diagnostics.py +++ b/homeassistant/components/apcupsd/diagnostics.py @@ -5,19 +5,17 @@ from __future__ import annotations from typing import Any from homeassistant.components.diagnostics import async_redact_data -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from .const import DOMAIN -from .coordinator import APCUPSdCoordinator, APCUPSdData +from . 
import APCUPSdConfigEntry TO_REDACT = {"SERIALNO", "HOSTNAME"} async def async_get_config_entry_diagnostics( - hass: HomeAssistant, entry: ConfigEntry + hass: HomeAssistant, entry: APCUPSdConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - coordinator: APCUPSdCoordinator = hass.data[DOMAIN][entry.entry_id] - data: APCUPSdData = coordinator.data + coordinator = entry.runtime_data + data = coordinator.data return async_redact_data(data, TO_REDACT) diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index d4bbfb148e5..9e0abcb1dd9 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -10,7 +10,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, UnitOfApparentPower, @@ -25,7 +24,8 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, LAST_S_TEST +from . import APCUPSdConfigEntry +from .const import LAST_S_TEST from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 @@ -406,11 +406,11 @@ INFERRED_UNITS = { async def async_setup_entry( hass: HomeAssistant, - config_entry: ConfigEntry, + config_entry: APCUPSdConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the APCUPSd sensors from config entries.""" - coordinator: APCUPSdCoordinator = hass.data[DOMAIN][config_entry.entry_id] + coordinator = config_entry.runtime_data # The resource keys in the data dict collected in the coordinator is in upper-case # by default, but we use lower cases throughout this integration. 
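
The runtime_data migrations in this series (smappee, amberelectric, airvisual, anthemav, aosmith, apcupsd, anova, ambient_network) all apply the same pattern visible in the hunks above: parametrize the ConfigEntry with the object that used to live in hass.data[DOMAIN][entry_id], assign it to entry.runtime_data during setup, and drop the manual hass.data bookkeeping on unload. A minimal sketch of that pattern, assuming a hypothetical "example" integration whose ExampleCoordinator stands in for any of the coordinators or clients in these patches (names and constructor arguments are illustrative, not from any specific patch):

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.const import Platform
    from homeassistant.core import HomeAssistant

    from .coordinator import ExampleCoordinator  # hypothetical coordinator class

    PLATFORMS = [Platform.SENSOR]

    # The entry type carries whatever used to be stored in hass.data[DOMAIN][entry_id]
    type ExampleConfigEntry = ConfigEntry[ExampleCoordinator]


    async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
        """Set up the integration from a config entry."""
        coordinator = ExampleCoordinator(hass)
        await coordinator.async_config_entry_first_refresh()
        # Replaces: hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
        entry.runtime_data = coordinator
        await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
        return True


    async def async_unload_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
        """Unload a config entry."""
        # No manual hass.data cleanup: runtime_data is discarded with the unloaded entry
        return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

Platform modules then receive the typed entry in their own async_setup_entry and read the object back with coordinator = config_entry.runtime_data, which is the one-line change repeated across the sensor, binary_sensor, switch, and diagnostics hunks in these patches.
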
diff --git a/tests/components/apcupsd/__init__.py b/tests/components/apcupsd/__init__.py index b75f3eab3af..eb8cd594ad7 100644 --- a/tests/components/apcupsd/__init__.py +++ b/tests/components/apcupsd/__init__.py @@ -4,7 +4,7 @@ from collections import OrderedDict from typing import Final from unittest.mock import patch -from homeassistant.components.apcupsd import DOMAIN +from homeassistant.components.apcupsd.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant diff --git a/tests/components/apcupsd/test_config_flow.py b/tests/components/apcupsd/test_config_flow.py index 2888771eb01..88594260579 100644 --- a/tests/components/apcupsd/test_config_flow.py +++ b/tests/components/apcupsd/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest -from homeassistant.components.apcupsd import DOMAIN +from homeassistant.components.apcupsd.const import DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PORT, CONF_SOURCE from homeassistant.core import HomeAssistant From 0f2525d47601682cfe718242e252b368ef3068f0 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Sun, 8 Sep 2024 22:13:32 +0200 Subject: [PATCH 1612/1635] Migrate anova to use runtime_data (#125536) --- homeassistant/components/anova/__init__.py | 15 +++++---------- homeassistant/components/anova/models.py | 4 ++++ homeassistant/components/anova/sensor.py | 8 +++----- tests/components/anova/test_init.py | 2 +- 4 files changed, 13 insertions(+), 16 deletions(-) diff --git a/homeassistant/components/anova/__init__.py b/homeassistant/components/anova/__init__.py index 7503de8ea10..02c468c1319 100644 --- a/homeassistant/components/anova/__init__.py +++ b/homeassistant/components/anova/__init__.py @@ -13,22 +13,20 @@ from anova_wifi import ( WebsocketFailure, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import aiohttp_client -from .const import DOMAIN from .coordinator import AnovaCoordinator -from .models import AnovaData +from .models import AnovaConfigEntry, AnovaData PLATFORMS = [Platform.SENSOR] _LOGGER = logging.getLogger(__name__) -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bool: """Set up Anova from a config entry.""" api = AnovaApi( aiohttp_client.async_get_clientsession(hass), @@ -62,17 +60,14 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: assert api.websocket_handler is not None devices = list(api.websocket_handler.devices.values()) coordinators = [AnovaCoordinator(hass, device) for device in devices] - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = AnovaData( - api_jwt=api.jwt, coordinators=coordinators, api=api - ) + entry.runtime_data = AnovaData(api_jwt=api.jwt, coordinators=coordinators, api=api) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: AnovaConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, 
PLATFORMS): - anova_data: AnovaData = hass.data[DOMAIN].pop(entry.entry_id) # Disconnect from WS - await anova_data.api.disconnect_websocket() + await entry.runtime_data.api.disconnect_websocket() return unload_ok diff --git a/homeassistant/components/anova/models.py b/homeassistant/components/anova/models.py index 8caf16eeae1..eef8180cf88 100644 --- a/homeassistant/components/anova/models.py +++ b/homeassistant/components/anova/models.py @@ -4,8 +4,12 @@ from dataclasses import dataclass from anova_wifi import AnovaApi +from homeassistant.config_entries import ConfigEntry + from .coordinator import AnovaCoordinator +type AnovaConfigEntry = ConfigEntry[AnovaData] + @dataclass class AnovaData: diff --git a/homeassistant/components/anova/sensor.py b/homeassistant/components/anova/sensor.py index e5fe9ededfd..aa572a0ee9b 100644 --- a/homeassistant/components/anova/sensor.py +++ b/homeassistant/components/anova/sensor.py @@ -7,7 +7,6 @@ from dataclasses import dataclass from anova_wifi import AnovaMode, AnovaState, APCUpdateSensor -from homeassistant import config_entries from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -19,10 +18,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType -from .const import DOMAIN from .coordinator import AnovaCoordinator from .entity import AnovaDescriptionEntity -from .models import AnovaData +from .models import AnovaConfigEntry @dataclass(frozen=True, kw_only=True) @@ -99,11 +97,11 @@ SENSOR_DESCRIPTIONS: list[AnovaSensorEntityDescription] = [ async def async_setup_entry( hass: HomeAssistant, - entry: config_entries.ConfigEntry, + entry: AnovaConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up Anova device.""" - anova_data: AnovaData = hass.data[DOMAIN][entry.entry_id] + anova_data = entry.runtime_data for coordinator in anova_data.coordinators: setup_coordinator(coordinator, async_add_entities) diff --git a/tests/components/anova/test_init.py b/tests/components/anova/test_init.py index 5fc63fcaf93..66ea11fdaef 100644 --- a/tests/components/anova/test_init.py +++ b/tests/components/anova/test_init.py @@ -2,7 +2,7 @@ from anova_wifi import AnovaApi -from homeassistant.components.anova import DOMAIN +from homeassistant.components.anova.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant From 021878e942e1e49785533b9b9ada8e67db90d598 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 00:01:45 +0200 Subject: [PATCH 1613/1635] Migrate ambient_network to use runtime_data (#125535) --- .../components/ambient_network/__init__.py | 18 ++++++++++-------- .../components/ambient_network/sensor.py | 7 +++---- tests/components/ambient_network/conftest.py | 4 ++-- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/homeassistant/components/ambient_network/__init__.py b/homeassistant/components/ambient_network/__init__.py index b286fb7fbc9..e9443a676b5 100644 --- a/homeassistant/components/ambient_network/__init__.py +++ b/homeassistant/components/ambient_network/__init__.py @@ -8,28 +8,30 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from .const import DOMAIN from .coordinator import AmbientNetworkDataUpdateCoordinator +type AmbientNetworkConfigEntry = 
ConfigEntry[AmbientNetworkDataUpdateCoordinator] + PLATFORMS: list[Platform] = [Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry( + hass: HomeAssistant, entry: AmbientNetworkConfigEntry +) -> bool: """Set up the Ambient Weather Network from a config entry.""" api = OpenAPI() coordinator = AmbientNetworkDataUpdateCoordinator(hass, api) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry( + hass: HomeAssistant, entry: AmbientNetworkConfigEntry +) -> bool: """Unload a config entry.""" - if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - hass.data[DOMAIN].pop(entry.entry_id) - - return unload_ok + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/ambient_network/sensor.py b/homeassistant/components/ambient_network/sensor.py index 132fc7dbd0d..336745f88ff 100644 --- a/homeassistant/components/ambient_network/sensor.py +++ b/homeassistant/components/ambient_network/sensor.py @@ -10,7 +10,6 @@ from homeassistant.components.sensor import ( SensorEntityDescription, SensorStateClass, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, CONCENTRATION_PARTS_PER_MILLION, @@ -29,7 +28,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util -from .const import DOMAIN +from . 
import AmbientNetworkConfigEntry from .coordinator import AmbientNetworkDataUpdateCoordinator from .entity import AmbientNetworkEntity @@ -271,12 +270,12 @@ SENSOR_DESCRIPTIONS = ( async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: AmbientNetworkConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the Ambient Network sensor entities.""" - coordinator: AmbientNetworkDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data if coordinator.config_entry is not None: async_add_entities( AmbientNetworkSensor( diff --git a/tests/components/ambient_network/conftest.py b/tests/components/ambient_network/conftest.py index 9fc001252a0..e728d46aaf6 100644 --- a/tests/components/ambient_network/conftest.py +++ b/tests/components/ambient_network/conftest.py @@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, Mock, patch from aioambient import OpenAPI import pytest -from homeassistant.components import ambient_network +from homeassistant.components.ambient_network.const import DOMAIN from homeassistant.core import HomeAssistant from tests.common import ( @@ -69,7 +69,7 @@ async def mock_aioambient(open_api: OpenAPI): def config_entry_fixture(request: pytest.FixtureRequest) -> MockConfigEntry: """Mock config entry.""" return MockConfigEntry( - domain=ambient_network.DOMAIN, + domain=DOMAIN, title=f"Station {request.param[0]}", data={"mac": request.param}, ) From dca287748da33f815b542a51f84947b5bd401e92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Fern=C3=A1ndez=20Rojas?= Date: Mon, 9 Sep 2024 00:56:29 +0200 Subject: [PATCH 1614/1635] Update aioairzone to v0.9.1 (#125547) --- homeassistant/components/airzone/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index a782006efef..eb141fc83b4 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.9.0"] + "requirements": ["aioairzone==0.9.1"] } diff --git a/requirements_all.txt b/requirements_all.txt index 9b6814c5c21..6faecd98f2c 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -179,7 +179,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.5 # homeassistant.components.airzone -aioairzone==0.9.0 +aioairzone==0.9.1 # homeassistant.components.ambient_network # homeassistant.components.ambient_station diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2c8471b768a..6f81bba44f9 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -167,7 +167,7 @@ aioairq==0.3.2 aioairzone-cloud==0.6.5 # homeassistant.components.airzone -aioairzone==0.9.0 +aioairzone==0.9.1 # homeassistant.components.ambient_network # homeassistant.components.ambient_station From 0592a39164d964ae16fd9dd010017ff9b9fa40aa Mon Sep 17 00:00:00 2001 From: "J. 
Nick Koston" Date: Sun, 8 Sep 2024 19:24:57 -0500 Subject: [PATCH 1615/1635] Fix building multidict binary wheels on armv7 and armhf (#125550) Fix building multidict wheels on armv7 and armhf This is the same fix as we needed for yarl The armv7 and armhf wheels are missing for multidict 6.0.5 https://wheels.home-assistant.io/musllinux/ --- .github/workflows/wheels.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index fcd71cbec32..20dd2054c6e 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -140,7 +140,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "libffi-dev;openssl-dev;yaml-dev;nasm" - skip-binary: aiohttp;yarl + skip-binary: aiohttp;multidict;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements.txt" @@ -212,7 +212,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_old-cython.txt" @@ -227,7 +227,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" @@ -241,7 +241,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" @@ -255,7 +255,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: 
"bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl + skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;protobuf;pydantic;pymicro-vad;yarl constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" From 391de22342185c41fb484f17a5030d546e89b9b2 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Sun, 8 Sep 2024 19:25:10 -0500 Subject: [PATCH 1616/1635] Bump yarl to 1.11.0 (#125549) changelog: https://github.com/aio-libs/yarl/compare/v1.10.0...v1.11.0 --- homeassistant/package_constraints.txt | 2 +- pyproject.toml | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index e043740e15a..af3545b0f1d 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -62,7 +62,7 @@ urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.10.0 +yarl==1.11.0 zeroconf==0.134.0 # Constrain pycryptodome to avoid vulnerability diff --git a/pyproject.toml b/pyproject.toml index 358abd934be..c6ec12cc860 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.10.0", + "yarl==1.11.0", ] [project.urls] diff --git a/requirements.txt b/requirements.txt index ba7b89bd9e9..48a9c297373 100644 --- a/requirements.txt +++ b/requirements.txt @@ -41,4 +41,4 @@ urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.10.0 +yarl==1.11.0 From a85ccb94e36f31d1a9d122f9191dbbf3cd7afc27 Mon Sep 17 00:00:00 2001 From: Denis Shulyaka Date: Mon, 9 Sep 2024 04:42:51 +0300 Subject: [PATCH 1617/1635] LLM Tool parameters check (#123621) * LLM Tool parameters check * fix tests --- homeassistant/helpers/llm.py | 5 +++++ .../google_generative_ai_conversation/test_conversation.py | 4 ++-- tests/components/ollama/test_conversation.py | 5 +++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py index e37aa0c532d..0c173df81ff 100644 --- a/homeassistant/helpers/llm.py +++ b/homeassistant/helpers/llm.py @@ -177,6 +177,11 @@ class APIInstance: else: raise HomeAssistantError(f'Tool "{tool_input.tool_name}" not found') + tool_input = ToolInput( + tool_name=tool_input.tool_name, + tool_args=tool.parameters(tool_input.tool_args), + ) + return await tool.async_call(self.api.hass, tool_input, self.llm_context) diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index 1ea5c2ad9b8..4192a60513e 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -212,7 +212,7 @@ async def test_function_call( name="test_tool", args={ "param1": ["test_value", "param1\\'s value"], - "param2": "param2\\'s value", + "param2": 2.7, }, ) @@ 
-258,7 +258,7 @@ async def test_function_call( tool_name="test_tool", tool_args={ "param1": ["test_value", "param1's value"], - "param2": "param2's value", + "param2": 2.7, }, ), llm.LLMContext( diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index 6c34b8e0052..66dc8a0c603 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -121,7 +121,7 @@ async def test_template_variables( ("tool_args", "expected_tool_args"), [ ({"param1": "test_value"}, {"param1": "test_value"}), - ({"param1": 2}, {"param1": 2}), + ({"param2": 2}, {"param2": 2}), ( {"param1": "test_value", "floor": ""}, {"param1": "test_value"}, # Omit empty arguments @@ -153,7 +153,8 @@ async def test_function_call( mock_tool.name = "test_tool" mock_tool.description = "Test function" mock_tool.parameters = vol.Schema( - {vol.Optional("param1", description="Test parameters"): str} + {vol.Optional("param1", description="Test parameters"): str}, + extra=vol.ALLOW_EXTRA, ) mock_tool.async_call.return_value = "Test response" From 8884465262889bab6aefe229780921773bb05c46 Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Sun, 8 Sep 2024 21:22:35 -0500 Subject: [PATCH 1618/1635] ESPHome media proxy (#123254) * Add ffmpeg proxy view * Add tests * Add proxy to media player * Add proxy test * Only allow one ffmpeg proc per device * Incorporate feedback * Fix tests * address comments * Fix test * Update paths without auth const --------- Co-authored-by: Paulus Schoutsen --- homeassistant/components/esphome/__init__.py | 10 +- homeassistant/components/esphome/const.py | 2 + .../components/esphome/ffmpeg_proxy.py | 227 ++++++++++++++++++ .../components/esphome/manifest.json | 2 +- .../components/esphome/media_player.py | 79 +++++- .../components/media_player/browse_media.py | 2 +- tests/components/esphome/test_ffmpeg_proxy.py | 111 +++++++++ tests/components/esphome/test_media_player.py | 127 +++++++++- 8 files changed, 553 insertions(+), 7 deletions(-) create mode 100644 homeassistant/components/esphome/ffmpeg_proxy.py create mode 100644 tests/components/esphome/test_ffmpeg_proxy.py diff --git a/homeassistant/components/esphome/__init__.py b/homeassistant/components/esphome/__init__.py index b06fcd4bab0..13e9496a9fd 100644 --- a/homeassistant/components/esphome/__init__.py +++ b/homeassistant/components/esphome/__init__.py @@ -4,7 +4,7 @@ from __future__ import annotations from aioesphomeapi import APIClient -from homeassistant.components import zeroconf +from homeassistant.components import ffmpeg, zeroconf from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -15,12 +15,13 @@ from homeassistant.core import HomeAssistant import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from .const import CONF_NOISE_PSK, DOMAIN +from .const import CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN from .dashboard import async_setup as async_setup_dashboard from .domain_data import DomainData # Import config flow so that it's added to the registry from .entry_data import ESPHomeConfigEntry, RuntimeEntryData +from .ffmpeg_proxy import FFmpegProxyData, FFmpegProxyView from .manager import ESPHomeManager, cleanup_instance CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) @@ -30,7 +31,12 @@ CLIENT_INFO = f"Home Assistant {ha_version}" async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the esphome component.""" + proxy_data = hass.data[DATA_FFMPEG_PROXY] = 
FFmpegProxyData() + await async_setup_dashboard(hass) + hass.http.register_view( + FFmpegProxyView(ffmpeg.get_ffmpeg_manager(hass), proxy_data) + ) return True diff --git a/homeassistant/components/esphome/const.py b/homeassistant/components/esphome/const.py index 9c09591f6ea..143aaa6342a 100644 --- a/homeassistant/components/esphome/const.py +++ b/homeassistant/components/esphome/const.py @@ -18,3 +18,5 @@ PROJECT_URLS = { "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/", } DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_VERSION_STR}.html" + +DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy" diff --git a/homeassistant/components/esphome/ffmpeg_proxy.py b/homeassistant/components/esphome/ffmpeg_proxy.py new file mode 100644 index 00000000000..d2f538bfbd5 --- /dev/null +++ b/homeassistant/components/esphome/ffmpeg_proxy.py @@ -0,0 +1,227 @@ +"""HTTP view that converts audio from a URL to a preferred format.""" + +import asyncio +from collections import defaultdict +from dataclasses import dataclass, field +from http import HTTPStatus +import logging +import secrets + +from aiohttp import web +from aiohttp.abc import AbstractStreamWriter, BaseRequest + +from homeassistant.components.ffmpeg import FFmpegManager +from homeassistant.components.http import HomeAssistantView +from homeassistant.core import HomeAssistant + +from .const import DATA_FFMPEG_PROXY + +_LOGGER = logging.getLogger(__name__) + + +def async_create_proxy_url( + hass: HomeAssistant, + device_id: str, + media_url: str, + media_format: str, + rate: int | None = None, + channels: int | None = None, +) -> str: + """Create a one-time use proxy URL that automatically converts the media.""" + data: FFmpegProxyData = hass.data[DATA_FFMPEG_PROXY] + return data.async_create_proxy_url( + device_id, media_url, media_format, rate, channels + ) + + +@dataclass +class FFmpegConversionInfo: + """Information for ffmpeg conversion.""" + + url: str + """Source URL of media to convert.""" + + media_format: str + """Target format for media (mp3, flac, etc.)""" + + rate: int | None + """Target sample rate (None to keep source rate).""" + + channels: int | None + """Target number of channels (None to keep source channels).""" + + +@dataclass +class FFmpegProxyData: + """Data for ffmpeg proxy conversion.""" + + # device_id -> convert_id -> info + conversions: dict[str, dict[str, FFmpegConversionInfo]] = field( + default_factory=lambda: defaultdict(dict) + ) + + # device_id -> process + processes: dict[str, asyncio.subprocess.Process] = field(default_factory=dict) + + def async_create_proxy_url( + self, + device_id: str, + media_url: str, + media_format: str, + rate: int | None, + channels: int | None, + ) -> str: + """Create a one-time use proxy URL that automatically converts the media.""" + convert_id = secrets.token_urlsafe(16) + self.conversions[device_id][convert_id] = FFmpegConversionInfo( + media_url, media_format, rate, channels + ) + _LOGGER.debug("Media URL allowed by proxy: %s", media_url) + + return f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.{media_format}" + + +class FFmpegConvertResponse(web.StreamResponse): + """HTTP streaming response that uses ffmpeg to convert audio from a URL.""" + + def __init__( + self, + manager: FFmpegManager, + convert_info: FFmpegConversionInfo, + device_id: str, + proxy_data: FFmpegProxyData, + chunk_size: int = 2048, + ) -> None: + """Initialize response. 
+ + Parameters + ---------- + manager: FFmpegManager + ffmpeg manager + convert_info: FFmpegConversionInfo + Information necessary to do the conversion + device_id: str + ESPHome device id + proxy_data: FFmpegProxyData + Data object to store ffmpeg process + chunk_size: int + Number of bytes to read from ffmpeg process at a time + + """ + super().__init__(status=200) + self.hass = manager.hass + self.manager = manager + self.convert_info = convert_info + self.device_id = device_id + self.proxy_data = proxy_data + self.chunk_size = chunk_size + + async def prepare(self, request: BaseRequest) -> AbstractStreamWriter | None: + """Stream url through ffmpeg conversion and out to HTTP client.""" + writer = await super().prepare(request) + assert writer is not None + + command_args = [ + "-i", + self.convert_info.url, + "-f", + self.convert_info.media_format, + ] + + if self.convert_info.rate is not None: + # Sample rate + command_args.extend(["-ar", str(self.convert_info.rate)]) + + if self.convert_info.channels is not None: + # Number of channels + command_args.extend(["-ac", str(self.convert_info.channels)]) + + # Output to stdout + command_args.append("pipe:") + + _LOGGER.debug("%s %s", self.manager.binary, " ".join(command_args)) + proc = await asyncio.create_subprocess_exec( + self.manager.binary, + *command_args, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + + assert proc.stdout is not None + assert proc.stderr is not None + + # Only one conversion process per device is allowed + self.proxy_data.processes[self.device_id] = proc + + try: + # Pull audio chunks from ffmpeg and pass them to the HTTP client + while ( + self.hass.is_running + and (request.transport is not None) + and (not request.transport.is_closing()) + and (proc.returncode is None) + and (chunk := await proc.stdout.read(self.chunk_size)) + ): + await writer.write(chunk) + await writer.drain() + finally: + # Close connection + await writer.write_eof() + + # Terminate hangs, so kill is used + proc.kill() + + if proc.returncode != 0: + # Process did not exit successfully + stderr_text = "" + while line := await proc.stderr.readline(): + stderr_text += line.decode() + _LOGGER.error("Error shutting down ffmpeg: %s", stderr_text) + else: + _LOGGER.debug("Conversion completed: %s", self.convert_info) + + return writer + + +class FFmpegProxyView(HomeAssistantView): + """FFmpeg web view to convert audio and stream back to client.""" + + requires_auth = False + url = "/api/esphome/ffmpeg_proxy/{device_id}/{filename}" + name = "api:esphome:ffmpeg_proxy" + + def __init__(self, manager: FFmpegManager, proxy_data: FFmpegProxyData) -> None: + """Initialize an ffmpeg view.""" + self.manager = manager + self.proxy_data = proxy_data + + async def get( + self, request: web.Request, device_id: str, filename: str + ) -> web.StreamResponse: + """Start a get request.""" + + # {id}.mp3 -> id + convert_id = filename.rsplit(".")[0] + + try: + convert_info = self.proxy_data.conversions[device_id].pop(convert_id) + except KeyError: + _LOGGER.error( + "Unrecognized convert id %s for device: %s", convert_id, device_id + ) + return web.Response( + body="Convert id not recognized", status=HTTPStatus.BAD_REQUEST + ) + + # Stop any existing process + proc = self.proxy_data.processes.pop(device_id, None) + if (proc is not None) and (proc.returncode is None): + _LOGGER.debug("Stopping existing ffmpeg process for device: %s", device_id) + + # Terminate hangs, so kill is used + proc.kill() + + # Stream converted audio back to client + 
return FFmpegConvertResponse( + self.manager, convert_info, device_id, self.proxy_data + ) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 233015b13ba..fea443635a4 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -4,7 +4,7 @@ "after_dependencies": ["zeroconf", "tag"], "codeowners": ["@OttoWinter", "@jesserockz", "@kbx81", "@bdraco"], "config_flow": true, - "dependencies": ["assist_pipeline", "bluetooth", "intent"], + "dependencies": ["assist_pipeline", "bluetooth", "intent", "ffmpeg", "http"], "dhcp": [ { "registered_devices": true diff --git a/homeassistant/components/esphome/media_player.py b/homeassistant/components/esphome/media_player.py index 4d57552bb19..d742029bcef 100644 --- a/homeassistant/components/esphome/media_player.py +++ b/homeassistant/components/esphome/media_player.py @@ -3,14 +3,18 @@ from __future__ import annotations from functools import partial +import logging from typing import Any, cast +from urllib.parse import urlparse from aioesphomeapi import ( EntityInfo, MediaPlayerCommand, MediaPlayerEntityState, + MediaPlayerFormatPurpose, MediaPlayerInfo, MediaPlayerState as EspMediaPlayerState, + MediaPlayerSupportedFormat, ) from homeassistant.components import media_source @@ -34,6 +38,9 @@ from .entity import ( platform_async_setup_entry, ) from .enum_mapper import EsphomeEnumMapper +from .ffmpeg_proxy import async_create_proxy_url + +_LOGGER = logging.getLogger(__name__) _STATES: EsphomeEnumMapper[EspMediaPlayerState, MediaPlayerState] = EsphomeEnumMapper( { @@ -66,7 +73,7 @@ class EsphomeMediaPlayer( if self._static_info.supports_pause: flags |= MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY self._attr_supported_features = flags - self._entry_data.media_player_formats[self.entity_id] = cast( + self._entry_data.media_player_formats[static_info.unique_id] = cast( MediaPlayerInfo, static_info ).supported_formats @@ -102,6 +109,22 @@ class EsphomeMediaPlayer( media_id = async_process_play_media_url(self.hass, media_id) announcement = kwargs.get(ATTR_MEDIA_ANNOUNCE) + supported_formats: list[MediaPlayerSupportedFormat] | None = ( + self._entry_data.media_player_formats.get(self._static_info.unique_id) + ) + + if ( + supported_formats + and _is_url(media_id) + and ( + proxy_url := self._get_proxy_url( + supported_formats, media_id, announcement is True + ) + ) + ): + # Substitute proxy URL + media_id = proxy_url + self._client.media_player_command( self._key, media_url=media_id, announcement=announcement ) @@ -111,6 +134,54 @@ class EsphomeMediaPlayer( await super().async_will_remove_from_hass() self._entry_data.media_player_formats.pop(self.entity_id, None) + def _get_proxy_url( + self, + supported_formats: list[MediaPlayerSupportedFormat], + url: str, + announcement: bool, + ) -> str | None: + """Get URL for ffmpeg proxy.""" + if self.device_entry is None: + # Device id is required + return None + + # Choose the first default or announcement supported format + format_to_use: MediaPlayerSupportedFormat | None = None + for supported_format in supported_formats: + if (format_to_use is None) and ( + supported_format.purpose == MediaPlayerFormatPurpose.DEFAULT + ): + # First default format + format_to_use = supported_format + elif announcement and ( + supported_format.purpose == MediaPlayerFormatPurpose.ANNOUNCEMENT + ): + # First announcement format + format_to_use = supported_format + break + + if format_to_use is None: + # 
No format for conversion + return None + + # Replace the media URL with a proxy URL pointing to Home + # Assistant. When requested, Home Assistant will use ffmpeg to + # convert the source URL to the supported format. + _LOGGER.debug("Proxying media url %s with format %s", url, format_to_use) + device_id = self.device_entry.id + media_format = format_to_use.format + proxy_url = async_create_proxy_url( + self.hass, + device_id, + url, + media_format=media_format, + rate=format_to_use.sample_rate, + channels=format_to_use.num_channels, + ) + + # Resolve URL + return async_process_play_media_url(self.hass, proxy_url) + async def async_browse_media( self, media_content_type: MediaType | str | None = None, @@ -152,6 +223,12 @@ class EsphomeMediaPlayer( ) +def _is_url(url: str) -> bool: + """Validate the URL can be parsed and at least has scheme + netloc.""" + result = urlparse(url) + return all([result.scheme, result.netloc]) + + async_setup_entry = partial( platform_async_setup_entry, info_type=MediaPlayerInfo, diff --git a/homeassistant/components/media_player/browse_media.py b/homeassistant/components/media_player/browse_media.py index 351d4e9140f..e1c2fa37ca0 100644 --- a/homeassistant/components/media_player/browse_media.py +++ b/homeassistant/components/media_player/browse_media.py @@ -23,7 +23,7 @@ from homeassistant.helpers.network import ( from .const import CONTENT_AUTH_EXPIRY_TIME, MediaClass, MediaType # Paths that we don't need to sign -PATHS_WITHOUT_AUTH = ("/api/tts_proxy/",) +PATHS_WITHOUT_AUTH = ("/api/tts_proxy/", "/api/esphome/ffmpeg_proxy/") @callback diff --git a/tests/components/esphome/test_ffmpeg_proxy.py b/tests/components/esphome/test_ffmpeg_proxy.py new file mode 100644 index 00000000000..577126201df --- /dev/null +++ b/tests/components/esphome/test_ffmpeg_proxy.py @@ -0,0 +1,111 @@ +"""Tests for ffmpeg proxy view.""" + +from http import HTTPStatus +import io +import tempfile +from unittest.mock import patch +from urllib.request import pathname2url +import wave + +import mutagen + +from homeassistant.components import esphome +from homeassistant.components.esphome.ffmpeg_proxy import async_create_proxy_url +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.typing import ClientSessionGenerator + + +async def test_async_create_proxy_url(hass: HomeAssistant) -> None: + """Test that async_create_proxy_url returns the correct format.""" + assert await async_setup_component(hass, "esphome", {}) + + device_id = "test-device" + convert_id = "test-id" + media_format = "flac" + media_url = "http://127.0.0.1/test.mp3" + proxy_url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.{media_format}" + + with patch( + "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", + return_value=convert_id, + ): + assert ( + async_create_proxy_url(hass, device_id, media_url, media_format) + == proxy_url + ) + + +async def test_proxy_view( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test proxy HTTP view for converting audio.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + with tempfile.NamedTemporaryFile(mode="wb+", suffix=".wav") as temp_file: + with wave.open(temp_file.name, "wb") as wav_file: + wav_file.setframerate(16000) + wav_file.setsampwidth(2) + wav_file.setnchannels(1) + wav_file.writeframes(bytes(16000 * 2)) # 1s + + temp_file.seek(0) + wav_url = pathname2url(temp_file.name) + 
convert_id = "test-id" + url = f"/api/esphome/ffmpeg_proxy/{device_id}/{convert_id}.mp3" + + # Should fail because we haven't allowed the URL yet + req = await client.get(url) + assert req.status == HTTPStatus.BAD_REQUEST + + # Allow the URL + with patch( + "homeassistant.components.esphome.ffmpeg_proxy.secrets.token_urlsafe", + return_value=convert_id, + ): + assert ( + async_create_proxy_url( + hass, device_id, wav_url, media_format="mp3", rate=22050, channels=2 + ) + == url + ) + + req = await client.get(url) + assert req.status == HTTPStatus.OK + + mp3_data = await req.content.read() + + # Verify conversion + with io.BytesIO(mp3_data) as mp3_io: + mp3_file = mutagen.File(mp3_io) + assert mp3_file.info.sample_rate == 22050 + assert mp3_file.info.channels == 2 + + # About a second, but not exact + assert round(mp3_file.info.length, 0) == 1 + + +async def test_ffmpeg_error( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, +) -> None: + """Test proxy HTTP view with an ffmpeg error.""" + device_id = "1234" + + await async_setup_component(hass, esphome.DOMAIN, {esphome.DOMAIN: {}}) + client = await hass_client() + + # Try to convert a file that doesn't exist + url = async_create_proxy_url(hass, device_id, "missing-file", media_format="mp3") + req = await client.get(url) + + # The HTTP status is OK because the ffmpeg process started, but no data is + # returned. + assert req.status == HTTPStatus.OK + mp3_data = await req.content.read() + assert not mp3_data diff --git a/tests/components/esphome/test_media_player.py b/tests/components/esphome/test_media_player.py index 3879129ccb6..e859324b394 100644 --- a/tests/components/esphome/test_media_player.py +++ b/tests/components/esphome/test_media_player.py @@ -1,13 +1,19 @@ """Test ESPHome media_players.""" +from collections.abc import Awaitable, Callable from unittest.mock import AsyncMock, Mock, call, patch from aioesphomeapi import ( APIClient, + EntityInfo, + EntityState, MediaPlayerCommand, MediaPlayerEntityState, + MediaPlayerFormatPurpose, MediaPlayerInfo, MediaPlayerState, + MediaPlayerSupportedFormat, + UserService, ) import pytest @@ -31,8 +37,11 @@ from homeassistant.components.media_player import ( ) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant +import homeassistant.helpers.device_registry as dr from homeassistant.setup import async_setup_component +from .conftest import MockESPHomeDevice + from tests.common import mock_platform from tests.typing import WebSocketGenerator @@ -55,7 +64,7 @@ async def test_media_player_entity( key=1, volume=50, muted=True, state=MediaPlayerState.PAUSED ) ] - user_service = [] + user_service: list[UserService] = [] await mock_generic_device_entry( mock_client=mock_client, entity_info=entity_info, @@ -200,7 +209,7 @@ async def test_media_player_entity_with_source( key=1, volume=50, muted=True, state=MediaPlayerState.PLAYING ) ] - user_service = [] + user_service: list[UserService] = [] await mock_generic_device_entry( mock_client=mock_client, entity_info=entity_info, @@ -277,3 +286,117 @@ async def test_media_player_entity_with_source( mock_client.media_player_command.assert_has_calls( [call(1, media_url="media-source://tts?message=hello", announcement=True)] ) + + +async def test_media_player_proxy( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test a 
media_player entity with a proxy URL.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[ + MediaPlayerInfo( + object_id="mymedia_player", + key=1, + name="my media_player", + unique_id="my_media_player", + supports_pause=True, + supported_formats=[ + MediaPlayerSupportedFormat( + format="flac", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.DEFAULT, + ), + MediaPlayerSupportedFormat( + format="wav", + sample_rate=16000, + num_channels=1, + purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT, + ), + MediaPlayerSupportedFormat( + format="mp3", + sample_rate=48000, + num_channels=2, + purpose=MediaPlayerFormatPurpose.DEFAULT, + ), + ], + ) + ], + user_service=[], + states=[ + MediaPlayerEntityState( + key=1, volume=50, muted=False, state=MediaPlayerState.PAUSED + ) + ], + ) + await hass.async_block_till_done() + dev = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, mock_device.entry.unique_id)} + ) + assert dev is not None + state = hass.states.get("media_player.test_mymedia_player") + assert state is not None + assert state.state == "paused" + + media_url = "http://127.0.0.1/test.mp3" + proxy_url = f"/api/esphome/ffmpeg_proxy/{dev.id}/test-id.flac" + + with ( + patch( + "homeassistant.components.esphome.media_player.async_create_proxy_url", + return_value=proxy_url, + ) as mock_async_create_proxy_url, + ): + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: media_url, + }, + blocking=True, + ) + + # Should be the default format + mock_async_create_proxy_url.assert_called_once() + device_id = mock_async_create_proxy_url.call_args[0][1] + mock_async_create_proxy_url.assert_called_once_with( + hass, device_id, media_url, media_format="flac", rate=48000, channels=2 + ) + + media_args = mock_client.media_player_command.call_args.kwargs + assert not media_args["announcement"] + + # Reset + mock_async_create_proxy_url.reset_mock() + + # Set announcement flag + await hass.services.async_call( + MEDIA_PLAYER_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.test_mymedia_player", + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: media_url, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + + # Should be the announcement format + mock_async_create_proxy_url.assert_called_once() + device_id = mock_async_create_proxy_url.call_args[0][1] + mock_async_create_proxy_url.assert_called_once_with( + hass, device_id, media_url, media_format="wav", rate=16000, channels=1 + ) + + media_args = mock_client.media_player_command.call_args.kwargs + assert media_args["announcement"] From d88487e30be24f32a99d958ebb7de597f17710a1 Mon Sep 17 00:00:00 2001 From: Jesse Hills <3060199+jesserockz@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:54:18 +1200 Subject: [PATCH 1619/1635] Bump aioesphomeapi to 25.4.0 (#125554) --- homeassistant/components/esphome/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index fea443635a4..f18d6e7cc68 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ 
- "aioesphomeapi==25.3.2", + "aioesphomeapi==25.4.0", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/requirements_all.txt b/requirements_all.txt index 6faecd98f2c..6b902f76efa 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.3.2 +aioesphomeapi==25.4.0 # homeassistant.components.flo aioflo==2021.11.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 6f81bba44f9..c6d723135cb 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.3.2 +aioesphomeapi==25.4.0 # homeassistant.components.flo aioflo==2021.11.0 From 2a1df2063df6c0021bb2667dccf9667c6363d5dd Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 9 Sep 2024 08:16:30 +0200 Subject: [PATCH 1620/1635] Separate recorder test fixtures disabling context id migration (#125324) * Separate recorder test fixtures disabling context id migration * Fix test --- .../recorder/test_migration_from_schema_32.py | 4 ++-- ..._migration_run_time_migrations_remember.py | 6 ++++-- .../components/recorder/test_v32_migration.py | 3 ++- tests/conftest.py | 21 ++++++++++++++----- 4 files changed, 24 insertions(+), 10 deletions(-) diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index cdbbd7ec4e4..95146b970f3 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -118,7 +118,7 @@ def db_schema_32(): yield -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) @pytest.mark.usefixtures("db_schema_32") async def test_migrate_events_context_ids( hass: HomeAssistant, recorder_mock: Recorder @@ -338,7 +338,7 @@ async def test_migrate_events_context_ids( assert get_index_by_name(session, "events", "ix_events_context_id") is None -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @pytest.mark.usefixtures("db_schema_32") async def test_migrate_states_context_ids( hass: HomeAssistant, recorder_mock: Recorder diff --git a/tests/components/recorder/test_migration_run_time_migrations_remember.py b/tests/components/recorder/test_migration_run_time_migrations_remember.py index bdd881a3a7b..880e4d6d61e 100644 --- a/tests/components/recorder/test_migration_run_time_migrations_remember.py +++ b/tests/components/recorder/test_migration_run_time_migrations_remember.py @@ -72,7 +72,7 @@ def _create_engine_test(*args, **kwargs): return engine -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_migration_changes_prevent_trying_to_migrate_again( @@ -173,4 +173,6 @@ async def test_migration_changes_prevent_trying_to_migrate_again( await hass.async_stop() for task in tasks: - assert not isinstance(task, MigrationTask) + if not isinstance(task, MigrationTask): + continue + assert not isinstance(task.migrator, migration.StatesContextIDMigration) diff --git a/tests/components/recorder/test_v32_migration.py 
b/tests/components/recorder/test_v32_migration.py index 60f223aaa91..9a616959174 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -60,7 +60,8 @@ def _create_engine_test(schema_module: str) -> Callable: return _create_engine_test -@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) +@pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) @pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) @pytest.mark.parametrize("enable_migrate_entity_ids", [True]) @pytest.mark.parametrize("persistent_database", [True]) diff --git a/tests/conftest.py b/tests/conftest.py index df183f955cb..178fdd74a69 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1293,11 +1293,21 @@ def enable_nightly_purge() -> bool: @pytest.fixture -def enable_migrate_context_ids() -> bool: +def enable_migrate_event_context_ids() -> bool: """Fixture to control enabling of recorder's context id migration. To enable context id migration, tests can be marked with: - @pytest.mark.parametrize("enable_migrate_context_ids", [True]) + @pytest.mark.parametrize("enable_migrate_event_context_ids", [True]) + """ + return False + + +@pytest.fixture +def enable_migrate_state_context_ids() -> bool: + """Fixture to control enabling of recorder's context id migration. + + To enable context id migration, tests can be marked with: + @pytest.mark.parametrize("enable_migrate_state_context_ids", [True]) """ return False @@ -1465,7 +1475,8 @@ async def async_test_recorder( enable_statistics: bool, enable_missing_statistics: bool, enable_schema_validation: bool, - enable_migrate_context_ids: bool, + enable_migrate_event_context_ids: bool, + enable_migrate_state_context_ids: bool, enable_migrate_event_type_ids: bool, enable_migrate_entity_ids: bool, enable_migrate_event_ids: bool, @@ -1527,12 +1538,12 @@ async def async_test_recorder( ) migrate_states_context_ids = ( migration.StatesContextIDMigration.migrate_data - if enable_migrate_context_ids + if enable_migrate_state_context_ids else None ) migrate_events_context_ids = ( migration.EventsContextIDMigration.migrate_data - if enable_migrate_context_ids + if enable_migrate_event_context_ids else None ) migrate_event_type_ids = ( From 17ab45da43421ed2bfd5aa4e16c7d6681ada1560 Mon Sep 17 00:00:00 2001 From: Chris Brouwer <7203501+cbrouwer@users.noreply.github.com> Date: Mon, 9 Sep 2024 08:36:59 +0200 Subject: [PATCH 1621/1635] Fix support for Heat meters to DSMR integration (#125523) * Fix support for Heat meters to DSMR integration * Fixed test --- homeassistant/components/dsmr/const.py | 1 + homeassistant/components/dsmr/sensor.py | 26 +++++++++- tests/components/dsmr/test_sensor.py | 68 +++++++++++++++++++++++++ 3 files changed, 94 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/dsmr/const.py b/homeassistant/components/dsmr/const.py index 7f5813cda7f..4c6cb31ca4d 100644 --- a/homeassistant/components/dsmr/const.py +++ b/homeassistant/components/dsmr/const.py @@ -26,6 +26,7 @@ DEFAULT_TIME_BETWEEN_UPDATE = 30 DEVICE_NAME_ELECTRICITY = "Electricity Meter" DEVICE_NAME_GAS = "Gas Meter" DEVICE_NAME_WATER = "Water Meter" +DEVICE_NAME_HEAT = "Heat Meter" DSMR_VERSIONS = {"2.2", "4", "5", "5B", "5L", "5S", "Q3D"} diff --git a/homeassistant/components/dsmr/sensor.py b/homeassistant/components/dsmr/sensor.py index 77c40c5c292..b76736a1101 100644 --- a/homeassistant/components/dsmr/sensor.py +++ 
b/homeassistant/components/dsmr/sensor.py @@ -57,6 +57,7 @@ from .const import ( DEFAULT_TIME_BETWEEN_UPDATE, DEVICE_NAME_ELECTRICITY, DEVICE_NAME_GAS, + DEVICE_NAME_HEAT, DEVICE_NAME_WATER, DOMAIN, DSMR_PROTOCOL, @@ -75,6 +76,7 @@ class DSMRSensorEntityDescription(SensorEntityDescription): dsmr_versions: set[str] | None = None is_gas: bool = False is_water: bool = False + is_heat: bool = False obis_reference: str @@ -82,6 +84,7 @@ class MbusDeviceType(IntEnum): """Types of mbus devices (13757-3:2013).""" GAS = 3 + HEAT = 4 WATER = 7 @@ -396,6 +399,16 @@ SENSORS_MBUS_DEVICE_TYPE: dict[int, tuple[DSMRSensorEntityDescription, ...]] = { state_class=SensorStateClass.TOTAL_INCREASING, ), ), + MbusDeviceType.HEAT: ( + DSMRSensorEntityDescription( + key="heat_reading", + translation_key="heat_meter_reading", + obis_reference="MBUS_METER_READING", + is_heat=True, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + ), MbusDeviceType.WATER: ( DSMRSensorEntityDescription( key="water_reading", @@ -490,6 +503,10 @@ def create_mbus_entities( continue type_ = int(device_type.value) + if type_ not in SENSORS_MBUS_DEVICE_TYPE: + LOGGER.warning("Unsupported MBUS_DEVICE_TYPE (%d)", type_) + continue + if identifier := getattr(device, "MBUS_EQUIPMENT_IDENTIFIER", None): serial_ = identifier.value rename_old_gas_to_mbus(hass, entry, serial_) @@ -554,7 +571,10 @@ async def async_setup_entry( ) for description in SENSORS if is_supported_description(telegram, description, dsmr_version) - and (not description.is_gas or CONF_SERIAL_ID_GAS in entry.data) + and ( + (not description.is_gas and not description.is_heat) + or CONF_SERIAL_ID_GAS in entry.data + ) ] ) async_add_entities(entities) @@ -743,6 +763,10 @@ class DSMREntity(SensorEntity): if serial_id: device_serial = serial_id device_name = DEVICE_NAME_WATER + if entity_description.is_heat: + if serial_id: + device_serial = serial_id + device_name = DEVICE_NAME_HEAT if device_serial is None: device_serial = entry.entry_id diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index c2c6d48b007..4a2951f4ed8 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -1521,6 +1521,74 @@ async def test_gas_meter_providing_energy_reading( ) +async def test_heat_meter_mbus( + hass: HomeAssistant, dsmr_connection_fixture: tuple[MagicMock, MagicMock, MagicMock] +) -> None: + """Test if heat meter reading is correctly parsed.""" + (connection_factory, transport, protocol) = dsmr_connection_fixture + + entry_data = { + "port": "/dev/ttyUSB0", + "dsmr_version": "5", + "serial_id": "1234", + "serial_id_gas": None, + } + entry_options = { + "time_between_update": 0, + } + + telegram = Telegram() + telegram.add( + MBUS_DEVICE_TYPE, + CosemObject((0, 1), [{"value": "004", "unit": ""}]), + "MBUS_DEVICE_TYPE", + ) + telegram.add( + MBUS_METER_READING, + MBusObject( + (0, 1), + [ + {"value": datetime.datetime.fromtimestamp(1551642213)}, + {"value": Decimal(745.695), "unit": "GJ"}, + ], + ), + "MBUS_METER_READING", + ) + + mock_entry = MockConfigEntry( + domain="dsmr", unique_id="/dev/ttyUSB0", data=entry_data, options=entry_options + ) + + hass.loop.set_debug(True) + mock_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_entry.entry_id) + await hass.async_block_till_done() + + telegram_callback = connection_factory.call_args_list[0][0][2] + + # simulate a telegram pushed from the smartmeter and parsed by dsmr_parser + telegram_callback(telegram) 
+ + # after receiving telegram entities need to have the chance to be created + await hass.async_block_till_done() + + # check if gas consumption is parsed correctly + heat_consumption = hass.states.get("sensor.heat_meter_energy") + assert heat_consumption.state == "745.695" + assert ( + heat_consumption.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.ENERGY + ) + assert ( + heat_consumption.attributes.get("unit_of_measurement") + == UnitOfEnergy.GIGA_JOULE + ) + assert ( + heat_consumption.attributes.get(ATTR_STATE_CLASS) + == SensorStateClass.TOTAL_INCREASING + ) + + def test_all_obis_references_exists() -> None: """Verify that all attributes exist by name in database.""" for sensor in SENSORS: From 713689491b98cbac978b9a3cc88a6ed976afc9b9 Mon Sep 17 00:00:00 2001 From: Matthias Alphart Date: Mon, 9 Sep 2024 09:01:21 +0200 Subject: [PATCH 1622/1635] Remove KNX yaml config from `hass.data` (#124050) * Remove KNX yaml config from `hass.data` * Use HassKey --- homeassistant/components/knx/__init__.py | 52 +++++++++---------- homeassistant/components/knx/binary_sensor.py | 9 ++-- homeassistant/components/knx/button.py | 11 ++-- homeassistant/components/knx/climate.py | 6 +-- homeassistant/components/knx/config_flow.py | 5 +- homeassistant/components/knx/const.py | 9 ++-- homeassistant/components/knx/cover.py | 6 +-- homeassistant/components/knx/date.py | 7 ++- homeassistant/components/knx/datetime.py | 7 ++- .../components/knx/device_trigger.py | 9 ++-- homeassistant/components/knx/diagnostics.py | 3 +- homeassistant/components/knx/fan.py | 6 +-- homeassistant/components/knx/light.py | 6 +-- homeassistant/components/knx/notify.py | 11 ++-- homeassistant/components/knx/number.py | 12 ++--- homeassistant/components/knx/scene.py | 6 +-- homeassistant/components/knx/select.py | 7 ++- homeassistant/components/knx/sensor.py | 6 +-- homeassistant/components/knx/services.py | 3 +- homeassistant/components/knx/switch.py | 6 +-- homeassistant/components/knx/text.py | 12 ++--- homeassistant/components/knx/time.py | 7 ++- homeassistant/components/knx/weather.py | 6 +-- homeassistant/components/knx/websocket.py | 7 ++- tests/components/knx/test_button.py | 8 ++- tests/components/knx/test_telegrams.py | 12 +++-- tests/components/knx/test_websocket.py | 17 +++--- 27 files changed, 124 insertions(+), 132 deletions(-) diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index 01d5294639c..736c5f6cb9d 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -5,6 +5,7 @@ from __future__ import annotations import contextlib import logging from pathlib import Path +from typing import Final import voluptuous as vol from xknx import XKNX @@ -59,9 +60,9 @@ from .const import ( CONF_KNX_TUNNELING_TCP, CONF_KNX_TUNNELING_TCP_SECURE, DATA_HASS_CONFIG, - DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, SUPPORTED_PLATFORMS_UI, SUPPORTED_PLATFORMS_YAML, TELEGRAM_LOG_DEFAULT, @@ -97,6 +98,7 @@ from .websocket import register_panel _LOGGER = logging.getLogger(__name__) +_KNX_YAML_CONFIG: Final = "knx_yaml_config" CONFIG_SCHEMA = vol.Schema( { @@ -148,7 +150,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Start the KNX integration.""" hass.data[DATA_HASS_CONFIG] = config if (conf := config.get(DOMAIN)) is not None: - hass.data[DATA_KNX_CONFIG] = dict(conf) + hass.data[_KNX_YAML_CONFIG] = dict(conf) register_knx_services(hass) return True @@ -156,16 +158,12 @@ async def async_setup(hass: 
HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Load a config entry.""" - # `config` is None when reloading the integration - # or no `knx` key in configuration.yaml - if (config := hass.data.get(DATA_KNX_CONFIG)) is None: + # `_KNX_YAML_CONFIG` is only set in async_setup. + # It's None when reloading the integration or no `knx` key in configuration.yaml + config = hass.data.pop(_KNX_YAML_CONFIG, None) + if config is None: _conf = await async_integration_yaml_config(hass, DOMAIN) if not _conf or DOMAIN not in _conf: - _LOGGER.warning( - "No `knx:` key found in configuration.yaml. See " - "https://www.home-assistant.io/integrations/knx/ " - "for KNX entity configuration documentation" - ) # generate defaults config = CONFIG_SCHEMA({DOMAIN: {}})[DOMAIN] else: @@ -176,22 +174,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except XKNXException as ex: raise ConfigEntryNotReady from ex - hass.data[DATA_KNX_CONFIG] = config - hass.data[DOMAIN] = knx_module + hass.data[KNX_MODULE_KEY] = knx_module if CONF_KNX_EXPOSE in config: for expose_config in config[CONF_KNX_EXPOSE]: knx_module.exposures.append( create_knx_exposure(hass, knx_module.xknx, expose_config) ) + configured_platforms_yaml = { + platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config + } await hass.config_entries.async_forward_entry_setups( entry, { Platform.SENSOR, # always forward sensor for system entities (telegram counter, etc.) *SUPPORTED_PLATFORMS_UI, # forward all platforms that support UI entity management - *{ # forward yaml-only managed platforms on demand - platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config - }, + *configured_platforms_yaml, # forward yaml-only managed platforms on demand, }, ) @@ -210,30 +208,30 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unloading the KNX platforms.""" - # if not loaded directly return - if not hass.data.get(DOMAIN): + knx_module = hass.data.get(KNX_MODULE_KEY) + if not knx_module: + # if not loaded directly return return True - knx_module: KNXModule = hass.data[DOMAIN] for exposure in knx_module.exposures: exposure.async_remove() + configured_platforms_yaml = { + platform + for platform in SUPPORTED_PLATFORMS_YAML + if platform in knx_module.config_yaml + } unload_ok = await hass.config_entries.async_unload_platforms( entry, { Platform.SENSOR, # always unload system entities (telegram counter, etc.) 
*SUPPORTED_PLATFORMS_UI, # unload all platforms that support UI entity management - *{ # unload yaml-only managed platforms if configured - platform - for platform in SUPPORTED_PLATFORMS_YAML - if platform in hass.data[DATA_KNX_CONFIG] - }, + *configured_platforms_yaml, # unload yaml-only managed platforms if configured, }, ) if unload_ok: await knx_module.stop() hass.data.pop(DOMAIN) - hass.data.pop(DATA_KNX_CONFIG) return unload_ok @@ -267,7 +265,7 @@ async def async_remove_config_entry_device( hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry ) -> bool: """Remove a config entry from a device.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] if not device_entry.identifiers.isdisjoint( knx_module.interface_device.device_info["identifiers"] ): @@ -287,7 +285,7 @@ class KNXModule: ) -> None: """Initialize KNX module.""" self.hass = hass - self.config = config + self.config_yaml = config self.connected = False self.exposures: list[KNXExposeSensor | KNXExposeTime] = [] self.service_exposures: dict[str, KNXExposeSensor | KNXExposeTime] = {} @@ -489,7 +487,7 @@ class KNXModule: def register_event_callback(self) -> TelegramQueue.Callback: """Register callback for knx_event within XKNX TelegramQueue.""" address_filters = [] - for filter_set in self.config[CONF_EVENT]: + for filter_set in self.config_yaml[CONF_EVENT]: _filters = list(map(AddressFilter, filter_set[KNX_ADDRESS])) address_filters.extend(_filters) if (dpt := filter_set.get(CONF_TYPE)) and ( diff --git a/homeassistant/components/knx/binary_sensor.py b/homeassistant/components/knx/binary_sensor.py index 7d80ca55bf6..ad978dde30e 100644 --- a/homeassistant/components/knx/binary_sensor.py +++ b/homeassistant/components/knx/binary_sensor.py @@ -23,7 +23,7 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import ATTR_COUNTER, ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN +from .const import ATTR_COUNTER, ATTR_SOURCE, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import BinarySensorSchema @@ -34,12 +34,11 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: ConfigType = hass.data[DATA_KNX_CONFIG] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.BINARY_SENSOR] async_add_entities( - KNXBinarySensor(knx_module, entity_config) - for entity_config in config[Platform.BINARY_SENSOR] + KNXBinarySensor(knx_module, entity_config) for entity_config in config ) diff --git a/homeassistant/components/knx/button.py b/homeassistant/components/knx/button.py index f6627fc527b..9a5700917f9 100644 --- a/homeassistant/components/knx/button.py +++ b/homeassistant/components/knx/button.py @@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import CONF_PAYLOAD_LENGTH, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS +from .const import CONF_PAYLOAD_LENGTH, KNX_ADDRESS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity @@ -22,13 +22,10 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the KNX binary sensor platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: ConfigType = hass.data[DATA_KNX_CONFIG] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.BUTTON] - async_add_entities( - KNXButton(knx_module, entity_config) - for entity_config in config[Platform.BUTTON] - ) + async_add_entities(KNXButton(knx_module, entity_config) for entity_config in config) class KNXButton(KnxYamlEntity, ButtonEntity): diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index 4932df55087..05f6a80d2d4 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -31,7 +31,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, DATA_KNX_CONFIG, DOMAIN +from .const import CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import ClimateSchema @@ -45,8 +45,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up climate(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.CLIMATE] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.CLIMATE] async_add_entities( KNXClimate(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index 7e4db1f889b..4a71c600824 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -58,6 +58,7 @@ from .const import ( CONF_KNX_TUNNELING_TCP_SECURE, DEFAULT_ROUTING_IA, DOMAIN, + KNX_MODULE_KEY, TELEGRAM_LOG_DEFAULT, TELEGRAM_LOG_MAX, KNXConfigEntryData, @@ -182,7 +183,9 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): CONF_KNX_ROUTING: CONF_KNX_ROUTING.capitalize(), } - if isinstance(self, OptionsFlow) and (knx_module := self.hass.data.get(DOMAIN)): + if isinstance(self, OptionsFlow) and ( + knx_module := self.hass.data.get(KNX_MODULE_KEY) + ): xknx = knx_module.xknx else: xknx = XKNX() diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index 9ceb18385cb..a7aee794264 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -4,15 +4,20 @@ from __future__ import annotations from collections.abc import Awaitable, Callable from enum import Enum -from typing import Final, TypedDict +from typing import TYPE_CHECKING, Final, TypedDict from xknx.dpt.dpt_20 import HVACControllerMode from xknx.telegram import Telegram from homeassistant.components.climate import HVACAction, HVACMode from homeassistant.const import Platform +from homeassistant.util.hass_dict import HassKey + +if TYPE_CHECKING: + from . 
import KNXModule DOMAIN: Final = "knx" +KNX_MODULE_KEY: HassKey[KNXModule] = HassKey(DOMAIN) # Address is used for configuration and services by the same functions so the key has to match KNX_ADDRESS: Final = "address" @@ -68,8 +73,6 @@ CONF_RESPOND_TO_READ: Final = "respond_to_read" CONF_STATE_ADDRESS: Final = "state_address" CONF_SYNC_STATE: Final = "sync_state" -# yaml config merged with config entry data -DATA_KNX_CONFIG: Final = "knx_config" # original hass yaml config DATA_HASS_CONFIG: Final = "knx_hass_config" diff --git a/homeassistant/components/knx/cover.py b/homeassistant/components/knx/cover.py index 408f746e094..c4b445ff87f 100644 --- a/homeassistant/components/knx/cover.py +++ b/homeassistant/components/knx/cover.py @@ -26,7 +26,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN +from .const import KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import CoverSchema @@ -37,8 +37,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up cover(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.COVER] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.COVER] async_add_entities(KNXCover(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/date.py b/homeassistant/components/knx/date.py index 9f04a4acd7e..d551d4e5b27 100644 --- a/homeassistant/components/knx/date.py +++ b/homeassistant/components/knx/date.py @@ -27,9 +27,8 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) from .knx_entity import KnxYamlEntity @@ -40,8 +39,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATE] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.DATE] async_add_entities( KNXDateEntity(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/datetime.py b/homeassistant/components/knx/datetime.py index 8f1a25e6e3c..0f98a7be217 100644 --- a/homeassistant/components/knx/datetime.py +++ b/homeassistant/components/knx/datetime.py @@ -28,9 +28,8 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) from .knx_entity import KnxYamlEntity @@ -41,8 +40,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.DATETIME] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.DATETIME] async_add_entities( KNXDateTimeEntity(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/device_trigger.py b/homeassistant/components/knx/device_trigger.py index ea3cc5faad4..96d8855f479 100644 --- a/homeassistant/components/knx/device_trigger.py +++ b/homeassistant/components/knx/device_trigger.py @@ -16,9 +16,8 @@ from homeassistant.helpers 
import selector from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType -from . import KNXModule, trigger -from .const import DOMAIN -from .project import KNXProject +from . import trigger +from .const import DOMAIN, KNX_MODULE_KEY from .trigger import ( CONF_KNX_DESTINATION, CONF_KNX_GROUP_VALUE_READ, @@ -47,7 +46,7 @@ async def async_get_triggers( """List device triggers for KNX devices.""" triggers = [] - knx: KNXModule = hass.data[DOMAIN] + knx = hass.data[KNX_MODULE_KEY] if knx.interface_device.device.id == device_id: # Add trigger for KNX telegrams to interface device triggers.append( @@ -67,7 +66,7 @@ async def async_get_trigger_capabilities( hass: HomeAssistant, config: ConfigType ) -> dict[str, vol.Schema]: """List trigger capabilities.""" - project: KNXProject = hass.data[DOMAIN].project + project = hass.data[KNX_MODULE_KEY].project options = [ selector.SelectOptionDict(value=ga.address, label=f"{ga.address} - {ga.name}") for ga in project.group_addresses.values() diff --git a/homeassistant/components/knx/diagnostics.py b/homeassistant/components/knx/diagnostics.py index 1907539fc61..974a6b3b448 100644 --- a/homeassistant/components/knx/diagnostics.py +++ b/homeassistant/components/knx/diagnostics.py @@ -18,6 +18,7 @@ from .const import ( CONF_KNX_SECURE_DEVICE_AUTHENTICATION, CONF_KNX_SECURE_USER_PASSWORD, DOMAIN, + KNX_MODULE_KEY, ) TO_REDACT = { @@ -33,7 +34,7 @@ async def async_get_config_entry_diagnostics( ) -> dict[str, Any]: """Return diagnostics for a config entry.""" diag: dict[str, Any] = {} - knx_module = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] diag["xknx"] = { "version": knx_module.xknx.version, "current_address": str(knx_module.xknx.current_address), diff --git a/homeassistant/components/knx/fan.py b/homeassistant/components/knx/fan.py index 6fd87be97d1..6a026be2edf 100644 --- a/homeassistant/components/knx/fan.py +++ b/homeassistant/components/knx/fan.py @@ -20,7 +20,7 @@ from homeassistant.util.percentage import ( from homeassistant.util.scaling import int_states_in_range from . import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS +from .const import KNX_ADDRESS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import FanSchema @@ -33,8 +33,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up fan(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.FAN] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.FAN] async_add_entities(KNXFan(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/light.py b/homeassistant/components/knx/light.py index 0caa3f0a799..a9116f5c282 100644 --- a/homeassistant/components/knx/light.py +++ b/homeassistant/components/knx/light.py @@ -29,7 +29,7 @@ from homeassistant.helpers.typing import ConfigType import homeassistant.util.color as color_util from . 
import KNXModule -from .const import CONF_SYNC_STATE, DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, ColorTempModes +from .const import CONF_SYNC_STATE, DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY, ColorTempModes from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import LightSchema from .storage.const import ( @@ -65,7 +65,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up light(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] platform = async_get_current_platform() knx_module.config_store.add_platform( platform=Platform.LIGHT, @@ -77,7 +77,7 @@ async def async_setup_entry( ) entities: list[KnxYamlEntity | KnxUiEntity] = [] - if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.LIGHT): + if yaml_platform_config := knx_module.config_yaml.get(Platform.LIGHT): entities.extend( KnxYamlLight(knx_module, entity_config) for entity_config in yaml_platform_config diff --git a/homeassistant/components/knx/notify.py b/homeassistant/components/knx/notify.py index 173ab3119a0..ec17cf941f5 100644 --- a/homeassistant/components/knx/notify.py +++ b/homeassistant/components/knx/notify.py @@ -19,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS +from .const import DOMAIN, KNX_ADDRESS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity @@ -32,8 +32,9 @@ async def async_get_service( if discovery_info is None: return None - if platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.NOTIFY): - xknx: XKNX = hass.data[DOMAIN].xknx + knx_module = hass.data[KNX_MODULE_KEY] + if platform_config := knx_module.config_yaml.get(Platform.NOTIFY): + xknx: XKNX = hass.data[KNX_MODULE_KEY].xknx notification_devices = [ _create_notification_instance(xknx, device_config) @@ -87,8 +88,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up notify(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NOTIFY] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.NOTIFY] async_add_entities(KNXNotify(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/number.py b/homeassistant/components/knx/number.py index cbbe91aba54..1a6c33239c9 100644 --- a/homeassistant/components/knx/number.py +++ b/homeassistant/components/knx/number.py @@ -23,13 +23,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import ( - CONF_RESPOND_TO_READ, - CONF_STATE_ADDRESS, - DATA_KNX_CONFIG, - DOMAIN, - KNX_ADDRESS, -) +from .const import CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, KNX_ADDRESS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import NumberSchema @@ -40,8 +34,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up number(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.NUMBER] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.NUMBER] async_add_entities(KNXNumber(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/scene.py b/homeassistant/components/knx/scene.py index 2de832ae54a..0a0e68239ef 100644 --- a/homeassistant/components/knx/scene.py +++ b/homeassistant/components/knx/scene.py @@ -14,7 +14,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS +from .const import KNX_ADDRESS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import SceneSchema @@ -25,8 +25,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up scene(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SCENE] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.SCENE] async_add_entities(KNXScene(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/select.py b/homeassistant/components/knx/select.py index 6c73bf8d573..272db48f14e 100644 --- a/homeassistant/components/knx/select.py +++ b/homeassistant/components/knx/select.py @@ -26,9 +26,8 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) from .knx_entity import KnxYamlEntity from .schema import SelectSchema @@ -40,8 +39,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up select(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.SELECT] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.SELECT] async_add_entities(KNXSelect(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/sensor.py b/homeassistant/components/knx/sensor.py index a28c1a339e6..03b3f3f70c3 100644 --- a/homeassistant/components/knx/sensor.py +++ b/homeassistant/components/knx/sensor.py @@ -34,7 +34,7 @@ from homeassistant.helpers.typing import ConfigType, StateType from homeassistant.util.enum import try_parse_enum from . 
import KNXModule -from .const import ATTR_SOURCE, DATA_KNX_CONFIG, DOMAIN +from .const import ATTR_SOURCE, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import SensorSchema @@ -115,13 +115,13 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] entities: list[SensorEntity] = [] entities.extend( KNXSystemSensor(knx_module, description) for description in SYSTEM_ENTITY_DESCRIPTIONS ) - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG].get(Platform.SENSOR) + config: list[ConfigType] | None = knx_module.config_yaml.get(Platform.SENSOR) if config: entities.extend( KNXSensor(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/services.py b/homeassistant/components/knx/services.py index 8b82671deaa..113be9709ee 100644 --- a/homeassistant/components/knx/services.py +++ b/homeassistant/components/knx/services.py @@ -22,6 +22,7 @@ from homeassistant.helpers.service import async_register_admin_service from .const import ( DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, SERVICE_KNX_ATTR_PAYLOAD, SERVICE_KNX_ATTR_REMOVE, SERVICE_KNX_ATTR_RESPONSE, @@ -85,7 +86,7 @@ def register_knx_services(hass: HomeAssistant) -> None: def get_knx_module(hass: HomeAssistant) -> KNXModule: """Return KNXModule instance.""" try: - return hass.data[DOMAIN] # type: ignore[no-any-return] + return hass.data[KNX_MODULE_KEY] except KeyError as err: raise HomeAssistantError("KNX entry not loaded") from err diff --git a/homeassistant/components/knx/switch.py b/homeassistant/components/knx/switch.py index ebe930957d6..9146a98dda4 100644 --- a/homeassistant/components/knx/switch.py +++ b/homeassistant/components/knx/switch.py @@ -31,9 +31,9 @@ from .const import ( CONF_INVERT, CONF_RESPOND_TO_READ, CONF_SYNC_STATE, - DATA_KNX_CONFIG, DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) from .knx_entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity from .schema import SwitchSchema @@ -53,7 +53,7 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch(es) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] + knx_module = hass.data[KNX_MODULE_KEY] platform = async_get_current_platform() knx_module.config_store.add_platform( platform=Platform.SWITCH, @@ -65,7 +65,7 @@ async def async_setup_entry( ) entities: list[KnxYamlEntity | KnxUiEntity] = [] - if yaml_platform_config := hass.data[DATA_KNX_CONFIG].get(Platform.SWITCH): + if yaml_platform_config := knx_module.config_yaml.get(Platform.SWITCH): entities.extend( KnxYamlSwitch(knx_module, entity_config) for entity_config in yaml_platform_config diff --git a/homeassistant/components/knx/text.py b/homeassistant/components/knx/text.py index 381cb95ad32..1fdfc21bf2b 100644 --- a/homeassistant/components/knx/text.py +++ b/homeassistant/components/knx/text.py @@ -23,13 +23,7 @@ from homeassistant.helpers.restore_state import RestoreEntity from homeassistant.helpers.typing import ConfigType from . 
import KNXModule -from .const import ( - CONF_RESPOND_TO_READ, - CONF_STATE_ADDRESS, - DATA_KNX_CONFIG, - DOMAIN, - KNX_ADDRESS, -) +from .const import CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, KNX_ADDRESS, KNX_MODULE_KEY from .knx_entity import KnxYamlEntity @@ -39,8 +33,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up sensor(s) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TEXT] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.TEXT] async_add_entities(KNXText(knx_module, entity_config) for entity_config in config) diff --git a/homeassistant/components/knx/time.py b/homeassistant/components/knx/time.py index b4e562a8869..8e57b4a4fb5 100644 --- a/homeassistant/components/knx/time.py +++ b/homeassistant/components/knx/time.py @@ -27,9 +27,8 @@ from .const import ( CONF_RESPOND_TO_READ, CONF_STATE_ADDRESS, CONF_SYNC_STATE, - DATA_KNX_CONFIG, - DOMAIN, KNX_ADDRESS, + KNX_MODULE_KEY, ) from .knx_entity import KnxYamlEntity @@ -40,8 +39,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up entities for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.TIME] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.TIME] async_add_entities( KNXTimeEntity(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/weather.py b/homeassistant/components/knx/weather.py index 99f4be962fe..3cf8f163330 100644 --- a/homeassistant/components/knx/weather.py +++ b/homeassistant/components/knx/weather.py @@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import DATA_KNX_CONFIG, DOMAIN +from .const import KNX_MODULE_KEY from .knx_entity import KnxYamlEntity from .schema import WeatherSchema @@ -31,8 +31,8 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up switch(es) for KNX platform.""" - knx_module: KNXModule = hass.data[DOMAIN] - config: list[ConfigType] = hass.data[DATA_KNX_CONFIG][Platform.WEATHER] + knx_module = hass.data[KNX_MODULE_KEY] + config: list[ConfigType] = knx_module.config_yaml[Platform.WEATHER] async_add_entities( KNXWeather(knx_module, entity_config) for entity_config in config diff --git a/homeassistant/components/knx/websocket.py b/homeassistant/components/knx/websocket.py index 5c21a941484..6cb2218b221 100644 --- a/homeassistant/components/knx/websocket.py +++ b/homeassistant/components/knx/websocket.py @@ -21,7 +21,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.typing import UNDEFINED from homeassistant.util.ulid import ulid_now -from .const import DOMAIN +from .const import DOMAIN, KNX_MODULE_KEY from .storage.config_store import ConfigStoreException from .storage.const import CONF_DATA from .storage.entity_store_schema import ( @@ -38,7 +38,6 @@ from .telegrams import SIGNAL_KNX_TELEGRAM, TelegramDict if TYPE_CHECKING: from . 
import KNXModule - URL_BASE: Final = "/knx_static" @@ -126,7 +125,7 @@ def provide_knx( ) -> None: """Add KNX Module to call function.""" try: - knx: KNXModule = hass.data[DOMAIN] + knx = hass.data[KNX_MODULE_KEY] except KeyError: _send_not_loaded_error(connection, msg["id"]) return @@ -142,7 +141,7 @@ def provide_knx( ) -> None: """Add KNX Module to call function.""" try: - knx: KNXModule = hass.data[DOMAIN] + knx = hass.data[KNX_MODULE_KEY] except KeyError: _send_not_loaded_error(connection, msg["id"]) return diff --git a/tests/components/knx/test_button.py b/tests/components/knx/test_button.py index a05752eced1..38ccb36200b 100644 --- a/tests/components/knx/test_button.py +++ b/tests/components/knx/test_button.py @@ -6,7 +6,11 @@ import logging from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.knx.const import CONF_PAYLOAD_LENGTH, DOMAIN, KNX_ADDRESS +from homeassistant.components.knx.const import ( + CONF_PAYLOAD_LENGTH, + KNX_ADDRESS, + KNX_MODULE_KEY, +) from homeassistant.components.knx.schema import ButtonSchema from homeassistant.const import CONF_NAME, CONF_PAYLOAD, CONF_TYPE from homeassistant.core import HomeAssistant @@ -134,4 +138,4 @@ async def test_button_invalid( assert record.levelname == "ERROR" assert "Setup failed for 'knx': Invalid config." in record.message assert hass.states.get("button.test") is None - assert hass.data.get(DOMAIN) is None + assert hass.data.get(KNX_MODULE_KEY) is None diff --git a/tests/components/knx/test_telegrams.py b/tests/components/knx/test_telegrams.py index 69e3208879c..883e8ccbb2d 100644 --- a/tests/components/knx/test_telegrams.py +++ b/tests/components/knx/test_telegrams.py @@ -6,8 +6,10 @@ from typing import Any import pytest -from homeassistant.components.knx import DOMAIN -from homeassistant.components.knx.const import CONF_KNX_TELEGRAM_LOG_SIZE +from homeassistant.components.knx.const import ( + CONF_KNX_TELEGRAM_LOG_SIZE, + KNX_MODULE_KEY, +) from homeassistant.components.knx.telegrams import TelegramDict from homeassistant.core import HomeAssistant @@ -76,7 +78,7 @@ async def test_store_telegam_history( ) await knx.assert_write("2/2/2", (1, 2, 3, 4)) - assert len(hass.data[DOMAIN].telegrams.recent_telegrams) == 2 + assert len(hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams) == 2 with pytest.raises(KeyError): hass_storage["knx/telegrams_history.json"] @@ -93,7 +95,7 @@ async def test_load_telegam_history( """Test telegram history restoration.""" hass_storage["knx/telegrams_history.json"] = {"version": 1, "data": MOCK_TELEGRAMS} await knx.setup_integration({}) - loaded_telegrams = hass.data[DOMAIN].telegrams.recent_telegrams + loaded_telegrams = hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams assert assert_telegram_history(loaded_telegrams) # TelegramDict "payload" is a tuple, this shall be restored when loading from JSON assert isinstance(loaded_telegrams[1]["payload"], tuple) @@ -114,4 +116,4 @@ async def test_remove_telegam_history( await knx.setup_integration({}, add_entry_to_hass=False) # Store.async_remove() is mocked by hass_storage - check that data was removed. 
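# Illustrative sketch, not part of the patch: the KNX changes in this series replace
# untyped hass.data[DOMAIN] lookups with a typed KNX_MODULE_KEY. The actual key
# declaration in homeassistant/components/knx/const.py is not shown in this excerpt,
# so the exact form below is an assumption based on the usual HassKey pattern rather
# than a quote from the repository.
from homeassistant.components.knx import KNXModule
from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey

KNX_MODULE_KEY: HassKey[KNXModule] = HassKey("knx")  # assumed declaration in const.py


def store_module(hass: HomeAssistant, knx_module: KNXModule) -> None:
    """Store the module once during setup; the stored value type is checked."""
    hass.data[KNX_MODULE_KEY] = knx_module


def get_module(hass: HomeAssistant) -> KNXModule:
    """Platforms and tests get a KNXModule back without casts or type: ignore comments."""
    return hass.data[KNX_MODULE_KEY]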
assert "knx/telegrams_history.json" not in hass_storage - assert not hass.data[DOMAIN].telegrams.recent_telegrams + assert not hass.data[KNX_MODULE_KEY].telegrams.recent_telegrams diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index e747b0daade..b3e4b7aaa38 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -5,8 +5,9 @@ from unittest.mock import patch import pytest -from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema +from homeassistant.components.knx.const import KNX_ADDRESS, KNX_MODULE_KEY from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY +from homeassistant.components.knx.schema import SwitchSchema from homeassistant.const import CONF_NAME from homeassistant.core import HomeAssistant @@ -66,7 +67,7 @@ async def test_knx_project_file_process( await knx.setup_integration({}) client = await hass_ws_client(hass) - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded await client.send_json( { @@ -89,7 +90,7 @@ async def test_knx_project_file_process( parse_mock.assert_called_once_with() assert res["success"], res - assert hass.data[DOMAIN].project.loaded + assert hass.data[KNX_MODULE_KEY].project.loaded assert hass_storage[KNX_PROJECT_STORAGE_KEY]["data"] == _parse_result @@ -101,7 +102,7 @@ async def test_knx_project_file_process_error( """Test knx/project_file_process exception handling.""" await knx.setup_integration({}) client = await hass_ws_client(hass) - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded await client.send_json( { @@ -122,7 +123,7 @@ async def test_knx_project_file_process_error( parse_mock.assert_called_once_with() assert res["error"], res - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded async def test_knx_project_file_remove( @@ -136,13 +137,13 @@ async def test_knx_project_file_remove( await knx.setup_integration({}) assert hass_storage[KNX_PROJECT_STORAGE_KEY] client = await hass_ws_client(hass) - assert hass.data[DOMAIN].project.loaded + assert hass.data[KNX_MODULE_KEY].project.loaded await client.send_json({"id": 6, "type": "knx/project_file_remove"}) res = await client.receive_json() assert res["success"], res - assert not hass.data[DOMAIN].project.loaded + assert not hass.data[KNX_MODULE_KEY].project.loaded assert not hass_storage.get(KNX_PROJECT_STORAGE_KEY) @@ -155,7 +156,7 @@ async def test_knx_get_project( """Test retrieval of kxnproject from store.""" await knx.setup_integration({}) client = await hass_ws_client(hass) - assert hass.data[DOMAIN].project.loaded + assert hass.data[KNX_MODULE_KEY].project.loaded await client.send_json({"id": 3, "type": "knx/get_knx_project"}) res = await client.receive_json() From 06e876aee0e0c5917f8eae603b31f8f8deec1ac6 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 11:07:19 +0200 Subject: [PATCH 1623/1635] Add alias to DOMAIN import in group (#125569) --- homeassistant/components/group/button.py | 6 ++-- homeassistant/components/group/cover.py | 36 +++++++++++++------ homeassistant/components/group/event.py | 4 +-- homeassistant/components/group/fan.py | 8 ++--- homeassistant/components/group/lock.py | 10 +++--- .../components/group/media_player.py | 30 ++++++++-------- homeassistant/components/group/notify.py | 9 +++-- homeassistant/components/group/sensor.py | 18 ++++++---- 
homeassistant/components/group/switch.py | 8 ++--- 9 files changed, 77 insertions(+), 52 deletions(-) diff --git a/homeassistant/components/group/button.py b/homeassistant/components/group/button.py index d8481686615..a18e074b775 100644 --- a/homeassistant/components/group/button.py +++ b/homeassistant/components/group/button.py @@ -7,7 +7,7 @@ from typing import Any import voluptuous as vol from homeassistant.components.button import ( - DOMAIN, + DOMAIN as BUTTON_DOMAIN, PLATFORM_SCHEMA as BUTTON_PLATFORM_SCHEMA, SERVICE_PRESS, ButtonEntity, @@ -34,7 +34,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = BUTTON_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(BUTTON_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -113,7 +113,7 @@ class ButtonGroup(GroupEntity, ButtonEntity): async def async_press(self) -> None: """Forward the press to all buttons in the group.""" await self.hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: self._entity_ids}, blocking=True, diff --git a/homeassistant/components/group/cover.py b/homeassistant/components/group/cover.py index 5d7f99012fd..b0b36e11b6b 100644 --- a/homeassistant/components/group/cover.py +++ b/homeassistant/components/group/cover.py @@ -11,7 +11,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, PLATFORM_SCHEMA as COVER_PLATFORM_SCHEMA, CoverEntity, CoverEntityFeature, @@ -57,7 +57,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = COVER_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(COVER_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -181,21 +181,25 @@ class CoverGroup(GroupEntity, CoverEntity): """Move the covers up.""" data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, data, blocking=True, context=self._context + COVER_DOMAIN, SERVICE_OPEN_COVER, data, blocking=True, context=self._context ) async def async_close_cover(self, **kwargs: Any) -> None: """Move the covers down.""" data = {ATTR_ENTITY_ID: self._covers[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + data, + blocking=True, + context=self._context, ) async def async_stop_cover(self, **kwargs: Any) -> None: """Fire the stop action.""" data = {ATTR_ENTITY_ID: self._covers[KEY_STOP]} await self.hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, data, blocking=True, context=self._context + COVER_DOMAIN, SERVICE_STOP_COVER, data, blocking=True, context=self._context ) async def async_set_cover_position(self, **kwargs: Any) -> None: @@ -205,7 +209,7 @@ class CoverGroup(GroupEntity, CoverEntity): ATTR_POSITION: kwargs[ATTR_POSITION], } await self.hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, data, blocking=True, @@ -216,21 +220,33 @@ class CoverGroup(GroupEntity, CoverEntity): """Tilt covers open.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + data, + blocking=True, + 
context=self._context, ) async def async_close_cover_tilt(self, **kwargs: Any) -> None: """Tilt covers closed.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_OPEN_CLOSE]} await self.hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + data, + blocking=True, + context=self._context, ) async def async_stop_cover_tilt(self, **kwargs: Any) -> None: """Stop cover tilt.""" data = {ATTR_ENTITY_ID: self._tilts[KEY_STOP]} await self.hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, data, blocking=True, context=self._context + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + data, + blocking=True, + context=self._context, ) async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: @@ -240,7 +256,7 @@ class CoverGroup(GroupEntity, CoverEntity): ATTR_TILT_POSITION: kwargs[ATTR_TILT_POSITION], } await self.hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, data, blocking=True, diff --git a/homeassistant/components/group/event.py b/homeassistant/components/group/event.py index 67220b878a1..e7f7938edf3 100644 --- a/homeassistant/components/group/event.py +++ b/homeassistant/components/group/event.py @@ -10,7 +10,7 @@ import voluptuous as vol from homeassistant.components.event import ( ATTR_EVENT_TYPE, ATTR_EVENT_TYPES, - DOMAIN, + DOMAIN as EVENT_DOMAIN, PLATFORM_SCHEMA as EVENT_PLATFORM_SCHEMA, EventEntity, ) @@ -40,7 +40,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = EVENT_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(EVENT_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } diff --git a/homeassistant/components/group/fan.py b/homeassistant/components/group/fan.py index 93004e8a1b5..03341b0f46b 100644 --- a/homeassistant/components/group/fan.py +++ b/homeassistant/components/group/fan.py @@ -14,7 +14,7 @@ from homeassistant.components.fan import ( ATTR_OSCILLATING, ATTR_PERCENTAGE, ATTR_PERCENTAGE_STEP, - DOMAIN, + DOMAIN as FAN_DOMAIN, PLATFORM_SCHEMA as FAN_PLATFORM_SCHEMA, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, @@ -58,7 +58,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = FAN_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(FAN_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -218,7 +218,7 @@ class FanGroup(GroupEntity, FanEntity): ) -> None: """Call a service with all entities.""" await self.hass.services.async_call( - DOMAIN, + FAN_DOMAIN, service, {**data, ATTR_ENTITY_ID: self._fans[support_flag]}, blocking=True, @@ -228,7 +228,7 @@ class FanGroup(GroupEntity, FanEntity): async def _async_call_all_entities(self, service: str) -> None: """Call a service with all entities.""" await self.hass.services.async_call( - DOMAIN, + FAN_DOMAIN, service, {ATTR_ENTITY_ID: self._entity_ids}, blocking=True, diff --git a/homeassistant/components/group/lock.py b/homeassistant/components/group/lock.py index 8bb7b18ce29..73e8c30bfde 100644 --- a/homeassistant/components/group/lock.py +++ b/homeassistant/components/group/lock.py @@ -8,7 +8,7 @@ from typing import Any import voluptuous as vol from homeassistant.components.lock import ( - DOMAIN, + DOMAIN as LOCK_DOMAIN, PLATFORM_SCHEMA as LOCK_PLATFORM_SCHEMA, LockEntity, LockEntityFeature, @@ -45,7 +45,7 @@ PARALLEL_UPDATES = 0 
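# Illustrative sketch, not part of the patch: the DOMAIN aliases introduced across
# these group platforms make it explicit which integration a constant belongs to, so
# a module can refer to both the entity platform's domain and the group integration's
# own domain without a second unaliased import silently shadowing the first. The
# group const import path below is an assumption for illustration only.
from homeassistant.components.group.const import DOMAIN as GROUP_DOMAIN  # "group" (assumed location)
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN  # "lock"

# Forwarded service calls then name their target domain unambiguously, e.g.:
# await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True)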
PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(LOCK_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -131,7 +131,7 @@ class LockGroup(GroupEntity, LockEntity): _LOGGER.debug("Forwarded lock command: %s", data) await self.hass.services.async_call( - DOMAIN, + LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True, @@ -142,7 +142,7 @@ class LockGroup(GroupEntity, LockEntity): """Forward the unlock command to all locks in the group.""" data = {ATTR_ENTITY_ID: self._entity_ids} await self.hass.services.async_call( - DOMAIN, + LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True, @@ -153,7 +153,7 @@ class LockGroup(GroupEntity, LockEntity): """Forward the open command to all locks in the group.""" data = {ATTR_ENTITY_ID: self._entity_ids} await self.hass.services.async_call( - DOMAIN, + LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True, diff --git a/homeassistant/components/group/media_player.py b/homeassistant/components/group/media_player.py index 7d2ce46b107..ab8ee64b3e1 100644 --- a/homeassistant/components/group/media_player.py +++ b/homeassistant/components/group/media_player.py @@ -15,7 +15,7 @@ from homeassistant.components.media_player import ( ATTR_MEDIA_SHUFFLE, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, @@ -73,7 +73,7 @@ DEFAULT_NAME = "Media Group" PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(MEDIA_PLAYER_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -274,7 +274,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Clear players playlist.""" data = {ATTR_ENTITY_ID: self._features[KEY_CLEAR_PLAYLIST]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_CLEAR_PLAYLIST, data, context=self._context, @@ -284,7 +284,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send next track command.""" data = {ATTR_ENTITY_ID: self._features[KEY_TRACKS]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_NEXT_TRACK, data, context=self._context, @@ -294,7 +294,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send pause command.""" data = {ATTR_ENTITY_ID: self._features[KEY_PAUSE_PLAY_STOP]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PAUSE, data, context=self._context, @@ -304,7 +304,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send play command.""" data = {ATTR_ENTITY_ID: self._features[KEY_PAUSE_PLAY_STOP]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PLAY, data, context=self._context, @@ -314,7 +314,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send previous track command.""" data = {ATTR_ENTITY_ID: self._features[KEY_TRACKS]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_PREVIOUS_TRACK, data, context=self._context, @@ -327,7 +327,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_SEEK_POSITION: position, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_SEEK, data, context=self._context, @@ -337,7 +337,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Send stop command.""" data = 
{ATTR_ENTITY_ID: self._features[KEY_PAUSE_PLAY_STOP]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_MEDIA_STOP, data, context=self._context, @@ -350,7 +350,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_VOLUME_MUTED: mute, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, data, context=self._context, @@ -368,7 +368,7 @@ class MediaPlayerGroup(MediaPlayerEntity): if kwargs: data.update(kwargs) await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_PLAY_MEDIA, data, context=self._context, @@ -381,7 +381,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_SHUFFLE: shuffle, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_SHUFFLE_SET, data, context=self._context, @@ -391,7 +391,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Forward the turn_on command to all media in the media group.""" data = {ATTR_ENTITY_ID: self._features[KEY_ON_OFF]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_TURN_ON, data, context=self._context, @@ -404,7 +404,7 @@ class MediaPlayerGroup(MediaPlayerEntity): ATTR_MEDIA_VOLUME_LEVEL: volume, } await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_SET, data, context=self._context, @@ -414,7 +414,7 @@ class MediaPlayerGroup(MediaPlayerEntity): """Forward the turn_off command to all media in the media group.""" data = {ATTR_ENTITY_ID: self._features[KEY_ON_OFF]} await self.hass.services.async_call( - DOMAIN, + MEDIA_PLAYER_DOMAIN, SERVICE_TURN_OFF, data, context=self._context, diff --git a/homeassistant/components/group/notify.py b/homeassistant/components/group/notify.py index ecbfec0bdb8..fdef327cb73 100644 --- a/homeassistant/components/group/notify.py +++ b/homeassistant/components/group/notify.py @@ -13,7 +13,7 @@ from homeassistant.components.notify import ( ATTR_DATA, ATTR_MESSAGE, ATTR_TITLE, - DOMAIN, + DOMAIN as NOTIFY_DOMAIN, PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA, SERVICE_SEND_MESSAGE, BaseNotificationService, @@ -115,7 +115,10 @@ class GroupNotifyPlatform(BaseNotificationService): tasks.append( asyncio.create_task( self.hass.services.async_call( - DOMAIN, entity[CONF_ACTION], sending_payload, blocking=True + NOTIFY_DOMAIN, + entity[CONF_ACTION], + sending_payload, + blocking=True, ) ) ) @@ -172,7 +175,7 @@ class NotifyGroup(GroupEntity, NotifyEntity): async def async_send_message(self, message: str, title: str | None = None) -> None: """Send a message to all members of the group.""" await self.hass.services.async_call( - DOMAIN, + NOTIFY_DOMAIN, SERVICE_SEND_MESSAGE, { ATTR_MESSAGE: message, diff --git a/homeassistant/components/group/sensor.py b/homeassistant/components/group/sensor.py index a99ed9dad63..32744bebc33 100644 --- a/homeassistant/components/group/sensor.py +++ b/homeassistant/components/group/sensor.py @@ -16,7 +16,7 @@ from homeassistant.components.sensor import ( CONF_STATE_CLASS, DEVICE_CLASS_UNITS, DEVICE_CLASSES_SCHEMA, - DOMAIN, + DOMAIN as SENSOR_DOMAIN, PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, STATE_CLASSES_SCHEMA, UNIT_CONVERTERS, @@ -96,7 +96,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( { vol.Required(CONF_ENTITIES): cv.entities_domain( - [DOMAIN, NUMBER_DOMAIN, INPUT_NUMBER_DOMAIN] + [SENSOR_DOMAIN, NUMBER_DOMAIN, INPUT_NUMBER_DOMAIN] ), vol.Required(CONF_TYPE): vol.All(cv.string, vol.In(SENSOR_TYPES.values())), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, @@ -503,7 +503,7 @@ 
class SensorGroup(GroupEntity, SensorEntity): if all(x == state_classes[0] for x in state_classes): async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_state_classes_not_matching" + self.hass, SENSOR_DOMAIN, f"{self.entity_id}_state_classes_not_matching" ) return state_classes[0] async_create_issue( @@ -546,7 +546,9 @@ class SensorGroup(GroupEntity, SensorEntity): if all(x == device_classes[0] for x in device_classes): async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_device_classes_not_matching" + self.hass, + SENSOR_DOMAIN, + f"{self.entity_id}_device_classes_not_matching", ) return device_classes[0] async_create_issue( @@ -614,10 +616,14 @@ class SensorGroup(GroupEntity, SensorEntity): ) ): async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_uoms_not_matching_device_class" + self.hass, + SENSOR_DOMAIN, + f"{self.entity_id}_uoms_not_matching_device_class", ) async_delete_issue( - self.hass, DOMAIN, f"{self.entity_id}_uoms_not_matching_no_device_class" + self.hass, + SENSOR_DOMAIN, + f"{self.entity_id}_uoms_not_matching_no_device_class", ) return unit_of_measurements[0] diff --git a/homeassistant/components/group/switch.py b/homeassistant/components/group/switch.py index 9db264c8041..101c42d354f 100644 --- a/homeassistant/components/group/switch.py +++ b/homeassistant/components/group/switch.py @@ -8,7 +8,7 @@ from typing import Any import voluptuous as vol from homeassistant.components.switch import ( - DOMAIN, + DOMAIN as SWITCH_DOMAIN, PLATFORM_SCHEMA as SWITCH_PLATFORM_SCHEMA, SwitchEntity, ) @@ -39,7 +39,7 @@ PARALLEL_UPDATES = 0 PLATFORM_SCHEMA = SWITCH_PLATFORM_SCHEMA.extend( { - vol.Required(CONF_ENTITIES): cv.entities_domain(DOMAIN), + vol.Required(CONF_ENTITIES): cv.entities_domain(SWITCH_DOMAIN), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_UNIQUE_ID): cv.string, vol.Optional(CONF_ALL, default=False): cv.boolean, @@ -132,7 +132,7 @@ class SwitchGroup(GroupEntity, SwitchEntity): _LOGGER.debug("Forwarded turn_on command: %s", data) await self.hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, data, blocking=True, @@ -143,7 +143,7 @@ class SwitchGroup(GroupEntity, SwitchEntity): """Forward the turn_off command to all switches in the group.""" data = {ATTR_ENTITY_ID: self._entity_ids} await self.hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, data, blocking=True, From 056e6eae8284dc5f6b1aa705cd79aba7bdcb782f Mon Sep 17 00:00:00 2001 From: karwosts <32912880+karwosts@users.noreply.github.com> Date: Mon, 9 Sep 2024 04:51:32 -0700 Subject: [PATCH 1624/1635] Add a syntax for merging lists of triggers (#117698) * Add a syntax for merging lists of triggers * Updating to the new syntax * Update homeassistant/helpers/config_validation.py Co-authored-by: Erik Montnemery * fix suggestion * update test and add comments * not actually json * move test to new file * update tests --------- Co-authored-by: Erik Montnemery --- homeassistant/const.py | 1 + homeassistant/helpers/config_validation.py | 18 ++++- tests/helpers/test_config_validation.py | 77 ++++++++++++++++++++++ tests/helpers/test_trigger.py | 64 ++++++++++++++++++ 4 files changed, 159 insertions(+), 1 deletion(-) diff --git a/homeassistant/const.py b/homeassistant/const.py index ee90ebfc28b..45d6a97885b 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -282,6 +282,7 @@ CONF_THEN: Final = "then" CONF_TIMEOUT: Final = "timeout" CONF_TIME_ZONE: Final = "time_zone" CONF_TOKEN: Final = "token" +CONF_TRIGGERS: Final = 
"triggers" CONF_TRIGGER_TIME: Final = "trigger_time" CONF_TTL: Final = "ttl" CONF_TYPE: Final = "type" diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index d88c388f9c7..059be3026e5 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -81,6 +81,7 @@ from homeassistant.const import ( CONF_TARGET, CONF_THEN, CONF_TIMEOUT, + CONF_TRIGGERS, CONF_UNTIL, CONF_VALUE_TEMPLATE, CONF_VARIABLES, @@ -1781,6 +1782,19 @@ TRIGGER_BASE_SCHEMA = vol.Schema( _base_trigger_validator_schema = TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) +def _base_trigger_list_flatten(triggers: list[Any]) -> list[Any]: + """Flatten trigger arrays containing 'triggers:' sublists into a single list of triggers.""" + flatlist = [] + for t in triggers: + if CONF_TRIGGERS in t and len(t.keys()) == 1: + triggerlist = ensure_list(t[CONF_TRIGGERS]) + flatlist.extend(triggerlist) + else: + flatlist.append(t) + + return flatlist + + # This is first round of validation, we don't want to process the config here already, # just ensure basics as platform and ID are there. def _base_trigger_validator(value: Any) -> Any: @@ -1788,7 +1802,9 @@ def _base_trigger_validator(value: Any) -> Any: return value -TRIGGER_SCHEMA = vol.All(ensure_list, [_base_trigger_validator]) +TRIGGER_SCHEMA = vol.All( + ensure_list, _base_trigger_list_flatten, [_base_trigger_validator] +) _SCRIPT_DELAY_SCHEMA = vol.Schema( { diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index 1608a856de8..0eae0c88581 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -25,6 +25,7 @@ from homeassistant.helpers import ( selector, template, ) +from homeassistant.helpers.config_validation import TRIGGER_SCHEMA def test_boolean() -> None: @@ -1817,6 +1818,82 @@ async def test_async_validate(hass: HomeAssistant, tmpdir: py.path.local) -> Non validator_calls = {} +async def test_nested_trigger_list() -> None: + """Test triggers within nested lists are flattened.""" + + trigger_config = [ + { + "triggers": { + "platform": "event", + "event_type": "trigger_1", + }, + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + {"triggers": []}, + {"triggers": None}, + { + "triggers": [ + { + "platform": "event", + "event_type": "trigger_3", + }, + { + "platform": "event", + "event_type": "trigger_4", + }, + ], + }, + ] + + validated_triggers = TRIGGER_SCHEMA(trigger_config) + + assert validated_triggers == [ + { + "platform": "event", + "event_type": "trigger_1", + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + { + "platform": "event", + "event_type": "trigger_3", + }, + { + "platform": "event", + "event_type": "trigger_4", + }, + ] + + +async def test_nested_trigger_list_extra() -> None: + """Test triggers key with extra keys is not modified.""" + + trigger_config = [ + { + "platform": "other", + "triggers": [ + { + "platform": "event", + "event_type": "trigger_1", + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + ], + }, + ] + + validated_triggers = TRIGGER_SCHEMA(trigger_config) + + assert validated_triggers == trigger_config + + async def test_is_entity_service_schema( hass: HomeAssistant, ) -> None: diff --git a/tests/helpers/test_trigger.py b/tests/helpers/test_trigger.py index 0bd5da0707c..4fde2d0ee0a 100644 --- a/tests/helpers/test_trigger.py +++ b/tests/helpers/test_trigger.py @@ -159,6 +159,70 @@ async def 
test_trigger_enabled_templates( assert len(service_calls) == 2 +async def test_nested_trigger_list( + hass: HomeAssistant, service_calls: list[ServiceCall] +) -> None: + """Test triggers within nested list.""" + + assert await async_setup_component( + hass, + "automation", + { + "automation": { + "trigger": [ + { + "triggers": { + "platform": "event", + "event_type": "trigger_1", + }, + }, + { + "platform": "event", + "event_type": "trigger_2", + }, + {"triggers": []}, + {"triggers": None}, + { + "triggers": [ + { + "platform": "event", + "event_type": "trigger_3", + }, + { + "platform": "event", + "event_type": "trigger_4", + }, + ], + }, + ], + "action": { + "service": "test.automation", + }, + } + }, + ) + + hass.bus.async_fire("trigger_1") + await hass.async_block_till_done() + assert len(service_calls) == 1 + + hass.bus.async_fire("trigger_2") + await hass.async_block_till_done() + assert len(service_calls) == 2 + + hass.bus.async_fire("trigger_none") + await hass.async_block_till_done() + assert len(service_calls) == 2 + + hass.bus.async_fire("trigger_3") + await hass.async_block_till_done() + assert len(service_calls) == 3 + + hass.bus.async_fire("trigger_4") + await hass.async_block_till_done() + assert len(service_calls) == 4 + + async def test_trigger_enabled_template_limited( hass: HomeAssistant, service_calls: list[ServiceCall], From 1dc496a2dd63fe6c96ce22d3d0952ceddec909dc Mon Sep 17 00:00:00 2001 From: Michael Hansen Date: Mon, 9 Sep 2024 07:25:25 -0500 Subject: [PATCH 1625/1635] Add announce support to ESPHome Assist Satellite platform (#125157) Rebuild --- .../components/esphome/assist_satellite.py | 22 +++ .../esphome/test_assist_satellite.py | 147 ++++++++++++++++++ 2 files changed, 169 insertions(+) diff --git a/homeassistant/components/esphome/assist_satellite.py b/homeassistant/components/esphome/assist_satellite.py index f84940eadc4..9d48e96b52e 100644 --- a/homeassistant/components/esphome/assist_satellite.py +++ b/homeassistant/components/esphome/assist_satellite.py @@ -74,6 +74,8 @@ _TIMER_EVENT_TYPES: EsphomeEnumMapper[VoiceAssistantTimerEventType, TimerEventTy ) ) +_ANNOUNCEMENT_TIMEOUT_SEC = 5 * 60 # 5 minutes + async def async_setup_entry( hass: HomeAssistant, @@ -183,6 +185,12 @@ class EsphomeAssistSatellite( ) ) + if feature_flags & VoiceAssistantFeature.ANNOUNCE: + # Device supports announcements + self._attr_supported_features |= ( + assist_satellite.AssistSatelliteEntityFeature.ANNOUNCE + ) + async def async_will_remove_from_hass(self) -> None: """Run when entity will be removed from hass.""" await super().async_will_remove_from_hass() @@ -251,6 +259,20 @@ class EsphomeAssistSatellite( self.cli.send_voice_assistant_event(event_type, data_to_send) + async def async_announce(self, message: str, media_id: str) -> None: + """Announce media on the satellite. + + Should block until the announcement is done playing. 
+ """ + _LOGGER.debug( + "Waiting for announcement to finished (message=%s, media_id=%s)", + message, + media_id, + ) + await self.cli.send_voice_assistant_announcement_await_response( + media_id, _ANNOUNCEMENT_TIMEOUT_SEC, message + ) + async def handle_pipeline_start( self, conversation_id: str, diff --git a/tests/components/esphome/test_assist_satellite.py b/tests/components/esphome/test_assist_satellite.py index 1c7f7320a85..e245cfcf3bf 100644 --- a/tests/components/esphome/test_assist_satellite.py +++ b/tests/components/esphome/test_assist_satellite.py @@ -27,6 +27,7 @@ from homeassistant.components import assist_satellite, tts from homeassistant.components.assist_pipeline import PipelineEvent, PipelineEventType from homeassistant.components.assist_satellite.entity import ( AssistSatelliteEntity, + AssistSatelliteEntityFeature, AssistSatelliteState, ) from homeassistant.components.esphome import DOMAIN @@ -34,6 +35,7 @@ from homeassistant.components.esphome.assist_satellite import ( EsphomeAssistSatellite, VoiceAssistantUDPServer, ) +from homeassistant.components.media_source import PlayMedia from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er, intent as intent_helper @@ -891,3 +893,148 @@ async def test_tts_format_from_media_player( tts.ATTR_PREFERRED_SAMPLE_CHANNELS: 1, tts.ATTR_PREFERRED_SAMPLE_BYTES: 2, } + + +async def test_announce_supported_features( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test that the announce supported feature is set by flags.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + assert not (satellite.supported_features & AssistSatelliteEntityFeature.ANNOUNCE) + + +async def test_announce_message( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test announcement with message.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, timeout: float, text: str + ): + assert media_id == "https://www.home-assistant.io/resolved.mp3" + assert text == "test-text" + + done.set() + + with ( + patch( + "homeassistant.components.assist_satellite.entity.tts_generate_media_source_id", + return_value="media-source://bla", + ), + patch( + "homeassistant.components.media_source.async_resolve_media", + return_value=PlayMedia( + url="https://www.home-assistant.io/resolved.mp3", + mime_type="audio/mp3", + 
), + ), + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + {"entity_id": satellite.entity_id, "message": "test-text"}, + blocking=True, + ) + await done.wait() + + +async def test_announce_media_id( + hass: HomeAssistant, + mock_client: APIClient, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test announcement with media id.""" + mock_device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + device_info={ + "voice_assistant_feature_flags": VoiceAssistantFeature.VOICE_ASSISTANT + | VoiceAssistantFeature.SPEAKER + | VoiceAssistantFeature.API_AUDIO + | VoiceAssistantFeature.ANNOUNCE + }, + ) + await hass.async_block_till_done() + + satellite = get_satellite_entity(hass, mock_device.device_info.mac_address) + assert satellite is not None + + done = asyncio.Event() + + async def send_voice_assistant_announcement_await_response( + media_id: str, timeout: float, text: str + ): + assert media_id == "https://www.home-assistant.io/resolved.mp3" + + done.set() + + with ( + patch.object( + mock_client, + "send_voice_assistant_announcement_await_response", + new=send_voice_assistant_announcement_await_response, + ), + ): + async with asyncio.timeout(1): + await hass.services.async_call( + assist_satellite.DOMAIN, + "announce", + { + "entity_id": satellite.entity_id, + "media_id": "https://www.home-assistant.io/resolved.mp3", + }, + blocking=True, + ) + await done.wait() From 3889482f0e1a8233a11167e203780dea4a402acd Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Mon, 9 Sep 2024 14:36:15 +0200 Subject: [PATCH 1626/1635] Do not directy import platform DOMAIN const in MQTT platform tests (#125589) --- tests/components/mqtt/test_humidifier.py | 17 +++++++---- tests/components/mqtt/test_vacuum.py | 37 +++++++++++++++--------- 2 files changed, 36 insertions(+), 18 deletions(-) diff --git a/tests/components/mqtt/test_humidifier.py b/tests/components/mqtt/test_humidifier.py index 62de371af4b..f5bdf52c8aa 100644 --- a/tests/components/mqtt/test_humidifier.py +++ b/tests/components/mqtt/test_humidifier.py @@ -12,7 +12,6 @@ from homeassistant.components.humidifier import ( ATTR_CURRENT_HUMIDITY, ATTR_HUMIDITY, ATTR_MODE, - DOMAIN, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, HumidifierAction, @@ -87,7 +86,9 @@ async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) """Turn all or specified humidifier on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} - await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_TURN_ON, data, blocking=True + ) async def async_turn_off( @@ -96,7 +97,9 @@ async def async_turn_off( """Turn all or specified humidier off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} - await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_TURN_OFF, data, blocking=True + ) async def async_set_mode( @@ -109,7 +112,9 @@ async def async_set_mode( if value is not None } - await hass.services.async_call(DOMAIN, SERVICE_SET_MODE, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, 
SERVICE_SET_MODE, data, blocking=True + ) async def async_set_humidity( @@ -122,7 +127,9 @@ async def async_set_humidity( if value is not None } - await hass.services.async_call(DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True) + await hass.services.async_call( + humidifier.DOMAIN, SERVICE_SET_HUMIDITY, data, blocking=True + ) @pytest.mark.parametrize( diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index fbffe062261..9b80d381457 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -21,7 +21,6 @@ from homeassistant.components.vacuum import ( ATTR_BATTERY_LEVEL, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, - DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -122,31 +121,34 @@ async def test_all_commands( mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( - DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "start", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "stop", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "pause", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_called_once_with(COMMAND_TOPIC, "locate", 0, False) mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_CLEAN_SPOT, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_called_once_with( COMMAND_TOPIC, "clean_spot", 0, False @@ -154,7 +156,10 @@ async def test_all_commands( mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_RETURN_TO_BASE, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_called_once_with( COMMAND_TOPIC, "return_to_base", 0, False @@ -205,37 +210,43 @@ async def test_commands_without_supported_features( mqtt_mock = await mqtt_mock_entry() await hass.services.async_call( - DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_START, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_PAUSE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_STOP, {"entity_id": ENTITY_MATCH_ALL}, blocking=True 
) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_RETURN_TO_BASE, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, SERVICE_LOCATE, {"entity_id": ENTITY_MATCH_ALL}, blocking=True ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": ENTITY_MATCH_ALL}, blocking=True + vacuum.DOMAIN, + SERVICE_CLEAN_SPOT, + {"entity_id": ENTITY_MATCH_ALL}, + blocking=True, ) mqtt_mock.async_publish.assert_not_called() mqtt_mock.async_publish.reset_mock() From d7caaceb64a090fcff5b26c37bdbec6f34a07488 Mon Sep 17 00:00:00 2001 From: Martin Hjelmare Date: Mon, 9 Sep 2024 14:47:04 +0200 Subject: [PATCH 1627/1635] Document plant integration development state (#125590) --- homeassistant/components/plant/__init__.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/plant/__init__.py b/homeassistant/components/plant/__init__.py index 2a5253d3faa..c6e527290df 100644 --- a/homeassistant/components/plant/__init__.py +++ b/homeassistant/components/plant/__init__.py @@ -1,4 +1,8 @@ -"""Support for monitoring plants.""" +"""Support for monitoring plants. + +DEVELOPMENT OF THE PLANT INTEGRATION IS FROZEN +PENDING A DESIGN EVALUATION. +""" from collections import deque from contextlib import suppress @@ -128,6 +132,9 @@ class Plant(Entity): It also checks the measurements against configurable min and max values. + + DEVELOPMENT OF THE PLANT INTEGRATION IS FROZEN + PENDING A DESIGN EVALUATION. """ _attr_should_poll = False @@ -363,6 +370,9 @@ class DailyHistory: """Stores one measurement per day for a maximum number of days. At the moment only the maximum value per day is kept. + + DEVELOPMENT OF THE PLANT INTEGRATION IS FROZEN + PENDING A DESIGN EVALUATION. 
""" def __init__(self, max_length): From 8fff0075ba3bb75a5a0444b0a6b4dcc1315bd55b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ludovic=20BOU=C3=89?= Date: Mon, 9 Sep 2024 14:50:01 +0200 Subject: [PATCH 1628/1635] Add Matter BatVoltage attribute from PowerSource cluster (#125503) * Add BatVoltage Attribute from PowerSource Cluster * Update sensor.py Remove comment * Update homeassistant/components/matter/sensor.py Co-authored-by: Martin Hjelmare --------- Co-authored-by: Martin Hjelmare --- homeassistant/components/matter/sensor.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index dd8467e24c9..da627734be6 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -163,6 +163,19 @@ DISCOVERY_SCHEMAS = [ entity_class=MatterSensor, required_attributes=(clusters.PowerSource.Attributes.BatPercentRemaining,), ), + MatterDiscoverySchema( + platform=Platform.SENSOR, + entity_description=MatterSensorEntityDescription( + key="PowerSourceBatVoltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + device_class=SensorDeviceClass.VOLTAGE, + entity_category=EntityCategory.DIAGNOSTIC, + measurement_to_ha=lambda x: x / 1000, + state_class=SensorStateClass.MEASUREMENT, + ), + entity_class=MatterSensor, + required_attributes=(clusters.PowerSource.Attributes.BatVoltage,), + ), MatterDiscoverySchema( platform=Platform.SENSOR, entity_description=MatterSensorEntityDescription( From dee4b33c64b6f3bbed494996c8cb276e3fb6b6f7 Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Mon, 9 Sep 2024 15:11:18 +0200 Subject: [PATCH 1629/1635] Sort and remove duplicates from template/const.py (#125591) --- homeassistant/components/template/const.py | 24 ++++++++++------------ 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/homeassistant/components/template/const.py b/homeassistant/components/template/const.py index 89df87b4031..c320fc545b1 100644 --- a/homeassistant/components/template/const.py +++ b/homeassistant/components/template/const.py @@ -3,9 +3,19 @@ from homeassistant.const import Platform CONF_ACTION = "action" -CONF_AVAILABILITY_TEMPLATE = "availability_template" CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" +CONF_ATTRIBUTES = "attributes" +CONF_AVAILABILITY = "availability" +CONF_AVAILABILITY_TEMPLATE = "availability_template" +CONF_MAX = "max" +CONF_MIN = "min" +CONF_OBJECT_ID = "object_id" +CONF_PICTURE = "picture" +CONF_PRESS = "press" +CONF_STEP = "step" CONF_TRIGGER = "trigger" +CONF_TURN_OFF = "turn_off" +CONF_TURN_ON = "turn_on" DOMAIN = "template" @@ -27,15 +37,3 @@ PLATFORMS = [ Platform.VACUUM, Platform.WEATHER, ] - -CONF_ATTRIBUTE_TEMPLATES = "attribute_templates" -CONF_ATTRIBUTES = "attributes" -CONF_AVAILABILITY = "availability" -CONF_MAX = "max" -CONF_MIN = "min" -CONF_OBJECT_ID = "object_id" -CONF_PICTURE = "picture" -CONF_PRESS = "press" -CONF_STEP = "step" -CONF_TURN_OFF = "turn_off" -CONF_TURN_ON = "turn_on" From af6434a5334165bef7965e9fcfc17efc86099be5 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:14:05 +0200 Subject: [PATCH 1630/1635] Add alias to DOMAIN import in tests [n-z] (#125581) --- tests/components/notify_events/test_notify.py | 12 +- tests/components/picnic/test_todo.py | 6 +- .../components/sleepiq/test_binary_sensor.py | 7 +- tests/components/sleepiq/test_button.py | 10 +- tests/components/sleepiq/test_light.py | 12 +- 
tests/components/sleepiq/test_number.py | 14 +- tests/components/sleepiq/test_select.py | 19 +-- tests/components/sleepiq/test_sensor.py | 6 +- tests/components/sleepiq/test_switch.py | 12 +- tests/components/template/test_cover.py | 134 +++++++++--------- tests/components/template/test_fan.py | 50 +++---- .../components/tomato/test_device_tracker.py | 26 ++-- tests/components/venstar/util.py | 6 +- .../components/xiaomi/test_device_tracker.py | 16 +-- tests/components/xiaomi_miio/test_button.py | 6 +- tests/components/xiaomi_miio/test_select.py | 4 +- tests/components/xiaomi_miio/test_vacuum.py | 26 ++-- tests/components/zha/test_button.py | 8 +- tests/components/zwave_js/test_cover.py | 56 ++++---- tests/components/zwave_js/test_switch.py | 19 ++- 20 files changed, 240 insertions(+), 209 deletions(-) diff --git a/tests/components/notify_events/test_notify.py b/tests/components/notify_events/test_notify.py index dbfc354404b..df6df078de1 100644 --- a/tests/components/notify_events/test_notify.py +++ b/tests/components/notify_events/test_notify.py @@ -1,6 +1,10 @@ """The tests for notify_events.""" -from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, DOMAIN +from homeassistant.components.notify import ( + ATTR_DATA, + ATTR_MESSAGE, + DOMAIN as NOTIFY_DOMAIN, +) from homeassistant.components.notify_events.notify import ( ATTR_LEVEL, ATTR_PRIORITY, @@ -13,10 +17,10 @@ from tests.common import async_mock_service async def test_send_msg(hass: HomeAssistant) -> None: """Test notify.events service.""" - notify_calls = async_mock_service(hass, DOMAIN, "events") + notify_calls = async_mock_service(hass, NOTIFY_DOMAIN, "events") await hass.services.async_call( - DOMAIN, + NOTIFY_DOMAIN, "events", { ATTR_MESSAGE: "message content", @@ -32,7 +36,7 @@ async def test_send_msg(hass: HomeAssistant) -> None: assert len(notify_calls) == 1 call = notify_calls[-1] - assert call.domain == DOMAIN + assert call.domain == NOTIFY_DOMAIN assert call.service == "events" assert call.data.get(ATTR_MESSAGE) == "message content" assert call.data.get(ATTR_DATA).get(ATTR_TOKEN) == "XYZ" diff --git a/tests/components/picnic/test_todo.py b/tests/components/picnic/test_todo.py index 2db5bc90159..3a6e09f7ac0 100644 --- a/tests/components/picnic/test_todo.py +++ b/tests/components/picnic/test_todo.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, Mock import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.todo import ATTR_ITEM, DOMAIN, TodoServices +from homeassistant.components.todo import ATTR_ITEM, DOMAIN as TODO_DOMAIN, TodoServices from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -91,7 +91,7 @@ async def test_create_todo_list_item( mock_picnic_api.add_product = Mock() await hass.services.async_call( - DOMAIN, + TODO_DOMAIN, TodoServices.ADD_ITEM, {ATTR_ITEM: "Melk"}, target={ATTR_ENTITY_ID: ENTITY_ID}, @@ -119,7 +119,7 @@ async def test_create_todo_list_item_not_found( with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + TODO_DOMAIN, TodoServices.ADD_ITEM, {ATTR_ITEM: "Melk"}, target={ATTR_ENTITY_ID: ENTITY_ID}, diff --git a/tests/components/sleepiq/test_binary_sensor.py b/tests/components/sleepiq/test_binary_sensor.py index 65654de74ac..689834aba35 100644 --- a/tests/components/sleepiq/test_binary_sensor.py +++ b/tests/components/sleepiq/test_binary_sensor.py @@ -1,6 +1,9 @@ """The tests for SleepIQ binary sensor 
platform.""" -from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDeviceClass +from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorDeviceClass, +) from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_FRIENDLY_NAME, @@ -28,7 +31,7 @@ async def test_binary_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ binary sensors.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, BINARY_SENSOR_DOMAIN) state = hass.states.get( f"binary_sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_is_in_bed" diff --git a/tests/components/sleepiq/test_button.py b/tests/components/sleepiq/test_button.py index 33ad4d72b46..e1c4203c937 100644 --- a/tests/components/sleepiq/test_button.py +++ b/tests/components/sleepiq/test_button.py @@ -1,6 +1,6 @@ """The tests for SleepIQ binary sensor platform.""" -from homeassistant.components.button import DOMAIN +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -12,7 +12,7 @@ async def test_button_calibrate( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ calibrate button.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, BUTTON_DOMAIN) state = hass.states.get(f"button.sleepnumber_{BED_NAME_LOWER}_calibrate") assert ( @@ -24,7 +24,7 @@ async def test_button_calibrate( assert entity.unique_id == f"{BED_ID}-calibrate" await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, "press", {ATTR_ENTITY_ID: f"button.sleepnumber_{BED_NAME_LOWER}_calibrate"}, blocking=True, @@ -38,7 +38,7 @@ async def test_button_stop_pump( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ stop pump button.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, BUTTON_DOMAIN) state = hass.states.get(f"button.sleepnumber_{BED_NAME_LOWER}_stop_pump") assert ( @@ -50,7 +50,7 @@ async def test_button_stop_pump( assert entity.unique_id == f"{BED_ID}-stop-pump" await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, "press", {ATTR_ENTITY_ID: f"button.sleepnumber_{BED_NAME_LOWER}_stop_pump"}, blocking=True, diff --git a/tests/components/sleepiq/test_light.py b/tests/components/sleepiq/test_light.py index 9564bca7a99..d1284dc3e41 100644 --- a/tests/components/sleepiq/test_light.py +++ b/tests/components/sleepiq/test_light.py @@ -1,6 +1,6 @@ """The tests for SleepIQ light platform.""" -from homeassistant.components.light import DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.sleepiq.coordinator import LONGER_UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant @@ -16,7 +16,7 @@ async def test_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test for successfully setting up the SleepIQ platform.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, LIGHT_DOMAIN) assert len(entity_registry.entities) == 2 @@ -33,10 +33,10 @@ async def test_setup( async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test light change.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, 
LIGHT_DOMAIN) await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, "turn_on", {ATTR_ENTITY_ID: f"light.sleepnumber_{BED_NAME_LOWER}_light_1"}, blocking=True, @@ -45,7 +45,7 @@ async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: mock_asyncsleepiq.beds[BED_ID].foundation.lights[0].turn_on.assert_called_once() await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, "turn_off", {ATTR_ENTITY_ID: f"light.sleepnumber_{BED_NAME_LOWER}_light_1"}, blocking=True, @@ -56,7 +56,7 @@ async def test_light_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: async def test_switch_get_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test light update.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, LIGHT_DOMAIN) assert ( hass.states.get(f"light.sleepnumber_{BED_NAME_LOWER}_light_1").state diff --git a/tests/components/sleepiq/test_number.py b/tests/components/sleepiq/test_number.py index 52df2eb27aa..f0739aabc9d 100644 --- a/tests/components/sleepiq/test_number.py +++ b/tests/components/sleepiq/test_number.py @@ -5,7 +5,7 @@ from homeassistant.components.number import ( ATTR_MIN, ATTR_STEP, ATTR_VALUE, - DOMAIN, + DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, ) from homeassistant.const import ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, ATTR_ICON @@ -30,7 +30,7 @@ async def test_firmness( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ firmness number values for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get( f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_firmness" @@ -71,7 +71,7 @@ async def test_firmness( assert entry.unique_id == f"{SLEEPER_R_ID}_firmness" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_firmness", @@ -89,7 +89,7 @@ async def test_actuators( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ actuator position values for a bed with adjustable head and foot.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get(f"number.sleepnumber_{BED_NAME_LOWER}_right_head_position") assert state.state == "60.0" @@ -143,7 +143,7 @@ async def test_actuators( assert entry.unique_id == f"{BED_ID}_F" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_right_head_position", @@ -165,7 +165,7 @@ async def test_foot_warmer_timer( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ foot warmer number values for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get( f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warming_timer" @@ -187,7 +187,7 @@ async def test_foot_warmer_timer( assert entry.unique_id == f"{BED_ID}_L_foot_warming_timer" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, { ATTR_ENTITY_ID: f"number.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warming_timer", diff --git a/tests/components/sleepiq/test_select.py b/tests/components/sleepiq/test_select.py index ef4c7fb6df0..bbfb612e9cb 100644 --- a/tests/components/sleepiq/test_select.py +++ 
b/tests/components/sleepiq/test_select.py @@ -4,7 +4,10 @@ from unittest.mock import MagicMock from asyncsleepiq import FootWarmingTemps -from homeassistant.components.select import DOMAIN, SERVICE_SELECT_OPTION +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -37,7 +40,7 @@ async def test_split_foundation_preset( mock_asyncsleepiq: MagicMock, ) -> None: """Test the SleepIQ select entity for split foundation presets.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SELECT_DOMAIN) state = hass.states.get( f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset_right" @@ -72,7 +75,7 @@ async def test_split_foundation_preset( assert entry.unique_id == f"{BED_ID}_preset_L" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset_left", @@ -94,7 +97,7 @@ async def test_single_foundation_preset( mock_asyncsleepiq_single_foundation: MagicMock, ) -> None: """Test the SleepIQ select entity for single foundation presets.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SELECT_DOMAIN) state = hass.states.get(f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset") assert state.state == PRESET_R_STATE @@ -111,7 +114,7 @@ async def test_single_foundation_preset( assert entry.unique_id == f"{BED_ID}_preset" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_foundation_preset", @@ -135,7 +138,7 @@ async def test_foot_warmer( mock_asyncsleepiq: MagicMock, ) -> None: """Test the SleepIQ select entity for foot warmers.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SELECT_DOMAIN) state = hass.states.get( f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warmer" @@ -154,7 +157,7 @@ async def test_foot_warmer( assert entry.unique_id == f"{SLEEPER_L_ID}_foot_warmer" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_foot_warmer", @@ -185,7 +188,7 @@ async def test_foot_warmer( assert entry.unique_id == f"{SLEEPER_R_ID}_foot_warmer" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, { ATTR_ENTITY_ID: f"select.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_R_NAME_LOWER}_foot_warmer", diff --git a/tests/components/sleepiq/test_sensor.py b/tests/components/sleepiq/test_sensor.py index ae25958419c..eb558850fb3 100644 --- a/tests/components/sleepiq/test_sensor.py +++ b/tests/components/sleepiq/test_sensor.py @@ -1,6 +1,6 @@ """The tests for SleepIQ sensor platform.""" -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_ICON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -22,7 +22,7 @@ async def test_sleepnumber_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ sleepnumber for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SENSOR_DOMAIN) state = hass.states.get( f"sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_sleepnumber" @@ -61,7 +61,7 @@ 
async def test_pressure_sensors( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test the SleepIQ pressure for a bed with two sides.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SENSOR_DOMAIN) state = hass.states.get( f"sensor.sleepnumber_{BED_NAME_LOWER}_{SLEEPER_L_NAME_LOWER}_pressure" diff --git a/tests/components/sleepiq/test_switch.py b/tests/components/sleepiq/test_switch.py index 7c41b6b9d19..5dd3e77fd66 100644 --- a/tests/components/sleepiq/test_switch.py +++ b/tests/components/sleepiq/test_switch.py @@ -1,7 +1,7 @@ """The tests for SleepIQ switch platform.""" from homeassistant.components.sleepiq.coordinator import LONGER_UPDATE_INTERVAL -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -16,7 +16,7 @@ async def test_setup( hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_asyncsleepiq ) -> None: """Test for successfully setting up the SleepIQ platform.""" - entry = await setup_platform(hass, DOMAIN) + entry = await setup_platform(hass, SWITCH_DOMAIN) assert len(entity_registry.entities) == 1 @@ -28,10 +28,10 @@ async def test_setup( async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test button press.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, "turn_off", {ATTR_ENTITY_ID: f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode"}, blocking=True, @@ -40,7 +40,7 @@ async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None mock_asyncsleepiq.beds[BED_ID].set_pause_mode.assert_called_with(False) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, "turn_on", {ATTR_ENTITY_ID: f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode"}, blocking=True, @@ -51,7 +51,7 @@ async def test_switch_set_states(hass: HomeAssistant, mock_asyncsleepiq) -> None async def test_switch_get_states(hass: HomeAssistant, mock_asyncsleepiq) -> None: """Test button press.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) assert ( hass.states.get(f"switch.sleepnumber_{BED_NAME_LOWER}_pause_mode").state diff --git a/tests/components/template/test_cover.py b/tests/components/template/test_cover.py index 2674b9697ed..ce409869048 100644 --- a/tests/components/template/test_cover.py +++ b/tests/components/template/test_cover.py @@ -5,7 +5,11 @@ from typing import Any import pytest from homeassistant import setup -from homeassistant.components.cover import ATTR_POSITION, ATTR_TILT_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_POSITION, + ATTR_TILT_POSITION, + DOMAIN as COVER_DOMAIN, +) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, @@ -51,13 +55,13 @@ OPEN_CLOSE_COVER_CONFIG = { } -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( ("config", "states"), [ ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -102,7 +106,7 @@ OPEN_CLOSE_COVER_CONFIG = { ), ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -152,13 +156,13 @@ async def test_template_state_text( assert text 
in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( ("config", "entity", "set_state", "test_state", "attr"), [ ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -178,7 +182,7 @@ async def test_template_state_text( ), ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -218,12 +222,12 @@ async def test_template_state_text_ignored_if_none_or_empty( assert "ERROR" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -241,12 +245,12 @@ async def test_template_state_boolean(hass: HomeAssistant, start_ha) -> None: assert state.state == STATE_OPEN -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -281,12 +285,12 @@ async def test_template_position( assert "ValueError" not in caplog.text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -304,13 +308,13 @@ async def test_template_not_optimistic(hass: HomeAssistant, start_ha) -> None: assert state.state == STATE_UNKNOWN -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( ("config", "tilt_position"), [ ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -325,7 +329,7 @@ async def test_template_not_optimistic(hass: HomeAssistant, start_ha) -> None: ), ( { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -348,12 +352,12 @@ async def test_template_tilt( assert state.attributes.get("current_tilt_position") == tilt_position -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -365,7 +369,7 @@ async def test_template_tilt( } }, { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -391,18 +395,18 @@ async def test_template_out_of_bounds(hass: HomeAssistant, start_ha) -> None: assert state.attributes.get("current_position") is None -@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": {"test_template_cover": {"value_template": "{{ 1 == 1 }}"}}, } }, { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -428,12 +432,12 @@ async def test_template_open_or_position( assert "Invalid config for 'cover' from integration 'template'" in caplog_setup_text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", 
"domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -453,7 +457,7 @@ async def test_open_action( assert state.state == STATE_CLOSED await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() @@ -462,12 +466,12 @@ async def test_open_action( assert calls[0].data["caller"] == "cover.test_template_cover" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -494,12 +498,12 @@ async def test_close_stop_action( assert state.state == STATE_OPEN await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() @@ -554,7 +558,7 @@ async def test_set_position( assert state.state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -565,7 +569,7 @@ async def test_set_position( assert calls[-1].data["position"] == 100 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -576,7 +580,7 @@ async def test_set_position( assert calls[-1].data["position"] == 0 await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -587,7 +591,7 @@ async def test_set_position( assert calls[-1].data["position"] == 100 await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") @@ -598,7 +602,7 @@ async def test_set_position( assert calls[-1].data["position"] == 0 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 25}, blocking=True, @@ -612,12 +616,12 @@ async def test_set_position( assert calls[-1].data["position"] == 25 -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -658,7 +662,7 @@ async def test_set_tilt_position( ) -> None: """Test the set_tilt_position command.""" await 
hass.services.async_call( - DOMAIN, + COVER_DOMAIN, service, attr, blocking=True, @@ -671,12 +675,12 @@ async def test_set_tilt_position( assert calls[-1].data["tilt_position"] == tilt_position -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -695,7 +699,7 @@ async def test_set_position_optimistic( assert state.attributes.get("current_position") is None await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 42}, blocking=True, @@ -711,19 +715,19 @@ async def test_set_position_optimistic( (SERVICE_TOGGLE, STATE_OPEN), ): await hass.services.async_call( - DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") assert state.state == test_state -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -744,7 +748,7 @@ async def test_set_tilt_position_optimistic( assert state.attributes.get("current_tilt_position") is None await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 42}, blocking=True, @@ -760,19 +764,19 @@ async def test_set_tilt_position_optimistic( (SERVICE_TOGGLE_COVER_TILT, 100.0), ): await hass.services.async_call( - DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, service, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) await hass.async_block_till_done() state = hass.states.get("cover.test_template_cover") assert state.attributes.get("current_tilt_position") == pos -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -800,12 +804,12 @@ async def test_icon_template(hass: HomeAssistant, start_ha) -> None: assert state.attributes["icon"] == "mdi:check" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -835,12 +839,12 @@ async def test_entity_picture_template(hass: HomeAssistant, start_ha) -> None: assert state.attributes["entity_picture"] == "/local/cover.png" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -868,12 +872,12 @@ async def test_availability_template(hass: HomeAssistant, start_ha) -> None: assert hass.states.get("cover.test_template_cover").state != STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { 
"platform": "template", "covers": { "test_template_cover": { @@ -893,12 +897,12 @@ async def test_availability_without_availability_template( assert state.state != STATE_UNAVAILABLE -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -919,12 +923,12 @@ async def test_invalid_availability_template_keeps_component_available( assert "UndefinedError: 'x' is undefined" in caplog_setup_text -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -943,12 +947,12 @@ async def test_device_class(hass: HomeAssistant, start_ha) -> None: assert state.attributes.get("device_class") == "door" -@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover": { @@ -967,12 +971,12 @@ async def test_invalid_device_class(hass: HomeAssistant, start_ha) -> None: assert not state -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "test_template_cover_01": { @@ -995,12 +999,12 @@ async def test_unique_id(hass: HomeAssistant, start_ha) -> None: assert len(hass.states.async_all()) == 1 -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "garage_door": { @@ -1029,12 +1033,12 @@ async def test_state_gets_lowercased(hass: HomeAssistant, start_ha) -> None: assert hass.states.get("cover.garage_door").state == STATE_CLOSED -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, COVER_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + COVER_DOMAIN: { "platform": "template", "covers": { "office": { diff --git a/tests/components/template/test_fan.py b/tests/components/template/test_fan.py index 40966d5557c..020444a620a 100644 --- a/tests/components/template/test_fan.py +++ b/tests/components/template/test_fan.py @@ -11,7 +11,7 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODE, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, FanEntityFeature, NotValidPresetModeError, ) @@ -36,12 +36,12 @@ _OSC_INPUT = "input_select.osc" _DIRECTION_INPUT_SELECT = "input_select.direction" -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -59,12 +59,12 @@ async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: _verify(hass, STATE_ON, None, None, None, None) -@pytest.mark.parametrize(("count", "domain"), [(0, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(0, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { 
"platform": "template", "fans": { "platform": "template", @@ -78,7 +78,7 @@ async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: } }, { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "platform": "template", @@ -92,7 +92,7 @@ async def test_missing_optional_config(hass: HomeAssistant, start_ha) -> None: } }, { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "platform": "template", @@ -112,12 +112,12 @@ async def test_wrong_template_config(hass: HomeAssistant, start_ha) -> None: assert hass.states.async_all("fan") == [] -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -173,13 +173,13 @@ async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: _verify(hass, STATE_OFF, 0, True, DIRECTION_FORWARD, None) -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( ("config", "entity", "tests"), [ ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -203,7 +203,7 @@ async def test_templates_with_entities(hass: HomeAssistant, start_ha) -> None: ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -239,12 +239,12 @@ async def test_templates_with_entities2( _verify(hass, STATE_ON, test_percentage, None, None, test_type) -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -272,13 +272,13 @@ async def test_availability_template_with_entities( assert (hass.states.get(_TEST_FAN).state != STATE_UNAVAILABLE) == test_assert -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( ("config", "states"), [ ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -293,7 +293,7 @@ async def test_availability_template_with_entities( ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -311,7 +311,7 @@ async def test_availability_template_with_entities( ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -329,7 +329,7 @@ async def test_availability_template_with_entities( ), ( { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -354,12 +354,12 @@ async def test_template_with_unavailable_entities( _verify(hass, states[0], states[1], states[2], states[3], None) -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_fan": { @@ -903,12 +903,12 @@ async def _register_components( await hass.async_block_till_done() -@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "test_template_fan_01": { @@ -1024,12 +1024,12 @@ async def test_implemented_percentage( assert attributes.get("supported_features") & FanEntityFeature.SET_SPEED 
-@pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)]) +@pytest.mark.parametrize(("count", "domain"), [(1, FAN_DOMAIN)]) @pytest.mark.parametrize( "config", [ { - DOMAIN: { + FAN_DOMAIN: { "platform": "template", "fans": { "mechanical_ventilation": { diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index 9484d3393d7..1747832e0d5 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -7,7 +7,7 @@ import requests import requests_mock import voluptuous as vol -from homeassistant.components.device_tracker import DOMAIN +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN import homeassistant.components.tomato.device_tracker as tomato from homeassistant.const import ( CONF_HOST, @@ -68,7 +68,7 @@ def mock_session_send(): def test_config_missing_optional_params(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without optional parameters.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -94,7 +94,7 @@ def test_config_missing_optional_params(hass: HomeAssistant, mock_session_send) def test_config_default_nonssl_port(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without a default port set without ssl enabled.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -113,7 +113,7 @@ def test_config_default_nonssl_port(hass: HomeAssistant, mock_session_send) -> N def test_config_default_ssl_port(hass: HomeAssistant, mock_session_send) -> None: """Test the setup without a default port set with ssl enabled.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -135,7 +135,7 @@ def test_config_verify_ssl_but_no_ssl_enabled( ) -> None: """Test the setup with a string with ssl_verify but ssl not enabled.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -169,7 +169,7 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> Representing the absolute path to a CA certificate bundle. 
""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -200,7 +200,7 @@ def test_config_valid_verify_ssl_path(hass: HomeAssistant, mock_session_send) -> def test_config_valid_verify_ssl_bool(hass: HomeAssistant, mock_session_send) -> None: """Test the setup with a bool for ssl_verify.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -301,7 +301,7 @@ def test_config_errors() -> None: def test_config_bad_credentials(hass: HomeAssistant, mock_exception_logger) -> None: """Test the setup with bad credentials.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -324,7 +324,7 @@ def test_config_bad_credentials(hass: HomeAssistant, mock_exception_logger) -> N def test_bad_response(hass: HomeAssistant, mock_exception_logger) -> None: """Test the setup with bad response from router.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -347,7 +347,7 @@ def test_bad_response(hass: HomeAssistant, mock_exception_logger) -> None: def test_scan_devices(hass: HomeAssistant, mock_exception_logger) -> None: """Test scanning for new devices.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -366,7 +366,7 @@ def test_scan_devices(hass: HomeAssistant, mock_exception_logger) -> None: def test_bad_connection(hass: HomeAssistant, mock_exception_logger) -> None: """Test the router with a connection error.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -394,7 +394,7 @@ def test_bad_connection(hass: HomeAssistant, mock_exception_logger) -> None: def test_router_timeout(hass: HomeAssistant, mock_exception_logger) -> None: """Test the router with a timeout error.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", @@ -422,7 +422,7 @@ def test_router_timeout(hass: HomeAssistant, mock_exception_logger) -> None: def test_get_device_name(hass: HomeAssistant, mock_exception_logger) -> None: """Test getting device names.""" config = { - DOMAIN: tomato.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: tomato.PLATFORM_SCHEMA( { CONF_PLATFORM: tomato.DOMAIN, CONF_HOST: "tomato-router", diff --git a/tests/components/venstar/util.py b/tests/components/venstar/util.py index f1e85e9019e..44b3efe0720 100644 --- a/tests/components/venstar/util.py +++ b/tests/components/venstar/util.py @@ -2,7 +2,7 @@ import requests_mock -from homeassistant.components.climate import DOMAIN +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN from homeassistant.const import CONF_HOST, CONF_PLATFORM from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -54,7 +54,7 @@ async def async_init_integration( } for model in TEST_MODELS ] - config = {DOMAIN: platform_config} + config = {CLIMATE_DOMAIN: platform_config} - await async_setup_component(hass, DOMAIN, config) + await async_setup_component(hass, CLIMATE_DOMAIN, config) await 
hass.async_block_till_done() diff --git a/tests/components/xiaomi/test_device_tracker.py b/tests/components/xiaomi/test_device_tracker.py index 0f1c36d1fba..7d3b35bbda7 100644 --- a/tests/components/xiaomi/test_device_tracker.py +++ b/tests/components/xiaomi/test_device_tracker.py @@ -6,7 +6,7 @@ from unittest.mock import MagicMock, call, patch import requests -from homeassistant.components.device_tracker import DOMAIN +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN import homeassistant.components.xiaomi.device_tracker as xiaomi from homeassistant.components.xiaomi.device_tracker import get_scanner from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PLATFORM, CONF_USERNAME @@ -154,7 +154,7 @@ def mocked_requests(*args, **kwargs): async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: """Testing minimal configuration.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", @@ -164,7 +164,7 @@ async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: } xiaomi.get_scanner(hass, config) assert xiaomi_mock.call_count == 1 - assert xiaomi_mock.call_args == call(config[DOMAIN]) + assert xiaomi_mock.call_args == call(config[DEVICE_TRACKER_DOMAIN]) call_arg = xiaomi_mock.call_args[0][0] assert call_arg["username"] == "admin" assert call_arg["password"] == "passwordTest" @@ -179,7 +179,7 @@ async def test_config(xiaomi_mock, hass: HomeAssistant) -> None: async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: """Testing full configuration.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", @@ -190,7 +190,7 @@ async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: } xiaomi.get_scanner(hass, config) assert xiaomi_mock.call_count == 1 - assert xiaomi_mock.call_args == call(config[DOMAIN]) + assert xiaomi_mock.call_args == call(config[DEVICE_TRACKER_DOMAIN]) call_arg = xiaomi_mock.call_args[0][0] assert call_arg["username"] == "alternativeAdminName" assert call_arg["password"] == "passwordTest" @@ -203,7 +203,7 @@ async def test_config_full(xiaomi_mock, hass: HomeAssistant) -> None: async def test_invalid_credential(mock_get, mock_post, hass: HomeAssistant) -> None: """Testing invalid credential handling.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", @@ -220,7 +220,7 @@ async def test_invalid_credential(mock_get, mock_post, hass: HomeAssistant) -> N async def test_valid_credential(mock_get, mock_post, hass: HomeAssistant) -> None: """Testing valid refresh.""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", @@ -244,7 +244,7 @@ async def test_token_timed_out(mock_get, mock_post, hass: HomeAssistant) -> None New token is requested and list is downloaded a second time. 
""" config = { - DOMAIN: xiaomi.PLATFORM_SCHEMA( + DEVICE_TRACKER_DOMAIN: xiaomi.PLATFORM_SCHEMA( { CONF_PLATFORM: xiaomi.DOMAIN, CONF_HOST: "192.168.0.1", diff --git a/tests/components/xiaomi_miio/test_button.py b/tests/components/xiaomi_miio/test_button.py index 8159d7c49e5..1f79a3ec0d0 100644 --- a/tests/components/xiaomi_miio/test_button.py +++ b/tests/components/xiaomi_miio/test_button.py @@ -4,7 +4,7 @@ from unittest.mock import MagicMock, patch import pytest -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.xiaomi_miio.const import ( CONF_FLOW_TYPE, DOMAIN as XIAOMI_DOMAIN, @@ -68,7 +68,7 @@ async def test_vacuum_button_press(hass: HomeAssistant) -> None: pressed_at = dt_util.utcnow() await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id + "_reset_side_brush"}, blocking=True, @@ -81,7 +81,7 @@ async def test_vacuum_button_press(hass: HomeAssistant) -> None: async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = f"{BUTTON_DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/xiaomi_miio/test_select.py b/tests/components/xiaomi_miio/test_select.py index 584ef910c98..566f1516fdf 100644 --- a/tests/components/xiaomi_miio/test_select.py +++ b/tests/components/xiaomi_miio/test_select.py @@ -12,7 +12,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN, + DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.components.xiaomi_miio import UPDATE_INTERVAL @@ -143,7 +143,7 @@ async def test_select_coordinator_update(hass: HomeAssistant, setup_test) -> Non async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up component.""" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = f"{SELECT_DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 64612f6f464..76321a1a0a8 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -12,7 +12,7 @@ from homeassistant.components.vacuum import ( ATTR_BATTERY_ICON, ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, - DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, SERVICE_LOCATE, SERVICE_PAUSE, @@ -283,7 +283,7 @@ async def test_xiaomi_vacuum_services( # Call services await hass.services.async_call( - DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_START, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls( [mock.call.resume_or_start()], any_order=True @@ -292,42 +292,42 @@ async def test_xiaomi_vacuum_services( mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_PAUSE, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_PAUSE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.pause()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_STOP, {"entity_id": entity_id}, blocking=True ) 
mock_mirobo_is_got_error.assert_has_calls([mock.call.stop()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.home()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_LOCATE, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.find()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True + VACUUM_DOMAIN, SERVICE_CLEAN_SPOT, {"entity_id": entity_id}, blocking=True ) mock_mirobo_is_got_error.assert_has_calls([mock.call.spot()], any_order=True) mock_mirobo_is_got_error.assert_has_calls(STATUS_CALLS, any_order=True) mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SEND_COMMAND, {"entity_id": entity_id, "command": "raw"}, blocking=True, @@ -339,7 +339,7 @@ async def test_xiaomi_vacuum_services( mock_mirobo_is_got_error.reset_mock() await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SEND_COMMAND, {"entity_id": entity_id, "command": "raw", "params": {"k1": 2}}, blocking=True, @@ -498,7 +498,7 @@ async def test_xiaomi_vacuum_fanspeeds( # Set speed service: await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": 60}, blocking=True, @@ -512,7 +512,7 @@ async def test_xiaomi_vacuum_fanspeeds( fan_speed_dict = mock_mirobo_fanspeeds.fan_speed_presets() await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": "Medium"}, blocking=True, @@ -525,7 +525,7 @@ async def test_xiaomi_vacuum_fanspeeds( assert "ERROR" not in caplog.text await hass.services.async_call( - DOMAIN, + VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED, {"entity_id": entity_id, "fan_speed": "invent"}, blocking=True, @@ -535,7 +535,7 @@ async def test_xiaomi_vacuum_fanspeeds( async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" - entity_id = f"{DOMAIN}.{entity_name}" + entity_id = f"{VACUUM_DOMAIN}.{entity_name}" config_entry = MockConfigEntry( domain=XIAOMI_DOMAIN, diff --git a/tests/components/zha/test_button.py b/tests/components/zha/test_button.py index 574805db5f6..33ed004312b 100644 --- a/tests/components/zha/test_button.py +++ b/tests/components/zha/test_button.py @@ -9,7 +9,11 @@ from zigpy.profiles import zha from zigpy.zcl.clusters import general import zigpy.zcl.foundation as zcl_f -from homeassistant.components.button import DOMAIN, SERVICE_PRESS, ButtonDeviceClass +from homeassistant.components.button import ( + DOMAIN as BUTTON_DOMAIN, + SERVICE_PRESS, + ButtonDeviceClass, +) from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, @@ -97,7 +101,7 @@ async def test_button( return_value=[0x00, zcl_f.Status.SUCCESS], ): await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True, diff 
--git a/tests/components/zwave_js/test_cover.py b/tests/components/zwave_js/test_cover.py index 4ecd697f4d1..07edb68f1da 100644 --- a/tests/components/zwave_js/test_cover.py +++ b/tests/components/zwave_js/test_cover.py @@ -15,7 +15,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, SERVICE_CLOSE_COVER, SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, @@ -68,7 +68,7 @@ async def test_window_cover( # Test setting position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY, ATTR_POSITION: 50}, blocking=True, @@ -89,7 +89,7 @@ async def test_window_cover( # Test setting position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY, ATTR_POSITION: 0}, blocking=True, @@ -110,7 +110,7 @@ async def test_window_cover( # Test opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -131,7 +131,7 @@ async def test_window_cover( # Test stop after opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -174,7 +174,7 @@ async def test_window_cover( # Test closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -194,7 +194,7 @@ async def test_window_cover( # Test stop after closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: WINDOW_COVER_ENTITY}, blocking=True, @@ -249,7 +249,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test opening tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY}, blocking=True, @@ -271,7 +271,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test closing tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY}, blocking=True, @@ -293,7 +293,7 @@ async def test_fibaro_fgr222_shutter_cover( # Test setting tilt position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: FIBARO_FGR_222_SHUTTER_COVER_ENTITY, ATTR_TILT_POSITION: 12}, blocking=True, @@ -350,7 +350,7 @@ async def test_fibaro_fgr223_shutter_cover( # Test opening tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY}, blocking=True, @@ -370,7 +370,7 @@ async def test_fibaro_fgr223_shutter_cover( client.async_send_command.reset_mock() # Test closing tilts await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY}, blocking=True, @@ -390,7 +390,7 @@ async def test_fibaro_fgr223_shutter_cover( client.async_send_command.reset_mock() # Test setting tilt position await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: FIBARO_FGR_223_SHUTTER_COVER_ENTITY, ATTR_TILT_POSITION: 12}, blocking=True, @@ -446,7 +446,7 @@ async def test_aeotec_nano_shutter_cover( # Test opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -467,7 +467,7 @@ async def 
test_aeotec_nano_shutter_cover( # Test stop after opening await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -511,7 +511,7 @@ async def test_aeotec_nano_shutter_cover( # Test closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -531,7 +531,7 @@ async def test_aeotec_nano_shutter_cover( # Test stop after closing await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: AEOTEC_SHUTTER_COVER_ENTITY}, blocking=True, @@ -583,7 +583,10 @@ async def test_motor_barrier_cover( # Test open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, + blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -605,7 +608,10 @@ async def test_motor_barrier_cover( # Test close await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: GDC_COVER_ENTITY}, + blocking=True, ) assert len(client.async_send_command.call_args_list) == 1 @@ -846,7 +852,7 @@ async def test_iblinds_v3_cover( assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -867,7 +873,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -888,7 +894,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: entity_id, ATTR_TILT_POSITION: 12}, blocking=True, @@ -909,7 +915,7 @@ async def test_iblinds_v3_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -950,7 +956,7 @@ async def test_nice_ibt4zwave_cover( assert state.attributes[ATTR_DEVICE_CLASS] == CoverDeviceClass.GATE await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True, @@ -970,7 +976,7 @@ async def test_nice_ibt4zwave_cover( client.async_send_command.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: entity_id}, blocking=True, diff --git a/tests/components/zwave_js/test_switch.py b/tests/components/zwave_js/test_switch.py index 810ce38cf99..30486186a4e 100644 --- a/tests/components/zwave_js/test_switch.py +++ b/tests/components/zwave_js/test_switch.py @@ -6,7 +6,11 @@ from zwave_js_server.event import Event from zwave_js_server.exceptions import FailedZWaveCommand from zwave_js_server.model.node import Node -from homeassistant.components.switch import DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) from homeassistant.components.zwave_js.helpers import ZwaveValueMatcher from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN, EntityCategory from homeassistant.core import HomeAssistant @@ -95,7 +99,7 @@ async def 
test_barrier_signaling_switch( # Test turning off await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {"entity_id": entity}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {"entity_id": entity}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -120,7 +124,7 @@ async def test_barrier_signaling_switch( # Test turning on await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {"entity_id": entity}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_ON, {"entity_id": entity}, blocking=True ) # Note: the valueId's value is still 255 because we never @@ -250,7 +254,7 @@ async def test_config_parameter_switch( # Test turning on await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {"entity_id": switch_entity_id}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_ON, {"entity_id": switch_entity_id}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -268,7 +272,7 @@ async def test_config_parameter_switch( # Test turning off await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True ) assert len(client.async_send_command.call_args_list) == 1 @@ -288,7 +292,10 @@ async def test_config_parameter_switch( # Test turning off error raises proper exception with pytest.raises(HomeAssistantError) as err: await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {"entity_id": switch_entity_id}, blocking=True + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {"entity_id": switch_entity_id}, + blocking=True, ) assert str(err.value) == ( From 029dbe7d946feae4599a61ba3e53049ce027389b Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:14:35 +0200 Subject: [PATCH 1631/1635] Add alias to DOMAIN import in homekit (#125572) --- .../components/homekit/type_covers.py | 16 ++++++++------ homeassistant/components/homekit/type_fans.py | 18 +++++++-------- .../components/homekit/type_humidifiers.py | 6 ++--- .../components/homekit/type_lights.py | 9 +++++--- .../components/homekit/type_locks.py | 4 ++-- .../components/homekit/type_media_players.py | 22 +++++++++---------- .../homekit/type_security_systems.py | 4 ++-- .../components/homekit/type_switches.py | 4 ++-- 8 files changed, 44 insertions(+), 39 deletions(-) diff --git a/homeassistant/components/homekit/type_covers.py b/homeassistant/components/homekit/type_covers.py index 29dda418665..b2f8bc1f01a 100644 --- a/homeassistant/components/homekit/type_covers.py +++ b/homeassistant/components/homekit/type_covers.py @@ -17,7 +17,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, CoverEntityFeature, ) from homeassistant.const import ( @@ -181,11 +181,11 @@ class GarageDoorOpener(HomeAccessory): if value == HK_DOOR_OPEN: if self.char_current_state.value != value: self.char_current_state.set_value(HK_DOOR_OPENING) - self.async_call_service(DOMAIN, SERVICE_OPEN_COVER, params) + self.async_call_service(COVER_DOMAIN, SERVICE_OPEN_COVER, params) elif value == HK_DOOR_CLOSED: if self.char_current_state.value != value: self.char_current_state.set_value(HK_DOOR_CLOSING) - self.async_call_service(DOMAIN, SERVICE_CLOSE_COVER, params) + self.async_call_service(COVER_DOMAIN, SERVICE_CLOSE_COVER, params) @callback def async_update_state(self, new_state: State) -> None: @@ -248,7 +248,7 @@ class OpeningDeviceBase(HomeAccessory): if value != 1: return 
self.async_call_service( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: self.entity_id} + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: self.entity_id} ) def set_tilt(self, value: float) -> None: @@ -261,7 +261,9 @@ class OpeningDeviceBase(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id, ATTR_TILT_POSITION: value} - self.async_call_service(DOMAIN, SERVICE_SET_COVER_TILT_POSITION, params, value) + self.async_call_service( + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, params, value + ) @callback def async_update_state(self, new_state: State) -> None: @@ -322,7 +324,7 @@ class OpeningDevice(OpeningDeviceBase, HomeAccessory): """Move cover to value if call came from HomeKit.""" _LOGGER.debug("%s: Set position to %d", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_POSITION: value} - self.async_call_service(DOMAIN, SERVICE_SET_COVER_POSITION, params, value) + self.async_call_service(COVER_DOMAIN, SERVICE_SET_COVER_POSITION, params, value) @callback def async_update_state(self, new_state: State) -> None: @@ -423,7 +425,7 @@ class WindowCoveringBasic(OpeningDeviceBase, HomeAccessory): service, position = (SERVICE_STOP_COVER, 50) params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(COVER_DOMAIN, service, params) # Snap the current/target position to the expected final position. self.char_current_position.set_value(position) diff --git a/homeassistant/components/homekit/type_fans.py b/homeassistant/components/homekit/type_fans.py index 64c121878a9..542d4500cbc 100644 --- a/homeassistant/components/homekit/type_fans.py +++ b/homeassistant/components/homekit/type_fans.py @@ -14,7 +14,7 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODES, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, SERVICE_SET_PERCENTAGE, @@ -179,12 +179,12 @@ class Fan(HomeAccessory): "%s: Set auto to 1 (%s)", self.entity_id, self.preset_modes[0] ) params[ATTR_PRESET_MODE] = self.preset_modes[0] - self.async_call_service(DOMAIN, SERVICE_SET_PRESET_MODE, params) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_PRESET_MODE, params) elif current_state := self.hass.states.get(self.entity_id): percentage: float = current_state.attributes.get(ATTR_PERCENTAGE) or 50.0 params[ATTR_PERCENTAGE] = percentage _LOGGER.debug("%s: Set auto to 0", self.entity_id) - self.async_call_service(DOMAIN, SERVICE_TURN_ON, params) + self.async_call_service(FAN_DOMAIN, SERVICE_TURN_ON, params) def set_preset_mode(self, value: int, preset_mode: str) -> None: """Set preset_mode if call came from HomeKit.""" @@ -194,36 +194,36 @@ class Fan(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id} if value: params[ATTR_PRESET_MODE] = preset_mode - self.async_call_service(DOMAIN, SERVICE_SET_PRESET_MODE, params) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_PRESET_MODE, params) else: - self.async_call_service(DOMAIN, SERVICE_TURN_ON, params) + self.async_call_service(FAN_DOMAIN, SERVICE_TURN_ON, params) def set_state(self, value: int) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set state to %d", self.entity_id, value) service = SERVICE_TURN_ON if value == 1 else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(FAN_DOMAIN, service, params) def set_direction(self, value: int) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set direction to %d", 
self.entity_id, value) direction = DIRECTION_REVERSE if value == 1 else DIRECTION_FORWARD params = {ATTR_ENTITY_ID: self.entity_id, ATTR_DIRECTION: direction} - self.async_call_service(DOMAIN, SERVICE_SET_DIRECTION, params, direction) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_DIRECTION, params, direction) def set_oscillating(self, value: int) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set oscillating to %d", self.entity_id, value) oscillating = value == 1 params = {ATTR_ENTITY_ID: self.entity_id, ATTR_OSCILLATING: oscillating} - self.async_call_service(DOMAIN, SERVICE_OSCILLATE, params, oscillating) + self.async_call_service(FAN_DOMAIN, SERVICE_OSCILLATE, params, oscillating) def set_percentage(self, value: float) -> None: """Set state if call came from HomeKit.""" _LOGGER.debug("%s: Set speed to %d", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_PERCENTAGE: value} - self.async_call_service(DOMAIN, SERVICE_SET_PERCENTAGE, params, value) + self.async_call_service(FAN_DOMAIN, SERVICE_SET_PERCENTAGE, params, value) @callback def async_update_state(self, new_state: State) -> None: diff --git a/homeassistant/components/homekit/type_humidifiers.py b/homeassistant/components/homekit/type_humidifiers.py index 5bdf5950f18..a57a5e00974 100644 --- a/homeassistant/components/homekit/type_humidifiers.py +++ b/homeassistant/components/homekit/type_humidifiers.py @@ -13,7 +13,7 @@ from homeassistant.components.humidifier import ( ATTR_MIN_HUMIDITY, DEFAULT_MAX_HUMIDITY, DEFAULT_MIN_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, HumidifierDeviceClass, ) @@ -253,7 +253,7 @@ class HumidifierDehumidifier(HomeAccessory): if CHAR_ACTIVE in char_values: self.async_call_service( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON if char_values[CHAR_ACTIVE] else SERVICE_TURN_OFF, {ATTR_ENTITY_ID: self.entity_id}, f"{CHAR_ACTIVE} to {char_values[CHAR_ACTIVE]}", @@ -272,7 +272,7 @@ class HumidifierDehumidifier(HomeAccessory): self.char_target_humidity.set_value(humidity) self.async_call_service( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: self.entity_id, ATTR_HUMIDITY: humidity}, ( diff --git a/homeassistant/components/homekit/type_lights.py b/homeassistant/components/homekit/type_lights.py index cb446ea551c..6b57a03153c 100644 --- a/homeassistant/components/homekit/type_lights.py +++ b/homeassistant/components/homekit/type_lights.py @@ -20,7 +20,7 @@ from homeassistant.components.light import ( ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, brightness_supported, color_supported, @@ -188,7 +188,10 @@ class Light(HomeAccessory): if service == SERVICE_TURN_OFF: self.async_call_service( - DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id}, ", ".join(events) + LIGHT_DOMAIN, + service, + {ATTR_ENTITY_ID: self.entity_id}, + ", ".join(events), ) return @@ -232,7 +235,7 @@ class Light(HomeAccessory): _LOGGER.debug( "Calling light service with params: %s -> %s", char_values, params ) - self.async_call_service(DOMAIN, service, params, ", ".join(events)) + self.async_call_service(LIGHT_DOMAIN, service, params, ", ".join(events)) @callback def async_update_state(self, new_state: State) -> None: diff --git a/homeassistant/components/homekit/type_locks.py b/homeassistant/components/homekit/type_locks.py index e5b0ad22396..52dc71078d0 100644 --- a/homeassistant/components/homekit/type_locks.py +++ b/homeassistant/components/homekit/type_locks.py @@ -6,7 +6,7 @@ from 
typing import Any from pyhap.const import CATEGORY_DOOR_LOCK from homeassistant.components.lock import ( - DOMAIN, + DOMAIN as LOCK_DOMAIN, STATE_JAMMED, STATE_LOCKED, STATE_LOCKING, @@ -89,7 +89,7 @@ class Lock(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id} if self._code: params[ATTR_CODE] = self._code - self.async_call_service(DOMAIN, service, params) + self.async_call_service(LOCK_DOMAIN, service, params) @callback def async_update_state(self, new_state: State) -> None: diff --git a/homeassistant/components/homekit/type_media_players.py b/homeassistant/components/homekit/type_media_players.py index 4cdb471b4ff..adb16da5a2d 100644 --- a/homeassistant/components/homekit/type_media_players.py +++ b/homeassistant/components/homekit/type_media_players.py @@ -11,7 +11,7 @@ from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE_LIST, ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, SERVICE_SELECT_SOURCE, MediaPlayerEntityFeature, ) @@ -151,7 +151,7 @@ class MediaPlayer(HomeAccessory): _LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value) service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_play_pause(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -160,7 +160,7 @@ class MediaPlayer(HomeAccessory): ) service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_PAUSE params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_play_stop(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -169,7 +169,7 @@ class MediaPlayer(HomeAccessory): ) service = SERVICE_MEDIA_PLAY if value else SERVICE_MEDIA_STOP params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_toggle_mute(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -177,7 +177,7 @@ class MediaPlayer(HomeAccessory): '%s: Set switch state for "toggle_mute" to %s', self.entity_id, value ) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value} - self.async_call_service(DOMAIN, SERVICE_VOLUME_MUTE, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, params) @callback def async_update_state(self, new_state: State) -> None: @@ -286,7 +286,7 @@ class TelevisionMediaPlayer(RemoteInputSelectAccessory): _LOGGER.debug('%s: Set switch state for "on_off" to %s', self.entity_id, value) service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_mute(self, value: bool) -> None: """Move switch state to value if call came from HomeKit.""" @@ -294,27 +294,27 @@ class TelevisionMediaPlayer(RemoteInputSelectAccessory): '%s: Set switch state for "toggle_mute" to %s', self.entity_id, value ) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_MUTED: value} - self.async_call_service(DOMAIN, SERVICE_VOLUME_MUTE, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_MUTE, params) def set_volume(self, value: bool) -> None: """Send volume step value if call came 
from HomeKit.""" _LOGGER.debug("%s: Set volume to %s", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id, ATTR_MEDIA_VOLUME_LEVEL: value} - self.async_call_service(DOMAIN, SERVICE_VOLUME_SET, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_VOLUME_SET, params) def set_volume_step(self, value: bool) -> None: """Send volume step value if call came from HomeKit.""" _LOGGER.debug("%s: Step volume by %s", self.entity_id, value) service = SERVICE_VOLUME_DOWN if value else SERVICE_VOLUME_UP params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) def set_input_source(self, value: int) -> None: """Send input set value if call came from HomeKit.""" _LOGGER.debug("%s: Set current input to %s", self.entity_id, value) source_name = self._mapped_sources[self.sources[value]] params = {ATTR_ENTITY_ID: self.entity_id, ATTR_INPUT_SOURCE: source_name} - self.async_call_service(DOMAIN, SERVICE_SELECT_SOURCE, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, SERVICE_SELECT_SOURCE, params) def set_remote_key(self, value: int) -> None: """Send remote key value if call came from HomeKit.""" @@ -335,7 +335,7 @@ class TelevisionMediaPlayer(RemoteInputSelectAccessory): else: service = SERVICE_MEDIA_PLAY_PAUSE params = {ATTR_ENTITY_ID: self.entity_id} - self.async_call_service(DOMAIN, service, params) + self.async_call_service(MEDIA_PLAYER_DOMAIN, service, params) return # Unhandled keys can be handled by listening to the event bus diff --git a/homeassistant/components/homekit/type_security_systems.py b/homeassistant/components/homekit/type_security_systems.py index 27c479de6ba..6ab521b6727 100644 --- a/homeassistant/components/homekit/type_security_systems.py +++ b/homeassistant/components/homekit/type_security_systems.py @@ -6,7 +6,7 @@ from typing import Any from pyhap.const import CATEGORY_ALARM_SYSTEM from homeassistant.components.alarm_control_panel import ( - DOMAIN, + DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, AlarmControlPanelEntityFeature, ) from homeassistant.const import ( @@ -153,7 +153,7 @@ class SecuritySystem(HomeAccessory): params = {ATTR_ENTITY_ID: self.entity_id} if self._alarm_code: params[ATTR_CODE] = self._alarm_code - self.async_call_service(DOMAIN, service, params) + self.async_call_service(ALARM_CONTROL_PANEL_DOMAIN, service, params) @callback def async_update_state(self, new_state: State) -> None: diff --git a/homeassistant/components/homekit/type_switches.py b/homeassistant/components/homekit/type_switches.py index 45a823882f7..68df6c38ad6 100644 --- a/homeassistant/components/homekit/type_switches.py +++ b/homeassistant/components/homekit/type_switches.py @@ -16,7 +16,7 @@ from pyhap.const import ( from homeassistant.components import button, input_button from homeassistant.components.input_select import ATTR_OPTIONS, SERVICE_SELECT_OPTION -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.components.vacuum import ( DOMAIN as VACUUM_DOMAIN, SERVICE_RETURN_TO_BASE, @@ -109,7 +109,7 @@ class Outlet(HomeAccessory): _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value) params = {ATTR_ENTITY_ID: self.entity_id} service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF - self.async_call_service(DOMAIN, service, params) + self.async_call_service(SWITCH_DOMAIN, service, params) @callback def async_update_state(self, new_state: State) -> None: From 
fe2402b61103ea33f637bc8b93f8af4c53ff621e Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:18:28 +0200 Subject: [PATCH 1632/1635] Add alias to DOMAIN import in tests [e-g] (#125575) --- tests/components/ecobee/test_number.py | 18 ++- tests/components/ecobee/test_switch.py | 30 ++-- tests/components/flo/test_switch.py | 6 +- .../components/fritzbox/test_binary_sensor.py | 9 +- tests/components/fritzbox/test_button.py | 8 +- tests/components/fritzbox/test_climate.py | 22 +-- tests/components/fritzbox/test_cover.py | 14 +- tests/components/fritzbox/test_light.py | 16 +-- tests/components/fritzbox/test_sensor.py | 10 +- tests/components/fritzbox/test_switch.py | 14 +- .../generic_hygrostat/test_humidifier.py | 128 +++++++++--------- .../generic_thermostat/test_climate.py | 40 +++--- tests/components/goalzero/test_switch.py | 6 +- tests/components/gree/test_bridge.py | 12 +- tests/components/gree/test_climate.py | 60 ++++---- tests/components/gree/test_switch.py | 30 ++-- tests/components/group/test_cover.py | 77 +++++++---- tests/components/group/test_fan.py | 40 +++--- 18 files changed, 288 insertions(+), 252 deletions(-) diff --git a/tests/components/ecobee/test_number.py b/tests/components/ecobee/test_number.py index da5c8135a05..5b01fe8c5ba 100644 --- a/tests/components/ecobee/test_number.py +++ b/tests/components/ecobee/test_number.py @@ -2,7 +2,11 @@ from unittest.mock import patch -from homeassistant.components.number import ATTR_VALUE, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, UnitOfTime from homeassistant.core import HomeAssistant @@ -15,7 +19,7 @@ THERMOSTAT_ID = 0 async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: """Test the ventilator number on home attributes are correct.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get(VENTILATOR_MIN_HOME_ID) assert state.state == "20" @@ -28,7 +32,7 @@ async def test_ventilator_min_on_home_attributes(hass: HomeAssistant) -> None: async def test_ventilator_min_on_away_attributes(hass: HomeAssistant) -> None: """Test the ventilator number on away attributes are correct.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) state = hass.states.get(VENTILATOR_MIN_AWAY_ID) assert state.state == "10" @@ -45,10 +49,10 @@ async def test_set_min_time_home(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_min_on_time_home" ) as mock_set_min_home_time: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: VENTILATOR_MIN_HOME_ID, ATTR_VALUE: target_value}, blocking=True, @@ -63,10 +67,10 @@ async def test_set_min_time_away(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_min_on_time_away" ) as mock_set_min_away_time: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, NUMBER_DOMAIN) await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: VENTILATOR_MIN_AWAY_ID, ATTR_VALUE: target_value}, blocking=True, diff --git a/tests/components/ecobee/test_switch.py b/tests/components/ecobee/test_switch.py index 05cea5a5e9d..31c8ce8f72d 100644 --- a/tests/components/ecobee/test_switch.py +++ 
b/tests/components/ecobee/test_switch.py @@ -8,7 +8,11 @@ from unittest.mock import patch import pytest from homeassistant.components.ecobee.switch import DATE_FORMAT -from homeassistant.components.switch import DOMAIN, SERVICE_TURN_OFF, SERVICE_TURN_ON +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant @@ -29,7 +33,7 @@ def data_fixture(): async def test_ventilator_20min_attributes(hass: HomeAssistant) -> None: """Test the ventilator switch on home attributes are correct.""" - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -42,7 +46,7 @@ async def test_ventilator_20min_when_on(hass: HomeAssistant, data) -> None: datetime.now() + timedelta(days=1) ).strftime(DATE_FORMAT) with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "on" @@ -57,7 +61,7 @@ async def test_ventilator_20min_when_off(hass: HomeAssistant, data) -> None: datetime.now() - timedelta(days=1) ).strftime(DATE_FORMAT) with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -70,7 +74,7 @@ async def test_ventilator_20min_when_empty(hass: HomeAssistant, data) -> None: data.return_value["settings"]["ventilatorOffDateTime"] = "" with mock.patch("pyecobee.Ecobee.get_thermostat", data): - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) state = hass.states.get(VENTILATOR_20MIN_ID) assert state.state == "off" @@ -84,10 +88,10 @@ async def test_turn_on_20min_ventilator(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_timer" ) as mock_set_20min_ventilator: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: VENTILATOR_20MIN_ID}, blocking=True, @@ -102,10 +106,10 @@ async def test_turn_off_20min_ventilator(hass: HomeAssistant) -> None: with patch( "homeassistant.components.ecobee.Ecobee.set_ventilator_timer" ) as mock_set_20min_ventilator: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: VENTILATOR_20MIN_ID}, blocking=True, @@ -120,10 +124,10 @@ DEVICE_ID = "switch.ecobee2_aux_heat_only" async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: """Test the switch can be turned on.""" with patch("pyecobee.Ecobee.set_hvac_mode") as mock_turn_on: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True, @@ -134,10 +138,10 @@ async def test_aux_heat_only_turn_on(hass: HomeAssistant) -> None: async def test_aux_heat_only_turn_off(hass: HomeAssistant) -> None: """Test the switch can be turned off.""" with patch("pyecobee.Ecobee.set_hvac_mode") as mock_turn_off: - await setup_platform(hass, DOMAIN) + await setup_platform(hass, SWITCH_DOMAIN) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, 
SERVICE_TURN_OFF, {ATTR_ENTITY_ID: DEVICE_ID}, blocking=True, diff --git a/tests/components/flo/test_switch.py b/tests/components/flo/test_switch.py index 02ab93f9e67..5c124d312a7 100644 --- a/tests/components/flo/test_switch.py +++ b/tests/components/flo/test_switch.py @@ -3,7 +3,7 @@ import pytest from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -30,11 +30,11 @@ async def test_valve_switches( assert hass.states.get(entity_id).state == STATE_ON await hass.services.async_call( - DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == STATE_OFF await hass.services.async_call( - DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True ) assert hass.states.get(entity_id).state == STATE_ON diff --git a/tests/components/fritzbox/test_binary_sensor.py b/tests/components/fritzbox/test_binary_sensor.py index 3e1a2691f67..f4cc1b2e2ca 100644 --- a/tests/components/fritzbox/test_binary_sensor.py +++ b/tests/components/fritzbox/test_binary_sensor.py @@ -6,7 +6,10 @@ from unittest.mock import Mock from requests.exceptions import HTTPError -from homeassistant.components.binary_sensor import DOMAIN, BinarySensorDeviceClass +from homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, + BinarySensorDeviceClass, +) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN as SENSOR_DOMAIN from homeassistant.const import ( @@ -27,7 +30,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{BINARY_SENSOR_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -148,5 +151,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_device_alarm") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.new_device_alarm") assert state diff --git a/tests/components/fritzbox/test_button.py b/tests/components/fritzbox/test_button.py index 89e8d8357dd..913f828efbc 100644 --- a/tests/components/fritzbox/test_button.py +++ b/tests/components/fritzbox/test_button.py @@ -3,7 +3,7 @@ from datetime import timedelta from unittest.mock import Mock -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -19,7 +19,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{BUTTON_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -43,7 +43,7 @@ async def test_apply_template(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, 
SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert fritz().apply_template.call_count == 1 @@ -67,5 +67,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_template") + state = hass.states.get(f"{BUTTON_DOMAIN}.new_template") assert state diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 358eeaa714e..062ba4f865f 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -15,7 +15,7 @@ from homeassistant.components.climate import ( ATTR_MIN_TEMP, ATTR_PRESET_MODE, ATTR_PRESET_MODES, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, PRESET_COMFORT, PRESET_ECO, SERVICE_SET_HVAC_MODE, @@ -56,7 +56,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{CLIMATE_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -278,7 +278,7 @@ async def test_set_temperature_temperature(hass: HomeAssistant, fritz: Mock) -> ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: 23}, True, @@ -294,7 +294,7 @@ async def test_set_temperature_mode_off(hass: HomeAssistant, fritz: Mock) -> Non ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ID, @@ -315,7 +315,7 @@ async def test_set_temperature_mode_heat(hass: HomeAssistant, fritz: Mock) -> No ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ID, @@ -335,7 +335,7 @@ async def test_set_hvac_mode_off(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.OFF}, True, @@ -351,7 +351,7 @@ async def test_no_reset_hvac_mode_heat(hass: HomeAssistant, fritz: Mock) -> None ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, True, @@ -368,7 +368,7 @@ async def test_set_hvac_mode_heat(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: HVACMode.HEAT}, True, @@ -384,7 +384,7 @@ async def test_set_preset_mode_comfort(hass: HomeAssistant, fritz: Mock) -> None ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_COMFORT}, True, @@ -400,7 +400,7 @@ async def test_set_preset_mode_eco(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: PRESET_ECO}, True, @@ -463,7 +463,7 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_climate") + state = hass.states.get(f"{CLIMATE_DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index 
6626db2bccf..383a0512565 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -6,7 +6,7 @@ from unittest.mock import Mock, call from homeassistant.components.cover import ( ATTR_CURRENT_POSITION, ATTR_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, STATE_OPEN, ) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN @@ -32,7 +32,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{COVER_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -68,7 +68,7 @@ async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_open.call_count == 1 @@ -81,7 +81,7 @@ async def test_close_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_close.call_count == 1 @@ -94,7 +94,7 @@ async def test_set_position_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_POSITION: 50}, True, @@ -110,7 +110,7 @@ async def test_stop_cover(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_blind_stop.call_count == 1 @@ -134,5 +134,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_climate") + state = hass.states.get(f"{COVER_DOMAIN}.new_climate") assert state diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py index 3cafa933fa3..84fafe25521 100644 --- a/tests/components/fritzbox/test_light.py +++ b/tests/components/fritzbox/test_light.py @@ -19,7 +19,7 @@ from homeassistant.components.light import ( ATTR_MAX_COLOR_TEMP_KELVIN, ATTR_MIN_COLOR_TEMP_KELVIN, ATTR_SUPPORTED_COLOR_MODES, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, ) from homeassistant.const import ( @@ -38,7 +38,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{LIGHT_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: @@ -147,7 +147,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_COLOR_TEMP_KELVIN: 3000}, True, @@ -170,7 +170,7 @@ async def test_turn_on_color(hass: HomeAssistant, fritz: Mock) -> None: hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -204,7 +204,7 @@ async def test_turn_on_color_unsupported_api_method( device.set_unmapped_color.side_effect = error await 
hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -219,7 +219,7 @@ async def test_turn_on_color_unsupported_api_method( error.response.status_code = 500 with pytest.raises(HTTPError, match="Bad Request"): await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, True, @@ -237,7 +237,7 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + LIGHT_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_state_off.call_count == 1 @@ -316,5 +316,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_light") + state = hass.states.get(f"{LIGHT_DOMAIN}.new_light") assert state diff --git a/tests/components/fritzbox/test_sensor.py b/tests/components/fritzbox/test_sensor.py index 63d0b67d7f4..633049a8a9b 100644 --- a/tests/components/fritzbox/test_sensor.py +++ b/tests/components/fritzbox/test_sensor.py @@ -6,7 +6,11 @@ from unittest.mock import Mock from requests.exceptions import HTTPError from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN -from homeassistant.components.sensor import ATTR_STATE_CLASS, DOMAIN, SensorStateClass +from homeassistant.components.sensor import ( + ATTR_STATE_CLASS, + DOMAIN as SENSOR_DOMAIN, + SensorStateClass, +) from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, @@ -24,7 +28,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{SENSOR_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup( @@ -130,5 +134,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_device_temperature") + state = hass.states.get(f"{SENSOR_DOMAIN}.new_device_temperature") assert state diff --git a/tests/components/fritzbox/test_switch.py b/tests/components/fritzbox/test_switch.py index ba3b1de9b2f..e394ccbc7f3 100644 --- a/tests/components/fritzbox/test_switch.py +++ b/tests/components/fritzbox/test_switch.py @@ -12,7 +12,7 @@ from homeassistant.components.sensor import ( DOMAIN as SENSOR_DOMAIN, SensorStateClass, ) -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_FRIENDLY_NAME, @@ -39,7 +39,7 @@ from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.{CONF_FAKE_NAME}" +ENTITY_ID = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}" async def test_setup( @@ -124,7 +124,7 @@ async def test_turn_on(hass: HomeAssistant, fritz: Mock) -> None: ) await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_on.call_count == 1 @@ -138,7 +138,7 @@ async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: ) 
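# --- Illustrative aside (not part of the patch above) ----------------------
# A minimal sketch of why these renames matter: several platforms export a
# constant literally named DOMAIN, so a test module that touches more than one
# platform needs aliases to avoid a name collision. Only the import pattern
# mirrors the patch; the helper and the object id below are invented here.
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN


def build_entity_ids(object_id: str) -> tuple[str, str]:
    """Return a light and a switch entity id for one made-up object id."""
    return f"{LIGHT_DOMAIN}.{object_id}", f"{SWITCH_DOMAIN}.{object_id}"


# Usage: build_entity_ids("fake_name") == ("light.fake_name", "switch.fake_name")
# ---------------------------------------------------------------------------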
await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) assert device.set_switch_state_off.call_count == 1 @@ -158,7 +158,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: match="Can't toggle switch while manual switching is disabled for the device", ): await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, True ) with pytest.raises( @@ -166,7 +166,7 @@ async def test_toggle_while_locked(hass: HomeAssistant, fritz: Mock) -> None: match="Can't toggle switch while manual switching is disabled for the device", ): await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, True ) @@ -239,5 +239,5 @@ async def test_discover_new_device(hass: HomeAssistant, fritz: Mock) -> None: async_fire_time_changed(hass, next_update) await hass.async_block_till_done(wait_background_tasks=True) - state = hass.states.get(f"{DOMAIN}.new_switch") + state = hass.states.get(f"{SWITCH_DOMAIN}.new_switch") assert state diff --git a/tests/components/generic_hygrostat/test_humidifier.py b/tests/components/generic_hygrostat/test_humidifier.py index 9cd51baa576..33a8a0f37bd 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -13,7 +13,7 @@ from homeassistant.components.generic_hygrostat import ( ) from homeassistant.components.humidifier import ( ATTR_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDIFIER_DOMAIN, MODE_AWAY, MODE_NORMAL, SERVICE_SET_HUMIDITY, @@ -107,7 +107,7 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -125,7 +125,7 @@ async def test_humidifier_input_boolean(hass: HomeAssistant) -> None: _setup_sensor(hass, 23) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -151,7 +151,7 @@ async def test_humidifier_switch( assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -170,7 +170,7 @@ async def test_humidifier_switch( await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -191,7 +191,7 @@ async def test_unique_id( await _setup_switch(hass, True) assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -222,7 +222,7 @@ async def setup_comp_0(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -248,7 +248,7 @@ async def setup_comp_2(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -269,7 +269,7 @@ async def test_unavailable_state(hass: HomeAssistant) -> None: """Test the setting of defaults to unknown.""" await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": 
{ "platform": "generic_hygrostat", @@ -296,7 +296,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: """Test the setting of defaults to unknown.""" await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -345,7 +345,7 @@ async def test_get_modes(hass: HomeAssistant) -> None: async def test_set_target_humidity(hass: HomeAssistant) -> None: """Test the setting of the target humidity.""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 40}, blocking=True, @@ -355,7 +355,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get("humidity") == 40 with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: None}, blocking=True, @@ -369,14 +369,14 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: async def test_set_away_mode(hass: HomeAssistant) -> None: """Test the setting away mode.""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -393,14 +393,14 @@ async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> N Verify original humidity is restored. """ await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -409,7 +409,7 @@ async def test_set_away_mode_and_restore_prev_humidity(hass: HomeAssistant) -> N state = hass.states.get(ENTITY) assert state.attributes.get("humidity") == 35 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -428,21 +428,21 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( Verify original humidity is restored. 
""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -451,7 +451,7 @@ async def test_set_away_mode_twice_and_restore_prev_humidity( state = hass.states.get(ENTITY) assert state.attributes.get("humidity") == 35 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -523,7 +523,7 @@ async def test_set_target_humidity_humidifier_on(hass: HomeAssistant) -> None: await hass.async_block_till_done() calls.clear() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 45}, blocking=True, @@ -544,7 +544,7 @@ async def test_set_target_humidity_humidifier_off(hass: HomeAssistant) -> None: await hass.async_block_till_done() calls.clear() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 36}, blocking=True, @@ -564,7 +564,7 @@ async def test_humidity_change_humidifier_on_within_tolerance( """Test if humidity change doesn't turn on within tolerance.""" calls = await _setup_switch(hass, False) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, @@ -582,7 +582,7 @@ async def test_humidity_change_humidifier_on_outside_tolerance( """Test if humidity change turn humidifier on outside dry tolerance.""" calls = await _setup_switch(hass, False) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 44}, blocking=True, @@ -604,7 +604,7 @@ async def test_humidity_change_humidifier_off_within_tolerance( """Test if humidity change doesn't turn off within tolerance.""" calls = await _setup_switch(hass, True) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 46}, blocking=True, @@ -622,7 +622,7 @@ async def test_humidity_change_humidifier_off_outside_tolerance( """Test if humidity change turn humidifier off outside wet tolerance.""" calls = await _setup_switch(hass, True) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 46}, blocking=True, @@ -644,14 +644,14 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: Switch turns on when humidity below setpoint and mode changes. 
""" await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 45}, blocking=True, @@ -661,7 +661,7 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: await hass.async_block_till_done() calls = await _setup_switch(hass, False) await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -696,7 +696,7 @@ async def setup_comp_3(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -722,7 +722,7 @@ async def test_set_target_humidity_dry_off(hass: HomeAssistant) -> None: _setup_sensor(hass, 50) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 55}, blocking=True, @@ -743,14 +743,14 @@ async def test_turn_away_mode_on_drying(hass: HomeAssistant) -> None: _setup_sensor(hass, 50) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 34}, blocking=True, ) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: ENTITY, ATTR_MODE: MODE_AWAY}, blocking=True, @@ -771,7 +771,7 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -781,7 +781,7 @@ async def test_operation_mode_dry(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -875,7 +875,7 @@ async def test_running_when_operating_mode_is_off_2(hass: HomeAssistant) -> None _setup_sensor(hass, 45) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -896,7 +896,7 @@ async def test_no_state_change_when_operation_mode_off_2(hass: HomeAssistant) -> _setup_sensor(hass, 30) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -913,7 +913,7 @@ async def setup_comp_4(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1008,7 +1008,7 @@ async def test_mode_change_dry_trigger_off_not_long_enough(hass: HomeAssistant) await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1028,7 +1028,7 @@ async def test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) - _setup_sensor(hass, 35) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1038,7 +1038,7 @@ async def 
test_mode_change_dry_trigger_on_not_long_enough(hass: HomeAssistant) - await hass.async_block_till_done() assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1056,7 +1056,7 @@ async def setup_comp_6(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1157,7 +1157,7 @@ async def test_mode_change_humidifier_trigger_off_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1181,7 +1181,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1194,7 +1194,7 @@ async def test_mode_change_humidifier_trigger_on_not_long_enough( assert len(calls) == 0 await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY}, blocking=True, @@ -1212,7 +1212,7 @@ async def setup_comp_7(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1279,7 +1279,7 @@ async def setup_comp_8(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1344,7 +1344,7 @@ async def test_float_tolerance_values(hass: HomeAssistant) -> None: """Test if dehumidifier does not turn on within floating point tolerance.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1370,7 +1370,7 @@ async def test_float_tolerance_values_2(hass: HomeAssistant) -> None: """Test if dehumidifier turns off when oudside of floating point tolerance values.""" assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1401,7 +1401,7 @@ async def test_custom_setup_params(hass: HomeAssistant) -> None: await hass.async_block_till_done() result = await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1441,7 +1441,7 @@ async def test_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1479,7 +1479,7 @@ async def test_restore_state_target_humidity(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1522,7 +1522,7 @@ async def test_restore_state_and_return_to_normal(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1542,7 +1542,7 @@ async def test_restore_state_and_return_to_normal(hass: HomeAssistant) -> None: assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -1577,7 +1577,7 @@ async def test_no_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + 
HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1623,7 +1623,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: async def _setup_humidifier(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1665,7 +1665,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: await hass.async_block_till_done() await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1687,7 +1687,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Switch to Away mode await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_AWAY}, blocking=True, @@ -1703,7 +1703,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Change target humidity await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_HUMIDITY: 42}, blocking=True, @@ -1719,7 +1719,7 @@ async def test_away_fixed_humidity_mode(hass: HomeAssistant) -> None: # Return to Normal mode await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_MODE, {ATTR_ENTITY_ID: "humidifier.test_hygrostat", ATTR_MODE: MODE_NORMAL}, blocking=True, @@ -1750,7 +1750,7 @@ async def test_sensor_stale_duration( assert await async_setup_component( hass, - DOMAIN, + HUMIDIFIER_DOMAIN, { "humidifier": { "platform": "generic_hygrostat", @@ -1770,7 +1770,7 @@ async def test_sensor_stale_duration( assert hass.states.get(humidifier_switch).state == STATE_OFF await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY, ATTR_HUMIDITY: 32}, blocking=True, @@ -1813,7 +1813,7 @@ async def test_sensor_stale_duration( # Manual turn off await hass.services.async_call( - DOMAIN, + HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY}, blocking=True, diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index f1c41270a2f..39435f154c4 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -11,7 +11,7 @@ from homeassistant import config as hass_config from homeassistant.components import input_boolean, switch from homeassistant.components.climate import ( ATTR_PRESET_MODE, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, PRESET_ACTIVITY, PRESET_AWAY, PRESET_COMFORT, @@ -122,7 +122,7 @@ async def test_heater_input_boolean(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -160,7 +160,7 @@ async def test_heater_switch( assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -192,7 +192,7 @@ async def test_unique_id( _setup_switch(hass, True) assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -221,7 +221,7 @@ async def setup_comp_2(hass: HomeAssistant) -> None: hass.config.units = METRIC_SYSTEM assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -248,7 +248,7 @@ async def test_setup_defaults_to_unknown(hass: HomeAssistant) -> None: hass.config.units = METRIC_SYSTEM 
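# --- Illustrative aside (not part of the patch above) ----------------------
# With the aliased constant, a service call in a test reads as explicitly
# targeting the climate platform instead of a bare DOMAIN of unclear origin.
# This is a hedged sketch: the entity id and temperature are invented, and a
# running hass instance from the usual test fixtures is assumed.
from homeassistant.components.climate import (
    DOMAIN as CLIMATE_DOMAIN,
    SERVICE_SET_TEMPERATURE,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant


async def set_example_temperature(hass: HomeAssistant) -> None:
    """Set a made-up target temperature via the aliased climate domain."""
    await hass.services.async_call(
        CLIMATE_DOMAIN,
        SERVICE_SET_TEMPERATURE,
        {ATTR_ENTITY_ID: "climate.test", ATTR_TEMPERATURE: 23.5},
        blocking=True,
    )
# ---------------------------------------------------------------------------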
await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -272,7 +272,7 @@ async def test_setup_gets_current_temp_from_sensor(hass: HomeAssistant) -> None: await hass.async_block_till_done() await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -618,7 +618,7 @@ async def setup_comp_3(hass: HomeAssistant) -> None: hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -774,7 +774,7 @@ async def _setup_thermostat_with_min_cycle_duration( hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -927,7 +927,7 @@ async def setup_comp_7(hass: HomeAssistant) -> None: hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1002,7 +1002,7 @@ async def setup_comp_8(hass: HomeAssistant) -> None: hass.config.temperature_unit = UnitOfTemperature.CELSIUS assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1076,7 +1076,7 @@ async def setup_comp_9(hass: HomeAssistant) -> None: """Initialize components.""" assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1110,7 +1110,7 @@ async def test_custom_setup_params(hass: HomeAssistant) -> None: """Test the setup with custom parameters.""" result = await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1151,7 +1151,7 @@ async def test_restore_state(hass: HomeAssistant, hvac_mode) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1189,7 +1189,7 @@ async def test_no_restore_state(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1220,7 +1220,7 @@ async def test_initial_hvac_off_force_heater_off(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1274,7 +1274,7 @@ async def test_restore_will_turn_off_(hass: HomeAssistant) -> None: await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1319,7 +1319,7 @@ async def test_restore_will_turn_off_when_loaded_second(hass: HomeAssistant) -> await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1379,7 +1379,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: async def _setup_climate(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", @@ -1415,7 +1415,7 @@ async def test_reload(hass: HomeAssistant) -> None: assert await async_setup_component( hass, - DOMAIN, + CLIMATE_DOMAIN, { "climate": { "platform": "generic_thermostat", diff --git a/tests/components/goalzero/test_switch.py b/tests/components/goalzero/test_switch.py index de2e6035a12..b784cff05aa 100644 --- a/tests/components/goalzero/test_switch.py +++ 
b/tests/components/goalzero/test_switch.py @@ -1,7 +1,7 @@ """Switch tests for the Goalzero integration.""" from homeassistant.components.goalzero.const import DEFAULT_NAME -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -32,7 +32,7 @@ async def test_switches_states( text=load_fixture("goalzero/state_change.json"), ) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: [entity_id]}, blocking=True, @@ -44,7 +44,7 @@ async def test_switches_states( text=load_fixture("goalzero/state_data.json"), ) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: [entity_id]}, blocking=True, diff --git a/tests/components/gree/test_bridge.py b/tests/components/gree/test_bridge.py index 32372bebf37..ae2f0c74236 100644 --- a/tests/components/gree/test_bridge.py +++ b/tests/components/gree/test_bridge.py @@ -5,7 +5,7 @@ from datetime import timedelta from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.climate import DOMAIN, HVACMode +from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN, HVACMode from homeassistant.components.gree.const import ( COORDINATORS, DOMAIN as GREE, @@ -18,8 +18,8 @@ from .common import async_setup_gree, build_device_mock from tests.common import async_fire_time_changed -ENTITY_ID_1 = f"{DOMAIN}.fake_device_1" -ENTITY_ID_2 = f"{DOMAIN}.fake_device_2" +ENTITY_ID_1 = f"{CLIMATE_DOMAIN}.fake_device_1" +ENTITY_ID_2 = f"{CLIMATE_DOMAIN}.fake_device_2" @pytest.fixture @@ -46,7 +46,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.1" @@ -68,7 +68,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 device_infos = [x.device.device_info for x in hass.data[GREE][COORDINATORS]] assert device_infos[0].ip == "1.1.1.2" @@ -82,7 +82,7 @@ async def test_coordinator_updates( await async_setup_gree(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 callback = device().add_handler.call_args_list[0][0][1] diff --git a/tests/components/gree/test_climate.py b/tests/components/gree/test_climate.py index 1bf49bbca26..0cb187f5a60 100644 --- a/tests/components/gree/test_climate.py +++ b/tests/components/gree/test_climate.py @@ -21,7 +21,7 @@ from homeassistant.components.climate import ( ATTR_HVAC_MODE, ATTR_PRESET_MODE, ATTR_SWING_MODE, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, FAN_AUTO, FAN_HIGH, FAN_LOW, @@ -71,7 +71,7 @@ from .common import async_setup_gree, build_device_mock from tests.common import async_fire_time_changed -ENTITY_ID = f"{DOMAIN}.fake_device_1" +ENTITY_ID = f"{CLIMATE_DOMAIN}.fake_device_1" async def test_discovery_called_once(hass: HomeAssistant, discovery, device) -> None: @@ -98,7 +98,7 @@ async def test_discovery_setup(hass: HomeAssistant, discovery, device) -> None: await async_setup_gree(hass) await hass.async_block_till_done() assert discovery.call_count 
== 1 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 async def test_discovery_setup_connection_error( @@ -117,7 +117,7 @@ async def test_discovery_setup_connection_error( await async_setup_gree(hass) await hass.async_block_till_done() - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE @@ -143,7 +143,7 @@ async def test_discovery_after_setup( await async_setup_gree(hass) # Update 1 assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 # rediscover the same devices shouldn't change anything discovery.return_value.mock_devices = [MockDevice1, MockDevice2] @@ -154,7 +154,7 @@ async def test_discovery_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 async def test_discovery_add_device_after_setup( @@ -180,7 +180,7 @@ async def test_discovery_add_device_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 1 - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 # rediscover the same devices shouldn't change anything discovery.return_value.mock_devices = [MockDevice2] @@ -191,7 +191,7 @@ async def test_discovery_add_device_after_setup( await hass.async_block_till_done() assert discovery.return_value.scan_count == 2 - assert len(hass.states.async_all(DOMAIN)) == 2 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 2 async def test_discovery_device_bind_after_setup( @@ -209,7 +209,7 @@ async def test_discovery_device_bind_after_setup( await async_setup_gree(hass) # Update 1 - assert len(hass.states.async_all(DOMAIN)) == 1 + assert len(hass.states.async_all(CLIMATE_DOMAIN)) == 1 state = hass.states.get(ENTITY_ID) assert state.name == "fake-device-1" assert state.state == STATE_UNAVAILABLE @@ -328,7 +328,7 @@ async def test_send_command_device_timeout( # Send failure should not raise exceptions or change device state await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -377,7 +377,7 @@ async def test_send_power_on(hass: HomeAssistant, discovery, device) -> None: await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -397,7 +397,7 @@ async def test_send_power_off_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_ID}, blocking=True, @@ -439,7 +439,7 @@ async def test_send_target_temperature( ) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -473,7 +473,7 @@ async def test_send_target_temperature_with_hvac_mode( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ID, @@ -509,7 +509,7 @@ async def test_send_target_temperature_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, 
ATTR_TEMPERATURE: temperature}, blocking=True, @@ -543,7 +543,7 @@ async def test_update_target_temperature( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_TEMPERATURE: temperature}, blocking=True, @@ -565,7 +565,7 @@ async def test_send_preset_mode(hass: HomeAssistant, discovery, device, preset) await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset}, blocking=True, @@ -582,7 +582,7 @@ async def test_send_invalid_preset_mode(hass: HomeAssistant, discovery, device) with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: "invalid"}, blocking=True, @@ -605,7 +605,7 @@ async def test_send_preset_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_PRESET_MODE: preset}, blocking=True, @@ -653,7 +653,7 @@ async def test_send_hvac_mode( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode}, blocking=True, @@ -677,7 +677,7 @@ async def test_send_hvac_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_HVAC_MODE: hvac_mode}, blocking=True, @@ -722,7 +722,7 @@ async def test_send_fan_mode(hass: HomeAssistant, discovery, device, fan_mode) - await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode}, blocking=True, @@ -739,7 +739,7 @@ async def test_send_invalid_fan_mode(hass: HomeAssistant, discovery, device) -> with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: "invalid"}, blocking=True, @@ -763,7 +763,7 @@ async def test_send_fan_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_FAN_MODE: fan_mode}, blocking=True, @@ -801,7 +801,7 @@ async def test_send_swing_mode( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode}, blocking=True, @@ -818,7 +818,7 @@ async def test_send_invalid_swing_mode(hass: HomeAssistant, discovery, device) - with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: "invalid"}, blocking=True, @@ -841,7 +841,7 @@ async def test_send_swing_mode_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_ID, ATTR_SWING_MODE: swing_mode}, blocking=True, @@ -884,7 +884,7 @@ async def test_coordinator_update_handler( await async_setup_gree(hass) await hass.async_block_till_done() - entity: GreeClimateEntity = hass.data[DOMAIN].get_entity(ENTITY_ID) + entity: GreeClimateEntity = hass.data[CLIMATE_DOMAIN].get_entity(ENTITY_ID) assert entity is not None # Initial state @@ 
-911,7 +911,7 @@ async def test_coordinator_update_handler( assert entity.max_temp == TEMP_MAX -@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [CLIMATE_DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion ) -> None: @@ -922,7 +922,7 @@ async def test_registry_settings( assert entries == snapshot -@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [CLIMATE_DOMAIN]) async def test_entity_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: """Test for entity registry settings (unique_id).""" await async_setup_gree(hass) diff --git a/tests/components/gree/test_switch.py b/tests/components/gree/test_switch.py index c5684abbf6f..e9491796bdf 100644 --- a/tests/components/gree/test_switch.py +++ b/tests/components/gree/test_switch.py @@ -7,7 +7,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.gree.const import DOMAIN as GREE_DOMAIN -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TOGGLE, @@ -22,23 +22,23 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -ENTITY_ID_LIGHT_PANEL = f"{DOMAIN}.fake_device_1_panel_light" -ENTITY_ID_HEALTH_MODE = f"{DOMAIN}.fake_device_1_health_mode" -ENTITY_ID_QUIET = f"{DOMAIN}.fake_device_1_quiet" -ENTITY_ID_FRESH_AIR = f"{DOMAIN}.fake_device_1_fresh_air" -ENTITY_ID_XFAN = f"{DOMAIN}.fake_device_1_xfan" +ENTITY_ID_LIGHT_PANEL = f"{SWITCH_DOMAIN}.fake_device_1_panel_light" +ENTITY_ID_HEALTH_MODE = f"{SWITCH_DOMAIN}.fake_device_1_health_mode" +ENTITY_ID_QUIET = f"{SWITCH_DOMAIN}.fake_device_1_quiet" +ENTITY_ID_FRESH_AIR = f"{SWITCH_DOMAIN}.fake_device_1_fresh_air" +ENTITY_ID_XFAN = f"{SWITCH_DOMAIN}.fake_device_1_xfan" async def async_setup_gree(hass: HomeAssistant) -> MockConfigEntry: """Set up the gree switch platform.""" entry = MockConfigEntry(domain=GREE_DOMAIN) entry.add_to_hass(hass) - await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {DOMAIN: {}}}) + await async_setup_component(hass, GREE_DOMAIN, {GREE_DOMAIN: {SWITCH_DOMAIN: {}}}) await hass.async_block_till_done() return entry -@patch("homeassistant.components.gree.PLATFORMS", [DOMAIN]) +@patch("homeassistant.components.gree.PLATFORMS", [SWITCH_DOMAIN]) async def test_registry_settings( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -67,7 +67,7 @@ async def test_send_switch_on(hass: HomeAssistant, entity: str) -> None: await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -98,7 +98,7 @@ async def test_send_switch_on_device_timeout( await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -125,7 +125,7 @@ async def test_send_switch_off(hass: HomeAssistant, entity: str) -> None: await async_setup_gree(hass) await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -153,7 +153,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Turn the service on first await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -165,7 
+165,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Toggle it off await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -177,7 +177,7 @@ async def test_send_switch_toggle(hass: HomeAssistant, entity: str) -> None: # Toggle is back on await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: entity}, blocking=True, @@ -197,5 +197,5 @@ async def test_entity_state( """Test for entity registry settings (disabled_by, unique_id).""" await async_setup_gree(hass) - state = hass.states.async_all(DOMAIN) + state = hass.states.async_all(SWITCH_DOMAIN) assert state == snapshot diff --git a/tests/components/group/test_cover.py b/tests/components/group/test_cover.py index c687ca21e2d..f89aa9609cc 100644 --- a/tests/components/group/test_cover.py +++ b/tests/components/group/test_cover.py @@ -11,7 +11,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, ) from homeassistant.components.group.cover import DEFAULT_NAME from homeassistant.const import ( @@ -52,7 +52,7 @@ DEMO_COVER_TILT = "cover.living_room_window" DEMO_TILT = "cover.tilt_demo" CONFIG_ALL = { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -62,7 +62,7 @@ CONFIG_ALL = { } CONFIG_POS = { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -72,7 +72,7 @@ CONFIG_POS = { } CONFIG_TILT_ONLY = { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -82,7 +82,7 @@ CONFIG_TILT_ONLY = { } CONFIG_ATTRIBUTES = { - DOMAIN: { + COVER_DOMAIN: { "platform": "group", CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT], CONF_UNIQUE_ID: "unique_identifier", @@ -96,8 +96,8 @@ async def setup_comp( ) -> None: """Set up group cover component.""" config, count = config_count - with assert_setup_component(count, DOMAIN): - await async_setup_component(hass, DOMAIN, config) + with assert_setup_component(count, COVER_DOMAIN): + await async_setup_component(hass, COVER_DOMAIN, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() @@ -454,7 +454,7 @@ async def test_cover_that_only_supports_tilt_removed(hass: HomeAssistant) -> Non async def test_open_covers(hass: HomeAssistant) -> None: """Test open cover function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): @@ -476,7 +476,7 @@ async def test_open_covers(hass: HomeAssistant) -> None: async def test_close_covers(hass: HomeAssistant) -> None: """Test close cover function.""" await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): @@ -499,7 +499,7 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: """Test toggle cover function.""" # Start covers in open state await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -511,7 +511,7 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: # Toggle will close covers 
await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -528,7 +528,7 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: # Toggle again will open covers await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -549,14 +549,14 @@ async def test_toggle_covers(hass: HomeAssistant) -> None: async def test_stop_covers(hass: HomeAssistant) -> None: """Test stop cover function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) @@ -576,7 +576,7 @@ async def test_stop_covers(hass: HomeAssistant) -> None: async def test_set_cover_position(hass: HomeAssistant) -> None: """Test set cover position function.""" await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: COVER_GROUP, ATTR_POSITION: 50}, blocking=True, @@ -600,7 +600,10 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: async def test_open_tilts(hass: HomeAssistant) -> None: """Test open tilt function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(5): future = dt_util.utcnow() + timedelta(seconds=1) @@ -621,7 +624,10 @@ async def test_open_tilts(hass: HomeAssistant) -> None: async def test_close_tilts(hass: HomeAssistant) -> None: """Test close tilt function.""" await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(5): future = dt_util.utcnow() + timedelta(seconds=1) @@ -641,7 +647,10 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: """Test toggle tilt function.""" # Start tilted open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -658,7 +667,10 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: # Toggle will tilt closed await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -673,7 +685,10 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: # Toggle again will tilt open await hass.services.async_call( - DOMAIN, 
SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -694,14 +709,20 @@ async def test_toggle_tilts(hass: HomeAssistant) -> None: async def test_stop_tilts(hass: HomeAssistant) -> None: """Test stop tilts function.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: COVER_GROUP}, + blocking=True, ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) @@ -719,7 +740,7 @@ async def test_stop_tilts(hass: HomeAssistant) -> None: async def test_set_tilt_positions(hass: HomeAssistant) -> None: """Test set tilt position function.""" await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: COVER_GROUP, ATTR_TILT_POSITION: 80}, blocking=True, @@ -741,7 +762,7 @@ async def test_set_tilt_positions(hass: HomeAssistant) -> None: async def test_is_opening_closing(hass: HomeAssistant) -> None: """Test is_opening property.""" await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) await hass.async_block_till_done() @@ -756,7 +777,7 @@ async def test_is_opening_closing(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True ) # Both covers closing -> closing @@ -814,9 +835,9 @@ async def test_nested_group(hass: HomeAssistant) -> None: """Test nested cover group.""" await async_setup_component( hass, - DOMAIN, + COVER_DOMAIN, { - DOMAIN: [ + COVER_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -848,7 +869,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: # Test controlling the nested group async with asyncio.timeout(0.5): await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.nested_group"}, blocking=True, diff --git a/tests/components/group/test_fan.py b/tests/components/group/test_fan.py index 184693f7618..93509b5a651 100644 --- a/tests/components/group/test_fan.py +++ b/tests/components/group/test_fan.py @@ -14,7 +14,7 @@ from homeassistant.components.fan import ( ATTR_PERCENTAGE_STEP, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, SERVICE_OSCILLATE, SERVICE_SET_DIRECTION, SERVICE_SET_PERCENTAGE, @@ -60,7 +60,7 @@ FULL_SUPPORT_FEATURES = ( CONFIG_MISSING_FAN = { - DOMAIN: [ + FAN_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -74,7 +74,7 @@ CONFIG_MISSING_FAN = { } CONFIG_FULL_SUPPORT = { - DOMAIN: [ + FAN_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -84,7 +84,7 @@ CONFIG_FULL_SUPPORT = { } CONFIG_LIMITED_SUPPORT = { - DOMAIN: [ + FAN_DOMAIN: [ { "platform": "group", CONF_ENTITIES: [*LIMITED_FAN_ENTITY_IDS], @@ -94,7 +94,7 @@ 
CONFIG_LIMITED_SUPPORT = { CONFIG_ATTRIBUTES = { - DOMAIN: { + FAN_DOMAIN: { "platform": "group", CONF_ENTITIES: [*FULL_FAN_ENTITY_IDS, *LIMITED_FAN_ENTITY_IDS], CONF_UNIQUE_ID: "unique_identifier", @@ -108,8 +108,8 @@ async def setup_comp( ) -> None: """Set up group fan component.""" config, count = config_count - with assert_setup_component(count, DOMAIN): - await async_setup_component(hass, DOMAIN, config) + with assert_setup_component(count, FAN_DOMAIN): + await async_setup_component(hass, FAN_DOMAIN, config) await hass.async_block_till_done() await hass.async_start() await hass.async_block_till_done() @@ -393,7 +393,7 @@ async def test_state_missing_entity_id(hass: HomeAssistant) -> None: async def test_setup_before_started(hass: HomeAssistant) -> None: """Test we can setup before starting.""" hass.set_state(CoreState.stopped) - assert await async_setup_component(hass, DOMAIN, CONFIG_MISSING_FAN) + assert await async_setup_component(hass, FAN_DOMAIN, CONFIG_MISSING_FAN) await hass.async_block_till_done() await hass.async_start() @@ -431,14 +431,14 @@ async def test_reload(hass: HomeAssistant) -> None: async def test_service_calls(hass: HomeAssistant) -> None: """Test calling services.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True ) assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_ON assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_ON assert hass.states.get(FAN_GROUP).state == STATE_ON await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 66}, blocking=True, @@ -452,14 +452,14 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_PERCENTAGE_STEP] == 100 / 3 await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True + FAN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: FAN_GROUP}, blocking=True ) assert hass.states.get(LIVING_ROOM_FAN_ENTITY_ID).state == STATE_OFF assert hass.states.get(PERCENTAGE_FULL_FAN_ENTITY_ID).state == STATE_OFF assert hass.states.get(FAN_GROUP).state == STATE_OFF await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_SET_PERCENTAGE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 100}, blocking=True, @@ -472,7 +472,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_PERCENTAGE] == 100 await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_PERCENTAGE: 0}, blocking=True, @@ -482,7 +482,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert hass.states.get(FAN_GROUP).state == STATE_OFF await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_OSCILLATE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: True}, blocking=True, @@ -495,7 +495,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_OSCILLATING] is True await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_OSCILLATE, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_OSCILLATING: False}, blocking=True, @@ -508,7 +508,7 @@ async def test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_OSCILLATING] is False await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_SET_DIRECTION, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_FORWARD}, blocking=True, @@ -521,7 +521,7 @@ async def 
test_service_calls(hass: HomeAssistant) -> None: assert fan_group_state.attributes[ATTR_DIRECTION] == DIRECTION_FORWARD await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_SET_DIRECTION, {ATTR_ENTITY_ID: FAN_GROUP, ATTR_DIRECTION: DIRECTION_REVERSE}, blocking=True, @@ -538,9 +538,9 @@ async def test_nested_group(hass: HomeAssistant) -> None: """Test nested fan group.""" await async_setup_component( hass, - DOMAIN, + FAN_DOMAIN, { - DOMAIN: [ + FAN_DOMAIN: [ {"platform": "demo"}, { "platform": "group", @@ -578,7 +578,7 @@ async def test_nested_group(hass: HomeAssistant) -> None: # Test controlling the nested group async with asyncio.timeout(0.5): await hass.services.async_call( - DOMAIN, + FAN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "fan.nested_group"}, blocking=True, From aab939cf6cdfed1b8dbe38021a00b704d6967b22 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:20:40 +0200 Subject: [PATCH 1633/1635] Add alias to DOMAIN import in tests [a-d] (#125573) --- tests/components/accuweather/test_init.py | 4 +- .../test_device_tracker.py | 38 ++++++++---- tests/components/comfoconnect/test_sensor.py | 8 +-- tests/components/demo/test_button.py | 8 ++- tests/components/demo/test_climate.py | 49 ++++++++------- tests/components/demo/test_cover.py | 59 +++++++++++++------ tests/components/demo/test_date.py | 12 +++- tests/components/demo/test_datetime.py | 12 +++- tests/components/demo/test_humidifier.py | 47 +++++++++++---- tests/components/demo/test_number.py | 12 ++-- tests/components/demo/test_select.py | 10 ++-- tests/components/demo/test_siren.py | 24 ++++---- tests/components/demo/test_text.py | 8 ++- tests/components/demo/test_time.py | 12 +++- tests/components/demo/test_update.py | 10 ++-- tests/components/demo/test_vacuum.py | 24 ++++---- .../device_sun_light_trigger/test_init.py | 12 ++-- .../devolo_home_control/test_binary_sensor.py | 37 +++++++----- .../devolo_home_control/test_climate.py | 16 ++--- .../devolo_home_control/test_cover.py | 28 +++++---- .../devolo_home_control/test_light.py | 42 ++++++------- .../devolo_home_control/test_sensor.py | 43 ++++++++------ .../devolo_home_control/test_siren.py | 28 ++++----- .../devolo_home_control/test_switch.py | 20 +++---- .../devolo_home_network/test_binary_sensor.py | 9 ++- .../devolo_home_network/test_image.py | 8 ++- tests/components/doorbird/test_button.py | 8 +-- 27 files changed, 353 insertions(+), 235 deletions(-) diff --git a/tests/components/accuweather/test_init.py b/tests/components/accuweather/test_init.py index 340676905d6..f88cde88e7e 100644 --- a/tests/components/accuweather/test_init.py +++ b/tests/components/accuweather/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.accuweather.const import ( UPDATE_INTERVAL_DAILY_FORECAST, UPDATE_INTERVAL_OBSERVATION, ) -from homeassistant.components.sensor import DOMAIN as SENSOR_PLATFORM +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -107,7 +107,7 @@ async def test_remove_ozone_sensors( ) -> None: """Test remove ozone sensors from registry.""" entity_registry.async_get_or_create( - SENSOR_PLATFORM, + SENSOR_DOMAIN, DOMAIN, "0123456-ozone-0", suggested_object_id="home_ozone_0d", diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py index 
452297e38c2..da90980640b 100644 --- a/tests/components/bluetooth_le_tracker/test_device_tracker.py +++ b/tests/components/bluetooth_le_tracker/test_device_tracker.py @@ -18,7 +18,7 @@ from homeassistant.components.device_tracker import ( CONF_CONSIDER_HOME, CONF_SCAN_INTERVAL, CONF_TRACK_NEW, - DOMAIN, + DOMAIN as DEVICE_TRACKER_DOMAIN, ) from homeassistant.const import CONF_PLATFORM from homeassistant.core import HomeAssistant @@ -73,7 +73,7 @@ async def test_do_not_see_device_if_time_not_updated(hass: HomeAssistant) -> Non address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -101,7 +101,9 @@ async def test_do_not_see_device_if_time_not_updated(hass: HomeAssistant) -> Non CONF_TRACK_NEW: True, CONF_CONSIDER_HOME: timedelta(minutes=10), } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() assert result @@ -136,7 +138,7 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -164,7 +166,9 @@ async def test_see_device_if_time_updated(hass: HomeAssistant) -> None: CONF_TRACK_NEW: True, CONF_CONSIDER_HOME: timedelta(minutes=10), } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) assert result # Tick until device seen enough times for to be registered for tracking @@ -215,7 +219,7 @@ async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -242,7 +246,9 @@ async def test_preserve_new_tracked_device_name(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=1), CONF_TRACK_NEW: True, } - assert await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + assert await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() # Seen once here; return without name when seen subsequent times @@ -282,7 +288,7 @@ async def test_tracking_battery_times_out(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -311,7 +317,9 @@ async def test_tracking_battery_times_out(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() assert result @@ -348,7 +356,7 @@ async def test_tracking_battery_fails(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id 
= f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -377,7 +385,9 @@ async def test_tracking_battery_fails(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) assert result # Tick until device seen enough times for to be registered for tracking @@ -413,7 +423,7 @@ async def test_tracking_battery_successful(hass: HomeAssistant) -> None: address = "DE:AD:BE:EF:13:37" name = "Mock device name" - entity_id = f"{DOMAIN}.{slugify(name)}" + entity_id = f"{DEVICE_TRACKER_DOMAIN}.{slugify(name)}" with patch( "homeassistant.components.bluetooth.async_discovered_service_info" @@ -442,7 +452,9 @@ async def test_tracking_battery_successful(hass: HomeAssistant) -> None: CONF_TRACK_BATTERY_INTERVAL: timedelta(minutes=2), CONF_TRACK_NEW: True, } - result = await async_setup_component(hass, DOMAIN, {DOMAIN: config}) + result = await async_setup_component( + hass, DEVICE_TRACKER_DOMAIN, {DEVICE_TRACKER_DOMAIN: config} + ) await hass.async_block_till_done() assert result diff --git a/tests/components/comfoconnect/test_sensor.py b/tests/components/comfoconnect/test_sensor.py index fdecfa5b1c7..5cae566379a 100644 --- a/tests/components/comfoconnect/test_sensor.py +++ b/tests/components/comfoconnect/test_sensor.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock, patch import pytest -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -14,7 +14,7 @@ from tests.common import assert_setup_component COMPONENT = "comfoconnect" VALID_CONFIG = { COMPONENT: {"host": "1.2.3.4"}, - DOMAIN: { + SENSOR_DOMAIN: { "platform": COMPONENT, "resources": [ "current_humidity", @@ -51,8 +51,8 @@ async def setup_sensor( mock_comfoconnect_command: MagicMock, ) -> None: """Set up demo sensor component.""" - with assert_setup_component(1, DOMAIN): - await async_setup_component(hass, DOMAIN, VALID_CONFIG) + with assert_setup_component(1, SENSOR_DOMAIN): + await async_setup_component(hass, SENSOR_DOMAIN, VALID_CONFIG) await hass.async_block_till_done() diff --git a/tests/components/demo/test_button.py b/tests/components/demo/test_button.py index 6049de12570..702ee3aa3e0 100644 --- a/tests/components/demo/test_button.py +++ b/tests/components/demo/test_button.py @@ -5,7 +5,7 @@ from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory import pytest -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -27,7 +27,9 @@ async def button_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_button(hass: HomeAssistant, button_only) -> None: """Initialize setup demo button entity.""" - assert await async_setup_component(hass, DOMAIN, {"button": {"platform": "demo"}}) + assert await async_setup_component( + hass, BUTTON_DOMAIN, {"button": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -47,7 +49,7 @@ async def 
test_press(hass: HomeAssistant, freezer: FrozenDateTimeFactory) -> Non now = dt_util.parse_datetime("2021-01-09 12:00:00+00:00") freezer.move_to(now) await hass.services.async_call( - DOMAIN, + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ENTITY_PUSH}, blocking=True, diff --git a/tests/components/demo/test_climate.py b/tests/components/demo/test_climate.py index 383e00834b8..42152645ecb 100644 --- a/tests/components/demo/test_climate.py +++ b/tests/components/demo/test_climate.py @@ -22,7 +22,7 @@ from homeassistant.components.climate import ( ATTR_SWING_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, PRESET_AWAY, PRESET_ECO, SERVICE_SET_FAN_MODE, @@ -64,7 +64,9 @@ def climate_only() -> Generator[None]: async def setup_demo_climate(hass: HomeAssistant, climate_only: None) -> None: """Initialize setup demo climate.""" hass.config.units = METRIC_SYSTEM - assert await async_setup_component(hass, DOMAIN, {"climate": {"platform": "demo"}}) + assert await async_setup_component( + hass, CLIMATE_DOMAIN, {"climate": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -104,7 +106,7 @@ async def test_set_only_target_temp_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: None}, blocking=True, @@ -120,7 +122,7 @@ async def test_set_only_target_temp(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TEMPERATURE) == 21 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_TEMPERATURE: 30}, blocking=True, @@ -136,7 +138,7 @@ async def test_set_only_target_temp_with_convert(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TEMPERATURE) == 20 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, {ATTR_ENTITY_ID: ENTITY_HEATPUMP, ATTR_TEMPERATURE: 21}, blocking=True, @@ -154,7 +156,7 @@ async def test_set_target_temp_range(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_TARGET_TEMP_HIGH) == 24.0 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ECOBEE, @@ -179,7 +181,7 @@ async def test_set_target_temp_range_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_ECOBEE, @@ -202,7 +204,7 @@ async def test_set_temp_with_hvac_mode(hass: HomeAssistant) -> None: assert state.state == HVACMode.COOL await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { ATTR_ENTITY_ID: ENTITY_CLIMATE, @@ -224,7 +226,7 @@ async def test_set_target_humidity_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: None}, blocking=True, @@ -240,7 +242,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_HUMIDITY) == 67.4 await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HUMIDITY: 64}, blocking=True, @@ -257,7 +259,7 @@ async def test_set_fan_mode_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: 
ENTITY_CLIMATE, ATTR_FAN_MODE: None}, blocking=True, @@ -273,7 +275,7 @@ async def test_set_fan_mode(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_FAN_MODE) == "on_high" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_FAN_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_FAN_MODE: "on_low"}, blocking=True, @@ -290,7 +292,7 @@ async def test_set_swing_mode_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: None}, blocking=True, @@ -306,7 +308,7 @@ async def test_set_swing(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_SWING_MODE) == "off" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_SWING_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_SWING_MODE: "auto"}, blocking=True, @@ -327,7 +329,7 @@ async def test_set_hvac_bad_attr_and_state(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: None}, blocking=True, @@ -344,7 +346,7 @@ async def test_set_hvac(hass: HomeAssistant) -> None: assert state.state == HVACMode.COOL await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, @@ -357,7 +359,7 @@ async def test_set_hvac(hass: HomeAssistant) -> None: async def test_set_hold_mode_away(hass: HomeAssistant) -> None: """Test setting the hold mode away.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_AWAY}, blocking=True, @@ -370,7 +372,7 @@ async def test_set_hold_mode_away(hass: HomeAssistant) -> None: async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: """Test setting the hold mode eco.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_PRESET_MODE, {ATTR_ENTITY_ID: ENTITY_ECOBEE, ATTR_PRESET_MODE: PRESET_ECO}, blocking=True, @@ -383,7 +385,7 @@ async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.OFF}, blocking=True, @@ -393,7 +395,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: assert state.state == HVACMode.OFF await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True + CLIMATE_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True ) state = hass.states.get(ENTITY_CLIMATE) assert state.state == HVACMode.HEAT @@ -402,7 +404,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_HVAC_MODE, {ATTR_ENTITY_ID: ENTITY_CLIMATE, ATTR_HVAC_MODE: HVACMode.HEAT}, blocking=True, @@ -412,7 +414,10 @@ async def test_turn_off(hass: HomeAssistant) -> None: assert state.state == HVACMode.HEAT await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_CLIMATE}, blocking=True + CLIMATE_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ENTITY_CLIMATE}, + blocking=True, ) state = hass.states.get(ENTITY_CLIMATE) assert state.state == HVACMode.OFF 
diff --git a/tests/components/demo/test_cover.py b/tests/components/demo/test_cover.py index 009d2ca2f49..abbbbf0b79a 100644 --- a/tests/components/demo/test_cover.py +++ b/tests/components/demo/test_cover.py @@ -11,7 +11,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -55,8 +55,8 @@ def cover_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_comp(hass: HomeAssistant, cover_only: None) -> None: """Set up demo cover component.""" - with assert_setup_component(1, DOMAIN): - await async_setup_component(hass, DOMAIN, CONFIG) + with assert_setup_component(1, COVER_DOMAIN): + await async_setup_component(hass, COVER_DOMAIN, CONFIG) await hass.async_block_till_done() @@ -79,7 +79,7 @@ async def test_close_cover(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) assert state.state == STATE_CLOSING @@ -99,7 +99,7 @@ async def test_open_cover(hass: HomeAssistant) -> None: assert state.state == STATE_OPEN assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) state = hass.states.get(ENTITY_COVER) assert state.state == STATE_OPENING @@ -117,7 +117,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: """Test toggling the cover.""" # Start open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -129,7 +129,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: assert state.attributes["current_position"] == 100 # Toggle closed await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -141,7 +141,7 @@ async def test_toggle_cover(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_POSITION] == 0 # Toggle open await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -158,7 +158,7 @@ async def test_set_cover_position(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_POSITION: 10}, blocking=True, @@ -177,13 +177,13 @@ async def test_stop_cover(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_POSITION] == 70 await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) future = 
dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True ) async_fire_time_changed(hass, future) await hass.async_block_till_done() @@ -196,7 +196,10 @@ async def test_close_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -212,7 +215,10 @@ async def test_open_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -227,7 +233,10 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: """Test toggling the cover tilt.""" # Start open await hass.services.async_call( - DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_OPEN_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(7): future = dt_util.utcnow() + timedelta(seconds=1) @@ -238,7 +247,10 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # Toggle closed await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -249,7 +261,10 @@ async def test_toggle_cover_tilt(hass: HomeAssistant) -> None: assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # Toggle Open await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_TOGGLE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) for _ in range(10): future = dt_util.utcnow() + timedelta(seconds=1) @@ -265,7 +280,7 @@ async def test_set_cover_tilt_position(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION, {ATTR_ENTITY_ID: ENTITY_COVER, ATTR_TILT_POSITION: 90}, blocking=True, @@ -284,13 +299,19 @@ async def test_stop_cover_tilt(hass: HomeAssistant) -> None: state = hass.states.get(ENTITY_COVER) assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 50 await hass.services.async_call( - DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + COVER_DOMAIN, + SERVICE_CLOSE_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) future = dt_util.utcnow() + timedelta(seconds=1) async_fire_time_changed(hass, future) await hass.async_block_till_done() await hass.services.async_call( - DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: ENTITY_COVER}, blocking=True + 
COVER_DOMAIN, + SERVICE_STOP_COVER_TILT, + {ATTR_ENTITY_ID: ENTITY_COVER}, + blocking=True, ) async_fire_time_changed(hass, future) await hass.async_block_till_done() diff --git a/tests/components/demo/test_date.py b/tests/components/demo/test_date.py index 5e0fc2c29cd..228be936599 100644 --- a/tests/components/demo/test_date.py +++ b/tests/components/demo/test_date.py @@ -4,7 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.date import ATTR_DATE, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.date import ( + ATTR_DATE, + DOMAIN as DATE_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -25,7 +29,9 @@ async def date_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_date(hass: HomeAssistant, date_only) -> None: """Initialize setup demo date.""" - assert await async_setup_component(hass, DOMAIN, {"date": {"platform": "demo"}}) + assert await async_setup_component( + hass, DATE_DOMAIN, {"date": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -38,7 +44,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_datetime(hass: HomeAssistant) -> None: """Test set datetime service.""" await hass.services.async_call( - DOMAIN, + DATE_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_DATE, ATTR_DATE: "2021-02-03"}, blocking=True, diff --git a/tests/components/demo/test_datetime.py b/tests/components/demo/test_datetime.py index bd4adafd695..82cd5044068 100644 --- a/tests/components/demo/test_datetime.py +++ b/tests/components/demo/test_datetime.py @@ -4,7 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.datetime import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.datetime import ( + ATTR_DATETIME, + DOMAIN as DATETIME_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -25,7 +29,9 @@ async def datetime_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_datetime(hass: HomeAssistant, datetime_only) -> None: """Initialize setup demo datetime.""" - assert await async_setup_component(hass, DOMAIN, {"datetime": {"platform": "demo"}}) + assert await async_setup_component( + hass, DATETIME_DOMAIN, {"datetime": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -39,7 +45,7 @@ async def test_set_datetime(hass: HomeAssistant) -> None: """Test set datetime service.""" await hass.config.async_set_time_zone("UTC") await hass.services.async_call( - DOMAIN, + DATETIME_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_DATETIME, ATTR_DATETIME: "2021-02-03 01:02:03"}, blocking=True, diff --git a/tests/components/demo/test_humidifier.py b/tests/components/demo/test_humidifier.py index 0f0fcaf43fd..93bd2b13743 100644 --- a/tests/components/demo/test_humidifier.py +++ b/tests/components/demo/test_humidifier.py @@ -11,7 +11,7 @@ from homeassistant.components.humidifier import ( ATTR_HUMIDITY, ATTR_MAX_HUMIDITY, ATTR_MIN_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDITY_DOMAIN, MODE_AWAY, SERVICE_SET_HUMIDITY, SERVICE_SET_MODE, @@ -48,7 +48,7 @@ async def humidifier_only() -> None: async def setup_demo_humidifier(hass: HomeAssistant, humidifier_only: None): """Initialize setup demo humidifier.""" assert await async_setup_component( - hass, DOMAIN, {"humidifier": 
{"platform": "demo"}} + hass, HUMIDITY_DOMAIN, {"humidifier": {"platform": "demo"}} ) await hass.async_block_till_done() @@ -76,7 +76,7 @@ async def test_set_target_humidity_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_HUMIDITY: None, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True, @@ -93,7 +93,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: assert state.attributes.get(ATTR_HUMIDITY) == 54.2 await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_HUMIDITY, {ATTR_HUMIDITY: 64, ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True, @@ -107,7 +107,7 @@ async def test_set_target_humidity(hass: HomeAssistant) -> None: async def test_set_hold_mode_away(hass: HomeAssistant) -> None: """Test setting the hold mode away.""" await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_MODE, {ATTR_MODE: MODE_AWAY, ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, blocking=True, @@ -121,7 +121,7 @@ async def test_set_hold_mode_away(hass: HomeAssistant) -> None: async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: """Test setting the hold mode eco.""" await hass.services.async_call( - DOMAIN, + HUMIDITY_DOMAIN, SERVICE_SET_MODE, {ATTR_MODE: "eco", ATTR_ENTITY_ID: ENTITY_HYGROSTAT}, blocking=True, @@ -135,14 +135,20 @@ async def test_set_hold_mode_eco(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF assert state.attributes.get(ATTR_ACTION) == "off" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON @@ -152,14 +158,20 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn off device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON assert state.attributes.get(ATTR_ACTION) == "drying" await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF @@ -169,19 +181,28 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test toggle device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: 
ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, blocking=True + HUMIDITY_DOMAIN, + SERVICE_TOGGLE, + {ATTR_ENTITY_ID: ENTITY_DEHUMIDIFIER}, + blocking=True, ) state = hass.states.get(ENTITY_DEHUMIDIFIER) assert state.state == STATE_ON diff --git a/tests/components/demo/test_number.py b/tests/components/demo/test_number.py index 79885fa8581..4b7cbe4864f 100644 --- a/tests/components/demo/test_number.py +++ b/tests/components/demo/test_number.py @@ -11,7 +11,7 @@ from homeassistant.components.number import ( ATTR_MIN, ATTR_STEP, ATTR_VALUE, - DOMAIN, + DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE, NumberMode, ) @@ -39,7 +39,9 @@ def number_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_demo_number(hass: HomeAssistant, number_only: None) -> None: """Initialize setup demo Number entity.""" - assert await async_setup_component(hass, DOMAIN, {"number": {"platform": "demo"}}) + assert await async_setup_component( + hass, NUMBER_DOMAIN, {"number": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -83,7 +85,7 @@ async def test_set_value_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(vol.Invalid): await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: None, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, @@ -101,7 +103,7 @@ async def test_set_value_bad_range(hass: HomeAssistant) -> None: with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: 1024, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, @@ -118,7 +120,7 @@ async def test_set_set_value(hass: HomeAssistant) -> None: assert state.state == "42.0" await hass.services.async_call( - DOMAIN, + NUMBER_DOMAIN, SERVICE_SET_VALUE, {ATTR_VALUE: 23, ATTR_ENTITY_ID: ENTITY_VOLUME}, blocking=True, diff --git a/tests/components/demo/test_select.py b/tests/components/demo/test_select.py index f9805f44866..a78f8552ec7 100644 --- a/tests/components/demo/test_select.py +++ b/tests/components/demo/test_select.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.select import ( ATTR_OPTION, ATTR_OPTIONS, - DOMAIN, + DOMAIN as SELECT_DOMAIN, SERVICE_SELECT_OPTION, ) from homeassistant.const import ATTR_ENTITY_ID, Platform @@ -31,7 +31,9 @@ async def select_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_select(hass: HomeAssistant, select_only) -> None: """Initialize setup demo select entity.""" - assert await async_setup_component(hass, DOMAIN, {"select": {"platform": "demo"}}) + assert await async_setup_component( + hass, SELECT_DOMAIN, {"select": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -55,7 +57,7 @@ async def test_select_option_bad_attr(hass: HomeAssistant) -> None: with pytest.raises(ServiceValidationError): await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, {ATTR_OPTION: "slow_speed", ATTR_ENTITY_ID: ENTITY_SPEED}, blocking=True, @@ -74,7 +76,7 @@ async def test_select_option(hass: HomeAssistant) -> None: assert state.state == "ridiculous_speed" await hass.services.async_call( - DOMAIN, + SELECT_DOMAIN, SERVICE_SELECT_OPTION, {ATTR_OPTION: "light_speed", ATTR_ENTITY_ID: ENTITY_SPEED}, blocking=True, diff --git a/tests/components/demo/test_siren.py b/tests/components/demo/test_siren.py index e21cd96efc9..c537e73508d 100644 --- 
a/tests/components/demo/test_siren.py +++ b/tests/components/demo/test_siren.py @@ -8,7 +8,7 @@ from homeassistant.components.siren import ( ATTR_AVAILABLE_TONES, ATTR_TONE, ATTR_VOLUME_LEVEL, - DOMAIN, + DOMAIN as SIREN_DOMAIN, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -39,7 +39,9 @@ async def siren_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_siren(hass: HomeAssistant, siren_only: None): """Initialize setup demo siren.""" - assert await async_setup_component(hass, DOMAIN, {"siren": {"platform": "demo"}}) + assert await async_setup_component( + hass, SIREN_DOMAIN, {"siren": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -59,13 +61,13 @@ def test_all_setup_params(hass: HomeAssistant) -> None: async def test_turn_on(hass: HomeAssistant) -> None: """Test turn on device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON @@ -73,7 +75,7 @@ async def test_turn_on(hass: HomeAssistant) -> None: # Test that an invalid tone will raise a ValueError with pytest.raises(ValueError): await hass.services.async_call( - DOMAIN, + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN_WITH_ALL_FEATURES, ATTR_TONE: "invalid_tone"}, blocking=True, @@ -83,13 +85,13 @@ async def test_turn_on(hass: HomeAssistant) -> None: async def test_turn_off(hass: HomeAssistant) -> None: """Test turn off device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF @@ -98,19 +100,19 @@ async def test_turn_off(hass: HomeAssistant) -> None: async def test_toggle(hass: HomeAssistant) -> None: """Test toggle device.""" await hass.services.async_call( - DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_OFF await hass.services.async_call( - DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True + SIREN_DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: ENTITY_SIREN}, blocking=True ) state = hass.states.get(ENTITY_SIREN) assert state.state == STATE_ON @@ -122,7 +124,7 @@ async def test_turn_on_strip_attributes(hass: HomeAssistant) -> None: "homeassistant.components.demo.siren.DemoSiren.async_turn_on" ) as svc_call: await hass.services.async_call( - DOMAIN, + SIREN_DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: ENTITY_SIREN, ATTR_VOLUME_LEVEL: 1}, 
blocking=True, diff --git a/tests/components/demo/test_text.py b/tests/components/demo/test_text.py index 4ca172e5143..b3291012167 100644 --- a/tests/components/demo/test_text.py +++ b/tests/components/demo/test_text.py @@ -10,7 +10,7 @@ from homeassistant.components.text import ( ATTR_MIN, ATTR_PATTERN, ATTR_VALUE, - DOMAIN, + DOMAIN as TEXT_DOMAIN, SERVICE_SET_VALUE, ) from homeassistant.const import ( @@ -38,7 +38,9 @@ def text_only() -> Generator[None]: @pytest.fixture(autouse=True) async def setup_demo_text(hass: HomeAssistant, text_only: None) -> None: """Initialize setup demo text.""" - assert await async_setup_component(hass, DOMAIN, {"text": {"platform": "demo"}}) + assert await async_setup_component( + hass, TEXT_DOMAIN, {"text": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -55,7 +57,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_value(hass: HomeAssistant) -> None: """Test set value service.""" await hass.services.async_call( - DOMAIN, + TEXT_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_TEXT, ATTR_VALUE: "new"}, blocking=True, diff --git a/tests/components/demo/test_time.py b/tests/components/demo/test_time.py index 8ef093a38f3..6997e8392ed 100644 --- a/tests/components/demo/test_time.py +++ b/tests/components/demo/test_time.py @@ -4,7 +4,11 @@ from unittest.mock import patch import pytest -from homeassistant.components.time import ATTR_TIME, DOMAIN, SERVICE_SET_VALUE +from homeassistant.components.time import ( + ATTR_TIME, + DOMAIN as TIME_DOMAIN, + SERVICE_SET_VALUE, +) from homeassistant.const import ATTR_ENTITY_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @@ -25,7 +29,9 @@ async def time_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_datetime(hass: HomeAssistant, time_only) -> None: """Initialize setup demo time.""" - assert await async_setup_component(hass, DOMAIN, {"time": {"platform": "demo"}}) + assert await async_setup_component( + hass, TIME_DOMAIN, {"time": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -38,7 +44,7 @@ def test_setup_params(hass: HomeAssistant) -> None: async def test_set_value(hass: HomeAssistant) -> None: """Test set value service.""" await hass.services.async_call( - DOMAIN, + TIME_DOMAIN, SERVICE_SET_VALUE, {ATTR_ENTITY_ID: ENTITY_TIME, ATTR_TIME: "01:02:03"}, blocking=True, diff --git a/tests/components/demo/test_update.py b/tests/components/demo/test_update.py index 0a8886a085d..37fa5a7a2f6 100644 --- a/tests/components/demo/test_update.py +++ b/tests/components/demo/test_update.py @@ -11,7 +11,7 @@ from homeassistant.components.update import ( ATTR_RELEASE_SUMMARY, ATTR_RELEASE_URL, ATTR_TITLE, - DOMAIN, + DOMAIN as UPDATE_DOMAIN, SERVICE_INSTALL, UpdateDeviceClass, ) @@ -41,7 +41,9 @@ async def update_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_update(hass: HomeAssistant, update_only) -> None: """Initialize setup demo update entity.""" - assert await async_setup_component(hass, DOMAIN, {"update": {"platform": "demo"}}) + assert await async_setup_component( + hass, UPDATE_DOMAIN, {"update": {"platform": "demo"}} + ) await hass.async_block_till_done() @@ -140,7 +142,7 @@ async def test_update_with_progress(hass: HomeAssistant) -> None: with patch("homeassistant.components.demo.update.FAKE_INSTALL_SLEEP_TIME", new=0): await hass.services.async_call( - DOMAIN, + UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, blocking=True, @@ -184,7 
+186,7 @@ async def test_update_with_progress_raising(hass: HomeAssistant) -> None: pytest.raises(RuntimeError), ): await hass.services.async_call( - DOMAIN, + UPDATE_DOMAIN, SERVICE_INSTALL, {ATTR_ENTITY_ID: "update.demo_update_with_progress"}, blocking=True, diff --git a/tests/components/demo/test_vacuum.py b/tests/components/demo/test_vacuum.py index a3b982ab70e..a4e4d6f0e1f 100644 --- a/tests/components/demo/test_vacuum.py +++ b/tests/components/demo/test_vacuum.py @@ -19,7 +19,7 @@ from homeassistant.components.vacuum import ( ATTR_FAN_SPEED, ATTR_FAN_SPEED_LIST, ATTR_PARAMS, - DOMAIN, + DOMAIN as VACUUM_DOMAIN, SERVICE_SEND_COMMAND, SERVICE_SET_FAN_SPEED, STATE_CLEANING, @@ -42,11 +42,11 @@ from homeassistant.util import dt as dt_util from tests.common import async_fire_time_changed, async_mock_service from tests.components.vacuum import common -ENTITY_VACUUM_BASIC = f"{DOMAIN}.{DEMO_VACUUM_BASIC}".lower() -ENTITY_VACUUM_COMPLETE = f"{DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() -ENTITY_VACUUM_MINIMAL = f"{DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() -ENTITY_VACUUM_MOST = f"{DOMAIN}.{DEMO_VACUUM_MOST}".lower() -ENTITY_VACUUM_NONE = f"{DOMAIN}.{DEMO_VACUUM_NONE}".lower() +ENTITY_VACUUM_BASIC = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_BASIC}".lower() +ENTITY_VACUUM_COMPLETE = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_COMPLETE}".lower() +ENTITY_VACUUM_MINIMAL = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_MINIMAL}".lower() +ENTITY_VACUUM_MOST = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_MOST}".lower() +ENTITY_VACUUM_NONE = f"{VACUUM_DOMAIN}.{DEMO_VACUUM_NONE}".lower() @pytest.fixture @@ -62,7 +62,9 @@ async def vacuum_only() -> None: @pytest.fixture(autouse=True) async def setup_demo_vacuum(hass: HomeAssistant, vacuum_only: None): """Initialize setup demo vacuum.""" - assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "demo"}}) + assert await async_setup_component( + hass, VACUUM_DOMAIN, {VACUUM_DOMAIN: {CONF_PLATFORM: "demo"}} + ) await hass.async_block_till_done() @@ -189,7 +191,7 @@ async def test_unsupported_methods(hass: HomeAssistant) -> None: async def test_services(hass: HomeAssistant) -> None: """Test vacuum services.""" # Test send_command - send_command_calls = async_mock_service(hass, DOMAIN, SERVICE_SEND_COMMAND) + send_command_calls = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_SEND_COMMAND) params = {"rotate": 150, "speed": 20} await common.async_send_command( @@ -198,20 +200,20 @@ async def test_services(hass: HomeAssistant) -> None: assert len(send_command_calls) == 1 call = send_command_calls[-1] - assert call.domain == DOMAIN + assert call.domain == VACUUM_DOMAIN assert call.service == SERVICE_SEND_COMMAND assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_BASIC assert call.data[ATTR_COMMAND] == "test_command" assert call.data[ATTR_PARAMS] == params # Test set fan speed - set_fan_speed_calls = async_mock_service(hass, DOMAIN, SERVICE_SET_FAN_SPEED) + set_fan_speed_calls = async_mock_service(hass, VACUUM_DOMAIN, SERVICE_SET_FAN_SPEED) await common.async_set_fan_speed(hass, FAN_SPEEDS[0], ENTITY_VACUUM_COMPLETE) assert len(set_fan_speed_calls) == 1 call = set_fan_speed_calls[-1] - assert call.domain == DOMAIN + assert call.domain == VACUUM_DOMAIN assert call.service == SERVICE_SET_FAN_SPEED assert call.data[ATTR_ENTITY_ID] == ENTITY_VACUUM_COMPLETE assert call.data[ATTR_FAN_SPEED] == FAN_SPEEDS[0] diff --git a/tests/components/device_sun_light_trigger/test_init.py b/tests/components/device_sun_light_trigger/test_init.py index f3821eb5af9..1de0794b9ee 100644 --- 
a/tests/components/device_sun_light_trigger/test_init.py +++ b/tests/components/device_sun_light_trigger/test_init.py @@ -13,7 +13,7 @@ from homeassistant.components import ( group, light, ) -from homeassistant.components.device_tracker import DOMAIN +from homeassistant.components.device_tracker import DOMAIN as DEVICE_TRACKER_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, CONF_PLATFORM, @@ -150,21 +150,21 @@ async def test_lights_turn_on_when_coming_home_after_sun_set( hass, device_sun_light_trigger.DOMAIN, {device_sun_light_trigger.DOMAIN: {}} ) - hass.states.async_set(f"{DOMAIN}.device_2", STATE_UNKNOWN) + hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_UNKNOWN) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == STATE_OFF for ent_id in hass.states.async_entity_ids("light") ) - hass.states.async_set(f"{DOMAIN}.device_2", STATE_NOT_HOME) + hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_NOT_HOME) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == STATE_OFF for ent_id in hass.states.async_entity_ids("light") ) - hass.states.async_set(f"{DOMAIN}.device_2", STATE_HOME) + hass.states.async_set(f"{DEVICE_TRACKER_DOMAIN}.device_2", STATE_HOME) await hass.async_block_till_done() assert all( hass.states.get(ent_id).state == light.STATE_ON @@ -177,8 +177,8 @@ async def test_lights_turn_on_when_coming_home_after_sun_set_person( hass: HomeAssistant, freezer: FrozenDateTimeFactory ) -> None: """Test lights turn on when coming home after sun set.""" - device_1 = f"{DOMAIN}.device_1" - device_2 = f"{DOMAIN}.device_2" + device_1 = f"{DEVICE_TRACKER_DOMAIN}.device_1" + device_2 = f"{DEVICE_TRACKER_DOMAIN}.device_2" test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC) freezer.move_to(test_time) diff --git a/tests/components/devolo_home_control/test_binary_sensor.py b/tests/components/devolo_home_control/test_binary_sensor.py index e809c94c129..fd28ce2fdf6 100644 --- a/tests/components/devolo_home_control/test_binary_sensor.py +++ b/tests/components/devolo_home_control/test_binary_sensor.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -34,24 +34,28 @@ async def test_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_door") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_door") == snapshot + assert entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_door") == snapshot - state = hass.states.get(f"{DOMAIN}.test_overload") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_overload") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_overload") == snapshot + assert ( + entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_overload") == snapshot + ) # Emulate websocket message: sensor turned on test_gateway.publisher.dispatch("Test", ("Test", True)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_door").state == STATE_ON + assert 
hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door").state == STATE_ON # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_door").state == STATE_UNAVAILABLE + assert ( + hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door").state == STATE_UNAVAILABLE + ) @pytest.mark.usefixtures("mock_zeroconf") @@ -69,25 +73,30 @@ async def test_remote_control( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_button_1") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_button_1") == snapshot + assert ( + entity_registry.async_get(f"{BINARY_SENSOR_DOMAIN}.test_button_1") == snapshot + ) # Emulate websocket message: button pressed test_gateway.publisher.dispatch("Test", ("Test", 1)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_ON + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state == STATE_ON # Emulate websocket message: button released test_gateway.publisher.dispatch("Test", ("Test", 0)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_OFF + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state == STATE_OFF # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_button_1").state == STATE_UNAVAILABLE + assert ( + hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_button_1").state + == STATE_UNAVAILABLE + ) @pytest.mark.usefixtures("mock_zeroconf") @@ -101,7 +110,7 @@ async def test_disabled(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_door") is None + assert hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") is None @pytest.mark.usefixtures("mock_zeroconf") @@ -116,7 +125,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_door") + state = hass.states.get(f"{BINARY_SENSOR_DOMAIN}.test_door") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_climate.py b/tests/components/devolo_home_control/test_climate.py index 953ff835b89..3aedda90e02 100644 --- a/tests/components/devolo_home_control/test_climate.py +++ b/tests/components/devolo_home_control/test_climate.py @@ -6,7 +6,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components.climate import ( ATTR_HVAC_MODE, - DOMAIN, + DOMAIN as CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, HVACMode, ) @@ -32,14 +32,14 @@ async def test_climate( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{CLIMATE_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{CLIMATE_DOMAIN}.test") == snapshot # Emulate websocket message: 
temperature changed test_gateway.publisher.dispatch("Test", ("Test", 21.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{CLIMATE_DOMAIN}.test") assert state.state == HVACMode.HEAT assert state.attributes[ATTR_TEMPERATURE] == 21.0 @@ -48,10 +48,10 @@ async def test_climate( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + CLIMATE_DOMAIN, SERVICE_SET_TEMPERATURE, { - ATTR_ENTITY_ID: f"{DOMAIN}.test", + ATTR_ENTITY_ID: f"{CLIMATE_DOMAIN}.test", ATTR_HVAC_MODE: HVACMode.HEAT, ATTR_TEMPERATURE: 20.0, }, @@ -63,7 +63,7 @@ async def test_climate( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{CLIMATE_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -77,7 +77,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{CLIMATE_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_cover.py b/tests/components/devolo_home_control/test_cover.py index c21dabadb1a..4560da9f7b7 100644 --- a/tests/components/devolo_home_control/test_cover.py +++ b/tests/components/devolo_home_control/test_cover.py @@ -4,7 +4,11 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN as COVER_DOMAIN, +) from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, @@ -34,14 +38,14 @@ async def test_cover( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{COVER_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{COVER_DOMAIN}.test") == snapshot # Emulate websocket message: position changed test_gateway.publisher.dispatch("Test", ("devolo.Blinds", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{COVER_DOMAIN}.test") assert state.state == STATE_CLOSED assert state.attributes[ATTR_CURRENT_POSITION] == 0.0 @@ -50,27 +54,27 @@ async def test_cover( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_OPEN_COVER, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(100) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_CLOSE_COVER, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above 
set_value.assert_called_once_with(0) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + COVER_DOMAIN, SERVICE_SET_COVER_POSITION, - {ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_POSITION: 50}, + {ATTR_ENTITY_ID: f"{COVER_DOMAIN}.test", ATTR_POSITION: 50}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(50) @@ -79,7 +83,7 @@ async def test_cover( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{COVER_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -93,7 +97,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{COVER_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_light.py b/tests/components/devolo_home_control/test_light.py index f72136ee287..46c3fbc98f3 100644 --- a/tests/components/devolo_home_control/test_light.py +++ b/tests/components/devolo_home_control/test_light.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN +from homeassistant.components.light import ATTR_BRIGHTNESS, DOMAIN as LIGHT_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -33,18 +33,18 @@ async def test_light_without_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{LIGHT_DOMAIN}.test") == snapshot # Emulate websocket message: brightness changed test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_OFF test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 100.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 255 @@ -53,27 +53,27 @@ async def test_light_without_binary_sensor( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(100) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(0) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + 
LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test", ATTR_BRIGHTNESS: 50}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test", ATTR_BRIGHTNESS: 50}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(round(50 / 255 * 100)) @@ -82,7 +82,7 @@ async def test_light_without_binary_sensor( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{LIGHT_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_light_with_binary_sensor( @@ -101,18 +101,18 @@ async def test_light_with_binary_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{LIGHT_DOMAIN}.test") == snapshot # Emulate websocket message: brightness changed test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 0.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_OFF test_gateway.publisher.dispatch("Test", ("devolo.Dimmer:Test", 100.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state.state == STATE_ON assert state.attributes[ATTR_BRIGHTNESS] == 255 @@ -121,18 +121,18 @@ async def test_light_with_binary_sensor( "devolo_home_control_api.properties.binary_switch_property.BinarySwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(True) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{LIGHT_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(False) @@ -149,7 +149,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{LIGHT_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_sensor.py b/tests/components/devolo_home_control/test_sensor.py index 62023982e81..08b53dae865 100644 --- a/tests/components/devolo_home_control/test_sensor.py +++ b/tests/components/devolo_home_control/test_sensor.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.sensor import DOMAIN +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -26,9 +26,9 @@ async def test_temperature_sensor( await hass.config_entries.async_setup(entry.entry_id) 
await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_temperature") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_temperature") == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_temperature") == snapshot async def test_battery_sensor( @@ -45,14 +45,14 @@ async def test_battery_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_battery_level") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_battery_level") == snapshot + assert entity_registry.async_get(f"{SENSOR_DOMAIN}.test_battery_level") == snapshot # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("Test", 10, "battery_level")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_battery_level").state == "10" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_battery_level").state == "10" async def test_consumption_sensor( @@ -68,29 +68,36 @@ async def test_consumption_sensor( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_current_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_current_consumption") == snapshot + assert ( + entity_registry.async_get(f"{SENSOR_DOMAIN}.test_current_consumption") + == snapshot + ) - state = hass.states.get(f"{DOMAIN}.test_total_consumption") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test_total_consumption") == snapshot + assert ( + entity_registry.async_get(f"{SENSOR_DOMAIN}.test_total_consumption") == snapshot + ) # Emulate websocket message: value changed test_gateway.devices["Test"].consumption_property["devolo.Meter:Test"].total = 50.0 test_gateway.publisher.dispatch("Test", ("devolo.Meter:Test", 50.0)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_total_consumption").state == "50.0" + assert hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() assert ( - hass.states.get(f"{DOMAIN}.test_current_consumption").state == STATE_UNAVAILABLE + hass.states.get(f"{SENSOR_DOMAIN}.test_current_consumption").state + == STATE_UNAVAILABLE ) assert ( - hass.states.get(f"{DOMAIN}.test_total_consumption").state == STATE_UNAVAILABLE + hass.states.get(f"{SENSOR_DOMAIN}.test_total_consumption").state + == STATE_UNAVAILABLE ) @@ -105,7 +112,7 @@ async def test_voltage_sensor(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_voltage") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_voltage") assert state is None @@ -123,14 +130,16 @@ async def test_sensor_change(hass: HomeAssistant) -> None: # Emulate websocket message: value changed test_gateway.publisher.dispatch("Test", ("devolo.MultiLevelSensor:Test", 50.0)) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_temperature") + state = 
hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") assert state.state == "50.0" # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test_temperature").state == STATE_UNAVAILABLE + assert ( + hass.states.get(f"{SENSOR_DOMAIN}.test_temperature").state == STATE_UNAVAILABLE + ) async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -144,7 +153,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test_temperature") + state = hass.states.get(f"{SENSOR_DOMAIN}.test_temperature") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_siren.py b/tests/components/devolo_home_control/test_siren.py index be662418967..71f4dfdd34d 100644 --- a/tests/components/devolo_home_control/test_siren.py +++ b/tests/components/devolo_home_control/test_siren.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.siren import DOMAIN +from homeassistant.components.siren import DOMAIN as SIREN_DOMAIN from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -29,20 +29,20 @@ async def test_siren( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot # Emulate websocket message: sensor turned on test_gateway.publisher.dispatch("Test", ("devolo.SirenMultiLevelSwitch:Test", 1)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_ON + assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_ON # Emulate websocket message: device went offline test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_UNAVAILABLE @pytest.mark.usefixtures("mock_zeroconf") @@ -60,9 +60,9 @@ async def test_siren_switching( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" @@ -70,7 +70,7 @@ async def test_siren_switching( await hass.services.async_call( "siren", "turn_on", - {"entity_id": f"{DOMAIN}.test"}, + {"entity_id": f"{SIREN_DOMAIN}.test"}, blocking=True, ) # The real device state is changed by a websocket message @@ -86,7 +86,7 @@ async def test_siren_switching( await hass.services.async_call( "siren", "turn_off", - {"entity_id": f"{DOMAIN}.test"}, + {"entity_id": 
f"{SIREN_DOMAIN}.test"}, blocking=True, ) # The real device state is changed by a websocket message @@ -94,7 +94,7 @@ async def test_siren_switching( "Test", ("devolo.SirenMultiLevelSwitch:Test", 0) ) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_OFF + assert hass.states.get(f"{SIREN_DOMAIN}.test").state == STATE_OFF property_set.assert_called_once_with(0) @@ -113,9 +113,9 @@ async def test_siren_change_default_tone( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SIREN_DOMAIN}.test") == snapshot with patch( "devolo_home_control_api.properties.multi_level_switch_property.MultiLevelSwitchProperty.set" @@ -124,7 +124,7 @@ async def test_siren_change_default_tone( await hass.services.async_call( "siren", "turn_on", - {"entity_id": f"{DOMAIN}.test"}, + {"entity_id": f"{SIREN_DOMAIN}.test"}, blocking=True, ) property_set.assert_called_once_with(2) @@ -142,7 +142,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SIREN_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_control/test_switch.py b/tests/components/devolo_home_control/test_switch.py index 86f93bfddf6..46adaf8c8b0 100644 --- a/tests/components/devolo_home_control/test_switch.py +++ b/tests/components/devolo_home_control/test_switch.py @@ -4,7 +4,7 @@ from unittest.mock import patch from syrupy.assertion import SnapshotAssertion -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_TURN_OFF, @@ -32,9 +32,9 @@ async def test_switch( await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SWITCH_DOMAIN}.test") assert state == snapshot - assert entity_registry.async_get(f"{DOMAIN}.test") == snapshot + assert entity_registry.async_get(f"{SWITCH_DOMAIN}.test") == snapshot # Emulate websocket message: switched on test_gateway.devices["Test"].binary_switch_property[ @@ -42,24 +42,24 @@ async def test_switch( ].state = True test_gateway.publisher.dispatch("Test", ("devolo.BinarySwitch:Test", True)) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_ON + assert hass.states.get(f"{SWITCH_DOMAIN}.test").state == STATE_ON with patch( "devolo_home_control_api.properties.binary_switch_property.BinarySwitchProperty.set" ) as set_value: await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_ON, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{SWITCH_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already tested above set_value.assert_called_once_with(state=True) set_value.reset_mock() await hass.services.async_call( - DOMAIN, + SWITCH_DOMAIN, SERVICE_TURN_OFF, - {ATTR_ENTITY_ID: f"{DOMAIN}.test"}, + {ATTR_ENTITY_ID: f"{SWITCH_DOMAIN}.test"}, blocking=True, ) # In reality, this leads to a websocket message like already 
tested above set_value.assert_called_once_with(state=False) @@ -68,7 +68,7 @@ async def test_switch( test_gateway.devices["Test"].status = 1 test_gateway.publisher.dispatch("Test", ("Status", False, "status")) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.test").state == STATE_UNAVAILABLE + assert hass.states.get(f"{SWITCH_DOMAIN}.test").state == STATE_UNAVAILABLE async def test_remove_from_hass(hass: HomeAssistant) -> None: @@ -82,7 +82,7 @@ async def test_remove_from_hass(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - state = hass.states.get(f"{DOMAIN}.test") + state = hass.states.get(f"{SWITCH_DOMAIN}.test") assert state is not None await hass.config_entries.async_remove(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_binary_sensor.py b/tests/components/devolo_home_network/test_binary_sensor.py index 3e4bf8471c1..8197ec1a1e5 100644 --- a/tests/components/devolo_home_network/test_binary_sensor.py +++ b/tests/components/devolo_home_network/test_binary_sensor.py @@ -7,7 +7,7 @@ from freezegun.api import FrozenDateTimeFactory import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.binary_sensor import DOMAIN +from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN from homeassistant.components.devolo_home_network.const import ( CONNECTED_TO_ROUTER, LONG_UPDATE_INTERVAL, @@ -31,7 +31,10 @@ async def test_binary_sensor_setup(hass: HomeAssistant) -> None: await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - assert hass.states.get(f"{DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}") is None + assert ( + hass.states.get(f"{BINARY_SENSOR_DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}") + is None + ) await hass.config_entries.async_unload(entry.entry_id) @@ -47,7 +50,7 @@ async def test_update_attached_to_router( """Test state change of a attached_to_router binary sensor device.""" entry = configure_integration(hass) device_name = entry.title.replace(" ", "_").lower() - state_key = f"{DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}" + state_key = f"{BINARY_SENSOR_DOMAIN}.{device_name}_{CONNECTED_TO_ROUTER}" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/devolo_home_network/test_image.py b/tests/components/devolo_home_network/test_image.py index 80efc4fcc09..f13db4fce9d 100644 --- a/tests/components/devolo_home_network/test_image.py +++ b/tests/components/devolo_home_network/test_image.py @@ -9,7 +9,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.devolo_home_network.const import SHORT_UPDATE_INTERVAL -from homeassistant.components.image import DOMAIN +from homeassistant.components.image import DOMAIN as IMAGE_DOMAIN from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -32,7 +32,9 @@ async def test_image_setup(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert ( - hass.states.get(f"{DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code") + hass.states.get( + f"{IMAGE_DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" + ) is not None ) @@ -51,7 +53,7 @@ async def test_guest_wifi_qr( """Test showing a QR code of the guest wifi credentials.""" entry = configure_integration(hass) device_name = entry.title.replace(" 
", "_").lower() - state_key = f"{DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" + state_key = f"{IMAGE_DOMAIN}.{device_name}_guest_wi_fi_credentials_as_qr_code" await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/doorbird/test_button.py b/tests/components/doorbird/test_button.py index cb4bab656ee..abb490e9180 100644 --- a/tests/components/doorbird/test_button.py +++ b/tests/components/doorbird/test_button.py @@ -1,6 +1,6 @@ """Test DoorBird buttons.""" -from homeassistant.components.button import DOMAIN, SERVICE_PRESS +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant @@ -16,7 +16,7 @@ async def test_relay_button( relay_1_entity_id = "button.mydoorbird_relay_1" assert hass.states.get(relay_1_entity_id).state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: relay_1_entity_id}, blocking=True ) assert hass.states.get(relay_1_entity_id).state != STATE_UNKNOWN assert doorbird_entry.api.energize_relay.call_count == 1 @@ -31,7 +31,7 @@ async def test_ir_button( ir_entity_id = "button.mydoorbird_ir" assert hass.states.get(ir_entity_id).state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: ir_entity_id}, blocking=True ) assert hass.states.get(ir_entity_id).state != STATE_UNKNOWN assert doorbird_entry.api.turn_light_on.call_count == 1 @@ -46,7 +46,7 @@ async def test_reset_favorites_button( reset_entity_id = "button.mydoorbird_reset_favorites" assert hass.states.get(reset_entity_id).state == STATE_UNKNOWN await hass.services.async_call( - DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: reset_entity_id}, blocking=True ) assert hass.states.get(reset_entity_id).state != STATE_UNKNOWN assert doorbird_entry.api.delete_favorite.call_count == 3 From 6ea59ffa946b5d96352ffa5de4e8393ddebd9311 Mon Sep 17 00:00:00 2001 From: epenet <6771947+epenet@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:21:01 +0200 Subject: [PATCH 1634/1635] Add alias to DOMAIN import in tests [h-m] (#125577) * Add alias to DOMAIN import in tests [h-m] * Revert changes to mqtt --- tests/components/home_connect/test_light.py | 6 +- tests/components/home_connect/test_switch.py | 10 +-- tests/components/homekit/test_type_covers.py | 24 ++--- tests/components/homekit/test_type_fans.py | 32 +++---- .../homekit/test_type_humidifiers.py | 18 ++-- tests/components/homekit/test_type_lights.py | 32 +++---- tests/components/homekit/test_type_locks.py | 8 +- .../homekit/test_type_media_players.py | 38 ++++---- tests/components/homekit/test_type_remote.py | 6 +- .../homekit/test_type_security_systems.py | 20 +++-- .../homekit_controller/test_climate.py | 90 +++++++++---------- .../homekit_controller/test_humidifier.py | 26 +++--- .../components/homematicip_cloud/test_lock.py | 4 +- tests/components/hydrawise/test_valve.py | 6 +- tests/components/knx/test_date.py | 8 +- tests/components/knx/test_datetime.py | 8 +- tests/components/knx/test_time.py | 8 +- 17 files changed, 189 insertions(+), 155 deletions(-) diff --git a/tests/components/home_connect/test_light.py b/tests/components/home_connect/test_light.py index 
8d918dc5815..f37eb71b8aa 100644 --- a/tests/components/home_connect/test_light.py +++ b/tests/components/home_connect/test_light.py @@ -13,7 +13,7 @@ from homeassistant.components.home_connect.const import ( COOKING_LIGHTING, COOKING_LIGHTING_BRIGHTNESS, ) -from homeassistant.components.light import DOMAIN +from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( SERVICE_TURN_OFF, @@ -176,7 +176,7 @@ async def test_light_functionality( appliance.status.update(status) service_data["entity_id"] = entity_id await hass.services.async_call( - DOMAIN, + LIGHT_DOMAIN, service, service_data, blocking=True, @@ -294,5 +294,5 @@ async def test_switch_exception_handling( problematic_appliance.status.update(status) service_data["entity_id"] = entity_id - await hass.services.async_call(DOMAIN, service, service_data, blocking=True) + await hass.services.async_call(LIGHT_DOMAIN, service, service_data, blocking=True) assert getattr(problematic_appliance, mock_attr).call_count == len(attr_side_effect) diff --git a/tests/components/home_connect/test_switch.py b/tests/components/home_connect/test_switch.py index 3ab550ad0af..d16a4626e59 100644 --- a/tests/components/home_connect/test_switch.py +++ b/tests/components/home_connect/test_switch.py @@ -15,7 +15,7 @@ from homeassistant.components.home_connect.const import ( BSH_POWER_STATE, REFRIGERATION_SUPERMODEFREEZER, ) -from homeassistant.components.switch import DOMAIN +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import ( ATTR_ENTITY_ID, @@ -139,7 +139,7 @@ async def test_switch_functionality( appliance.status.update(status) await hass.services.async_call( - DOMAIN, service, {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) assert hass.states.is_state(entity_id, state) @@ -213,7 +213,7 @@ async def test_switch_exception_handling( problematic_appliance.status.update(status) await hass.services.async_call( - DOMAIN, service, {"entity_id": entity_id}, blocking=True + SWITCH_DOMAIN, service, {"entity_id": entity_id}, blocking=True ) assert getattr(problematic_appliance, mock_attr).call_count == 2 @@ -268,7 +268,7 @@ async def test_ent_desc_switch_functionality( appliance.status.update(status) await hass.services.async_call( - DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert hass.states.is_state(entity_id, state) @@ -327,6 +327,6 @@ async def test_ent_desc_switch_exception_handling( problematic_appliance.status.update(status) await hass.services.async_call( - DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True + SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) assert getattr(problematic_appliance, mock_attr).call_count == 2 diff --git a/tests/components/homekit/test_type_covers.py b/tests/components/homekit/test_type_covers.py index b3125c6581c..8d3b13b1856 100644 --- a/tests/components/homekit/test_type_covers.py +++ b/tests/components/homekit/test_type_covers.py @@ -5,7 +5,7 @@ from homeassistant.components.cover import ( ATTR_CURRENT_TILT_POSITION, ATTR_POSITION, ATTR_TILT_POSITION, - DOMAIN, + DOMAIN as COVER_DOMAIN, CoverEntityFeature, ) from homeassistant.components.homekit.const import ( @@ -92,8 +92,8 @@ async def test_garage_door_open_close( assert acc.available is True # Set 
from HomeKit - call_close_cover = async_mock_service(hass, DOMAIN, "close_cover") - call_open_cover = async_mock_service(hass, DOMAIN, "open_cover") + call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover") + call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover") acc.char_target_state.client_update_value(1) await hass.async_block_till_done() @@ -272,7 +272,9 @@ async def test_windowcovering_set_cover_position( assert acc.char_position_state.value == 2 # Set from HomeKit - call_set_cover_position = async_mock_service(hass, DOMAIN, "set_cover_position") + call_set_cover_position = async_mock_service( + hass, COVER_DOMAIN, "set_cover_position" + ) acc.char_target_position.client_update_value(25) await hass.async_block_till_done() @@ -389,7 +391,7 @@ async def test_windowcovering_cover_set_tilt( # set from HomeKit call_set_tilt_position = async_mock_service( - hass, DOMAIN, SERVICE_SET_COVER_TILT_POSITION + hass, COVER_DOMAIN, SERVICE_SET_COVER_TILT_POSITION ) # HomeKit sets tilts between -90 and 90 (degrees), whereas @@ -488,8 +490,8 @@ async def test_windowcovering_open_close( assert acc.char_position_state.value == 2 # Set from HomeKit - call_close_cover = async_mock_service(hass, DOMAIN, "close_cover") - call_open_cover = async_mock_service(hass, DOMAIN, "open_cover") + call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover") + call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover") acc.char_target_position.client_update_value(25) await hass.async_block_till_done() @@ -536,9 +538,9 @@ async def test_windowcovering_open_close_stop( await hass.async_block_till_done() # Set from HomeKit - call_close_cover = async_mock_service(hass, DOMAIN, "close_cover") - call_open_cover = async_mock_service(hass, DOMAIN, "open_cover") - call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover") + call_close_cover = async_mock_service(hass, COVER_DOMAIN, "close_cover") + call_open_cover = async_mock_service(hass, COVER_DOMAIN, "open_cover") + call_stop_cover = async_mock_service(hass, COVER_DOMAIN, "stop_cover") acc.char_target_position.client_update_value(25) await hass.async_block_till_done() @@ -590,7 +592,7 @@ async def test_windowcovering_open_close_with_position_and_stop( await hass.async_block_till_done() # Set from HomeKit - call_stop_cover = async_mock_service(hass, DOMAIN, "stop_cover") + call_stop_cover = async_mock_service(hass, COVER_DOMAIN, "stop_cover") acc.char_hold_position.client_update_value(0) await hass.async_block_till_done() diff --git a/tests/components/homekit/test_type_fans.py b/tests/components/homekit/test_type_fans.py index 1808767c614..67392f11f14 100644 --- a/tests/components/homekit/test_type_fans.py +++ b/tests/components/homekit/test_type_fans.py @@ -11,7 +11,7 @@ from homeassistant.components.fan import ( ATTR_PRESET_MODES, DIRECTION_FORWARD, DIRECTION_REVERSE, - DOMAIN, + DOMAIN as FAN_DOMAIN, FanEntityFeature, ) from homeassistant.components.homekit.const import ATTR_VALUE, PROP_MIN_STEP @@ -63,8 +63,8 @@ async def test_fan_basic(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_active.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, FAN_DOMAIN, "turn_off") char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -144,7 +144,7 @@ async def test_fan_direction( assert 
acc.char_direction.value == 1 # Set from HomeKit - call_set_direction = async_mock_service(hass, DOMAIN, "set_direction") + call_set_direction = async_mock_service(hass, FAN_DOMAIN, "set_direction") char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID] @@ -218,7 +218,7 @@ async def test_fan_oscillate( assert acc.char_swing.value == 1 # Set from HomeKit - call_oscillate = async_mock_service(hass, DOMAIN, "oscillate") + call_oscillate = async_mock_service(hass, FAN_DOMAIN, "oscillate") char_swing_iid = acc.char_swing.to_HAP()[HAP_REPR_IID] @@ -301,7 +301,7 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_speed.value == 100 # Set from HomeKit - call_set_percentage = async_mock_service(hass, DOMAIN, "set_percentage") + call_set_percentage = async_mock_service(hass, FAN_DOMAIN, "set_percentage") char_speed_iid = acc.char_speed.to_HAP()[HAP_REPR_IID] char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -343,7 +343,7 @@ async def test_fan_speed(hass: HomeAssistant, hk_driver, events: list[Event]) -> assert acc.char_speed.value == 50 assert acc.char_active.value == 0 - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -409,11 +409,11 @@ async def test_fan_set_all_one_shot( assert hass.states.get(entity_id).state == STATE_OFF # Set from HomeKit - call_set_percentage = async_mock_service(hass, DOMAIN, "set_percentage") - call_oscillate = async_mock_service(hass, DOMAIN, "oscillate") - call_set_direction = async_mock_service(hass, DOMAIN, "set_direction") - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_set_percentage = async_mock_service(hass, FAN_DOMAIN, "set_percentage") + call_oscillate = async_mock_service(hass, FAN_DOMAIN, "oscillate") + call_set_direction = async_mock_service(hass, FAN_DOMAIN, "set_direction") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, FAN_DOMAIN, "turn_off") char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] char_direction_iid = acc.char_direction.to_HAP()[HAP_REPR_IID] @@ -641,8 +641,8 @@ async def test_fan_multiple_preset_modes( assert acc.preset_mode_chars["auto"].value == 0 assert acc.preset_mode_chars["smart"].value == 1 # Set from HomeKit - call_set_preset_mode = async_mock_service(hass, DOMAIN, "set_preset_mode") - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_set_preset_mode = async_mock_service(hass, FAN_DOMAIN, "set_preset_mode") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") char_auto_iid = acc.preset_mode_chars["auto"].to_HAP()[HAP_REPR_IID] @@ -711,8 +711,8 @@ async def test_fan_single_preset_mode( await hass.async_block_till_done() # Set from HomeKit - call_set_preset_mode = async_mock_service(hass, DOMAIN, "set_preset_mode") - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_set_preset_mode = async_mock_service(hass, FAN_DOMAIN, "set_preset_mode") + call_turn_on = async_mock_service(hass, FAN_DOMAIN, "turn_on") char_target_fan_state_iid = acc.char_target_fan_state.to_HAP()[HAP_REPR_IID] diff --git a/tests/components/homekit/test_type_humidifiers.py b/tests/components/homekit/test_type_humidifiers.py index fbb72333c9b..de563503b23 100644 --- a/tests/components/homekit/test_type_humidifiers.py +++ b/tests/components/homekit/test_type_humidifiers.py @@ -26,7 +26,7 @@ from 
homeassistant.components.humidifier import ( ATTR_MIN_HUMIDITY, DEFAULT_MAX_HUMIDITY, DEFAULT_MIN_HUMIDITY, - DOMAIN, + DOMAIN as HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY, HumidifierDeviceClass, ) @@ -106,7 +106,9 @@ async def test_humidifier(hass: HomeAssistant, hk_driver, events: list[Event]) - assert acc.char_active.value == 0 # Set from HomeKit - call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + call_set_humidity = async_mock_service( + hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY + ) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -194,7 +196,9 @@ async def test_dehumidifier( assert acc.char_active.value == 0 # Set from HomeKit - call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + call_set_humidity = async_mock_service( + hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY + ) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] @@ -257,7 +261,7 @@ async def test_hygrostat_power_state( assert acc.char_active.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, SERVICE_TURN_ON) + call_turn_on = async_mock_service(hass, HUMIDIFIER_DOMAIN, SERVICE_TURN_ON) char_active_iid = acc.char_active.to_HAP()[HAP_REPR_IID] @@ -281,7 +285,7 @@ async def test_hygrostat_power_state( assert len(events) == 1 assert events[-1].data[ATTR_VALUE] == "Active to 1" - call_turn_off = async_mock_service(hass, DOMAIN, SERVICE_TURN_OFF) + call_turn_off = async_mock_service(hass, HUMIDIFIER_DOMAIN, SERVICE_TURN_OFF) hk_driver.set_characteristics( { @@ -323,7 +327,9 @@ async def test_hygrostat_get_humidity_range( await hass.async_block_till_done() # Set from HomeKit - call_set_humidity = async_mock_service(hass, DOMAIN, SERVICE_SET_HUMIDITY) + call_set_humidity = async_mock_service( + hass, HUMIDIFIER_DOMAIN, SERVICE_SET_HUMIDITY + ) char_target_humidity_iid = acc.char_target_humidity.to_HAP()[HAP_REPR_IID] diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index 0f85e07c0bb..d365165aca4 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -27,7 +27,7 @@ from homeassistant.components.light import ( ATTR_RGBWW_COLOR, ATTR_SUPPORTED_COLOR_MODES, ATTR_WHITE, - DOMAIN, + DOMAIN as LIGHT_DOMAIN, ColorMode, ) from homeassistant.const import ( @@ -83,8 +83,8 @@ async def test_light_basic(hass: HomeAssistant, hk_driver, events: list[Event]) assert acc.char_on.value == 0 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") char_on_iid = acc.char_on.to_HAP()[HAP_REPR_IID] @@ -160,8 +160,8 @@ async def test_light_brightness( assert acc.char_brightness.value == 40 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") - call_turn_off = async_mock_service(hass, DOMAIN, "turn_off") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") + call_turn_off = async_mock_service(hass, LIGHT_DOMAIN, "turn_off") hk_driver.set_characteristics( { @@ -296,7 +296,7 @@ async def test_light_color_temperature( assert acc.char_color_temp.value == 190 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_color_temp_iid = 
acc.char_color_temp.to_HAP()[HAP_REPR_IID] @@ -372,7 +372,7 @@ async def test_light_color_temperature_and_rgb_color( char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -549,7 +549,7 @@ async def test_light_rgb_color( assert acc.char_saturation.value == 90 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -671,7 +671,7 @@ async def test_light_rgb_with_color_temp( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -791,7 +791,7 @@ async def test_light_rgbwx_with_color_temp_and_brightness( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_color_temp_iid = acc.char_color_temp.to_HAP()[HAP_REPR_IID] char_brightness_iid = acc.char_brightness.to_HAP()[HAP_REPR_IID] @@ -858,7 +858,7 @@ async def test_light_rgb_or_w_lights( assert acc.char_color_temp.value == 153 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -985,7 +985,7 @@ async def test_light_rgb_with_white_switch_to_temp( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1100,7 +1100,7 @@ async def test_light_rgbww_with_color_temp_conversion( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1221,7 +1221,7 @@ async def test_light_rgbw_with_color_temp_conversion( assert acc.char_brightness.value == 100 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") char_hue_iid = acc.char_hue.to_HAP()[HAP_REPR_IID] char_saturation_iid = acc.char_saturation.to_HAP()[HAP_REPR_IID] @@ -1325,7 +1325,7 @@ async def test_light_set_brightness_and_color( assert acc.char_saturation.value == 9 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") hk_driver.set_characteristics( { @@ -1432,7 +1432,7 @@ async def test_light_set_brightness_and_color_temp( assert acc.char_color_temp.value == 224 # Set from HomeKit - call_turn_on = async_mock_service(hass, DOMAIN, "turn_on") + call_turn_on = async_mock_service(hass, LIGHT_DOMAIN, "turn_on") hk_driver.set_characteristics( { 
diff --git a/tests/components/homekit/test_type_locks.py b/tests/components/homekit/test_type_locks.py
index 31f03b1964f..5b5b355d10f 100644
--- a/tests/components/homekit/test_type_locks.py
+++ b/tests/components/homekit/test_type_locks.py
@@ -5,7 +5,7 @@ import pytest
 from homeassistant.components.homekit.const import ATTR_VALUE
 from homeassistant.components.homekit.type_locks import Lock
 from homeassistant.components.lock import (
-    DOMAIN,
+    DOMAIN as LOCK_DOMAIN,
     STATE_JAMMED,
     STATE_LOCKING,
     STATE_UNLOCKING,
@@ -98,8 +98,8 @@ async def test_lock_unlock(hass: HomeAssistant, hk_driver, events: list[Event])
     assert acc.char_target_state.value == 0
 
     # Set from HomeKit
-    call_lock = async_mock_service(hass, DOMAIN, "lock")
-    call_unlock = async_mock_service(hass, DOMAIN, "unlock")
+    call_lock = async_mock_service(hass, LOCK_DOMAIN, "lock")
+    call_unlock = async_mock_service(hass, LOCK_DOMAIN, "unlock")
 
     acc.char_target_state.client_update_value(1)
     await hass.async_block_till_done()
@@ -132,7 +132,7 @@ async def test_no_code(
     acc = Lock(hass, hk_driver, "Lock", entity_id, 2, config)
 
     # Set from HomeKit
-    call_lock = async_mock_service(hass, DOMAIN, "lock")
+    call_lock = async_mock_service(hass, LOCK_DOMAIN, "lock")
 
     acc.char_target_state.client_update_value(1)
     await hass.async_block_till_done()
diff --git a/tests/components/homekit/test_type_media_players.py b/tests/components/homekit/test_type_media_players.py
index 14c21f0a5f5..78c35b15790 100644
--- a/tests/components/homekit/test_type_media_players.py
+++ b/tests/components/homekit/test_type_media_players.py
@@ -25,7 +25,7 @@ from homeassistant.components.media_player import (
     ATTR_INPUT_SOURCE_LIST,
     ATTR_MEDIA_VOLUME_LEVEL,
     ATTR_MEDIA_VOLUME_MUTED,
-    DOMAIN,
+    DOMAIN as MEDIA_PLAYER_DOMAIN,
     MediaPlayerDeviceClass,
 )
 from homeassistant.const import (
@@ -112,12 +112,12 @@ async def test_media_player_set_state(
     assert acc.chars[FEATURE_PLAY_STOP].value is False
 
     # Set from HomeKit
-    call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
-    call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
-    call_media_play = async_mock_service(hass, DOMAIN, "media_play")
-    call_media_pause = async_mock_service(hass, DOMAIN, "media_pause")
-    call_media_stop = async_mock_service(hass, DOMAIN, "media_stop")
-    call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute")
+    call_turn_on = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_on")
+    call_turn_off = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_off")
+    call_media_play = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_play")
+    call_media_pause = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_pause")
+    call_media_stop = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_stop")
+    call_toggle_mute = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_mute")
 
     acc.chars[FEATURE_ON_OFF].client_update_value(True)
     await hass.async_block_till_done()
@@ -252,16 +252,18 @@ async def test_media_player_television(
     assert caplog.records[-2].levelname == "DEBUG"
 
     # Set from HomeKit
-    call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
-    call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
-    call_media_play = async_mock_service(hass, DOMAIN, "media_play")
-    call_media_pause = async_mock_service(hass, DOMAIN, "media_pause")
-    call_media_play_pause = async_mock_service(hass, DOMAIN, "media_play_pause")
-    call_toggle_mute = async_mock_service(hass, DOMAIN, "volume_mute")
-    call_select_source = async_mock_service(hass, DOMAIN, "select_source")
-    call_volume_up = async_mock_service(hass, DOMAIN, "volume_up")
-    call_volume_down = async_mock_service(hass, DOMAIN, "volume_down")
-    call_volume_set = async_mock_service(hass, DOMAIN, "volume_set")
+    call_turn_on = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_on")
+    call_turn_off = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "turn_off")
+    call_media_play = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_play")
+    call_media_pause = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "media_pause")
+    call_media_play_pause = async_mock_service(
+        hass, MEDIA_PLAYER_DOMAIN, "media_play_pause"
+    )
+    call_toggle_mute = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_mute")
+    call_select_source = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "select_source")
+    call_volume_up = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_up")
+    call_volume_down = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_down")
+    call_volume_set = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "volume_set")
 
     acc.char_active.client_update_value(1)
     await hass.async_block_till_done()
@@ -634,7 +636,7 @@ async def test_media_player_television_unsafe_chars(
     await hass.async_block_till_done()
     assert acc.char_input_source.value == 1
 
-    call_select_source = async_mock_service(hass, DOMAIN, "select_source")
+    call_select_source = async_mock_service(hass, MEDIA_PLAYER_DOMAIN, "select_source")
 
     acc.char_input_source.client_update_value(3)
     await hass.async_block_till_done()
diff --git a/tests/components/homekit/test_type_remote.py b/tests/components/homekit/test_type_remote.py
index dedf3ae34db..62c45c6ee89 100644
--- a/tests/components/homekit/test_type_remote.py
+++ b/tests/components/homekit/test_type_remote.py
@@ -16,7 +16,7 @@ from homeassistant.components.remote import (
     ATTR_ACTIVITY,
     ATTR_ACTIVITY_LIST,
     ATTR_CURRENT_ACTIVITY,
-    DOMAIN,
+    DOMAIN as REMOTE_DOMAIN,
     RemoteEntityFeature,
 )
 from homeassistant.const import (
@@ -91,8 +91,8 @@ async def test_activity_remote(
     assert acc.char_input_source.value == 1
 
     # Set from HomeKit
-    call_turn_on = async_mock_service(hass, DOMAIN, "turn_on")
-    call_turn_off = async_mock_service(hass, DOMAIN, "turn_off")
+    call_turn_on = async_mock_service(hass, REMOTE_DOMAIN, "turn_on")
+    call_turn_off = async_mock_service(hass, REMOTE_DOMAIN, "turn_off")
 
     acc.char_active.client_update_value(1)
     await hass.async_block_till_done()
diff --git a/tests/components/homekit/test_type_security_systems.py b/tests/components/homekit/test_type_security_systems.py
index 27580949ec2..eb662823b4c 100644
--- a/tests/components/homekit/test_type_security_systems.py
+++ b/tests/components/homekit/test_type_security_systems.py
@@ -4,7 +4,7 @@ from pyhap.loader import get_loader
 import pytest
 
 from homeassistant.components.alarm_control_panel import (
-    DOMAIN,
+    DOMAIN as ALARM_CONTROL_PANEL_DOMAIN,
     AlarmControlPanelEntityFeature,
 )
 from homeassistant.components.homekit.const import ATTR_VALUE
@@ -77,10 +77,16 @@ async def test_switch_set_state(
     assert acc.char_current_state.value == 4
 
     # Set from HomeKit
-    call_arm_home = async_mock_service(hass, DOMAIN, "alarm_arm_home")
-    call_arm_away = async_mock_service(hass, DOMAIN, "alarm_arm_away")
-    call_arm_night = async_mock_service(hass, DOMAIN, "alarm_arm_night")
-    call_disarm = async_mock_service(hass, DOMAIN, "alarm_disarm")
+    call_arm_home = async_mock_service(
+        hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_home"
+    )
+    call_arm_away = async_mock_service(
+        hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_away"
+    )
+    call_arm_night = async_mock_service(
+        hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_night"
+    )
+    call_disarm = async_mock_service(hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_disarm")
 
     acc.char_target_state.client_update_value(0)
     await hass.async_block_till_done()
@@ -131,7 +137,9 @@ async def test_no_alarm_code(
     acc = SecuritySystem(hass, hk_driver, "SecuritySystem", entity_id, 2, config)
 
     # Set from HomeKit
-    call_arm_home = async_mock_service(hass, DOMAIN, "alarm_arm_home")
+    call_arm_home = async_mock_service(
+        hass, ALARM_CONTROL_PANEL_DOMAIN, "alarm_arm_home"
+    )
 
     acc.char_target_state.client_update_value(0)
     await hass.async_block_till_done()
diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py
index 29033887953..76935d314a5 100644
--- a/tests/components/homekit_controller/test_climate.py
+++ b/tests/components/homekit_controller/test_climate.py
@@ -13,7 +13,7 @@ from aiohomekit.model.characteristics import (
 from aiohomekit.model.services import ServicesTypes
 
 from homeassistant.components.climate import (
-    DOMAIN,
+    DOMAIN as CLIMATE_DOMAIN,
     SERVICE_SET_FAN_MODE,
     SERVICE_SET_HUMIDITY,
     SERVICE_SET_HVAC_MODE,
@@ -113,7 +113,7 @@ async def test_climate_change_thermostat_state(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
@@ -126,7 +126,7 @@ async def test_climate_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL},
         blocking=True,
@@ -139,7 +139,7 @@ async def test_climate_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
@@ -152,7 +152,7 @@ async def test_climate_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF},
         blocking=True,
@@ -165,7 +165,7 @@ async def test_climate_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_FAN_MODE,
         {"entity_id": "climate.testdevice", "fan_mode": "on"},
         blocking=True,
@@ -178,7 +178,7 @@ async def test_climate_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_FAN_MODE,
         {"entity_id": "climate.testdevice", "fan_mode": "auto"},
         blocking=True,
@@ -198,7 +198,7 @@ async def test_climate_check_min_max_values_per_mode(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
@@ -208,7 +208,7 @@ async def test_climate_check_min_max_values_per_mode(
     assert climate_state.attributes["max_temp"] == 35
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL},
         blocking=True,
@@ -218,7 +218,7 @@ async def test_climate_check_min_max_values_per_mode(
     assert climate_state.attributes["max_temp"] == 35
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
@@ -235,7 +235,7 @@ async def test_climate_change_thermostat_temperature(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {"entity_id": "climate.testdevice", "temperature": 21},
         blocking=True,
@@ -248,7 +248,7 @@ async def test_climate_change_thermostat_temperature(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {"entity_id": "climate.testdevice", "temperature": 25},
         blocking=True,
@@ -268,14 +268,14 @@ async def test_climate_change_thermostat_temperature_range(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {
             "entity_id": "climate.testdevice",
@@ -303,14 +303,14 @@ async def test_climate_change_thermostat_temperature_range_iphone(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {
             "entity_id": "climate.testdevice",
@@ -338,14 +338,14 @@ async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {
             "entity_id": "climate.testdevice",
@@ -399,7 +399,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
@@ -409,7 +409,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device(
     assert climate_state.attributes["max_temp"] == 35
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL},
         blocking=True,
@@ -419,7 +419,7 @@ async def test_climate_check_min_max_values_per_mode_sspa_device(
     assert climate_state.attributes["max_temp"] == 35
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
@@ -438,14 +438,14 @@ async def test_climate_set_thermostat_temp_on_sspa_device(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {"entity_id": "climate.testdevice", "temperature": 21},
         blocking=True,
@@ -458,7 +458,7 @@ async def test_climate_set_thermostat_temp_on_sspa_device(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
@@ -471,7 +471,7 @@ async def test_climate_set_thermostat_temp_on_sspa_device(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {
             "entity_id": "climate.testdevice",
@@ -496,7 +496,7 @@ async def test_climate_set_mode_via_temp(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {
             "entity_id": "climate.testdevice",
@@ -514,7 +514,7 @@ async def test_climate_set_mode_via_temp(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {
             "entity_id": "climate.testdevice",
@@ -539,7 +539,7 @@ async def test_climate_change_thermostat_humidity(
     helper = await setup_test_component(hass, get_next_aid(), create_thermostat_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HUMIDITY,
         {"entity_id": "climate.testdevice", "humidity": 50},
         blocking=True,
@@ -552,7 +552,7 @@ async def test_climate_change_thermostat_humidity(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HUMIDITY,
         {"entity_id": "climate.testdevice", "humidity": 45},
         blocking=True,
@@ -768,7 +768,7 @@ async def test_heater_cooler_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
@@ -781,7 +781,7 @@ async def test_heater_cooler_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL},
         blocking=True,
@@ -794,7 +794,7 @@ async def test_heater_cooler_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT_COOL},
         blocking=True,
@@ -807,7 +807,7 @@ async def test_heater_cooler_change_thermostat_state(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF},
         blocking=True,
@@ -832,7 +832,7 @@ async def test_can_turn_on_after_off(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF},
         blocking=True,
@@ -845,7 +845,7 @@ async def test_can_turn_on_after_off(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
@@ -868,13 +868,13 @@ async def test_heater_cooler_change_thermostat_temperature(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
         blocking=True,
     )
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {"entity_id": "climate.testdevice", "temperature": 20},
         blocking=True,
@@ -887,13 +887,13 @@ async def test_heater_cooler_change_thermostat_temperature(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL},
         blocking=True,
     )
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_TEMPERATURE,
         {"entity_id": "climate.testdevice", "temperature": 26},
         blocking=True,
@@ -915,13 +915,13 @@ async def test_heater_cooler_change_fan_speed(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_HVAC_MODE,
         {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.COOL},
         blocking=True,
     )
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_FAN_MODE,
         {"entity_id": "climate.testdevice", "fan_mode": "low"},
         blocking=True,
@@ -933,7 +933,7 @@ async def test_heater_cooler_change_fan_speed(
         },
     )
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_FAN_MODE,
         {"entity_id": "climate.testdevice", "fan_mode": "medium"},
         blocking=True,
@@ -945,7 +945,7 @@ async def test_heater_cooler_change_fan_speed(
         },
     )
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_FAN_MODE,
         {"entity_id": "climate.testdevice", "fan_mode": "high"},
         blocking=True,
@@ -1121,7 +1121,7 @@ async def test_heater_cooler_change_swing_mode(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_SWING_MODE,
         {"entity_id": "climate.testdevice", "swing_mode": "vertical"},
         blocking=True,
@@ -1134,7 +1134,7 @@ async def test_heater_cooler_change_swing_mode(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        CLIMATE_DOMAIN,
         SERVICE_SET_SWING_MODE,
         {"entity_id": "climate.testdevice", "swing_mode": "off"},
         blocking=True,
diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py
index 4b429959c67..07bdb8a2e38 100644
--- a/tests/components/homekit_controller/test_humidifier.py
+++ b/tests/components/homekit_controller/test_humidifier.py
@@ -6,7 +6,11 @@ from aiohomekit.model import Accessory
 from aiohomekit.model.characteristics import CharacteristicsTypes
 from aiohomekit.model.services import Service, ServicesTypes
 
-from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL
+from homeassistant.components.humidifier import (
+    DOMAIN as HUMIDIFIER_DOMAIN,
+    MODE_AUTO,
+    MODE_NORMAL,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
 
@@ -74,7 +78,7 @@ async def test_humidifier_active_state(
     helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service)
 
     await hass.services.async_call(
-        DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True
+        HUMIDIFIER_DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True
     )
 
     helper.async_assert_service_values(
@@ -83,7 +87,7 @@ async def test_humidifier_active_state(
     )
 
     await hass.services.async_call(
-        DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True
+        HUMIDIFIER_DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True
     )
 
     helper.async_assert_service_values(
@@ -101,7 +105,7 @@ async def test_dehumidifier_active_state(
     )
 
     await hass.services.async_call(
-        DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True
+        HUMIDIFIER_DOMAIN, "turn_on", {"entity_id": helper.entity_id}, blocking=True
    )
 
     helper.async_assert_service_values(
@@ -110,7 +114,7 @@ async def test_dehumidifier_active_state(
     )
 
     await hass.services.async_call(
-        DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True
+        HUMIDIFIER_DOMAIN, "turn_off", {"entity_id": helper.entity_id}, blocking=True
     )
 
     helper.async_assert_service_values(
@@ -208,7 +212,7 @@ async def test_humidifier_set_humidity(
     helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        HUMIDIFIER_DOMAIN,
         "set_humidity",
         {"entity_id": helper.entity_id, "humidity": 20},
         blocking=True,
@@ -228,7 +232,7 @@ async def test_dehumidifier_set_humidity(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        HUMIDIFIER_DOMAIN,
         "set_humidity",
         {"entity_id": helper.entity_id, "humidity": 20},
         blocking=True,
@@ -246,7 +250,7 @@ async def test_humidifier_set_mode(
     helper = await setup_test_component(hass, get_next_aid(), create_humidifier_service)
 
     await hass.services.async_call(
-        DOMAIN,
+        HUMIDIFIER_DOMAIN,
         "set_mode",
         {"entity_id": helper.entity_id, "mode": MODE_AUTO},
         blocking=True,
@@ -260,7 +264,7 @@ async def test_humidifier_set_mode(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        HUMIDIFIER_DOMAIN,
         "set_mode",
         {"entity_id": helper.entity_id, "mode": MODE_NORMAL},
         blocking=True,
@@ -283,7 +287,7 @@ async def test_dehumidifier_set_mode(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        HUMIDIFIER_DOMAIN,
         "set_mode",
         {"entity_id": helper.entity_id, "mode": MODE_AUTO},
         blocking=True,
@@ -297,7 +301,7 @@ async def test_dehumidifier_set_mode(
     )
 
     await hass.services.async_call(
-        DOMAIN,
+        HUMIDIFIER_DOMAIN,
         "set_mode",
         {"entity_id": helper.entity_id, "mode": MODE_NORMAL},
         blocking=True,
diff --git a/tests/components/homematicip_cloud/test_lock.py b/tests/components/homematicip_cloud/test_lock.py
index 7035cf979c4..4eef4526a7a 100644
--- a/tests/components/homematicip_cloud/test_lock.py
+++ b/tests/components/homematicip_cloud/test_lock.py
@@ -7,7 +7,7 @@ import pytest
 
 from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN
 from homeassistant.components.lock import (
-    DOMAIN,
+    DOMAIN as LOCK_DOMAIN,
     STATE_LOCKING,
     STATE_UNLOCKING,
     LockEntityFeature,
@@ -23,7 +23,7 @@ from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entit
 async def test_manually_configured_platform(hass: HomeAssistant) -> None:
     """Test that we do not set up an access point."""
     assert await async_setup_component(
-        hass, DOMAIN, {DOMAIN: {"platform": HMIPC_DOMAIN}}
+        hass, LOCK_DOMAIN, {LOCK_DOMAIN: {"platform": HMIPC_DOMAIN}}
     )
     assert not hass.data.get(HMIPC_DOMAIN)
 
diff --git a/tests/components/hydrawise/test_valve.py b/tests/components/hydrawise/test_valve.py
index 918fae00017..7d769f920e6 100644
--- a/tests/components/hydrawise/test_valve.py
+++ b/tests/components/hydrawise/test_valve.py
@@ -6,7 +6,7 @@ from unittest.mock import AsyncMock, patch
 from pydrawise.schema import Zone
 from syrupy.assertion import SnapshotAssertion
 
-from homeassistant.components.valve import DOMAIN
+from homeassistant.components.valve import DOMAIN as VALVE_DOMAIN
 from homeassistant.const import (
     ATTR_ENTITY_ID,
     SERVICE_CLOSE_VALVE,
@@ -42,7 +42,7 @@ async def test_services(
 ) -> None:
     """Test valve services."""
     await hass.services.async_call(
-        DOMAIN,
+        VALVE_DOMAIN,
         SERVICE_OPEN_VALVE,
         service_data={ATTR_ENTITY_ID: "valve.zone_one"},
         blocking=True,
@@ -51,7 +51,7 @@ async def test_services(
     mock_pydrawise.reset_mock()
 
     await hass.services.async_call(
-        DOMAIN,
+        VALVE_DOMAIN,
         SERVICE_CLOSE_VALVE,
         service_data={ATTR_ENTITY_ID: "valve.zone_one"},
         blocking=True,
diff --git a/tests/components/knx/test_date.py b/tests/components/knx/test_date.py
index d3b1ff2058e..1e6e5102bcf 100644
--- a/tests/components/knx/test_date.py
+++ b/tests/components/knx/test_date.py
@@ -1,6 +1,10 @@
 """Test KNX date."""
 
-from homeassistant.components.date import ATTR_DATE, DOMAIN, SERVICE_SET_VALUE
+from homeassistant.components.date import (
+    ATTR_DATE,
+    DOMAIN as DATE_DOMAIN,
+    SERVICE_SET_VALUE,
+)
 from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS
 from homeassistant.components.knx.schema import DateSchema
 from homeassistant.const import CONF_NAME
@@ -24,7 +28,7 @@ async def test_date(hass: HomeAssistant, knx: KNXTestKit) -> None:
     )
     # set value
     await hass.services.async_call(
-        DOMAIN,
+        DATE_DOMAIN,
         SERVICE_SET_VALUE,
         {"entity_id": "date.test", ATTR_DATE: "1999-03-31"},
         blocking=True,
diff --git a/tests/components/knx/test_datetime.py b/tests/components/knx/test_datetime.py
index 4b66769a8a3..025145ad1a3 100644
--- a/tests/components/knx/test_datetime.py
+++ b/tests/components/knx/test_datetime.py
@@ -1,6 +1,10 @@
 """Test KNX date."""
 
-from homeassistant.components.datetime import ATTR_DATETIME, DOMAIN, SERVICE_SET_VALUE
+from homeassistant.components.datetime import (
+    ATTR_DATETIME,
+    DOMAIN as DATETIME_DOMAIN,
+    SERVICE_SET_VALUE,
+)
 from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS
 from homeassistant.components.knx.schema import DateTimeSchema
 from homeassistant.const import CONF_NAME
@@ -27,7 +31,7 @@ async def test_datetime(hass: HomeAssistant, knx: KNXTestKit) -> None:
     )
     # set value
     await hass.services.async_call(
-        DOMAIN,
+        DATETIME_DOMAIN,
         SERVICE_SET_VALUE,
         {"entity_id": "datetime.test", ATTR_DATETIME: "2020-01-02T03:04:05+00:00"},
         blocking=True,
diff --git a/tests/components/knx/test_time.py b/tests/components/knx/test_time.py
index 9dc4c401ed8..05f84339742 100644
--- a/tests/components/knx/test_time.py
+++ b/tests/components/knx/test_time.py
@@ -2,7 +2,11 @@
 
 from homeassistant.components.knx.const import CONF_RESPOND_TO_READ, KNX_ADDRESS
 from homeassistant.components.knx.schema import TimeSchema
-from homeassistant.components.time import ATTR_TIME, DOMAIN, SERVICE_SET_VALUE
+from homeassistant.components.time import (
+    ATTR_TIME,
+    DOMAIN as TIME_DOMAIN,
+    SERVICE_SET_VALUE,
+)
 from homeassistant.const import CONF_NAME
 from homeassistant.core import HomeAssistant, State
 
@@ -24,7 +28,7 @@ async def test_time(hass: HomeAssistant, knx: KNXTestKit) -> None:
     )
     # set value
     await hass.services.async_call(
-        DOMAIN,
+        TIME_DOMAIN,
         SERVICE_SET_VALUE,
         {"entity_id": "time.test", ATTR_TIME: "01:02:03"},
         blocking=True,

From 0459596e97dd15d9dfd92f310cef7b075bc3b596 Mon Sep 17 00:00:00 2001
From: Robert Resch
Date: Mon, 9 Sep 2024 16:02:44 +0200
Subject: [PATCH 1635/1635] Enable hadolint for hassfest docker image and
 adjust hadolint job (#125146)

---
 .github/workflows/ci.yaml | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index d35187a3c45..84ee815c087 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -434,13 +434,17 @@ jobs:
     runs-on: ubuntu-24.04
     needs:
       - info
-      - pre-commit
+    if: |
+      github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
     strategy:
       fail-fast: false
      matrix:
         file:
           - Dockerfile
           - Dockerfile.dev
+          - script/hassfest/docker/Dockerfile
     steps:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.1.7